repo_name | path | size | content | license
---|---|---|---|---
quanla/quan-util-core
|
src/main/java/qj/tool/scheduler/Scheduler.java
|
2034
|
package qj.tool.scheduler;
import java.io.File;
import java.text.DecimalFormat;
import java.util.Calendar;
import java.util.TimeZone;
import java.util.regex.Matcher;
import qj.util.PropertiesUtil;
import qj.util.RegexUtil;
import qj.util.StringUtil;
import qj.util.bean.AValue;
import qj.util.funct.P0;
public class Scheduler {
private String pattern;
private AValue<String> persist;
private P0 action;
// Returns a task intended to be polled periodically (roughly every 5
// minutes); the action runs at most once per day, inside the configured window.
public P0 every5m() {
Matcher m;
if ((m=RegexUtil.matcher("(\\d\\d):(\\d\\d) ([\\w/]+) every day", pattern)).matches()) {
final int hour = Integer.parseInt(m.group(1));
final int minute = Integer.parseInt(m.group(2));
final TimeZone tz = TimeZone.getTimeZone(m.group(3));
return new P0() {public void e() {
Calendar ca = Calendar.getInstance(tz);
int nowHour = ca.get(Calendar.HOUR_OF_DAY);
int nowMinute = ca.get(Calendar.MINUTE);
// Tolerate polling jitter: accept any poll within 8 minutes after the scheduled minute.
if (nowHour == hour && nowMinute >= minute && nowMinute <= minute + 8) {
DecimalFormat nf = new DecimalFormat("00");
// Daily idempotence key in "dd/MM" form, compared against the persisted last run.
String today = nf.format(ca.get(Calendar.DAY_OF_MONTH)) + "/" + nf.format((ca.get(Calendar.MONTH) + 1));
String lastRun = persist.get();
if (StringUtil.isEmpty(lastRun)
|| !lastRun.equals(today)
) {
action.e();
persist.set(today);
}
}
}};
}
throw new RuntimeException("Unrecognized schedule pattern: " + pattern);
}
public Scheduler(String pattern, AValue<String> persist, P0 action) {
this.pattern = pattern;
this.persist = persist;
this.action = action;
}
public static void main(String[] args) {
// System.out.println(TimeZone.getDefault());
// System.out.println(TimeZone.getTimeZone("Asia/Bangkok"));
// System.out.println(Arrays.toString(TimeZone.getAvailableIDs()));
AValue<String> persist = PropertiesUtil.persistValue("last_run", new File("temp.properties"));
Scheduler scheduler = new Scheduler("11:38 Asia/Bangkok every day", persist, new P0() {public void e() {
System.out.println("Running");
}});
P0 every5m = scheduler.every5m();
every5m.e();
every5m.e();
}
}
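// Usage sketch (hypothetical polling loop, not part of the original file):
// the returned P0 is meant to be invoked on a fixed short interval, which
// is why the time check above tolerates a several-minute window:
//
//   P0 task = scheduler.every5m();
//   ScheduledExecutorService exec = Executors.newSingleThreadScheduledExecutor();
//   exec.scheduleAtFixedRate(task::e, 0, 5, TimeUnit.MINUTES);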
|
apache-2.0
|
reben/AncestraRemake
|
revision55/src/game/objects/Sort.java
|
8286
|
package objects;
import game.GameServer;
import java.util.ArrayList;
import java.util.Map;
import java.util.TreeMap;
import common.Constants;
import common.Formulas;
import common.Pathfinding;
import common.World;
import objects.Carte.Case;
import objects.Fight.Fighter;
public class Sort {
private int spellID;
private int spriteID;
private String spriteInfos;
private Map<Integer,SortStats> sortStats = new TreeMap<Integer,SortStats>();
private ArrayList<Integer> effectTargets = new ArrayList<Integer>();
private ArrayList<Integer> CCeffectTargets = new ArrayList<Integer>();
public static class SortStats
{
private int spellID;
private int level;
private int PACost;
private int minPO;
private int maxPO;
private int TauxCC;
private int TauxEC;
private boolean isLineLaunch;
private boolean hasLDV;
private boolean isEmptyCell;
private boolean isModifPO;
private int maxLaunchbyTurn;
private int maxLaunchbyByTarget;
private int coolDown;
private int reqLevel;
private boolean isEcEndTurn;
private ArrayList<SpellEffect> effects;
private ArrayList<SpellEffect> CCeffects;
private String porteeType;
public SortStats(int AspellID,int Alevel,int cost, int minPO, int maxPO, int tauxCC,int tauxEC, boolean isLineLaunch, boolean hasLDV,
boolean isEmptyCell, boolean isModifPO, int maxLaunchbyTurn,int maxLaunchbyByTarget, int coolDown,
int reqLevel,boolean isEcEndTurn, String effects,String ceffects,String typePortee)
{
this.spellID = AspellID;
this.level = Alevel;
this.PACost = cost;
this.minPO = minPO;
this.maxPO = maxPO;
this.TauxCC = tauxCC;
this.TauxEC = tauxEC;
this.isLineLaunch = isLineLaunch;
this.hasLDV = hasLDV;
this.isEmptyCell = isEmptyCell;
this.isModifPO = isModifPO;
this.maxLaunchbyTurn = maxLaunchbyTurn;
this.maxLaunchbyByTarget = maxLaunchbyByTarget;
this.coolDown = coolDown;
this.reqLevel = reqLevel;
this.isEcEndTurn = isEcEndTurn;
this.effects = parseEffect(effects);
this.CCeffects = parseEffect(ceffects);
this.porteeType = typePortee;
}
private ArrayList<SpellEffect> parseEffect(String e)
{
ArrayList<SpellEffect> effets = new ArrayList<SpellEffect>();
String[] splt = e.split("\\|");
for(String a : splt)
{
try
{
if(a.equals("-1"))continue;// "-1" denotes an empty effect entry
int id = Integer.parseInt(a.split(";",2)[0]);
String args = a.split(";",2)[1];
effets.add(new SpellEffect(id, args,spellID,level));
}catch(Exception f){f.printStackTrace();System.out.println(a);System.exit(1);};
}
return effets;
}
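// Format sketch (hypothetical data): an effect string such as
// "100;1,10,0|101;2,5,0" parses into two SpellEffect entries,
// id=100 with args "1,10,0" and id=101 with args "2,5,0";
// the literal string "-1" denotes an empty effect list.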
public int getSpellID() {
return spellID;
}
public Sort getSpell()
{
return World.getSort(spellID);
}
public int getSpriteID()
{
return getSpell().getSpriteID();
}
public String getSpriteInfos()
{
return getSpell().getSpriteInfos();
}
public int getLevel() {
return level;
}
public int getPACost() {
return PACost;
}
public int getMinPO() {
return minPO;
}
public int getMaxPO() {
return maxPO;
}
public int getTauxCC() {
return TauxCC;
}
public int getTauxEC() {
return TauxEC;
}
public boolean isLineLaunch() {
return isLineLaunch;
}
public boolean hasLDV() {
return hasLDV;
}
public boolean isEmptyCell() {
return isEmptyCell;
}
public boolean isModifPO() {
return isModifPO;
}
public int getMaxLaunchbyTurn() {
return maxLaunchbyTurn;
}
public int getMaxLaunchbyByTarget() {
return maxLaunchbyByTarget;
}
public int getCoolDown() {
return coolDown;
}
public int getReqLevel() {
return reqLevel;
}
public boolean isEcEndTurn() {
return isEcEndTurn;
}
public ArrayList<SpellEffect> getEffects() {
return effects;
}
public ArrayList<SpellEffect> getCCeffects() {
return CCeffects;
}
public String getPorteeType() {
return porteeType;
}
public void applySpellEffectToFight(Fight fight, Fighter perso,Case cell,ArrayList<Case> cells,boolean isCC)
{
// Only called by traps, excluding trap spells
ArrayList<SpellEffect> effets;
if(isCC)
effets = CCeffects;
else
effets = effects;
GameServer.addToLog("Nombre d'effets: "+effets.size());
int jetChance = Formulas.getRandomValue(0, 99);
int curMin = 0;
for(SpellEffect SE : effets)
{
if(SE.getChance() != 0 && SE.getChance() != 100)// If the effect does not always trigger
{
if(jetChance <= curMin || jetChance >= (SE.getChance() + curMin))
{
curMin += SE.getChance();
continue;
}
curMin += SE.getChance();
}
ArrayList<Fighter> cibles = SpellEffect.getTargets(SE,fight,cells);
SE.applyToFight(fight, perso, cell,cibles);
}
}
public void applySpellEffectToFight(Fight fight, Fighter perso,Case cell,boolean isCC)
{
ArrayList<SpellEffect> effets;
if(isCC)
effets = CCeffects;
else
effets = effects;
GameServer.addToLog("Nombre d'effets: "+effets.size());
int jetChance = Formulas.getRandomValue(0, 99);
int curMin = 0;
int num = 0;
for(SpellEffect SE : effets)
{
if(fight.get_state()>=Constants.FIGHT_STATE_FINISHED)return;
if(SE.getChance() != 0 && SE.getChance() != 100)// If the effect does not always trigger
{
if(jetChance <= curMin || jetChance >= (SE.getChance() + curMin))
{
curMin += SE.getChance();
continue;
}
curMin += SE.getChance();
}
int POnum = num*2;
if(isCC)
{
POnum += effects.size()*2;// Skip past the non-critical effects' portion of the effect string
}
ArrayList<Case> cells = Pathfinding.getCellListFromAreaString(fight.get_map(),cell.getID(),perso.get_fightCell().getID(),porteeType,POnum,isCC);
ArrayList<Case> finalCells = new ArrayList<Case>();
int TE = 0;
Sort S = World.getSort(spellID);
// Take the targetFlag corresponding to this effect's index
if(S != null && S.getEffectTargets().size() > num)TE = S.getEffectTargets().get(num);
for(Case C : cells)
{
if(C == null)continue;
Fighter F = C.getFirstFighter();
if(F == null)continue;
// Does not hit allies
if(((TE & 1) == 1) && (F.getTeam() == perso.getTeam()))continue;
// Does not hit the caster
if((((TE>>1) & 1) == 1) && (F.getGUID() == perso.getGUID()))continue;
// Does not hit enemies
if((((TE>>2) & 1) == 1) && (F.getTeam() != perso.getTeam()))continue;
// Does not hit regular fighters (summons only)
if((((TE>>3) & 1) == 1) && (!F.isInvocation()))continue;
// Does not hit summons
if((((TE>>4) & 1) == 1) && (F.isInvocation()))continue;
// Only affects the caster
if((((TE>>5) & 1) == 1) && (F.getGUID() != perso.getGUID()))continue;
// No continue was hit, so this cell is a valid target
finalCells.add(C);
}
// If the spell only affects the caster and the caster is not in the area
if(((TE>>5) & 1) == 1)if(!finalCells.contains(perso.get_fightCell()))finalCells.add(perso.get_fightCell());
ArrayList<Fighter> cibles = SpellEffect.getTargets(SE,fight,finalCells);
SE.applyToFight(fight, perso, cell,cibles);
num++;
}
}
}
public Sort(int aspellID, int aspriteID, String aspriteInfos,String ET)
{
spellID = aspellID;
spriteID = aspriteID;
spriteInfos = aspriteInfos;
String nET = ET.split(":")[0];
String ccET = "";
if(ET.split(":").length>1)ccET = ET.split(":")[1];
for(String num : nET.split(";"))
{
try
{
effectTargets.add(Integer.parseInt(num));
}catch(Exception e)
{
effectTargets.add(0);
continue;
};
}
for(String num : ccET.split(";"))
{
try
{
CCeffectTargets.add(Integer.parseInt(num));
}catch(Exception e)
{
CCeffectTargets.add(0);
continue;
};
}
}
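// Format sketch (hypothetical data): ET = "1;0;4:2" yields
// effectTargets [1, 0, 4] (one bit-flag word per normal effect) and
// CCeffectTargets [2] for the critical-hit effects; tokens that fail
// to parse default to 0 (no targeting restriction).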
public ArrayList<Integer> getEffectTargets()
{
return effectTargets;
}
public int getSpriteID() {
return spriteID;
}
public String getSpriteInfos() {
return spriteInfos;
}
public int getSpellID() {
return spellID;
}
public SortStats getStatsByLevel(int lvl)
{
return sortStats.get(lvl);
}
public void addSortStats(Integer lvl,SortStats stats)
{
if(sortStats.get(lvl) != null)return;
sortStats.put(lvl,stats);
}
}
|
apache-2.0
|
AdityaMili95/Wallte
|
vendor/github.com/knq/chromedp/cdp/network/network.go
|
29727
|
// Package network provides the Chrome Debugging Protocol
// commands, types, and events for the Network domain.
//
// Network domain allows tracking network activities of the page. It exposes
// information about http, file, data and other requests and responses, their
// headers, bodies, timing, etc.
//
// Generated by the chromedp-gen command.
package network
// Code generated by chromedp-gen. DO NOT EDIT.
import (
"context"
"encoding/base64"
cdp "github.com/knq/chromedp/cdp"
)
// EnableParams enables network tracking; network events will now be
// delivered to the client.
type EnableParams struct {
MaxTotalBufferSize int64 `json:"maxTotalBufferSize,omitempty"` // Buffer size in bytes to use when preserving network payloads (XHRs, etc).
MaxResourceBufferSize int64 `json:"maxResourceBufferSize,omitempty"` // Per-resource buffer size in bytes to use when preserving network payloads (XHRs, etc).
}
// Enable enables network tracking; network events will now be delivered to
// the client.
//
// parameters:
func Enable() *EnableParams {
return &EnableParams{}
}
// WithMaxTotalBufferSize buffer size in bytes to use when preserving network
// payloads (XHRs, etc).
func (p EnableParams) WithMaxTotalBufferSize(maxTotalBufferSize int64) *EnableParams {
p.MaxTotalBufferSize = maxTotalBufferSize
return &p
}
// WithMaxResourceBufferSize per-resource buffer size in bytes to use when
// preserving network payloads (XHRs, etc).
func (p EnableParams) WithMaxResourceBufferSize(maxResourceBufferSize int64) *EnableParams {
p.MaxResourceBufferSize = maxResourceBufferSize
return &p
}
// Do executes Network.enable against the provided context and
// target handler.
func (p *EnableParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkEnable, p, nil)
}
// DisableParams disables network tracking, preventing network events from
// being sent to the client.
type DisableParams struct{}
// Disable disables network tracking, preventing network events from being
// sent to the client.
func Disable() *DisableParams {
return &DisableParams{}
}
// Do executes Network.disable against the provided context and
// target handler.
func (p *DisableParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkDisable, nil, nil)
}
// SetUserAgentOverrideParams allows overriding user agent with the given
// string.
type SetUserAgentOverrideParams struct {
UserAgent string `json:"userAgent"` // User agent to use.
}
// SetUserAgentOverride allows overriding user agent with the given string.
//
// parameters:
// userAgent - User agent to use.
func SetUserAgentOverride(userAgent string) *SetUserAgentOverrideParams {
return &SetUserAgentOverrideParams{
UserAgent: userAgent,
}
}
// Do executes Network.setUserAgentOverride against the provided context and
// target handler.
func (p *SetUserAgentOverrideParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetUserAgentOverride, p, nil)
}
// SetExtraHTTPHeadersParams specifies whether to always send extra HTTP
// headers with the requests from this page.
type SetExtraHTTPHeadersParams struct {
Headers Headers `json:"headers"` // Map with extra HTTP headers.
}
// SetExtraHTTPHeaders specifies whether to always send extra HTTP headers
// with the requests from this page.
//
// parameters:
// headers - Map with extra HTTP headers.
func SetExtraHTTPHeaders(headers Headers) *SetExtraHTTPHeadersParams {
return &SetExtraHTTPHeadersParams{
Headers: headers,
}
}
// Do executes Network.setExtraHTTPHeaders against the provided context and
// target handler.
func (p *SetExtraHTTPHeadersParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetExtraHTTPHeaders, p, nil)
}
// GetResponseBodyParams returns content served for the given request.
type GetResponseBodyParams struct {
RequestID RequestID `json:"requestId"` // Identifier of the network request to get content for.
}
// GetResponseBody returns content served for the given request.
//
// parameters:
// requestID - Identifier of the network request to get content for.
func GetResponseBody(requestID RequestID) *GetResponseBodyParams {
return &GetResponseBodyParams{
RequestID: requestID,
}
}
// GetResponseBodyReturns return values.
type GetResponseBodyReturns struct {
Body string `json:"body,omitempty"` // Response body.
Base64encoded bool `json:"base64Encoded,omitempty"` // True, if content was sent as base64.
}
// Do executes Network.getResponseBody against the provided context and
// target handler.
//
// returns:
// body - Response body.
func (p *GetResponseBodyParams) Do(ctxt context.Context, h cdp.Handler) (body []byte, err error) {
// execute
var res GetResponseBodyReturns
err = h.Execute(ctxt, cdp.CommandNetworkGetResponseBody, p, &res)
if err != nil {
return nil, err
}
// decode
var dec []byte
if res.Base64encoded {
dec, err = base64.StdEncoding.DecodeString(res.Body)
if err != nil {
return nil, err
}
} else {
dec = []byte(res.Body)
}
return dec, nil
}
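// Usage sketch (hypothetical context and handler, not part of the
// generated file): commands are built with their constructor function and
// then executed against a target handler:
//
//	if err := Enable().Do(ctxt, h); err != nil {
//		// handle error
//	}
//	body, err := GetResponseBody(requestID).Do(ctxt, h)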
// SetBlockedURLSParams blocks URLs from loading.
type SetBlockedURLSParams struct {
Urls []string `json:"urls"` // URL patterns to block. Wildcards ('*') are allowed.
}
// SetBlockedURLS blocks URLs from loading.
//
// parameters:
// urls - URL patterns to block. Wildcards ('*') are allowed.
func SetBlockedURLS(urls []string) *SetBlockedURLSParams {
return &SetBlockedURLSParams{
Urls: urls,
}
}
// Do executes Network.setBlockedURLs against the provided context and
// target handler.
func (p *SetBlockedURLSParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetBlockedURLS, p, nil)
}
// ReplayXHRParams this method sends a new XMLHttpRequest which is identical
// to the original one. The following parameters should be identical: method,
// url, async, request body, extra headers, withCredentials attribute, user,
// password.
type ReplayXHRParams struct {
RequestID RequestID `json:"requestId"` // Identifier of XHR to replay.
}
// ReplayXHR this method sends a new XMLHttpRequest which is identical to the
// original one. The following parameters should be identical: method, url,
// async, request body, extra headers, withCredentials attribute, user,
// password.
//
// parameters:
// requestID - Identifier of XHR to replay.
func ReplayXHR(requestID RequestID) *ReplayXHRParams {
return &ReplayXHRParams{
RequestID: requestID,
}
}
// Do executes Network.replayXHR against the provided context and
// target handler.
func (p *ReplayXHRParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkReplayXHR, p, nil)
}
// CanClearBrowserCacheParams tells whether clearing browser cache is
// supported.
type CanClearBrowserCacheParams struct{}
// CanClearBrowserCache tells whether clearing browser cache is supported.
func CanClearBrowserCache() *CanClearBrowserCacheParams {
return &CanClearBrowserCacheParams{}
}
// CanClearBrowserCacheReturns return values.
type CanClearBrowserCacheReturns struct {
Result bool `json:"result,omitempty"` // True if browser cache can be cleared.
}
// Do executes Network.canClearBrowserCache against the provided context and
// target handler.
//
// returns:
// result - True if browser cache can be cleared.
func (p *CanClearBrowserCacheParams) Do(ctxt context.Context, h cdp.Handler) (result bool, err error) {
// execute
var res CanClearBrowserCacheReturns
err = h.Execute(ctxt, cdp.CommandNetworkCanClearBrowserCache, nil, &res)
if err != nil {
return false, err
}
return res.Result, nil
}
// ClearBrowserCacheParams clears browser cache.
type ClearBrowserCacheParams struct{}
// ClearBrowserCache clears browser cache.
func ClearBrowserCache() *ClearBrowserCacheParams {
return &ClearBrowserCacheParams{}
}
// Do executes Network.clearBrowserCache against the provided context and
// target handler.
func (p *ClearBrowserCacheParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkClearBrowserCache, nil, nil)
}
// CanClearBrowserCookiesParams tells whether clearing browser cookies is
// supported.
type CanClearBrowserCookiesParams struct{}
// CanClearBrowserCookies tells whether clearing browser cookies is
// supported.
func CanClearBrowserCookies() *CanClearBrowserCookiesParams {
return &CanClearBrowserCookiesParams{}
}
// CanClearBrowserCookiesReturns return values.
type CanClearBrowserCookiesReturns struct {
Result bool `json:"result,omitempty"` // True if browser cookies can be cleared.
}
// Do executes Network.canClearBrowserCookies against the provided context and
// target handler.
//
// returns:
// result - True if browser cookies can be cleared.
func (p *CanClearBrowserCookiesParams) Do(ctxt context.Context, h cdp.Handler) (result bool, err error) {
// execute
var res CanClearBrowserCookiesReturns
err = h.Execute(ctxt, cdp.CommandNetworkCanClearBrowserCookies, nil, &res)
if err != nil {
return false, err
}
return res.Result, nil
}
// ClearBrowserCookiesParams clears browser cookies.
type ClearBrowserCookiesParams struct{}
// ClearBrowserCookies clears browser cookies.
func ClearBrowserCookies() *ClearBrowserCookiesParams {
return &ClearBrowserCookiesParams{}
}
// Do executes Network.clearBrowserCookies against the provided context and
// target handler.
func (p *ClearBrowserCookiesParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkClearBrowserCookies, nil, nil)
}
// GetCookiesParams returns all browser cookies for the current URL.
// Depending on the backend support, will return detailed cookie information in
// the cookies field.
type GetCookiesParams struct {
Urls []string `json:"urls,omitempty"` // The list of URLs for which applicable cookies will be fetched
}
// GetCookies returns all browser cookies for the current URL. Depending on
// the backend support, will return detailed cookie information in the cookies
// field.
//
// parameters:
func GetCookies() *GetCookiesParams {
return &GetCookiesParams{}
}
// WithUrls the list of URLs for which applicable cookies will be fetched.
func (p GetCookiesParams) WithUrls(urls []string) *GetCookiesParams {
p.Urls = urls
return &p
}
// GetCookiesReturns return values.
type GetCookiesReturns struct {
Cookies []*Cookie `json:"cookies,omitempty"` // Array of cookie objects.
}
// Do executes Network.getCookies against the provided context and
// target handler.
//
// returns:
// cookies - Array of cookie objects.
func (p *GetCookiesParams) Do(ctxt context.Context, h cdp.Handler) (cookies []*Cookie, err error) {
// execute
var res GetCookiesReturns
err = h.Execute(ctxt, cdp.CommandNetworkGetCookies, p, &res)
if err != nil {
return nil, err
}
return res.Cookies, nil
}
// GetAllCookiesParams returns all browser cookies. Depending on the backend
// support, will return detailed cookie information in the cookies field.
type GetAllCookiesParams struct{}
// GetAllCookies returns all browser cookies. Depending on the backend
// support, will return detailed cookie information in the cookies field.
func GetAllCookies() *GetAllCookiesParams {
return &GetAllCookiesParams{}
}
// GetAllCookiesReturns return values.
type GetAllCookiesReturns struct {
Cookies []*Cookie `json:"cookies,omitempty"` // Array of cookie objects.
}
// Do executes Network.getAllCookies against the provided context and
// target handler.
//
// returns:
// cookies - Array of cookie objects.
func (p *GetAllCookiesParams) Do(ctxt context.Context, h cdp.Handler) (cookies []*Cookie, err error) {
// execute
var res GetAllCookiesReturns
err = h.Execute(ctxt, cdp.CommandNetworkGetAllCookies, nil, &res)
if err != nil {
return nil, err
}
return res.Cookies, nil
}
// DeleteCookiesParams deletes browser cookies with matching name and url or
// domain/path pair.
type DeleteCookiesParams struct {
Name string `json:"name"` // Name of the cookies to remove.
URL string `json:"url,omitempty"` // If specified, deletes all the cookies with the given name where domain and path match provided URL.
Domain string `json:"domain,omitempty"` // If specified, deletes only cookies with the exact domain.
Path string `json:"path,omitempty"` // If specified, deletes only cookies with the exact path.
}
// DeleteCookies deletes browser cookies with matching name and url or
// domain/path pair.
//
// parameters:
// name - Name of the cookies to remove.
func DeleteCookies(name string) *DeleteCookiesParams {
return &DeleteCookiesParams{
Name: name,
}
}
// WithURL if specified, deletes all the cookies with the given name where
// domain and path match provided URL.
func (p DeleteCookiesParams) WithURL(url string) *DeleteCookiesParams {
p.URL = url
return &p
}
// WithDomain if specified, deletes only cookies with the exact domain.
func (p DeleteCookiesParams) WithDomain(domain string) *DeleteCookiesParams {
p.Domain = domain
return &p
}
// WithPath if specified, deletes only cookies with the exact path.
func (p DeleteCookiesParams) WithPath(path string) *DeleteCookiesParams {
p.Path = path
return &p
}
// Do executes Network.deleteCookies against the provided context and
// target handler.
func (p *DeleteCookiesParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkDeleteCookies, p, nil)
}
// SetCookieParams sets a cookie with the given cookie data; may overwrite
// equivalent cookies if they exist.
type SetCookieParams struct {
Name string `json:"name"` // Cookie name.
Value string `json:"value"` // Cookie value.
URL string `json:"url,omitempty"` // The request-URI to associate with the setting of the cookie. This value can affect the default domain and path values of the created cookie.
Domain string `json:"domain,omitempty"` // Cookie domain.
Path string `json:"path,omitempty"` // Cookie path.
Secure bool `json:"secure,omitempty"` // True if cookie is secure.
HTTPOnly bool `json:"httpOnly,omitempty"` // True if cookie is http-only.
SameSite CookieSameSite `json:"sameSite,omitempty"` // Cookie SameSite type.
Expires *cdp.TimeSinceEpoch `json:"expires,omitempty"` // Cookie expiration date, session cookie if not set
}
// SetCookie sets a cookie with the given cookie data; may overwrite
// equivalent cookies if they exist.
//
// parameters:
// name - Cookie name.
// value - Cookie value.
func SetCookie(name string, value string) *SetCookieParams {
return &SetCookieParams{
Name: name,
Value: value,
}
}
// WithURL the request-URI to associate with the setting of the cookie. This
// value can affect the default domain and path values of the created cookie.
func (p SetCookieParams) WithURL(url string) *SetCookieParams {
p.URL = url
return &p
}
// WithDomain cookie domain.
func (p SetCookieParams) WithDomain(domain string) *SetCookieParams {
p.Domain = domain
return &p
}
// WithPath cookie path.
func (p SetCookieParams) WithPath(path string) *SetCookieParams {
p.Path = path
return &p
}
// WithSecure true if cookie is secure.
func (p SetCookieParams) WithSecure(secure bool) *SetCookieParams {
p.Secure = secure
return &p
}
// WithHTTPOnly true if cookie is http-only.
func (p SetCookieParams) WithHTTPOnly(httpOnly bool) *SetCookieParams {
p.HTTPOnly = httpOnly
return &p
}
// WithSameSite cookie SameSite type.
func (p SetCookieParams) WithSameSite(sameSite CookieSameSite) *SetCookieParams {
p.SameSite = sameSite
return &p
}
// WithExpires cookie expiration date, session cookie if not set.
func (p SetCookieParams) WithExpires(expires *cdp.TimeSinceEpoch) *SetCookieParams {
p.Expires = expires
return &p
}
// SetCookieReturns return values.
type SetCookieReturns struct {
Success bool `json:"success,omitempty"` // True if successfully set cookie.
}
// Do executes Network.setCookie against the provided context and
// target handler.
//
// returns:
// success - True if successfully set cookie.
func (p *SetCookieParams) Do(ctxt context.Context, h cdp.Handler) (success bool, err error) {
// execute
var res SetCookieReturns
err = h.Execute(ctxt, cdp.CommandNetworkSetCookie, p, &res)
if err != nil {
return false, err
}
return res.Success, nil
}
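// Builder sketch (hypothetical values): the With* methods take a value
// receiver and return a pointer to a copy, so chained calls compose
// without mutating the original params:
//
//	p := SetCookie("session", "abc123").
//		WithDomain("example.com").
//		WithSecure(true)
//	ok, err := p.Do(ctxt, h)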
// SetCookiesParams sets given cookies.
type SetCookiesParams struct {
Cookies []*CookieParam `json:"cookies"` // Cookies to be set.
}
// SetCookies sets given cookies.
//
// parameters:
// cookies - Cookies to be set.
func SetCookies(cookies []*CookieParam) *SetCookiesParams {
return &SetCookiesParams{
Cookies: cookies,
}
}
// Do executes Network.setCookies against the provided context and
// target handler.
func (p *SetCookiesParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetCookies, p, nil)
}
// CanEmulateNetworkConditionsParams tells whether emulation of network
// conditions is supported.
type CanEmulateNetworkConditionsParams struct{}
// CanEmulateNetworkConditions tells whether emulation of network conditions
// is supported.
func CanEmulateNetworkConditions() *CanEmulateNetworkConditionsParams {
return &CanEmulateNetworkConditionsParams{}
}
// CanEmulateNetworkConditionsReturns return values.
type CanEmulateNetworkConditionsReturns struct {
Result bool `json:"result,omitempty"` // True if emulation of network conditions is supported.
}
// Do executes Network.canEmulateNetworkConditions against the provided context and
// target handler.
//
// returns:
// result - True if emulation of network conditions is supported.
func (p *CanEmulateNetworkConditionsParams) Do(ctxt context.Context, h cdp.Handler) (result bool, err error) {
// execute
var res CanEmulateNetworkConditionsReturns
err = h.Execute(ctxt, cdp.CommandNetworkCanEmulateNetworkConditions, nil, &res)
if err != nil {
return false, err
}
return res.Result, nil
}
// EmulateNetworkConditionsParams activates emulation of network conditions.
type EmulateNetworkConditionsParams struct {
Offline bool `json:"offline"` // True to emulate internet disconnection.
Latency float64 `json:"latency"` // Minimum latency from request sent to response headers received (ms).
DownloadThroughput float64 `json:"downloadThroughput"` // Maximal aggregated download throughput (bytes/sec). -1 disables download throttling.
UploadThroughput float64 `json:"uploadThroughput"` // Maximal aggregated upload throughput (bytes/sec). -1 disables upload throttling.
ConnectionType ConnectionType `json:"connectionType,omitempty"` // Connection type if known.
}
// EmulateNetworkConditions activates emulation of network conditions.
//
// parameters:
// offline - True to emulate internet disconnection.
// latency - Minimum latency from request sent to response headers received (ms).
// downloadThroughput - Maximal aggregated download throughput (bytes/sec). -1 disables download throttling.
// uploadThroughput - Maximal aggregated upload throughput (bytes/sec). -1 disables upload throttling.
func EmulateNetworkConditions(offline bool, latency float64, downloadThroughput float64, uploadThroughput float64) *EmulateNetworkConditionsParams {
return &EmulateNetworkConditionsParams{
Offline: offline,
Latency: latency,
DownloadThroughput: downloadThroughput,
UploadThroughput: uploadThroughput,
}
}
// WithConnectionType connection type if known.
func (p EmulateNetworkConditionsParams) WithConnectionType(connectionType ConnectionType) *EmulateNetworkConditionsParams {
p.ConnectionType = connectionType
return &p
}
// Do executes Network.emulateNetworkConditions against the provided context and
// target handler.
func (p *EmulateNetworkConditionsParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkEmulateNetworkConditions, p, nil)
}
// SetCacheDisabledParams toggles ignoring cache for each request. If true,
// cache will not be used.
type SetCacheDisabledParams struct {
CacheDisabled bool `json:"cacheDisabled"` // Cache disabled state.
}
// SetCacheDisabled toggles ignoring cache for each request. If true, cache
// will not be used.
//
// parameters:
// cacheDisabled - Cache disabled state.
func SetCacheDisabled(cacheDisabled bool) *SetCacheDisabledParams {
return &SetCacheDisabledParams{
CacheDisabled: cacheDisabled,
}
}
// Do executes Network.setCacheDisabled against the provided context and
// target handler.
func (p *SetCacheDisabledParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetCacheDisabled, p, nil)
}
// SetBypassServiceWorkerParams toggles ignoring of service worker for each
// request.
type SetBypassServiceWorkerParams struct {
Bypass bool `json:"bypass"` // Bypass service worker and load from network.
}
// SetBypassServiceWorker toggles ignoring of service worker for each
// request.
//
// parameters:
// bypass - Bypass service worker and load from network.
func SetBypassServiceWorker(bypass bool) *SetBypassServiceWorkerParams {
return &SetBypassServiceWorkerParams{
Bypass: bypass,
}
}
// Do executes Network.setBypassServiceWorker against the provided context and
// target handler.
func (p *SetBypassServiceWorkerParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetBypassServiceWorker, p, nil)
}
// SetDataSizeLimitsForTestParams for testing.
type SetDataSizeLimitsForTestParams struct {
MaxTotalSize int64 `json:"maxTotalSize"` // Maximum total buffer size.
MaxResourceSize int64 `json:"maxResourceSize"` // Maximum per-resource size.
}
// SetDataSizeLimitsForTest for testing.
//
// parameters:
// maxTotalSize - Maximum total buffer size.
// maxResourceSize - Maximum per-resource size.
func SetDataSizeLimitsForTest(maxTotalSize int64, maxResourceSize int64) *SetDataSizeLimitsForTestParams {
return &SetDataSizeLimitsForTestParams{
MaxTotalSize: maxTotalSize,
MaxResourceSize: maxResourceSize,
}
}
// Do executes Network.setDataSizeLimitsForTest against the provided context and
// target handler.
func (p *SetDataSizeLimitsForTestParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetDataSizeLimitsForTest, p, nil)
}
// GetCertificateParams returns the DER-encoded certificate.
type GetCertificateParams struct {
Origin string `json:"origin"` // Origin to get certificate for.
}
// GetCertificate returns the DER-encoded certificate.
//
// parameters:
// origin - Origin to get certificate for.
func GetCertificate(origin string) *GetCertificateParams {
return &GetCertificateParams{
Origin: origin,
}
}
// GetCertificateReturns return values.
type GetCertificateReturns struct {
TableNames []string `json:"tableNames,omitempty"`
}
// Do executes Network.getCertificate against the provided context and
// target handler.
//
// returns:
// tableNames
func (p *GetCertificateParams) Do(ctxt context.Context, h cdp.Handler) (tableNames []string, err error) {
// execute
var res GetCertificateReturns
err = h.Execute(ctxt, cdp.CommandNetworkGetCertificate, p, &res)
if err != nil {
return nil, err
}
return res.TableNames, nil
}
// SetRequestInterceptionParams sets the requests to intercept that match
// the provided patterns and optionally resource types.
type SetRequestInterceptionParams struct {
Patterns []*RequestPattern `json:"patterns"` // Requests matching any of these patterns will be forwarded and wait for the corresponding continueInterceptedRequest call.
}
// SetRequestInterception sets the requests to intercept that match the
// provided patterns and optionally resource types.
//
// parameters:
// patterns - Requests matching any of these patterns will be forwarded and wait for the corresponding continueInterceptedRequest call.
func SetRequestInterception(patterns []*RequestPattern) *SetRequestInterceptionParams {
return &SetRequestInterceptionParams{
Patterns: patterns,
}
}
// Do executes Network.setRequestInterception against the provided context and
// target handler.
func (p *SetRequestInterceptionParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkSetRequestInterception, p, nil)
}
// ContinueInterceptedRequestParams response to Network.requestIntercepted
// which either modifies the request to continue with any modifications, or
// blocks it, or completes it with the provided response bytes. If a network
// fetch occurs as a result which encounters a redirect an additional
// Network.requestIntercepted event will be sent with the same InterceptionId.
type ContinueInterceptedRequestParams struct {
InterceptionID InterceptionID `json:"interceptionId"`
ErrorReason ErrorReason `json:"errorReason,omitempty"` // If set this causes the request to fail with the given reason. Passing Aborted for requests marked with isNavigationRequest also cancels the navigation. Must not be set in response to an authChallenge.
RawResponse string `json:"rawResponse,omitempty"` // If set, the request completes using the provided base64-encoded raw response, including HTTP status line and headers. Must not be set in response to an authChallenge.
URL string `json:"url,omitempty"` // If set the request url will be modified in a way that's not observable by page. Must not be set in response to an authChallenge.
Method string `json:"method,omitempty"` // If set this allows the request method to be overridden. Must not be set in response to an authChallenge.
PostData string `json:"postData,omitempty"` // If set this allows postData to be set. Must not be set in response to an authChallenge.
Headers Headers `json:"headers,omitempty"` // If set this allows the request headers to be changed. Must not be set in response to an authChallenge.
AuthChallengeResponse *AuthChallengeResponse `json:"authChallengeResponse,omitempty"` // Response to a requestIntercepted with an authChallenge. Must not be set otherwise.
}
// ContinueInterceptedRequest response to Network.requestIntercepted which
// either modifies the request to continue with any modifications, or blocks it,
// or completes it with the provided response bytes. If a network fetch occurs
// as a result which encounters a redirect an additional
// Network.requestIntercepted event will be sent with the same InterceptionId.
//
// parameters:
// interceptionID
func ContinueInterceptedRequest(interceptionID InterceptionID) *ContinueInterceptedRequestParams {
return &ContinueInterceptedRequestParams{
InterceptionID: interceptionID,
}
}
// WithErrorReason if set this causes the request to fail with the given
// reason. Passing Aborted for requests marked with isNavigationRequest also
// cancels the navigation. Must not be set in response to an authChallenge.
func (p ContinueInterceptedRequestParams) WithErrorReason(errorReason ErrorReason) *ContinueInterceptedRequestParams {
p.ErrorReason = errorReason
return &p
}
// WithRawResponse if set, the request completes using the provided
// base64-encoded raw response, including HTTP status line and headers.
// Must not be set in response to an authChallenge.
func (p ContinueInterceptedRequestParams) WithRawResponse(rawResponse string) *ContinueInterceptedRequestParams {
p.RawResponse = rawResponse
return &p
}
// WithURL if set the request url will be modified in a way that's not
// observable by page. Must not be set in response to an authChallenge.
func (p ContinueInterceptedRequestParams) WithURL(url string) *ContinueInterceptedRequestParams {
p.URL = url
return &p
}
// WithMethod if set this allows the request method to be overridden. Must
// not be set in response to an authChallenge.
func (p ContinueInterceptedRequestParams) WithMethod(method string) *ContinueInterceptedRequestParams {
p.Method = method
return &p
}
// WithPostData if set this allows postData to be set. Must not be set in
// response to an authChallenge.
func (p ContinueInterceptedRequestParams) WithPostData(postData string) *ContinueInterceptedRequestParams {
p.PostData = postData
return &p
}
// WithHeaders if set this allows the request headers to be changed. Must not
// be set in response to an authChallenge.
func (p ContinueInterceptedRequestParams) WithHeaders(headers Headers) *ContinueInterceptedRequestParams {
p.Headers = headers
return &p
}
// WithAuthChallengeResponse response to a requestIntercepted with an
// authChallenge. Must not be set otherwise.
func (p ContinueInterceptedRequestParams) WithAuthChallengeResponse(authChallengeResponse *AuthChallengeResponse) *ContinueInterceptedRequestParams {
p.AuthChallengeResponse = authChallengeResponse
return &p
}
// Do executes Network.continueInterceptedRequest against the provided context and
// target handler.
func (p *ContinueInterceptedRequestParams) Do(ctxt context.Context, h cdp.Handler) (err error) {
return h.Execute(ctxt, cdp.CommandNetworkContinueInterceptedRequest, p, nil)
}
|
apache-2.0
|
tensorflow/quantum
|
tensorflow_quantum/core/ops/noise/tfq_noisy_samples.cc
|
13316
|
/* Copyright 2020 The TensorFlow Quantum Authors. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
==============================================================================*/
#include <stdlib.h>
#include <string>
#include "../qsim/lib/channel.h"
#include "../qsim/lib/channels_cirq.h"
#include "../qsim/lib/circuit.h"
#include "../qsim/lib/circuit_noisy.h"
#include "../qsim/lib/fuser_mqubit.h"
#include "../qsim/lib/gate_appl.h"
#include "../qsim/lib/gates_cirq.h"
#include "../qsim/lib/io.h"
#include "../qsim/lib/qtrajectory.h"
#include "../qsim/lib/seqfor.h"
#include "../qsim/lib/simmux.h"
#include "tensorflow/core/framework/op_kernel.h"
#include "tensorflow/core/framework/shape_inference.h"
#include "tensorflow/core/framework/tensor_shape.h"
#include "tensorflow/core/lib/core/error_codes.pb.h"
#include "tensorflow/core/lib/core/status.h"
#include "tensorflow/core/lib/core/threadpool.h"
#include "tensorflow/core/lib/random/random.h"
#include "tensorflow/core/lib/random/simple_philox.h"
#include "tensorflow/core/util/guarded_philox_random.h"
#include "tensorflow_quantum/core/ops/parse_context.h"
#include "tensorflow_quantum/core/proto/program.pb.h"
#include "tensorflow_quantum/core/src/circuit_parser_qsim.h"
#include "tensorflow_quantum/core/src/util_qsim.h"
namespace tfq {
using ::tensorflow::Status;
using ::tfq::proto::Program;
typedef qsim::Cirq::GateCirq<float> QsimGate;
typedef qsim::Circuit<QsimGate> QsimCircuit;
typedef qsim::NoisyCircuit<QsimGate> NoisyQsimCircuit;
class TfqNoisySamplesOp : public tensorflow::OpKernel {
public:
explicit TfqNoisySamplesOp(tensorflow::OpKernelConstruction* context)
: OpKernel(context) {}
void Compute(tensorflow::OpKernelContext* context) override {
// TODO (mbbrough): add more dimension checks for other inputs here.
DCHECK_EQ(4, context->num_inputs());
// Parse to Program Proto and num_qubits.
std::vector<Program> programs;
std::vector<int> num_qubits;
OP_REQUIRES_OK(context,
GetProgramsAndNumQubits(context, &programs, &num_qubits));
// Parse symbol maps for parameter resolution in the circuits.
std::vector<SymbolMap> maps;
OP_REQUIRES_OK(context, GetSymbolMaps(context, &maps));
OP_REQUIRES(
context, maps.size() == programs.size(),
tensorflow::errors::InvalidArgument(absl::StrCat(
"Number of circuits and values do not match. Got ", programs.size(),
" circuits and ", maps.size(), " values.")));
int num_samples = 0;
OP_REQUIRES_OK(context, GetIndividualSample(context, &num_samples));
// Construct qsim circuits.
std::vector<NoisyQsimCircuit> qsim_circuits(programs.size(),
NoisyQsimCircuit());
Status parse_status = Status::OK();
auto p_lock = tensorflow::mutex();
auto construct_f = [&](int start, int end) {
for (int i = start; i < end; i++) {
auto r = NoisyQsimCircuitFromProgram(
programs[i], maps[i], num_qubits[i], true, &qsim_circuits[i]);
NESTED_FN_STATUS_SYNC(parse_status, r, p_lock);
}
};
const int num_cycles = 1000;
context->device()->tensorflow_cpu_worker_threads()->workers->ParallelFor(
programs.size(), num_cycles, construct_f);
OP_REQUIRES_OK(context, parse_status);
int max_num_qubits = 0;
for (const int num : num_qubits) {
max_num_qubits = std::max(max_num_qubits, num);
}
const int output_dim_size = maps.size();
tensorflow::TensorShape output_shape;
output_shape.AddDim(output_dim_size);
output_shape.AddDim(num_samples);
output_shape.AddDim(max_num_qubits);
tensorflow::Tensor* output = nullptr;
OP_REQUIRES_OK(context, context->allocate_output(0, output_shape, &output));
auto output_tensor = output->tensor<int8_t, 3>();
if (num_samples == 0 || output_dim_size == 0 || max_num_qubits == 0) {
return; // bug in qsim dependency we can't control.
}
// Cross reference with standard google cloud compute instances
// Memory ~= 2 * num_threads * (2 * 64 * 2 ** num_qubits in circuits)
// e2s2 = 2 CPU, 8GB -> Can safely do 25 since Memory = 4GB
// e2s4 = 4 CPU, 16GB -> Can safely do 25 since Memory = 8GB
// ...
if (max_num_qubits >= 26) {
ComputeLarge(num_qubits, max_num_qubits, num_samples, qsim_circuits,
context, &output_tensor);
} else {
ComputeSmall(num_qubits, max_num_qubits, num_samples, qsim_circuits,
context, &output_tensor);
}
}
private:
void ComputeLarge(const std::vector<int>& num_qubits,
const int max_num_qubits, const int num_samples,
const std::vector<NoisyQsimCircuit>& ncircuits,
tensorflow::OpKernelContext* context,
tensorflow::TTypes<int8_t, 3>::Tensor* output_tensor) {
// Instantiate qsim objects.
const auto tfq_for = tfq::QsimFor(context);
using Simulator = qsim::Simulator<const tfq::QsimFor&>;
using StateSpace = Simulator::StateSpace;
using QTSimulator =
qsim::QuantumTrajectorySimulator<qsim::IO, QsimGate,
qsim::MultiQubitGateFuser, Simulator>;
// Begin simulation.
int largest_nq = 1;
Simulator sim = Simulator(tfq_for);
StateSpace ss = StateSpace(tfq_for);
auto sv = ss.Create(largest_nq);
auto scratch = ss.Create(largest_nq);
tensorflow::GuardedPhiloxRandom random_gen;
random_gen.Init(tensorflow::random::New64(), tensorflow::random::New64());
auto local_gen =
random_gen.ReserveSamples32(2 * num_samples * ncircuits.size() + 2);
tensorflow::random::SimplePhilox rand_source(&local_gen);
// Simulate programs one by one. Since we parallelize over state vectors,
// we no longer parallelize over circuits. Each time we encounter a
// larger circuit we grow the statevector as necessary.
for (int i = 0; i < ncircuits.size(); i++) {
int nq = num_qubits[i];
if (nq > largest_nq) {
// need to switch to larger statespace.
largest_nq = nq;
sv = ss.Create(largest_nq);
scratch = ss.Create(largest_nq);
}
QTSimulator::Parameter param;
param.collect_kop_stat = false;
param.collect_mea_stat = true;
param.normalize_before_mea_gates = true;
// Track op-wise stats.
std::vector<uint64_t> gathered_samples;
for (int j = 0; j < num_samples; j++) {
ss.SetStateZero(sv);
QTSimulator::RunOnce(param, ncircuits[i], rand_source.Rand64(), ss, sim,
scratch, sv, gathered_samples);
uint64_t q_ind = 0;
uint64_t mask = 1;
bool val = false;
while (q_ind < nq) {
val = gathered_samples[0] & mask;
(*output_tensor)(
i, j, static_cast<ptrdiff_t>(max_num_qubits - q_ind - 1)) = val;
q_ind++;
mask <<= 1;
}
while (q_ind < max_num_qubits) {
(*output_tensor)(
i, j, static_cast<ptrdiff_t>(max_num_qubits - q_ind - 1)) = -2;
q_ind++;
}
}
}
}
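// Layout sketch (hypothetical sizes): with two circuits of 2 and 3 qubits
// and num_samples = 4, max_num_qubits = 3 and the output tensor is
// [2, 4, 3]; bits are written right-aligned (qubit 0 in the last slot),
// and the unused leading slots of the 2-qubit circuit are filled with -2.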
void ComputeSmall(const std::vector<int>& num_qubits,
const int max_num_qubits, const int num_samples,
const std::vector<NoisyQsimCircuit>& ncircuits,
tensorflow::OpKernelContext* context,
tensorflow::TTypes<int8_t, 3>::Tensor* output_tensor) {
using Simulator = qsim::Simulator<const qsim::SequentialFor&>;
using StateSpace = Simulator::StateSpace;
using QTSimulator =
qsim::QuantumTrajectorySimulator<qsim::IO, QsimGate,
qsim::MultiQubitGateFuser, Simulator>;
const int output_dim_batch_size = output_tensor->dimension(0);
const int num_threads = context->device()
->tensorflow_cpu_worker_threads()
->workers->NumThreads();
// [num_threads, batch_size].
std::vector<std::vector<int>> rep_offsets(
num_threads, std::vector<int>(output_dim_batch_size, 0));
BalanceTrajectory(num_samples, num_threads, &rep_offsets);
// [num_threads, batch_size] stores the number of
// samples written by thread range [0, i].
std::vector<std::vector<long>> offset_prefix_sum(
num_threads, std::vector<long>(output_dim_batch_size, 0));
for (int i = 0; i < output_dim_batch_size; i++) {
int p_reps = (num_samples + num_threads - 1) / num_threads;
offset_prefix_sum[0][i] = rep_offsets[0][i] + p_reps;
for (int j = 1; j < num_threads; j++) {
offset_prefix_sum[j][i] += offset_prefix_sum[j - 1][i];
offset_prefix_sum[j][i] += rep_offsets[j][i] + p_reps;
}
}
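// Sizing sketch (hypothetical values): with num_samples = 10 and
// num_threads = 4, p_reps = ceil(10 / 4) = 3; after this loop,
// offset_prefix_sum[t][i] is the index one past the last sample thread t
// writes for circuit i, so each thread fills a disjoint slice
// [offset_prefix_sum[t-1][i], offset_prefix_sum[t][i]) of the output.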
tensorflow::GuardedPhiloxRandom random_gen;
random_gen.Init(tensorflow::random::New64(), tensorflow::random::New64());
auto DoWork = [&](int start, int end) {
// Begin simulation.
const auto tfq_for = qsim::SequentialFor(1);
int largest_nq = 1;
Simulator sim = Simulator(tfq_for);
StateSpace ss = StateSpace(tfq_for);
auto sv = ss.Create(largest_nq);
auto scratch = ss.Create(largest_nq);
int needed_random =
4 * (num_samples * ncircuits.size() + num_threads) / num_threads;
needed_random += 4;
auto local_gen = random_gen.ReserveSamples32(needed_random);
tensorflow::random::SimplePhilox rand_source(&local_gen);
for (int i = 0; i < ncircuits.size(); i++) {
int nq = num_qubits[i];
int j = start > 0 ? offset_prefix_sum[start - 1][i] : 0;
int needed_samples = offset_prefix_sum[start][i] - j;
if (needed_samples <= 0) {
continue;
}
if (nq > largest_nq) {
largest_nq = nq;
sv = ss.Create(largest_nq);
scratch = ss.Create(largest_nq);
}
QTSimulator::Parameter param;
param.collect_kop_stat = false;
param.collect_mea_stat = true;
param.normalize_before_mea_gates = true;
// Track op-wise stats.
std::vector<uint64_t> gathered_samples;
int run_samples = 0;
while (1) {
ss.SetStateZero(sv);
QTSimulator::RunOnce(param, ncircuits[i], rand_source.Rand64(), ss,
sim, scratch, sv, gathered_samples);
uint64_t q_ind = 0;
uint64_t mask = 1;
bool val = false;
while (q_ind < nq) {
val = gathered_samples[0] & mask;
(*output_tensor)(
i, j, static_cast<ptrdiff_t>(max_num_qubits - q_ind - 1)) = val;
q_ind++;
mask <<= 1;
}
while (q_ind < max_num_qubits) {
(*output_tensor)(
i, j, static_cast<ptrdiff_t>(max_num_qubits - q_ind - 1)) = -2;
q_ind++;
}
j++;
run_samples++;
// Check if we have gathered enough samples.
if (run_samples >= needed_samples) {
break;
}
}
}
};
// block_size = 1.
tensorflow::thread::ThreadPool::SchedulingParams scheduling_params(
tensorflow::thread::ThreadPool::SchedulingStrategy::kFixedBlockSize,
absl::nullopt, 1);
context->device()->tensorflow_cpu_worker_threads()->workers->ParallelFor(
num_threads, scheduling_params, DoWork);
}
};
REGISTER_KERNEL_BUILDER(Name("TfqNoisySamples").Device(tensorflow::DEVICE_CPU),
TfqNoisySamplesOp);
REGISTER_OP("TfqNoisySamples")
.Input("programs: string")
.Input("symbol_names: string")
.Input("symbol_values: float")
.Input("num_samples: int32")
.Output("samples: int8")
.SetShapeFn([](tensorflow::shape_inference::InferenceContext* c) {
tensorflow::shape_inference::ShapeHandle programs_shape;
TF_RETURN_IF_ERROR(c->WithRank(c->input(0), 1, &programs_shape));
tensorflow::shape_inference::ShapeHandle symbol_names_shape;
TF_RETURN_IF_ERROR(c->WithRank(c->input(1), 1, &symbol_names_shape));
tensorflow::shape_inference::ShapeHandle symbol_values_shape;
TF_RETURN_IF_ERROR(c->WithRank(c->input(2), 2, &symbol_values_shape));
tensorflow::shape_inference::ShapeHandle num_samples_shape;
TF_RETURN_IF_ERROR(c->WithRank(c->input(3), 1, &num_samples_shape));
// [batch_size, n_samples, largest_n_qubits]
c->set_output(
0, c->MakeShape(
{c->Dim(programs_shape, 0),
tensorflow::shape_inference::InferenceContext::kUnknownDim,
tensorflow::shape_inference::InferenceContext::kUnknownDim}));
return tensorflow::Status::OK();
});
} // namespace tfq
|
apache-2.0
|
svn2github/hwmail-mirror
|
hedwig-adm/src/main/java/com/hs/mail/adm/command/ExpungeCommand.java
|
712
|
package com.hs.mail.adm.command;
import java.util.List;
import org.apache.commons.collections.CollectionUtils;
import com.hs.mail.imap.mailbox.MailboxManager;
public class ExpungeCommand extends AbstractMailboxCommand {
@Override
protected void runTask(List<String> tokens) throws Exception {
super.runTask(tokens);
MailboxManager manager = getMailboxManager();
List<Long> mailboxIds = getMailboxIDList();
if (CollectionUtils.isNotEmpty(mailboxIds)) {
for (Long mailboxID : mailboxIds) {
List<Long> uids = search(mailboxID);
if (CollectionUtils.isNotEmpty(uids)) {
for (Long uid : uids) {
manager.deleteMessage(uid);
}
}
}
}
}
}
|
apache-2.0
|
wuziliang18/source-code-learn-dubbo
|
dubbo-remoting/dubbo-remoting-http/src/main/java/com/alibaba/dubbo/remoting/http/servlet/DispatcherServlet.java
|
2212
|
/*
* Copyright 1999-2011 Alibaba Group.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.alibaba.dubbo.remoting.http.servlet;
import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServlet;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import com.alibaba.dubbo.remoting.http.HttpHandler;
/**
* Service dispatcher Servlet.
*
* @author qian.lei
*/
public class DispatcherServlet extends HttpServlet {
private static final long serialVersionUID = 5766349180380479888L;
private static DispatcherServlet INSTANCE;
private static final Map<Integer, HttpHandler> handlers = new ConcurrentHashMap<Integer, HttpHandler>();
public static void addHttpHandler(int port, HttpHandler processor) {
handlers.put(port, processor);
}
public static void removeHttpHandler(int port) {
handlers.remove(port);
}
public static DispatcherServlet getInstance() {
return INSTANCE;
}
public DispatcherServlet() {
DispatcherServlet.INSTANCE = this;
}
/**
 * Delegate request handling to the HttpHandler registered for the request's local port.
 */
protected void service(HttpServletRequest request, HttpServletResponse response)
throws ServletException, IOException {
HttpHandler handler = handlers.get(request.getLocalPort());
if( handler == null ) {// service not found.
response.sendError(HttpServletResponse.SC_NOT_FOUND, "Service not found.");
} else {
handler.handle(request, response);
}
}
}
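// Usage sketch (hypothetical port and handler, not part of the original
// file): a transport registers its handler under the listen port, and the
// servlet then routes each request by request.getLocalPort():
//
//   DispatcherServlet.addHttpHandler(8080, httpHandler);
//   ...
//   DispatcherServlet.removeHttpHandler(8080);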
|
apache-2.0
|
googleads/google-ads-ruby
|
lib/google/ads/google_ads/v8/services/paid_organic_search_term_view_service_services_pb.rb
|
2133
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# Source: google/ads/googleads/v8/services/paid_organic_search_term_view_service.proto for package 'Google.Ads.GoogleAds.V8.Services'
# Original file comments:
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
require 'grpc'
require 'google/ads/google_ads/v8/services/paid_organic_search_term_view_service_pb'
module Google
module Ads
module GoogleAds
module V8
module Services
module PaidOrganicSearchTermViewService
# Proto file describing the Paid Organic Search Term View service.
#
# Service to fetch paid organic search term views.
class Service
include GRPC::GenericService
self.marshal_class_method = :encode
self.unmarshal_class_method = :decode
self.service_name = 'google.ads.googleads.v8.services.PaidOrganicSearchTermViewService'
# Returns the requested paid organic search term view in full detail.
#
# List of thrown errors:
# [AuthenticationError]()
# [AuthorizationError]()
# [HeaderError]()
# [InternalError]()
# [QuotaError]()
# [RequestError]()
rpc :GetPaidOrganicSearchTermView, ::Google::Ads::GoogleAds::V8::Services::GetPaidOrganicSearchTermViewRequest, ::Google::Ads::GoogleAds::V8::Resources::PaidOrganicSearchTermView
end
Stub = Service.rpc_stub_class
end
end
end
end
end
end
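# Usage sketch (hypothetical host and credentials, not part of the
# generated file):
#
#   service = Google::Ads::GoogleAds::V8::Services::PaidOrganicSearchTermViewService
#   stub = service::Stub.new('googleads.googleapis.com:443',
#                            GRPC::Core::ChannelCredentials.new)
#   view = stub.get_paid_organic_search_term_view(request)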
|
apache-2.0
|
TheTicks/Speak2Copyright
|
app/src/main/java/theticks/s2t/SpeakCopy.java
|
5308
|
package theticks.s2t;
import android.annotation.SuppressLint;
import android.content.Intent;
import android.net.Uri;
import android.os.Bundle;
import android.speech.RecognizerIntent;
import android.support.design.widget.FloatingActionButton;
import android.support.v7.app.AppCompatActivity;
import android.view.Menu;
import android.view.MenuItem;
import android.widget.ListView;
import com.google.android.gms.appindexing.Action;
import com.google.android.gms.appindexing.AppIndex;
import com.google.android.gms.appindexing.Thing;
import com.google.android.gms.common.api.GoogleApiClient;
import java.util.ArrayList;
import theticks.s2t.actions.IAction;
import theticks.s2t.charts.LandingSuggestions;
import theticks.s2t.charts.SimpleTextChart;
import theticks.s2t.parser.LanguageParser;
@SuppressLint("SetJavaScriptEnabled")
public class SpeakCopy extends AppCompatActivity {
public static final int REQ_CODE_SPEECH_INPUT = 110;
private ChartsAdapter charts;
private ListView chartsView;
public static final String CHARTS_PATH = "file:///android_asset/";
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
private GoogleApiClient client;
private DatabaseAccess databaseAccess;
private LanguageParser languageParser;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_speak_copy);
charts = new ChartsAdapter(this);
chartsView = (ListView) findViewById(R.id.charts);
chartsView.setAdapter(this.charts);
append(new SimpleTextChart(R.layout.fragment_landing_top));
append(new LandingSuggestions(this));
FloatingActionButton fab = (FloatingActionButton) findViewById(R.id.fab);
fab.setOnClickListener(new SpeakAfterButton(this));
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client = new GoogleApiClient.Builder(this).addApi(AppIndex.API).build();
databaseAccess = DatabaseAccess.getInstance(this);
languageParser = new LanguageParser(this);
}
public void append(IChart chart) {
this.charts.append(chart);
chartsView.post(new Runnable(){
public void run() {
chartsView.setSelection(chartsView.getCount() - 1);
}});
}
public void processQuery(String query) {
IAction action = languageParser.parse(query);
IChart chart = action.execute(databaseAccess);
append(chart);
}
@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
super.onActivityResult(requestCode, resultCode, data);
switch (requestCode) {
case REQ_CODE_SPEECH_INPUT: {
if (resultCode == RESULT_OK && null != data) {
ArrayList<String> result = data.getStringArrayListExtra(RecognizerIntent.EXTRA_RESULTS);
processQuery(result.get(0));
}
break;
}
}
}
@Override
public boolean onCreateOptionsMenu(Menu menu) {
// Inflate the menu; this adds items to the action bar if it is present.
getMenuInflater().inflate(R.menu.menu_speak_copy, menu);
return true;
}
@Override
public boolean onOptionsItemSelected(MenuItem item) {
// Handle action bar item clicks here. The action bar will
// automatically handle clicks on the Home/Up button, so long
// as you specify a parent activity in AndroidManifest.xml.
int id = item.getItemId();
//noinspection SimplifiableIfStatement
if (id == R.id.action_settings) {
return true;
}
return super.onOptionsItemSelected(item);
}
/**
* ATTENTION: This was auto-generated to implement the App Indexing API.
* See https://g.co/AppIndexing/AndroidStudio for more information.
*/
public Action getIndexApiAction() {
Thing object = new Thing.Builder()
.setName("SpeakCopy Page") // TODO: Define a title for the content shown.
// TODO: Make sure this auto-generated URL is correct.
.setUrl(Uri.parse("http://[ENTER-YOUR-URL-HERE]"))
.build();
return new Action.Builder(Action.TYPE_VIEW)
.setObject(object)
.setActionStatus(Action.STATUS_TYPE_COMPLETED)
.build();
}
@Override
public void onStart() {
super.onStart();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
client.connect();
AppIndex.AppIndexApi.start(client, getIndexApiAction());
}
@Override
public void onStop() {
super.onStop();
// ATTENTION: This was auto-generated to implement the App Indexing API.
// See https://g.co/AppIndexing/AndroidStudio for more information.
AppIndex.AppIndexApi.end(client, getIndexApiAction());
client.disconnect();
}
}
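// Illustrative sketch (SpeakAfterButton's source is not shown; this is an
// assumed shape for its click handler): launching the speech recognizer so
// that onActivityResult above receives the recognized text.
//
//   Intent intent = new Intent(RecognizerIntent.ACTION_RECOGNIZE_SPEECH);
//   intent.putExtra(RecognizerIntent.EXTRA_LANGUAGE_MODEL,
//           RecognizerIntent.LANGUAGE_MODEL_FREE_FORM);
//   activity.startActivityForResult(intent, SpeakCopy.REQ_CODE_SPEECH_INPUT);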
|
apache-2.0
|
partouf/Chatty-Twitch-Client
|
src/chatty/gui/components/settings/TableEditor.java
|
23967
|
package chatty.gui.components.settings;
import java.awt.Component;
import java.awt.Dimension;
import java.awt.GridBagConstraints;
import java.awt.GridBagLayout;
import java.awt.Insets;
import java.awt.Point;
import java.awt.Rectangle;
import java.awt.event.ActionEvent;
import java.awt.event.ActionListener;
import java.awt.event.KeyEvent;
import java.awt.event.MouseAdapter;
import java.awt.event.MouseEvent;
import java.util.List;
import java.util.regex.PatternSyntaxException;
import javax.swing.AbstractAction;
import javax.swing.ImageIcon;
import javax.swing.JButton;
import javax.swing.JDialog;
import javax.swing.JLabel;
import javax.swing.JOptionPane;
import javax.swing.JPanel;
import javax.swing.JPopupMenu;
import javax.swing.JScrollPane;
import javax.swing.JTable;
import javax.swing.JTextField;
import javax.swing.KeyStroke;
import javax.swing.ListSelectionModel;
import javax.swing.RowFilter;
import javax.swing.event.DocumentEvent;
import javax.swing.event.DocumentListener;
import javax.swing.event.ListSelectionEvent;
import javax.swing.event.ListSelectionListener;
import javax.swing.table.TableCellRenderer;
import javax.swing.table.TableRowSorter;
/**
* A table containing one element per row, with editing features.
*
* @author tduva
*/
public class TableEditor<T> extends JPanel {
private static final Dimension BUTTON_SIZE = new Dimension(27,27);
public static final int SORTING_MODE_MANUAL = 0;
public static final int SORTING_MODE_SORTED = 1;
private final ButtonAction buttonActionListener = new ButtonAction();
private final JTable table;
private ListTableModel<T> data;
private ItemEditor<T> editor;
private TableRowSorter<ListTableModel<T>> sorter;
private int sortingMode;
private boolean currentlyFiltering;
/**
* Edit buttons
*/
private final JButton add = new JButton();
private final JButton remove = new JButton();
private final JButton edit = new JButton();
private final JButton moveUp = new JButton();
private final JButton moveDown = new JButton();
private final JButton refresh = new JButton();
private final JTextField filterInput = new JTextField();
private TableEditorListener listener;
private TableContextMenu contextMenu;
/**
*
* The {@code sortingMode} determines the sorting features this table provides:
* <ul>
* <li>{@code SORTING_MODE_MANUAL} means the user can/has to order the entries
* manually, which may be required for some applications</li>
* <li> {@code SORTING_MODE_SORTED} means the table can sort the entries by their
* natural order and items can also be filtered</li>
* </ul>
*
* @param sortingMode The sorting mode
* @param refreshButton Whether this table should have a reload button,
* which may only be applicable for some uses
*/
public TableEditor(int sortingMode, boolean refreshButton) {
this.sortingMode = sortingMode;
table = new JTable();
table.setSelectionMode(ListSelectionModel.SINGLE_SELECTION);
table.setFillsViewportHeight(true);
// Selection Listener to update buttons
table.getSelectionModel().addListSelectionListener(new ListSelectionListener() {
@Override
public void valueChanged(ListSelectionEvent e) {
updateButtons();
}
});
// Mouse Listener to edit items and open context menu
table.addMouseListener(new MouseAdapter() {
@Override
public void mouseClicked(MouseEvent e) {
if (e.getClickCount() == 2) {
editSelectedItem();
}
}
@Override
public void mousePressed(MouseEvent e) {
selectRowAt(e.getPoint());
popupMenu(e);
}
@Override
public void mouseReleased(MouseEvent e) {
popupMenu(e);
}
});
// Delete key
table.getInputMap().put(KeyStroke.getKeyStroke(KeyEvent.VK_DELETE, 0), "removeItems");
table.getActionMap().put("removeItems", new AbstractAction() {
@Override
public void actionPerformed(ActionEvent e) {
removeSelected();
}
});
// Buttons Configuration
configureButton(add, "list-add.png", "Add selected item");
configureButton(edit, "edit.png", "Edit selected item");
configureButton(remove, "list-remove.png", "Remove selected item");
configureButton(moveUp, "go-up.png", "Move selected item up");
configureButton(moveDown, "go-down.png", "Move selected item down");
configureButton(refresh, "view-refresh.png", "Refresh data");
// Layout
setLayout(new GridBagLayout());
GridBagConstraints gbc;
gbc = makeGbc(0, 0, 2, 7);
gbc.fill = GridBagConstraints.BOTH;
gbc.weightx = 1;
gbc.weighty = 1;
add(new JScrollPane(table), gbc);
// Filter
if (sortingMode == SORTING_MODE_SORTED) {
gbc = makeGbc(0, 7, 1, 1);
gbc.insets = new Insets(0,2,0,1);
JLabel filterInputLabel = new JLabel("Filter: ");
filterInputLabel.setLabelFor(filterInput);
add(filterInputLabel, gbc);
gbc = makeGbc(1, 7, 1, 1);
gbc.fill = GridBagConstraints.HORIZONTAL;
gbc.weightx = 1;
add(filterInput, gbc);
filterInput.getDocument().addDocumentListener(new DocumentListener() {
@Override
public void insertUpdate(DocumentEvent e) {
updateFiltering();
}
@Override
public void removeUpdate(DocumentEvent e) {
updateFiltering();
}
@Override
public void changedUpdate(DocumentEvent e) {
updateFiltering();
}
});
}
// Buttons
gbc = makeGbc(2, 0, 1, 1);
add(add, gbc);
gbc = makeGbc(2, 1, 1, 1);
add(remove, gbc);
gbc = makeGbc(2, 2, 1, 1);
add(edit, gbc);
if (sortingMode == SORTING_MODE_MANUAL) {
gbc = makeGbc(2, 3, 1, 1);
add(moveUp, gbc);
gbc = makeGbc(2, 4, 1, 1);
add(moveDown, gbc);
}
if (refreshButton) {
gbc = makeGbc(2, 5, 1, 1);
add(refresh, gbc);
}
updateButtons();
}
/**
* Set the model for this table, which must be done before it is used.
*
* @param model
*/
protected final void setModel(ListTableModel<T> model) {
data = model;
table.setModel(model);
if (sortingMode == SORTING_MODE_SORTED) {
sorter = new TableRowSorter<>(model);
table.setRowSorter(sorter);
sorter.toggleSortOrder(0);
}
}
/**
* Allows to set custom renderers for certain classes.
*
* @param cellClass
* @param renderer
*/
protected final void setDefaultRenderer(Class cellClass, TableCellRenderer renderer) {
table.setDefaultRenderer(cellClass, renderer);
}
protected final void setRendererForColumn(int column, TableCellRenderer renderer) {
table.getColumnModel().getColumn(column).setCellRenderer(renderer);
}
protected final void setFixedColumnWidth(int column, int size) {
table.getColumnModel().getColumn(column).setMaxWidth(size);
table.getColumnModel().getColumn(column).setMinWidth(size);
}
protected final void setColumnWidth(int column, int size) {
table.getColumnModel().getColumn(column).setPreferredWidth(size);
}
/**
* Set the data for this table.
*
* @param data
*/
public void setData(List<T> data) {
this.data.setData(data);
updateButtons();
}
/**
* Returns the (possibly edited by the user) data of this table.
*
* @return
*/
public List<T> getData() {
return this.data.getData();
}
/**
* Sets the item editor, which must be done before stuff can be edited.
*
* @param editor
*/
public void setItemEditor(ItemEditor<T> editor) {
this.editor = editor;
}
/**
* Sets the context menu for this.
*
* @param menu
*/
public final void setPopupMenu(TableContextMenu<T> menu) {
contextMenu = menu;
}
/**
* Sets the {@code TableEditorListener}. Only one listener can be set at a
* time.
*
* @param listener The listener to set
*/
public final void setTableEditorListener(TableEditorListener<T> listener) {
this.listener = listener;
}
/**
* Opens the context menu if this MouseEvent was a popup trigger and a menu
* is set.
*
* @param e The MouseEvent
*/
private void popupMenu(MouseEvent e) {
if (contextMenu != null && e.isPopupTrigger()) {
int modelIndex = indexToModel(table.getSelectedRow());
if (modelIndex != -1) {
T entry = data.get(modelIndex);
contextMenu.showMenu(entry, table, e.getX(), e.getY());
}
}
}
/**
* Select the row at the given coordinates.
*
* @param p The {@code Point} containing the coordinates
*/
private void selectRowAt(Point p) {
int row = table.rowAtPoint(p);
if (row != -1) {
setRowSelected(row);
}
}
/**
* Convenience method to create {@code GridBagConstraints}.
*
* @param x The x coordinate in the grid
* @param y The y coordinate in the grid
* @param w The width in the grid
* @param h The height in the grid
* @return {@code GridBagConstraints} with the given values
*/
private GridBagConstraints makeGbc(int x, int y, int w, int h) {
GridBagConstraints gbc = new GridBagConstraints();
gbc.gridx = x;
gbc.gridy = y;
gbc.gridwidth = w;
gbc.gridheight = h;
return gbc;
}
private void updateFiltering() {
String filterText = filterInput.getText();
RowFilter<ListTableModel<T>, Object> rf = null;
try {
rf = RowFilter.regexFilter(filterText, 0);
} catch (PatternSyntaxException ex) {
return;
}
currentlyFiltering = rf != null && !filterText.isEmpty();
sorter.setRowFilter(rf);
scrollToSelection();
updateButtons();
}
/**
* Sets the size, icon and tooltip of a button and adds the ActionListener.
*
* @param button
* @param icon
* @param tooltip
*/
private void configureButton(JButton button, String icon, String tooltip) {
button.setIcon(new ImageIcon(ListSelector.class.getResource(icon)));
button.setToolTipText(tooltip);
button.setPreferredSize(BUTTON_SIZE);
button.setSize(BUTTON_SIZE);
button.setMaximumSize(BUTTON_SIZE);
button.setMinimumSize(BUTTON_SIZE);
button.addActionListener(buttonActionListener);
}
/**
* Update the enabled-state of the buttons.
*/
private void updateButtons() {
boolean enabled = table.getSelectedRowCount() == 1;
add.setEnabled(true);
remove.setEnabled(enabled);
edit.setEnabled(enabled);
moveUp.setEnabled(enabled);
moveDown.setEnabled(enabled);
if (currentlyFiltering) {
add.setEnabled(false);
edit.setEnabled(false);
moveUp.setEnabled(false);
moveDown.setEnabled(false);
}
}
/**
* Sets the given row as selected and scrolls to it if necessary.
*
* @param viewIndex
*/
private void setRowSelected(int viewIndex) {
table.getSelectionModel().setSelectionInterval(viewIndex, viewIndex);
scrollToRow(viewIndex);
}
private void scrollToSelection() {
int index = table.getSelectedRow();
scrollToRow(index);
}
private void scrollToRow(int index) {
if (index != -1) {
table.scrollRectToVisible(table.getCellRect(index, 0, true));
// System.out.println(table.getVisibleRect()+" "+);
// Rectangle row = table.getCellRect(index, 0, true);
// int visibleHeight = table.getVisibleRect().height;
// int rowHeight = row.height;
// if (visibleHeight > rowHeight*4) {
//
// }
}
}
/**
* Open the edit dialog with the given {@code preset} already filling in
* the data it contains. If the edit dialog isn't canceled, the resulting
* entry is added after checking for duplicates. It is added at the selected
* position or at the beginning of the table if nothing is selected.
*
* @param preset The entry used to fill out some data in the edit dialog
*/
protected void addItem(T preset) {
T result = editor.showEditor(preset, this, false);
// If the user didn't cancel the dialog, work with the result.
if (result != null) {
// Check if the resulting entry is already in the table.
if (data.contains(result)) {
String[] options = new String[]{"Don't save", "Edit again"};
int r = JOptionPane.showOptionDialog(this, "Another item with the same name"
+ " is already in the list.", "Duplicate item",
JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, null);
if (r == 1) {
addItem(result);
}
} else {
// Insert at the selected position or at the beginning of the
// table if nothing is selected.
int selectedIndex = table.getSelectedRow();
int modelIndex = indexToModel(selectedIndex);
if (modelIndex != -1) {
data.insert(modelIndex, result);
setRowSelected(indexToView(modelIndex));
} else {
data.insert(0, result);
setRowSelected(indexToView(0));
}
if (listener != null) {
listener.itemAdded(result);
}
}
}
}
/**
* Edit the currently selected item.
*
* @see editItem(int modelIndex, T preset)
*/
private void editSelectedItem() {
editItem(-1, null);
}
/**
* Edit the entry at the given {@code modelIndex}.
*
* @param modelIndex The index
* @see editItem(int modelIndex, T preset)
*/
protected void editItem(int modelIndex) {
editItem(modelIndex, null);
}
/**
* Open an edit dialog for the entry at the given {@code modelIndex}.
*
* @param modelIndex The model index. If this is -1 then the currently
* selected entry is edited. If no entry is selected, then nothing is done.
* @param preset The preset is used to fill out the dialog with the data it
* contains, if it is {@code null}, then the edited entry is used as preset
*/
protected void editItem(int modelIndex, T preset) {
if (modelIndex == -1) {
modelIndex = indexToModel(table.getSelectedRow());
if (modelIndex == -1) {
return;
}
}
setRowSelected(indexToView(modelIndex));
if (preset == null) {
preset = data.get(modelIndex);
}
T result = editor.showEditor(preset, this, true);
// Done editing in the dialog, work with the result if the user didn't
// cancel the dialog.
if (result != null) {
// Check if the resulting entry is already in the data, but is not
// the one being edited, which means it would be a duplicate.
int present = data.indexOf(result);
if (present != -1 && present != modelIndex) {
String[] options = new String[]{"Don't save", "Edit again"};
int r = JOptionPane.showOptionDialog(this, "Another item with the same name"
+ " is already in the list.", "Duplicate item",
JOptionPane.YES_NO_OPTION, JOptionPane.QUESTION_MESSAGE, null, options, null);
if (r == 1) {
editItem(modelIndex, result);
}
} else {
data.set(modelIndex, result);
if (listener != null) {
listener.itemEdited(preset, result);
}
}
}
setRowSelected(indexToView(modelIndex));
}
/**
* Remove the selected entry. If no entry is selected, nothing is done.
* After removing, an appropriate remaining entry is selected.
*/
protected void removeSelected() {
// If table is empty, nothing can be selected to remove
if (table.getRowCount() == 0) {
return;
}
// Get selected entry and remove it if present
int viewIndex = table.getSelectedRow();
int modelIndex = indexToModel(viewIndex);
if (modelIndex == -1) {
return;
}
T removedItem = data.remove(modelIndex);
// Select appropriate row after removing
if (table.getRowCount() > viewIndex) {
setRowSelected(viewIndex);
} else if (viewIndex-1 >= 0 && table.getRowCount() > viewIndex-1) {
setRowSelected(viewIndex-1);
}
// Update buttons state and inform listener
updateButtons();
if (listener != null) {
listener.itemRemoved(removedItem);
}
}
/**
* Moves the selected item up in the model (and table). This can behave
* kind of odd when the table is filtered or sorted automatically, so it
* should not be used then.
*/
protected void moveUpSelected() {
int selectedIndex = table.getSelectedRow();
if (selectedIndex > -1) {
int index = data.moveUp(indexToModel(selectedIndex));
setRowSelected(indexToView(index));
}
}
/**
* Moves the selected item down in the model (and table). This can behave
* kind of odd when the table is filtered or sorted automatically, so it
* should not be used then.
*/
protected void moveDownSelected() {
int selectedIndex = table.getSelectedRow();
if (selectedIndex > -1) {
int index = data.moveDown(indexToModel(selectedIndex));
setRowSelected(indexToView(index));
}
}
/**
* Convert a view index to model index.
*
* @param index The index to convert
* @return The converted index, or {@code -1} if {@code index} was
* {@code -1}
*/
private int indexToModel(int index) {
if (index == -1) {
return -1;
}
return table.convertRowIndexToModel(index);
}
/**
* Convert a model index to view index.
*
* @param index The index to convert
* @return The corresponding index of the view, or {@code -1} if the row
* isn't visible
*/
private int indexToView(int index) {
return table.convertRowIndexToView(index);
}
/**
* Receives events from the buttons and calls the appropriate table methods.
*/
private class ButtonAction implements ActionListener {
@Override
public void actionPerformed(ActionEvent e) {
if (e.getSource() == moveUp) {
moveUpSelected();
} else if (e.getSource() == moveDown) {
moveDownSelected();
} else if (e.getSource() == remove) {
removeSelected();
} else if (e.getSource() == edit) {
editSelectedItem();
} else if (e.getSource() == add) {
addItem(null);
} else if (e.getSource() == refresh) {
if (listener != null) {
listener.refreshData();
}
}
}
}
/**
* A context menu that in addition to the invoker and coordinates, also
* receives the item it was opened on, so it can build the menu accordingly.
*
* @param <T> The type of the item
*/
public static abstract class TableContextMenu<T> extends JPopupMenu {
/**
* The menu should open itself at the given coordinates. It can
* customize itself based on which {@code item} it was opened for.
*
* @param item The item it was opened for (usually by right-clicking
* on it)
* @param invoker The Component it was opened on
* @param x The x-coordinate where it should be opened
* @param y The y-coordinate where it should be opened
*/
public abstract void showMenu(T item, Component invoker, int x, int y);
}
/**
* An item editor is opened with the item to edit, the parent component
* and whether the item is being edited or added. The implementation can
* then build the GUI accordingly. When finished, the editor should give
* the edited item back, or null if the action was canceled.
*
* @param <T> The type of the item to edit
*/
public static interface ItemEditor<T> {
/**
* Opens the editor, which the user can use to add or change an item.
*
* @param preset The item to fill the GUI with initially, can be
* {@code null}
* @param c The parent component
* @param edit Whether this item is edited or added (might set the title
* accordingly for example)
* @return The changed or added item, or {@code null} if the action was
* canceled
*/
public T showEditor(T preset, Component c, boolean edit);
}
/**
* Users of the TableEditor can register a listener of this type to be
* informed about edits to the table. This is one of the main ways to
* actually change the data that is edited in this table elsewhere.
*
* @param <T> The type of the items to be edited
*/
public static interface TableEditorListener<T> {
/**
* Called when an item has been added to the table. The table should
* not allow for duplicates to be added, but it is prudent to not rely
* on that.
*
* @param item The item that was added
*/
public void itemAdded(T item);
/**
* Called when an item has been removed in the table.
*
* @param item The item that was removed
*/
public void itemRemoved(T item);
/**
* Called when an item was edited in the table. The {@code oldItem}
* contains the item before editing, the {@code newItem} contains the
* changed item, so it can also be determined what changed.
*
* @param oldItem The item before editing
* @param newItem The item after editing
*/
public void itemEdited(T oldItem, T newItem);
/**
* Called when the user requested the data in the table to be refreshed.
*/
public void refreshData();
}
}
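// Illustrative usage sketch (the model and editor instances are assumed, not
// part of this file): a concrete subclass wires in a ListTableModel and an
// ItemEditor before the table is used.
//
//   public class WordTableEditor extends TableEditor<String> {
//       public WordTableEditor(ListTableModel<String> model, ItemEditor<String> editor) {
//           super(SORTING_MODE_SORTED, false);
//           setModel(model);
//           setItemEditor(editor);
//       }
//   }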
|
apache-2.0
|
evanchooly/morphia
|
morphia/src/main/java/org/mongodb/morphia/utils/package-info.java
|
695
|
/*
* Copyright (c) 2008-2015 MongoDB, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* Provides various utilities used by Morphia.
*/
package org.mongodb.morphia.utils;
|
apache-2.0
|
ssvlab/esbmc-gpu
|
regression/esbmc-cpp/try_catch/try-catch_value_02/main.cpp
|
152
|
#include <cassert>
class X {
public:
X() { throw 5; }
};
int main()
{
try {
X x;
}
catch(int e) {
assert(e==5);
}
return 0;
}
|
apache-2.0
|
igitras-blog/auth-server
|
src/main/java/com/igitras/auth/common/audit/SpringSecurityAuditAware.java
|
539
|
package com.igitras.auth.common.audit;
import com.igitras.auth.utils.Constrains;
import com.igitras.auth.utils.SecurityUtils;
import org.springframework.data.domain.AuditorAware;
/**
* Implementation of AuditorAware based on Spring Security.
*
* @author mason
*/
public class SpringSecurityAuditAware implements AuditorAware<String> {
@Override
public String getCurrentAuditor() {
String login = SecurityUtils.getCurrentUserLogin();
return login != null ? login : Constrains.Security.SYSTEM_ACCOUNT;
}
}
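// Illustrative configuration sketch (class and bean names below are assumed):
// registering this auditor with Spring Data JPA auditing.
//
//   @Configuration
//   @EnableJpaAuditing(auditorAwareRef = "springSecurityAuditAware")
//   public class AuditConfig {
//       @Bean
//       public AuditorAware<String> springSecurityAuditAware() {
//           return new SpringSecurityAuditAware();
//       }
//   }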
|
apache-2.0
|
cloudfoundry/buildpacks-ci
|
vendor/github.com/cloudfoundry/libcfbuildpack/helper/write_symlink.go
|
1132
|
/*
* Copyright 2018-2019 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package helper
import (
"fmt"
"os"
"path/filepath"
)
// WriteSymlink creates newName as a symbolic link to oldName. Before writing, it creates all required parent
// directories for the newName.
func WriteSymlink(oldName string, newName string) error {
if err := os.MkdirAll(filepath.Dir(newName), 0755); err != nil {
return err
}
if err := os.Symlink(oldName, newName); err != nil {
return fmt.Errorf("error while creating '%s' as symlink to '%s': %v", newName, oldName, err)
}
return nil
}
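// Illustrative usage sketch (the paths below are assumed, not part of this
// file):
//
//   if err := WriteSymlink("/layers/jdk/bin/java", "/workspace/.bin/java"); err != nil {
//       log.Fatal(err)
//   }
//
// The parent directory of the new link (/workspace/.bin here) is created
// first, so callers do not need to pre-create it.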
|
apache-2.0
|
friendranjith/vizzly
|
src/ch/ethz/vizzly/cache/sqldb/package-info.java
|
146
|
/**
* This package contains the implementation of a cache that stores aggregated data in a SQL database.
*/
package ch.ethz.vizzly.cache.sqldb;
|
apache-2.0
|
inbloom/ldap-in-memory
|
src/main/java/org/slc/sli/ldap/inmemory/domain/Ldif.java
|
772
|
package org.slc.sli.ldap.inmemory.domain;
import org.apache.commons.lang3.builder.ToStringBuilder;
import org.apache.commons.lang3.builder.ToStringStyle;
import javax.xml.bind.annotation.XmlElement;
import javax.xml.bind.annotation.XmlRootElement;
import javax.xml.bind.annotation.XmlType;
/**
* Created by tfritz on 1/7/14.
*/
@XmlRootElement(name= "ldif")
@XmlType(propOrder = {"name"})
public class Ldif {
private String name;
public String getName() {
return name;
}
@XmlElement(name="name", required = true)
public void setName(String name) {
this.name = name;
}
@Override
public String toString() {
return ToStringBuilder.reflectionToString(this, ToStringStyle.SHORT_PREFIX_STYLE).toString();
}
}
|
apache-2.0
|
Leonti/slickpm
|
src/main/webapp/resources/js/views/stagelist.js
|
1283
|
define([
'jquery',
'underscore',
'backbone',
'views/stagelistitem',
'text!templates/stageList.html',
'jqueryui/sortable'
], function( $, _, Backbone, StageListItemView, stageListTemplate, sortable) {
var StageListView = Backbone.View.extend({
template: _.template(stageListTemplate),
className: 'list',
initialize:function () {
this.model.bind("reset", this.render, this);
this.model.bind("add", function (stage) {
$(this.el).find('ul').append(new StageListItemView({model: stage}).render().el);
}, this);
},
render:function (eventName) {
$(this.el).html(this.template());
_.each(this.model.models, function (stage) {
$(this.el).find('ul').append(new StageListItemView({ model: stage }).render().el);
}, this);
var self = this;
$(this.el).find('ul').sortable({
update: function(event, ui) {
var newOrder = [];
$('.stage', this).each(function(i, el) {
newOrder.push($(el).data('id'));
});
self.model.updateOrder(newOrder);
}
});
return this;
}
});
return StageListView;
});
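// Illustrative usage sketch (the stages collection is assumed, not part of
// this file): render the list and let the jQuery UI sortable report reorders.
//
//   var view = new StageListView({ model: stages });
//   $('#content').html(view.render().el);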
|
apache-2.0
|
hamazy/sri
|
test/src/main/scala/sri/test/router/HomeScreen.scala
|
903
|
package sri.test.router
import sri.test.components.Text
import sri.universal.components._
import sri.web.all._
import sri.web.router.{WebRouterComponent, WebRouterCtrl}
import sri.web.styles.WebStyleSheet
import scala.scalajs.js
import scala.scalajs.js.annotation.ScalaJSDefined
object HomeScreen {
/**
* this hack is only for testing; don't do this in the real world :)
*/
var ctrl: WebRouterCtrl = null
@ScalaJSDefined
class Component extends WebRouterComponent[Unit, Unit] {
def render() = Text()("home")
override def componentDidMount(): Unit = {
ctrl = getRouterCtrl()
}
}
val ctor = getTypedConstructor(js.constructorOf[Component], classOf[Component])
ctor.contextTypes = sri.web.router.routerContextTypes
def apply(key: js.UndefOr[String] = js.undefined, ref: js.Function1[Component, _] = null) = createElementNoProps(ctor, key = key, ref = ref)
}
|
apache-2.0
|
jfoy/heapster
|
manager/manager.go
|
6892
|
// Copyright 2015 Google Inc. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package manager
import (
"fmt"
"sync"
"time"
"github.com/golang/glog"
"k8s.io/heapster/model"
"k8s.io/heapster/sinks"
sink_api "k8s.io/heapster/sinks/api/v1"
"k8s.io/heapster/sinks/cache"
source_api "k8s.io/heapster/sources/api"
"k8s.io/heapster/store"
)
// Manager provides an interface to control the core of heapster.
// Implementations are not required to be thread safe.
type Manager interface {
// Housekeep collects data from all the configured sources and
// stores the data to all the configured sinks.
Housekeep()
// HousekeepModel performs housekeeping for the Model entity
HousekeepModel()
// Export the latest data point of all metrics.
ExportMetrics() ([]*sink_api.Point, error)
// Set the sinks to use
SetSinkUris(Uris) error
// Get the sinks currently in use
SinkUris() Uris
// Get a reference to the cluster entity of the model, if it exists.
GetCluster() model.Cluster
}
type realManager struct {
sources []source_api.Source
cache cache.Cache
model model.Cluster
sinkManager sinks.ExternalSinkManager
sinkUris Uris
lastSync time.Time
resolution time.Duration
align bool
decoder sink_api.Decoder
}
type syncData struct {
data source_api.AggregateData
mutex sync.Mutex
}
func NewManager(sources []source_api.Source, sinkManager sinks.ExternalSinkManager, res, bufferDuration time.Duration, c cache.Cache, useModel bool, modelRes time.Duration, align bool) (Manager, error) {
// TimeStore constructor passed to the cluster implementation.
tsConstructor := func() store.TimeStore {
// TODO(afein): determine default analogy of cache duration to Timestore durations.
return store.NewGCStore(store.NewCMAStore(), 5*bufferDuration)
}
var newCluster model.Cluster = nil
if useModel {
newCluster = model.NewCluster(tsConstructor, modelRes)
}
firstSync := time.Now()
if align {
firstSync = firstSync.Truncate(res).Add(res)
}
return &realManager{
sources: sources,
sinkManager: sinkManager,
cache: c,
model: newCluster,
lastSync: firstSync,
resolution: res,
align: align,
decoder: sink_api.NewDecoder(),
}, nil
}
func (rm *realManager) GetCluster() model.Cluster {
return rm.model
}
func (rm *realManager) scrapeSource(s source_api.Source, start, end time.Time, sd *syncData, errChan chan<- error) {
glog.V(2).Infof("attempting to get data from source %q", s.Name())
data, err := s.GetInfo(start, end, rm.resolution, rm.align)
if err != nil {
errChan <- fmt.Errorf("failed to get information from source %q - %v", s.Name(), err)
return
}
sd.mutex.Lock()
defer sd.mutex.Unlock()
sd.data.Merge(&data)
errChan <- nil
}
// HousekeepModel periodically populates the manager model from the manager cache.
func (rm *realManager) HousekeepModel() {
if rm.model != nil {
if err := rm.model.Update(rm.cache); err != nil {
glog.V(1).Infof("Model housekeeping returned error: %s", err.Error())
}
}
}
func (rm *realManager) Housekeep() {
errChan := make(chan error, len(rm.sources))
var sd syncData
start := rm.lastSync
end := time.Now()
if rm.align {
end = end.Truncate(rm.resolution)
if start.After(end) {
return
}
}
rm.lastSync = end
glog.V(2).Infof("starting to scrape data from sources start:%v end:%v", start, end)
for idx := range rm.sources {
s := rm.sources[idx]
go rm.scrapeSource(s, start, end, &sd, errChan)
}
var errors []string
for i := 0; i < len(rm.sources); i++ {
if err := <-errChan; err != nil {
errors = append(errors, err.Error())
}
}
glog.V(2).Infof("completed scraping data from sources. Errors: %v", errors)
if err := rm.cache.StorePods(sd.data.Pods); err != nil {
errors = append(errors, err.Error())
}
if err := rm.cache.StoreContainers(sd.data.Machine); err != nil {
errors = append(errors, err.Error())
}
if err := rm.cache.StoreContainers(sd.data.Containers); err != nil {
errors = append(errors, err.Error())
}
if err := rm.sinkManager.Store(sd.data); err != nil {
errors = append(errors, err.Error())
}
if len(errors) > 0 {
glog.V(1).Infof("housekeeping resulted in following errors: %v", errors)
}
}
func (rm *realManager) ExportMetrics() ([]*sink_api.Point, error) {
var zero time.Time
// Get all pods as points.
pods := trimStatsForPods(rm.cache.GetPods(zero, zero))
timeseries, err := rm.decoder.TimeseriesFromPods(pods)
if err != nil {
return []*sink_api.Point{}, err
}
points := make([]*sink_api.Point, 0, len(timeseries))
points = appendPoints(points, timeseries)
// Get all nodes as points.
containers := trimStatsForContainers(rm.cache.GetNodes(zero, zero))
timeseries, err = rm.decoder.TimeseriesFromContainers(containers)
if err != nil {
return []*sink_api.Point{}, err
}
points = appendPoints(points, timeseries)
// Get all free containers as points.
containers = trimStatsForContainers(rm.cache.GetFreeContainers(zero, zero))
timeseries, err = rm.decoder.TimeseriesFromContainers(containers)
if err != nil {
return []*sink_api.Point{}, err
}
points = appendPoints(points, timeseries)
return points, nil
}
// Extract the points from the specified timeseries and append them to output.
func appendPoints(output []*sink_api.Point, toExtract []sink_api.Timeseries) []*sink_api.Point {
for i := range toExtract {
output = append(output, toExtract[i].Point)
}
return output
}
// Only keep latest stats for the specified pods
func trimStatsForPods(pods []*cache.PodElement) []*cache.PodElement {
for _, pod := range pods {
trimStatsForContainers(pod.Containers)
}
return pods
}
// Only keep latest stats for the specified containers
func trimStatsForContainers(containers []*cache.ContainerElement) []*cache.ContainerElement {
for _, cont := range containers {
onlyKeepLatestStat(cont)
}
return containers
}
// Only keep the latest stats data point.
func onlyKeepLatestStat(cont *cache.ContainerElement) {
if len(cont.Metrics) > 1 {
cont.Metrics = cont.Metrics[0:1]
}
}
func (rm *realManager) SetSinkUris(sinkUris Uris) error {
sinks, err := newSinks(sinkUris)
if err != nil {
return err
}
if err := rm.sinkManager.SetSinks(sinks); err != nil {
return err
}
rm.sinkUris = sinkUris
return nil
}
func (rm *realManager) SinkUris() Uris {
return rm.sinkUris
}
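// Illustrative wiring sketch (sources, sinkManager and c are assumed, not part
// of this file): a caller would drive housekeeping at the manager's resolution.
//
//   mgr, err := NewManager(sources, sinkManager, 10*time.Second, time.Minute, c, true, time.Minute, true)
//   if err != nil {
//       glog.Fatal(err)
//   }
//   for range time.Tick(10 * time.Second) {
//       mgr.Housekeep()
//       mgr.HousekeepModel()
//   }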
|
apache-2.0
|
yandex-disk/yandex-disk-restapi-java
|
disk-restapi-sdk/src/main/java/com/yandex/disk/rest/exceptions/http/UnsupportedMediaTypeException.java
|
487
|
/*
* (C) 2015 Yandex LLC (https://yandex.com/)
*
* The source code of Java SDK for Yandex.Disk REST API
* is available to use under terms of Apache License,
* Version 2.0. See the file LICENSE for the details.
*/
package com.yandex.disk.rest.exceptions.http;
import com.yandex.disk.rest.json.ApiError;
public class UnsupportedMediaTypeException extends HttpCodeException {
public UnsupportedMediaTypeException(int code, ApiError response) {
super(code, response);
}
}
|
apache-2.0
|
AfricaRegex/SjcProduct
|
SjcProject/WebRoot/js/basicInfo/actUnit.js
|
2138
|
var actUnit={
query:function(){
$("#unitName").val($("#unitName").val().replace("计费单位",""));
$("#query").submit();
} ,
modify:function(){
var cbs = $('input[name="items"]:checked');
if(cbs.length==0||cbs.length>1){
Dialog.alert("请选择一条修改记录 ");
return ;
}else{
var id=$(cbs[0]).val();
pf.ajaxGetRequest("actUnitAction!queryArtUnitById.action?actUnitVo.id="+id,function(data){
obj=strToJson(data);
$("#actUnitId").val(obj.id);
$("#actUnitName").val(obj.unitName);
$("#actUnitSort").val(obj.unitSort);
opendiv('light1');
});
}
},
refreshList:function(){
window.location.reload(true);
},
deleteStart:function(){
var cbs = $('input[name="items"]:checked');
if(cbs.length==0){
Dialog.alert("请选择至少一条待删除记录 ");
return ;
}
opendiv('light3');
},
deleteObj:function(){
colsediv('light3');
var cbs = $('input[name="items"]:checked');
var ids="";
for(var i=0;i<cbs.length;i++){
ids+=$(cbs[i]).val()+"-";
}
pf.ajaxGetRequest("actUnitAction!deleteArtUnit.action?ids="+ids,function(){
window.location.reload(true);
});
},
saveNew:function(){
$("#actUnitId").val("");
$("#actUnitName").val("");
$("#actUnitSort").val("");
opendiv('light1');
},
saveOrUpdate: function(){
var unitName=$("#actUnitName").val();
var unitSort=$("#actUnitSort").val();
if(unitName.length>20 || unitName.trim()==""){
Dialog.alert("计费单位不能超过20个字符,不能为空!");
return;
}
var r = /^[-+]?\d*$/;
if(!r.test(unitSort) || unitSort<1 || unitSort>99 || unitSort.trim()==""){
Dialog.alert("排序请输入1-99整数,不能为空!"); // "Sort order must be an integer from 1 to 99 and must not be empty!"
return ;
}
$.ajax({
type: "POST",
dataType: "text",
url: "actUnitAction!saveOrUpdateArtUnit.action", //提交到一般处理程序请求数据
data: $("#fm").serialize(),
success: function(data) {
obj=strToJson(data);
Dialog.alert(obj.data);
window.location.reload(true);
}
});
}
}
|
apache-2.0
|
GavinHohenheim/ccw-idle-dev-angular
|
src/app/project/project.ts
|
190
|
export class Project {
id: number;
duration: number; // in ticks
reward: number;
loc: number;
}
export class ActiveProject extends Project {
startDate: Date; // set at accept()
}
|
apache-2.0
|
wrouesnel/vaultfs
|
vendor/github.com/sirupsen/logrus/text_formatter.go
|
4120
|
package logrus
import (
"bytes"
"fmt"
"sort"
"strings"
"sync"
"time"
)
const (
nocolor = 0
red = 31
green = 32
yellow = 33
blue = 36
gray = 37
)
var (
baseTimestamp time.Time
)
func init() {
baseTimestamp = time.Now()
}
// TextFormatter formats logs into text
type TextFormatter struct {
// Set to true to bypass checking for a TTY before outputting colors.
ForceColors bool
// Force disabling colors.
DisableColors bool
// Disable timestamp logging. useful when output is redirected to logging
// system that already adds timestamps.
DisableTimestamp bool
// Enable logging the full timestamp when a TTY is attached instead of just
// the time passed since beginning of execution.
FullTimestamp bool
// TimestampFormat to use for display when a full timestamp is printed
TimestampFormat string
// The fields are sorted by default for a consistent output. For applications
// that log extremely frequently and don't use the JSON formatter this may not
// be desired.
DisableSorting bool
// QuoteEmptyFields will wrap empty fields in quotes if true
QuoteEmptyFields bool
// Whether the logger's out is to a terminal
isTerminal bool
sync.Once
}
func (f *TextFormatter) init(entry *Entry) {
if entry.Logger != nil {
f.isTerminal = IsTerminal(entry.Logger.Out)
}
}
// Format renders a single log entry
func (f *TextFormatter) Format(entry *Entry) ([]byte, error) {
var b *bytes.Buffer
keys := make([]string, 0, len(entry.Data))
for k := range entry.Data {
keys = append(keys, k)
}
if !f.DisableSorting {
sort.Strings(keys)
}
if entry.Buffer != nil {
b = entry.Buffer
} else {
b = &bytes.Buffer{}
}
prefixFieldClashes(entry.Data)
f.Do(func() { f.init(entry) })
isColored := (f.ForceColors || f.isTerminal) && !f.DisableColors
timestampFormat := f.TimestampFormat
if timestampFormat == "" {
timestampFormat = defaultTimestampFormat
}
if isColored {
f.printColored(b, entry, keys, timestampFormat)
} else {
if !f.DisableTimestamp {
f.appendKeyValue(b, "time", entry.Time.Format(timestampFormat))
}
f.appendKeyValue(b, "level", entry.Level.String())
if entry.Message != "" {
f.appendKeyValue(b, "msg", entry.Message)
}
for _, key := range keys {
f.appendKeyValue(b, key, entry.Data[key])
}
}
b.WriteByte('\n')
return b.Bytes(), nil
}
func (f *TextFormatter) printColored(b *bytes.Buffer, entry *Entry, keys []string, timestampFormat string) {
var levelColor int
switch entry.Level {
case DebugLevel:
levelColor = gray
case WarnLevel:
levelColor = yellow
case ErrorLevel, FatalLevel, PanicLevel:
levelColor = red
default:
levelColor = blue
}
levelText := strings.ToUpper(entry.Level.String())[0:4]
if f.DisableTimestamp {
fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m %-44s ", levelColor, levelText, entry.Message)
} else if !f.FullTimestamp {
fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%04d] %-44s ", levelColor, levelText, int(entry.Time.Sub(baseTimestamp)/time.Second), entry.Message)
} else {
fmt.Fprintf(b, "\x1b[%dm%s\x1b[0m[%s] %-44s ", levelColor, levelText, entry.Time.Format(timestampFormat), entry.Message)
}
for _, k := range keys {
v := entry.Data[k]
fmt.Fprintf(b, " \x1b[%dm%s\x1b[0m=", levelColor, k)
f.appendValue(b, v)
}
}
func (f *TextFormatter) needsQuoting(text string) bool {
if f.QuoteEmptyFields && len(text) == 0 {
return true
}
for _, ch := range text {
if !((ch >= 'a' && ch <= 'z') ||
(ch >= 'A' && ch <= 'Z') ||
(ch >= '0' && ch <= '9') ||
ch == '-' || ch == '.' || ch == '_' || ch == '/' || ch == '@' || ch == '^' || ch == '+') {
return true
}
}
return false
}
func (f *TextFormatter) appendKeyValue(b *bytes.Buffer, key string, value interface{}) {
if b.Len() > 0 {
b.WriteByte(' ')
}
b.WriteString(key)
b.WriteByte('=')
f.appendValue(b, value)
}
func (f *TextFormatter) appendValue(b *bytes.Buffer, value interface{}) {
stringVal, ok := value.(string)
if !ok {
stringVal = fmt.Sprint(value)
}
if !f.needsQuoting(stringVal) {
b.WriteString(stringVal)
} else {
b.WriteString(fmt.Sprintf("%q", stringVal))
}
}
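// Illustrative usage sketch (assumes the public logrus API; not part of this
// file): installing the formatter on a logger.
//
//   log := logrus.New()
//   log.Formatter = &logrus.TextFormatter{FullTimestamp: true, DisableColors: true}
//   log.WithField("mount", "/secret").Info("vault path mounted")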
|
apache-2.0
|
mnottheone/deeplearnjs
|
demos/model-builder/layer_builder.ts
|
10931
|
/**
* @license
* Copyright 2017 Google Inc. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
// tslint:disable-next-line:max-line-length
import {Array1D, Array2D, Array4D, conv_util, Graph, Initializer, NDArrayInitializer, Tensor, util, VarianceScalingInitializer, ZerosInitializer} from '../deeplearn';
/**
* Classes that specify operation parameters, how they affect output shape,
* and methods for building the operations themselves. Any new ops to be added
* to the model builder UI should be added here.
*/
export type LayerName = 'Fully connected' | 'ReLU' | 'Convolution' |
'Max pool' | 'Reshape' | 'Flatten';
/**
* Creates a layer builder object.
*
* @param layerName The name of the layer to build.
* @param layerBuilderJson An optional LayerBuilder JSON object. This doesn't
* have the prototype methods on them as it comes from serialization. This
* method creates the object with the necessary prototype methods.
*/
export function getLayerBuilder(
layerName: LayerName, layerBuilderJson?: LayerBuilder): LayerBuilder {
let layerBuilder: LayerBuilder;
switch (layerName) {
case 'Fully connected':
layerBuilder = new FullyConnectedLayerBuilder();
break;
case 'ReLU':
layerBuilder = new ReLULayerBuilder();
break;
case 'Convolution':
layerBuilder = new Convolution2DLayerBuilder();
break;
case 'Max pool':
layerBuilder = new MaxPoolLayerBuilder();
break;
case 'Reshape':
layerBuilder = new ReshapeLayerBuilder();
break;
case 'Flatten':
layerBuilder = new FlattenLayerBuilder();
break;
default:
throw new Error('Layer builder for ' + layerName + ' not found.');
}
// For layer builders passed as serialized objects, we create the objects and
// set the fields.
if (layerBuilderJson != null) {
for (const prop in layerBuilderJson) {
if (layerBuilderJson.hasOwnProperty(prop)) {
// tslint:disable-next-line:no-any
(layerBuilder as any)[prop] = (layerBuilderJson as any)[prop];
}
}
}
return layerBuilder;
}
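// Illustrative sketch (not part of the original source): serialized builders
// lose their prototype methods, so a JSON round-trip goes back through
// getLayerBuilder to restore them.
//
//   const built = getLayerBuilder('Max pool');
//   const json = JSON.parse(JSON.stringify(built)) as LayerBuilder;
//   const restored = getLayerBuilder('Max pool', json);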
export interface LayerParam {
label: string;
initialValue(inputShape: number[]): number|string;
type: 'number'|'text';
min?: number;
max?: number;
setValue(value: number|string): void;
getValue(): number|string;
}
export type LayerWeightsDict = {
[name: string]: number[]
};
export interface LayerBuilder {
layerName: LayerName;
getLayerParams(): LayerParam[];
getOutputShape(inputShape: number[]): number[];
addLayer(
g: Graph, network: Tensor, inputShape: number[], index: number,
weights?: LayerWeightsDict|null): Tensor;
// Return null if no errors, otherwise return an array of errors.
validate(inputShape: number[]): string[]|null;
}
export class FullyConnectedLayerBuilder implements LayerBuilder {
layerName: LayerName = 'Fully connected';
hiddenUnits: number;
getLayerParams(): LayerParam[] {
return [{
label: 'Hidden units',
initialValue: (inputShape: number[]) => 10,
type: 'number',
min: 1,
max: 1000,
setValue: (value: number) => this.hiddenUnits = value,
getValue: () => this.hiddenUnits
}];
}
getOutputShape(inputShape: number[]): number[] {
return [this.hiddenUnits];
}
addLayer(
g: Graph, network: Tensor, inputShape: number[], index: number,
weights: LayerWeightsDict|null): Tensor {
const inputSize = util.sizeFromShape(inputShape);
const wShape: [number, number] = [this.hiddenUnits, inputSize];
let weightsInitializer: Initializer;
let biasInitializer: Initializer;
if (weights != null) {
weightsInitializer =
new NDArrayInitializer(Array2D.new(wShape, weights['W']));
biasInitializer = new NDArrayInitializer(Array1D.new(weights['b']));
} else {
weightsInitializer = new VarianceScalingInitializer();
biasInitializer = new ZerosInitializer();
}
const useBias = true;
return g.layers.dense(
'fc1', network, this.hiddenUnits, null, useBias, weightsInitializer,
biasInitializer);
}
validate(inputShape: number[]) {
if (inputShape.length !== 1) {
return ['Input shape must be a Array1D.'];
}
return null;
}
}
export class ReLULayerBuilder implements LayerBuilder {
layerName: LayerName = 'ReLU';
getLayerParams(): LayerParam[] {
return [];
}
getOutputShape(inputShape: number[]): number[] {
return inputShape;
}
addLayer(
g: Graph, network: Tensor, inputShape: number[], index: number,
weights: LayerWeightsDict|null): Tensor {
return g.relu(network);
}
validate(inputShape: number[]): string[]|null {
return null;
}
}
export class Convolution2DLayerBuilder implements LayerBuilder {
layerName: LayerName = 'Convolution';
fieldSize: number;
stride: number;
zeroPad: number;
outputDepth: number;
getLayerParams(): LayerParam[] {
return [
{
label: 'Field size',
initialValue: (inputShape: number[]) => 3,
type: 'number',
min: 1,
max: 100,
setValue: (value: number) => this.fieldSize = value,
getValue: () => this.fieldSize
},
{
label: 'Stride',
initialValue: (inputShape: number[]) => 1,
type: 'number',
min: 1,
max: 100,
setValue: (value: number) => this.stride = value,
getValue: () => this.stride
},
{
label: 'Zero pad',
initialValue: (inputShape: number[]) => 0,
type: 'number',
min: 0,
max: 100,
setValue: (value: number) => this.zeroPad = value,
getValue: () => this.zeroPad
},
{
label: 'Output depth',
initialValue: (inputShape: number[]) =>
this.outputDepth != null ? this.outputDepth : 1,
type: 'number',
min: 1,
max: 1000,
setValue: (value: number) => this.outputDepth = value,
getValue: () => this.outputDepth
}
];
}
getOutputShape(inputShape: number[]): number[] {
return conv_util.computeOutputShape3D(
inputShape as [number, number, number], this.fieldSize,
this.outputDepth, this.stride, this.zeroPad);
}
addLayer(
g: Graph, network: Tensor, inputShape: number[], index: number,
weights: LayerWeightsDict|null): Tensor {
const wShape: [number, number, number, number] =
[this.fieldSize, this.fieldSize, inputShape[2], this.outputDepth];
let w: Array4D;
let b: Array1D;
if (weights != null) {
w = Array4D.new(wShape, weights['W']);
b = Array1D.new(weights['b']);
} else {
w = Array4D.randTruncatedNormal(wShape, 0, 0.1);
b = Array1D.zeros([this.outputDepth]);
}
const wTensor = g.variable('conv2d-' + index + '-w', w);
const bTensor = g.variable('conv2d-' + index + '-b', b);
return g.conv2d(
network, wTensor, bTensor, this.fieldSize, this.outputDepth,
this.stride, this.zeroPad);
}
validate(inputShape: number[]) {
if (inputShape.length !== 3) {
return ['Input shape must be a Array3D.'];
}
return null;
}
}
export class MaxPoolLayerBuilder implements LayerBuilder {
layerName: LayerName = 'Max pool';
fieldSize: number;
stride: number;
zeroPad: number;
getLayerParams(): LayerParam[] {
return [
{
label: 'Field size',
initialValue: (inputShape: number[]) => 3,
type: 'number',
min: 1,
max: 100,
setValue: (value: number) => this.fieldSize = value,
getValue: () => this.fieldSize
},
{
label: 'Stride',
initialValue: (inputShape: number[]) => 1,
type: 'number',
min: 1,
max: 100,
setValue: (value: number) => this.stride = value,
getValue: () => this.stride
},
{
label: 'Zero pad',
initialValue: (inputShape: number[]) => 0,
type: 'number',
min: 0,
max: 100,
setValue: (value: number) => this.zeroPad = value,
getValue: () => this.zeroPad
}
];
}
getOutputShape(inputShape: number[]): number[] {
return conv_util.computeOutputShape3D(
inputShape as [number, number, number], this.fieldSize, inputShape[2],
this.stride, this.zeroPad);
}
addLayer(
g: Graph, network: Tensor, inputShape: number[], index: number,
weights: LayerWeightsDict|null): Tensor {
return g.maxPool(network, this.fieldSize, this.stride, this.zeroPad);
}
validate(inputShape: number[]) {
if (inputShape.length !== 3) {
return ['Input shape must be a Array3D.'];
}
return null;
}
}
export class ReshapeLayerBuilder implements LayerBuilder {
layerName: LayerName = 'Reshape';
outputShape: number[];
getLayerParams() {
return [{
label: 'Shape (comma separated)',
initialValue: (inputShape: number[]) => inputShape.join(', '),
type: 'text' as 'text',
setValue: (value: string) => this.outputShape =
value.split(',').map((value) => +value),
getValue: () => this.outputShape.join(', ')
}];
}
getOutputShape(inputShape: number[]): number[] {
return this.outputShape;
}
addLayer(
g: Graph, network: Tensor, inputShape: number[], index: number,
weights: LayerWeightsDict|null): Tensor {
return g.reshape(network, this.outputShape);
}
validate(inputShape: number[]) {
const inputSize = util.sizeFromShape(inputShape);
const outputSize = util.sizeFromShape(this.outputShape);
if (inputSize !== outputSize) {
return [
`Input size (${inputSize}) must match output size (${outputSize}).`
];
}
return null;
}
}
export class FlattenLayerBuilder implements LayerBuilder {
layerName: LayerName = 'Flatten';
getLayerParams(): LayerParam[] {
return [];
}
getOutputShape(inputShape: number[]): number[] {
return [util.sizeFromShape(inputShape)];
}
addLayer(
g: Graph, network: Tensor, inputShape: number[], index: number,
weights: LayerWeightsDict|null): Tensor {
return g.reshape(network, this.getOutputShape(inputShape));
}
validate(inputShape: number[]): string[]|null {
return null;
}
}
|
apache-2.0
|
PayStax/paystax-java-client
|
src/main/java/com/paystax/client/LinkedResourceAdapter.java
|
848
|
/**
* Copyright 2013-2016 PayStax, LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.paystax.client;
import lombok.Data;
import java.util.Map;
/**
* @author Erik R. Jensen
*/
@Data
public class LinkedResourceAdapter implements LinkedResource {
private Map<String, String> links;
}
|
apache-2.0
|
oeg-upm/epnoi
|
harvester/src/test/java/org/epnoi/harvester/routes/file/FileTest.java
|
2662
|
package org.epnoi.harvester.routes.file;
import es.cbadenes.lab.test.IntegrationTest;
import org.apache.camel.EndpointInject;
import org.apache.camel.builder.RouteBuilder;
import org.apache.camel.component.mock.MockEndpoint;
import org.apache.camel.test.junit4.CamelTestSupport;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import java.nio.file.Path;
import java.nio.file.Paths;
/**
* Created by cbadenes on 31/12/15.
*/
@Category(IntegrationTest.class)
public class FileTest extends CamelTestSupport {
@EndpointInject(uri = "mock:result")
protected MockEndpoint resultEndpoint;
@Test
public void oaipmhMessage() throws Exception {
resultEndpoint.expectedMessageCount(3);
Thread.sleep(60000);
// template.sendBody(xml);
resultEndpoint.assertIsSatisfied();
}
@Override
protected RouteBuilder createRouteBuilder() {
return new RouteBuilder() {
public void configure() {
Path path = Paths.get("src/test/resources/inbox/siggraph-2015");
/**
* Route[[
* idempotentRepository=#fileStore]] -> [To[log:org.epnoi.harvester.routes.file.FileRouteMaker?level=INFO], SetProperty[epnoi.domain.uri, constant{}], SetProperty[epnoi.source.url, constant{file://siggraph-2006}], SetProperty[epnoi.source.uri, constant{http://epnoi.org/sources/48afa130-28c1-4bb3-bbc2-dac5da760fa1}], SetProperty[epnoi.source.protocol, constant{file}], SetProperty[epnoi.source.name, constant{siggraph-2006}], SetProperty[epnoi.publication.published, simple{${header.CamelFileLastModified}}], SetProperty[epnoi.publication.reference.format, constant{pdf}], SetProperty[epnoi.publication.format, constant{pdf}], SetProperty[epnoi.publication.url.local, simple{${header.CamelFileAbsolutePath}}], SetProperty[epnoi.publication.reference.url, simple{${header.CamelFileAbsolutePath}}], SetProperty[epnoi.publication.uri, simple{${header.CamelFileAbsolutePath}}], To[direct:common.ro.build]]]
*/
from("file://"+path.toFile().getAbsolutePath()+"?"+
"recursive=true&" +
"noop=true&"+
"chmod=777&" +
"delete=false&" +
"readLock=changed" +
"readLockCheckInterval=2000" +
"idempotent=true&" +
"idempotentKey=${file:name}-${file:size}").
to("log:org.epnoi.harvester.routes.FileTest?level=INFO").
to("mock:result");
}
};
}
}
|
apache-2.0
|
structurizr/java
|
structurizr-client/src/com/structurizr/io/json/AbstractJsonReader.java
|
584
|
package com.structurizr.io.json;
import com.fasterxml.jackson.databind.DeserializationFeature;
import com.fasterxml.jackson.databind.ObjectMapper;
class AbstractJsonReader {
ObjectMapper createObjectMapper() {
ObjectMapper objectMapper = new ObjectMapper();
objectMapper.enable(DeserializationFeature.ACCEPT_EMPTY_STRING_AS_NULL_OBJECT);
objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, false);
objectMapper.configure(DeserializationFeature.READ_UNKNOWN_ENUM_VALUES_AS_NULL, true);
return objectMapper;
}
}
|
apache-2.0
|
zstackio/zstack-woodpecker
|
integrationtest/vm/multihosts/backup/paths/path170.py
|
2303
|
import zstackwoodpecker.test_state as ts_header
TestAction = ts_header.TestAction
def path():
return dict(initial_formation="template2", \
path_list=[[TestAction.create_volume, "volume1","=scsi"], \
[TestAction.attach_volume, "vm1", "volume1"], \
[TestAction.create_volume, "volume2","=scsi"], \
[TestAction.attach_volume, "vm1", "volume2"], \
[TestAction.create_volume, "volume3","=scsi"], \
[TestAction.attach_volume, "vm1", "volume3"], \
[TestAction.create_volume, "volume4","=scsi"], \
[TestAction.attach_volume, "vm1", "volume4"], \
[TestAction.create_volume, "volume5","=scsi"], \
[TestAction.attach_volume, "vm1", "volume5"], \
[TestAction.create_volume, "volume6","=scsi"], \
[TestAction.attach_volume, "vm1", "volume6"], \
[TestAction.create_volume, "volume7","=scsi"], \
[TestAction.attach_volume, "vm1", "volume7"], \
[TestAction.create_volume, "volume8","=scsi"], \
[TestAction.attach_volume, "vm1", "volume8"], \
[TestAction.create_volume_backup, "volume1", "backup-volume-1"], \
[TestAction.create_vm_backup, "vm1", "backup-vm-1"], \
[TestAction.create_volume_snapshot, "vm1-root", "snapshot-root1"], \
[TestAction.create_volume_snapshot, "vm1-root", "snapshot-root2"], \
[TestAction.create_volume_snapshot, "vm1-root", "snapshot-root3"], \
[TestAction.batch_delete_volume_snapshot, ["snapshot-root2","snapshot-root3"]], \
[TestAction.stop_vm, "vm1"], \
[TestAction.use_vm_backup, "backup-vm-1"], \
[TestAction.change_vm_image, "vm1"], \
[TestAction.create_image_from_volume, "vm1", "image1"], \
[TestAction.start_vm, "vm1"], \
[TestAction.create_volume_backup, "volume2", "backup-volume-2"], \
[TestAction.stop_vm, "vm1"], \
[TestAction.use_vm_backup, "backup-vm-1"], \
[TestAction.reinit_vm, "vm1"], \
[TestAction.use_volume_snapshot, 'snapshot-root1'], \
[TestAction.start_vm, "vm1"], \
[TestAction.create_volume_backup, "volume3", "backup-volume-3"], \
[TestAction.stop_vm, "vm1"], \
[TestAction.use_volume_backup, "backup-volume-3"]])
|
apache-2.0
|
simp/puppetlabs-puppetdb
|
spec/unit/classes/server/db_read_uri_spec.rb
|
2154
|
require 'spec_helper'
describe 'puppetdb::server::read_database', type: :class do
context 'on a supported platform' do
let(:facts) do
{
osfamily: 'RedHat',
operatingsystem: 'RedHat',
operatingsystemrelease: '7.0',
fqdn: 'test.domain.local',
}
end
describe 'when passing jdbc subparams' do
let(:params) do
{
database_host: 'localhost',
jdbc_ssl_properties: '?ssl=true',
}
end
it {
is_expected.to contain_ini_setting('puppetdb_read_subname')
.with(
section: 'read-database',
setting: 'subname',
value: '//localhost:5432/puppetdb?ssl=true',
)
}
end
describe 'when using ssl communication' do
let(:params) do
{
database_host: 'cheery-rime.puppetlabs.net',
postgresql_ssl_on: true,
ssl_key_pk8_path: '/tmp/private_key.pk8',
}
end
it 'configures subname correctly' do
is_expected.to contain_ini_setting('puppetdb_read_subname')
.with(
ensure: 'present',
path: '/etc/puppetlabs/puppetdb/conf.d/read_database.ini',
section: 'read-database',
setting: 'subname',
value: '//cheery-rime.puppetlabs.net:5432/puppetdb?' \
'ssl=true&sslfactory=org.postgresql.ssl.LibPQFactory&' \
'sslmode=verify-full&' \
'sslrootcert=/etc/puppetlabs/puppetdb/ssl/ca.pem&' \
'sslkey=/tmp/private_key.pk8&' \
'sslcert=/etc/puppetlabs/puppetdb/ssl/public.pem',
)
end
context 'when setting jdbc_ssl_properties as well' do
let(:params) do
{
database_host: 'puppetdb',
jdbc_ssl_properties: '?ssl=true',
postgresql_ssl_on: true,
}
end
it 'raises an error' do
is_expected.to compile
.and_raise_error(%r{Variables 'postgresql_ssl_on' and 'jdbc_ssl_properties' can not be used at the same time!})
end
end
end
end
end
|
apache-2.0
|
coding-sig/Swift
|
src/app/services/auth.service.ts
|
919
|
// This service doesn't actually log the app in; it just tries to mimic the login process. It has
// an "isLoggedIn" flag to tell you whether the user is authenticated. Its login method simulates an
// API call to an external service by returning an Observable that resolves successfully after a short
// pause. The "redirectUrl" property will store the attempted URL so you can navigate to it after
// authenticating.
import { Injectable } from '@angular/core';
import { Observable } from 'rxjs/Observable';
import 'rxjs/add/observable/of';
import 'rxjs/add/operator/do';
import 'rxjs/add/operator/delay';
@Injectable()
export class AuthService {
isLoggedIn = false;
// store the URL so we can redirect after logging in
redirectUrl: string;
login(): Observable<boolean> {
return Observable.of(true).delay(1000).do(val => this.isLoggedIn = true);
}
logout(): void {
this.isLoggedIn = false;
}
}
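// A hedged usage sketch (not part of the original file): a hypothetical route
// guard built on this service. "AuthGuard" and the router wiring are
// assumptions; only isLoggedIn/redirectUrl come from AuthService itself.
//
//   import { CanActivate, Router, ActivatedRouteSnapshot, RouterStateSnapshot } from '@angular/router';
//
//   @Injectable()
//   export class AuthGuard implements CanActivate {
//     constructor(private authService: AuthService, private router: Router) {}
//     canActivate(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): boolean {
//       if (this.authService.isLoggedIn) { return true; }
//       this.authService.redirectUrl = state.url; // remember where we were headed
//       this.router.navigate(['/login']);
//       return false;
//     }
//   }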
|
apache-2.0
|
cbadenes/oeg-stemming
|
lib/src/main/java/es/upm/oeg/stemming/lib/domain/Analysis.java
|
250
|
package es.upm.oeg.stemming.lib.domain;
import lombok.Data;
import java.util.List;
/**
* Created by cbadenes on 09/07/15.
*/
@Data
public class Analysis {
private String id;
private String stemmer;
private List<Keyword> stems;
}
|
apache-2.0
|
HubSpot/Singularity
|
SingularityService/src/main/java/com/hubspot/singularity/scheduler/SingularityDeployAcceptanceManager.java
|
3266
|
package com.hubspot.singularity.scheduler;
import com.google.inject.Inject;
import com.google.inject.Singleton;
import com.hubspot.singularity.DeployAcceptanceResult;
import com.hubspot.singularity.DeployAcceptanceState;
import com.hubspot.singularity.DeployState;
import com.hubspot.singularity.SingularityDeploy;
import com.hubspot.singularity.SingularityPendingDeploy;
import com.hubspot.singularity.SingularityRequest;
import com.hubspot.singularity.SingularityTaskId;
import com.hubspot.singularity.hooks.DeployAcceptanceHook;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import java.util.Set;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@Singleton
public class SingularityDeployAcceptanceManager {
private static final Logger LOG = LoggerFactory.getLogger(
SingularityDeployAcceptanceManager.class
);
private final Set<DeployAcceptanceHook> acceptanceHooks;
@Inject
public SingularityDeployAcceptanceManager(Set<DeployAcceptanceHook> acceptanceHooks) {
this.acceptanceHooks = acceptanceHooks;
}
public Map<String, DeployAcceptanceResult> getAcceptanceResults(
SingularityRequest request,
SingularityDeploy deploy,
SingularityPendingDeploy pendingDeploy,
Collection<SingularityTaskId> activeTasksForPendingDeploy,
Collection<SingularityTaskId> inactiveTasksForPendingDeploy,
Collection<SingularityTaskId> otherActiveTasksForRequest
) {
Map<String, DeployAcceptanceResult> results = new HashMap<>();
Map<String, DeployAcceptanceState> existing = pendingDeploy
.getDeployProgress()
.getStepAcceptanceResults();
for (DeployAcceptanceHook hook : acceptanceHooks) {
if (
!existing.containsKey(hook.getName()) ||
existing.get(hook.getName()) == DeployAcceptanceState.PENDING
) {
try {
results.put(
hook.getName(),
hook.getAcceptanceResult(
request,
deploy,
pendingDeploy,
activeTasksForPendingDeploy,
inactiveTasksForPendingDeploy,
otherActiveTasksForRequest
)
);
} catch (Exception e) {
LOG.error("Uncaught exception running hook {}", hook.getName(), e);
if (hook.isFailOnUncaughtException()) {
results.put(
hook.getName(),
new DeployAcceptanceResult(DeployAcceptanceState.FAILED, e.getMessage())
);
} else {
results.put(
hook.getName(),
new DeployAcceptanceResult(
DeployAcceptanceState.SUCCEEDED,
String.format("Ignored err: %s", e.getMessage())
)
);
}
}
}
}
return results;
}
public static DeployState resultsToDeployState(
Map<String, DeployAcceptanceState> results
) {
if (results.isEmpty()) {
return DeployState.SUCCEEDED;
}
if (results.values().stream().anyMatch(d -> d == DeployAcceptanceState.FAILED)) {
return DeployState.FAILED;
}
if (results.values().stream().anyMatch(d -> d == DeployAcceptanceState.PENDING)) {
return DeployState.WAITING;
}
return DeployState.SUCCEEDED;
}
}
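// A hedged sketch (not part of this file) of what a DeployAcceptanceHook
// implementation could look like, using only the methods this manager calls
// (getName, isFailOnUncaughtException, getAcceptanceResult); the health-check
// logic itself is an assumption:
//
//   public class CanaryAcceptanceHook implements DeployAcceptanceHook {
//     @Override
//     public String getName() { return "canary"; }
//
//     @Override
//     public boolean isFailOnUncaughtException() { return true; }
//
//     @Override
//     public DeployAcceptanceResult getAcceptanceResult(
//         SingularityRequest request, SingularityDeploy deploy,
//         SingularityPendingDeploy pendingDeploy,
//         Collection<SingularityTaskId> activeTasksForPendingDeploy,
//         Collection<SingularityTaskId> inactiveTasksForPendingDeploy,
//         Collection<SingularityTaskId> otherActiveTasksForRequest) {
//       if (activeTasksForPendingDeploy.isEmpty()) {
//         return new DeployAcceptanceResult(DeployAcceptanceState.PENDING, "No tasks up yet");
//       }
//       return new DeployAcceptanceResult(DeployAcceptanceState.SUCCEEDED, "Canary healthy");
//     }
//   }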
|
apache-2.0
|
aparod/jonix
|
jonix-common/src/main/java/com/tectonica/jonix/struct/JonixExtent.java
|
1203
|
/*
* Copyright (C) 2012 Zach Melamed
*
* Latest version available online at https://github.com/zach-m/jonix
* Contact me at zach@tectonica.co.il
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.tectonica.jonix.struct;
import java.io.Serializable;
import com.tectonica.jonix.codelist.ExtentTypes;
import com.tectonica.jonix.codelist.ExtentUnits;
/*
* NOTE: THIS IS AN AUTO-GENERATED FILE, DON'T EDIT MANUALLY
*/
@SuppressWarnings("serial")
public class JonixExtent implements Serializable
{
/**
* The key of this struct
*/
public ExtentTypes extentType;
public ExtentUnits extentUnit;
/**
* (type: dt.StrictPositiveDecimal)
*/
public Double extentValue;
}
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/model/transform/ShardMarshaller.java
|
2528
|
/*
* Copyright 2017-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.model.transform;
import javax.annotation.Generated;
import com.amazonaws.SdkClientException;
import com.amazonaws.services.dynamodbv2.model.*;
import com.amazonaws.protocol.*;
import com.amazonaws.annotation.SdkInternalApi;
/**
* ShardMarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
@SdkInternalApi
public class ShardMarshaller {
private static final MarshallingInfo<String> SHARDID_BINDING = MarshallingInfo.builder(MarshallingType.STRING).marshallLocation(MarshallLocation.PAYLOAD)
.marshallLocationName("ShardId").build();
private static final MarshallingInfo<StructuredPojo> SEQUENCENUMBERRANGE_BINDING = MarshallingInfo.builder(MarshallingType.STRUCTURED)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("SequenceNumberRange").build();
private static final MarshallingInfo<String> PARENTSHARDID_BINDING = MarshallingInfo.builder(MarshallingType.STRING)
.marshallLocation(MarshallLocation.PAYLOAD).marshallLocationName("ParentShardId").build();
private static final ShardMarshaller instance = new ShardMarshaller();
public static ShardMarshaller getInstance() {
return instance;
}
/**
* Marshall the given parameter object.
*/
public void marshall(Shard shard, ProtocolMarshaller protocolMarshaller) {
if (shard == null) {
throw new SdkClientException("Invalid argument passed to marshall(...)");
}
try {
protocolMarshaller.marshall(shard.getShardId(), SHARDID_BINDING);
protocolMarshaller.marshall(shard.getSequenceNumberRange(), SEQUENCENUMBERRANGE_BINDING);
protocolMarshaller.marshall(shard.getParentShardId(), PARENTSHARDID_BINDING);
} catch (Exception e) {
throw new SdkClientException("Unable to marshall request to JSON: " + e.getMessage(), e);
}
}
}
|
apache-2.0
|
aws/aws-sdk-java
|
aws-java-sdk-dynamodb/src/main/java/com/amazonaws/services/dynamodbv2/datamodeling/DynamoDBTyped.java
|
6589
|
/*
* Copyright 2016-2022 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://aws.amazon.com/apache2.0
*
* This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
* OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and
* limitations under the License.
*/
package com.amazonaws.services.dynamodbv2.datamodeling;
import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapperFieldModel.DynamoDBAttributeType;
import java.lang.annotation.ElementType;
import java.lang.annotation.Inherited;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
/**
* Annotation to override the standard attribute type binding.
*
* <pre class="brush: java">
* @DynamoDBTyped(DynamoDBAttributeType.S)
* public MyObject getMyObject()
* </pre>
* <p><b>Standard Types</b></p>
* <p>Standard types do not require the annotation if applying the default
* attribute binding for that type.</p>
* <p>String/{@code S} types,</p>
* <ul>
* <li>{@link java.lang.Character}/{@code char}</li>
* <li>{@link java.lang.String}</li>
* <li>{@link java.net.URL}</li>
* <li>{@link java.net.URI}</li>
* <li>{@link java.util.Calendar}</li>
* <li>{@link java.util.Currency}</li>
* <li>{@link java.util.Date}</li>
* <li>{@link java.util.Locale}</li>
* <li>{@link java.util.TimeZone}</li>
* <li>{@link java.util.UUID}</li>
* <li>{@link S3Link}</li>
* </ul>
* <p>Number/{@code N} types,</p>
* <ul>
* <li>{@link java.math.BigDecimal}</li>
* <li>{@link java.math.BigInteger}</li>
* <li>{@link java.lang.Boolean}/{@code boolean}</li>
* <li>{@link java.lang.Byte}/{@code byte}</li>
* <li>{@link java.lang.Double}/{@code double}</li>
* <li>{@link java.lang.Float}/{@code float}</li>
* <li>{@link java.lang.Integer}/{@code int}</li>
* <li>{@link java.lang.Long}/{@code long}</li>
* <li>{@link java.lang.Short}/{@code short}</li>
* </ul>
* <p>Binary/{@code B} types,</p>
* <ul>
* <li>{@link java.nio.ByteBuffer}</li>
* <li>{@code byte[]}</li>
* </ul>
*
* <p><b>{@link DynamoDBTypeConverter}</b></p>
 * <p>A custom type-converter may be applied to any attribute, either by
* annotation or by overriding the standard type-converter factory.</p>
* <pre class="brush: java">
* DynamoDBMapperConfig config = DynamoDBMapperConfig.builder()
* .withTypeConverterFactory(DynamoDBTypeConverterFactory.standard().override()
* .with(String.class, MyObject.class, new StringToMyObjectConverter())
* .build())
* .build();
* </pre>
* <p>If the converter being applied is already a supported data type and
* the conversion is of the same attribute type, for instance,
* {@link java.util.Date} to {@link String} to {@code S},
 * the annotation may be omitted. The annotation is required for all non-standard
 * types or if the attribute type binding is being overridden.</p>
*
* <p><b>{@link com.amazonaws.services.dynamodbv2.model.AttributeValue}</b></p>
* <p>Direct native conversion is supported by default in all schemas.
* If the attribute is a primary or index key, it must specify either
 * {@code B}, {@code N}, or {@code S}; otherwise, it may be omitted.</p>
*
* <p><b>{@link Boolean} to {@code BOOL}</b></p>
* <p>The standard V2 conversion schema will by default serialize booleans
* natively using the DynamoDB {@code BOOL} type.</p>
* <pre class="brush: java">
* @DynamoDBTyped(DynamoDBAttributeType.BOOL)
* public boolean isTesting()
* </pre>
*
* <p><b>{@link Boolean} to {@code N}</b></p>
* <p>The standard V1 and V2 compatible conversion schemas will by default
* serialize booleans using the DynamoDB {@code N} type, with a value of '1'
* representing 'true' and a value of '0' representing 'false'.</p>
* <pre class="brush: java">
* @DynamoDBTyped(DynamoDBAttributeType.N)
* public boolean isTesting()
* </pre>
*
* <p><b>{@link Enum} to {@code S}</b></p>
* <p>The {@code enum} type is only supported by override or custom converter.
* There are some risks in distributed systems when using enumerations as
* attributes instead of simply using a String. When adding new values to the
 * enumeration, the enum-only changes must be deployed before the new enumeration
* value can be persisted. This will ensure that all systems have the correct
* code to map it from the item record in DynamoDB to your objects.</p>
* <pre class="brush: java">
* public enum Status { OPEN, PENDING, CLOSED };
*
* @DynamoDBTyped(DynamoDBAttributeType.S)
* public Status getStatus()
* </pre>
*
* <p><b>{@link UUID} to {@code B}</b></p>
* <p>The {@code UUID} type will serialize to {@link String}/{@code S} by
* default in all conversion schemas. The schemas do support serializing to
* {@link ByteBuffer}/{@code B} by override.</p>
* <pre class="brush: java">
* @DynamoDBTyped(DynamoDBAttributeType.B)
* public UUID getKey()
* </pre>
*
* <p><b>{@link Set} to {@code L}</b></p>
* <p>The standard V1 and V2 compatible conversion schemas do not by default
* support non-scalar {@code Set} types. They are supported in V2. In
* non-supported schemas, the {@link List}/{@code L} override may be applied
* to any {@code Set} type.</p>
* <pre class="brush: java">
* @DynamoDBTyped(DynamoDBAttributeType.L)
* public Set<MyObject> getMyObjects()
* </pre>
*
* <p><b>{@link Object} to {@code M}</b></p>
* <p>Also supported as {@link DynamoDBDocument}.</p>
* <pre class="brush: java">
* @DynamoDBTyped(DynamoDBAttributeType.M)
* public MyObject getMyObject()
* </pre>
*
* <p>May be combined with {@link DynamoDBTypeConverted}.</p>
*
* <p>May be used as a meta-annotation.</p>
*
* @see com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTypeConverted
* @see com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBTypeConverterFactory
*/
@DynamoDB
@Inherited
@Retention(RetentionPolicy.RUNTIME)
@Target({ElementType.TYPE, ElementType.FIELD, ElementType.METHOD, ElementType.ANNOTATION_TYPE})
public @interface DynamoDBTyped {
/**
* Use when the type of the attribute as stored in DynamoDB should differ
* from the standard type assigned by DynamoDBMapper.
*/
DynamoDBAttributeType value() default DynamoDBAttributeType.NULL;
}
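// A hedged sketch (not part of this file) of the StringToMyObjectConverter
// referenced in the Javadoc above; MyObject and its string representation are
// assumptions:
//
//   public class StringToMyObjectConverter implements DynamoDBTypeConverter<String, MyObject> {
//     @Override
//     public String convert(MyObject object) {
//       return object.toString(); // persisted as a DynamoDB S attribute
//     }
//     @Override
//     public MyObject unconvert(String value) {
//       return MyObject.fromString(value); // hypothetical factory method
//     }
//   }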
|
apache-2.0
|
fuinorg/commons-vfs2-filters
|
src/main/java/org/fuin/vfs2/filter/CanReadFileFilter.java
|
3748
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.fuin.vfs2.filter;
import java.io.Serializable;
import org.apache.commons.vfs2.FileFilter;
import org.apache.commons.vfs2.FileSelectInfo;
import org.apache.commons.vfs2.FileSystemException;
/**
* This filter accepts <code>File</code>s that can be read.
* <p>
* Example, showing how to print out a list of the current directory's
* <i>readable</i> files:
*
* <pre>
* FileSystemManager fsManager = VFS.getManager();
* FileObject dir = fsManager.toFileObject(new File("."));
* FileObject[] files = dir.findFiles(new FileFilterSelector(CanReadFileFilter.CAN_READ));
* for (int i = 0; i < files.length; i++) {
* System.out.println(files[i]);
* }
* </pre>
*
* <p>
* Example, showing how to print out a list of the current directory's
* <i>un-readable</i> files:
*
* <pre>
* FileSystemManager fsManager = VFS.getManager();
* FileObject dir = fsManager.toFileObject(new File("."));
* FileObject[] files = dir.findFiles(new FileFilterSelector(CanReadFileFilter.CANNOT_READ));
* for (int i = 0; i < files.length; i++) {
* System.out.println(files[i]);
* }
* </pre>
*
* <p>
* Example, showing how to print out a list of the current directory's
* <i>read-only</i> files:
*
* <pre>
* FileSystemManager fsManager = VFS.getManager();
* FileObject dir = fsManager.toFileObject(new File("."));
* FileObject[] files = dir.findFiles(new FileFilterSelector(CanReadFileFilter.READ_ONLY));
* for (int i = 0; i < files.length; i++) {
* System.out.println(files[i]);
* }
* </pre>
*
* @author This code was originally ported from Apache Commons IO File Filter
* @see "http://commons.apache.org/proper/commons-io/"
*/
public class CanReadFileFilter implements FileFilter, Serializable {
/** Singleton instance of <i>readable</i> filter. */
public static final FileFilter CAN_READ = new CanReadFileFilter();
/** Singleton instance of not <i>readable</i> filter. */
public static final FileFilter CANNOT_READ = new NotFileFilter(CAN_READ);
/** Singleton instance of <i>read-only</i> filter. */
public static final FileFilter READ_ONLY = new AndFileFilter(CAN_READ,
CanWriteFileFilter.CANNOT_WRITE);
private static final long serialVersionUID = 1L;
/**
* Restrictive constructor.
*/
protected CanReadFileFilter() {
}
/**
* Checks to see if the file can be read.
*
* @param fileInfo
* the File to check.
*
* @return {@code true} if the file can be read, otherwise {@code false}.
*/
@Override
public boolean accept(final FileSelectInfo fileInfo) {
try {
return fileInfo.getFile().isReadable();
} catch (final FileSystemException ex) {
throw new RuntimeException(ex);
}
}
}
|
apache-2.0
|
leva24/leva24.github.io
|
standard.php
|
444
|
<html>
<head>
<title>standard</title>
</head>
<body>
<center><font color=purple size=6><a href=standard1.php>standard</a></font></center>
<img src="images/accom_roomdetail_poolview.jpg" width="858" height="360">
<font color=purple size=3>
<pre>
Luxuriously furnished room with a sofa-cum-bed for an extra adult or child, and wall-to-wall carpeting.
</pre>
</font>
<a href=accommodation.php>home</a>
</body>
</html>
|
apache-2.0
|
quarkusio/quarkus
|
extensions/spring-data-jpa/deployment/src/main/java/io/quarkus/spring/data/deployment/generate/AbstractMethodsAdder.java
|
17863
|
package io.quarkus.spring.data.deployment.generate;
import java.util.Collection;
import java.util.Iterator;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collector;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import javax.persistence.NoResultException;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationValue;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.Type;
import org.springframework.data.domain.PageImpl;
import org.springframework.data.domain.Pageable;
import org.springframework.data.domain.SliceImpl;
import org.springframework.data.jpa.repository.Modifying;
import io.quarkus.gizmo.BytecodeCreator;
import io.quarkus.gizmo.CatchBlockCreator;
import io.quarkus.gizmo.FieldDescriptor;
import io.quarkus.gizmo.FunctionCreator;
import io.quarkus.gizmo.MethodCreator;
import io.quarkus.gizmo.MethodDescriptor;
import io.quarkus.gizmo.ResultHandle;
import io.quarkus.gizmo.TryBlock;
import io.quarkus.hibernate.orm.panache.PanacheQuery;
import io.quarkus.panache.common.Page;
import io.quarkus.runtime.util.HashUtil;
import io.quarkus.spring.data.deployment.DotNames;
import io.quarkus.spring.data.runtime.RepositorySupport;
import io.quarkus.spring.data.runtime.TypesConverter;
public abstract class AbstractMethodsAdder {
protected void handleLongReturnValue(BytecodeCreator methodCreator, ResultHandle resultHandle, DotName returnType) {
if (DotNames.LONG.equals(returnType)) { // handle object Long return type
resultHandle = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(Long.class, "valueOf", Long.class, long.class),
resultHandle);
}
methodCreator.returnValue(resultHandle);
}
protected void handleIntegerReturnValue(BytecodeCreator methodCreator, ResultHandle resultHandle, DotName returnType) {
if (DotNames.INTEGER.equals(returnType)) { // handle object Integer return type
resultHandle = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(Integer.class, "valueOf", Integer.class, int.class),
resultHandle);
}
methodCreator.returnValue(resultHandle);
}
protected void handleBooleanReturnValue(BytecodeCreator methodCreator, ResultHandle resultHandle, DotName returnType) {
if (DotNames.BOOLEAN.equals(returnType)) { // handle object Long return type
resultHandle = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(Boolean.class, "valueOf", Boolean.class, boolean.class),
resultHandle);
}
methodCreator.returnValue(resultHandle);
}
protected void generateFindQueryResultHandling(MethodCreator methodCreator, ResultHandle panacheQuery,
Integer pageableParameterIndex, ClassInfo repositoryClassInfo, ClassInfo entityClassInfo,
DotName returnType, Integer limit, String methodName, DotName customResultType, String originalResultType) {
ResultHandle page = null;
if (limit != null) {
// create a custom page object that will limit the results by the limit size
page = methodCreator.newInstance(MethodDescriptor.ofConstructor(Page.class, int.class), methodCreator.load(limit));
} else if (pageableParameterIndex != null) {
page = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(TypesConverter.class, "toPanachePage", Page.class, Pageable.class),
methodCreator.getMethodParam(pageableParameterIndex));
}
if (page != null) {
panacheQuery = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, "page", PanacheQuery.class, Page.class),
panacheQuery, page);
}
if (returnType.equals(entityClassInfo.name())) {
// implement by issuing PanacheQuery.singleResult
// if there is one result return
// if there are no results (known due to NoResultException) return null
// if there are multiple results just let the relevant exception be thrown
// when limit is specified we don't want to fail when there are multiple results, we just want to return the first one
String panacheQueryMethodToUse = (limit != null) ? "firstResult" : "singleResult";
TryBlock tryBlock = methodCreator.tryBlock();
ResultHandle singleResult = tryBlock.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, panacheQueryMethodToUse, Object.class),
panacheQuery);
ResultHandle casted = tryBlock.checkCast(singleResult, entityClassInfo.name().toString());
tryBlock.returnValue(casted);
CatchBlockCreator catchBlock = tryBlock.addCatch(NoResultException.class);
catchBlock.returnValue(catchBlock.loadNull());
} else if (DotNames.OPTIONAL.equals(returnType)) {
// implement by issuing PanacheQuery.singleResult
// if there is one result return an Optional containing it
// if there are no results (known due to NoResultException) return empty Optional
// if there are multiple results just let the relevant exception be thrown
// when limit is specified we don't want to fail when there are multiple results, we just want to return the first one
String panacheQueryMethodToUse = (limit != null) ? "firstResult" : "singleResult";
TryBlock tryBlock = methodCreator.tryBlock();
ResultHandle singleResult = tryBlock.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, panacheQueryMethodToUse, Object.class),
panacheQuery);
if (customResultType == null) {
ResultHandle casted = tryBlock.checkCast(singleResult, entityClassInfo.name().toString());
ResultHandle optional = tryBlock.invokeStaticMethod(
MethodDescriptor.ofMethod(Optional.class, "of", Optional.class, Object.class),
casted);
tryBlock.returnValue(optional);
} else {
ResultHandle customResult = tryBlock.invokeStaticMethod(
MethodDescriptor.ofMethod(customResultType.toString(), "convert_" + methodName,
customResultType.toString(),
originalResultType),
singleResult);
ResultHandle optional = tryBlock.invokeStaticMethod(
MethodDescriptor.ofMethod(Optional.class, "of", Optional.class, Object.class),
customResult);
tryBlock.returnValue(optional);
}
CatchBlockCreator catchBlock = tryBlock.addCatch(NoResultException.class);
ResultHandle emptyOptional = catchBlock.invokeStaticMethod(
MethodDescriptor.ofMethod(Optional.class, "empty", Optional.class));
catchBlock.returnValue(emptyOptional);
} else if (DotNames.LIST.equals(returnType) || DotNames.COLLECTION.equals(returnType)
|| DotNames.SET.equals(returnType) || DotNames.ITERATOR.equals(returnType)
|| DotNames.SPRING_DATA_PAGE.equals(returnType) || DotNames.SPRING_DATA_SLICE.equals(returnType)) {
ResultHandle list;
if (customResultType == null) {
list = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, "list", List.class),
panacheQuery);
} else {
ResultHandle stream = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, "stream", Stream.class),
panacheQuery);
// Function to convert `originResultType` (Object[] or entity class)
// to the custom type (using the generated static convert method)
FunctionCreator customResultMappingFunction = methodCreator.createFunction(Function.class);
BytecodeCreator funcBytecode = customResultMappingFunction.getBytecode();
ResultHandle obj = funcBytecode.invokeStaticMethod(
MethodDescriptor.ofMethod(customResultType.toString(), "convert_" + methodName,
customResultType.toString(),
originalResultType),
funcBytecode.getMethodParam(0));
funcBytecode.returnValue(obj);
stream = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(Stream.class, "map", Stream.class, Function.class),
stream, customResultMappingFunction.getInstance());
// Re-collect the stream into a list
ResultHandle collector = methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(Collectors.class, "toList", Collector.class));
list = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(Stream.class, "collect", Object.class, Collector.class),
stream, collector);
}
if (DotNames.ITERATOR.equals(returnType)) {
ResultHandle iterator = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(Iterable.class, "iterator", Iterator.class),
list);
methodCreator.returnValue(iterator);
} else if (DotNames.SET.equals(returnType)) {
ResultHandle set = methodCreator.newInstance(
MethodDescriptor.ofConstructor(LinkedHashSet.class, Collection.class), list);
methodCreator.returnValue(set);
} else if (DotNames.SPRING_DATA_PAGE.equals(returnType)) {
ResultHandle pageResult;
if (pageableParameterIndex != null) {
ResultHandle count = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, "count", long.class),
panacheQuery);
pageResult = methodCreator.newInstance(
MethodDescriptor.ofConstructor(PageImpl.class, List.class, Pageable.class, long.class),
list, methodCreator.getMethodParam(pageableParameterIndex), count);
} else {
pageResult = methodCreator.newInstance(MethodDescriptor.ofConstructor(PageImpl.class, List.class), list);
}
methodCreator.returnValue(pageResult);
} else if (DotNames.SPRING_DATA_SLICE.equals(returnType)) {
ResultHandle sliceResult;
if (pageableParameterIndex != null) {
ResultHandle hasNextPage = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, "hasNextPage", boolean.class),
panacheQuery);
sliceResult = methodCreator.newInstance(
MethodDescriptor.ofConstructor(SliceImpl.class, List.class, Pageable.class, boolean.class),
list, methodCreator.getMethodParam(pageableParameterIndex), hasNextPage);
} else {
sliceResult = methodCreator.newInstance(MethodDescriptor.ofConstructor(SliceImpl.class, List.class), list);
}
methodCreator.returnValue(sliceResult);
}
methodCreator.returnValue(list);
} else if (DotNames.STREAM.equals(returnType)) {
ResultHandle stream = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, "stream", Stream.class),
panacheQuery);
methodCreator.returnValue(stream);
} else if (isHibernateSupportedReturnType(returnType)) {
ResultHandle singleResult = methodCreator.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, "singleResult", Object.class),
panacheQuery);
methodCreator.returnValue(singleResult);
} else if (customResultType != null) {
// when limit is specified we don't want to fail when there are multiple results, we just want to return the first one
String panacheQueryMethodToUse = (limit != null) ? "firstResult" : "singleResult";
TryBlock tryBlock = methodCreator.tryBlock();
ResultHandle singleResult = tryBlock.invokeInterfaceMethod(
MethodDescriptor.ofMethod(PanacheQuery.class, panacheQueryMethodToUse, Object.class),
panacheQuery);
ResultHandle customResult = tryBlock.invokeStaticMethod(
MethodDescriptor.ofMethod(customResultType.toString(), "convert_" + methodName,
customResultType.toString(),
originalResultType),
singleResult);
tryBlock.returnValue(customResult);
CatchBlockCreator catchBlock = tryBlock.addCatch(NoResultException.class);
catchBlock.returnValue(catchBlock.loadNull());
} else {
throw new IllegalArgumentException(
"Return type of method " + methodName + " of Repository " + repositoryClassInfo
+ " does not match find query type");
}
}
/**
* Flush the underlying persistence context before executing the modifying query if enabled by {@link Modifying}
* annotation.
*/
protected void handleFlushAutomatically(AnnotationInstance modifyingAnnotation, MethodCreator methodCreator,
FieldDescriptor entityClassFieldDescriptor) {
final AnnotationValue flushAutomatically = modifyingAnnotation != null ? modifyingAnnotation.value("flushAutomatically")
: null;
if (flushAutomatically != null && flushAutomatically.asBoolean()) {
methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(RepositorySupport.class, "flush", void.class, Class.class),
methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()));
}
}
/**
* Clear the underlying persistence context after executing the modifying query if enabled by {@link Modifying}
* annotation.
*/
protected void handleClearAutomatically(AnnotationInstance modifyingAnnotation, MethodCreator methodCreator,
FieldDescriptor entityClassFieldDescriptor) {
final AnnotationValue clearAutomatically = modifyingAnnotation != null ? modifyingAnnotation.value("clearAutomatically")
: null;
if (clearAutomatically != null && clearAutomatically.asBoolean()) {
methodCreator.invokeStaticMethod(
MethodDescriptor.ofMethod(RepositorySupport.class, "clear", void.class, Class.class),
methodCreator.readInstanceField(entityClassFieldDescriptor, methodCreator.getThis()));
}
}
protected boolean isHibernateSupportedReturnType(DotName dotName) {
return dotName.equals(DotNames.OBJECT) || DotNames.HIBERNATE_PROVIDED_BASIC_TYPES.contains(dotName);
}
protected Type verifyQueryResultType(Type t, IndexView index) {
if (isHibernateSupportedReturnType(t.name())) {
return t;
}
if (t.kind() == Type.Kind.ARRAY) {
return verifyQueryResultType(t.asArrayType().component(), index);
} else if (t.kind() == Type.Kind.PARAMETERIZED_TYPE) {
final List<Type> types = t.asParameterizedType().arguments();
if (types.size() == 1) {
return verifyQueryResultType(types.get(0), index);
} else {
for (Type type : types) {
verifyQueryResultType(type, index);
}
}
} else {
final ClassInfo typeClass = index.getClassByName(t.name());
if (typeClass == null) {
throw new IllegalStateException(t.name() + " was not part of the Quarkus index");
}
}
return t;
}
protected DotName createSimpleInterfaceImpl(DotName ifaceName) {
String fullName = ifaceName.toString();
// package name: must be in the same package as the interface
final int index = fullName.lastIndexOf('.');
String packageName = "";
if (index > 0 && index < fullName.length() - 1) {
packageName = fullName.substring(0, index) + ".";
}
return DotName.createSimple(packageName
+ (ifaceName.isInner() ? ifaceName.local() : ifaceName.withoutPackagePrefix()) + "_"
+ HashUtil.sha1(ifaceName.toString()));
}
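    // e.g. an interface named com.acme.PersonRepository yields
    // com.acme.PersonRepository_<sha1 of the fully-qualified interface name>.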
protected DotName getPrimitiveTypeName(DotName returnTypeName) {
if (DotNames.LONG.equals(returnTypeName)) {
return DotNames.PRIMITIVE_LONG;
}
if (DotNames.INTEGER.equals(returnTypeName)) {
return DotNames.PRIMITIVE_INTEGER;
}
if (DotNames.BOOLEAN.equals(returnTypeName)) {
return DotNames.PRIMITIVE_BOOLEAN;
}
return returnTypeName;
}
}
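// A hedged sketch (not part of this file) of roughly what the Optional branch of
// generateFindQueryResultHandling emits, written as plain Java; the repository
// method name and the Person entity type are assumptions:
//
//   public Optional<Person> findByName(String name) {
//     PanacheQuery<Person> query = ...; // built by the calling generator
//     try {
//       return Optional.of((Person) query.singleResult());
//     } catch (NoResultException e) {
//       return Optional.empty();
//     }
//   }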
|
apache-2.0
|
quarkusio/quarkus
|
independent-projects/tools/devtools-common/src/main/java/io/quarkus/devtools/codestarts/quarkus/QuarkusCodestartData.java
|
5248
|
package io.quarkus.devtools.codestarts.quarkus;
import io.quarkus.devtools.codestarts.DataKey;
import io.quarkus.devtools.codestarts.utils.NestedMaps;
import java.util.HashMap;
import java.util.Map;
import java.util.Optional;
import java.util.function.Function;
import java.util.stream.Collectors;
import java.util.stream.Stream;
public final class QuarkusCodestartData {
private QuarkusCodestartData() {
}
public enum QuarkusDataKey implements DataKey {
BOM_GROUP_ID("quarkus.platform.group-id"),
BOM_ARTIFACT_ID("quarkus.platform.artifact-id"),
BOM_VERSION("quarkus.platform.version"),
PROJECT_GROUP_ID("project.group-id"),
PROJECT_ARTIFACT_ID("project.artifact-id"),
PROJECT_VERSION("project.version"),
PROJECT_PACKAGE_NAME("project.package-name"),
QUARKUS_MAVEN_PLUGIN_GROUP_ID("quarkus.maven-plugin.group-id"),
QUARKUS_MAVEN_PLUGIN_ARTIFACT_ID("quarkus.maven-plugin.artifact-id"),
QUARKUS_MAVEN_PLUGIN_VERSION("quarkus.maven-plugin.version"),
QUARKUS_GRADLE_PLUGIN_ID("quarkus.gradle-plugin.id"),
QUARKUS_GRADLE_PLUGIN_VERSION("quarkus.gradle-plugin.version"),
QUARKUS_VERSION("quarkus.version"),
JAVA_VERSION("java.version"),
KOTLIN_VERSION("kotlin.version"),
SCALA_VERSION("scala.version"),
SCALA_MAVEN_PLUGIN_VERSION("scala-maven-plugin.version"),
MAVEN_COMPILER_PLUGIN_VERSION("maven-compiler-plugin.version"),
MAVEN_SUREFIRE_PLUGIN_VERSION("maven-surefire-plugin.version"),
RESTEASY_CODESTART_RESOURCE_PATH("resteasy-codestart.resource.path"),
RESTEASY_CODESTART_RESOURCE_CLASS_NAME("resteasy-codestart.resource.class-name"),
RESTEASY_REACTIVE_CODESTART_RESOURCE_PATH("resteasy-reactive-codestart.resource.path"),
RESTEASY_REACTIVE_CODESTART_RESOURCE_CLASS_NAME("resteasy-reactive-codestart.resource.class-name"),
SPRING_WEB_CODESTART_RESOURCE_PATH("spring-web-codestart.resource.path"),
SPRING_WEB_CODESTART_RESOURCE_CLASS_NAME("spring-web-codestart.resource.class-name"),
APP_CONFIG("app-config");
private final String key;
QuarkusDataKey(String key) {
this.key = key;
}
@Override
public String key() {
return key;
}
}
public enum LegacySupport implements DataKey {
BOM_GROUP_ID("bom_groupId"),
BOM_ARTIFACT_ID("bom_artifactId"),
BOM_VERSION("bom_version"),
PROJECT_GROUP_ID("project_groupId"),
PROJECT_ARTIFACT_ID("project_artifactId"),
PROJECT_VERSION("project_version"),
PROJECT_PACKAGE_NAME("package_name"),
QUARKUS_MAVEN_PLUGIN_GROUP_ID("maven_plugin_groupId"),
QUARKUS_MAVEN_PLUGIN_ARTIFACT_ID("maven_plugin_artifactId"),
QUARKUS_MAVEN_PLUGIN_VERSION("maven_plugin_version"),
QUARKUS_GRADLE_PLUGIN_ID("gradle_plugin_id"),
QUARKUS_GRADLE_PLUGIN_VERSION("gradle_plugin_version"),
QUARKUS_VERSION("quarkus_version"),
JAVA_VERSION("java_target"),
KOTLIN_VERSION("kotlin_version"),
SCALA_VERSION("scala_version"),
SCALA_MAVEN_PLUGIN_VERSION("scala_plugin_version"),
MAVEN_COMPILER_PLUGIN_VERSION("compiler_plugin_version"),
MAVEN_SUREFIRE_PLUGIN_VERSION("surefire_plugin_version"),
RESTEASY_CODESTART_RESOURCE_PATH("path"),
RESTEASY_CODESTART_RESOURCE_CLASS_NAME(QuarkusCodestartData::convertClassName),
RESTEASY_REACTIVE_CODESTART_RESOURCE_PATH("path"),
RESTEASY_REACTIVE_CODESTART_RESOURCE_CLASS_NAME(QuarkusCodestartData::convertClassName),
SPRING_WEB_CODESTART_RESOURCE_PATH("path"),
SPRING_WEB_CODESTART_RESOURCE_CLASS_NAME(QuarkusCodestartData::convertClassName);
private final String key;
private final Function<Map<String, Object>, Object> converter;
LegacySupport(String legacyKey) {
this((m) -> m.get(legacyKey));
}
LegacySupport(Function<Map<String, Object>, Object> converter) {
this.key = QuarkusDataKey.valueOf(this.name()).key();
this.converter = converter;
}
@Override
public String key() {
return key;
}
public static Map<String, Object> convertFromLegacy(Map<String, Object> legacy) {
return NestedMaps.unflatten(Stream.of(values())
.map(v -> new HashMap.SimpleImmutableEntry<>(v.key(), v.converter.apply(legacy)))
.filter(v -> v.getValue() != null)
.collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue)));
}
}
    // TODO remove the class_name conversion when it's removed
private static String convertClassName(final Map<String, Object> legacyData) {
Optional<String> classNameValue = NestedMaps.getValue(legacyData, "class_name");
if (classNameValue.isPresent()) {
final String className = classNameValue.get();
int idx = classNameValue.get().lastIndexOf('.');
if (idx < 0) {
return className;
}
return className.substring(idx + 1);
}
return null;
}
}
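// A hedged sketch (not part of this file) of convertFromLegacy in action,
// assuming NestedMaps.unflatten splits keys on '.':
//
//   Map<String, Object> legacy = Map.of("project_groupId", "org.acme");
//   Map<String, Object> data = LegacySupport.convertFromLegacy(legacy);
//   // -> { "project" : { "group-id" : "org.acme" } }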
|
apache-2.0
|
enisoc/kubernetes
|
test/e2e/network/service_latency.go
|
11048
|
/*
Copyright 2015 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package network
import (
"fmt"
"sort"
"strings"
"time"
v1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/runtime"
"k8s.io/apimachinery/pkg/util/sets"
"k8s.io/apimachinery/pkg/watch"
restclient "k8s.io/client-go/rest"
"k8s.io/client-go/tools/cache"
"k8s.io/client-go/util/flowcontrol"
"k8s.io/kubernetes/test/e2e/framework"
e2elog "k8s.io/kubernetes/test/e2e/framework/log"
testutils "k8s.io/kubernetes/test/utils"
imageutils "k8s.io/kubernetes/test/utils/image"
. "github.com/onsi/ginkgo"
)
type durations []time.Duration
func (d durations) Len() int { return len(d) }
func (d durations) Less(i, j int) bool { return d[i] < d[j] }
func (d durations) Swap(i, j int) { d[i], d[j] = d[j], d[i] }
var _ = SIGDescribe("Service endpoints latency", func() {
f := framework.NewDefaultFramework("svc-latency")
/*
Release : v1.9
Testname: Service endpoint latency, thresholds
	   Description: Run 100 iterations of creating a service with a Pod running the pause image, and measure the time it takes for the service to be created and for an endpoint with the service's name to become available. The durations are sorted to compute the 50th, 90th and 99th percentiles. The single-server latency MUST NOT exceed the liberally set thresholds of 20s for the 50th percentile and 50s for the 99th percentile.
*/
framework.ConformanceIt("should not be very high ", func() {
const (
// These are very generous criteria. Ideally we will
// get this much lower in the future. See issue
// #10436.
limitMedian = time.Second * 20
limitTail = time.Second * 50
// Numbers chosen to make the test complete in a short amount
// of time. This sample size is not actually large enough to
// reliably measure tails (it may give false positives, but not
// false negatives), but it should catch low hanging fruit.
//
// Note that these are fixed and do not depend on the
// size of the cluster. Setting parallelTrials larger
// distorts the measurements. Perhaps this wouldn't be
// true on HA clusters.
totalTrials = 200
parallelTrials = 15
minSampleSize = 100
// Acceptable failure ratio for getting service latencies.
acceptableFailureRatio = .05
)
// Turn off rate limiting--it interferes with our measurements.
oldThrottle := f.ClientSet.CoreV1().RESTClient().GetRateLimiter()
f.ClientSet.CoreV1().RESTClient().(*restclient.RESTClient).Throttle = flowcontrol.NewFakeAlwaysRateLimiter()
defer func() { f.ClientSet.CoreV1().RESTClient().(*restclient.RESTClient).Throttle = oldThrottle }()
failing := sets.NewString()
d, err := runServiceLatencies(f, parallelTrials, totalTrials, acceptableFailureRatio)
if err != nil {
failing.Insert(fmt.Sprintf("Not all RC/pod/service trials succeeded: %v", err))
}
dSorted := durations(d)
sort.Sort(dSorted)
n := len(dSorted)
if n < minSampleSize {
failing.Insert(fmt.Sprintf("Did not get a good sample size: %v", dSorted))
}
if n < 2 {
failing.Insert("Less than two runs succeeded; aborting.")
framework.Failf(strings.Join(failing.List(), "\n"))
}
percentile := func(p int) time.Duration {
est := n * p / 100
if est >= n {
return dSorted[n-1]
}
return dSorted[est]
}
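		// For example, with n == 200 samples percentile(50) is dSorted[100] and
		// percentile(99) is dSorted[198]; the est >= n guard only matters for p == 100.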
e2elog.Logf("Latencies: %v", dSorted)
p50 := percentile(50)
p90 := percentile(90)
p99 := percentile(99)
e2elog.Logf("50 %%ile: %v", p50)
e2elog.Logf("90 %%ile: %v", p90)
e2elog.Logf("99 %%ile: %v", p99)
e2elog.Logf("Total sample count: %v", len(dSorted))
if p50 > limitMedian {
failing.Insert("Median latency should be less than " + limitMedian.String())
}
if p99 > limitTail {
failing.Insert("Tail (99 percentile) latency should be less than " + limitTail.String())
}
if failing.Len() > 0 {
errList := strings.Join(failing.List(), "\n")
helpfulInfo := fmt.Sprintf("\n50, 90, 99 percentiles: %v %v %v", p50, p90, p99)
framework.Failf(errList + helpfulInfo)
}
})
})
func runServiceLatencies(f *framework.Framework, inParallel, total int, acceptableFailureRatio float32) (output []time.Duration, err error) {
cfg := testutils.RCConfig{
Client: f.ClientSet,
Image: imageutils.GetPauseImageName(),
Name: "svc-latency-rc",
Namespace: f.Namespace.Name,
Replicas: 1,
PollInterval: time.Second,
}
if err := framework.RunRC(cfg); err != nil {
return nil, err
}
// Run a single watcher, to reduce the number of API calls we have to
// make; this is to minimize the timing error. It's how kube-proxy
// consumes the endpoints data, so it seems like the right thing to
// test.
endpointQueries := newQuerier()
startEndpointWatcher(f, endpointQueries)
defer close(endpointQueries.stop)
// run one test and throw it away-- this is to make sure that the pod's
// ready status has propagated.
singleServiceLatency(f, cfg.Name, endpointQueries)
// These channels are never closed, and each attempt sends on exactly
// one of these channels, so the sum of the things sent over them will
// be exactly total.
errs := make(chan error, total)
durations := make(chan time.Duration, total)
blocker := make(chan struct{}, inParallel)
for i := 0; i < total; i++ {
go func() {
defer GinkgoRecover()
blocker <- struct{}{}
defer func() { <-blocker }()
if d, err := singleServiceLatency(f, cfg.Name, endpointQueries); err != nil {
errs <- err
} else {
durations <- d
}
}()
}
errCount := 0
for i := 0; i < total; i++ {
select {
case e := <-errs:
e2elog.Logf("Got error: %v", e)
errCount++
case d := <-durations:
output = append(output, d)
}
}
if errCount != 0 {
e2elog.Logf("Got %d errors out of %d tries", errCount, total)
errRatio := float32(errCount) / float32(total)
if errRatio > acceptableFailureRatio {
return output, fmt.Errorf("error ratio %g is higher than the acceptable ratio %g", errRatio, acceptableFailureRatio)
}
}
return output, nil
}
type endpointQuery struct {
endpointsName string
endpoints *v1.Endpoints
result chan<- struct{}
}
type endpointQueries struct {
requests map[string]*endpointQuery
stop chan struct{}
requestChan chan *endpointQuery
seenChan chan *v1.Endpoints
}
func newQuerier() *endpointQueries {
eq := &endpointQueries{
requests: map[string]*endpointQuery{},
stop: make(chan struct{}, 100),
requestChan: make(chan *endpointQuery),
seenChan: make(chan *v1.Endpoints, 100),
}
go eq.join()
return eq
}
// join merges the incoming streams of requests and added endpoints. It has
// nice properties like:
// * remembering an endpoint if it happens to arrive before it is requested.
// * closing all outstanding requests (returning nil) if it is stopped.
func (eq *endpointQueries) join() {
defer func() {
// Terminate all pending requests, so that no goroutine will
// block indefinitely.
for _, req := range eq.requests {
if req.result != nil {
close(req.result)
}
}
}()
for {
select {
case <-eq.stop:
return
case req := <-eq.requestChan:
if cur, ok := eq.requests[req.endpointsName]; ok && cur.endpoints != nil {
// We've already gotten the result, so we can
// immediately satisfy this request.
delete(eq.requests, req.endpointsName)
req.endpoints = cur.endpoints
close(req.result)
} else {
// Save this request.
eq.requests[req.endpointsName] = req
}
case got := <-eq.seenChan:
if req, ok := eq.requests[got.Name]; ok {
if req.result != nil {
// Satisfy a request.
delete(eq.requests, got.Name)
req.endpoints = got
close(req.result)
} else {
// We've already recorded a result, but
// haven't gotten the request yet. Only
// keep the first result.
}
} else {
// We haven't gotten the corresponding request
// yet, save this result.
eq.requests[got.Name] = &endpointQuery{
endpoints: got,
}
}
}
}
}
// request blocks until the requested endpoint is seen.
func (eq *endpointQueries) request(endpointsName string) *v1.Endpoints {
result := make(chan struct{})
req := &endpointQuery{
endpointsName: endpointsName,
result: result,
}
eq.requestChan <- req
<-result
return req.endpoints
}
// marks e as added; does not block.
func (eq *endpointQueries) added(e *v1.Endpoints) {
eq.seenChan <- e
}
// blocks until it has finished syncing.
func startEndpointWatcher(f *framework.Framework, q *endpointQueries) {
_, controller := cache.NewInformer(
&cache.ListWatch{
ListFunc: func(options metav1.ListOptions) (runtime.Object, error) {
obj, err := f.ClientSet.CoreV1().Endpoints(f.Namespace.Name).List(options)
return runtime.Object(obj), err
},
WatchFunc: func(options metav1.ListOptions) (watch.Interface, error) {
return f.ClientSet.CoreV1().Endpoints(f.Namespace.Name).Watch(options)
},
},
&v1.Endpoints{},
0,
cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
if e, ok := obj.(*v1.Endpoints); ok {
if len(e.Subsets) > 0 && len(e.Subsets[0].Addresses) > 0 {
q.added(e)
}
}
},
UpdateFunc: func(old, cur interface{}) {
if e, ok := cur.(*v1.Endpoints); ok {
if len(e.Subsets) > 0 && len(e.Subsets[0].Addresses) > 0 {
q.added(e)
}
}
},
},
)
go controller.Run(q.stop)
// Wait for the controller to sync, so that we don't count any warm-up time.
for !controller.HasSynced() {
time.Sleep(100 * time.Millisecond)
}
}
func singleServiceLatency(f *framework.Framework, name string, q *endpointQueries) (time.Duration, error) {
// Make a service that points to that pod.
svc := &v1.Service{
ObjectMeta: metav1.ObjectMeta{
GenerateName: "latency-svc-",
},
Spec: v1.ServiceSpec{
Ports: []v1.ServicePort{{Protocol: v1.ProtocolTCP, Port: 80}},
Selector: map[string]string{"name": name},
Type: v1.ServiceTypeClusterIP,
SessionAffinity: v1.ServiceAffinityNone,
},
}
startTime := time.Now()
gotSvc, err := f.ClientSet.CoreV1().Services(f.Namespace.Name).Create(svc)
if err != nil {
return 0, err
}
e2elog.Logf("Created: %v", gotSvc.Name)
if e := q.request(gotSvc.Name); e == nil {
return 0, fmt.Errorf("Never got a result for endpoint %v", gotSvc.Name)
}
stopTime := time.Now()
d := stopTime.Sub(startTime)
e2elog.Logf("Got endpoints: %v [%v]", gotSvc.Name, d)
return d, nil
}
|
apache-2.0
|
vespa-engine/vespa
|
application-model/src/main/java/com/yahoo/vespa/applicationmodel/ServiceType.java
|
2060
|
// Copyright Yahoo. Licensed under the terms of the Apache 2.0 license. See LICENSE in the project root.
package com.yahoo.vespa.applicationmodel;
import com.fasterxml.jackson.annotation.JsonValue;
import java.util.Objects;
/**
* @author bjorncs
*/
public class ServiceType {
// Common service types.
public static final ServiceType CONTAINER = new ServiceType("container");
public static final ServiceType SLOBROK = new ServiceType("slobrok");
public static final ServiceType HOST_ADMIN = new ServiceType("hostadmin");
public static final ServiceType CONFIG_SERVER = new ServiceType("configserver");
public static final ServiceType CONTROLLER = new ServiceType("controller");
public static final ServiceType TRANSACTION_LOG_SERVER = new ServiceType("transactionlogserver");
public static final ServiceType CLUSTER_CONTROLLER = new ServiceType("container-clustercontroller");
public static final ServiceType DISTRIBUTOR = new ServiceType("distributor");
public static final ServiceType SEARCH = new ServiceType("searchnode");
public static final ServiceType STORAGE = new ServiceType("storagenode");
public static final ServiceType METRICS_PROXY = new ServiceType("metricsproxy-container");
private final String id;
public ServiceType(String id) {
this.id = id;
}
// Jackson's StdKeySerializer uses toString() (and ignores annotations) for objects used as Map keys.
// Therefore, we use toString() as the JSON-producing method, which is really sad.
@JsonValue
@Override
public String toString() {
return id;
}
// For compatibility with original Scala case class
public String s() {
return id;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
ServiceType that = (ServiceType) o;
return Objects.equals(id, that.id);
}
@Override
public int hashCode() {
return Objects.hash(id);
}
}
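// A hedged sketch (not part of this file) of the Map-key behavior the comment on
// toString() describes; the ObjectMapper usage and import are assumptions:
//
//   new ObjectMapper().writeValueAsString(Map.of(ServiceType.CONTAINER, 1));
//   // -> {"container":1}  (Jackson's StdKeySerializer calls toString() on the key)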
|
apache-2.0
|
hypercube1024/firefly
|
firefly-common/src/main/java/com/fireflysource/common/bytecode/ClassProxy.java
|
180
|
package com.fireflysource.common.bytecode;
@FunctionalInterface
public interface ClassProxy {
Object intercept(MethodProxy handler, Object originalInstance, Object[] args);
}
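// A hedged usage sketch (not part of this file); whether MethodProxy exposes an
// invoke(Object, Object...) method is an assumption:
//
//   ClassProxy logging = (handler, originalInstance, args) -> {
//     System.out.println("before " + handler);
//     Object result = handler.invoke(originalInstance, args); // hypothetical MethodProxy API
//     System.out.println("after " + handler);
//     return result;
//   };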
|
apache-2.0
|
tmatsuo/google-cloud-php
|
src/BigQuery/QueryResults.php
|
8859
|
<?php
/**
* Copyright 2016 Google Inc. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace Google\Cloud\BigQuery;
use Google\Cloud\BigQuery\Connection\ConnectionInterface;
use Google\Cloud\Core\Exception\GoogleException;
use Google\Cloud\Core\Iterator\ItemIterator;
use Google\Cloud\Core\Iterator\PageIterator;
/**
* QueryResults represent the result of a BigQuery SQL query. Read more at the
* [Query Response API Documentation](https://cloud.google.com/bigquery/docs/reference/v2/jobs/query#response).
*
 * This class should not be instantiated directly, but obtained as a result of
* calling {@see Google\Cloud\BigQuery\BigQueryClient::runQuery()} or
* {@see Google\Cloud\BigQuery\Job::queryResults()}.
*/
class QueryResults
{
/**
* @var ConnectionInterface $connection Represents a connection to BigQuery.
*/
protected $connection;
/**
* @var array The query result's identity.
*/
private $identity;
/**
* @var array The query result's metadata.
*/
private $info;
/**
* @var ValueMapper $mapper Maps values between PHP and BigQuery.
*/
private $mapper;
/**
* @var array The options to use when reloading query data.
*/
private $reloadOptions;
/**
* @param ConnectionInterface $connection Represents a connection to
* BigQuery.
* @param string $jobId The job's ID.
* @param string $projectId The project's ID.
* @param array $info The query result's metadata.
* @param array $reloadOptions The options to use when reloading query data.
* @param ValueMapper $mapper Maps values between PHP and BigQuery.
*/
public function __construct(
ConnectionInterface $connection,
$jobId,
$projectId,
array $info,
array $reloadOptions,
ValueMapper $mapper
) {
$this->connection = $connection;
$this->info = $info;
$this->reloadOptions = $reloadOptions;
$this->identity = [
'jobId' => $jobId,
'projectId' => $projectId
];
$this->mapper = $mapper;
}
/**
* Retrieves the rows associated with the query and merges them together
* with the table's schema. It is recommended to check the completeness of
* the query before attempting to access rows.
*
* Refer to the table below for a guide on how BigQuery types are mapped as
* they come back from the API.
*
* | **PHP Type** | **BigQuery Data Type** |
* |--------------------------------------------|--------------------------------------|
* | `\DateTimeInterface` | `DATETIME` |
* | {@see Google\Cloud\BigQuery\Bytes} | `BYTES` |
* | {@see Google\Cloud\BigQuery\Date} | `DATE` |
* | {@see Google\Cloud\Core\Int64} | `INTEGER` |
* | {@see Google\Cloud\BigQuery\Time} | `TIME` |
* | {@see Google\Cloud\BigQuery\Timestamp} | `TIMESTAMP` |
* | Associative Array | `RECORD` |
* | Non-Associative Array | `RECORD` (Repeated) |
* | `float` | `FLOAT` |
* | `int` | `INTEGER` |
* | `string` | `STRING` |
* | `bool` | `BOOLEAN` |
*
* Example:
* ```
* $isComplete = $queryResults->isComplete();
*
* if ($isComplete) {
* $rows = $queryResults->rows();
*
* foreach ($rows as $row) {
* echo $row['name'] . PHP_EOL;
* }
* }
* ```
*
* @param array $options [optional] Configuration options.
* @return ItemIterator
* @throws GoogleException Thrown if the query has not yet completed.
*/
public function rows(array $options = [])
{
if (!$this->isComplete()) {
throw new GoogleException('The query has not completed yet.');
}
$schema = $this->info['schema']['fields'];
return new ItemIterator(
new PageIterator(
function (array $row) use ($schema) {
$mergedRow = [];
if ($row === null) {
return $mergedRow;
}
if (!array_key_exists('f', $row)) {
throw new GoogleException('Bad response - missing key "f" for a row.');
}
foreach ($row['f'] as $key => $value) {
$fieldSchema = $schema[$key];
$mergedRow[$fieldSchema['name']] = $this->mapper->fromBigQuery($value, $fieldSchema);
}
return $mergedRow;
},
[$this->connection, 'getQueryResults'],
$options + $this->identity,
[
'itemsKey' => 'rows',
'firstPage' => $this->info,
'nextResultTokenKey' => 'pageToken'
]
)
);
}
/**
* Checks the query's completeness. Useful in combination with
* {@see Google\Cloud\BigQuery\QueryResults::reload()} to poll for query status.
*
* Example:
* ```
* $isComplete = $queryResults->isComplete();
*
* while (!$isComplete) {
* sleep(1); // small delay between requests
* $queryResults->reload();
* $isComplete = $queryResults->isComplete();
* }
*
* echo 'Query complete!';
* ```
*
* @return bool
*/
public function isComplete()
{
return $this->info['jobComplete'];
}
/**
* Retrieves the cached query details.
*
* Example:
* ```
* $info = $queryResults->info();
* echo $info['totalBytesProcessed'];
* ```
*
* @see https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults#response
* Jobs getQueryResults API response documentation.
*
* @return array
*/
public function info()
{
return $this->info;
}
/**
* Triggers a network request to reload the query's details.
*
* Useful when needing to poll an incomplete query
* for status. Configuration options will be inherited from
* {@see Google\Cloud\BigQuery\Job::queryResults()} or
* {@see Google\Cloud\BigQuery\BigQueryClient::runQuery()}, but they can be
* overridden if needed.
*
* Example:
* ```
* $queryResults->isComplete(); // returns false
* sleep(1); // let's wait for a moment...
* $queryResults->reload(); // executes a network request
* if ($queryResults->isComplete()) {
* echo "Query complete!";
* }
* ```
*
* @see https://cloud.google.com/bigquery/docs/reference/v2/jobs/getQueryResults
* Jobs getQueryResults API documentation.
*
* @param array $options [optional] {
* Configuration options.
*
* @type int $maxResults Maximum number of results to read per page.
* @type int $startIndex Zero-based index of the starting row.
* @type int $timeoutMs How long to wait for the query to complete, in
* milliseconds. **Defaults to** `10000` milliseconds (10 seconds).
* }
* @return array
*/
public function reload(array $options = [])
{
$options += $this->identity;
return $this->info = $this->connection->getQueryResults($options + $this->reloadOptions);
}
/**
* Retrieves the query result's identity.
*
* An identity provides a description of a nested resource.
*
* Example:
* ```
* echo $queryResults->identity()['projectId'];
* ```
*
* @return array
*/
public function identity()
{
return $this->identity;
}
}
|
apache-2.0
|
motorina0/flowable-engine
|
modules/flowable-engine/src/main/java/org/flowable/engine/impl/calendar/MapBusinessCalendarManager.java
|
1936
|
/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.engine.impl.calendar;
import java.util.HashMap;
import java.util.Map;
import org.flowable.engine.common.api.FlowableException;
/**
* @author Tom Baeyens
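 *
 * <p>
 * Usage sketch (illustrative only; {@code myCalendar} stands for any
 * {@code BusinessCalendar} implementation and is not defined in this file):
 *
 * <pre>
 * MapBusinessCalendarManager manager = new MapBusinessCalendarManager();
 * manager.addBusinessCalendar("myCalendar", myCalendar);
 * BusinessCalendar calendar = manager.getBusinessCalendar("myCalendar");
 * </pre>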
*/
public class MapBusinessCalendarManager implements BusinessCalendarManager {
private final Map<String, BusinessCalendar> businessCalendars;
  public MapBusinessCalendarManager() {
this.businessCalendars = new HashMap<String, BusinessCalendar>();
}
public MapBusinessCalendarManager(Map<String, BusinessCalendar> businessCalendars) {
if (businessCalendars == null) {
throw new IllegalArgumentException("businessCalendars can not be null");
}
this.businessCalendars = new HashMap<String, BusinessCalendar>(businessCalendars);
}
public BusinessCalendar getBusinessCalendar(String businessCalendarRef) {
BusinessCalendar businessCalendar = businessCalendars.get(businessCalendarRef);
if (businessCalendar == null) {
throw new FlowableException("Requested business calendar " + businessCalendarRef +
" does not exist. Allowed calendars are " + this.businessCalendars.keySet() + ".");
}
return businessCalendar;
}
public BusinessCalendarManager addBusinessCalendar(String businessCalendarRef, BusinessCalendar businessCalendar) {
businessCalendars.put(businessCalendarRef, businessCalendar);
return this;
}
}
|
apache-2.0
|
lsimons/phloc-schematron-standalone
|
phloc-commons/src/test/java/com/phloc/commons/jaxb/JAXBMarshallerUtilsTest.java
|
2786
|
/**
* Copyright (C) 2006-2013 phloc systems
* http://www.phloc.com
* office[at]phloc[dot]com
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.phloc.commons.jaxb;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import javax.xml.bind.JAXBContext;
import javax.xml.bind.JAXBException;
import javax.xml.bind.Marshaller;
import org.junit.Test;
import com.phloc.commons.charset.CCharset;
/**
* Test class for class {@link JAXBMarshallerUtils}.
*
* @author Philip Helger
*/
public final class JAXBMarshallerUtilsTest
{
@Test
public void testAll () throws JAXBException
{
final JAXBContext aCtx = JAXBContextCache.getInstance ().getFromCache (MockJAXBArchive.class);
assertNotNull (aCtx);
final Marshaller aMarshaller = aCtx.createMarshaller ();
assertTrue (JAXBMarshallerUtils.isSunJAXB2Marshaller (aMarshaller));
// Encoding
assertEquals (CCharset.CHARSET_UTF_8, JAXBMarshallerUtils.getEncoding (aMarshaller));
JAXBMarshallerUtils.setEncoding (aMarshaller, CCharset.CHARSET_ISO_8859_1);
assertEquals (CCharset.CHARSET_ISO_8859_1, JAXBMarshallerUtils.getEncoding (aMarshaller));
// Formatted output?
assertFalse (JAXBMarshallerUtils.isFormattedOutput (aMarshaller));
JAXBMarshallerUtils.setFormattedOutput (aMarshaller, true);
assertTrue (JAXBMarshallerUtils.isFormattedOutput (aMarshaller));
// Schema location
assertNull (JAXBMarshallerUtils.getSchemaLocation (aMarshaller));
JAXBMarshallerUtils.setSchemaLocation (aMarshaller, "any");
assertEquals ("any", JAXBMarshallerUtils.getSchemaLocation (aMarshaller));
// no-namespace Schema location
assertNull (JAXBMarshallerUtils.getNoNamespaceSchemaLocation (aMarshaller));
JAXBMarshallerUtils.setNoNamespaceSchemaLocation (aMarshaller, "any");
assertEquals ("any", JAXBMarshallerUtils.getNoNamespaceSchemaLocation (aMarshaller));
// Fragment?
assertFalse (JAXBMarshallerUtils.isFragment (aMarshaller));
JAXBMarshallerUtils.setFragment (aMarshaller, true);
assertTrue (JAXBMarshallerUtils.isFragment (aMarshaller));
}
}
|
apache-2.0
|
BriData/DBus
|
dbus-keeper/keeper-web/app/components/ProjectManage/ProjectTable/AddProjectTable/ProjectTableTabs/SinkForm.js
|
8211
|
/**
* @author 戎晓伟
 * @description Basic information settings (sink configuration form)
*/
import React, { PropTypes, Component } from 'react'
import { Form, Input, Checkbox, Radio, Select, Row, Col } from 'antd'
import { FormattedMessage } from 'react-intl'
import { intlMessage } from '@/app/i18n'
const FormItem = Form.Item
const RadioGroup = Radio.Group
const Option = Select.Option
@Form.create({ wrappedComponentRef: true })
export default class SinkForm extends Component {
constructor (props) {
super(props)
this.formMessage = {
en: {
descriptionPlaceholder: 'description,Up to 150 words',
projectNameMessage: 'The project name is required',
principalMessage: 'The principal is required',
topologyMessage:
'The number of topology must be integers and 0<topologyNum<=100.'
},
zh: {
descriptionPlaceholder: '项目描述,最多150字符',
projectNameMessage: '项目名称为必填项',
principalMessage: '负责人为必填项',
topologyMessage: 'topology 个数必须为整数且 0<topologyNum<=100'
}
}
}
componentDidMount = () => {
const { projectId } = this.props
    this.handleSaveToRedux({ projectId })
const {onGetTableTopics} = this.props
onGetTableTopics({projectId})
};
  /**
   * @description Radio change handler; saves the selected value to redux
   */
  handleRadioChange = e => {
    const value = e.target.value
    // Save the selected output type to redux
    this.handleSaveToRedux({ outputType: value })
  };
  /**
   * @param value [object String] the value to save
   * @param key [object String] the key to save under
   * @description select onChange handler
   */
  handleSelectChange = (value, key) => {
    // Save the key/value pair to redux
    this.handleSaveToRedux({ [key]: value })
  };
  /**
   * @param value [object String] the value to save
   * @param key [object String] the key to save under
   * @description select onBlur handler
   */
  handleSelectBlur = (value, key) => {
    // Save the key/value pair to redux
    this.handleSaveToRedux({ [key]: value })
  };
  /**
   * @description Sink change handler; saves the selected sink to redux
   */
  handleSinkChange = (value, key) => {
    this.handleSelectChange(value, key)
  };
  /**
   * @param value [object object] key/value pairs to store
   * @description Saves the sink data to redux
   */
  handleSaveToRedux = value => {
    const { getFieldsValue } = this.props.form
    const { sink, onSetSink } = this.props
    // Merge current form values with the incoming change and store the sink
    onSetSink({ ...sink, ...getFieldsValue(), ...value })
  };
  /**
   * @description Builds an input placeholder from an i18n message
   */
  handlePlaceholder = fun => id =>
    fun({
      id: 'app.components.input.placeholder',
      values: {
        name: fun({ id })
      }
    });
render () {
const { sink, sinkList, topicList } = this.props
const { getFieldDecorator } = this.props.form
const sinkListArray = Object.values(sinkList.result) || []
const sinkId = sink && sink.sinkId
let topicListArray = topicList.result || {}
topicListArray = sinkId && topicListArray[sinkId] ? topicListArray[sinkId] : []
const localeMessage = intlMessage(this.props.locale, this.formMessage)
const placeholder = this.handlePlaceholder(localeMessage)
let {projectInfo} = this.props
projectInfo = projectInfo.result.project || {}
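    // Topics come back prefixed with "<projectName>."; strip that prefix for
    // display, since the form re-adds it via the addon span before the select.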
let outputTopic = sink && sink.outputTopic
if (outputTopic && projectInfo && projectInfo.projectName) {
if (outputTopic.indexOf(projectInfo.projectName + '.') === 0)
outputTopic = sink.outputTopic.substr(projectInfo.projectName.length + 1)
}
topicListArray = topicListArray.map(topic => {
if (projectInfo && projectInfo.projectName && topic.indexOf(projectInfo.projectName + '.') === 0) {
return topic.substr(projectInfo.projectName.length + 1)
} else {
return topic
}
})
const formItemLayout = {
labelCol: { span: 4 },
wrapperCol: { span: 12 }
}
return (
<Form autoComplete="off" layout="horizontal">
<FormItem label={<FormattedMessage
id="app.components.projectManage.projectTable.dataOutputFormat"
defaultMessage="数据输出格式"
/>} {...formItemLayout}>
<RadioGroup
defaultValue={sink && ((sink.outputType === 'json') ? 'json' : 'ums1.3')}
onChange={this.handleRadioChange}
>
<Radio value="ums1.3">ums</Radio>
<Radio disabled={true} value="json">json</Radio>
</RadioGroup>
</FormItem>
{sink && sink.outputType &&
sink.outputType !== 'json' && (
<FormItem label={<FormattedMessage
id="app.components.projectManage.projectTable.umsOutputVersion"
defaultMessage="UMS输出版本"
/>} {...formItemLayout}>
{getFieldDecorator('outputType', {
initialValue: sink.outputType && 'ums1.3',
rules: [
{
required: true,
message: localeMessage({ id: 'projectNameMessage' }),
whitespace: true
}
]
})(
<Select
showSearch
optionFilterProp='children'
placeholder="Please select outputType"
onChange={value => this.handleSelectChange(value, 'outputType')}
>
<Option value="ums1.1">ums 1.1</Option>
<Option value="ums1.2">ums 1.2</Option>
<Option value="ums1.3">ums 1.3</Option>
</Select>
)}
</FormItem>
)}
<FormItem label="Sink" {...formItemLayout}>
{getFieldDecorator('sinkId', {
initialValue:
(sink && `${sink.sinkId}`) || `${sinkListArray && sinkListArray[0].id}`
})(
<Select
showSearch
optionFilterProp='children'
placeholder="Please select sinkId"
onChange={value => this.handleSinkChange(value, 'sinkId')}
>
{sinkListArray.length > 0 &&
sinkListArray.map((item, index) => (
<Option value={`${item.id}`} key={`${item.id}`}>
{item.sinkName}
</Option>
))}
</Select>
)}
</FormItem>
<FormItem label="Topic" {...formItemLayout}>
<span
style={{
width: "auto",
border: "1px solid rgb(217, 217, 217)",
float: "left",
display: "block",
borderRadius: "4px 0 0 4px",
boxShadow: "none",
padding: "9px 5px",
}}
className="ant-input-group-addon">
{projectInfo.projectName + '.'}
</span>
{getFieldDecorator('outputTopic', {
initialValue:
outputTopic ||
(topicListArray && topicListArray[0])
})(
<Select
className="sinkTopicSelect"
style={{
overflow: "hidden",
display: "block",
width: "auto",
}}
mode="combobox"
showSearch
optionFilterProp="children"
filterOption={(input, option) =>
option.props.children
.toLowerCase()
.indexOf(input.toLowerCase()) >= 0
}
placeholder="Please select topic"
onChange={value => this.handleSelectBlur(value, 'outputTopic')}
>
{topicListArray.length > 0 &&
topicListArray.map(item => (
<Option value={`${item}`} key={item}>
{item}
</Option>
))}
</Select>
)}
</FormItem>
</Form>
)
}
}
SinkForm.propTypes = {
locale: PropTypes.any,
form: PropTypes.object,
projectId: PropTypes.string,
sinkList: PropTypes.object,
topicList: PropTypes.object,
sink: PropTypes.object,
setBasicInfo: PropTypes.func,
onGetTableSinks: PropTypes.func,
onGetTableTopics: PropTypes.func,
onSetSink: PropTypes.func
}
|
apache-2.0
|
hmrc/fset-faststream
|
app/model/persisted/ApplicationForNotification.scala
|
1437
|
/*
* Copyright 2022 HM Revenue & Customs
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package model.persisted
import play.api.libs.json.Json
import reactivemongo.bson.BSONDocument
case class ApplicationForNotification(
applicationId: String,
userId: String,
preferredName: String,
applicationStatus: String
)
object ApplicationForNotification {
def fromBson(doc: BSONDocument) = {
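    // Required fields are extracted with .get, so a document missing any of
    // them fails fast with an exception instead of yielding a partial result.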
val applicationId = doc.getAs[String]("applicationId").get
val userId = doc.getAs[String]("userId").get
val applicationStatus = doc.getAs[String]("applicationStatus").get
val personalDetailsRoot = doc.getAs[BSONDocument]("personal-details").get
val preferredName = personalDetailsRoot.getAs[String]("preferredName").get
ApplicationForNotification(applicationId, userId, preferredName, applicationStatus)
}
implicit val applicationForNotificationFormats = Json.format[ApplicationForNotification]
}
|
apache-2.0
|
stevenhva/InfoLearn_OpenOLAT
|
src/main/java/org/olat/course/nodes/projectbroker/ProjectBrokerDropboxController.java
|
4378
|
/**
* OLAT - Online Learning and Training<br>
* http://www.olat.org
* <p>
* Licensed under the Apache License, Version 2.0 (the "License"); <br>
* you may not use this file except in compliance with the License.<br>
* You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,<br>
* software distributed under the License is distributed on an "AS IS" BASIS, <br>
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. <br>
* See the License for the specific language governing permissions and <br>
* limitations under the License.
* <p>
* Copyright (c) since 2004 at Multimedia- & E-Learning Services (MELS),<br>
* University of Zurich, Switzerland.
* <hr>
* <a href="http://www.openolat.org">
* OpenOLAT - Online Learning and Training</a><br>
* This file has been modified by the OpenOLAT community. Changes are licensed
* under the Apache 2.0 license as the original file.
*/
package org.olat.course.nodes.projectbroker;
import java.io.File;
import java.util.Date;
import org.olat.core.gui.UserRequest;
import org.olat.core.gui.control.Controller;
import org.olat.core.gui.control.Event;
import org.olat.core.gui.control.WindowControl;
import org.olat.core.id.Identity;
import org.olat.course.nodes.CourseNode;
import org.olat.course.nodes.projectbroker.datamodel.Project;
import org.olat.course.nodes.projectbroker.datamodel.ProjectEvent;
import org.olat.course.nodes.projectbroker.datamodel.Project.EventType;
import org.olat.course.nodes.projectbroker.service.ProjectBrokerModuleConfiguration;
import org.olat.course.nodes.ta.DropboxController;
import org.olat.course.run.environment.CourseEnvironment;
import org.olat.course.run.userview.UserCourseEnvironment;
import org.olat.modules.ModuleConfiguration;
/**
* @author Christian Guretzki
*/
public class ProjectBrokerDropboxController extends DropboxController {
private Project project;
private ProjectBrokerModuleConfiguration moduleConfig;
public ProjectBrokerDropboxController(UserRequest ureq, WindowControl wControl, ModuleConfiguration config, CourseNode node, UserCourseEnvironment userCourseEnv, boolean previewMode, Project project, ProjectBrokerModuleConfiguration moduleConfig) {
super(ureq, wControl);
this.config = config;
this.node = node;
this.userCourseEnv = userCourseEnv;
this.project = project;
this.moduleConfig = moduleConfig;
init(ureq, wControl, previewMode, false);
}
/**
* @see org.olat.core.gui.control.DefaultController#event(org.olat.core.gui.UserRequest, org.olat.core.gui.control.Controller, org.olat.core.gui.control.Event)
*/
public void event(UserRequest ureq, Controller source, Event event) {
if (isDropboxAccessible(project, moduleConfig)) {
super.event(ureq, source, event);
} else {
getLogger().debug("Dropbos is no longer accessible");
this.showInfo("dropbox.is.not.accessible");
}
}
private boolean isDropboxAccessible(Project project, ProjectBrokerModuleConfiguration moduleConfig) {
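    // The dropbox is only accessible inside the handout event window: access
    // is denied before the event's start date and after its end date.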
if (moduleConfig.isProjectEventEnabled(EventType.HANDOUT_EVENT)) {
ProjectEvent handoutEvent = project.getProjectEvent(EventType.HANDOUT_EVENT);
Date now = new Date();
if (handoutEvent.getStartDate() != null) {
if (now.before(handoutEvent.getStartDate())) {
return false;
}
}
if (handoutEvent.getEndDate() != null) {
if (now.after(handoutEvent.getEndDate())) {
return false;
}
}
}
return true;
}
/**
* Return dropbox base-path. e.g. course/<COURSE_ID>/dropbox/<NODE_id>/<USER_NAME>
* @see org.olat.course.nodes.ta.DropboxController#getRelativeDropBoxFilePath(org.olat.core.id.Identity)
*/
protected String getRelativeDropBoxFilePath(Identity identity) {
return getDropboxBasePathForProject(this.project, userCourseEnv.getCourseEnvironment(), node) + File.separator + identity.getName();
}
/**
* Return dropbox base-path. e.g. course/<COURSE_ID>/dropbox/<NODE_id>
 * To get the path for a certain user, call method 'getRelativeDropBoxFilePath'.
*
* @param project
* @param courseEnv
* @param cNode
* @return
*/
public static String getDropboxBasePathForProject(Project project, CourseEnvironment courseEnv, CourseNode cNode) {
return getDropboxPathRelToFolderRoot(courseEnv, cNode) + File.separator + project.getKey() ;
}
}
|
apache-2.0
|
sciactive/tilmeld
|
src/Entities/Mail/CancelEmailChange.php
|
1211
|
<?php namespace Tilmeld\Entities\Mail;
// phpcs:disable Generic.Files.LineLength.TooLong
/**
* Cancel Email Change
*
* @license https://www.apache.org/licenses/LICENSE-2.0
* @author Hunter Perrin <hperrin@gmail.com>
* @copyright SciActive.com
* @link http://tilmeld.org/
*/
class CancelEmailChange extends \uMailPHP\Definition {
public static $cname = 'Cancel Email Change';
  public static $description = 'This email is sent to a user\'s old email address when they change their email, letting them cancel the change.';
public static $expectsRecipient = true;
public static $macros = [
'cancel_link' => 'The URL to cancel the change, to be used in a link.',
'old_email' => 'The old email address.',
'new_email' => 'The new email address.',
'to_phone' => 'The recipient\'s phone number.',
'to_timezone' => 'The recipient\'s timezone.',
'to_address' => 'The recipient\'s address.',
];
public static function getMacro($name) {
}
public static function getSubject() {
return 'Hey #to_first_name#, your email address has been changed on #site_name#.';
}
public static function getHTML() {
return file_get_contents(__DIR__.'/html/CancelEmailChange.html');
}
}
|
apache-2.0
|
wyukawa/presto
|
presto-main/src/main/java/io/prestosql/execution/QueryStateTimer.java
|
8606
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.prestosql.execution;
import com.google.common.base.Ticker;
import io.airlift.units.Duration;
import org.joda.time.DateTime;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicReference;
import static io.airlift.units.Duration.succinctNanos;
import static java.lang.Math.max;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
class QueryStateTimer
{
private final Ticker ticker;
private final DateTime createTime = DateTime.now();
private final long createNanos;
private final AtomicReference<Long> beginResourceWaitingNanos = new AtomicReference<>();
private final AtomicReference<Long> beginDispatchingNanos = new AtomicReference<>();
private final AtomicReference<Long> beginPlanningNanos = new AtomicReference<>();
private final AtomicReference<Long> beginFinishingNanos = new AtomicReference<>();
private final AtomicReference<Long> endNanos = new AtomicReference<>();
private final AtomicReference<Duration> queuedTime = new AtomicReference<>();
private final AtomicReference<Duration> resourceWaitingTime = new AtomicReference<>();
private final AtomicReference<Duration> dispatchingTime = new AtomicReference<>();
private final AtomicReference<Duration> executionTime = new AtomicReference<>();
private final AtomicReference<Duration> planningTime = new AtomicReference<>();
private final AtomicReference<Duration> finishingTime = new AtomicReference<>();
private final AtomicReference<Long> beginAnalysisNanos = new AtomicReference<>();
private final AtomicReference<Duration> analysisTime = new AtomicReference<>();
private final AtomicReference<Long> beginDistributedPlanningNanos = new AtomicReference<>();
private final AtomicReference<Duration> distributedPlanningTime = new AtomicReference<>();
private final AtomicReference<Long> lastHeartbeatNanos;
public QueryStateTimer(Ticker ticker)
{
this.ticker = requireNonNull(ticker, "ticker is null");
this.createNanos = tickerNanos();
this.lastHeartbeatNanos = new AtomicReference<>(createNanos);
}
//
// State transitions
//
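    // Each transition method first replays all earlier transitions (e.g.
    // beginPlanning calls beginDispatching, which calls beginWaitingForResources),
    // and every timing is recorded via compareAndSet(null, ...), so skipped or
    // out-of-order notifications still leave each phase duration set exactly once.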
public void beginWaitingForResources()
{
beginWaitingForResources(tickerNanos());
}
private void beginWaitingForResources(long now)
{
queuedTime.compareAndSet(null, nanosSince(createNanos, now));
beginResourceWaitingNanos.compareAndSet(null, now);
}
public void beginDispatching()
{
beginDispatching(tickerNanos());
}
private void beginDispatching(long now)
{
beginWaitingForResources(now);
resourceWaitingTime.compareAndSet(null, nanosSince(beginResourceWaitingNanos, now));
beginDispatchingNanos.compareAndSet(null, now);
}
public void beginPlanning()
{
beginPlanning(tickerNanos());
}
private void beginPlanning(long now)
{
beginDispatching(now);
dispatchingTime.compareAndSet(null, nanosSince(beginDispatchingNanos, now));
beginPlanningNanos.compareAndSet(null, now);
}
public void beginStarting()
{
beginStarting(tickerNanos());
}
private void beginStarting(long now)
{
beginPlanning(now);
planningTime.compareAndSet(null, nanosSince(beginPlanningNanos, now));
}
public void beginRunning()
{
beginRunning(tickerNanos());
}
private void beginRunning(long now)
{
beginStarting(now);
}
public void beginFinishing()
{
beginFinishing(tickerNanos());
}
private void beginFinishing(long now)
{
beginRunning(now);
beginFinishingNanos.compareAndSet(null, now);
}
public void endQuery()
{
endQuery(tickerNanos());
}
private void endQuery(long now)
{
beginFinishing(now);
finishingTime.compareAndSet(null, nanosSince(beginFinishingNanos, now));
executionTime.compareAndSet(null, nanosSince(beginPlanningNanos, now));
endNanos.compareAndSet(null, now);
}
//
// Additional timings
//
public void beginAnalyzing()
{
beginAnalysisNanos.compareAndSet(null, tickerNanos());
}
public void endAnalysis()
{
analysisTime.compareAndSet(null, nanosSince(beginAnalysisNanos, tickerNanos()));
}
public void beginDistributedPlanning()
{
beginDistributedPlanningNanos.compareAndSet(null, tickerNanos());
}
public void endDistributedPlanning()
{
distributedPlanningTime.compareAndSet(null, nanosSince(beginDistributedPlanningNanos, tickerNanos()));
}
public void recordHeartbeat()
{
lastHeartbeatNanos.set(tickerNanos());
}
//
// Stats
//
public DateTime getCreateTime()
{
return createTime;
}
public Optional<DateTime> getExecutionStartTime()
{
return toDateTime(beginPlanningNanos);
}
public Duration getElapsedTime()
{
if (endNanos.get() != null) {
return succinctNanos(endNanos.get() - createNanos);
}
return nanosSince(createNanos, tickerNanos());
}
public Duration getQueuedTime()
{
Duration queuedTime = this.queuedTime.get();
if (queuedTime != null) {
return queuedTime;
}
// if queue time is not set, the query is still queued
return getElapsedTime();
}
public Duration getResourceWaitingTime()
{
return getDuration(resourceWaitingTime, beginResourceWaitingNanos);
}
public Duration getDispatchingTime()
{
return getDuration(dispatchingTime, beginDispatchingNanos);
}
public Duration getPlanningTime()
{
return getDuration(planningTime, beginPlanningNanos);
}
public Duration getFinishingTime()
{
return getDuration(finishingTime, beginFinishingNanos);
}
public Duration getExecutionTime()
{
return getDuration(executionTime, beginPlanningNanos);
}
public Optional<DateTime> getEndTime()
{
return toDateTime(endNanos);
}
public Duration getAnalysisTime()
{
return getDuration(analysisTime, beginAnalysisNanos);
}
public Duration getDistributedPlanningTime()
{
return getDuration(distributedPlanningTime, beginDistributedPlanningNanos);
}
public DateTime getLastHeartbeat()
{
return toDateTime(lastHeartbeatNanos.get());
}
//
// Helper methods
//
private long tickerNanos()
{
return ticker.read();
}
private static Duration nanosSince(AtomicReference<Long> start, long end)
{
Long startNanos = start.get();
if (startNanos == null) {
throw new IllegalStateException("Start time not set");
}
return nanosSince(startNanos, end);
}
private static Duration nanosSince(long start, long now)
{
return succinctNanos(max(0, now - start));
}
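    // Prefer the recorded final duration; if the phase is still in progress,
    // report the elapsed time since its start; if it never started, report zero.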
private Duration getDuration(AtomicReference<Duration> finalDuration, AtomicReference<Long> start)
{
Duration duration = finalDuration.get();
if (duration != null) {
return duration;
}
Long startNanos = start.get();
if (startNanos != null) {
return nanosSince(startNanos, tickerNanos());
}
return new Duration(0, MILLISECONDS);
}
private Optional<DateTime> toDateTime(AtomicReference<Long> instantNanos)
{
Long nanos = instantNanos.get();
if (nanos == null) {
return Optional.empty();
}
return Optional.of(toDateTime(nanos));
}
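    // The ticker is monotonic and carries no epoch, so wall-clock instants are
    // derived by offsetting the creation DateTime by the nanos elapsed since create.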
private DateTime toDateTime(long instantNanos)
{
long millisSinceCreate = NANOSECONDS.toMillis(instantNanos - createNanos);
return new DateTime(createTime.getMillis() + millisSinceCreate);
}
}
|
apache-2.0
|
raghuvenmarathoor/ng-commons
|
modules/error/cs-error.module.js
|
92
|
(function() {
'use strict';
angular
.module('cs-error', [
]);
})();
|
apache-2.0
|
finsterthecat/dailysales
|
src/test/java/bean/MyBeanTest.java
|
1480
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package bean;
import javax.ejb.embeddable.EJBContainer;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
import org.junit.Ignore;
/**
*
* @author tonybrouwer
*/
@Ignore
public class MyBeanTest {
public MyBeanTest() {
}
@BeforeClass
public static void setUpClass() {
}
@AfterClass
public static void tearDownClass() {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of addNumbers method, of class MyBean.
*/
@Test
public void testAddNumbers() throws Exception {
System.out.println("addNumbers");
int numberA = 1;
int numberB = 2;
EJBContainer container = javax.ejb.embeddable.EJBContainer.createEJBContainer();
MyBean instance = (MyBean)container.getContext().lookup("java:global/classes/MyBean");
int expResult = 3;
int result = instance.addNumbers(numberA, numberB);
assertEquals(expResult, result);
container.close();
// TODO review the generated test code and remove the default call to fail.
//fail("The test case is a prototype.");
}
}
|
apache-2.0
|
tavis-software/Tavis.Link
|
src/Link/MediaTypes/ILinkExtractor.cs
|
602
|
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
namespace Tavis
{
/// <summary>
/// This interface can be implemented by media type parsers to provide a generic way to access links in a representation
/// </summary>
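    /// <remarks>
    /// Usage sketch (illustrative only: <c>extractor</c>, <c>document</c> and
    /// <c>CreateLink</c> are assumed names, not defined in this file):
    /// <code>
    /// IEnumerable&lt;ILink&gt; links = extractor.GetLinks(rel =&gt; CreateLink(rel), document, "next");
    /// </code>
    /// </remarks>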
public interface ILinkExtractor
{
Type SupportedType { get; }
ILink GetLink(Func<string, ILink> factory, object content, string relation, string anchor = null);
IEnumerable<ILink> GetLinks(Func<string,ILink> factory, object content, string relation = null, string anchor = null);
}
}
|
apache-2.0
|
lmcgupe/theiagreenkeeperTest
|
src/filesystem/node/node-filesystem.spec.ts
|
45676
|
/*
* Copyright (C) 2017 TypeFox and others.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0
*/
import { FileChangesEvent, FileChange, FileChangeType, FileSystemClient } from '../common';
import * as chai from "chai";
import * as chaiAsPromised from "chai-as-promised";
import * as fs from "fs-extra";
import * as os from "os";
import URI from "../../application/common/uri";
import { FileUri } from "../../application/node/file-uri";
import { FileSystem } from "../common/filesystem";
import { FileSystemNode } from "./node-filesystem";
const expect = chai.expect;
const uuidV1 = require('uuid/v1');
const TEST_ROOT = FileUri.create(os.tmpdir()).appendPath("node-fs-root");
describe("NodeFileSystem", () => {
const roots: URI[] = [];
const fileSystems: FileSystem[] = [];
let root: URI;
let fileSystem: FileSystem;
before(() => {
chai.config.showDiff = true;
chai.config.includeStack = true;
chai.should();
chai.use(chaiAsPromised);
});
beforeEach(() => {
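        // Each test gets a fresh, uniquely-named root directory (uuid-based),
        // so tests cannot observe one another's files.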
root = TEST_ROOT.appendPath(uuidV1());
fs.mkdirsSync(FileUri.fsPath(root));
expect(fs.existsSync(FileUri.fsPath(root))).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(root))).to.be.empty;
roots.push(root);
fileSystem = createFileSystem();
fileSystems.push(fileSystem);
});
after(() => {
fileSystems.forEach(fileSystem => {
(fileSystem as any).watcher.close();
});
roots.map(root => FileUri.fsPath(root)).forEach(root => {
if (fs.existsSync(root)) {
try {
fs.removeSync(root);
} catch (error) {
// Please do not fail during the clean-up phase.
}
}
});
});
describe("01 #getFileStat", () => {
it("Should be rejected if not file exists under the given URI.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.getFileStat(uri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Should return a proper result for a file.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo");
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
return fileSystem.getFileStat(uri.toString()).then(stat => {
expect(stat.isDirectory).to.be.false;
expect(stat.uri).to.eq(uri.toString());
});
});
it("Should return a proper result for a directory.", () => {
const uri_1 = root.appendPath("foo.txt");
const uri_2 = root.appendPath("bar.txt");
fs.writeFileSync(FileUri.fsPath(uri_1), "foo");
fs.writeFileSync(FileUri.fsPath(uri_2), "bar");
expect(fs.statSync(FileUri.fsPath(uri_1)).isFile()).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri_2)).isFile()).to.be.true;
return fileSystem.getFileStat(root.toString()).then(stat => {
expect(stat.hasChildren).to.be.true;
expect(stat.children!.length).to.equal(2);
});
});
});
describe("02 #resolveContent", () => {
it("Should be rejected with an error when trying to resolve the content of a non-existing file.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.resolveContent(uri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Should be rejected with an error when trying to resolve the content of a directory.", () => {
const uri = root.appendPath("foo");
fs.mkdirSync(FileUri.fsPath(uri));
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isDirectory()).to.be.true;
return fileSystem.resolveContent(uri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Should be rejected with an error if the desired encoding cannot be handled.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo", { encoding: "utf8" });
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("foo");
return fileSystem.resolveContent(uri.toString(), { encoding: "unknownEncoding" }).should.eventually.be.rejectedWith(Error);
        });
it("Should be return with the content for an existing file.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo", { encoding: "utf8" });
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("foo");
return fileSystem.resolveContent(uri.toString()).should.eventually.have.property("content").that.is.equal("foo");
});
it("Should be return with the stat object for an existing file.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo", { encoding: "utf8" });
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("foo");
const content = fileSystem.resolveContent(uri.toString());
return Promise.all([
content.should.eventually.be.fulfilled,
content.should.eventually.have.be.an("object"),
content.should.eventually.have.property("stat"),
content.should.eventually.have.property("stat").that.has.property("uri").that.is.equal(uri.toString()),
content.should.eventually.have.property("stat").that.has.property("size").that.is.greaterThan(1),
content.should.eventually.have.property("stat").that.has.property("lastModification").that.is.greaterThan(1),
content.should.eventually.have.property("stat").that.has.property("isDirectory").that.is.false,
content.should.eventually.have.property("stat").that.not.have.property("hasChildren"),
content.should.eventually.have.property("stat").that.not.have.property("children"),
]);
});
});
describe("03 #setContent", () => {
it("Should be rejected with an error when trying to set the content of a non-existing file.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
const stat = {
uri: uri.toString(),
lastModification: new Date().getTime(),
isDirectory: false
};
return fileSystem.setContent(stat, "foo").should.eventually.be.rejectedWith(Error);
});
it("Should be rejected with an error when trying to set the content of a directory.", () => {
const uri = root.appendPath("foo");
fs.mkdirSync(FileUri.fsPath(uri));
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isDirectory()).to.be.true;
const fileSystem = createFileSystem();
return fileSystem.getFileStat(uri.toString()).then(stat => {
fileSystem.setContent(stat, "foo").should.be.eventually.be.rejectedWith(Error);
});
});
it("Should be rejected with an error when trying to set the content of a file which is out-of-sync.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo", { encoding: "utf8" });
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("foo");
const fileSystem = createFileSystem();
return fileSystem.getFileStat(uri.toString()).then(stat => {
// Make sure current file stat is out-of-sync.
// Here the content is modified in the way that file sizes will differ.
fs.writeFileSync(FileUri.fsPath(uri), "longer", { encoding: "utf8" });
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("longer");
fileSystem.setContent(stat, "baz").should.be.eventually.be.rejectedWith(Error);
});
});
it("Should be rejected with an error when trying to set the content when the desired encoding cannot be handled.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo", { encoding: "utf8" });
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("foo");
const fileSystem = createFileSystem();
return fileSystem.getFileStat(uri.toString()).then(stat => {
fileSystem.setContent(stat, "baz", { encoding: "unknownEncoding" }).should.be.eventually.be.rejectedWith(Error);
});
});
it("Should return with a stat representing the latest state of the successfully modified file.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo", { encoding: "utf8" });
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("foo");
const fileSystem = createFileSystem();
return fileSystem.getFileStat(uri.toString()).then(currentStat => {
return fileSystem.setContent(currentStat, "baz");
}).then(newStat => {
expect(fs.readFileSync(FileUri.fsPath(uri), { encoding: "utf8" })).to.be.equal("baz");
});
});
});
describe("04 #move", () => {
it("Should be rejected with an error if no file exists under the source location.", () => {
const sourceUri = root.appendPath("foo.txt");
const targetUri = root.appendPath("bar.txt");
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.false;
return fileSystem.move(sourceUri.toString(), targetUri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Should be rejected with an error if target exists and overwrite is not set to \'true\'.", () => {
const sourceUri = root.appendPath("foo.txt");
const targetUri = root.appendPath("bar.txt");
fs.writeFileSync(FileUri.fsPath(sourceUri), "foo");
fs.writeFileSync(FileUri.fsPath(targetUri), "bar");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isFile()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isFile()).to.be.true;
return fileSystem.move(sourceUri.toString(), targetUri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Moving a file to an empty directory. Should be rejected with an error because files cannot be moved to an existing directory locations.", () => {
const sourceUri = root.appendPath("foo.txt");
const targetUri = root.appendPath("bar");
fs.writeFileSync(FileUri.fsPath(sourceUri), "foo");
fs.mkdirSync(FileUri.fsPath(targetUri));
expect(fs.statSync(FileUri.fsPath(sourceUri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(sourceUri), "utf8")).to.be.equal("foo");
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.be.empty;
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).should.eventually.be.rejectedWith(Error);
});
it("Moving a file to a non-empty directory. Should be rejected with and error because files cannot be moved to an existing directory locations.", () => {
const sourceUri = root.appendPath("foo.txt");
const targetUri = root.appendPath("bar");
const targetFileUri_01 = targetUri.appendPath("bar_01.txt");
const targetFileUri_02 = targetUri.appendPath("bar_02.txt");
fs.writeFileSync(FileUri.fsPath(sourceUri), "foo");
fs.mkdirSync(FileUri.fsPath(targetUri));
fs.writeFileSync(FileUri.fsPath(targetFileUri_01), "bar_01");
fs.writeFileSync(FileUri.fsPath(targetFileUri_02), "bar_02");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(sourceUri), "utf8")).to.be.equal("foo");
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(targetFileUri_01), "utf8")).to.be.equal("bar_01");
expect(fs.readFileSync(FileUri.fsPath(targetFileUri_02), "utf8")).to.be.equal("bar_02");
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.include("bar_01.txt").and.to.include("bar_02.txt");
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).should.eventually.be.rejectedWith(Error);
});
it("Moving an empty directory to file. Should be rejected with an error because directories and cannot be moved to existing file locations.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar.txt");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.writeFileSync(FileUri.fsPath(targetUri), "bar");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(targetUri), "utf8")).to.be.equal("bar");
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.be.empty;
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).should.eventually.be.rejectedWith(Error);
});
it("Moving a non-empty directory to file. Should be rejected with an error because directories cannot be moved to existing file locations.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar.txt");
const sourceFileUri_01 = sourceUri.appendPath("foo_01.txt");
const sourceFileUri_02 = sourceUri.appendPath("foo_02.txt");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.writeFileSync(FileUri.fsPath(targetUri), "bar");
fs.writeFileSync(FileUri.fsPath(sourceFileUri_01), "foo_01");
fs.writeFileSync(FileUri.fsPath(sourceFileUri_02), "foo_02");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(targetUri), "utf8")).to.be.equal("bar");
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.include("foo_01.txt").and.to.include("foo_02.txt");
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).should.eventually.be.rejectedWith(Error);
});
it("Moving file to file. Should overwrite the target file content and delete the source file.", () => {
const sourceUri = root.appendPath("foo.txt");
const targetUri = root.appendPath("bar.txt");
fs.writeFileSync(FileUri.fsPath(sourceUri), "foo");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isFile()).to.be.true;
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.false;
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).then(stat => {
expect(stat).is.an("object").and.has.property("uri").that.equals(targetUri.toString());
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.false;
expect(fs.statSync(FileUri.fsPath(targetUri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(targetUri), "utf8")).to.be.equal("foo");
});
});
it("Moving an empty directory to an empty directory. Should remove the source directory.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.mkdirSync(FileUri.fsPath(targetUri));
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.be.empty;
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.be.empty;
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).then(stat => {
expect(stat).is.an("object").and.has.property("uri").that.equals(targetUri.toString());
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.false;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.be.empty;
});
});
it("Moving an empty directory to a non-empty directory. Should be rejected because the target folder is not empty.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
const targetFileUri_01 = targetUri.appendPath("bar_01.txt");
const targetFileUri_02 = targetUri.appendPath("bar_02.txt");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.mkdirSync(FileUri.fsPath(targetUri));
fs.writeFileSync(FileUri.fsPath(targetFileUri_01), "bar_01");
fs.writeFileSync(FileUri.fsPath(targetFileUri_02), "bar_02");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.be.empty;
expect(fs.readFileSync(FileUri.fsPath(targetFileUri_01), "utf8")).to.be.equal("bar_01");
expect(fs.readFileSync(FileUri.fsPath(targetFileUri_02), "utf8")).to.be.equal("bar_02");
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.include("bar_01.txt").and.to.include("bar_02.txt");
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).should.eventually.be.rejectedWith(Error);
});
it("Moving a non-empty directory to an empty directory. Source folder and its content should be moved to the target location.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
const sourceFileUri_01 = sourceUri.appendPath("foo_01.txt");
const sourceFileUri_02 = sourceUri.appendPath("foo_02.txt");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.mkdirSync(FileUri.fsPath(targetUri));
fs.writeFileSync(FileUri.fsPath(sourceFileUri_01), "foo_01");
fs.writeFileSync(FileUri.fsPath(sourceFileUri_02), "foo_02");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.be.empty;
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.include("foo_01.txt").and.to.include("foo_02.txt");
expect(fs.readFileSync(FileUri.fsPath(sourceFileUri_01), "utf8")).to.be.equal("foo_01");
expect(fs.readFileSync(FileUri.fsPath(sourceFileUri_02), "utf8")).to.be.equal("foo_02");
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).then(stat => {
expect(stat).is.an("object").and.has.property("uri").that.equals(targetUri.toString());
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.false;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.include("foo_01.txt").and.to.include("foo_02.txt");
expect(fs.readFileSync(FileUri.fsPath(targetUri.appendPath("foo_01.txt")), "utf8")).to.be.equal("foo_01");
expect(fs.readFileSync(FileUri.fsPath(targetUri.appendPath("foo_02.txt")), "utf8")).to.be.equal("foo_02");
});
});
it("Moving a non-empty directory to a non-empty directory. Should be rejected because the target location is not empty.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
const sourceFileUri_01 = sourceUri.appendPath("foo_01.txt");
const sourceFileUri_02 = sourceUri.appendPath("foo_02.txt");
const targetFileUri_01 = targetUri.appendPath("bar_01.txt");
const targetFileUri_02 = targetUri.appendPath("bar_02.txt");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.mkdirSync(FileUri.fsPath(targetUri));
fs.writeFileSync(FileUri.fsPath(sourceFileUri_01), "foo_01");
fs.writeFileSync(FileUri.fsPath(sourceFileUri_02), "foo_02");
fs.writeFileSync(FileUri.fsPath(targetFileUri_01), "bar_01");
fs.writeFileSync(FileUri.fsPath(targetFileUri_02), "bar_02");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(sourceFileUri_01), "utf8")).to.be.equal("foo_01");
expect(fs.readFileSync(FileUri.fsPath(sourceFileUri_02), "utf8")).to.be.equal("foo_02");
expect(fs.readFileSync(FileUri.fsPath(targetFileUri_01), "utf8")).to.be.equal("bar_01");
expect(fs.readFileSync(FileUri.fsPath(targetFileUri_02), "utf8")).to.be.equal("bar_02");
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.include("foo_01.txt").and.to.include("foo_02.txt");
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.include("bar_01.txt").and.to.include("bar_02.txt");
return fileSystem.move(sourceUri.toString(), targetUri.toString(), { overwrite: true }).should.eventually.be.rejectedWith(Error);
});
});
describe("05 #copy", () => {
it("Copy a file from non existing location. Should be rejected with an error. Nothing to copy.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
fs.mkdirSync(FileUri.fsPath(targetUri));
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.false;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
return fileSystem.copy(sourceUri.toString(), targetUri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Copy a file to existing location without overwrite enabled. Should be rejected with an error.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
fs.mkdirSync(FileUri.fsPath(targetUri));
fs.mkdirSync(FileUri.fsPath(sourceUri));
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(targetUri)).isDirectory()).to.be.true;
return fileSystem.copy(sourceUri.toString(), targetUri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Copy an empty directory to a non-existing location. Should return with the file stat representing the new file at the target location.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
fs.mkdirSync(FileUri.fsPath(sourceUri));
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.false;
return fileSystem.copy(sourceUri.toString(), targetUri.toString()).then(stat => {
expect(stat).to.be.an("object");
expect(stat).to.have.property("uri").that.is.equal(targetUri.toString());
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.true;
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.true;
});
});
it("Copy an empty directory to a non-existing, nested location. Should return with the file stat representing the new file at the target location.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("nested/path/to/bar");
fs.mkdirSync(FileUri.fsPath(sourceUri));
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.false;
return fileSystem.copy(sourceUri.toString(), targetUri.toString()).then(stat => {
expect(stat).to.be.an("object");
expect(stat).to.have.property("uri").that.is.equal(targetUri.toString());
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.true;
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.true;
});
});
it("Copy a directory with content to a non-existing location. Should return with the file stat representing the new file at the target location.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("bar");
const subSourceUri = sourceUri.appendPath("foo_01.txt");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.writeFileSync(FileUri.fsPath(subSourceUri), "foo");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(subSourceUri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(subSourceUri), "utf8")).to.be.equal("foo");
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.false;
return fileSystem.copy(sourceUri.toString(), targetUri.toString()).then(stat => {
expect(stat).to.be.an("object");
expect(stat).to.have.property("uri").that.is.equal(targetUri.toString());
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.true;
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.contain("foo_01.txt");
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.contain("foo_01.txt");
expect(fs.readFileSync(FileUri.fsPath(subSourceUri), "utf8")).to.be.equal("foo");
expect(fs.readFileSync(FileUri.fsPath(targetUri.appendPath("foo_01.txt")), "utf8")).to.be.equal("foo");
});
});
it("Copy a directory with content to a non-existing, nested location. Should return with the file stat representing the new file at the target location.", () => {
const sourceUri = root.appendPath("foo");
const targetUri = root.appendPath("nested/path/to/bar");
const subSourceUri = sourceUri.appendPath("foo_01.txt");
fs.mkdirSync(FileUri.fsPath(sourceUri));
fs.writeFileSync(FileUri.fsPath(subSourceUri), "foo");
expect(fs.statSync(FileUri.fsPath(sourceUri)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(subSourceUri)).isFile()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(subSourceUri), "utf8")).to.be.equal("foo");
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.false;
return fileSystem.copy(sourceUri.toString(), targetUri.toString()).then(stat => {
expect(stat).to.be.an("object");
expect(stat).to.have.property("uri").that.is.equal(targetUri.toString());
expect(fs.existsSync(FileUri.fsPath(sourceUri))).to.be.true;
expect(fs.existsSync(FileUri.fsPath(targetUri))).to.be.true;
expect(fs.readdirSync(FileUri.fsPath(sourceUri))).to.contain("foo_01.txt");
expect(fs.readdirSync(FileUri.fsPath(targetUri))).to.contain("foo_01.txt");
expect(fs.readFileSync(FileUri.fsPath(subSourceUri), "utf8")).to.be.equal("foo");
expect(fs.readFileSync(FileUri.fsPath(targetUri.appendPath("foo_01.txt")), "utf8")).to.be.equal("foo");
});
});
});
describe("06 #getWorkspaceRoot", () => {
it("Should be return with the stat of the root. The root stat has information of its direct descendants but not the children of the descendants.", () => {
const uri_1 = root.appendPath("foo");
const uri_2 = root.appendPath("bar");
const uri_1_01 = uri_1.appendPath("foo_01.txt");
const uri_1_02 = uri_1.appendPath("foo_02.txt");
const uri_2_01 = uri_2.appendPath("bar_01.txt");
const uri_2_02 = uri_2.appendPath("bar_02.txt");
fs.mkdirSync(FileUri.fsPath(uri_1));
fs.mkdirSync(FileUri.fsPath(uri_2));
fs.writeFileSync(FileUri.fsPath(uri_1_01), "foo_01");
fs.writeFileSync(FileUri.fsPath(uri_1_02), "foo_02");
fs.writeFileSync(FileUri.fsPath(uri_2_01), "bar_01");
fs.writeFileSync(FileUri.fsPath(uri_2_02), "bar_02");
expect(fs.statSync(FileUri.fsPath(uri_1)).isDirectory()).to.be.true;
expect(fs.statSync(FileUri.fsPath(uri_2)).isDirectory()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(uri_1_01), "utf8")).to.be.equal("foo_01");
expect(fs.readFileSync(FileUri.fsPath(uri_1_02), "utf8")).to.be.equal("foo_02");
expect(fs.readFileSync(FileUri.fsPath(uri_2_01), "utf8")).to.be.equal("bar_01");
expect(fs.readFileSync(FileUri.fsPath(uri_2_02), "utf8")).to.be.equal("bar_02");
expect(fs.readdirSync(FileUri.fsPath(uri_1))).to.include("foo_01.txt").and.to.include("foo_02.txt");
expect(fs.readdirSync(FileUri.fsPath(uri_2))).to.include("bar_01.txt").and.to.include("bar_02.txt");
return fileSystem.getWorkspaceRoot().then(stat => {
expect(stat).to.be.an("object");
expect(stat).to.have.property("uri").that.equals(root.toString());
expect(stat).to.have.property("hasChildren").that.be.true;
expect(stat).to.have.property("children").that.is.not.undefined;
expect(stat).to.have.property("children").that.has.lengthOf(2);
expect(stat.children!.map(childStat => childStat.uri)).to.contain(uri_1.toString()).and.contain(uri_2.toString());
expect(stat.children!.find(childStat => childStat.uri === uri_1.toString())).to.be.not.undefined;
expect(stat.children!.find(childStat => childStat.uri === uri_2.toString())).to.be.not.undefined;
expect(stat.children!.find(childStat => childStat.uri === uri_1.toString())!.children).to.be.undefined;
expect(stat.children!.find(childStat => childStat.uri === uri_2.toString())!.children).to.be.undefined;
});
});
});
describe("07 #createFile", () => {
it("Should be rejected with an error if a file already exists with the given URI.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo");
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
return fileSystem.createFile(uri.toString()).should.be.eventually.rejectedWith(Error);
});
it("Should be rejected with an error if the encoding is given but cannot be handled.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.createFile(uri.toString(), { encoding: "unknownEncoding" }).should.be.eventually.rejectedWith(Error);
});
it("Should create an empty file without any contents by default.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.createFile(uri.toString()).then(stat => {
expect(stat).is.an("object");
expect(stat).has.property("uri").that.is.equal(uri.toString());
expect(stat).not.has.property("children");
expect(stat).not.has.property("hasChildren");
expect(fs.readFileSync(FileUri.fsPath(uri), "utf8")).to.be.empty;
});
});
it("Should create a file with the desired content.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.createFile(uri.toString(), { content: "foo" }).then(stat => {
expect(stat).is.an("object");
expect(stat).has.property("uri").that.is.equal(uri.toString());
expect(stat).not.has.property("children");
expect(stat).not.has.property("hasChildren");
expect(fs.readFileSync(FileUri.fsPath(uri), "utf8")).to.be.equal("foo");
});
});
it("Should create a file with the desired content into a non-existing, nested location.", () => {
const uri = root.appendPath("foo/bar/baz.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.createFile(uri.toString(), { content: "foo" }).then(stat => {
expect(stat).is.an("object");
expect(stat).has.property("uri").that.is.equal(uri.toString());
expect(stat).not.has.property("children");
expect(stat).not.has.property("hasChildren");
expect(fs.readFileSync(FileUri.fsPath(uri), "utf8")).to.be.equal("foo");
});
});
it("Should create a file with the desired content and encoding.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.createFile(uri.toString(), { content: "foo", encoding: "utf8" }).then(stat => {
expect(stat).is.an("object");
expect(stat).has.property("uri").that.is.equal(uri.toString());
expect(stat).not.has.property("children");
expect(stat).not.has.property("hasChildren");
expect(fs.readFileSync(FileUri.fsPath(uri), "utf8")).to.be.equal("foo");
});
});
});
describe("08 #createFolder", () => {
it("Should be rejected with an error if a directory already exist under the desired URI.", () => {
const uri = root.appendPath("foo");
fs.mkdirSync(FileUri.fsPath(uri));
expect(fs.statSync(FileUri.fsPath(uri)).isDirectory()).to.be.true;
return fileSystem.createFolder(uri.toString()).should.eventually.be.rejectedWith(Error);
});
it("Should create a directory and return with the stat object on successful directory creation.", () => {
const uri = root.appendPath("foo");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.createFolder(uri.toString()).then(stat => {
expect(stat).to.be.an("object");
expect(stat).to.have.property("uri").that.equals(uri.toString());
expect(stat).to.have.property("hasChildren").that.is.false;
expect(stat).to.have.property("children").that.is.empty;
});
});
it("Should create a directory and return with the stat object on successful directory creation.", () => {
const uri = root.appendPath("foo/bar");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.createFolder(uri.toString()).then(stat => {
expect(stat).to.be.an("object");
expect(stat).to.have.property("uri").that.equals(uri.toString());
expect(stat).to.have.property("hasChildren").that.is.false;
expect(stat).to.have.property("children").that.is.empty;
});
});
});
describe("09 #touch", () => {
it("Should create a new file if it does not exist yet.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.touchFile(uri.toString()).then(stat => {
expect(stat).is.an("object");
expect(stat).has.property("uri").that.equals(uri.toString());
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
});
});
it("Should update the modification timestamp on an existing file.", done => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo");
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
const fileSystem = createFileSystem();
fileSystem.getFileStat(uri.toString()).then(initialStat => {
expect(initialStat).is.an("object");
expect(initialStat).has.property("uri").that.equals(uri.toString());
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
return initialStat;
}).then(initialStat => {
// https://nodejs.org/en/docs/guides/working-with-different-filesystems/#timestamp-resolution
sleep(1000).then(() => {
fileSystem.touchFile(uri.toString()).then(updatedStat => {
expect(updatedStat).is.an("object");
expect(updatedStat).has.property("uri").that.equals(uri.toString());
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
expect(updatedStat.lastModification).to.be.greaterThan(initialStat.lastModification);
done();
});
});
});
});
});
describe("#10 delete", () => {
it("Should be rejected when the file to delete does not exist.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.delete(uri.toString(), { moveToTrash: false }).should.be.eventually.rejectedWith(Error);
});
it("Should delete the file.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo");
expect(fs.readFileSync(FileUri.fsPath(uri), "utf8")).to.be.equal("foo");
return fileSystem.delete(uri.toString(), { moveToTrash: false }).then(() => {
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
});
});
it("Should delete a directory without content.", () => {
const uri = root.appendPath("foo");
fs.mkdirSync(FileUri.fsPath(uri));
expect(fs.statSync(FileUri.fsPath(uri)).isDirectory()).to.be.true;
return fileSystem.delete(uri.toString(), { moveToTrash: false }).then(() => {
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
});
});
it("Should delete a directory with all its content.", () => {
const uri = root.appendPath("foo");
const subUri = uri.appendPath("bar.txt");
fs.mkdirSync(FileUri.fsPath(uri));
fs.writeFileSync(FileUri.fsPath(subUri), "bar");
expect(fs.statSync(FileUri.fsPath(uri)).isDirectory()).to.be.true;
expect(fs.readFileSync(FileUri.fsPath(subUri), "utf8")).to.be.equal("bar");
return fileSystem.delete(uri.toString(), { moveToTrash: false }).then(() => {
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
expect(fs.existsSync(FileUri.fsPath(subUri))).to.be.false;
});
});
});
describe("#11 getEncoding", () => {
it("Should be rejected with an error if no file exists under the given URI.", () => {
const uri = root.appendPath("foo.txt");
expect(fs.existsSync(FileUri.fsPath(uri))).to.be.false;
return fileSystem.getEncoding(uri.toString()).should.be.eventually.rejectedWith(Error);
});
it("Should be rejected with an error if the URI points to a directory instead of a file.", () => {
const uri = root.appendPath("foo");
fs.mkdirSync(FileUri.fsPath(uri));
expect(fs.statSync(FileUri.fsPath(uri)).isDirectory()).to.be.true;
return fileSystem.getEncoding(uri.toString()).should.be.eventually.rejectedWith(Error);
});
it("Should return with the encoding of the file.", () => {
const uri = root.appendPath("foo.txt");
fs.writeFileSync(FileUri.fsPath(uri), "foo");
expect(fs.statSync(FileUri.fsPath(uri)).isFile()).to.be.true;
return fileSystem.getEncoding(uri.toString()).should.eventually.be.equal("utf8");
});
});
describe("#12 constructor", () => {
it("Should throw an exception if the workspace root does not exist.", () => {
return expect(() => new FileSystemNode(FileUri.create("some/missing/path"))).to.throw(Error);
});
});
describe("#13 watchFileChanges", () => {
it("Should receive file changes events from in the workspace by default.", function (done) {
this.timeout(4000);
let expectedEvents = [
new FileChange(root.toString(), FileChangeType.ADDED),
new FileChange(root.appendPath("foo").toString(), FileChangeType.ADDED),
new FileChange(root.appendPath("foo").appendPath("bar").toString(), FileChangeType.ADDED),
new FileChange(root.appendPath("foo").appendPath("bar").appendPath("baz.txt").toString(), FileChangeType.ADDED)
];
const fileSystem = createFileSystem();
const client: FileSystemClient = {
onFileChanges(event: FileChangesEvent) {
const index = expectedEvents.findIndex(value =>
event.changes.length === 1 && event.changes[0].equals(value));
if (index >= 0) {
expectedEvents.splice(index, 1);
}
if (expectedEvents.length === 0) {
(<FileSystemNode>fileSystem).setClient(undefined);
fileSystem.dispose();
done();
}
}
};
(<FileSystemNode>fileSystem).setClient(client);
fs.mkdirSync(FileUri.fsPath(root.appendPath("foo")));
expect(fs.statSync(FileUri.fsPath(root.appendPath("foo"))).isDirectory()).to.be.true;
fs.mkdirSync(FileUri.fsPath(root.appendPath("foo").appendPath("bar")));
expect(fs.statSync(FileUri.fsPath(root.appendPath("foo").appendPath("bar"))).isDirectory()).to.be.true;
fs.writeFileSync(FileUri.fsPath(root.appendPath("foo").appendPath("bar").appendPath("baz.txt")), "baz");
expect(fs.readFileSync(FileUri.fsPath(root.appendPath("foo").appendPath("bar").appendPath("baz.txt")), "utf8")).to.be.equal("baz");
sleep(3000).then(() => {
expect(expectedEvents).to.be.empty;
});
});
});
function createFileSystem(uri: URI = root): FileSystem {
return new FileSystemNode(uri);
}
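// Helper: resolves after `time` milliseconds. Used above to step over the
// coarse (up to ~1s) mtime resolution of some filesystems before re-touching a file.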
function sleep(time: number) {
return new Promise((resolve) => setTimeout(resolve, time));
}
});
process.on("unhandledRejection", (reason: any) => {
console.error("Unhandled promise rejection: " + reason);
});
|
apache-2.0
|
cbeams-archive/spring-framework-2.5.x
|
src/org/springframework/jca/cci/core/support/CciDaoSupport.java
|
5006
|
/*
* Copyright 2002-2008 the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.springframework.jca.cci.core.support;
import javax.resource.cci.Connection;
import javax.resource.cci.ConnectionFactory;
import javax.resource.cci.ConnectionSpec;
import org.springframework.dao.support.DaoSupport;
import org.springframework.jca.cci.CannotGetCciConnectionException;
import org.springframework.jca.cci.connection.ConnectionFactoryUtils;
import org.springframework.jca.cci.core.CciTemplate;
/**
* Convenient super class for CCI-based data access objects.
*
* <p>Requires a {@link javax.resource.cci.ConnectionFactory} to be set,
* providing a {@link org.springframework.jca.cci.core.CciTemplate} based
* on it to subclasses through the {@link #getCciTemplate()} method.
*
* <p>This base class is mainly intended for CciTemplate usage but can
* also be used when working with a Connection directly or when using
* <code>org.springframework.jca.cci.object</code> classes.
*
* @author Thierry Templier
* @author Juergen Hoeller
* @since 1.2
* @see #setConnectionFactory
* @see #getCciTemplate
* @see org.springframework.jca.cci.core.CciTemplate
*/
public abstract class CciDaoSupport extends DaoSupport {
private CciTemplate cciTemplate;
/**
* Set the ConnectionFactory to be used by this DAO.
*/
public final void setConnectionFactory(ConnectionFactory connectionFactory) {
if (this.cciTemplate == null || connectionFactory != this.cciTemplate.getConnectionFactory()) {
this.cciTemplate = createCciTemplate(connectionFactory);
}
}
/**
* Create a CciTemplate for the given ConnectionFactory.
* Only invoked if populating the DAO with a ConnectionFactory reference!
* <p>Can be overridden in subclasses to provide a CciTemplate instance
* with different configuration, or a custom CciTemplate subclass.
* @param connectionFactory the CCI ConnectionFactory to create a CciTemplate for
* @return the new CciTemplate instance
* @see #setConnectionFactory(javax.resource.cci.ConnectionFactory)
*/
protected CciTemplate createCciTemplate(ConnectionFactory connectionFactory) {
return new CciTemplate(connectionFactory);
}
/**
* Return the ConnectionFactory used by this DAO.
*/
public final ConnectionFactory getConnectionFactory() {
return this.cciTemplate.getConnectionFactory();
}
/**
* Set the CciTemplate for this DAO explicitly,
* as an alternative to specifying a ConnectionFactory.
*/
public final void setCciTemplate(CciTemplate cciTemplate) {
this.cciTemplate = cciTemplate;
}
/**
* Return the CciTemplate for this DAO,
* pre-initialized with the ConnectionFactory or set explicitly.
*/
public final CciTemplate getCciTemplate() {
return this.cciTemplate;
}
protected final void checkDaoConfig() {
if (this.cciTemplate == null) {
throw new IllegalArgumentException("'connectionFactory' or 'cciTemplate' is required");
}
}
/**
* Obtain a CciTemplate derived from the main template instance,
* inheriting the ConnectionFactory and other settings but
* overriding the ConnectionSpec used for obtaining Connections.
* @param connectionSpec the CCI ConnectionSpec that the returned
* template instance is supposed to obtain Connections for
* @return the derived template instance
* @see org.springframework.jca.cci.core.CciTemplate#getDerivedTemplate(javax.resource.cci.ConnectionSpec)
*/
protected final CciTemplate getCciTemplate(ConnectionSpec connectionSpec) {
return getCciTemplate().getDerivedTemplate(connectionSpec);
}
/**
* Get a CCI Connection, either from the current transaction or a new one.
* @return the CCI Connection
* @throws org.springframework.jca.cci.CannotGetCciConnectionException
* if the attempt to get a Connection failed
* @see org.springframework.jca.cci.connection.ConnectionFactoryUtils#getConnection(javax.resource.cci.ConnectionFactory)
*/
protected final Connection getConnection() throws CannotGetCciConnectionException {
return ConnectionFactoryUtils.getConnection(getConnectionFactory());
}
/**
* Close the given CCI Connection, created via this bean's ConnectionFactory,
* if it isn't bound to the thread.
* @param con Connection to close
* @see org.springframework.jca.cci.connection.ConnectionFactoryUtils#releaseConnection
*/
protected final void releaseConnection(Connection con) {
ConnectionFactoryUtils.releaseConnection(con, getConnectionFactory());
}
}
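// A minimal usage sketch (hypothetical subclass; "PersonDao", the interaction
// spec and the records are illustrative assumptions, not part of this class):
//
//   public class PersonDao extends CciDaoSupport {
//       public Record findPerson(InteractionSpec spec, Record input) {
//           return getCciTemplate().execute(spec, input);
//       }
//   }
//
// Subclasses typically only call getCciTemplate() and let this base class
// manage the ConnectionFactory wiring.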
|
apache-2.0
|
kktts/AppCompare
|
app/src/main/java/com/konstantinost/appcompare/MenuActivity.java
|
1681
|
package com.konstantinost.appcompare;
import android.content.Intent;
import android.support.v7.app.AppCompatActivity;
import android.os.Bundle;
import android.view.View;
import android.widget.AdapterView;
import android.widget.ListView;
public class MenuActivity extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.menu_activity_layout);
final ListView menuOptionsList = (ListView) findViewById(R.id.listview);
String[] values = new String[]{
"Applications",
"Results",
"About"
};
MenuListAdapter adapter = new MenuListAdapter(this, values);
menuOptionsList.setAdapter(adapter);
menuOptionsList.setOnItemClickListener(new AdapterView.OnItemClickListener() {
@Override
public void onItemClick(AdapterView<?> parent, View view, int position, long id) {
String option = String.valueOf(menuOptionsList.getAdapter().getItem(position));
switch (option) {
case "Applications":
startActivity(new Intent(MenuActivity.this, InstalledApplicationsActivity.class));
break;
case "Results":
startActivity(new Intent(MenuActivity.this, ResultActivity.class));
break;
case "About":
startActivity(new Intent(MenuActivity.this, AboutActivity.class));
break;
}
}
});
}
}
|
apache-2.0
|
yacloud-io/design-pattern
|
cmd/composite.go
|
1224
|
// Copyright © 2017 Yacloud Co.,Ltd. <yacloud.inc@gmail.com>
//
package cmd
import (
"github.com/spf13/cobra"
lbs "github.com/yacloud-io/design-pattern/library"
)
// compositeCmd represents the composite command
var compositeCmd = &cobra.Command{
Use: "composite",
Short: "Composite design pattern",
Long:  `Has an implementation of inheritance via composition.`,
Run: func(cmd *cobra.Command, args []string) {
localSwim := lbs.Swim
swimmer := lbs.CompositeSwimmerA{
MySwim: &localSwim,
}
swimmer.MyAthelete.Train()
(*swimmer.MySwim)()
swimmer2 := lbs.CompositeSwimmerB{
&lbs.Athlete{},
&lbs.SwimmerImplementor{},
}
swimmer2.Train()
swimmer2.Swim()
shark := lbs.Shark{
SharkSwim: lbs.Swim,
}
shark.Eat()
shark.SharkSwim()
},
}
func init() {
RootCmd.AddCommand(compositeCmd)
// Here you will define your flags and configuration settings.
// Cobra supports Persistent Flags which will work for this command
// and all subcommands, e.g.:
// compositeCmd.PersistentFlags().String("foo", "", "A help for foo")
// Cobra supports local flags which will only run when this command
// is called directly, e.g.:
// compositeCmd.Flags().BoolP("toggle", "t", false, "Help message for toggle")
}
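// A minimal sketch of the embedding idea this command demonstrates (the types
// below are illustrative, not part of the library package):
//
//	type Athlete struct{}
//	func (a *Athlete) Train() { /* shared behaviour */ }
//
//	type CompositeSwimmer struct {
//		Athlete     // embedding: CompositeSwimmer reuses Train()
//		Swim func() // injected behaviour, called as s.Swim()
//	}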
|
apache-2.0
|
hortonworks/cloudbreak
|
cluster-cm/src/main/java/com/sequenceiq/cloudbreak/cm/commands/CommandResourceListTypeReference.java
|
274
|
package com.sequenceiq.cloudbreak.cm.commands;
import java.util.List;
import com.fasterxml.jackson.core.type.TypeReference;
import com.sequenceiq.cloudbreak.cm.model.CommandResource;
class CommandResourceListTypeReference extends TypeReference<List<CommandResource>> {
}
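// A minimal usage sketch (assumes a Jackson ObjectMapper and a JSON array of
// CommandResource objects; "json" and "commands" are illustrative names):
//
//   List<CommandResource> commands = new ObjectMapper()
//           .readValue(json, new CommandResourceListTypeReference());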
|
apache-2.0
|
spepping/fop-cs
|
src/java/org/apache/fop/complexscripts/fonts/GlyphSubstitutionTable.java
|
67477
|
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/* $Id$ */
package org.apache.fop.complexscripts.fonts;
import java.nio.CharBuffer;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.fop.complexscripts.scripts.ScriptProcessor;
import org.apache.fop.complexscripts.util.GlyphSequence;
import org.apache.fop.complexscripts.util.GlyphTester;
// CSOFF: InnerAssignmentCheck
// CSOFF: LineLengthCheck
// CSOFF: NoWhitespaceAfterCheck
/**
* The <code>GlyphSubstitutionTable</code> class is a glyph table that implements
* <code>GlyphSubstitution</code> functionality.
* @author Glenn Adams
*/
public class GlyphSubstitutionTable extends GlyphTable {
/** logging instance */
private static final Log log = LogFactory.getLog(GlyphSubstitutionTable.class); // CSOK: ConstantNameCheck
/** single substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_SINGLE = 1;
/** multiple substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_MULTIPLE = 2;
/** alternate substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_ALTERNATE = 3;
/** ligature substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_LIGATURE = 4;
/** contextual substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_CONTEXTUAL = 5;
/** chained contextual substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_CHAINED_CONTEXTUAL = 6;
/** extension substitution substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_EXTENSION_SUBSTITUTION = 7;
/** reverse chained contextual single substitution subtable type */
public static final int GSUB_LOOKUP_TYPE_REVERSE_CHAINED_SINGLE = 8;
/**
* Instantiate a <code>GlyphSubstitutionTable</code> object using the specified lookups
* and subtables.
* @param gdef glyph definition table that applies
* @param lookups a map of lookup specifications to subtable identifier strings
* @param subtables a list of identified subtables
*/
public GlyphSubstitutionTable ( GlyphDefinitionTable gdef, Map lookups, List subtables ) {
super ( gdef, lookups );
if ( ( subtables == null ) || ( subtables.size() == 0 ) ) {
throw new AdvancedTypographicTableFormatException ( "subtables must be non-empty" );
} else {
for ( Iterator it = subtables.iterator(); it.hasNext();) {
Object o = it.next();
if ( o instanceof GlyphSubstitutionSubtable ) {
addSubtable ( (GlyphSubtable) o );
} else {
throw new AdvancedTypographicTableFormatException ( "subtable must be a glyph substitution subtable" );
}
}
freezeSubtables();
}
}
/**
* Perform substitution processing using all matching lookups.
* @param gs an input glyph sequence
* @param script a script identifier
* @param language a language identifier
* @return the substituted (output) glyph sequence
*/
public GlyphSequence substitute ( GlyphSequence gs, String script, String language ) {
GlyphSequence ogs;
Map/*<LookupSpec,List<LookupTable>>*/ lookups = matchLookups ( script, language, "*" );
if ( ( lookups != null ) && ( lookups.size() > 0 ) ) {
ScriptProcessor sp = ScriptProcessor.getInstance ( script );
ogs = sp.substitute ( this, gs, script, language, lookups );
} else {
ogs = gs;
}
return ogs;
}
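// A minimal usage sketch (assumes a fully populated table "gsub" and an input
// sequence "gs"; the identifiers and the script/language tags are illustrative):
//
//   GlyphSequence out = gsub.substitute ( gs, "latn", "dflt" );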
/**
* Map a lookup type name to its constant (integer) value.
* @param name lookup type name
* @return lookup type
*/
public static int getLookupTypeFromName ( String name ) {
int t;
String s = name.toLowerCase();
if ( "single".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_SINGLE;
} else if ( "multiple".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_MULTIPLE;
} else if ( "alternate".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_ALTERNATE;
} else if ( "ligature".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_LIGATURE;
} else if ( "contextual".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_CONTEXTUAL;
} else if ( "chainedcontextual".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_CHAINED_CONTEXTUAL;
} else if ( "extensionsubstitution".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_EXTENSION_SUBSTITUTION;
} else if ( "reversechainiingcontextualsingle".equals ( s ) ) {
t = GSUB_LOOKUP_TYPE_REVERSE_CHAINED_SINGLE;
} else {
t = -1;
}
return t;
}
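// For example, getLookupTypeFromName ( "ligature" ) yields
// GSUB_LOOKUP_TYPE_LIGATURE, and unrecognized names map to -1;
// getLookupTypeName below is its inverse.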
/**
* Map a lookup type constant (integer) value to its name.
* @param type lookup type
* @return lookup type name
*/
public static String getLookupTypeName ( int type ) {
String tn = null;
switch ( type ) {
case GSUB_LOOKUP_TYPE_SINGLE:
tn = "single";
break;
case GSUB_LOOKUP_TYPE_MULTIPLE:
tn = "multiple";
break;
case GSUB_LOOKUP_TYPE_ALTERNATE:
tn = "alternate";
break;
case GSUB_LOOKUP_TYPE_LIGATURE:
tn = "ligature";
break;
case GSUB_LOOKUP_TYPE_CONTEXTUAL:
tn = "contextual";
break;
case GSUB_LOOKUP_TYPE_CHAINED_CONTEXTUAL:
tn = "chainedcontextual";
break;
case GSUB_LOOKUP_TYPE_EXTENSION_SUBSTITUTION:
tn = "extensionsubstitution";
break;
case GSUB_LOOKUP_TYPE_REVERSE_CHAINED_SINGLE:
tn = "reversechainiingcontextualsingle";
break;
default:
tn = "unknown";
break;
}
return tn;
}
/**
* Create a substitution subtable according to the specified arguments.
* @param type subtable type
* @param id subtable identifier
* @param sequence subtable sequence
* @param flags subtable flags
* @param format subtable format
* @param coverage subtable coverage table
* @param entries subtable entries
* @return a glyph subtable instance
*/
public static GlyphSubtable createSubtable ( int type, String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
GlyphSubtable st = null;
switch ( type ) {
case GSUB_LOOKUP_TYPE_SINGLE:
st = SingleSubtable.create ( id, sequence, flags, format, coverage, entries );
break;
case GSUB_LOOKUP_TYPE_MULTIPLE:
st = MultipleSubtable.create ( id, sequence, flags, format, coverage, entries );
break;
case GSUB_LOOKUP_TYPE_ALTERNATE:
st = AlternateSubtable.create ( id, sequence, flags, format, coverage, entries );
break;
case GSUB_LOOKUP_TYPE_LIGATURE:
st = LigatureSubtable.create ( id, sequence, flags, format, coverage, entries );
break;
case GSUB_LOOKUP_TYPE_CONTEXTUAL:
st = ContextualSubtable.create ( id, sequence, flags, format, coverage, entries );
break;
case GSUB_LOOKUP_TYPE_CHAINED_CONTEXTUAL:
st = ChainedContextualSubtable.create ( id, sequence, flags, format, coverage, entries );
break;
case GSUB_LOOKUP_TYPE_REVERSE_CHAINED_SINGLE:
st = ReverseChainedSingleSubtable.create ( id, sequence, flags, format, coverage, entries );
break;
default:
break;
}
return st;
}
/**
* Create a substitution subtable according to the specified arguments.
* @param type subtable type
* @param id subtable identifier
* @param sequence subtable sequence
* @param flags subtable flags
* @param format subtable format
* @param coverage list of coverage table entries
* @param entries subtable entries
* @return a glyph subtable instance
*/
public static GlyphSubtable createSubtable ( int type, String id, int sequence, int flags, int format, List coverage, List entries ) {
return createSubtable ( type, id, sequence, flags, format, GlyphCoverageTable.createCoverageTable ( coverage ), entries );
}
private abstract static class SingleSubtable extends GlyphSubstitutionSubtable {
SingleSubtable ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage );
}
/** {@inheritDoc} */
public int getType() {
return GSUB_LOOKUP_TYPE_SINGLE;
}
/** {@inheritDoc} */
public boolean isCompatible ( GlyphSubtable subtable ) {
return subtable instanceof SingleSubtable;
}
/** {@inheritDoc} */
public boolean substitute ( GlyphSubstitutionState ss ) {
int gi = ss.getGlyph(), ci;
if ( ( ci = getCoverageIndex ( gi ) ) < 0 ) {
return false;
} else {
int go = getGlyphForCoverageIndex ( ci, gi );
if ( ( go < 0 ) || ( go > 65535 ) ) {
go = 65535;
}
ss.putGlyph ( go, ss.getAssociation(), Boolean.TRUE );
ss.consume(1);
return true;
}
}
/**
* Obtain glyph for coverage index.
* @param ci coverage index
* @param gi original glyph index
* @return substituted glyph value
* @throws IllegalArgumentException if coverage index is not valid
*/
public abstract int getGlyphForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException;
static GlyphSubstitutionSubtable create ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
if ( format == 1 ) {
return new SingleSubtableFormat1 ( id, sequence, flags, format, coverage, entries );
} else if ( format == 2 ) {
return new SingleSubtableFormat2 ( id, sequence, flags, format, coverage, entries );
} else {
throw new UnsupportedOperationException();
}
}
}
private static class SingleSubtableFormat1 extends SingleSubtable {
private int delta;
private int ciMax;
SingleSubtableFormat1 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
List entries = new ArrayList ( 1 );
entries.add ( Integer.valueOf ( delta ) );
return entries;
}
/** {@inheritDoc} */
public int getGlyphForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException {
if ( ci <= ciMax ) {
return gi + delta;
} else {
throw new IllegalArgumentException ( "coverage index " + ci + " out of range, maximum coverage index is " + ciMax );
}
}
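// Format 1 applies a constant delta to the input glyph: e.g. with delta = 3,
// a covered glyph with index 100 is substituted by glyph 103.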
private void populate ( List entries ) {
if ( ( entries == null ) || ( entries.size() != 1 ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null and contain exactly one entry" );
} else {
Object o = entries.get(0);
int delta = 0;
if ( o instanceof Integer ) {
delta = ( (Integer) o ) . intValue();
} else {
throw new AdvancedTypographicTableFormatException ( "illegal entries entry, must be Integer, but is: " + o );
}
this.delta = delta;
this.ciMax = getCoverageSize() - 1;
}
}
}
private static class SingleSubtableFormat2 extends SingleSubtable {
private int[] glyphs;
SingleSubtableFormat2 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
List entries = new ArrayList ( glyphs.length );
for ( int i = 0, n = glyphs.length; i < n; i++ ) {
entries.add ( Integer.valueOf ( glyphs[i] ) );
}
return entries;
}
/** {@inheritDoc} */
public int getGlyphForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException {
if ( glyphs == null ) {
return -1;
} else if ( ci >= glyphs.length ) {
throw new IllegalArgumentException ( "coverage index " + ci + " out of range, maximum coverage index is " + glyphs.length );
} else {
return glyphs [ ci ];
}
}
private void populate ( List entries ) {
int i = 0, n = entries.size();
int[] glyphs = new int [ n ];
for ( Iterator it = entries.iterator(); it.hasNext();) {
Object o = it.next();
if ( o instanceof Integer ) {
int gid = ( (Integer) o ) .intValue();
if ( ( gid >= 0 ) && ( gid < 65536 ) ) {
glyphs [ i++ ] = gid;
} else {
throw new AdvancedTypographicTableFormatException ( "illegal glyph index: " + gid );
}
} else {
throw new AdvancedTypographicTableFormatException ( "illegal entries entry, must be Integer: " + o );
}
}
assert i == n;
assert this.glyphs == null;
this.glyphs = glyphs;
}
}
private abstract static class MultipleSubtable extends GlyphSubstitutionSubtable {
public MultipleSubtable ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage );
}
/** {@inheritDoc} */
public int getType() {
return GSUB_LOOKUP_TYPE_MULTIPLE;
}
/** {@inheritDoc} */
public boolean isCompatible ( GlyphSubtable subtable ) {
return subtable instanceof MultipleSubtable;
}
/** {@inheritDoc} */
public boolean substitute ( GlyphSubstitutionState ss ) {
int gi = ss.getGlyph(), ci;
if ( ( ci = getCoverageIndex ( gi ) ) < 0 ) {
return false;
} else {
int[] ga = getGlyphsForCoverageIndex ( ci, gi );
if ( ga != null ) {
ss.putGlyphs ( ga, GlyphSequence.CharAssociation.replicate ( ss.getAssociation(), ga.length ), Boolean.TRUE );
ss.consume(1);
}
return true;
}
}
/**
* Obtain glyph sequence for coverage index.
* @param ci coverage index
* @param gi original glyph index
* @return sequence of glyphs to substitute for input glyph
* @throws IllegalArgumentException if coverage index is not valid
*/
public abstract int[] getGlyphsForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException;
static GlyphSubstitutionSubtable create ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
if ( format == 1 ) {
return new MultipleSubtableFormat1 ( id, sequence, flags, format, coverage, entries );
} else {
throw new UnsupportedOperationException();
}
}
}
private static class MultipleSubtableFormat1 extends MultipleSubtable {
private int[][] gsa; // glyph sequence array, ordered by coverage index
MultipleSubtableFormat1 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
if ( gsa != null ) {
List entries = new ArrayList ( 1 );
entries.add ( gsa );
return entries;
} else {
return null;
}
}
/** {@inheritDoc} */
public int[] getGlyphsForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException {
if ( gsa == null ) {
return null;
} else if ( ci >= gsa.length ) {
throw new IllegalArgumentException ( "coverage index " + ci + " out of range, maximum coverage index is " + gsa.length );
} else {
return gsa [ ci ];
}
}
private void populate ( List entries ) {
if ( entries == null ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null" );
} else if ( entries.size() != 1 ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, " + entries.size() + " entries present, but requires 1 entry" );
} else {
Object o;
if ( ( ( o = entries.get(0) ) == null ) || ! ( o instanceof int[][] ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, first entry must be an int[][], but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
gsa = (int[][]) o;
}
}
}
}
private abstract static class AlternateSubtable extends GlyphSubstitutionSubtable {
public AlternateSubtable ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage );
}
/** {@inheritDoc} */
public int getType() {
return GSUB_LOOKUP_TYPE_ALTERNATE;
}
/** {@inheritDoc} */
public boolean isCompatible ( GlyphSubtable subtable ) {
return subtable instanceof AlternateSubtable;
}
/** {@inheritDoc} */
public boolean substitute ( GlyphSubstitutionState ss ) {
int gi = ss.getGlyph(), ci;
if ( ( ci = getCoverageIndex ( gi ) ) < 0 ) {
return false;
} else {
int[] ga = getAlternatesForCoverageIndex ( ci, gi );
int ai = ss.getAlternatesIndex ( ci );
int go;
if ( ( ai < 0 ) || ( ai >= ga.length ) ) {
go = gi;
} else {
go = ga [ ai ];
}
if ( ( go < 0 ) || ( go > 65535 ) ) {
go = 65535;
}
ss.putGlyph ( go, ss.getAssociation(), Boolean.TRUE );
ss.consume(1);
return true;
}
}
/**
* Obtain glyph alternates for coverage index.
* @param ci coverage index
* @param gi original glyph index
* @return sequence of glyphs to substitute for input glyph
* @throws IllegalArgumentException if coverage index is not valid
*/
public abstract int[] getAlternatesForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException;
static GlyphSubstitutionSubtable create ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
if ( format == 1 ) {
return new AlternateSubtableFormat1 ( id, sequence, flags, format, coverage, entries );
} else {
throw new UnsupportedOperationException();
}
}
}
private static class AlternateSubtableFormat1 extends AlternateSubtable {
private int[][] gaa; // glyph alternates array, ordered by coverage index
AlternateSubtableFormat1 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
List entries = new ArrayList ( gaa.length );
for ( int i = 0, n = gaa.length; i < n; i++ ) {
entries.add ( gaa[i] );
}
return entries;
}
/** {@inheritDoc} */
public int[] getAlternatesForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException {
if ( gaa == null ) {
return null;
} else if ( ci >= gaa.length ) {
throw new IllegalArgumentException ( "coverage index " + ci + " out of range, maximum coverage index is " + gaa.length );
} else {
return gaa [ ci ];
}
}
private void populate ( List entries ) {
int i = 0, n = entries.size();
int[][] gaa = new int [ n ][];
for ( Iterator it = entries.iterator(); it.hasNext();) {
Object o = it.next();
if ( o instanceof int[] ) {
gaa [ i++ ] = (int[]) o;
} else {
throw new AdvancedTypographicTableFormatException ( "illegal entries entry, must be int[]: " + o );
}
}
assert i == n;
assert this.gaa == null;
this.gaa = gaa;
}
}
private abstract static class LigatureSubtable extends GlyphSubstitutionSubtable {
public LigatureSubtable ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage );
}
/** {@inheritDoc} */
public int getType() {
return GSUB_LOOKUP_TYPE_LIGATURE;
}
/** {@inheritDoc} */
public boolean isCompatible ( GlyphSubtable subtable ) {
return subtable instanceof LigatureSubtable;
}
/** {@inheritDoc} */
public boolean substitute ( GlyphSubstitutionState ss ) {
int gi = ss.getGlyph(), ci;
if ( ( ci = getCoverageIndex ( gi ) ) < 0 ) {
return false;
} else {
LigatureSet ls = getLigatureSetForCoverageIndex ( ci, gi );
if ( ls != null ) {
boolean reverse = false;
GlyphTester ignores = ss.getIgnoreDefault();
int[] counts = ss.getGlyphsAvailable ( 0, reverse, ignores );
int nga = counts[0], ngi;
if ( nga > 1 ) {
int[] iga = ss.getGlyphs ( 0, nga, reverse, ignores, null, counts );
Ligature l = findLigature ( ls, iga );
if ( l != null ) {
int go = l.getLigature();
if ( ( go < 0 ) || ( go > 65535 ) ) {
go = 65535;
}
int nmg = 1 + l.getNumComponents();
// fetch matched number of component glyphs to determine matched and ignored count
ss.getGlyphs ( 0, nmg, reverse, ignores, null, counts );
nga = counts[0];
ngi = counts[1];
// fetch associations of matched component glyphs
GlyphSequence.CharAssociation[] laa = ss.getAssociations ( 0, nga );
// output ligature glyph and its association
ss.putGlyph ( go, GlyphSequence.CharAssociation.join ( laa ), Boolean.TRUE );
// fetch and output ignored glyphs (if necessary)
if ( ngi > 0 ) {
ss.putGlyphs ( ss.getIgnoredGlyphs ( 0, ngi ), ss.getIgnoredAssociations ( 0, ngi ), null );
}
ss.consume ( nga + ngi );
}
}
}
return true;
}
}
private Ligature findLigature ( LigatureSet ls, int[] glyphs ) {
Ligature[] la = ls.getLigatures();
int k = -1;
int maxComponents = -1;
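// prefer the candidate that matches the most components (longest match wins)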
for ( int i = 0, n = la.length; i < n; i++ ) {
Ligature l = la [ i ];
if ( l.matchesComponents ( glyphs ) ) {
int nc = l.getNumComponents();
if ( nc > maxComponents ) {
maxComponents = nc;
k = i;
}
}
}
if ( k >= 0 ) {
return la [ k ];
} else {
return null;
}
}
/**
* Obtain ligature set for coverage index.
* @param ci coverage index
* @param gi original glyph index
* @return ligature set (or null if none defined)
* @throws IllegalArgumentException if coverage index is not valid
*/
public abstract LigatureSet getLigatureSetForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException;
static GlyphSubstitutionSubtable create ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
if ( format == 1 ) {
return new LigatureSubtableFormat1 ( id, sequence, flags, format, coverage, entries );
} else {
throw new UnsupportedOperationException();
}
}
}
private static class LigatureSubtableFormat1 extends LigatureSubtable {
private LigatureSet[] ligatureSets;
public LigatureSubtableFormat1 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
List entries = new ArrayList ( ligatureSets.length );
for ( int i = 0, n = ligatureSets.length; i < n; i++ ) {
entries.add ( ligatureSets[i] );
}
return entries;
}
/** {@inheritDoc} */
public LigatureSet getLigatureSetForCoverageIndex ( int ci, int gi ) throws IllegalArgumentException {
if ( ligatureSets == null ) {
return null;
} else if ( ci >= ligatureSets.length ) {
throw new IllegalArgumentException ( "coverage index " + ci + " out of range, maximum coverage index is " + ligatureSets.length );
} else {
return ligatureSets [ ci ];
}
}
private void populate ( List entries ) {
int i = 0, n = entries.size();
LigatureSet[] ligatureSets = new LigatureSet [ n ];
for ( Iterator it = entries.iterator(); it.hasNext();) {
Object o = it.next();
if ( o instanceof LigatureSet ) {
ligatureSets [ i++ ] = (LigatureSet) o;
} else {
throw new AdvancedTypographicTableFormatException ( "illegal ligatures entry, must be LigatureSet: " + o );
}
}
assert i == n;
assert this.ligatureSets == null;
this.ligatureSets = ligatureSets;
}
}
private abstract static class ContextualSubtable extends GlyphSubstitutionSubtable {
public ContextualSubtable ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage );
}
/** {@inheritDoc} */
public int getType() {
return GSUB_LOOKUP_TYPE_CONTEXTUAL;
}
/** {@inheritDoc} */
public boolean isCompatible ( GlyphSubtable subtable ) {
return subtable instanceof ContextualSubtable;
}
/** {@inheritDoc} */
public boolean substitute ( GlyphSubstitutionState ss ) {
int gi = ss.getGlyph(), ci;
if ( ( ci = getCoverageIndex ( gi ) ) < 0 ) {
return false;
} else {
int[] rv = new int[1];
RuleLookup[] la = getLookups ( ci, gi, ss, rv );
if ( la != null ) {
ss.apply ( la, rv[0] );
}
return true;
}
}
/**
* Obtain the set of rule lookups associated with the current input glyph context.
* @param ci coverage index of glyph at current position
* @param gi glyph index of glyph at current position
* @param ss glyph substitution state
* @param rv array of ints used to receive multiple return values, must be of length 1 or greater,
* where the first entry is used to return the input sequence length of the matched rule
* @return array of rule lookups or null if none applies
*/
public abstract RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv );
static GlyphSubstitutionSubtable create ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
if ( format == 1 ) {
return new ContextualSubtableFormat1 ( id, sequence, flags, format, coverage, entries );
} else if ( format == 2 ) {
return new ContextualSubtableFormat2 ( id, sequence, flags, format, coverage, entries );
} else if ( format == 3 ) {
return new ContextualSubtableFormat3 ( id, sequence, flags, format, coverage, entries );
} else {
throw new UnsupportedOperationException();
}
}
}
private static class ContextualSubtableFormat1 extends ContextualSubtable {
private RuleSet[] rsa; // rule set array, ordered by glyph coverage index
ContextualSubtableFormat1 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
if ( rsa != null ) {
List entries = new ArrayList ( 1 );
entries.add ( rsa );
return entries;
} else {
return null;
}
}
/** {@inheritDoc} */
public void resolveLookupReferences ( Map/*<String,LookupTable>*/ lookupTables ) {
GlyphTable.resolveLookupReferences ( rsa, lookupTables );
}
/** {@inheritDoc} */
public RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv ) {
assert ss != null;
assert ( rv != null ) && ( rv.length > 0 );
assert rsa != null;
if ( rsa.length > 0 ) {
RuleSet rs = rsa [ 0 ];
if ( rs != null ) {
Rule[] ra = rs.getRules();
for ( int i = 0, n = ra.length; i < n; i++ ) {
Rule r = ra [ i ];
if ( ( r != null ) && ( r instanceof ChainedGlyphSequenceRule ) ) {
ChainedGlyphSequenceRule cr = (ChainedGlyphSequenceRule) r;
int[] iga = cr.getGlyphs ( gi );
if ( matches ( ss, iga, 0, rv ) ) {
return r.getLookups();
}
}
}
}
}
return null;
}
static boolean matches ( GlyphSubstitutionState ss, int[] glyphs, int offset, int[] rv ) {
if ( ( glyphs == null ) || ( glyphs.length == 0 ) ) {
return true; // match null or empty glyph sequence
} else {
boolean reverse = offset < 0;
GlyphTester ignores = ss.getIgnoreDefault();
int[] counts = ss.getGlyphsAvailable ( offset, reverse, ignores );
int nga = counts[0];
int ngm = glyphs.length;
if ( nga < ngm ) {
return false; // insufficient glyphs available to match
} else {
int[] ga = ss.getGlyphs ( offset, ngm, reverse, ignores, null, counts );
for ( int k = 0; k < ngm; k++ ) {
if ( ga [ k ] != glyphs [ k ] ) {
return false; // match fails at ga [ k ]
}
}
if ( rv != null ) {
rv[0] = counts[0] + counts[1];
}
return true; // all glyphs match
}
}
}
private void populate ( List entries ) {
if ( entries == null ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null" );
} else if ( entries.size() != 1 ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, " + entries.size() + " entries present, but requires 1 entry" );
} else {
Object o;
if ( ( ( o = entries.get(0) ) == null ) || ! ( o instanceof RuleSet[] ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, first entry must be an RuleSet[], but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
rsa = (RuleSet[]) o;
}
}
}
}
private static class ContextualSubtableFormat2 extends ContextualSubtable {
private GlyphClassTable cdt; // class def table
private int ngc; // class set count
private RuleSet[] rsa; // rule set array, ordered by class number [0...ngc - 1]
ContextualSubtableFormat2 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
if ( rsa != null ) {
List entries = new ArrayList ( 3 );
entries.add ( cdt );
entries.add ( Integer.valueOf ( ngc ) );
entries.add ( rsa );
return entries;
} else {
return null;
}
}
/** {@inheritDoc} */
public void resolveLookupReferences ( Map/*<String,LookupTable>*/ lookupTables ) {
GlyphTable.resolveLookupReferences ( rsa, lookupTables );
}
/** {@inheritDoc} */
public RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv ) {
assert ss != null;
assert ( rv != null ) && ( rv.length > 0 );
assert rsa != null;
if ( rsa.length > 0 ) {
RuleSet rs = rsa [ 0 ];
if ( rs != null ) {
Rule[] ra = rs.getRules();
for ( int i = 0, n = ra.length; i < n; i++ ) {
Rule r = ra [ i ];
if ( ( r != null ) && ( r instanceof ChainedClassSequenceRule ) ) {
ChainedClassSequenceRule cr = (ChainedClassSequenceRule) r;
int[] ca = cr.getClasses ( cdt.getClassIndex ( gi, ss.getClassMatchSet ( gi ) ) );
if ( matches ( ss, cdt, ca, 0, rv ) ) {
return r.getLookups();
}
}
}
}
}
return null;
}
static boolean matches ( GlyphSubstitutionState ss, GlyphClassTable cdt, int[] classes, int offset, int[] rv ) {
if ( ( cdt == null ) || ( classes == null ) || ( classes.length == 0 ) ) {
return true; // match null class definitions, null or empty class sequence
} else {
boolean reverse = offset < 0;
GlyphTester ignores = ss.getIgnoreDefault();
int[] counts = ss.getGlyphsAvailable ( offset, reverse, ignores );
int nga = counts[0];
int ngm = classes.length;
if ( nga < ngm ) {
return false; // insufficient glyphs available to match
} else {
int[] ga = ss.getGlyphs ( offset, ngm, reverse, ignores, null, counts );
for ( int k = 0; k < ngm; k++ ) {
int gi = ga [ k ];
int ms = ss.getClassMatchSet ( gi );
int gc = cdt.getClassIndex ( gi, ms );
if ( ( gc < 0 ) || ( gc >= cdt.getClassSize ( ms ) ) ) {
return false; // none or invalid class fails match
} else if ( gc != classes [ k ] ) {
return false; // match fails at ga [ k ]
}
}
if ( rv != null ) {
rv[0] = counts[0] + counts[1];
}
return true; // all glyphs match
}
}
}
private void populate ( List entries ) {
if ( entries == null ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null" );
} else if ( entries.size() != 3 ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, " + entries.size() + " entries present, but requires 3 entries" );
} else {
Object o;
if ( ( ( o = entries.get(0) ) == null ) || ! ( o instanceof GlyphClassTable ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, first entry must be an GlyphClassTable, but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
cdt = (GlyphClassTable) o;
}
if ( ( ( o = entries.get(1) ) == null ) || ! ( o instanceof Integer ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, second entry must be an Integer, but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
ngc = ((Integer)(o)).intValue();
}
if ( ( ( o = entries.get(2) ) == null ) || ! ( o instanceof RuleSet[] ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, third entry must be an RuleSet[], but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
rsa = (RuleSet[]) o;
if ( rsa.length != ngc ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, RuleSet[] length is " + rsa.length + ", but expected " + ngc + " glyph classes" );
}
}
}
}
}
private static class ContextualSubtableFormat3 extends ContextualSubtable {
private RuleSet[] rsa; // rule set array, containing a single rule set
ContextualSubtableFormat3 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
if ( rsa != null ) {
List entries = new ArrayList ( 1 );
entries.add ( rsa );
return entries;
} else {
return null;
}
}
/** {@inheritDoc} */
public void resolveLookupReferences ( Map/*<String,LookupTable>*/ lookupTables ) {
GlyphTable.resolveLookupReferences ( rsa, lookupTables );
}
/** {@inheritDoc} */
public RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv ) {
assert ss != null;
assert ( rv != null ) && ( rv.length > 0 );
assert rsa != null;
if ( rsa.length > 0 ) {
RuleSet rs = rsa [ 0 ];
if ( rs != null ) {
Rule[] ra = rs.getRules();
for ( int i = 0, n = ra.length; i < n; i++ ) {
Rule r = ra [ i ];
if ( ( r != null ) && ( r instanceof ChainedCoverageSequenceRule ) ) {
ChainedCoverageSequenceRule cr = (ChainedCoverageSequenceRule) r;
GlyphCoverageTable[] gca = cr.getCoverages();
if ( matches ( ss, gca, 0, rv ) ) {
return r.getLookups();
}
}
}
}
}
return null;
}
static boolean matches ( GlyphSubstitutionState ss, GlyphCoverageTable[] gca, int offset, int[] rv ) {
if ( ( gca == null ) || ( gca.length == 0 ) ) {
return true; // match null or empty coverage array
} else {
boolean reverse = offset < 0;
GlyphTester ignores = ss.getIgnoreDefault();
int[] counts = ss.getGlyphsAvailable ( offset, reverse, ignores );
int nga = counts[0];
int ngm = gca.length;
if ( nga < ngm ) {
return false; // insufficient glyphs available to match
} else {
int[] ga = ss.getGlyphs ( offset, ngm, reverse, ignores, null, counts );
for ( int k = 0; k < ngm; k++ ) {
GlyphCoverageTable ct = gca [ k ];
if ( ct != null ) {
if ( ct.getCoverageIndex ( ga [ k ] ) < 0 ) {
return false; // match fails at ga [ k ]
}
}
}
if ( rv != null ) {
rv[0] = counts[0] + counts[1];
}
return true; // all glyphs match
}
}
}
private void populate ( List entries ) {
if ( entries == null ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null" );
} else if ( entries.size() != 1 ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, " + entries.size() + " entries present, but requires 1 entry" );
} else {
Object o;
if ( ( ( o = entries.get(0) ) == null ) || ! ( o instanceof RuleSet[] ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, first entry must be an RuleSet[], but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
rsa = (RuleSet[]) o;
}
}
}
}
private abstract static class ChainedContextualSubtable extends GlyphSubstitutionSubtable {
public ChainedContextualSubtable ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage );
}
/** {@inheritDoc} */
public int getType() {
return GSUB_LOOKUP_TYPE_CHAINED_CONTEXTUAL;
}
/** {@inheritDoc} */
public boolean isCompatible ( GlyphSubtable subtable ) {
return subtable instanceof ChainedContextualSubtable;
}
/** {@inheritDoc} */
public boolean substitute ( GlyphSubstitutionState ss ) {
int gi = ss.getGlyph(), ci;
if ( ( ci = getCoverageIndex ( gi ) ) < 0 ) {
return false;
} else {
int[] rv = new int[1];
RuleLookup[] la = getLookups ( ci, gi, ss, rv );
if ( la != null ) {
ss.apply ( la, rv[0] );
return true;
} else {
return false;
}
}
}
/**
* Obtain the set of rule lookups associated with the current input glyph context.
* @param ci coverage index of glyph at current position
* @param gi glyph index of glyph at current position
* @param ss glyph substitution state
* @param rv array of ints used to receive multiple return values, must be of length 1 or greater
* @return array of rule lookups or null if none applies
*/
public abstract RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv );
static GlyphSubstitutionSubtable create ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
if ( format == 1 ) {
return new ChainedContextualSubtableFormat1 ( id, sequence, flags, format, coverage, entries );
} else if ( format == 2 ) {
return new ChainedContextualSubtableFormat2 ( id, sequence, flags, format, coverage, entries );
} else if ( format == 3 ) {
return new ChainedContextualSubtableFormat3 ( id, sequence, flags, format, coverage, entries );
} else {
throw new UnsupportedOperationException();
}
}
}
private static class ChainedContextualSubtableFormat1 extends ChainedContextualSubtable {
private RuleSet[] rsa; // rule set array, ordered by glyph coverage index
ChainedContextualSubtableFormat1 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
if ( rsa != null ) {
List entries = new ArrayList ( 1 );
entries.add ( rsa );
return entries;
} else {
return null;
}
}
/** {@inheritDoc} */
public void resolveLookupReferences ( Map/*<String,LookupTable>*/ lookupTables ) {
GlyphTable.resolveLookupReferences ( rsa, lookupTables );
}
/** {@inheritDoc} */
public RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv ) {
assert ss != null;
assert ( rv != null ) && ( rv.length > 0 );
assert rsa != null;
if ( rsa.length > 0 ) {
RuleSet rs = rsa [ 0 ];
if ( rs != null ) {
Rule[] ra = rs.getRules();
for ( int i = 0, n = ra.length; i < n; i++ ) {
Rule r = ra [ i ];
if ( ( r != null ) && ( r instanceof ChainedGlyphSequenceRule ) ) {
ChainedGlyphSequenceRule cr = (ChainedGlyphSequenceRule) r;
int[] iga = cr.getGlyphs ( gi );
if ( matches ( ss, iga, 0, rv ) ) {
int[] bga = cr.getBacktrackGlyphs();
if ( matches ( ss, bga, -1, null ) ) {
int[] lga = cr.getLookaheadGlyphs();
if ( matches ( ss, lga, rv[0], null ) ) {
return r.getLookups();
}
}
}
}
}
}
}
return null;
}
private boolean matches ( GlyphSubstitutionState ss, int[] glyphs, int offset, int[] rv ) {
return ContextualSubtableFormat1.matches ( ss, glyphs, offset, rv );
}
private void populate ( List entries ) {
if ( entries == null ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null" );
} else if ( entries.size() != 1 ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, " + entries.size() + " entries present, but requires 1 entry" );
} else {
Object o;
if ( ( ( o = entries.get(0) ) == null ) || ! ( o instanceof RuleSet[] ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, first entry must be an RuleSet[], but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
rsa = (RuleSet[]) o;
}
}
}
}
private static class ChainedContextualSubtableFormat2 extends ChainedContextualSubtable {
private GlyphClassTable icdt; // input class def table
private GlyphClassTable bcdt; // backtrack class def table
private GlyphClassTable lcdt; // lookahead class def table
private int ngc; // class set count
private RuleSet[] rsa; // rule set array, ordered by class number [0...ngc - 1]
ChainedContextualSubtableFormat2 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
if ( rsa != null ) {
List entries = new ArrayList ( 5 );
entries.add ( icdt );
entries.add ( bcdt );
entries.add ( lcdt );
entries.add ( Integer.valueOf ( ngc ) );
entries.add ( rsa );
return entries;
} else {
return null;
}
}
/** {@inheritDoc} */
public RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv ) {
assert ss != null;
assert ( rv != null ) && ( rv.length > 0 );
assert rsa != null;
if ( rsa.length > 0 ) {
RuleSet rs = rsa [ 0 ];
if ( rs != null ) {
Rule[] ra = rs.getRules();
for ( int i = 0, n = ra.length; i < n; i++ ) {
Rule r = ra [ i ];
if ( ( r != null ) && ( r instanceof ChainedClassSequenceRule ) ) {
ChainedClassSequenceRule cr = (ChainedClassSequenceRule) r;
int[] ica = cr.getClasses ( icdt.getClassIndex ( gi, ss.getClassMatchSet ( gi ) ) );
if ( matches ( ss, icdt, ica, 0, rv ) ) {
int[] bca = cr.getBacktrackClasses();
if ( matches ( ss, bcdt, bca, -1, null ) ) {
int[] lca = cr.getLookaheadClasses();
if ( matches ( ss, lcdt, lca, rv[0], null ) ) {
return r.getLookups();
}
}
}
}
}
}
}
return null;
}
private boolean matches ( GlyphSubstitutionState ss, GlyphClassTable cdt, int[] classes, int offset, int[] rv ) {
return ContextualSubtableFormat2.matches ( ss, cdt, classes, offset, rv );
}
/** {@inheritDoc} */
public void resolveLookupReferences ( Map/*<String,LookupTable>*/ lookupTables ) {
GlyphTable.resolveLookupReferences ( rsa, lookupTables );
}
private void populate ( List entries ) {
if ( entries == null ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null" );
} else if ( entries.size() != 5 ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, " + entries.size() + " entries present, but requires 5 entries" );
} else {
Object o;
if ( ( ( o = entries.get(0) ) == null ) || ! ( o instanceof GlyphClassTable ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, first entry must be an GlyphClassTable, but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
icdt = (GlyphClassTable) o;
}
if ( ( ( o = entries.get(1) ) != null ) && ! ( o instanceof GlyphClassTable ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, second entry must be an GlyphClassTable, but is: " + o.getClass() );
} else {
bcdt = (GlyphClassTable) o;
}
if ( ( ( o = entries.get(2) ) != null ) && ! ( o instanceof GlyphClassTable ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, third entry must be an GlyphClassTable, but is: " + o.getClass() );
} else {
lcdt = (GlyphClassTable) o;
}
if ( ( ( o = entries.get(3) ) == null ) || ! ( o instanceof Integer ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, fourth entry must be an Integer, but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
ngc = ((Integer)(o)).intValue();
}
if ( ( ( o = entries.get(4) ) == null ) || ! ( o instanceof RuleSet[] ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, fifth entry must be an RuleSet[], but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
rsa = (RuleSet[]) o;
if ( rsa.length != ngc ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, RuleSet[] length is " + rsa.length + ", but expected " + ngc + " glyph classes" );
}
}
}
}
}
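    /*
     * A hedged sketch of the five entries Format2's populate() expects, in order
     * (the values below are illustrative, not taken from any real font):
     *
     *   List entries = new ArrayList ( 5 );
     *   entries.add ( inputClassDefTable );      // GlyphClassTable, required
     *   entries.add ( backtrackClassDefTable );  // GlyphClassTable, may be null
     *   entries.add ( lookaheadClassDefTable );  // GlyphClassTable, may be null
     *   entries.add ( Integer.valueOf ( 4 ) );   // class set count (ngc)
     *   entries.add ( ruleSets );                // RuleSet[4], indexed by class number
     */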
private static class ChainedContextualSubtableFormat3 extends ChainedContextualSubtable {
private RuleSet[] rsa; // rule set array, containing a single rule set
ChainedContextualSubtableFormat3 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
if ( rsa != null ) {
List entries = new ArrayList ( 1 );
entries.add ( rsa );
return entries;
} else {
return null;
}
}
/** {@inheritDoc} */
public void resolveLookupReferences ( Map/*<String,LookupTable>*/ lookupTables ) {
GlyphTable.resolveLookupReferences ( rsa, lookupTables );
}
/** {@inheritDoc} */
public RuleLookup[] getLookups ( int ci, int gi, GlyphSubstitutionState ss, int[] rv ) {
assert ss != null;
assert ( rv != null ) && ( rv.length > 0 );
assert rsa != null;
if ( rsa.length > 0 ) {
RuleSet rs = rsa [ 0 ];
if ( rs != null ) {
Rule[] ra = rs.getRules();
for ( int i = 0, n = ra.length; i < n; i++ ) {
Rule r = ra [ i ];
if ( ( r != null ) && ( r instanceof ChainedCoverageSequenceRule ) ) {
ChainedCoverageSequenceRule cr = (ChainedCoverageSequenceRule) r;
GlyphCoverageTable[] igca = cr.getCoverages();
if ( matches ( ss, igca, 0, rv ) ) {
GlyphCoverageTable[] bgca = cr.getBacktrackCoverages();
if ( matches ( ss, bgca, -1, null ) ) {
GlyphCoverageTable[] lgca = cr.getLookaheadCoverages();
if ( matches ( ss, lgca, rv[0], null ) ) {
return r.getLookups();
}
}
}
}
}
}
}
return null;
}
private boolean matches ( GlyphSubstitutionState ss, GlyphCoverageTable[] gca, int offset, int[] rv ) {
return ContextualSubtableFormat3.matches ( ss, gca, offset, rv );
}
private void populate ( List entries ) {
if ( entries == null ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, must be non-null" );
} else if ( entries.size() != 1 ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, " + entries.size() + " entries present, but requires 1 entry" );
} else {
Object o;
if ( ( ( o = entries.get(0) ) == null ) || ! ( o instanceof RuleSet[] ) ) {
throw new AdvancedTypographicTableFormatException ( "illegal entries, first entry must be an RuleSet[], but is: " + ( ( o != null ) ? o.getClass() : null ) );
} else {
rsa = (RuleSet[]) o;
}
}
}
}
private abstract static class ReverseChainedSingleSubtable extends GlyphSubstitutionSubtable {
public ReverseChainedSingleSubtable ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage );
}
/** {@inheritDoc} */
public int getType() {
return GSUB_LOOKUP_TYPE_REVERSE_CHAINED_SINGLE;
}
/** {@inheritDoc} */
public boolean isCompatible ( GlyphSubtable subtable ) {
return subtable instanceof ReverseChainedSingleSubtable;
}
/** {@inheritDoc} */
public boolean usesReverseScan() {
return true;
}
static GlyphSubstitutionSubtable create ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
if ( format == 1 ) {
return new ReverseChainedSingleSubtableFormat1 ( id, sequence, flags, format, coverage, entries );
} else {
throw new UnsupportedOperationException();
}
}
}
private static class ReverseChainedSingleSubtableFormat1 extends ReverseChainedSingleSubtable {
ReverseChainedSingleSubtableFormat1 ( String id, int sequence, int flags, int format, GlyphCoverageTable coverage, List entries ) {
super ( id, sequence, flags, format, coverage, entries );
populate ( entries );
}
/** {@inheritDoc} */
public List getEntries() {
return null;
}
private void populate ( List entries ) {
}
}
/**
* The <code>Ligature</code> class implements a ligature lookup result in terms of
* a ligature glyph (code) and the <emph>N+1...</emph> components that comprise the ligature,
* where the <emph>Nth</emph> component was consumed in the coverage table lookup mapping to
* this ligature instance.
*/
public static class Ligature {
private final int ligature; // (resulting) ligature glyph
private final int[] components; // component glyph codes (note that first component is implied)
/**
* Instantiate a ligature.
* @param ligature glyph id
* @param components sequence of <emph>N+1...</emph> component glyph (or character) identifiers
*/
public Ligature ( int ligature, int[] components ) {
if ( ( ligature < 0 ) || ( ligature > 65535 ) ) {
throw new AdvancedTypographicTableFormatException ( "invalid ligature glyph index: " + ligature );
} else if ( components == null ) {
throw new AdvancedTypographicTableFormatException ( "invalid ligature components, must be non-null array" );
} else {
for ( int i = 0, n = components.length; i < n; i++ ) {
int gc = components [ i ];
if ( ( gc < 0 ) || ( gc > 65535 ) ) {
throw new AdvancedTypographicTableFormatException ( "invalid component glyph index: " + gc );
}
}
this.ligature = ligature;
this.components = components;
}
}
/** @return ligature glyph id */
public int getLigature() {
return ligature;
}
/** @return array of <emph>N+1...</emph> components */
public int[] getComponents() {
return components;
}
/** @return components count */
public int getNumComponents() {
return components.length;
}
/**
* Determine if input sequence at offset matches ligature's components.
* @param glyphs array of glyph components to match (including first, implied glyph)
* @return true if matches
*/
public boolean matchesComponents ( int[] glyphs ) {
if ( glyphs.length < ( components.length + 1 ) ) {
return false;
} else {
for ( int i = 0, n = components.length; i < n; i++ ) {
if ( glyphs [ i + 1 ] != components [ i ] ) {
return false;
}
}
return true;
}
}
/** {@inheritDoc} */
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append("{components={");
for ( int i = 0, n = components.length; i < n; i++ ) {
if ( i > 0 ) {
sb.append(',');
}
sb.append(Integer.toString(components[i]));
}
sb.append("},ligature=");
sb.append(Integer.toString(ligature));
sb.append("}");
return sb.toString();
}
}
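    /*
     * A minimal usage sketch (glyph indices are hypothetical): an "ffi" ligature
     * whose first "f" component is implied by the coverage lookup, so only the
     * trailing "f" and "i" appear in the components array.
     *
     *   Ligature ffi = new Ligature ( 0x0192, new int[] { 0x0066, 0x0069 } );
     *   // glyphs[0] is the implied first component already matched by coverage
     *   boolean ok = ffi.matchesComponents ( new int[] { 0x0066, 0x0066, 0x0069 } ); // true
     */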
/**
* The <code>LigatureSet</code> class implements a set of ligatures.
*/
public static class LigatureSet {
private final Ligature[] ligatures; // set of ligatures all of which share the first (implied) component
private final int maxComponents; // maximum number of components (including first)
/**
* Instantiate a set of ligatures.
* @param ligatures collection of ligatures
*/
public LigatureSet ( List ligatures ) {
this ( (Ligature[]) ligatures.toArray ( new Ligature [ ligatures.size() ] ) );
}
/**
* Instantiate a set of ligatures.
* @param ligatures array of ligatures
*/
public LigatureSet ( Ligature[] ligatures ) {
if ( ligatures == null ) {
throw new AdvancedTypographicTableFormatException ( "invalid ligatures, must be non-null array" );
} else {
this.ligatures = ligatures;
int ncMax = -1;
for ( int i = 0, n = ligatures.length; i < n; i++ ) {
Ligature l = ligatures [ i ];
int nc = l.getNumComponents() + 1;
if ( nc > ncMax ) {
ncMax = nc;
}
}
maxComponents = ncMax;
}
}
/** @return array of ligatures in this ligature set */
public Ligature[] getLigatures() {
return ligatures;
}
/** @return count of ligatures in this ligature set */
public int getNumLigatures() {
return ligatures.length;
}
/** @return maximum number of components in one ligature (including first component) */
public int getMaxComponents() {
return maxComponents;
}
/** {@inheritDoc} */
public String toString() {
StringBuffer sb = new StringBuffer();
sb.append("{ligs={");
for ( int i = 0, n = ligatures.length; i < n; i++ ) {
if ( i > 0 ) {
sb.append(',');
}
sb.append(ligatures[i]);
}
sb.append("}}");
return sb.toString();
}
}
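    /*
     * A minimal sketch (hypothetical ids): a LigatureSet groups ligatures that
     * share the same implied first component, and getMaxComponents() reports the
     * longest match window a scanner needs, including that first glyph.
     *
     *   LigatureSet set = new LigatureSet ( new Ligature[] {
     *       new Ligature ( 0x0190, new int[] { 0x0066 } ),          // "ff"
     *       new Ligature ( 0x0192, new int[] { 0x0066, 0x0069 } )   // "ffi"
     *   } );
     *   int window = set.getMaxComponents(); // 3: implied "f" plus two components
     */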
}
|
apache-2.0
|
runnerfish/ssh2_code_learn
|
SSH2 Code Kit/08/8.4/XML-config/src/crazyit/app/service/impl/Chinese.java
|
686
|
package org.crazyit.app.service.impl;
import org.crazyit.app.service.*;
/**
* Description:
* <br/>Website: <a href="http://www.crazyit.org">Crazy Java League</a>
* <br/>Copyright (C), 2001-2012, Yeeku.H.Lee
* <br/>This program is protected by copyright laws.
* <br/>Program Name:
* <br/>Date:
* @author Yeeku.H.Lee kongyeeku@163.com
* @version 1.0
*/
public class Chinese
implements Person
{
// Implement the sayHello() method of the Person interface
public String sayHello(String name)
{
System.out.println("sayHello·½·¨±»µ÷ÓÃ...");
// Return a simple string
return name + " Hello , Spring AOP";
}
// Define an eat() method
public void eat(String food)
{
System.out.println("ÎÒÕýÔÚ³Ô:"
+ food);
}
}
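/*
 * A minimal usage sketch (assuming the Person interface declares sayHello(String),
 * which this class implements; output follows the method bodies above):
 *
 *   Person p = new Chinese();
 *   String s = p.sayHello("Tom"); // logs "sayHello method invoked...", returns "Tom Hello , Spring AOP"
 *   p.eat("apple");               // prints: I am eating: apple
 */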
|
apache-2.0
|
stephentyrone/swift
|
lib/Sema/TypeCheckGeneric.cpp
|
37646
|
//===--- TypeCheckGeneric.cpp - Generics ----------------------------------===//
//
// This source file is part of the Swift.org open source project
//
// Copyright (c) 2014 - 2017 Apple Inc. and the Swift project authors
// Licensed under Apache License v2.0 with Runtime Library Exception
//
// See https://swift.org/LICENSE.txt for license information
// See https://swift.org/CONTRIBUTORS.txt for the list of Swift project authors
//
//===----------------------------------------------------------------------===//
//
// This file implements support for generics.
//
//===----------------------------------------------------------------------===//
#include "TypeChecker.h"
#include "TypeCheckProtocol.h"
#include "TypeCheckType.h"
#include "swift/AST/DiagnosticsSema.h"
#include "swift/AST/ExistentialLayout.h"
#include "swift/AST/GenericEnvironment.h"
#include "swift/AST/GenericSignatureBuilder.h"
#include "swift/AST/ProtocolConformance.h"
#include "swift/AST/ParameterList.h"
#include "swift/AST/TypeCheckRequests.h"
#include "swift/AST/TypeResolutionStage.h"
#include "swift/AST/Types.h"
#include "swift/Basic/Defer.h"
#include "llvm/Support/ErrorHandling.h"
using namespace swift;
///
/// Common code for generic functions, generic types
///
std::string
TypeChecker::gatherGenericParamBindingsText(
ArrayRef<Type> types,
TypeArrayView<GenericTypeParamType> genericParams,
TypeSubstitutionFn substitutions) {
llvm::SmallPtrSet<GenericTypeParamType *, 2> knownGenericParams;
for (auto type : types) {
if (type.isNull()) continue;
type.visit([&](Type type) {
if (auto gp = type->getAs<GenericTypeParamType>()) {
knownGenericParams.insert(
gp->getCanonicalType()->castTo<GenericTypeParamType>());
}
});
}
if (knownGenericParams.empty())
return "";
SmallString<128> result;
for (auto gp : genericParams) {
auto canonGP = gp->getCanonicalType()->castTo<GenericTypeParamType>();
if (!knownGenericParams.count(canonGP))
continue;
if (result.empty())
result += " [with ";
else
result += ", ";
result += gp->getName().str();
result += " = ";
auto type = substitutions(canonGP);
if (!type)
return "";
result += type.getString();
}
result += "]";
return result.str().str();
}
//
// Generic functions
//
/// Get the opaque type representing the return type of a declaration, or
/// create it if it does not yet exist.
OpaqueTypeDecl *
OpaqueResultTypeRequest::evaluate(Evaluator &evaluator,
ValueDecl *originatingDecl) const {
auto *repr = originatingDecl->getOpaqueResultTypeRepr();
assert(repr && "Declaration does not have an opaque result type");
auto *dc = originatingDecl->getInnermostDeclContext();
auto &ctx = dc->getASTContext();
// Protocol requirements can't have opaque return types.
//
// TODO: Maybe one day we could treat this as sugar for an associated type.
if (isa<ProtocolDecl>(originatingDecl->getDeclContext())
&& originatingDecl->isProtocolRequirement()) {
SourceLoc fixitLoc;
if (auto vd = dyn_cast<VarDecl>(originatingDecl)) {
fixitLoc = vd->getParentPatternBinding()->getStartLoc();
} else {
fixitLoc = originatingDecl->getStartLoc();
}
ctx.Diags.diagnose(repr->getLoc(),
diag::opaque_type_in_protocol_requirement)
.fixItInsert(fixitLoc, "associatedtype <#AssocType#>\n")
.fixItReplace(repr->getSourceRange(), "<#AssocType#>");
return nullptr;
}
// Check the availability of the opaque type runtime support.
if (!ctx.LangOpts.DisableAvailabilityChecking) {
auto runningOS =
TypeChecker::overApproximateAvailabilityAtLocation(
repr->getLoc(),
originatingDecl->getInnermostDeclContext());
auto availability = ctx.getOpaqueTypeAvailability();
if (!runningOS.isContainedIn(availability)) {
TypeChecker::diagnosePotentialOpaqueTypeUnavailability(
repr->getSourceRange(),
originatingDecl->getInnermostDeclContext(),
UnavailabilityReason::requiresVersionRange(availability.getOSVersion()));
}
}
// Try to resolve the constraint repr. It should be some kind of existential
// type.
TypeResolutionOptions options(TypeResolverContext::GenericRequirement);
// Pass along the error type if resolving the repr failed.
auto constraintType = TypeResolution::forInterface(
dc, dc->getGenericSignatureOfContext(), options)
.resolveType(repr->getConstraint());
if (constraintType->hasError())
return nullptr;
// Error out if the constraint type isn't a class or existential type.
if (!constraintType->getClassOrBoundGenericClass()
&& !constraintType->isExistentialType()) {
ctx.Diags.diagnose(repr->getConstraint()->getLoc(),
diag::opaque_type_invalid_constraint);
return nullptr;
}
if (constraintType->hasArchetype())
constraintType = constraintType->mapTypeOutOfContext();
// Create a generic signature for the opaque environment. This is the outer
// generic signature with an added generic parameter representing the opaque
// type and its interface constraints.
auto originatingDC = originatingDecl->getInnermostDeclContext();
unsigned returnTypeDepth = 0;
auto outerGenericSignature = originatingDC->getGenericSignatureOfContext();
if (outerGenericSignature) {
returnTypeDepth =
outerGenericSignature->getGenericParams().back()->getDepth() + 1;
}
auto returnTypeParam = GenericTypeParamType::get(returnTypeDepth, 0, ctx);
SmallVector<GenericTypeParamType *, 2> genericParamTypes;
genericParamTypes.push_back(returnTypeParam);
SmallVector<Requirement, 2> requirements;
if (constraintType->getClassOrBoundGenericClass()) {
requirements.push_back(Requirement(RequirementKind::Superclass,
returnTypeParam, constraintType));
} else {
auto constraints = constraintType->getExistentialLayout();
if (auto superclass = constraints.getSuperclass()) {
requirements.push_back(Requirement(RequirementKind::Superclass,
returnTypeParam, superclass));
}
for (auto protocol : constraints.getProtocols()) {
requirements.push_back(Requirement(RequirementKind::Conformance,
returnTypeParam, protocol));
}
if (auto layout = constraints.getLayoutConstraint()) {
requirements.push_back(Requirement(RequirementKind::Layout,
returnTypeParam, layout));
}
}
auto interfaceSignature = evaluateOrDefault(
ctx.evaluator,
AbstractGenericSignatureRequest{
outerGenericSignature.getPointer(),
std::move(genericParamTypes),
std::move(requirements)},
GenericSignature());
// Create the OpaqueTypeDecl for the result type.
// It has the same parent context and generic environment as the originating
// decl.
auto parentDC = originatingDecl->getDeclContext();
auto originatingGenericContext = originatingDecl->getAsGenericContext();
GenericParamList *genericParams = originatingGenericContext
? originatingGenericContext->getGenericParams()
: nullptr;
auto opaqueDecl = new (ctx) OpaqueTypeDecl(originatingDecl,
genericParams,
parentDC,
interfaceSignature,
returnTypeParam);
opaqueDecl->copyFormalAccessFrom(originatingDecl);
if (auto originatingSig = originatingDC->getGenericSignatureOfContext()) {
opaqueDecl->setGenericSignature(originatingSig);
}
// The declared interface type is an opaque ArchetypeType.
SubstitutionMap subs;
if (outerGenericSignature) {
subs = outerGenericSignature->getIdentitySubstitutionMap();
}
auto opaqueTy = OpaqueTypeArchetypeType::get(opaqueDecl, subs);
auto metatype = MetatypeType::get(opaqueTy);
opaqueDecl->setInterfaceType(metatype);
return opaqueDecl;
}
/// Determine whether the given type is \c Self, an associated type of \c Self,
/// or a concrete type.
static bool isSelfDerivedOrConcrete(Type protoSelf, Type type) {
// Check for a concrete type.
if (!type->hasTypeParameter())
return true;
if (type->isTypeParameter() &&
type->getRootGenericParam()->isEqual(protoSelf))
return true;
return false;
}
// For a generic requirement in a protocol, make sure that the requirement
// set didn't add any requirements to Self or its associated types.
void TypeChecker::checkProtocolSelfRequirements(ValueDecl *decl) {
// For a generic requirement in a protocol, make sure that the requirement
// set didn't add any requirements to Self or its associated types.
if (auto *proto = dyn_cast<ProtocolDecl>(decl->getDeclContext())) {
auto &ctx = proto->getASTContext();
auto protoSelf = proto->getSelfInterfaceType();
auto sig = decl->getInnermostDeclContext()->getGenericSignatureOfContext();
for (auto req : sig->getRequirements()) {
// If one of the types in the requirement is dependent on a non-Self
// type parameter, this requirement is okay.
if (!isSelfDerivedOrConcrete(protoSelf, req.getFirstType()) ||
!isSelfDerivedOrConcrete(protoSelf, req.getSecondType()))
continue;
// The conformance of 'Self' to the protocol is okay.
if (req.getKind() == RequirementKind::Conformance &&
req.getSecondType()->getAs<ProtocolType>()->getDecl() == proto &&
req.getFirstType()->is<GenericTypeParamType>())
continue;
ctx.Diags.diagnose(decl,
diag::requirement_restricts_self,
decl->getDescriptiveKind(), decl->getName(),
req.getFirstType().getString(),
static_cast<unsigned>(req.getKind()),
req.getSecondType().getString());
}
}
}
/// All generic parameters of a generic function must be referenced in the
/// declaration's type, otherwise we have no way to infer them.
void TypeChecker::checkReferencedGenericParams(GenericContext *dc) {
// Don't do this check for accessors: they're not used directly, so we
// never need to infer their generic arguments. This is mostly a
// compile-time optimization, but it also avoids problems with accessors
// like 'read' and 'modify' that would arise due to yields not being
// part of the formal type.
if (isa<AccessorDecl>(dc))
return;
auto *genericParams = dc->getGenericParams();
auto genericSig = dc->getGenericSignatureOfContext();
if (!genericParams)
return;
auto *decl = cast<ValueDecl>(dc->getInnermostDeclarationDeclContext());
// A helper class to collect referenced generic type parameters
// and dependent member types.
class ReferencedGenericTypeWalker : public TypeWalker {
SmallPtrSet<CanType, 4> ReferencedGenericParams;
public:
ReferencedGenericTypeWalker() {}
Action walkToTypePre(Type ty) override {
// Find generic parameters or dependent member types.
// Once such a type is found, don't recurse into its children.
if (!ty->hasTypeParameter())
return Action::SkipChildren;
if (ty->isTypeParameter()) {
ReferencedGenericParams.insert(ty->getCanonicalType());
return Action::SkipChildren;
}
return Action::Continue;
}
SmallPtrSetImpl<CanType> &getReferencedGenericParams() {
return ReferencedGenericParams;
}
};
// Collect all generic params referenced in parameter types and
// return type.
ReferencedGenericTypeWalker paramsAndResultWalker;
auto *funcTy = decl->getInterfaceType()->castTo<GenericFunctionType>();
for (const auto &param : funcTy->getParams())
param.getPlainType().walk(paramsAndResultWalker);
funcTy->getResult().walk(paramsAndResultWalker);
// Set of generic params referenced in parameter types,
// return type or requirements.
auto &referencedGenericParams =
paramsAndResultWalker.getReferencedGenericParams();
// Check if at least one of the generic params in the requirement refers
// to an already referenced generic parameter. If this is the case,
// then the other type is also considered as referenced, because
// it is used to put requirements on the first type.
auto reqTypesVisitor = [&referencedGenericParams](Requirement req) -> bool {
Type first;
Type second;
switch (req.getKind()) {
case RequirementKind::Superclass:
case RequirementKind::SameType:
second = req.getSecondType();
LLVM_FALLTHROUGH;
case RequirementKind::Conformance:
case RequirementKind::Layout:
first = req.getFirstType();
break;
}
// Collect generic parameter types referenced by types used in a requirement.
ReferencedGenericTypeWalker walker;
if (first && first->hasTypeParameter())
first.walk(walker);
if (second && second->hasTypeParameter())
second.walk(walker);
auto &genericParamsUsedByRequirementTypes =
walker.getReferencedGenericParams();
// If at least one of the collected generic types or a root generic
// parameter of dependent member types is known to be referenced by
// parameter types, return types or other types known to be "referenced",
// then all the types used in the requirement are considered to be
// referenced, because they are used to define something that is known
// to be referenced.
bool foundNewReferencedGenericParam = false;
if (std::any_of(genericParamsUsedByRequirementTypes.begin(),
genericParamsUsedByRequirementTypes.end(),
[&referencedGenericParams](CanType t) {
assert(t->isTypeParameter());
return referencedGenericParams.find(
t->getRootGenericParam()
->getCanonicalType()) !=
referencedGenericParams.end();
})) {
std::for_each(genericParamsUsedByRequirementTypes.begin(),
genericParamsUsedByRequirementTypes.end(),
[&referencedGenericParams,
&foundNewReferencedGenericParam](CanType t) {
// Add only generic type parameters, but ignore any
// dependent member types, because requirement
// on a dependent member type does not provide enough
// information to infer the base generic type
// parameter.
if (!t->is<GenericTypeParamType>())
return;
if (referencedGenericParams.insert(t).second)
foundNewReferencedGenericParam = true;
});
}
return foundNewReferencedGenericParam;
};
ArrayRef<Requirement> requirements;
auto FindReferencedGenericParamsInRequirements =
[&requirements, genericSig, &reqTypesVisitor] {
requirements = genericSig->getRequirements();
// Try to find new referenced generic parameter types in requirements until
// we reach a fix point. We need to iterate until a fix point, because we
// may have e.g. chains of same-type requirements like:
// not-yet-referenced-T1 == not-yet-referenced-T2.DepType2,
// not-yet-referenced-T2 == not-yet-referenced-T3.DepType3,
// not-yet-referenced-T3 == referenced-T4.DepType4.
// When we process the first of these requirements, we don't know yet
// that T2 will be referenced, because T3 will be referenced,
// because T3 == T4.DepType4.
while (true) {
bool foundNewReferencedGenericParam = false;
for (auto req : requirements) {
if (reqTypesVisitor(req))
foundNewReferencedGenericParam = true;
}
if (!foundNewReferencedGenericParam)
break;
}
};
// Find the depth of the function's own generic parameters.
unsigned fnGenericParamsDepth = genericParams->getParams().front()->getDepth();
// Check that every generic parameter type from the signature is
// among referencedGenericParams.
for (auto *genParam : genericSig->getGenericParams()) {
auto *paramDecl = genParam->getDecl();
if (paramDecl->getDepth() != fnGenericParamsDepth)
continue;
if (!referencedGenericParams.count(genParam->getCanonicalType())) {
// Lazily search for generic params that are indirectly used in the
// function signature. Do it only if there is a generic parameter
// that is not known to be referenced yet.
if (requirements.empty()) {
FindReferencedGenericParamsInRequirements();
// Nothing to do if this generic parameter is considered to be
// referenced after analyzing the requirements from the generic
// signature.
if (referencedGenericParams.count(genParam->getCanonicalType()))
continue;
}
// Produce an error that this generic parameter cannot be bound.
paramDecl->diagnose(diag::unreferenced_generic_parameter,
paramDecl->getNameStr());
decl->setInvalid();
}
}
}
///
/// Generic types
///
GenericSignature TypeChecker::checkGenericSignature(
GenericParamSource paramSource,
DeclContext *dc,
GenericSignature parentSig,
bool allowConcreteGenericParams,
SmallVector<Requirement, 2> additionalRequirements,
SmallVector<TypeLoc, 2> inferenceSources) {
if (auto genericParamList = paramSource.dyn_cast<GenericParamList *>())
assert(genericParamList && "Missing generic parameters?");
auto request = InferredGenericSignatureRequest{
dc->getParentModule(), parentSig.getPointer(), paramSource,
additionalRequirements, inferenceSources,
allowConcreteGenericParams};
auto sig = evaluateOrDefault(dc->getASTContext().evaluator,
request, nullptr);
// Debugging of the generic signature builder and generic signature
// generation.
if (dc->getASTContext().TypeCheckerOpts.DebugGenericSignatures) {
llvm::errs() << "\n";
if (auto *VD = dyn_cast_or_null<ValueDecl>(dc->getAsDecl())) {
VD->dumpRef(llvm::errs());
llvm::errs() << "\n";
} else {
dc->printContext(llvm::errs());
}
llvm::errs() << "Generic signature: ";
sig->print(llvm::errs());
llvm::errs() << "\n";
llvm::errs() << "Canonical generic signature: ";
sig.getCanonicalSignature()->print(llvm::errs());
llvm::errs() << "\n";
}
return sig;
}
/// Form the interface type of an extension from the raw type and the
/// extension's list of generic parameters.
static Type formExtensionInterfaceType(
ExtensionDecl *ext, Type type,
const GenericParamList *genericParams,
SmallVectorImpl<Requirement> &sameTypeReqs,
bool &mustInferRequirements) {
if (type->is<ErrorType>())
return type;
// Find the nominal type declaration and its parent type.
if (type->is<ProtocolCompositionType>())
type = type->getCanonicalType();
Type parentType = type->getNominalParent();
GenericTypeDecl *genericDecl = type->getAnyGeneric();
// Reconstruct the parent, if there is one.
if (parentType) {
// Build the nested extension type.
auto parentGenericParams = genericDecl->getGenericParams()
? genericParams->getOuterParameters()
: genericParams;
parentType =
formExtensionInterfaceType(ext, parentType, parentGenericParams,
sameTypeReqs, mustInferRequirements);
}
// Find the nominal type.
auto nominal = dyn_cast<NominalTypeDecl>(genericDecl);
auto typealias = dyn_cast<TypeAliasDecl>(genericDecl);
if (!nominal) {
Type underlyingType = typealias->getUnderlyingType();
nominal = underlyingType->getNominalOrBoundGenericNominal();
}
// Form the result.
Type resultType;
SmallVector<Type, 2> genericArgs;
if (!nominal->isGeneric() || isa<ProtocolDecl>(nominal)) {
resultType = NominalType::get(nominal, parentType,
nominal->getASTContext());
} else {
auto currentBoundType = type->getAs<BoundGenericType>();
// Form the bound generic type with the type parameters provided.
unsigned gpIndex = 0;
for (auto gp : *genericParams) {
SWIFT_DEFER { ++gpIndex; };
auto gpType = gp->getDeclaredInterfaceType();
genericArgs.push_back(gpType);
if (currentBoundType) {
sameTypeReqs.emplace_back(RequirementKind::SameType, gpType,
currentBoundType->getGenericArgs()[gpIndex]);
}
}
resultType = BoundGenericType::get(nominal, parentType, genericArgs);
}
// If we have a typealias, try to form type sugar.
if (typealias && TypeChecker::isPassThroughTypealias(
typealias, typealias->getUnderlyingType(), nominal)) {
auto typealiasSig = typealias->getGenericSignature();
SubstitutionMap subMap;
if (typealiasSig) {
subMap = typealiasSig->getIdentitySubstitutionMap();
mustInferRequirements = true;
}
resultType = TypeAliasType::get(typealias, parentType, subMap, resultType);
}
return resultType;
}
/// Retrieve the generic parameter depth of the extended type.
static unsigned getExtendedTypeGenericDepth(ExtensionDecl *ext) {
auto nominal = ext->getSelfNominalTypeDecl();
if (!nominal) return static_cast<unsigned>(-1);
auto sig = nominal->getGenericSignatureOfContext();
if (!sig) return static_cast<unsigned>(-1);
return sig->getGenericParams().back()->getDepth();
}
GenericSignature
GenericSignatureRequest::evaluate(Evaluator &evaluator,
GenericContext *GC) const {
// The signature of a Protocol is trivial (Self: TheProtocol) so let's compute
// it.
if (auto PD = dyn_cast<ProtocolDecl>(GC)) {
auto self = PD->getSelfInterfaceType()->castTo<GenericTypeParamType>();
auto req =
Requirement(RequirementKind::Conformance, self, PD->getDeclaredType());
auto sig = GenericSignature::get({self}, {req});
// Debugging of the generic signature builder and generic signature
// generation.
if (GC->getASTContext().TypeCheckerOpts.DebugGenericSignatures) {
llvm::errs() << "\n";
PD->printContext(llvm::errs());
llvm::errs() << "Generic signature: ";
sig->print(llvm::errs());
llvm::errs() << "\n";
llvm::errs() << "Canonical generic signature: ";
sig.getCanonicalSignature()->print(llvm::errs());
llvm::errs() << "\n";
}
return sig;
}
bool allowConcreteGenericParams = false;
const auto *genericParams = GC->getGenericParams();
if (genericParams) {
// Setup the depth of the generic parameters.
const_cast<GenericParamList *>(genericParams)
->setDepth(GC->getGenericContextDepth());
// Accessors can always use the generic context of their storage
// declarations. This is a compile-time optimization since it lets us
// avoid the requirements-gathering phase, but it also simplifies that
// work for accessors which don't mention the value type in their formal
// signatures (like the read and modify coroutines, since yield types
// aren't tracked in the AST type yet).
if (auto accessor = dyn_cast<AccessorDecl>(GC->getAsDecl())) {
return cast<SubscriptDecl>(accessor->getStorage())->getGenericSignature();
}
// ...or we may only have a contextual where clause.
} else if (const auto *where = GC->getTrailingWhereClause()) {
// If there is no generic context for the where clause to
// rely on, diagnose that now and bail out.
if (!GC->isGenericContext()) {
GC->getASTContext().Diags.diagnose(where->getWhereLoc(),
GC->getParent()->isModuleScopeContext()
? diag::where_nongeneric_toplevel
: diag::where_nongeneric_ctx);
return nullptr;
}
allowConcreteGenericParams = true;
} else {
// We can fast-path computing the generic signature of non-generic
// declarations by re-using the parent context's signature.
if (auto accessor = dyn_cast<AccessorDecl>(GC->getAsDecl()))
if (auto subscript = dyn_cast<SubscriptDecl>(accessor->getStorage()))
return subscript->getGenericSignature();
return GC->getParent()->getGenericSignatureOfContext();
}
auto parentSig = GC->getParent()->getGenericSignatureOfContext();
SmallVector<TypeLoc, 2> inferenceSources;
SmallVector<Requirement, 2> sameTypeReqs;
if (auto VD = dyn_cast_or_null<ValueDecl>(GC->getAsDecl())) {
auto func = dyn_cast<AbstractFunctionDecl>(VD);
auto subscr = dyn_cast<SubscriptDecl>(VD);
// For functions and subscripts, resolve the parameter and result types and
// note them as inference sources.
if (subscr || func) {
// Gather requirements from the parameter list.
TypeResolutionOptions options =
(func ? TypeResolverContext::AbstractFunctionDecl
: TypeResolverContext::SubscriptDecl);
auto resolution = TypeResolution::forStructural(GC, options);
auto params = func ? func->getParameters() : subscr->getIndices();
for (auto param : *params) {
auto *typeRepr = param->getTypeRepr();
if (typeRepr == nullptr)
continue;
auto paramOptions = options;
paramOptions.setContext(param->isVariadic()
? TypeResolverContext::VariadicFunctionInput
: TypeResolverContext::FunctionInput);
paramOptions |= TypeResolutionFlags::Direct;
auto type = resolution.withOptions(paramOptions).resolveType(typeRepr);
if (auto *specifier = dyn_cast<SpecifierTypeRepr>(typeRepr))
typeRepr = specifier->getBase();
inferenceSources.emplace_back(typeRepr, type);
}
// Gather requirements from the result type.
auto *resultTypeRepr = [&subscr, &func]() -> TypeRepr * {
if (subscr) {
return subscr->getElementTypeLoc().getTypeRepr();
} else if (auto *FD = dyn_cast<FuncDecl>(func)) {
return FD->getBodyResultTypeLoc().getTypeRepr();
} else {
return nullptr;
}
}();
if (resultTypeRepr && !isa<OpaqueReturnTypeRepr>(resultTypeRepr)) {
auto resultType =
resolution.withOptions(TypeResolverContext::FunctionResult)
.resolveType(resultTypeRepr);
inferenceSources.emplace_back(resultTypeRepr, resultType);
}
}
} else if (auto *ext = dyn_cast<ExtensionDecl>(GC)) {
// Form the interface type of the extension so we can use it as an inference
// source.
//
// FIXME: Push this into the "get interface type" request.
bool mustInferRequirements = false;
Type extInterfaceType =
formExtensionInterfaceType(ext, ext->getExtendedType(),
genericParams, sameTypeReqs,
mustInferRequirements);
auto cannotReuseNominalSignature = [&]() -> bool {
const auto finalDepth = genericParams->getParams().back()->getDepth();
return mustInferRequirements
|| !sameTypeReqs.empty()
|| ext->getTrailingWhereClause()
|| (getExtendedTypeGenericDepth(ext) != finalDepth);
};
// Re-use the signature of the type being extended by default.
if (!cannotReuseNominalSignature()) {
return ext->getSelfNominalTypeDecl()->getGenericSignatureOfContext();
}
// Allow parameters to be equated with concrete types.
allowConcreteGenericParams = true;
// Extensions must occur at the top level, they have no
// (valid) parent signature.
parentSig = nullptr;
inferenceSources.emplace_back(nullptr, extInterfaceType);
}
// EGREGIOUS HACK: The GSB cannot handle the addition of parent signatures
// from malformed decls in many cases. Check the invalid bit and null out the
// parent signature.
if (auto *DD = GC->getParent()->getAsDecl()) {
parentSig = DD->isInvalid() ? nullptr : parentSig;
}
return TypeChecker::checkGenericSignature(
GC, GC, parentSig,
allowConcreteGenericParams,
sameTypeReqs, inferenceSources);
}
///
/// Checking bound generic type arguments
///
RequirementCheckResult TypeChecker::checkGenericArguments(
DeclContext *dc, SourceLoc loc, SourceLoc noteLoc, Type owner,
TypeArrayView<GenericTypeParamType> genericParams,
ArrayRef<Requirement> requirements,
TypeSubstitutionFn substitutions,
GenericRequirementsCheckListener *listener,
SubstOptions options) {
bool valid = true;
struct RequirementSet {
ArrayRef<Requirement> Requirements;
SmallVector<ParentConditionalConformance, 4> Parents;
};
SmallVector<RequirementSet, 8> pendingReqs;
pendingReqs.push_back({requirements, {}});
auto *module = dc->getParentModule();
ASTContext &ctx = module->getASTContext();
while (!pendingReqs.empty()) {
auto current = pendingReqs.pop_back_val();
for (const auto &rawReq : current.Requirements) {
auto req = rawReq;
if (current.Parents.empty()) {
auto substed = rawReq.subst(
substitutions,
LookUpConformanceInModule(module),
options);
if (!substed) {
// Another requirement will fail later; just continue.
valid = false;
continue;
}
req = *substed;
}
auto kind = req.getKind();
Type rawFirstType = rawReq.getFirstType();
Type firstType = req.getFirstType();
if (firstType->hasTypeParameter())
firstType = dc->mapTypeIntoContext(firstType);
Type rawSecondType, secondType;
if (kind != RequirementKind::Layout) {
rawSecondType = rawReq.getSecondType();
secondType = req.getSecondType();
if (secondType->hasTypeParameter())
secondType = dc->mapTypeIntoContext(secondType);
}
// Don't do further checking on error types.
if (firstType->hasError() || (secondType && secondType->hasError())) {
// Another requirement will fail later; just continue.
valid = false;
continue;
}
bool requirementFailure = false;
if (listener && !listener->shouldCheck(kind, firstType, secondType))
continue;
Diag<Type, Type, Type> diagnostic;
Diag<Type, Type, StringRef> diagnosticNote;
switch (kind) {
case RequirementKind::Conformance: {
// Protocol conformance requirements.
auto proto = secondType->castTo<ProtocolType>();
auto conformance = module->lookupConformance(firstType, proto->getDecl());
if (conformance) {
// Report the conformance.
if (listener && valid && current.Parents.empty()) {
listener->satisfiedConformance(rawFirstType, firstType,
conformance);
}
auto conditionalReqs = conformance.getConditionalRequirements();
if (!conditionalReqs.empty()) {
auto history = current.Parents;
history.push_back({firstType, proto});
pendingReqs.push_back({conditionalReqs, std::move(history)});
}
continue;
}
if (loc.isValid())
diagnoseConformanceFailure(firstType, proto->getDecl(), module, loc);
if (current.Parents.empty())
return RequirementCheckResult::Failure;
// Failure needs to emit a diagnostic.
diagnostic = diag::type_does_not_conform_owner;
diagnosticNote = diag::type_does_not_inherit_or_conform_requirement;
requirementFailure = true;
break;
}
case RequirementKind::Layout:
// TODO: Statically check other layout constraints, once they can
// be spelled in Swift.
if (req.getLayoutConstraint()->isClass() &&
!firstType->satisfiesClassConstraint()) {
diagnostic = diag::type_is_not_a_class;
diagnosticNote = diag::anyobject_requirement;
requirementFailure = true;
}
break;
case RequirementKind::Superclass: {
// Superclass requirements.
if (!secondType->isExactSuperclassOf(firstType)) {
diagnostic = diag::type_does_not_inherit;
diagnosticNote = diag::type_does_not_inherit_or_conform_requirement;
requirementFailure = true;
}
break;
}
case RequirementKind::SameType:
if (!firstType->isEqual(secondType)) {
diagnostic = diag::types_not_equal;
diagnosticNote = diag::types_not_equal_requirement;
requirementFailure = true;
}
break;
}
if (!requirementFailure)
continue;
if (listener &&
listener->diagnoseUnsatisfiedRequirement(rawReq, firstType,
secondType, current.Parents))
return RequirementCheckResult::Failure;
if (loc.isValid()) {
// FIXME: Poor source-location information.
ctx.Diags.diagnose(loc, diagnostic, owner, firstType, secondType);
std::string genericParamBindingsText;
if (!genericParams.empty()) {
genericParamBindingsText =
gatherGenericParamBindingsText(
{rawFirstType, rawSecondType}, genericParams, substitutions);
}
ctx.Diags.diagnose(noteLoc, diagnosticNote, rawFirstType, rawSecondType,
genericParamBindingsText);
ParentConditionalConformance::diagnoseConformanceStack(
ctx.Diags, noteLoc, current.Parents);
}
return RequirementCheckResult::Failure;
}
}
if (valid)
return RequirementCheckResult::Success;
return RequirementCheckResult::SubstitutionFailure;
}
Requirement
RequirementRequest::evaluate(Evaluator &evaluator,
WhereClauseOwner owner,
unsigned index,
TypeResolutionStage stage) const {
// Figure out the type resolution.
TypeResolutionOptions options = TypeResolverContext::GenericRequirement;
Optional<TypeResolution> resolution;
switch (stage) {
case TypeResolutionStage::Structural:
resolution = TypeResolution::forStructural(owner.dc, options);
break;
case TypeResolutionStage::Interface:
resolution = TypeResolution::forInterface(owner.dc, options);
break;
case TypeResolutionStage::Contextual:
llvm_unreachable("No clients care about this. Use mapTypeIntoContext()");
}
auto &reqRepr = getRequirement();
switch (reqRepr.getKind()) {
case RequirementReprKind::TypeConstraint: {
Type subject = resolution->resolveType(reqRepr.getSubjectRepr());
Type constraint = resolution->resolveType(reqRepr.getConstraintRepr());
return Requirement(constraint->getClassOrBoundGenericClass()
? RequirementKind::Superclass
: RequirementKind::Conformance,
subject, constraint);
}
case RequirementReprKind::SameType:
return Requirement(RequirementKind::SameType,
resolution->resolveType(reqRepr.getFirstTypeRepr()),
resolution->resolveType(reqRepr.getSecondTypeRepr()));
case RequirementReprKind::LayoutConstraint:
return Requirement(RequirementKind::Layout,
resolution->resolveType(reqRepr.getSubjectRepr()),
reqRepr.getLayoutConstraint());
}
llvm_unreachable("unhandled kind");
}
Type StructuralTypeRequest::evaluate(Evaluator &evaluator,
TypeAliasDecl *typeAlias) const {
TypeResolutionOptions options((typeAlias->getGenericParams()
? TypeResolverContext::GenericTypeAliasDecl
: TypeResolverContext::TypeAliasDecl));
if (!typeAlias->getDeclContext()->isCascadingContextForLookup(
/*functionsAreNonCascading*/ true)) {
options |= TypeResolutionFlags::KnownNonCascadingDependency;
}
// This can happen when code completion is attempted inside
// of typealias underlying type e.g. `typealias F = () -> Int#^TOK^#`
auto &ctx = typeAlias->getASTContext();
auto underlyingTypeRepr = typeAlias->getUnderlyingTypeRepr();
if (!underlyingTypeRepr) {
typeAlias->setInvalid();
return ErrorType::get(ctx);
}
auto resolution = TypeResolution::forStructural(typeAlias, options);
auto type = resolution.resolveType(underlyingTypeRepr);
auto genericSig = typeAlias->getGenericSignature();
SubstitutionMap subs;
if (genericSig)
subs = genericSig->getIdentitySubstitutionMap();
Type parent;
auto parentDC = typeAlias->getDeclContext();
if (parentDC->isTypeContext())
parent = parentDC->getSelfInterfaceType();
return TypeAliasType::get(typeAlias, parent, subs, type);
}
|
apache-2.0
|
alancnet/artifactory
|
backend/core/src/main/java/org/artifactory/schedule/ArtifactorySchedulerFactoryBean.java
|
971
|
/*
* Artifactory is a binaries repository manager.
* Copyright (C) 2012 JFrog Ltd.
*
* Artifactory is free software: you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Artifactory is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with Artifactory. If not, see <http://www.gnu.org/licenses/>.
*/
package org.artifactory.schedule;
import org.springframework.scheduling.quartz.SchedulerFactoryBean;
/**
* @author Yoav Landman
*/
public class ArtifactorySchedulerFactoryBean extends SchedulerFactoryBean {
}
|
apache-2.0
|
RobAltena/deeplearning4j
|
libnd4j/include/helpers/cpu/ConstantTadHelper.cpp
|
4700
|
/*******************************************************************************
* Copyright (c) 2015-2018 Skymind, Inc.
*
* This program and the accompanying materials are made available under the
* terms of the Apache License, Version 2.0 which is available at
* https://www.apache.org/licenses/LICENSE-2.0.
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations
* under the License.
*
* SPDX-License-Identifier: Apache-2.0
******************************************************************************/
//
// @author raver119@gmail.com
//
#include "../ConstantTadHelper.h"
#include <TAD.h>
#include <ShapeUtils.h>
#ifndef __CUDABLAS__
namespace nd4j {
ConstantTadHelper::ConstantTadHelper() {
MAP_IMPL<TadDescriptor, TadPack> pack;
_cache.emplace_back(pack);
}
ConstantTadHelper* ConstantTadHelper::getInstance() {
if (!_INSTANCE)
_INSTANCE = new ConstantTadHelper();
return _INSTANCE;
}
TadPack ConstantTadHelper::tadForDimensions(const Nd4jLong *originalShape, int dimension, const bool keepUnitiesInShape) {
return tadForDimensions(originalShape, &dimension, 1, keepUnitiesInShape);
}
TadPack ConstantTadHelper::tadForDimensions(const Nd4jLong *originalShape, const std::vector<int> &dimensions, const bool keepUnitiesInShape) {
return tadForDimensions(originalShape, const_cast<int *>(dimensions.data()), dimensions.size(), keepUnitiesInShape);
}
TadPack ConstantTadHelper::tadForDimensions(const Nd4jLong *originalShape, int* dimensions, int dimLength, const bool keepUnitiesInShape) {
TadDescriptor tadDescriptor(originalShape, dimensions, dimLength, keepUnitiesInShape);
return tadForDimensions(tadDescriptor);
}
TadPack ConstantTadHelper::tadForDimensions(ShapeDescriptor &descriptor, std::vector<int> &dimensions, const bool keepUnitiesInShape) {
TadDescriptor tadDescriptor(descriptor, dimensions, keepUnitiesInShape);
return tadForDimensions(tadDescriptor);
}
TadPack ConstantTadHelper::tadForDimensions(TadDescriptor &descriptor) {
const int deviceId = 0;
_mutex.lock();
if (_cache[deviceId].count(descriptor) == 0) {
const auto shapeInfo = descriptor.originalShape().toShapeInfo();
const int rank = shape::rank(shapeInfo);
const std::vector<int> dimsToExclude = ShapeUtils::evalDimsToExclude(rank, descriptor.axis());
const Nd4jLong numOfSubArrs = ShapeUtils::getNumOfSubArrs(shapeInfo, dimsToExclude);
const int subArrRank = (rank == dimsToExclude.size() || descriptor.areUnitiesinShape()) ? rank : rank - dimsToExclude.size();
auto sPtr = new Nd4jLong[shape::shapeInfoLength(subArrRank)]; // shape of sub-arrays (same for all of them)
auto oPtr = new Nd4jLong[numOfSubArrs];
if (numOfSubArrs > 0)
shape::calcSubArrShapeAndOffsets(shapeInfo, numOfSubArrs, dimsToExclude.size(), dimsToExclude.data(), sPtr, oPtr, descriptor.areUnitiesinShape());
ConstantDataBuffer shapesBuffer(sPtr, nullptr, shape::shapeInfoLength(subArrRank)*sizeof(Nd4jLong), DataType::INT64);
ConstantDataBuffer offsetsBuffer(oPtr, nullptr, numOfSubArrs*sizeof(Nd4jLong), DataType::INT64);
TadPack t(shapesBuffer, offsetsBuffer, numOfSubArrs);
// auto shapeInfo = descriptor.originalShape().toShapeInfo();
// shape::TAD tad;
// tad.init(shapeInfo, descriptor.axis().data(), descriptor.axis().size());
// tad.createTadOnlyShapeInfo();
// tad.createOffsets();
// auto sPtr = new Nd4jLong[shape::shapeInfoLength(tad.tadOnlyShapeInfo)];
// auto oPtr = new Nd4jLong[tad.numTads];
// memcpy(sPtr, tad.tadOnlyShapeInfo, shape::shapeInfoByteLength(tad.tadOnlyShapeInfo));
// memcpy(oPtr, tad.tadOffsets, tad.numTads * sizeof(Nd4jLong));
// TadPack t(shapesBuffer, offsetsBuffer, tad.numTads);
_cache[deviceId][descriptor] = t;
TadPack &r = _cache[deviceId][descriptor];
_mutex.unlock();
delete[] shapeInfo;
return r;
} else {
TadPack r = _cache[deviceId][descriptor];
_mutex.unlock();
return r;
}
}
nd4j::ConstantTadHelper* nd4j::ConstantTadHelper::_INSTANCE = 0;
}
#endif
|
apache-2.0
|
shzisg/wechat-java-sdk
|
wechat-java-mp/src/main/java/me/chanjar/weixin/mp/builder/outxml/NewsBuilder.java
|
799
|
package me.chanjar.weixin.mp.builder.outxml;
import me.chanjar.weixin.mp.bean.message.WxMpXmlOutNewsMessage;
import java.util.ArrayList;
import java.util.List;
/**
* 图文消息builder
*
* @author chanjarster
*/
public final class NewsBuilder extends BaseBuilder<NewsBuilder, WxMpXmlOutNewsMessage> {
protected final List<WxMpXmlOutNewsMessage.Item> articles = new ArrayList<>();
public NewsBuilder addArticle(WxMpXmlOutNewsMessage.Item item) {
this.articles.add(item);
return this;
}
@Override
public WxMpXmlOutNewsMessage build() {
WxMpXmlOutNewsMessage m = new WxMpXmlOutNewsMessage();
for (WxMpXmlOutNewsMessage.Item item : this.articles) {
m.addArticle(item);
}
setCommon(m);
return m;
}
}
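/*
 * A hedged usage sketch (assumes the fromUser/toUser setters inherited from
 * BaseBuilder and the usual Item setters; field values are illustrative):
 *
 *   WxMpXmlOutNewsMessage.Item item = new WxMpXmlOutNewsMessage.Item();
 *   item.setTitle("Hello");
 *   item.setUrl("http://example.com");
 *   WxMpXmlOutNewsMessage msg = new NewsBuilder()
 *       .fromUser("from-account")   // assumed inherited from BaseBuilder
 *       .toUser("open-id")
 *       .addArticle(item)
 *       .build();
 */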
|
apache-2.0
|
hughperkins/pub-prototyping
|
py/checkkey2.py
|
686
|
import time
def _find_getch():
try:
import termios
except ImportError:
# Non-POSIX. Return msvcrt's (Windows') getch.
import msvcrt
return msvcrt.getch
# POSIX system. Create and return a getch that manipulates the tty.
import sys, tty
def _getch():
fd = sys.stdin.fileno()
old_settings = termios.tcgetattr(fd)
try:
tty.setraw(fd)
ch = sys.stdin.read(1)
finally:
termios.tcsetattr(fd, termios.TCSADRAIN, old_settings)
return ch
return _getch
getch = _find_getch()
while True:
c = getch()
print('c', c)
print('dot!')
time.sleep(0.2)
|
apache-2.0
|
OpenNTF/org.openntf.domino
|
domino/formula/src/main/java/org/openntf/formula/ast/ASTValueDateOrKW.java
|
2517
|
/* Generated By:JJTree: Do not edit this line. ASTValueDateOrKW.java Version 4.3 */
/* JavaCCOptions:MULTI=true,NODE_USES_PARSER=true,VISITOR=false,TRACK_TOKENS=false,NODE_PREFIX=AST,NODE_EXTENDS=,NODE_FACTORY=,SUPPORT_CLASS_VISIBILITY_PUBLIC=true */
/*
* © Copyright FOCONIS AG, 2014
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at:
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
* implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package org.openntf.formula.ast;
import java.util.Set;
import org.openntf.formula.DateTime;
import org.openntf.formula.FormulaContext;
import org.openntf.formula.ValueHolder;
import org.openntf.formula.ValueHolder.DataType;
import org.openntf.formula.parse.AtFormulaParserImpl;
import org.openntf.formula.parse.ParseException;
public class ASTValueDateOrKW extends SimpleNode {
DateTime dateValue = null;
String image = null;
public ASTValueDateOrKW(final AtFormulaParserImpl p, final int id) {
super(p, id);
}
@Override
public ValueHolder evaluate(final FormulaContext ctx) {
if (dateValue != null)
return ValueHolder.valueOf(dateValue);
ValueHolder vh = ValueHolder.valueOf(image);
vh.dataType = DataType.KEYWORD_STRING;
return vh;
}
public void init(final String image) throws ParseException {
String inner = image.substring(1, image.length() - 1); // remove first [ and last ]
try {
dateValue = parser.getFormatter().parseDate(inner);
} catch (IllegalArgumentException e) {
if (inner.contains(".") || inner.contains("/") || inner.contains("-") || // this MUST be a date //$NON-NLS-1$ //$NON-NLS-2$ //$NON-NLS-3$
inner.contains("\\") || inner.contains("\"") || inner.trim().isEmpty()) { //$NON-NLS-1$ //$NON-NLS-2$
throw new ParseException(parser, e.getMessage());
}
}
this.image = image; // tried to parse, but this seems to be a keyword
}
@Override
protected void analyzeThis(final Set<String> readFields, final Set<String> modifiedFields, final Set<String> variables,
final Set<String> functions) {
}
}
/* JavaCC - OriginalChecksum=56ca1fdb501387745d81cc4e6f2b1b55 (do not edit this line) */
|
apache-2.0
|
globalbus/blueprint-gradle-plugin
|
src/test/java/info/globalbus/blueprint/plugin/test/ServiceReferencesParent.java
|
1404
|
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* <p>
* http://www.apache.org/licenses/LICENSE-2.0
* <p>
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package info.globalbus.blueprint.plugin.test;
import info.globalbus.blueprint.plugin.test.interfaces.ServiceB;
import java.util.List;
import javax.inject.Inject;
import javax.inject.Named;
import org.ops4j.pax.cdi.api.OsgiService;
public class ServiceReferencesParent {
@Inject
@OsgiService(filter = "(type=B1)")
ServiceB serviceB;
@Named("serviceB2Id")
@Inject
@OsgiService(filter = "(type=B2)")
ServiceB serviceB2;
@Inject
@OsgiService(filter = "(type=B3)")
ServiceB serviceB3;
@Inject
@OsgiService
List<ServiceB> serviceBList;
}
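/*
 * A hedged sketch of the blueprint XML such annotations are typically translated
 * into (element and attribute names are illustrative, not the plugin's verified
 * output):
 *
 *   <reference id="serviceB"    interface="...interfaces.ServiceB" filter="(type=B1)"/>
 *   <reference id="serviceB2Id" interface="...interfaces.ServiceB" filter="(type=B2)"/>
 *   <reference id="serviceB3"   interface="...interfaces.ServiceB" filter="(type=B3)"/>
 *   <reference-list id="serviceBList" interface="...interfaces.ServiceB"/>
 */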
|
apache-2.0
|
simeonf/sfpython
|
sfpython/events/migrations/0002_auto_20151028_1531.py
|
400
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('events', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='event',
name='link',
field=models.CharField(max_length=255, blank=True),
),
]
|
apache-2.0
|
mikebski/jquery-cascade-list
|
src/cascade.js
|
4658
|
/*
Copyright 2014 Mike Baranski (mike.baranski@gmail.com)
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
(function($) {
$.fn.cascadeList = function(options, command) {
if (command == 'destroy') {
    return this.each(function() {
        $(this).siblings().remove();
    });
} else {
    return this.each(function() { var cl = new CascadingList(this.id, options, command); });
}
};
function CascadingList(marker_id, options, command){
this.settings = $.extend({
prompt_text: "Choose...",
prompt_value: -1,
base_list_name: "Start",
base_list_id: "base_list_id",
debug: true,
data: null,
prompt: true,
}, options);
this.rendered_lists = new Array();
this.marker = $("#" + marker_id);
if(this.settings.data == null || typeof this.settings.data == "undefined"){
throw "No data passed to Cascading List - make sure your options has a 'data' element";
}
var root = this.get_items_for_list("")[0];
var root_id = root[1];
var root_text = root[0];
var root_items = this.get_items_for_list(root_id);
if(typeof command == 'undefined' || command == null || command == "" || command == "create"){
this.render_select(root_id, root_text, root_items);
}
}
CascadingList.prototype.render_select = function(parent_id, title, items){
if(items == null || items.length == 0){ return; }
d = $("<div/>", {
id: "outer_" + parent_id,
'class': "list_wrapper"
}).insertBefore(this.marker);
$("<div/>", {
id: "list_" + parent_id,
'class': "list_container"
}).appendTo(d);
ld = $("#list_" + parent_id);
$("<span class='list_title'>" + title + "</span>").appendTo(ld);
var sel = $("<select/>", {
id: parent_id,
name: title,
'class': "list_select"
});
sel.appendTo(ld);
if(this.settings.prompt){
$("<option/>", {
value: this.settings.prompt_value,
id: "NOOPT_" + parent_id,
text: this.settings.prompt_text,
class: "list_option no_selection"
}).appendTo($("#" + parent_id));
}
var i = 0;
while(i < items.length){
var item = items[i];
var id = item[1];
var val = item[0];
$("<option/>", {
value: id,
text: val,
class: "list_option"
}).appendTo($("#" + parent_id));
i = i + 1;
}
this.rendered_lists.push(parent_id);
var pc = $.proxy(this.select_changed, this);
//$(document.body).on('change', "#" + parent_id, pc);
$(sel).on('change', pc);
return true;
}
CascadingList.prototype.select_changed = function(event){
var select_id = $(event.target)[0].id;
var selected_id = $(event.target).find(":selected").val();
var selected_value = $(event.target).find(":selected").text();
var items = this.get_items_for_list(selected_id);
var flag = true;
while(flag){
if(this.rendered_lists[this.rendered_lists.length - 1] == select_id){
flag = false;
} else {
var to_remove = this.rendered_lists.pop();
$("#outer_" + to_remove).remove();
}
}
this.render_select(selected_id, selected_value, items);
}
CascadingList.prototype.get_items_for_list = function(parent_item_id){
var items = Array();
for(var i in this.settings.data.rows){
var item = this.settings.data.rows[i];
var item_text = item['c'][0]['f'];
var item_id = item['c'][0]['v'];
var parent_id = item['c'][1]['v'];
if(parent_id == parent_item_id){
items.push([item_text, item_id]);
}
}
return items;
}
}(jQuery));
|
apache-2.0
|
trasa/aws-sdk-java
|
aws-java-sdk-devicefarm/src/main/java/com/amazonaws/services/devicefarm/model/GetProjectRequest.java
|
3035
|
/*
* Copyright 2010-2016 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
package com.amazonaws.services.devicefarm.model;
import java.io.Serializable;
import com.amazonaws.AmazonWebServiceRequest;
/**
* <p>
* Represents a request to the get project operation.
* </p>
*/
public class GetProjectRequest extends AmazonWebServiceRequest implements
Serializable, Cloneable {
/**
* <p>
* The project's ARN.
* </p>
*/
private String arn;
/**
* <p>
* The project's ARN.
* </p>
*
* @param arn
* The project's ARN.
*/
public void setArn(String arn) {
this.arn = arn;
}
/**
* <p>
* The project's ARN.
* </p>
*
* @return The project's ARN.
*/
public String getArn() {
return this.arn;
}
/**
* <p>
* The project's ARN.
* </p>
*
* @param arn
* The project's ARN.
* @return Returns a reference to this object so that method calls can be
* chained together.
*/
public GetProjectRequest withArn(String arn) {
setArn(arn);
return this;
}
/**
* Returns a string representation of this object; useful for testing and
* debugging.
*
* @return A string representation of this object.
*
* @see java.lang.Object#toString()
*/
@Override
public String toString() {
StringBuilder sb = new StringBuilder();
sb.append("{");
if (getArn() != null)
sb.append("Arn: " + getArn());
sb.append("}");
return sb.toString();
}
@Override
public boolean equals(Object obj) {
if (this == obj)
return true;
if (obj == null)
return false;
if (obj instanceof GetProjectRequest == false)
return false;
GetProjectRequest other = (GetProjectRequest) obj;
if (other.getArn() == null ^ this.getArn() == null)
return false;
if (other.getArn() != null
&& other.getArn().equals(this.getArn()) == false)
return false;
return true;
}
@Override
public int hashCode() {
final int prime = 31;
int hashCode = 1;
hashCode = prime * hashCode
+ ((getArn() == null) ? 0 : getArn().hashCode());
return hashCode;
}
@Override
public GetProjectRequest clone() {
return (GetProjectRequest) super.clone();
}
}
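// Usage sketch (assumes a configured AWSDeviceFarm client named "client";
// the ARN below is a placeholder):
//   GetProjectResult result = client.getProject(
//       new GetProjectRequest().withArn("arn:aws:devicefarm:us-west-2:123456789012:project:EXAMPLE"));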
|
apache-2.0
|
markflyhigh/incubator-beam
|
sdks/python/apache_beam/runners/worker/bundle_processor.py
|
50215
|
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""SDK harness for executing Python Fns via the Fn API."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import base64
import collections
import json
import logging
import random
import re
import threading
from builtins import next
from builtins import object
from future.utils import itervalues
from google.protobuf import timestamp_pb2
import apache_beam as beam
from apache_beam import coders
from apache_beam.coders import WindowedValueCoder
from apache_beam.coders import coder_impl
from apache_beam.internal import pickler
from apache_beam.io import iobase
from apache_beam.metrics import monitoring_infos
from apache_beam.portability import common_urns
from apache_beam.portability import python_urns
from apache_beam.portability.api import beam_fn_api_pb2
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.runners import common
from apache_beam.runners import pipeline_context
from apache_beam.runners.worker import operation_specs
from apache_beam.runners.worker import operations
from apache_beam.runners.worker import statesampler
from apache_beam.transforms import sideinputs
from apache_beam.transforms import userstate
from apache_beam.utils import counters
from apache_beam.utils import proto_utils
from apache_beam.utils import timestamp
from apache_beam.utils import windowed_value
# This module is experimental. No backwards-compatibility guarantees.
DATA_INPUT_URN = 'beam:source:runner:0.1'
DATA_OUTPUT_URN = 'beam:sink:runner:0.1'
IDENTITY_DOFN_URN = 'beam:dofn:identity:0.1'
# TODO(vikasrk): Fix this once runner sends appropriate common_urns.
OLD_DATAFLOW_RUNNER_HARNESS_PARDO_URN = 'beam:dofn:javasdk:0.1'
OLD_DATAFLOW_RUNNER_HARNESS_READ_URN = 'beam:source:java:0.1'
URNS_NEEDING_PCOLLECTIONS = set([monitoring_infos.ELEMENT_COUNT_URN,
monitoring_infos.SAMPLED_BYTE_SIZE_URN])
class RunnerIOOperation(operations.Operation):
"""Common baseclass for runner harness IO operations."""
def __init__(self, name_context, step_name, consumers, counter_factory,
state_sampler, windowed_coder, transform_id, data_channel):
super(RunnerIOOperation, self).__init__(
name_context, None, counter_factory, state_sampler)
self.windowed_coder = windowed_coder
self.windowed_coder_impl = windowed_coder.get_impl()
# transform_id represents the consumer for the bytes in the data plane for a
# DataInputOperation or a producer of these bytes for a DataOutputOperation.
self.transform_id = transform_id
self.data_channel = data_channel
for _, consumer_ops in consumers.items():
for consumer in consumer_ops:
self.add_receiver(consumer, 0)
class DataOutputOperation(RunnerIOOperation):
"""A sink-like operation that gathers outputs to be sent back to the runner.
"""
def set_output_stream(self, output_stream):
self.output_stream = output_stream
def process(self, windowed_value):
self.windowed_coder_impl.encode_to_stream(
windowed_value, self.output_stream, True)
self.output_stream.maybe_flush()
def finish(self):
self.output_stream.close()
super(DataOutputOperation, self).finish()
class DataInputOperation(RunnerIOOperation):
"""A source-like operation that gathers input from the runner."""
def __init__(self, operation_name, step_name, consumers, counter_factory,
state_sampler, windowed_coder, transform_id, data_channel):
super(DataInputOperation, self).__init__(
operation_name, step_name, consumers, counter_factory, state_sampler,
windowed_coder, transform_id=transform_id, data_channel=data_channel)
# We must do this manually as we don't have a spec or spec.output_coders.
self.receivers = [
operations.ConsumerSet.create(
self.counter_factory, self.name_context.step_name, 0,
next(iter(itervalues(consumers))), self.windowed_coder)]
self.splitting_lock = threading.Lock()
self.started = False
def start(self):
super(DataInputOperation, self).start()
with self.splitting_lock:
self.index = -1
self.stop = float('inf')
self.started = True
def process(self, windowed_value):
self.output(windowed_value)
def process_encoded(self, encoded_windowed_values):
input_stream = coder_impl.create_InputStream(encoded_windowed_values)
while input_stream.size() > 0:
with self.splitting_lock:
if self.index == self.stop - 1:
return
self.index += 1
decoded_value = self.windowed_coder_impl.decode_from_stream(
input_stream, True)
self.output(decoded_value)
def try_split(self, fraction_of_remainder, total_buffer_size):
with self.splitting_lock:
if not self.started:
return
if total_buffer_size < self.index + 1:
total_buffer_size = self.index + 1
elif self.stop and total_buffer_size > self.stop:
total_buffer_size = self.stop
if self.index == -1:
# We are "finished" with the (non-existent) previous element.
current_element_progress = 1
else:
current_element_progress_object = (
self.receivers[0].current_element_progress())
if current_element_progress_object is None:
current_element_progress = 0.5
else:
current_element_progress = (
current_element_progress_object.fraction_completed)
# Now figure out where to split.
# The units here (except for keep_of_element_remainder) are all in
# terms of number of (possibly fractional) elements.
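# Worked example: with index=3, current_element_progress=0.5,
# total_buffer_size=10 and fraction_of_remainder=0.5, remainder is
# 10 - 3 - 0.5 = 6.5 and keep is 3.25; keep / (1 - 0.5) = 6.5 >= 1, so we
# fall through to an element-boundary split with
# stop_index = 3 + round(0.5 + 3.25) = 7.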
remainder = total_buffer_size - self.index - current_element_progress
keep = remainder * fraction_of_remainder
if current_element_progress < 1:
keep_of_element_remainder = keep / (1 - current_element_progress)
# If it's less than what's left of the current element,
# try splitting at the current element.
if keep_of_element_remainder < 1:
split = self.receivers[0].try_split(keep_of_element_remainder)
if split:
element_primary, element_residual = split
self.stop = self.index + 1
return self.index - 1, element_primary, element_residual, self.stop
# Otherwise, split at the closest element boundary.
# pylint: disable=round-builtin
stop_index = (
self.index + max(1, int(round(current_element_progress + keep))))
if stop_index < self.stop:
self.stop = stop_index
return self.stop - 1, None, None, self.stop
def progress_metrics(self):
with self.splitting_lock:
metrics = super(DataInputOperation, self).progress_metrics()
current_element_progress = self.receivers[0].current_element_progress()
if current_element_progress:
metrics.active_elements.fraction_remaining = (
current_element_progress.fraction_remaining)
return metrics
def finish(self):
with self.splitting_lock:
self.started = False
class _StateBackedIterable(object):
def __init__(self, state_handler, state_key, coder_or_impl):
self._state_handler = state_handler
self._state_key = state_key
if isinstance(coder_or_impl, coders.Coder):
self._coder_impl = coder_or_impl.get_impl()
else:
self._coder_impl = coder_or_impl
def __iter__(self):
# Fetch the first page; a non-empty continuation_token means more pages remain.
data, continuation_token = self._state_handler.blocking_get(self._state_key)
while True:
input_stream = coder_impl.create_InputStream(data)
while input_stream.size() > 0:
yield self._coder_impl.decode_from_stream(input_stream, True)
if not continuation_token:
break
else:
data, continuation_token = self._state_handler.blocking_get(
self._state_key, continuation_token)
def __reduce__(self):
return list, (list(self),)
coder_impl.FastPrimitivesCoderImpl.register_iterable_like_type(
_StateBackedIterable)
class StateBackedSideInputMap(object):
def __init__(self, state_handler, transform_id, tag, side_input_data, coder):
self._state_handler = state_handler
self._transform_id = transform_id
self._tag = tag
self._side_input_data = side_input_data
self._element_coder = coder.wrapped_value_coder
self._target_window_coder = coder.window_coder
# TODO(robertwb): Limit the cache size.
self._cache = {}
def __getitem__(self, window):
target_window = self._side_input_data.window_mapping_fn(window)
if target_window not in self._cache:
state_key = beam_fn_api_pb2.StateKey(
multimap_side_input=beam_fn_api_pb2.StateKey.MultimapSideInput(
ptransform_id=self._transform_id,
side_input_id=self._tag,
window=self._target_window_coder.encode(target_window),
key=b''))
state_handler = self._state_handler
access_pattern = self._side_input_data.access_pattern
if access_pattern == common_urns.side_inputs.ITERABLE.urn:
raw_view = _StateBackedIterable(
state_handler, state_key, self._element_coder)
elif access_pattern == common_urns.side_inputs.MULTIMAP.urn:
cache = {}
key_coder_impl = self._element_coder.key_coder().get_impl()
value_coder = self._element_coder.value_coder()
class MultiMap(object):
def __getitem__(self, key):
if key not in cache:
keyed_state_key = beam_fn_api_pb2.StateKey()
keyed_state_key.CopyFrom(state_key)
keyed_state_key.multimap_side_input.key = (
key_coder_impl.encode_nested(key))
cache[key] = _StateBackedIterable(
state_handler, keyed_state_key, value_coder)
return cache[key]
def __reduce__(self):
# TODO(robertwb): Figure out how to support this.
raise TypeError(common_urns.side_inputs.MULTIMAP.urn)
raw_view = MultiMap()
else:
raise ValueError(
"Unknown access pattern: '%s'" % access_pattern)
self._cache[target_window] = self._side_input_data.view_fn(raw_view)
return self._cache[target_window]
def is_globally_windowed(self):
return (self._side_input_data.window_mapping_fn
== sideinputs._global_window_mapping_fn)
def reset(self):
# TODO(BEAM-5428): Cross-bundle caching respecting cache tokens.
self._cache = {}
class CombiningValueRuntimeState(userstate.CombiningValueRuntimeState):
def __init__(self, underlying_bag_state, combinefn):
self._combinefn = combinefn
self._underlying_bag_state = underlying_bag_state
def _read_accumulator(self, rewrite=True):
merged_accumulator = self._combinefn.merge_accumulators(
self._underlying_bag_state.read())
if rewrite:
self._underlying_bag_state.clear()
self._underlying_bag_state.add(merged_accumulator)
return merged_accumulator
def read(self):
return self._combinefn.extract_output(self._read_accumulator())
def add(self, value):
# Prefer blind writes, but don't let them grow unboundedly.
# This should be tuned to be much lower, but for now exercise
# both paths well.
if random.random() < 0.5:
accumulator = self._read_accumulator(False)
self._underlying_bag_state.clear()
else:
accumulator = self._combinefn.create_accumulator()
self._underlying_bag_state.add(
self._combinefn.add_input(accumulator, value))
def clear(self):
self._underlying_bag_state.clear()
def _commit(self):
self._underlying_bag_state._commit()
class _ConcatIterable(object):
"""An iterable that is the concatination of two iterables.
Unlike itertools.chain, this allows reiteration.
"""
def __init__(self, first, second):
self.first = first
self.second = second
def __iter__(self):
for elem in self.first:
yield elem
for elem in self.second:
yield elem
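# Example: list(_ConcatIterable([1, 2], [3])) == [1, 2, 3]; unlike
# itertools.chain, iterating the result a second time yields the elements
# again.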
coder_impl.FastPrimitivesCoderImpl.register_iterable_like_type(_ConcatIterable)
# TODO(BEAM-5428): Implement cross-bundle state caching.
class SynchronousBagRuntimeState(userstate.BagRuntimeState):
def __init__(self, state_handler, state_key, value_coder):
self._state_handler = state_handler
self._state_key = state_key
self._value_coder = value_coder
self._cleared = False
self._added_elements = []
def read(self):
return _ConcatIterable(
[] if self._cleared else _StateBackedIterable(
self._state_handler, self._state_key, self._value_coder),
self._added_elements)
def add(self, value):
self._added_elements.append(value)
def clear(self):
self._cleared = True
self._added_elements = []
def _commit(self):
if self._cleared:
self._state_handler.blocking_clear(self._state_key)
if self._added_elements:
value_coder_impl = self._value_coder.get_impl()
out = coder_impl.create_OutputStream()
for element in self._added_elements:
value_coder_impl.encode_to_stream(element, out, True)
self._state_handler.blocking_append(self._state_key, out.get())
# TODO(BEAM-5428): Implement cross-bundle state caching.
class SynchronousSetRuntimeState(userstate.SetRuntimeState):
def __init__(self, state_handler, state_key, value_coder):
self._state_handler = state_handler
self._state_key = state_key
self._value_coder = value_coder
self._cleared = False
self._added_elements = set()
def _compact_data(self, rewrite=True):
accumulator = set(_ConcatIterable(
set() if self._cleared else _StateBackedIterable(
self._state_handler, self._state_key, self._value_coder),
self._added_elements))
if rewrite and accumulator:
self._state_handler.blocking_clear(self._state_key)
value_coder_impl = self._value_coder.get_impl()
out = coder_impl.create_OutputStream()
for element in accumulator:
value_coder_impl.encode_to_stream(element, out, True)
self._state_handler.blocking_append(self._state_key, out.get())
# Since everything is already committed, we can safely reinitialize
# added_elements here.
self._added_elements = set()
return accumulator
def read(self):
return self._compact_data(rewrite=False)
def add(self, value):
if self._cleared:
# This is a good time to explicitly clear the underlying state.
self._state_handler.blocking_clear(self._state_key)
self._cleared = False
self._added_elements.add(value)
if random.random() > 0.5:
self._compact_data()
def clear(self):
self._cleared = True
self._added_elements = set()
def _commit(self):
if self._cleared:
self._state_handler.blocking_clear(self._state_key)
if self._added_elements:
value_coder_impl = self._value_coder.get_impl()
out = coder_impl.create_OutputStream()
for element in self._added_elements:
value_coder_impl.encode_to_stream(element, out, True)
self._state_handler.blocking_append(self._state_key, out.get())
class OutputTimer(object):
def __init__(self, key, window, receiver):
self._key = key
self._window = window
self._receiver = receiver
def set(self, ts):
ts = timestamp.Timestamp.of(ts)
self._receiver.receive(
windowed_value.WindowedValue(
(self._key, dict(timestamp=ts)), ts, (self._window,)))
def clear(self):
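# Clearing is signaled by firing the timer at a timestamp just past
# MAX_TIMESTAMP_MILLIS, i.e. beyond any valid firing time.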
dummy_millis = int(common_urns.constants.MAX_TIMESTAMP_MILLIS.constant) + 1
clear_ts = timestamp.Timestamp(micros=dummy_millis * 1000)
self._receiver.receive(
windowed_value.WindowedValue(
(self._key, dict(timestamp=clear_ts)), 0, (self._window,)))
class FnApiUserStateContext(userstate.UserStateContext):
"""Interface for state and timers from SDK to Fn API servicer of state.."""
def __init__(
self, state_handler, transform_id, key_coder, window_coder, timer_specs):
"""Initialize a ``FnApiUserStateContext``.
Args:
state_handler: A StateServicer object.
transform_id: The name of the PTransform that this context is
associated with.
key_coder: The coder for the key of the main input elements.
window_coder: The coder for the windows of the main input elements.
timer_specs: A list of ``userstate.TimerSpec`` objects specifying the
timers associated with this operation.
"""
self._state_handler = state_handler
self._transform_id = transform_id
self._key_coder = key_coder
self._window_coder = window_coder
self._timer_specs = timer_specs
self._timer_receivers = None
self._all_states = {}
def update_timer_receivers(self, receivers):
"""TODO"""
self._timer_receivers = {}
for tag in self._timer_specs:
self._timer_receivers[tag] = receivers.pop(tag)
def get_timer(self, timer_spec, key, window):
return OutputTimer(
key, window, self._timer_receivers[timer_spec.name])
def get_state(self, *args):
state_handle = self._all_states.get(args)
if state_handle is None:
state_handle = self._all_states[args] = self._create_state(*args)
return state_handle
def _create_state(self, state_spec, key, window):
if isinstance(state_spec,
(userstate.BagStateSpec, userstate.CombiningValueStateSpec)):
bag_state = SynchronousBagRuntimeState(
self._state_handler,
state_key=beam_fn_api_pb2.StateKey(
bag_user_state=beam_fn_api_pb2.StateKey.BagUserState(
ptransform_id=self._transform_id,
user_state_id=state_spec.name,
window=self._window_coder.encode(window),
key=self._key_coder.encode(key))),
value_coder=state_spec.coder)
if isinstance(state_spec, userstate.BagStateSpec):
return bag_state
else:
return CombiningValueRuntimeState(bag_state, state_spec.combine_fn)
elif isinstance(state_spec, userstate.SetStateSpec):
return SynchronousSetRuntimeState(
self._state_handler,
state_key=beam_fn_api_pb2.StateKey(
bag_user_state=beam_fn_api_pb2.StateKey.BagUserState(
ptransform_id=self._transform_id,
user_state_id=state_spec.name,
window=self._window_coder.encode(window),
key=self._key_coder.encode(key))),
value_coder=state_spec.coder)
else:
raise NotImplementedError(state_spec)
def commit(self):
for state in self._all_states.values():
state._commit()
def reset(self):
# TODO(BEAM-5428): Implement cross-bundle state caching.
self._all_states = {}
def memoize(func):
cache = {}
missing = object()
def wrapper(*args):
result = cache.get(args, missing)
if result is missing:
result = cache[args] = func(*args)
return result
return wrapper
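# e.g. get_operation and topological_height below are decorated with
# @memoize so each transform's operation is constructed exactly once.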
def only_element(iterable):
element, = iterable
return element
class BundleProcessor(object):
""" A class for processing bundles of elements. """
def __init__(
self, process_bundle_descriptor, state_handler, data_channel_factory):
"""Initialize a bundle processor.
Args:
process_bundle_descriptor (``beam_fn_api_pb2.ProcessBundleDescriptor``):
a description of the stage that this ``BundleProcessor`` is to execute.
state_handler (beam_fn_api_pb2_grpc.BeamFnStateServicer).
data_channel_factory (``data_plane.DataChannelFactory``).
"""
self.process_bundle_descriptor = process_bundle_descriptor
self.state_handler = state_handler
self.data_channel_factory = data_channel_factory
# TODO(robertwb): Figure out the correct prefix to use for output counters
# from StateSampler.
self.counter_factory = counters.CounterFactory()
self.state_sampler = statesampler.StateSampler(
'fnapi-step-%s' % self.process_bundle_descriptor.id,
self.counter_factory)
self.ops = self.create_execution_tree(self.process_bundle_descriptor)
for op in self.ops.values():
op.setup()
self.splitting_lock = threading.Lock()
def create_execution_tree(self, descriptor):
transform_factory = BeamTransformFactory(
descriptor, self.data_channel_factory, self.counter_factory,
self.state_sampler, self.state_handler)
def is_side_input(transform_proto, tag):
if transform_proto.spec.urn == common_urns.primitives.PAR_DO.urn:
return tag in proto_utils.parse_Bytes(
transform_proto.spec.payload,
beam_runner_api_pb2.ParDoPayload).side_inputs
pcoll_consumers = collections.defaultdict(list)
for transform_id, transform_proto in descriptor.transforms.items():
for tag, pcoll_id in transform_proto.inputs.items():
if not is_side_input(transform_proto, tag):
pcoll_consumers[pcoll_id].append(transform_id)
@memoize
def get_operation(transform_id):
transform_consumers = {
tag: [get_operation(op) for op in pcoll_consumers[pcoll_id]]
for tag, pcoll_id
in descriptor.transforms[transform_id].outputs.items()
}
return transform_factory.create_operation(
transform_id, transform_consumers)
# Operations must be started (hence returned) in order.
@memoize
def topological_height(transform_id):
return 1 + max(
[0] +
[topological_height(consumer)
for pcoll in descriptor.transforms[transform_id].outputs.values()
for consumer in pcoll_consumers[pcoll]])
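# A transform with no downstream consumers has height 1, its direct
# producers height 2, and so on; sorting by descending height below places
# upstream (producer) transforms before their consumers in the returned
# OrderedDict.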
return collections.OrderedDict([
(transform_id, get_operation(transform_id))
for transform_id in sorted(
descriptor.transforms, key=topological_height, reverse=True)])
def reset(self):
self.counter_factory.reset()
self.state_sampler.reset()
# Side input caches.
for op in self.ops.values():
op.reset()
def process_bundle(self, instruction_id):
expected_inputs = []
for op in self.ops.values():
if isinstance(op, DataOutputOperation):
# TODO(robertwb): Is there a better way to pass the instruction id to
# the operation?
op.set_output_stream(op.data_channel.output_stream(
instruction_id, op.transform_id))
elif isinstance(op, DataInputOperation):
# We must wait until we receive "end of stream" for each of these ops.
expected_inputs.append(op)
try:
execution_context = ExecutionContext()
self.state_sampler.start()
# Start all operations.
for op in reversed(self.ops.values()):
logging.debug('start %s', op)
op.execution_context = execution_context
op.start()
# Inject inputs from data plane.
data_channels = collections.defaultdict(list)
input_op_by_transform_id = {}
for input_op in expected_inputs:
data_channels[input_op.data_channel].append(input_op.transform_id)
input_op_by_transform_id[input_op.transform_id] = input_op
for data_channel, expected_transforms in data_channels.items():
for data in data_channel.input_elements(
instruction_id, expected_transforms):
input_op_by_transform_id[
data.ptransform_id].process_encoded(data.data)
# Finish all operations.
for op in self.ops.values():
logging.debug('finish %s', op)
op.finish()
return ([self.delayed_bundle_application(op, residual)
for op, residual in execution_context.delayed_applications],
self.requires_finalization())
finally:
# Ensure any in-flight split attempts complete.
with self.splitting_lock:
pass
self.state_sampler.stop_if_still_running()
def finalize_bundle(self):
for op in self.ops.values():
op.finalize_bundle()
return beam_fn_api_pb2.FinalizeBundleResponse()
def requires_finalization(self):
return any(op.needs_finalization() for op in self.ops.values())
def try_split(self, bundle_split_request):
split_response = beam_fn_api_pb2.ProcessBundleSplitResponse()
with self.splitting_lock:
for op in self.ops.values():
if isinstance(op, DataInputOperation):
desired_split = bundle_split_request.desired_splits.get(
op.transform_id)
if desired_split:
split = op.try_split(desired_split.fraction_of_remainder,
desired_split.estimated_input_elements)
if split:
(primary_end, element_primary, element_residual, residual_start,
) = split
if element_primary:
split_response.primary_roots.add().CopyFrom(
self.delayed_bundle_application(
*element_primary).application)
if element_residual:
split_response.residual_roots.add().CopyFrom(
self.delayed_bundle_application(*element_residual))
split_response.channel_splits.extend([
beam_fn_api_pb2.ProcessBundleSplitResponse.ChannelSplit(
ptransform_id=op.transform_id,
last_primary_element=primary_end,
first_residual_element=residual_start)])
return split_response
def delayed_bundle_application(self, op, deferred_remainder):
ptransform_id, main_input_tag, main_input_coder, outputs = op.input_info
# TODO(SDF): For non-root nodes, need main_input_coder + residual_coder.
element_and_restriction, watermark = deferred_remainder
if watermark:
proto_watermark = timestamp_pb2.Timestamp()
proto_watermark.FromMicroseconds(watermark.micros)
output_watermarks = {output: proto_watermark for output in outputs}
else:
output_watermarks = None
return beam_fn_api_pb2.DelayedBundleApplication(
application=beam_fn_api_pb2.BundleApplication(
ptransform_id=ptransform_id,
input_id=main_input_tag,
output_watermarks=output_watermarks,
element=main_input_coder.get_impl().encode_nested(
element_and_restriction)))
def metrics(self):
# DEPRECATED
return beam_fn_api_pb2.Metrics(
# TODO(robertwb): Rename to progress?
ptransforms={
transform_id:
self._fix_output_tags(transform_id, op.progress_metrics())
for transform_id, op in self.ops.items()})
def _fix_output_tags(self, transform_id, metrics):
# DEPRECATED
actual_output_tags = list(
self.process_bundle_descriptor.transforms[transform_id].outputs.keys())
# Outputs are still referred to by index, not by name, in many Operations.
# However, if there is exactly one output, we can fix up the name here.
def fix_only_output_tag(actual_output_tag, mapping):
if len(mapping) == 1:
fake_output_tag, count = only_element(list(mapping.items()))
if fake_output_tag != actual_output_tag:
del mapping[fake_output_tag]
mapping[actual_output_tag] = count
if len(actual_output_tags) == 1:
fix_only_output_tag(
actual_output_tags[0],
metrics.processed_elements.measured.output_element_counts)
fix_only_output_tag(
actual_output_tags[0],
metrics.active_elements.measured.output_element_counts)
return metrics
def monitoring_infos(self):
"""Returns the list of MonitoringInfos collected processing this bundle."""
# Construct a new dict first to remove duplicates.
all_monitoring_infos_dict = {}
for transform_id, op in self.ops.items():
for mi in op.monitoring_infos(transform_id).values():
fixed_mi = self._fix_output_tags_monitoring_info(transform_id, mi)
all_monitoring_infos_dict[monitoring_infos.to_key(fixed_mi)] = fixed_mi
infos_list = list(all_monitoring_infos_dict.values())
def inject_pcollection(monitoring_info):
"""
If provided metric is element count metric:
Finds relevant transform output info in current process_bundle_descriptor
and adds tag with PCOLLECTION_LABEL and pcollection_id into monitoring
info.
"""
if monitoring_info.urn in URNS_NEEDING_PCOLLECTIONS:
if monitoring_infos.PTRANSFORM_LABEL not in monitoring_info.labels:
return
ptransform_label = monitoring_info.labels[
monitoring_infos.PTRANSFORM_LABEL]
if monitoring_infos.TAG_LABEL not in monitoring_info.labels:
return
tag_label = monitoring_info.labels[monitoring_infos.TAG_LABEL]
if ptransform_label not in self.process_bundle_descriptor.transforms:
return
if tag_label not in self.process_bundle_descriptor.transforms[
ptransform_label].outputs:
return
pcollection_name = (self.process_bundle_descriptor
.transforms[ptransform_label].outputs[tag_label])
monitoring_info.labels[
monitoring_infos.PCOLLECTION_LABEL] = pcollection_name
# Cleaning up labels that are not in specification.
monitoring_info.labels.pop(monitoring_infos.PTRANSFORM_LABEL)
monitoring_info.labels.pop(monitoring_infos.TAG_LABEL)
for mi in infos_list:
inject_pcollection(mi)
return infos_list
def _fix_output_tags_monitoring_info(self, transform_id, monitoring_info):
actual_output_tags = list(
self.process_bundle_descriptor.transforms[transform_id].outputs.keys())
if ('TAG' in monitoring_info.labels and
monitoring_info.labels['TAG'] == 'ONLY_OUTPUT'):
if len(actual_output_tags) == 1:
monitoring_info.labels['TAG'] = actual_output_tags[0]
return monitoring_info
def shutdown(self):
for op in self.ops.values():
op.teardown()
class ExecutionContext(object):
def __init__(self):
self.delayed_applications = []
class BeamTransformFactory(object):
"""Factory for turning transform_protos into executable operations."""
def __init__(self, descriptor, data_channel_factory, counter_factory,
state_sampler, state_handler):
self.descriptor = descriptor
self.data_channel_factory = data_channel_factory
self.counter_factory = counter_factory
self.state_sampler = state_sampler
self.state_handler = state_handler
self.context = pipeline_context.PipelineContext(
descriptor,
iterable_state_read=lambda token, element_coder_impl:
_StateBackedIterable(
state_handler,
beam_fn_api_pb2.StateKey(
runner=beam_fn_api_pb2.StateKey.Runner(key=token)),
element_coder_impl))
_known_urns = {}
@classmethod
def register_urn(cls, urn, parameter_type):
def wrapper(func):
cls._known_urns[urn] = func, parameter_type
return func
return wrapper
def create_operation(self, transform_id, consumers):
transform_proto = self.descriptor.transforms[transform_id]
if not transform_proto.unique_name:
logging.debug("No unique name set for transform %s" % transform_id)
transform_proto.unique_name = transform_id
creator, parameter_type = self._known_urns[transform_proto.spec.urn]
payload = proto_utils.parse_Bytes(
transform_proto.spec.payload, parameter_type)
return creator(self, transform_id, transform_proto, payload, consumers)
def get_coder(self, coder_id):
if coder_id not in self.descriptor.coders:
raise KeyError("No such coder: %s" % coder_id)
coder_proto = self.descriptor.coders[coder_id]
if coder_proto.spec.urn:
return self.context.coders.get_by_id(coder_id)
else:
# No URN, assume cloud object encoding json bytes.
return operation_specs.get_coder_from_spec(
json.loads(coder_proto.spec.payload.decode('utf-8')))
def get_windowed_coder(self, pcoll_id):
coder = self.get_coder(self.descriptor.pcollections[pcoll_id].coder_id)
# TODO(robertwb): Remove this condition once all runners are consistent.
if not isinstance(coder, WindowedValueCoder):
windowing_strategy = self.descriptor.windowing_strategies[
self.descriptor.pcollections[pcoll_id].windowing_strategy_id]
return WindowedValueCoder(
coder, self.get_coder(windowing_strategy.window_coder_id))
else:
return coder
def get_output_coders(self, transform_proto):
return {
tag: self.get_windowed_coder(pcoll_id)
for tag, pcoll_id in transform_proto.outputs.items()
}
def get_only_output_coder(self, transform_proto):
return only_element(self.get_output_coders(transform_proto).values())
def get_input_coders(self, transform_proto):
return {
tag: self.get_windowed_coder(pcoll_id)
for tag, pcoll_id in transform_proto.inputs.items()
}
def get_only_input_coder(self, transform_proto):
return only_element(list(self.get_input_coders(transform_proto).values()))
# TODO(robertwb): Update all operations to take these in the constructor.
@staticmethod
def augment_oldstyle_op(op, step_name, consumers, tag_list=None):
op.step_name = step_name
for tag, op_consumers in consumers.items():
for consumer in op_consumers:
op.add_receiver(consumer, tag_list.index(tag) if tag_list else 0)
return op
class TimerConsumer(operations.Operation):
def __init__(self, timer_tag, do_op):
self._timer_tag = timer_tag
self._do_op = do_op
def process(self, windowed_value):
self._do_op.process_timer(self._timer_tag, windowed_value)
@BeamTransformFactory.register_urn(
DATA_INPUT_URN, beam_fn_api_pb2.RemoteGrpcPort)
def create(factory, transform_id, transform_proto, grpc_port, consumers):
# Timers are the one special case where we don't want to call the
# (unlabeled) operation.process() method, which we detect here.
# TODO(robertwb): Consider generalizing if there are any more cases.
output_pcoll = only_element(transform_proto.outputs.values())
output_consumers = only_element(consumers.values())
if (len(output_consumers) == 1
and isinstance(only_element(output_consumers), operations.DoOperation)):
do_op = only_element(output_consumers)
for tag, pcoll_id in do_op.timer_inputs.items():
if pcoll_id == output_pcoll:
output_consumers[:] = [TimerConsumer(tag, do_op)]
break
if grpc_port.coder_id:
output_coder = factory.get_coder(grpc_port.coder_id)
else:
logging.info(
'Missing required coder_id on grpc_port for %s; '
'using deprecated fallback.',
transform_id)
output_coder = factory.get_only_output_coder(transform_proto)
return DataInputOperation(
transform_proto.unique_name,
transform_proto.unique_name,
consumers,
factory.counter_factory,
factory.state_sampler,
output_coder,
transform_id=transform_id,
data_channel=factory.data_channel_factory.create_data_channel(grpc_port))
@BeamTransformFactory.register_urn(
DATA_OUTPUT_URN, beam_fn_api_pb2.RemoteGrpcPort)
def create(factory, transform_id, transform_proto, grpc_port, consumers):
if grpc_port.coder_id:
output_coder = factory.get_coder(grpc_port.coder_id)
else:
logging.info(
'Missing required coder_id on grpc_port for %s; '
'using deprecated fallback.',
transform_id)
output_coder = factory.get_only_input_coder(transform_proto)
return DataOutputOperation(
transform_proto.unique_name,
transform_proto.unique_name,
consumers,
factory.counter_factory,
factory.state_sampler,
output_coder,
transform_id=transform_id,
data_channel=factory.data_channel_factory.create_data_channel(grpc_port))
@BeamTransformFactory.register_urn(OLD_DATAFLOW_RUNNER_HARNESS_READ_URN, None)
def create(factory, transform_id, transform_proto, parameter, consumers):
# The Dataflow runner harness strips the base64 encoding.
source = pickler.loads(base64.b64encode(parameter))
spec = operation_specs.WorkerRead(
iobase.SourceBundle(1.0, source, None, None),
[factory.get_only_output_coder(transform_proto)])
return factory.augment_oldstyle_op(
operations.ReadOperation(
transform_proto.unique_name,
spec,
factory.counter_factory,
factory.state_sampler),
transform_proto.unique_name,
consumers)
@BeamTransformFactory.register_urn(
common_urns.deprecated_primitives.READ.urn, beam_runner_api_pb2.ReadPayload)
def create(factory, transform_id, transform_proto, parameter, consumers):
source = iobase.SourceBase.from_runner_api(parameter.source, factory.context)
spec = operation_specs.WorkerRead(
iobase.SourceBundle(1.0, source, None, None),
[WindowedValueCoder(source.default_output_coder())])
return factory.augment_oldstyle_op(
operations.ReadOperation(
transform_proto.unique_name,
spec,
factory.counter_factory,
factory.state_sampler),
transform_proto.unique_name,
consumers)
@BeamTransformFactory.register_urn(
python_urns.IMPULSE_READ_TRANSFORM, beam_runner_api_pb2.ReadPayload)
def create(factory, transform_id, transform_proto, parameter, consumers):
return operations.ImpulseReadOperation(
transform_proto.unique_name,
factory.counter_factory,
factory.state_sampler,
consumers,
iobase.SourceBase.from_runner_api(
parameter.source, factory.context),
factory.get_only_output_coder(transform_proto))
@BeamTransformFactory.register_urn(OLD_DATAFLOW_RUNNER_HARNESS_PARDO_URN, None)
def create(factory, transform_id, transform_proto, serialized_fn, consumers):
return _create_pardo_operation(
factory, transform_id, transform_proto, consumers, serialized_fn)
@BeamTransformFactory.register_urn(
common_urns.sdf_components.PAIR_WITH_RESTRICTION.urn,
beam_runner_api_pb2.ParDoPayload)
def create(*args):
class PairWithRestriction(beam.DoFn):
def __init__(self, fn, restriction_provider):
self.restriction_provider = restriction_provider
# An unused window is requested to force explosion of multi-window
# WindowedValues.
def process(
self, element, _unused_window=beam.DoFn.WindowParam, *args, **kwargs):
# TODO(SDF): Do we want to allow mutation of the element?
# (E.g. it could be nice to shift bulky description to the portion
# that can be distributed.)
yield element, self.restriction_provider.initial_restriction(element)
return _create_sdf_operation(PairWithRestriction, *args)
@BeamTransformFactory.register_urn(
common_urns.sdf_components.SPLIT_AND_SIZE_RESTRICTIONS.urn,
beam_runner_api_pb2.ParDoPayload)
def create(*args):
class SplitAndSizeRestrictions(beam.DoFn):
def __init__(self, fn, restriction_provider):
self.restriction_provider = restriction_provider
def process(self, element_restriction, *args, **kwargs):
element, restriction = element_restriction
for part, size in self.restriction_provider.split_and_size(
element, restriction):
yield ((element, part), size)
return _create_sdf_operation(SplitAndSizeRestrictions, *args)
@BeamTransformFactory.register_urn(
common_urns.sdf_components.PROCESS_SIZED_ELEMENTS_AND_RESTRICTIONS.urn,
beam_runner_api_pb2.ParDoPayload)
def create(factory, transform_id, transform_proto, parameter, consumers):
assert parameter.do_fn.spec.urn == python_urns.PICKLED_DOFN_INFO
serialized_fn = parameter.do_fn.spec.payload
return _create_pardo_operation(
factory, transform_id, transform_proto, consumers,
serialized_fn, parameter,
operation_cls=operations.SdfProcessSizedElements)
def _create_sdf_operation(
proxy_dofn,
factory, transform_id, transform_proto, parameter, consumers):
dofn_data = pickler.loads(parameter.do_fn.spec.payload)
dofn = dofn_data[0]
restriction_provider = common.DoFnSignature(dofn).get_restriction_provider()
serialized_fn = pickler.dumps(
(proxy_dofn(dofn, restriction_provider),) + dofn_data[1:])
return _create_pardo_operation(
factory, transform_id, transform_proto, consumers,
serialized_fn, parameter)
@BeamTransformFactory.register_urn(
common_urns.primitives.PAR_DO.urn, beam_runner_api_pb2.ParDoPayload)
def create(factory, transform_id, transform_proto, parameter, consumers):
assert parameter.do_fn.spec.urn == python_urns.PICKLED_DOFN_INFO
serialized_fn = parameter.do_fn.spec.payload
return _create_pardo_operation(
factory, transform_id, transform_proto, consumers,
serialized_fn, parameter)
def _create_pardo_operation(
factory, transform_id, transform_proto, consumers,
serialized_fn, pardo_proto=None, operation_cls=operations.DoOperation):
if pardo_proto and pardo_proto.side_inputs:
input_tags_to_coders = factory.get_input_coders(transform_proto)
tagged_side_inputs = [
(tag, beam.pvalue.SideInputData.from_runner_api(si, factory.context))
for tag, si in pardo_proto.side_inputs.items()]
tagged_side_inputs.sort(
key=lambda tag_si: int(re.match('side([0-9]+)(-.*)?$',
tag_si[0]).group(1)))
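# Side input tags are expected to look like 'side0', 'side1-Foo', ...;
# the regex sorts them by the embedded numeric index rather than
# lexicographically (so 'side10' sorts after 'side2').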
side_input_maps = [
StateBackedSideInputMap(
factory.state_handler,
transform_id,
tag,
si,
input_tags_to_coders[tag])
for tag, si in tagged_side_inputs]
else:
side_input_maps = []
output_tags = list(transform_proto.outputs.keys())
# Hack to match the output prefix injected by the Dataflow runner.
def mutate_tag(tag):
if 'None' in output_tags:
if tag == 'None':
return 'out'
else:
return 'out_' + tag
else:
return tag
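# e.g. with output_tags == ['None', 'extra'], 'None' maps to 'out' and
# 'extra' maps to 'out_extra'; tags are left untouched when no 'None'
# output is present.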
dofn_data = pickler.loads(serialized_fn)
if not dofn_data[-1]:
# Windowing not set.
if pardo_proto:
other_input_tags = set.union(
set(pardo_proto.side_inputs), set(pardo_proto.timer_specs))
else:
other_input_tags = ()
pcoll_id, = [pcoll for tag, pcoll in transform_proto.inputs.items()
if tag not in other_input_tags]
windowing = factory.context.windowing_strategies.get_by_id(
factory.descriptor.pcollections[pcoll_id].windowing_strategy_id)
serialized_fn = pickler.dumps(dofn_data[:-1] + (windowing,))
if pardo_proto and (pardo_proto.timer_specs or pardo_proto.state_specs
or pardo_proto.splittable):
main_input_coder = None
timer_inputs = {}
for tag, pcoll_id in transform_proto.inputs.items():
if tag in pardo_proto.timer_specs:
timer_inputs[tag] = pcoll_id
elif tag in pardo_proto.side_inputs:
pass
else:
# Must be the main input
assert main_input_coder is None
main_input_tag = tag
main_input_coder = factory.get_windowed_coder(pcoll_id)
assert main_input_coder is not None
if pardo_proto.timer_specs or pardo_proto.state_specs:
user_state_context = FnApiUserStateContext(
factory.state_handler,
transform_id,
main_input_coder.key_coder(),
main_input_coder.window_coder,
timer_specs=pardo_proto.timer_specs)
else:
user_state_context = None
else:
user_state_context = None
timer_inputs = None
output_coders = factory.get_output_coders(transform_proto)
spec = operation_specs.WorkerDoFn(
serialized_fn=serialized_fn,
output_tags=[mutate_tag(tag) for tag in output_tags],
input=None,
side_inputs=None, # Fn API uses proto definitions and the Fn State API
output_coders=[output_coders[tag] for tag in output_tags])
result = factory.augment_oldstyle_op(
operation_cls(
transform_proto.unique_name,
spec,
factory.counter_factory,
factory.state_sampler,
side_input_maps,
user_state_context,
timer_inputs=timer_inputs),
transform_proto.unique_name,
consumers,
output_tags)
if pardo_proto and pardo_proto.splittable:
result.input_info = (
transform_id, main_input_tag, main_input_coder,
transform_proto.outputs.keys())
return result
def _create_simple_pardo_operation(
factory, transform_id, transform_proto, consumers, dofn):
serialized_fn = pickler.dumps((dofn, (), {}, [], None))
return _create_pardo_operation(
factory, transform_id, transform_proto, consumers, serialized_fn)
@BeamTransformFactory.register_urn(
common_urns.primitives.ASSIGN_WINDOWS.urn,
beam_runner_api_pb2.WindowingStrategy)
def create(factory, transform_id, transform_proto, parameter, consumers):
class WindowIntoDoFn(beam.DoFn):
def __init__(self, windowing):
self.windowing = windowing
def process(self, element, timestamp=beam.DoFn.TimestampParam,
window=beam.DoFn.WindowParam):
new_windows = self.windowing.windowfn.assign(
WindowFn.AssignContext(timestamp, element=element, window=window))
yield WindowedValue(element, timestamp, new_windows)
from apache_beam.transforms.core import Windowing
from apache_beam.transforms.window import WindowFn, WindowedValue
windowing = Windowing.from_runner_api(parameter, factory.context)
return _create_simple_pardo_operation(
factory, transform_id, transform_proto, consumers,
WindowIntoDoFn(windowing))
@BeamTransformFactory.register_urn(IDENTITY_DOFN_URN, None)
def create(factory, transform_id, transform_proto, unused_parameter, consumers):
return factory.augment_oldstyle_op(
operations.FlattenOperation(
transform_proto.unique_name,
operation_specs.WorkerFlatten(
None, [factory.get_only_output_coder(transform_proto)]),
factory.counter_factory,
factory.state_sampler),
transform_proto.unique_name,
consumers)
@BeamTransformFactory.register_urn(
common_urns.combine_components.COMBINE_PER_KEY_PRECOMBINE.urn,
beam_runner_api_pb2.CombinePayload)
def create(factory, transform_id, transform_proto, payload, consumers):
serialized_combine_fn = pickler.dumps(
(beam.CombineFn.from_runner_api(payload.combine_fn, factory.context),
[], {}))
return factory.augment_oldstyle_op(
operations.PGBKCVOperation(
transform_proto.unique_name,
operation_specs.WorkerPartialGroupByKey(
serialized_combine_fn,
None,
[factory.get_only_output_coder(transform_proto)]),
factory.counter_factory,
factory.state_sampler),
transform_proto.unique_name,
consumers)
@BeamTransformFactory.register_urn(
common_urns.combine_components.COMBINE_PER_KEY_MERGE_ACCUMULATORS.urn,
beam_runner_api_pb2.CombinePayload)
def create(factory, transform_id, transform_proto, payload, consumers):
return _create_combine_phase_operation(
factory, transform_proto, payload, consumers, 'merge')
@BeamTransformFactory.register_urn(
common_urns.combine_components.COMBINE_PER_KEY_EXTRACT_OUTPUTS.urn,
beam_runner_api_pb2.CombinePayload)
def create(factory, transform_id, transform_proto, payload, consumers):
return _create_combine_phase_operation(
factory, transform_proto, payload, consumers, 'extract')
@BeamTransformFactory.register_urn(
common_urns.combine_components.COMBINE_GROUPED_VALUES.urn,
beam_runner_api_pb2.CombinePayload)
def create(factory, transform_id, transform_proto, payload, consumers):
return _create_combine_phase_operation(
factory, transform_proto, payload, consumers, 'all')
def _create_combine_phase_operation(
factory, transform_proto, payload, consumers, phase):
serialized_combine_fn = pickler.dumps(
(beam.CombineFn.from_runner_api(payload.combine_fn, factory.context),
[], {}))
return factory.augment_oldstyle_op(
operations.CombineOperation(
transform_proto.unique_name,
operation_specs.WorkerCombineFn(
serialized_combine_fn,
phase,
None,
[factory.get_only_output_coder(transform_proto)]),
factory.counter_factory,
factory.state_sampler),
transform_proto.unique_name,
consumers)
@BeamTransformFactory.register_urn(common_urns.primitives.FLATTEN.urn, None)
def create(factory, transform_id, transform_proto, unused_parameter, consumers):
return factory.augment_oldstyle_op(
operations.FlattenOperation(
transform_proto.unique_name,
operation_specs.WorkerFlatten(
None,
[factory.get_only_output_coder(transform_proto)]),
factory.counter_factory,
factory.state_sampler),
transform_proto.unique_name,
consumers)
@BeamTransformFactory.register_urn(
common_urns.primitives.MAP_WINDOWS.urn,
beam_runner_api_pb2.SdkFunctionSpec)
def create(factory, transform_id, transform_proto, mapping_fn_spec, consumers):
assert mapping_fn_spec.spec.urn == python_urns.PICKLED_WINDOW_MAPPING_FN
window_mapping_fn = pickler.loads(mapping_fn_spec.spec.payload)
class MapWindows(beam.DoFn):
def process(self, element):
key, window = element
return [(key, window_mapping_fn(window))]
return _create_simple_pardo_operation(
factory, transform_id, transform_proto, consumers,
MapWindows())
|
apache-2.0
|
unclesky4/CompanyEmployeesManage
|
php/position_update.php
|
1283
|
<?php header('Content-type: text/html;charset=UTF-8');
session_start();
if(!isset($_SESSION['R'])) {
echo '0'; // not logged in
return;
}
if($_SESSION['R'] !== "administrator") {
echo '1';
return;
}
$id = test_input($_POST['id']);
$name = test_input($_POST['name']);
$bool_name=false;
$con = mysql_connect("localhost","uncle","uncle");
if(!$con) {
echo "-1"; //连接数据库失败!
die('Could not connect: ' . mysql_error());
}
mysql_select_db("CompanyEmployee",$con);
mysql_query("set names utf8");
$result = mysql_query("select name from position");
while($row = mysql_fetch_array($result)) {
if($row['name'] === $name) {
$bool_name = true;
break;
}
}
if($bool_name) {
echo "2"; //要修改的职位名称已存在!
return ;
}
if(mysql_query("update position set name='$name' where id='$id'")) {
echo "3"; //修改成功!
}else {
echo "4"; //修改失败!
}
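/* Note: interpolating $name and $id into the SQL string is injection-prone
even after test_input(). A safer sketch using mysqli prepared statements
(hypothetical; would replace the deprecated mysql_* calls above):
$stmt = $mysqli->prepare("update position set name=? where id=?");
$stmt->bind_param("ss", $name, $id);
$stmt->execute();
*/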
function test_input($data) {
$data = trim($data); // strip unnecessary whitespace (extra spaces, tabs, newlines) from user input
$data = stripslashes($data); // remove backslashes (\) from user input
$data = htmlspecialchars($data); // escape HTML special characters via htmlspecialchars()
return $data;
}
?>
|
apache-2.0
|
romanzenka/swift
|
lib/config/src/test/java/edu/mayo/mprc/config/TestResource.java
|
1564
|
package edu.mayo.mprc.config;
import org.testng.Assert;
import java.util.Arrays;
import java.util.List;
/**
* @author Roman Zenka
*/
class TestResource implements ResourceConfig {
private TestResource2 ref1;
private TestResource2 ref2;
TestResource() {
ref1 = new TestResource2();
ref2 = new TestResource2();
}
public TestResource2 getRef1() {
return ref1;
}
public TestResource2 getRef2() {
return ref2;
}
@Override
public void save(ConfigWriter writer) {
writer.comment("Test resource");
writer.put("boolean", true);
writer.put("integer", 123, "Integer");
writer.put("key", "value");
writer.put("key2", "value2", "Comment");
writer.put("resource", ref1);
writer.put("resources", Arrays.asList(ref1, ref2));
}
@Override
public void load(ConfigReader reader) {
Assert.assertEquals(reader.getBoolean("boolean"), true);
Assert.assertEquals(reader.getInteger("integer"), 123);
Assert.assertEquals(reader.get("key"), "value");
Assert.assertEquals(reader.get("key2"), "value2");
final ResourceConfig resource = reader.getObject("resource");
Assert.assertEquals(resource.getClass(), TestResource2.class);
final List<? extends ResourceConfig> resources = reader.getResourceList("resources");
Assert.assertEquals(resources.size(), 2);
// A missing resource should produce an empty list
final List<? extends ResourceConfig> missing = reader.getResourceList("missing");
Assert.assertEquals(missing.size(), 0);
}
@Override
public int getPriority() {
return 0;
}
}
|
apache-2.0
|
torrances/swtk-commons
|
commons-dict-wordnet-indexbyid/src/main/java/org/swtk/commons/dict/wordnet/indexbyid/instance/p0/p7/WordnetNounIndexIdInstance0747.java
|
11410
|
package org.swtk.commons.dict.wordnet.indexbyid.instance.p0.p7;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Map;
import java.util.TreeMap;
import org.swtk.common.dict.dto.wordnet.IndexNoun;
import com.trimc.blogger.commons.utils.GsonUtils;
public final class WordnetNounIndexIdInstance0747 {
private static Map<String, Collection<IndexNoun>> map = new TreeMap<String, Collection<IndexNoun>>();
static {
add("07470250", "{\"term\":\"fundraiser\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07470250\", \"10136135\"]}");
add("07470370", "{\"term\":\"photo op\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07470370\"]}");
add("07470370", "{\"term\":\"photo opportunity\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07470370\"]}");
add("07470594", "{\"term\":\"sleepover\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07470594\"]}");
add("07470798", "{\"term\":\"sellout\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"00751514\", \"01117127\", \"07470798\", \"09851585\"]}");
add("07470961", "{\"term\":\"competition\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"10552570\", \"01171059\", \"07470961\", \"13859208\"]}");
add("07470961", "{\"term\":\"contest\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"01171451\", \"07470961\"]}");
add("07471429", "{\"term\":\"athletic competition\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07471429\"]}");
add("07471429", "{\"term\":\"athletic contest\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07471429\"]}");
add("07471429", "{\"term\":\"athletics\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"00888260\", \"07471429\", \"00524569\"]}");
add("07471697", "{\"term\":\"bout\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"00511734\", \"07471697\", \"15144014\", \"15281525\"]}");
add("07471807", "{\"term\":\"decathlon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07471807\"]}");
add("07471917", "{\"term\":\"olympiad\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07471917\", \"15229516\"]}");
add("07471917", "{\"term\":\"olympic games\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00517776\", \"07471917\"]}");
add("07471917", "{\"term\":\"olympics\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07471917\"]}");
add("07472102", "{\"term\":\"special olympics\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07472102\"]}");
add("07472276", "{\"term\":\"winter olympic games\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07472276\"]}");
add("07472276", "{\"term\":\"winter olympics\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07472276\"]}");
add("07472390", "{\"term\":\"prelim\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07213673\", \"07472390\"]}");
add("07472390", "{\"term\":\"preliminary\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07341613\", \"07472390\"]}");
add("07472513", "{\"term\":\"pentathlon\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07472513\"]}");
add("07472625", "{\"term\":\"championship\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"01216376\", \"07472625\", \"13972130\"]}");
add("07472727", "{\"term\":\"chicken\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"07472727\", \"10801493\", \"01794266\", \"07660576\"]}");
add("07472890", "{\"term\":\"cliffhanger\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"06634188\", \"07472890\"]}");
add("07473003", "{\"term\":\"dogfight\", \"synsetCount\":4, \"upperType\":\"NOUN\", \"ids\":[\"00973906\", \"00976310\", \"01174931\", \"07473003\"]}");
add("07473244", "{\"term\":\"race\", \"synsetCount\":6, \"upperType\":\"NOUN\", \"ids\":[\"04044349\", \"11443311\", \"08127614\", \"07984596\", \"07473244\", \"07487797\"]}");
add("07473683", "{\"term\":\"auto race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07473683\"]}");
add("07473683", "{\"term\":\"automobile race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07473683\"]}");
add("07473683", "{\"term\":\"car race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07473683\"]}");
add("07473857", "{\"term\":\"grand prix\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07473857\"]}");
add("07473948", "{\"term\":\"rally\", \"synsetCount\":5, \"upperType\":\"NOUN\", \"ids\":[\"01169007\", \"07473948\", \"13567048\", \"00046387\", \"08375259\"]}");
add("07474040", "{\"term\":\"bicycle race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474040\"]}");
add("07474040", "{\"term\":\"bike race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474040\"]}");
add("07474184", "{\"term\":\"tour de france\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474184\"]}");
add("07474344", "{\"term\":\"crit\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474344\"]}");
add("07474344", "{\"term\":\"criterium\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474344\"]}");
add("07474480", "{\"term\":\"boat race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474480\"]}");
add("07474599", "{\"term\":\"burnup\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"00357342\", \"07474599\"]}");
add("07474735", "{\"term\":\"chariot race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474735\"]}");
add("07474825", "{\"term\":\"dog racing\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474825\"]}");
add("07474949", "{\"term\":\"sailing-race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474949\"]}");
add("07474949", "{\"term\":\"yacht race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07474949\"]}");
add("07475061", "{\"term\":\"foot race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475061\"]}");
add("07475061", "{\"term\":\"footrace\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475061\"]}");
add("07475061", "{\"term\":\"run\", \"synsetCount\":16, \"upperType\":\"NOUN\", \"ids\":[\"00309837\", \"05052992\", \"07422377\", \"07457610\", \"07488069\", \"09438816\", \"13782269\", \"14019420\", \"15286931\", \"00294760\", \"00309697\", \"00559920\", \"08477596\", \"07475061\", \"00792739\", \"00190414\"]}");
add("07475360", "{\"term\":\"fun run\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475360\"]}");
add("07475360", "{\"term\":\"funrun\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475360\"]}");
add("07475503", "{\"term\":\"marathon\", \"synsetCount\":3, \"upperType\":\"NOUN\", \"ids\":[\"01289062\", \"07475503\", \"00799022\"]}");
add("07475608", "{\"term\":\"freestyle\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475608\"]}");
add("07475750", "{\"term\":\"cross country\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475750\"]}");
add("07475861", "{\"term\":\"iditarod\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475861\"]}");
add("07475861", "{\"term\":\"iditarod trail dog sled race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07475861\"]}");
add("07476007", "{\"term\":\"three-day event\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07476007\"]}");
add("07476245", "{\"term\":\"heat\", \"synsetCount\":7, \"upperType\":\"NOUN\", \"ids\":[\"03514169\", \"07476245\", \"14061746\", \"04635549\", \"05733472\", \"05023355\", \"11486442\"]}");
add("07476368", "{\"term\":\"horse race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07476368\"]}");
add("07476607", "{\"term\":\"claiming race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07476607\"]}");
add("07476788", "{\"term\":\"selling race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07476788\"]}");
add("07476913", "{\"term\":\"harness race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07476913\"]}");
add("07476913", "{\"term\":\"harness racing\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07476913\"]}");
add("07477076", "{\"term\":\"kentucky derby\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07477076\"]}");
add("07477226", "{\"term\":\"preakness\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07477226\"]}");
add("07477362", "{\"term\":\"belmont stakes\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07477362\"]}");
add("07477504", "{\"term\":\"stake race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07477504\"]}");
add("07477650", "{\"term\":\"steeplechase\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07477650\", \"07477986\"]}");
add("07477765", "{\"term\":\"grand national\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07477765\"]}");
add("07477874", "{\"term\":\"obstacle race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07477874\"]}");
add("07477986", "{\"term\":\"steeplechase\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07477650\", \"07477986\"]}");
add("07478128", "{\"term\":\"thoroughbred race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07478128\"]}");
add("07478280", "{\"term\":\"potato race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07478280\"]}");
add("07478427", "{\"term\":\"sack race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07478427\"]}");
add("07478564", "{\"term\":\"scratch race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07478564\"]}");
add("07478690", "{\"term\":\"ski race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07478690\"]}");
add("07478690", "{\"term\":\"skiing race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07478690\"]}");
add("07478829", "{\"term\":\"downhill\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07478829\", \"09292378\"]}");
add("07478907", "{\"term\":\"slalom\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07478907\"]}");
add("07479040", "{\"term\":\"relay\", \"synsetCount\":5, \"upperType\":\"NOUN\", \"ids\":[\"04080072\", \"07479040\", \"08001975\", \"08235886\", \"00319393\"]}");
add("07479040", "{\"term\":\"relay race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07479040\"]}");
add("07479191", "{\"term\":\"repechage\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07479191\"]}");
add("07479359", "{\"term\":\"torch race\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07479359\"]}");
add("07479525", "{\"term\":\"world cup\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07479525\"]}");
add("07479682", "{\"term\":\"tournament\", \"synsetCount\":2, \"upperType\":\"NOUN\", \"ids\":[\"07487467\", \"07479682\"]}");
add("07479682", "{\"term\":\"tourney\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07479682\"]}");
add("07479926", "{\"term\":\"elimination tournament\", \"synsetCount\":1, \"upperType\":\"NOUN\", \"ids\":[\"07479926\"]}");
    }
    private static void add(final String ID, final String JSON) {
        IndexNoun indexNoun = GsonUtils.toObject(JSON, IndexNoun.class);
        Collection<IndexNoun> list = map.containsKey(ID) ? map.get(ID) : new ArrayList<IndexNoun>();
        list.add(indexNoun);
        map.put(ID, list);
    }
    // The backing map is keyed on synset ID, so the lookup methods take an ID.
    public static Collection<IndexNoun> get(final String ID) {
        return map.get(ID);
    }
    public static boolean has(final String ID) {
        return map.containsKey(ID);
    }
    public static Collection<String> ids() {
        return map.keySet();
    }
}
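// Usage sketch (illustrative only; the enclosing class name is not visible in
// this fragment, so the static calls are written unqualified):
//   if (has("07475503")) {
//       Collection<IndexNoun> nouns = get("07475503"); // the "marathon" synset
//   }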
|
apache-2.0
|
mharrison13/petfoster
|
src/app/components/sign-out.component.ts
|
443
|
import {Component, OnInit} from "@angular/core";
import {Status} from "../classes/status";
import {SignOutService} from "../services/sign-out.service";
@Component({
templateUrl: "./templates/sign-out.php"
})
export class SignOutComponent implements OnInit {
	status: Status = null;
	constructor(private signOutService: SignOutService) {}
ngOnInit(): void {
this.signOutService.signOut()
.subscribe(status => this.status = status);
}
}
|
apache-2.0
|
nkolosnjaji/generator-jhipster
|
generators/client/templates/angular/src/test/javascript/spec/app/admin/user-management/_user-management-dialog.component.spec.ts
|
5392
|
<%#
Copyright 2013-2018 the original author or authors from the JHipster project.
This file is part of the JHipster project, see http://www.jhipster.tech/
for more information.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-%>
<%_
const tsKeyId = generateTestEntityId(pkType, prodDatabaseType);
_%>
import { ComponentFixture, TestBed, async, inject, fakeAsync, tick } from '@angular/core/testing';
import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap';
import { Observable } from 'rxjs/Observable';
import { JhiEventManager } from 'ng-jhipster';
import { <%=angularXAppName%>TestModule } from '../../../test.module';
import { UserMgmtDialogComponent } from '../../../../../../main/webapp/app/admin/user-management/user-management-dialog.component';
import { UserService, User<% if(enableTranslation) { %>, JhiLanguageHelper<% } %> } from '../../../../../../main/webapp/app/shared';
describe('Component Tests', () => {
describe('User Management Dialog Component', () => {
let comp: UserMgmtDialogComponent;
let fixture: ComponentFixture<UserMgmtDialogComponent>;
let service: UserService;
let mockEventManager: any;
let mockActiveModal: any;
<%_ if(enableTranslation) { _%>
let mockLanguageHelper: any;
<%_ } _%>
beforeEach(async(() => {
TestBed.configureTestingModule({
imports: [<%=angularXAppName%>TestModule],
declarations: [UserMgmtDialogComponent],
providers: [
UserService
]
})
.overrideTemplate(UserMgmtDialogComponent, '')
.compileComponents();
}));
beforeEach(() => {
fixture = TestBed.createComponent(UserMgmtDialogComponent);
comp = fixture.componentInstance;
service = fixture.debugElement.injector.get(UserService);
mockEventManager = fixture.debugElement.injector.get(JhiEventManager);
mockActiveModal = fixture.debugElement.injector.get(NgbActiveModal);
<%_ if(enableTranslation) { _%>
mockLanguageHelper = fixture.debugElement.injector.get(JhiLanguageHelper);
<%_ } _%>
});
describe('OnInit', () => {
it('Should load authorities and language on init',
inject([],
fakeAsync(() => {
// GIVEN
spyOn(service, 'authorities').and.returnValue(Observable.of(['USER']));
// WHEN
comp.ngOnInit();
// THEN
expect(service.authorities).toHaveBeenCalled();
expect(comp.authorities).toEqual(['USER']);
<%_ if(enableTranslation) { _%>
expect(mockLanguageHelper.getAllSpy).toHaveBeenCalled();
<%_ } _%>
})
)
);
});
describe('save', () => {
it('Should call update service on save for existing user',
inject([],
fakeAsync(() => {
// GIVEN
const entity = new User(<%- tsKeyId %>);
spyOn(service, 'update').and.returnValue(Observable.of(entity));
comp.user = entity;
// WHEN
comp.save();
tick(); // simulate async
// THEN
expect(service.update).toHaveBeenCalledWith(entity);
expect(comp.isSaving).toEqual(false);
expect(mockEventManager.broadcastSpy).toHaveBeenCalledWith({ name: 'userListModification', content: 'OK'});
expect(mockActiveModal.dismissSpy).toHaveBeenCalled();
})
)
);
it('Should call create service on save for new user',
inject([],
fakeAsync(() => {
// GIVEN
const entity = new User();
spyOn(service, 'create').and.returnValue(Observable.of(entity));
comp.user = entity;
// WHEN
comp.save();
tick(); // simulate async
// THEN
expect(service.create).toHaveBeenCalledWith(entity);
expect(comp.isSaving).toEqual(false);
expect(mockEventManager.broadcastSpy).toHaveBeenCalledWith({ name: 'userListModification', content: 'OK'});
expect(mockActiveModal.dismissSpy).toHaveBeenCalled();
})
)
);
});
});
});
|
apache-2.0
|
TFarla/pong
|
public/js/game.js
|
11649
|
var gameProperties = {
screenWidth: 640,
screenHeight: 480,
dashSize: 5,
paddleLeft_x: 50,
paddleRight_x: 590,
paddleVelocity: 600,
paddleSegmentsMax: 4,
paddleSegmentHeight: 4,
paddleSegmentAngle: 15,
paddleTopGap: 22,
ballVelocity: 500,
ballRandomStartingAngleLeft: [-120, 120],
ballRandomStartingAngleRight: [-60, 60],
ballStartDelay: 2,
ballVelocityIncrement: 25,
ballReturnCount: 4,
scoreToWin: 11,
};
var graphicAssets = {
ballURL: 'assets/ball.png',
ballName: 'ball',
paddleURL: 'assets/paddle.png',
paddleName: 'paddle',
};
var soundAssets = {
ballBounceURL: 'assets/ballBounce',
ballBounceName: 'ballBounce',
ballHitURL: 'assets/ballHit',
ballHitName: 'ballHit',
ballMissedURL: 'assets/ballMissed',
ballMissedName: 'ballMissed',
mp4URL: '.m4a',
oggURL: '.ogg',
};
var fontAssets = {
scoreLeft_x: gameProperties.screenWidth * 0.25,
scoreRight_x: gameProperties.screenWidth * 0.75,
scoreTop_y: 10,
scoreFontStyle: {
font: '80px Arial',
fill: '#FFFFFF',
align: 'center'
},
instructionsFontStyle: {
font: '24px Arial',
fill: '#FFFFFF',
align: 'center'
},
}
var labels = {
clickToStart: 'Left paddle: A to move up, Z to move down.\n\nRight paddle: UP and DOWN arrow keys.\n\n- click to start -',
winner: 'Winner!',
};
var mainState = function (game) {
this.backgroundGraphics;
this.ballSprite;
this.paddleLeftSprite;
this.paddleRightSprite;
this.paddleGroup;
this.paddleLeft_up;
this.paddleLeft_down;
this.paddleRight_up;
this.paddleRight_down;
this.missedSide;
this.scoreLeft;
this.scoreRight;
this.tf_scoreLeft;
this.tf_scoreRight;
this.sndBallHit;
this.sndBallBounce;
this.sndBallMissed;
this.instructions;
this.winnerLeft;
this.winnerRight;
this.ballVelocity;
}
mainState.prototype = {
preload: function () {
game.load.image(graphicAssets.ballName, graphicAssets.ballURL);
game.load.image(graphicAssets.paddleName, graphicAssets.paddleURL);
game.load.audio(soundAssets.ballBounceName, [soundAssets.ballBounceURL +
soundAssets.mp4URL, soundAssets.ballBounceURL + soundAssets.oggURL
]);
game.load.audio(soundAssets.ballHitName, [soundAssets.ballHitURL +
soundAssets.mp4URL, soundAssets.ballHitURL + soundAssets.oggURL
]);
game.load.audio(soundAssets.ballMissedName, [soundAssets.ballMissedURL +
soundAssets.mp4URL, soundAssets.ballMissedURL + soundAssets.oggURL
]);
},
create: function () {
this.initGraphics();
this.initPhysics();
this.initKeyboard();
this.initSounds();
this.startDemo();
},
update: function () {
this.moveLeftPaddle();
this.moveRightPaddle();
game.physics.arcade.overlap(this.ballSprite, this.paddleGroup, this.collideWithPaddle,
null, this);
if (this.ballSprite.body.blocked.up || this.ballSprite.body.blocked.down ||
this.ballSprite.body.blocked.left || this.ballSprite.body.blocked.right
) {
this.sndBallBounce.play();
}
},
initGraphics: function () {
this.backgroundGraphics = game.add.graphics(0, 0);
this.backgroundGraphics.lineStyle(2, 0xFFFFFF, 1);
for (var y = 0; y < gameProperties.screenHeight; y += gameProperties.dashSize *
2) {
this.backgroundGraphics.moveTo(game.world.centerX, y);
this.backgroundGraphics.lineTo(game.world.centerX, y + gameProperties
.dashSize);
}
this.ballSprite = game.add.sprite(game.world.centerX, game.world.centerY,
graphicAssets.ballName);
this.ballSprite.anchor.set(0.5, 0.5);
this.paddleLeftSprite = game.add.sprite(gameProperties.paddleLeft_x,
game.world.centerY, graphicAssets.paddleName);
this.paddleLeftSprite.anchor.set(0.5, 0.5);
this.paddleRightSprite = game.add.sprite(gameProperties.paddleRight_x,
game.world.centerY, graphicAssets.paddleName);
this.paddleRightSprite.anchor.set(0.5, 0.5);
this.tf_scoreLeft = game.add.text(fontAssets.scoreLeft_x, fontAssets.scoreTop_y,
"0", fontAssets.scoreFontStyle);
this.tf_scoreLeft.anchor.set(0.5, 0);
this.tf_scoreRight = game.add.text(fontAssets.scoreRight_x, fontAssets.scoreTop_y,
"0", fontAssets.scoreFontStyle);
this.tf_scoreRight.anchor.set(0.5, 0);
this.instructions = game.add.text(game.world.centerX, game.world.centerY,
labels.clickToStart, fontAssets.instructionsFontStyle);
this.instructions.anchor.set(0.5, 0.5);
this.winnerLeft = game.add.text(gameProperties.screenWidth * 0.25,
gameProperties.screenHeight * 0.25, labels.winner, fontAssets.instructionsFontStyle
);
this.winnerLeft.anchor.set(0.5, 0.5);
this.winnerRight = game.add.text(gameProperties.screenWidth * 0.75,
gameProperties.screenHeight * 0.25, labels.winner, fontAssets.instructionsFontStyle
);
this.winnerRight.anchor.set(0.5, 0.5);
this.hideTextFields();
},
initPhysics: function () {
game.physics.startSystem(Phaser.Physics.ARCADE);
game.physics.enable(this.ballSprite, Phaser.Physics.ARCADE);
this.ballSprite.checkWorldBounds = true;
this.ballSprite.body.collideWorldBounds = true;
this.ballSprite.body.immovable = true;
this.ballSprite.body.bounce.set(1);
this.ballSprite.events.onOutOfBounds.add(this.ballOutOfBounds, this);
this.paddleGroup = game.add.group();
this.paddleGroup.enableBody = true;
this.paddleGroup.physicsBodyType = Phaser.Physics.ARCADE;
this.paddleGroup.add(this.paddleLeftSprite);
this.paddleGroup.add(this.paddleRightSprite);
this.paddleGroup.setAll('checkWorldBounds', true);
this.paddleGroup.setAll('body.collideWorldBounds', true);
this.paddleGroup.setAll('body.immovable', true);
},
initKeyboard: function () {
this.paddleLeft_up = game.input.keyboard.addKey(Phaser.Keyboard.A);
this.paddleLeft_down = game.input.keyboard.addKey(Phaser.Keyboard.Z);
this.paddleRight_up = game.input.keyboard.addKey(Phaser.Keyboard.UP);
this.paddleRight_down = game.input.keyboard.addKey(Phaser.Keyboard.DOWN);
},
initSounds: function () {
this.sndBallHit = game.add.audio(soundAssets.ballHitName);
this.sndBallBounce = game.add.audio(soundAssets.ballBounceName);
this.sndBallMissed = game.add.audio(soundAssets.ballMissedName);
},
startDemo: function () {
this.ballSprite.visible = false;
this.resetBall();
this.enablePaddles(false);
this.enableBoundaries(true);
game.input.onDown.add(this.startGame, this);
this.instructions.visible = true;
},
startGame: function () {
game.input.onDown.remove(this.startGame, this);
this.enablePaddles(true);
this.enableBoundaries(false);
this.resetBall();
this.resetScores();
this.hideTextFields();
},
startBall: function () {
this.ballVelocity = gameProperties.ballVelocity;
this.ballReturnCount = 0;
this.ballSprite.visible = true;
var randomAngle = game.rnd.pick(gameProperties.ballRandomStartingAngleRight
.concat(gameProperties.ballRandomStartingAngleLeft));
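        // After a missed point, constrain the serve angle so the ball heads
        // toward the side that just missed; on the first serve, pick from
        // either angle set.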
if (this.missedSide == 'right') {
randomAngle = game.rnd.pick(gameProperties.ballRandomStartingAngleRight);
} else if (this.missedSide == 'left') {
randomAngle = game.rnd.pick(gameProperties.ballRandomStartingAngleLeft);
}
game.physics.arcade.velocityFromAngle(randomAngle, gameProperties.ballVelocity,
this.ballSprite.body.velocity);
},
resetBall: function () {
this.ballSprite.reset(game.world.centerX, game.rnd.between(0,
gameProperties.screenHeight));
this.ballSprite.visible = false;
game.time.events.add(Phaser.Timer.SECOND * gameProperties.ballStartDelay,
this.startBall, this);
},
enablePaddles: function (enabled) {
this.paddleGroup.setAll('visible', enabled);
this.paddleGroup.setAll('body.enable', enabled);
this.paddleLeft_up.enabled = enabled;
this.paddleLeft_down.enabled = enabled;
this.paddleRight_up.enabled = enabled;
this.paddleRight_down.enabled = enabled;
this.paddleLeftSprite.y = game.world.centerY;
this.paddleRightSprite.y = game.world.centerY;
},
enableBoundaries: function (enabled) {
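        // The left/right world edges are only made solid during the demo, so
        // the ball bounces around the screen instead of leaving play and
        // scoring.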
game.physics.arcade.checkCollision.left = enabled;
game.physics.arcade.checkCollision.right = enabled;
},
moveLeftPaddle: function () {
if (this.paddleLeft_up.isDown) {
this.paddleLeftSprite.body.velocity.y = -gameProperties.paddleVelocity;
} else if (this.paddleLeft_down.isDown) {
this.paddleLeftSprite.body.velocity.y = gameProperties.paddleVelocity;
} else {
this.paddleLeftSprite.body.velocity.y = 0;
}
if (this.paddleLeftSprite.body.y < gameProperties.paddleTopGap) {
this.paddleLeftSprite.body.y = gameProperties.paddleTopGap;
}
},
moveRightPaddle: function () {
if (this.paddleRight_up.isDown) {
this.paddleRightSprite.body.velocity.y = -gameProperties.paddleVelocity;
} else if (this.paddleRight_down.isDown) {
this.paddleRightSprite.body.velocity.y = gameProperties.paddleVelocity;
} else {
this.paddleRightSprite.body.velocity.y = 0;
}
if (this.paddleRightSprite.body.y < gameProperties.paddleTopGap) {
this.paddleRightSprite.body.y = gameProperties.paddleTopGap;
}
},
collideWithPaddle: function (ball, paddle) {
this.sndBallHit.play();
var returnAngle;
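        // Work out which vertical segment of the paddle the ball struck;
        // segments further from the paddle centre return the ball at a
        // steeper angle (paddleSegmentAngle degrees per segment), which is
        // what lets players aim their returns.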
var segmentHit = Math.floor((ball.y - paddle.y) / gameProperties.paddleSegmentHeight);
if (segmentHit >= gameProperties.paddleSegmentsMax) {
segmentHit = gameProperties.paddleSegmentsMax - 1;
} else if (segmentHit <= -gameProperties.paddleSegmentsMax) {
segmentHit = -(gameProperties.paddleSegmentsMax - 1);
}
if (paddle.x < gameProperties.screenWidth * 0.5) {
returnAngle = segmentHit * gameProperties.paddleSegmentAngle;
game.physics.arcade.velocityFromAngle(returnAngle, this.ballVelocity,
this.ballSprite.body.velocity);
} else {
returnAngle = 180 - (segmentHit * gameProperties.paddleSegmentAngle);
if (returnAngle > 180) {
returnAngle -= 360;
}
game.physics.arcade.velocityFromAngle(returnAngle, this.ballVelocity,
this.ballSprite.body.velocity);
}
this.ballReturnCount++;
if (this.ballReturnCount >= gameProperties.ballReturnCount) {
this.ballReturnCount = 0;
this.ballVelocity += gameProperties.ballVelocityIncrement;
}
},
ballOutOfBounds: function () {
this.sndBallMissed.play();
if (this.ballSprite.x < 0) {
this.missedSide = 'left';
this.scoreRight++;
} else if (this.ballSprite.x > gameProperties.screenWidth) {
this.missedSide = 'right';
this.scoreLeft++;
}
this.updateScoreTextFields();
if (this.scoreLeft >= gameProperties.scoreToWin) {
this.winnerLeft.visible = true;
this.startDemo();
} else if (this.scoreRight >= gameProperties.scoreToWin) {
this.winnerRight.visible = true;
this.startDemo();
} else {
this.resetBall();
}
},
resetScores: function () {
this.scoreLeft = 0;
this.scoreRight = 0;
this.updateScoreTextFields();
},
updateScoreTextFields: function () {
this.tf_scoreLeft.text = this.scoreLeft;
this.tf_scoreRight.text = this.scoreRight;
},
hideTextFields: function () {
this.instructions.visible = false;
this.winnerLeft.visible = false;
this.winnerRight.visible = false;
},
};
var game = new Phaser.Game(gameProperties.screenWidth, gameProperties.screenHeight,
Phaser.AUTO, 'gameDiv');
game.state.add('main', mainState);
game.state.start('main');
|
apache-2.0
|
apioo/psx-framework
|
tests/Console/RouteCommandTest.php
|
1773
|
<?php
/*
* PSX is an open source PHP framework to develop RESTful APIs.
* For the current version and information visit <https://phpsx.org>
*
* Copyright 2010-2022 Christoph Kappestein <christoph.kappestein@gmail.com>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
namespace PSX\Framework\Tests\Console;
use PSX\Framework\Test\ControllerTestCase;
use PSX\Framework\Test\Environment;
use PSX\Framework\Tests\Controller\Foo\Application\TestApiController;
use Symfony\Component\Console\Tester\CommandTester;
/**
* RouteCommandTest
*
* @author Christoph Kappestein <christoph.kappestein@gmail.com>
* @license http://www.apache.org/licenses/LICENSE-2.0
* @link http://phpsx.org
*/
class RouteCommandTest extends ControllerTestCase
{
public function testCommand()
{
$command = Environment::getService('console')->find('route');
$commandTester = new CommandTester($command);
$commandTester->execute(array(
));
$response = $commandTester->getDisplay();
$this->assertEquals('GET /controller ' . TestApiController::class, trim($response));
}
protected function getPaths()
{
return array(
[['GET'], '/controller', TestApiController::class],
);
}
}
|
apache-2.0
|
nsivabalan/ambry
|
ambry-clustermap/src/main/java/com.github.ambry.clustermap/PartitionLayout.java
|
9417
|
/**
* Copyright 2016 LinkedIn Corp. All rights reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
*/
package com.github.ambry.clustermap;
import java.io.DataInputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.json.JSONArray;
import org.json.JSONException;
import org.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* PartitionLayout of {@link Partition}s and {@link Replica}s on an Ambry cluster (see {@link HardwareLayout}).
*/
public class PartitionLayout {
private static final long MinPartitionId = 0;
private final HardwareLayout hardwareLayout;
private final String clusterName;
private final long version;
private final Map<ByteBuffer, Partition> partitionMap;
private long maxPartitionId;
private long allocatedRawCapacityInBytes;
private long allocatedUsableCapacityInBytes;
private final Logger logger = LoggerFactory.getLogger(getClass());
public PartitionLayout(HardwareLayout hardwareLayout, JSONObject jsonObject) throws JSONException {
if (logger.isTraceEnabled()) {
logger.trace("PartitionLayout " + hardwareLayout + ", " + jsonObject.toString());
}
this.hardwareLayout = hardwareLayout;
this.clusterName = jsonObject.getString("clusterName");
this.version = jsonObject.getLong("version");
this.partitionMap = new HashMap<ByteBuffer, Partition>();
for (int i = 0; i < jsonObject.getJSONArray("partitions").length(); ++i) {
addPartition(new Partition(this, jsonObject.getJSONArray("partitions").getJSONObject(i)));
}
validate();
}
// Constructor for initial PartitionLayout.
public PartitionLayout(HardwareLayout hardwareLayout) {
if (logger.isTraceEnabled()) {
logger.trace("PartitionLayout " + hardwareLayout);
}
this.hardwareLayout = hardwareLayout;
this.clusterName = hardwareLayout.getClusterName();
this.version = 1;
this.maxPartitionId = MinPartitionId;
this.partitionMap = new HashMap<ByteBuffer, Partition>();
validate();
}
public HardwareLayout getHardwareLayout() {
return hardwareLayout;
}
public String getClusterName() {
return clusterName;
}
public long getVersion() {
return version;
}
public long getPartitionCount() {
return partitionMap.size();
}
public long getPartitionInStateCount(PartitionState partitionState) {
int count = 0;
for (Partition partition : partitionMap.values()) {
if (partition.getPartitionState() == partitionState) {
count++;
}
}
return count;
}
public List<PartitionId> getPartitions() {
return new ArrayList<PartitionId>(partitionMap.values());
}
public List<PartitionId> getWritablePartitions() {
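    // Prefer partitions whose replicas are all up; if none are fully healthy,
    // fall back to every READ_WRITE partition.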
    List<PartitionId> writablePartitions = new ArrayList<PartitionId>();
    List<PartitionId> healthyWritablePartitions = new ArrayList<PartitionId>();
for (Partition partition : partitionMap.values()) {
if (partition.getPartitionState() == PartitionState.READ_WRITE) {
writablePartitions.add(partition);
boolean up = true;
for (Replica replica : partition.getReplicas()) {
if (replica.isDown()) {
up = false;
break;
}
}
if (up) {
healthyWritablePartitions.add(partition);
}
}
}
return healthyWritablePartitions.isEmpty() ? writablePartitions : healthyWritablePartitions;
}
public long getAllocatedRawCapacityInBytes() {
return allocatedRawCapacityInBytes;
}
private long calculateAllocatedRawCapacityInBytes() {
long allocatedRawCapacityInBytes = 0;
for (Partition partition : partitionMap.values()) {
allocatedRawCapacityInBytes += partition.getAllocatedRawCapacityInBytes();
}
return allocatedRawCapacityInBytes;
}
public long getAllocatedUsableCapacityInBytes() {
return allocatedUsableCapacityInBytes;
}
private long calculateAllocatedUsableCapacityInBytes() {
long allocatedUsableCapacityInBytes = 0;
for (Partition partition : partitionMap.values()) {
allocatedUsableCapacityInBytes += partition.getReplicaCapacityInBytes();
}
return allocatedUsableCapacityInBytes;
}
/**
* Adds Partition to and validates Partition is unique. A duplicate Partition results in an exception.
*/
private void addPartition(Partition partition) {
if (partitionMap.put(ByteBuffer.wrap(partition.getBytes()), partition) != null) {
throw new IllegalStateException("Duplicate Partition detected: " + partition.toString());
}
if (partition.getId() >= maxPartitionId) {
maxPartitionId = partition.getId() + 1;
}
}
protected void validateClusterName() {
if (clusterName == null) {
throw new IllegalStateException("ClusterName cannot be null.");
}
if (!hardwareLayout.getClusterName().equals(clusterName)) {
throw new IllegalStateException(
"PartitionLayout cluster name does not match that of HardwareLayout: " + clusterName + " != " + hardwareLayout
.getClusterName());
}
}
protected void validatePartitionIds() {
for (Partition partition : partitionMap.values()) {
long partitionId = partition.getId();
if (partitionId < MinPartitionId) {
throw new IllegalStateException("Partition has invalid ID: Less than " + MinPartitionId);
}
if (partitionId >= maxPartitionId) {
throw new IllegalStateException("Partition has invalid ID: Greater than or equal to " + maxPartitionId);
}
}
}
protected void validateUniqueness() {
// Validate uniqueness of each logical component. Partition uniqueness is validated by method addPartition.
Set<Replica> replicaSet = new HashSet<Replica>();
for (Partition partition : partitionMap.values()) {
for (Replica replica : partition.getReplicas()) {
if (!replicaSet.add(replica)) {
throw new IllegalStateException("Duplicate Replica detected: " + replica.toString());
}
}
}
}
protected void validate() {
logger.trace("begin validate.");
validateClusterName();
validatePartitionIds();
validateUniqueness();
this.allocatedRawCapacityInBytes = calculateAllocatedRawCapacityInBytes();
this.allocatedUsableCapacityInBytes = calculateAllocatedUsableCapacityInBytes();
logger.trace("complete validate.");
}
protected long getNewPartitionId() {
long currentPartitionId = maxPartitionId;
maxPartitionId++;
return currentPartitionId;
}
// Creates a Partition and corresponding Replicas for each specified disk
public Partition addNewPartition(List<Disk> disks, long replicaCapacityInBytes) {
if (disks == null || disks.size() == 0) {
throw new IllegalArgumentException("Disks either null or of zero length.");
}
Partition partition = new Partition(getNewPartitionId(), PartitionState.READ_WRITE, replicaCapacityInBytes);
for (Disk disk : disks) {
partition.addReplica(new Replica(partition, disk));
}
addPartition(partition);
validate();
return partition;
}
// Adds replicas to the partition for each specified disk
public void addNewReplicas(Partition partition, List<Disk> disks) {
if (partition == null || disks == null || disks.size() == 0) {
throw new IllegalArgumentException("Partition or disks is null or disks is of zero length");
}
for (Disk disk : disks) {
partition.addReplica(new Replica(partition, disk));
}
validate();
}
/**
* Gets Partition with specified byte-serialized ID.
*
* @param stream byte-serialized partition ID
* @return requested Partition else null.
*/
public Partition getPartition(DataInputStream stream) throws IOException {
byte[] partitionBytes = Partition.readPartitionBytesFromStream(stream);
return partitionMap.get(ByteBuffer.wrap(partitionBytes));
}
public JSONObject toJSONObject() throws JSONException {
JSONObject jsonObject = new JSONObject().put("clusterName", hardwareLayout.getClusterName())
.put("version", version)
.put("partitions", new JSONArray());
for (Partition partition : partitionMap.values()) {
jsonObject.accumulate("partitions", partition.toJSONObject());
}
return jsonObject;
}
@Override
public String toString() {
try {
return toJSONObject().toString(2);
} catch (JSONException e) {
logger.error("JSONException caught in toString: {}", e.getCause());
}
return null;
}
  @Override
  public boolean equals(Object o) {
    if (this == o) {
      return true;
    }
    if (o == null || getClass() != o.getClass()) {
      return false;
    }
    PartitionLayout that = (PartitionLayout) o;
    if (!clusterName.equals(that.clusterName)) {
      return false;
    }
    return hardwareLayout.equals(that.hardwareLayout);
  }
  @Override
  public int hashCode() {
    // Keep the equals/hashCode contract: hash on the same fields equals uses.
    int result = clusterName.hashCode();
    return 31 * result + hardwareLayout.hashCode();
  }
}
|
apache-2.0
|
FrankHossfeld/Training
|
GWT-OLD/Module0812DomainServer/src/de/gishmo/module0812/domain/server/filter/LoggingResponseFilter.java
|
1219
|
package de.gishmo.module0812.domain.server.filter;
import java.io.IOException;
import javax.ws.rs.container.ContainerRequestContext;
import javax.ws.rs.container.ContainerResponseContext;
import javax.ws.rs.container.ContainerResponseFilter;
import org.codehaus.jackson.map.ObjectMapper;
public class LoggingResponseFilter implements ContainerResponseFilter {
public void filter(ContainerRequestContext requestContext,
ContainerResponseContext responseContext) throws IOException {
String method = requestContext.getMethod();
System.out
.println("=====================================================================================================================");
System.out.println("DomainService: Requesting " + method + " for path " + requestContext.getUriInfo().getPath());
Object entity = responseContext.getEntity();
if (entity != null) {
System.out.println("DomainService: Response " + new ObjectMapper().writerWithDefaultPrettyPrinter()
.writeValueAsString(entity));
}
System.out
.println("=====================================================================================================================");
}
}
|
apache-2.0
|
StateStrategyPOC/Client
|
src/main/java/client_store_actions/ClientSetPlayerAction.java
|
1045
|
package client_store_actions;
import common.PlayerToken;
import common.StoreAction;
/**
* An action for signalling the availability of an in game identifier for the Player
*/
public class ClientSetPlayerAction extends StoreAction {
private final String playerName;
private final PlayerToken playerToken;
public ClientSetPlayerAction(String playerName, PlayerToken playerToken) {
super("@CLIENT_SET_PLAYER","@CLIENT_GROUP");
this.playerName = playerName;
this.playerToken = playerToken;
}
public String getPlayerName() {
return playerName;
}
public PlayerToken getPlayerToken() {
return playerToken;
}
@Override
public String toString() {
return "ClientSetPlayerAction{" +
"playerName='" + playerName + '\'' +
", playerToken=" + playerToken +
", actionIdentifier='" + actionIdentifier + '\'' +
", actionGroupIdentifier='" + actionGroupIdentifier + '\'' +
'}';
}
}
|
apache-2.0
|
zuowang/Paddle
|
paddle/cuda/src/hl_cuda_cublas.cc
|
9936
|
/* Copyright (c) 2016 Baidu, Inc. All Rights Reserve.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License. */
#include <sys/time.h>
#include <mutex>
#include "hl_cuda_cublas.h"
#include "hl_thread.ph"
#include "hl_dso_loader.h"
#include "paddle/utils/Logging.h"
namespace dynload {
std::once_flag cublas_dso_flag;
void* cublas_dso_handle = nullptr;
/**
* The following macro definition can generate structs
* (for each function) to dynamic load cublas routine
* via operator overloading.
*
* note: default dynamic linked libs
*/
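// For example, DYNAMIC_LOAD_CUBLAS_WRAP(cublasSgemm) defines a struct
// DynLoad__cublasSgemm whose templated operator() resolves the cublasSgemm
// symbol from the cublas shared library on first use (guarded by
// std::call_once) and forwards its arguments, so call sites read like
// ordinary calls: dynload::cublasSgemm(handle, ...).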
#ifdef PADDLE_USE_DSO
#define DYNAMIC_LOAD_CUBLAS_WRAP(__name) \
struct DynLoad__##__name { \
template <typename... Args> \
cublasStatus_t operator()(Args... args) { \
typedef cublasStatus_t (*cublasFunc)(Args...); \
std::call_once(cublas_dso_flag, GetCublasDsoHandle, \
&cublas_dso_handle); \
void* p_##__name = dlsym(cublas_dso_handle, #__name); \
return reinterpret_cast<cublasFunc>(p_##__name)(args...); \
} \
} __name; // struct DynLoad__##__name
#else
#define DYNAMIC_LOAD_CUBLAS_WRAP(__name) \
struct DynLoad__##__name { \
template <typename... Args> \
cublasStatus_t operator()(Args... args) { \
return __name(args...); \
} \
} __name; // struct DynLoad__##__name
#endif
#define DYNAMIC_LOAD_CUBLAS_V2_WRAP(__name) \
DYNAMIC_LOAD_CUBLAS_WRAP(__name)
// include all needed cublas functions in HPPL
#define CUBLAS_BLAS_ROUTINE_EACH(__macro) \
__macro(cublasSgemv) \
__macro(cublasDgemv) \
__macro(cublasSgemm) \
__macro(cublasDgemm) \
__macro(cublasSgeam) \
__macro(cublasDgeam) \
DYNAMIC_LOAD_CUBLAS_V2_WRAP(cublasCreate)
DYNAMIC_LOAD_CUBLAS_V2_WRAP(cublasDestroy)
DYNAMIC_LOAD_CUBLAS_V2_WRAP(cublasSetStream)
DYNAMIC_LOAD_CUBLAS_V2_WRAP(cublasSetPointerMode)
DYNAMIC_LOAD_CUBLAS_V2_WRAP(cublasGetPointerMode)
DYNAMIC_LOAD_CUBLAS_WRAP(cublasSgemmBatched)
DYNAMIC_LOAD_CUBLAS_WRAP(cublasDgemmBatched)
DYNAMIC_LOAD_CUBLAS_WRAP(cublasCgemmBatched)
DYNAMIC_LOAD_CUBLAS_WRAP(cublasZgemmBatched)
CUBLAS_BLAS_ROUTINE_EACH(DYNAMIC_LOAD_CUBLAS_V2_WRAP)
#undef DYNAMIC_LOAD_CUBLAS_WRAP
#undef DYNAMIC_LOAD_CUBLAS_V2_WRAP
#undef CUBLAS_BLAS_ROUTINE_EACH
} /* namespace dynload */
#ifndef PADDLE_TYPE_DOUBLE
#define CUBLAS_GEAM dynload::cublasSgeam
#define CUBLAS_GEMV dynload::cublasSgemv
#define CUBLAS_GEMM dynload::cublasSgemm
#else
#define CUBLAS_GEAM dynload::cublasDgeam
#define CUBLAS_GEMV dynload::cublasDgemv
#define CUBLAS_GEMM dynload::cublasDgemm
#endif
const char* hl_cublas_get_error_string(cublasStatus_t status) {
switch(status) {
case CUBLAS_STATUS_NOT_INITIALIZED:
return "[cublas status]: not initialized";
case CUBLAS_STATUS_ALLOC_FAILED:
return "[cublas status]: allocate failed";
case CUBLAS_STATUS_INVALID_VALUE:
return "[cublas status]: invalid value";
case CUBLAS_STATUS_ARCH_MISMATCH:
return "[cublas status]: arch mismatch";
case CUBLAS_STATUS_MAPPING_ERROR:
return "[cublas status]: mapping error";
case CUBLAS_STATUS_EXECUTION_FAILED:
return "[cublas status]: execution failed";
case CUBLAS_STATUS_INTERNAL_ERROR:
return "[cublas status]: internal error";
case CUBLAS_STATUS_SUCCESS:
return "[cublas status]: success";
default:
return "[cublas status]: unknown error";
}
}
/**
 * Checks a built-in cublas call using glog; the macro also supports
 * the << operator for appending more detailed error info.
*/
cublasStatus_t g_cublasStat;
#define CHECK_CUBLAS(cublas_func) \
g_cublasStat = cublas_func; \
CHECK_EQ(CUBLAS_STATUS_SUCCESS, g_cublasStat) \
<< "Cublas Error: " \
<< hl_cublas_get_error_string(g_cublasStat) \
<< " "
void hl_cublas_init(cublasHandle_t *cublas_handle, cudaStream_t stream) {
CHECK_CUBLAS(dynload::cublasCreate(cublas_handle))
<< "[cublas init] Cublas create handle faild!";
CHECK_CUBLAS(dynload::cublasSetStream(*cublas_handle, stream))
<< "[cublas init] Cublas set stream faild!";
}
void hl_matrix_transpose(real *A_d,
real *C_d,
int dimM,
int dimN,
int lda,
int ldc) {
real alpha = 1.0;
real beta = 0.0;
CHECK_NOTNULL(A_d);
CHECK_NOTNULL(C_d);
CHECK_CUBLAS(CUBLAS_GEAM(t_resource.handle,
CUBLAS_OP_T, CUBLAS_OP_N,
dimM, dimN,
&alpha, A_d, lda,
&beta, nullptr, dimM,
C_d, ldc));
CHECK_SYNC("hl_matrix_transpose failed");
}
void hl_matrix_transpose(real *A_d, real *C_d, int dimM, int dimN) {
hl_matrix_transpose(A_d, C_d, dimM, dimN, dimN, dimM);
}
void hl_matrix_mul(real *A_d, hl_trans_op_t transa,
real *B_d, hl_trans_op_t transb,
real *C_d,
int dimM, int dimN, int dimK,
real alpha, real beta,
int lda, int ldb, int ldc) {
CHECK_NOTNULL(A_d);
CHECK_NOTNULL(B_d);
CHECK_NOTNULL(C_d);
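  // Shapes where one output dimension is 1 are dispatched to the GEMV
  // (matrix-vector) path below instead of running a full GEMM.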
if (dimN == 1 && dimM != 1 && dimK != 1 && transb == HPPL_OP_N) {
int m = (transa == HPPL_OP_N) ? dimM : dimK;
int n = (transa == HPPL_OP_N) ? dimK : dimM;
hl_matrix_mul_vector(A_d, transa, B_d, C_d, m, n,
alpha, beta, lda, ldb, ldc);
return;
}
if (dimM == 1 && dimN != 1 && dimK != 1 && transa == HPPL_OP_N) {
int m = (transb == HPPL_OP_N) ? dimK : dimN;
int n = (transb == HPPL_OP_N) ? dimN : dimK;
hl_trans_op_t trans = (transb == HPPL_OP_N) ? HPPL_OP_T : HPPL_OP_N;
hl_matrix_mul_vector(B_d, trans, A_d, C_d, m, n,
alpha, beta, ldb, 1, 1);
return;
}
cublasStatus_t stat;
if ((HPPL_OP_N == transa) && (HPPL_OP_N == transb)) {
stat = CUBLAS_GEMM(t_resource.handle,
CUBLAS_OP_N,
CUBLAS_OP_N,
dimN, dimM, dimK,
&alpha, B_d, ldb,
A_d, lda,
&beta, C_d, ldc);
} else if ((HPPL_OP_T == transa) && (HPPL_OP_N == transb)) {
stat = CUBLAS_GEMM(t_resource.handle,
CUBLAS_OP_N,
CUBLAS_OP_T,
dimN, dimM, dimK,
&alpha, B_d, ldb,
A_d, lda,
&beta, C_d, ldc);
} else if ((HPPL_OP_N == transa) && (HPPL_OP_T == transb)) {
stat = CUBLAS_GEMM(t_resource.handle,
CUBLAS_OP_T,
CUBLAS_OP_N,
dimN, dimM, dimK,
&alpha, B_d, ldb,
A_d, lda,
&beta, C_d, ldc);
} else {
LOG(FATAL) << "parameter transa error!";
}
CHECK_EQ(stat, CUBLAS_STATUS_SUCCESS) << hl_cublas_get_error_string(stat);
CHECK_SYNC("hl_matrix_mul failed");
}
void hl_matrix_mul(real *A_d, hl_trans_op_t transa,
real *B_d, hl_trans_op_t transb,
real *C_d,
int dimM, int dimN, int dimK,
real alpha, real beta) {
int lda = (HPPL_OP_N == transa) ? dimK : dimM;
int ldb = (HPPL_OP_N == transb) ? dimN : dimK;
int ldc = dimN;
hl_matrix_mul(A_d, transa, B_d, transb, C_d, dimM, dimN,
dimK, alpha, beta, lda, ldb, ldc);
}
void hl_matrix_mul_vector(real *A_d, hl_trans_op_t trans,
real *B_d, real *C_d,
int dimM, int dimN,
real alpha, real beta,
int lda, int incb, int incc) {
CHECK_NOTNULL(A_d);
CHECK_NOTNULL(B_d);
CHECK_NOTNULL(C_d);
cublasStatus_t stat;
if (HPPL_OP_N == trans) {
stat = CUBLAS_GEMV(t_resource.handle,
CUBLAS_OP_T,
dimN, dimM,
&alpha,
A_d, lda,
B_d, incb,
&beta,
C_d, incc);
} else if (HPPL_OP_T == trans) {
stat = CUBLAS_GEMV(t_resource.handle,
CUBLAS_OP_N,
dimN, dimM,
&alpha,
A_d, lda,
B_d, incb,
&beta,
C_d, incc);
} else {
LOG(FATAL) << "parameter transa error!";
}
CHECK_EQ(stat, CUBLAS_STATUS_SUCCESS) << hl_cublas_get_error_string(stat);
CHECK_SYNC("hl_matrix_mul_vector");
}
void hl_matrix_mul_vector(real *A_d, hl_trans_op_t trans,
real *B_d, real *C_d,
int dimM, int dimN,
real alpha, real beta) {
hl_matrix_mul_vector(A_d, trans, B_d, C_d, dimM, dimN,
alpha, beta, dimN, 1, 1);
}
|
apache-2.0
|
modaclouds/modaclouds-sla-core
|
sla-repository/src/main/java/eu/atos/sla/dao/IPolicyDAO.java
|
1954
|
/**
* Copyright 2015 Atos
* Contact: Atos <roman.sosa@atos.net>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package eu.atos.sla.dao;
import java.util.List;
import eu.atos.sla.datamodel.IPolicy;
/**
* DAO interface to access to the Policy information
*
* @author Pedro Rey - Atos
*
*/
public interface IPolicyDAO {
/**
* Returns the Policy from the database by its Id
*
* @param id
* of the Policy
* @return the corresponding Policy from the database
*/
public IPolicy getById(Long id);
/**
* Returns all the Policy stored in the database
*
* @return all the Policy stored in the database
*/
public List<IPolicy> getAll();
	/**
	 * Stores a Policy into the database
	 *
	 * @param policy
	 *            Policy to be saved.
	 * @return the stored Policy
	 */
	public IPolicy save(IPolicy policy);
	/**
	 * Updates a Policy in the database
	 *
	 * @param policy
	 *            Policy to be updated
	 * @return <code>true</code> if the Policy was updated correctly
	 */
	public boolean update(IPolicy policy);
	/**
	 * Deletes a Policy from the database
	 *
	 * @param policy
	 *            Policy to be deleted
	 * @return <code>true</code> if the Policy was deleted correctly
	 */
	public boolean delete(IPolicy policy);
}
|
apache-2.0
|
googleads/google-ads-ruby
|
lib/google/ads/google_ads/v8/services/managed_placement_view_service_pb.rb
|
991
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/ads/googleads/v8/services/managed_placement_view_service.proto
require 'google/protobuf'
require 'google/ads/google_ads/v8/resources/managed_placement_view_pb'
require 'google/api/annotations_pb'
require 'google/api/client_pb'
require 'google/api/field_behavior_pb'
require 'google/api/resource_pb'
Google::Protobuf::DescriptorPool.generated_pool.build do
add_file("google/ads/googleads/v8/services/managed_placement_view_service.proto", :syntax => :proto3) do
add_message "google.ads.googleads.v8.services.GetManagedPlacementViewRequest" do
optional :resource_name, :string, 1
end
end
end
module Google
module Ads
module GoogleAds
module V8
module Services
GetManagedPlacementViewRequest = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("google.ads.googleads.v8.services.GetManagedPlacementViewRequest").msgclass
end
end
end
end
end
|
apache-2.0
|
googleapis/google-api-java-client-services
|
clients/google-api-services-firebasestorage/v1beta/1.31.0/com/google/api/services/firebasestorage/v1beta/model/GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata.java
|
3688
|
/*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.firebasestorage.v1beta.model;
/**
* Metadata for MigrateLocationDestructively LRO.
*
* <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
* transmitted over HTTP when working with the Cloud Storage for Firebase API. For a detailed
* explanation see:
* <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
* </p>
*
* @author Google, Inc.
*/
@SuppressWarnings("javadoc")
public final class GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata extends com.google.api.client.json.GenericJson {
/**
* The time the LRO was created.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String createTime;
/**
* The time the LRO was last updated.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private String lastUpdateTime;
/**
* The current state of the migration.
* The value may be {@code null}.
*/
@com.google.api.client.util.Key
private java.lang.String state;
/**
* The time the LRO was created.
* @return value or {@code null} for none
*/
public String getCreateTime() {
return createTime;
}
/**
* The time the LRO was created.
* @param createTime createTime or {@code null} for none
*/
public GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata setCreateTime(String createTime) {
this.createTime = createTime;
return this;
}
/**
* The time the LRO was last updated.
* @return value or {@code null} for none
*/
public String getLastUpdateTime() {
return lastUpdateTime;
}
/**
* The time the LRO was last updated.
* @param lastUpdateTime lastUpdateTime or {@code null} for none
*/
public GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata setLastUpdateTime(String lastUpdateTime) {
this.lastUpdateTime = lastUpdateTime;
return this;
}
/**
* The current state of the migration.
* @return value or {@code null} for none
*/
public java.lang.String getState() {
return state;
}
/**
* The current state of the migration.
* @param state state or {@code null} for none
*/
public GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata setState(java.lang.String state) {
this.state = state;
return this;
}
@Override
public GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata set(String fieldName, Object value) {
return (GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata) super.set(fieldName, value);
}
@Override
public GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata clone() {
return (GoogleFirebaseStorageControlplaneV1betaMigrateLocationDestructivelyMetadata) super.clone();
}
}
|
apache-2.0
|
kenfinnigan/DeltaSpike
|
deltaspike/modules/security/impl/src/main/java/org/apache/deltaspike/security/impl/credential/DefaultLoginCredential.java
|
2704
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.deltaspike.security.impl.credential;
import org.apache.deltaspike.security.api.authentication.events.LoginFailedEvent;
import org.apache.deltaspike.security.api.authentication.events.PostAuthenticateEvent;
import org.apache.deltaspike.security.api.credential.Credential;
import org.apache.deltaspike.security.api.credential.LoginCredential;
import javax.enterprise.context.RequestScoped;
import javax.enterprise.event.Observes;
import javax.inject.Named;
/**
* The default LoginCredential implementation. This implementation allows for a
* username and plain text password to be set, and uses the PasswordCredential
* implementation of the Credential interface for authentication.
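 *
 * <p>Illustrative usage (the PasswordCredential constructor shown here is an
 * assumption; it is not defined in this file):
 * <pre>
 * loginCredential.setUserId("jane");
 * loginCredential.setCredential(new PasswordCredential("secret"));
 * </pre>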
*/
@Named("loginCredential")
@RequestScoped
public class DefaultLoginCredential implements LoginCredential
{
private Credential credential;
private String userId;
@Override
public String getUserId()
{
return userId;
}
@Override
public void setUserId(String userId)
{
this.userId = userId;
}
public Credential getCredential()
{
return credential;
}
public void setCredential(Credential credential)
{
this.credential = credential;
//X TODO manager.fireEvent(new CredentialsUpdatedEvent(this.credential));
}
public void invalidate()
{
this.credential = null;
this.userId = null;
}
protected void setValid(@Observes PostAuthenticateEvent event)
{
invalidate();
}
protected void afterLogin(@Observes PostAuthenticateEvent event)
{
invalidate();
}
//X TODO discuss
protected void loginFailed(@Observes LoginFailedEvent event)
{
invalidate();
}
@Override
public String toString()
{
return "LoginCredential[" + (this.userId != null ? this.userId : "unknown" ) + "]";
}
}
|
apache-2.0
|
FreeUKGen/MyopicVicar
|
app/helpers/freecen_pieces_helper.rb
|
1763
|
module FreecenPiecesHelper
#return hyperlink to map lat,long (unless 0,0 or 60,0; then just return text)
def map_link_helper(text, lat, long, zoom=10, title='Show on Map')
return text if (0.0==lat.to_f || 60.0==lat.to_f) && 0.0==long.to_f
if(true)#google maps
return raw '<a href="https://google.com/maps/place/'+(lat.to_f.to_s)+','+(long.to_f.to_s)+'/@'+(lat.to_f.to_s)+','+(long.to_f.to_s)+','+(zoom.to_i.to_s)+'z" target="_blank" title="'+(title.to_s)+'">'+(text.to_s)+'</a>'
else#openstreetmap.org
return raw '<a href="https://www.openstreetmap.org/?mlat='+(lat.to_f.to_s)+'&mlon='+(long.to_f.to_s)+'#map='+(zoom.to_i.to_s)+'/'+(lat.to_f.to_s)+'/'+(long.to_f.to_s)+'" target="_blank" title="'+(title.to_s)+'">'+(text.to_s)+'</a>'
end
end
def sub_pieces(subplaces)
place_names = []
subplaces.each do |place|
place_names << place[:name] if place[:name].present?
end
place_names.sort_by! { |e| ActiveSupport::Inflector.transliterate(e.downcase) }
place_names.join(', ')
end
def piece_number(file)
actual_piece = file.freecen_piece
piece_number = actual_piece.piece_number
end
def chapman(file)
actual_piece = file.freecen_piece
piece_number = actual_piece.chapman_code
end
def year(file)
actual_piece = file.freecen_piece
piece_number = actual_piece.year
end
def district_name(file)
actual_piece = file.freecen_piece
piece_number = actual_piece.district_name
end
def vldfile(file_name)
file = Freecen1VldFile.find_by(file_name: file_name)
link_to "#{file_name}" , freecen1_vld_file_path(file.id), class: 'btn btn--small' if file.present?
end
def status_date(piece)
piece.status_date if piece.status_date.present?
end
end
|
apache-2.0
|
aspnet/AspNetCore
|
src/Shared/Components/ServerComponent.cs
|
1623
|
// Licensed to the .NET Foundation under one or more agreements.
// The .NET Foundation licenses this file to you under the MIT license.
using System;
using System.Collections.Generic;
namespace Microsoft.AspNetCore.Components
{
// The DTO that we data-protect and include into any
// generated component marker and that allows the client
// to bootstrap a blazor server-side application.
internal struct ServerComponent
{
public ServerComponent(
int sequence,
string assemblyName,
string typeName,
IList<ComponentParameter> parametersDefinitions,
IList<object> parameterValues,
Guid invocationId) =>
(Sequence, AssemblyName, TypeName, ParameterDefinitions, ParameterValues, InvocationId) =
(sequence, assemblyName, typeName, parametersDefinitions, parameterValues, invocationId);
// The order in which this component was rendered
public int Sequence { get; set; }
// The assembly name for the rendered component.
public string AssemblyName { get; set; }
// The type name of the component.
public string TypeName { get; set; }
// The definition for the parameters for the component.
public IList<ComponentParameter> ParameterDefinitions { get; set; }
// The values for the parameters for the component.
public IList<object> ParameterValues { get; set; }
// An id that uniquely identifies all components generated as part of a single HTTP response.
public Guid InvocationId { get; set; }
}
}
|
apache-2.0
|
octaware/super-volley
|
sample/src/main/java/com/android/supervolley/sample/samples/json_query_params/api/Service.java
|
514
|
package com.android.supervolley.sample.samples.json_query_params.api;
import com.android.supervolley.Call;
import com.android.supervolley.annotation.GET;
import com.android.supervolley.annotation.Query;
import com.android.supervolley.sample.samples.json_query_params.annotations.Json;
import com.android.supervolley.sample.samples.json_query_params.model.Filter;
import okhttp3.ResponseBody;
public interface Service {
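    // Illustrative only: with a converter registered for @Json, a call such as
    // service.example(filter) is expected to produce a request like
    //   GET /filter?value=%7B...%7D   (URL-encoded JSON for the Filter)
    // The exact JSON depends on Filter's fields, which are not shown here.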
@GET("/filter")
Call<ResponseBody> example(@Json @Query("value") Filter value);
}
|
apache-2.0
|
sara-izaap/Fitness-app
|
src/pages/workout/history/history.ts
|
1052
|
import { Component } from '@angular/core';
import {NavController,NavParams,ViewController} from 'ionic-angular';
import { WorkoutService } from '../../../providers/workoutService';
@Component({ selector: 'workout-history', templateUrl: 'history.html' })
export class HistoryPage {
  segment = 'workout';
  workout: any = [];
  allworkout: any = [];
  historydata: any;
  constructor(
    public navCtrl: NavController,
    public params: NavParams,
    public viewCtrl: ViewController,
    public workservice: WorkoutService
  )
{
this.historydata = this.params.get('data');
console.log(this.historydata);
}
ngOnInit(){
this.getWorkoutbyUser();
}
getWorkoutbyUser(){
this.workservice.getworkoutByUser(this.historydata.user_id,this.historydata.exercise_id,this.historydata.temp_id).then(res =>{
this.workout = res.currworkout;
this.allworkout = res.allworkout;
console.log(this.workout);
})
.catch(error => console.log(error));
}
dismiss(){
this.viewCtrl.dismiss(false);
}
}
|
apache-2.0
|
pennmanor/LaptopsandTickets-1-1
|
auth/Auth/OpenID/SQLStore.php
|
18043
|
<?php
/**
* SQL-backed OpenID stores.
*
* PHP versions 4 and 5
*
* LICENSE: See the COPYING file included in this distribution.
*
* @package OpenID
* @author JanRain, Inc. <openid@janrain.com>
* @copyright 2005-2008 Janrain, Inc.
* @license http://www.apache.org/licenses/LICENSE-2.0 Apache
*/
/**
* @access private
*/
require_once 'Auth/OpenID/Interface.php';
require_once 'Auth/OpenID/Nonce.php';
/**
* @access private
*/
require_once 'Auth/OpenID.php';
/**
* @access private
*/
require_once 'Auth/OpenID/Nonce.php';
/**
* This is the parent class for the SQL stores, which contains the
* logic common to all of the SQL stores.
*
* The table names used are determined by the class variables
* associations_table_name and nonces_table_name. To change the name
* of the tables used, pass new table names into the constructor.
*
* To create the tables with the proper schema, see the createTables
* method.
*
* This class shouldn't be used directly. Use one of its subclasses
* instead, as those contain the code necessary to use a specific
* database. If you're an OpenID integrator and you'd like to create
* an SQL-driven store that wraps an application's database
* abstraction, be sure to create a subclass of
* {@link Auth_OpenID_DatabaseConnection} that calls the application's
* database abstraction calls. Then, pass an instance of your new
* database connection class to your SQLStore subclass constructor.
*
* All methods other than the constructor and createTables should be
* considered implementation details.
*
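 * A minimal usage sketch (connection details are placeholders;
 * Auth_OpenID_MySQLStore is one of the concrete subclasses shipped with
 * this package):
 *
 *   $db = DB::connect('mysql://user:pass@localhost/openid');
 *   $store = new Auth_OpenID_MySQLStore($db);
 *   $store->createTables();
 *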
* @package OpenID
*/
class Auth_OpenID_SQLStore extends Auth_OpenID_OpenIDStore {
/**
* This creates a new SQLStore instance. It requires an
* established database connection be given to it, and it allows
* overriding the default table names.
*
* @param connection $connection This must be an established
* connection to a database of the correct type for the SQLStore
* subclass you're using. This must either be an PEAR DB
* connection handle or an instance of a subclass of
* Auth_OpenID_DatabaseConnection.
*
* @param associations_table: This is an optional parameter to
* specify the name of the table used for storing associations.
* The default value is 'oid_associations'.
*
* @param nonces_table: This is an optional parameter to specify
* the name of the table used for storing nonces. The default
* value is 'oid_nonces'.
*/
function Auth_OpenID_SQLStore($connection,
$associations_table = null,
$nonces_table = null)
{
$this->associations_table_name = "oid_associations";
$this->nonces_table_name = "oid_nonces";
// Check the connection object type to be sure it's a PEAR
// database connection.
if (!(is_object($connection) &&
(is_subclass_of($connection, 'db_common') ||
is_subclass_of($connection,
'auth_openid_databaseconnection')))) {
trigger_error("Auth_OpenID_SQLStore expected PEAR connection " .
"object (got ".get_class($connection).")",
E_USER_ERROR);
return;
}
$this->connection = $connection;
// Be sure to set the fetch mode so the results are keyed on
// column name instead of column index. This is a PEAR
// constant, so only try to use it if PEAR is present. Note
        // that Auth_OpenID_DatabaseConnection instances need not
// implement ::setFetchMode for this reason.
if (is_subclass_of($this->connection, 'db_common')) {
$this->connection->setFetchMode(DB_FETCHMODE_ASSOC);
}
if ($associations_table) {
$this->associations_table_name = $associations_table;
}
if ($nonces_table) {
$this->nonces_table_name = $nonces_table;
}
$this->max_nonce_age = 6 * 60 * 60;
// Be sure to run the database queries with auto-commit mode
// turned OFF, because we want every function to run in a
// transaction, implicitly. As a rule, methods named with a
// leading underscore will NOT control transaction behavior.
// Callers of these methods will worry about transactions.
$this->connection->autoCommit(false);
// Create an empty SQL strings array.
$this->sql = array();
// Call this method (which should be overridden by subclasses)
// to populate the $this->sql array with SQL strings.
$this->setSQL();
// Verify that all required SQL statements have been set, and
// raise an error if any expected SQL strings were either
// absent or empty.
list($missing, $empty) = $this->_verifySQL();
if ($missing) {
trigger_error("Expected keys in SQL query list: " .
implode(", ", $missing),
E_USER_ERROR);
return;
}
if ($empty) {
trigger_error("SQL list keys have no SQL strings: " .
implode(", ", $empty),
E_USER_ERROR);
return;
}
// Add table names to queries.
$this->_fixSQL();
}
function tableExists($table_name)
{
return !$this->isError(
$this->connection->query(
sprintf("SELECT * FROM %s LIMIT 0",
$table_name)));
}
/**
* Returns true if $value constitutes a database error; returns
* false otherwise.
*/
function isError($value)
{
return PEAR::isError($value);
}
/**
* Converts a query result to a boolean. If the result is a
* database error according to $this->isError(), this returns
* false; otherwise, this returns true.
*/
function resultToBool($obj)
{
if ($this->isError($obj)) {
return false;
} else {
return true;
}
}
/**
* This method should be overridden by subclasses. This method is
* called by the constructor to set values in $this->sql, which is
* an array keyed on sql name.
*/
function setSQL()
{
}
/**
* Resets the store by removing all records from the store's
* tables.
*/
function reset()
{
$this->connection->query(sprintf("DELETE FROM %s",
$this->associations_table_name));
$this->connection->query(sprintf("DELETE FROM %s",
$this->nonces_table_name));
}
/**
* @access private
*/
function _verifySQL()
{
$missing = array();
$empty = array();
$required_sql_keys = array(
'nonce_table',
'assoc_table',
'set_assoc',
'get_assoc',
'get_assocs',
'remove_assoc'
);
foreach ($required_sql_keys as $key) {
if (!array_key_exists($key, $this->sql)) {
$missing[] = $key;
} else if (!$this->sql[$key]) {
$empty[] = $key;
}
}
return array($missing, $empty);
}
/**
* @access private
*/
function _fixSQL()
{
$replacements = array(
array(
'value' => $this->nonces_table_name,
'keys' => array('nonce_table',
'add_nonce',
'clean_nonce')
),
array(
'value' => $this->associations_table_name,
'keys' => array('assoc_table',
'set_assoc',
'get_assoc',
'get_assocs',
'remove_assoc',
'clean_assoc')
)
);
foreach ($replacements as $item) {
$value = $item['value'];
$keys = $item['keys'];
foreach ($keys as $k) {
if (is_array($this->sql[$k])) {
foreach ($this->sql[$k] as $part_key => $part_value) {
$this->sql[$k][$part_key] = sprintf($part_value,
$value);
}
} else {
$this->sql[$k] = sprintf($this->sql[$k], $value);
}
}
}
}
function blobDecode($blob)
{
return $blob;
}
function blobEncode($str)
{
return $str;
}
function createTables()
{
$this->connection->autoCommit(true);
$n = $this->create_nonce_table();
$a = $this->create_assoc_table();
$this->connection->autoCommit(false);
if ($n && $a) {
return true;
} else {
return false;
}
}
function create_nonce_table()
{
if (!$this->tableExists($this->nonces_table_name)) {
$r = $this->connection->query($this->sql['nonce_table']);
return $this->resultToBool($r);
}
return true;
}
function create_assoc_table()
{
if (!$this->tableExists($this->associations_table_name)) {
$r = $this->connection->query($this->sql['assoc_table']);
return $this->resultToBool($r);
}
return true;
}
/**
* @access private
*/
function _set_assoc($server_url, $handle, $secret, $issued,
$lifetime, $assoc_type)
{
return $this->connection->query($this->sql['set_assoc'],
array(
$server_url,
$handle,
$secret,
$issued,
$lifetime,
$assoc_type));
}
function storeAssociation($server_url, $association)
{
if ($this->resultToBool($this->_set_assoc(
$server_url,
$association->handle,
$this->blobEncode(
$association->secret),
$association->issued,
$association->lifetime,
$association->assoc_type
))) {
$this->connection->commit();
} else {
$this->connection->rollback();
}
}
/**
* @access private
*/
function _get_assoc($server_url, $handle)
{
$result = $this->connection->getRow($this->sql['get_assoc'],
array($server_url, $handle));
if ($this->isError($result)) {
return null;
} else {
return $result;
}
}
/**
* @access private
*/
function _get_assocs($server_url)
{
$result = $this->connection->getAll($this->sql['get_assocs'],
array($server_url));
if ($this->isError($result)) {
return array();
} else {
return $result;
}
}
function removeAssociation($server_url, $handle)
{
if ($this->_get_assoc($server_url, $handle) == null) {
return false;
}
if ($this->resultToBool($this->connection->query(
$this->sql['remove_assoc'],
array($server_url, $handle)))) {
$this->connection->commit();
} else {
$this->connection->rollback();
}
return true;
}
function getAssociation($server_url, $handle = null)
{
if ($handle !== null) {
$assoc = $this->_get_assoc($server_url, $handle);
$assocs = array();
if ($assoc) {
$assocs[] = $assoc;
}
} else {
$assocs = $this->_get_assocs($server_url);
}
if (!$assocs || (count($assocs) == 0)) {
return null;
} else {
$associations = array();
foreach ($assocs as $assoc_row) {
$assoc = new Auth_OpenID_Association($assoc_row['handle'],
$assoc_row['secret'],
$assoc_row['issued'],
$assoc_row['lifetime'],
$assoc_row['assoc_type']);
$assoc->secret = $this->blobDecode($assoc->secret);
if ($assoc->getExpiresIn() == 0) {
$this->removeAssociation($server_url, $assoc->handle);
} else {
$associations[] = array($assoc->issued, $assoc);
}
}
if ($associations) {
$issued = array();
$assocs = array();
foreach ($associations as $key => $assoc) {
$issued[$key] = $assoc[0];
$assocs[$key] = $assoc[1];
}
array_multisort($issued, SORT_DESC, $assocs, SORT_DESC,
$associations);
// return the most recently issued one.
list($issued, $assoc) = $associations[0];
return $assoc;
} else {
return null;
}
}
}
/**
* @access private
*/
function _add_nonce($server_url, $timestamp, $salt)
{
$sql = $this->sql['add_nonce'];
$result = $this->connection->query($sql, array($server_url,
$timestamp,
$salt));
if ($this->isError($result)) {
$this->connection->rollback();
} else {
$this->connection->commit();
}
return $this->resultToBool($result);
}
function useNonce($server_url, $timestamp, $salt)
{
global $Auth_OpenID_SKEW;
if ( abs($timestamp - time()) > $Auth_OpenID_SKEW ) {
return false;
}
return $this->_add_nonce($server_url, $timestamp, $salt);
}
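    /*
     * Example with hypothetical values: if $Auth_OpenID_SKEW is 3600,
     * useNonce($url, time() - 7200, $salt) fails the skew check and
     * returns false immediately. A fresh timestamp is accepted once;
     * a replay of the same (server_url, timestamp, salt) triple then
     * violates the table's uniqueness constraint, so the second call
     * returns false.
     */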
/**
* "Octifies" a binary string by returning a string with escaped
* octal bytes. This is used for preparing binary data for
* PostgreSQL BYTEA fields.
*
* @access private
*/
function _octify($str)
{
$result = "";
for ($i = 0; $i < Auth_OpenID::bytes($str); $i++) {
$ch = substr($str, $i, 1);
if ($ch == "\\") {
$result .= "\\\\\\\\";
} else if (ord($ch) == 0) {
$result .= "\\\\000";
} else {
$result .= "\\" . strval(decoct(ord($ch)));
}
}
return $result;
}
/**
* "Unoctifies" octal-escaped data from PostgreSQL and returns the
* resulting ASCII (possibly binary) string.
*
* @access private
*/
function _unoctify($str)
{
$result = "";
$i = 0;
while ($i < strlen($str)) {
$char = $str[$i];
            if ($char == "\\") {
                // If the next char is not another backslash, this is
                // an octal escape sequence; otherwise it is an escaped
                // backslash.
                if ($str[$i + 1] != "\\") {
$octal_digits = substr($str, $i + 1, 3);
$dec = octdec($octal_digits);
$char = chr($dec);
$i += 4;
} else {
$char = "\\";
$i += 2;
}
} else {
$i += 1;
}
$result .= $char;
}
return $result;
}
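    /*
     * Worked example (PostgreSQL BYTEA round trip): _octify("a\x00b")
     * produces the literal  \141\\000\142  - doubly escaped so that the
     * SQL literal layer consumes one level of backslashes on INSERT.
     * Reading back, the driver hands us singly escaped data, e.g.
     * \141\000\142 , which _unoctify() turns back into the original
     * "a\x00b".
     */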
function cleanupNonces()
{
global $Auth_OpenID_SKEW;
$v = time() - $Auth_OpenID_SKEW;
$this->connection->query($this->sql['clean_nonce'], array($v));
$num = $this->connection->affectedRows();
$this->connection->commit();
return $num;
}
function cleanupAssociations()
{
$this->connection->query($this->sql['clean_assoc'],
array(time()));
$num = $this->connection->affectedRows();
$this->connection->commit();
return $num;
}
}
|
apache-2.0
|
aestesis/elektronika
|
src/BaseClasses.to_delete/vtrans.cpp
|
17945
|
//------------------------------------------------------------------------------
// File: Vtrans.cpp
//
// Desc: DirectShow base classes.
//
// Copyright (c) 1992-2001 Microsoft Corporation. All rights reserved.
//------------------------------------------------------------------------------
#include <streams.h>
#include <measure.h>
// #include <vtransfr.h> // now in precomp file streams.h
CVideoTransformFilter::CVideoTransformFilter
( TCHAR *pName, LPUNKNOWN pUnk, REFCLSID clsid)
: CTransformFilter(pName, pUnk, clsid)
, m_itrLate(0)
, m_nKeyFramePeriod(0) // No QM until we see at least 2 key frames
, m_nFramesSinceKeyFrame(0)
, m_bSkipping(FALSE)
, m_tDecodeStart(0)
, m_itrAvgDecode(300000) // 30mSec - probably allows skipping
, m_bQualityChanged(FALSE) {
#ifdef PERF
RegisterPerfId();
#endif // PERF
}
CVideoTransformFilter::~CVideoTransformFilter() {
// nothing to do
}
// Reset our quality management state
HRESULT CVideoTransformFilter::StartStreaming() {
m_itrLate = 0;
m_nKeyFramePeriod = 0; // No QM until we see at least 2 key frames
m_nFramesSinceKeyFrame = 0;
m_bSkipping = FALSE;
m_tDecodeStart = 0;
m_itrAvgDecode = 300000; // 30mSec - probably allows skipping
m_bQualityChanged = FALSE;
m_bSampleSkipped = FALSE;
return NOERROR;
}
// Overridden to reset quality management information
HRESULT CVideoTransformFilter::EndFlush() {
    {
        // Synchronize - this extra scope releases the receive lock
        // before the base class EndFlush is called
        CAutoLock lck(&m_csReceive);
        // Reset our stats
        //
        // Note - we don't want to call derived classes here,
        // we only want to reset our internal variables and this
        // is a convenient way to do it
        CVideoTransformFilter::StartStreaming();
    }
    return CTransformFilter::EndFlush();
}
HRESULT CVideoTransformFilter::AbortPlayback(HRESULT hr) {
NotifyEvent(EC_ERRORABORT, hr, 0);
m_pOutput->DeliverEndOfStream();
return hr;
}
// Receive()
//
// Accept a sample from upstream, decide whether to process it
// or drop it. If we process it then get a buffer from the
// allocator of the downstream connection, transform it into the
// new buffer and deliver it to the downstream filter.
// If we decide not to process it then we do not get a buffer.
// Remember that although this code will notice format changes coming into
// the input pin, it will NOT change its output format if that results
// in the filter needing to make a corresponding output format change. Your
// derived filter will have to take care of that. (eg. a palette change if
// the input and output is an 8 bit format). If the input sample is discarded
// and nothing is sent out for this Receive, please remember to put the format
// change on the first output sample that you actually do send.
// If your filter will produce the same output type even when the input type
// changes, then this base class code will do everything you need.
HRESULT CVideoTransformFilter::Receive(IMediaSample *pSample) {
// If the next filter downstream is the video renderer, then it may
// be able to operate in DirectDraw mode which saves copying the data
// and gives higher performance. In that case the buffer which we
// get from GetDeliveryBuffer will be a DirectDraw buffer, and
// drawing into this buffer draws directly onto the display surface.
// This means that any waiting for the correct time to draw occurs
// during GetDeliveryBuffer, and that once the buffer is given to us
// the video renderer will count it in its statistics as a frame drawn.
// This means that any decision to drop the frame must be taken before
// calling GetDeliveryBuffer.
ASSERT(CritCheckIn(&m_csReceive));
AM_MEDIA_TYPE *pmtOut, *pmt;
#ifdef DEBUG
FOURCCMap fccOut;
#endif
HRESULT hr;
ASSERT(pSample);
IMediaSample * pOutSample;
// If no output pin to deliver to then no point sending us data
ASSERT(m_pOutput != NULL) ;
// The source filter may dynamically ask us to start transforming from a
// different media type than the one we're using now. If we don't, we'll
// draw garbage. (typically, this is a palette change in the movie,
// but could be something more sinister like the compression type changing,
// or even the video size changing)
#define rcS1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcSource
#define rcT1 ((VIDEOINFOHEADER *)(pmt->pbFormat))->rcTarget
pSample->GetMediaType(&pmt);
if(pmt != NULL && pmt->pbFormat != NULL) {
// spew some debug output
ASSERT(!IsEqualGUID(pmt->majortype, GUID_NULL));
#ifdef DEBUG
fccOut.SetFOURCC(&pmt->subtype);
LONG lCompression = HEADER(pmt->pbFormat)->biCompression;
LONG lBitCount = HEADER(pmt->pbFormat)->biBitCount;
LONG lStride = (HEADER(pmt->pbFormat)->biWidth * lBitCount + 7) / 8;
lStride = (lStride + 3) & ~3;
DbgLog((LOG_TRACE,3,TEXT("*Changing input type on the fly to")));
DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
fccOut.GetFOURCC(), lCompression, lBitCount));
DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
HEADER(pmt->pbFormat)->biHeight,
rcT1.left, rcT1.top, rcT1.right, rcT1.bottom));
DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
rcS1.left, rcS1.top, rcS1.right, rcS1.bottom,
lStride));
#endif
// now switch to using the new format. I am assuming that the
// derived filter will do the right thing when its media type is
// switched and streaming is restarted.
StopStreaming();
m_pInput->CurrentMediaType() = *pmt;
DeleteMediaType(pmt);
// if this fails, playback will stop, so signal an error
hr = StartStreaming();
if(FAILED(hr)) {
return AbortPlayback(hr);
}
}
// Now that we have noticed any format changes on the input sample, it's
// OK to discard it.
if(ShouldSkipFrame(pSample)) {
MSR_NOTE(m_idSkip);
m_bSampleSkipped = TRUE;
return NOERROR;
}
// Set up the output sample
hr = InitializeOutputSample(pSample, &pOutSample);
if(FAILED(hr)) {
return hr;
}
m_bSampleSkipped = FALSE;
// The renderer may ask us to on-the-fly to start transforming to a
// different format. If we don't obey it, we'll draw garbage
#define rcS ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcSource
#define rcT ((VIDEOINFOHEADER *)(pmtOut->pbFormat))->rcTarget
pOutSample->GetMediaType(&pmtOut);
if(pmtOut != NULL && pmtOut->pbFormat != NULL) {
// spew some debug output
ASSERT(!IsEqualGUID(pmtOut->majortype, GUID_NULL));
#ifdef DEBUG
fccOut.SetFOURCC(&pmtOut->subtype);
LONG lCompression = HEADER(pmtOut->pbFormat)->biCompression;
LONG lBitCount = HEADER(pmtOut->pbFormat)->biBitCount;
LONG lStride = (HEADER(pmtOut->pbFormat)->biWidth * lBitCount + 7) / 8;
lStride = (lStride + 3) & ~3;
DbgLog((LOG_TRACE,3,TEXT("*Changing output type on the fly to")));
DbgLog((LOG_TRACE,3,TEXT("FourCC: %lx Compression: %lx BitCount: %ld"),
fccOut.GetFOURCC(), lCompression, lBitCount));
DbgLog((LOG_TRACE,3,TEXT("biHeight: %ld rcDst: (%ld, %ld, %ld, %ld)"),
HEADER(pmtOut->pbFormat)->biHeight,
rcT.left, rcT.top, rcT.right, rcT.bottom));
DbgLog((LOG_TRACE,3,TEXT("rcSrc: (%ld, %ld, %ld, %ld) Stride: %ld"),
rcS.left, rcS.top, rcS.right, rcS.bottom,
lStride));
#endif
// now switch to using the new format. I am assuming that the
// derived filter will do the right thing when its media type is
// switched and streaming is restarted.
StopStreaming();
m_pOutput->CurrentMediaType() = *pmtOut;
DeleteMediaType(pmtOut);
hr = StartStreaming();
if(SUCCEEDED(hr)) {
// a new format, means a new empty buffer, so wait for a keyframe
// before passing anything on to the renderer.
// !!! a keyframe may never come, so give up after 30 frames
DbgLog((LOG_TRACE,3,TEXT("Output format change means we must wait for a keyframe")));
m_nWaitForKey = 30;
// if this fails, playback will stop, so signal an error
}
else {
// Must release the sample before calling AbortPlayback
// because we might be holding the win16 lock or
// ddraw lock
pOutSample->Release();
AbortPlayback(hr);
return hr;
}
}
// After a discontinuity, we need to wait for the next key frame
if(pSample->IsDiscontinuity() == S_OK) {
DbgLog((LOG_TRACE,3,TEXT("Non-key discontinuity - wait for keyframe")));
m_nWaitForKey = 30;
}
// Start timing the transform (and log it if PERF is defined)
if(SUCCEEDED(hr)) {
m_tDecodeStart = timeGetTime();
MSR_START(m_idTransform);
// have the derived class transform the data
hr = Transform(pSample, pOutSample);
// Stop the clock (and log it if PERF is defined)
MSR_STOP(m_idTransform);
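        // Exponential moving average of the decode time: timeGetTime()
        // returns mSec, so scale by 10000 to get reference-time units
        // (100nS), weighting the newest sample at 1/16.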
m_tDecodeStart = timeGetTime()-m_tDecodeStart;
m_itrAvgDecode = m_tDecodeStart*(10000/16) + 15*(m_itrAvgDecode/16);
// Maybe we're waiting for a keyframe still?
if(m_nWaitForKey)
m_nWaitForKey--;
if(m_nWaitForKey && pSample->IsSyncPoint() == S_OK)
m_nWaitForKey = FALSE;
// if so, then we don't want to pass this on to the renderer
if(m_nWaitForKey && hr == NOERROR) {
DbgLog((LOG_TRACE,3,TEXT("still waiting for a keyframe")));
hr = S_FALSE;
}
}
if(FAILED(hr)) {
DbgLog((LOG_TRACE,1,TEXT("Error from video transform")));
}
else {
// the Transform() function can return S_FALSE to indicate that the
// sample should not be delivered; we only deliver the sample if it's
// really S_OK (same as NOERROR, of course.)
// Try not to return S_FALSE to a direct draw buffer (it's wasteful)
// Try to take the decision earlier - before you get it.
if(hr == NOERROR) {
hr = m_pOutput->Deliver(pOutSample);
}
else {
// S_FALSE returned from Transform is a PRIVATE agreement
// We should return NOERROR from Receive() in this case because returning S_FALSE
// from Receive() means that this is the end of the stream and no more data should
// be sent.
if(S_FALSE == hr) {
// We must Release() the sample before doing anything
// like calling the filter graph because having the
// sample means we may have the DirectDraw lock
// (== win16 lock on some versions)
pOutSample->Release();
m_bSampleSkipped = TRUE;
if(!m_bQualityChanged) {
m_bQualityChanged = TRUE;
NotifyEvent(EC_QUALITY_CHANGE,0,0);
}
return NOERROR;
}
}
}
// release the output buffer. If the connected pin still needs it,
// it will have addrefed it itself.
pOutSample->Release();
ASSERT(CritCheckIn(&m_csReceive));
return hr;
}
BOOL CVideoTransformFilter::ShouldSkipFrame( IMediaSample * pIn) {
REFERENCE_TIME trStart, trStopAt;
HRESULT hr = pIn->GetTime(&trStart, &trStopAt);
// Don't skip frames with no timestamps
if(hr != S_OK)
return FALSE;
int itrFrame = (int)(trStopAt - trStart); // frame duration
if(S_OK==pIn->IsSyncPoint()) {
MSR_INTEGER(m_idFrameType, 1);
if(m_nKeyFramePeriod < m_nFramesSinceKeyFrame) {
// record the max
m_nKeyFramePeriod = m_nFramesSinceKeyFrame;
}
m_nFramesSinceKeyFrame = 0;
m_bSkipping = FALSE;
}
else {
MSR_INTEGER(m_idFrameType, 2);
if(m_nFramesSinceKeyFrame>m_nKeyFramePeriod
&& m_nKeyFramePeriod>0) {
// We haven't seen the key frame yet, but we were clearly being
// overoptimistic about how frequent they are.
m_nKeyFramePeriod = m_nFramesSinceKeyFrame;
}
}
// Whatever we might otherwise decide,
// if we are taking only a small fraction of the required frame time to decode
// then any quality problems are actually coming from somewhere else.
// Could be a net problem at the source for instance. In this case there's
// no point in us skipping frames here.
if(m_itrAvgDecode*4>itrFrame) {
// Don't skip unless we are at least a whole frame late.
// (We would skip B frames if more than 1/2 frame late, but they're safe).
if(m_itrLate > itrFrame) {
// Don't skip unless the anticipated key frame would be no more than
// 1 frame early. If the renderer has not been waiting (we *guess*
// it hasn't because we're late) then it will allow frames to be
// played early by up to a frame.
// Let T = Stream time from now to anticipated next key frame
// = (frame duration) * (KeyFramePeriod - FramesSinceKeyFrame)
// So we skip if T - Late < one frame i.e.
// (duration) * (freq - FramesSince) - Late < duration
// or (duration) * (freq - FramesSince - 1) < Late
// We don't dare skip until we have seen some key frames and have
// some idea how often they occur and they are reasonably frequent.
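            // Worked example with illustrative numbers: at 30fps a frame
            // lasts 333,333 reference-time units (33.3mSec). With
            // KeyFramePeriod 10 and FramesSinceKeyFrame 7, the next key
            // frame is expected in (10 - 7 - 1) = 2 frame durations,
            // i.e. about 66mSec, so we only start skipping if we are
            // more than 66mSec late.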
if(m_nKeyFramePeriod>0) {
// It would be crazy - but we could have a stream with key frames
// a very long way apart - and if they are further than about
// 3.5 minutes apart then we could get arithmetic overflow in
// reference time units. Therefore we switch to mSec at this point
int it = (itrFrame/10000)
* (m_nKeyFramePeriod-m_nFramesSinceKeyFrame - 1);
MSR_INTEGER(m_idTimeTillKey, it);
// For debug - might want to see the details - dump them as scratch pad
#ifdef VTRANSPERF
MSR_INTEGER(0, itrFrame);
MSR_INTEGER(0, m_nFramesSinceKeyFrame);
MSR_INTEGER(0, m_nKeyFramePeriod);
#endif
if(m_itrLate/10000 > it) {
m_bSkipping = TRUE;
// Now we are committed. Once we start skipping, we
// cannot stop until we hit a key frame.
}
else {
#ifdef VTRANSPERF
MSR_INTEGER(0, 777770); // not near enough to next key
#endif
}
}
else {
#ifdef VTRANSPERF
MSR_INTEGER(0, 777771); // Next key not predictable
#endif
}
}
else {
#ifdef VTRANSPERF
MSR_INTEGER(0, 777772); // Less than one frame late
MSR_INTEGER(0, m_itrLate);
MSR_INTEGER(0, itrFrame);
#endif
}
}
else {
#ifdef VTRANSPERF
        MSR_INTEGER(0, 777773); // Decode time short - not worth skipping
MSR_INTEGER(0, m_itrAvgDecode);
MSR_INTEGER(0, itrFrame);
#endif
}
++m_nFramesSinceKeyFrame;
if(m_bSkipping) {
// We will count down the lateness as we skip each frame.
// We re-assess each frame. The key frame might not arrive when expected.
// We reset m_itrLate if we get a new Quality message, but actually that's
// not likely because we're not sending frames on to the Renderer. In
// fact if we DID get another one it would mean that there's a long
// pipe between us and the renderer and we might need an altogether
// better strategy to avoid hunting!
m_itrLate = m_itrLate - itrFrame;
}
MSR_INTEGER(m_idLate, (int)m_itrLate/10000); // Note how late we think we are
if(m_bSkipping) {
if(!m_bQualityChanged) {
m_bQualityChanged = TRUE;
NotifyEvent(EC_QUALITY_CHANGE,0,0);
}
}
return m_bSkipping;
}
HRESULT CVideoTransformFilter::AlterQuality(Quality q) {
// to reduce the amount of 64 bit arithmetic, m_itrLate is an int.
// +, -, >, == etc are not too bad, but * and / are painful.
    if(q.Late > 300000000) {
        // Avoid overflow and silliness - more than 30 secs late is already silly.
        // (Test the incoming 64 bit lateness, not the stale member, so
        // the clamp actually guards the (int) cast below.)
        m_itrLate = 300000000;
    }
    else {
        m_itrLate = (int)q.Late;
    }
// We ignore the other fields
// We're actually not very good at handling this. In non-direct draw mode
// most of the time can be spent in the renderer which can skip any frame.
// In that case we'd rather the renderer handled things.
// Nevertheless we will keep an eye on it and if we really start getting
// a very long way behind then we will actually skip - but we'll still tell
// the renderer (or whoever is downstream) that they should handle quality.
return E_FAIL; // Tell the renderer to do his thing.
}
// This will avoid several hundred useless warnings if compiled -W4 by MS VC++ v4
#pragma warning(disable:4514)
|
apache-2.0
|
18098924759/WuWen
|
WuWen.Infrastructure/Data/InBuilder.cs
|
1086
|
using System;
using System.Collections.Generic;
using System.Data;
using System.Linq;
namespace WuWen.Infrastructure.Data
{
public class InBuilder<T> : WhereBuilderBase
{
private readonly IEnumerable<T> source;
public InBuilder(string fieldName, IEnumerable<T> col) : base(fieldName)
{
source = (col ?? Enumerable.Empty<T>());
}
        public override Tuple<string, IEnumerable<IDataParameter>> Build()
        {
            // Materialize the source once so we do not re-enumerate it
            // for Count()/ElementAt() on every loop iteration.
            List<T> items = source.ToList();
            if (items.Count == 0)
            {
                return null;
            }
            List<IDataParameter> list = new List<IDataParameter>();
            for (int i = 0; i < items.Count; i++)
            {
                list.Add(base.BuildParameter<T>(string.Format("{0}_{1}", FieldName, i), items[i]));
            }
            string clause = string.Format("{0} in ({1})", FieldName, string.Join(",",
                from item in list
                select item.ParameterName));
            return new Tuple<string, IEnumerable<IDataParameter>>(clause, list);
        }
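        // A usage sketch (hypothetical - BuildParameter comes from
        // WhereBuilderBase, which is not shown here, so the exact
        // parameter-name prefix depends on its implementation):
        //
        //   var builder = new InBuilder<int>("Id", new[] { 1, 2, 3 });
        //   var result = builder.Build();
        //   // result.Item1 == "Id in (Id_0,Id_1,Id_2)" (modulo prefix)
        //   // result.Item2 holds the three IDataParameter instances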
}
}
|
apache-2.0
|
syphr42/liblametrictime-java
|
src/test/java/org/syphr/lametrictime/api/test/TestUtil.java
|
1324
|
/**
* Copyright 2017-2018 Gregory Moyer and contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.syphr.lametrictime.api.test;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class TestUtil
{
private static final String RESOURCES_PATH = "src/test/resources/";
public static Path getTestDataPath(Class<?> clazz, String name)
{
String packageName = clazz.getPackage().getName();
List<String> paths = new ArrayList<>();
paths.addAll(Arrays.asList(packageName.split("\\.")));
paths.add(name);
return Paths.get(RESOURCES_PATH, paths.toArray(new String[paths.size()]));
}
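    // Example: for a class in package org.syphr.lametrictime.api.test,
    // getTestDataPath(TestUtil.class, "data.json") resolves to
    // src/test/resources/org/syphr/lametrictime/api/test/data.json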
// @formatter:off
private TestUtil() {}
// @formatter:on
}
|
apache-2.0
|
mineral-ui/mineral-ui
|
src/website/app/demos/Menu/Menu/examples/menu.js
|
1654
|
/* @flow */
import { mineralTheme } from '../../../../../../library/themes';
import IconCloud from 'mineral-ui-icons/IconCloud';
import Menu, {
MenuDivider,
MenuGroup,
MenuItem
} from '../../../../../../library/Menu';
import DemoLayout from '../../common/DemoLayout';
export default {
id: 'basic',
title: 'Basic Usage',
backgroundColor: mineralTheme.color_gray_10,
  description: `Menus display a list of actions or navigation options.
They are composed of [MenuDivider](/components/menu-divider), [MenuGroup](/components/menu-group), and [MenuItem](/components/menu-item).
<Callout title="Note">
<p key={0}>
Menus normally occupy the full available width of their container.
The Menus here are width-constrained for illustration purposes.
</p>
</Callout>`,
scope: { DemoLayout, IconCloud, Menu, MenuDivider, MenuGroup, MenuItem },
source: `
<DemoLayout>
<Menu>
<MenuItem onClick={event => { console.log(event) }}>
Menu item with onClick
</MenuItem>
<MenuItem secondaryText="Secondary text">
Menu item
</MenuItem>
<MenuItem iconStart={<IconCloud />}>Icon at start</MenuItem>
<MenuItem iconEnd={<IconCloud />}>Icon at end</MenuItem>
<MenuDivider />
<MenuItem disabled onClick={event => { console.log(event) }}>Disabled menu item</MenuItem>
<MenuGroup title="Group Title">
<MenuItem variant="success">Success variant</MenuItem>
<MenuItem variant="warning">Warning variant</MenuItem>
<MenuItem variant="danger">Danger variant</MenuItem>
</MenuGroup>
</Menu>
</DemoLayout>`
};
|
apache-2.0
|
felixbb/forseti-security
|
google/cloud/security/iam/explain/service.py
|
9450
|
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
""" Explain gRPC service. """
import time
from collections import defaultdict
from concurrent import futures
import grpc
from google.cloud.security.iam.explain import explain_pb2
from google.cloud.security.iam.explain import explain_pb2_grpc
from google.cloud.security.iam.explain import explainer
from google.cloud.security.iam.dao import session_creator
# pylint: disable=W0212
def autoclose_stream(f):
"""Decorator to close gRPC stream."""
def wrapper(*args):
"""Wrapper function, checks context state to close stream."""
def closed(context):
"""Returns true iff the connection is closed."""
return context._state.client == 'closed'
context = args[-1]
for result in f(*args):
if closed(context):
return
yield result
return wrapper
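# A sketch of how the decorator is meant to be applied (hypothetical
# handler name - any generator-style gRPC method qualifies):
#
#   @autoclose_stream
#   def StreamResults(self, request, context):
#       for item in compute_results():
#           yield item  # loop exits early once the client disconnects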
# pylint: disable=C0103
# pylint: disable=R0201
# pylint: disable=E1101
class GrpcExplainer(explain_pb2_grpc.ExplainServicer):
"""IAM Explain gRPC implementation."""
HANDLE_KEY = "handle"
def _get_handle(self, context):
"""Return the handle associated with the gRPC call."""
metadata = context.invocation_metadata()
metadata_dict = {}
for key, value in metadata:
metadata_dict[key] = value
return metadata_dict[self.HANDLE_KEY]
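    # Client-side counterpart (a sketch with assumed names): the model
    # handle travels as gRPC invocation metadata under the "handle" key.
    #
    #   channel = grpc.insecure_channel('localhost:50051')
    #   stub = explain_pb2_grpc.ExplainStub(channel)
    #   reply = stub.ExplainGranted(request,
    #                               metadata=[('handle', model_name)])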
def __init__(self, explainer_api):
super(GrpcExplainer, self).__init__()
self.explainer = explainer_api
def Ping(self, request, _):
"""Provides the capability to check for service availability."""
return explain_pb2.PingReply(data=request.data)
def ExplainDenied(self, request, context):
"""Provides information on how to grant access."""
model_name = self._get_handle(context)
binding_strategies = self.explainer.ExplainDenied(model_name,
request.member,
request.resources,
request.permissions,
request.roles)
reply = explain_pb2.ExplainDeniedReply()
strategies = []
for overgranting, bindings in binding_strategies:
strategy = explain_pb2.BindingStrategy(overgranting=overgranting)
strategy.bindings.extend([explain_pb2.Binding(
member=b[1], resource=b[2], role=b[0]) for b in bindings])
strategies.append(strategy)
reply.strategies.extend(strategies)
return reply
def ExplainGranted(self, request, context):
"""Provides information on why a member has access to a resource."""
model_name = self._get_handle(context)
result = self.explainer.ExplainGranted(model_name,
request.member,
request.resource,
request.role,
request.permission)
reply = explain_pb2.ExplainGrantedReply()
bindings, member_graph, resource_names = result
memberships = []
for child, parents in member_graph.iteritems():
memberships.append(
explain_pb2.Membership(
member=child,
parents=parents))
reply.memberships.extend(memberships)
reply.resource_ancestors.extend(resource_names)
reply.bindings.extend(
[explain_pb2.Binding(member=member, resource=resource, role=role)
for resource, role, member in bindings])
return reply
def GetAccessByResources(self, request, context):
"""Returns members having access to the specified resource."""
model_name = self._get_handle(context)
mapping = self.explainer.GetAccessByResources(model_name,
request.resource_name,
request.permission_names,
request.expand_groups)
accesses = []
for role, members in mapping.iteritems():
access = explain_pb2.GetAccessByResourcesReply.Access(
role=role, resource=request.resource_name, members=members)
accesses.append(access)
reply = explain_pb2.GetAccessByResourcesReply()
reply.accesses.extend(accesses)
return reply
def GetAccessByMembers(self, request, context):
"""Returns resources which can be accessed by the specified members."""
model_name = self._get_handle(context)
accesses = []
for role, resources in\
self.explainer.GetAccessByMembers(model_name,
request.member_name,
request.permission_names,
request.expand_resources):
access = explain_pb2.GetAccessByMembersReply.Access(
role=role, resources=resources, member=request.member_name)
accesses.append(access)
reply = explain_pb2.GetAccessByMembersReply()
reply.accesses.extend(accesses)
return reply
def GetPermissionsByRoles(self, request, context):
"""Returns permissions for the specified roles."""
model_name = self._get_handle(context)
result = self.explainer.GetPermissionsByRoles(model_name,
request.role_names,
request.role_prefixes)
permissions_by_roles_map = defaultdict(list)
for role, permission in result:
permissions_by_roles_map[role.name].append(permission.name)
permissions_by_roles_list = []
for role, permissions in permissions_by_roles_map.iteritems():
permissions_by_roles_list.append(
explain_pb2.GetPermissionsByRolesReply.PermissionsByRole(
role=role, permissions=permissions))
reply = explain_pb2.GetPermissionsByRolesReply()
reply.permissionsbyroles.extend(permissions_by_roles_list)
return reply
def CreateModel(self, request, context):
"""Creates a new model from an import source."""
handle = self.explainer.CreateModel(request.type)
reply = explain_pb2.CreateModelReply()
reply.handle = handle
return reply
def DeleteModel(self, request, _):
"""Deletes a model and all associated data."""
model_name = request.handle
self.explainer.DeleteModel(model_name)
return explain_pb2.DeleteModelReply()
def ListModel(self, request, _):
"""List all models."""
model_names = self.explainer.ListModel()
reply = explain_pb2.ListModelReply()
reply.handles.extend(model_names)
return reply
def Denormalize(self, _, context):
"""Denormalize the entire model into access triples."""
model_name = self._get_handle(context)
for permission, resource, member in self.explainer.Denormalize(
model_name):
yield explain_pb2.AuthorizationTuple(member=member,
permission=permission,
resource=resource)
class GrpcExplainerFactory(object):
"""Factory class for Explain service gRPC interface"""
def __init__(self, config):
self.config = config
def create_and_register_service(self, server):
"""Create and register the IAM Explain service."""
service = GrpcExplainer(explainer_api=explainer.Explainer(self.config))
explain_pb2_grpc.add_ExplainServicer_to_server(service, server)
return service
def serve(endpoint, config, max_workers=10, wait_shutdown_secs=3):
"""Serve IAM Explain with the provided parameters."""
server = grpc.server(futures.ThreadPoolExecutor(max_workers))
GrpcExplainerFactory(config).create_and_register_service(server)
server.add_insecure_port(endpoint)
server.start()
while True:
try:
time.sleep(1)
print "Looping\n"
except KeyboardInterrupt:
server.stop(wait_shutdown_secs).wait()
return
if __name__ == "__main__":
class DummyConfig(object):
"""Dummy configuration."""
def __init__(self):
self.session_creator = session_creator('/tmp/explain.db')
def run_in_background(self, function):
"""Run function in background."""
function()
import sys
serve(endpoint=sys.argv[1] if len(sys.argv) >
1 else '[::]:50051', config=DummyConfig())
|
apache-2.0
|
vroosky/Intell-wei
|
edit_home.php
|
5285
|
<?php
/**
 * Created by PhpStorm.
 * User: shenya
 * Date: 16-4-7
 * Time: 5:59 PM
 */
session_start(); // start the session (must run before any output is sent)
if (empty($_SESSION['duser'])) { // check whether the user is logged in
    header("location: index.php"); // redirect if the user is not logged in
    exit;
}
$duser = $_SESSION['duser']; // assign the logged-in user's id
?>
<html lang="en">
<?php include "head.php"; ?>
<body>
<div class="container">
<header>
<div class="logo" >贵阳学院汽车衡智能称重系统</div>
<nav class="float-right">
<div class="pure-menu pure-menu-open pure-menu-horizontal">
<ul>
<li><a href="home.php">返回</a></li>
<li><a href="logout.php">退出登录</a></li>
</ul>
</div>
</nav>
</header>
<h2 align="center">个人信息修改</h2>
<div class="pure-skin-shenya">
<table class="pure-table pure-table-bordered">
<thead>
<tr>
<th>驾驶证号</th>
<th>车牌号</th>
<th>姓名</th>
<th>生日</th>
<th>驾照类型</th>
<th>部门</th>
<th>电话</th>
</tr>
</thead>
<tbody>
<?php
if(!empty($duser)) {
$id = $duser;
$_SESSION['id'] = $id;
$id_exists = true;
include "connect.inc.php";//连接到数据库
$query = mysql_query("Select * from driver Where driver_id='$id'"); // SQL请求
$count = mysql_num_rows($query);
if ($count > 0) {
while ($row = mysql_fetch_array($query)) {
echo "<tr>";
echo '<td align="center">' . $row['driver_id'] . "</td>";
echo '<td align="center">' . $row['carnum'] . "</td>";
echo '<td align="center">' . $row['name'] . "</td>";
echo '<td align="center">' . $row['bday'] . "</td>";
echo '<td align="center">' . $row['dkind'] . "</td>";
echo '<td align="center">' . $row['part'] . "</td>";
echo '<td align="center">' . $row['telnum'] . "</td>";
echo "</tr>";
$driver_id=$row['driver_id'];
$dpassword=$row['dpassword'];
$carnum=$row['carnum'];
$name=$row['name'];
$bday=$row['bday'];
$dkind =$row['dkind'];
$part=$row['part'];
$telnum=$row['telnum'];
}
}
else
{
$id_exists = false;
}
}
?>
</tbody>
</table>
</div>
<?php
$carnum_query=mysql_query("SELECT carnum FROM car") or die ("Error Occurred");
?>
<br/>
<br/>
<form class="pure-form pure-form-aligned" action="editdriver.php" method="POST">
<fieldset>
<div class="pure-control-group">
<label for="driver_id">驾驶证号</label>
<?php
echo '<input readonly id="driver_id" type="text" value="'.$driver_id.'" name="driver_id">'
?>
</div>
<div class="pure-control-group">
<label for="carnum">车牌号</label>
<?php
echo '<input readonly id="carnum" type="text" value="'.$carnum.'" name="carnum" required="required>'
?>
</div>
<div class="pure-control-group">
<label for="dpassword">密码</label>
<?php
echo '<input id="dpassword" type="text" value="'.$dpassword.'" name="dpassword">'
?>
</div>
<div class="pure-control-group">
<label for="name">姓名</label>
<?php
echo '<input id="name" type="text" value="'.$name.'" name="name" required="required>'
?>
</div>
<div class="pure-control-group">
<label for="bday">出生日期</label>
<?php
echo '<input id="bday" type="date" value="'.$bday.'" name="bday">'
?>
</div>
<div class="pure-control-group">
<label for="dkind">驾照类型</label>
<?php
echo '<input id="dkind" type="text" value="'.$dkind.'" name="dkind">'
?>
</div>
<div class="pure-control-group">
<label for="part">所属部门</label>
<?php
echo '<input readonly id="part" type="text" value="'.$part.'" name="part">'
?>
</div>
<div class="pure-control-group">
<label for="telnum">电话号码</label>
<?php
echo '<input id="telnum" type="text" value="'.$telnum.'" name="telnum">'
?>
</div>
<div class="pure-controls">
<button type="submit" class="pure-button pure-button-primary">提交</button>
</div>
</fieldset>
</form>
<?php
include "footer.php";
?>
</div>
</body>
</html>
|
apache-2.0
|
jentfoo/aws-sdk-java
|
aws-java-sdk-route53/src/main/java/com/amazonaws/services/route53/model/transform/GetAccountLimitResultStaxUnmarshaller.java
|
2692
|
/*
* Copyright 2014-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance with
* the License. A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
* CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions
* and limitations under the License.
*/
package com.amazonaws.services.route53.model.transform;
import javax.xml.stream.events.XMLEvent;
import javax.annotation.Generated;
import com.amazonaws.services.route53.model.*;
import com.amazonaws.transform.Unmarshaller;
import com.amazonaws.transform.StaxUnmarshallerContext;
import com.amazonaws.transform.SimpleTypeStaxUnmarshallers.*;
/**
* GetAccountLimitResult StAX Unmarshaller
*/
@Generated("com.amazonaws:aws-java-sdk-code-generator")
public class GetAccountLimitResultStaxUnmarshaller implements Unmarshaller<GetAccountLimitResult, StaxUnmarshallerContext> {
public GetAccountLimitResult unmarshall(StaxUnmarshallerContext context) throws Exception {
GetAccountLimitResult getAccountLimitResult = new GetAccountLimitResult();
int originalDepth = context.getCurrentDepth();
int targetDepth = originalDepth + 1;
if (context.isStartOfDocument())
targetDepth += 1;
while (true) {
XMLEvent xmlEvent = context.nextEvent();
if (xmlEvent.isEndDocument())
return getAccountLimitResult;
if (xmlEvent.isAttribute() || xmlEvent.isStartElement()) {
if (context.testExpression("Limit", targetDepth)) {
getAccountLimitResult.setLimit(AccountLimitStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
if (context.testExpression("Count", targetDepth)) {
getAccountLimitResult.setCount(LongStaxUnmarshaller.getInstance().unmarshall(context));
continue;
}
} else if (xmlEvent.isEndElement()) {
if (context.getCurrentDepth() < originalDepth) {
return getAccountLimitResult;
}
}
}
}
private static GetAccountLimitResultStaxUnmarshaller instance;
public static GetAccountLimitResultStaxUnmarshaller getInstance() {
if (instance == null)
instance = new GetAccountLimitResultStaxUnmarshaller();
return instance;
}
}
|
apache-2.0
|
mike-melo/bluewire
|
bluewire/src/obj_file.hpp
|
280
|
#pragma once
#include <GL/glew.h>
#include <string>
#include <vector>
// Note: std:: is spelled out here rather than pulling in the whole
// namespace - "using namespace std;" in a header leaks into every
// translation unit that includes it.
class obj_file {
public:
    void load(const std::string& file_name);
    std::vector<GLfloat> vertices();
    std::vector<GLushort> faces();
private:
    std::vector<GLfloat> mVertices;
    std::vector<GLushort> mFaces;
};
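// A usage sketch (assuming load() fills mVertices/mFaces from a
// Wavefront .obj file; the file name is a placeholder):
//
//   obj_file model;
//   model.load("assets/cube.obj");
//   std::vector<GLfloat> verts = model.vertices();  // x,y,z floats
//   std::vector<GLushort> idx = model.faces();      // face indices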
|
apache-2.0
|
quarkusio/quarkus
|
extensions/mailer/runtime/src/test/java/io/quarkus/mailer/runtime/FakeSmtpTestBase.java
|
2902
|
package io.quarkus.mailer.runtime;
import java.time.Duration;
import java.util.Optional;
import java.util.OptionalInt;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.BeforeEach;
import io.quarkus.mailer.Mail;
import io.quarkus.runtime.TlsConfig;
import io.vertx.mutiny.core.Vertx;
import io.vertx.mutiny.ext.mail.MailClient;
public class FakeSmtpTestBase {
protected static final int FAKE_SMTP_PORT = 1465;
protected static final String SERVER_JKS = "certs/server2.jks";
protected static final String CLIENT_JKS = "certs/client.jks";
protected static final String FROM = "test@test.org";
protected static final String TO = "foo@quarkus.io";
protected Vertx vertx;
protected FakeSmtpServer smtpServer;
@BeforeEach
void startVertx() {
vertx = Vertx.vertx();
}
@AfterEach
void stopVertx() {
vertx.close().await().indefinitely();
}
protected void startServer(String keystore) {
smtpServer = new FakeSmtpServer(vertx, true, keystore);
}
protected Mail getMail() {
return new Mail().setFrom(FROM).addTo(TO).setSubject("Subject").setText("Message");
}
protected MutinyMailerImpl getMailer(MailConfig config) {
return getMailer(config, false);
}
protected MutinyMailerImpl getMailer(MailConfig config, boolean globalTrustAll) {
TlsConfig tlsConfig = new TlsConfig();
if (globalTrustAll) {
tlsConfig.trustAll = true;
}
MailClientProducer producer = new MailClientProducer(vertx.getDelegate(), config, tlsConfig);
MailClient client = producer.mutinyClient();
MutinyMailerImpl mailer = new MutinyMailerImpl();
mailer.vertx = vertx;
mailer.mailerSupport = new MailerSupport(FROM, null, false);
mailer.client = client;
return mailer;
}
protected MailConfig getDefaultConfig() {
MailConfig config = new MailConfig();
config.host = "localhost";
config.port = OptionalInt.of(FAKE_SMTP_PORT);
config.startTLS = "DISABLED";
config.login = "DISABLED";
config.ssl = false;
config.authMethods = Optional.empty();
config.maxPoolSize = 10;
config.ownHostName = Optional.empty();
config.username = Optional.empty();
config.password = Optional.empty();
config.poolCleanerPeriod = Duration.ofSeconds(1);
config.keepAlive = true;
config.keepAliveTimeout = Duration.ofMinutes(5);
config.trustAll = Optional.empty();
config.keyStore = Optional.empty();
config.keyStorePassword = Optional.empty();
config.truststore = new TrustStoreConfig();
config.truststore.paths = Optional.empty();
config.truststore.password = Optional.empty();
config.truststore.type = Optional.empty();
return config;
}
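    // A sketch of how a concrete test might drive this base class
    // (hypothetical flow - send() returns a Uni, awaited here to force
    // delivery within the test):
    //
    //   MailConfig config = getDefaultConfig();
    //   config.ssl = true;
    //   config.trustAll = Optional.of(true);
    //   startServer(SERVER_JKS);
    //   getMailer(config).send(getMail()).await().indefinitely();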
}
|
apache-2.0
|
leleuj/cas
|
support/cas-server-support-jpa-eclipselink/src/main/java/org/apereo/cas/eclipselink/CasEclipseLinkJpaBeanFactory.java
|
2095
|
package org.apereo.cas.eclipselink;
import org.apereo.cas.configuration.model.support.jpa.AbstractJpaProperties;
import org.apereo.cas.configuration.model.support.jpa.DatabaseProperties;
import org.apereo.cas.configuration.model.support.jpa.JpaConfigurationContext;
import org.apereo.cas.configuration.support.JpaBeans;
import org.apereo.cas.jpa.JpaBeanFactory;
import lombok.val;
import org.eclipse.persistence.config.BatchWriting;
import org.eclipse.persistence.config.PersistenceUnitProperties;
import org.eclipse.persistence.logging.SessionLog;
import org.springframework.orm.jpa.JpaVendorAdapter;
import org.springframework.orm.jpa.LocalContainerEntityManagerFactoryBean;
import org.springframework.orm.jpa.vendor.EclipseLinkJpaVendorAdapter;
import java.util.HashMap;
/**
* This is {@link CasEclipseLinkJpaBeanFactory}.
*
* @author Misagh Moayyed
* @since 6.2.0
*/
public class CasEclipseLinkJpaBeanFactory implements JpaBeanFactory {
@Override
public JpaVendorAdapter newJpaVendorAdapter(final DatabaseProperties properties) {
val adapter = new EclipseLinkJpaVendorAdapter();
adapter.setGenerateDdl(properties.isGenDdl());
adapter.setShowSql(properties.isShowSql());
return adapter;
}
@Override
public LocalContainerEntityManagerFactoryBean newEntityManagerFactoryBean(final JpaConfigurationContext config,
final AbstractJpaProperties jpaProperties) {
val bean = JpaBeans.newEntityManagerFactoryBean(config);
val map = new HashMap<String, Object>();
map.put(PersistenceUnitProperties.WEAVING, Boolean.TRUE);
map.put(PersistenceUnitProperties.DDL_GENERATION, jpaProperties.getDdlAuto());
map.put(PersistenceUnitProperties.BATCH_WRITING_SIZE, jpaProperties.getBatchSize());
map.put(PersistenceUnitProperties.BATCH_WRITING, BatchWriting.JDBC);
map.put(PersistenceUnitProperties.LOGGING_LEVEL, SessionLog.FINE_LABEL);
bean.getJpaPropertyMap().putAll(map);
return bean;
}
}
|
apache-2.0
|