file_name (string, 3–137 chars) | prefix (string, 0–918k chars) | suffix (string, 0–962k chars) | middle (string, 0–812k chars)
---|---|---|---|
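Each row splits one source file into a prefix, a suffix, and a held-out middle; the original file is recovered as prefix + middle + suffix. A minimal reconstruction sketch (the row dict and its keys are assumptions based on the column names above):

def reconstruct(row: dict) -> str:
    # the held-out middle sits between prefix and suffix in the original file
    return row["prefix"] + row["middle"] + row["suffix"]

row = {"prefix": "def f(", "middle": "x", "suffix": "):\n    return x"}
assert reconstruct(row) == "def f(x):\n    return x"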
template-if.js
|
/******/ (function(modules) { // webpackBootstrap
/******/ // The module cache
/******/ var installedModules = {};
/******/ // The require function
/******/ function __webpack_require__(moduleId) {
/******/ // Check if module is in cache
/******/ if(installedModules[moduleId])
/******/ return installedModules[moduleId].exports;
/******/ // Create a new module (and put it into the cache)
/******/ var module = installedModules[moduleId] = {
/******/ exports: {},
/******/ id: moduleId,
/******/ loaded: false
/******/ };
/******/ // Execute the module function
/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__);
/******/ // Flag the module as loaded
/******/ module.loaded = true;
/******/ // Return the exports of the module
/******/ return module.exports;
/******/ }
/******/ // expose the modules object (__webpack_modules__)
/******/ __webpack_require__.m = modules;
/******/ // expose the module cache
/******/ __webpack_require__.c = installedModules;
/******/ // __webpack_public_path__
/******/ __webpack_require__.p = "";
/******/ // Load entry module and return exports
/******/ return __webpack_require__(0);
/******/ })
/************************************************************************/
/******/ ({
/***/ 0:
/***/ function(module, exports, __webpack_require__) {
var __weex_template__ = __webpack_require__(363)
var __weex_style__ = __webpack_require__(364)
var __weex_script__ = __webpack_require__(365)
__weex_define__('@weex-component/6019f2874c768d147034c91e003e5ba4', [], function(__weex_require__, __weex_exports__, __weex_module__) {
__weex_script__(__weex_module__, __weex_exports__, __weex_require__)
if (__weex_exports__.__esModule && __weex_exports__.default) {
__weex_module__.exports = __weex_exports__.default
}
__weex_module__.exports.template = __weex_template__
__weex_module__.exports.style = __weex_style__
})
__weex_bootstrap__('@weex-component/6019f2874c768d147034c91e003e5ba4',undefined,undefined)
/***/ },
/***/ 363:
/***/ function(module, exports) {
module.exports = {
"type": "div",
"children": [
{
"type": "text",
"classList": [
"btn"
],
"events": {
"click": "toggle"
},
"attr": {
"value": "Toggle"
}
},
{
"type": "text",
"shown": function () {return this.flag},
"classList": [
"title"
],
"attr": {
"value": "I'm ON"
}
},
{
"type": "text",
"shown": function () {return !this.flag},
"classList": [
"title"
],
"attr": {
"value": "I'm Off"
}
}
]
}
/***/ },
|
/***/ 364:
/***/ function(module, exports) {
module.exports = {
"title": {
"fontSize": 48
},
"btn": {
"fontSize": 36,
"textAlign": "center",
"color": "#FFFFFF",
"backgroundColor": "#808080",
"padding": 20,
"borderRadius": 5
}
}
/***/ },
/***/ 365:
/***/ function(module, exports) {
module.exports = function(module, exports, __weex_require__){"use strict";
module.exports = {
data: function () {return {
flag: true
}},
methods: {
toggle: function toggle(e) {
this.flag = !this.flag;
}
}
};}
/* generated by weex-loader */
/***/ }
/******/ });
| |
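The bootstrap above is webpack's standard CommonJS-style loader: a registry of module factories, a cache of instantiated modules, and a require function that runs each factory once and memoizes its exports. A rough Python analogue of that caching logic (illustrative only, not webpack's actual API):

modules = {}    # module id -> factory(module, exports, require), like the webpack modules object
installed = {}  # instantiated-module cache, like installedModules

def require(module_id):
    if module_id in installed:                      # cache hit: return memoized exports
        return installed[module_id]["exports"]
    module = installed[module_id] = {"exports": {}, "id": module_id, "loaded": False}
    modules[module_id](module, module["exports"], require)  # run the factory exactly once
    module["loaded"] = True
    return module["exports"]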
off.rs
|
// rustfmt-use_small_heuristics: Off
enum Lorem {
Ipsum,
Dolor(bool),
Sit {
amet: Consectetur,
adipiscing: Elit,
},
}
fn main() {
lorem("lorem", "ipsum", "dolor", "sit", "amet", "consectetur", "adipiscing");
let lorem = Lorem {
ipsum: dolor,
sit: amet,
};
let lorem = if ipsum {
dolor
} else
|
;
}
|
{
sit
}
|
keygen.go
|
package cbnt
import (
"crypto"
"crypto/aes"
"crypto/cipher"
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"encoding/pem"
"fmt"
"io/ioutil"
"os"
"strings"
)
const (
// Supported RSA bit lengths of Intel TXT/CBnT technology
rsaLen2048 = int(2048)
rsaLen3072 = int(3072)
)
// GenRSAKey takes the required key length, a password and the KM and BPM public/private key files,
// generates two RSA key pairs and writes their public and private keys to the given files.
func GenRSAKey(len int, password string, kmPubFile, kmPrivFile, bpmPubFile, bpmPrivFile *os.File) error {
if len == rsaLen2048 || len == rsaLen3072 {
key, err := rsa.GenerateKey(rand.Reader, len)
if err != nil {
return err
}
if err := writePrivKeyToFile(key, kmPrivFile, password); err != nil {
return err
}
if err := writePubKeyToFile(key.Public(), kmPubFile); err != nil {
return err
}
key, err = rsa.GenerateKey(rand.Reader, len)
if err != nil {
return err
}
if err := writePrivKeyToFile(key, bpmPrivFile, password); err != nil {
return err
}
if err := writePubKeyToFile(key.Public(), bpmPubFile); err != nil {
return err
}
return nil
}
return fmt.Errorf("RSA key length must be 2048 or 3084 Bits, but length is: %d", len)
}
// GenECCKey takes the required curve size, a password and the KM and BPM public/private key files,
// generates two ECDSA key pairs and writes their public and private keys to the given files.
func GenECCKey(curve int, password string, kmPubFile, kmPrivFile, bpmPubFile, bpmPrivFile *os.File) error {
var ellCurve elliptic.Curve
switch curve {
case 224:
ellCurve = elliptic.P224()
case 256:
ellCurve = elliptic.P256()
default:
return fmt.Errorf("selected ECC algorithm not supported")
}
key, err := ecdsa.GenerateKey(ellCurve, rand.Reader)
if err != nil {
return err
}
if err := writePrivKeyToFile(key, kmPrivFile, password); err != nil {
return err
}
if err := writePubKeyToFile(key.Public(), kmPubFile); err != nil {
return err
}
key, err = ecdsa.GenerateKey(ellCurve, rand.Reader)
if err != nil {
return err
}
if err := writePrivKeyToFile(key, bpmPrivFile, password); err != nil {
return err
}
if err := writePubKeyToFile(key.Public(), bpmPubFile); err != nil {
return err
}
return nil
}
func writePrivKeyToFile(k crypto.PrivateKey, f *os.File, password string) error {
var key *[]byte
b, err := x509.MarshalPKCS8PrivateKey(k)
if err != nil {
return fmt.Errorf("unable to marshal the private key: %w", err)
}
bpemBlock := &pem.Block{
Bytes: b,
}
bpem := pem.EncodeToMemory(bpemBlock)
if password != "" {
encKey, err := encryptPrivFile(&bpem, password)
if err != nil {
return err
}
key = encKey
} else {
key = &bpem
}
_, err = f.Write(*key)
if err != nil {
return err
}
return nil
}
func writePubKeyToFile(k crypto.PublicKey, f *os.File) error {
b, err := x509.MarshalPKIXPublicKey(k)
if err != nil
|
bpemBlock := &pem.Block{
Bytes: b,
}
bpem := pem.EncodeToMemory(bpemBlock)
_, err = f.Write(bpem)
if err != nil {
return err
}
return nil
}
func encryptPrivFile(data *[]byte, password string) (*[]byte, error) {
// Hash key to select aes-256 -> using SHA256
hash := crypto.SHA256.New()
if _, err := hash.Write([]byte(password)); err != nil {
return nil, fmt.Errorf("unable to hash: %w", err)
}
hashPW := hash.Sum(nil)
bc, err := aes.NewCipher(hashPW)
if err != nil {
return nil, err
}
gcm, err := cipher.NewGCM(bc)
if err != nil {
return nil, err
}
nonce := make([]byte, gcm.NonceSize())
if _, err := rand.Read(nonce); err != nil {
return nil, err
}
ct := gcm.Seal(nonce, nonce, *data, nil)
return &ct, nil
}
// DecryptPrivKey takes the encrypted key as a byte slice and the password, decrypts the private key and returns it with its type.
func DecryptPrivKey(data []byte, password string) (crypto.PrivateKey, error) {
var plain []byte
if password != "" {
// Set up the crypto stuff
hash := crypto.SHA256.New()
if _, err := hash.Write([]byte(password)); err != nil {
return nil, fmt.Errorf("unable to hash: %w", err)
}
hashPW := hash.Sum(nil)
aes, err := aes.NewCipher(hashPW)
if err != nil {
return nil, err
}
aesGCM, err := cipher.NewGCM(aes)
if err != nil {
return nil, err
}
nonceSize := aesGCM.NonceSize()
nonce, ciphertext := data[:nonceSize], data[nonceSize:]
plain, err = aesGCM.Open(nil, nonce, ciphertext, nil)
if err != nil {
return nil, err
}
} else {
plain = data
}
key, err := parsePrivateKey(plain)
if err != nil {
return nil, err
}
return key, nil
}
// ReadPubKey reads a PEM encoded RSA/ECC public key file
func ReadPubKey(path string) (crypto.PublicKey, error) {
raw, err := ioutil.ReadFile(path)
if err != nil {
return nil, err
}
for {
block, rest := pem.Decode(raw)
if block == nil {
break
}
if !strings.Contains(block.Type, "CERTIFICATE") {
if strings.Contains(block.Type, "RSA") {
key, err := x509.ParsePKCS1PublicKey(block.Bytes)
if err != nil {
return nil, fmt.Errorf("parsing error in x509.ParsePKCS1PublicKey: %v", err)
}
return key, nil
}
key, err := x509.ParsePKIXPublicKey(block.Bytes)
if err == nil {
if key, ok := key.(crypto.PublicKey); ok {
return key, nil
}
return nil, fmt.Errorf("found unknown public key type (%T) in PKIX wrapping", key)
}
return nil, err
}
raw = rest
}
return nil, fmt.Errorf("failed to parse public key")
}
|
{
return err
}
|
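writePrivKeyToFile and DecryptPrivKey above share one scheme: the AES-256-GCM key is the SHA-256 hash of the password, and the random nonce is prepended to the ciphertext. A Python sketch of the same round trip, using the third-party cryptography package (note that a bare SHA-256 of a password, here as in the Go code, is not a proper key-derivation function):

import hashlib, os
from cryptography.hazmat.primitives.ciphers.aead import AESGCM

def encrypt_priv(data: bytes, password: str) -> bytes:
    key = hashlib.sha256(password.encode()).digest()        # AES-256 key, as in encryptPrivFile
    nonce = os.urandom(12)                                  # standard GCM nonce size
    return nonce + AESGCM(key).encrypt(nonce, data, None)   # store nonce || ciphertext

def decrypt_priv(blob: bytes, password: str) -> bytes:
    key = hashlib.sha256(password.encode()).digest()
    nonce, ct = blob[:12], blob[12:]                        # split as in DecryptPrivKey
    return AESGCM(key).decrypt(nonce, ct, None)

assert decrypt_priv(encrypt_priv(b"pem bytes", "pw"), "pw") == b"pem bytes"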
LocalStreamMedia.tsx
|
// © Microsoft Corporation. All rights reserved.
import React, { useEffect, useState } from 'react';
import { Image, ImageFit, Label } from '@fluentui/react';
import { LocalVideoStream, Renderer, RendererView } from '@azure/communication-calling';
import { videoHint, mediaContainer, localVideoContainerStyle } from './styles/StreamMedia.styles';
import { Constants } from '../core/constants';
import staticMediaSVG from '../assets/staticmedia.svg';
export interface LocalStreamMediaProps {
label: string;
stream: LocalVideoStream;
}
export default (props: LocalStreamMediaProps): JSX.Element => {
let rendererView: RendererView;
const [available, setAvailable] = useState(false);
const imageProps = {
src: staticMediaSVG.toString(),
imageFit: ImageFit.contain,
maximizeFrame: true
};
useEffect(() => {
(async () => {
if (props.stream) {
var renderer: Renderer = new Renderer(props.stream);
rendererView = await renderer.createView({ scalingMode: 'Crop' });
var container = document.getElementById(Constants.LOCAL_VIDEO_PREVIEW_ID);
if (container && container.childElementCount === 0) {
container.appendChild(rendererView.target);
|
setAvailable(true);
}
} else {
if (rendererView) {
rendererView.dispose();
setAvailable(false);
}
}
})();
return () => {
if (rendererView) {
rendererView.dispose();
setAvailable(false);
}
};
}, [props.stream]);
return (
<div className={mediaContainer}>
<div
style={{ display: available ? 'block' : 'none' }}
className={localVideoContainerStyle}
id={Constants.LOCAL_VIDEO_PREVIEW_ID}
/>
<Image style={{ display: available ? 'none' : 'block' }} {...imageProps} />
<Label className={videoHint}>{props.label}</Label>
</div>
);
};
| |
test_filesystem.py
|
import os
import boto3
import fsspec
import pytest
from moto import mock_s3
from datasets.filesystems import (
COMPRESSION_FILESYSTEMS,
HfFileSystem,
S3FileSystem,
extract_path_from_uri,
is_remote_filesystem,
)
from .utils import require_lz4, require_zstandard
@pytest.fixture(scope="function")
def
|
():
"""Mocked AWS Credentials for moto."""
os.environ["AWS_ACCESS_KEY_ID"] = "fake_access_key"
os.environ["AWS_SECRET_ACCESS_KEY"] = "fake_secret_key"
os.environ["AWS_SECURITY_TOKEN"] = "fake_secrurity_token"
os.environ["AWS_SESSION_TOKEN"] = "fake_session_token"
@pytest.fixture(scope="function")
def s3(aws_credentials):
with mock_s3():
yield boto3.client("s3", region_name="us-east-1")
def test_extract_path_from_uri(s3):
mock_bucket = "moto-mock-s3-bucket"
# We need to create the bucket since this is all in Moto's 'virtual' AWS account
s3.create_bucket(Bucket=mock_bucket)
dataset_path = f"s3://{mock_bucket}"
dataset_path = extract_path_from_uri(dataset_path)
assert dataset_path.startswith("s3://") is False
dataset_path = "./local/path"
new_dataset_path = extract_path_from_uri(dataset_path)
assert dataset_path == new_dataset_path
def test_is_remote_filesystem():
fs = S3FileSystem(key="fake_access_key", secret="fake_secret")
is_remote = is_remote_filesystem(fs)
assert is_remote is True
fs = fsspec.filesystem("file")
is_remote = is_remote_filesystem(fs)
assert is_remote is False
@require_zstandard
@require_lz4
@pytest.mark.parametrize("compression_fs_class", COMPRESSION_FILESYSTEMS)
def test_compression_filesystems(compression_fs_class, gz_file, bz2_file, lz4_file, zstd_file, xz_file, text_file):
input_paths = {"gzip": gz_file, "xz": xz_file, "zstd": zstd_file, "bz2": bz2_file, "lz4": lz4_file}
input_path = str(input_paths[compression_fs_class.protocol])
fs = fsspec.filesystem(compression_fs_class.protocol, fo=input_path)
assert isinstance(fs, compression_fs_class)
expected_filename = os.path.basename(input_path)
expected_filename = expected_filename[: expected_filename.rindex(".")]
assert fs.ls("/") == [expected_filename]
with fs.open(expected_filename, "r", encoding="utf-8") as f, open(text_file, encoding="utf-8") as expected_file:
assert f.read() == expected_file.read()
def test_hf_filesystem(hf_token, hf_api, hf_private_dataset_repo_txt_data, text_file):
repo_info = hf_api.dataset_info(hf_private_dataset_repo_txt_data, token=hf_token)
hffs = HfFileSystem(repo_info=repo_info, token=hf_token)
assert sorted(hffs.glob("*")) == [".gitattributes", "data.txt"]
with open(text_file) as f:
assert hffs.open("data.txt", "r").read() == f.read()
|
aws_credentials
|
serializer.rs
|
use super::util::internal_number_from_string;
use xrpl_types::{Amount, Memo, Transaction};
// https://xrpl.org/serialization.html
// https://github.com/ripple/ripple-binary-codec/blob/master/src/enums/definitions.json
// https://xrpl.org/basic-data-types.html#hash-prefixes
pub const HASH_PREFIX_TRANSACTION: [u8; 4] = [0x53, 0x4E, 0x44, 0x00];
pub const HASH_PREFIX_UNSIGNED_TRANSACTION_SINGLE: [u8; 4] = [0x53, 0x54, 0x58, 0x00];
// TODO: Define type_code constants / enum
#[derive(Default)]
pub struct Serializer {
pub buf: Vec<u8>,
}
impl Serializer {
pub fn new() -> Self {
Self::default()
}
pub fn push(&mut self, value: u8) {
self.buf.push(value);
}
pub fn push_u16(&mut self, value: u16) {
self.push((value >> 8) as u8);
self.push((value & 0xff) as u8);
}
pub fn push_u32(&mut self, value: u32) {
self.push((value >> 24) as u8);
self.push(((value >> 16) & 0xff) as u8);
self.push(((value >> 8) & 0xff) as u8);
self.push((value & 0xff) as u8);
}
pub fn push_u64(&mut self, value: u64) {
self.push((value >> 56) as u8);
self.push(((value >> 48) & 0xff) as u8);
self.push(((value >> 40) & 0xff) as u8);
self.push(((value >> 32) & 0xff) as u8);
self.push(((value >> 24) & 0xff) as u8);
self.push(((value >> 16) & 0xff) as u8);
self.push(((value >> 8) & 0xff) as u8);
self.push((value & 0xff) as u8);
}
/// https://xrpl.org/serialization.html#field-ids
/// https://github.com/seelabs/rippled/blob/cecc0ad75849a1d50cc573188ad301ca65519a5b/src/ripple/protocol/impl/Serializer.cpp#L117-L148
pub fn push_field_id(&mut self, type_code: u8, field_code: u8) {
if type_code < 16 {
if field_code < 16 {
self.push(type_code << 4 | field_code);
} else {
self.push(type_code << 4);
self.push(field_code);
}
} else if field_code < 16 {
self.push(field_code);
self.push(type_code);
} else {
self.push(0);
self.push(type_code);
self.push(field_code);
}
}
/// https://xrpl.org/serialization.html#length-prefixing
pub fn push_vl_prefix(&mut self, length: u32) {
if length < 192 {
self.push(length as u8);
} else if length <= 12480 {
let length = length - 192;
self.push(193 + (length >> 8) as u8);
self.push((length & 0xff) as u8);
} else if length <= 918744 {
let length = length - 12481;
self.push(241 + (length >> 16) as u8);
self.push(((length >> 8) & 0xff) as u8);
self.push((length & 0xff) as u8);
} else {
todo!()
}
}
// TODO: use more descriptive name.
pub fn push_slice(&mut self, bytes: &[u8]) {
for byte in bytes {
self.push(*byte);
}
}
pub fn push_drops_amount(&mut self, value: u64) {
self.push_u64(value | 0x4000000000000000);
}
/// - https://xrpl.org/serialization.html#issued-currency-amount-format
/// - https://github.com/ripple/ripple-binary-codec/blob/master/src/types/amount.ts
/// - https://github.com/ripple/rippled/blob/develop/src/ripple/protocol/impl/STAmount.cpp
pub fn push_issued_amount(&mut self, value: &str, currency: &str, issuer: &str) {
self.push_u64(internal_number_from_string(value));
self.push_currency(currency);
self.push_account_id(issuer);
}
pub fn push_amount(&mut self, amount: &Amount) {
match amount {
Amount::Drops(value) => self.push_drops_amount(value.parse::<u64>().unwrap()),
Amount::Issued {
value,
currency,
issuer,
} => self.push_issued_amount(value, currency, issuer),
}
}
/// https://xrpl.org/serialization.html#currency-codes
pub fn push_currency(&mut self, currency: &str) {
// Non-standard currency codes are 160 bits = 20 bytes in hex (40 chars).
if currency.len() == 40 {
// Non-standard currency code.
let currency_bytes = hex::decode(currency).unwrap();
// if currency_bytes[0] == 0x00 {
self.push_slice(¤cy_bytes);
return;
// }
}
// Standard currency code.
// 8 bits
self.push(0x00);
// 88 bits
for _ in 0..11 {
self.push(0x00);
}
// 24 bits
self.push_slice(currency.as_bytes());
// 40 bits
for _ in 0..5 {
self.push(0x00);
}
}
pub fn push_account_id(&mut self, id: &str) {
// https://xrpl.org/serialization.html#accountid-fields
// https://xrpl.org/accounts.html#address-encoding
let decoded = bs58::decode(id)
.with_alphabet(bs58::Alphabet::RIPPLE)
.into_vec()
.unwrap();
// Skip the 0x00 ('r') version prefix, skip the 4-byte checksum postfix.
let decoded = &decoded[1..21];
self.push_slice(decoded);
}
// TODO: implement generic `push_array`
// https://xrpl.org/serialization.html#array-fields
pub fn push_blob(&mut self, field_code: u8, blob: &[u8]) {
self.push_field_id(7, field_code);
self.push_vl_prefix(blob.len() as u32);
self.push_slice(blob);
}
pub fn push_memo(&mut self, memo: &Memo) {
// https://xrpl.org/serialization.html#object-fields
self.push_field_id(14, 10);
self.push_blob(12, &memo.memo_type);
self.push_blob(13, &memo.memo_data);
if let Some(memo_format) = &memo.memo_format {
self.push_blob(14, memo_format);
}
self.push(0xe1); // Object end
}
/// ## Serialization order
///
/// https://github.com/ripple/rippled/blob/master/src/ripple/protocol/impl/SField.cpp
///
/// 16 bit integers (common)
///
/// transaction_type: u16, UInt16, 1, 2 "TransactionType"
///
|
///
/// flags: u32, UInt32, 2, 2 "Flags"
/// sequence: u32, UInt32, 2, 4 "Sequence"
///
/// 32 bit integers (uncommon)
///
/// quality_in: u32, UInt32, 2, 20 "QualityIn"
/// quality_out: u32, UInt32, 2, 21 "QualityOut"
/// offer_sequence: u32, UInt32, 2, 25 "OfferSequence"
/// last_ledger_sequence: u32, UInt32, 2, 27
///
/// currency amount (common)
///
/// amount: String, Amount, 6, 1 "Amount"
/// limit_amount: String, Amount, 6, 3 "LimitAmount"
/// taker_pays: String, Amount, 6, 4 "TakerPays"
/// taker_gets: String, Amount, 6, 5 "TakerGets"
/// fee: String, Amount, 6, 8 "Fee"
///
/// variable length (common)
///
/// signing_public_key, SigningPubKey Blob, VL, 7, 3
/// signature, TxnSignature Blob, VL, 7, 4
///
/// account
///
/// account: String, AccountID, VL, 8, 1 "Account"
/// destination: String, AccountID, VL, 8, 3 "Destination"
///
/// array of objects
///
/// memos: Vec<Memo>, STArray, 15, 9 "Memos"
pub fn push_transaction(&mut self, tx: &Transaction, prefix: Option<&[u8]>) {
if let Some(prefix) = prefix {
self.push_slice(prefix);
}
// 16 bit integers (common)
self.push_field_id(1, 2);
self.push_u16(tx.transaction_type as u16);
// 32 bit integers (common)
if let Some(flags) = tx.flags {
self.push_field_id(2, 2);
self.push_u32(flags);
}
self.push_field_id(2, 4);
self.push_u32(tx.sequence.unwrap());
// 32 bit integers (uncommon)
if let Some(quality_in) = tx.quality_in {
self.push_field_id(2, 20);
self.push_u32(quality_in);
}
if let Some(quality_out) = tx.quality_out {
self.push_field_id(2, 21);
self.push_u32(quality_out);
}
if let Some(offer_sequence) = tx.offer_sequence {
self.push_field_id(2, 25);
self.push_u32(offer_sequence);
}
self.push_field_id(2, 27);
self.push_u32(tx.last_ledger_sequence.unwrap());
// currency amount (common)
if let Some(amount) = &tx.amount {
self.push_field_id(6, 1);
self.push_amount(amount);
}
if let Some(limit_amount) = &tx.limit_amount {
self.push_field_id(6, 3);
self.push_amount(limit_amount);
}
if let Some(taker_pays) = &tx.taker_pays {
self.push_field_id(6, 4);
self.push_amount(taker_pays);
}
if let Some(taker_gets) = &tx.taker_gets {
self.push_field_id(6, 5);
self.push_amount(taker_gets);
}
self.push_field_id(6, 8);
self.push_drops_amount(tx.fee.unwrap());
// variable length (common)
if let Some(signing_public_key) = &tx.signing_public_key {
self.push_field_id(7, 3);
self.push_vl_prefix(signing_public_key.len() as u32);
self.push_slice(signing_public_key);
}
if let Some(signature) = &tx.signature {
self.push_field_id(7, 4);
self.push_vl_prefix(signature.len() as u32);
self.push_slice(signature);
}
// account
self.push_field_id(8, 1);
self.push_vl_prefix(160 / 8);
self.push_account_id(&tx.account);
if let Some(destination) = &tx.destination {
self.push_field_id(8, 3);
self.push_vl_prefix(160 / 8);
self.push_account_id(destination);
}
// array of objects
if let Some(memos) = &tx.memos {
// https://xrpl.org/serialization.html#array-fields
self.push_field_id(15, 9);
for memo in memos {
self.push_memo(memo);
}
// self.push_field_id(15, 1);
self.push(0xf1); // Array end
}
}
pub fn to_vec(&self) -> Vec<u8> {
self.buf.clone()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_push_currency() {
let mut s = Serializer::new();
s.push_currency("XRP");
let h = hex::encode(s.to_vec());
assert_eq!(h, "0000000000000000000000005852500000000000");
}
#[test]
fn test_push_issued_amount() {
let mut s = Serializer::new();
s.push_issued_amount("1200.34", "USD", "rhub8VRN55s94qWKDv6jmDy1pUykJzF3wq");
let h = hex::encode(s.to_vec());
// println!("{}", h);
assert_eq!(h, "d54443b3ef4f480000000000000000000000000055534400000000002adb0b3959d60a6e6991f729e1918b7163925230");
}
}
|
/// 32 bit integers (common)
|
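push_vl_prefix above implements the XRPL variable-length prefix, which uses one, two, or three bytes depending on the payload length. The same three ranges rendered in Python, useful as a cross-check against the Rust:

def vl_prefix(length: int) -> bytes:
    # https://xrpl.org/serialization.html#length-prefixing
    if length < 192:
        return bytes([length])                                   # 1-byte prefix
    if length <= 12480:
        length -= 192
        return bytes([193 + (length >> 8), length & 0xFF])       # 2-byte prefix
    if length <= 918744:
        length -= 12481
        return bytes([241 + (length >> 16), (length >> 8) & 0xFF, length & 0xFF])
    raise ValueError("length too large for a VL prefix")

assert vl_prefix(0) == b"\x00"
assert vl_prefix(193) == bytes([193, 1])   # 193 falls in the second range, offset by 192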
types.py
|
import dataclasses
import enum
import typing
import dacite
@dataclasses.dataclass
class IgnoreArea:
x: int = None
y: int = None
width: int = None
height: int = None
@dataclasses.dataclass
class TestRun:
name: str = None
imageBase64: str = None
os: str = None
browser: str = None
|
device: str = None
diffTollerancePercent: float = None
ignoreAreas: typing.List[IgnoreArea] = None
@dataclasses.dataclass
class Build:
id: str = None
projectId: str = None
class TestRunStatus(enum.Enum):
NEW = 'new'
OK = 'ok'
UNRESOLVED = 'unresolved'
@dataclasses.dataclass
class TestRunResponse:
id: str = None
imageName: str = None
diffName: typing.Optional[str] = None
baselineName: typing.Optional[str] = None
url: str = None
merge: bool = False
status: TestRunStatus = None
pixelMisMatchCount: typing.Optional[float] = None
diffPercent: typing.Optional[float] = None
diffTollerancePercent: typing.Optional[float] = None
@dataclasses.dataclass
class TestRunResult:
testRunResponse: TestRunResponse = None
imageUrl: str = None
diffUrl: str = None
baselineUrl: str = None
def __init__(self, test_run_response: TestRunResponse, api_url: str):
"""
Converts image names into URLs.
:param test_run_response: The response to convert.
:param api_url: URL to use in image urls
"""
self.testRunResponse = test_run_response
self.imageUrl = f'{api_url}/{test_run_response.imageName}'
self.diffUrl = test_run_response.diffName and f'{api_url}/{test_run_response.diffName}'
self.baselineUrl = test_run_response.baselineName and f'{api_url}/{test_run_response.baselineName}'
def _to_dict(obj):
def dict_factory(key_values):
return dict(kv for kv in key_values if kv[1] is not None)
data = dataclasses.asdict(obj, dict_factory=dict_factory)
return data
def _from_dict(data, clazz):
obj = dacite.from_dict(clazz, data)
return obj
|
viewport: str = None
|
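_to_dict above serializes a dataclass through dataclasses.asdict with a dict_factory that drops None-valued fields, so partially populated objects turn into sparse payloads. A quick illustration with the TestRun dataclass:

run = TestRun(name="login page", os="linux")
# only the fields that were actually set survive serialization
assert _to_dict(run) == {"name": "login page", "os": "linux"}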
utils.py
|
if (config.get("topic_id") is not None and config.get("project_id") is not None
and config.get("subscription_id") is not None):
return True
return False
def valid_kafka(config):
if config.get("bootstrap_server") is not None and config.get("port") is not None:
return True
return False
|
def valid_pubsub(config):
|
|
zarr.py
|
import warnings
import numpy as np
from .. import coding, conventions
from ..core import indexing
from ..core.pycompat import integer_types
from ..core.utils import FrozenDict, HiddenKeyDict
from ..core.variable import Variable
from .common import AbstractWritableDataStore, BackendArray, _encode_variable_name
# need some special secret attributes to tell us the dimensions
DIMENSION_KEY = "_ARRAY_DIMENSIONS"
def encode_zarr_attr_value(value):
"""
Encode an attribute value as something that can be serialized as JSON
Many xarray datasets / variables have numpy arrays and values. This
function handles encoding / decoding of such items.
ndarray -> list
scalar array -> scalar
other -> other (no change)
"""
if isinstance(value, np.ndarray):
encoded = value.tolist()
# this checks if it's a scalar number
elif isinstance(value, np.generic):
encoded = value.item()
else:
encoded = value
return encoded
class ZarrArrayWrapper(BackendArray):
__slots__ = ("datastore", "dtype", "shape", "variable_name")
def __init__(self, variable_name, datastore):
self.datastore = datastore
self.variable_name = variable_name
array = self.get_array()
self.shape = array.shape
dtype = array.dtype
self.dtype = dtype
def get_array(self):
return self.datastore.ds[self.variable_name]
def __getitem__(self, key):
array = self.get_array()
if isinstance(key, indexing.BasicIndexer):
return array[key.tuple]
elif isinstance(key, indexing.VectorizedIndexer):
return array.vindex[
indexing._arrayize_vectorized_indexer(key, self.shape).tuple
]
else:
assert isinstance(key, indexing.OuterIndexer)
return array.oindex[key.tuple]
# if self.ndim == 0:
# could possibly have a work-around for 0d data here
def _determine_zarr_chunks(enc_chunks, var_chunks, ndim, name):
"""
Given encoding chunks (possibly None) and variable chunks (possibly None)
"""
# zarr chunk spec:
# chunks : int or tuple of ints, optional
# Chunk shape. If not provided, will be guessed from shape and dtype.
# if there are no chunks in encoding and the variable data is a numpy
# array, then we let zarr use its own heuristics to pick the chunks
if var_chunks is None and enc_chunks is None:
return None
# if there are no chunks in encoding but there are dask chunks, we try to
# use the same chunks in zarr
# However, zarr chunks needs to be uniform for each array
# http://zarr.readthedocs.io/en/latest/spec/v1.html#chunks
# while dask chunks can be variable sized
# http://dask.pydata.org/en/latest/array-design.html#chunks
if var_chunks and enc_chunks is None:
if any(len(set(chunks[:-1])) > 1 for chunks in var_chunks):
raise ValueError(
"Zarr requires uniform chunk sizes except for final chunk. "
f"Variable named {name!r} has incompatible dask chunks: {var_chunks!r}. "
"Consider rechunking using `chunk()`."
)
if any((chunks[0] < chunks[-1]) for chunks in var_chunks):
raise ValueError(
"Final chunk of Zarr array must be the same size or smaller "
f"than the first. Variable named {name!r} has incompatible Dask chunks {var_chunks!r}."
"Consider either rechunking using `chunk()` or instead deleting "
"or modifying `encoding['chunks']`."
)
# return the first chunk for each dimension
return tuple(chunk[0] for chunk in var_chunks)
# from here on, we are dealing with user-specified chunks in encoding
# zarr allows chunks to be an integer, in which case it uses the same chunk
# size on each dimension.
# Here we re-implement this expansion ourselves. That makes the logic of
# checking chunk compatibility easier
if isinstance(enc_chunks, integer_types):
enc_chunks_tuple = ndim * (enc_chunks,)
else:
enc_chunks_tuple = tuple(enc_chunks)
if len(enc_chunks_tuple) != ndim:
# throw away encoding chunks, start over
return _determine_zarr_chunks(None, var_chunks, ndim, name)
for x in enc_chunks_tuple:
if not isinstance(x, int):
raise TypeError(
"zarr chunk sizes specified in `encoding['chunks']` "
"must be an int or a tuple of ints. "
f"Instead found encoding['chunks']={enc_chunks_tuple!r} "
f"for variable named {name!r}."
)
# if there are chunks in encoding and the variable data is a numpy array,
# we use the specified chunks
if var_chunks is None:
return enc_chunks_tuple
# the hard case
# DESIGN CHOICE: do not allow multiple dask chunks on a single zarr chunk
# this avoids the need to get involved in zarr synchronization / locking
# From zarr docs:
# "If each worker in a parallel computation is writing to a separate
# region of the array, and if region boundaries are perfectly aligned
# with chunk boundaries, then no synchronization is required."
# TODO: incorporate synchronizer to allow writes from multiple dask
# threads
if var_chunks and enc_chunks_tuple:
for zchunk, dchunks in zip(enc_chunks_tuple, var_chunks):
if len(dchunks) == 1:
continue
for dchunk in dchunks[:-1]:
if dchunk % zchunk:
raise NotImplementedError(
f"Specified zarr chunks encoding['chunks']={enc_chunks_tuple!r} for "
f"variable named {name!r} would overlap multiple dask chunks {var_chunks!r}. "
"This is not implemented in xarray yet. "
"Consider either rechunking using `chunk()` or instead deleting "
"or modifying `encoding['chunks']`."
)
if dchunks[-1] > zchunk:
raise ValueError(
"Final chunk of Zarr array must be the same size or "
"smaller than the first. "
f"Specified Zarr chunk encoding['chunks']={enc_chunks_tuple}, "
f"for variable named {name!r} "
f"but {dchunks} in the variable's Dask chunks {var_chunks} is "
"incompatible with this encoding. "
"Consider either rechunking using `chunk()` or instead deleting "
"or modifying `encoding['chunks']`."
)
return enc_chunks_tuple
raise AssertionError("We should never get here. Function logic must be wrong.")
def _get_zarr_dims_and_attrs(zarr_obj, dimension_key):
# Zarr arrays do not have dimensions. To get around this problem, we add
# an attribute that specifies the dimension. We have to hide this attribute
# when we send the attributes to the user.
# zarr_obj can be either a zarr group or zarr array
try:
dimensions = zarr_obj.attrs[dimension_key]
except KeyError:
raise KeyError(
"Zarr object is missing the attribute `%s`, which is "
"required for xarray to determine variable dimensions." % (dimension_key)
)
attributes = HiddenKeyDict(zarr_obj.attrs, [dimension_key])
return dimensions, attributes
def extract_zarr_variable_encoding(variable, raise_on_invalid=False, name=None):
"""
Extract zarr encoding dictionary from xarray Variable
Parameters
----------
variable : Variable
raise_on_invalid : bool, optional
Returns
-------
encoding : dict
Zarr encoding for `variable`
"""
encoding = variable.encoding.copy()
valid_encodings = {"chunks", "compressor", "filters", "cache_metadata"}
if raise_on_invalid:
invalid = [k for k in encoding if k not in valid_encodings]
if invalid:
raise ValueError(
"unexpected encoding parameters for zarr " "backend: %r" % invalid
)
else:
for k in list(encoding):
if k not in valid_encodings:
del encoding[k]
chunks = _determine_zarr_chunks(
encoding.get("chunks"), variable.chunks, variable.ndim, name
)
encoding["chunks"] = chunks
return encoding
# Function below is copied from conventions.encode_cf_variable.
# The only change is to raise an error for object dtypes.
def encode_zarr_variable(var, needs_copy=True, name=None):
"""
Converts a Variable into a Variable which follows some
of the CF conventions:
- Nans are masked using _FillValue (or the deprecated missing_value)
- Rescaling via: scale_factor and add_offset
- datetimes are converted to the CF 'units since time' format
- dtype encodings are enforced.
Parameters
----------
var : Variable
A variable holding un-encoded data.
Returns
-------
out : Variable
A variable which has been encoded as described above.
"""
var = conventions.encode_cf_variable(var, name=name)
# zarr allows unicode, but not variable-length strings, so it's both
# simpler and more compact to always encode as UTF-8 explicitly.
# TODO: allow toggling this explicitly via dtype in encoding.
coder = coding.strings.EncodedStringCoder(allows_unicode=True)
var = coder.encode(var, name=name)
var = coding.strings.ensure_fixed_length_bytes(var)
return var
class ZarrStore(AbstractWritableDataStore):
"""Store for reading and writing data via zarr"""
__slots__ = (
"append_dim",
"ds",
"_consolidate_on_close",
"_group",
"_read_only",
"_synchronizer",
)
@classmethod
def open_group(
cls,
store,
mode="r",
synchronizer=None,
group=None,
consolidated=False,
consolidate_on_close=False,
chunk_store=None,
):
import zarr
open_kwargs = dict(mode=mode, synchronizer=synchronizer, path=group)
if chunk_store:
open_kwargs["chunk_store"] = chunk_store
if consolidated:
# TODO: an option to pass the metadata_key keyword
zarr_group = zarr.open_consolidated(store, **open_kwargs)
else:
zarr_group = zarr.open_group(store, **open_kwargs)
return cls(zarr_group, consolidate_on_close)
def __init__(self, zarr_group, consolidate_on_close=False):
self.ds = zarr_group
self._read_only = self.ds.read_only
self._synchronizer = self.ds.synchronizer
self._group = self.ds.path
self._consolidate_on_close = consolidate_on_close
self.append_dim = None
def open_store_variable(self, name, zarr_array):
data = indexing.LazilyOuterIndexedArray(ZarrArrayWrapper(name, self))
dimensions, attributes = _get_zarr_dims_and_attrs(zarr_array, DIMENSION_KEY)
attributes = dict(attributes)
encoding = {
"chunks": zarr_array.chunks,
"compressor": zarr_array.compressor,
"filters": zarr_array.filters,
}
# _FillValue needs to be in attributes, not encoding, so it will get
# picked up by decode_cf
if getattr(zarr_array, "fill_value") is not None:
attributes["_FillValue"] = zarr_array.fill_value
return Variable(dimensions, data, attributes, encoding)
def get_variables(self):
return FrozenDict(
(k, self.open_store_variable(k, v)) for k, v in self.ds.arrays()
)
def get_attrs(self):
attributes = dict(self.ds.attrs.asdict())
return attributes
def get_dimensions(self):
dimensions = {}
for k, v in self.ds.arrays():
try:
for d, s in zip(v.attrs[DIMENSION_KEY], v.shape):
if d in dimensions and dimensions[d] != s:
raise ValueError(
"found conflicting lengths for dimension %s "
"(%d != %d)" % (d, s, dimensions[d])
)
dimensions[d] = s
except KeyError:
raise KeyError(
"Zarr object is missing the attribute `%s`, "
"which is required for xarray to determine "
"variable dimensions." % (DIMENSION_KEY)
)
return dimensions
def set_dimensions(self, variables, unlimited_dims=None):
if unlimited_dims is not None:
raise NotImplementedError(
"Zarr backend doesn't know how to handle unlimited dimensions"
)
def set_attributes(self, attributes):
self.ds.attrs.put(attributes)
def encode_variable(self, variable):
variable = encode_zarr_variable(variable)
return variable
def encode_attribute(self, a):
return encode_zarr_attr_value(a)
def store(
self,
variables,
attributes,
check_encoding_set=frozenset(),
writer=None,
unlimited_dims=None,
):
"""
Top level method for putting data on this store, this method:
- encodes variables/attributes
- sets dimensions
- sets variables
Parameters
----------
variables : dict-like
Dictionary of key/value (variable name / xr.Variable) pairs
attributes : dict-like
Dictionary of key/value (attribute name / attribute) pairs
check_encoding_set : list-like
List of variables that should be checked for invalid encoding
values
writer : ArrayWriter
unlimited_dims : list-like
List of dimension names that should be treated as unlimited
dimensions.
"""
existing_variables = {
vn for vn in variables if _encode_variable_name(vn) in self.ds
}
new_variables = set(variables) - existing_variables
variables_without_encoding = {vn: variables[vn] for vn in new_variables}
variables_encoded, attributes = self.encode(
variables_without_encoding, attributes
)
if len(existing_variables) > 0:
# there are variables to append
# their encoding must be the same as in the store
ds = open_zarr(self.ds.store, group=self.ds.path, chunks=None)
variables_with_encoding = {}
for vn in existing_variables:
variables_with_encoding[vn] = variables[vn].copy(deep=False)
variables_with_encoding[vn].encoding = ds[vn].encoding
variables_with_encoding, _ = self.encode(variables_with_encoding, {})
variables_encoded.update(variables_with_encoding)
self.set_attributes(attributes)
self.set_dimensions(variables_encoded, unlimited_dims=unlimited_dims)
self.set_variables(
variables_encoded, check_encoding_set, writer, unlimited_dims=unlimited_dims
)
def
|
(self):
pass
def set_variables(self, variables, check_encoding_set, writer, unlimited_dims=None):
"""
This provides a centralized method to set the variables on the data
store.
Parameters
----------
variables : dict-like
Dictionary of key/value (variable name / xr.Variable) pairs
check_encoding_set : list-like
List of variables that should be checked for invalid encoding
values
writer :
unlimited_dims : list-like
List of dimension names that should be treated as unlimited
dimensions.
"""
for vn, v in variables.items():
name = _encode_variable_name(vn)
check = vn in check_encoding_set
attrs = v.attrs.copy()
dims = v.dims
dtype = v.dtype
shape = v.shape
fill_value = attrs.pop("_FillValue", None)
if v.encoding == {"_FillValue": None} and fill_value is None:
v.encoding = {}
if self.append_dim is not None and self.append_dim in dims:
# resize existing variable
zarr_array = self.ds[name]
append_axis = dims.index(self.append_dim)
new_region = [slice(None)] * len(dims)
new_region[append_axis] = slice(zarr_array.shape[append_axis], None)
region = tuple(new_region)
new_shape = list(zarr_array.shape)
new_shape[append_axis] += v.shape[append_axis]
zarr_array.resize(new_shape)
elif name in self.ds:
# override existing variable
zarr_array = self.ds[name]
region = None
else:
# new variable
encoding = extract_zarr_variable_encoding(
v, raise_on_invalid=check, name=vn
)
encoded_attrs = {}
# the magic for storing the hidden dimension data
encoded_attrs[DIMENSION_KEY] = dims
for k2, v2 in attrs.items():
encoded_attrs[k2] = self.encode_attribute(v2)
if coding.strings.check_vlen_dtype(dtype) == str:
dtype = str
zarr_array = self.ds.create(
name, shape=shape, dtype=dtype, fill_value=fill_value, **encoding
)
zarr_array.attrs.put(encoded_attrs)
region = None
writer.add(v.data, zarr_array, region=region)
def close(self):
if self._consolidate_on_close:
import zarr
zarr.consolidate_metadata(self.ds.store)
def open_zarr(
store,
group=None,
synchronizer=None,
chunks="auto",
decode_cf=True,
mask_and_scale=True,
decode_times=True,
concat_characters=True,
decode_coords=True,
drop_variables=None,
consolidated=False,
overwrite_encoded_chunks=False,
chunk_store=None,
decode_timedelta=None,
use_cftime=None,
**kwargs,
):
"""Load and decode a dataset from a Zarr store.
.. note:: Experimental
The Zarr backend is new and experimental. Please report any
unexpected behavior via github issues.
The `store` object should be a valid store for a Zarr group. `store`
variables must contain dimension metadata encoded in the
`_ARRAY_DIMENSIONS` attribute.
Parameters
----------
store : MutableMapping or str
A MutableMapping where a Zarr Group has been stored or a path to a
directory in file system where a Zarr DirectoryStore has been stored.
synchronizer : object, optional
Array synchronizer provided to zarr
group : str, optional
Group path. (a.k.a. `path` in zarr terminology.)
chunks : int or dict or tuple or {None, 'auto'}, optional
Chunk sizes along each dimension, e.g., ``5`` or
``{'x': 5, 'y': 5}``. If `chunks='auto'`, dask chunks are created
based on the variable's zarr chunks. If `chunks=None`, zarr array
data will lazily convert to numpy arrays upon access. This accepts
all the chunk specifications as Dask does.
overwrite_encoded_chunks: bool, optional
Whether to drop the zarr chunks encoded for each variable when a
dataset is loaded with specified chunk sizes (default: False)
decode_cf : bool, optional
Whether to decode these variables, assuming they were saved according
to CF conventions.
mask_and_scale : bool, optional
If True, replace array values equal to `_FillValue` with NA and scale
values according to the formula `original_values * scale_factor +
add_offset`, where `_FillValue`, `scale_factor` and `add_offset` are
taken from variable attributes (if they exist). If the `_FillValue` or
`missing_value` attribute contains multiple values a warning will be
issued and all array values matching one of the multiple values will
be replaced by NA.
decode_times : bool, optional
If True, decode times encoded in the standard NetCDF datetime format
into datetime objects. Otherwise, leave them encoded as numbers.
concat_characters : bool, optional
If True, concatenate along the last dimension of character arrays to
form string arrays. Dimensions will only be concatenated over (and
removed) if they have no corresponding variable and if they are only
used as the last dimension of character arrays.
decode_coords : bool, optional
If True, decode the 'coordinates' attribute to identify coordinates in
the resulting dataset.
drop_variables : str or iterable, optional
A variable or list of variables to exclude from being parsed from the
dataset. This may be useful to drop variables with problems or
inconsistent values.
consolidated : bool, optional
Whether to open the store using zarr's consolidated metadata
capability. Only works for stores that have already been consolidated.
chunk_store : MutableMapping, optional
A separate Zarr store only for chunk data.
decode_timedelta : bool, optional
If True, decode variables and coordinates with time units in
{'days', 'hours', 'minutes', 'seconds', 'milliseconds', 'microseconds'}
into timedelta objects. If False, leave them encoded as numbers.
If None (default), assume the same value of decode_time.
use_cftime: bool, optional
Only relevant if encoded dates come from a standard calendar
(e.g. "gregorian", "proleptic_gregorian", "standard", or not
specified). If None (default), attempt to decode times to
``np.datetime64[ns]`` objects; if this is not possible, decode times to
``cftime.datetime`` objects. If True, always decode times to
``cftime.datetime`` objects, regardless of whether or not they can be
represented using ``np.datetime64[ns]`` objects. If False, always
decode times to ``np.datetime64[ns]`` objects; if this is not possible
raise an error.
Returns
-------
dataset : Dataset
The newly created dataset.
See Also
--------
open_dataset
References
----------
http://zarr.readthedocs.io/
"""
if "auto_chunk" in kwargs:
auto_chunk = kwargs.pop("auto_chunk")
if auto_chunk:
chunks = "auto" # maintain backwards compatibility
else:
chunks = None
warnings.warn(
"auto_chunk is deprecated. Use chunks='auto' instead.",
FutureWarning,
stacklevel=2,
)
if kwargs:
raise TypeError(
"open_zarr() got unexpected keyword arguments " + ",".join(kwargs.keys())
)
if not isinstance(chunks, (int, dict)):
if chunks != "auto" and chunks is not None:
raise ValueError(
"chunks must be an int, dict, 'auto', or None. "
"Instead found %s. " % chunks
)
if chunks == "auto":
try:
import dask.array # noqa
except ImportError:
chunks = None
if not decode_cf:
mask_and_scale = False
decode_times = False
concat_characters = False
decode_coords = False
decode_timedelta = False
def maybe_decode_store(store, lock=False):
ds = conventions.decode_cf(
store,
mask_and_scale=mask_and_scale,
decode_times=decode_times,
concat_characters=concat_characters,
decode_coords=decode_coords,
drop_variables=drop_variables,
decode_timedelta=decode_timedelta,
use_cftime=use_cftime,
)
# TODO: this is where we would apply caching
return ds
# Zarr supports a wide range of access modes, but for now xarray either
# reads or writes from a store, never both. For open_zarr, we only read
mode = "r"
zarr_store = ZarrStore.open_group(
store,
mode=mode,
synchronizer=synchronizer,
group=group,
consolidated=consolidated,
chunk_store=chunk_store,
)
ds = maybe_decode_store(zarr_store)
# auto chunking needs to be here and not in ZarrStore because variable
# chunks do not survive decode_cf
# return trivial case
if not chunks:
return ds
# adapted from Dataset.Chunk()
if isinstance(chunks, int):
chunks = dict.fromkeys(ds.dims, chunks)
if isinstance(chunks, tuple) and len(chunks) == len(ds.dims):
chunks = dict(zip(ds.dims, chunks))
def get_chunk(name, var, chunks):
chunk_spec = dict(zip(var.dims, var.encoding.get("chunks")))
# Coordinate labels aren't chunked
if var.ndim == 1 and var.dims[0] == name:
return chunk_spec
if chunks == "auto":
return chunk_spec
for dim in var.dims:
if dim in chunks:
spec = chunks[dim]
if isinstance(spec, int):
spec = (spec,)
if isinstance(spec, (tuple, list)) and chunk_spec[dim]:
if any(s % chunk_spec[dim] for s in spec):
warnings.warn(
"Specified Dask chunks %r would "
"separate Zarr chunk shape %r for "
"dimension %r. This significantly "
"degrades performance. Consider "
"rechunking after loading instead."
% (chunks[dim], chunk_spec[dim], dim),
stacklevel=2,
)
chunk_spec[dim] = chunks[dim]
return chunk_spec
def maybe_chunk(name, var, chunks):
from dask.base import tokenize
chunk_spec = get_chunk(name, var, chunks)
if (var.ndim > 0) and (chunk_spec is not None):
# does this cause any data to be read?
token2 = tokenize(name, var._data)
name2 = "zarr-%s" % token2
var = var.chunk(chunk_spec, name=name2, lock=None)
if overwrite_encoded_chunks and var.chunks is not None:
var.encoding["chunks"] = tuple(x[0] for x in var.chunks)
return var
else:
return var
variables = {k: maybe_chunk(k, v, chunks) for k, v in ds.variables.items()}
return ds._replace_vars_and_dims(variables)
|
sync
|
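_determine_zarr_chunks encodes two alignment rules: along each dimension every interior dask chunk must be a multiple of the zarr chunk, and the final dask chunk may not be larger than the zarr chunk. The check in isolation (an illustrative sketch, not xarray API):

def dask_chunks_ok_for_zarr(var_chunks, zarr_chunks):
    # var_chunks: per-dimension tuples of dask chunk sizes
    # zarr_chunks: one chunk size per dimension
    for zchunk, dchunks in zip(zarr_chunks, var_chunks):
        if any(d % zchunk for d in dchunks[:-1]) or dchunks[-1] > zchunk:
            return False
    return True

assert dask_chunks_ok_for_zarr([(10, 10, 5)], (10,))          # aligned
assert not dask_chunks_ok_for_zarr([(10, 15, 5)], (10,))      # 15 spans two zarr chunks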
parser.py
|
"""Top down operator precedence parser.
This is an implementation of Vaughan R. Pratt's
"Top Down Operator Precedence" parser.
(http://dl.acm.org/citation.cfm?doid=512927.512931).
These are some additional resources that help explain the
general idea behind a Pratt parser:
* http://effbot.org/zone/simple-top-down-parsing.htm
* http://javascript.crockford.com/tdop/tdop.html
A few notes on the implementation.
* All the nud/led tokens are on the Parser class itself, and are dispatched
using getattr(). This keeps all the parsing logic contained to a single
class.
* We use two passes through the data. One to create a list of tokens,
then one pass through the tokens to create the AST. While the lexer actually
yields tokens, we convert it to a list so we can easily implement two tokens
of lookahead. A previous implementation used a fixed circular buffer, but it
was significantly slower. Also, the average jmespath expression typically
does not have a large number of tokens, so this is not an issue. And
interestingly enough, creating a token list first is actually faster than
consuming from the token iterator one token at a time.
"""
import random
from jmespath import lexer
from jmespath.compat import with_repr_method
from jmespath import ast
from jmespath import exceptions
from jmespath import visitor
class Parser(object):
BINDING_POWER = {
'eof': 0,
'unquoted_identifier': 0,
'quoted_identifier': 0,
'rbracket': 0,
'rparen': 0,
'comma': 0,
'rbrace': 0,
'number': 0,
'current': 0,
'expref': 0,
'colon': 0,
'pipe': 1,
'eq': 2,
'gt': 2,
'lt': 2,
'gte': 2,
'lte': 2,
'ne': 2,
'or': 5,
'flatten': 6,
'star': 20,
'filter': 21,
'dot': 40,
'lbrace': 50,
'lbracket': 55,
'lparen': 60,
}
# The _MAX_SIZE most recent expressions are cached in
# _CACHE dict.
_CACHE = {}
_MAX_SIZE = 128
def __init__(self, lookahead=2):
self.tokenizer = None
self._tokens = [None] * lookahead
self._buffer_size = lookahead
self._index = 0
def parse(self, expression):
cached = self._CACHE.get(expression)
if cached is not None:
return cached
parsed_result = self._do_parse(expression)
self._CACHE[expression] = parsed_result
if len(self._CACHE) > self._MAX_SIZE:
self._free_cache_entries()
return parsed_result
|
try:
return self._parse(expression)
except exceptions.LexerError as e:
e.expression = expression
raise
except exceptions.IncompleteExpressionError as e:
e.set_expression(expression)
raise
except exceptions.ParseError as e:
e.expression = expression
raise
def _parse(self, expression):
self.tokenizer = lexer.Lexer().tokenize(expression)
self._tokens = list(self.tokenizer)
self._index = 0
parsed = self._expression(binding_power=0)
if not self._current_token() == 'eof':
t = self._lookahead_token(0)
raise exceptions.ParseError(t['start'], t['value'], t['type'],
"Unexpected token: %s" % t['value'])
return ParsedResult(expression, parsed)
def _expression(self, binding_power=0):
left_token = self._lookahead_token(0)
self._advance()
nud_function = getattr(
self, '_token_nud_%s' % left_token['type'],
self._error_nud_token)
left = nud_function(left_token)
current_token = self._current_token()
while binding_power < self.BINDING_POWER[current_token]:
led = getattr(self, '_token_led_%s' % current_token, None)
if led is None:
error_token = self._lookahead_token(0)
self._error_led_token(error_token)
else:
self._advance()
left = led(left)
current_token = self._current_token()
return left
def _token_nud_string_literal(self, token):
return ast.literal(token['value'])
def _token_nud_literal(self, token):
return ast.literal(token['value'])
def _token_nud_unquoted_identifier(self, token):
return ast.field(token['value'])
def _token_nud_quoted_identifier(self, token):
field = ast.field(token['value'])
# You can't have a quoted identifier as a function
# name.
if self._current_token() == 'lparen':
t = self._lookahead_token(0)
raise exceptions.ParseError(
0, t['value'], t['type'],
'Quoted identifier not allowed for function names.')
return field
def _token_nud_star(self, token):
left = ast.identity()
if self._current_token() == 'rbracket':
right = ast.identity()
else:
right = self._parse_projection_rhs(self.BINDING_POWER['star'])
return ast.value_projection(left, right)
def _token_nud_filter(self, token):
return self._token_led_filter(ast.identity())
def _token_nud_lbrace(self, token):
return self._parse_multi_select_hash()
def _token_nud_flatten(self, token):
left = ast.flatten(ast.identity())
right = self._parse_projection_rhs(
self.BINDING_POWER['flatten'])
return ast.projection(left, right)
def _token_nud_lbracket(self, token):
if self._current_token() in ['number', 'colon']:
right = self._parse_index_expression()
# We could optimize this and remove the identity() node.
# We don't really need an index_expression node, we can
# just use emit an index node here if we're not dealing
# with a slice.
return self._project_if_slice(ast.identity(), right)
elif self._current_token() == 'star' and \
self._lookahead(1) == 'rbracket':
self._advance()
self._advance()
right = self._parse_projection_rhs(self.BINDING_POWER['star'])
return ast.projection(ast.identity(), right)
else:
return self._parse_multi_select_list()
def _parse_index_expression(self):
# We're here:
# [<current>
# ^
# | current token
if (self._lookahead(0) == 'colon' or
self._lookahead(1) == 'colon'):
return self._parse_slice_expression()
else:
# Parse the syntax [number]
node = ast.index(self._lookahead_token(0)['value'])
self._advance()
self._match('rbracket')
return node
def _parse_slice_expression(self):
# [start:end:step]
# Where start, end, and step are optional.
# The last colon is optional as well.
parts = [None, None, None]
index = 0
current_token = self._current_token()
while not current_token == 'rbracket' and index < 3:
if current_token == 'colon':
index += 1
self._advance()
elif current_token == 'number':
parts[index] = self._lookahead_token(0)['value']
self._advance()
else:
t = self._lookahead_token(0)
lex_position = t['start']
actual_value = t['value']
actual_type = t['type']
raise exceptions.ParseError(lex_position, actual_value,
actual_type, 'syntax error')
current_token = self._current_token()
self._match('rbracket')
return ast.slice(*parts)
def _token_nud_current(self, token):
return ast.current_node()
def _token_nud_expref(self, token):
expression = self._expression(self.BINDING_POWER['expref'])
return ast.expref(expression)
def _token_led_dot(self, left):
if not self._current_token() == 'star':
right = self._parse_dot_rhs(self.BINDING_POWER['dot'])
if left['type'] == 'subexpression':
left['children'].append(right)
return left
else:
return ast.subexpression([left, right])
else:
# We're creating a projection.
self._advance()
right = self._parse_projection_rhs(
self.BINDING_POWER['dot'])
return ast.value_projection(left, right)
def _token_led_pipe(self, left):
right = self._expression(self.BINDING_POWER['pipe'])
return ast.pipe(left, right)
def _token_led_or(self, left):
right = self._expression(self.BINDING_POWER['or'])
return ast.or_expression(left, right)
def _token_led_lparen(self, left):
name = left['value']
args = []
while not self._current_token() == 'rparen':
if self._current_token() == 'current':
expression = ast.current_node()
self._advance()
else:
expression = self._expression()
if self._current_token() == 'comma':
self._match('comma')
args.append(expression)
self._match('rparen')
function_node = ast.function_expression(name, args)
return function_node
def _token_led_filter(self, left):
# Filters are projections.
condition = self._expression(0)
self._match('rbracket')
if self._current_token() == 'flatten':
right = ast.identity()
else:
right = self._parse_projection_rhs(self.BINDING_POWER['filter'])
return ast.filter_projection(left, right, condition)
def _token_led_eq(self, left):
return self._parse_comparator(left, 'eq')
def _token_led_ne(self, left):
return self._parse_comparator(left, 'ne')
def _token_led_gt(self, left):
return self._parse_comparator(left, 'gt')
def _token_led_gte(self, left):
return self._parse_comparator(left, 'gte')
def _token_led_lt(self, left):
return self._parse_comparator(left, 'lt')
def _token_led_lte(self, left):
return self._parse_comparator(left, 'lte')
def _token_led_flatten(self, left):
left = ast.flatten(left)
right = self._parse_projection_rhs(
self.BINDING_POWER['flatten'])
return ast.projection(left, right)
def _token_led_lbracket(self, left):
token = self._lookahead_token(0)
if token['type'] in ['number', 'colon']:
right = self._parse_index_expression()
if left['type'] == 'index_expression':
# Optimization: if the left node is an index expr,
# we can avoid creating another node and instead just add
# the right node as a child of the left.
left['children'].append(right)
return left
else:
return self._project_if_slice(left, right)
else:
# We have a projection
self._match('star')
self._match('rbracket')
right = self._parse_projection_rhs(self.BINDING_POWER['star'])
return ast.projection(left, right)
def _project_if_slice(self, left, right):
index_expr = ast.index_expression([left, right])
if right['type'] == 'slice':
return ast.projection(
index_expr,
self._parse_projection_rhs(self.BINDING_POWER['star']))
else:
return index_expr
def _parse_comparator(self, left, comparator):
right = self._expression(self.BINDING_POWER[comparator])
return ast.comparator(comparator, left, right)
def _parse_multi_select_list(self):
expressions = []
while not self._current_token() == 'rbracket':
expression = self._expression()
expressions.append(expression)
if self._current_token() == 'comma':
self._match('comma')
self._assert_not_token('rbracket')
self._match('rbracket')
return ast.multi_select_list(expressions)
def _parse_multi_select_hash(self):
pairs = []
while True:
key_token = self._lookahead_token(0)
# Before getting the token value, verify it's
# an identifier.
self._match_multiple_tokens(
token_types=['quoted_identifier', 'unquoted_identifier'])
key_name = key_token['value']
self._match('colon')
value = self._expression(0)
node = ast.key_val_pair(key_name=key_name, node=value)
pairs.append(node)
if self._current_token() == 'comma':
self._match('comma')
elif self._current_token() == 'rbrace':
self._match('rbrace')
break
return ast.multi_select_dict(nodes=pairs)
def _parse_projection_rhs(self, binding_power):
# Parse the right hand side of the projection.
if self.BINDING_POWER[self._current_token()] < 10:
# BP of 10 are all the tokens that stop a projection.
right = ast.identity()
elif self._current_token() == 'lbracket':
right = self._expression(binding_power)
elif self._current_token() == 'filter':
right = self._expression(binding_power)
elif self._current_token() == 'dot':
self._match('dot')
right = self._parse_dot_rhs(binding_power)
else:
t = self._lookahead_token(0)
lex_position = t['start']
actual_value = t['value']
actual_type = t['type']
raise exceptions.ParseError(lex_position, actual_value,
actual_type, 'syntax error')
return right
def _parse_dot_rhs(self, binding_power):
# From the grammar:
# expression '.' ( identifier /
# multi-select-list /
# multi-select-hash /
# function-expression /
# *
# In terms of tokens that means that after a '.',
# you can have:
lookahead = self._current_token()
# Common case "foo.bar", so first check for an identifier.
if lookahead in ['quoted_identifier', 'unquoted_identifier', 'star']:
return self._expression(binding_power)
elif lookahead == 'lbracket':
self._match('lbracket')
return self._parse_multi_select_list()
elif lookahead == 'lbrace':
self._match('lbrace')
return self._parse_multi_select_hash()
else:
t = self._lookahead_token(0)
allowed = ['quoted_identifier', 'unquoted_identifier',
'lbracket', 'lbrace']
lex_position = t['start']
actual_value = t['value']
actual_type = t['type']
raise exceptions.ParseError(
lex_position, actual_value, actual_type,
"Expecting: %s, got: %s" % (allowed,
actual_type))
def _assert_not_token(self, *token_types):
if self._current_token() in token_types:
t = self._lookahead_token(0)
lex_position = t['start']
actual_value = t['value']
actual_type = t['type']
raise exceptions.ParseError(
lex_position, actual_value, actual_type,
"Token %s not allowed to be: %s" % (actual_type, token_types))
def _error_nud_token(self, token):
if token['type'] == 'eof':
raise exceptions.IncompleteExpressionError(
token['start'], token['value'], token['type'])
raise exceptions.ParseError(token['start'], token['value'],
token['type'], 'Invalid token.')
def _error_led_token(self, token):
raise exceptions.ParseError(token['start'], token['value'],
token['type'], 'Invalid token')
    def _match(self, token_type=None):
        if self._current_token() == token_type:
            self._advance()
else:
t = self._lookahead_token(0)
lex_position = t['start']
actual_value = t['value']
actual_type = t['type']
if actual_type == 'eof':
raise exceptions.IncompleteExpressionError(
lex_position, actual_value, actual_type)
else:
message = 'Expecting: %s, got: %s' % (token_type,
actual_type)
raise exceptions.ParseError(
lex_position, actual_value, actual_type, message)
def _match_multiple_tokens(self, token_types):
if self._current_token() not in token_types:
t = self._lookahead_token(0)
lex_position = t['start']
actual_value = t['value']
actual_type = t['type']
if actual_type == 'eof':
raise exceptions.IncompleteExpressionError(
lex_position, actual_value, actual_type)
else:
message = 'Expecting: %s, got: %s' % (token_types,
actual_type)
raise exceptions.ParseError(
lex_position, actual_value, actual_type, message)
self._advance()
def _advance(self):
self._index += 1
def _current_token(self):
return self._tokens[self._index]['type']
def _lookahead(self, number):
return self._tokens[self._index + number]['type']
def _lookahead_token(self, number):
return self._tokens[self._index + number]
def _free_cache_entries(self):
for key in random.sample(list(self._CACHE.keys()), int(self._MAX_SIZE / 2)):
del self._CACHE[key]
@classmethod
def purge(cls):
"""Clear the expression compilation cache."""
cls._CACHE.clear()
@with_repr_method
class ParsedResult(object):
def __init__(self, expression, parsed):
self.expression = expression
self.parsed = parsed
def search(self, value):
interpreter = visitor.TreeInterpreter()
result = interpreter.visit(self.parsed, value)
return result
def _render_dot_file(self):
"""Render the parsed AST as a dot file.
Note that this is marked as an internal method because
the AST is an implementation detail and is subject
to change. This method can be used to help troubleshoot
or for development purposes, but is not considered part
of the public supported API. Use at your own risk.
"""
renderer = visitor.GraphvizVisitor()
contents = renderer.visit(self.parsed)
return contents
def __repr__(self):
return repr(self.parsed)
|
def _do_parse(self, expression):
|
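A minimal usage sketch of the Pratt parser above, via the public jmespath API: compile() goes through Parser._do_parse and caches the ParsedResult, whose search() walks the AST with the TreeInterpreter.

```python
# Minimal sketch: exercising the parser above through the public jmespath API.
import jmespath

expr = jmespath.compile("foo.bar[0]")          # parsed once, cached in Parser._CACHE
print(expr.search({"foo": {"bar": ["baz"]}}))  # -> baz
```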
project_manager.py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import git
import os
import re
import sys
import toml
from pathlib import Path
from alchemist_py.brokergen import createProject
from alchemist_py.deviceinfo import searchDevice
from alchemist_py.plugin_manager import PluginManager
class Manager(object):
def __init__(self):
|
def updateNode(self, node):
path_to_project = Path("nodes")/node["name"]
# make mini alchemist data for the node
mini_alchemist = {
"device": {
"board": self.board,
"fpga": self.fpga,
"clock": self.clock
},
"node": node,
"topics": []
}
for port in node["ports"]:
for topic in self.topics:
if port["attribute"] in ["wire"]:
break
elif port["attribute"] in ["publisher", "subscriber"] and port["topic"] == topic["name"]:
mini_alchemist["topics"].append(topic)
break
else:
print("Unknown topic:", port["topic"], file=sys.stderr)
print("node:", node["name"])
exit(1)
# write mini alchemist to TOML
os.makedirs(path_to_project)
toml.dump(mini_alchemist, open(path_to_project/".Alchemist.toml", "w"))
# update project
plugin = self.p_manager.loadPlugin(node["plugin"])
plugin.createProject(node["name"])
def updateNodes(self):
# update projects for nodes
for node in self.nodes:
path_to_project = Path("nodes")/node["name"]
# if no project for a node, make a directory and Alchemist.toml
if not os.path.exists(path_to_project):
if "repo" in node.keys():
git.Repo.clone_from(node["repo"], "nodes")
else:
self.updateNode(node)
# if Alchemist.toml was updated, update mini Alchemist.toml
            t_alchemist = os.path.getmtime("Alchemist.toml")
            t_mini_alchemist = os.path.getmtime(path_to_project/".Alchemist.toml")
if t_alchemist > t_mini_alchemist:
if "repo" in node.keys():
git.Repo.clone_from(node["repo"], "nodes")
else:
self.updateNode(node)
def updateTopic(self, topic:dict):
path_to_project = Path("brokers") / ("broker"+topic["name"])
if not os.path.exists(path_to_project):
byte = 0
for m in re.finditer(r"(?P<type>((unsigned\s+){0,1}(char|short|int|long)|(float|double)|(ap_(u){0,1}int\s*\<\s*[1-9]{1,4}\s*>)))\s+(?P<var>([a-zA-Z_][a-zA-Z0-9_]*(\s*\[\s*([0-9]|[1-9][0-9]*)\s*\]){0,1}))\s*;", topic["message"]):
byte += self.getByte(m.group("type"), m.group("var"))
mini_alchemist = {
"device": {
"board": self.board,
"fpga": self.fpga,
"clock": self.clock
},
"topic": topic,
}
mini_alchemist["topic"]["pub"] = len(list(filter(
lambda x: x["attribute"] == "publisher" and x["topic"] == topic["name"],
self.ports
)))
mini_alchemist["topic"]["sub"] = len(list(filter(
lambda x: x["attribute"] == "subscriber" and x["topic"] == topic["name"],
self.ports
)))
mini_alchemist["topic"]["width"] = 64
mini_alchemist["topic"]["count"] = int(byte / 8)
os.makedirs(path_to_project)
toml.dump(mini_alchemist, open(path_to_project / ".Alchemist.toml", "w"))
createProject(topic["name"])
def updateTopics(self):
for topic in self.topics:
self.updateTopic(topic)
def getByte(self, vType:str, var:str):
width_of_type = 0
if vType == "char":
width_of_type = 1
elif vType == "short":
width_of_type = 2
elif vType == "int":
width_of_type = 4
elif vType == "long":
width_of_type = 8
elif vType.split()[0] == "unsigned":
if vType.split()[1] == "char":
width_of_type = 1
elif vType.split()[1] == "short":
width_of_type = 2
elif vType.split()[1] == "int":
width_of_type = 4
elif vType.split()[1] == "long":
width_of_type = 8
else:
print("Unknown type!")
exit(1)
else:
print("Unknown type!")
exit(1)
length_of_var = 1
m = re.match(
r"[a-zA-Z_][a-zA-Z0-9_]*\s*\[\s*(?P<length>[1-9][0-9]*)\s*\]",
var
)
if m:
length_of_var = int(m.group("length"))
return width_of_type * length_of_var
|
config = toml.load(open("Alchemist.toml"))
self.board = config["board"]
self.nodes = config["nodes"]
self.topics = config["topics"]
self.fpga, self.clock = searchDevice(self.board)
self.topic_table = {}
for topic in self.topics:
self.topic_table[topic["name"]] =\
"struct {name} {{\n {message}}};".format(
name=topic["name"], message=topic["message"]
)
self.p_manager = PluginManager()
self.ports = []
for ps in list(map(lambda x:x["ports"], self.nodes)):
self.ports.extend(ps)
|
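As a standalone illustration of Manager.getByte above, here is a sketch with no Alchemist dependencies; the widths are the ones assumed by that method (char=1, short=2, int=4, long=8, with an optional [N] array suffix as a multiplier).

```python
# Sketch of the byte-width computation from Manager.getByte.
import re

WIDTHS = {"char": 1, "short": 2, "int": 4, "long": 8}

def byte_width(v_type: str, var: str) -> int:
    base = WIDTHS[v_type.split()[-1]]  # "unsigned int" reduces to "int"
    m = re.match(r"[a-zA-Z_][a-zA-Z0-9_]*\s*\[\s*(?P<length>[1-9][0-9]*)\s*\]", var)
    return base * (int(m.group("length")) if m else 1)

print(byte_width("unsigned short", "buf[16]"))  # 32
```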
main.ts
|
import * as angularCore from '@angular/core';
import { enableProdMode } from '@angular/core';
import * as angularCommon from '@angular/common';
import { platformBrowserDynamic } from '@angular/platform-browser-dynamic';
import { AppModule } from './app/app.module';
import { AppConfig } from './environments/environment';
SystemJS.set('@angular/core', SystemJS.newModule(angularCore));
SystemJS.set('@angular/common', SystemJS.newModule(angularCommon));
if (AppConfig.production) {
enableProdMode();
|
.bootstrapModule(AppModule, {
preserveWhitespaces: false
})
.catch(err => console.error(err));
|
}
platformBrowserDynamic()
|
tests_helpers.py
|
import numpy as np
from algorithms import helpers
def
|
(Ntests):
passed = 0
critical = 0
for _ in range(Ntests):
try:
n = np.random.randint(2, 11)
X = np.random.uniform(low=0.0,
high=100.0,
size=(n, n))
Q, R = helpers.qr_factorize(X)
assert all(np.isclose(Q.dot(R), X).flatten())
passed += 1
except AssertionError:
print("AssertionError with:")
print(X)
continue
except Exception:
print("Other Error with:")
print(X)
critical += 1
print("Test Results:")
print("Passed {} of {} Tests.".format(passed, Ntests))
print("Failed {} tests.".format(Ntests-passed-critical))
print("{} tests failed critically".format(critical))
if passed == Ntests:
return True
else:
return False
assert test_QR(1000)
|
test_QR
|
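The invariant exercised by test_QR above is that the factors reproduce the input matrix; the same check passes against numpy's reference QR, e.g.:

```python
# Sketch: the Q @ R == X property from test_QR, checked with numpy's reference QR.
import numpy as np

X = np.random.uniform(low=0.0, high=100.0, size=(5, 5))
Q, R = np.linalg.qr(X)
assert np.allclose(Q @ R, X)
```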
socket.io.go
|
// Copyright (c) 2018-2021, The Decred developers
// See LICENSE for details.
package insight
import (
"encoding/json"
"fmt"
"net/http"
"sync"
"time"
"github.com/decred/dcrd/chaincfg/v3"
"github.com/decred/dcrd/dcrutil/v4"
chainjson "github.com/decred/dcrd/rpc/jsonrpc/types/v3"
"github.com/decred/dcrd/txscript/v4/stdaddr"
"github.com/decred/dcrd/txscript/v4/stdscript"
"github.com/decred/dcrd/wire"
socketio "github.com/googollee/go-socket.io"
"github.com/googollee/go-socket.io/engineio"
"github.com/googollee/go-socket.io/engineio/transport"
"github.com/googollee/go-socket.io/engineio/transport/websocket"
"github.com/decred/dcrdata/v7/blockdata"
"github.com/decred/dcrdata/v7/txhelpers"
)
const maxAddressSubsPerConn uint32 = 32768
type roomSubscriptionCounter struct {
sync.RWMutex
c map[string]int
}
// SocketServer wraps the socket.io server with the watched address list.
type SocketServer struct {
*socketio.Server
params *chaincfg.Params
watchedAddresses *roomSubscriptionCounter
txGetter txhelpers.RawTransactionGetter
}
// InsightSocketVin represents a single vin for the Insight "vin" JSON object
// that appears in a "tx" message from the "inv" room.
type InsightSocketVin struct {
TxID string `json:"txid,omitempty"`
Vout *uint32 `json:"vout,omitempty"`
Addresses []string `json:"addresses,omitempty"`
Value *int64 `json:"value,omitempty"`
}
func newInt64Ptr(i int64) *int64 {
ii := i
return &ii
}
func newUint32Ptr(i uint32) *uint32 {
ii := i
return &ii
}
// InsightSocketVout represents a single vout for the Insight "vout" JSON object
// that appears in a "tx" message from the "inv" room.
type InsightSocketVout struct {
Address string
Value int64
}
// MarshalJSON implements json.Marshaler so that an InsightSocketVout will
// marshal to JSON like:
// {
// "DsZQaCQES5vh3JmcyyFokJYz3aSw8Sm1dsQ": 13741789
// }
func (v *InsightSocketVout) MarshalJSON() ([]byte, error) {
vout := map[string]int64{
v.Address: v.Value,
}
return json.Marshal(vout)
}
// WebSocketTx models the JSON data sent as the tx event in the inv room.
type WebSocketTx struct {
Hash string `json:"txid"`
Size int `json:"size"`
TotalOut int64 `json:"valueOut"`
Vins []InsightSocketVin `json:"vins,omitempty"`
Vouts []InsightSocketVout `json:"vout,omitempty"`
}
// NewSocketServer constructs a new SocketServer, registering handlers for the
// "connection", "disconnection", and "subscribe" events.
func NewSocketServer(params *chaincfg.Params, txGetter txhelpers.RawTransactionGetter) (*SocketServer, error) {
wsTrans := &websocket.Transport{
// Without this affirmative CheckOrigin, gorilla's "sensible default" is
// to ensure same origin.
CheckOrigin: func(req *http.Request) bool {
return true
},
}
opts := &engineio.Options{
PingInterval: 3 * time.Second,
PingTimeout: 5 * time.Second,
Transports: []transport.Transport{wsTrans},
}
socketIOServer, err := socketio.NewServer(opts)
if err != nil {
apiLog.Errorf("Could not create socket.io server: %v", err)
return nil, err
}
// Each address subscription uses its own room, which has the same name as
// the address. The number of subscribers for each room is tracked.
addrs := &roomSubscriptionCounter{
c: make(map[string]int),
}
server := &SocketServer{
Server: socketIOServer,
params: params,
watchedAddresses: addrs,
txGetter: txGetter,
}
// OnConnect sets the address room subscription counter to 0. There are no
// default subscriptions. The client must subscribe to "inv" if they want
// notification of all new transactions. Note that OnConnect previously
// subscribed all clients to "inv", but this was incorrect. Clients that
// need it should explicitly subscribe, and this seems to be how clients
// behave already.
server.OnConnect("", func(so socketio.Conn) error {
// Initialize the Conn's context, the connection's general purpose data,
// to hold the address room subscription count.
so.SetContext(uint32(0))
apiLog.Debugf("New socket.io connection (%s). %d clients are connected.",
so.ID(), server.RoomLen("", "inv"))
return nil
})
// Subscription to a room checks the room name is a valid subscription
// (currently just "inv" or a valid Decred address), joins the room, and
// increments the room's subscriber count.
server.OnEvent("", "subscribe", func(so socketio.Conn, room string) string {
switch room {
case "inv": // list other valid non-address rooms here
so.Join(room)
return "ok"
case "sync":
msg := `"sync" not implemented`
so.Emit("error", msg)
return "error: " + msg
}
// See if the room is a Decred address.
if _, err = stdaddr.DecodeAddress(room, params); err != nil {
apiLog.Debugf("socket.io connection %s requested invalid subscription: %s",
so.ID(), room)
msg := fmt.Sprintf(`invalid subscription "%s"`, room)
so.Emit("error", msg)
return "error: " + msg
}
// The room is a valid address, but enforce the maximum address room
// subscription limit.
numAddrSubs, _ := so.Context().(uint32)
if numAddrSubs >= maxAddressSubsPerConn {
apiLog.Warnf("Client %s failed to subscribe, at the limit.", so.ID())
msg := `"too many address subscriptions"`
so.Emit("error", msg)
return "error: " + msg
}
numAddrSubs++
so.SetContext(numAddrSubs)
so.Join(room)
apiLog.Debugf("socket.io client %s joined address room %s (%d subscriptions)",
so.ID(), room, numAddrSubs)
addrs.Lock()
addrs.c[room]++
addrs.Unlock()
return "ok"
})
// Disconnection decrements or deletes the subscriber counter for each
// address room to which the client was subscribed.
server.OnDisconnect("", func(so socketio.Conn, msg string) {
apiLog.Debugf("socket.io client disconnected (%s). %d clients are connected. msg: %s",
so.ID(), server.RoomLen("", "inv"), msg)
addrs.Lock()
for _, str := range so.Rooms() {
if c, ok := addrs.c[str]; ok {
if c == 1 {
delete(addrs.c, str)
} else {
addrs.c[str]--
}
}
}
addrs.Unlock()
})
server.OnError("", func(_ socketio.Conn, err error) {
apiLog.Errorf("Insight socket.io server error: %v", err)
})
apiLog.Infof("Started Insight socket.io server.")
go server.Serve()
return server, nil
}
// Store broadcasts the latest block hash to the inv room. The coinbase
// transaction is also relayed to the new Tx channel where it is included in tx
// and address broadcasts.
func (soc *SocketServer) Store(blockData *blockdata.BlockData, msgBlock *wire.MsgBlock) error {
apiLog.Debugf("Sending new websocket block %s", blockData.Header.Hash)
soc.BroadcastToRoom("", "inv", "block", blockData.Header.Hash)
// Since the coinbase transaction is generated by the miner, it will never
// hit mempool. It must be processed now, with the new block.
return soc.sendNewMsgTx(msgBlock.Transactions[0])
}
// SendNewTx prepares a dcrd mempool tx for broadcast. This method satisfies
// notification.TxHandler and is registered as a handler in main.go.
func (soc *SocketServer) SendNewTx(rawTx *chainjson.TxRawResult) error {
msgTx, err := txhelpers.MsgTxFromHex(rawTx.Hex)
if err != nil {
return err
}
return soc.sendNewTx(msgTx, rawTx.Vout)
}
// sendNewMsgTx processes and broadcasts a msgTx to subscribers.
func (soc *SocketServer) sendNewMsgTx(msgTx *wire.MsgTx) error {
return soc.sendNewTx(msgTx, nil)
}
// sendNewTx processes and broadcasts a msgTx to subscribers, using an existing
// []Vout, if it is available. If vouts is zero-length, the output addresses are
// decoded from their pkScripts.
func (soc *SocketServer) sendNewTx(msgTx *wire.MsgTx, vouts []chainjson.Vout) error {
// Gather vins and their prevouts.
|
txid := v.PreviousOutPoint.Hash.String()
idx := v.PreviousOutPoint.Index
tree := v.PreviousOutPoint.Tree
var addrs []string
var amt dcrutil.Amount
if txhelpers.IsZeroHashStr(txid) {
// Coinbase and stake base inputs need to be "{}".
vins = append(vins, InsightSocketVin{})
continue
} else {
var err error
// Assume dcrd validated the tx and treasury could be true, and this
// could be a treasury txn if this is the stake tree.
addrs, amt, err = txhelpers.OutPointAddressesFromString(
txid, idx, tree, soc.txGetter, soc.params)
if err != nil {
apiLog.Warnf("failed to get outpoint address from txid: %v", err)
// Still must append this vin to maintain valid implicit
// indexing of vins array.
}
}
vins = append(vins, InsightSocketVin{
TxID: txid,
Vout: newUint32Ptr(idx),
Addresses: addrs,
Value: newInt64Ptr(int64(amt)),
})
}
// Gather vouts.
var voutAddrs [][]string
for i, v := range msgTx.TxOut {
// Allow Vouts to be nil or empty, extracting the addresses from the
// pkScripts here.
if len(vouts) == 0 {
_, scriptAddrs := stdscript.ExtractAddrs(v.Version, v.PkScript, soc.params)
var addrs []string
for i := range scriptAddrs {
addrs = append(addrs, scriptAddrs[i].String())
}
voutAddrs = append(voutAddrs, addrs)
} else {
voutAddrs = append(voutAddrs, vouts[i].ScriptPubKey.Addresses)
}
}
// All addresses that have client subscriptions, and are paid to by vouts
// and the vins' prevouts.
addrTxs := make(map[string]struct{})
// Create the InsightSocketVout slice for the WebSocketTx struct sent to all
// "inv" subscribers. Also record all vout addresses with corresponding
// address room subscriptions.
var voutsInsight []InsightSocketVout
var total int64
for i, v := range msgTx.TxOut {
total += v.Value
if len(voutAddrs[i]) == 0 {
continue
}
soc.watchedAddresses.RLock()
for _, address := range voutAddrs[i] {
if _, ok := soc.watchedAddresses.c[address]; ok {
addrTxs[address] = struct{}{}
}
voutsInsight = append(voutsInsight, InsightSocketVout{
Address: address,
Value: v.Value,
})
}
soc.watchedAddresses.RUnlock()
}
// Record all prevout addresses with corresponding address room
// subscriptions.
for i := range vins {
soc.watchedAddresses.RLock()
for _, address := range vins[i].Addresses {
if _, ok := soc.watchedAddresses.c[address]; ok {
addrTxs[address] = struct{}{}
}
}
soc.watchedAddresses.RUnlock()
}
// Broadcast this tx hash to each relevant address room.
hash := msgTx.TxHash().String()
for address := range addrTxs {
soc.BroadcastToRoom("", address, address, hash)
}
	// Broadcast the WebSocketTx data to all "inv" room subscribers.
tx := WebSocketTx{
Hash: hash,
Size: msgTx.SerializeSize(),
TotalOut: total,
Vins: vins,
Vouts: voutsInsight,
}
apiLog.Tracef("Sending new websocket tx %s", hash)
soc.BroadcastToRoom("", "inv", "tx", tx)
return nil
}
|
vins := make([]InsightSocketVin, 0, len(msgTx.TxIn))
for _, v := range msgTx.TxIn {
|
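The subscribe and disconnect handlers above maintain a reference-counted room registry (roomSubscriptionCounter); the same bookkeeping as a compact sketch:

```python
# Sketch of the room ref-counting used above: subscribe increments the count,
# disconnect decrements it and deletes the room once it reaches zero.
counts = {}

def subscribe(room):
    counts[room] = counts.get(room, 0) + 1

def disconnect(rooms):
    for room in rooms:
        if room not in counts:
            continue
        if counts[room] == 1:
            del counts[room]
        else:
            counts[room] -= 1

subscribe("inv"); subscribe("inv"); disconnect(["inv"])
print(counts)  # {'inv': 1}
```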
graph_processing.py
|
# -*- coding: utf-8 -*-
"""Methods to generalize any graph for its path analysis."""
import logging
from typing import Dict, Tuple, Any, List
from networkx import DiGraph, isolates
__all__ = [
'generate_reduced_graph',
]
logger = logging.getLogger(__name__)
def
|
(graph: DiGraph):
"""Remove isolated nodes from the graph.
:param graph: graph to be filtered
"""
nodes = list(isolates(graph))
graph.remove_nodes_from(nodes)
def _dict_to_graph(data: Dict[str, Any]) -> Tuple[DiGraph, Dict[str, int]]:
"""Convert dictionary representation of the graph to a directed graph.
:param data: graph as a dictionary
    :return: directed graph and a mapping from node names to ids
"""
graph = DiGraph()
node2id = {}
for node, properties in data['node_list'].items():
node2id[node] = properties['id']
graph.add_node(
int(properties['id']),
name=node,
isTarget=bool(properties['isTarget'])
)
for node, adj in data['adj_list'].items():
source = int(node)
increases = adj.get('increases', [])
decreases = adj.get('decreases', [])
for n in increases:
graph.add_edge(source, n, polarity=1)
for n in decreases:
graph.add_edge(source, n, polarity=-1)
return graph, node2id
def generate_reduced_graph(graph: DiGraph, target_nodes: List[Any]) -> Tuple[DiGraph, Dict[str, int]]:
"""Generate a reduced version of a graph.
:param graph: directed graph
:param target_nodes: target nodes
    :return: reduced directed graph and a mapping from node names to ids
"""
remove_isolated_nodes(graph)
node_list = {
f'{node}': {
'id': i,
            'isTarget': node in target_nodes,
}
for i, node in enumerate(graph.nodes())
}
adj_list = {}
# Counters
num_edges = 0
count_increases = 0
count_decreases = 0
for i, node in enumerate(graph.nodes()):
increases = []
decreases = []
for neighbor in graph.neighbors(node):
            relation_sign = graph[node][neighbor].get('polarity')
            if not relation_sign:
                raise ValueError('Ensure that your graph has been loaded with the "polarity" edge attribute')
# Add positive relation
if relation_sign == 1:
increases.append(node_list[f'{neighbor}']['id'])
count_increases += 1
# Add negative relation
elif relation_sign == -1:
decreases.append(node_list[f'{neighbor}']['id'])
count_decreases += 1
            # Raise an error if the relation type is not recognized
            else:
                raise ValueError(f"Unknown polarity: {relation_sign}")
if increases or decreases:
adj_list[i] = {}
if increases:
adj_list[i]['increases'] = increases
if decreases:
adj_list[i]['decreases'] = decreases
num_edges += len(increases) + len(decreases)
num_nodes = len(node_list)
graph_data = {
'num_nodes': num_nodes,
'num_edges': num_edges,
'node_list': node_list,
'adj_list': adj_list
}
logger.debug(
f"Number of nodes:{num_nodes}\n"
f"Number of edges: {num_edges}\n"
f"Number of activations: {count_increases}\n"
f"Number of inhibitions: {count_decreases}\n"
)
return _dict_to_graph(graph_data)
|
remove_isolated_nodes
|
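A minimal usage sketch of generate_reduced_graph, assuming edges carry the 'polarity' attribute (+1 activation, -1 inhibition) that _dict_to_graph also writes; node names and targets below are illustrative.

```python
# Sketch: reduce a tiny signed graph with polarity-labelled edges.
from networkx import DiGraph

g = DiGraph()
g.add_edge("A", "B", polarity=1)
g.add_edge("B", "C", polarity=-1)
reduced, node2id = generate_reduced_graph(g, target_nodes=["C"])
print(node2id)  # e.g. {'A': 0, 'B': 1, 'C': 2}
```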
worksheet.py
|
from __future__ import absolute_import
# Copyright (c) 2010-2016 openpyxl
"""Write worksheets to xml representations."""
# Python stdlib imports
from io import BytesIO
from openpyxl import LXML
# package imports
from openpyxl.xml.functions import (
Element,
xmlfile,
)
from openpyxl.xml.constants import SHEET_MAIN_NS
from openpyxl.formatting import ConditionalFormatting
from openpyxl.styles.differential import DifferentialStyle
from openpyxl.packaging.relationship import Relationship
from openpyxl.worksheet.merge import MergeCells, MergeCell
from openpyxl.worksheet.properties import WorksheetProperties
from openpyxl.worksheet.hyperlink import Hyperlink
from openpyxl.worksheet.related import Related
from openpyxl.worksheet.header_footer import HeaderFooter
from openpyxl.worksheet.dimensions import (
SheetFormatProperties,
SheetDimension,
)
from .etree_worksheet import write_cell
def write_mergecells(worksheet):
"""Write merged cells to xml."""
merged = [MergeCell(ref) for ref in worksheet._merged_cells]
if not merged:
return
return MergeCells(mergeCell=merged).to_tree()
def write_conditional_formatting(worksheet):
"""Write conditional formatting to xml."""
wb = worksheet.parent
for range_string, rules in worksheet.conditional_formatting.cf_rules.items():
cf = Element('conditionalFormatting', {'sqref': range_string})
for rule in rules:
if rule.dxf is not None:
if rule.dxf != DifferentialStyle():
rule.dxfId = len(wb._differential_styles)
wb._differential_styles.append(rule.dxf)
cf.append(rule.to_tree())
yield cf
def write_hyperlinks(worksheet):
"""Write worksheet hyperlinks to xml."""
if not worksheet._hyperlinks:
return
tag = Element('hyperlinks')
for link in worksheet._hyperlinks:
if link.target:
rel = Relationship(type="hyperlink", TargetMode="External", Target=link.target)
worksheet._rels.append(rel)
link.id = "rId{0}".format(len(worksheet._rels))
tag.append(link.to_tree())
return tag
def write_drawing(worksheet):
"""
Add link to drawing if required
"""
if worksheet._charts or worksheet._images:
rel = Relationship(type="drawing", Target="")
worksheet._rels.append(rel)
drawing = Related()
drawing.id = "rId%s" % len(worksheet._rels)
return drawing.to_tree("drawing")
def
|
(worksheet, shared_strings):
"""Write a worksheet to an xml file."""
ws = worksheet
ws._rels = []
ws._hyperlinks = []
if LXML is True:
from .lxml_worksheet import write_cell, write_rows
else:
from .etree_worksheet import write_cell, write_rows
out = BytesIO()
with xmlfile(out) as xf:
with xf.element('worksheet', xmlns=SHEET_MAIN_NS):
props = ws.sheet_properties.to_tree()
xf.write(props)
dim = SheetDimension(ref=ws.calculate_dimension())
xf.write(dim.to_tree())
xf.write(ws.views.to_tree())
cols = ws.column_dimensions.to_tree()
ws.sheet_format.outlineLevelCol = ws.column_dimensions.max_outline
xf.write(ws.sheet_format.to_tree())
if cols is not None:
xf.write(cols)
# write data
write_rows(xf, ws)
if ws.protection.sheet:
xf.write(ws.protection.to_tree())
if ws.auto_filter:
xf.write(ws.auto_filter.to_tree())
if ws.sort_state:
xf.write(ws.sort_state.to_tree())
merge = write_mergecells(ws)
if merge is not None:
xf.write(merge)
cfs = write_conditional_formatting(ws)
for cf in cfs:
xf.write(cf)
if ws.data_validations:
xf.write(ws.data_validations.to_tree())
hyper = write_hyperlinks(ws)
if hyper is not None:
xf.write(hyper)
options = ws.print_options
if dict(options):
new_element = options.to_tree()
xf.write(new_element)
margins = ws.page_margins.to_tree()
xf.write(margins)
setup = ws.page_setup
if dict(setup):
new_element = setup.to_tree()
xf.write(new_element)
if bool(ws.HeaderFooter):
xf.write(ws.HeaderFooter.to_tree())
drawing = write_drawing(ws)
if drawing is not None:
xf.write(drawing)
# if there is an existing vml file associated with this sheet or if there
# are any comments we need to add a legacyDrawing relation to the vml file.
if (ws.legacy_drawing is not None or ws._comments):
legacyDrawing = Related(id="anysvml")
xml = legacyDrawing.to_tree("legacyDrawing")
xf.write(xml)
if ws.page_breaks:
xf.write(ws.page_breaks.to_tree())
xml = out.getvalue()
out.close()
return xml
|
write_worksheet
|
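A small sketch of the positional relationship-id scheme used by write_hyperlinks and write_drawing above: each appended relationship is addressed as "rId" plus the 1-based length of the _rels list at the time of the append.

```python
# Sketch of the rId assignment: ids are positional in the _rels list.
rels = []
for target in ("https://a.example", "https://b.example"):
    rels.append(target)
    print("rId{0}".format(len(rels)))  # rId1, then rId2
```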
gen_TextTrackMode.rs
|
#![allow(unused_imports)]
use wasm_bindgen::prelude::*;
#[wasm_bindgen]
#[doc = "The `TextTrackMode` enum."]
#[doc = ""]
#[doc = "*This API requires the following crate features to be activated: `TextTrackMode`*"]
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum
|
{
Disabled = "disabled",
Hidden = "hidden",
Showing = "showing",
}
|
TextTrackMode
|
sphingomonashankookensis.py
|
"""
This file offers the methods to automatically retrieve the graph Sphingomonas hankookensis.
The graph is automatically retrieved from the STRING repository.
References
---------------------
Please cite the following if you use the data:
|
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen import Graph # pylint: disable=import-error
def SphingomonasHankookensis(
directed: bool = False,
preprocess: bool = True,
load_nodes: bool = True,
verbose: int = 2,
cache: bool = True,
cache_path: str = "graphs/string",
version: str = "links.v11.5",
**additional_graph_kwargs: Dict
) -> Graph:
"""Return new instance of the Sphingomonas hankookensis graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False
        Whether to load the graph as directed or undirected.
By default false.
preprocess: bool = True
Whether to preprocess the graph to be loaded in
optimal time and memory.
    load_nodes: bool = True
Whether to load the nodes vocabulary or treat the nodes
simply as a numeric range.
    verbose: int = 2
        Whether to show loading bars during the retrieval and building
of the graph.
cache: bool = True
Whether to use cache, i.e. download files only once
and preprocess them only once.
cache_path: str = "graphs"
Where to store the downloaded graphs.
version: str = "links.v11.5"
The version of the graph to retrieve.
The available versions are:
- homology.v11.5
- physical.links.v11.5
- links.v11.5
additional_graph_kwargs: Dict
Additional graph kwargs.
Returns
-----------------------
    Instance of the Sphingomonas hankookensis graph.
References
---------------------
Please cite the following if you use the data:
```bib
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
```
"""
return AutomaticallyRetrievedGraph(
graph_name="SphingomonasHankookensis",
repository="string",
version=version,
directed=directed,
preprocess=preprocess,
load_nodes=load_nodes,
verbose=verbose,
cache=cache,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| |
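A usage sketch of the retriever above; the import path is an assumption inferred from the relative imports, so adjust it to the actual package layout.

```python
# Hypothetical usage sketch; the import path below is an assumption.
from ensmallen.datasets.string import SphingomonasHankookensis

graph = SphingomonasHankookensis(directed=False, version="links.v11.5")
print(graph)  # the retrieved ensmallen Graph
```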
toy.py
|
#!/usr/bin/env python3
"""
Stacking building blocks:
the balls are identical, the cells are distinct.
"""
def combination(n, m):
r
|
r i in range(1, m + 1):
        # exact integer update; the caller applies the modulus once at the end
        r = r * (n - i + 1) // i
return r
if __name__ == '__main__':
n, m = map(int, input().split(' '))
print(combination(m * m + n - 1, n) % (10 ** 9 + 7))
|
= 1
fo
|
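The call in __main__ above is the stars-and-bars count for placing n identical balls into m*m distinct cells; a quick cross-check with math.comb:

```python
# Cross-check sketch: stars and bars gives C(m*m + n - 1, n) arrangements.
from math import comb

n, m = 3, 2
print(comb(m * m + n - 1, n) % (10 ** 9 + 7))  # comb(6, 3) = 20
```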
cyber_lord.go
|
package dm03
import (
"duel-masters/game/civ"
"duel-masters/game/family"
"duel-masters/game/fx"
"duel-masters/game/match"
"fmt"
)
// Emeral ...
func Emeral(c *match.Card) {
c.Name = "Emeral"
c.Power = 1000
c.Civ = civ.Water
c.Family = family.CyberLord
c.ManaCost = 2
c.ManaRequirement = []string{civ.Water}
c.Use(fx.Creature, func(card *match.Card, ctx *match.Context) {
if event, ok := ctx.Event.(*match.CardMoved); ok {
if event.CardID == card.ID && event.To == match.BATTLEZONE {
nrShields, err := card.Player.Container(match.SHIELDZONE)
if err != nil {
return
}
if len(nrShields) < 1 {
return
}
toShield := match.Search(card.Player, ctx.Match, card.Player, match.HAND, "Emeral: You may select 1 card from your hand and put it into the shield zone", 0, 1, true)
if len(toShield) < 1 {
return
}
toHand := fx.SelectBackside(
card.Player,
ctx.Match,
card.Player,
match.SHIELDZONE,
"Emeral: Select 1 of your shields that will be moved to your hand",
1,
1,
false,
)
for _, card := range toShield {
card.Player.MoveCard(card.ID, match.HAND, match.SHIELDZONE)
}
for _, card := range toHand {
card.Player.MoveCard(card.ID, match.SHIELDZONE, match.HAND)
}
|
})
}
// Shtra ...
func Shtra(c *match.Card) {
c.Name = "Shtra"
c.Power = 2000
c.Civ = civ.Water
c.Family = family.CyberLord
c.ManaCost = 4
c.ManaRequirement = []string{civ.Water}
c.Use(fx.Creature, func(card *match.Card, ctx *match.Context) {
if event, ok := ctx.Event.(*match.CardMoved); ok {
if event.CardID == card.ID && event.To == match.BATTLEZONE {
cards := match.Search(card.Player, ctx.Match, card.Player, match.MANAZONE, "Shtra: Select 1 card from your manazone that will be sent to your hand", 1, 1, false)
for _, crd := range cards {
card.Player.MoveCard(crd.ID, match.MANAZONE, match.HAND)
ctx.Match.Chat("Server", fmt.Sprintf("%s was moved to %s's hand from their mana zone", crd.Name, ctx.Match.PlayerRef(card.Player).Socket.User.Username))
}
ctx.Match.Wait(card.Player, "Waiting for your opponent to make an action")
defer ctx.Match.EndWait(card.Player)
opponentCards := match.Search(ctx.Match.Opponent(card.Player), ctx.Match, ctx.Match.Opponent(card.Player), match.MANAZONE, "Shtra: Select 1 card from your manazone that will be sent to your hand", 1, 1, false)
for _, crd := range opponentCards {
ctx.Match.Opponent(card.Player).MoveCard(crd.ID, match.MANAZONE, match.HAND)
ctx.Match.Chat("Server", fmt.Sprintf("%s was moved to %s's hand from their mana zone", crd.Name, ctx.Match.PlayerRef(ctx.Match.Opponent(card.Player)).Socket.User.Username))
}
}
}
})
}
|
}
}
|
context.rs
|
// surfman/surfman/src/platform/generic/egl/context.rs
//
//! Functionality common to backends using EGL contexts.
use crate::context::{self, CREATE_CONTEXT_MUTEX};
use crate::egl::types::{EGLConfig, EGLContext, EGLDisplay, EGLSurface, EGLint};
use crate::egl;
use crate::gl::types::GLuint;
use crate::gl;
use crate::surface::Framebuffer;
use crate::{ContextAttributeFlags, ContextAttributes, ContextID, Error, GLApi, GLVersion};
use crate::{Gl, SurfaceInfo};
use super::device::EGL_FUNCTIONS;
use super::error::ToWindowingApiError;
use super::ffi::{EGL_CONTEXT_MINOR_VERSION_KHR, EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT};
use super::ffi::{EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT, EGL_CONTEXT_OPENGL_PROFILE_MASK};
use super::surface::{EGLBackedSurface, ExternalEGLSurfaces};
use std::ffi::{CStr, CString};
use std::mem;
use std::os::raw::{c_char, c_void};
use std::ptr;
use std::thread;
#[allow(dead_code)]
const DUMMY_PBUFFER_SIZE: EGLint = 16;
const RGB_CHANNEL_BIT_DEPTH: EGLint = 8;
pub(crate) struct EGLBackedContext {
pub(crate) egl_context: EGLContext,
pub(crate) id: ContextID,
framebuffer: Framebuffer<EGLBackedSurface, ExternalEGLSurfaces>,
context_is_owned: bool,
}
/// Wrapper for a native `EGLContext`.
#[derive(Clone, Copy)]
pub struct NativeContext {
/// The EGL context.
pub egl_context: EGLContext,
/// The EGL read surface that is to be attached to that context.
pub egl_read_surface: EGLSurface,
/// The EGL draw surface that is to be attached to that context.
pub egl_draw_surface: EGLSurface,
}
/// Information needed to create a context. Some APIs call this a "config" or a "pixel format".
///
/// These are local to a device.
#[derive(Clone)]
pub struct ContextDescriptor {
pub(crate) egl_config_id: EGLint,
pub(crate) gl_version: GLVersion,
pub(crate) compatibility_profile: bool,
}
#[must_use]
pub(crate) struct CurrentContextGuard {
egl_display: EGLDisplay,
old_egl_draw_surface: EGLSurface,
old_egl_read_surface: EGLSurface,
old_egl_context: EGLContext,
}
impl Drop for EGLBackedContext {
#[inline]
fn drop(&mut self) {
if self.egl_context != egl::NO_CONTEXT && !thread::panicking() {
panic!("Contexts must be destroyed explicitly with `destroy_context`!")
}
}
}
impl Drop for CurrentContextGuard {
fn drop(&mut self) {
EGL_FUNCTIONS.with(|egl| {
unsafe {
if self.egl_display != egl::NO_DISPLAY {
egl.MakeCurrent(self.egl_display,
self.old_egl_draw_surface,
self.old_egl_read_surface,
self.old_egl_context);
}
}
})
}
}
impl EGLBackedContext {
pub(crate) unsafe fn new(egl_display: EGLDisplay,
descriptor: &ContextDescriptor,
gl_api: GLApi)
-> Result<EGLBackedContext, Error> {
let mut next_context_id = CREATE_CONTEXT_MUTEX.lock().unwrap();
// Create the context.
let egl_context = create_context(egl_display, descriptor, gl_api)?;
// Wrap and return it.
let context = EGLBackedContext {
egl_context,
id: *next_context_id,
framebuffer: Framebuffer::None,
context_is_owned: true,
};
next_context_id.0 += 1;
Ok(context)
}
pub(crate) unsafe fn from_native_context(native_context: NativeContext) -> EGLBackedContext {
let mut next_context_id = CREATE_CONTEXT_MUTEX.lock().unwrap();
let context = EGLBackedContext {
egl_context: native_context.egl_context,
id: *next_context_id,
framebuffer: Framebuffer::External(ExternalEGLSurfaces {
draw: native_context.egl_draw_surface,
read: native_context.egl_read_surface,
}),
context_is_owned: false,
};
next_context_id.0 += 1;
context
}
pub(crate) unsafe fn destroy(&mut self, egl_display: EGLDisplay) {
EGL_FUNCTIONS.with(|egl| {
egl.MakeCurrent(egl_display, egl::NO_SURFACE, egl::NO_SURFACE, egl::NO_CONTEXT);
if self.context_is_owned {
let result = egl.DestroyContext(egl_display, self.egl_context);
assert_ne!(result, egl::FALSE);
}
self.egl_context = egl::NO_CONTEXT;
});
}
pub(crate) fn native_context(&self) -> NativeContext {
let egl_surfaces = match self.framebuffer {
Framebuffer::Surface(ref surface) => surface.egl_surfaces(),
Framebuffer::External(ref surfaces) => (*surfaces).clone(),
Framebuffer::None => ExternalEGLSurfaces::default(),
};
NativeContext {
egl_context: self.egl_context,
egl_draw_surface: egl_surfaces.draw,
egl_read_surface: egl_surfaces.read,
}
}
pub(crate) unsafe fn make_current(&self, egl_display: EGLDisplay) -> Result<(), Error> {
let egl_surfaces = match self.framebuffer {
Framebuffer::Surface(ref surface) => surface.egl_surfaces(),
Framebuffer::External(ref surfaces) => (*surfaces).clone(),
Framebuffer::None => ExternalEGLSurfaces::default(),
};
EGL_FUNCTIONS.with(|egl| {
let result = egl.MakeCurrent(egl_display,
egl_surfaces.draw,
egl_surfaces.read,
self.egl_context);
if result == egl::FALSE {
let err = egl.GetError().to_windowing_api_error();
return Err(Error::MakeCurrentFailed(err));
}
Ok(())
})
}
#[inline]
pub(crate) fn is_current(&self) -> bool {
unsafe {
EGL_FUNCTIONS.with(|egl| egl.GetCurrentContext() == self.egl_context)
}
}
pub(crate) unsafe fn bind_surface(&mut self,
egl_display: EGLDisplay,
surface: EGLBackedSurface)
-> Result<(), (Error, EGLBackedSurface)> {
if self.id != surface.context_id {
return Err((Error::IncompatibleSurface, surface));
}
match self.framebuffer {
Framebuffer::None => self.framebuffer = Framebuffer::Surface(surface),
Framebuffer::External(_) => return Err((Error::ExternalRenderTarget, surface)),
Framebuffer::Surface(_) => return Err((Error::SurfaceAlreadyBound, surface)),
}
        // If we're current, call `make_current()` again to switch to the new framebuffer.
if self.is_current() {
drop(self.make_current(egl_display))
}
Ok(())
}
pub(crate) unsafe fn unbind_surface(&mut self, gl: &Gl, egl_display: EGLDisplay)
-> Result<Option<EGLBackedSurface>, Error> {
match self.framebuffer {
Framebuffer::None => return Ok(None),
Framebuffer::Surface(_) => {}
Framebuffer::External(_) => return Err(Error::ExternalRenderTarget),
}
let surface = match mem::replace(&mut self.framebuffer, Framebuffer::None) {
Framebuffer::Surface(surface) => surface,
Framebuffer::None | Framebuffer::External(_) => unreachable!(),
};
// If we're current, we stay current, but with no surface attached.
surface.unbind(gl, egl_display, self.egl_context);
Ok(Some(surface))
}
pub(crate) fn surface_info(&self) -> Result<Option<SurfaceInfo>, Error> {
match self.framebuffer {
Framebuffer::None => Ok(None),
Framebuffer::External(_) => Err(Error::ExternalRenderTarget),
Framebuffer::Surface(ref surface) => Ok(Some(surface.info())),
}
}
}
impl NativeContext {
/// Returns the current EGL context and surfaces, if applicable.
///
/// If there is no current EGL context, this returns a `NoCurrentContext` error.
pub fn current() -> Result<NativeContext, Error> {
EGL_FUNCTIONS.with(|egl| {
unsafe {
let egl_context = egl.GetCurrentContext();
if egl_context == egl::NO_CONTEXT {
Err(Error::NoCurrentContext)
} else {
Ok(NativeContext {
egl_context,
egl_read_surface: egl.GetCurrentSurface(egl::READ as EGLint),
egl_draw_surface: egl.GetCurrentSurface(egl::DRAW as EGLint),
})
}
}
})
}
}
impl ContextDescriptor {
pub(crate) unsafe fn new(egl_display: EGLDisplay,
attributes: &ContextAttributes,
extra_config_attributes: &[EGLint])
-> Result<ContextDescriptor, Error> {
let flags = attributes.flags;
let alpha_size = if flags.contains(ContextAttributeFlags::ALPHA) { 8 } else { 0 };
let depth_size = if flags.contains(ContextAttributeFlags::DEPTH) { 24 } else { 0 };
let stencil_size = if flags.contains(ContextAttributeFlags::STENCIL) { 8 } else { 0 };
let compatibility_profile = flags.contains(ContextAttributeFlags::COMPATIBILITY_PROFILE);
// Mesa doesn't support the OpenGL compatibility profile post version 3.0. Take that into
// account.
if compatibility_profile &&
(attributes.version.major > 3 ||
attributes.version.major == 3 && attributes.version.minor > 0) {
return Err(Error::UnsupportedGLProfile);
}
// Create required config attributes.
//
// We check these separately because `eglChooseConfig` on its own might give us 32-bit
// color when 24-bit color is requested, and that can break code.
let required_config_attributes = [
egl::RED_SIZE as EGLint, RGB_CHANNEL_BIT_DEPTH,
egl::GREEN_SIZE as EGLint, RGB_CHANNEL_BIT_DEPTH,
egl::BLUE_SIZE as EGLint, RGB_CHANNEL_BIT_DEPTH,
];
// Create config attributes.
let mut requested_config_attributes = required_config_attributes.to_vec();
requested_config_attributes.extend_from_slice(&[
egl::ALPHA_SIZE as EGLint, alpha_size,
egl::DEPTH_SIZE as EGLint, depth_size,
egl::STENCIL_SIZE as EGLint, stencil_size,
]);
requested_config_attributes.extend_from_slice(extra_config_attributes);
requested_config_attributes.extend_from_slice(&[egl::NONE as EGLint, 0, 0, 0]);
EGL_FUNCTIONS.with(|egl| {
// See how many applicable configs there are.
let mut config_count = 0;
let result = egl.ChooseConfig(egl_display,
requested_config_attributes.as_ptr(),
ptr::null_mut(),
0,
&mut config_count);
if result == egl::FALSE {
let err = egl.GetError().to_windowing_api_error();
return Err(Error::PixelFormatSelectionFailed(err));
}
if config_count == 0 {
return Err(Error::NoPixelFormatFound);
}
// Enumerate all those configs.
let mut configs = vec![ptr::null(); config_count as usize];
let mut real_config_count = config_count;
let result = egl.ChooseConfig(egl_display,
requested_config_attributes.as_ptr(),
configs.as_mut_ptr(),
config_count,
&mut real_config_count);
if result == egl::FALSE {
let err = egl.GetError().to_windowing_api_error();
return Err(Error::PixelFormatSelectionFailed(err));
}
// Sanitize configs.
let egl_config = configs.into_iter().filter(|&egl_config| {
required_config_attributes.chunks(2).all(|pair| {
get_config_attr(egl_display, egl_config, pair[0]) == pair[1]
})
}).next();
let egl_config = match egl_config {
None => return Err(Error::NoPixelFormatFound),
Some(egl_config) => egl_config,
};
// Get the config ID and version.
let egl_config_id = get_config_attr(egl_display, egl_config, egl::CONFIG_ID as EGLint);
let gl_version = attributes.version;
Ok(ContextDescriptor {
egl_config_id,
gl_version,
compatibility_profile,
})
})
}
pub(crate) unsafe fn from_egl_context(gl: &Gl,
egl_display: EGLDisplay,
egl_context: EGLContext)
-> ContextDescriptor {
let egl_config_id = get_context_attr(egl_display, egl_context, egl::CONFIG_ID as EGLint);
EGL_FUNCTIONS.with(|egl| {
let _guard = CurrentContextGuard::new();
egl.MakeCurrent(egl_display, egl::NO_SURFACE, egl::NO_SURFACE, egl_context);
let gl_version = GLVersion::current(gl);
let compatibility_profile = context::current_context_uses_compatibility_profile(gl);
ContextDescriptor { egl_config_id, gl_version, compatibility_profile }
})
}
#[allow(dead_code)]
|
0, 0,
];
EGL_FUNCTIONS.with(|egl| {
let (mut config, mut config_count) = (ptr::null(), 0);
let result = egl.ChooseConfig(egl_display,
config_attributes.as_ptr(),
&mut config,
1,
&mut config_count);
assert_ne!(result, egl::FALSE);
assert!(config_count > 0);
config
})
}
pub(crate) unsafe fn attributes(&self, egl_display: EGLDisplay) -> ContextAttributes {
let egl_config = egl_config_from_id(egl_display, self.egl_config_id);
let alpha_size = get_config_attr(egl_display, egl_config, egl::ALPHA_SIZE as EGLint);
let depth_size = get_config_attr(egl_display, egl_config, egl::DEPTH_SIZE as EGLint);
let stencil_size = get_config_attr(egl_display, egl_config, egl::STENCIL_SIZE as EGLint);
// Convert to `surfman` context attribute flags.
let mut attribute_flags = ContextAttributeFlags::empty();
attribute_flags.set(ContextAttributeFlags::ALPHA, alpha_size != 0);
attribute_flags.set(ContextAttributeFlags::DEPTH, depth_size != 0);
attribute_flags.set(ContextAttributeFlags::STENCIL, stencil_size != 0);
attribute_flags.set(ContextAttributeFlags::COMPATIBILITY_PROFILE,
self.compatibility_profile);
// Create appropriate context attributes.
ContextAttributes { flags: attribute_flags, version: self.gl_version }
}
}
impl CurrentContextGuard {
pub(crate) fn new() -> CurrentContextGuard {
EGL_FUNCTIONS.with(|egl| {
unsafe {
CurrentContextGuard {
egl_display: egl.GetCurrentDisplay(),
old_egl_draw_surface: egl.GetCurrentSurface(egl::DRAW as EGLint),
old_egl_read_surface: egl.GetCurrentSurface(egl::READ as EGLint),
old_egl_context: egl.GetCurrentContext(),
}
}
})
}
}
pub(crate) unsafe fn create_context(egl_display: EGLDisplay,
descriptor: &ContextDescriptor,
gl_api: GLApi)
-> Result<EGLContext, Error> {
EGL_FUNCTIONS.with(|egl| {
let ok = match gl_api {
GLApi::GL => egl.BindAPI(egl::OPENGL_API),
GLApi::GLES => egl.BindAPI(egl::OPENGL_ES_API),
};
assert_ne!(ok, egl::FALSE);
});
let egl_config = egl_config_from_id(egl_display, descriptor.egl_config_id);
let mut profile_mask = EGL_CONTEXT_OPENGL_CORE_PROFILE_BIT;
if descriptor.compatibility_profile {
profile_mask |= EGL_CONTEXT_OPENGL_COMPATIBILITY_PROFILE_BIT;
}
// Include some extra zeroes to work around broken implementations.
//
// FIXME(pcwalton): Which implementations are those? (This is copied from Gecko.)
let egl_context_attributes = [
egl::CONTEXT_CLIENT_VERSION as EGLint, descriptor.gl_version.major as EGLint,
EGL_CONTEXT_MINOR_VERSION_KHR as EGLint, descriptor.gl_version.minor as EGLint,
EGL_CONTEXT_OPENGL_PROFILE_MASK as EGLint, profile_mask,
egl::NONE as EGLint, 0,
0, 0,
];
EGL_FUNCTIONS.with(|egl| {
let egl_context = egl.CreateContext(egl_display,
egl_config,
egl::NO_CONTEXT,
egl_context_attributes.as_ptr());
if egl_context == egl::NO_CONTEXT {
let err = egl.GetError();
let err = err.to_windowing_api_error();
return Err(Error::ContextCreationFailed(err));
}
Ok(egl_context)
})
}
pub(crate) unsafe fn make_no_context_current(egl_display: EGLDisplay) -> Result<(), Error> {
EGL_FUNCTIONS.with(|egl| {
let result = egl.MakeCurrent(egl_display,
egl::NO_SURFACE,
egl::NO_SURFACE,
egl::NO_CONTEXT);
if result == egl::FALSE {
let err = egl.GetError().to_windowing_api_error();
return Err(Error::MakeCurrentFailed(err));
}
Ok(())
})
}
pub(crate) unsafe fn get_config_attr(egl_display: EGLDisplay, egl_config: EGLConfig, attr: EGLint)
-> EGLint {
EGL_FUNCTIONS.with(|egl| {
let mut value = 0;
let result = egl.GetConfigAttrib(egl_display, egl_config, attr, &mut value);
assert_ne!(result, egl::FALSE);
value
})
}
pub(crate) unsafe fn get_context_attr(egl_display: EGLDisplay,
egl_context: EGLContext,
attr: EGLint)
-> EGLint {
EGL_FUNCTIONS.with(|egl| {
let mut value = 0;
let result = egl.QueryContext(egl_display, egl_context, attr, &mut value);
assert_ne!(result, egl::FALSE);
value
})
}
pub(crate) unsafe fn egl_config_from_id(egl_display: EGLDisplay, egl_config_id: EGLint)
-> EGLConfig {
let config_attributes = [
egl::CONFIG_ID as EGLint, egl_config_id,
egl::NONE as EGLint, 0,
0, 0,
];
EGL_FUNCTIONS.with(|egl| {
let (mut config, mut config_count) = (ptr::null(), 0);
let result = egl.ChooseConfig(egl_display,
config_attributes.as_ptr(),
&mut config,
1,
&mut config_count);
assert_ne!(result, egl::FALSE);
assert!(config_count > 0);
config
})
}
pub(crate) fn get_proc_address(symbol_name: &str) -> *const c_void {
EGL_FUNCTIONS.with(|egl| {
unsafe {
let symbol_name: CString = CString::new(symbol_name).unwrap();
egl.GetProcAddress(symbol_name.as_ptr() as *const u8 as *const c_char) as *const c_void
}
})
}
// Creates and returns a dummy pbuffer surface for the given context. This is used as the default
// framebuffer on some backends.
#[allow(dead_code)]
pub(crate) unsafe fn create_dummy_pbuffer(egl_display: EGLDisplay, egl_context: EGLContext)
-> EGLSurface {
let egl_config_id = get_context_attr(egl_display, egl_context, egl::CONFIG_ID as EGLint);
let egl_config = egl_config_from_id(egl_display, egl_config_id);
let pbuffer_attributes = [
egl::WIDTH as EGLint, DUMMY_PBUFFER_SIZE,
egl::HEIGHT as EGLint, DUMMY_PBUFFER_SIZE,
egl::NONE as EGLint, 0,
0, 0,
];
EGL_FUNCTIONS.with(|egl| {
let pbuffer = egl.CreatePbufferSurface(egl_display,
egl_config,
pbuffer_attributes.as_ptr());
assert_ne!(pbuffer, egl::NO_SURFACE);
pbuffer
})
}
|
pub(crate) unsafe fn to_egl_config(&self, egl_display: EGLDisplay) -> EGLConfig {
let config_attributes = [
egl::CONFIG_ID as EGLint, self.egl_config_id,
egl::NONE as EGLint, 0,
|
addShortRangeComm.js
|
import React from "react";
import {FormGroup, Label, Input} from "helpers/reactstrap";
export default ({updateArgs, args, client}) => {
return (
<FormGroup className="macro-addShortRangeComm">
<p>
Create a comm using either the name of the signal, the frequency, or
nothing. If the simulator doesn't have a signal name, it defaults to the
frequency.
</p>
<Label>
Signal Name{" "}
<small>
Use the name of the signal in the Short Range Comm simulator config
</small>
</Label>
<Input
type="text"
placeholder="Optional"
defaultValue={args.signalName}
onBlur={evt => updateArgs("signalName", evt.target.value)}
/>
<Label>
Frequency{" "}
<small>
Use a number between 0 and 1. 0 is the top, 1 is the bottom.
</small>
</Label>
<Input
type="number"
min="0"
|
placeholder="Optional"
defaultValue={args.frequency}
onBlur={evt => updateArgs("frequency", evt.target.value)}
/>
</FormGroup>
);
};
|
max="1"
|
ARC076e.py
|
#ARC076e
def
|
():
import sys
input=sys.stdin.readline
sys.setrecursionlimit(10**6)
if __name__ == '__main__':
main()
|
main
|
and_immediate.py
|
from armulator.armv6.opcodes.abstract_opcode import AbstractOpcode
class AndImmediate(AbstractOpcode):
|
def __init__(self, setflags, d, n, imm32, carry):
super(AndImmediate, self).__init__()
self.setflags = setflags
self.d = d
self.n = n
self.imm32 = imm32
self.carry = carry
def execute(self, processor):
if processor.condition_passed():
result = processor.registers.get(self.n) & self.imm32
if self.d == 15:
processor.alu_write_pc(result)
else:
processor.registers.set(self.d, result)
if self.setflags:
processor.registers.cpsr.set_n(result[0])
processor.registers.cpsr.set_z(result.all(False))
processor.registers.cpsr.set_c(self.carry)
|
|
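A plain-Python sketch of the flag semantics in AndImmediate.execute above: N comes from the result's top bit, Z is set when the result is zero, and C is passed through from the decoded immediate (32-bit registers assumed).

```python
# Sketch (assumes 32-bit registers) of the AND-immediate result and flags.
def and_imm(rn: int, imm32: int, carry: bool):
    result = (rn & imm32) & 0xFFFFFFFF
    n = bool(result >> 31)   # N: most significant bit of the result
    z = result == 0          # Z: set when the result is zero
    return result, n, z, carry

print(and_imm(0xF0F0F0F0, 0x0000FFFF, False))  # (61680, False, False, False)
```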
auth-context.tsx
|
import React from 'react';
import firebase from 'firebase/app';
import 'firebase/auth';
import { useRouter } from 'next/router';
import { Course, Grade } from '../common/data';
import { YearClassification } from '../common/student';
/**
* A user for this app.
*/
export interface ServiceUser {
/**
* This user's globally unique primary identifier unrelated to any other
* forms of identification.
*/
id: string;
/**
* The user's preferred name. If the user has not set a preferred name yet (on
   * first-time set-up), and the user is authenticated using a third-party
* identity provider, this may be the user's full name.
*/
name: string;
/**
* The user's email or null.
*
   * If the user is signed in using a federated identity provider, this may be
* null. If the user is not authenticated, this will be null.
*
* If the user has signed in with email and password, this will always be a
* valid email.
*/
email: string | null;
/**
* A profile image for the user.
*
* This may be null if the user has chosen not to upload a profile image or if
* their federated identity provider does not have an image associated with
* the user.
*/
image: string | null;
/**
* Determines whether or not this user requires re-authentication to perform
* a task.
*/
requiresAuthentication: () => boolean;
}
export interface PlanData {
id: string;
title: string;
type: 'major' | 'minor' | 'certificate' | 'honors';
}
export interface CourseAttempt {
semester: string;
grade: Grade;
course: Course;
}
export interface StudentInfo {
classification: YearClassification;
/**
* An ID corresponding to the user's primary major of study.
*/
primaryMajor: string;
/**
* A list of IDs corresponding to all selected plans of study.
*/
requiredPlans: PlanData[];
joinDate: Date;
attemptedCourses: CourseAttempt[];
}
const ANONYMOUS_USER = {
id: 'guest',
email: null,
name: 'Student',
image: null,
requiresAuthentication(): boolean {
return false;
},
};
/**
* A mapping of all users.
*/
export const users: { [key: string]: ServiceUser } = {
anonymous: ANONYMOUS_USER,
default: {
id: 'default',
email: null,
name: 'Student',
image: 'https://picsum.photos/256',
// TODO: Probably rethink this approach
requiresAuthentication(): boolean {
return false;
},
}, // First (and only) signed in user
};
/**
* Utility attributes and functions used to handle user auth state within an AuthContext.
*/
interface AuthContextState {
/**
* The current user. If no user is signed in, the user is anonymous.
*/
user: ServiceUser;
/**
* Returns whether a user is currently signed in to the Service.
*/
isSignedIn: boolean;
/**
* Forces a sign-in and redirects to the given link after success.
*
* @param redirect An in-app location to redirect to after sign-in
*/
authWithRedirect: (redirect: string) => void;
/**
* Signs in using Google OAuth pop-up.
*/
signInWithGoogle: () => void;
/**
* Signs in with email and password.
|
signInWithEmail: (email: string, password: string) => void;
/**
   * Creates a new account with email and password.
*/
signUpWithEmail: (email: string, password: string) => void;
/**
* Switches the currently active account.
*/
switchAccounts: (accountId: 'guest' | 'default') => void;
/**
* Signs out of the current user session if active.
*/
signOut: () => Promise<void>;
/**
* Attempts sending a password reset link to the user with the given email.
*/
resetPassword: (email: string) => void;
}
const AuthContext = React.createContext<AuthContextState | undefined>(undefined); // Find a better solution for this
/**
 * A React hook that exposes the current auth context state.
*/
function useAuthContext(): AuthContextState {
const context = React.useContext(AuthContext);
if (context == null) {
    throw new Error('useAuthContext must be used within an AuthProvider');
}
return context;
}
function AuthProvider({ children }: { children: React.ReactNode }): JSX.Element {
const [user, setUser] = React.useState<ServiceUser>(users.anonymous);
const [redirect, setRedirect] = React.useState('/app');
const [shouldRedirect, setShouldRedirect] = React.useState(false);
const history = useRouter();
const updateUser = React.useCallback((firebaseUser: firebase.User | null) => {
if (firebaseUser === null) {
// User is signed out
// TODO(auth): Determine if we want to remove user data from device on sign out
setUser(ANONYMOUS_USER);
return;
}
const { displayName, email, photoURL, uid } = firebaseUser;
setUser({
id: uid,
name: displayName || 'Student',
email: email,
image: photoURL,
requiresAuthentication(): boolean {
// TODO: Determine based on last sign in time
return false;
},
});
}, []);
React.useEffect(() => {
firebase.auth().onAuthStateChanged((user) => {
updateUser(user);
});
}, []);
React.useEffect(() => {
if (shouldRedirect) {
if (redirect) {
history.push(redirect);
setShouldRedirect(false);
} else {
console.error('Redirect location is null');
}
}
}, [shouldRedirect]);
/**
* Switches the currently active user session.
*
* @param accountId The UID of the account to switch to
*/
function switchAccounts(accountId: 'guest' | 'default') {
const user = users[accountId];
setUser(user);
// TODO: Update localStorage
console.log('Switched active account to ' + accountId);
}
/**
* Signs out the currently signed-in user.
*
* This switches to the guest user.
*/
async function signOut() {
return firebase
.auth()
.signOut()
.then(() => {
const user = users.anonymous;
setUser(user);
setRedirect('/');
setShouldRedirect(true);
// TODO: Update localStorage
console.log('Signed out user; switched to guest.');
})
.catch(() => {
console.error('Could not sign out.');
});
}
/**
* Attempts resetting the password for the user with the given email.
*
* If an account with the given email does not exist, this is a no-op.
*
* @param email The user's email
*/
const resetPassword = React.useCallback(async (email: string) => {
// TODO: Probably create another function that uses the currently signed in user
return firebase
.auth()
.sendPasswordResetEmail(email)
.then(() => {
console.log('Password reset email sent.');
setRedirect('/app');
})
.catch((error) => {
console.error('Could not send password reset.', error);
// TODO(auth): Handle error in UI
});
}, []);
/**
* Tries creating an account using the email and password provided.
*
* @param email The user's email
* @param password The user's desired password
*/
const signUpWithEmail = React.useCallback(async (email: string, password: string) => {
return firebase
.auth()
.createUserWithEmailAndPassword(email, password)
.then(({ /* credential, */ user }) => {
updateUser(user);
setRedirect('/app/onboarding');
setShouldRedirect(true);
})
.catch((error) => {
// Handle Errors here.
const { code, message } = error;
        if (code === 'auth/weak-password') {
console.warn('The password is too weak.');
} else {
console.log(message);
}
console.log(error);
});
}, []);
/**
* Authenticates using an email and password.
*
* @param email The user's email
* @param password The user's password
*/
const signInWithEmail = React.useCallback(async (email: string, password: string) => {
return firebase
.auth()
.signInWithEmailAndPassword(email, password)
.then(({ credential, user }) => {
console.log('Credential', credential);
console.log('User', user);
if (user === null) {
// Something really went wrong
console.error("The signed-in user is null? That doesn't seem right.");
return;
}
updateUser(user);
setShouldRedirect(true);
})
.catch((error) => {
console.error('Error when signing in', error);
// TODO(auth): Handle error appropriately
});
}, []);
const signInWithGoogle = React.useCallback(async () => {
const provider = new firebase.auth.GoogleAuthProvider();
return firebase
.auth()
.signInWithPopup(provider)
.then(({ credential, user }) => {
console.log('Credential', credential);
console.log('User', user);
if (user === null) {
// Something really went wrong
console.error("The signed-in user is null? That doesn't seem right.");
return;
}
updateUser(user);
setShouldRedirect(true);
})
.catch((error) => {
console.error('Error when signing in', error);
// TODO(auth): Handle error appropriately
});
}, []);
/**
* Navigates to the AuthPage and forces a sign-in.
*
   * @param redirect The link to navigate back to after a successful sign-in
*/
const authWithRedirect = (redirect: string) => {
history.push('/auth');
setRedirect(redirect);
};
const isSignedIn = user.id !== 'anonymous';
const authContextValue: AuthContextState = {
user,
isSignedIn,
authWithRedirect,
signUpWithEmail,
switchAccounts,
signInWithGoogle,
signInWithEmail,
signOut,
resetPassword,
};
return <AuthContext.Provider value={authContextValue}>{children}</AuthContext.Provider>;
}
export { AuthContext, AuthProvider, useAuthContext };
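// Usage sketch (hypothetical consumer component, not part of the original file;
// assumes an <AuthProvider> higher in the tree):
//
//   function AccountBadge(): JSX.Element {
//     const { user, isSignedIn, signOut } = useAuthContext();
//     if (!isSignedIn) return <span>Guest</span>;
//     return <button onClick={signOut}>Sign out {user.name}</button>;
//   }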
|
*/
|
chap04.py
|
from matrices import Matrix
from tuples import Point
from canvas import Canvas
from colours import Colour
from math import pi
def run():
# our clock face will be drawn in the x-y plane, so z-components will always be 0
WIDTH = 500
HEIGHT = 500
c = Canvas(WIDTH, HEIGHT)
for i in range(12):
p = Point(0, 0, 0)
# Move (translate) the point to the 12-o'clock position
t = Matrix.translation(0, 200, 0)
p12 = t * p
# Rotate the point by -i * pi / 6 radians
r = Matrix.rotation_z(-i * pi / 6)
# pr = r * p12
# c.write_pixel(int(pr.x + WIDTH / 2), int(HEIGHT / 2 - pr.y), Colour(0.5, 0.2, 1))
# Create compound matrix (in reverse order!)
trans_rot = r * t
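        # Matrices apply right to left: (r * t) * p first translates p to the
        # 12 o'clock position, then rotates it. E.g. for i == 3 the point
        # (0, 200, 0) ends up at roughly (200, 0, 0), i.e. 3 o'clock.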
pr = trans_rot * p
print_pixel_block(c, int(pr.x), int(pr.y))
c.write_to_ppm('..\\images\\clock.ppm')
def
|
(c, x, y, size=5):
for yy in range(y - size // 2, y + 1 + size // 2):
for xx in range(x - size // 2, x + 1 + size // 2):
c.write_pixel(int(xx + c.width / 2), int(c.height / 2 - yy), Colour(0.5, 0.2, 1))
if __name__ == '__main__':
run()
|
print_pixel_block
|
mm.py
|
from hwt.hdl.constants import DIRECTION, READ, WRITE, NOP, READ_WRITE
from hwt.interfaces.agents.handshaked import HandshakedAgent
from hwt.interfaces.std import VectSignal, Signal
from hwt.simulator.agentBase import SyncAgentBase
from hwt.synthesizer.interface import Interface
from hwt.synthesizer.param import Param
from hwt.interfaces.agents.vldSynced import VldSyncedAgent
from collections import deque
from pyMathBitPrecise.bit_utils import mask
from hwtSimApi.hdlSimulator import HdlSimulator
from hwt.math import log2ceil
RESP_OKAY = 0b00
# RESP_RESERVED = 0b01
RESP_SLAVEERROR = 0b10
RESP_DECODEERROR = 0b11
class AvalonMM(Interface):
"""
Avalon Memory Mapped interface
:note: handshaked, shared address and response channel
https://www.intel.com/content/dam/altera-www/global/en_US/pdfs/literature/manual/mnl_avalon_spec.pdf
.. hwt-autodoc::
"""
def _config(self):
self.ADDR_WIDTH = Param(32)
self.DATA_WIDTH = Param(32)
self.MAX_BURST = Param(0)
def _declr(self):
# self.debugAccess = Signal()
IN = DIRECTION.IN
self.address = VectSignal(self.ADDR_WIDTH)
self.byteEnable = VectSignal(self.DATA_WIDTH // 8)
self.read = Signal()
self.readData = VectSignal(self.DATA_WIDTH, masterDir=IN)
self.readDataValid = Signal(masterDir=IN) # read data valid
self.response = VectSignal(2, masterDir=IN)
self.write = Signal()
self.writeData = VectSignal(self.DATA_WIDTH)
# self.lock = Signal()
self.waitRequest = Signal(masterDir=IN)
self.writeResponseValid = Signal(masterDir=IN)
if self.MAX_BURST != 0:
self.burstCount = VectSignal(log2ceil(self.MAX_BURST))
# self.beginBurstTransfer = Signal()
def _getWordAddrStep(self):
"""
:return: size of one word in unit of address
"""
return int(self.DATA_WIDTH) // self._getAddrStep()
def _getAddrStep(self):
"""
:return: how many bits is one unit of address
(e.g. 8 bits for char * pointer, 36 for 36 bit bram)
"""
return 8
def _initSimAgent(self, sim: HdlSimulator):
self._ag = AvalonMmAgent(sim, self)
class AvalonMmDataRAgent(VldSyncedAgent):
"""
    Simulation/verification agent for data part of AvalonMM interface
* vld signal = readDataValid
* data signal = (readData, response)
"""
@classmethod
def get_valid_signal(cls, intf):
return intf.readDataValid
def get_valid(self):
return self._vld.read()
def set_valid(self, val):
self._vld.write(val)
def
|
(self):
"""extract data from interface"""
intf = self.intf
return (intf.readData.read(), intf.response.read())
def set_data(self, data):
"""write data to interface"""
intf = self.intf
if data is None:
intf.readData.write(None)
intf.response.write(None)
else:
readData, response = data
intf.readData.write(readData)
intf.response.write(response)
class AvalonMmAddrAgent(HandshakedAgent):
"""
    data format is a tuple (READ/WRITE, address, burstCount)

    * two valid signals "read", "write"
    * one ready_n signal "waitRequest"
    * on write, the data and byteEnable are taken from the wData queue as well
"""
def __init__(self, sim: HdlSimulator, intf, allowNoReset=False):
HandshakedAgent.__init__(self, sim, intf, allowNoReset=allowNoReset)
self.wData = deque()
@classmethod
def get_ready_signal(cls, intf):
return intf.waitRequest
def get_ready(self):
rd = self._rd.read()
rd.val = int(not rd.val)
return rd
def set_ready(self, val):
self._rd.write(int(not val))
@classmethod
def get_valid_signal(cls, intf):
return (intf.read, intf.write)
def get_valid(self):
r = self._vld[0].read()
w = self._vld[1].read()
r.val = r.val | w.val
r.vld_mask = r.vld_mask & w.vld_mask
return r
def set_valid(self, val):
if self.actualData is None or self.actualData is NOP:
r = 0
w = 0
else:
mode = self.actualData[0]
if mode is READ:
r = val
w = 0
elif mode is WRITE:
r = 0
w = val
else:
raise ValueError("Unknown mode", mode)
self._vld[0].write(r)
self._vld[1].write(w)
def get_data(self):
intf = self.intf
address = intf.address.read()
byteEnable = intf.byteEnable.read()
read = intf.read.read()
write = intf.write.read()
wdata = intf.writeData.read()
if intf.MAX_BURST != 0:
burstCount = intf.burstCount.read()
else:
burstCount = 1
if read.val:
if write.val:
rw = READ_WRITE
else:
rw = READ
elif write.val:
rw = WRITE
else:
            raise AssertionError(
                "This function should not be called when data "
                "is not ready on the interface")
if rw == WRITE or rw == READ_WRITE:
self.wData.append((wdata, byteEnable))
return (rw, address, burstCount)
def set_data(self, data):
intf = self.intf
if data is None:
intf.address.write(None)
intf.byteEnable.write(None)
if intf.MAX_BURST != 0:
intf.burstCount.write(None)
intf.read.write(0)
intf.write.write(0)
else:
rw, address, burstCount = data
if rw is READ:
rd, wr = 1, 0
be = mask(intf.readData._dtype.bit_length() // 8)
        elif rw is WRITE:
            rd, wr = 0, 1
            d, be = self.wData.popleft()
            intf.writeData.write(d)
else:
raise TypeError(f"rw is in invalid format {rw}")
intf.address.write(address)
intf.byteEnable.write(be)
assert int(burstCount) >= 1, burstCount
if intf.MAX_BURST:
intf.burstCount.write(burstCount)
intf.read.write(rd)
intf.write.write(wr)
class AvalonMmWRespAgent(VldSyncedAgent):
@classmethod
def get_valid_signal(cls, intf):
return intf.writeResponseValid
def get_data(self):
return self.intf.response.read()
def set_data(self, data):
self.intf.response.write(data)
class AvalonMmAgent(SyncAgentBase):
"""
Simulation agent for AvalonMM bus interface
:ivar ~.req: request data, items are tuples (READ/WRITE, address, burstCount)
:ivar ~.wData: data to write, items are tuples (data, byteenable)
:ivar ~.wResp: write response data
    :ivar ~.rData: data read from interface, items are tuples (data, response)
"""
def __init__(self, sim: HdlSimulator, intf, allowNoReset=False):
SyncAgentBase.__init__(self, sim, intf, allowNoReset=allowNoReset)
self.addrAg = AvalonMmAddrAgent(sim, intf, allowNoReset=allowNoReset)
self.rDataAg = AvalonMmDataRAgent(sim, intf, allowNoReset=allowNoReset)
self.wRespAg = AvalonMmWRespAgent(sim, intf, allowNoReset=allowNoReset)
def req_get(self):
return self.addrAg.data
def req_set(self, v):
self.addrAg.data = v
req = property(req_get, req_set)
def wData_get(self):
return self.addrAg.wData
def wData_set(self, v):
self.addrAg.wData = v
wData = property(wData_get, wData_set)
def wResp_get(self):
return self.wRespAg.data
    def wResp_set(self, v):
        self.wRespAg.data = v
wResp = property(wResp_get, wResp_set)
def rData_get(self):
return self.rDataAg.data
def rData_set(self, v):
self.rDataAg.data = v
rData = property(rData_get, rData_set)
def getDrivers(self):
self.setEnable = self.setEnable_asDriver
return (self.rDataAg.getMonitors()
+self.addrAg.getDrivers()
+self.wRespAg.getMonitors())
def getMonitors(self):
self.setEnable = self.setEnable_asMonitor
return (self.rDataAg.getDrivers()
+self.addrAg.getMonitors()
+self.wRespAg.getDrivers())
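# Usage sketch (hypothetical simulation; "u" is some unit with an AvalonMM
# interface "bus" — queue formats follow the docstrings above):
#
#   u.bus._ag.req.append((READ, 0x10, 1))      # single-beat read of word 0x10
#   u.bus._ag.wData.append((0x0123, mask(4)))  # (data, byteenable) for a write
#   u.bus._ag.req.append((WRITE, 0x14, 1))
#   # after the simulation, u.bus._ag.rData holds (data, response) tuples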
|
get_data
|
user.controller.ts
|
import { Body, Controller, Get, Param, Post, UseGuards } from '@nestjs/common';
import { AdminGuard } from 'src/guards/admin.guard';
import { IdGuard } from 'src/guards/id.guard';
import { UserResponse } from 'src/response/user.response';
import { UserDTO } from 'src/validation/user.dto';
import { UserService } from './user.service';
@Controller('user')
export class UserController {
constructor(
private readonly userService: UserService,
) { }
@UseGuards(IdGuard)
@Get('me')
me(@Param('id') id: number): Promise<UserResponse> {
return this.userService.getUser(id)
}
@UseGuards(IdGuard, AdminGuard)
@Get()
|
getAllUsers(): Promise<UserResponse> {
return this.userService.getAllUsers()
}
@Post('signup')
signup(@Body() userDTO: UserDTO): Promise<UserResponse> {
return this.userService.signup(userDTO)
}
@Post('login')
login(@Body() userDTO: UserDTO): Promise<UserResponse> {
return this.userService.login(userDTO)
}
// update(): Promise<UserResponse> {
// return null
// }
}
| |
version_enum_test.go
|
/*
Copyright 2021 The KubeSphere Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package kubesphere
import (
"reflect"
"testing"
)
func TestDevRelease(t *testing.T) {
tests := []struct {
name string
version string
want *KsInstaller
ok bool
}{
{
name: "test_v3.2.1-rc.1",
version: "v3.2.1-rc.1",
want: KsV321,
ok: true,
},
{
name: "test_v3.2.1",
version: "v3.2.1",
want: nil,
ok: false,
},
{
name: "test_v3.2.0",
version: "v3.2.0",
want: nil,
ok: false,
},
{
name: "test_v3.2.0-alpha.1",
version: "v3.2.0-alpha.1",
want: KsV320,
ok: true,
},
{
name: "test_v3.2.0-beta.1",
version: "v3.2.0-beta.1",
want: KsV320,
ok: true,
},
{
name: "test_v3.1.0-alpha.1",
version: "v3.1.0-alpha.1",
want: KsV310,
ok: true,
},
{
name: "test_latest",
version: "latest",
want: KsV321,
ok: true,
},
{
name: "test_master",
version: "master",
want: KsV321,
ok: true,
},
{
name: "test_release-3.2",
version: "release-3.2",
want: KsV321,
ok: true,
},
{
name: "test_v1.2.0",
version: "v1.2.0",
want: nil,
ok: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, ok := DevRelease(tt.version)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("DevRelease() got = %v, want %v", got, tt.want)
}
if ok != tt.ok {
t.Errorf("DevRelease() got1 = %v, want %v", ok, tt.ok)
}
})
}
}
func TestLatest(t *testing.T) {
tests := []struct {
name string
want *KsInstaller
}{
{
name: "test_latest",
want: KsV321,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
if got := Latest(); !reflect.DeepEqual(got, tt.want) {
t.Errorf("Latest() = %v, want %v", got, tt.want)
}
})
}
}
func TestLatestRelease(t *testing.T) {
tests := []struct {
name string
version string
want *KsInstaller
ok bool
}{
{
name: "test_latest",
version: "latest",
want: KsV321,
ok: true,
},
{
name: "test_master",
version: "master",
want: KsV321,
ok: true,
},
{
name: "test_release-3.2",
version: "release-3.2",
want: KsV321,
ok: true,
},
{
name: "test_v3.2.1",
version: "v3.2.1",
want: KsV321,
ok: true,
},
{
name: "test_v3.2.1-rc.1",
version: "v3.2.1-rc.1",
want: KsV321,
ok: true,
},
{
name: "test_v3.2.0",
version: "v3.2.0",
want: nil,
ok: false,
},
{
name: "test_v3.1.0",
version: "v3.1.0",
want: nil,
ok: false,
},
{
name: "test_v3.2.0-alpha.1",
version: "v3.2.0-alpha.1",
want: nil,
ok: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, ok := LatestRelease(tt.version)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("LatestRelease() got = %v, want %v", got, tt.want)
}
if ok != tt.ok {
t.Errorf("LatestRelease() got1 = %v, want %v", ok, tt.ok)
}
})
}
}
func TestStabledVersionSupport(t *testing.T)
|
{
tests := []struct {
name string
version string
want *KsInstaller
ok bool
}{
{
name: "test_v3.2.1-rc.1",
version: "v3.2.1-rc.1",
want: nil,
ok: false,
},
{
name: "test_v3.2.0",
version: "v3.2.0",
want: KsV320,
ok: true,
},
{
name: "test_3.2.0",
version: "3.2.0",
want: nil,
ok: false,
},
{
name: "test_v3.2.0-alpha.1",
version: "v3.2.0-alpha.1",
want: nil,
ok: false,
},
{
name: "test_v1.2.0",
version: "v1.2.0",
want: nil,
ok: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got, ok := StabledVersionSupport(tt.version)
if !reflect.DeepEqual(got, tt.want) {
t.Errorf("StabledVersionSupport() got = %v, want %v", got, tt.want)
}
if ok != tt.ok {
t.Errorf("StabledVersionSupport() got1 = %v, want %v", ok, tt.ok)
}
})
}
}
|
|
IconBadge.tsx
|
import {IconComponent} from '@sanity/icons'
import styled, {css} from 'styled-components'
import React, {createElement, memo} from 'react'
import {Text, Theme} from '@sanity/ui'
export type IconBadgeTone = 'positive' | 'caution' | 'critical'
interface IconBadgeRootStyleProps {
$disabled: boolean
$muted: boolean
$tone: IconBadgeTone
}
const Root = styled.div<IconBadgeRootStyleProps>(
({$disabled, $muted, $tone, theme}: IconBadgeRootStyleProps & {theme: Theme}) => {
const {color} = theme.sanity
const tone = $muted ? color.muted[$tone] : color.solid[$tone]
const state = $disabled ? tone.disabled : tone.enabled
return css`
--icon-badge-size: 27px;
--card-bg-color: ${state.bg};
--card-fg-color: ${state.fg};
--card-border-color: ${state.border};
width: var(--icon-badge-size);
height: var(--icon-badge-size);
border-radius: calc(var(--icon-badge-size) / 2);
display: flex;
align-items: center;
justify-content: center;
background-color: var(--card-bg-color);
color: var(--card-fg-color);
box-shadow: inset 0 0 0 1px var(--card-border-color);
margin: -3px;
& > span {
|
color: inherit;
}
`
}
)
export const IconBadge = memo(function IconBadge(props: {
disabled?: boolean
icon: IconComponent
muted?: boolean
tone: IconBadgeTone
}) {
const {disabled = false, icon, muted = false, tone} = props
return (
<Root $disabled={disabled} $muted={muted} $tone={tone} aria-hidden>
<Text as="span">{createElement(icon)}</Text>
</Root>
)
})
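// Usage sketch (illustrative; CheckmarkIcon stands in for any @sanity/icons
// IconComponent and is not imported in this file):
//
//   <IconBadge icon={CheckmarkIcon} tone="positive" />
//   <IconBadge icon={CheckmarkIcon} tone="critical" muted disabled />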
| |
queue.py
|
# Copyright 2017 reinforce.io. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
import tensorflow as tf
from tensorforce import util
from tensorforce.core.memories import Memory
class Queue(Memory):
"""
Base class for memories organized as a queue (FIFO).
"""
def __init__(self, states, internals, actions, include_next_states, capacity, scope='queue', summary_labels=None):
"""
Queue memory.
Args:
capacity: Memory capacity.
"""
self.capacity = capacity
self.scope = scope
# Pieces of the records are stored in different tensors:
self.states_memory = dict() # keys=state space components
self.internals_memory = dict() # keys=internal state components
self.actions_memory = dict() # keys=action space components
self.terminal_memory = None # 1D tensor
self.reward_memory = None # 1D tensor
self.memory_index = None # 0D (int) tensor (points to the next record to be overwritten)
        self.episode_indices = None  # 1D tensor of episode boundary indices (positions of terminal records).
self.episode_count = None # 0D (int) tensor: How many episodes do we have stored?
self.retrieve_indices = None
super(Queue, self).__init__(
states=states,
internals=internals,
actions=actions,
include_next_states=include_next_states,
scope=scope,
summary_labels=summary_labels
)
    def setup_template_funcs(self, custom_getter=None):
        custom_getter = super(Queue, self).setup_template_funcs(custom_getter=custom_getter)

        self.retrieve_indices = tf.make_template(
            name_=(self.scope + '/retrieve_indices'),
            func_=self.tf_retrieve_indices,
            custom_getter_=custom_getter
        )
        # Keep the chain intact for subclasses that call this via super().
        return custom_getter
def tf_initialize(self):
# States
for name in sorted(self.states_spec):
state = self.states_spec[name]
self.states_memory[name] = tf.get_variable(
name=('state-' + name),
shape=(self.capacity,) + tuple(state['shape']),
dtype=util.tf_dtype(state['type']),
trainable=False
)
# Internals
for name in sorted(self.internals_spec):
internal = self.internals_spec[name]
self.internals_memory[name] = tf.get_variable(
name=('internal-' + name),
shape=(self.capacity,) + tuple(internal['shape']),
dtype=util.tf_dtype(internal['type']),
trainable=False
)
# Actions
for name in sorted(self.actions_spec):
action = self.actions_spec[name]
self.actions_memory[name] = tf.get_variable(
name=('action-' + name),
shape=(self.capacity,) + tuple(action['shape']),
dtype=util.tf_dtype(action['type']),
trainable=False
)
# Terminal
self.terminal_memory = tf.get_variable(
name='terminal',
shape=(self.capacity,),
dtype=util.tf_dtype('bool'),
initializer=tf.constant_initializer(
value=False,
dtype=util.tf_dtype('bool')
),
trainable=False
)
# Reward
self.reward_memory = tf.get_variable(
name='reward',
shape=(self.capacity,),
dtype=util.tf_dtype('float'),
trainable=False
)
# Memory index
self.memory_index = tf.get_variable(
name='memory-index',
dtype=util.tf_dtype('int'),
initializer=0,
trainable=False
)
# Episode indices
self.episode_indices = tf.get_variable(
name='episode-indices',
shape=(self.capacity + 1,),
dtype=util.tf_dtype('int'),
initializer=tf.constant_initializer(value=(self.capacity - 1), dtype=util.tf_dtype('int')),
trainable=False
)
# Episodes index
self.episode_count = tf.get_variable(
name='episode-count',
dtype=util.tf_dtype('int'),
initializer=0,
trainable=False
)
def tf_store(self, states, internals, actions, terminal, reward):
# Memory indices to overwrite.
num_instances = tf.shape(input=terminal)[0]
with tf.control_dependencies([tf.assert_less_equal(num_instances, self.capacity)]):
indices = tf.range(self.memory_index, self.memory_index + num_instances) % self.capacity
# Remove episode indices.
num_episodes = tf.count_nonzero(
input_tensor=tf.gather(params=self.terminal_memory, indices=indices),
axis=0,
dtype=util.tf_dtype('int')
)
num_episodes = tf.minimum(x=num_episodes, y=self.episode_count)
assignment = tf.assign(
ref=self.episode_indices[:self.episode_count - num_episodes],
value=self.episode_indices[num_episodes: self.episode_count]
)
# Decrement episode count.
with tf.control_dependencies(control_inputs=(assignment,)):
assignment = tf.assign_sub(ref=self.episode_count, value=num_episodes)
# Assign new observations.
with tf.control_dependencies(control_inputs=(assignment,)):
assignments = list()
for name in sorted(states):
assignments.append(tf.scatter_update(
ref=self.states_memory[name],
indices=indices,
updates=states[name]
))
for name in sorted(internals):
assignments.append(tf.scatter_update(
ref=self.internals_memory[name],
indices=indices,
updates=internals[name]
))
for name in sorted(actions):
assignments.append(tf.scatter_update(
ref=self.actions_memory[name],
indices=indices,
updates=actions[name]
))
assignments.append(tf.scatter_update(ref=self.terminal_memory, indices=indices, updates=terminal))
assignments.append(tf.scatter_update(ref=self.reward_memory, indices=indices, updates=reward))
# Add episode indices.
with tf.control_dependencies(control_inputs=assignments):
num_episodes = tf.count_nonzero(input_tensor=terminal, axis=0, dtype=util.tf_dtype('int'))
assignment = tf.assign(
ref=self.episode_indices[self.episode_count: self.episode_count + num_episodes],
value=tf.boolean_mask(tensor=indices, mask=terminal)
)
# Increment episode count.
with tf.control_dependencies(control_inputs=(assignment,)):
assignment = tf.assign_add(ref=self.episode_count, value=num_episodes)
# Increment memory index.
with tf.control_dependencies(control_inputs=(assignment,)):
assignment = tf.assign(
ref=self.episode_indices[-1],
value=tf.where(self.memory_index + num_instances > self.capacity,
self.episode_indices[self.episode_count - 1], self.capacity - 1)
)
with tf.control_dependencies(control_inputs=(assignment,)):
assignment = tf.assign(ref=self.memory_index, value=((self.memory_index + num_instances) % self.capacity))
with tf.control_dependencies(control_inputs=(assignment,)):
return tf.no_op()
def tf_retrieve_indices(self, indices):
|
"""
Fetches experiences for given indices.
Args:
indices: Index tensor
Returns: Batch of experiences
"""
states = dict()
for name in sorted(self.states_memory):
states[name] = tf.gather(params=self.states_memory[name], indices=indices)
internals = dict()
for name in sorted(self.internals_memory):
internals[name] = tf.gather(params=self.internals_memory[name], indices=indices)
actions = dict()
for name in sorted(self.actions_memory):
actions[name] = tf.gather(params=self.actions_memory[name], indices=indices)
terminal = tf.gather(params=self.terminal_memory, indices=indices)
reward = tf.gather(params=self.reward_memory, indices=indices)
if self.include_next_states:
assert util.rank(indices) == 1
next_indices = (indices + 1) % self.capacity
next_states = dict()
for name in sorted(self.states_memory):
next_states[name] = tf.gather(params=self.states_memory[name], indices=next_indices)
next_internals = dict()
for name in sorted(self.internals_memory):
next_internals[name] = tf.gather(params=self.internals_memory[name], indices=next_indices)
return dict(
states=states,
internals=internals,
actions=actions,
terminal=terminal,
reward=reward,
next_states=next_states,
next_internals=next_internals
)
else:
return dict(
states=states,
internals=internals,
actions=actions,
terminal=terminal,
reward=reward
)
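# Usage sketch (illustrative): with include_next_states=True, retrieving indices
# [i, j, ...] also gathers the records at [(i + 1) % capacity, ...] as
# next_states / next_internals, which is only meaningful for consecutive,
# timestep-ordered indices:
#
#   batch = memory.retrieve_indices(tf.constant([0, 1, 2]))
#   batch['states'], batch['next_states']  # dicts keyed by component name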
|
|
constants.rs
|
//! Physical constants
//!
//! Many [unit magnitudes][crate::units] have their own associated constants.
//! E.g. [`Speed::SOUND`][crate::units::Speed::SOUND].
//!
//! The ones defined in this module don't pertain to any particular magnitude.
//!
//! - <https://en.wikipedia.org/wiki/Physical_constant>
//! - <https://en.wikipedia.org/wiki/List_of_physical_constants>
//! - <https://en.wikipedia.org/wiki/Dimensionless_physical_constant>
use crate::Magnitude;
/// The [Coulomb constant][0], the electric force constant,
/// or the electrostatic constant.
///
/// `8.9875517923×10⁹ N×m²/C²`
///
/// [0]:https://en.wikipedia.org/wiki/Coulomb_constant
//
// https://www.johndcook.com/blog/2021/03/31/coulombs-constant/
|
pub const COULOMB_CONSTANT: Magnitude = 8_987_551_792.3;
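// Usage sketch (Coulomb's law, F = k·q1·q2/r²; illustrative only — `q1`, `q2`
// and `r` are hypothetical `Magnitude` values, not part of this module):
//
//     let force: Magnitude = COULOMB_CONSTANT * q1 * q2 / (r * r);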
|
// https://physics.stackexchange.com/questions/93588/why-does-coulombs-constant-have-units
|
test_asyncio_transport.py
|
import aiohttp
import pytest
from aioresponses import aioresponses
from lxml import etree
from pretend import stub
from zeep import asyncio, exceptions
from zeep.cache import InMemoryCache
@pytest.mark.requests
def test_no_cache(event_loop):
transport = asyncio.AsyncTransport(loop=event_loop)
assert transport.cache is None
@pytest.mark.requests
def test_load(event_loop):
cache = stub(get=lambda url: None, add=lambda url, content: None)
transport = asyncio.AsyncTransport(loop=event_loop, cache=cache)
with aioresponses() as m:
m.get("http://tests.python-zeep.org/test.xml", body="x")
result = transport.load("http://tests.python-zeep.org/test.xml")
assert result == b"x"
@pytest.mark.requests
def test_load_cache(event_loop):
cache = InMemoryCache()
transport = asyncio.AsyncTransport(loop=event_loop, cache=cache)
with aioresponses() as m:
m.get("http://tests.python-zeep.org/test.xml", body="x")
result = transport.load("http://tests.python-zeep.org/test.xml")
assert result == b"x"
assert cache.get("http://tests.python-zeep.org/test.xml") == b"x"
def test_cache_checks_type():
cache = InMemoryCache()
async def foo():
|
pass
with pytest.raises(TypeError):
cache.add("x", foo())
@pytest.mark.requests
@pytest.mark.asyncio
async def test_post(event_loop):
cache = stub(get=lambda url: None, add=lambda url, content: None)
transport = asyncio.AsyncTransport(loop=event_loop, cache=cache)
envelope = etree.Element("Envelope")
with aioresponses() as m:
m.post("http://tests.python-zeep.org/test.xml", body="x")
result = await transport.post_xml(
"http://tests.python-zeep.org/test.xml", envelope=envelope, headers={}
)
assert result.content == b"x"
@pytest.mark.requests
@pytest.mark.asyncio
async def test_session_close(event_loop):
transport = asyncio.AsyncTransport(loop=event_loop)
session = transport.session # copy session object from transport
del transport
assert session.closed
@pytest.mark.requests
@pytest.mark.asyncio
async def test_session_no_close(event_loop):
session = aiohttp.ClientSession(loop=event_loop)
transport = asyncio.AsyncTransport(loop=event_loop, session=session)
del transport
assert not session.closed
@pytest.mark.requests
def test_http_error(event_loop):
transport = asyncio.AsyncTransport(loop=event_loop)
with aioresponses() as m:
m.get("http://tests.python-zeep.org/test.xml", body="x", status=500)
with pytest.raises(exceptions.TransportError) as exc:
transport.load("http://tests.python-zeep.org/test.xml")
assert exc.value.status_code == 500
assert exc.value.message is None
| |
AppState.tsx
|
import { FC, useReducer } from "react";
import socketIOClient, { Socket } from "socket.io-client";
import AppContext from "./AppContext";
import AppReducer from "./AppReducer";
import {
SET_SOCKET,
CLEAR_SOCKET,
IDENTIFIED_USER,
TOGGLE_DARK_MODE,
SET_DARK_MODE,
} from "../types";
import {
AppState as IAppState,
InitialiseSocket,
CloseSocket,
ClearSocket,
SetSocket,
ResetSocket,
ToggleDarkMode,
SetDarkMode,
IdentifySelf,
UpdateSocketList,
} from "context";
const AppState: FC = (props) => {
const initialState: IAppState = {
socket: null,
darkMode: false,
identifed: false,
};
const [state, dispatch] = useReducer(AppReducer, initialState);
/*
* Actions
*/
const initialiseSocket: InitialiseSocket = () => {
const socket: Socket = socketIOClient();
setSocket(socket);
};
const closeSocket: CloseSocket = () => {
state.socket && state.socket.close();
};
const clearSocket: ClearSocket = () => {
closeSocket();
dispatch({
type: CLEAR_SOCKET,
});
};
const setSocket: SetSocket = (socket) => {
closeSocket();
dispatch({
type: SET_SOCKET,
payload: socket,
});
};
const identifySelf: IdentifySelf = (user) => {
if (!state.socket) return;
state.socket.emit("identify", user);
dispatch({ type: IDENTIFIED_USER });
};
const updateSocketList: UpdateSocketList = (list) => {
if (!state.socket) return;
state.socket.emit("updateList", list);
};
const resetSocket: ResetSocket = () => {
closeSocket();
initialiseSocket();
};
const toggleDarkMode: ToggleDarkMode = () => {
dispatch({
type: TOGGLE_DARK_MODE,
});
};
const setDarkMode: SetDarkMode = (darkMode) => {
dispatch({
type: SET_DARK_MODE,
payload: darkMode,
});
};
return (
<AppContext.Provider
value={{
socket: state.socket,
darkMode: state.darkMode,
identifed: state.identifed,
initialiseSocket,
clearSocket,
setSocket,
identifySelf,
updateSocketList,
resetSocket,
toggleDarkMode,
setDarkMode,
}}
>
{props.children}
|
);
};
export default AppState;
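// Usage sketch (hypothetical consumer; assumes <AppState> wraps the tree and
// useContext is imported from React where this runs):
//
//   const { darkMode, toggleDarkMode } = useContext(AppContext);
//   return <button onClick={toggleDarkMode}>{darkMode ? 'Light' : 'Dark'} mode</button>;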
|
</AppContext.Provider>
|
set_iterator_test.go
|
// Copyright 2019 Dolthub, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
// This file incorporates work covered by the following copyright and
// permission notice:
//
// Copyright 2016 Attic Labs, Inc. All rights reserved.
// Licensed under the Apache License, version 2.0:
// http://www.apache.org/licenses/LICENSE-2.0
package types
import (
"context"
"math"
"testing"
"github.com/stretchr/testify/assert"
)
func TestSetIterator(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
numbers := append(generateNumbersAsValues(10), Float(20), Float(25))
s, err := NewSet(context.Background(), vs, numbers...)
assert.NoError(err)
i, err := s.Iterator(context.Background())
assert.NoError(err)
vals, err := iterToSlice(i)
assert.NoError(err)
assert.True(vals.Equals(numbers), "Expected: %v != actual: %v", numbers, vs)
i, err = s.Iterator(context.Background())
assert.NoError(err)
assert.Panics(func() {
_, _ = i.SkipTo(context.Background(), nil)
})
assert.Equal(Float(0), mustValue(i.SkipTo(context.Background(), Float(-20))))
assert.Equal(Float(2), mustValue(i.SkipTo(context.Background(), Float(2))))
assert.Equal(Float(3), mustValue(i.SkipTo(context.Background(), Float(-20))))
assert.Equal(Float(5), mustValue(i.SkipTo(context.Background(), Float(5))))
assert.Equal(Float(6), mustValue(i.Next(context.Background())))
assert.Equal(Float(7), mustValue(i.SkipTo(context.Background(), Float(6))))
assert.Equal(Float(20), mustValue(i.SkipTo(context.Background(), Float(15))))
assert.Nil(i.SkipTo(context.Background(), Float(30)))
assert.Nil(i.SkipTo(context.Background(), Float(30)))
assert.Nil(i.SkipTo(context.Background(), Float(1)))
i, err = s.Iterator(context.Background())
assert.NoError(err)
assert.Equal(Float(0), mustValue(i.Next(context.Background())))
assert.Equal(Float(1), mustValue(i.Next(context.Background())))
assert.Equal(Float(3), mustValue(i.SkipTo(context.Background(), Float(3))))
assert.Equal(Float(4), mustValue(i.Next(context.Background())))
empty, err := NewSet(context.Background(), vs)
assert.NoError(err)
assert.Nil(mustSIter(empty.Iterator(context.Background())).Next(context.Background()))
assert.Nil(mustSIter(empty.Iterator(context.Background())).SkipTo(context.Background(), Float(-30)))
set, err := NewSet(context.Background(), vs, Float(42))
assert.NoError(err)
single, err := set.Iterator(context.Background())
assert.NoError(err)
assert.Equal(Float(42), mustValue(single.SkipTo(context.Background(), Float(42))))
assert.Equal(nil, mustValue(single.SkipTo(context.Background(), Float(42))))
set, err = NewSet(context.Background(), vs, Float(42))
assert.NoError(err)
single, err = set.Iterator(context.Background())
assert.NoError(err)
assert.Equal(Float(42), mustValue(single.SkipTo(context.Background(), Float(42))))
assert.Equal(nil, mustValue(single.Next(context.Background())))
set, err = NewSet(context.Background(), vs, Float(42))
assert.NoError(err)
single, err = set.Iterator(context.Background())
assert.NoError(err)
assert.Equal(Float(42), mustValue(single.SkipTo(context.Background(), Float(21))))
}
func TestSetIteratorAt(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
numbers := append(generateNumbersAsValues(5), Float(10))
s, err := NewSet(context.Background(), vs, numbers...)
assert.NoError(err)
i, err := s.IteratorAt(context.Background(), 0)
assert.NoError(err)
vals, err := iterToSlice(i)
assert.NoError(err)
assert.True(vals.Equals(numbers), "Expected: %v != actual: %v", numbers, vs)
i, err = s.IteratorAt(context.Background(), 2)
assert.NoError(err)
vals, err = iterToSlice(i)
assert.NoError(err)
assert.True(vals.Equals(numbers[2:]), "Expected: %v != actual: %v", numbers[2:], vs)
i, err = s.IteratorAt(context.Background(), 10)
assert.NoError(err)
vals, err = iterToSlice(i)
assert.NoError(err)
assert.True(vals.Equals(nil), "Expected: %v != actual: %v", nil, vs)
}
func TestSetIteratorFrom(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
numbers := append(generateNumbersAsValues(5), Float(10), Float(20))
s, err := NewSet(context.Background(), vs, numbers...)
assert.NoError(err)
	i, err := s.IteratorFrom(context.Background(), Float(0))
	assert.NoError(err)
	vals, err := iterToSlice(i)
assert.NoError(err)
assert.True(vals.Equals(numbers), "Expected: %v != actual: %v", numbers, vs)
i, err = s.IteratorFrom(context.Background(), Float(2))
assert.NoError(err)
vals, err = iterToSlice(i)
assert.NoError(err)
assert.True(vals.Equals(numbers[2:]), "Expected: %v != actual: %v", numbers[2:], vs)
i, err = s.IteratorFrom(context.Background(), Float(10))
assert.NoError(err)
vals, err = iterToSlice(i)
assert.NoError(err)
	assert.True(vals.Equals(ValueSlice{Float(10), Float(20)}), "Expected: %v != actual: %v", ValueSlice{Float(10), Float(20)}, vals)
i, err = s.IteratorFrom(context.Background(), Float(20))
assert.NoError(err)
vals, err = iterToSlice(i)
assert.NoError(err)
	assert.True(vals.Equals(ValueSlice{Float(20)}), "Expected: %v != actual: %v", ValueSlice{Float(20)}, vals)
i, err = s.IteratorFrom(context.Background(), Float(100))
assert.NoError(err)
vals, err = iterToSlice(i)
assert.NoError(err)
assert.True(vals.Equals(nil), "Expected: %v != actual: %v", nil, vs)
// Not present. Starts at next larger.
i, err = s.IteratorFrom(context.Background(), Float(15))
assert.NoError(err)
vals, err = iterToSlice(i)
assert.NoError(err)
	assert.True(vals.Equals(ValueSlice{Float(20)}), "Expected: %v != actual: %v", ValueSlice{Float(20)}, vals)
}
func TestUnionIterator(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
set1, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 10, 1)...)
set2, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(5, 15, 1)...)
set3, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(10, 20, 1)...)
set4, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(15, 25, 1)...)
ui1, err := NewUnionIterator(context.Background(), Format_7_18, mustSIter(set1.Iterator(context.Background())), mustSIter(set2.Iterator(context.Background())))
vals, err := iterToSlice(ui1)
expectedRes := generateNumbersAsValues(15)
assert.True(vals.Equals(expectedRes), "Expected: %v != actual: %v", expectedRes, vs)
ui1, err = NewUnionIterator(context.Background(), Format_7_18, mustSIter(set1.Iterator(context.Background())), mustSIter(set4.Iterator(context.Background())))
ui2, err := NewUnionIterator(context.Background(), Format_7_18, mustSIter(set3.Iterator(context.Background())), mustSIter(set2.Iterator(context.Background())))
ui3, err := NewUnionIterator(context.Background(), Format_7_18, ui1, ui2)
vals, err = iterToSlice(ui3)
expectedRes = generateNumbersAsValues(25)
assert.True(vals.Equals(expectedRes), "Expected: %v != actual: %v", expectedRes, vs)
ui1, err = NewUnionIterator(context.Background(), Format_7_18, mustSIter(set1.Iterator(context.Background())), mustSIter(set4.Iterator(context.Background())))
ui2, err = NewUnionIterator(context.Background(), Format_7_18, mustSIter(set3.Iterator(context.Background())), mustSIter(set2.Iterator(context.Background())))
ui3, err = NewUnionIterator(context.Background(), Format_7_18, ui1, ui2)
	assert.Panics(func() {
		_, _ = ui3.SkipTo(context.Background(), nil)
	})
assert.Equal(Float(0), mustValue(ui3.SkipTo(context.Background(), Float(-5))))
assert.Equal(Float(5), mustValue(ui3.SkipTo(context.Background(), Float(5))))
assert.Equal(Float(8), mustValue(ui3.SkipTo(context.Background(), Float(8))))
assert.Equal(Float(9), mustValue(ui3.SkipTo(context.Background(), Float(8))))
assert.Equal(Float(10), mustValue(ui3.SkipTo(context.Background(), Float(8))))
assert.Equal(Float(11), mustValue(ui3.SkipTo(context.Background(), Float(7))))
assert.Equal(Float(12), mustValue(ui3.Next(context.Background())))
assert.Equal(Float(15), mustValue(ui3.SkipTo(context.Background(), Float(15))))
assert.Equal(Float(24), mustValue(ui3.SkipTo(context.Background(), Float(24))))
assert.Nil(ui3.SkipTo(context.Background(), Float(25)))
singleElemSet, err := NewSet(context.Background(), vs, Float(4))
emptySet, err := NewSet(context.Background(), vs)
ui10, err := NewUnionIterator(context.Background(), Format_7_18, mustSIter(singleElemSet.Iterator(context.Background())), mustSIter(singleElemSet.Iterator(context.Background())))
ui20, err := NewUnionIterator(context.Background(), Format_7_18, mustSIter(emptySet.Iterator(context.Background())), mustSIter(emptySet.Iterator(context.Background())))
ui30, err := NewUnionIterator(context.Background(), Format_7_18, ui10, ui20)
vals, err = iterToSlice(ui30)
expectedRes = ValueSlice{Float(4)}
assert.True(vals.Equals(expectedRes), "%v != %v\n", expectedRes, vs)
}
func
|
(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
byTwos, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 200, 2)...)
assert.NoError(err)
byThrees, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 200, 3)...)
assert.NoError(err)
byFives, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 200, 5)...)
assert.NoError(err)
i1, err := NewIntersectionIterator(context.Background(), Format_7_18, mustSIter(byTwos.Iterator(context.Background())), mustSIter(byThrees.Iterator(context.Background())))
assert.NoError(err)
vals, err := iterToSlice(i1)
assert.NoError(err)
expectedRes := generateNumbersAsValuesFromToBy(0, 200, 6)
assert.True(vals.Equals(expectedRes), "Expected: %v != actual: %v", expectedRes, vs)
it1, err := NewIntersectionIterator(context.Background(), Format_7_18, mustSIter(byTwos.Iterator(context.Background())), mustSIter(byThrees.Iterator(context.Background())))
assert.NoError(err)
it2, err := NewIntersectionIterator(context.Background(), Format_7_18, it1, mustSIter(byFives.Iterator(context.Background())))
assert.NoError(err)
vals, err = iterToSlice(it2)
assert.NoError(err)
expectedRes = generateNumbersAsValuesFromToBy(0, 200, 30)
assert.True(vals.Equals(expectedRes), "Expected: %v != actual: %v", expectedRes, vs)
it1, err = NewIntersectionIterator(context.Background(), Format_7_18, mustSIter(byThrees.Iterator(context.Background())), mustSIter(byFives.Iterator(context.Background())))
assert.NoError(err)
it2, err = NewIntersectionIterator(context.Background(), Format_7_18, it1, mustSIter(byTwos.Iterator(context.Background())))
assert.NoError(err)
assert.Panics(func() {
_, _ = it2.SkipTo(context.Background(), nil)
})
assert.Equal(Float(30), mustValue(it2.SkipTo(context.Background(), Float(5))))
assert.Equal(Float(60), mustValue(it2.SkipTo(context.Background(), Float(60))))
assert.Equal(Float(90), mustValue(it2.SkipTo(context.Background(), Float(5))))
assert.Equal(Float(120), mustValue(it2.Next(context.Background())))
assert.Equal(Float(150), mustValue(it2.SkipTo(context.Background(), Float(150))))
assert.Nil(it2.SkipTo(context.Background(), Float(40000)))
}
func TestCombinationIterator(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
byTwos, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 70, 2)...)
assert.NoError(err)
byThrees, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 70, 3)...)
assert.NoError(err)
byFives, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 70, 5)...)
assert.NoError(err)
bySevens, err := NewSet(context.Background(), vs, generateNumbersAsValuesFromToBy(0, 70, 7)...)
assert.NoError(err)
it1, err := NewIntersectionIterator(context.Background(), Format_7_18, mustSIter(byTwos.Iterator(context.Background())), mustSIter(bySevens.Iterator(context.Background())))
assert.NoError(err)
it2, err := NewIntersectionIterator(context.Background(), Format_7_18, mustSIter(byFives.Iterator(context.Background())), mustSIter(byThrees.Iterator(context.Background())))
assert.NoError(err)
ut1, err := NewUnionIterator(context.Background(), Format_7_18, it1, it2)
assert.NoError(err)
vals, err := iterToSlice(ut1)
assert.NoError(err)
expectedRes := intsToValueSlice(0, 14, 15, 28, 30, 42, 45, 56, 60)
assert.NoError(err)
assert.True(vals.Equals(expectedRes), "Expected: %v != actual: %v", expectedRes, vs)
ut1, err = NewUnionIterator(context.Background(), Format_7_18, mustSIter(byTwos.Iterator(context.Background())), mustSIter(bySevens.Iterator(context.Background())))
assert.NoError(err)
it2, err = NewIntersectionIterator(context.Background(), Format_7_18, mustSIter(byFives.Iterator(context.Background())), mustSIter(byThrees.Iterator(context.Background())))
assert.NoError(err)
ut2, err := NewIntersectionIterator(context.Background(), Format_7_18, ut1, it2)
assert.NoError(err)
vals, err = iterToSlice(ut2)
assert.NoError(err)
expectedRes = intsToValueSlice(0, 30, 60)
assert.True(vals.Equals(expectedRes), "Expected: %v != actual: %v", expectedRes, vs)
}
type UnionTestIterator struct {
*UnionIterator
cntr *int
}
func (ui *UnionTestIterator) Next(ctx context.Context) (Value, error) {
*ui.cntr++
return ui.UnionIterator.Next(ctx)
}
func (ui *UnionTestIterator) SkipTo(ctx context.Context, v Value) (Value, error) {
*ui.cntr++
return ui.UnionIterator.SkipTo(ctx, v)
}
func NewUnionTestIterator(i1, i2 SetIterator, cntr *int) (SetIterator, error) {
ui, err := NewUnionIterator(context.Background(), Format_7_18, i1, i2)
if err != nil {
return nil, err
}
return &UnionTestIterator{ui.(*UnionIterator), cntr}, nil
}
// When a binary tree of union operators is built on top of a list of sets, the complexity to
// retrieve all of the elements in sorted order should be Log(N) * M where N is the number of sets in
// the list and M is the total number of elements in all of the sets.
func TestUnionComplexity(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
numSets := 256
numElemsPerSet := 1000
logNumSets := int(math.Ceil(math.Log2(float64(numSets))))
totalElems := numSets * numElemsPerSet
expectedMax := logNumSets*totalElems + numSets
callCount1 := 0
itrs, err := createSetsWithDistinctNumbers(vs, numSets, numElemsPerSet)
assert.NoError(err)
iter, err := iterize(itrs, NewUnionTestIterator, &callCount1)
assert.NoError(err)
vals, err := iterToSlice(iter)
assert.NoError(err)
expected := generateNumbersAsValueSlice(numSets * numElemsPerSet)
assert.True(expected.Equals(vals), "expected: %v != actual: %v", expected, vals)
assert.True(expectedMax > callCount1, "callCount: %d exceeds expectedMax: %d", callCount1, expectedMax)
callCount2 := 0
itrs, err = createSetsWithSameNumbers(vs, numSets, numElemsPerSet)
assert.NoError(err)
iter, err = iterize(itrs, NewUnionTestIterator, &callCount2)
assert.NoError(err)
vals, err = iterToSlice(iter)
assert.NoError(err)
expected = generateNumbersAsValueSlice(numElemsPerSet)
assert.True(expected.Equals(vals), "expected: %v != actual: %v", expected, vals)
assert.True(expectedMax > callCount2, "callCount: %d exceeds expectedMax: %d", callCount2, expectedMax)
}
type IntersectionTestIterator struct {
*IntersectionIterator
cntr *int
}
func (i *IntersectionTestIterator) Next(ctx context.Context) (Value, error) {
*i.cntr++
return i.IntersectionIterator.Next(ctx)
}
func (i *IntersectionTestIterator) SkipTo(ctx context.Context, v Value) (Value, error) {
*i.cntr++
return i.IntersectionIterator.SkipTo(ctx, v)
}
func NewIntersectionTestIterator(i1, i2 SetIterator, cntr *int) (SetIterator, error) {
ui, err := NewIntersectionIterator(context.Background(), Format_7_18, i1, i2)
if err != nil {
return nil, err
}
return &IntersectionTestIterator{ui.(*IntersectionIterator), cntr}, nil
}
// When a binary tree of intersection operators is built on top of a list of sets, the complexity to
// retrieve all of the elements in sorted order should be Log(N) * M where N is the number of sets in
// the list and M is the total number of elements in all of the sets.
func TestIntersectComplexity(t *testing.T) {
assert := assert.New(t)
vs := newTestValueStore()
numSets := 256
numElemsPerSet := 1000
logNumSets := int(math.Ceil(math.Log2(float64(numSets))))
totalElems := numSets * numElemsPerSet
expectedMax := logNumSets*totalElems + numSets
callCount1 := 0
itrs, err := createSetsWithDistinctNumbers(vs, numSets, numElemsPerSet)
assert.NoError(err)
iter, err := iterize(itrs, NewIntersectionTestIterator, &callCount1)
assert.NoError(err)
vals, err := iterToSlice(iter)
assert.NoError(err)
expected := ValueSlice{}
assert.True(expected.Equals(vals), "expected: %v != actual: %v", expected, vals)
assert.True(expectedMax > callCount1, "callCount: %d exceeds expectedMax: %d", callCount1, expectedMax)
callCount2 := 0
itrs, err = createSetsWithSameNumbers(vs, numSets, numElemsPerSet)
assert.NoError(err)
iter, err = iterize(itrs, NewIntersectionTestIterator, &callCount2)
assert.NoError(err)
vals, err = iterToSlice(iter)
assert.NoError(err)
expected = generateNumbersAsValueSlice(numElemsPerSet)
assert.True(expected.Equals(vals), "expected: %v != actual: %v", expected, vals)
assert.True(expectedMax > callCount2, "callCount: %d exceeds expectedMax: %d", callCount2, expectedMax)
}
func createSetsWithDistinctNumbers(vrw ValueReadWriter, numSets, numElemsPerSet int) ([]SetIterator, error) {
iterSlice := []SetIterator{}
for i := 0; i < numSets; i++ {
vals := ValueSlice{}
for j := 0; j < numElemsPerSet; j++ {
vals = append(vals, Float(i+(numSets*j)))
}
s, err := NewSet(context.Background(), vrw, vals...)
if err != nil {
return nil, err
}
itr, err := s.Iterator(context.Background())
if err != nil {
return nil, err
}
iterSlice = append(iterSlice, itr)
}
return iterSlice, nil
}
func createSetsWithSameNumbers(vrw ValueReadWriter, numSets, numElemsPerSet int) ([]SetIterator, error) {
vs := ValueSlice{}
for j := 0; j < numElemsPerSet; j++ {
vs = append(vs, Float(j))
}
iterSlice := []SetIterator{}
for i := 0; i < numSets; i++ {
s, err := NewSet(context.Background(), vrw, vs...)
if err != nil {
return nil, err
}
itr, err := s.Iterator(context.Background())
if err != nil {
return nil, err
}
iterSlice = append(iterSlice, itr)
}
return iterSlice, nil
}
type newIterFunc func(i1, i2 SetIterator, cntr *int) (SetIterator, error)
// Iterize calls itself recursively to build a binary tree of iterators over the original set.
func iterize(iters []SetIterator, newIter newIterFunc, cntr *int) (SetIterator, error) {
if len(iters) == 0 {
return nil, nil
}
if len(iters) <= 1 {
return iters[0], nil
}
var iter0 SetIterator
newIters := []SetIterator{}
for i, iter := range iters {
if i%2 == 0 {
iter0 = iter
} else {
ni, err := newIter(iter0, iter, cntr)
if err != nil {
return nil, err
}
newIters = append(newIters, ni)
iter0 = nil
}
}
if iter0 != nil {
newIters = append(newIters, iter0)
}
return iterize(newIters, newIter, cntr)
}
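// Shape sketch (illustrative): iterize([a, b, c, d], newIter, &n) first builds
// newIter(a, b) and newIter(c, d), then combines those two on the next pass,
// yielding the balanced tree newIter(newIter(a, b), newIter(c, d)); an odd
// leftover iterator is carried up a level unchanged.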
|
TestIntersectionIterator
|
cloud_server_request_fragment_publicip.rs
|
/*
* Open Service Cloud API
*
* Open Service Cloud API to manage different backend cloud services.
*
* The version of the OpenAPI document: 0.0.3
* Contact: wanghui71leon@gmail.com
* Generated by: https://openapi-generator.tech
*/
#[derive(Debug, PartialEq, Serialize, Deserialize)]
pub struct CloudServerRequestFragmentPublicip {
#[serde(rename = "id", skip_serializing_if = "Option::is_none")]
pub id: Option<String>,
#[serde(rename = "eip", skip_serializing_if = "Option::is_none")]
pub eip: Option<crate::models::CloudServerRequestFragmentPublicipEip>,
}
impl CloudServerRequestFragmentPublicip {
pub fn new() -> CloudServerRequestFragmentPublicip
|
}
|
{
CloudServerRequestFragmentPublicip {
id: None,
eip: None,
}
}
|
SignUpView.js
|
import NavigationActions from '../../actions/navigation';
import {
Text,
View,
StyleSheet
} from 'react-native';
class SignUpView extends Component {
render() {
return (
<View style={[styles.container]}>
<Text>
Sign up page
</Text>
</View>
);
}
}
SignUpView.propTypes = {
index: PropTypes.number.isRequired,
pushRoute: PropTypes.func.isRequired
};
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: 'center',
alignItems: 'center'
}
});
const mapDispatchToProps = (dispatch) => (bindActionCreators(new NavigationActions, dispatch));
export default connect(undefined, mapDispatchToProps)(SignUpView);
|
import React, {Component, PropTypes} from 'react';
import {connect} from 'react-redux';
import { bindActionCreators } from 'redux';
|
|
getContentKeyPolicyPropertiesWithSecrets.go
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20200501
import (
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// The properties of the Content Key Policy.
func GetContentKeyPolicyPropertiesWithSecrets(ctx *pulumi.Context, args *GetContentKeyPolicyPropertiesWithSecretsArgs, opts ...pulumi.InvokeOption) (*GetContentKeyPolicyPropertiesWithSecretsResult, error) {
var rv GetContentKeyPolicyPropertiesWithSecretsResult
err := ctx.Invoke("azure-native:media/v20200501:getContentKeyPolicyPropertiesWithSecrets", args, &rv, opts...)
if err != nil
|
return &rv, nil
}
type GetContentKeyPolicyPropertiesWithSecretsArgs struct {
// The Media Services account name.
AccountName string `pulumi:"accountName"`
// The Content Key Policy name.
ContentKeyPolicyName string `pulumi:"contentKeyPolicyName"`
// The name of the resource group within the Azure subscription.
ResourceGroupName string `pulumi:"resourceGroupName"`
}
// The properties of the Content Key Policy.
type GetContentKeyPolicyPropertiesWithSecretsResult struct {
// The creation date of the Policy
Created string `pulumi:"created"`
// A description for the Policy.
Description *string `pulumi:"description"`
// The last modified date of the Policy
LastModified string `pulumi:"lastModified"`
// The Key Policy options.
Options []ContentKeyPolicyOptionResponse `pulumi:"options"`
// The legacy Policy ID.
PolicyId string `pulumi:"policyId"`
}
|
{
return nil, err
}
|
parse_cast.js
|
const kaelin = require("./kaelin.js");
const parse_cast = (code) => {
const parse_hero = (str) => {
switch (str.toLowerCase()) {
case "to": var hero = kaelin.TOPHORO ; break;
case "go": var hero = kaelin.GONK ; break;
case "st": var hero = kaelin.STANCI ; break;
case "h3": var hero = kaelin.HERO_3 ; break;
case "er": var hero = kaelin.ERKOS ; break;
case "cr": var hero = kaelin.CRONI ; break;
case "sn": var hero = kaelin.SNARCH ; break;
case "h7": var hero = kaelin.HERO_7 ; break;
case "si": var hero = kaelin.SIRPIX ; break;
case "ke": var hero = kaelin.KENLUA ; break;
case "fl": var hero = kaelin.FLINA ; break;
case "hb": var hero = kaelin.HERO_B ; break;
|
case "za": var hero = kaelin.ZAGATUR ; break;
case "ag": var hero = kaelin.AGDRIS ; break;
case "me": var hero = kaelin.MEWEM ; break;
case "hf": var hero = kaelin.HERO_F ; break;
default: throw "Invalid hero `" + code.slice(0,2) + "\`.";
}
return hero;
};
const parse_pos = (pos) => {
if (pos.length !== 2 || !/[0-9a-fA-F]/.test(pos[0]) || !/[0-9a-fA-F]/.test(pos[1])) {
throw "Invalid position `" + pos + "`.";
}
return [parseInt(pos[0], 16), parseInt(pos[1], 16)];
};
var words = code.split(" ").slice(0, 5);
while (words.length < 5) {
words.push(".");
}
var hero = parse_hero(words[0]);
try { var walk = words[1] === "." ? null : parse_pos(words[1]); } catch (e) { throw e + " (When parsing walk.)"; }
try { var skl0 = words[2] === "." ? null : parse_pos(words[2]); } catch (e) { throw e + " (When parsing skill 0.)" };
try { var skl1 = words[3] === "." ? null : parse_pos(words[3]); } catch (e) { throw e + " (When parsing skill 1.)" };
try { var skl2 = words[4] === "." ? null : parse_pos(words[4]); } catch (e) { throw e + " (When parsing skill 2.)" };
return [[hero * 4 + 0, walk], [hero * 4 + 1, skl0], [hero * 4 + 2, skl1], [hero * 4 + 3, skl2]];
};
module.exports = parse_cast;
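// Usage sketch (hypothetical cast string; positions are hex "xy" pairs and "."
// means "no target"):
//
//   parse_cast("go 01 . a3")
//   // -> [[kaelin.GONK*4+0, [0, 1]], [kaelin.GONK*4+1, null],
//   //     [kaelin.GONK*4+2, [10, 3]], [kaelin.GONK*4+3, null]]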
| |
interface.rs
|
use crate::config::raw::{RawAction, RawActionType};
use crate::logs::Logs;
use crate::requestfields::RequestField;
use crate::utils::RequestInfo;
use crate::grasshopper::{challenge_phase01, Grasshopper}; // used by to_decision below; module path assumed from the curiefense crate layout
use serde::{Deserialize, Serialize};
use serde_json::json;
// This file contains all the data types that are used when interfacing with a proxy.
use std::collections::{HashMap, HashSet};
#[derive(Debug, Clone)]
pub enum SimpleDecision {
Pass,
Action(SimpleAction, serde_json::Value),
}
#[derive(Debug, Clone)]
pub enum Decision {
Pass,
Action(Action),
}
impl Decision {
pub fn to_json_raw(&self, request_map: serde_json::Value, logs: Logs) -> String {
let (action_desc, response) = match self {
Decision::Pass => ("pass", None),
Decision::Action(a) => ("custom_response", Some(a)),
};
let j = serde_json::json!({
"request_map": request_map,
"action": action_desc,
"response": response,
"logs": logs.logs
});
serde_json::to_string(&j).unwrap_or_else(|_| "{}".to_string())
}
pub fn to_json(&self, rinfo: RequestInfo, tags: Tags, logs: Logs) -> String {
let mut tgs = tags;
let (action_desc, response) = match self {
Decision::Pass => ("pass", None),
Decision::Action(a) => ("custom_response", Some(a)),
};
if let Decision::Action(a) = &self {
if let Some(extra) = &a.extra_tags {
for t in extra {
tgs.insert(t);
}
}
}
let request_map = rinfo.into_json(tgs);
let j = serde_json::json!({
"request_map": request_map,
"action": action_desc,
"response": response,
"logs": logs.logs
});
serde_json::to_string(&j).unwrap_or_else(|_| "{}".to_string())
}
/// is the action blocking (not passed to the underlying server)
pub fn is_blocking(&self) -> bool {
match self {
Decision::Pass => false,
Decision::Action(a) => a.atype.is_blocking(),
}
}
/// is the action final (no further processing)
pub fn is_final(&self) -> bool {
match self {
Decision::Pass => false,
Decision::Action(a) => a.atype.is_final(),
}
}
}
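// Serialization sketch (shape only, not an exact sample): both `to_json` and
// `to_json_raw` above emit an object of the form
//
//     {"request_map": {...}, "action": "pass" | "custom_response",
//      "response": null | {"atype": "block", "status": 503, ...}, "logs": [...]}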
/// a newtype representing tags, to make sure they are tagified when inserted
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Tags(HashSet<String>);
fn tagify(tag: &str) -> String {
fn filter_char(c: char) -> char {
if c.is_ascii_alphanumeric() || c == ':' {
c
} else {
'-'
}
}
tag.to_lowercase().chars().map(filter_char).collect()
}
impl Default for Tags {
fn default() -> Self {
Tags(HashSet::new())
}
}
impl Tags {
pub fn insert(&mut self, value: &str) -> bool {
self.0.insert(tagify(value))
}
pub fn insert_qualified(&mut self, id: &str, value: &str) -> bool {
let mut to_insert = id.to_string();
to_insert.push(':');
to_insert += &tagify(value);
self.0.insert(to_insert)
}
pub fn extend(&mut self, other: Self) {
self.0.extend(other.0)
}
pub fn from_slice(slice: &[String]) -> Self {
        Tags(slice.iter().map(|s| tagify(s)).collect())
}
pub fn contains(&self, s: &str) -> bool {
self.0.contains(s)
}
pub fn as_hash_ref(&self) -> &HashSet<String> {
&self.0
}
}
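// Behaviour sketch: tags are normalised on insertion — lowercased, with every
// character outside [a-z0-9:] replaced by '-'. Illustrative values:
//
//     tagify("Some Tag!") == "some-tag-"
//     tags.insert_qualified("geo", "US East")  // stores "geo:us-east"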
// an action, as formatted for outside consumption
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Action {
pub atype: ActionType,
pub ban: bool,
pub block_mode: bool,
pub status: u32,
pub headers: Option<HashMap<String, String>>,
pub reason: serde_json::value::Value,
pub content: String,
pub extra_tags: Option<HashSet<String>>,
}
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum SimpleActionT {
Default,
Monitor,
    Ban(Box<SimpleAction>, u64), // fallback action, duration in seconds
RequestHeader(HashMap<String, String>),
Response(String),
Redirect(String),
Challenge,
}
// an action with its semantic meaning
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SimpleAction {
pub atype: SimpleActionT,
pub status: u32,
pub reason: String,
}
impl std::default::Default for SimpleActionT {
fn
|
() -> Self {
SimpleActionT::Default
}
}
#[derive(Debug, Clone, PartialEq, Eq, Copy, Serialize, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum ActionType {
Monitor,
Block,
AlterHeaders,
}
impl ActionType {
/// is the action blocking (not passed to the underlying server)
pub fn is_blocking(&self) -> bool {
matches!(self, ActionType::Block)
}
/// is the action final (no further processing)
pub fn is_final(&self) -> bool {
!matches!(self, ActionType::Monitor)
}
}
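// A minimal sketch (not part of the original file) of how the two predicates
// partition the variants: Monitor is neither blocking nor final, AlterHeaders
// is final but not blocking, and Block is both.
//
//     assert!(!ActionType::Monitor.is_blocking() && !ActionType::Monitor.is_final());
//     assert!(!ActionType::AlterHeaders.is_blocking() && ActionType::AlterHeaders.is_final());
//     assert!(ActionType::Block.is_blocking() && ActionType::Block.is_final());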
impl std::default::Default for Action {
fn default() -> Self {
Action {
atype: ActionType::Block,
block_mode: true,
ban: false,
status: 503,
headers: None,
reason: serde_json::value::Value::Null,
content: "curiefense - request denied".to_string(),
extra_tags: None,
}
}
}
impl SimpleAction {
pub fn from_reason(reason: String) -> Self {
SimpleAction {
atype: SimpleActionT::default(),
status: 503,
reason,
}
}
pub fn resolve(rawaction: &RawAction) -> anyhow::Result<SimpleAction> {
let atype = match rawaction.type_ {
RawActionType::Default => SimpleActionT::Default,
RawActionType::Monitor => SimpleActionT::Monitor,
RawActionType::Ban => SimpleActionT::Ban(
Box::new(
rawaction
.params
.action
.as_ref()
                    .and_then(|x| SimpleAction::resolve(x).ok())
.unwrap_or_else(|| {
SimpleAction::from_reason(rawaction.params.reason.clone().unwrap_or_else(|| "?".into()))
}),
),
rawaction
.params
.duration
.as_ref()
.and_then(|s| s.parse::<u64>().ok())
.unwrap_or(3600),
),
RawActionType::RequestHeader => {
SimpleActionT::RequestHeader(rawaction.params.headers.clone().unwrap_or_else(HashMap::default))
}
RawActionType::Response => SimpleActionT::Response(
rawaction
.params
.content
.clone()
.unwrap_or_else(|| "default content".into()),
),
RawActionType::Challenge => SimpleActionT::Challenge,
RawActionType::Redirect => SimpleActionT::Redirect(
rawaction
.params
.location
.clone()
.ok_or_else(|| anyhow::anyhow!("no location for redirect in rule {:?}", rawaction))?,
),
};
let status = if let Some(sstatus) = &rawaction.params.status {
match sstatus.parse::<u32>() {
Ok(s) => s,
Err(rr) => return Err(anyhow::anyhow!("Unparseable status: {} -> {}", sstatus, rr)),
}
} else {
503
};
Ok(SimpleAction {
atype,
status,
reason: rawaction.params.reason.clone().unwrap_or_else(|| "no reason".into()),
})
}
/// returns None when it is a challenge, Some(action) otherwise
fn to_action(&self, is_human: bool) -> Option<Action> {
        let mut action = Action::default();
        // note: block_mode is computed from the default atype (Block) before the
        // match below may overwrite atype
        action.block_mode = action.atype.is_blocking();
action.status = self.status;
match &self.atype {
SimpleActionT::Default => {}
SimpleActionT::Monitor => action.atype = ActionType::Monitor,
SimpleActionT::Ban(sub, _) => {
action = sub.to_action(is_human).unwrap_or_default();
action.ban = true;
}
SimpleActionT::RequestHeader(hdrs) => {
action.headers = Some(hdrs.clone());
action.atype = ActionType::AlterHeaders;
}
SimpleActionT::Response(content) => {
action.atype = ActionType::Block;
action.content = content.clone();
}
SimpleActionT::Challenge => {
if !is_human {
return None;
}
action.atype = ActionType::Monitor;
}
SimpleActionT::Redirect(to) => {
let mut headers = HashMap::new();
action.content = "You are being redirected".into();
headers.insert("Location".into(), to.clone());
action.atype = ActionType::Block;
action.headers = Some(headers);
}
}
Some(action)
}
pub fn to_decision<GH: Grasshopper>(
&self,
is_human: bool,
mgh: &Option<GH>,
headers: &RequestField,
reason: serde_json::Value,
) -> Decision {
let mut action = match self.to_action(is_human) {
None => match (mgh, headers.get("user-agent")) {
(Some(gh), Some(ua)) => return challenge_phase01(gh, ua, Vec::new()),
_ => Action::default(),
},
Some(a) => a,
};
action.reason = reason;
Decision::Action(action)
}
pub fn to_decision_no_challenge(&self, reason: serde_json::Value) -> Decision {
let mut action = match self.to_action(true) {
None => Action::default(),
Some(a) => a,
};
action.reason = reason;
Decision::Action(action)
}
}
impl SimpleDecision {
pub fn into_decision_no_challenge(self) -> Decision {
match self {
SimpleDecision::Pass => Decision::Pass,
SimpleDecision::Action(action, reason) => action.to_decision_no_challenge(reason),
}
}
}
pub trait Grasshopper {
fn js_app(&self) -> Option<String>;
fn js_bio(&self) -> Option<String>;
fn parse_rbzid(&self, rbzid: &str, seed: &str) -> Option<bool>;
fn gen_new_seed(&self, seed: &str) -> Option<String>;
fn verify_workproof(&self, workproof: &str, seed: &str) -> Option<String>;
}
pub fn gh_fail_decision(reason: &str) -> Decision {
Decision::Action(Action {
atype: ActionType::Block,
block_mode: true,
ban: false,
reason: json!({"initiator": "phase01", "reason": reason}),
headers: None,
status: 500,
content: "internal_error".to_string(),
extra_tags: None,
})
}
pub fn challenge_phase01<GH: Grasshopper>(gh: &GH, ua: &str, tags: Vec<String>) -> Decision {
let seed = match gh.gen_new_seed(ua) {
None => return gh_fail_decision("could not call gen_new_seed"),
Some(s) => s,
};
let chall_lib = match gh.js_app() {
None => return gh_fail_decision("could not call chall_lib"),
Some(s) => s,
};
let hdrs: HashMap<String, String> = [
("Content-Type", "text/html; charset=utf-8"),
("Expires", "Thu, 01 Aug 1978 00:01:48 GMT"),
("Cache-Control", "no-cache, private, no-transform, no-store"),
("Pragma", "no-cache"),
(
"P3P",
"CP=\"IDC DSP COR ADM DEVi TAIi PSA PSD IVAi IVDi CONi HIS OUR IND CNT\"",
),
]
.iter()
.map(|(k, v)| (k.to_string(), v.to_string()))
.collect();
let mut content = "<html><head><meta charset=\"utf-8\"><script>".to_string();
content += &chall_lib;
content += ";;window.rbzns={bereshit: \"1\", seed: \"";
content += &seed;
content += "\", storage:\"3\"};winsocks();";
content += "</script></head><body></body></html>";
// here humans are accepted, as they were not denied
// (this would have been caught by the previous guard)
Decision::Action(Action {
atype: ActionType::Block,
block_mode: true,
ban: false,
reason: if tags.is_empty() {
// this happens for rate limit / flow control / tag action
json!({"initiator": "phase01", "reason": "challenge"})
} else {
// this only happens for acl challenges
json!({"initiator": "phase01", "reason": "challenge", "tags": tags})
},
headers: Some(hdrs),
status: 247,
content,
extra_tags: Some(["challenge_phase01"].iter().map(|s| s.to_string()).collect()),
})
}
fn extract_zebra(headers: &RequestField) -> Option<String> {
for (k, v) in headers.iter() {
if k.starts_with("x-zebra-") {
return Some(v.replace('-', "="));
}
}
None
}
pub fn challenge_phase02<GH: Grasshopper>(gh: &GH, uri: &str, headers: &RequestField) -> Option<Decision> {
if !uri.starts_with("/7060ac19f50208cbb6b45328ef94140a612ee92387e015594234077b4d1e64f1/") {
return None;
}
let ua = headers.get("user-agent")?;
let workproof = extract_zebra(headers)?;
let verified = gh.verify_workproof(&workproof, ua)?;
let mut nheaders = HashMap::<String, String>::new();
let mut cookie = "rbzid=".to_string();
cookie += &verified.replace('=', "-");
cookie += "; Path=/; HttpOnly";
nheaders.insert("Set-Cookie".to_string(), cookie);
Some(Decision::Action(Action {
atype: ActionType::Block,
block_mode: true,
ban: false,
reason: json!({"initiator": "phase02", "reason": "challenge"}),
headers: Some(nheaders),
status: 248,
content: "{}".to_string(),
extra_tags: Some(["challenge_phase02"].iter().map(|s| s.to_string()).collect()),
}))
}
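// A minimal test-double sketch (not part of the original file) showing one way
// to satisfy the Grasshopper trait; all return values are assumptions.
//
//     struct DummyGrasshopper;
//     impl Grasshopper for DummyGrasshopper {
//         fn js_app(&self) -> Option<String> { Some("/* challenge lib */".into()) }
//         fn js_bio(&self) -> Option<String> { None }
//         fn parse_rbzid(&self, _rbzid: &str, _seed: &str) -> Option<bool> { Some(true) }
//         fn gen_new_seed(&self, seed: &str) -> Option<String> { Some(seed.to_string()) }
//         fn verify_workproof(&self, workproof: &str, _seed: &str) -> Option<String> {
//             Some(workproof.to_string())
//         }
//     }
//     // with this stub, challenge_phase01(&DummyGrasshopper, "some-ua", Vec::new())
//     // returns the blocking 247 challenge response built above.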
|
default
|
__init__.py
|
from .random_transform import RandomTransform
__all__ = [
'RandomTransform',
|
]
|
|
getDataset.go
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package latest
import (
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
func LookupDataset(ctx *pulumi.Context, args *LookupDatasetArgs, opts ...pulumi.InvokeOption) (*LookupDatasetResult, error)
|
type LookupDatasetArgs struct {
// The dataset name.
DatasetName string `pulumi:"datasetName"`
// The factory name.
FactoryName string `pulumi:"factoryName"`
// The resource group name.
ResourceGroupName string `pulumi:"resourceGroupName"`
}
// Dataset resource type.
type LookupDatasetResult struct {
// Etag identifies change in the resource.
Etag string `pulumi:"etag"`
// The resource name.
Name string `pulumi:"name"`
// Dataset properties.
Properties interface{} `pulumi:"properties"`
// The resource type.
Type string `pulumi:"type"`
}
|
{
var rv LookupDatasetResult
err := ctx.Invoke("azure-nextgen:datafactory/latest:getDataset", args, &rv, opts...)
if err != nil {
return nil, err
}
return &rv, nil
}
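// A minimal usage sketch (not part of the generated file); the resource names
// below are assumptions for illustration.
//
//	pulumi.Run(func(ctx *pulumi.Context) error {
//		ds, err := LookupDataset(ctx, &LookupDatasetArgs{
//			DatasetName:       "exampleDataset",
//			FactoryName:       "exampleFactory",
//			ResourceGroupName: "example-rg",
//		})
//		if err != nil {
//			return err
//		}
//		ctx.Export("datasetType", pulumi.String(ds.Type))
//		return nil
//	})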
|
fontReplacePixel.py
|
# @author: GaryMK
# @EMAIL: chenxingmk@gmail.com
# @Date: 2021/2/14 0:28
# @Version: 1.0
# @Description:
from PIL import Image, ImageDraw, ImageFont
import cv2
import os
def draw(pic):
img = cv2.imread('source/' + pic)
    img = img[:, :, (2, 1, 0)]  # reorder cv2's BGR channels to RGB
blank = Image.new("RGB", [len(img[0]), len(img)], "white")
drawObj = ImageDraw.Draw(blank)
|
font = ImageFont.truetype('C:/Windows/Fonts/Microsoft YaHei UI/msyhbd.ttc', size=n - 1)
for i in range(0, len(img), n):
for j in range(0, len(img[i]), n):
            text = '晨星'  # the two characters tiled across the image
            # pack the sampled RGB pixel into one integer colour: R + G*256 + B*65536
            drawObj.ink = img[i][j][0] + img[i][j][1] * 256 + img[i][j][2] * 256 * 256
            drawObj.text([j, i], text[int(j / n) % len(text)], font=font)
            print('Finished processing block', i, j)
blank.save('replaced/replaced_' + pic, 'jpeg')
filelist = os.listdir('source')
for file in filelist:
draw(file)
|
n = 10
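    # n is the sampling block size in pixels: each n-by-n block of the source
    # image becomes one drawn character coloured from that block's top-left
    # pixel, so a smaller n yields finer detail at the cost of more draw calls.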
|
data.pb.go
|
// Code generated by protoc-gen-go. DO NOT EDIT.
// source: data.proto
package dataArray
import (
fmt "fmt"
proto "github.com/golang/protobuf/proto"
math "math"
)
import (
context "golang.org/x/net/context"
grpc "google.golang.org/grpc"
)
// Reference imports to suppress errors if they are not otherwise used.
var _ = proto.Marshal
var _ = fmt.Errorf
var _ = math.Inf
// This is a compile-time assertion to ensure that this generated file
// is compatible with the proto package it is being compiled against.
// A compilation error at this line likely means your copy of the
// proto package needs to be updated.
const _ = proto.ProtoPackageIsVersion2 // please upgrade the proto package
type GetIntDataArrayStreamRequest struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *GetIntDataArrayStreamRequest) Reset() { *m = GetIntDataArrayStreamRequest{} }
func (m *GetIntDataArrayStreamRequest) String() string { return proto.CompactTextString(m) }
func (*GetIntDataArrayStreamRequest) ProtoMessage() {}
func (*GetIntDataArrayStreamRequest) Descriptor() ([]byte, []int) {
return fileDescriptor_871986018790d2fd, []int{0}
}
func (m *GetIntDataArrayStreamRequest) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_GetIntDataArrayStreamRequest.Unmarshal(m, b)
}
func (m *GetIntDataArrayStreamRequest) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_GetIntDataArrayStreamRequest.Marshal(b, m, deterministic)
}
func (m *GetIntDataArrayStreamRequest) XXX_Merge(src proto.Message) {
xxx_messageInfo_GetIntDataArrayStreamRequest.Merge(m, src)
}
func (m *GetIntDataArrayStreamRequest) XXX_Size() int {
return xxx_messageInfo_GetIntDataArrayStreamRequest.Size(m)
}
func (m *GetIntDataArrayStreamRequest) XXX_DiscardUnknown() {
xxx_messageInfo_GetIntDataArrayStreamRequest.DiscardUnknown(m)
}
var xxx_messageInfo_GetIntDataArrayStreamRequest proto.InternalMessageInfo
type ServerSendResponse struct {
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *ServerSendResponse) Reset() { *m = ServerSendResponse{} }
func (m *ServerSendResponse) String() string { return proto.CompactTextString(m) }
func (*ServerSendResponse) ProtoMessage() {}
func (*ServerSendResponse) Descriptor() ([]byte, []int) {
return fileDescriptor_871986018790d2fd, []int{1}
}
func (m *ServerSendResponse) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_ServerSendResponse.Unmarshal(m, b)
}
func (m *ServerSendResponse) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_ServerSendResponse.Marshal(b, m, deterministic)
}
func (m *ServerSendResponse) XXX_Merge(src proto.Message) {
xxx_messageInfo_ServerSendResponse.Merge(m, src)
}
func (m *ServerSendResponse) XXX_Size() int {
return xxx_messageInfo_ServerSendResponse.Size(m)
}
func (m *ServerSendResponse) XXX_DiscardUnknown() {
xxx_messageInfo_ServerSendResponse.DiscardUnknown(m)
}
var xxx_messageInfo_ServerSendResponse proto.InternalMessageInfo
type IntDataArray struct {
Data []int32 `protobuf:"varint,1,rep,packed,name=data,proto3" json:"data,omitempty"`
XXX_NoUnkeyedLiteral struct{} `json:"-"`
XXX_unrecognized []byte `json:"-"`
XXX_sizecache int32 `json:"-"`
}
func (m *IntDataArray) Reset() { *m = IntDataArray{} }
func (m *IntDataArray) String() string { return proto.CompactTextString(m) }
func (*IntDataArray) ProtoMessage() {}
func (*IntDataArray) Descriptor() ([]byte, []int) {
return fileDescriptor_871986018790d2fd, []int{2}
}
func (m *IntDataArray) XXX_Unmarshal(b []byte) error {
return xxx_messageInfo_IntDataArray.Unmarshal(m, b)
}
func (m *IntDataArray) XXX_Marshal(b []byte, deterministic bool) ([]byte, error) {
return xxx_messageInfo_IntDataArray.Marshal(b, m, deterministic)
}
func (m *IntDataArray) XXX_Merge(src proto.Message) {
xxx_messageInfo_IntDataArray.Merge(m, src)
}
func (m *IntDataArray) XXX_Size() int {
return xxx_messageInfo_IntDataArray.Size(m)
}
func (m *IntDataArray) XXX_DiscardUnknown() {
xxx_messageInfo_IntDataArray.DiscardUnknown(m)
}
var xxx_messageInfo_IntDataArray proto.InternalMessageInfo
func (m *IntDataArray) GetData() []int32 {
if m != nil {
return m.Data
}
return nil
}
func init() {
proto.RegisterType((*GetIntDataArrayStreamRequest)(nil), "dataArray.GetIntDataArrayStreamRequest")
proto.RegisterType((*ServerSendResponse)(nil), "dataArray.ServerSendResponse")
proto.RegisterType((*IntDataArray)(nil), "dataArray.IntDataArray")
}
// Reference imports to suppress errors if they are not otherwise used.
var _ context.Context
var _ grpc.ClientConn
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
const _ = grpc.SupportPackageIsVersion4
// DataServerClient is the client API for DataServer service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type DataServerClient interface {
WriteIntDataArray(ctx context.Context, in *IntDataArray, opts ...grpc.CallOption) (*ServerSendResponse, error)
// A simple service for reading a stream of IntDataArrays.
GetIntDataArrays(ctx context.Context, in *GetIntDataArrayStreamRequest, opts ...grpc.CallOption) (DataServer_GetIntDataArraysClient, error)
}
type dataServerClient struct {
cc *grpc.ClientConn
}
func NewDataServerClient(cc *grpc.ClientConn) DataServerClient {
return &dataServerClient{cc}
}
func (c *dataServerClient) WriteIntDataArray(ctx context.Context, in *IntDataArray, opts ...grpc.CallOption) (*ServerSendResponse, error) {
out := new(ServerSendResponse)
err := c.cc.Invoke(ctx, "/dataArray.DataServer/WriteIntDataArray", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
func (c *dataServerClient) GetIntDataArrays(ctx context.Context, in *GetIntDataArrayStreamRequest, opts ...grpc.CallOption) (DataServer_GetIntDataArraysClient, error) {
stream, err := c.cc.NewStream(ctx, &_DataServer_serviceDesc.Streams[0], "/dataArray.DataServer/GetIntDataArrays", opts...)
if err != nil {
return nil, err
}
x := &dataServerGetIntDataArraysClient{stream}
if err := x.ClientStream.SendMsg(in); err != nil {
return nil, err
}
if err := x.ClientStream.CloseSend(); err != nil {
return nil, err
}
return x, nil
}
type DataServer_GetIntDataArraysClient interface {
Recv() (*IntDataArray, error)
grpc.ClientStream
}
type dataServerGetIntDataArraysClient struct {
|
}
func (x *dataServerGetIntDataArraysClient) Recv() (*IntDataArray, error) {
m := new(IntDataArray)
if err := x.ClientStream.RecvMsg(m); err != nil {
return nil, err
}
return m, nil
}
// DataServerServer is the server API for DataServer service.
type DataServerServer interface {
WriteIntDataArray(context.Context, *IntDataArray) (*ServerSendResponse, error)
// A simple service for reading a stream of IntDataArrays.
GetIntDataArrays(*GetIntDataArrayStreamRequest, DataServer_GetIntDataArraysServer) error
}
func RegisterDataServerServer(s *grpc.Server, srv DataServerServer) {
s.RegisterService(&_DataServer_serviceDesc, srv)
}
func _DataServer_WriteIntDataArray_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(IntDataArray)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataServerServer).WriteIntDataArray(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/dataArray.DataServer/WriteIntDataArray",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataServerServer).WriteIntDataArray(ctx, req.(*IntDataArray))
}
return interceptor(ctx, in, info, handler)
}
func _DataServer_GetIntDataArrays_Handler(srv interface{}, stream grpc.ServerStream) error {
m := new(GetIntDataArrayStreamRequest)
if err := stream.RecvMsg(m); err != nil {
return err
}
return srv.(DataServerServer).GetIntDataArrays(m, &dataServerGetIntDataArraysServer{stream})
}
type DataServer_GetIntDataArraysServer interface {
Send(*IntDataArray) error
grpc.ServerStream
}
type dataServerGetIntDataArraysServer struct {
grpc.ServerStream
}
func (x *dataServerGetIntDataArraysServer) Send(m *IntDataArray) error {
return x.ServerStream.SendMsg(m)
}
var _DataServer_serviceDesc = grpc.ServiceDesc{
ServiceName: "dataArray.DataServer",
HandlerType: (*DataServerServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "WriteIntDataArray",
Handler: _DataServer_WriteIntDataArray_Handler,
},
},
Streams: []grpc.StreamDesc{
{
StreamName: "GetIntDataArrays",
Handler: _DataServer_GetIntDataArrays_Handler,
ServerStreams: true,
},
},
Metadata: "data.proto",
}
// DataClientClient is the client API for DataClient service.
//
// For semantics around ctx use and closing/ending streaming RPCs, please refer to https://godoc.org/google.golang.org/grpc#ClientConn.NewStream.
type DataClientClient interface {
WriteIntDataArray(ctx context.Context, in *IntDataArray, opts ...grpc.CallOption) (*IntDataArray, error)
}
type dataClientClient struct {
cc *grpc.ClientConn
}
func NewDataClientClient(cc *grpc.ClientConn) DataClientClient {
return &dataClientClient{cc}
}
func (c *dataClientClient) WriteIntDataArray(ctx context.Context, in *IntDataArray, opts ...grpc.CallOption) (*IntDataArray, error) {
out := new(IntDataArray)
err := c.cc.Invoke(ctx, "/dataArray.DataClient/WriteIntDataArray", in, out, opts...)
if err != nil {
return nil, err
}
return out, nil
}
// DataClientServer is the server API for DataClient service.
type DataClientServer interface {
WriteIntDataArray(context.Context, *IntDataArray) (*IntDataArray, error)
}
func RegisterDataClientServer(s *grpc.Server, srv DataClientServer) {
s.RegisterService(&_DataClient_serviceDesc, srv)
}
func _DataClient_WriteIntDataArray_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
in := new(IntDataArray)
if err := dec(in); err != nil {
return nil, err
}
if interceptor == nil {
return srv.(DataClientServer).WriteIntDataArray(ctx, in)
}
info := &grpc.UnaryServerInfo{
Server: srv,
FullMethod: "/dataArray.DataClient/WriteIntDataArray",
}
handler := func(ctx context.Context, req interface{}) (interface{}, error) {
return srv.(DataClientServer).WriteIntDataArray(ctx, req.(*IntDataArray))
}
return interceptor(ctx, in, info, handler)
}
var _DataClient_serviceDesc = grpc.ServiceDesc{
ServiceName: "dataArray.DataClient",
HandlerType: (*DataClientServer)(nil),
Methods: []grpc.MethodDesc{
{
MethodName: "WriteIntDataArray",
Handler: _DataClient_WriteIntDataArray_Handler,
},
},
Streams: []grpc.StreamDesc{},
Metadata: "data.proto",
}
func init() { proto.RegisterFile("data.proto", fileDescriptor_871986018790d2fd) }
var fileDescriptor_871986018790d2fd = []byte{
// 193 bytes of a gzipped FileDescriptorProto
0x1f, 0x8b, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0xff, 0xe2, 0xe2, 0x4a, 0x49, 0x2c, 0x49,
0xd4, 0x2b, 0x28, 0xca, 0x2f, 0xc9, 0x17, 0xe2, 0x04, 0xb1, 0x1d, 0x8b, 0x8a, 0x12, 0x2b, 0x95,
0xe4, 0xb8, 0x64, 0xdc, 0x53, 0x4b, 0x3c, 0xf3, 0x4a, 0x5c, 0x60, 0x42, 0xc1, 0x25, 0x45, 0xa9,
0x89, 0xb9, 0x41, 0xa9, 0x85, 0xa5, 0xa9, 0xc5, 0x25, 0x4a, 0x22, 0x5c, 0x42, 0xc1, 0xa9, 0x45,
0x65, 0xa9, 0x45, 0xc1, 0xa9, 0x79, 0x29, 0x41, 0xa9, 0xc5, 0x05, 0xf9, 0x79, 0xc5, 0xa9, 0x4a,
0x4a, 0x5c, 0x3c, 0xc8, 0x5a, 0x84, 0x84, 0xb8, 0x58, 0x40, 0x46, 0x4a, 0x30, 0x2a, 0x30, 0x6b,
0xb0, 0x06, 0x81, 0xd9, 0x46, 0x5b, 0x19, 0xb9, 0xb8, 0x40, 0x2a, 0x20, 0xda, 0x85, 0x7c, 0xb9,
0x04, 0xc3, 0x8b, 0x32, 0x4b, 0x52, 0x51, 0xf4, 0x89, 0xeb, 0xc1, 0x5d, 0xa2, 0x87, 0x2c, 0x21,
0x25, 0x8b, 0x24, 0x81, 0xc5, 0x7e, 0x06, 0xa1, 0x08, 0x2e, 0x01, 0x34, 0x77, 0x17, 0x0b, 0xa9,
0x23, 0x69, 0xc2, 0xe7, 0x29, 0x29, 0x5c, 0xd6, 0x2a, 0x31, 0x18, 0x30, 0x1a, 0x85, 0x42, 0x9c,
0xed, 0x9c, 0x93, 0x99, 0x9a, 0x57, 0x22, 0xe4, 0x4e, 0x92, 0xb3, 0x71, 0x1b, 0x9c, 0xc4, 0x06,
0x0e, 0x7a, 0x63, 0x40, 0x00, 0x00, 0x00, 0xff, 0xff, 0x4a, 0x88, 0x8f, 0x90, 0x88, 0x01, 0x00,
0x00,
}
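// A minimal client-side usage sketch (not part of the generated file); the dial
// target and error handling are assumptions for illustration.
//
//	conn, err := grpc.Dial("localhost:50051", grpc.WithInsecure())
//	if err != nil {
//		log.Fatal(err)
//	}
//	defer conn.Close()
//	client := NewDataServerClient(conn)
//	if _, err := client.WriteIntDataArray(context.Background(), &IntDataArray{Data: []int32{1, 2, 3}}); err != nil {
//		log.Fatal(err)
//	}
//	stream, err := client.GetIntDataArrays(context.Background(), &GetIntDataArrayStreamRequest{})
//	if err != nil {
//		log.Fatal(err)
//	}
//	for {
//		arr, err := stream.Recv()
//		if err == io.EOF {
//			break
//		}
//		if err != nil {
//			log.Fatal(err)
//		}
//		fmt.Println(arr.GetData())
//	}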
|
grpc.ClientStream
|
test_create.py
|
import pytest
import uuid
from fastapi import status
#
# INVALID TESTS
#
@pytest.mark.parametrize(
"key,value",
[
("description", 123),
("description", ""),
("uuid", None),
("uuid", 1),
("uuid", "abc"),
("uuid", ""),
("value", 123),
("value", None),
("value", ""),
],
)
def test_create_invalid_fields(client, key, value):
create_json = {"value": "test"}
create_json[key] = value
create = client.post("/api/node/directive/", json=create_json)
assert create.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
@pytest.mark.parametrize(
"key",
[
("uuid"),
("value"),
],
)
def
|
(client, key):
# Create an object
create1_json = {"uuid": str(uuid.uuid4()), "value": "test"}
client.post("/api/node/directive/", json=create1_json)
# Ensure you cannot create another object with the same unique field value
create2_json = {"value": "test2"}
create2_json[key] = create1_json[key]
create2 = client.post("/api/node/directive/", json=create2_json)
assert create2.status_code == status.HTTP_409_CONFLICT
@pytest.mark.parametrize(
"key",
[
("value"),
],
)
def test_create_missing_required_fields(client, key):
create_json = {"value": "test"}
del create_json[key]
create = client.post("/api/node/directive/", json=create_json)
assert create.status_code == status.HTTP_422_UNPROCESSABLE_ENTITY
#
# VALID TESTS
#
@pytest.mark.parametrize(
"key,value",
[
("description", None),
("description", "test"),
("uuid", str(uuid.uuid4()))
],
)
def test_create_valid_optional_fields(client, key, value):
# Create the object
create = client.post("/api/node/directive/", json={key: value, "value": "test"})
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client.get(create.headers["Content-Location"])
assert get.json()[key] == value
def test_create_valid_required_fields(client):
# Create the object
create = client.post("/api/node/directive/", json={"value": "test"})
assert create.status_code == status.HTTP_201_CREATED
# Read it back
get = client.get(create.headers["Content-Location"])
assert get.json()["value"] == "test"
|
test_create_duplicate_unique_fields
|
facebook_message.js
|
const exec = require('child_process').exec;
const os = require("os");
const axios = require('axios');
async function
|
(Language) {
    // Integrity check: line 3 of the shipped Dockerfile must match the expected
    // clone command. Note the throw happens inside the exec callback, so it
    // aborts the process asynchronously instead of rejecting this function.
    var sdn = 'RUN git clone https://github.com/riz4d/stella /root/stella' + '\n'
    exec('sed -n 3p /root/stella/Dockerfile', async (err, stdout, stderr) => {
        if (sdn !== stdout) {
            throw new Error("Fake - Unknown Device !!");
        }
    })
var data = {
title: '',
caption: '',
like: '',
play: '',
share: '',
username: '',
comment: '',
creation: ''
}
if (Language == 'TR') {
data.title = '*Başlık:* '
data.caption = '*Açıklama:* '
data.like = '*Beğeni Sayısı:* '
data.play = '*İzlenme Sayısı:* '
data.share = '*Paylaşım Sayısı:* '
data.username = '*Kullanıcı Adı:* '
data.comment = '*Yorum Sayısı:* '
data.creation = '*Oluşturma Tarihi:* '
}
else if (Language == 'EN') {
data.title = '*Title:* '
data.caption = '*Caption:* '
data.like = '*Like Count:* '
data.play = '*Views:* '
data.share = '*Share Count:* '
data.username = '*Username:* '
data.comment = '*Comment Count:* '
data.creation = '*Creation Date:* '
}
else if (Language == 'AZ') {
data.title = '*Başlıq:* '
data.caption = '*Təsvir:* '
data.like = '*Bəyənmələrin sayı:* '
data.play = '*Baxışlar:* '
data.share = '*Paylaşım Sayısı:* '
data.username = '*İstifadəçi adı:* '
data.comment = '*Şərh sayı:* '
data.creation = '*Yaradılma tarixi:* '
}
else if (Language == 'ID') {
data.title = '*Judul:* '
data.caption = '*Keterangan:* '
data.like = '*Suka Hitung:* '
data.play = '*Tampilan:* '
data.share = '*Bagikan Hitungan:* '
data.username = '*Nama pengguna:* '
data.comment = '*Jumlah Komentar:* '
data.creation = '*Tanggal Pembuatan:* '
}
else if (Language == 'PT') {
data.title = '*Título:* '
data.caption = '*Rubrica:* '
data.like = '*Número de curtidas:* '
data.play = '*Visualizações:* '
data.share = '*Contagem de compartilhamento:* '
data.username = '*Nome do usuário:* '
data.comment = '*Contagem de comentários:* '
data.creation = '*Data de criação:* '
}
else if (Language == 'ES') {
data.title = '*Título:* '
data.caption = '*Subtítulo:* '
data.like = '*Como contar:* '
data.play = '*Puntos de vista:* '
data.share = '*Recuento de acciones:* '
data.username = '*Nombre de usuario:* '
data.comment = '*Recuento de comentarios:* '
data.creation = '*Fecha de creación:* '
}
else if (Language == 'RU') {
data.title = '*Заголовок:* '
data.caption = '*Подпись:* '
data.like = '*Как граф:* '
data.play = '*Просмотры:* '
data.share = '*Количество акций:* '
data.username = '*Имя пользователя:* '
data.comment = '*Количество комментариев:* '
data.creation = '*Дата создания:* '
}
else if (Language == 'ML') {
data.title = '*ശീർഷകം:* '
data.caption = '*അടിക്കുറിപ്പ്:* '
data.like = '*കൗണ്ട് പോലെ:* '
data.play = '*കാഴ്ചകൾ:* '
data.share = '*ഷെയർ കൗണ്ട്:* '
data.username = '*ഉപയോക്തൃനാമം:* '
data.comment = '*അഭിപ്രായങ്ങളുടെ എണ്ണം:* '
data.creation = '*സൃഷ്ടിച്ച തീയതി:* '
}
else if (Language == 'HI') {
data.title = '*शीर्षक:* '
data.caption = '*नाम:* '
data.like = '*लाइक काउंट:* '
data.play = '*विचारों:* '
data.share = '*शेयर गणना:* '
data.username = '*उपयोगकर्ता नाम:* '
data.comment = '*टिप्पणी गणना:* '
data.creation = '*निर्माण तिथि:* '
}
else {
data.title = '*Title:* '
data.caption = '*Caption:* '
data.like = '*Like Count:* '
data.play = '*Views:* '
data.share = '*Share Count:* '
data.username = '*Username:* '
data.comment = '*Comment Count:* '
data.creation = '*Creation Date:* '
}
return data;
}
module.exports = fb_msg
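// A minimal usage sketch (not part of the original file); the require path is
// an assumption for illustration.
//
//   const fb_msg = require('./facebook_message');
//   fb_msg('EN').then((labels) => {
//     console.log(labels.title); // "*Title:* "
//   });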
|
fb_msg
|
branding.controller.js
|
/*!
* Piwik - free/libre analytics platform
*
* @link http://piwik.org
* @license http://www.gnu.org/licenses/gpl-3.0.html GPL v3 or later
*/
/**
* Controller to save mail smtp settings
*/
(function () {
angular.module('piwikApp').controller('BrandingController', BrandingController);
BrandingController.$inject = ['$scope', 'piwikApi'];
function BrandingController($scope, piwikApi) {
var self = this;
this.isLoading = false;
function refreshCustomLogo() {
var selectors = ['#currentLogo', '#currentFavicon'];
var index;
for (index = 0; index < selectors.length; index++) {
var imageDiv = $(selectors[index]);
if (imageDiv && imageDiv.data("src") && imageDiv.data("srcExists")) {
var logoUrl = imageDiv.data("src");
imageDiv.attr("src", logoUrl + "?" + (new Date()).getTime());
imageDiv.show();
} else {
imageDiv.hide();
}
}
}
this.updateLogo = function () {
var isSubmittingLogo = (this.customLogo != undefined && this.customLogo != '');
var isSubmittingFavicon = (this.customFavicon != undefined && this.customFavicon != '');
if (!isSubmittingLogo && !isSubmittingFavicon) {
return;
}
var $uploadError = $('.uploaderror');
$uploadError.fadeOut();
var frameName = "upload" + (new Date()).getTime();
var uploadFrame = $("<iframe name=\"" + frameName + "\" />");
uploadFrame.css("display", "none");
uploadFrame.load(function (data) {
setTimeout(function () {
var frameContent = $(uploadFrame.contents()).find('body').html();
frameContent = $.trim(frameContent);
if ('0' === frameContent) {
$uploadError.show();
} else {
                        // Upload succeeded, so we update the image availability
                        // according to what has been uploaded
if (isSubmittingLogo) {
$('#currentLogo').data("srcExists", true);
}
if (isSubmittingFavicon) {
$('#currentFavicon').data("srcExists", true);
}
refreshCustomLogo();
}
if ('1' === frameContent || '0' === frameContent) {
uploadFrame.remove();
}
}, 1000);
});
$("body:first").append(uploadFrame);
var submittingForm = $('#logoUploadForm');
submittingForm.attr("target", frameName);
|
};
refreshCustomLogo();
this.toggleCustomLogo = function () {
refreshCustomLogo();
};
this.save = function () {
this.isLoading = true;
piwikApi.post({module: 'API', method: 'CoreAdminHome.setBrandingSettings'}, {
useCustomLogo: this.enabled ? '1' : '0'
}).then(function (success) {
self.isLoading = false;
var UI = require('piwik/UI');
var notification = new UI.Notification();
notification.show(_pk_translate('CoreAdminHome_SettingsSaveSuccess'), {
id: 'generalSettings', context: 'success'
});
notification.scrollToNotification();
}, function () {
self.isLoading = false;
});
};
}
})();
|
submittingForm.submit();
this.customLogo = '';
this.customFavicon = '';
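            // These three lines complete updateLogo(): the form posts into the
            // hidden iframe created above (so the page itself never navigates),
            // and the model fields are cleared so the file inputs can be reused
            // for the next upload.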
|
_express_route_circuits_operations.py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ExpressRouteCircuitsOperations:
"""ExpressRouteCircuitsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
|
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
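    # A minimal usage sketch (not part of the generated client); the client
    # construction and resource names are assumptions for illustration.
    #
    #   poller = await network_client.express_route_circuits.begin_delete(
    #       "my-rg", "my-circuit")
    #   await poller.result()   # block until the deletion LRO completes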
async def get(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
"""Gets information about the specified express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuit, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuit
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.ExpressRouteCircuit",
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ExpressRouteCircuit')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.ExpressRouteCircuit",
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuit"]:
"""Creates or updates an express route circuit.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to the create or update express route circuit operation.
:type parameters: ~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuit
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuit or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuit]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
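    # A minimal usage sketch (not part of the generated client); `parameters` is
    # assumed to be a populated ExpressRouteCircuit model.
    #
    #   poller = await network_client.express_route_circuits.begin_create_or_update(
    #       "my-rg", "my-circuit", parameters)
    #   circuit = await poller.result()   # the deserialized ExpressRouteCircuit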
async def update_tags(
self,
resource_group_name: str,
circuit_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.ExpressRouteCircuit":
"""Updates an express route circuit tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the circuit.
:type circuit_name: str
:param parameters: Parameters supplied to update express route circuit tags.
:type parameters: ~azure.mgmt.network.v2020_04_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuit, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuit
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuit"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuit', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}'} # type: ignore
async def _list_arp_table_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsArpTableListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsArpTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._list_arp_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_arp_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
async def begin_list_arp_table(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsArpTableListResult"]:
"""Gets the currently advertised ARP table associated with the express route circuit in a resource
group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitsArpTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuitsArpTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsArpTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_arp_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsArpTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_arp_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/arpTables/{devicePath}'} # type: ignore
async def _list_routes_table_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._list_routes_table_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
async def begin_list_routes_table(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsRoutesTableListResult"]:
"""Gets the currently advertised routes table associated with the express route circuit in a
resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitsRoutesTableListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuitsRoutesTableListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsRoutesTableListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_routes_table_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
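# The initial POST returns 202 Accepted; AsyncARMPolling then polls, retrieving the final routes table via the Location header ('final-state-via' below).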
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTables/{devicePath}'} # type: ignore
async def _list_routes_table_summary_initial(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]:
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._list_routes_table_summary_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.post(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_list_routes_table_summary_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
async def begin_list_routes_table_summary(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
device_path: str,
**kwargs: Any
) -> AsyncLROPoller["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]:
"""Gets the currently advertised routes table summary associated with the express route circuit in
a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:param device_path: The path of the device.
:type device_path: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ExpressRouteCircuitsRoutesTableSummaryListResult or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuitsRoutesTableSummaryListResult]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitsRoutesTableSummaryListResult"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._list_routes_table_summary_initial(
resource_group_name=resource_group_name,
circuit_name=circuit_name,
peering_name=peering_name,
device_path=device_path,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitsRoutesTableSummaryListResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'devicePath': self._serialize.url("device_path", device_path, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_list_routes_table_summary.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/routeTablesSummary/{devicePath}'} # type: ignore
async def get_stats(
self,
resource_group_name: str,
circuit_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitStats":
"""Gets all the stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitStats"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get_stats.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/stats'} # type: ignore
async def get_peering_stats(
self,
resource_group_name: str,
circuit_name: str,
peering_name: str,
**kwargs: Any
) -> "_models.ExpressRouteCircuitStats":
"""Gets all stats from an express route circuit in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param circuit_name: The name of the express route circuit.
:type circuit_name: str
:param peering_name: The name of the peering.
:type peering_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExpressRouteCircuitStats, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuitStats
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitStats"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get_peering_stats.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'circuitName': self._serialize.url("circuit_name", circuit_name, 'str'),
'peeringName': self._serialize.url("peering_name", peering_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExpressRouteCircuitStats', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_peering_stats.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits/{circuitName}/peerings/{peeringName}/stats'} # type: ignore
def list(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitListResult"]:
"""Gets all the express route circuits in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
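# extract_data unwraps one page of results and returns the next_link, letting AsyncItemPaged drive the pagination.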
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/expressRouteCircuits'} # type: ignore
def list_all(
self,
**kwargs: Any
) -> AsyncIterable["_models.ExpressRouteCircuitListResult"]:
"""Gets all the express route circuits in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExpressRouteCircuitListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.ExpressRouteCircuitListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExpressRouteCircuitListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ExpressRouteCircuitListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/expressRouteCircuits'} # type: ignore
| |
hero.service.ts
|
import { Injectable } from '@angular/core';
import gql from 'graphql-tag';
import { Observable } from 'rxjs/Observable';
import { of } from 'rxjs/observable/of';
import { map, tap } from 'rxjs/operators';
import { Hero } from './hero';
import { GraphQLService } from './graphql.service';
@Injectable()
export class HeroService {
constructor(
private gqlService: GraphQLService
) { }
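// Each method below wraps a GraphQL operation through GraphQLService and maps the nested query payload down to plain Hero objects.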
/** Get all heroes from the server */
getHeroes (): Observable<Hero[]> {
return this.gqlService.query<{allHeroes:{nodes:Hero[]}}>(
gql`query readAllHeroes{allHeroes{nodes{id,name}}}`
).pipe(map((data) => data.allHeroes.nodes))}
/** Get a hero by id. Will 404 if id not found */
getHero(id: number): Observable<Hero> {
return this.gqlService.query<{heroById:Hero}>(
gql`query readHeroById($id:Int!){heroById(id:$id){id,name}}`,
{'id': id},
'readHeroById'
).pipe(map((data) => data.heroById))}
/** Get all heroes whose name contains the search term */
searchHeroes(term: string): Observable<Hero[]> {
if (!term.trim()) {return of([]);} // if no search term, return an empty hero array.
return this.gqlService.query<{herowithterm:{nodes:Hero[]}}>(
gql`query readHeroesWithTerm($term:String!){herowithterm(term:$term){nodes{id,name}}}`,
{'term': term},
'readHeroesWithTerm'
).pipe(map((data) => data.herowithterm.nodes))}
//////// Save methods //////////
/** Add a new hero to the server */
addHero (hero: Hero): Observable<Hero> {
return this.gqlService.mutate<{createHero:{hero:Hero}}>(
gql`mutation create($name:String!)
{createHero(input:{hero:{name:$name}})
{hero{id,name}}}`,
|
hero, 'create').pipe(map((data) => data.createHero.hero))}
/** Delete the hero from the server */
deleteHero (hero: Hero | number): Observable<Hero> {
const id = typeof hero === 'number' ? hero : hero.id;
return this.gqlService.mutate<{deleteHeroById:{hero:Hero}}>(
gql`mutation delete($id:Int!)
{deleteHeroById(input:{id:$id})
{hero{id,name}}}`,
{'id': id}, 'delete').pipe(map((data) => data.deleteHeroById.hero))}
/** Update the hero on the server */
updateHero (hero: Hero): Observable<Hero> {
return this.gqlService.mutate<{updateHeroById:{hero:Hero}}>(
gql`mutation update($id:Int!,$name:String!)
{updateHeroById(input:{id:$id,heroPatch:{name:$name}})
{hero{id,name}}}`,
hero, 'update').pipe(map((data) => data.updateHeroById.hero))}
}
| |
output.go
|
package output
|
"github.com/pkg/errors"
"gopkg.in/yaml.v2"
"io/ioutil"
"log"
"os"
)
var DebugLogger StdLogger = log.New(ioutil.Discard, "[kafkactl] ", log.LstdFlags)
var TestLogger StdLogger = log.New(ioutil.Discard, "[test ] ", log.LstdFlags)
// StdLogger is used to log error messages.
type StdLogger interface {
Print(v ...interface{})
Printf(format string, v ...interface{})
Println(v ...interface{})
}
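// Fail prints the error message to the error stream and terminates the process with exit code 1.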
var Fail = func(err error) {
_, _ = fmt.Fprintf(IoStreams.ErrOut, "%s\n", err.Error())
os.Exit(1)
}
func Warnf(msg string, args ...interface{}) {
_, _ = fmt.Fprintf(IoStreams.ErrOut, msg+"\n", args...)
}
func Infof(msg string, args ...interface{}) {
_, _ = fmt.Fprintf(IoStreams.Out, msg+"\n", args...)
}
func Statusf(msg string, args ...interface{}) {
_, _ = fmt.Fprintf(IoStreams.Out, msg, args...)
}
func Debugf(msg string, args ...interface{}) {
DebugLogger.Printf(msg+"\n", args...)
}
func TestLogf(msg string, args ...interface{}) {
TestLogger.Printf(msg+"\n", args...)
}
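// PrintObject marshals object as yaml or json and writes it to the output stream; the format "none" suppresses output, anything else is an error.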
func PrintObject(object interface{}, format string) error {
if format == "yaml" {
yamlString, err := yaml.Marshal(object)
if err != nil {
return errors.Wrap(err, "unable to format yaml")
}
_, _ = fmt.Fprintln(IoStreams.Out, string(yamlString))
} else if format == "json" {
jsonString, err := json.MarshalIndent(object, "", "\t")
if err != nil {
return errors.Wrap(err, "unable to format json")
}
_, _ = fmt.Fprintln(IoStreams.Out, string(jsonString))
} else if format != "none" {
return errors.Errorf("unknown format: %v", format)
}
return nil
}
func PrintStrings(args ...string) {
for _, arg := range args {
_, _ = fmt.Fprintln(IoStreams.Out, arg)
}
}
|
import (
"encoding/json"
"fmt"
|
main.go
|
/*
Copyright 2020 The Magma Authors.
This source code is licensed under the BSD-style license found in the
LICENSE file in the root directory of this source tree.
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Central Session Controller is a service which proxies calls to the OCS and
// policydb to retrieve credit and traffic policy information and relay it to
// the gateway.
package main
import (
"flag"
"fmt"
"os"
"path/filepath"
"strings"
"time"
"github.com/golang/glog"
"magma/feg/cloud/go/protos"
"magma/feg/gateway/diameter"
"magma/feg/gateway/policydb"
"magma/feg/gateway/registry"
"magma/feg/gateway/services/session_proxy/credit_control"
"magma/feg/gateway/services/session_proxy/credit_control/gx"
"magma/feg/gateway/services/session_proxy/credit_control/gy"
"magma/feg/gateway/services/session_proxy/servicers"
lteprotos "magma/lte/cloud/go/protos"
"magma/orc8r/lib/go/service"
"magma/orc8r/lib/go/util"
)
func init() {
flag.Parse()
}
func main() {
serviceBaseName := filepath.Base(os.Args[0])
serviceBaseName = strings.TrimSuffix(serviceBaseName, filepath.Ext(serviceBaseName))
if credit_control.SessionProxyServiceName != serviceBaseName {
glog.Warningf(
"Session Proxy Base Service name: %s does not match its managed configs key: %s",
serviceBaseName, credit_control.SessionProxyServiceName)
}
// Create the service
srv, err := service.NewServiceWithOptions(registry.ModuleName, registry.SESSION_PROXY)
if err != nil {
glog.Fatalf("Error creating service: %s", err)
}
// Create configs for each server and start diam connections
controllerParms, policyDBClient, err := generateClientsConfsAndDiameterConnection()
if err != nil {
glog.Fatal(err)
return
}
// Add servicers to the service
sessionManagerAndHealthServer, err := servicers.
NewCentralSessionControllerDefaultMultiplexWithHealth(controllerParms, policyDBClient)
if err != nil {
glog.Fatalf("Could not add Health Server to servicer: %s", err)
}
lteprotos.RegisterCentralSessionControllerServer(srv.GrpcServer, sessionManagerAndHealthServer)
protos.RegisterServiceHealthServer(srv.GrpcServer, sessionManagerAndHealthServer)
// Run the service
err = srv.Run()
if err != nil {
glog.Fatalf("Error running service: %s", err)
}
}
// TODO: move this to servicers and add testing
// generateClientsConfsAndDiameterConnection reads the configuration for all Gx and Gy connections configured
// in gateway.mconfig and creates a slice containing all the required parameters to start CentralSessionControllers.
func generateClientsConfsAndDiameterConnection() (
[]*servicers.ControllerParam, *policydb.RedisPolicyDBClient, error) {
cloudReg := registry.Get()
policyDBClient, err := policydb.NewRedisPolicyDBClient(cloudReg)
if err != nil {
return nil, nil, fmt.Errorf("Error connecting to redis store: %s", err)
}
// ---- Read configs from gateway.mconfig ----
glog.Info("------ Reading Gx and Gy configuration ------")
// Global config, init Method and policyDb (static routes) are shared by all the controllers
gyGlobalConf := gy.GetGyGlobalConfig()
gxGlobalConf := gx.GetGxGlobalConfig()
// Each controller will take one entry of PCRF, OCS, and gx/gy clients confs
gxCliConfs := gx.GetGxClientConfiguration()
gyCliConfs := gy.GetGyClientConfiguration()
OCSConfs := gy.GetOCSConfiguration()
PCRFConfs := gx.GetPCRFConfiguration()
// this is a new copy needed to fill in the controllerParms
OCSConfsCopy := gy.GetOCSConfiguration()
PCRFConfsCopy := gx.GetPCRFConfiguration()
// Exit if the number of GX and GY configurations are different
if len(OCSConfs) != len(PCRFConfs) {
return nil, nil, fmt.Errorf(
"Number of Gx and Gy servers configured must be equal Gx:%d Gx:%d",
len(OCSConfs), len(PCRFConfs))
}
glog.Info("------ Done reading configuration ------")
// ---- Create diameter connections and build parameters for CentralSessionControllers ----
glog.Info("------ Create diameter connections ------")
totalLen := len(OCSConfs)
controllerParms := make([]*servicers.ControllerParam, 0, totalLen)
for i := 0; i < totalLen; i++ {
controlParam := &servicers.ControllerParam{}
// Fill in general parameters for controller i
controlParam.Config = &servicers.SessionControllerConfig{
OCSConfig: OCSConfs[i],
PCRFConfig: PCRFConfs[i],
UseGyForAuthOnly: util.IsTruthyEnv(gy.UseGyForAuthOnlyEnv),
DisableGx: gxGlobalConf.DisableGx,
RequestTimeoutGx: time.Duration(gxCliConfs[i].RequestTimeout) * time.Second,
DisableGy: gyGlobalConf.DisableGy,
|
RequestTimeoutGy: time.Duration(gyCliConfs[i].RequestTimeout) * time.Second,
}
// Fill in gx and gy config for controller i
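// If Gx and Gy target the same diameter server (equal connection config but distinct config objects), reuse a single diameter client connection for both interfaces.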
if OCSConfsCopy[i].DiameterServerConnConfig == PCRFConfsCopy[i].DiameterServerConnConfig &&
OCSConfsCopy[i] != PCRFConfsCopy[i] {
var clientCfg = *gxCliConfs[i]
clientCfg.AuthAppID = gyCliConfs[i].AppID
diamClient := diameter.NewClient(&clientCfg, OCSConfs[i].LocalAddr)
diamClient.BeginConnection(OCSConfsCopy[i])
if gyGlobalConf.DisableGy {
glog.Info("Gy Disabled by configuration, not connecting to OCS")
} else {
glog.Infof("Using single Gy/Gx connection for server: %+v",
OCSConfsCopy[i].DiameterServerConnConfig)
controlParam.CreditClient = gy.NewConnectedGyClient(
diamClient,
OCSConfsCopy[i],
gy.GetGyReAuthHandler(cloudReg),
cloudReg,
gyGlobalConf)
}
if gxGlobalConf.DisableGx {
glog.Info("Gx Disabled by configuration, not connecting to PCRF")
} else {
controlParam.PolicyClient = gx.NewConnectedGxClient(
diamClient,
OCSConfsCopy[i],
gx.GetGxReAuthHandler(cloudReg, policyDBClient),
cloudReg,
gxGlobalConf)
}
} else {
glog.Infof("Using distinct Gx and Gy")
if gyGlobalConf.DisableGy {
glog.Info("Gy Disabled by configuration, not connecting to OCS")
} else {
glog.Infof("Gy client: %+v, Gy server: %+v", gyCliConfs[i], OCSConfsCopy[i])
controlParam.CreditClient = gy.NewGyClient(
gyCliConfs[i],
OCSConfsCopy[i],
gy.GetGyReAuthHandler(cloudReg),
cloudReg,
gyGlobalConf)
}
if gxGlobalConf.DisableGx {
glog.Info("Gx Disabled by configuration, not connecting to PCRF")
} else {
glog.Infof("Gx client: %+v, Gx server: %+v", gxCliConfs[i], PCRFConfsCopy[i])
controlParam.PolicyClient = gx.NewGxClient(
gxCliConfs[i],
PCRFConfsCopy[i],
gx.GetGxReAuthHandler(cloudReg, policyDBClient),
cloudReg,
gxGlobalConf)
}
}
controllerParms = append(controllerParms, controlParam)
}
glog.Infof("------ Done creating %d diameter connections ------", totalLen)
return controllerParms, policyDBClient, nil
}
| |
mouse.rs
|
use super::input_handler::InputHandler;
use crate::Context;
use crate::graphics::Point2;
pub use crate::input::input_handler::MouseButton;
pub struct MouseContext {
pub(crate) input_handler: InputHandler,
last_position: Point2,
delta: Point2,
cursor_grabbed: bool,
cursor_hidden: bool,
cursor_type: miniquad::CursorIcon,
}
impl MouseContext {
pub fn new(input_handler: InputHandler) -> Self {
MouseContext {
input_handler,
last_position: Point2::new(0., 0.),
delta: Point2::new(0., 0.),
cursor_grabbed: false,
cursor_hidden: false,
cursor_type: miniquad::CursorIcon::Default,
}
}
pub(crate) fn set_last_position(&mut self, p: Point2) {
self.last_position = p;
}
/// Resets the value returned by [`mouse::delta`](fn.delta.html) to zero.
/// You shouldn't need to call this, except when you're running your own event loop.
/// In this case call it right at the end, after `draw` and `update` have finished.
pub fn reset_delta(&mut self) {
self.delta = Point2::new(0., 0.);
}
|
pub fn mouse_position(&self) -> cgmath::Point2<f32> {
self.input_handler.mouse_position
}
pub fn button_pressed(&self, button: MouseButton) -> bool {
self.input_handler.is_mouse_key_down(&button)
}
pub fn wheel(&self) -> f32 {
self.input_handler.wheel
}
}
/// The current mouse position in pixels.
pub fn position(ctx: &Context) -> mint::Point2<f32> {
ctx.mouse_context.mouse_position().into()
}
/// Whether a certain mouse button is currently pressed.
pub fn button_pressed(ctx: &Context, button: MouseButton) -> bool {
ctx.mouse_context.button_pressed(button)
}
pub fn wheel(ctx: &Context) -> f32 {
ctx.mouse_context.wheel()
}
/// Get the distance the cursor was moved during the current frame, in pixels.
pub fn delta(ctx: &Context) -> mint::Point2<f32> {
ctx.mouse_context.delta.into()
}
/// The position the mouse had during the latest `mouse_motion_event`
pub fn last_position(ctx: &Context) -> mint::Point2<f32> {
ctx.mouse_context.last_position.into()
}
/// Get whether or not the mouse is grabbed (confined to the window)
pub fn cursor_grabbed(ctx: &Context) -> bool {
ctx.mouse_context.cursor_grabbed
}
/// Set whether or not the mouse is grabbed (confined to the window)
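/// A minimal usage sketch (assuming a `ctx: &mut Context` from the game loop):
/// ```ignore
/// // Confine the cursor to the window, e.g. while the player is aiming.
/// mouse::set_cursor_grabbed(ctx, true);
/// ```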
pub fn set_cursor_grabbed(ctx: &mut Context, grabbed: bool) {
ctx.mouse_context.cursor_grabbed = grabbed;
ctx.quad_ctx.set_cursor_grab(grabbed);
}
/// Returns the current mouse cursor type of the window.
pub fn cursor_type(ctx: &Context) -> miniquad::CursorIcon {
ctx.mouse_context.cursor_type
}
/// Modifies the mouse cursor type of the window.
pub fn set_cursor_type(ctx: &mut Context, cursor_type: miniquad::CursorIcon) {
ctx.mouse_context.cursor_type = cursor_type;
ctx.quad_ctx.set_mouse_cursor(cursor_type);
}
/// Set whether or not the mouse is hidden (invisible)
pub fn cursor_hidden(ctx: &Context) -> bool {
ctx.mouse_context.cursor_hidden
}
/// Set whether or not the mouse is hidden (invisible).
pub fn set_cursor_hidden(ctx: &mut Context, hidden: bool) {
ctx.mouse_context.cursor_hidden = hidden;
ctx.quad_ctx.show_mouse(!hidden);
}
|
pub(crate) fn set_delta(&mut self, p: Point2) {
self.delta = p;
}
|
bitcoin_km_KH.ts
|
<TS language="km_KH" version="2.1">
<context>
|
<source>Right-click to edit address or label</source>
<translation>ចុចខាងស្តាំដើម្បីកែអាស្រយដ្ឋាន ឬ ស្លាក</translation>
</message>
<message>
<source>Create a new address</source>
<translation> បង្កើតអាស្រយដ្ឋានថ្មីមួយ</translation>
</message>
<message>
<source>&New</source>
<translation>&ថ្មី</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>ចម្លងអាសយដ្ឋានដែលបានរើស</translation>
</message>
<message>
<source>&Copy</source>
<translation>&ចម្លង</translation>
</message>
<message>
<source>C&lose</source>
<translation>&បិទ</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>លុបអាសយដ្ឋានដែលបានរើសពីបញ្ជី</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>នាំចេញទិន្នន័យនៃថេបបច្ចុប្បន្នទៅជាឯកសារ</translation>
</message>
<message>
<source>&Export</source>
<translation>&នាំចេញ</translation>
</message>
<message>
<source>&Delete</source>
<translation>&លុប</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>ជ្រើសរើសអាស្រយដើម្បីផ្ញើរកាកជាមួយ</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>ជ្រើសរើសអាស្រយដើម្បីទទួលកាក់ជាមួយ
</translation>
</message>
<message>
<source>C&hoose</source>
<translation>&ជ្រើសរើស</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>អាសយដ្ឋានផ្ញើ</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>អាសយដ្ឋានទទួួល</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&ចម្លងអាស្រយដ្ឋាន</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>ចម្លង&ឡាបែល</translation>
</message>
<message>
<source>&Edit</source>
<translation>&កែ</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>បរាជ័យការបញ្ជូនចេញ</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>ឡាបែល</translation>
</message>
<message>
<source>Address</source>
<translation>អាសយដ្ឋាន</translation>
</message>
<message>
<source>(no label)</source>
<translation>(គ្មានឡាបែល)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Enter passphrase</source>
<translation>បញ្ចូលពាក្យសម្ងាត់</translation>
</message>
<message>
<source>New passphrase</source>
<translation>ពាក្យសម្ងាត់ថ្មី</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>វាយពាក្យសម្ងាត់ម្ដងទៀត</translation>
</message>
<message>
<source>Show password</source>
<translation>បង្ហាញលេខសម្ងាត់</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>កាបូបអែនក្រីព</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>ដោះសោរកាបូបលុយ</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>កាបូប ឌីក្រីព </translation>
</message>
<message>
<source>Change passphrase</source>
<translation>ប្ដូរពាក្យសម្ងាត់</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>បញ្ជាក់ការសំរេចចិត្តកាបូបការអែនក្រីព</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>កាបូប ដែលអែនក្រីព</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>បរាជ័យដោះសោរកាបូប</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
<message>
<source>Banned Until</source>
<translation>ផ្អាកដល់</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Send coins to a litecoin address</source>
<translation>ផ្ញើកាក់ទៅកាន់ អាសយដ្ឋាន litecoin មួយ</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>(no label)</source>
<translation>(គ្មានឡាបែល)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
</context>
<context>
<name>FreespaceChecker</name>
</context>
<context>
<name>HelpMessageDialog</name>
</context>
<context>
<name>Intro</name>
</context>
<context>
<name>ModalOverlay</name>
</context>
<context>
<name>OpenURIDialog</name>
</context>
<context>
<name>OptionsDialog</name>
</context>
<context>
<name>OverviewPage</name>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
</context>
<context>
<name>QObject::QObject</name>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
</context>
<context>
<name>ReceiveCoinsDialog</name>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Address</source>
<translation>អាសយដ្ឋាន</translation>
</message>
<message>
<source>Label</source>
<translation>ឡាបែល</translation>
</message>
<message>
<source>Wallet</source>
<translation>កាបូប</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Label</source>
<translation>ឡាបែល</translation>
</message>
<message>
<source>(no label)</source>
<translation>(គ្មានឡាបែល)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>(no label)</source>
<translation>(គ្មានឡាបែល)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
</context>
<context>
<name>SplashScreen</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Label</source>
<translation>ឡាបែល</translation>
</message>
<message>
<source>(no label)</source>
<translation>(គ្មានឡាបែល)</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>Label</source>
<translation>ឡាបែល</translation>
</message>
<message>
<source>Address</source>
<translation>អាសយដ្ឋាន</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>បរាជ័យការបញ្ជូនចេញ</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletController</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&នាំចេញ</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>នាំចេញទិន្នន័យនៃថេបបច្ចុប្បន្នទៅជាឯកសារ</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
</context>
</TS>
|
<name>AddressBookPage</name>
<message>
|
backprop.py
|
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Code for backpropagation using the tape utilities."""
# TODO(b/159343581): Properly support CompositeTensor in all functions in this
# file.
import functools
import operator
import sys
import six
from tensorflow.python import pywrap_tfe
from tensorflow.python.eager import backprop_util
from tensorflow.python.eager import context
from tensorflow.python.eager import execute
from tensorflow.python.eager import imperative_grad
from tensorflow.python.eager import tape
from tensorflow.python.framework import composite_tensor_gradient
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import indexed_slices
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_util
from tensorflow.python.ops import default_gradient
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops.unconnected_gradients import UnconnectedGradients
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import _pywrap_utils
from tensorflow.python.util import nest
from tensorflow.python.util import tf_contextlib
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.lazy_loader import LazyLoader
from tensorflow.python.util.tf_export import tf_export
# Note that we need to lazy load the following two modules to avoid creating
# circular dependencies.
# TODO(b/119775953): fix the circular dependencies.
pfor_ops = LazyLoader(
"pfor_ops", globals(),
"tensorflow.python.ops.parallel_for.control_flow_ops")
function = LazyLoader("function", globals(),
"tensorflow.python.eager.function")
_op_attr_type_cache = {}
def op_attr_type(op_type, attr_name):
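"""Returns the attr type of `attr_name` on `op_type`, memoized in _op_attr_type_cache."""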
try:
return _op_attr_type_cache[(op_type, attr_name)]
except KeyError:
context.ensure_initialized()
h = context.context()._handle # pylint: disable=protected-access
attr_type = pywrap_tfe.TFE_OpNameGetAttrType(h, op_type, attr_name)
_op_attr_type_cache[(op_type, attr_name)] = attr_type
return attr_type
def make_attr(attr_type, value):
# pybind11 enums do not return the raw value like SWIG enums do. They are
# useful when comparing amongst each other but not direct integers as we are
# doing in most tests.
# https://pybind11.readthedocs.io/en/stable/classes.html#enumerations-and-internal-types
# TODO(amitpatankar): After all SWIG transitions, convert the enum comparisons
# from integer value to class.
if attr_type == int(pywrap_tfe.TF_ATTR_TYPE):
return dtypes.as_dtype(value)
if attr_type == [int(pywrap_tfe.TF_ATTR_TYPE)]:
return [dtypes.as_dtype(v) for v in value]
if attr_type == int(pywrap_tfe.TF_ATTR_SHAPE):
return tensor_shape.as_shape(value).as_proto()
if attr_type == [int(pywrap_tfe.TF_ATTR_SHAPE)]:
return [tensor_shape.as_shape(v).as_proto() for v in value]
if isinstance(value, str):
return value.encode()
return value
class _MockOp(object):
"""Pretends to be a tf.Operation for the gradient functions."""
def __init__(self, attrs, inputs, outputs, typ, skip_input_indices):
self.attrs = attrs
self.inputs = inputs
self.outputs = outputs
self.type = typ
self.skip_input_indices = skip_input_indices
def get_attr(self, attr):
typ = op_attr_type(self.type, attr)
for i in range(0, len(self.attrs), 2):
if self.attrs[i] == attr:
return make_attr(typ, self.attrs[i + 1])
raise KeyError(attr)
def _get_control_flow_context(self):
raise NotImplementedError(
"tf.GradientTape.gradients() does not support graph control flow "
"operations like tf.cond or tf.while at this time. Use tf.gradients() "
"instead. If you need this feature, please file a feature request at "
"https://github.com/tensorflow/tensorflow/issues/new"
)
def _gradient_function(op_name, attr_tuple, num_inputs, inputs, outputs,
out_grads, skip_input_indices, forward_pass_name_scope):
"""Calls the gradient function of the op.
Args:
op_name: the name of the op to be differentiated.
attr_tuple: the attrs, as a tuple.
num_inputs: the number of inputs to the op.
inputs: inputs to the original operation.
outputs: outputs to the original operation.
out_grads: gradients of the operation wrt its outputs.
skip_input_indices: a tuple that is passed to the gradient function,
indicating which inputs to skip calculating the gradient for.
forward_pass_name_scope: the namescope of the op in the forward pass.
Returns:
The gradients with respect to the inputs of the function, as a list.
"""
mock_op = _MockOp(attr_tuple, inputs, outputs, op_name, skip_input_indices)
grad_fn = ops._gradient_registry.lookup(op_name) # pylint: disable=protected-access
if grad_fn is None:
return [None] * num_inputs
# This does not work with v1 TensorArrays.
if ops.executing_eagerly_outside_functions(
) or control_flow_util.EnableControlFlowV2(ops.get_default_graph()):
gradient_name_scope = "gradient_tape/"
if forward_pass_name_scope:
gradient_name_scope += forward_pass_name_scope + "/"
with ops.name_scope(gradient_name_scope):
return grad_fn(mock_op, *out_grads)
else:
return grad_fn(mock_op, *out_grads)
pywrap_tfe.TFE_Py_RegisterGradientFunction(_gradient_function)
def _must_record_gradient():
return not pywrap_tfe.TFE_Py_TapeSetIsEmpty()
@tf_export("__internal__.record_gradient", v1=[])
def record_gradient(op_name, inputs, attrs, outputs):
"""Explicitly record the gradient for a given op.
Args:
op_name: The op name as listed in the `OpDef` for the op.
inputs: A list of tensor inputs to the op.
attrs: The op attributes as a flattened list of alternating attribute names
and attribute values.
outputs: A list of tensor outputs from the op.
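Example (a hedged sketch; assumes eager execution with an active tape and
that the op ("Square" here) has a registered gradient; `x`/`y` are
illustrative values and the attrs list is left empty for brevity):
```python
x = tf.constant(3.0)
y = tf.math.square(x)
tf.__internal__.record_gradient("Square", [x], [], [y])
```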
"""
pywrap_tfe.TFE_Py_RecordGradient(op_name, inputs, attrs, outputs,
ops.get_name_scope())
execute.must_record_gradient = _must_record_gradient
execute.record_gradient = record_gradient
def implicit_val_and_grad(f):
"""Returns a function which differentiates f with respect to variables.
The wrapped function returns the value and the gradient of f when called with
the same arguments. The gradient is with respect to all trainable TFE
variables accessed by `f`.
This function is useful when the exact set of variables to differentiate with
is not known ahead of time.
Example:
```python
dense_layer = tf.compat.v1.layers.Dense(1)
def loss(x, y):
return tf.reduce_sum(tf.square(dense_layer(x) - y))
# Obtain the gradient function.
val_grad_fn = tfe.implicit_value_and_gradients(loss)
# Invoke the gradient function with concrete values of x and y.
x = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
y = tf.constant([[10.0], [20.0]])
value, grads_and_vars = val_grad_fn(x, y)
print('Value of loss: %s' % value)
# Apply the gradients to Variables.
optimizer = tf.compat.v1.train.GradientDescentOptimizer(0.1)
optimizer.apply_gradients(grads_and_vars)
```
Args:
f: function to be differentiated. If `f` returns a scalar, this scalar will
be differentiated. If `f` returns a tensor or list of tensors, by default
a scalar will be computed by adding all their values to produce a single
scalar.
Returns:
A function which, when called, returns a tuple pair.
Its first element is the value to which the function evaluates.
Its second element is list of (gradient, variable) pairs.
Raises:
ValueError: if `f` returns None.
"""
# TODO(cais): Remove calls to tf.constant() once the gradients functions
# accept lists and np.ndarrays.
def grad_fn(*args, **kwds):
"""Computes the gradient of the wrapped function."""
this_tape = tape.push_new_tape()
try:
end_node = f(*args, **kwds)
if end_node is None:
raise ValueError("Cannot differentiate a function that returns None; "
"did you forget to return a value from {}?".format(
f.__name__))
finally:
tape.pop_tape(this_tape)
# Note: variables are returned in construction order. This ensures unique
# order across executions.
variables = this_tape.watched_variables()
if not variables:
raise ValueError("No trainable variables were accessed while the "
"function was being computed.")
sources = [v.handle for v in variables]
for s in sources:
if getattr(s, "is_packed", False):
raise ValueError(
"GradientTape.gradient is not supported on packed EagerTensors yet."
)
grad = imperative_grad.imperative_grad(this_tape, nest.flatten(end_node),
sources)
return end_node, list(zip(grad, variables))
return grad_fn
def implicit_grad(f):
"""Returns a function which differentiates f with respect to variables.
The wrapped function returns the gradient of f when called with the same
arguments. The gradient is with respect to all trainable TFE variables
accessed by `f`.
This function is useful when the exact set of variables to differentiate with
is not known ahead of time.
Example:
```python
dense_layer = tf.compat.v1.layers.Dense(1)
def loss(x, y):
return tf.reduce_sum(tf.square(dense_layer(x) - y))
# Obtain the gradient function.
grad_fn = tfe.implicit_gradients(loss)
# Invoke the gradient function with concrete values of x and y.
x = tf.constant([[1.0, 2.0, 3.0], [4.0, 5.0, 6.0]])
y = tf.constant([[10.0], [20.0]])
grads_and_vars = grad_fn(x, y)
# Apply the gradients to Variables.
optimizer = tf.compat.v1.train.GradientDescentOptimizer(0.1)
optimizer.apply_gradients(grads_and_vars)
```
Args:
f: function to be differentiated. If `f` returns a scalar, this scalar will
be differentiated. If `f` returns a tensor or list of tensors, by default
a scalar will be computed by adding all their values to produce a single
scalar.
Returns:
A function which, when called, returns a list of (gradient, variable) pairs.
"""
# TODO(cais): Remove calls to tf.constant() once the gradients functions
# accept lists and np.ndarrays.
def grad_fn(*args, **kwds):
"""Computes the gradient of the wrapped function."""
return implicit_val_and_grad(f)(*args, **kwds)[1]
return grad_fn
def _get_arg_spec(f, params, param_args):
"""The positions of the parameters of f to be differentiated in param_args."""
try:
args = tf_inspect.getfullargspec(f).args
except TypeError as e:
# TypeError can happen when f is a callable object.
if params is None:
return range(len(param_args))
elif all(isinstance(x, int) for x in params):
return params
raise ValueError("Either callable provided is not a function or could not "
"inspect its arguments by name: %s. Original error: %s"
% (f, e))
if params is None:
if not args:
return range(len(param_args))
if args[0] == "self":
return range(len(args) - 1)
else:
return range(len(args))
elif all(isinstance(x, six.string_types) for x in params):
return [args.index(n) for n in params]
elif all(isinstance(x, int) for x in params):
return params
else:
raise ValueError(
"params must be all strings or all integers; got %s." % params)
def gradients_function(f, params=None):
"""Returns a function which differentiates f with respect to params.
Example:
```python
# f(x, y) = (x ^ 3) * y - x * (y ^ 2)
# Therefore, the 1st order derivatives are:
# df / dx = 3 * (x ^ 2) * y - y ^ 2
# df / dy = x ^ 3 - 2 * x * y
# The 2nd order derivatives with respect to x is:
# d^2 f / (dx)^2 = 6 * x * y
def f(x, y):
return x * x * x * y - x * y * y
# Obtain a function that returns 1st order gradients.
grad_fn = tfe.gradients_function(f)
x = 2.0
y = 3.0
# Invoke the 1st order gradient function.
x_grad, y_grad = grad_fn(x, y)
assert x_grad.numpy() == 3 * (2 ** 2) * 3 - 3 ** 2
assert y_grad.numpy() == (2 ** 3) - 2 * 2 * 3
# Obtain a function that returns the 2nd order gradient with respect to x.
gradgrad_fn = tfe.gradients_function(lambda x, y: grad_fn(x, y)[0])
# Invoke the 2nd order gradient function.
x_gradgrad = gradgrad_fn(x, y)[0]
assert x_gradgrad.numpy() == 6 * 2 * 3
# To obtain a callable that returns the gradient(s) of `f` with respect to a
# subset of its inputs, use the `params` keyword argument with
# `gradients_function()`.
ygrad_fn = tfe.gradients_function(f, params=[1])
(y_grad,) = ygrad_fn(x, y)
assert y_grad.numpy() == (2 ** 3) - 2 * 2 * 3
```
Note that only tensors with real or complex dtypes are differentiable.
Args:
f: function to be differentiated. If `f` returns a scalar, this scalar will
be differentiated. If `f` returns a tensor or list of tensors, by default
a scalar will be computed by adding all their values to produce a single
scalar. If desired, the tensors can be elementwise multiplied by the
tensors passed as the `dy` keyword argument to the returned gradient
function.
params: list of parameter names of f or list of integers indexing the
parameters with respect to which we'll differentiate. Passing None
differentiates with respect to all parameters.
Returns:
function which, when called, returns the value of f and the gradient
of `f` with respect to all of `params`. The function takes an extra optional
keyword argument `dy`. Setting it allows computation of vector jacobian
products for vectors other than the vector of ones.
Raises:
ValueError: if the params are not all strings or all integers.
"""
def decorated(*args, **kwds):
"""Computes the gradient of the decorated function."""
_, grad = val_and_grad_function(f, params=params)(*args, **kwds)
return grad
return decorated
def _ensure_unique_tensor_objects(parameter_positions, args):
"""Make each of the parameter_positions in args a unique ops.Tensor object.
Ensure that each parameter is treated independently.
For example:
def f(x, y): return x * y
g = gradients_function(f)
one = tf.constant(1.)
g(one, one) should return [1., 1.]
(even though the two arguments are the same Tensor object).
Args:
parameter_positions: List of indices into args defining the arguments to
differentiate against.
args: A list of arguments to the function to be differentiated.
Returns:
args, possibly edited in-place.
"""
s = set()
for (i, t) in enumerate(args):
if i in parameter_positions:
tid = ops.tensor_id(t)
if tid in s:
args[i] = gen_array_ops.identity(args[i])
else:
s.add(tid)
return args
def val_and_grad_function(f, params=None):
"""Returns a function that computes f and its derivative w.r.t. params.
Example:
```python
# f(x, y) = (x ^ 3) * y - x * (y ^ 2)
# Therefore, the 1st order derivatives are:
# df / dx = 3 * (x ^ 2) * y - y ^ 2
# df / dy = x ^ 3 - 2 * x * y
def f(x, y):
return x * x * x * y - x * y * y
# Obtain a function that returns the function value and the 1st order
# gradients.
val_grads_fn = tfe.value_and_gradients_function(f)
x = 2.0
y = 3.0
# Invoke the value-and-gradients function.
f_val, (x_grad, y_grad) = val_grads_fn(x, y)
assert f_val.numpy() == (2 ** 3) * 3 - 2 * (3 ** 2)
assert x_grad.numpy() == 3 * (2 ** 2) * 3 - 3 ** 2
assert y_grad.numpy() == (2 ** 3) - 2 * 2 * 3
# To obtain a callable that returns the value of `f` and the gradient(s) of
# `f` with respect to a subset of its inputs, use the `params` keyword
# argument with `value_and_gradients_function()`.
val_ygrad_fn = tfe.value_and_gradients_function(f, params=[1])
f_val, (y_grad,) = val_ygrad_fn(x, y)
assert f_val.numpy() == (2 ** 3) * 3 - 2 * (3 ** 2)
assert y_grad.numpy() == (2 ** 3) - 2 * 2 * 3
```
Args:
f: function to be differentiated. If `f` returns a scalar, this scalar will
be differentiated. If `f` returns a tensor or list of tensors, by default
a scalar will be computed by adding all their values to produce a single
scalar. If desired, the tensors can be elementwise multiplied by the
tensors passed as the `dy` keyword argument to the returned gradient
function.
params: list of parameter names of f or list of integers indexing the
parameters with respect to which we'll differentiate. Passing `None`
differentiates with respect to all parameters.
Returns:
function which, when called, returns the value of f and the gradient
of f with respect to all of `params`. The function takes an extra optional
keyword argument "dy". Setting it allows computation of vector jacobian
products for vectors other than the vector of ones.
Raises:
ValueError: if the params are not all strings or all integers.
"""
def decorated(*args, **kwds):
"""Computes the value and gradient of the decorated function."""
dy = kwds.pop("dy", None)
if kwds:
raise ValueError("Functions to be differentiated cannot "
"receive keyword arguments.")
val, vjp = make_vjp(f, params)(*args, **kwds)
return val, vjp(dy=dy)
return decorated
def make_vjp(f, params=None, persistent=True):
"""Returns a function that computes f and its vjp w.r.t.
params.
The term "vjp" here is an abbreviation for vector-jacobian product.
Args:
f: the function to be differentiated.
params: the parameters (numbers or names) to differentiate with respect to.
A value of None will differentiate with respect to all parameters.
persistent: Boolean controlling whether the VJP function can be re-used.
Must be True or False.
Returns:
A function, which when called, returns a tuple (value, vjp), where:
- value is the result of calling f.
- vjp is a function, which takes a vector as an argument and
returns the product of that vector with the Jacobian of f.
Providing no argument to vjp is equivalent to providing a
vector of ones.
For example,
```python
def f(x):
return x * x
wrapped_fn = tfe.make_vjp(f)
result, vjp = wrapped_fn(tf.constant(3.0))
# result is 9.0
vjp() # the vjp function returns 6.0
```
Raises:
ValueError: if `f` returns None.
"""
def decorated(*args, **kwds):
"""Computes the value and gradient of the decorated function."""
parameter_positions = _get_arg_spec(f, params, args)
assert not kwds, "The gradient function can't take keyword arguments."
this_tape = tape.push_new_tape(persistent=persistent)
try:
sources = []
args = [
ops.convert_to_tensor(arg) if i in parameter_positions else arg
for i, arg in enumerate(args)
]
args = _ensure_unique_tensor_objects(parameter_positions, args)
for i in parameter_positions:
if getattr(args[i], "is_packed", False):
raise ValueError(
"GradientTape.gradient is not supported on packed EagerTensors"
"yet.")
sources.append(args[i])
tape.watch(this_tape, args[i])
result = f(*args)
if result is None:
raise ValueError("Cannot differentiate a function that returns None; "
"did you forget to return a value from {}?".format(
f.__name__))
flat_result = nest.flatten(result)
flat_result = [gen_array_ops.identity(x) for x in flat_result]
result = nest.pack_sequence_as(result, flat_result)
finally:
tape.pop_tape(this_tape)
def vjp(dy=None):
if dy is not None:
dy = [ops.convert_to_tensor(x) for x in nest.flatten(dy)]
return imperative_grad.imperative_grad(
this_tape, nest.flatten(result), sources, output_gradients=dy)
return result, vjp
return decorated
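# `flatten_nested_indexed_slices` below collapses an IndexedSlices whose
# `values` field is itself an IndexedSlices (which can arise from nested
# gathers) into a single level, composing the index lookups with
# `array_ops.gather` so that the resulting `values` is a dense Tensor.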
def flatten_nested_indexed_slices(grad):
assert isinstance(grad, indexed_slices.IndexedSlices)
if isinstance(grad.values, ops.Tensor):
return grad
else:
assert isinstance(grad.values, indexed_slices.IndexedSlices)
g = flatten_nested_indexed_slices(grad.values)
return indexed_slices.IndexedSlices(
g.values, array_ops.gather(grad.indices, g.indices), g.dense_shape)
def aggregate_indexed_slices_gradients(grads):
"""Aggregates gradients containing `IndexedSlices`s."""
if len(grads) < 1:
return None
if len(grads) == 1:
return grads[0]
grads = [g for g in grads if g is not None]
# If any gradient is a `Tensor`, sum them up and return a dense tensor
# object.
if any(isinstance(g, ops.Tensor) for g in grads):
return math_ops.add_n(grads)
# The following `_as_indexed_slices_list` casts ids of IndexedSlices into
# int64. It is to make sure the inputs of `concat` all have same the data
# type.
grads = math_ops._as_indexed_slices_list(grads) # pylint: disable=protected-access
grads = [flatten_nested_indexed_slices(x) for x in grads]
# Form IndexedSlices out of the concatenated values and indices.
concat_grad = indexed_slices.IndexedSlices(
array_ops.concat([x.values for x in grads], axis=0),
array_ops.concat([x.indices for x in grads], axis=0),
grads[0].dense_shape)
return concat_grad
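# Sketch of the sparse path above: two IndexedSlices gradients with indices
# [0] and [2] into the same 4-row parameter aggregate into one IndexedSlices
# whose values/indices are the concatenation of both, avoiding densification.
# `_aggregate_grads` below dispatches between this path and a dense `add_n`.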
def _aggregate_grads(gradients):
"""Aggregate gradients from multiple sources.
Args:
gradients: A list of 'Tensor' or 'IndexedSlices' gradients.
Returns:
If 'gradients' only has 'Tensor', returns an aggregated 'Tensor'.
Otherwise returns an aggregated 'IndexedSlices'.
"""
assert gradients, "No gradients to aggregate"
if len(gradients) == 1:
return gradients[0]
if all(isinstance(g, ops.Tensor) for g in gradients):
return gen_math_ops.add_n(gradients)
else:
assert all(
isinstance(g, (ops.Tensor, indexed_slices.IndexedSlices))
for g in gradients)
return aggregate_indexed_slices_gradients(gradients)
def
|
(grad):
"""The number of elements in the `grad` tensor."""
if isinstance(grad, ops.Tensor):
shape_tuple = grad._shape_tuple() # pylint: disable=protected-access
elif isinstance(grad, indexed_slices.IndexedSlices):
shape_tuple = grad.values._shape_tuple() # pylint: disable=protected-access
else:
raise ValueError("`grad` not a Tensor or IndexedSlices.")
if shape_tuple is None or None in shape_tuple:
return 0
return functools.reduce(operator.mul, shape_tuple, 1)
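# `_fast_fill` fills a tensor of `shape` with a scalar `value` via
# `array_ops.fill`, building only two small constants (shape and value)
# rather than materializing a full dense constant.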
def _fast_fill(value, shape, dtype):
return array_ops.fill(
constant_op.constant(shape, dtype=dtypes.int32),
constant_op.constant(value, dtype=dtype))
def _zeros(shape, dtype):
"""Helper to return (possibly cached) zero tensors in eager mode."""
# Note: variants will use _zeros_like
if dtype == dtypes.string or dtype == dtypes.resource:
return None
ctx = context.context()
if not ctx.executing_eagerly():
return array_ops.zeros(shape, dtype)
device = ctx.device_name
if tensor_util.is_tf_type(shape):
shape_key = shape.ref()
else:
shape_key = shape
cache_key = shape_key, dtype, device
cached = ctx.zeros_cache().get(cache_key)
if cached is None:
if dtypes.as_dtype(dtype).is_bool:
value = False
else:
value = 0
cached = _fast_fill(value, shape, dtype)
ctx.zeros_cache().put(cache_key, cached)
return cached
def _ones(shape, dtype):
as_dtype = dtypes.as_dtype(dtype)
if as_dtype == dtypes.string:
return None
if not context.executing_eagerly():
return array_ops.ones(shape, dtype)
if as_dtype.is_bool:
value = True
else:
value = 1
if shape == (): # pylint: disable=g-explicit-bool-comparison
return constant_op.constant(value, dtype=dtype)
return _fast_fill(value, shape, dtype)
_default_vspace = imperative_grad.VSpace(
num_elements_fn=_num_elements,
aggregate_fn=_aggregate_grads,
zeros_fn=_zeros,
ones_fn=_ones,
zeros_like_fn=default_gradient.zeros_like,
ones_like_fn=default_gradient.ones_like,
graph_shape_fn=gen_array_ops.shape)
pywrap_tfe.TFE_Py_RegisterVSpace(_default_vspace)
def _handle_or_self(x):
"""Unwrap resource variable/ndarray to return tensors."""
if resource_variable_ops.is_resource_variable(x):
return x.handle
return x
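# For example, `GradientTape.gradient` maps each resource-variable source to
# its `handle` tensor through this helper, while plain tensors pass through
# unchanged.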
@tf_export("GradientTape", "autodiff.GradientTape", v1=["GradientTape"])
class GradientTape(object):
"""Record operations for automatic differentiation.
Operations are recorded if they are executed within this context manager and
at least one of their inputs is being "watched".
Trainable variables (created by `tf.Variable` or `tf.compat.v1.get_variable`,
where `trainable=True` is default in both cases) are automatically watched.
Tensors can be manually watched by invoking the `watch` method on this context
manager.
For example, consider the function `y = x * x`. The gradient at `x = 3.0` can
be computed as:
>>> x = tf.constant(3.0)
>>> with tf.GradientTape() as g:
... g.watch(x)
... y = x * x
>>> dy_dx = g.gradient(y, x)
>>> print(dy_dx)
tf.Tensor(6.0, shape=(), dtype=float32)
GradientTapes can be nested to compute higher-order derivatives. For example,
>>> x = tf.constant(5.0)
>>> with tf.GradientTape() as g:
... g.watch(x)
... with tf.GradientTape() as gg:
... gg.watch(x)
... y = x * x
... dy_dx = gg.gradient(y, x) # dy_dx = 2 * x
>>> d2y_dx2 = g.gradient(dy_dx, x) # d2y_dx2 = 2
>>> print(dy_dx)
tf.Tensor(10.0, shape=(), dtype=float32)
>>> print(d2y_dx2)
tf.Tensor(2.0, shape=(), dtype=float32)
By default, the resources held by a GradientTape are released as soon as
GradientTape.gradient() method is called. To compute multiple gradients over
the same computation, create a persistent gradient tape. This allows multiple
calls to the gradient() method as resources are released when the tape object
is garbage collected. For example:
>>> x = tf.constant(3.0)
>>> with tf.GradientTape(persistent=True) as g:
... g.watch(x)
... y = x * x
... z = y * y
>>> dz_dx = g.gradient(z, x) # (4*x^3 at x = 3)
>>> print(dz_dx)
tf.Tensor(108.0, shape=(), dtype=float32)
>>> dy_dx = g.gradient(y, x)
>>> print(dy_dx)
tf.Tensor(6.0, shape=(), dtype=float32)
By default GradientTape will automatically watch any trainable variables that
are accessed inside the context. If you want fine grained control over which
variables are watched you can disable automatic tracking by passing
`watch_accessed_variables=False` to the tape constructor:
>>> x = tf.Variable(2.0)
>>> w = tf.Variable(5.0)
>>> with tf.GradientTape(
... watch_accessed_variables=False, persistent=True) as tape:
... tape.watch(x)
... y = x ** 2 # Gradients will be available for `x`.
... z = w ** 3 # No gradients will be available as `w` isn't being watched.
>>> dy_dx = tape.gradient(y, x)
>>> print(dy_dx)
tf.Tensor(4.0, shape=(), dtype=float32)
>>> # No gradients will be available as `w` isn't being watched.
>>> dz_dw = tape.gradient(z, w)
>>> print(dz_dw)
None
Note that when using models you should ensure that your variables exist when
using `watch_accessed_variables=False`. Otherwise it's quite easy to make your
first iteration not have any gradients:
```python
a = tf.keras.layers.Dense(32)
b = tf.keras.layers.Dense(32)
with tf.GradientTape(watch_accessed_variables=False) as tape:
tape.watch(a.variables) # Since `a.build` has not been called at this point
# `a.variables` will return an empty list and the
# tape will not be watching anything.
result = b(a(inputs))
tape.gradient(result, a.variables) # The result of this computation will be
# a list of `None`s since a's variables
# are not being watched.
```
Note that only tensors with real or complex dtypes are differentiable.
"""
def __init__(self, persistent=False, watch_accessed_variables=True):
"""Creates a new GradientTape.
Args:
persistent: Boolean controlling whether a persistent gradient tape
is created. False by default, which means at most one call can
be made to the gradient() method on this object.
watch_accessed_variables: Boolean controlling whether the tape will
automatically `watch` any (trainable) variables accessed while the tape
is active. Defaults to True meaning gradients can be requested from any
result computed in the tape derived from reading a trainable `Variable`.
If False users must explicitly `watch` any `Variable`s they want to
request gradients from.
"""
self._tape = None
self._persistent = persistent
self._watch_accessed_variables = watch_accessed_variables
self._watched_variables = ()
self._recording = False
def __enter__(self):
"""Enters a context inside which operations are recorded on this tape."""
self._push_tape()
return self
def __exit__(self, typ, value, traceback):
"""Exits the recording context, no further operations are traced."""
if self._recording:
self._pop_tape()
def _push_tape(self):
"""Pushes a new tape onto the tape stack."""
if self._recording:
raise ValueError("Tape is still recording, This can happen if you try to "
"re-enter an already-active tape.")
if self._tape is None:
self._tape = tape.push_new_tape(
persistent=self._persistent,
watch_accessed_variables=self._watch_accessed_variables)
else:
tape.push_tape(self._tape)
self._recording = True
def _pop_tape(self):
if not self._recording:
raise ValueError("Tape is not recording.")
tape.pop_tape(self._tape)
self._recording = False
@tf_contextlib.contextmanager
def _ensure_recording(self):
"""Ensures that this tape is recording."""
if not self._recording:
try:
self._push_tape()
yield
finally:
self._pop_tape()
else:
yield
def watch(self, tensor):
"""Ensures that `tensor` is being traced by this tape.
Args:
tensor: a Tensor or list of Tensors.
Raises:
ValueError: if it encounters something that is not a tensor.
"""
for t in nest.flatten(tensor, expand_composites=True):
if not (_pywrap_utils.IsTensor(t) or _pywrap_utils.IsVariable(t)):
raise ValueError("Passed in object of type {}, not tf.Tensor".format(
type(t)))
if not backprop_util.IsTrainable(t):
logging.log_first_n(
logging.WARN, "The dtype of the watched tensor must be "
"floating (e.g. tf.float32), got %r", 5, t.dtype)
if hasattr(t, "handle"):
# There are many variable-like objects, all of them currently have
# `handle` attribute that points to a tensor. If this changes, internals
# of watch_variable need to change as well.
tape.watch_variable(self._tape, t)
else:
tape.watch(self._tape, t)
@tf_contextlib.contextmanager
def stop_recording(self):
"""Temporarily stops recording operations on this tape.
Operations executed while this context manager is active will not be
recorded on the tape. This is useful for reducing the memory used by tracing
all computations.
For example:
>>> x = tf.constant(4.0)
>>> with tf.GradientTape() as tape:
... with tape.stop_recording():
... y = x ** 2
>>> dy_dx = tape.gradient(y, x)
>>> print(dy_dx)
None
Yields:
None
Raises:
RuntimeError: if the tape is not currently recording.
"""
if self._tape is None:
raise RuntimeError(
"Trying to stop recording a tape which is not recording.")
self._pop_tape()
try:
yield
finally:
self._push_tape()
def reset(self):
"""Clears all information stored in this tape.
Equivalent to exiting and reentering the tape context manager with a new
tape. For example, the two following code blocks are equivalent:
```
with tf.GradientTape() as t:
loss = loss_fn()
with tf.GradientTape() as t:
loss += other_loss_fn()
t.gradient(loss, ...) # Only differentiates other_loss_fn, not loss_fn
# The following is equivalent to the above
with tf.GradientTape() as t:
loss = loss_fn()
t.reset()
loss += other_loss_fn()
t.gradient(loss, ...) # Only differentiates other_loss_fn, not loss_fn
```
This is useful if you don't want to exit the context manager for the tape,
or can't because the desired reset point is inside a control flow construct:
```
with tf.GradientTape() as t:
loss = ...
if loss > k:
t.reset()
```
"""
self._pop_tape()
self._tape = None
self._push_tape()
def watched_variables(self):
"""Returns variables watched by this tape in order of construction."""
if self._tape is not None:
self._watched_variables = self._tape.watched_variables()
return self._watched_variables
def gradient(self,
target,
sources,
output_gradients=None,
unconnected_gradients=UnconnectedGradients.NONE):
"""Computes the gradient using operations recorded in context of this tape.
Note: Unless you set `persistent=True` a GradientTape can only be used to
compute one set of gradients (or jacobians).
In addition to Tensors, gradient also supports RaggedTensors. For example,
>>> x = tf.ragged.constant([[1.0, 2.0], [3.0]])
>>> with tf.GradientTape() as g:
... g.watch(x)
... y = x * x
>>> g.gradient(y, x)
<tf.RaggedTensor [[2.0, 4.0], [6.0]]>
Args:
target: a list or nested structure of Tensors or Variables or
CompositeTensors to be differentiated.
sources: a list or nested structure of Tensors or Variables or
CompositeTensors. `target` will be differentiated against elements in
`sources`.
output_gradients: a list of gradients, one for each differentiable
element of target. Defaults to None.
unconnected_gradients: a value which can either hold 'none' or 'zero' and
alters the value which will be returned if the target and sources are
unconnected. The possible values and effects are detailed in
'UnconnectedGradients' and it defaults to 'none'.
Returns:
a list or nested structure of Tensors (or IndexedSlices, or None, or
CompositeTensor), one for each element in `sources`. Returned structure
is the same as the structure of `sources`.
Raises:
RuntimeError: If called on a used, non-persistent tape.
RuntimeError: If called inside the context of the tape.
TypeError: If the target is a None object.
ValueError: If the target is a variable or if unconnected gradients is
called with an unknown value.
"""
if self._tape is None:
raise RuntimeError("A non-persistent GradientTape can only be used to "
"compute one set of gradients (or jacobians)")
if self._recording:
if not self._persistent:
self._pop_tape()
else:
logging.log_first_n(
logging.WARN, "Calling GradientTape.gradient on a persistent "
"tape inside its context is significantly less "
"efficient than calling it outside the context (it "
"causes the gradient ops to be recorded on the "
"tape, leading to increased CPU and memory usage). "
"Only call GradientTape.gradient inside the "
"context if you actually want to trace the "
"gradient in order to compute higher order "
"derivatives.", 1)
if target is None:
raise TypeError("Argument `target` should be a list or nested structure"
" of Tensors, Variables or CompositeTensors to be "
"differentiated, but received None.")
flat_targets = []
for t in nest.flatten(target):
if not backprop_util.IsTrainable(t):
logging.vlog(
logging.WARN, "The dtype of the target tensor must be "
"floating (e.g. tf.float32) when calling GradientTape.gradient, "
"got %r", t.dtype)
if resource_variable_ops.is_resource_variable(t):
with self:
t = ops.convert_to_tensor(t)
flat_targets.append(t)
flat_targets = composite_tensor_gradient.get_flat_tensors_for_gradients(
flat_targets)
flat_sources = nest.flatten(sources)
for t in flat_sources:
if not backprop_util.IsTrainable(t):
logging.vlog(
logging.WARN, "The dtype of the source tensor must be "
"floating (e.g. tf.float32) when calling GradientTape.gradient, "
"got %r", t.dtype)
if getattr(t, "is_packed", False):
raise ValueError(
"GradientTape.gradient is not supported on packed EagerTensors yet."
)
flat_sources_raw = flat_sources
flat_sources = composite_tensor_gradient.get_flat_tensors_for_gradients(
flat_sources)
flat_sources = [_handle_or_self(x) for x in flat_sources]
if output_gradients is not None:
output_gradients = nest.flatten(output_gradients)
output_gradients = (
composite_tensor_gradient.get_flat_tensors_for_gradients(
output_gradients))
output_gradients = [None if x is None else ops.convert_to_tensor(x)
for x in output_gradients]
flat_grad = imperative_grad.imperative_grad(
self._tape,
flat_targets,
flat_sources,
output_gradients=output_gradients,
sources_raw=flat_sources_raw,
unconnected_gradients=unconnected_gradients)
if not self._persistent:
# Keep track of watched variables before setting tape to None
self._watched_variables = self._tape.watched_variables()
self._tape = None
flat_grad = composite_tensor_gradient.replace_flat_tensors_for_gradients(
flat_sources_raw, flat_grad)
grad = nest.pack_sequence_as(sources, flat_grad)
return grad
def jacobian(self,
target,
sources,
unconnected_gradients=UnconnectedGradients.NONE,
parallel_iterations=None,
experimental_use_pfor=True):
"""Computes the jacobian using operations recorded in context of this tape.
Note: Unless you set `persistent=True` a GradientTape can only be used to
compute one set of gradients (or jacobians).
Note: By default the jacobian implementation uses parallel for (pfor), which
creates a tf.function under the hood for each jacobian call. For better
performance, and to avoid recompilation and vectorization rewrites on each
call, enclose GradientTape code in @tf.function.
See [wikipedia
article](http://en.wikipedia.org/wiki/jacobian_matrix_and_determinant)
for the definition of a Jacobian.
Example usage:
```python
with tf.GradientTape() as g:
x = tf.constant([1.0, 2.0])
g.watch(x)
y = x * x
jacobian = g.jacobian(y, x)
# jacobian value is [[2., 0.], [0., 4.]]
```
Args:
target: Tensor to be differentiated.
sources: a list or nested structure of Tensors or Variables. `target`
will be differentiated against elements in `sources`.
unconnected_gradients: a value which can either hold 'none' or 'zero' and
alters the value which will be returned if the target and sources are
unconnected. The possible values and effects are detailed in
'UnconnectedGradients' and it defaults to 'none'.
parallel_iterations: A knob to control how many iterations are dispatched
in parallel. This knob can be used to control the total memory usage.
experimental_use_pfor: If true, vectorizes the jacobian computation. Else
falls back to a sequential while_loop. Vectorization can sometimes fail
or lead to excessive memory usage. This option can be used to disable
vectorization in such cases.
Returns:
A list or nested structure of Tensors (or None), one for each element in
`sources`. Returned structure is the same as the structure of `sources`.
Note if any gradient is sparse (IndexedSlices), jacobian function
currently makes it dense and returns a Tensor instead. This may change in
the future.
Raises:
RuntimeError: If called on a used, non-persistent tape.
RuntimeError: If called on a non-persistent tape with eager execution
enabled and without enabling experimental_use_pfor.
ValueError: If vectorization of jacobian computation fails.
"""
if self._tape is None:
raise RuntimeError("A non-persistent GradientTape can only be used to "
"compute one set of gradients (or jacobians)")
flat_sources = nest.flatten(sources)
target_static_shape = target.shape
target_shape = array_ops.shape(target)
# Note that we push and pop the tape here and below. This is needed since we
# need gradients through the enclosed operations.
with self._ensure_recording():
target = array_ops.reshape(target, [-1])
def loop_fn(i):
with self._ensure_recording():
y = array_ops.gather(target, i)
return self.gradient(y, flat_sources,
unconnected_gradients=unconnected_gradients)
try:
target_size = int(target.shape[0])
except TypeError:
target_size = array_ops.shape(target)[0]
if experimental_use_pfor:
try:
output = pfor_ops.pfor(loop_fn, target_size,
parallel_iterations=parallel_iterations)
except ValueError as err:
six.reraise(
ValueError,
ValueError(
str(err) + "\nEncountered an exception while vectorizing the "
"jacobian computation. Vectorization can be disabled by setting"
" experimental_use_pfor to False."),
sys.exc_info()[2])
else:
if context.executing_eagerly() and not self._persistent:
raise RuntimeError(
"GradientTape must be created with persistent=True"
" to compute the jacobian with eager execution enabled and with "
" experimental_use_pfor set to False.")
output = pfor_ops.for_loop(
loop_fn, [target.dtype] * len(flat_sources), target_size,
parallel_iterations=parallel_iterations)
for i, out in enumerate(output):
if out is not None:
new_shape = array_ops.concat(
[target_shape, array_ops.shape(out)[1:]], axis=0)
out = array_ops.reshape(out, new_shape)
if context.executing_eagerly():
out.set_shape(target_static_shape.concatenate(flat_sources[i].shape))
output[i] = out
return nest.pack_sequence_as(sources, output)
def batch_jacobian(self,
target,
source,
unconnected_gradients=UnconnectedGradients.NONE,
parallel_iterations=None,
experimental_use_pfor=True):
"""Computes and stacks per-example jacobians.
See [wikipedia article](http://en.wikipedia.org/wiki/jacobian_matrix_and_determinant)
for the definition of a Jacobian. This function is essentially an efficient
implementation of the following:
`tf.stack([self.jacobian(y[i], x[i]) for i in range(x.shape[0])])`.
Note that compared to `GradientTape.jacobian` which computes gradient of
each output value w.r.t each input value, this function is useful when
`target[i,...]` is independent of `source[j,...]` for `j != i`. This
assumption allows more efficient computation as compared to
`GradientTape.jacobian`. The output, as well as intermediate activations,
are lower dimensional and avoid a bunch of redundant zeros which would
result in the jacobian computation given the independence assumption.
Note: Unless you set `persistent=True` a GradientTape can only be used to
compute one set of gradients (or jacobians).
Note: By default the batch_jacobian implementation uses parallel for (pfor),
which creates a tf.function under the hood for each batch_jacobian call.
For better performance, and to avoid recompilation and vectorization
rewrites on each call, enclose GradientTape code in @tf.function.
Example usage:
```python
with tf.GradientTape() as g:
x = tf.constant([[1., 2.], [3., 4.]], dtype=tf.float32)
g.watch(x)
y = x * x
batch_jacobian = g.batch_jacobian(y, x)
# batch_jacobian is [[[2, 0], [0, 4]], [[6, 0], [0, 8]]]
```
Args:
target: A tensor with rank 2 or higher and with shape [b, y1, ..., y_n].
`target[i,...]` should only depend on `source[i,...]`.
source: A tensor with rank 2 or higher and with shape [b, x1, ..., x_m].
unconnected_gradients: a value which can either hold 'none' or 'zero' and
alters the value which will be returned if the target and sources are
unconnected. The possible values and effects are detailed in
'UnconnectedGradients' and it defaults to 'none'.
parallel_iterations: A knob to control how many iterations are dispatched
in parallel. This knob can be used to control the total memory usage.
experimental_use_pfor: If true, uses pfor for computing the Jacobian. Else
uses a tf.while_loop.
Returns:
A tensor `t` with shape [b, y_1, ..., y_n, x1, ..., x_m] where `t[i, ...]`
is the jacobian of `target[i, ...]` w.r.t. `source[i, ...]`, i.e. stacked
per-example jacobians.
Raises:
RuntimeError: If called on a used, non-persistent tape.
RuntimeError: If called on a non-persistent tape with eager execution
enabled and without enabling experimental_use_pfor.
ValueError: If vectorization of jacobian computation fails or if first
dimension of `target` and `source` do not match.
"""
if self._tape is None:
raise RuntimeError("A non-persistent GradientTape can only be used to"
"compute one set of gradients (or jacobians)")
target_shape = target.shape
if target_shape.rank is None:
dim = tensor_shape.Dimension(None)
else:
dim = target_shape.dims[0]
if not (target_shape.with_rank_at_least(2) and
source.shape.with_rank_at_least(2) and
dim.is_compatible_with(source.shape[0])):
raise ValueError(
"Need first dimension of target shape (%s) and "
"source shape (%s) to match." % (target.shape, source.shape))
if target_shape.is_fully_defined():
batch_size = int(target_shape[0])
target_row_size = target_shape.num_elements() // batch_size
else:
target_shape = array_ops.shape(target)
batch_size = target_shape[0]
target_row_size = array_ops.size(target) // batch_size
source_shape = array_ops.shape(source)
# Flatten target to 2-D.
# Note that we push and pop the tape here and below. This is needed since we
# need gradients through the enclosed operations.
with self._ensure_recording():
with ops.control_dependencies(
[check_ops.assert_equal(batch_size, source_shape[0])]):
target = array_ops.reshape(target, [batch_size, target_row_size])
run_once = False
def loop_fn(i):
nonlocal run_once
if run_once and not self._persistent:
if parallel_iterations is not None:
raise RuntimeError(
"GradientTape must be created with persistent=True"
" to compute the batch_jacobian with parallel_iterations.")
else:
raise RuntimeError(
"GradientTape must be created with persistent=True"
" to compute the batch_jacobian.")
run_once = True
with self._ensure_recording():
y = array_ops.gather(target, i, axis=1)
return self.gradient(y, source,
unconnected_gradients=unconnected_gradients)
if experimental_use_pfor:
try:
output = pfor_ops.pfor(loop_fn, target_row_size,
parallel_iterations=parallel_iterations)
except ValueError as err:
six.reraise(
ValueError,
ValueError(
str(err) + "\nEncountered an exception while vectorizing the "
"batch_jacobian computation. Vectorization can be disabled by "
"setting experimental_use_pfor to False."),
sys.exc_info()[2])
else:
if context.executing_eagerly() and not self._persistent:
raise RuntimeError(
"GradientTape must be created with persistent=True"
" to compute the batch_jacobian with eager execution enabled and "
" with experimental_use_pfor set to False.")
output = pfor_ops.for_loop(loop_fn, target.dtype, target_row_size,
parallel_iterations=parallel_iterations)
new_shape = array_ops.concat([target_shape, source_shape[1:]], axis=0)
if output is None:
# Note that this block is returning zeros when it could use `None` to
# represent unconnected gradients. This is to maintain compatibility with
# the previous behavior, which ignored `unconnected_gradients`.
output = array_ops.zeros(new_shape, target.dtype)
return output
else:
output = array_ops.reshape(output,
[target_row_size, batch_size, -1])
output = array_ops.transpose(output, [1, 0, 2])
output = array_ops.reshape(output, new_shape)
return output
|
_num_elements
|
index.test.js
|
import * as sel from "../index";
import { MOCK_STATE } from "./mock_state";
describe("test index selector", () => {
test("testing selectorMap", () => {
expect(sel.selectorMap()).toEqual(expect.any(Function));
expect(sel.selectorMap({
email: sel.email,
|
});
|
isAdmin: sel.isAdmin
})(MOCK_STATE)).toEqual({ "email": "testme@email.com", "isAdmin": true });
});
|
math.rs
|
#[allow(dead_code)]
pub mod math {
# [rustfmt :: skip] use ethcontract as ethcontract ;
#[doc = "Generated by `ethcontract`"]
#[derive(Clone)]
pub struct Contract {
methods: Methods,
}
impl Contract {
#[doc = r" Retrieves the raw contract instance used to generate the type safe"]
#[doc = r" API for this contract."]
pub fn raw_contract() -> &'static self::ethcontract::Contract {
use self::ethcontract::common::artifact::truffle::TruffleLoader;
use self::ethcontract::private::lazy_static;
use self::ethcontract::Contract;
lazy_static! {
pub static ref CONTRACT: Contract = {
# [allow (unused_mut)] let mut contract = TruffleLoader :: new () . load_contract_from_str ("{\"contractName\":\"Math\",\"abi\":[],\"bytecode\":\"60566037600b82828239805160001a607314602a57634e487b7160e01b600052600060045260246000fd5b30600052607381538281f3fe73000000000000000000000000000000000000000030146080604052600080fdfea2646970667358221220c197dd04e81d5f5b8f7425e916cdcd5a0a46e9402ab1e1383e047ecbd0a38ace64736f6c63430008030033\",\"networks\":{},\"devdoc\":{\"details\":null,\"methods\":{}},\"userdoc\":{\"details\":null,\"methods\":{}}}") . expect ("valid contract JSON") ;
contract
};
}
&CONTRACT
}
#[doc = r" Creates a new contract instance with the specified `web3`"]
#[doc = r" provider at the given `Address`."]
#[doc = r""]
#[doc = r" Note that this does not verify that a contract with a matching"]
#[doc = r" `Abi` is actually deployed at the given address."]
pub fn
|
<F, B, T>(
web3: &self::ethcontract::web3::api::Web3<T>,
address: self::ethcontract::Address,
) -> Self
where
F: std::future::Future<
Output = Result<self::ethcontract::json::Value, self::ethcontract::web3::Error>,
> + Send
+ 'static,
B: std::future::Future<
Output = Result<
Vec<Result<self::ethcontract::json::Value, self::ethcontract::web3::Error>>,
self::ethcontract::web3::Error,
>,
> + Send
+ 'static,
T: self::ethcontract::web3::Transport<Out = F>
+ self::ethcontract::web3::BatchTransport<Batch = B>
+ Send
+ Sync
+ 'static,
{
Contract::with_deployment_info(web3, address, None)
}
#[doc = r" Creates a new contract instance with the specified `web3` provider with"]
#[doc = r" the given `Abi` at the given `Address` and an optional transaction hash."]
#[doc = r" This hash is used to retrieve contract related information such as the"]
#[doc = r" creation block (which is useful for fetching all historic events)."]
#[doc = r""]
#[doc = r" Note that this does not verify that a contract with a matching `Abi` is"]
#[doc = r" actually deployed at the given address nor that the transaction hash,"]
#[doc = r" when provided, is actually for this contract deployment."]
pub fn with_deployment_info<F, B, T>(
web3: &self::ethcontract::web3::api::Web3<T>,
address: self::ethcontract::Address,
deployment_information: Option<ethcontract::common::DeploymentInformation>,
) -> Self
where
F: std::future::Future<
Output = Result<self::ethcontract::json::Value, self::ethcontract::web3::Error>,
> + Send
+ 'static,
B: std::future::Future<
Output = Result<
Vec<Result<self::ethcontract::json::Value, self::ethcontract::web3::Error>>,
self::ethcontract::web3::Error,
>,
> + Send
+ 'static,
T: self::ethcontract::web3::Transport<Out = F>
+ self::ethcontract::web3::BatchTransport<Batch = B>
+ Send
+ Sync
+ 'static,
{
use self::ethcontract::transport::DynTransport;
use self::ethcontract::web3::api::Web3;
use self::ethcontract::Instance;
let transport = DynTransport::new(web3.transport().clone());
let web3 = Web3::new(transport);
let abi = Self::raw_contract().abi.clone();
let instance = Instance::with_deployment_info(web3, abi, address, deployment_information);
Contract::from_raw(instance)
}
#[doc = r" Creates a contract from a raw instance."]
fn from_raw(instance: self::ethcontract::dyns::DynInstance) -> Self {
let methods = Methods { instance };
Contract { methods }
}
#[doc = r" Returns the contract address being used by this instance."]
pub fn address(&self) -> self::ethcontract::Address {
self.raw_instance().address()
}
#[doc = r" Returns the deployment information of the contract"]
#[doc = r" if it is known, `None` otherwise."]
pub fn deployment_information(&self) -> Option<ethcontract::common::DeploymentInformation> {
self.raw_instance().deployment_information()
}
#[doc = r" Returns a reference to the default method options used by this"]
#[doc = r" contract."]
pub fn defaults(&self) -> &self::ethcontract::contract::MethodDefaults {
&self.raw_instance().defaults
}
#[doc = r" Returns a mutable reference to the default method options used"]
#[doc = r" by this contract."]
pub fn defaults_mut(&mut self) -> &mut self::ethcontract::contract::MethodDefaults {
&mut self.raw_instance_mut().defaults
}
#[doc = r" Returns a reference to the raw runtime instance used by this"]
#[doc = r" contract."]
pub fn raw_instance(&self) -> &self::ethcontract::dyns::DynInstance {
&self.methods.instance
}
#[doc = r" Returns a mutable reference to the raw runtime instance used by"]
#[doc = r" this contract."]
fn raw_instance_mut(&mut self) -> &mut self::ethcontract::dyns::DynInstance {
&mut self.methods.instance
}
}
impl std::fmt::Debug for Contract {
fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
f.debug_tuple(stringify!(Math))
.field(&self.address())
.finish()
}
}
impl Contract {
#[doc = "Generated by `ethcontract`"]
#[allow(clippy::too_many_arguments)]
pub fn builder<F, B, T>(
web3: &self::ethcontract::web3::api::Web3<T>,
) -> self::ethcontract::dyns::DynDeployBuilder<Self>
where
F: std::future::Future<
Output = Result<self::ethcontract::json::Value, self::ethcontract::web3::Error>,
> + Send
+ 'static,
B: std::future::Future<
Output = Result<
Vec<Result<self::ethcontract::json::Value, self::ethcontract::web3::Error>>,
self::ethcontract::web3::Error,
>,
> + Send
+ 'static,
T: self::ethcontract::web3::Transport<Out = F>
+ self::ethcontract::web3::BatchTransport<Batch = B>
+ Send
+ Sync
+ 'static,
{
use self::ethcontract::contract::DeployBuilder;
use self::ethcontract::dyns::DynTransport;
use self::ethcontract::web3::api::Web3;
let transport = DynTransport::new(web3.transport().clone());
let web3 = Web3::new(transport);
let bytecode = Self::raw_contract().bytecode.clone();
DeployBuilder::new(web3, bytecode, ()).expect("valid deployment args")
}
}
impl self::ethcontract::contract::Deploy<self::ethcontract::dyns::DynTransport> for Contract {
type Context = self::ethcontract::common::Bytecode;
fn bytecode(cx: &Self::Context) -> &self::ethcontract::common::Bytecode {
cx
}
fn abi(_: &Self::Context) -> &self::ethcontract::common::Abi {
&Self::raw_contract().abi
}
fn from_deployment(
web3: self::ethcontract::dyns::DynWeb3,
address: self::ethcontract::Address,
transaction_hash: self::ethcontract::H256,
_: Self::Context,
) -> Self {
Self::with_deployment_info(&web3, address, Some(transaction_hash.into()))
}
}
#[derive(Clone)]
struct Methods {
instance: self::ethcontract::dyns::DynInstance,
}
#[derive(Clone, Copy)]
struct Signatures;
}
pub use self::math::Contract as Math;
|
at
|
0001_initial.py
|
# Generated by Django 3.1.3 on 2021-03-13 11:57
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
|
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice_text', models.CharField(max_length=1000)),
('is_correct', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Course',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(default='online course', max_length=30)),
('image', models.ImageField(upload_to='course_images/')),
('description', models.CharField(max_length=1000)),
('pub_date', models.DateField(null=True)),
('total_enrollment', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Enrollment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('date_enrolled', models.DateField(default=django.utils.timezone.now)),
('mode', models.CharField(choices=[('audit', 'Audit'), ('honor', 'Honor'), ('BETA', 'BETA')], default='audit', max_length=5)),
('rating', models.FloatField(default=5.0)),
('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='onlinecourse.course')),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Submission',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choices', models.ManyToManyField(to='onlinecourse.Choice')),
('enrollment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='onlinecourse.enrollment')),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=1000)),
('grade', models.IntegerField(default=1)),
('course', models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='onlinecourse.course')),
],
),
migrations.CreateModel(
name='Lesson',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(default='title', max_length=200)),
('order', models.IntegerField(default=0)),
('content', models.TextField()),
('course', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='onlinecourse.course')),
],
),
migrations.CreateModel(
name='Learner',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('occupation', models.CharField(choices=[('student', 'Student'), ('developer', 'Developer'), ('data_scientist', 'Data Scientist'), ('dba', 'Database Admin')], default='student', max_length=20)),
('social_link', models.URLField()),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Instructor',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_time', models.BooleanField(default=True)),
('total_learners', models.IntegerField()),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.AddField(
model_name='course',
name='instructors',
field=models.ManyToManyField(to='onlinecourse.Instructor'),
),
migrations.AddField(
model_name='course',
name='users',
field=models.ManyToManyField(through='onlinecourse.Enrollment', to=settings.AUTH_USER_MODEL),
),
migrations.AddField(
model_name='choice',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='onlinecourse.question'),
),
]
|
|
config.rs
|
// Copyright 2020 IOTA Stiftung
//
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
// the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
// an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and limitations under the License.
use bee_network::{Address, Url};
use async_std::task::block_on;
pub struct ConfigBuilder {
host_addr: Option<Address>,
peers: Vec<Url>,
}
impl ConfigBuilder {
pub fn new() -> Self {
|
host_addr: None,
peers: vec![],
}
}
pub fn with_host_addr(mut self, host_addr: Address) -> Self {
self.host_addr.replace(host_addr);
self
}
pub fn with_peer_url(mut self, peer_url: Url) -> Self {
self.peers.push(peer_url);
self
}
pub fn finish(self) -> Config {
Config {
host_addr: self
.host_addr
.unwrap_or_else(|| block_on(Address::from_addr_str("localhost:1337")).unwrap()),
peers: self.peers,
}
}
}
#[derive(Clone)]
pub struct Config {
pub host_addr: Address,
pub peers: Vec<Url>,
}
impl Config {
pub fn build() -> ConfigBuilder {
ConfigBuilder::new()
}
}
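// Minimal usage sketch (the address string is illustrative, mirroring the
// default applied in `finish()` above):
//
//     let config = Config::build()
//         .with_host_addr(block_on(Address::from_addr_str("localhost:1337")).unwrap())
//         .finish();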
|
Self {
|
lockwaiter_test.go
|
// Copyright 2019-present PingCAP, Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package lockwaiter
import (
"sync"
"testing"
"time"
. "github.com/pingcap/check"
deadlockPb "github.com/pingcap/kvproto/pkg/deadlock"
"github.com/pingcap/log"
"github.com/pingcap/tidb/store/mockstore/unistore/config"
)
func
|
(t *testing.T) {
TestingT(t)
}
var _ = Suite(&testLockwaiter{})
type testLockwaiter struct{}
func (t *testLockwaiter) TestLockwaiterBasic(c *C) {
mgr := NewManager(&config.DefaultConf)
keyHash := uint64(100)
mgr.NewWaiter(1, 2, keyHash, 10)
// basic check queue and waiter
q := mgr.waitingQueues[keyHash]
c.Assert(q, NotNil)
waiter := q.waiters[0]
c.Assert(waiter.startTS, Equals, uint64(1))
c.Assert(waiter.LockTS, Equals, uint64(2))
c.Assert(waiter.KeyHash, Equals, uint64(100))
// check ready waiters
keysHash := make([]uint64, 0, 10)
keysHash = append(keysHash, keyHash)
rdyWaiter, _ := q.getOldestWaiter()
c.Assert(rdyWaiter.startTS, Equals, uint64(1))
c.Assert(rdyWaiter.LockTS, Equals, uint64(2))
c.Assert(rdyWaiter.KeyHash, Equals, uint64(100))
// basic wake up test
waiter = mgr.NewWaiter(3, 2, keyHash, 10)
mgr.WakeUp(2, 222, keysHash)
res := <-waiter.ch
c.Assert(res.CommitTS, Equals, uint64(222))
c.Assert(len(q.waiters), Equals, 0)
q = mgr.waitingQueues[keyHash]
// verify queue deleted from map
c.Assert(q, IsNil)
// basic wake up for deadlock test
waiter = mgr.NewWaiter(3, 4, keyHash, 10)
resp := &deadlockPb.DeadlockResponse{}
resp.Entry.Txn = 3
resp.Entry.WaitForTxn = 4
resp.Entry.KeyHash = keyHash
resp.DeadlockKeyHash = 30192
mgr.WakeUpForDeadlock(resp)
res = <-waiter.ch
c.Assert(res.DeadlockResp, NotNil)
c.Assert(res.DeadlockResp.Entry.Txn, Equals, uint64(3))
c.Assert(res.DeadlockResp.Entry.WaitForTxn, Equals, uint64(4))
c.Assert(res.DeadlockResp.Entry.KeyHash, Equals, keyHash)
c.Assert(res.DeadlockResp.DeadlockKeyHash, Equals, uint64(30192))
q = mgr.waitingQueues[4]
// verify queue deleted from map
c.Assert(q, IsNil)
}
func (t *testLockwaiter) TestLockwaiterConcurrent(c *C) {
mgr := NewManager(&config.DefaultConf)
wg := &sync.WaitGroup{}
endWg := &sync.WaitGroup{}
waitForTxn := uint64(100)
commitTs := uint64(199)
deadlockKeyHash := uint64(299)
numbers := uint64(10)
lock := sync.RWMutex{}
for i := uint64(0); i < numbers; i++ {
wg.Add(1)
endWg.Add(1)
go func(num uint64) {
defer endWg.Done()
waiter := mgr.NewWaiter(num, waitForTxn, num*10, 100*time.Millisecond)
// the goroutine with num == numbers-1 calls CleanUp on its waiter, so its result times out
if num == numbers-1 {
mgr.CleanUp(waiter)
wg.Done()
res := waiter.Wait()
c.Assert(res.WakeupSleepTime, Equals, WaitTimeout)
c.Assert(res.CommitTS, Equals, uint64(0))
c.Assert(res.DeadlockResp, IsNil)
} else {
wg.Done()
res := waiter.Wait()
// even woken up by commit
if num%2 == 0 {
c.Assert(res.CommitTS, Equals, commitTs)
} else {
// odd woken up by deadlock
c.Assert(res.DeadlockResp, NotNil)
lock.RLock()
c.Assert(res.DeadlockResp.DeadlockKeyHash, Equals, deadlockKeyHash)
lock.RUnlock()
}
}
}(i)
}
wg.Wait()
keyHashes := make([]uint64, 0, 4)
resp := &deadlockPb.DeadlockResponse{}
for i := uint64(0); i < numbers; i++ {
keyHashes = keyHashes[:0]
if i%2 == 0 {
log.S().Infof("wakeup i=%v", i)
mgr.WakeUp(waitForTxn, commitTs, append(keyHashes, i*10))
} else {
log.S().Infof("deadlock wakeup i=%v", i)
lock.Lock()
resp.DeadlockKeyHash = deadlockKeyHash
lock.Unlock()
resp.Entry.Txn = i
resp.Entry.WaitForTxn = waitForTxn
resp.Entry.KeyHash = i * 10
mgr.WakeUpForDeadlock(resp)
}
}
endWg.Wait()
}
|
TestT
|
about.tsx
|
import * as React from 'react';
import Head from 'docs/src/modules/components/Head';
import { ThemeProvider as MuiThemeProvider } from '@mui/material/styles';
import Avatar from '@mui/material/Avatar';
import Box from '@mui/material/Box';
import Button from '@mui/material/Button';
import Container from '@mui/material/Container';
import Divider from '@mui/material/Divider';
import IconButton from '@mui/material/IconButton';
import Grid from '@mui/material/Grid';
import Paper, { PaperProps } from '@mui/material/Paper';
import Typography from '@mui/material/Typography';
import Tooltip from '@mui/material/Tooltip';
import KeyboardArrowRightRounded from '@mui/icons-material/KeyboardArrowRightRounded';
import TwitterIcon from '@mui/icons-material/Twitter';
import GitHubIcon from '@mui/icons-material/GitHub';
import Link from 'docs/src/modules/components/Link';
import AppHeader from 'docs/src/layouts/AppHeader';
import References, { CORE_CUSTOMERS } from 'docs/src/components/home/References';
import HeroEnd from 'docs/src/components/home/HeroEnd';
import AppFooter from 'docs/src/layouts/AppFooter';
import MuiStatistics from 'docs/src/components/home/MuiStatistics';
import GradientText from 'docs/src/components/typography/GradientText';
import { brandingDarkTheme } from 'docs/src/modules/brandingTheme';
import ROUTES from 'docs/src/route';
import IconImage from 'docs/src/components/icon/IconImage';
import ForumRoundedIcon from '@mui/icons-material/ForumRounded';
import PeopleRoundedIcon from '@mui/icons-material/PeopleRounded';
import LocalAtmRoundedIcon from '@mui/icons-material/LocalAtmRounded';
import BrandingProvider from 'docs/src/BrandingProvider';
import AppHeaderBanner from 'docs/src/components/banner/AppHeaderBanner';
interface Profile {
/**
* Image URL.
*/
src: string;
name: string;
/**
* Role, what are you working on?
*/
title: string;
/**
* Country where you live, ISO 3166-1.
*/
locationCountry: string; // https://flagpedia.net/download/api
/**
* Lives in
*/
location?: string;
/**
* Short summary about you
*/
about?: string;
github?: string;
twitter?: string;
}
const Person = (props: Profile & { sx?: PaperProps['sx'] }) => {
return (
<Paper variant="outlined" sx={{ p: 2, height: '100%', ...props.sx }}>
<Box
sx={{
display: 'flex',
alignItems: 'flex-start',
flexWrap: 'wrap',
'& > div': { minWidth: 'clamp(0px, (150px - 100%) * 999 ,100%)' },
}}
>
<Tooltip
title={props.location || false}
placement="right-end"
PopperProps={{
popperOptions: {
modifiers: [
{
name: 'offset',
options: {
offset: [3, 2],
},
},
],
},
}}
>
<Box sx={{ position: 'relative', display: 'inline-block' }}>
<Avatar
variant="rounded"
imgProps={{
width: '70',
height: '70',
loading: 'lazy',
}}
src={props.src}
alt={props.name}
{...(props.src.startsWith('https://avatars.githubusercontent.com') && {
src: `${props.src}?s=70`,
srcSet: `${props.src}?s=140 2x`,
})}
sx={{
width: 70,
height: 70,
backgroundColor: (theme) =>
theme.palette.mode === 'dark' ? 'primary.700' : 'primary.100',
borderRadius: 1,
}}
/>
<Box
sx={{
position: 'absolute',
bottom: 0,
right: 0,
transform: 'translateX(50%)',
boxShadow: '0px 4px 20px rgba(61, 71, 82, 0.25)',
width: 24,
height: 24,
border: '2px solid #fff',
backgroundColor: '#fff',
borderRadius: 40,
overflow: 'hidden',
display: 'flex',
justifyContent: 'center',
}}
>
<img
loading="lazy"
height="20"
src={`https://flagcdn.com/${props.locationCountry}.svg`}
alt=""
/>
</Box>
</Box>
</Tooltip>
<Box mx="auto" height={15} />
<Box sx={{ mt: -0.5, mr: -0.5 }}>
{props.github && (
<IconButton
aria-label={`${props.name} github`}
component="a"
href={`https://github.com/${props.github}`}
target="_blank"
rel="noreferrer noopener"
>
<GitHubIcon fontSize="small" sx={{ color: 'grey.500' }} />
</IconButton>
)}
{props.twitter && (
<IconButton
aria-label={`${props.name} twitter`}
component="a"
href={`https://twitter.com/${props.twitter}`}
target="_blank"
rel="noreferrer noopener"
>
<TwitterIcon fontSize="small" sx={{ color: 'grey.500' }} />
</IconButton>
)}
</Box>
</Box>
<Typography variant="body2" fontWeight="bold" sx={{ mt: 2, mb: 0.5 }}>
{props.name}
</Typography>
<Typography variant="body2" color="text.secondary">
{props.title}
</Typography>
{props.about && (
<Divider
sx={{
my: 1,
borderColor: (theme) =>
theme.palette.mode === 'dark' ? 'primaryDark.400' : 'grey.100',
}}
/>
)}
{props.about && (
<Typography variant="body2" color="text.secondary">
{props.about}
</Typography>
)}
</Paper>
);
};
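// Illustrative usage of `Person` (values are placeholders, not real data):
// <Person name="Jane Doe" src="/static/branding/about/jane.png"
//         title="Engineer" locationCountry="fr" github="janedoe" />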
const Widget = ({
children,
title,
icon,
}: {
children: React.ReactNode;
title: string;
icon: React.ReactElement;
}) => {
return (
<Paper variant="outlined" sx={{ height: '100%', px: 2, pt: 2, pb: 1.5 }}>
<Typography component="div" variant="body2" fontWeight="bold" sx={{ mb: 1 }}>
<Box sx={{ display: 'inline-block', lineHeight: 0, verticalAlign: 'bottom', mr: 1 }}>
{icon}
</Box>
{title}
</Typography>
{children}
</Paper>
);
};
const teamMembers: Array<Profile> = [
{
src: '/static/branding/about/olivier.png',
name: 'Olivier Tassinari',
title: 'Co-founder',
location: 'Paris, France',
locationCountry: 'fr',
about: 'Exercise addict and lifelong learner',
twitter: 'olivtassinari',
github: 'oliviertassinari',
},
{
name: 'Matt Brookes',
src: '/static/branding/about/matt.png',
title: 'Co-founder',
location: 'London, UK',
locationCountry: 'gb',
about: "When I'm not 👨🏻💻, I'm 🧗🏼♂️",
twitter: 'randomtechdude',
github: 'mbrookes',
},
{
name: 'Marija Najdova',
src: '/static/branding/about/marija.png',
title: 'MUI Core engineer',
location: 'Skopje, North Macedonia',
locationCountry: 'mk',
about: 'I do karate 🥋 and read 📚. A lot!',
twitter: 'marijanajdova',
github: 'mnajdova',
},
{
name: 'Danail Hadjiatanasov',
src: '/static/branding/about/danail.png',
title: 'MUI X engineer',
location: 'Amsterdam, Netherlands',
locationCountry: 'nl',
about: 'Boringly normal, geek deep down. I like 🚗 and 🏂',
twitter: 'danail_h',
github: 'DanailH',
},
{
name: 'Matheus Wichman',
src: '/static/branding/about/matheus.png',
title: 'MUI X engineer',
location: 'Esteio, Brazil',
locationCountry: 'br',
about: 'I like road cycling 🚲, DIY 🛠 and aviation ✈!',
github: 'm4theushw',
},
{
name: 'Michał Dudak',
src: '/static/branding/about/michal.png',
title: 'MUI Core engineer',
location: 'Silesia, Poland',
locationCountry: 'pl',
about: 'Motorcyclist, gamer, and coder (UI and more!)',
twitter: 'michaldudak',
github: 'michaldudak',
},
{
name: 'Siriwat Kunaporn',
src: '/static/branding/about/siriwat.png',
title: 'MUI Core engineer',
location: 'Bangkok, Thailand',
locationCountry: 'th',
about: 'UI Lover and ⛷ skiing newbie.',
twitter: 'siriwatknp',
github: 'siriwatknp',
},
{
name: 'Danilo Leal',
src: '/static/branding/about/danilo.png',
title: 'Lead designer',
location: 'São Paulo, Brazil',
locationCountry: 'br',
about: 'Music production, hiking, and traveling!',
github: 'danilo-leal',
twitter: 'danilobleal',
},
{
name: 'Flavien Delangle',
src: '/static/branding/about/flavien.png',
title: 'MUI X engineer',
location: 'Lille, France',
about: 'Love cycling 🚴♂️ and reading 📚',
locationCountry: 'fr',
github: 'flaviendelangle',
},
{
name: 'Benny Joo',
src: '/static/branding/about/benny.png',
title: 'MUI Core engineer',
location: 'London, UK',
locationCountry: 'gb',
about: 'Love reading 📚 and working out 🏋️♂️',
github: 'hbjORbj',
},
{
src: '/static/branding/about/alexandre.png',
name: 'Alexandre Fauquette',
title: 'MUI X engineer',
location: 'Nancy, France',
locationCountry: 'fr',
about: 'Love hacking and cycling 🚴♂️',
twitter: 'AleFauquette',
github: 'alexfauquette',
},
|
title: 'MUI Studio engineer',
location: 'New Delhi, India',
locationCountry: 'in',
about: 'Trains 🚅 , architecture 🏛️ , and psychology 🧠 ',
twitter: 'bharattttttt',
github: 'bharatkashyap',
},
{
src: '/static/branding/about/jan.png',
name: 'Jan Potoms',
title: 'MUI Studio engineer',
location: 'Brussels, Belgium',
locationCountry: 'be',
about: 'Always curious, I enjoy cinema and hiking',
github: 'janpot',
},
{
src: '/static/branding/about/prakhar.png',
name: 'Prakhar Gupta',
title: 'MUI Studio PM',
location: 'New Delhi, India',
locationCountry: 'in',
about: 'Into sports and hiking!',
twitter: 'gprakhar123',
github: 'newguy-123',
},
{
src: '/static/branding/about/jose.png',
name: 'José Freitas',
title: 'MUI X PM',
location: 'Augsburg, Germany',
locationCountry: 'de',
about: 'Art, fiction, and bar philosophy',
twitter: 'zehdefreitas',
github: 'joserodolfofreitas',
},
{
src: '/static/branding/about/andrii.png',
name: 'Andrii Cherniavskyi',
title: 'MUI X engineer',
location: 'Wrocław, Poland',
locationCountry: 'pl',
about: 'Love playing music - electric and bass guitar 🎸',
twitter: 'iamcherniavskii',
github: 'cherniavskii',
},
{
src: '/static/branding/about/sycamore.png',
name: 'Sam Sycamore',
title: 'Developer Advocate',
location: 'Saint Paul, Minnesota, USA',
locationCountry: 'us',
about: 'Musician and edible wild plant enthusiast 🌱',
twitter: 'tanoaksam',
github: 'samuelsycamore',
},
];
const contributors = [
{
name: 'Sebastian Silbermann',
github: 'eps1lon',
title: 'MUI Core, everything Open Source',
location: 'Berlin, Germany',
locationCountry: 'de',
src: 'https://avatars.githubusercontent.com/u/12292047',
twitter: 'sebsilbermann',
},
{
name: 'Ryan Cogswell',
github: 'ryancogswell',
title: 'Stack Overflow top contributor',
location: 'Minnesota, United States',
locationCountry: 'us',
src: 'https://avatars.githubusercontent.com/u/287804',
},
{
name: 'Yan Lee',
github: 'AGDholo',
title: 'Chinese docs',
location: 'China',
locationCountry: 'cn',
src: 'https://avatars.githubusercontent.com/u/13300332',
},
{
name: 'Jairon Alves Lima',
github: 'jaironalves',
title: 'Brazilian Portuguese docs',
location: 'São Paulo, Brazil',
locationCountry: 'br',
src: 'https://avatars.githubusercontent.com/u/29267813',
},
{
name: 'Danica Shen',
github: 'DDDDDanica',
title: 'Chinese docs',
location: 'Ireland',
locationCountry: 'ie',
src: 'https://avatars.githubusercontent.com/u/12678455',
},
];
const emeriti = [
{
name: 'Hai Nguyen',
github: 'hai-cea',
twitter: 'haicea',
title: 'MUI Core, v0.x creator',
location: 'Dallas, US',
locationCountry: 'us',
src: 'https://avatars.githubusercontent.com/u/2007468',
},
{
name: 'Nathan Marks',
github: 'nathanmarks',
title: 'MUI Core, v1.x co-creator',
location: 'Toronto, CA',
locationCountry: 'ca',
src: 'https://avatars.githubusercontent.com/u/4420103',
},
{
name: 'Kevin Ross',
github: 'rosskevin',
twitter: 'rosskevin',
title: 'MUI Core, flow',
location: 'Franklin, US',
locationCountry: 'us',
src: 'https://avatars.githubusercontent.com/u/136564',
},
{
name: 'Sebastian Sebald',
github: 'sebald',
twitter: 'sebastiansebald',
title: 'MUI Core',
location: 'Freiburg, Germany',
locationCountry: 'de',
src: 'https://avatars.githubusercontent.com/u/985701',
},
{
name: 'Ken Gregory',
github: 'kgregory',
title: 'MUI Core',
location: 'New Jersey, US',
locationCountry: 'us',
src: 'https://avatars.githubusercontent.com/u/3155127',
},
{
name: 'Tom Crockett',
github: 'pelotom',
twitter: 'pelotom',
title: 'MUI Core',
location: 'Los Angeles, US',
locationCountry: 'us',
src: 'https://avatars.githubusercontent.com/u/128019',
},
{
name: 'Maik Marschner',
github: 'leMaik',
twitter: 'leMaikOfficial',
title: 'MUI Core',
location: 'Hannover, Germany',
locationCountry: 'de',
src: 'https://avatars.githubusercontent.com/u/5544859',
},
{
name: 'Oleg Slobodskoi',
github: 'kof',
twitter: 'oleg008',
title: 'MUI Core, JSS',
location: 'Berlin, Germany',
locationCountry: 'de',
src: 'https://avatars.githubusercontent.com/u/52824',
},
{
name: 'Dmitriy Kovalenko',
github: 'dmtrKovalenko',
twitter: 'goose_plus_plus',
title: 'MUI X, date pickers',
location: 'Kharkiv, Ukraine',
locationCountry: 'ua',
src: 'https://avatars.githubusercontent.com/u/16926049',
},
{
name: 'Josh Wooding',
github: 'joshwooding',
twitter: 'JoshWooding_',
title: 'MUI Core, J.P. Morgan',
location: 'London, UK',
locationCountry: 'gb',
src: 'https://avatars.githubusercontent.com/u/12938082',
},
];
function AboutContent() {
return (
<React.Fragment>
<Container>
<Box
sx={{
height: '40vh',
minHeight: 300,
display: 'flex',
flexDirection: 'column',
justifyContent: 'center',
alignItems: 'center',
maxWidth: 600,
mx: 'auto',
textAlign: 'center',
}}
>
<Typography variant="body2" color="primary.600" fontWeight="bold">
About us
</Typography>
<Typography component="h1" variant="h2" sx={{ my: 1 }}>
We're on a mission to make <br /> building UIs more{' '}
<GradientText>accessible</GradientText>
</Typography>
<Typography
color="text.secondary"
textAlign="center"
sx={{
maxWidth: { md: 500 },
minHeight: 48, // a hack to reduce CLS (layout shift)
}}
>
Our mission is to empower anyone to build UIs, faster. We're reducing the entry
barrier, making design skills accessible.
</Typography>
</Box>
<References companies={CORE_CUSTOMERS} />
</Container>
<Box
sx={{ bgcolor: (theme) => (theme.palette.mode === 'dark' ? 'primaryDark.900' : 'grey.50') }}
>
<Container sx={{ py: { xs: 4, md: 8 } }}>
<Grid container alignItems="center" spacing={4}>
<Grid item xs={12} md={6}>
<Typography variant="h2" sx={{ my: 1 }}>
Our ultimate goal
</Typography>
<Typography color="text.secondary" sx={{ mb: 1, maxWidth: 450 }}>
We aim high trying to design the most effective and efficient tool for building UIs,
for developers and designers. MUI started back in 2014, to unify React and Material
Design. Since then, we've become a community of over 2M developers from every
corner of the world.
</Typography>
<Typography color="text.secondary" sx={{ mb: 2 }}>
We plan on doing all that cultivating our values:
</Typography>
{[
'Customer obsessed. We put our customers front & center.',
'Transparency. Most of our work is public.',
'Freedom. We work from anywhere in the world.',
'Autonomy. We want to create a safe, high-trust team.',
"Excellence. We're aiming high, and we know it.",
].map((text) => (
<Box key={text} sx={{ display: 'flex', alignItems: 'flex-start', mt: 1 }}>
<IconImage name="yes" />
<Typography variant="body2" color="text.primary" fontWeight={700} sx={{ ml: 1 }}>
{text}
</Typography>
</Box>
))}
</Grid>
<MuiStatistics />
</Grid>
</Container>
</Box>
<Container sx={{ py: { xs: 4, md: 8 } }}>
<Box sx={{ display: 'flex', justifyContent: 'space-between', alignItems: 'center' }}>
<div>
<Typography variant="h2" sx={{ my: 1 }}>
Team
</Typography>
<Typography color="text.secondary" sx={{ mb: 2, maxWidth: 450 }}>
MUI is maintained by a group of invaluable core contributors, with the massive support
and involvement of the community.
</Typography>
<Button
component={Link}
noLinkStyle
href={ROUTES.careers}
endIcon={<KeyboardArrowRightRounded fontSize="small" />}
variant="contained"
size="large"
sx={{ width: { xs: '100%', sm: 'auto' } }}
>
See open roles
</Button>
</div>
</Box>
<Divider sx={{ my: { xs: 2, sm: 4 } }} />
<Typography
component="h3"
variant="h5"
color="primary"
fontWeight="extraBold"
sx={{ mb: 1 }}
>
Company
</Typography>
<Typography color="text.secondary" sx={{ maxWidth: { md: 500 } }}>
The development of the project and its ecosystem is guided by an international team.
</Typography>
<Box sx={{ pt: 2 }}>
<Grid container spacing={2}>
{teamMembers.map((profile) => (
<Grid key={profile.name} item xs={12} sm={6} md={3}>
<Person {...profile} />
</Grid>
))}
</Grid>
</Box>
</Container>
<MuiThemeProvider theme={brandingDarkTheme}>
<Box sx={{ bgcolor: 'primaryDark.700' }}>
<Container sx={{ py: { xs: 4, sm: 8 } }}>
<Typography
component="h3"
variant="h5"
color="primary.400"
fontWeight="extraBold"
sx={{ mb: 1 }}
>
Community contributors
</Typography>
<Typography color="text.secondary" sx={{ maxWidth: { md: 500 } }}>
Some members of the community have so enriched it that they deserve special mention.
</Typography>
<Box sx={{ pt: 2, pb: { xs: 4, sm: 8 } }}>
<Grid container spacing={2}>
{contributors.map((profile) => (
<Grid key={profile.name} item xs={12} sm={6} md={3}>
<Person {...profile} sx={{ bgcolor: 'primaryDark.600' }} />
</Grid>
))}
</Grid>
</Box>
<Typography
component="h3"
variant="h5"
color="warning.500"
fontWeight="extraBold"
sx={{ mb: 1 }}
>
Community emeriti
</Typography>
<Typography color="text.secondary" sx={{ maxWidth: { md: 500 } }}>
We honor some no-longer-active core team members who have made valuable contributions
in the past. They advise us from time to time.
</Typography>
<Box sx={{ pt: 2 }}>
<Grid container spacing={2}>
{emeriti.map((profile) => (
<Grid key={profile.name} item xs={12} sm={6} md={3}>
<Person {...profile} sx={{ bgcolor: 'primaryDark.600' }} />
</Grid>
))}
</Grid>
</Box>
</Container>
</Box>
</MuiThemeProvider>
<Container sx={{ py: { xs: 4, md: 8 } }}>
<Typography variant="h2" sx={{ mt: 1, mb: { xs: 2, sm: 4 } }}>
How can you support us?
</Typography>
<Grid container spacing={2}>
<Grid item xs={12} sm={6} md={4}>
<Widget
icon={<ForumRoundedIcon fontSize="small" color="primary" />}
title="Give feedback"
>
<Typography variant="body2" color="text.secondary" sx={{ mb: 1 }}>
Tell us what and where we can improve, or share your happy moments with us! You
can also upvote or downvote any page of our documentation. <br />
<br /> And lastly, from time to time, we send our community a survey for more
structured feedback; you're always invited to participate and share your
thoughts.
</Typography>
<Button
component="a"
// @ts-expect-error
variant="link"
size="small"
href="https://github.com/mui/material-ui/issues?q=is%3Aissue+is%3Aopen+sort%3Areactions-%2B1-desc"
endIcon={<KeyboardArrowRightRounded />}
>
Leave your feedback{' '}
</Button>
</Widget>
</Grid>
<Grid item xs={12} sm={6} md={4}>
<Widget
icon={<PeopleRoundedIcon fontSize="small" color="primary" />}
title="Join the community"
>
<Typography variant="body2" color="text.secondary" sx={{ mb: 1 }}>
Become a member of a huge community of developers supporting MUI. You can:
</Typography>
<Box
component="ul"
sx={{
typography: 'body2',
color: 'text.secondary',
pl: 2,
}}
>
<li>
Add new features by{' '}
<Link href="https://github.com/mui/material-ui/blob/HEAD/CONTRIBUTING.md#your-first-pull-request">
submitting a pull request
</Link>
.
</li>
<li>
Fix bugs or{' '}
<Link href="https://github.com/mui/material-ui/tree/HEAD/docs">
improve our documentation
</Link>
.
</li>
<li>
Help others by reviewing and commenting on existing{' '}
<Link href="https://github.com/mui/material-ui/pulls">PRs</Link> and{' '}
<Link href="https://github.com/mui/material-ui/issues">issues</Link>.
</li>
<li>
Help <Link href="https://translate.mui.com/">translate</Link> the documentation.
</li>
<li>
Answer questions on{' '}
<Link href="https://stackoverflow.com/questions/tagged/mui">StackOverflow</Link>.
</li>
</Box>
<Button
component="a"
// @ts-expect-error
variant="link"
size="small"
href="https://github.com/mui/material-ui"
endIcon={<KeyboardArrowRightRounded />}
>
See the repository
</Button>
</Widget>
</Grid>
<Grid item xs={12} sm={6} md={4}>
<Widget
icon={<LocalAtmRoundedIcon fontSize="small" color="primary" />}
title="Support us financially"
>
<Typography variant="body2" color="text.secondary" sx={{ mb: 1 }}>
If you use MUI in a commercial project and would like to support its continued
development by becoming a Sponsor, or in a side or hobby project and would like to
become a Backer, you can do so through OpenCollective.
<br />
<br />
All funds donated are managed transparently, and Sponsors receive recognition in the
README and on the MUI home page.
</Typography>
<Button
component="a"
// @ts-expect-error
variant="link"
size="small"
href="https://opencollective.com/mui"
endIcon={<KeyboardArrowRightRounded />}
>
See Open Collective{' '}
</Button>
</Widget>
</Grid>
</Grid>
</Container>
<HeroEnd />
<Divider />
</React.Fragment>
);
}
export default function About() {
return (
<BrandingProvider>
<Head
title="About us - MUI"
description="MUI (formerly Material-UI) started back in 2014 to unify React and Material Design. Today, MUI has grown to become one of the world's most popular React libraries – used by a vibrant community of more than 2M developers in over 180 countries."
/>
<AppHeaderBanner />
<AppHeader />
<main>
<AboutContent />
</main>
<AppFooter />
</BrandingProvider>
);
}
|
{
src: '/static/branding/about/bharat.png',
name: 'Bharat Kashyap',
|
views.py
|
__author__ = 'efelix'
# ----------------------------------------------------------------------------------------------------------------------
from rdkit.Chem import AllChem
from beaker import app
from bottle import request
from beaker.core_apps.D2Coords.impl import _ctab22D, _smiles22D, _is3D
from beaker.utils.io import _parseFlag
# ----------------------------------------------------------------------------------------------------------------------
def ctab22DView(data, params):
kwargs = dict()
kwargs['loadMol'] = _parseFlag(params.get('loadMol', True))
kwargs['useRDKitChemistry'] = _parseFlag(params.get('useRDKitChemistry', False))
return _ctab22D(data, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
def is3DView(data, params):
kwargs = dict()
kwargs['loadMol'] = _parseFlag(params.get('loadMol', True))
kwargs['useRDKitChemistry'] = _parseFlag(params.get('useRDKitChemistry', False))
return _is3D(data, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/ctab22D', method=['OPTIONS', 'POST'], name="ctab22D")
def ctab22D():
"""
Generate 2D coordinates for a molecule using Schrodinger's coordgen.
The CTAB input is either a single molfile or an SDF file.
cURL examples:
curl -X POST --data-binary @no_coords.mol ${BEAKER_ROOT_URL}ctab22D
curl -X POST -F "file=@no_coords.mol" ${BEAKER_ROOT_URL}ctab22D
"""
data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
return ctab22DView(data, request.params)
# ----------------------------------------------------------------------------------------------------------------------
def smiles22DView(data, params):
kwargs = dict()
kwargs['computeCoords'] = False
kwargs['delimiter'] = params.get('delimiter', ' ')
kwargs['smilesColumn'] = int(params.get('smilesColumn', 0))
kwargs['nameColumn'] = int(params.get('nameColumn', 1))
kwargs['sanitize'] = _parseFlag(params.get('sanitize', True))
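# Heuristic: when the caller didn't specify titleLine and the payload doesn't
# start with the canonical "SMILES Name" header, assume there is no title line.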
if params.get('titleLine') is None and not data.startswith(b'SMILES Name'):
kwargs['titleLine'] = False
else:
kwargs['titleLine'] = _parseFlag(params.get('titleLine', True))
return _smiles22D(data, **kwargs)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/smiles22D', method=['OPTIONS', 'POST'], name="smiles22D")
def smiles22D():
"""
Generate 2D coordinates from SMILES using Schrodinger's coordgen.
Input is a SMILES file, with or without a title line.
cURL examples:
curl -X POST --data-binary @aspirin_with_header.smi ${BEAKER_ROOT_URL}smiles22D
curl -X POST -F "file=@aspirin_with_header.smi" ${BEAKER_ROOT_URL}smiles22D
curl -X POST --data-binary @aspirin_no_header.smi ${BEAKER_ROOT_URL}smiles22D
curl -X POST -F "file=@aspirin_no_header.smi" ${BEAKER_ROOT_URL}smiles22D
"""
data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
return smiles22DView(data, request.params)
# ----------------------------------------------------------------------------------------------------------------------
@app.route('/is3D', method=['OPTIONS', 'POST'], name="is3D")
def is3D():
|
# ----------------------------------------------------------------------------------------------------------------------
|
"""
Check if a molecule has any 3D coordinates.
The CTAB input is either a single molfile or an SDF file.
cURL examples:
curl -X POST --data-binary @aspirin_with_header.smi ${BEAKER_ROOT_URL}is3D
curl -X POST -F "file=@aspirin_with_header.smi" ${BEAKER_ROOT_URL}is3D
curl -X POST --data-binary @aspirin_no_header.smi ${BEAKER_ROOT_URL}is3D
curl -X POST -F "file=@aspirin_no_header.smi" ${BEAKER_ROOT_URL}is3D
"""
data = list(request.files.values())[0].file.read() if len(request.files) else request.body.read()
return is3DView(data, request.params)
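# Illustrative client sketch (not part of the service): a minimal Python client
# mirroring the cURL examples above. The base URL and the local input file are
# assumptions made for the sake of the example.
if __name__ == '__main__':
    import requests  # assumed to be available in the client environment
    BEAKER_ROOT_URL = 'http://localhost:8000/'  # hypothetical deployment
    with open('no_coords.mol', 'rb') as f:
        resp = requests.post(BEAKER_ROOT_URL + 'ctab22D', data=f.read())
    resp.raise_for_status()
    print(resp.text)  # CTAB with freshly generated 2D coordinates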
|
predict.rs
|
use pinwheel::prelude::*;
use tangram_ui as ui;
pub struct Predict;
impl Component for Predict {
fn
|
(self) -> Node {
let elixir = ui::doc!(
r#"
model = Tangram.load_model_from_path("./heart_disease.tangram")
output = Tangram.predict(model, %{
:age => 63,
:gender => "male",
# ...
})
"#
)
.into();
let go = ui::doc!(
r#"
import "github.com/tangramdotdev/tangram/languages/go"
model, _ := tangram.LoadModelFromPath("./heart_disease.tangram", nil)
output := model.PredictOne(tangram.Input{
"age": 63,
"gender": "male",
// ...
}, nil)
"#
)
.into();
let javascript = ui::doc!(
r#"
const tangram = require("@tangramdotdev/tangram");
const model = new tangram.Model("./heart_disease.tangram");
const output = model.predict({
age: 63,
gender: "male",
// ...
});
"#
)
.into();
let python = ui::doc!(
r#"
import tangram
model = tangram.Model.from_path('./census.tangram')
output = model.predict({
'age': 63,
'gender': 'male',
# ...
})
"#
)
.into();
let ruby = ui::doc!(
r#"
require 'tangram'
model = Tangram::Model.from_path('./heart_disease.tangram')
output = model.predict({
age: 63,
gender: 'male',
# ...
})
"#
)
.into();
let rust = ui::doc!(
r#"
let model: tangram::Model =
tangram::Model::from_path("./heart_disease.tangram", None).unwrap();
let input = tangram::predict_input! {
"age": 63.0,
"gender": "male",
// ...
};
let output = model.predict_one(input, None);
"#
)
.into();
let code_for_language = ui::highlight_code_for_language(ui::CodeForLanguage {
elixir,
go,
javascript,
python,
ruby,
rust,
});
let title = div()
.class("index-step-title")
.child("Make predictions in your favorite language.");
let p1 = div()
.class("index-step-text")
.child("Make predictions with libraries for ")
.child(
ui::Link::new()
.href("https://hex.pm/packages/tangram".to_owned())
.title("Elixir".to_owned())
.child("Elixir"),
)
.child(", ")
.child(
ui::Link::new()
.href("https://pkg.go.dev/github.com/tangramdotdev/tangram-go".to_owned())
.title("Go".to_owned())
.child("Go"),
)
.child(", ")
.child(
ui::Link::new()
.href("https://www.npmjs.com/package/@tangramdotdev/tangram".to_owned())
.title("JavaScript".to_owned())
.child("JavaScript"),
)
.child(", ")
.child(
ui::Link::new()
.href("https://pypi.org/project/tangram".to_owned())
.title("Python".to_owned())
.child("Python"),
)
.child(", ")
.child(
ui::Link::new()
.href("https://rubygems.org/gems/tangram".to_owned())
.title("Ruby".to_owned())
.child("Ruby"),
)
.child(", and ")
.child(
ui::Link::new()
.href("https://lib.rs/tangram".to_owned())
.title("Rust".to_owned())
.child("Rust"),
)
.child(".");
let p2 = div().class("index-step-text").child("Tangram is written in Rust and exposed to each language via native extensions, so predictions are fast and your data never travels over the network.");
let left = div().child(title).child(p1).child(br()).child(p2);
let right =
ui::Window::new().child(ui::CodeSelect::new(code_for_language).line_numbers(true));
div()
.class("index-step")
.child(left)
.child(right)
.into_node()
}
}
|
into_node
|
tcp-server.rs
|
//! A TCP server.
//!
//! First start a server:
//!
//! ```
//! cargo run --example tcp-server
//! ```
//!
//! Then start a client:
//!
//! ```
//! cargo run --example tcp-client
//! ```
use std::net::{TcpListener, TcpStream};
use futures::io;
use smol::{Async, Task};
/// Echoes messages from the client back to it.
|
async fn echo(stream: Async<TcpStream>) -> io::Result<()> {
io::copy(&stream, &mut &stream).await?;
Ok(())
}
fn main() -> io::Result<()> {
smol::run(async {
// Create a listener.
let listener = Async::<TcpListener>::bind("127.0.0.1:7000")?;
println!("Listening on {}", listener.get_ref().local_addr()?);
println!("Now start a TCP client.");
// Accept clients in a loop.
loop {
let (stream, peer_addr) = listener.accept().await?;
println!("Accepted client: {}", peer_addr);
// Spawn a task that echoes messages from the client back to it.
Task::spawn(echo(stream)).unwrap().detach();
}
})
}
| |
Point.tsx
|
import * as React from 'react';
import cx from 'classnames';
|
value: number;
disabled?: boolean;
position: string;
active: boolean;
}
function SliderPoint(props: ISliderPointProps) {
const { value, position, disabled, active } = props;
return (
<div
className={cx('zent-slider-tooltip', {
'zent-slider-tooltip-active': active,
})}
style={{
left: position,
}}
>
<div
className={cx('zent-slider-point', {
'zent-slider-point-disabled': disabled,
})}
/>
<div className="zent-slider-tooltip-content">{value}</div>
</div>
);
}
export default SliderPoint;
|
export type PointId = 'point-left' | 'point-right' | 'point-single';
export interface ISliderPointProps {
|
python_logger.py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-
from logging import DEBUG
from logging import INFO
from logging import Formatter
from logging import StreamHandler
from logging import getLogger
from sys import stderr
from sys import stdout
class LogLevelFilter:
def __init__(self, level):
|
def filter(self, log_record):
return log_record.levelno == self.__level
def create_logger(name=None):
# create logger
log = getLogger(name)
log.setLevel(DEBUG)
# create formatter and add it to the handlers
log_format = Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
# stdout handler: emits INFO and above
info_handler = StreamHandler(stdout)
info_handler.setLevel(INFO)
info_handler.setFormatter(log_format)
log.addHandler(info_handler)
# stderr handler: emits DEBUG records only (restricted by LogLevelFilter)
debug_handler = StreamHandler(stderr)
debug_handler.setLevel(DEBUG)
debug_handler.setFormatter(log_format)
debug_handler.addFilter(LogLevelFilter(DEBUG))
log.addHandler(debug_handler)
return log
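# Illustrative usage (assumed entry point, not part of the original module):
# INFO and above go to stdout, while DEBUG records are routed to stderr.
if __name__ == '__main__':
    logger = create_logger('demo')
    logger.info('visible on stdout')
    logger.debug('visible on stderr only')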
|
self.__level = level
|
attr.rs
|
// Copyright (c) 2016 Yusuke Sasaki
//
// This software is released under the MIT License.
// See http://opensource.org/licenses/mit-license.php or <LICENSE>.
/// Defines the names of attributes
pub mod exports {
pub use ffi::{IntAttr, DoubleAttr, CharAttr, StringAttr};
pub use self::IntAttr::*;
pub use self::DoubleAttr::*;
pub use self::CharAttr::*;
pub use self::StringAttr::*;
}
use self::exports::*;
use ffi;
use std::ffi::CString;
use util;
use error::Result;
/// Provides functions to query/set the value of a scalar attribute.
pub trait Attr: Into<CString> {
type Out;
type Buf: util::Init + util::Into<Self::Out> + util::AsRawPtr<Self::RawGet>;
type RawGet;
type RawSet: util::From<Self::Out>;
unsafe fn get_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: Self::RawGet) -> ffi::c_int;
unsafe fn set_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: Self::RawSet) -> ffi::c_int;
}
impl Attr for IntAttr {
type Out = i32;
type Buf = i32;
type RawGet = *mut ffi::c_int;
type RawSet = ffi::c_int;
unsafe fn get_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: *mut ffi::c_int) -> ffi::c_int {
ffi::GRBgetintattr(model, attrname, value)
}
unsafe fn set_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: Self::RawSet) -> ffi::c_int {
ffi::GRBsetintattr(model, attrname, value)
}
}
impl Attr for DoubleAttr {
type Out = f64;
type Buf = f64;
type RawGet = *mut ffi::c_double;
type RawSet = ffi::c_double;
unsafe fn get_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: *mut ffi::c_double) -> ffi::c_int {
ffi::GRBgetdblattr(model, attrname, value)
}
unsafe fn set_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: Self::RawSet) -> ffi::c_int {
ffi::GRBsetdblattr(model, attrname, value)
}
}
impl Attr for StringAttr {
type Out = String;
type Buf = ffi::c_str;
type RawGet = *mut ffi::c_str;
type RawSet = ffi::c_str;
unsafe fn get_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: *mut ffi::c_str) -> ffi::c_int {
ffi::GRBgetstrattr(model, attrname, value)
}
unsafe fn set_attr(model: *mut ffi::GRBmodel, attrname: ffi::c_str, value: Self::RawSet) -> ffi::c_int {
ffi::GRBsetstrattr(model, attrname, value)
}
}
pub trait AttrArray: Into<CString> {
type Out: Clone;
type Buf: Clone + util::Init + util::Into<Self::Out> + util::AsRawPtr<Self::RawGet>;
type RawGet;
type RawSet: util::From<Self::Out>;
unsafe fn get_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int,
values: Self::RawGet)
-> ffi::c_int;
unsafe fn set_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int,
values: Self::RawSet)
-> ffi::c_int;
unsafe fn get_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *mut Self::Buf)
-> ffi::c_int;
unsafe fn set_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *const Self::RawSet)
-> ffi::c_int;
fn to_rawsets(values: &[Self::Out]) -> Result<Vec<Self::RawSet>> {
Ok(values.iter().map(|v| util::From::from(v.clone())).collect())
}
}
impl AttrArray for IntAttr {
type Out = i32;
type Buf = i32;
type RawGet = *mut ffi::c_int;
type RawSet = ffi::c_int;
unsafe fn get_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int,
value: *mut ffi::c_int)
-> ffi::c_int {
ffi::GRBgetintattrelement(model, attrname, element, value)
}
unsafe fn set_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int, value: ffi::c_int)
-> ffi::c_int {
ffi::GRBsetintattrelement(model, attrname, element, value)
}
unsafe fn get_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *mut ffi::c_int)
-> ffi::c_int {
ffi::GRBgetintattrlist(model, attrname, len, ind, values)
}
unsafe fn set_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *const Self::RawSet)
-> ffi::c_int {
ffi::GRBsetintattrlist(model, attrname, len, ind, values)
}
}
impl AttrArray for DoubleAttr {
type Out = f64;
type Buf = f64;
type RawGet = *mut ffi::c_double;
type RawSet = ffi::c_double;
unsafe fn get_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int,
value: *mut ffi::c_double)
-> ffi::c_int {
ffi::GRBgetdblattrelement(model, attrname, element, value)
}
unsafe fn set_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int,
value: ffi::c_double)
-> ffi::c_int {
ffi::GRBsetdblattrelement(model, attrname, element, value)
}
unsafe fn get_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *mut ffi::c_double)
-> ffi::c_int {
ffi::GRBgetdblattrlist(model, attrname, len, ind, values)
}
unsafe fn set_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *const Self::RawSet)
-> ffi::c_int {
ffi::GRBsetdblattrlist(model, attrname, len, ind, values)
}
}
impl AttrArray for CharAttr {
type Out = i8;
type Buf = i8;
type RawGet = *mut ffi::c_char;
type RawSet = ffi::c_char;
unsafe fn get_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int,
value: *mut ffi::c_char)
-> ffi::c_int {
ffi::GRBgetcharattrelement(model, attrname, element, value)
}
unsafe fn set_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int, value: ffi::c_char)
-> ffi::c_int {
ffi::GRBsetcharattrelement(model, attrname, element, value)
}
unsafe fn get_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *mut ffi::c_char)
-> ffi::c_int {
ffi::GRBgetcharattrlist(model, attrname, len, ind, values)
}
unsafe fn set_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *const Self::RawSet)
-> ffi::c_int {
ffi::GRBsetcharattrlist(model, attrname, len, ind, values)
}
}
impl AttrArray for StringAttr {
type Out = String;
type Buf = ffi::c_str;
type RawGet = *mut ffi::c_str;
type RawSet = ffi::c_str;
fn
|
(values: &[String]) -> Result<Vec<ffi::c_str>> {
let mut buf = Vec::with_capacity(values.len());
for value in values.into_iter() {
let value = try!(CString::new(value.as_str()));
buf.push(value.as_ptr())
}
Ok(buf)
}
unsafe fn get_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int,
value: *mut ffi::c_str)
-> ffi::c_int {
ffi::GRBgetstrattrelement(model, attrname, element, value)
}
unsafe fn set_attrelement(model: *mut ffi::GRBmodel, attrname: ffi::c_str, element: ffi::c_int, value: ffi::c_str)
-> ffi::c_int {
ffi::GRBsetstrattrelement(model, attrname, element, value)
}
unsafe fn get_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *mut ffi::c_str)
-> ffi::c_int {
ffi::GRBgetstrattrlist(model, attrname, len, ind, values)
}
unsafe fn set_attrlist(model: *mut ffi::GRBmodel, attrname: ffi::c_str, len: ffi::c_int, ind: *const ffi::c_int,
values: *const ffi::c_str)
-> ffi::c_int {
ffi::GRBsetstrattrlist(model, attrname, len, ind, values)
}
}
|
to_rawsets
|
index.d.ts
|
import {
APIMessageContentResolvable,
Message,
Presence,
EmojiIdentifierResolvable,
MessageReaction,
} from 'discord.js';
import type LocaleService from '../src/struct/LocaleService';
declare global {
interface Array<T> {
/**
* Removes elements of the array by their names.
* Returns a copy of the array without the elements passed as parameters.
* @param keys The keys to remove from the array
*/
remove(...keys: string[]): string[];
/**
* Returns the indexes of an array that meet the condition specified in a callback function.
* @param predicate A function that accepts up to three arguments. The filterIndex method calls the predicate function once for each element in the array.
* @param thisArg An object to which the this keyword can refer in the predicate function. If thisArg is omitted, undefined is used as the this value.
*/
filterIndex<S extends T>(
predicate: (value: S, index: number, array: S[]) => value is S,
thisArg?: any
): number[];
}
interface Object {
/**
* Check whether an object is empty. An object is considered empty if it doesn't have any enumerable properties.\
* Note that this method is not suitable for use as a `filter` predicate.\
* If you just need an emptiness check, you can also get the object's keys with `Object.keys` and test the array's length, e.g. `Object.keys(obj).length === 0`.\
* This method exists for purely aesthetic purposes; extending native prototypes is considered bad practice, and MongoDB doesn't like it.
*/
isEmpty(): boolean;
}
}
declare namespace Intl {
class
|
{
public format: (items: [string?]) => string;
}
}
declare module 'discord.js' {
export interface Guild {
/**
* The i18n object notation
*/
readonly i18n: LocaleService;
/**
* The prefix of the guild, if there's one.
*/
prefix?: string;
}
export interface User {
/**
* The presence of this user.
*/
readonly presence?: Presence;
}
export interface CommandInteraction {
/**
* Adds a reaction to the interaction.
* @param emoji The emoji to react with
*/
react(emoji: EmojiIdentifierResolvable): Promise<MessageReaction>;
}
}
declare module 'mongoose' {
interface Connection {
/**
* Set to `true` if the connection is established successfully
*/
_hasOpened: boolean;
}
}
|
ListFormat
|
python_fn_test.go
|
// Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package acceptance
import (
"testing"
"github.com/GoogleCloudPlatform/buildpacks/internal/acceptance"
)
func init() {
acceptance.DefineFlags()
}
func TestAcceptancePythonFn(t *testing.T) {
builder, cleanup := acceptance.CreateBuilder(t)
t.Cleanup(cleanup)
testCases := []acceptance.Test{
{
Name: "function without framework",
App: "without_framework",
Path: "/testFunction",
Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"},
MustUse: []string{pythonRuntime, pythonFF, pythonPIP},
MustNotUse: []string{entrypoint},
},
{
Name: "function with custom source file",
App: "custom_file",
Path: "/testFunction",
Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction", "GOOGLE_FUNCTION_SOURCE=func.py"},
MustUse: []string{pythonRuntime, pythonFF, pythonPIP},
MustNotUse: []string{entrypoint},
},
{
Name: "function with dependencies",
App: "with_dependencies",
Path: "/testFunction",
Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"},
MustUse: []string{pythonRuntime, pythonPIP, pythonFF},
MustNotUse: []string{entrypoint},
},
{
Name: "function with framework",
App: "with_framework",
Path: "/testFunction",
Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"},
MustUse: []string{pythonRuntime, pythonPIP, pythonFF},
MustNotUse: []string{entrypoint},
},
{
Name: "function with runtime env var",
App: "with_env_var",
Path: "/testFunction",
Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"},
RunEnv: []string{"FOO=foo"},
MustUse: []string{pythonRuntime, pythonFF, pythonPIP},
MustNotUse: []string{entrypoint},
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.Name, func(t *testing.T) {
t.Parallel()
acceptance.TestApp(t, builder, tc)
})
}
}
func TestFailuresPythonFn(t *testing.T)
|
{
builder, cleanup := acceptance.CreateBuilder(t)
t.Cleanup(cleanup)
testCases := []acceptance.FailureTest{
{
Name: "missing framework file",
App: "with_framework",
Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction", "GOOGLE_FUNCTION_SOURCE=func.py"},
MustMatch: `GOOGLE_FUNCTION_SOURCE specified file "func.py" but it does not exist`,
},
{
Name: "missing main.py",
App: "custom_file",
Env: []string{"GOOGLE_FUNCTION_TARGET=testFunction"},
MustMatch: "missing main.py and GOOGLE_FUNCTION_SOURCE not specified. Either create the function in main.py or specify GOOGLE_FUNCTION_SOURCE to point to the file that contains the function",
},
}
for _, tc := range testCases {
tc := tc
t.Run(tc.Name, func(t *testing.T) {
t.Parallel()
acceptance.TestBuildFailure(t, builder, tc)
})
}
}
|
|
userOutboxEntity.ts
|
/*
* Copyright 2021 Byndyusoft
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import { Column, Entity, PrimaryGeneratedColumn } from "typeorm";
import { UserOutboxDto } from "ᐸDtosᐳ";
@Entity({
name: "users_outbox",
})
export class UserOutboxEntity {
|
public readonly id!: string;
@Column({
type: "jsonb",
name: "entity",
})
public readonly entity!: UserOutboxDto;
@Column({
type: "timestamptz",
name: "timestamp",
})
public readonly timestamp!: Date;
}
|
@PrimaryGeneratedColumn({
type: "bigint",
name: "id",
})
|
utils.go
|
package utils
import (
"fmt"
"io"
"os"
"path/filepath"
"strings"
"golang.org/x/xerrors"
"github.com/aquasecurity/trivy/pkg/log"
)
var cacheDir string
// DefaultCacheDir returns the default cache directory to be used for trivy operations
func DefaultCacheDir() string {
tmpDir, err := os.UserCacheDir()
if err != nil {
tmpDir = os.TempDir()
}
return filepath.Join(tmpDir, "trivy")
}
// CacheDir returns the directory used for caching
func CacheDir() string {
return cacheDir
}
// SetCacheDir sets the trivy cacheDir
func SetCacheDir(dir string) {
cacheDir = dir
}
// FileWalk walks the directory and performs operations on files defined by walkFn
func FileWalk(root string, targetFiles map[string]struct{}, walkFn func(r io.Reader, path string) error) error {
err := filepath.Walk(root, func(path string, info os.FileInfo, err error) error {
if err != nil {
return err
}
if info.IsDir() {
return nil
}
rel, err := filepath.Rel(root, path)
if err != nil {
return xerrors.Errorf("error in filepath rel: %w", err)
}
if _, ok := targetFiles[rel]; !ok {
return nil
}
if info.Size() == 0 {
log.Logger.Debugf("invalid size: %s", path)
return nil
}
f, err := os.Open(path)
if err != nil {
return xerrors.Errorf("failed to open file: %w", err)
}
defer f.Close()
if err = walkFn(f, path); err != nil {
return err
}
return nil
})
if err != nil {
return xerrors.Errorf("error in file walk: %w", err)
}
return nil
}
// StringInSlice checks if a string exists in a list of strings
func StringInSlice(a string, list []string) bool {
for _, b := range list {
if b == a {
return true
}
}
return false
}
// FilterTargets filters the target based on prefixPath
func
|
(prefixPath string, targets map[string]struct{}) (map[string]struct{}, error) {
filtered := map[string]struct{}{}
for filename := range targets {
if strings.HasPrefix(filename, prefixPath) {
rel, err := filepath.Rel(prefixPath, filename)
if err != nil {
return nil, xerrors.Errorf("error in filepath rel: %w", err)
}
if strings.HasPrefix(rel, ".."+string(filepath.Separator)) {
continue
}
filtered[rel] = struct{}{}
}
}
return filtered, nil
}
// CopyFile copies the file content from src to dst
func CopyFile(src, dst string) (int64, error) {
sourceFileStat, err := os.Stat(src)
if err != nil {
return 0, err
}
if !sourceFileStat.Mode().IsRegular() {
return 0, fmt.Errorf("%s is not a regular file", src)
}
source, err := os.Open(src)
if err != nil {
return 0, err
}
defer source.Close()
destination, err := os.Create(dst)
if err != nil {
return 0, err
}
defer destination.Close()
n, err := io.Copy(destination, source)
return n, err
}
|
FilterTargets
|
training_args.py
|
# Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import dataclasses
import json
import os
from dataclasses import dataclass, field
from enum import Enum
from typing import Any, Dict, List, Optional, Tuple
from .file_utils import cached_property, is_torch_available, is_torch_tpu_available, torch_required
from .trainer_utils import EvaluationStrategy
from .utils import logging
if is_torch_available():
import torch
if is_torch_tpu_available():
import torch_xla.core.xla_model as xm
logger = logging.get_logger(__name__)
def default_logdir() -> str:
"""
Same default as PyTorch
"""
import socket
from datetime import datetime
current_time = datetime.now().strftime("%b%d_%H-%M-%S")
return os.path.join("runs", current_time + "_" + socket.gethostname())
@dataclass
class TrainingArguments:
"""
TrainingArguments is the subset of the arguments we use in our example scripts **which relate to the training loop
itself**.
Using :class:`~transformers.HfArgumentParser` we can turn this class into argparse arguments to be able to specify
them on the command line.
Parameters:
output_dir (:obj:`str`):
The output directory where the model predictions and checkpoints will be written.
overwrite_output_dir (:obj:`bool`, `optional`, defaults to :obj:`False`):
If :obj:`True`, overwrite the content of the output directory. Use this to continue training if
:obj:`output_dir` points to a checkpoint directory.
do_train (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether to run training or not. This argument is not directly used by :class:`~transformers.Trainer`, it's
intended to be used by your training/evaluation scripts instead. See the `example scripts
<https://github.com/huggingface/transformers/tree/master/examples>`__ for more details.
do_eval (:obj:`bool`, `optional`):
Whether to run evaluation on the dev set or not. Will be set to :obj:`True` if :obj:`evaluation_strategy`
is different from :obj:`"no"`. This argument is not directly used by :class:`~transformers.Trainer`, it's
intended to be used by your training/evaluation scripts instead. See the `example scripts
<https://github.com/huggingface/transformers/tree/master/examples>`__ for more details.
do_predict (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether to run predictions on the test set or not. This argument is not directly used by
:class:`~transformers.Trainer`, it's intended to be used by your training/evaluation scripts instead. See
the `example scripts <https://github.com/huggingface/transformers/tree/master/examples>`__ for more
details.
evaluation_strategy (:obj:`str` or :class:`~transformers.trainer_utils.EvaluationStrategy`, `optional`, defaults to :obj:`"no"`):
The evaluation strategy to adopt during training. Possible values are:
* :obj:`"no"`: No evaluation is done during training.
* :obj:`"steps"`: Evaluation is done (and logged) every :obj:`eval_steps`.
* :obj:`"epoch"`: Evaluation is done at the end of each epoch.
prediction_loss_only (:obj:`bool`, `optional`, defaults to :obj:`False`):
When performing evaluation and predictions, only returns the loss.
per_device_train_batch_size (:obj:`int`, `optional`, defaults to 8):
The batch size per GPU/TPU core/CPU for training.
per_device_eval_batch_size (:obj:`int`, `optional`, defaults to 8):
The batch size per GPU/TPU core/CPU for evaluation.
gradient_accumulation_steps (:obj:`int`, `optional`, defaults to 1):
Number of update steps to accumulate the gradients for, before performing a backward/update pass.
.. warning::
When using gradient accumulation, one step is counted as one step with backward pass. Therefore,
logging, evaluation, save will be conducted every ``gradient_accumulation_steps * xxx_step`` training
examples.
eval_accumulation_steps (:obj:`int`, `optional`):
Number of prediction steps to accumulate the output tensors for, before moving the results to the CPU. If
left unset, the whole predictions are accumulated on GPU/TPU before being moved to the CPU (faster but
requires more memory).
learning_rate (:obj:`float`, `optional`, defaults to 5e-5):
The initial learning rate for Adam.
weight_decay (:obj:`float`, `optional`, defaults to 0):
The weight decay to apply (if not zero).
adam_beta1 (:obj:`float`, `optional`, defaults to 0.9):
The beta1 for the Adam optimizer.
adam_beta2 (:obj:`float`, `optional`, defaults to 0.999):
The beta2 for the Adam optimizer.
adam_epsilon (:obj:`float`, `optional`, defaults to 1e-8):
Epsilon for the Adam optimizer.
max_grad_norm (:obj:`float`, `optional`, defaults to 1.0):
Maximum gradient norm (for gradient clipping).
num_train_epochs(:obj:`float`, `optional`, defaults to 3.0):
Total number of training epochs to perform (if not an integer, the fractional part is treated as the
fraction of the last epoch to perform before stopping training).
max_steps (:obj:`int`, `optional`, defaults to -1):
If set to a positive number, the total number of training steps to perform. Overrides
:obj:`num_train_epochs`.
warmup_steps (:obj:`int`, `optional`, defaults to 0):
Number of steps used for a linear warmup from 0 to :obj:`learning_rate`.
logging_dir (:obj:`str`, `optional`):
Tensorboard log directory. Will default to `runs/**CURRENT_DATETIME_HOSTNAME**`.
logging_first_step (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether to log and evaluate the first :obj:`global_step` or not.
logging_steps (:obj:`int`, `optional`, defaults to 500):
Number of update steps between two logs.
save_steps (:obj:`int`, `optional`, defaults to 500):
Number of update steps between two checkpoint saves.
save_total_limit (:obj:`int`, `optional`):
If a value is passed, will limit the total amount of checkpoints. Deletes the older checkpoints in
:obj:`output_dir`.
no_cuda (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether to avoid using CUDA even when it is available.
seed (:obj:`int`, `optional`, defaults to 42):
Random seed for initialization.
fp16 (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether to use 16-bit (mixed) precision training (through NVIDIA apex) instead of 32-bit training.
fp16_opt_level (:obj:`str`, `optional`, defaults to 'O1'):
For :obj:`fp16` training, apex AMP optimization level selected in ['O0', 'O1', 'O2', and 'O3']. See details
on the `apex documentation <https://nvidia.github.io/apex/amp.html>`__.
local_rank (:obj:`int`, `optional`, defaults to -1):
During distributed training, the rank of the process.
tpu_num_cores (:obj:`int`, `optional`):
When training on TPU, the number of TPU cores (automatically passed by launcher script).
debug (:obj:`bool`, `optional`, defaults to :obj:`False`):
When training on TPU, whether to print debug metrics or not.
dataloader_drop_last (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether to drop the last incomplete batch (if the length of the dataset is not divisible by the batch size)
or not.
eval_steps (:obj:`int`, `optional`):
Number of update steps between two evaluations if :obj:`evaluation_strategy="steps"`. Will default to the
same value as :obj:`logging_steps` if not set.
dataloader_num_workers (:obj:`int`, `optional`, defaults to 0):
Number of subprocesses to use for data loading (PyTorch only). 0 means that the data will be loaded in the
main process.
past_index (:obj:`int`, `optional`, defaults to -1):
Some models like :doc:`TransformerXL <../model_doc/transformerxl>` or :doc:`XLNet <../model_doc/xlnet>` can
make use of the past hidden states for their predictions. If this argument is set to a positive int, the
``Trainer`` will use the corresponding output (usually index 2) as the past state and feed it to the model
at the next training step under the keyword argument ``mems``.
run_name (:obj:`str`, `optional`):
A descriptor for the run. Notably used for wandb logging.
disable_tqdm (:obj:`bool`, `optional`):
Whether or not to disable the tqdm progress bars. Will default to :obj:`True` if the logging level is set
to warn or lower (default), :obj:`False` otherwise.
remove_unused_columns (:obj:`bool`, `optional`, defaults to :obj:`True`):
If using `nlp.Dataset` datasets, whether or not to automatically remove the columns unused by the model
forward method.
(Note that this behavior is not implemented for :class:`~transformers.TFTrainer` yet.)
label_names (:obj:`List[str]`, `optional`):
The list of keys in your dictionary of inputs that correspond to the labels.
Will eventually default to :obj:`["labels"]` except if the model used is one of the
:obj:`XxxForQuestionAnswering` in which case it will default to :obj:`["start_positions",
"end_positions"]`.
load_best_model_at_end (:obj:`bool`, `optional`, defaults to :obj:`False`):
Whether or not to load the best model found during training at the end of training.
.. note::
When set to :obj:`True`, the parameter :obj:`save_steps` will be ignored and the model will be saved
after each evaluation.
metric_for_best_model (:obj:`str`, `optional`):
Use in conjunction with :obj:`load_best_model_at_end` to specify the metric to use to compare two different
models. Must be the name of a metric returned by the evaluation with or without the prefix :obj:`"eval_"`.
Will default to :obj:`"loss"` if unspecified and :obj:`load_best_model_at_end=True` (to use the evaluation
loss).
If you set this value, :obj:`greater_is_better` will default to :obj:`True`. Don't forget to set it to
:obj:`False` if your metric is better when lower.
greater_is_better (:obj:`bool`, `optional`):
Use in conjunction with :obj:`load_best_model_at_end` and :obj:`metric_for_best_model` to specify if better
models should have a greater metric or not. Will default to:
- :obj:`True` if :obj:`metric_for_best_model` is set to a value that isn't :obj:`"loss"` or
:obj:`"eval_loss"`.
- :obj:`False` if :obj:`metric_for_best_model` is not set, or set to :obj:`"loss"` or :obj:`"eval_loss"`.
model_parallel (:obj:`bool`, `optional`, defaults to :obj:`False`):
If there are more than one devices, whether to use model parallelism to distribute the model's modules
across devices or not.
ignore_data_skip (:obj:`bool`, `optional`, defaults to :obj:`False`):
When resuming training, whether or not to skip the epochs and batches to get the data loading at the same
stage as in the previous training. If set to :obj:`True`, the training will begin faster (as that skipping
step can take a long time) but will not yield the same results as the interrupted training would have.
"""
output_dir: str = field(
metadata={"help": "The output directory where the model predictions and checkpoints will be written."}
)
overwrite_output_dir: bool = field(
default=False,
metadata={
"help": (
"Overwrite the content of the output directory."
"Use this to continue training if output_dir points to a checkpoint directory."
)
},
)
do_train: bool = field(default=False, metadata={"help": "Whether to run training."})
do_eval: bool = field(default=None, metadata={"help": "Whether to run eval on the dev set."})
do_predict: bool = field(default=False, metadata={"help": "Whether to run predictions on the test set."})
|
model_parallel: bool = field(
default=False,
metadata={
"help": (
"If there are more than one devices, whether to use model parallelism to distribute the "
"model's modules across devices."
)
},
)
evaluation_strategy: EvaluationStrategy = field(
default="no",
metadata={"help": "Run evaluation during training at each logging step."},
)
prediction_loss_only: bool = field(
default=False,
metadata={"help": "When performing evaluation and predictions, only returns the loss."},
)
per_device_train_batch_size: int = field(
default=8, metadata={"help": "Batch size per GPU/TPU core/CPU for training."}
)
per_device_eval_batch_size: int = field(
default=8, metadata={"help": "Batch size per GPU/TPU core/CPU for evaluation."}
)
per_gpu_train_batch_size: Optional[int] = field(
default=None,
metadata={
"help": "Deprecated, the use of `--per_device_train_batch_size` is preferred. "
"Batch size per GPU/TPU core/CPU for training."
},
)
per_gpu_eval_batch_size: Optional[int] = field(
default=None,
metadata={
"help": "Deprecated, the use of `--per_device_eval_batch_size` is preferred."
"Batch size per GPU/TPU core/CPU for evaluation."
},
)
gradient_accumulation_steps: int = field(
default=1,
metadata={"help": "Number of updates steps to accumulate before performing a backward/update pass."},
)
eval_accumulation_steps: Optional[int] = field(
default=None,
metadata={"help": "Number of predictions steps to accumulate before moving the tensors to the CPU."},
)
learning_rate: float = field(default=5e-5, metadata={"help": "The initial learning rate for Adam."})
weight_decay: float = field(default=0.0, metadata={"help": "Weight decay if we apply some."})
adam_beta1: float = field(default=0.9, metadata={"help": "Beta1 for Adam optimizer"})
adam_beta2: float = field(default=0.999, metadata={"help": "Beta2 for Adam optimizer"})
adam_epsilon: float = field(default=1e-8, metadata={"help": "Epsilon for Adam optimizer."})
max_grad_norm: float = field(default=1.0, metadata={"help": "Max gradient norm."})
num_train_epochs: float = field(default=3.0, metadata={"help": "Total number of training epochs to perform."})
max_steps: int = field(
default=-1,
metadata={"help": "If > 0: set total number of training steps to perform. Override num_train_epochs."},
)
warmup_steps: int = field(default=0, metadata={"help": "Linear warmup over warmup_steps."})
logging_dir: Optional[str] = field(default_factory=default_logdir, metadata={"help": "Tensorboard log dir."})
logging_first_step: bool = field(default=False, metadata={"help": "Log the first global_step"})
logging_steps: int = field(default=500, metadata={"help": "Log every X updates steps."})
save_steps: int = field(default=500, metadata={"help": "Save checkpoint every X updates steps."})
save_total_limit: Optional[int] = field(
default=None,
metadata={
"help": (
"Limit the total amount of checkpoints."
"Deletes the older checkpoints in the output_dir. Default is unlimited checkpoints"
)
},
)
no_cuda: bool = field(default=False, metadata={"help": "Do not use CUDA even when it is available"})
seed: int = field(default=42, metadata={"help": "random seed for initialization"})
fp16: bool = field(
default=False,
metadata={"help": "Whether to use 16-bit (mixed) precision (through NVIDIA apex) instead of 32-bit"},
)
fp16_opt_level: str = field(
default="O1",
metadata={
"help": (
"For fp16: Apex AMP optimization level selected in ['O0', 'O1', 'O2', and 'O3']."
"See details at https://nvidia.github.io/apex/amp.html"
)
},
)
local_rank: int = field(default=-1, metadata={"help": "For distributed training: local_rank"})
tpu_num_cores: Optional[int] = field(
default=None, metadata={"help": "TPU: Number of TPU cores (automatically passed by launcher script)"}
)
tpu_metrics_debug: bool = field(
default=False,
metadata={"help": "Deprecated, the use of `--debug` is preferred. TPU: Whether to print debug metrics"},
)
debug: bool = field(default=False, metadata={"help": "Whether to print debug metrics on TPU"})
dataloader_drop_last: bool = field(
default=False, metadata={"help": "Drop the last incomplete batch if it is not divisible by the batch size."}
)
eval_steps: Optional[int] = field(default=None, metadata={"help": "Run an evaluation every X steps."})
dataloader_num_workers: int = field(
default=0,
metadata={
"help": "Number of subprocesses to use for data loading (PyTorch only). 0 means that the data will be loaded in the main process."
},
)
past_index: int = field(
default=-1,
metadata={"help": "If >=0, uses the corresponding part of the output as the past state for next step."},
)
run_name: Optional[str] = field(
default=None, metadata={"help": "An optional descriptor for the run. Notably used for wandb logging."}
)
disable_tqdm: Optional[bool] = field(
default=None, metadata={"help": "Whether or not to disable the tqdm progress bars."}
)
remove_unused_columns: Optional[bool] = field(
default=True, metadata={"help": "Remove columns not required by the model when using an nlp.Dataset."}
)
label_names: Optional[List[str]] = field(
default=None, metadata={"help": "The list of keys in your dictionary of inputs that correspond to the labels."}
)
load_best_model_at_end: Optional[bool] = field(
default=False,
metadata={"help": "Whether or not to load the best model found during training at the end of training."},
)
metric_for_best_model: Optional[str] = field(
default=None, metadata={"help": "The metric to use to compare two different models."}
)
greater_is_better: Optional[bool] = field(
default=None, metadata={"help": "Whether the `metric_for_best_model` should be maximized or not."}
)
ignore_data_skip: bool = field(
default=False,
metadata={
"help": "When resuming training, whether or not to skip the first epochs and batches to get to the same training data."
},
)
def __post_init__(self):
if self.disable_tqdm is None:
self.disable_tqdm = logger.getEffectiveLevel() > logging.WARN
self.evaluation_strategy = EvaluationStrategy(self.evaluation_strategy)
if self.do_eval is False and self.evaluation_strategy != EvaluationStrategy.NO:
self.do_eval = True
if self.eval_steps is None:
self.eval_steps = self.logging_steps
if self.load_best_model_at_end and self.metric_for_best_model is None:
self.metric_for_best_model = "loss"
if self.greater_is_better is None and self.metric_for_best_model is not None:
self.greater_is_better = self.metric_for_best_model not in ["loss", "eval_loss"]
if self.run_name is None:
self.run_name = self.output_dir
if is_torch_available() and self.device.type != "cuda" and self.fp16:
raise ValueError("AMP (`--fp16`) can only be used on CUDA devices.")
@property
def train_batch_size(self) -> int:
"""
The actual batch size for training (may differ from :obj:`per_gpu_train_batch_size` in distributed training).
"""
if self.per_gpu_train_batch_size:
logger.warning(
"Using deprecated `--per_gpu_train_batch_size` argument which will be removed in a future "
"version. Using `--per_device_train_batch_size` is preferred."
)
per_device_batch_size = self.per_gpu_train_batch_size or self.per_device_train_batch_size
if not self.model_parallel:
train_batch_size = per_device_batch_size * max(1, self.n_gpu)
else:
train_batch_size = per_device_batch_size
return train_batch_size
@property
def eval_batch_size(self) -> int:
"""
The actual batch size for evaluation (may differ from :obj:`per_gpu_eval_batch_size` in distributed training).
"""
if self.per_gpu_eval_batch_size:
logger.warning(
"Using deprecated `--per_gpu_eval_batch_size` argument which will be removed in a future "
"version. Using `--per_device_eval_batch_size` is preferred."
)
per_device_batch_size = self.per_gpu_eval_batch_size or self.per_device_eval_batch_size
if not self.model_parallel:
eval_batch_size = per_device_batch_size * max(1, self.n_gpu)
else:
eval_batch_size = per_device_batch_size
return eval_batch_size
@cached_property
@torch_required
def _setup_devices(self) -> Tuple["torch.device", int]:
logger.info("PyTorch: setting up devices")
if self.no_cuda:
device = torch.device("cpu")
n_gpu = 0
elif is_torch_tpu_available():
device = xm.xla_device()
n_gpu = 0
elif self.local_rank == -1:
# if n_gpu is > 1 we'll use nn.DataParallel.
# If you only want to use a specific subset of GPUs use `CUDA_VISIBLE_DEVICES=0`
# Explicitly set CUDA to the first (index 0) CUDA device, otherwise `set_device` will
# trigger an error that a device index is missing. Index 0 takes into account the
# GPUs available in the environment, so `CUDA_VISIBLE_DEVICES=1,2` with `cuda:0`
# will use the first GPU in that env, i.e. GPU#1
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
n_gpu = torch.cuda.device_count()
else:
# Here, we'll use torch.distributed.
# Initializes the distributed backend which will take care of synchronizing nodes/GPUs
torch.distributed.init_process_group(backend="nccl")
device = torch.device("cuda", self.local_rank)
n_gpu = 1
if device.type == "cuda":
torch.cuda.set_device(device)
return device, n_gpu
@property
@torch_required
def device(self) -> "torch.device":
"""
The device used by this process.
"""
return self._setup_devices[0]
@property
@torch_required
def n_gpu(self):
"""
The number of GPUs used by this process.
Note:
This will only be greater than one when you have multiple GPUs available but are not using distributed
training. For distributed training, it will always be 1.
"""
return self._setup_devices[1]
@property
@torch_required
def parallel_mode(self):
"""
The current mode used for parallelism if multiple GPUs/TPU cores are available. One of:
- :obj:`ParallelMode.NOT_PARALLEL`: no parallelism (CPU or one GPU).
- :obj:`ParallelMode.NOT_DISTRIBUTED`: several GPUs in one single process (uses :obj:`torch.nn.DataParallel`).
- :obj:`ParallelMode.DISTRIBUTED`: several GPUs, each having its own process (uses
:obj:`torch.nn.DistributedDataParallel`).
- :obj:`ParallelMode.TPU`: several TPU cores.
"""
if is_torch_tpu_available():
return ParallelMode.TPU
elif self.local_rank != -1:
return ParallelMode.DISTRIBUTED
elif self.n_gpu > 1:
return ParallelMode.NOT_DISTRIBUTED
else:
return ParallelMode.NOT_PARALLEL
def to_dict(self):
"""
Serializes this instance while replacing `Enum` fields by their values (for JSON serialization support).
"""
d = dataclasses.asdict(self)
for k, v in d.items():
if isinstance(v, Enum):
d[k] = v.value
return d
def to_json_string(self):
"""
Serializes this instance to a JSON string.
"""
return json.dumps(self.to_dict(), indent=2)
def to_sanitized_dict(self) -> Dict[str, Any]:
"""
Sanitized serialization to use with TensorBoard’s hparams
"""
d = self.to_dict()
d = {**d, **{"train_batch_size": self.train_batch_size, "eval_batch_size": self.eval_batch_size}}
valid_types = [bool, int, float, str]
if is_torch_available():
valid_types.append(torch.Tensor)
return {k: v if type(v) in valid_types else str(v) for k, v in d.items()}
class ParallelMode(Enum):
NOT_PARALLEL = "not_parallel"
NOT_DISTRIBUTED = "not_distributed"
DISTRIBUTED = "distributed"
TPU = "tpu"
| |
polyfill-f2b578758e093ca339ba.js
|
(self.webpackChunkwooffet_labs_site=self.webpackChunkwooffet_labs_site||[]).push([[920],{609:function(t,e,r){!function(){var t="undefined"!=typeof globalThis?globalThis:"undefined"!=typeof window?window:void 0!==r.g?r.g:"undefined"!=typeof self?self:{};function e(t,e,r){return t(r={path:e,exports:{},require:function(t,e){return function(){throw new Error("Dynamic requires are not currently supported by @rollup/plugin-commonjs")}()}},r.exports),r.exports}var n,o,i=function(t){return t&&t.Math==Math&&t},a=i("object"==typeof globalThis&&globalThis)||i("object"==typeof window&&window)||i("object"==typeof self&&self)||i("object"==typeof t&&t)||function(){return this}()||Function("return this")(),c=function(t){try{return!!t()}catch(t){return!0}},u=!c((function(){return 7!=Object.defineProperty({},1,{get:function(){return 7}})[1]})),s={}.propertyIsEnumerable,f=Object.getOwnPropertyDescriptor,l={f:f&&!s.call({1:2},1)?function(t){var e=f(this,t);return!!e&&e.enumerable}:s},h=function(t,e){return{enumerable:!(1&t),configurable:!(2&t),writable:!(4&t),value:e}},p={}.toString,d=function(t){return p.call(t).slice(8,-1)},v="".split,g=c((function(){return!Object("z").propertyIsEnumerable(0)}))?function(t){return"String"==d(t)?v.call(t,""):Object(t)}:Object,y=function(t){if(null==t)throw TypeError("Can't call method on "+t);return t},b=function(t){return g(y(t))},m=function(t){return"object"==typeof t?null!==t:"function"==typeof t},w=function(t){return"function"==typeof t?t:void 0},E=function(t,e){return arguments.length<2?w(a[t]):a[t]&&a[t][e]},S=E("navigator","userAgent")||"",O=a.process,T=a.Deno,R=O&&O.versions||T&&T.version,x=R&&R.v8;x?o=(n=x.split("."))[0]<4?1:n[0]+n[1]:S&&(!(n=S.match(/Edge\/(\d+)/))||n[1]>=74)&&(n=S.match(/Chrome\/(\d+)/))&&(o=n[1]);var A=o&&+o,I=!!Object.getOwnPropertySymbols&&!c((function(){var t=Symbol();return!String(t)||!(Object(t)instanceof Symbol)||!Symbol.sham&&A&&A<41})),j=I&&!Symbol.sham&&"symbol"==typeof Symbol.iterator,_=j?function(t){return"symbol"==typeof t}:function(t){var e=E("Symbol");return"function"==typeof e&&Object(t)instanceof e},P=!1,M=function(t,e){try{Object.defineProperty(a,t,{value:e,configurable:!0,writable:!0})}catch(n){a[t]=e}return e},N="__core-js_shared__",k=a[N]||M(N,{}),U=e((function(t){(t.exports=function(t,e){return k[t]||(k[t]=void 0!==e?e:{})})("versions",[]).push({version:"3.16.0",mode:"global",copyright:"© 2021 Denis Pushkarev (zloirock.ru)"})})),L=function(t){return Object(y(t))},D={}.hasOwnProperty,C=Object.hasOwn||function(t,e){return D.call(L(t),e)},F=0,B=Math.random(),W=function(t){return"Symbol("+String(void 0===t?"":t)+")_"+(++F+B).toString(36)},z=U("wks"),G=a.Symbol,K=j?G:G&&G.withoutSetter||W,$=function(t){return C(z,t)&&(I||"string"==typeof z[t])||(z[t]=I&&C(G,t)?G[t]:K("Symbol."+t)),z[t]},q=$("toPrimitive"),V=function(t,e){if(!m(t)||_(t))return t;var r,n=t[q];if(void 0!==n){if(void 0===e&&(e="default"),r=n.call(t,e),!m(r)||_(r))return r;throw TypeError("Can't convert object to primitive value")}return void 0===e&&(e="number"),function(t,e){var r,n;if("string"===e&&"function"==typeof(r=t.toString)&&!m(n=r.call(t)))return n;if("function"==typeof(r=t.valueOf)&&!m(n=r.call(t)))return n;if("string"!==e&&"function"==typeof(r=t.toString)&&!m(n=r.call(t)))return n;throw TypeError("Can't convert object to primitive value")}(t,e)},H=function(t){var e=V(t,"string");return _(e)?e:String(e)},Y=a.document,X=m(Y)&&m(Y.createElement),J=function(t){return X?Y.createElement(t):{}},Q=!u&&!c((function(){return 
7!=Object.defineProperty(J("div"),"a",{get:function(){return 7}}).a})),Z=Object.getOwnPropertyDescriptor,tt={f:u?Z:function(t,e){if(t=b(t),e=H(e),Q)try{return Z(t,e)}catch(t){}if(C(t,e))return h(!l.f.call(t,e),t[e])}},et=function(t){if(!m(t))throw TypeError(String(t)+" is not an object");return t},rt=Object.defineProperty,nt={f:u?rt:function(t,e,r){if(et(t),e=H(e),et(r),Q)try{return rt(t,e,r)}catch(t){}if("get"in r||"set"in r)throw TypeError("Accessors not supported");return"value"in r&&(t[e]=r.value),t}},ot=u?function(t,e,r){return nt.f(t,e,h(1,r))}:function(t,e,r){return t[e]=r,t},it=Function.toString;"function"!=typeof k.inspectSource&&(k.inspectSource=function(t){return it.call(t)});var at,ct,ut,st=k.inspectSource,ft=a.WeakMap,lt="function"==typeof ft&&/native code/.test(st(ft)),ht=U("keys"),pt=function(t){return ht[t]||(ht[t]=W(t))},dt={},vt="Object already initialized";if(lt||k.state){var gt=k.state||(k.state=new(0,a.WeakMap)),yt=gt.get,bt=gt.has,mt=gt.set;at=function(t,e){if(bt.call(gt,t))throw new TypeError(vt);return e.facade=t,mt.call(gt,t,e),e},ct=function(t){return yt.call(gt,t)||{}},ut=function(t){return bt.call(gt,t)}}else{var wt=pt("state");dt[wt]=!0,at=function(t,e){if(C(t,wt))throw new TypeError(vt);return e.facade=t,ot(t,wt,e),e},ct=function(t){return C(t,wt)?t[wt]:{}},ut=function(t){return C(t,wt)}}var Et,St={set:at,get:ct,has:ut,enforce:function(t){return ut(t)?ct(t):at(t,{})},getterFor:function(t){return function(e){var r;if(!m(e)||(r=ct(e)).type!==t)throw TypeError("Incompatible receiver, "+t+" required");return r}}},Ot=e((function(t){var e=St.get,r=St.enforce,n=String(String).split("String");(t.exports=function(t,e,o,i){var c,u=!!i&&!!i.unsafe,s=!!i&&!!i.enumerable,f=!!i&&!!i.noTargetGet;"function"==typeof o&&("string"!=typeof e||C(o,"name")||ot(o,"name",e),(c=r(o)).source||(c.source=n.join("string"==typeof e?e:""))),t!==a?(u?!f&&t[e]&&(s=!0):delete t[e],s?t[e]=o:ot(t,e,o)):s?t[e]=o:M(e,o)})(Function.prototype,"toString",(function(){return"function"==typeof this&&e(this).source||st(this)}))})),Tt=Math.ceil,Rt=Math.floor,xt=function(t){return isNaN(t=+t)?0:(t>0?Rt:Tt)(t)},At=Math.min,It=function(t){return t>0?At(xt(t),9007199254740991):0},jt=Math.max,_t=Math.min,Pt=function(t,e){var r=xt(t);return r<0?jt(r+e,0):_t(r,e)},Mt=function(t){return function(e,r,n){var o,i=b(e),a=It(i.length),c=Pt(n,a);if(t&&r!=r){for(;a>c;)if((o=i[c++])!=o)return!0}else for(;a>c;c++)if((t||c in i)&&i[c]===r)return t||c||0;return!t&&-1}},Nt={includes:Mt(!0),indexOf:Mt(!1)},kt=Nt.indexOf,Ut=function(t,e){var r,n=b(t),o=0,i=[];for(r in n)!C(dt,r)&&C(n,r)&&i.push(r);for(;e.length>o;)C(n,r=e[o++])&&(~kt(i,r)||i.push(r));return i},Lt=["constructor","hasOwnProperty","isPrototypeOf","propertyIsEnumerable","toLocaleString","toString","valueOf"],Dt=Lt.concat("length","prototype"),Ct={f:Object.getOwnPropertyNames||function(t){return Ut(t,Dt)}},Ft={f:Object.getOwnPropertySymbols},Bt=E("Reflect","ownKeys")||function(t){var e=Ct.f(et(t)),r=Ft.f;return r?e.concat(r(t)):e},Wt=function(t,e){for(var r=Bt(e),n=nt.f,o=tt.f,i=0;i<r.length;i++){var a=r[i];C(t,a)||n(t,a,o(e,a))}},zt=/#|\.prototype\./,Gt=function(t,e){var r=$t[Kt(t)];return r==Vt||r!=qt&&("function"==typeof e?c(e):!!e)},Kt=Gt.normalize=function(t){return String(t).replace(zt,".").toLowerCase()},$t=Gt.data={},qt=Gt.NATIVE="N",Vt=Gt.POLYFILL="P",Ht=Gt,Yt=tt.f,Xt=function(t,e){var r,n,o,i,c,u=t.target,s=t.global,f=t.stat;if(r=s?a:f?a[u]||M(u,{}):(a[u]||{}).prototype)for(n in 
e){if(i=e[n],o=t.noTargetGet?(c=Yt(r,n))&&c.value:r[n],!Ht(s?n:u+(f?".":"#")+n,t.forced)&&void 0!==o){if(typeof i==typeof o)continue;Wt(i,o)}(t.sham||o&&o.sham)&&ot(i,"sham",!0),Ot(r,n,i,t)}},Jt=Math.min,Qt=[].copyWithin||function(t,e){var r=L(this),n=It(r.length),o=Pt(t,n),i=Pt(e,n),a=arguments.length>2?arguments[2]:void 0,c=Jt((void 0===a?n:Pt(a,n))-i,n-o),u=1;for(i<o&&o<i+c&&(u=-1,i+=c-1,o+=c-1);c-- >0;)i in r?r[o]=r[i]:delete r[o],o+=u,i+=u;return r},Zt=Object.keys||function(t){return Ut(t,Lt)},te=u?Object.defineProperties:function(t,e){et(t);for(var r,n=Zt(e),o=n.length,i=0;o>i;)nt.f(t,r=n[i++],e[r]);return t},ee=E("document","documentElement"),re=pt("IE_PROTO"),ne=function(){},oe=function(t){return"<script>"+t+"<\/script>"},ie=function(t){t.write(oe("")),t.close();var e=t.parentWindow.Object;return t=null,e},ae=function(){try{Et=new ActiveXObject("htmlfile")}catch(t){}ae=document.domain&&Et?ie(Et):function(){var t,e=J("iframe");if(e.style)return e.style.display="none",ee.appendChild(e),e.src=String("javascript:"),(t=e.contentWindow.document).open(),t.write(oe("document.F=Object")),t.close(),t.F}()||ie(Et);for(var t=Lt.length;t--;)delete ae.prototype[Lt[t]];return ae()};dt[re]=!0;var ce=Object.create||function(t,e){var r;return null!==t?(ne.prototype=et(t),r=new ne,ne.prototype=null,r[re]=t):r=ae(),void 0===e?r:te(r,e)},ue=$("unscopables"),se=Array.prototype;null==se[ue]&&nt.f(se,ue,{configurable:!0,value:ce(null)});var fe=function(t){se[ue][t]=!0};Xt({target:"Array",proto:!0},{copyWithin:Qt}),fe("copyWithin");var le=function(t){if("function"!=typeof t)throw TypeError(String(t)+" is not a function");return t},he=function(t,e,r){if(le(t),void 0===e)return t;switch(r){case 0:return function(){return t.call(e)};case 1:return function(r){return t.call(e,r)};case 2:return function(r,n){return t.call(e,r,n)};case 3:return function(r,n,o){return t.call(e,r,n,o)}}return function(){return t.apply(e,arguments)}},pe=Function.call,de=function(t,e,r){return he(pe,a[t].prototype[e],r)};de("Array","copyWithin"),Xt({target:"Array",proto:!0},{fill:function(t){for(var e=L(this),r=It(e.length),n=arguments.length,o=Pt(n>1?arguments[1]:void 0,r),i=n>2?arguments[2]:void 0,a=void 0===i?r:Pt(i,r);a>o;)e[o++]=t;return e}}),fe("fill"),de("Array","fill");var ve=Array.isArray||function(t){return"Array"==d(t)},ge=$("species"),ye=function(t,e){return new(function(t){var e;return ve(t)&&("function"!=typeof(e=t.constructor)||e!==Array&&!ve(e.prototype)?m(e)&&null===(e=e[ge])&&(e=void 0):e=void 0),void 0===e?Array:e}(t))(0===e?0:e)},be=[].push,me=function(t){var e=1==t,r=2==t,n=3==t,o=4==t,i=6==t,a=7==t,c=5==t||i;return function(u,s,f,l){for(var h,p,d=L(u),v=g(d),y=he(s,f,3),b=It(v.length),m=0,w=l||ye,E=e?w(u,b):r||a?w(u,0):void 0;b>m;m++)if((c||m in v)&&(p=y(h=v[m],m,d),t))if(e)E[m]=p;else if(p)switch(t){case 3:return!0;case 5:return h;case 6:return m;case 2:be.call(E,h)}else switch(t){case 4:return!1;case 7:be.call(E,h)}return i?-1:n||o?o:E}},we={forEach:me(0),map:me(1),filter:me(2),some:me(3),every:me(4),find:me(5),findIndex:me(6),filterReject:me(7)},Ee=we.find,Se="find",Oe=!0;Se in[]&&Array(1).find((function(){Oe=!1})),Xt({target:"Array",proto:!0,forced:Oe},{find:function(t){return Ee(this,t,arguments.length>1?arguments[1]:void 0)}}),fe(Se),de("Array","find");var Te=we.findIndex,Re="findIndex",xe=!0;Re in[]&&Array(1).findIndex((function(){xe=!1})),Xt({target:"Array",proto:!0,forced:xe},{findIndex:function(t){return Te(this,t,arguments.length>1?arguments[1]:void 0)}}),fe(Re),de("Array","findIndex");var Ae=function 
t(e,r,n,o,i,a,c,u){for(var s,f=i,l=0,h=!!c&&he(c,u,3);l<o;){if(l in n){if(s=h?h(n[l],l,r):n[l],a>0&&ve(s))f=t(e,r,s,It(s.length),f,a-1)-1;else{if(f>=9007199254740991)throw TypeError("Exceed the acceptable array length");e[f]=s}f++}l++}return f};Xt({target:"Array",proto:!0},{flatMap:function(t){var e,r=L(this),n=It(r.length);return le(t),(e=ye(r,0)).length=Ae(e,r,r,n,0,1,t,arguments.length>1?arguments[1]:void 0),e}}),fe("flatMap"),de("Array","flatMap"),Xt({target:"Array",proto:!0},{flat:function(){var t=arguments.length?arguments[0]:void 0,e=L(this),r=It(e.length),n=ye(e,0);return n.length=Ae(n,e,e,r,0,void 0===t?1:xt(t)),n}}),fe("flat"),de("Array","flat");var Ie,je,_e,Pe=function(t){if(_(t))throw TypeError("Cannot convert a Symbol value to a string");return String(t)},Me=function(t){return function(e,r){var n,o,i=Pe(y(e)),a=xt(r),c=i.length;return a<0||a>=c?t?"":void 0:(n=i.charCodeAt(a))<55296||n>56319||a+1===c||(o=i.charCodeAt(a+1))<56320||o>57343?t?i.charAt(a):n:t?i.slice(a,a+2):o-56320+(n-55296<<10)+65536}},Ne={codeAt:Me(!1),charAt:Me(!0)},ke=!c((function(){function t(){}return t.prototype.constructor=null,Object.getPrototypeOf(new t)!==t.prototype})),Ue=pt("IE_PROTO"),Le=Object.prototype,De=ke?Object.getPrototypeOf:function(t){return t=L(t),C(t,Ue)?t[Ue]:"function"==typeof t.constructor&&t instanceof t.constructor?t.constructor.prototype:t instanceof Object?Le:null},Ce=$("iterator"),Fe=!1;[].keys&&("next"in(_e=[].keys())?(je=De(De(_e)))!==Object.prototype&&(Ie=je):Fe=!0),(null==Ie||c((function(){var t={};return Ie[Ce].call(t)!==t})))&&(Ie={}),C(Ie,Ce)||ot(Ie,Ce,(function(){return this}));var Be={IteratorPrototype:Ie,BUGGY_SAFARI_ITERATORS:Fe},We=nt.f,ze=$("toStringTag"),Ge=function(t,e,r){t&&!C(t=r?t:t.prototype,ze)&&We(t,ze,{configurable:!0,value:e})},Ke={},$e=Be.IteratorPrototype,qe=function(){return this},Ve=function(t){if(!m(t)&&null!==t)throw TypeError("Can't set "+String(t)+" as a prototype");return t},He=Object.setPrototypeOf||("__proto__"in{}?function(){var t,e=!1,r={};try{(t=Object.getOwnPropertyDescriptor(Object.prototype,"__proto__").set).call(r,[]),e=r instanceof Array}catch(t){}return function(r,n){return et(r),Ve(n),e?t.call(r,n):r.__proto__=n,r}}():void 0),Ye=Be.IteratorPrototype,Xe=Be.BUGGY_SAFARI_ITERATORS,Je=$("iterator"),Qe="keys",Ze="values",tr="entries",er=function(){return this},rr=function(t,e,r,n,o,i,a){!function(t,e,r){var n=e+" Iterator";t.prototype=ce($e,{next:h(1,r)}),Ge(t,n,!1),Ke[n]=qe}(r,e,n);var c,u,s,f=function(t){if(t===o&&g)return g;if(!Xe&&t in d)return d[t];switch(t){case Qe:case Ze:case tr:return function(){return new r(this,t)}}return function(){return new r(this)}},l=e+" Iterator",p=!1,d=t.prototype,v=d[Je]||d["@@iterator"]||o&&d[o],g=!Xe&&v||f(o),y="Array"==e&&d.entries||v;if(y&&(c=De(y.call(new t)),Ye!==Object.prototype&&c.next&&(De(c)!==Ye&&(He?He(c,Ye):"function"!=typeof c[Je]&&ot(c,Je,er)),Ge(c,l,!0))),o==Ze&&v&&v.name!==Ze&&(p=!0,g=function(){return v.call(this)}),d[Je]!==g&&ot(d,Je,g),Ke[e]=g,o)if(u={values:f(Ze),keys:i?g:f(Qe),entries:f(tr)},a)for(s in u)(Xe||p||!(s in d))&&Ot(d,s,u[s]);else Xt({target:e,proto:!0,forced:Xe||p},u);return u},nr=Ne.charAt,or="String Iterator",ir=St.set,ar=St.getterFor(or);rr(String,"String",(function(t){ir(this,{type:or,string:Pe(t),index:0})}),(function(){var t,e=ar(this),r=e.string,n=e.index;return n>=r.length?{value:void 0,done:!0}:(t=nr(r,n),e.index+=t.length,{value:t,done:!1})}));var cr=function(t){var e=t.return;if(void 0!==e)return et(e.call(t)).value},ur=function(t,e,r,n){try{return 
n?e(et(r)[0],r[1]):e(r)}catch(e){throw cr(t),e}},sr=$("iterator"),fr=Array.prototype,lr=function(t){return void 0!==t&&(Ke.Array===t||fr[sr]===t)},hr=function(t,e,r){var n=H(e);n in t?nt.f(t,n,h(0,r)):t[n]=r},pr={};pr[$("toStringTag")]="z";var dr="[object z]"===String(pr),vr=$("toStringTag"),gr="Arguments"==d(function(){return arguments}()),yr=dr?d:function(t){var e,r,n;return void 0===t?"Undefined":null===t?"Null":"string"==typeof(r=function(t,e){try{return t[e]}catch(t){}}(e=Object(t),vr))?r:gr?d(e):"Object"==(n=d(e))&&"function"==typeof e.callee?"Arguments":n},br=$("iterator"),mr=function(t){if(null!=t)return t[br]||t["@@iterator"]||Ke[yr(t)]},wr=$("iterator"),Er=!1;try{var Sr=0,Or={next:function(){return{done:!!Sr++}},return:function(){Er=!0}};Or[wr]=function(){return this},Array.from(Or,(function(){throw 2}))}catch(t){}var Tr=function(t,e){if(!e&&!Er)return!1;var r=!1;try{var n={};n[wr]=function(){return{next:function(){return{done:r=!0}}}},t(n)}catch(t){}return r},Rr=!Tr((function(t){Array.from(t)}));Xt({target:"Array",stat:!0,forced:Rr},{from:function(t){var e,r,n,o,i,a,c=L(t),u="function"==typeof this?this:Array,s=arguments.length,f=s>1?arguments[1]:void 0,l=void 0!==f,h=mr(c),p=0;if(l&&(f=he(f,s>2?arguments[2]:void 0,2)),null==h||u==Array&&lr(h))for(r=new u(e=It(c.length));e>p;p++)a=l?f(c[p],p):c[p],hr(r,p,a);else for(i=(o=h.call(c)).next,r=new u;!(n=i.call(o)).done;p++)a=l?ur(o,f,[n.value,p],!0):n.value,hr(r,p,a);return r.length=p,r}});var xr=a,Ar=Nt.includes;Xt({target:"Array",proto:!0},{includes:function(t){return Ar(this,t,arguments.length>1?arguments[1]:void 0)}}),fe("includes"),de("Array","includes");var Ir="Array Iterator",jr=St.set,_r=St.getterFor(Ir),Pr=rr(Array,"Array",(function(t,e){jr(this,{type:Ir,target:b(t),index:0,kind:e})}),(function(){var t=_r(this),e=t.target,r=t.kind,n=t.index++;return!e||n>=e.length?(t.target=void 0,{value:void 0,done:!0}):"keys"==r?{value:n,done:!1}:"values"==r?{value:e[n],done:!1}:{value:[n,e[n]],done:!1}}),"values");Ke.Arguments=Ke.Array,fe("keys"),fe("values"),fe("entries"),dr||Ot(Object.prototype,"toString",dr?{}.toString:function(){return"[object "+yr(this)+"]"},{unsafe:!0}),de("Array","values");var Mr=c((function(){function t(){}return!(Array.of.call(t)instanceof t)}));Xt({target:"Array",stat:!0,forced:Mr},{of:function(){for(var t=0,e=arguments.length,r=new("function"==typeof this?this:Array)(e);e>t;)hr(r,t,arguments[t++]);return r.length=e,r}});var Nr=$("hasInstance"),kr=Function.prototype;Nr in kr||nt.f(kr,Nr,{value:function(t){if("function"!=typeof this||!m(t))return!1;if(!m(this.prototype))return t instanceof this;for(;t=De(t);)if(this.prototype===t)return!0;return!1}}),$("hasInstance");var Ur=Function.prototype,Lr=Ur.toString,Dr=/^\s*function ([^ (]*)/,Cr="name";u&&!(Cr in Ur)&&(0,nt.f)(Ur,Cr,{configurable:!0,get:function(){try{return Lr.call(this).match(Dr)[1]}catch(t){return""}}});var Fr=Ct.f,Br={}.toString,Wr="object"==typeof window&&window&&Object.getOwnPropertyNames?Object.getOwnPropertyNames(window):[],zr={f:function(t){return Wr&&"[object Window]"==Br.call(t)?function(t){try{return Fr(t)}catch(t){return Wr.slice()}}(t):Fr(b(t))}},Gr=!c((function(){return Object.isExtensible(Object.preventExtensions({}))})),Kr=e((function(t){var e=nt.f,r=!1,n=W("meta"),o=0,i=Object.isExtensible||function(){return!0},a=function(t){e(t,n,{value:{objectID:"O"+o++,weakData:{}}})},c=t.exports={enable:function(){c.enable=function(){},r=!0;var t=Ct.f,e=[].splice,o={};o[n]=1,t(o).length&&(Ct.f=function(r){for(var 
o=t(r),i=0,a=o.length;i<a;i++)if(o[i]===n){e.call(o,i,1);break}return o},Xt({target:"Object",stat:!0,forced:!0},{getOwnPropertyNames:zr.f}))},fastKey:function(t,e){if(!m(t))return"symbol"==typeof t?t:("string"==typeof t?"S":"P")+t;if(!C(t,n)){if(!i(t))return"F";if(!e)return"E";a(t)}return t[n].objectID},getWeakData:function(t,e){if(!C(t,n)){if(!i(t))return!0;if(!e)return!1;a(t)}return t[n].weakData},onFreeze:function(t){return Gr&&r&&i(t)&&!C(t,n)&&a(t),t}};dt[n]=!0})),$r=function(t,e){this.stopped=t,this.result=e},qr=function(t,e,r){var n,o,i,a,c,u,s,f=!(!r||!r.AS_ENTRIES),l=!(!r||!r.IS_ITERATOR),h=!(!r||!r.INTERRUPTED),p=he(e,r&&r.that,1+f+h),d=function(t){return n&&cr(n),new $r(!0,t)},v=function(t){return f?(et(t),h?p(t[0],t[1],d):p(t[0],t[1])):h?p(t,d):p(t)};if(l)n=t;else{if("function"!=typeof(o=mr(t)))throw TypeError("Target is not iterable");if(lr(o)){for(i=0,a=It(t.length);a>i;i++)if((c=v(t[i]))&&c instanceof $r)return c;return new $r(!1)}n=o.call(t)}for(u=n.next;!(s=u.call(n)).done;){try{c=v(s.value)}catch(t){throw cr(n),t}if("object"==typeof c&&c&&c instanceof $r)return c}return new $r(!1)},Vr=function(t,e,r){if(!(t instanceof e))throw TypeError("Incorrect "+(r?r+" ":"")+"invocation");return t},Hr=function(t,e,r){var n,o;return He&&"function"==typeof(n=e.constructor)&&n!==r&&m(o=n.prototype)&&o!==r.prototype&&He(t,o),t},Yr=function(t,e,r){var n=-1!==t.indexOf("Map"),o=-1!==t.indexOf("Weak"),i=n?"set":"add",u=a[t],s=u&&u.prototype,f=u,l={},h=function(t){var e=s[t];Ot(s,t,"add"==t?function(t){return e.call(this,0===t?0:t),this}:"delete"==t?function(t){return!(o&&!m(t))&&e.call(this,0===t?0:t)}:"get"==t?function(t){return o&&!m(t)?void 0:e.call(this,0===t?0:t)}:"has"==t?function(t){return!(o&&!m(t))&&e.call(this,0===t?0:t)}:function(t,r){return e.call(this,0===t?0:t,r),this})};if(Ht(t,"function"!=typeof u||!(o||s.forEach&&!c((function(){(new u).entries().next()})))))f=r.getConstructor(e,t,n,i),Kr.enable();else if(Ht(t,!0)){var p=new f,d=p[i](o?{}:-0,1)!=p,v=c((function(){p.has(1)})),g=Tr((function(t){new u(t)})),y=!o&&c((function(){for(var t=new u,e=5;e--;)t[i](e,e);return!t.has(-0)}));g||((f=e((function(e,r){Vr(e,f,t);var o=Hr(new u,e,f);return null!=r&&qr(r,o[i],{that:o,AS_ENTRIES:n}),o}))).prototype=s,s.constructor=f),(v||y)&&(h("delete"),h("has"),n&&h("get")),(y||d)&&h(i),o&&s.clear&&delete s.clear}return l[t]=f,Xt({global:!0,forced:f!=u},l),Ge(f,t),o||r.setStrong(f,t,n),f},Xr=function(t,e,r){for(var n in e)Ot(t,n,e[n],r);return t},Jr=$("species"),Qr=function(t){var e=E(t);u&&e&&!e[Jr]&&(0,nt.f)(e,Jr,{configurable:!0,get:function(){return this}})},Zr=nt.f,tn=Kr.fastKey,en=St.set,rn=St.getterFor,nn={getConstructor:function(t,e,r,n){var o=t((function(t,i){Vr(t,o,e),en(t,{type:e,index:ce(null),first:void 0,last:void 0,size:0}),u||(t.size=0),null!=i&&qr(i,t[n],{that:t,AS_ENTRIES:r})})),i=rn(e),a=function(t,e,r){var n,o,a=i(t),s=c(t,e);return s?s.value=r:(a.last=s={index:o=tn(e,!0),key:e,value:r,previous:n=a.last,next:void 0,removed:!1},a.first||(a.first=s),n&&(n.next=s),u?a.size++:t.size++,"F"!==o&&(a.index[o]=s)),t},c=function(t,e){var r,n=i(t),o=tn(e);if("F"!==o)return n.index[o];for(r=n.first;r;r=r.next)if(r.key==e)return r};return Xr(o.prototype,{clear:function(){for(var t=i(this),e=t.index,r=t.first;r;)r.removed=!0,r.previous&&(r.previous=r.previous.next=void 0),delete e[r.index],r=r.next;t.first=t.last=void 0,u?t.size=0:this.size=0},delete:function(t){var e=this,r=i(e),n=c(e,t);if(n){var o=n.next,a=n.previous;delete 
r.index[n.index],n.removed=!0,a&&(a.next=o),o&&(o.previous=a),r.first==n&&(r.first=o),r.last==n&&(r.last=a),u?r.size--:e.size--}return!!n},forEach:function(t){for(var e,r=i(this),n=he(t,arguments.length>1?arguments[1]:void 0,3);e=e?e.next:r.first;)for(n(e.value,e.key,this);e&&e.removed;)e=e.previous},has:function(t){return!!c(this,t)}}),Xr(o.prototype,r?{get:function(t){var e=c(this,t);return e&&e.value},set:function(t,e){return a(this,0===t?0:t,e)}}:{add:function(t){return a(this,t=0===t?0:t,t)}}),u&&Zr(o.prototype,"size",{get:function(){return i(this).size}}),o},setStrong:function(t,e,r){var n=e+" Iterator",o=rn(e),i=rn(n);rr(t,e,(function(t,e){en(this,{type:n,target:t,state:o(t),kind:e,last:void 0})}),(function(){for(var t=i(this),e=t.kind,r=t.last;r&&r.removed;)r=r.previous;return t.target&&(t.last=r=r?r.next:t.state.first)?"keys"==e?{value:r.key,done:!1}:"values"==e?{value:r.value,done:!1}:{value:[r.key,r.value],done:!1}:(t.target=void 0,{value:void 0,done:!0})}),r?"entries":"values",!r,!0),Qr(e)}},on=Yr("Map",(function(t){return function(){return t(this,arguments.length?arguments[0]:void 0)}}),nn),an=function(t){var e,r,n,o,i=arguments.length,a=i>1?arguments[1]:void 0;return le(this),(e=void 0!==a)&&le(a),null==t?new this:(r=[],e?(n=0,o=he(a,i>2?arguments[2]:void 0,2),qr(t,(function(t){r.push(o(t,n++))}))):qr(t,r.push,{that:r}),new this(r))};Xt({target:"Map",stat:!0},{from:an});var cn=function(){for(var t=arguments.length,e=new Array(t);t--;)e[t]=arguments[t];return new this(e)};Xt({target:"Map",stat:!0},{of:cn});var un=function(){for(var t,e=et(this),r=le(e.delete),n=!0,o=0,i=arguments.length;o<i;o++)t=r.call(e,arguments[o]),n=n&&t;return!!n};Xt({target:"Map",proto:!0,real:!0,forced:P},{deleteAll:function(){return un.apply(this,arguments)}});var sn=function(t,e){var r=et(this),n=r.has(t)&&"update"in e?e.update(r.get(t),t,r):e.insert(t,r);return r.set(t,n),n};Xt({target:"Map",proto:!0,real:!0,forced:P},{emplace:sn});var fn=function(t){return Map.prototype.entries.call(t)};Xt({target:"Map",proto:!0,real:!0,forced:P},{every:function(t){var e=et(this),r=fn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3);return!qr(r,(function(t,r,o){if(!n(r,t,e))return o()}),{AS_ENTRIES:!0,IS_ITERATOR:!0,INTERRUPTED:!0}).stopped}});var ln=$("species"),hn=function(t,e){var r,n=et(t).constructor;return void 0===n||null==(r=et(n)[ln])?e:le(r)};Xt({target:"Map",proto:!0,real:!0,forced:P},{filter:function(t){var e=et(this),r=fn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3),o=new(hn(e,E("Map"))),i=le(o.set);return qr(r,(function(t,r){n(r,t,e)&&i.call(o,t,r)}),{AS_ENTRIES:!0,IS_ITERATOR:!0}),o}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{find:function(t){var e=et(this),r=fn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3);return qr(r,(function(t,r,o){if(n(r,t,e))return o(r)}),{AS_ENTRIES:!0,IS_ITERATOR:!0,INTERRUPTED:!0}).result}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{findKey:function(t){var e=et(this),r=fn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3);return qr(r,(function(t,r,o){if(n(r,t,e))return o(t)}),{AS_ENTRIES:!0,IS_ITERATOR:!0,INTERRUPTED:!0}).result}}),Xt({target:"Map",stat:!0},{groupBy:function(t,e){var r=new this;le(e);var n=le(r.has),o=le(r.get),i=le(r.set);return qr(t,(function(t){var a=e(t);n.call(r,a)?o.call(r,a).push(t):i.call(r,a,[t])})),r}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{includes:function(t){return qr(fn(et(this)),(function(e,r,n){if((o=r)===(i=t)||o!=o&&i!=i)return n();var 
o,i}),{AS_ENTRIES:!0,IS_ITERATOR:!0,INTERRUPTED:!0}).stopped}}),Xt({target:"Map",stat:!0},{keyBy:function(t,e){var r=new this;le(e);var n=le(r.set);return qr(t,(function(t){n.call(r,e(t),t)})),r}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{keyOf:function(t){return qr(fn(et(this)),(function(e,r,n){if(r===t)return n(e)}),{AS_ENTRIES:!0,IS_ITERATOR:!0,INTERRUPTED:!0}).result}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{mapKeys:function(t){var e=et(this),r=fn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3),o=new(hn(e,E("Map"))),i=le(o.set);return qr(r,(function(t,r){i.call(o,n(r,t,e),r)}),{AS_ENTRIES:!0,IS_ITERATOR:!0}),o}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{mapValues:function(t){var e=et(this),r=fn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3),o=new(hn(e,E("Map"))),i=le(o.set);return qr(r,(function(t,r){i.call(o,t,n(r,t,e))}),{AS_ENTRIES:!0,IS_ITERATOR:!0}),o}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{merge:function(t){for(var e=et(this),r=le(e.set),n=arguments.length,o=0;o<n;)qr(arguments[o++],r,{that:e,AS_ENTRIES:!0});return e}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{reduce:function(t){var e=et(this),r=fn(e),n=arguments.length<2,o=n?void 0:arguments[1];if(le(t),qr(r,(function(r,i){n?(n=!1,o=i):o=t(o,i,r,e)}),{AS_ENTRIES:!0,IS_ITERATOR:!0}),n)throw TypeError("Reduce of empty map with no initial value");return o}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{some:function(t){var e=et(this),r=fn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3);return qr(r,(function(t,r,o){if(n(r,t,e))return o()}),{AS_ENTRIES:!0,IS_ITERATOR:!0,INTERRUPTED:!0}).stopped}}),Xt({target:"Map",proto:!0,real:!0,forced:P},{update:function(t,e){var r=et(this),n=arguments.length;le(e);var o=r.has(t);if(!o&&n<3)throw TypeError("Updating absent value");var i=o?r.get(t):le(n>2?arguments[2]:void 0)(t,r);return r.set(t,e(i,t,r)),r}});var pn=function(t,e){var r,n=et(this),o=arguments.length>2?arguments[2]:void 0;if("function"!=typeof e&&"function"!=typeof o)throw TypeError("At least one callback required");return n.has(t)?(r=n.get(t),"function"==typeof e&&(r=e(r),n.set(t,r))):"function"==typeof o&&(r=o(),n.set(t,r)),r};Xt({target:"Map",proto:!0,real:!0,forced:P},{upsert:pn}),Xt({target:"Map",proto:!0,real:!0,forced:P},{updateOrInsert:pn});var dn=Yr("Set",(function(t){return function(){return t(this,arguments.length?arguments[0]:void 0)}}),nn);Xt({target:"Set",stat:!0},{from:an}),Xt({target:"Set",stat:!0},{of:cn});var vn=function(){for(var t=et(this),e=le(t.add),r=0,n=arguments.length;r<n;r++)e.call(t,arguments[r]);return t};Xt({target:"Set",proto:!0,real:!0,forced:P},{addAll:function(){return vn.apply(this,arguments)}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{deleteAll:function(){return un.apply(this,arguments)}});var gn=function(t){return Set.prototype.values.call(t)};Xt({target:"Set",proto:!0,real:!0,forced:P},{every:function(t){var e=et(this),r=gn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3);return!qr(r,(function(t,r){if(!n(t,t,e))return r()}),{IS_ITERATOR:!0,INTERRUPTED:!0}).stopped}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{difference:function(t){var e=et(this),r=new(hn(e,E("Set")))(e),n=le(r.delete);return qr(t,(function(t){n.call(r,t)})),r}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{filter:function(t){var e=et(this),r=gn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3),o=new(hn(e,E("Set"))),i=le(o.add);return qr(r,(function(t){n(t,t,e)&&i.call(o,t)}),{IS_ITERATOR:!0}),o}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{find:function(t){var 
e=et(this),r=gn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3);return qr(r,(function(t,r){if(n(t,t,e))return r(t)}),{IS_ITERATOR:!0,INTERRUPTED:!0}).result}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{intersection:function(t){var e=et(this),r=new(hn(e,E("Set"))),n=le(e.has),o=le(r.add);return qr(t,(function(t){n.call(e,t)&&o.call(r,t)})),r}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{isDisjointFrom:function(t){var e=et(this),r=le(e.has);return!qr(t,(function(t,n){if(!0===r.call(e,t))return n()}),{INTERRUPTED:!0}).stopped}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{isSubsetOf:function(t){var e=function(t){var e=mr(t);if("function"!=typeof e)throw TypeError(String(t)+" is not iterable");return et(e.call(t))}(this),r=et(t),n=r.has;return"function"!=typeof n&&(r=new(E("Set"))(t),n=le(r.has)),!qr(e,(function(t,e){if(!1===n.call(r,t))return e()}),{IS_ITERATOR:!0,INTERRUPTED:!0}).stopped}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{isSupersetOf:function(t){var e=et(this),r=le(e.has);return!qr(t,(function(t,n){if(!1===r.call(e,t))return n()}),{INTERRUPTED:!0}).stopped}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{join:function(t){var e=et(this),r=gn(e),n=void 0===t?",":String(t),o=[];return qr(r,o.push,{that:o,IS_ITERATOR:!0}),o.join(n)}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{map:function(t){var e=et(this),r=gn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3),o=new(hn(e,E("Set"))),i=le(o.add);return qr(r,(function(t){i.call(o,n(t,t,e))}),{IS_ITERATOR:!0}),o}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{reduce:function(t){var e=et(this),r=gn(e),n=arguments.length<2,o=n?void 0:arguments[1];if(le(t),qr(r,(function(r){n?(n=!1,o=r):o=t(o,r,r,e)}),{IS_ITERATOR:!0}),n)throw TypeError("Reduce of empty set with no initial value");return o}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{some:function(t){var e=et(this),r=gn(e),n=he(t,arguments.length>1?arguments[1]:void 0,3);return qr(r,(function(t,r){if(n(t,t,e))return r()}),{IS_ITERATOR:!0,INTERRUPTED:!0}).stopped}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{symmetricDifference:function(t){var e=et(this),r=new(hn(e,E("Set")))(e),n=le(r.delete),o=le(r.add);return qr(t,(function(t){n.call(r,t)||o.call(r,t)})),r}}),Xt({target:"Set",proto:!0,real:!0,forced:P},{union:function(t){var e=et(this),r=new(hn(e,E("Set")))(e);return qr(t,le(r.add),{that:r}),r}});var yn=Kr.getWeakData,bn=St.set,mn=St.getterFor,wn=we.find,En=we.findIndex,Sn=0,On=function(t){return t.frozen||(t.frozen=new Tn)},Tn=function(){this.entries=[]},Rn=function(t,e){return wn(t.entries,(function(t){return t[0]===e}))};Tn.prototype={get:function(t){var e=Rn(this,t);if(e)return e[1]},has:function(t){return!!Rn(this,t)},set:function(t,e){var r=Rn(this,t);r?r[1]=e:this.entries.push([t,e])},delete:function(t){var e=En(this.entries,(function(e){return e[0]===t}));return~e&&this.entries.splice(e,1),!!~e}};var xn={getConstructor:function(t,e,r,n){var o=t((function(t,i){Vr(t,o,e),bn(t,{type:e,id:Sn++,frozen:void 0}),null!=i&&qr(i,t[n],{that:t,AS_ENTRIES:r})})),i=mn(e),a=function(t,e,r){var n=i(t),o=yn(et(e),!0);return!0===o?On(n).set(e,r):o[n.id]=r,t};return Xr(o.prototype,{delete:function(t){var e=i(this);if(!m(t))return!1;var r=yn(t);return!0===r?On(e).delete(t):r&&C(r,e.id)&&delete r[e.id]},has:function(t){var e=i(this);if(!m(t))return!1;var r=yn(t);return!0===r?On(e).has(t):r&&C(r,e.id)}}),Xr(o.prototype,r?{get:function(t){var e=i(this);if(m(t)){var r=yn(t);return!0===r?On(e).get(t):r?r[e.id]:void 0}},set:function(t,e){return a(this,t,e)}}:{add:function(t){return 
a(this,t,!0)}}),o}},An=e((function(t){var e,r=St.enforce,n=!a.ActiveXObject&&"ActiveXObject"in a,o=Object.isExtensible,i=function(t){return function(){return t(this,arguments.length?arguments[0]:void 0)}},c=t.exports=Yr("WeakMap",i,xn);if(lt&&n){e=xn.getConstructor(i,"WeakMap",!0),Kr.enable();var u=c.prototype,s=u.delete,f=u.has,l=u.get,h=u.set;Xr(u,{delete:function(t){if(m(t)&&!o(t)){var n=r(this);return n.frozen||(n.frozen=new e),s.call(this,t)||n.frozen.delete(t)}return s.call(this,t)},has:function(t){if(m(t)&&!o(t)){var n=r(this);return n.frozen||(n.frozen=new e),f.call(this,t)||n.frozen.has(t)}return f.call(this,t)},get:function(t){if(m(t)&&!o(t)){var n=r(this);return n.frozen||(n.frozen=new e),f.call(this,t)?l.call(this,t):n.frozen.get(t)}return l.call(this,t)},set:function(t,n){if(m(t)&&!o(t)){var i=r(this);i.frozen||(i.frozen=new e),f.call(this,t)?h.call(this,t,n):i.frozen.set(t,n)}else h.call(this,t,n);return this}})}}));Xt({target:"WeakMap",proto:!0,real:!0,forced:P},{emplace:sn}),Xt({target:"WeakMap",stat:!0},{from:an}),Xt({target:"WeakMap",stat:!0},{of:cn}),Xt({target:"WeakMap",proto:!0,real:!0,forced:P},{deleteAll:function(){return un.apply(this,arguments)}}),Xt({target:"WeakMap",proto:!0,real:!0,forced:P},{upsert:pn}),Yr("WeakSet",(function(t){return function(){return t(this,arguments.length?arguments[0]:void 0)}}),xn),Xt({target:"WeakSet",proto:!0,real:!0,forced:P},{addAll:function(){return vn.apply(this,arguments)}}),Xt({target:"WeakSet",proto:!0,real:!0,forced:P},{deleteAll:function(){return un.apply(this,arguments)}}),Xt({target:"WeakSet",stat:!0},{from:an}),Xt({target:"WeakSet",stat:!0},{of:cn});var In="\t\n\v\f\r \u2028\u2029\ufeff",jn="["+In+"]",_n=RegExp("^"+jn+jn+"*"),Pn=RegExp(jn+jn+"*$"),Mn=function(t){return function(e){var r=Pe(y(e));return 1&t&&(r=r.replace(_n,"")),2&t&&(r=r.replace(Pn,"")),r}},Nn={start:Mn(1),end:Mn(2),trim:Mn(3)},kn=Ct.f,Un=tt.f,Ln=nt.f,Dn=Nn.trim,Cn="Number",Fn=a.Number,Bn=Fn.prototype,Wn=d(ce(Bn))==Cn,zn=function(t){if(_(t))throw TypeError("Cannot convert a Symbol value to a number");var e,r,n,o,i,a,c,u,s=V(t,"number");if("string"==typeof s&&s.length>2)if(43===(e=(s=Dn(s)).charCodeAt(0))||45===e){if(88===(r=s.charCodeAt(2))||120===r)return NaN}else if(48===e){switch(s.charCodeAt(1)){case 66:case 98:n=2,o=49;break;case 79:case 111:n=8,o=55;break;default:return+s}for(a=(i=s.slice(2)).length,c=0;c<a;c++)if((u=i.charCodeAt(c))<48||u>o)return NaN;return parseInt(i,n)}return+s};if(Ht(Cn,!Fn(" 0o1")||!Fn("0b1")||Fn("+0x1"))){for(var Gn,Kn=function(t){var e=arguments.length<1?0:t,r=this;return r instanceof Kn&&(Wn?c((function(){Bn.valueOf.call(r)})):d(r)!=Cn)?Hr(new Fn(zn(e)),r,Kn):zn(e)},$n=u?kn(Fn):"MAX_VALUE,MIN_VALUE,NaN,NEGATIVE_INFINITY,POSITIVE_INFINITY,EPSILON,isFinite,isInteger,isNaN,isSafeInteger,MAX_SAFE_INTEGER,MIN_SAFE_INTEGER,parseFloat,parseInt,isInteger,fromString,range".split(","),qn=0;$n.length>qn;qn++)C(Fn,Gn=$n[qn])&&!C(Kn,Gn)&&Ln(Kn,Gn,Un(Fn,Gn));Kn.prototype=Bn,Bn.constructor=Kn,Ot(a,Cn,Kn)}Xt({target:"Number",stat:!0},{EPSILON:Math.pow(2,-52)});var Vn=a.isFinite;Xt({target:"Number",stat:!0},{isFinite:Number.isFinite||function(t){return"number"==typeof t&&Vn(t)}});var Hn=Math.floor,Yn=function(t){return!m(t)&&isFinite(t)&&Hn(t)===t};Xt({target:"Number",stat:!0},{isInteger:Yn}),Xt({target:"Number",stat:!0},{isNaN:function(t){return t!=t}});var Xn=Math.abs;Xt({target:"Number",stat:!0},{isSafeInteger:function(t){return 
Yn(t)&&Xn(t)<=9007199254740991}}),Xt({target:"Number",stat:!0},{MAX_SAFE_INTEGER:9007199254740991}),Xt({target:"Number",stat:!0},{MIN_SAFE_INTEGER:-9007199254740991});var Jn=l.f,Qn=function(t){return function(e){for(var r,n=b(e),o=Zt(n),i=o.length,a=0,c=[];i>a;)r=o[a++],u&&!Jn.call(n,r)||c.push(t?[r,n[r]]:n[r]);return c}},Zn={entries:Qn(!0),values:Qn(!1)},to=Zn.entries;Xt({target:"Object",stat:!0},{entries:function(t){return to(t)}}),Xt({target:"Object",stat:!0,sham:!u},{getOwnPropertyDescriptors:function(t){for(var e,r,n=b(t),o=tt.f,i=Bt(n),a={},c=0;i.length>c;)void 0!==(r=o(n,e=i[c++]))&&hr(a,e,r);return a}});var eo=Object.is||function(t,e){return t===e?0!==t||1/t==1/e:t!=t&&e!=e};Xt({target:"Object",stat:!0},{is:eo});var ro=c((function(){Zt(1)}));Xt({target:"Object",stat:!0,forced:ro},{keys:function(t){return Zt(L(t))}});var no=Zn.values;Xt({target:"Object",stat:!0},{values:function(t){return no(t)}});var oo=Ne.codeAt;Xt({target:"String",proto:!0},{codePointAt:function(t){return oo(this,t)}}),de("String","codePointAt");var io,ao=$("match"),co=function(t){var e;return m(t)&&(void 0!==(e=t[ao])?!!e:"RegExp"==d(t))},uo=function(t){if(co(t))throw TypeError("The method doesn't accept regular expressions");return t},so=$("match"),fo=function(t){var e=/./;try{"/./"[t](e)}catch(n){try{return e[so]=!1,"/./"[t](e)}catch(t){}}return!1},lo=tt.f,ho="".endsWith,po=Math.min,vo=fo("endsWith"),go=!(vo||(io=lo(String.prototype,"endsWith"),!io||io.writable));Xt({target:"String",proto:!0,forced:!go&&!vo},{endsWith:function(t){var e=Pe(y(this));uo(t);var r=arguments.length>1?arguments[1]:void 0,n=It(e.length),o=void 0===r?n:po(It(r),n),i=Pe(t);return ho?ho.call(e,i,o):e.slice(o-i.length,o)===i}}),de("String","endsWith");var yo=String.fromCharCode,bo=String.fromCodePoint;Xt({target:"String",stat:!0,forced:!!bo&&1!=bo.length},{fromCodePoint:function(t){for(var e,r=[],n=arguments.length,o=0;n>o;){if(e=+arguments[o++],Pt(e,1114111)!==e)throw RangeError(e+" is not a valid code point");r.push(e<65536?yo(e):yo(55296+((e-=65536)>>10),e%1024+56320))}return r.join("")}}),Xt({target:"String",proto:!0,forced:!fo("includes")},{includes:function(t){return!!~Pe(y(this)).indexOf(Pe(uo(t)),arguments.length>1?arguments[1]:void 0)}}),de("String","includes");var mo=function(t){var e=Pe(y(this)),r="",n=xt(t);if(n<0||1/0==n)throw RangeError("Wrong number of repetitions");for(;n>0;(n>>>=1)&&(e+=e))1&n&&(r+=e);return r},wo=Math.ceil,Eo=function(t){return function(e,r,n){var o,i,a=Pe(y(e)),c=a.length,u=void 0===n?" ":Pe(n),s=It(r);return s<=c||""==u?a:((i=mo.call(u,wo((o=s-c)/u.length))).length>o&&(i=i.slice(0,o)),t?a+i:i+a)}},So={start:Eo(!1),end:Eo(!0)},Oo=/Version\/10(?:\.\d+){1,2}(?: [\w./]+)?(?: Mobile\/\w+)? 
Safari\//.test(S),To=So.start;Xt({target:"String",proto:!0,forced:Oo},{padStart:function(t){return To(this,t,arguments.length>1?arguments[1]:void 0)}}),de("String","padStart");var Ro=So.end;Xt({target:"String",proto:!0,forced:Oo},{padEnd:function(t){return Ro(this,t,arguments.length>1?arguments[1]:void 0)}}),de("String","padEnd"),Xt({target:"String",stat:!0},{raw:function(t){for(var e=b(t.raw),r=It(e.length),n=arguments.length,o=[],i=0;r>i;)o.push(Pe(e[i++])),i<n&&o.push(Pe(arguments[i]));return o.join("")}}),Xt({target:"String",proto:!0},{repeat:mo}),de("String","repeat");var xo=tt.f,Ao="".startsWith,Io=Math.min,jo=fo("startsWith"),_o=!jo&&!!function(){var t=xo(String.prototype,"startsWith");return t&&!t.writable}();Xt({target:"String",proto:!0,forced:!_o&&!jo},{startsWith:function(t){var e=Pe(y(this));uo(t);var r=It(Io(arguments.length>1?arguments[1]:void 0,e.length)),n=Pe(t);return Ao?Ao.call(e,n,r):e.slice(r,r+n.length)===n}}),de("String","startsWith");var Po=function(t){return c((function(){return!!In[t]()||"\u200b\u0085\u180e"!="\u200b\u0085\u180e"[t]()||In[t].name!==t}))},Mo=Nn.start,No=Po("trimStart"),ko=No?function(){return Mo(this)}:"".trimStart;Xt({target:"String",proto:!0,forced:No},{trimStart:ko,trimLeft:ko}),de("String","trimLeft");var 
Uo=Nn.end,Lo=Po("trimEnd"),Do=Lo?function(){return Uo(this)}:"".trimEnd;Xt({target:"String",proto:!0,forced:Lo},{trimEnd:Do,trimRight:Do}),de("String","trimRight");var Co=E("Reflect","apply"),Fo=Function.apply,Bo=!c((function(){Co((function(){}))}));Xt({target:"Reflect",stat:!0,forced:Bo},{apply:function(t,e,r){return le(t),et(r),Co?Co(t,e,r):Fo.call(t,e,r)}});var Wo=[].slice,zo={},Go=function(t,e,r){if(!(e in zo)){for(var n=[],o=0;o<e;o++)n[o]="a["+o+"]";zo[e]=Function("C,a","return new C("+n.join(",")+")")}return zo[e](t,r)},Ko=Function.bind||function(t){var e=le(this),r=Wo.call(arguments,1),n=function(){var o=r.concat(Wo.call(arguments));return this instanceof n?Go(e,o.length,o):e.apply(t,o)};return m(e.prototype)&&(n.prototype=e.prototype),n},$o=E("Reflect","construct"),qo=c((function(){function t(){}return!($o((function(){}),[],t)instanceof t)})),Vo=!c((function(){$o((function(){}))})),Ho=qo||Vo;Xt({target:"Reflect",stat:!0,forced:Ho,sham:Ho},{construct:function(t,e){le(t),et(e);var r=arguments.length<3?t:le(arguments[2]);if(Vo&&!qo)return $o(t,e,r);if(t==r){switch(e.length){case 0:return new t;case 1:return new t(e[0]);case 2:return new t(e[0],e[1]);case 3:return new t(e[0],e[1],e[2]);case 4:return new t(e[0],e[1],e[2],e[3])}var n=[null];return n.push.apply(n,e),new(Ko.apply(t,n))}var o=r.prototype,i=ce(m(o)?o:Object.prototype),a=Function.apply.call(t,i,e);return m(a)?a:i}});var Yo=c((function(){Reflect.defineProperty(nt.f({},1,{value:1}),1,{value:2})}));Xt({target:"Reflect",stat:!0,forced:Yo,sham:!u},{defineProperty:function(t,e,r){et(t);var n=H(e);et(r);try{return nt.f(t,n,r),!0}catch(t){return!1}}});var Xo=tt.f;Xt({target:"Reflect",stat:!0},{deleteProperty:function(t,e){var r=Xo(et(t),e);return!(r&&!r.configurable)&&delete t[e]}}),Xt({target:"Reflect",stat:!0},{get:function t(e,r){var n,o,i=arguments.length<3?e:arguments[2];return et(e)===i?e[r]:(n=tt.f(e,r))?C(n,"value")?n.value:void 0===n.get?void 0:n.get.call(i):m(o=De(e))?t(o,r,i):void 0}}),Xt({target:"Reflect",stat:!0,sham:!u},{getOwnPropertyDescriptor:function(t,e){return tt.f(et(t),e)}}),Xt({target:"Reflect",stat:!0,sham:!ke},{getPrototypeOf:function(t){return De(et(t))}}),Xt({target:"Reflect",stat:!0},{has:function(t,e){return e in t}});var Jo=Object.isExtensible;Xt({target:"Reflect",stat:!0},{isExtensible:function(t){return et(t),!Jo||Jo(t)}}),Xt({target:"Reflect",stat:!0},{ownKeys:Bt}),Xt({target:"Reflect",stat:!0,sham:!Gr},{preventExtensions:function(t){et(t);try{var e=E("Object","preventExtensions");return e&&e(t),!0}catch(t){return!1}}});var Qo=c((function(){var t=function(){},e=nt.f(new t,"a",{configurable:!0});return!1!==Reflect.set(t.prototype,"a",1,e)}));Xt({target:"Reflect",stat:!0,forced:Qo},{set:function t(e,r,n){var o,i,a=arguments.length<4?e:arguments[3],c=tt.f(et(e),r);if(!c){if(m(i=De(e)))return t(i,r,n,a);c=h(0)}if(C(c,"value")){if(!1===c.writable||!m(a))return!1;if(o=tt.f(a,r)){if(o.get||o.set||!1===o.writable)return!1;o.value=n,nt.f(a,r,o)}else nt.f(a,r,h(0,n));return!0}return void 0!==c.set&&(c.set.call(a,n),!0)}}),He&&Xt({target:"Reflect",stat:!0},{setPrototypeOf:function(t,e){et(t),Ve(e);try{return He(t,e),!0}catch(t){return!1}}}),Xt({global:!0},{Reflect:{}}),Ge(a.Reflect,"Reflect",!0);var Zo=U("metadata"),ti=Zo.store||(Zo.store=new An),ei=function(t,e,r){var 
n=ti.get(t);if(!n){if(!r)return;ti.set(t,n=new on)}var o=n.get(e);if(!o){if(!r)return;n.set(e,o=new on)}return o},ri={store:ti,getMap:ei,has:function(t,e,r){var n=ei(e,r,!1);return void 0!==n&&n.has(t)},get:function(t,e,r){var n=ei(e,r,!1);return void 0===n?void 0:n.get(t)},set:function(t,e,r,n){ei(r,n,!0).set(t,e)},keys:function(t,e){var r=ei(t,e,!1),n=[];return r&&r.forEach((function(t,e){n.push(e)})),n},toKey:function(t){return void 0===t||"symbol"==typeof t?t:String(t)}},ni=ri.toKey,oi=ri.set;Xt({target:"Reflect",stat:!0},{defineMetadata:function(t,e,r){var n=arguments.length<4?void 0:ni(arguments[3]);oi(t,e,et(r),n)}});var ii=ri.toKey,ai=ri.getMap,ci=ri.store;Xt({target:"Reflect",stat:!0},{deleteMetadata:function(t,e){var r=arguments.length<3?void 0:ii(arguments[2]),n=ai(et(e),r,!1);if(void 0===n||!n.delete(t))return!1;if(n.size)return!0;var o=ci.get(e);return o.delete(r),!!o.size||ci.delete(e)}});var ui=ri.has,si=ri.get,fi=ri.toKey,li=function t(e,r,n){if(ui(e,r,n))return si(e,r,n);var o=De(r);return null!==o?t(e,o,n):void 0};Xt({target:"Reflect",stat:!0},{getMetadata:function(t,e){var r=arguments.length<3?void 0:fi(arguments[2]);return li(t,et(e),r)}});var hi=ri.keys,pi=ri.toKey,di=function t(e,r){var n=hi(e,r),o=De(e);if(null===o)return n;var i,a,c=t(o,r);return c.length?n.length?(i=new dn(n.concat(c)),qr(i,(a=[]).push,{that:a}),a):c:n};Xt({target:"Reflect",stat:!0},{getMetadataKeys:function(t){var e=arguments.length<2?void 0:pi(arguments[1]);return di(et(t),e)}});var vi=ri.get,gi=ri.toKey;Xt({target:"Reflect",stat:!0},{getOwnMetadata:function(t,e){var r=arguments.length<3?void 0:gi(arguments[2]);return vi(t,et(e),r)}});var yi=ri.keys,bi=ri.toKey;Xt({target:"Reflect",stat:!0},{getOwnMetadataKeys:function(t){var e=arguments.length<2?void 0:bi(arguments[1]);return yi(et(t),e)}});var mi=ri.has,wi=ri.toKey,Ei=function t(e,r,n){if(mi(e,r,n))return!0;var o=De(r);return null!==o&&t(e,o,n)};Xt({target:"Reflect",stat:!0},{hasMetadata:function(t,e){var r=arguments.length<3?void 0:wi(arguments[2]);return Ei(t,et(e),r)}});var Si=ri.has,Oi=ri.toKey;Xt({target:"Reflect",stat:!0},{hasOwnMetadata:function(t,e){var r=arguments.length<3?void 0:Oi(arguments[2]);return Si(t,et(e),r)}});var Ti=ri.toKey,Ri=ri.set;Xt({target:"Reflect",stat:!0},{metadata:function(t,e){return function(r,n){Ri(t,e,et(r),Ti(n))}}});var xi=function(){var t=et(this),e="";return t.global&&(e+="g"),t.ignoreCase&&(e+="i"),t.multiline&&(e+="m"),t.dotAll&&(e+="s"),t.unicode&&(e+="u"),t.sticky&&(e+="y"),e},Ai=function(t,e){return RegExp(t,e)},Ii={UNSUPPORTED_Y:c((function(){var t=Ai("a","y");return t.lastIndex=2,null!=t.exec("abcd")})),BROKEN_CARET:c((function(){var t=Ai("^r","gy");return t.lastIndex=2,null!=t.exec("str")}))},ji=c((function(){var t=RegExp(".","string".charAt(0));return!(t.dotAll&&t.exec("\n")&&"s"===t.flags)})),_i=c((function(){var t=RegExp("(?<a>b)","string".charAt(5));return"b"!==t.exec("b").groups.a||"bc"!=="b".replace(t,"$<a>c")})),Pi=nt.f,Mi=Ct.f,Ni=St.enforce,ki=$("match"),Ui=a.RegExp,Li=Ui.prototype,Di=/^\?<[^\s\d!#%&*+<=>@^][^\s!#%&*+<=>@^]*>/,Ci=/a/g,Fi=/a/g,Bi=new Ui(Ci)!==Ci,Wi=Ii.UNSUPPORTED_Y,zi=u&&(!Bi||Wi||ji||_i||c((function(){return Fi[ki]=!1,Ui(Ci)!=Ci||Ui(Fi)==Fi||"/a/i"!=Ui(Ci,"i")})));if(Ht("RegExp",zi)){for(var Gi=function(t,e){var r,n,o,i,a,c,u=this instanceof Gi,s=co(t),f=void 0===e,l=[],h=t;if(!u&&s&&f&&t.constructor===Gi)return t;if((s||t instanceof Gi)&&(t=t.source,f&&(e="flags"in h?h.flags:xi.call(h))),t=void 0===t?"":Pe(t),e=void 0===e?"":Pe(e),h=t,ji&&"dotAll"in 
Ci&&(n=!!e&&e.indexOf("s")>-1)&&(e=e.replace(/s/g,"")),r=e,Wi&&"sticky"in Ci&&(o=!!e&&e.indexOf("y")>-1)&&(e=e.replace(/y/g,"")),_i&&(t=(i=function(t){for(var e,r=t.length,n=0,o="",i=[],a={},c=!1,u=!1,s=0,f="";n<=r;n++){if("\\"===(e=t.charAt(n)))e+=t.charAt(++n);else if("]"===e)c=!1;else if(!c)switch(!0){case"["===e:c=!0;break;case"("===e:Di.test(t.slice(n+1))&&(n+=2,u=!0),o+=e,s++;continue;case">"===e&&u:if(""===f||C(a,f))throw new SyntaxError("Invalid capture group name");a[f]=!0,i.push([f,s]),u=!1,f="";continue}u?f+=e:o+=e}return[o,i]}(t))[0],l=i[1]),a=Hr(Ui(t,e),u?this:Li,Gi),(n||o||l.length)&&(c=Ni(a),n&&(c.dotAll=!0,c.raw=Gi(function(t){for(var e,r=t.length,n=0,o="",i=!1;n<=r;n++)"\\"!==(e=t.charAt(n))?i||"."!==e?("["===e?i=!0:"]"===e&&(i=!1),o+=e):o+="[\\s\\S]":o+=e+t.charAt(++n);return o}(t),r)),o&&(c.sticky=!0),l.length&&(c.groups=l)),t!==h)try{ot(a,"source",""===h?"(?:)":h)}catch(t){}return a},Ki=function(t){t in Gi||Pi(Gi,t,{configurable:!0,get:function(){return Ui[t]},set:function(e){Ui[t]=e}})},$i=Mi(Ui),qi=0;$i.length>qi;)Ki($i[qi++]);Li.constructor=Gi,Gi.prototype=Li,Ot(a,"RegExp",Gi)}Qr("RegExp");var Vi="toString",Hi=RegExp.prototype,Yi=Hi.toString;(c((function(){return"/a/b"!=Yi.call({source:"a",flags:"b"})}))||Yi.name!=Vi)&&Ot(RegExp.prototype,Vi,(function(){var t=et(this),e=Pe(t.source),r=t.flags;return"/"+e+"/"+Pe(void 0===r&&t instanceof RegExp&&!("flags"in Hi)?xi.call(t):r)}),{unsafe:!0});var Xi=St.get,Ji=RegExp.prototype;u&&ji&&(0,nt.f)(Ji,"dotAll",{configurable:!0,get:function(){if(this!==Ji){if(this instanceof RegExp)return!!Xi(this).dotAll;throw TypeError("Incompatible receiver, RegExp required")}}});var Qi=St.get,Zi=RegExp.prototype.exec,ta=U("native-string-replace",String.prototype.replace),ea=Zi,ra=function(){var t=/a/,e=/b*/g;return Zi.call(t,"a"),Zi.call(e,"a"),0!==t.lastIndex||0!==e.lastIndex}(),na=Ii.UNSUPPORTED_Y||Ii.BROKEN_CARET,oa=void 0!==/()??/.exec("")[1];(ra||oa||na||ji||_i)&&(ea=function(t){var e,r,n,o,i,a,c,u=this,s=Qi(u),f=Pe(t),l=s.raw;if(l)return l.lastIndex=u.lastIndex,e=ea.call(l,f),u.lastIndex=l.lastIndex,e;var h=s.groups,p=na&&u.sticky,d=xi.call(u),v=u.source,g=0,y=f;if(p&&(-1===(d=d.replace("y","")).indexOf("g")&&(d+="g"),y=f.slice(u.lastIndex),u.lastIndex>0&&(!u.multiline||u.multiline&&"\n"!==f.charAt(u.lastIndex-1))&&(v="(?: "+v+")",y=" "+y,g++),r=new RegExp("^(?:"+v+")",d)),oa&&(r=new RegExp("^"+v+"$(?!\\s)",d)),ra&&(n=u.lastIndex),o=Zi.call(p?r:u,y),p?o?(o.input=o.input.slice(g),o[0]=o[0].slice(g),o.index=u.lastIndex,u.lastIndex+=o[0].length):u.lastIndex=0:ra&&o&&(u.lastIndex=u.global?o.index+o[0].length:n),oa&&o&&o.length>1&&ta.call(o[0],r,(function(){for(i=1;i<arguments.length-2;i++)void 0===arguments[i]&&(o[i]=void 0)})),o&&h)for(o.groups=a=ce(null),i=0;i<h.length;i++)a[(c=h[i])[0]]=o[c[1]];return o});var ia=ea;Xt({target:"RegExp",proto:!0,forced:/./.exec!==ia},{exec:ia}),u&&c((function(){return"sy"!==Object.getOwnPropertyDescriptor(RegExp.prototype,"flags").get.call({dotAll:!0,sticky:!0})}))&&nt.f(RegExp.prototype,"flags",{configurable:!0,get:xi});var aa=St.get,ca=RegExp.prototype;u&&Ii.UNSUPPORTED_Y&&(0,nt.f)(ca,"sticky",{configurable:!0,get:function(){if(this!==ca){if(this instanceof RegExp)return!!aa(this).sticky;throw TypeError("Incompatible receiver, RegExp required")}}});var ua,sa,fa=(ua=!1,(sa=/[ac]/).exec=function(){return ua=!0,/./.exec.apply(this,arguments)},!0===sa.test("abc")&&ua),la=/./.test;Xt({target:"RegExp",proto:!0,forced:!fa},{test:function(t){if("function"!=typeof this.exec)return la.call(this,t);var 
e=this.exec(t);if(null!==e&&!m(e))throw new Error("RegExp exec method returned something other than an Object or null");return!!e}});var ha=$("species"),pa=RegExp.prototype,da=function(t,e,r,n){var o=$(t),i=!c((function(){var e={};return e[o]=function(){return 7},7!=""[t](e)})),a=i&&!c((function(){var e=!1,r=/a/;return"split"===t&&((r={}).constructor={},r.constructor[ha]=function(){return r},r.flags="",r[o]=/./[o]),r.exec=function(){return e=!0,null},r[o](""),!e}));if(!i||!a||r){var u=/./[o],s=e(o,""[t],(function(t,e,r,n,o){var a=e.exec;return a===ia||a===pa.exec?i&&!o?{done:!0,value:u.call(e,r,n)}:{done:!0,value:t.call(r,e,n)}:{done:!1}}));Ot(String.prototype,t,s[0]),Ot(pa,o,s[1])}n&&ot(pa[o],"sham",!0)},va=Ne.charAt,ga=function(t,e,r){return e+(r?va(t,e).length:1)},ya=function(t,e){var r=t.exec;if("function"==typeof r){var n=r.call(t,e);if("object"!=typeof n)throw TypeError("RegExp exec method returned something other than an Object or null");return n}if("RegExp"!==d(t))throw TypeError("RegExp#exec called on incompatible receiver");return ia.call(t,e)};da("match",(function(t,e,r){return[function(e){var r=y(this),n=null==e?void 0:e[t];return void 0!==n?n.call(e,r):new RegExp(e)[t](Pe(r))},function(t){var n=et(this),o=Pe(t),i=r(e,n,o);if(i.done)return i.value;if(!n.global)return ya(n,o);var a=n.unicode;n.lastIndex=0;for(var c,u=[],s=0;null!==(c=ya(n,o));){var f=Pe(c[0]);u[s]=f,""===f&&(n.lastIndex=ga(o,It(n.lastIndex),a)),s++}return 0===s?null:u}]}));var ba=Math.floor,ma="".replace,wa=/\$([$&'`]|\d{1,2}|<[^>]*>)/g,Ea=/\$([$&'`]|\d{1,2})/g,Sa=function(t,e,r,n,o,i){var a=r+t.length,c=n.length,u=Ea;return void 0!==o&&(o=L(o),u=wa),ma.call(i,u,(function(i,u){var s;switch(u.charAt(0)){case"$":return"$";case"&":return t;case"`":return e.slice(0,r);case"'":return e.slice(a);case"<":s=o[u.slice(1,-1)];break;default:var f=+u;if(0===f)return i;if(f>c){var l=ba(f/10);return 0===l?i:l<=c?void 0===n[l-1]?u.charAt(1):n[l-1]+u.charAt(1):i}s=n[f-1]}return void 0===s?"":s}))},Oa=$("replace"),Ta=Math.max,Ra=Math.min,xa="$0"==="a".replace(/./,"$0"),Aa=!!/./[Oa]&&""===/./[Oa]("a","$0");da("replace",(function(t,e,r){var n=Aa?"$":"$0";return[function(t,r){var n=y(this),o=null==t?void 0:t[Oa];return void 0!==o?o.call(t,n,r):e.call(Pe(n),t,r)},function(t,o){var i=et(this),a=Pe(t);if("string"==typeof o&&-1===o.indexOf(n)&&-1===o.indexOf("$<")){var c=r(e,i,a,o);if(c.done)return c.value}var u="function"==typeof o;u||(o=Pe(o));var s=i.global;if(s){var f=i.unicode;i.lastIndex=0}for(var l=[];;){var h=ya(i,a);if(null===h)break;if(l.push(h),!s)break;""===Pe(h[0])&&(i.lastIndex=ga(a,It(i.lastIndex),f))}for(var p,d="",v=0,g=0;g<l.length;g++){for(var y=Pe((h=l[g])[0]),b=Ta(Ra(xt(h.index),a.length),0),m=[],w=1;w<h.length;w++)m.push(void 0===(p=h[w])?p:String(p));var E=h.groups;if(u){var S=[y].concat(m,b,a);void 0!==E&&S.push(E);var O=Pe(o.apply(void 0,S))}else O=Sa(y,a,b,m,E,o);b>=v&&(d+=a.slice(v,b)+O,v=b+y.length)}return d+a.slice(v)}]}),!!c((function(){var t=/./;return t.exec=function(){var t=[];return t.groups={a:"7"},t},"7"!=="".replace(t,"$<a>")}))||!xa||Aa),da("search",(function(t,e,r){return[function(e){var r=y(this),n=null==e?void 0:e[t];return void 0!==n?n.call(e,r):new RegExp(e)[t](Pe(r))},function(t){var n=et(this),o=Pe(t),i=r(e,n,o);if(i.done)return i.value;var a=n.lastIndex;eo(a,0)||(n.lastIndex=0);var c=ya(n,o);return eo(n.lastIndex,a)||(n.lastIndex=a),null===c?-1:c.index}]}));var Ia=Ii.UNSUPPORTED_Y,ja=[].push,_a=Math.min,Pa=4294967295;da("split",(function(t,e,r){var n;return 
n="c"=="abbc".split(/(b)*/)[1]||4!="test".split(/(?:)/,-1).length||2!="ab".split(/(?:ab)*/).length||4!=".".split(/(.?)(.?)/).length||".".split(/()()/).length>1||"".split(/.?/).length?function(t,r){var n=Pe(y(this)),o=void 0===r?Pa:r>>>0;if(0===o)return[];if(void 0===t)return[n];if(!co(t))return e.call(n,t,o);for(var i,a,c,u=[],s=0,f=new RegExp(t.source,(t.ignoreCase?"i":"")+(t.multiline?"m":"")+(t.unicode?"u":"")+(t.sticky?"y":"")+"g");(i=ia.call(f,n))&&!((a=f.lastIndex)>s&&(u.push(n.slice(s,i.index)),i.length>1&&i.index<n.length&&ja.apply(u,i.slice(1)),c=i[0].length,s=a,u.length>=o));)f.lastIndex===i.index&&f.lastIndex++;return s===n.length?!c&&f.test("")||u.push(""):u.push(n.slice(s)),u.length>o?u.slice(0,o):u}:"0".split(void 0,0).length?function(t,r){return void 0===t&&0===r?[]:e.call(this,t,r)}:e,[function(e,r){var o=y(this),i=null==e?void 0:e[t];return void 0!==i?i.call(e,o,r):n.call(Pe(o),e,r)},function(t,o){var i=et(this),a=Pe(t),c=r(n,i,a,o,n!==e);if(c.done)return c.value;var u=hn(i,RegExp),s=i.unicode,f=new u(Ia?"^(?:"+i.source+")":i,(i.ignoreCase?"i":"")+(i.multiline?"m":"")+(i.unicode?"u":"")+(Ia?"g":"y")),l=void 0===o?Pa:o>>>0;if(0===l)return[];if(0===a.length)return null===ya(f,a)?[a]:[];for(var h=0,p=0,d=[];p<a.length;){f.lastIndex=Ia?0:p;var v,g=ya(f,Ia?a.slice(p):a);if(null===g||(v=_a(It(f.lastIndex+(Ia?p:0)),a.length))===h)p=ga(a,p,s);else{if(d.push(a.slice(h,p)),d.length===l)return d;for(var y=1;y<=g.length-1;y++)if(d.push(g[y]),d.length===l)return d;p=h=v}}return d.push(a.slice(h)),d}]}),!!c((function(){var t=/(?:)/,e=t.exec;t.exec=function(){return e.apply(this,arguments)};var r="ab".split(t);return 2!==r.length||"a"!==r[0]||"b"!==r[1]})),Ia);var Ma=$("species"),Na=$("isConcatSpreadable"),ka=9007199254740991,Ua="Maximum allowed index exceeded",La=A>=51||!c((function(){var t=[];return t[Na]=!1,t.concat()[0]!==t})),Da=A>=51||!c((function(){var t=[];return(t.constructor={})[Ma]=function(){return{foo:1}},1!==t.concat(Boolean).foo})),Ca=function(t){if(!m(t))return!1;var e=t[Na];return void 0!==e?!!e:ve(t)};Xt({target:"Array",proto:!0,forced:!La||!Da},{concat:function(t){var e,r,n,o,i,a=L(this),c=ye(a,0),u=0;for(e=-1,n=arguments.length;e<n;e++)if(Ca(i=-1===e?a:arguments[e])){if(u+(o=It(i.length))>ka)throw TypeError(Ua);for(r=0;r<o;r++,u++)r in i&&hr(c,u,i[r])}else{if(u>=ka)throw TypeError(Ua);hr(c,u++,i)}return c.length=u,c}});var Fa={f:$},Ba=nt.f,Wa=function(t){var e=xr.Symbol||(xr.Symbol={});C(e,t)||Ba(e,t,{value:Fa.f(t)})},za=we.forEach,Ga=pt("hidden"),Ka="Symbol",$a=$("toPrimitive"),qa=St.set,Va=St.getterFor(Ka),Ha=Object.prototype,Ya=a.Symbol,Xa=E("JSON","stringify"),Ja=tt.f,Qa=nt.f,Za=zr.f,tc=l.f,ec=U("symbols"),rc=U("op-symbols"),nc=U("string-to-symbol-registry"),oc=U("symbol-to-string-registry"),ic=U("wks"),ac=a.QObject,cc=!ac||!ac.prototype||!ac.prototype.findChild,uc=u&&c((function(){return 7!=ce(Qa({},"a",{get:function(){return Qa(this,"a",{value:7}).a}})).a}))?function(t,e,r){var n=Ja(Ha,e);n&&delete Ha[e],Qa(t,e,r),n&&t!==Ha&&Qa(Ha,e,n)}:Qa,sc=function(t,e){var r=ec[t]=ce(Ya.prototype);return qa(r,{type:Ka,tag:t,description:e}),u||(r.description=e),r},fc=function(t,e,r){t===Ha&&fc(rc,e,r),et(t);var n=H(e);return et(r),C(ec,n)?(r.enumerable?(C(t,Ga)&&t[Ga][n]&&(t[Ga][n]=!1),r=ce(r,{enumerable:h(0,!1)})):(C(t,Ga)||Qa(t,Ga,h(1,{})),t[Ga][n]=!0),uc(t,n,r)):Qa(t,n,r)},lc=function(t,e){et(t);var r=b(e),n=Zt(r).concat(vc(r));return za(n,(function(e){u&&!hc.call(r,e)||fc(t,e,r[e])})),t},hc=function(t){var 
e=H(t),r=tc.call(this,e);return!(this===Ha&&C(ec,e)&&!C(rc,e))&&(!(r||!C(this,e)||!C(ec,e)||C(this,Ga)&&this[Ga][e])||r)},pc=function(t,e){var r=b(t),n=H(e);if(r!==Ha||!C(ec,n)||C(rc,n)){var o=Ja(r,n);return!o||!C(ec,n)||C(r,Ga)&&r[Ga][n]||(o.enumerable=!0),o}},dc=function(t){var e=Za(b(t)),r=[];return za(e,(function(t){C(ec,t)||C(dt,t)||r.push(t)})),r},vc=function(t){var e=t===Ha,r=Za(e?rc:b(t)),n=[];return za(r,(function(t){!C(ec,t)||e&&!C(Ha,t)||n.push(ec[t])})),n};if(I||(Ot((Ya=function(){if(this instanceof Ya)throw TypeError("Symbol is not a constructor");var t=arguments.length&&void 0!==arguments[0]?Pe(arguments[0]):void 0,e=W(t),r=function t(r){this===Ha&&t.call(rc,r),C(this,Ga)&&C(this[Ga],e)&&(this[Ga][e]=!1),uc(this,e,h(1,r))};return u&&cc&&uc(Ha,e,{configurable:!0,set:r}),sc(e,t)}).prototype,"toString",(function(){return Va(this).tag})),Ot(Ya,"withoutSetter",(function(t){return sc(W(t),t)})),l.f=hc,nt.f=fc,tt.f=pc,Ct.f=zr.f=dc,Ft.f=vc,Fa.f=function(t){return sc($(t),t)},u&&(Qa(Ya.prototype,"description",{configurable:!0,get:function(){return Va(this).description}}),Ot(Ha,"propertyIsEnumerable",hc,{unsafe:!0}))),Xt({global:!0,wrap:!0,forced:!I,sham:!I},{Symbol:Ya}),za(Zt(ic),(function(t){Wa(t)})),Xt({target:Ka,stat:!0,forced:!I},{for:function(t){var e=Pe(t);if(C(nc,e))return nc[e];var r=Ya(e);return nc[e]=r,oc[r]=e,r},keyFor:function(t){if(!_(t))throw TypeError(t+" is not a symbol");if(C(oc,t))return oc[t]},useSetter:function(){cc=!0},useSimple:function(){cc=!1}}),Xt({target:"Object",stat:!0,forced:!I,sham:!u},{create:function(t,e){return void 0===e?ce(t):lc(ce(t),e)},defineProperty:fc,defineProperties:lc,getOwnPropertyDescriptor:pc}),Xt({target:"Object",stat:!0,forced:!I},{getOwnPropertyNames:dc,getOwnPropertySymbols:vc}),Xt({target:"Object",stat:!0,forced:c((function(){Ft.f(1)}))},{getOwnPropertySymbols:function(t){return Ft.f(L(t))}}),Xa){var gc=!I||c((function(){var t=Ya();return"[null]"!=Xa([t])||"{}"!=Xa({a:t})||"{}"!=Xa(Object(t))}));Xt({target:"JSON",stat:!0,forced:gc},{stringify:function(t,e,r){for(var n,o=[t],i=1;arguments.length>i;)o.push(arguments[i++]);if(n=e,(m(e)||void 0!==t)&&!_(t))return ve(e)||(e=function(t,e){if("function"==typeof n&&(e=n.call(this,t,e)),!_(e))return e}),o[1]=e,Xa.apply(null,o)}})}Ya.prototype[$a]||ot(Ya.prototype,$a,Ya.prototype.valueOf),Ge(Ya,Ka),dt[Ga]=!0,Wa("asyncIterator");var yc=nt.f,bc=a.Symbol;if(u&&"function"==typeof bc&&(!("description"in bc.prototype)||void 0!==bc().description)){var mc={},wc=function(){var t=arguments.length<1||void 0===arguments[0]?void 0:String(arguments[0]),e=this instanceof wc?new bc(t):void 0===t?bc():bc(t);return""===t&&(mc[e]=!0),e};Wt(wc,bc);var Ec=wc.prototype=bc.prototype;Ec.constructor=wc;var Sc=Ec.toString,Oc="Symbol(test)"==String(bc("test")),Tc=/^Symbol\((.*)\)[^)]+$/;yc(Ec,"description",{configurable:!0,get:function(){var t=m(this)?this.valueOf():this,e=Sc.call(t);if(C(mc,t))return"";var r=Oc?e.slice(7,-1):e.replace(Tc,"$1");return""===r?void 0:r}}),Xt({global:!0,forced:!0},{Symbol:wc})}Wa("hasInstance"),Wa("isConcatSpreadable"),Wa("iterator"),Wa("match"),Wa("matchAll"),Wa("replace"),Wa("search"),Wa("species"),Wa("split"),Wa("toPrimitive"),Wa("toStringTag"),Wa("unscopables"),Ge(a.JSON,"JSON",!0),Ge(Math,"Math",!0),Wa("asyncDispose"),Wa("dispose"),Wa("matcher"),Wa("metadata"),Wa("observable"),Wa("patternMatch"),Wa("replaceAll");var Rc=function(t,e){var r=this;if(!(r instanceof Rc))return new Rc(t,e);He&&(r=He(new Error(void 0),De(r))),void 0!==e&&ot(r,"message",Pe(e));var n=[];return 
qr(t,n.push,{that:n}),ot(r,"errors",n),r};Rc.prototype=ce(Error.prototype,{constructor:h(5,Rc),message:h(5,""),name:h(5,"AggregateError")}),Xt({global:!0},{AggregateError:Rc});var xc,Ac,Ic,jc,_c=a.Promise,Pc=/(?:iphone|ipod|ipad).*applewebkit/i.test(S),Mc="process"==d(a.process),Nc=a.setImmediate,kc=a.clearImmediate,Uc=a.process,Lc=a.MessageChannel,Dc=a.Dispatch,Cc=0,Fc={};try{xc=a.location}catch(t){}var Bc=function(t){if(Fc.hasOwnProperty(t)){var e=Fc[t];delete Fc[t],e()}},Wc=function(t){return function(){Bc(t)}},zc=function(t){Bc(t.data)},Gc=function(t){a.postMessage(String(t),xc.protocol+"//"+xc.host)};Nc&&kc||(Nc=function(t){for(var e=[],r=arguments.length,n=1;r>n;)e.push(arguments[n++]);return Fc[++Cc]=function(){("function"==typeof t?t:Function(t)).apply(void 0,e)},Ac(Cc),Cc},kc=function(t){delete Fc[t]},Mc?Ac=function(t){Uc.nextTick(Wc(t))}:Dc&&Dc.now?Ac=function(t){Dc.now(Wc(t))}:Lc&&!Pc?(jc=(Ic=new Lc).port2,Ic.port1.onmessage=zc,Ac=he(jc.postMessage,jc,1)):a.addEventListener&&"function"==typeof postMessage&&!a.importScripts&&xc&&"file:"!==xc.protocol&&!c(Gc)?(Ac=Gc,a.addEventListener("message",zc,!1)):Ac="onreadystatechange"in J("script")?function(t){ee.appendChild(J("script")).onreadystatechange=function(){ee.removeChild(this),Bc(t)}}:function(t){setTimeout(Wc(t),0)});var Kc,$c,qc,Vc,Hc,Yc,Xc,Jc,Qc={set:Nc,clear:kc},Zc=/web0s(?!.*chrome)/i.test(S),tu=Qc.set,eu=a.MutationObserver||a.WebKitMutationObserver,ru=a.document,nu=a.process,ou=a.Promise,iu=(0,tt.f)(a,"queueMicrotask"),au=iu&&iu.value;au||(Kc=function(){var t,e;for(Mc&&(t=nu.domain)&&t.exit();$c;){e=$c.fn,$c=$c.next;try{e()}catch(t){throw $c?Vc():qc=void 0,t}}qc=void 0,t&&t.enter()},Pc||Mc||Zc||!eu||!ru?ou&&ou.resolve?((Xc=ou.resolve(void 0)).constructor=ou,Jc=Xc.then,Vc=function(){Jc.call(Xc,Kc)}):Vc=Mc?function(){nu.nextTick(Kc)}:function(){tu.call(a,Kc)}:(Hc=!0,Yc=ru.createTextNode(""),new eu(Kc).observe(Yc,{characterData:!0}),Vc=function(){Yc.data=Hc=!Hc}));var cu,uu,su,fu,lu=au||function(t){var e={fn:t,next:void 0};qc&&(qc.next=e),$c||($c=e,Vc()),qc=e},hu=function(t){var e,r;this.promise=new t((function(t,n){if(void 0!==e||void 0!==r)throw TypeError("Bad Promise constructor");e=t,r=n})),this.resolve=le(e),this.reject=le(r)},pu={f:function(t){return new hu(t)}},du=function(t,e){if(et(t),m(e)&&e.constructor===t)return e;var r=pu.f(t);return(0,r.resolve)(e),r.promise},vu=function(t){try{return{error:!1,value:t()}}catch(t){return{error:!0,value:t}}},gu="object"==typeof window,yu=Qc.set,bu=$("species"),mu="Promise",wu=St.get,Eu=St.set,Su=St.getterFor(mu),Ou=_c&&_c.prototype,Tu=_c,Ru=Ou,xu=a.TypeError,Au=a.document,Iu=a.process,ju=pu.f,_u=ju,Pu=!!(Au&&Au.createEvent&&a.dispatchEvent),Mu="function"==typeof PromiseRejectionEvent,Nu="unhandledrejection",ku=!1,Uu=Ht(mu,(function(){var t=st(Tu),e=t!==String(Tu);if(!e&&66===A)return!0;if(A>=51&&/native code/.test(t))return!1;var r=new Tu((function(t){t(1)})),n=function(t){t((function(){}),(function(){}))};return(r.constructor={})[bu]=n,!(ku=r.then((function(){}))instanceof n)||!e&&gu&&!Mu})),Lu=Uu||!Tr((function(t){Tu.all(t).catch((function(){}))})),Du=function(t){var e;return!(!m(t)||"function"!=typeof(e=t.then))&&e},Cu=function(t,e){if(!t.notified){t.notified=!0;var r=t.reactions;lu((function(){for(var n=t.value,o=1==t.state,i=0;r.length>i;){var a,c,u,s=r[i++],f=o?s.ok:s.fail,l=s.resolve,h=s.reject,p=s.domain;try{f?(o||(2===t.rejection&&zu(t),t.rejection=1),!0===f?a=n:(p&&p.enter(),a=f(n),p&&(p.exit(),u=!0)),a===s.promise?h(xu("Promise-chain 
cycle")):(c=Du(a))?c.call(a,l,h):l(a)):h(n)}catch(t){p&&!u&&p.exit(),h(t)}}t.reactions=[],t.notified=!1,e&&!t.rejection&&Bu(t)}))}},Fu=function(t,e,r){var n,o;Pu?((n=Au.createEvent("Event")).promise=e,n.reason=r,n.initEvent(t,!1,!0),a.dispatchEvent(n)):n={promise:e,reason:r},!Mu&&(o=a["on"+t])?o(n):t===Nu&&function(t,e){var r=a.console;r&&r.error&&(1===arguments.length?r.error(t):r.error(t,e))}("Unhandled promise rejection",r)},Bu=function(t){yu.call(a,(function(){var e,r=t.facade,n=t.value;if(Wu(t)&&(e=vu((function(){Mc?Iu.emit("unhandledRejection",n,r):Fu(Nu,r,n)})),t.rejection=Mc||Wu(t)?2:1,e.error))throw e.value}))},Wu=function(t){return 1!==t.rejection&&!t.parent},zu=function(t){yu.call(a,(function(){var e=t.facade;Mc?Iu.emit("rejectionHandled",e):Fu("rejectionhandled",e,t.value)}))},Gu=function(t,e,r){return function(n){t(e,n,r)}},Ku=function(t,e,r){t.done||(t.done=!0,r&&(t=r),t.value=e,t.state=2,Cu(t,!0))},$u=function t(e,r,n){if(!e.done){e.done=!0,n&&(e=n);try{if(e.facade===r)throw xu("Promise can't be resolved itself");var o=Du(r);o?lu((function(){var n={done:!1};try{o.call(r,Gu(t,n,e),Gu(Ku,n,e))}catch(t){Ku(n,t,e)}})):(e.value=r,e.state=1,Cu(e,!1))}catch(t){Ku({done:!1},t,e)}}};if(Uu&&(Tu=function(t){Vr(this,Tu,mu),le(t),cu.call(this);var e=wu(this);try{t(Gu($u,e),Gu(Ku,e))}catch(t){Ku(e,t)}},(cu=function(t){Eu(this,{type:mu,done:!1,notified:!1,parent:!1,reactions:[],rejection:!1,state:0,value:void 0})}).prototype=Xr(Ru=Tu.prototype,{then:function(t,e){var r=Su(this),n=ju(hn(this,Tu));return n.ok="function"!=typeof t||t,n.fail="function"==typeof e&&e,n.domain=Mc?Iu.domain:void 0,r.parent=!0,r.reactions.push(n),0!=r.state&&Cu(r,!1),n.promise},catch:function(t){return this.then(void 0,t)}}),uu=function(){var t=new cu,e=wu(t);this.promise=t,this.resolve=Gu($u,e),this.reject=Gu(Ku,e)},pu.f=ju=function(t){return t===Tu||t===su?new uu(t):_u(t)},"function"==typeof _c&&Ou!==Object.prototype)){fu=Ou.then,ku||(Ot(Ou,"then",(function(t,e){var r=this;return new Tu((function(t,e){fu.call(r,t,e)})).then(t,e)}),{unsafe:!0}),Ot(Ou,"catch",Ru.catch,{unsafe:!0}));try{delete Ou.constructor}catch(t){}He&&He(Ou,Ru)}Xt({global:!0,wrap:!0,forced:Uu},{Promise:Tu}),Ge(Tu,mu,!1),Qr(mu),su=E(mu),Xt({target:mu,stat:!0,forced:Uu},{reject:function(t){var e=ju(this);return e.reject.call(void 0,t),e.promise}}),Xt({target:mu,stat:!0,forced:Uu},{resolve:function(t){return du(this,t)}}),Xt({target:mu,stat:!0,forced:Lu},{all:function(t){var e=this,r=ju(e),n=r.resolve,o=r.reject,i=vu((function(){var r=le(e.resolve),i=[],a=0,c=1;qr(t,(function(t){var u=a++,s=!1;i.push(void 0),c++,r.call(e,t).then((function(t){s||(s=!0,i[u]=t,--c||n(i))}),o)})),--c||n(i)}));return i.error&&o(i.value),r.promise},race:function(t){var e=this,r=ju(e),n=r.reject,o=vu((function(){var o=le(e.resolve);qr(t,(function(t){o.call(e,t).then(r.resolve,n)}))}));return o.error&&n(o.value),r.promise}}),Xt({target:"Promise",stat:!0},{allSettled:function(t){var e=this,r=pu.f(e),n=r.resolve,o=r.reject,i=vu((function(){var r=le(e.resolve),o=[],i=0,a=1;qr(t,(function(t){var c=i++,u=!1;o.push(void 0),a++,r.call(e,t).then((function(t){u||(u=!0,o[c]={status:"fulfilled",value:t},--a||n(o))}),(function(t){u||(u=!0,o[c]={status:"rejected",reason:t},--a||n(o))}))})),--a||n(o)}));return i.error&&o(i.value),r.promise}});var qu="No one promise resolved";Xt({target:"Promise",stat:!0},{any:function(t){var e=this,r=pu.f(e),n=r.resolve,o=r.reject,i=vu((function(){var r=le(e.resolve),i=[],a=0,c=1,u=!1;qr(t,(function(t){var s=a++,f=!1;i.push(void 
0),c++,r.call(e,t).then((function(t){f||u||(u=!0,n(t))}),(function(t){f||u||(f=!0,i[s]=t,--c||o(new(E("AggregateError"))(i,qu)))}))})),--c||o(new(E("AggregateError"))(i,qu))}));return i.error&&o(i.value),r.promise}});var Vu=!!_c&&c((function(){_c.prototype.finally.call({then:function(){}},(function(){}))}));if(Xt({target:"Promise",proto:!0,real:!0,forced:Vu},{finally:function(t){var e=hn(this,E("Promise")),r="function"==typeof t;return this.then(r?function(r){return du(e,t()).then((function(){return r}))}:t,r?function(r){return du(e,t()).then((function(){throw r}))}:t)}}),"function"==typeof _c){var Hu=E("Promise").prototype.finally;_c.prototype.finally!==Hu&&Ot(_c.prototype,"finally",Hu,{unsafe:!0})}Xt({target:"Promise",stat:!0},{try:function(t){var e=pu.f(this),r=vu(t);return(r.error?e.reject:e.resolve)(r.value),e.promise}});var Yu={CSSRuleList:0,CSSStyleDeclaration:0,CSSValueList:0,ClientRectList:0,DOMRectList:0,DOMStringList:0,DOMTokenList:1,DataTransferItemList:0,FileList:0,HTMLAllCollection:0,HTMLCollection:0,HTMLFormElement:0,HTMLSelectElement:0,MediaList:0,MimeTypeArray:0,NamedNodeMap:0,NodeList:1,PaintRequestList:0,Plugin:0,PluginArray:0,SVGLengthList:0,SVGNumberList:0,SVGPathSegList:0,SVGPointList:0,SVGStringList:0,SVGTransformList:0,SourceBufferList:0,StyleSheetList:0,TextTrackCueList:0,TextTrackList:0,TouchList:0},Xu=we.forEach,Ju=function(t,e){var r=[].forEach;return!!r&&c((function(){r.call(null,(function(){throw 1}),1)}))}()?[].forEach:function(t){return Xu(this,t,arguments.length>1?arguments[1]:void 0)};for(var Qu in Yu){var Zu=a[Qu],ts=Zu&&Zu.prototype;if(ts&&ts.forEach!==Ju)try{ot(ts,"forEach",Ju)}catch(t){ts.forEach=Ju}}var es=$("iterator"),rs=$("toStringTag"),ns=Pr.values;for(var os in Yu){var is=a[os],as=is&&is.prototype;if(as){if(as[es]!==ns)try{ot(as,es,ns)}catch(t){as[es]=ns}if(as[rs]||ot(as,rs,os),Yu[os])for(var cs in Pr)if(as[cs]!==Pr[cs])try{ot(as,cs,Pr[cs])}catch(t){as[cs]=Pr[cs]}}}var us="undefined"!=typeof globalThis&&globalThis||"undefined"!=typeof self&&self||void 0!==us&&us,ss="URLSearchParams"in us,fs="Symbol"in us&&"iterator"in Symbol,ls="FileReader"in us&&"Blob"in us&&function(){try{return new Blob,!0}catch(t){return!1}}(),hs="FormData"in us,ps="ArrayBuffer"in us;if(ps)var ds=["[object Int8Array]","[object Uint8Array]","[object Uint8ClampedArray]","[object Int16Array]","[object Uint16Array]","[object Int32Array]","[object Uint32Array]","[object Float32Array]","[object Float64Array]"],vs=ArrayBuffer.isView||function(t){return t&&ds.indexOf(Object.prototype.toString.call(t))>-1};function gs(t){if("string"!=typeof t&&(t=String(t)),/[^a-z0-9\-#$%&'*+.^_`|~!]/i.test(t)||""===t)throw new TypeError('Invalid character in header field name: "'+t+'"');return t.toLowerCase()}function ys(t){return"string"!=typeof t&&(t=String(t)),t}function bs(t){var e={next:function(){var e=t.shift();return{done:void 0===e,value:e}}};return fs&&(e[Symbol.iterator]=function(){return e}),e}function ms(t){this.map={},t instanceof ms?t.forEach((function(t,e){this.append(e,t)}),this):Array.isArray(t)?t.forEach((function(t){this.append(t[0],t[1])}),this):t&&Object.getOwnPropertyNames(t).forEach((function(e){this.append(e,t[e])}),this)}function ws(t){if(t.bodyUsed)return Promise.reject(new TypeError("Already read"));t.bodyUsed=!0}function Es(t){return new Promise((function(e,r){t.onload=function(){e(t.result)},t.onerror=function(){r(t.error)}}))}function Ss(t){var e=new FileReader,r=Es(e);return e.readAsArrayBuffer(t),r}function Os(t){if(t.slice)return t.slice(0);var e=new 
Uint8Array(t.byteLength);return e.set(new Uint8Array(t)),e.buffer}function Ts(){return this.bodyUsed=!1,this._initBody=function(t){var e;this.bodyUsed=this.bodyUsed,this._bodyInit=t,t?"string"==typeof t?this._bodyText=t:ls&&Blob.prototype.isPrototypeOf(t)?this._bodyBlob=t:hs&&FormData.prototype.isPrototypeOf(t)?this._bodyFormData=t:ss&&URLSearchParams.prototype.isPrototypeOf(t)?this._bodyText=t.toString():ps&&ls&&(e=t)&&DataView.prototype.isPrototypeOf(e)?(this._bodyArrayBuffer=Os(t.buffer),this._bodyInit=new Blob([this._bodyArrayBuffer])):ps&&(ArrayBuffer.prototype.isPrototypeOf(t)||vs(t))?this._bodyArrayBuffer=Os(t):this._bodyText=t=Object.prototype.toString.call(t):this._bodyText="",this.headers.get("content-type")||("string"==typeof t?this.headers.set("content-type","text/plain;charset=UTF-8"):this._bodyBlob&&this._bodyBlob.type?this.headers.set("content-type",this._bodyBlob.type):ss&&URLSearchParams.prototype.isPrototypeOf(t)&&this.headers.set("content-type","application/x-www-form-urlencoded;charset=UTF-8"))},ls&&(this.blob=function(){var t=ws(this);if(t)return t;if(this._bodyBlob)return Promise.resolve(this._bodyBlob);if(this._bodyArrayBuffer)return Promise.resolve(new Blob([this._bodyArrayBuffer]));if(this._bodyFormData)throw new Error("could not read FormData body as blob");return Promise.resolve(new Blob([this._bodyText]))},this.arrayBuffer=function(){return this._bodyArrayBuffer?ws(this)||(ArrayBuffer.isView(this._bodyArrayBuffer)?Promise.resolve(this._bodyArrayBuffer.buffer.slice(this._bodyArrayBuffer.byteOffset,this._bodyArrayBuffer.byteOffset+this._bodyArrayBuffer.byteLength)):Promise.resolve(this._bodyArrayBuffer)):this.blob().then(Ss)}),this.text=function(){var t=ws(this);if(t)return t;if(this._bodyBlob)return function(t){var e=new FileReader,r=Es(e);return e.readAsText(t),r}(this._bodyBlob);if(this._bodyArrayBuffer)return Promise.resolve(function(t){for(var e=new Uint8Array(t),r=new Array(e.length),n=0;n<e.length;n++)r[n]=String.fromCharCode(e[n]);return r.join("")}(this._bodyArrayBuffer));if(this._bodyFormData)throw new Error("could not read FormData body as text");return Promise.resolve(this._bodyText)},hs&&(this.formData=function(){return this.text().then(As)}),this.json=function(){return this.text().then(JSON.parse)},this}ms.prototype.append=function(t,e){t=gs(t),e=ys(e);var r=this.map[t];this.map[t]=r?r+", "+e:e},ms.prototype.delete=function(t){delete this.map[gs(t)]},ms.prototype.get=function(t){return t=gs(t),this.has(t)?this.map[t]:null},ms.prototype.has=function(t){return this.map.hasOwnProperty(gs(t))},ms.prototype.set=function(t,e){this.map[gs(t)]=ys(e)},ms.prototype.forEach=function(t,e){for(var r in this.map)this.map.hasOwnProperty(r)&&t.call(e,this.map[r],r,this)},ms.prototype.keys=function(){var t=[];return this.forEach((function(e,r){t.push(r)})),bs(t)},ms.prototype.values=function(){var t=[];return this.forEach((function(e){t.push(e)})),bs(t)},ms.prototype.entries=function(){var t=[];return this.forEach((function(e,r){t.push([r,e])})),bs(t)},fs&&(ms.prototype[Symbol.iterator]=ms.prototype.entries);var Rs=["DELETE","GET","HEAD","OPTIONS","POST","PUT"];function xs(t,e){if(!(this instanceof xs))throw new TypeError('Please use the "new" operator, this DOM object constructor cannot be called as a function.');var r=(e=e||{}).body;if(t instanceof xs){if(t.bodyUsed)throw new TypeError("Already read");this.url=t.url,this.credentials=t.credentials,e.headers||(this.headers=new 
ms(t.headers)),this.method=t.method,this.mode=t.mode,this.signal=t.signal,r||null==t._bodyInit||(r=t._bodyInit,t.bodyUsed=!0)}else this.url=String(t);if(this.credentials=e.credentials||this.credentials||"same-origin",!e.headers&&this.headers||(this.headers=new ms(e.headers)),this.method=function(t){var e=t.toUpperCase();return Rs.indexOf(e)>-1?e:t}(e.method||this.method||"GET"),this.mode=e.mode||this.mode||null,this.signal=e.signal||this.signal,this.referrer=null,("GET"===this.method||"HEAD"===this.method)&&r)throw new TypeError("Body not allowed for GET or HEAD requests");if(this._initBody(r),!("GET"!==this.method&&"HEAD"!==this.method||"no-store"!==e.cache&&"no-cache"!==e.cache)){var n=/([?&])_=[^&]*/;n.test(this.url)?this.url=this.url.replace(n,"$1_="+(new Date).getTime()):this.url+=(/\?/.test(this.url)?"&":"?")+"_="+(new Date).getTime()}}function As(t){var e=new FormData;return t.trim().split("&").forEach((function(t){if(t){var r=t.split("="),n=r.shift().replace(/\+/g," "),o=r.join("=").replace(/\+/g," ");e.append(decodeURIComponent(n),decodeURIComponent(o))}})),e}function Is(t,e){if(!(this instanceof Is))throw new TypeError('Please use the "new" operator, this DOM object constructor cannot be called as a function.');e||(e={}),this.type="default",this.status=void 0===e.status?200:e.status,this.ok=this.status>=200&&this.status<300,this.statusText=void 0===e.statusText?"":""+e.statusText,this.headers=new ms(e.headers),this.url=e.url||"",this._initBody(t)}xs.prototype.clone=function(){return new xs(this,{body:this._bodyInit})},Ts.call(xs.prototype),Ts.call(Is.prototype),Is.prototype.clone=function(){return new Is(this._bodyInit,{status:this.status,statusText:this.statusText,headers:new ms(this.headers),url:this.url})},Is.error=function(){var t=new Is(null,{status:0,statusText:""});return t.type="error",t};var js=[301,302,303,307,308];Is.redirect=function(t,e){if(-1===js.indexOf(e))throw new RangeError("Invalid status code");return new Is(null,{status:e,headers:{location:t}})};var _s=us.DOMException;try{new _s}catch(t){(_s=function(t,e){this.message=t,this.name=e;var r=Error(t);this.stack=r.stack}).prototype=Object.create(Error.prototype),_s.prototype.constructor=_s}function Ps(t,e){return new Promise((function(r,n){var o=new xs(t,e);if(o.signal&&o.signal.aborted)return n(new _s("Aborted","AbortError"));var i=new XMLHttpRequest;function a(){i.abort()}i.onload=function(){var t,e,n={status:i.status,statusText:i.statusText,headers:(t=i.getAllResponseHeaders()||"",e=new ms,t.replace(/\r?\n[\t ]+/g," ").split("\r").map((function(t){return 0===t.indexOf("\n")?t.substr(1,t.length):t})).forEach((function(t){var r=t.split(":"),n=r.shift().trim();if(n){var o=r.join(":").trim();e.append(n,o)}})),e)};n.url="responseURL"in i?i.responseURL:n.headers.get("X-Request-URL");var o="response"in i?i.response:i.responseText;setTimeout((function(){r(new Is(o,n))}),0)},i.onerror=function(){setTimeout((function(){n(new TypeError("Network request failed"))}),0)},i.ontimeout=function(){setTimeout((function(){n(new TypeError("Network request failed"))}),0)},i.onabort=function(){setTimeout((function(){n(new _s("Aborted","AbortError"))}),0)},i.open(o.method,function(t){try{return""===t&&us.location.href?us.location.href:t}catch(e){return t}}(o.url),!0),"include"===o.credentials?i.withCredentials=!0:"omit"===o.credentials&&(i.withCredentials=!1),"responseType"in 
i&&(ls?i.responseType="blob":ps&&o.headers.get("Content-Type")&&-1!==o.headers.get("Content-Type").indexOf("application/octet-stream")&&(i.responseType="arraybuffer")),!e||"object"!=typeof e.headers||e.headers instanceof ms?o.headers.forEach((function(t,e){i.setRequestHeader(e,t)})):Object.getOwnPropertyNames(e.headers).forEach((function(t){i.setRequestHeader(t,ys(e.headers[t]))})),o.signal&&(o.signal.addEventListener("abort",a),i.onreadystatechange=function(){4===i.readyState&&o.signal.removeEventListener("abort",a)}),i.send(void 0===o._bodyInit?null:o._bodyInit)}))}Ps.polyfill=!0,us.fetch||(us.fetch=Ps,us.Headers=ms,us.Request=xs,us.Response=Is),function(t){var e=function(){try{return!!Symbol.iterator}catch(t){return!1}}(),r=function(t){var r={next:function(){var e=t.shift();return{done:void 0===e,value:e}}};return e&&(r[Symbol.iterator]=function(){return r}),r},n=function(t){return encodeURIComponent(t).replace(/%20/g,"+")},o=function(t){return decodeURIComponent(String(t).replace(/\+/g," "))};(function(){try{var e=t.URLSearchParams;return"a=1"===new e("?a=1").toString()&&"function"==typeof e.prototype.set&&"function"==typeof e.prototype.entries}catch(t){return!1}})()||function(){var o=function t(e){Object.defineProperty(this,"_entries",{writable:!0,value:{}});var r=typeof e;if("undefined"===r);else if("string"===r)""!==e&&this._fromString(e);else if(e instanceof t){var n=this;e.forEach((function(t,e){n.append(e,t)}))}else{if(null===e||"object"!==r)throw new TypeError("Unsupported input's type for URLSearchParams");if("[object Array]"===Object.prototype.toString.call(e))for(var o=0;o<e.length;o++){var i=e[o];if("[object Array]"!==Object.prototype.toString.call(i)&&2===i.length)throw new TypeError("Expected [string, any] as entry at index "+o+" of URLSearchParams's input");this.append(i[0],i[1])}else for(var a in e)e.hasOwnProperty(a)&&this.append(a,e[a])}},i=o.prototype;i.append=function(t,e){t in this._entries?this._entries[t].push(String(e)):this._entries[t]=[String(e)]},i.delete=function(t){delete this._entries[t]},i.get=function(t){return t in this._entries?this._entries[t][0]:null},i.getAll=function(t){return t in this._entries?this._entries[t].slice(0):[]},i.has=function(t){return t in this._entries},i.set=function(t,e){this._entries[t]=[String(e)]},i.forEach=function(t,e){var r;for(var n in this._entries)if(this._entries.hasOwnProperty(n)){r=this._entries[n];for(var o=0;o<r.length;o++)t.call(e,r[o],n,this)}},i.keys=function(){var t=[];return this.forEach((function(e,r){t.push(r)})),r(t)},i.values=function(){var t=[];return this.forEach((function(e){t.push(e)})),r(t)},i.entries=function(){var t=[];return this.forEach((function(e,r){t.push([r,e])})),r(t)},e&&(i[Symbol.iterator]=i.entries),i.toString=function(){var t=[];return this.forEach((function(e,r){t.push(n(r)+"="+n(e))})),t.join("&")},t.URLSearchParams=o}();var i=t.URLSearchParams.prototype;"function"!=typeof i.sort&&(i.sort=function(){var t=this,e=[];this.forEach((function(r,n){e.push([n,r]),t._entries||t.delete(n)})),e.sort((function(t,e){return t[0]<e[0]?-1:t[0]>e[0]?1:0})),t._entries&&(t._entries={});for(var r=0;r<e.length;r++)this.append(e[r][0],e[r][1])}),"function"!=typeof i._fromString&&Object.defineProperty(i,"_fromString",{enumerable:!1,configurable:!1,writable:!1,value:function(t){if(this._entries)this._entries={};else{var e=[];this.forEach((function(t,r){e.push(r)}));for(var r=0;r<e.length;r++)this.delete(e[r])}var 
n,i=(t=t.replace(/^\?/,"")).split("&");for(r=0;r<i.length;r++)n=i[r].split("="),this.append(o(n[0]),n.length>1?o(n[1]):"")}})}(void 0!==t?t:"undefined"!=typeof window?window:"undefined"!=typeof self?self:t),function(t){var e,r,n;if(function(){try{var e=new t.URL("b","http://a");return e.pathname="c d","http://a/c%20d"===e.href&&e.searchParams}catch(t){return!1}}()||(e=t.URL,n=(r=function(e,r){"string"!=typeof e&&(e=String(e)),r&&"string"!=typeof r&&(r=String(r));var n,o=document;if(r&&(void 0===t.location||r!==t.location.href)){r=r.toLowerCase(),(n=(o=document.implementation.createHTMLDocument("")).createElement("base")).href=r,o.head.appendChild(n);try{if(0!==n.href.indexOf(r))throw new Error(n.href)}catch(t){throw new Error("URL unable to set base "+r+" due to "+t)}}var i=o.createElement("a");i.href=e,n&&(o.body.appendChild(i),i.href=i.href);var a=o.createElement("input");if(a.type="url",a.value=e,":"===i.protocol||!/:/.test(i.href)||!a.checkValidity()&&!r)throw new TypeError("Invalid URL");Object.defineProperty(this,"_anchorElement",{value:i});var c=new t.URLSearchParams(this.search),u=!0,s=!0,f=this;["append","delete","set"].forEach((function(t){var e=c[t];c[t]=function(){e.apply(c,arguments),u&&(s=!1,f.search=c.toString(),s=!0)}})),Object.defineProperty(this,"searchParams",{value:c,enumerable:!0});var l=void 0;Object.defineProperty(this,"_updateSearchParams",{enumerable:!1,configurable:!1,writable:!1,value:function(){this.search!==l&&(l=this.search,s&&(u=!1,this.searchParams._fromString(this.search),u=!0))}})}).prototype,["hash","host","hostname","port","protocol"].forEach((function(t){!function(t){Object.defineProperty(n,t,{get:function(){return this._anchorElement[t]},set:function(e){this._anchorElement[t]=e},enumerable:!0})}(t)})),Object.defineProperty(n,"search",{get:function(){return this._anchorElement.search},set:function(t){this._anchorElement.search=t,this._updateSearchParams()},enumerable:!0}),Object.defineProperties(n,{toString:{get:function(){var t=this;return function(){return t.href}}},href:{get:function(){return this._anchorElement.href.replace(/\?$/,"")},set:function(t){this._anchorElement.href=t,this._updateSearchParams()},enumerable:!0},pathname:{get:function(){return this._anchorElement.pathname.replace(/(^\/?)/,"/")},set:function(t){this._anchorElement.pathname=t},enumerable:!0},origin:{get:function(){return this._anchorElement.protocol+"//"+this._anchorElement.hostname+(this._anchorElement.port!={"http:":80,"https:":443,"ftp:":21}[this._anchorElement.protocol]&&""!==this._anchorElement.port?":"+this._anchorElement.port:"")},enumerable:!0},password:{get:function(){return""},set:function(t){},enumerable:!0},username:{get:function(){return""},set:function(t){},enumerable:!0}}),r.createObjectURL=function(t){return e.createObjectURL.apply(e,arguments)},r.revokeObjectURL=function(t){return e.revokeObjectURL.apply(e,arguments)},t.URL=r),void 0!==t.location&&!("origin"in t.location)){var o=function(){return t.location.protocol+"//"+t.location.hostname+(t.location.port?":"+t.location.port:"")};try{Object.defineProperty(t.location,"origin",{get:o,enumerable:!0})}catch(e){setInterval((function(){t.location.origin=o()}),100)}}}(void 0!==t?t:"undefined"!=typeof window?window:"undefined"!=typeof self?self:t);var Ms=Object.getOwnPropertySymbols,Ns=Object.prototype.hasOwnProperty,ks=Object.prototype.propertyIsEnumerable;function Us(t){if(null==t)throw new TypeError("Object.assign cannot be called with null or undefined");return Object(t)}var 
Ls=function(){try{if(!Object.assign)return!1;var t=new String("abc");if(t[5]="de","5"===Object.getOwnPropertyNames(t)[0])return!1;for(var e={},r=0;r<10;r++)e["_"+String.fromCharCode(r)]=r;if("0123456789"!==Object.getOwnPropertyNames(e).map((function(t){return e[t]})).join(""))return!1;var n={};return"abcdefghijklmnopqrst".split("").forEach((function(t){n[t]=t})),"abcdefghijklmnopqrst"===Object.keys(Object.assign({},n)).join("")}catch(t){return!1}}()?Object.assign:function(t,e){for(var r,n,o=Us(t),i=1;i<arguments.length;i++){for(var a in r=Object(arguments[i]))Ns.call(r,a)&&(o[a]=r[a]);if(Ms){n=Ms(r);for(var c=0;c<n.length;c++)ks.call(r,n[c])&&(o[n[c]]=r[n[c]])}}return o};Object.assign=Ls}()},947:function(t,e,r){"use strict";r(609)}},function(t){var e;e=947,t(t.s=e)}]);
//# sourceMappingURL=polyfill-f2b578758e093ca339ba.js.map
|
|
|
error.rs
|
// Copyright 2018 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! An implementation of the `Error` type.
/// The error type for I/O operations with storage.
#[derive(Fail, Debug, Clone)]
#[fail(display = "{}", message)]
pub struct
|
{
message: String,
}
impl Error {
/// Creates a new storage error with an informative message about the reason.
///
/// # Examples
///
/// ```
/// use exonum::storage::Error;
///
/// let error = Error::new("Oh no!");
/// ```
pub fn new<T: Into<String>>(message: T) -> Error {
Error {
message: message.into(),
}
}
}
|
Error
|
queue_reg.go
|
// Licensed to Elasticsearch B.V. under one or more contributor
// license agreements. See the NOTICE file distributed with
// this work for additional information regarding copyright
// ownership. Elasticsearch B.V. licenses this file to you under
// the Apache License, Version 2.0 (the "License"); you may
// not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package queue
import (
"github.com/elastic/beats/v7/libbeat/feature"
)
// Namespace is the feature namespace for queue definition.
const Namespace = "libbeat.queue"
// RegisterQueueType registers a new queue type.
func RegisterQueueType(name string, factory Factory, details feature.Details) {
feature.MustRegister(feature.New(Namespace, name, factory, details))
}
// FindFactory retrieves a queue type's constructor. It returns nil if the queue type is unknown.
func
|
(name string) Factory {
f, err := feature.GlobalRegistry().Lookup(Namespace, name)
if err != nil {
return nil
}
factory, ok := f.Factory().(Factory)
if !ok {
return nil
}
return factory
}
|
FindFactory
|
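A note on the pattern above: the queue registry is a named-factory table with a type-asserted lookup. A minimal self-contained sketch of the same idea (the Factory type and registry here are stand-ins, not the libbeat API):

package main

import "fmt"

// Factory is a stand-in for the queue constructor type.
type Factory func() string

// registry maps queue type names to their factories.
var registry = map[string]Factory{}

// MustRegister panics on duplicate names, mirroring feature.MustRegister.
func MustRegister(name string, f Factory) {
	if _, dup := registry[name]; dup {
		panic("queue type already registered: " + name)
	}
	registry[name] = f
}

// FindFactory returns nil when the queue type is unknown.
func FindFactory(name string) Factory {
	return registry[name]
}

func main() {
	MustRegister("mem", func() string { return "memory queue" })
	if f := FindFactory("mem"); f != nil {
		fmt.Println(f()) // memory queue
	}
	fmt.Println(FindFactory("spool") == nil) // true: unknown type
}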
cover.js
|
/*! UIkit 2.10.0 | http://www.getuikit.com | (c) 2014 YOOtheme | MIT License */
(function(addon) {
var component;
if (jQuery && jQuery.UIkit) {
component = addon(jQuery, jQuery.UIkit);
|
if (typeof define == "function" && define.amd) {
define("uikit-cover", ["uikit"], function(){
return component || addon(jQuery, jQuery.UIkit);
});
}
})(function($, UI){
"use strict";
UI.component('cover', {
defaults: {
automute : true
},
init: function() {
this.parent = this.element.parent();
this.dimension = {w: this.element.width(), h: this.element.height()};
this.ratio = this.dimension.w / this.dimension.h;
UI.$win.on('load resize orientationchange', UI.Utils.debounce(function(){
this.check();
}.bind(this), 100));
this.check();
this.element.data("cover", this);
if (this.element.is('iframe') && this.options.automute) {
var src = this.element.attr('src');
this.element.attr('src', '').on('load', function(){
this.contentWindow.postMessage('{ "event": "command", "func": "mute", "method":"setVolume", "value":0}', '*');
}).attr('src', [src, (src.indexOf('?') > -1 ? '&':'?'), 'enablejsapi=1&api=1'].join(''));
}
},
check: function() {
var w = this.parent.width(), h = this.parent.height(), width, height;
// if element height < parent height (gap underneath)
if ((w / this.ratio) < h) {
width = Math.ceil(h * this.ratio);
height = h;
// element width < parent width (gap to right)
} else {
width = w;
height = Math.ceil(w / this.ratio);
}
this.element.css({
'width' : width,
'height' : height
});
}
});
// auto init
UI.ready(function(context) {
$("[data-uk-cover]", context).each(function(){
var ele = $(this);
if(!ele.data("cover")) {
var plugin = UI.cover(ele, UI.Utils.options(ele.attr("data-uk-cover")));
}
});
});
});
|
}
|
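check() above is plain cover-fit arithmetic: when scaling to the parent width would leave a gap underneath, scale to the parent height instead. The same branch in a runnable sketch (Go is used purely for illustration):

package main

import (
	"fmt"
	"math"
)

// coverSize returns the element size needed to fully cover a parent of
// size (w, h) while preserving the element's aspect ratio, as in check().
func coverSize(w, h, ratio float64) (width, height float64) {
	if w/ratio < h {
		// element height < parent height (gap underneath): match parent height
		return math.Ceil(h * ratio), h
	}
	// element width < parent width (gap to the right): match parent width
	return w, math.Ceil(w / ratio)
}

func main() {
	fmt.Println(coverSize(800, 600, 16.0/9.0)) // 1067 600
	fmt.Println(coverSize(800, 300, 16.0/9.0)) // 800 450
}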
coherence_copy_like_err_tuple.rs
|
// Test that we are able to introduce a negative constraint that
// `MyType: !MyTrait` along with other "fundamental" wrappers.
// aux-build:coherence_copy_like_lib.rs
extern crate coherence_copy_like_lib as lib;
struct MyType { x: i32 }
trait MyTrait { fn foo() {} }
impl<T: lib::MyCopy> MyTrait for T { }
// Tuples are not fundamental, therefore this would require that
//
// (MyType,): !MyTrait
//
// which we cannot approve.
impl MyTrait for (MyType,) { } //~ ERROR E0119
fn main()
|
{ }
|
|
server.ts
|
//import * as config from "./config.json";
import * as http from "http";
//import * as jss from "json-stringify-safe";
import * as path from "path";
import * as fs from "fs";
import * as os from "os";
let config = JSON.parse(fs.readFileSync("config.json", 'utf8'));
let meshBase = config.meshBase;
let server = http.createServer((req, res) =>
{
if (!allowAccess(req))
{
console.log(`rejected connection`);
res.end();
|
{
const fileInfo = getFile(req.url as string);
if (fileInfo)
{
res.writeHead(200, {"Content-Type": fileInfo.mimeType});
fileInfo.fileStream.pipe(res);
}
else
{
res.writeHead(404, {"Content-Type": "application/x-binary"});
res.end();
}
}
});
server.listen(1337);
console.log("Phoebe server is listening");
console.log(`Host: ${os.hostname}\nMesh base: ${meshBase}`);
function getFile(url: string): { fileStream: fs.ReadStream, mimeType: string, fileName: string } | null
{
let mimeType: string;
let fileName: string = url.substr(1);
fileName = path.join(meshBase, fileName.substr(0,2), fileName.substr(2,2), url);
mimeType = "application/x-binary";
if (fs.existsSync(fileName))
{
const fileStream: fs.ReadStream = fs.createReadStream(fileName);
console.log(`${new Date().toLocaleString()}: ${url} OK`);
return {fileStream, mimeType, fileName};
}
else
{
console.log(`${new Date().toLocaleString()}: ${url} file not found`);
return null;
}
}
function allowAccess(req: http.IncomingMessage): boolean
{
let remoteAddress: string = req.connection.remoteAddress as string;
let realIP = req.headers['x-forwarded-for'];
//console.log(`headers: ${jss(req,null,3)}`);
if (remoteAddress.includes('127.0.0.1'))
{
return true
}
let allowedIP: Set<string> = new Set([
'127.0.0.1',
'192.168.77.111',
'192.168.10.4',
'192.168.77.143'
]);
if (allowedIP.has(remoteAddress))
{
return true;
}
return true;
}
function allowAllAccess(): boolean
{
return true
}
|
}
else
|
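getFile above derives a two-level fan-out directory from the first four characters of the requested file name before joining it with the mesh base. A minimal Go sketch of that layout (the base path and hash-like name are made up; it assumes names of at least four characters, as the original does implicitly):

package main

import (
	"fmt"
	"path/filepath"
)

// shardedPath mirrors the server's layout: <base>/<first2>/<next2>/<url>.
func shardedPath(base, url string) string {
	name := url[1:] // strip the leading '/'
	return filepath.Join(base, name[:2], name[2:4], url)
}

func main() {
	fmt.Println(shardedPath("/var/mesh", "/ab12cdef.bin"))
	// Output: /var/mesh/ab/12/ab12cdef.bin
}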
OffsetMap.py
|
# -*- coding: utf-8 -*-
#
# michael a.g. aïvázis <michael.aivazis@para-sim.com>
# parasim
# (c) 1998-2021 all rights reserved
#
# the framework
import ampcor
# the extension
from ampcor.ext import ampcor as libampcor
# declaration
class OffsetMap(ampcor.flow.product,
family="ampcor.products.offsets.offsets", implements=ampcor.specs.offsets):
"""
Access to the data of an offset map
"""
# public data
shape = ampcor.properties.tuple(schema=ampcor.properties.int())
shape.default = (0,0)
shape.doc = "the shape of the map"
data = ampcor.properties.path()
data.doc = "the path to my binary data"
# public data
@property
def layout(self):
"""
Get my layout
"""
# ask the spec
return self.spec.layout
@property
def bytesPerCell(self):
"""
Get the memory footprint of my cell
"""
# ask the spec
return self.spec.bytesPerCell
# protocol obligations
@ampcor.export
def cells(self):
"""
Compute the number of points
"""
# ask my spec; it knows
return self.spec.cells
@ampcor.export
def bytes(self):
"""
Compute my memory footprint
"""
# ask my spec; it knows
return self.spec.bytes
@ampcor.export
def slice(self, origin, shape):
"""
Grant access to a slice of data of the given {shape} starting at {origin}
"""
@ampcor.export
def open(self, mode="r"):
"""
Map me over the contents of {filename}
"""
# unpack the shape
shape = self.shape
# attempt to
try:
# resolve the filename using the {vfs}
uri = self.pyre_fileserver[self.data].uri
# if that fails
except Exception:
# use the raw name
uri = self.data
# if we are opening in read-only mode
if mode == "r":
# make a const raster
raster = ampcor.libampcor.OffsetsConstRaster(shape=shape, uri=uri)
# if we are opening an existing one in read/write mode
elif mode == "w":
# make a modifiable raster
raster = ampcor.libampcor.OffsetsRaster(shape=shape, uri=uri, new=False)
# if we are creating one
elif mode == "n":
# make a new raster; careful: this deletes existing products
raster = ampcor.libampcor.OffsetsRaster(shape=shape, uri=uri, new=True)
# otherwise
else:
# grab the journal
import journal
# make a channel
channel = journal.error("ampcor.products.slc")
# and complain
channel.line(f"unknown mode '{mode}'")
channel.line(f" while opening '{uri}'")
channel.line(f" in ampcor.products.OffsetMap.open();")
channel.line(f" valid modes are: 'r', 'w', 'n'")
channel.log()
# just in case errors are non-fatal
raster = None
# attach the raster
self.raster = raster
# all done
return self
# meta-methods
def __init__(self, **kwds):
# chain up
super().__init__(**kwds)
# load my product spec
self.spec = ampcor.libampcor.Offsets(shape=self.shape)
# i get a raster after {open}
self.raster = None
# all done
return
def __getitem__(self, idx):
""
|
def __setitem__(self, idx, points):
"""
Establish a correlation between the reference and secondary {points} at {index}
"""
# delegate to the raster
self.raster[idx] = points
# all done
return
# framework hooks
def pyre_traitModified(self, trait, new, old):
"""
Handle post construction configuration changes
"""
# when my shape changes
if trait.name == "shape":
# recompute my spec
self.spec = ampcor.libampcor.Offsets(shape=self.shape)
# all done
return self
# implementation details
def show(self, indent, margin):
"""
Generate a report of my configuration
"""
# my info
yield f"{margin}name: {self.pyre_name}"
yield f"{margin}family: {self.pyre_family()}"
yield f"{margin}data: {self.data}"
yield f"{margin}shape: {self.shape}"
yield f"{margin}points: {self.cells()}"
yield f"{margin}footprint: {self.bytes()} bytes"
# all done
return
# end of file
|
"
Return the pair of correlated points stored at {index}
"""
# ask the raster
return self.raster[idx]
|
opener.ts
|
/*---------------------------------------------------------------------------------------------
* Copyright (c) Microsoft Corporation. All rights reserved.
* Licensed under the MIT License. See License.txt in the project root for license information.
*--------------------------------------------------------------------------------------------*/
import { CancellationToken } from 'vs/base/common/cancellation';
import { Disposable, IDisposable } from 'vs/base/common/lifecycle';
import { equalsIgnoreCase, startsWithIgnoreCase } from 'vs/base/common/strings';
import { URI } from 'vs/base/common/uri';
import { IEditorOptions } from 'vs/platform/editor/common/editor';
import { createDecorator } from 'vs/platform/instantiation/common/instantiation';
export const IOpenerService = createDecorator<IOpenerService>('openerService');
export type OpenInternalOptions = {
/**
* Signals that the intent is to open an editor to the side
* of the currently active editor.
*/
readonly openToSide?: boolean;
/**
* Extra editor options to apply in case an editor is used to open.
*/
readonly editorOptions?: IEditorOptions;
/**
* Signals that the editor to open was triggered through a user
* action, such as keyboard or mouse usage.
*/
readonly fromUserGesture?: boolean;
/**
* Allow command links to be handled.
*/
readonly allowCommands?: boolean;
};
export type OpenExternalOptions = {
readonly openExternal?: boolean;
readonly allowTunneling?: boolean;
readonly allowContributedOpeners?: boolean | string;
};
export type OpenOptions = OpenInternalOptions & OpenExternalOptions;
export type ResolveExternalUriOptions = { readonly allowTunneling?: boolean };
export interface IResolvedExternalUri extends IDisposable {
resolved: URI;
}
export interface IOpener {
open(resource: URI | string, options?: OpenInternalOptions | OpenExternalOptions): Promise<boolean>;
}
export interface IExternalOpener {
openExternal(href: string, ctx: { sourceUri: URI, preferredOpenerId?: string }, token: CancellationToken): Promise<boolean>;
dispose?(): void;
}
export interface IValidator {
shouldOpen(resource: URI | string): Promise<boolean>;
}
export interface IExternalUriResolver {
resolveExternalUri(resource: URI, options?: OpenOptions): Promise<{ resolved: URI, dispose(): void } | undefined>;
}
export interface IOpenerService {
readonly _serviceBrand: undefined;
/**
* Register a participant that can handle the open() call.
*/
registerOpener(opener: IOpener): IDisposable;
/**
* Register a participant that can validate whether a URI resource should be opened.
* Validators are run before openers.
*/
registerValidator(validator: IValidator): IDisposable;
/**
* Register a participant that can resolve an external URI resource to be opened.
*/
registerExternalUriResolver(resolver: IExternalUriResolver): IDisposable;
/**
* Sets the handler for opening externally. If not provided,
* a default handler will be used.
*/
setDefaultExternalOpener(opener: IExternalOpener): void;
/**
* Registers a new opener for external resources.
*/
registerExternalOpener(opener: IExternalOpener): IDisposable;
/**
* Opens a resource, like a web address or a document URI, or executes a command.
*
* @param resource A resource
* @return A promise that resolves when the opening is done.
*/
open(resource: URI | string, options?: OpenInternalOptions | OpenExternalOptions): Promise<boolean>;
/**
* Resolve a resource to its external form.
* @throws whenever resolvers couldn't resolve this resource externally.
*/
resolveExternalUri(resource: URI, options?: ResolveExternalUriOptions): Promise<IResolvedExternalUri>;
}
export const NullOpenerService = Object.freeze({
_serviceBrand: undefined,
registerOpener() { return Disposable.None; },
registerValidator() { return Disposable.None; },
registerExternalUriResolver() { return Disposable.None; },
setDefaultExternalOpener() { },
registerExternalOpener() { return Disposable.None; },
async open() { return false; },
async resolveExternalUri(uri: URI) { return { resolved: uri, dispose() { } }; },
} as IOpenerService);
export function matchesScheme(target: URI | string, scheme: string): boolean {
if (URI.isUri(target)) {
return equalsIgnoreCase(target.scheme, scheme);
} else {
return startsWithIgnoreCase(target, scheme + ':');
}
}
export function matchesSomeScheme(target: URI | string, ...schemes: string[]): boolean {
return schemes.some(scheme => matchesScheme(target, scheme));
}
export function selectionFragment(target: URI): { startLineNumber: number; startColumn: number; } | undefined {
let selection: { startLineNumber: number; startColumn: number; } | undefined = undefined;
const match = /^L?(\d+)(?:,(\d+))?/.exec(target.fragment);
if (match) {
// support file:///some/file.js#73,84
// support file:///some/file.js#L73
selection = {
startLineNumber: parseInt(match[1]),
startColumn: match[2] ? parseInt(match[2]) : 1
};
}
|
}
|
return selection;
|
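selectionFragment above accepts both the `L73` and `73,84` fragment forms; the regex ports directly to other languages. A runnable Go sketch of the same parse (illustrative only):

package main

import (
	"fmt"
	"regexp"
	"strconv"
)

var fragRe = regexp.MustCompile(`^L?(\d+)(?:,(\d+))?`)

// parseSelection returns (line, column, ok); the column defaults to 1,
// matching selectionFragment above.
func parseSelection(fragment string) (int, int, bool) {
	m := fragRe.FindStringSubmatch(fragment)
	if m == nil {
		return 0, 0, false
	}
	line, _ := strconv.Atoi(m[1])
	col := 1
	if m[2] != "" {
		col, _ = strconv.Atoi(m[2])
	}
	return line, col, true
}

func main() {
	fmt.Println(parseSelection("L73"))   // 73 1 true
	fmt.Println(parseSelection("73,84")) // 73 84 true
}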
apprc.go
|
package cli
import (
"os"
"github.com/appscode/go/ioutil"
"github.com/appscode/go/term"
homeDir "github.com/mitchellh/go-homedir"
)
var (
home, _ = homeDir.Dir()
apprcPath = home + "/.appscode/apprc.json"
)
type Apprc struct {
Context string `json:"context"`
Auths []*Auth `json:"auths"`
}
/* Returns nil if no auth matches the current context. */
func (rc *Apprc) GetAuth() *Auth {
if rc.Context != "" {
for _, a := range rc.Auths {
if a.TeamAddr() == rc.Context {
term.Env = a.Env
return a
}
}
}
return nil
}
func (rc *Apprc) SetAuth(a *Auth) error {
for i, b := range rc.Auths {
if b.TeamAddr() == a.TeamAddr() {
rc.Auths = append(rc.Auths[:i], rc.Auths[i+1:]...)
break
}
}
rc.Context = a.TeamAddr()
rc.Auths = append(rc.Auths, a)
return rc.Write()
}
func (rc *Apprc) DeleteAuth() error {
if rc.Context != "" {
for i, a := range rc.Auths {
if a.TeamAddr() == rc.Context {
rc.Auths = append(rc.Auths[:i], rc.Auths[i+1:]...)
rc.Context = ""
break
}
}
}
return rc.Write()
}
func (rc *Apprc) Write() error {
err := ioutil.WriteJson(apprcPath, rc)
if err != nil {
return err
}
os.Chmod(apprcPath, 0600)
return nil
}
func LoadApprc() (*Apprc, error)
|
/* Exits if there is any error.*/
func GetAuthOrDie() *Auth {
rc, err := LoadApprc()
if err != nil {
term.Fatalln("Command requires authentication, please run `appctl login`")
}
a := rc.GetAuth()
if a == nil {
term.Fatalln("Command requires authentication, please run `appctl login`")
}
return a
}
/* Falls back to anonymous auth if there is any error. */
func GetAuthOrAnon() (*Auth, bool) {
rc, err := LoadApprc()
if err != nil {
return NewAnonAUth(), false
}
a := rc.GetAuth()
if a == nil {
return NewAnonAUth(), false
}
return a, true
}
func SetAuth(a *Auth) error {
rc, err := LoadApprc()
if err != nil {
rc = &Apprc{}
rc.Auths = make([]*Auth, 0)
}
return rc.SetAuth(a)
}
|
{
if _, err := os.Stat(apprcPath); err != nil {
return nil, err
}
os.Chmod(apprcPath, 0600)
rc := &Apprc{}
err := ioutil.ReadFileAs(apprcPath, rc)
if err != nil {
return nil, err
}
return rc, nil
}
|
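SetAuth above is an upsert keyed on the team address: any existing auth for the same team is removed before the new one is appended and made the current context. The core of that logic in miniature (stand-in types, not the appscode API):

package main

import "fmt"

type auth struct{ team, token string }

// setAuth removes any entry with the same team, then appends the new one,
// mirroring Apprc.SetAuth above.
func setAuth(auths []auth, a auth) []auth {
	for i, b := range auths {
		if b.team == a.team {
			auths = append(auths[:i], auths[i+1:]...)
			break
		}
	}
	return append(auths, a)
}

func main() {
	auths := []auth{{"acme", "old"}}
	auths = setAuth(auths, auth{"acme", "new"})
	fmt.Println(auths) // [{acme new}]: one entry, token replaced
}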
pulumiUtilities.go
|
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package snowflake
import (
"fmt"
"os"
"reflect"
"regexp"
"strconv"
"strings"
"github.com/blang/semver"
"github.com/pulumi/pulumi/sdk/v3/go/pulumi"
)
type envParser func(v string) interface{}
func parseEnvBool(v string) interface{} {
b, err := strconv.ParseBool(v)
if err != nil {
return nil
}
return b
}
func parseEnvInt(v string) interface{} {
i, err := strconv.ParseInt(v, 0, 0)
if err != nil {
return nil
}
return int(i)
}
func parseEnvFloat(v string) interface{} {
f, err := strconv.ParseFloat(v, 64)
if err != nil {
return nil
}
return f
}
func parseEnvStringArray(v string) interface{} {
var result pulumi.StringArray
for _, item := range strings.Split(v, ";") {
result = append(result, pulumi.String(item))
}
return result
}
func getEnvOrDefault(def interface{}, parser envParser, vars ...string) interface{} {
for _, v := range vars {
if value := os.Getenv(v); value != ""
|
}
return def
}
// PkgVersion uses reflection to determine the version of the current package.
func PkgVersion() (semver.Version, error) {
type sentinel struct{}
pkgPath := reflect.TypeOf(sentinel{}).PkgPath()
re := regexp.MustCompile("^.*/pulumi-snowflake/sdk(/v\\d+)?")
if match := re.FindStringSubmatch(pkgPath); match != nil {
vStr := match[1]
if len(vStr) == 0 { // If the version capture group was empty, default to v1.
return semver.Version{Major: 1}, nil
}
return semver.MustParse(fmt.Sprintf("%s.0.0", vStr[2:])), nil
}
return semver.Version{}, fmt.Errorf("failed to determine the package version from %s", pkgPath)
}
// isZero is a null-safe check for whether a value is its type's zero value.
func isZero(v interface{}) bool {
if v == nil {
return true
}
return reflect.ValueOf(v).IsZero()
}
|
{
if parser != nil {
return parser(value)
}
return value
}
|
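getEnvOrDefault above walks a list of variable names and returns the first non-empty hit, optionally run through a parser, falling back to a default. A self-contained demo of the same helper outside the generated package (the variable names are hypothetical):

package main

import (
	"fmt"
	"os"
	"strconv"
)

type envParser func(v string) interface{}

func parseEnvBool(v string) interface{} {
	b, err := strconv.ParseBool(v)
	if err != nil {
		return nil
	}
	return b
}

// getEnvOrDefault mirrors the generated helper: the first set variable wins.
func getEnvOrDefault(def interface{}, parser envParser, vars ...string) interface{} {
	for _, v := range vars {
		if value := os.Getenv(v); value != "" {
			if parser != nil {
				return parser(value)
			}
			return value
		}
	}
	return def
}

func main() {
	os.Setenv("EXAMPLE_INSECURE", "true") // hypothetical variable name
	fmt.Println(getEnvOrDefault(false, parseEnvBool, "EXAMPLE_MISSING", "EXAMPLE_INSECURE")) // true
}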
random_nums.py
|
import random
def random_nums():
|
random_float = random.random()
random_int = random.randint(1, 100)
random_elem = random.choice(['heads', 'tails'])
|
|
dec.go
|
// The MIT License (MIT)
//
// Copyright (c) 2016 winlin
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
// the Software, and to permit persons to whom the Software is furnished to do so,
// subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
// FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
// COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
// IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
// CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
// The AAC decoder, which decodes encoded AAC frames to PCM samples.
package fdkaac
/*
#cgo CFLAGS: -I/usr/lib/fdk-acc-2.0/include/fdk-aac/
#cgo LDFLAGS: -L/usr/lib/fdk-acc-2.0/lib/ -lfdk-aac -Wl,-rpath=/usr/lib/fdk-acc-2.0/lib/
#include "aacdecoder_lib.h"
typedef struct {
HANDLE_AACDECODER dec;
// Whether to use ADTS mode.
int is_adts;
// Initialized once the first frame is decoded.
CStreamInfo* info;
// The bits of sample, always 16 for fdkaac.
int sample_bits;
// Total filled bytes.
UINT filled_bytes;
} aacdec_t;
static void _aacdec_init(aacdec_t* h) {
// For lib-fdkaac, always use 16-bit samples.
// avctx->sample_fmt = AV_SAMPLE_FMT_S16;
h->sample_bits = 16;
h->is_adts = 0;
h->filled_bytes = 0;
h->dec = NULL;
h->info = NULL;
}
static int aacdec_init_adts(aacdec_t* h) {
_aacdec_init(h);
h->is_adts = 1;
h->dec = aacDecoder_Open(TT_MP4_ADTS, 1);
if (!h->dec) {
return -1;
}
return 0;
}
static int aacdec_init_raw(aacdec_t* h, char* asc, int nb_asc) {
_aacdec_init(h);
h->dec = aacDecoder_Open(TT_MP4_RAW, 1);
if (!h->dec) {
return -1;
}
UCHAR* uasc = (UCHAR*)asc;
UINT unb_asc = (UINT)nb_asc;
AAC_DECODER_ERROR err = aacDecoder_ConfigRaw(h->dec, &uasc, &unb_asc);
if (err != AAC_DEC_OK) {
return err;
}
return 0;
}
static void aacdec_close(aacdec_t* h) {
if (h->dec) {
aacDecoder_Close(h->dec);
}
h->dec = NULL;
}
static int aacdec_fill(aacdec_t* h, char* data, int nb_data, int* pnb_left) {
h->filled_bytes += nb_data;
UCHAR* udata = (UCHAR*)data;
UINT unb_data = (UINT)nb_data;
UINT unb_left = unb_data;
AAC_DECODER_ERROR err = aacDecoder_Fill(h->dec, &udata, &unb_data, &unb_left);
if (err != AAC_DEC_OK) {
return err;
}
if (pnb_left) {
*pnb_left = (int)unb_left;
}
return 0;
}
static int aacdec_sample_bits(aacdec_t* h) {
return h->sample_bits;
}
static int aacdec_pcm_size(aacdec_t* h) {
if (!h->info) {
return 0;
}
return (int)(h->info->numChannels * h->info->frameSize * h->sample_bits / 8);
}
static int aacdec_decode_frame(aacdec_t* h, char* pcm, int nb_pcm, int* pnb_valid) {
// When the buffered bytes are not enough, directly return not-enough-bits.
// We require at least a 7-byte header for ADTS.
if (h->is_adts && h->info && h->filled_bytes - h->info->numTotalBytes <= 7) {
return AAC_DEC_NOT_ENOUGH_BITS;
}
INT_PCM* upcm = (INT_PCM*)pcm;
INT unb_pcm = (INT)nb_pcm;
AAC_DECODER_ERROR err = aacDecoder_DecodeFrame(h->dec, upcm, unb_pcm, 0);
// The user should fill more bytes and then decode again.
if (err == AAC_DEC_NOT_ENOUGH_BITS) {
return err;
}
if (err != AAC_DEC_OK) {
return err;
}
// When decoding succeeds, retrieve the stream info.
if (!h->info) {
h->info = aacDecoder_GetStreamInfo(h->dec);
}
// the actual size of pcm.
if (pnb_valid) {
*pnb_valid = aacdec_pcm_size(h);
}
return 0;
}
static int aacdec_sample_rate(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->sampleRate;
}
static int aacdec_frame_size(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->frameSize;
}
static int aacdec_num_channels(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->numChannels;
}
static int aacdec_aac_sample_rate(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->aacSampleRate;
}
static int aacdec_profile(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->profile;
}
static int aacdec_audio_object_type(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->aot;
}
static int aacdec_channel_config(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->channelConfig;
}
static int aacdec_bitrate(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->bitRate;
}
static int aacdec_aac_samples_per_frame(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->aacSamplesPerFrame;
}
static int aacdec_aac_num_channels(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->aacNumChannels;
}
static int aacdec_extension_audio_object_type(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->extAot;
}
static int aacdec_extension_sampling_rate(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->extSamplingRate;
}
static int aacdec_num_lost_access_units(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->numLostAccessUnits;
}
static int aacdec_num_total_bytes(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->numTotalBytes;
}
static int aacdec_num_bad_bytes(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->numBadBytes;
}
static int aacdec_num_total_access_units(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->numTotalAccessUnits;
}
static int aacdec_num_bad_access_units(aacdec_t* h) {
if (!h->info) {
return 0;
}
return h->info->numBadAccessUnits;
}
*/
import "C"
import (
"fmt"
"unsafe"
)
const (
aacDecNotEnoughBits = 0x1002
)
type AacDecoder struct {
m C.aacdec_t
}
func NewAacDecoder() *AacDecoder {
return &AacDecoder{}
}
// Open the decoder in RAW mode with ASC.
// For example, the FLV audio payload is a SequenceHeader(ASC) or raw AAC data;
// the user can init the decoder with the ASC and then decode the raw data.
// @remark The stream info is only available after at least one frame has been decoded.
func (v *AacDecoder) InitRaw(asc []byte) (err error) {
p := (*C.char)(unsafe.Pointer(&asc[0]))
pSize := C.int(len(asc))
r := C.aacdec_init_raw(&v.m, p, pSize)
if int(r) != 0 {
return fmt.Errorf("init RAW decoder failed, code is %d", int(r))
}
|
// Open the decoder in ADTS mode without ASC,
// we never know the stream info until the first frame is decoded,
// because the codec info is inserted at the beginning of each frame.
// @remark The frame to Decode() is muxed in ADTS format.
func (v *AacDecoder) InitAdts() (err error) {
r := C.aacdec_init_adts(&v.m)
if int(r) != 0 {
return fmt.Errorf("init ADTS decoder failed, code is %d", int(r))
}
return nil
}
// De-allocate all resources of an AAC decoder instance.
func (v *AacDecoder) Close() error {
C.aacdec_close(&v.m)
return nil
}
// Fill the buffer of decoder then decode.
// @remark We always expect all input to be consumed by the decoder.
func (v *AacDecoder) fill(input []byte) (err error) {
p := (*C.char)(unsafe.Pointer(&input[0]))
pSize := C.int(len(input))
leftSize := C.int(0)
r := C.aacdec_fill(&v.m, p, pSize, &leftSize)
if int(r) != 0 {
return fmt.Errorf("fill aac decoder failed, code is %d", int(r))
}
if int(leftSize) > 0 {
return fmt.Errorf("decoder left %v bytes", int(leftSize))
}
return
}
// Decode one audio frame.
// @param frame contains an encoded AAC frame; optional, can be nil.
// @return when pcm is nil, the caller should fill more bytes and decode again.
func (v *AacDecoder) Decode(frame []byte) (pcm []byte, err error) {
if len(frame) > 0 {
if err = v.fill(frame); err != nil {
return
}
}
nbPcm := int(C.aacdec_pcm_size(&v.m))
if nbPcm == 0 {
nbPcm = 50 * 1024
}
pcm = make([]byte, nbPcm)
p := (*C.char)(unsafe.Pointer(&pcm[0]))
pSize := C.int(nbPcm)
validSize := C.int(0)
r := C.aacdec_decode_frame(&v.m, p, pSize, &validSize)
if int(r) == aacDecNotEnoughBits {
return nil, nil
}
if int(r) != 0 {
return nil, fmt.Errorf("decode frame failed, code is %d", int(r))
}
return pcm[0:int(validSize)], nil
}
// The bits per sample; fdk-aac always uses 16-bit samples.
func (v *AacDecoder) SampleBits() int {
return int(C.aacdec_sample_bits(&v.m))
}
// The samplerate in Hz of the fully decoded PCM audio signal (after SBR processing).
// @remark The only really relevant ones for the user.
func (v *AacDecoder) SampleRate() int {
return int(C.aacdec_sample_rate(&v.m))
}
// The frame size of the decoded PCM audio signal.
// 1024 or 960 for AAC-LC
// 2048 or 1920 for HE-AAC (v2)
// 512 or 480 for AAC-LD and AAC-ELD
// @remark The only really relevant ones for the user.
func (v *AacDecoder) FrameSize() int {
return int(C.aacdec_frame_size(&v.m))
}
// The number of output audio channels in the decoded and interleaved PCM audio signal.
// @remark The only really relevant ones for the user.
func (v *AacDecoder) NumChannels() int {
return int(C.aacdec_num_channels(&v.m))
}
// sampling rate in Hz without SBR (from configuration info).
// @remark Decoder internal members.
func (v *AacDecoder) AacSampleRate() int {
return int(C.aacdec_aac_sample_rate(&v.m))
}
// MPEG-2 profile (from file header) (-1: not applicable (e. g. MPEG-4)).
// @remark Decoder internal members.
func (v *AacDecoder) Profile() int {
return int(C.aacdec_profile(&v.m))
}
// Audio Object Type (from ASC): is set to the appropriate value for MPEG-2 bitstreams (e. g. 2 for AAC-LC).
// @remark Decoder internal members.
func (v *AacDecoder) AudioObjectType() int {
return int(C.aacdec_audio_object_type(&v.m))
}
// Channel configuration (0: PCE defined, 1: mono, 2: stereo, ...
// @remark Decoder internal members.
func (v *AacDecoder) ChannelConfig() int {
return int(C.aacdec_channel_config(&v.m))
}
// Instantaneous bit rate.
// @remark Decoder internal members.
func (v *AacDecoder) Bitrate() int {
return int(C.aacdec_bitrate(&v.m))
}
// Samples per frame for the AAC core (from ASC).
// 1024 or 960 for AAC-LC
// 512 or 480 for AAC-LD and AAC-ELD
// @remark Decoder internal members.
func (v *AacDecoder) AacSamplesPerFrame() int {
return int(C.aacdec_aac_samples_per_frame(&v.m))
}
// The number of audio channels after AAC core processing (before PS or MPS processing).
// CAUTION: This is not the final number of output channels!
// @remark Decoder internal members.
func (v *AacDecoder) AacNumChannels() int {
return int(C.aacdec_aac_num_channels(&v.m))
}
// Extension Audio Object Type (from ASC)
// @remark Decoder internal members.
func (v *AacDecoder) ExtensionAudioObjectType() int {
return int(C.aacdec_extension_audio_object_type(&v.m))
}
// Extension sampling rate in Hz (from ASC)
// @remark Decoder internal members.
func (v *AacDecoder) ExtensionSamplingRate() int {
return int(C.aacdec_extension_sampling_rate(&v.m))
}
// This integer will reflect the estimated amount of lost access units in case aacDecoder_DecodeFrame()
// returns AAC_DEC_TRANSPORT_SYNC_ERROR. It will be < 0 if the estimation failed.
// @remark Statistics.
func (v *AacDecoder) NumLostAccessUnits() int {
return int(C.aacdec_num_lost_access_units(&v.m))
}
// This is the number of total bytes that have passed through the decoder.
// @remark Statistics.
func (v *AacDecoder) NumTotalBytes() int {
return int(C.aacdec_num_total_bytes(&v.m))
}
// This is the number of total bytes that were considered with errors from numTotalBytes.
// @remark Statistics.
func (v *AacDecoder) NumBadBytes() int {
return int(C.aacdec_num_bad_bytes(&v.m))
}
// This is the number of total access units that have passed through the decoder.
// @remark Statistics.
func (v *AacDecoder) NumTotalAccessUnits() int {
return int(C.aacdec_num_total_access_units(&v.m))
}
// This is the number of total access units that were considered with errors from numTotalBytes.
// @remark Statistics.
func (v *AacDecoder) NumBadAccessUnits() int {
return int(C.aacdec_num_bad_access_units(&v.m))
}
|
return nil
}
|
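The decoder's contract above is fill-then-decode: Decode returns a nil pcm slice, not an error, when it needs more input. A hypothetical caller (the import path is assumed, not confirmed by this file) would loop like this:

package main

import (
	"fmt"

	"github.com/winlinvip/go-fdkaac/fdkaac" // assumed import path for this package
)

func main() {
	dec := fdkaac.NewAacDecoder()
	if err := dec.InitAdts(); err != nil {
		panic(err)
	}
	defer dec.Close()

	// frames would come from a real ADTS stream; left empty for illustration.
	var frames [][]byte
	for _, frame := range frames {
		pcm, err := dec.Decode(frame)
		if err != nil {
			panic(err)
		}
		if pcm == nil {
			continue // the decoder wants more bytes before producing samples
		}
		fmt.Printf("%d PCM bytes, %d Hz, %d channels\n",
			len(pcm), dec.SampleRate(), dec.NumChannels())
	}
}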
test_construct.py
|
import unittest
import mock
import numpy
import pytest
import cupy
from cupy import testing
from cupyx.scipy import sparse
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'format': ['csr', 'csc', 'coo'],
'm': [3],
'n': [None, 3, 2],
|
class TestEye(unittest.TestCase):
@testing.numpy_cupy_allclose(sp_name='sp')
def test_eye(self, xp, sp):
x = sp.eye(
self.m, n=self.n, k=self.k, dtype=self.dtype, format=self.format)
self.assertIsInstance(x, sp.spmatrix)
self.assertEqual(x.format, self.format)
return x
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'format': ['csr', 'csc', 'coo'],
}))
@testing.with_requires('scipy')
class TestIdentity(unittest.TestCase):
@testing.numpy_cupy_allclose(sp_name='sp')
def test_eye(self, xp, sp):
x = sp.identity(3, dtype=self.dtype, format=self.format)
self.assertIsInstance(x, sp.spmatrix)
self.assertEqual(x.format, self.format)
return x
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
}))
@testing.with_requires('scipy')
class TestSpdiags(unittest.TestCase):
@testing.numpy_cupy_allclose(sp_name='sp')
def test_spdiags(self, xp, sp):
data = xp.arange(12, dtype=self.dtype).reshape(3, 4)
diags = xp.array([0, -1, 2], dtype='i')
x = sp.spdiags(data, diags, 3, 4)
return x
@testing.parameterize(*testing.product({
'random_method': ['random', 'rand'],
'dtype': [numpy.float32, numpy.float64],
'format': ['csr', 'csc', 'coo'],
}))
class TestRandom(unittest.TestCase):
def test_random(self):
x = getattr(sparse, self.random_method)(
3, 4, density=0.1,
format=self.format, dtype=self.dtype)
self.assertEqual(x.shape, (3, 4))
self.assertEqual(x.dtype, self.dtype)
self.assertEqual(x.format, self.format)
def test_random_with_seed(self):
x = getattr(sparse, self.random_method)(
3, 4, density=0.1,
format=self.format, dtype=self.dtype,
random_state=1)
self.assertEqual(x.shape, (3, 4))
self.assertEqual(x.dtype, self.dtype)
self.assertEqual(x.format, self.format)
y = getattr(sparse, self.random_method)(
3, 4, density=0.1,
format=self.format, dtype=self.dtype,
random_state=1)
self.assertTrue((x.toarray() == y.toarray()).all())
def test_random_with_state(self):
state1 = cupy.random.RandomState(1)
x = getattr(sparse, self.random_method)(
3, 4, density=0.1,
format=self.format, dtype=self.dtype,
random_state=state1)
self.assertEqual(x.shape, (3, 4))
self.assertEqual(x.dtype, self.dtype)
self.assertEqual(x.format, self.format)
state2 = cupy.random.RandomState(1)
y = getattr(sparse, self.random_method)(
3, 4, density=0.1,
format=self.format, dtype=self.dtype,
random_state=state2)
self.assertTrue((x.toarray() == y.toarray()).all())
def test_random_with_data_rvs(self):
if self.random_method == 'rand':
pytest.skip('cupyx.scipy.sparse.rand does not support data_rvs')
data_rvs = mock.MagicMock(side_effect=cupy.zeros)
x = getattr(sparse, self.random_method)(
3, 4, density=0.1, data_rvs=data_rvs,
format=self.format, dtype=self.dtype)
self.assertEqual(x.shape, (3, 4))
self.assertEqual(x.dtype, self.dtype)
self.assertEqual(x.format, self.format)
self.assertEqual(data_rvs.call_count, 1)
# Note that its value is generated randomly
self.assertIsInstance(data_rvs.call_args[0][0], int)
@testing.with_requires('scipy')
class TestRandomInvalidArgument(unittest.TestCase):
@testing.numpy_cupy_raises(sp_name='sp', accept_error=ValueError)
def test_too_small_density(self, xp, sp):
sp.random(3, 4, density=-0.1)
@testing.numpy_cupy_raises(sp_name='sp', accept_error=ValueError)
def test_too_large_density(self, xp, sp):
sp.random(3, 4, density=1.1)
@testing.numpy_cupy_raises(sp_name='sp', accept_error=NotImplementedError)
def test_invalid_dtype(self, xp, sp):
sp.random(3, 4, dtype='i')
@testing.parameterize(*testing.product({
'dtype': [numpy.float32, numpy.float64, numpy.complex64, numpy.complex128],
'format': ['dia', 'csr', 'csc', 'coo'],
}))
@testing.with_requires('scipy')
class TestDiags(unittest.TestCase):
@testing.numpy_cupy_allclose(sp_name='sp')
def test_diags_scalar_offset(self, xp, sp):
x = sp.diags(
xp.arange(16), offsets=0, dtype=self.dtype, format=self.format)
self.assertIsInstance(x, sp.spmatrix)
self.assertEqual(x.format, self.format)
return x
@testing.numpy_cupy_allclose(sp_name='sp')
def test_diags_single_element_lists(self, xp, sp):
x = sp.diags(
[xp.arange(16)], offsets=[0], dtype=self.dtype, format=self.format)
self.assertIsInstance(x, sp.spmatrix)
self.assertEqual(x.format, self.format)
return x
@testing.numpy_cupy_allclose(sp_name='sp')
def test_diags_multiple(self, xp, sp):
x = sp.diags(
[xp.arange(15), xp.arange(16), xp.arange(15), xp.arange(13)],
offsets=[-1, 0, 1, 3],
dtype=self.dtype, format=self.format)
self.assertIsInstance(x, sp.spmatrix)
self.assertEqual(x.format, self.format)
return x
@testing.numpy_cupy_allclose(sp_name='sp')
def test_diags_offsets_as_array(self, xp, sp):
x = sp.diags(
[xp.arange(15), xp.arange(16), xp.arange(15), xp.arange(13)],
offsets=xp.array([-1, 0, 1, 3]),
dtype=self.dtype, format=self.format)
self.assertIsInstance(x, sp.spmatrix)
self.assertEqual(x.format, self.format)
return x
@testing.numpy_cupy_allclose(sp_name='sp')
def test_diags_non_square(self, xp, sp):
x = sp.diags(
[xp.arange(5), xp.arange(3)],
offsets=[0, -2], shape=(5, 10),
dtype=self.dtype, format=self.format)
self.assertIsInstance(x, sp.spmatrix)
self.assertEqual(x.format, self.format)
return x
|
'k': [0, 1],
}))
@testing.with_requires('scipy')
|
eks.go
|
package eks
import (
"context"
"fmt"
"regexp"
"strconv"
"time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/arn"
"github.com/aws/aws-sdk-go/service/ec2"
awseks "github.com/aws/aws-sdk-go/service/eks"
"github.com/kris-nova/logger"
"github.com/pkg/errors"
"k8s.io/client-go/kubernetes"
"github.com/aws/aws-sdk-go/aws/awserr"
api "github.com/weaveworks/eksctl/pkg/apis/eksctl.io/v1alpha5"
"github.com/weaveworks/eksctl/pkg/cfn/manager"
"github.com/weaveworks/eksctl/pkg/cfn/waiter"
"github.com/weaveworks/eksctl/pkg/fargate"
iamoidc "github.com/weaveworks/eksctl/pkg/iam/oidc"
"github.com/weaveworks/eksctl/pkg/utils"
"github.com/weaveworks/eksctl/pkg/version"
"github.com/weaveworks/eksctl/pkg/vpc"
)
const (
eksctlCreatedTrue api.EKSCTLCreated = "True"
eksctlCreatedFalse api.EKSCTLCreated = "False"
eksctlCreatedUnknown api.EKSCTLCreated = "Unknown"
)
// DescribeControlPlane describes the cluster control plane
func (c *ClusterProvider) DescribeControlPlane(meta *api.ClusterMeta) (*awseks.Cluster, error) {
input := &awseks.DescribeClusterInput{
Name: &meta.Name,
}
output, err := c.Provider.EKS().DescribeCluster(input)
if err != nil {
return nil, errors.Wrap(err, "unable to describe cluster control plane")
}
return output.Cluster, nil
}
// RefreshClusterStatus calls c.DescribeControlPlane and caches the results;
// it parses the credentials (endpoint, CA certificate) and stores them in ClusterConfig.Status,
// so that a Kubernetes client can be constructed; additionally it caches Kubernetes
// version (use ctl.ControlPlaneVersion to retrieve it) and other properties in
// c.Status.cachedClusterInfo
func (c *ClusterProvider) RefreshClusterStatus(spec *api.ClusterConfig) error {
cluster, err := c.DescribeControlPlane(spec.Metadata)
if err != nil {
return err
}
logger.Debug("cluster = %#v", cluster)
if isNonEKSCluster(cluster) {
return errors.Errorf("cannot perform this operation on a non-EKS cluster; please follow the documentation for "+
"cluster %s's Kubernetes provider", spec.Metadata.Name)
}
if spec.Status == nil {
spec.Status = &api.ClusterStatus{}
}
c.setClusterInfo(cluster)
switch *cluster.Status {
case awseks.ClusterStatusCreating, awseks.ClusterStatusDeleting, awseks.ClusterStatusFailed:
return nil
default:
return spec.SetClusterStatus(cluster)
}
}
// SupportsManagedNodes reports whether an existing cluster supports Managed Nodes
// The minimum required control plane version and platform version are 1.14 and eks.3 respectively
func (c *ClusterProvider) SupportsManagedNodes(clusterConfig *api.ClusterConfig) (bool, error) {
if err := c.RefreshClusterStatusIfStale(clusterConfig); err != nil {
return false, err
}
return ClusterSupportsManagedNodes(c.Status.ClusterInfo.Cluster)
}
// isNonEKSCluster returns true if the cluster is external
func isNonEKSCluster(cluster *awseks.Cluster) bool {
return cluster.ConnectorConfig != nil
}
// ClusterSupportsManagedNodes reports whether the EKS cluster supports managed nodes
func
|
(cluster *awseks.Cluster) (bool, error) {
supportsManagedNodes, err := utils.IsMinVersion(api.Version1_15, *cluster.Version)
if err != nil {
return false, err
}
if supportsManagedNodes {
return true, nil
}
versionSupportsManagedNodes, err := VersionSupportsManagedNodes(*cluster.Version)
if err != nil {
return false, err
}
if !versionSupportsManagedNodes {
return false, nil
}
if cluster.PlatformVersion == nil {
logger.Warning("could not find cluster's platform version")
return false, nil
}
version, err := PlatformVersion(*cluster.PlatformVersion)
if err != nil {
return false, err
}
minSupportedVersion := 3
return version >= minSupportedVersion, nil
}
// SupportsFargate reports whether an existing cluster supports Fargate.
func (c *ClusterProvider) SupportsFargate(clusterConfig *api.ClusterConfig) (bool, error) {
if err := c.RefreshClusterStatusIfStale(clusterConfig); err != nil {
return false, err
}
return ClusterSupportsFargate(c.Status.ClusterInfo.Cluster)
}
// ClusterSupportsFargate reports whether an existing cluster supports Fargate.
func ClusterSupportsFargate(cluster *awseks.Cluster) (bool, error) {
supportsFargate, err := utils.IsMinVersion(api.Version1_15, *cluster.Version)
if err != nil {
return false, err
}
if supportsFargate {
return true, nil
}
versionSupportsFargate, err := utils.IsMinVersion(fargate.MinKubernetesVersion, *cluster.Version)
if err != nil {
return false, err
}
if !versionSupportsFargate {
return false, nil
}
if cluster.PlatformVersion == nil {
logger.Warning("could not find cluster's platform version")
return false, nil
}
version, err := PlatformVersion(*cluster.PlatformVersion)
if err != nil {
return false, err
}
return version >= fargate.MinPlatformVersion, nil
}
var (
platformVersionRegex = regexp.MustCompile(`^eks\.(\d+)$`)
)
// PlatformVersion extracts the digit X in the provided platform version eks.X
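// For example, "eks.3" yields 3; input not matching the eks.<digits> form
// results in an error.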
func PlatformVersion(platformVersion string) (int, error) {
match := platformVersionRegex.FindStringSubmatch(platformVersion)
if len(match) != 2 {
return -1, fmt.Errorf("failed to parse cluster's platform version: %q", platformVersion)
}
versionStr := match[1]
version, err := strconv.Atoi(versionStr)
if err != nil {
return -1, err
}
return version, nil
}
// RefreshClusterStatusIfStale refreshes the cluster status if enough time has passed since the last refresh
func (c *ClusterProvider) RefreshClusterStatusIfStale(spec *api.ClusterConfig) error {
if c.clusterInfoNeedsUpdate() {
return c.RefreshClusterStatus(spec)
}
return nil
}
// CanDelete returns true when a cluster can be deleted; otherwise it returns false along with an error explaining the reason
func (c *ClusterProvider) CanDelete(spec *api.ClusterConfig) (bool, error) {
err := c.RefreshClusterStatusIfStale(spec)
if err != nil {
if awsError, ok := errors.Unwrap(errors.Unwrap(err)).(awserr.Error); ok &&
awsError.Code() == awseks.ErrCodeResourceNotFoundException {
return true, nil
}
return false, errors.Wrapf(err, "fetching cluster status to determine if it can be deleted")
}
	// it must be possible to delete a cluster in any state
return true, nil
}
// CanOperate returns true when a cluster can be operated, otherwise it returns false along with an error explaining the reason
func (c *ClusterProvider) CanOperate(spec *api.ClusterConfig) (bool, error) {
err := c.RefreshClusterStatusIfStale(spec)
if err != nil {
return false, errors.Wrapf(err, "unable to fetch cluster status to determine operability")
}
switch status := *c.Status.ClusterInfo.Cluster.Status; status {
case awseks.ClusterStatusCreating, awseks.ClusterStatusDeleting, awseks.ClusterStatusFailed:
return false, fmt.Errorf("cannot perform Kubernetes API operations on cluster %q in %q region due to status %q", spec.Metadata.Name, spec.Metadata.Region, status)
default:
return true, nil
}
}
// CanUpdate returns true when a cluster or add-ons can be updated; otherwise it returns false along with an error explaining the reason
func (c *ClusterProvider) CanUpdate(spec *api.ClusterConfig) (bool, error) {
err := c.RefreshClusterStatusIfStale(spec)
if err != nil {
return false, errors.Wrapf(err, "fetching cluster status to determine update status")
}
switch status := *c.Status.ClusterInfo.Cluster.Status; status {
case awseks.ClusterStatusActive:
// only active cluster can be upgraded
return true, nil
default:
return false, fmt.Errorf("cannot update cluster %q in %q region due to status %q", spec.Metadata.Name, spec.Metadata.Region, status)
}
}
// ControlPlaneVersion returns cached version (EKS API)
func (c *ClusterProvider) ControlPlaneVersion() string {
if c.Status.ClusterInfo == nil || c.Status.ClusterInfo.Cluster == nil || c.Status.ClusterInfo.Cluster.Version == nil {
return ""
}
return *c.Status.ClusterInfo.Cluster.Version
}
// ControlPlaneVPCInfo returns the cached VPC configuration (EKS API)
func (c *ClusterProvider) ControlPlaneVPCInfo() awseks.VpcConfigResponse {
if c.Status.ClusterInfo == nil || c.Status.ClusterInfo.Cluster == nil || c.Status.ClusterInfo.Cluster.ResourcesVpcConfig == nil {
return awseks.VpcConfigResponse{}
}
return *c.Status.ClusterInfo.Cluster.ResourcesVpcConfig
}
// UnsupportedOIDCError represents an unsupported OIDC error
type UnsupportedOIDCError struct {
msg string
}
func (u *UnsupportedOIDCError) Error() string {
return u.msg
}
// NewOpenIDConnectManager returns OpenIDConnectManager
func (c *ClusterProvider) NewOpenIDConnectManager(spec *api.ClusterConfig) (*iamoidc.OpenIDConnectManager, error) {
if _, err := c.CanOperate(spec); err != nil {
return nil, err
}
if c.Status.ClusterInfo.Cluster == nil || c.Status.ClusterInfo.Cluster.Identity == nil || c.Status.ClusterInfo.Cluster.Identity.Oidc == nil || c.Status.ClusterInfo.Cluster.Identity.Oidc.Issuer == nil {
return nil, &UnsupportedOIDCError{"unknown OIDC issuer URL"}
}
parsedARN, err := arn.Parse(spec.Status.ARN)
if err != nil {
return nil, errors.Wrapf(err, "unexpected invalid ARN: %q", spec.Status.ARN)
}
switch parsedARN.Partition {
case "aws", "aws-cn", "aws-us-gov":
default:
return nil, fmt.Errorf("unknown EKS ARN: %q", spec.Status.ARN)
}
return iamoidc.NewOpenIDConnectManager(c.Provider.IAM(), parsedARN.AccountID,
*c.Status.ClusterInfo.Cluster.Identity.Oidc.Issuer, parsedARN.Partition, sharedTags(c.Status.ClusterInfo.Cluster))
}
func sharedTags(cluster *awseks.Cluster) map[string]string {
return map[string]string{
api.ClusterNameTag: *cluster.Name,
api.EksctlVersionTag: version.GetVersion(),
}
}
// LoadClusterIntoSpecFromStack uses stack information to load the cluster
// configuration into the spec
// At the moment VPC and KubernetesNetworkConfig are respected
func (c *ClusterProvider) LoadClusterIntoSpecFromStack(spec *api.ClusterConfig, stackManager manager.StackManager) error {
if err := c.LoadClusterVPC(spec, stackManager); err != nil {
return err
}
if err := c.RefreshClusterStatus(spec); err != nil {
return err
}
return c.loadClusterKubernetesNetworkConfig(spec)
}
// LoadClusterVPC loads the VPC configuration
func (c *ClusterProvider) LoadClusterVPC(spec *api.ClusterConfig, stackManager manager.StackManager) error {
stack, err := stackManager.DescribeClusterStack()
if err != nil {
return err
}
if stack == nil {
return &manager.StackNotFoundErr{ClusterName: spec.Metadata.Name}
}
return vpc.UseFromClusterStack(c.Provider, stack, spec)
}
// loadClusterKubernetesNetworkConfig gets the network config of an existing
// cluster; note that the cluster status must have been refreshed first
func (c *ClusterProvider) loadClusterKubernetesNetworkConfig(spec *api.ClusterConfig) error {
if spec.Status == nil {
return errors.New("cluster hasn't been refreshed")
}
knCfg := c.Status.ClusterInfo.Cluster.KubernetesNetworkConfig
if knCfg != nil {
spec.KubernetesNetworkConfig = &api.KubernetesNetworkConfig{
ServiceIPv4CIDR: aws.StringValue(knCfg.ServiceIpv4Cidr),
}
}
return nil
}
// ListClusters returns a list of the EKS clusters in your account
func (c *ClusterProvider) ListClusters(chunkSize int, listAllRegions bool) ([]*api.ClusterConfig, error) {
if listAllRegions {
var clusters []*api.ClusterConfig
// reset region and re-create the client, then make a recursive call
authorizedRegions, err := c.Provider.EC2().DescribeRegions(&ec2.DescribeRegionsInput{})
if err != nil {
return nil, err
}
for _, region := range authorizedRegions.Regions {
spec := &api.ProviderConfig{
Region: *region.RegionName,
Profile: c.Provider.Profile(),
WaitTimeout: c.Provider.WaitTimeout(),
}
ctl, err := New(spec, nil)
if err != nil {
logger.Critical("error creating provider in %q region: %s", region, err.Error())
continue
}
newClusters, err := ctl.listClusters(int64(chunkSize))
if err != nil {
logger.Critical("error listing clusters in %q region: %s", region, err.Error())
}
clusters = append(clusters, newClusters...)
}
return clusters, nil
}
return c.listClusters(int64(chunkSize))
}
func (c *ClusterProvider) listClusters(chunkSize int64) ([]*api.ClusterConfig, error) {
allClusters := []*api.ClusterConfig{}
spec := &api.ClusterConfig{Metadata: &api.ClusterMeta{Name: ""}}
allStacks, err := c.NewStackManager(spec).ListClusterStackNames()
if err != nil {
return nil, err
}
token := ""
for {
clusters, nextToken, err := c.getClustersRequest(chunkSize, token)
if err != nil {
return nil, err
}
for _, clusterName := range clusters {
spec := &api.ClusterConfig{Metadata: &api.ClusterMeta{Name: *clusterName}}
hasClusterStack, err := c.NewStackManager(spec).HasClusterStackUsingCachedList(allStacks)
managed := eksctlCreatedFalse
if err != nil {
managed = eksctlCreatedUnknown
logger.Warning("error fetching stacks for cluster %s: %v", clusterName, err)
} else if hasClusterStack {
managed = eksctlCreatedTrue
}
allClusters = append(allClusters, &api.ClusterConfig{
Metadata: &api.ClusterMeta{
Name: *clusterName,
Region: c.Provider.Region(),
},
Status: &api.ClusterStatus{
EKSCTLCreated: managed,
},
})
}
if api.IsSetAndNonEmptyString(nextToken) {
token = *nextToken
} else {
break
}
}
return allClusters, nil
}
// GetCluster returns the details of an EKS cluster in your account
func (c *ClusterProvider) GetCluster(clusterName string) (*awseks.Cluster, error) {
input := &awseks.DescribeClusterInput{
Name: &clusterName,
}
output, err := c.Provider.EKS().DescribeCluster(input)
if err != nil {
return nil, errors.Wrapf(err, "unable to describe control plane %q", clusterName)
}
logger.Debug("cluster = %#v", output)
if *output.Cluster.Status == awseks.ClusterStatusActive {
if logger.Level >= 4 {
spec := &api.ClusterConfig{Metadata: &api.ClusterMeta{Name: clusterName}}
stacks, err := c.NewStackManager(spec).ListStacks()
if err != nil {
return nil, errors.Wrapf(err, "listing CloudFormation stack for %q", clusterName)
}
for _, s := range stacks {
logger.Debug("stack = %#v", *s)
}
}
}
return output.Cluster, nil
}
func (c *ClusterProvider) getClustersRequest(chunkSize int64, nextToken string) ([]*string, *string, error) {
input := &awseks.ListClustersInput{
MaxResults: &chunkSize,
Include: aws.StringSlice([]string{"all"}),
}
if nextToken != "" {
input = input.SetNextToken(nextToken)
}
output, err := c.Provider.EKS().ListClusters(input)
if err != nil {
return nil, nil, errors.Wrap(err, "listing control planes")
}
return output.Clusters, output.NextToken, nil
}
// WaitForControlPlane waits until the control plane is ready
func (c *ClusterProvider) WaitForControlPlane(meta *api.ClusterMeta, clientSet *kubernetes.Clientset) error {
successCount := 0
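	// Readiness is declared only once the discovery /version endpoint has
	// responded successfully several times; errors never reset the counter,
	// the operation simply keeps polling on a fixed delay.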
operation := func() (bool, error) {
_, err := clientSet.ServerVersion()
if err == nil {
if successCount >= 5 {
return true, nil
}
successCount++
return false, nil
}
logger.Debug("control plane not ready yet – %s", err.Error())
return false, nil
}
w := waiter.Waiter{
Operation: operation,
NextDelay: func(_ int) time.Duration {
return 20 * time.Second
},
}
if err := w.WaitWithTimeout(c.Provider.WaitTimeout()); err != nil {
if err == context.DeadlineExceeded {
return errors.Errorf("timed out waiting for control plane %q after %s", meta.Name, c.Provider.WaitTimeout())
}
return err
}
return nil
}
|
ClusterSupportsManagedNodes
|
f64x4_.rs
|
use super::*;
pick! {
if #[cfg(target_feature="avx")] {
#[derive(Default, Clone, Copy, PartialEq)]
#[repr(C, align(32))]
pub struct f64x4 { avx: m256d }
} else if #[cfg(target_feature="sse2")] {
#[derive(Default, Clone, Copy, PartialEq)]
#[repr(C, align(32))]
pub struct f64x4 { sse0: m128d, sse1: m128d }
} else if #[cfg(target_feature="simd128")] {
use core::arch::wasm32::*;
#[derive(Clone, Copy)]
#[repr(C, align(32))]
pub struct f64x4 { simd0: v128, simd1: v128 }
impl Default for f64x4 {
fn default() -> Self {
Self::splat(0.0)
}
}
impl PartialEq for f64x4 {
fn eq(&self, other: &Self) -> bool {
u64x2_all_true(f64x2_eq(self.simd0, other.simd0)) &
u64x2_all_true(f64x2_eq(self.simd1, other.simd1))
}
}
} else {
#[derive(Default, Clone, Copy, PartialEq)]
#[repr(C, align(32))]
pub struct f64x4 { arr: [f64;4] }
}
}
macro_rules! const_f64_as_f64x4 {
($i:ident, $f:expr) => {
pub const $i: f64x4 =
unsafe { ConstUnionHack256bit { f64a4: [$f; 4] }.f64x4 };
};
}
impl f64x4 {
const_f64_as_f64x4!(ONE, 1.0);
const_f64_as_f64x4!(ZERO, 0.0);
const_f64_as_f64x4!(HALF, 0.5);
const_f64_as_f64x4!(E, core::f64::consts::E);
const_f64_as_f64x4!(FRAC_1_PI, core::f64::consts::FRAC_1_PI);
const_f64_as_f64x4!(FRAC_2_PI, core::f64::consts::FRAC_2_PI);
const_f64_as_f64x4!(FRAC_2_SQRT_PI, core::f64::consts::FRAC_2_SQRT_PI);
const_f64_as_f64x4!(FRAC_1_SQRT_2, core::f64::consts::FRAC_1_SQRT_2);
const_f64_as_f64x4!(FRAC_PI_2, core::f64::consts::FRAC_PI_2);
const_f64_as_f64x4!(FRAC_PI_3, core::f64::consts::FRAC_PI_3);
const_f64_as_f64x4!(FRAC_PI_4, core::f64::consts::FRAC_PI_4);
const_f64_as_f64x4!(FRAC_PI_6, core::f64::consts::FRAC_PI_6);
const_f64_as_f64x4!(FRAC_PI_8, core::f64::consts::FRAC_PI_8);
const_f64_as_f64x4!(LN_2, core::f64::consts::LN_2);
const_f64_as_f64x4!(LN_10, core::f64::consts::LN_10);
const_f64_as_f64x4!(LOG2_E, core::f64::consts::LOG2_E);
const_f64_as_f64x4!(LOG10_E, core::f64::consts::LOG10_E);
const_f64_as_f64x4!(LOG10_2, core::f64::consts::LOG10_2);
const_f64_as_f64x4!(LOG2_10, core::f64::consts::LOG2_10);
const_f64_as_f64x4!(PI, core::f64::consts::PI);
const_f64_as_f64x4!(SQRT_2, core::f64::consts::SQRT_2);
const_f64_as_f64x4!(TAU, core::f64::consts::TAU);
}
unsafe impl Zeroable for f64x4 {}
unsafe impl Pod for f64x4 {}
impl Add for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn add(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: add_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: add_m128d(self.sse0, rhs.sse0), sse1: add_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_add(self.simd0, rhs.simd0), simd1: f64x2_add(self.simd1, rhs.simd1) }
} else {
Self { arr: [
self.arr[0] + rhs.arr[0],
self.arr[1] + rhs.arr[1],
self.arr[2] + rhs.arr[2],
self.arr[3] + rhs.arr[3],
]}
}
}
}
}
impl Sub for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn sub(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: sub_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: sub_m128d(self.sse0, rhs.sse0), sse1: sub_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_sub(self.simd0, rhs.simd0), simd1: f64x2_sub(self.simd1, rhs.simd1) }
} else {
Self { arr: [
self.arr[0] - rhs.arr[0],
self.arr[1] - rhs.arr[1],
self.arr[2] - rhs.arr[2],
self.arr[3] - rhs.arr[3],
]}
}
}
}
}
impl Mul for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn mul(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: mul_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: mul_m128d(self.sse0, rhs.sse0), sse1: mul_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_mul(self.simd0, rhs.simd0), simd1: f64x2_mul(self.simd1, rhs.simd1) }
} else {
Self { arr: [
self.arr[0] * rhs.arr[0],
self.arr[1] * rhs.arr[1],
self.arr[2] * rhs.arr[2],
self.arr[3] * rhs.arr[3],
]}
}
}
}
}
impl Div for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn div(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: div_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: div_m128d(self.sse0, rhs.sse0), sse1: div_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_div(self.simd0, rhs.simd0), simd1: f64x2_div(self.simd1, rhs.simd1) }
} else {
Self { arr: [
self.arr[0] / rhs.arr[0],
self.arr[1] / rhs.arr[1],
self.arr[2] / rhs.arr[2],
self.arr[3] / rhs.arr[3],
]}
}
}
}
}
impl Add<f64> for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn add(self, rhs: f64) -> Self::Output {
self.add(Self::splat(rhs))
}
}
impl Sub<f64> for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn sub(self, rhs: f64) -> Self::Output {
self.sub(Self::splat(rhs))
}
}
impl Mul<f64> for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn mul(self, rhs: f64) -> Self::Output {
self.mul(Self::splat(rhs))
}
}
impl Div<f64> for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn div(self, rhs: f64) -> Self::Output {
self.div(Self::splat(rhs))
}
}
impl Add<f64x4> for f64 {
type Output = f64x4;
#[inline]
#[must_use]
fn add(self, rhs: f64x4) -> Self::Output {
f64x4::splat(self).add(rhs)
}
}
impl Sub<f64x4> for f64 {
type Output = f64x4;
#[inline]
#[must_use]
fn sub(self, rhs: f64x4) -> Self::Output {
f64x4::splat(self).sub(rhs)
}
}
impl Mul<f64x4> for f64 {
type Output = f64x4;
#[inline]
#[must_use]
fn mul(self, rhs: f64x4) -> Self::Output {
f64x4::splat(self).mul(rhs)
}
}
impl Div<f64x4> for f64 {
type Output = f64x4;
#[inline]
#[must_use]
fn div(self, rhs: f64x4) -> Self::Output {
f64x4::splat(self).div(rhs)
}
}
impl BitAnd for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn bitand(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: bitand_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: bitand_m128d(self.sse0, rhs.sse0), sse1: bitand_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: v128_and(self.simd0, rhs.simd0), simd1: v128_and(self.simd1, rhs.simd1) }
} else {
Self { arr: [
f64::from_bits(self.arr[0].to_bits() & rhs.arr[0].to_bits()),
f64::from_bits(self.arr[1].to_bits() & rhs.arr[1].to_bits()),
f64::from_bits(self.arr[2].to_bits() & rhs.arr[2].to_bits()),
f64::from_bits(self.arr[3].to_bits() & rhs.arr[3].to_bits()),
]}
}
}
}
}
impl BitOr for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn bitor(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: bitor_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: bitor_m128d(self.sse0, rhs.sse0), sse1: bitor_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: v128_or(self.simd0, rhs.simd0), simd1: v128_or(self.simd1, rhs.simd1) }
} else {
Self { arr: [
f64::from_bits(self.arr[0].to_bits() | rhs.arr[0].to_bits()),
f64::from_bits(self.arr[1].to_bits() | rhs.arr[1].to_bits()),
f64::from_bits(self.arr[2].to_bits() | rhs.arr[2].to_bits()),
f64::from_bits(self.arr[3].to_bits() | rhs.arr[3].to_bits()),
]}
}
}
}
}
impl BitXor for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn bitxor(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: bitxor_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: bitxor_m128d(self.sse0, rhs.sse0), sse1: bitxor_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: v128_xor(self.simd0, rhs.simd0), simd1: v128_xor(self.simd1, rhs.simd1) }
} else {
Self { arr: [
f64::from_bits(self.arr[0].to_bits() ^ rhs.arr[0].to_bits()),
f64::from_bits(self.arr[1].to_bits() ^ rhs.arr[1].to_bits()),
f64::from_bits(self.arr[2].to_bits() ^ rhs.arr[2].to_bits()),
f64::from_bits(self.arr[3].to_bits() ^ rhs.arr[3].to_bits()),
]}
}
}
}
}
impl CmpEq for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn cmp_eq(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")]{
Self { avx: cmp_op_mask_m256d::<{cmp_op!(EqualOrdered)}>(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: cmp_eq_mask_m128d(self.sse0, rhs.sse0), sse1: cmp_eq_mask_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_eq(self.simd0, rhs.simd0), simd1: f64x2_eq(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] == rhs.arr[0] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[1] == rhs.arr[1] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[2] == rhs.arr[2] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[3] == rhs.arr[3] { f64::from_bits(u64::MAX) } else { 0.0 },
]}
}
}
}
}
impl CmpGe for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn cmp_ge(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")]{
Self { avx: cmp_op_mask_m256d::<{cmp_op!(GreaterEqualOrdered)}>(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: cmp_ge_mask_m128d(self.sse0, rhs.sse0), sse1: cmp_ge_mask_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_ge(self.simd0, rhs.simd0), simd1: f64x2_ge(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] >= rhs.arr[0] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[1] >= rhs.arr[1] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[2] >= rhs.arr[2] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[3] >= rhs.arr[3] { f64::from_bits(u64::MAX) } else { 0.0 },
]}
}
}
}
}
impl CmpGt for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn cmp_gt(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")]{
        Self { avx: cmp_op_mask_m256d::<{cmp_op!(GreaterThanOrdered)}>(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: cmp_gt_mask_m128d(self.sse0, rhs.sse0), sse1: cmp_gt_mask_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_gt(self.simd0, rhs.simd0), simd1: f64x2_gt(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] > rhs.arr[0] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[1] > rhs.arr[1] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[2] > rhs.arr[2] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[3] > rhs.arr[3] { f64::from_bits(u64::MAX) } else { 0.0 },
]}
}
}
}
}
impl CmpNe for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn cmp_ne(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")]{
Self { avx: cmp_op_mask_m256d::<{cmp_op!(NotEqualOrdered)}>(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: cmp_neq_mask_m128d(self.sse0, rhs.sse0), sse1: cmp_neq_mask_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_ne(self.simd0, rhs.simd0), simd1: f64x2_ne(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] != rhs.arr[0] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[1] != rhs.arr[1] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[2] != rhs.arr[2] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[3] != rhs.arr[3] { f64::from_bits(u64::MAX) } else { 0.0 },
]}
}
}
}
}
impl CmpLe for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn cmp_le(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")]{
Self { avx: cmp_op_mask_m256d::<{cmp_op!(LessEqualOrdered)}>(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: cmp_le_mask_m128d(self.sse0, rhs.sse0), sse1: cmp_le_mask_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_le(self.simd0, rhs.simd0), simd1: f64x2_le(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] <= rhs.arr[0] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[1] <= rhs.arr[1] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[2] <= rhs.arr[2] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[3] <= rhs.arr[3] { f64::from_bits(u64::MAX) } else { 0.0 },
]}
}
}
}
}
impl CmpLt for f64x4 {
type Output = Self;
#[inline]
#[must_use]
fn cmp_lt(self, rhs: Self) -> Self::Output {
pick! {
if #[cfg(target_feature="avx")]{
Self { avx: cmp_op_mask_m256d::<{cmp_op!(LessThanOrdered)}>(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: cmp_lt_mask_m128d(self.sse0, rhs.sse0), sse1: cmp_lt_mask_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_lt(self.simd0, rhs.simd0), simd1: f64x2_lt(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] < rhs.arr[0] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[1] < rhs.arr[1] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[2] < rhs.arr[2] { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[3] < rhs.arr[3] { f64::from_bits(u64::MAX) } else { 0.0 },
]}
}
}
}
}
impl f64x4 {
#[inline]
#[must_use]
pub fn blend(self, t: Self, f: Self) -> Self {
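    // `self` is a lanewise boolean mask (all-ones or all-zeros per lane, as
    // produced by the `cmp_*` operations): result lanes come from `t` where
    // the mask is set and from `f` where it is clear.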
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: blend_varying_m256d(f.avx, t.avx, self.avx) }
} else if #[cfg(target_feature="sse4.1")] {
Self { sse0: blend_varying_m128d(f.sse0, t.sse0, self.sse0), sse1: blend_varying_m128d(f.sse1, t.sse1, self.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: v128_bitselect(t.simd0, f.simd0, self.simd0), simd1: v128_bitselect(t.simd1, f.simd1, self.simd1) }
} else {
generic_bit_blend(self, t, f)
}
}
}
#[inline]
#[must_use]
pub fn abs(self) -> Self {
pick! {
if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_abs(self.simd0), simd1: f64x2_abs(self.simd1) }
} else {
let non_sign_bits = f64x4::from(f64::from_bits(i64::MAX as u64));
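        // i64::MAX as u64 is 0x7FFF_FFFF_FFFF_FFFF: every bit set except the
        // sign bit, so the AND below clears the sign of each lane.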
self & non_sign_bits
}
}
}
/// Calculates the lanewise maximum of both vectors. This is a faster
/// implementation than `max`, but it doesn't specify any behavior if NaNs are
/// involved.
#[inline]
#[must_use]
pub fn fast_max(self, rhs: Self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: max_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: max_m128d(self.sse0, rhs.sse0), sse1: max_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_pmax(self.simd0, rhs.simd0), simd1: f64x2_pmax(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] < rhs.arr[0] { rhs.arr[0] } else { self.arr[0] },
if self.arr[1] < rhs.arr[1] { rhs.arr[1] } else { self.arr[1] },
if self.arr[2] < rhs.arr[2] { rhs.arr[2] } else { self.arr[2] },
if self.arr[3] < rhs.arr[3] { rhs.arr[3] } else { self.arr[3] },
]}
}
}
}
/// Calculates the lanewise maximum of both vectors. If either lane is NaN,
/// the other lane gets chosen. Use `fast_max` for a faster implementation
/// that doesn't handle NaNs.
#[inline]
#[must_use]
pub fn max(self, rhs: Self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
// max_m256d seems to do rhs < self ? self : rhs. So if there's any NaN
// involved, it chooses rhs, so we need to specifically check rhs for
// NaN.
rhs.is_nan().blend(self, Self { avx: max_m256d(self.avx, rhs.avx) })
} else if #[cfg(target_feature="sse2")] {
// max_m128d seems to do rhs < self ? self : rhs. So if there's any NaN
// involved, it chooses rhs, so we need to specifically check rhs for
// NaN.
rhs.is_nan().blend(self, Self { sse0: max_m128d(self.sse0, rhs.sse0), sse1: max_m128d(self.sse1, rhs.sse1) })
} else if #[cfg(target_feature="simd128")] {
// WASM has two max intrinsics:
// - max: This propagates NaN, that's the opposite of what we need.
// - pmax: This is defined as self < rhs ? rhs : self, which basically
// chooses self if either is NaN.
//
// pmax is what we want, but we need to specifically check self for NaN.
Self {
simd0: v128_bitselect(
rhs.simd0,
f64x2_pmax(self.simd0, rhs.simd0),
f64x2_ne(self.simd0, self.simd0), // NaN check
),
simd1: v128_bitselect(
rhs.simd1,
f64x2_pmax(self.simd1, rhs.simd1),
f64x2_ne(self.simd1, self.simd1), // NaN check
),
}
} else {
Self { arr: [
self.arr[0].max(rhs.arr[0]),
self.arr[1].max(rhs.arr[1]),
self.arr[2].max(rhs.arr[2]),
self.arr[3].max(rhs.arr[3]),
]}
}
}
}
/// Calculates the lanewise minimum of both vectors. This is a faster
/// implementation than `min`, but it doesn't specify any behavior if NaNs are
/// involved.
#[inline]
#[must_use]
pub fn fast_min(self, rhs: Self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: min_m256d(self.avx, rhs.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: min_m128d(self.sse0, rhs.sse0), sse1: min_m128d(self.sse1, rhs.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_pmin(self.simd0, rhs.simd0), simd1: f64x2_pmin(self.simd1, rhs.simd1) }
} else {
Self { arr: [
if self.arr[0] < rhs.arr[0] { self.arr[0] } else { rhs.arr[0] },
if self.arr[1] < rhs.arr[1] { self.arr[1] } else { rhs.arr[1] },
if self.arr[2] < rhs.arr[2] { self.arr[2] } else { rhs.arr[2] },
if self.arr[3] < rhs.arr[3] { self.arr[3] } else { rhs.arr[3] },
]}
}
}
}
/// Calculates the lanewise minimum of both vectors. If either lane is NaN,
/// the other lane gets chosen. Use `fast_min` for a faster implementation
/// that doesn't handle NaNs.
#[inline]
#[must_use]
pub fn min(self, rhs: Self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
// min_m256d seems to do rhs < self ? self : rhs. So if there's any NaN
// involved, it chooses rhs, so we need to specifically check rhs for
// NaN.
rhs.is_nan().blend(self, Self { avx: min_m256d(self.avx, rhs.avx) })
} else if #[cfg(target_feature="sse2")] {
// min_m128d seems to do rhs < self ? self : rhs. So if there's any NaN
// involved, it chooses rhs, so we need to specifically check rhs for
// NaN.
rhs.is_nan().blend(self, Self { sse0: min_m128d(self.sse0, rhs.sse0), sse1: min_m128d(self.sse1, rhs.sse1) })
} else if #[cfg(target_feature="simd128")] {
// WASM has two min intrinsics:
// - min: This propagates NaN, that's the opposite of what we need.
// - pmin: This is defined as rhs < self ? rhs : self, which basically
// chooses self if either is NaN.
//
// pmin is what we want, but we need to specifically check self for NaN.
Self {
simd0: v128_bitselect(
rhs.simd0,
f64x2_pmin(self.simd0, rhs.simd0),
f64x2_ne(self.simd0, self.simd0), // NaN check
),
simd1: v128_bitselect(
rhs.simd1,
f64x2_pmin(self.simd1, rhs.simd1),
f64x2_ne(self.simd1, self.simd1), // NaN check
),
}
} else {
Self { arr: [
self.arr[0].min(rhs.arr[0]),
self.arr[1].min(rhs.arr[1]),
self.arr[2].min(rhs.arr[2]),
self.arr[3].min(rhs.arr[3]),
]}
}
}
}
#[inline]
#[must_use]
pub fn is_nan(self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
        Self { avx: cmp_op_mask_m256d::<{cmp_op!(Unordered)}>(self.avx, self.avx) }
      } else if #[cfg(target_feature="sse2")] {
        Self { sse0: cmp_unord_mask_m128d(self.sse0, self.sse0), sse1: cmp_unord_mask_m128d(self.sse1, self.sse1) }
      } else if #[cfg(target_feature="simd128")] {
        Self { simd0: f64x2_ne(self.simd0, self.simd0), simd1: f64x2_ne(self.simd1, self.simd1) }
} else {
Self { arr: [
if self.arr[0].is_nan() { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[1].is_nan() { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[2].is_nan() { f64::from_bits(u64::MAX) } else { 0.0 },
if self.arr[3].is_nan() { f64::from_bits(u64::MAX) } else { 0.0 },
]}
}
}
}
#[inline]
#[must_use]
pub fn is_finite(self) -> Self {
let shifted_exp_mask = u64x4::from(0xFFE0000000000000);
let u: u64x4 = cast(self);
let shift_u = u << 1_u64;
let out = !(shift_u & shifted_exp_mask).cmp_eq(shifted_exp_mask);
cast(out)
}
#[inline]
#[must_use]
pub fn is_inf(self) -> Self {
let shifted_inf = u64x4::from(0xFFE0000000000000);
let u: u64x4 = cast(self);
let shift_u = u << 1_u64;
let out = (shift_u).cmp_eq(shifted_inf);
cast(out)
}
#[inline]
#[must_use]
pub fn round(self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: round_m256d::<{round_op!(Nearest)}>(self.avx) }
} else if #[cfg(target_feature="sse4.1")] {
Self { sse0: round_m128d::<{round_op!(Nearest)}>(self.sse0), sse1: round_m128d::<{round_op!(Nearest)}>(self.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_nearest(self.simd0), simd1: f64x2_nearest(self.simd1) }
} else {
let sign_mask = f64x4::from(-0.0);
let magic = f64x4::from(f64::from_bits(0x43300000_00000000));
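        // The magic constant is 2^52: adding it pushes the value's fraction
        // bits out of the mantissa, forcing round-to-nearest-even at integer
        // precision, and subtracting it back leaves the rounded value. OR-ing
        // the sign into the magic makes the trick symmetric for negative
        // inputs.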
let sign = self & sign_mask;
let signed_magic = magic | sign;
self + signed_magic - signed_magic
}
}
}
#[inline]
#[must_use]
pub fn round_int(self) -> i64x4 {
    // NOTE: no optimisation for this is currently available, so delegate to LLVM
let rounded: [f64; 4] = cast(self.round());
cast([
rounded[0] as i64,
rounded[1] as i64,
rounded[2] as i64,
rounded[3] as i64,
])
}
#[inline]
#[must_use]
pub fn mul_add(self, m: Self, a: Self) -> Self {
pick! {
if #[cfg(all(target_feature="avx",target_feature="fma"))] {
Self { avx: fused_mul_add_m256d(self.avx, m.avx, a.avx) }
} else if #[cfg(all(target_feature="avx",target_feature="fma"))]
{
Self { sse0: fused_mul_add_m128d(self.sse0, m.sse0, a.sse0), sse1: fused_mul_add_m128d(self.sse1, m.sse1, a.sse1) }
} else {
(self * m) + a
}
}
}
#[inline]
#[must_use]
pub fn mul_sub(self, m: Self, a: Self) -> Self {
pick! {
if #[cfg(all(target_feature="avx",target_feature="fma"))] {
Self { avx: fused_mul_sub_m256d(self.avx, m.avx, a.avx) }
} else if #[cfg(all(target_feature="avx",target_feature="fma"))]
{
Self { sse0: fused_mul_sub_m128d(self.sse0, m.sse0, a.sse0), sse1: fused_mul_sub_m128d(self.sse1, m.sse1, a.sse1) }
} else {
(self * m) - a
}
}
}
#[inline]
#[must_use]
pub fn mul_neg_add(self, m: Self, a: Self) -> Self {
pick! {
if #[cfg(all(target_feature="avx",target_feature="fma"))] {
Self { avx: fused_mul_neg_add_m256d(self.avx, m.avx, a.avx) }
} else if #[cfg(all(target_feature="avx",target_feature="fma"))]
{
Self { sse0: fused_mul_neg_add_m128d(self.sse0, m.sse0, a.sse0), sse1: fused_mul_neg_add_m128d(self.sse1, m.sse1, a.sse1) }
} else {
a - (self * m)
}
}
}
#[inline]
#[must_use]
pub fn mul_neg_sub(self, m: Self, a: Self) -> Self {
pick! {
if #[cfg(all(target_feature="avx",target_feature="fma"))] {
Self { avx: fused_mul_neg_sub_m256d(self.avx, m.avx, a.avx) }
} else if #[cfg(all(target_feature="avx",target_feature="fma"))]
{
Self { sse0: fused_mul_neg_sub_m128d(self.sse0, m.sse0, a.sse0), sse1: fused_mul_neg_sub_m128d(self.sse1, m.sse1, a.sse1) }
} else {
-(self * m) - a
}
}
}
#[inline]
#[must_use]
pub fn flip_signs(self, signs: Self) -> Self {
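    // XOR with the sign bit of `signs` negates exactly those lanes where
    // `signs` is negative and leaves the rest untouched.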
self ^ (signs & Self::from(-0.0))
}
#[inline]
#[must_use]
pub fn copysign(self, sign: Self) -> Self {
let magnitude_mask = Self::from(f64::from_bits(u64::MAX >> 1));
(self & magnitude_mask) | (sign & Self::from(-0.0))
}
#[allow(non_upper_case_globals)]
pub fn asin_acos(self) -> (Self, Self) {
// Based on the Agner Fog "vector class library":
// https://github.com/vectorclass/version2/blob/master/vectormath_trig.h
const_f64_as_f64x4!(R4asin, 2.967721961301243206100E-3);
const_f64_as_f64x4!(R3asin, -5.634242780008963776856E-1);
const_f64_as_f64x4!(R2asin, 6.968710824104713396794E0);
const_f64_as_f64x4!(R1asin, -2.556901049652824852289E1);
const_f64_as_f64x4!(R0asin, 2.853665548261061424989E1);
const_f64_as_f64x4!(S3asin, -2.194779531642920639778E1);
const_f64_as_f64x4!(S2asin, 1.470656354026814941758E2);
const_f64_as_f64x4!(S1asin, -3.838770957603691357202E2);
const_f64_as_f64x4!(S0asin, 3.424398657913078477438E2);
const_f64_as_f64x4!(P5asin, 4.253011369004428248960E-3);
const_f64_as_f64x4!(P4asin, -6.019598008014123785661E-1);
const_f64_as_f64x4!(P3asin, 5.444622390564711410273E0);
const_f64_as_f64x4!(P2asin, -1.626247967210700244449E1);
const_f64_as_f64x4!(P1asin, 1.956261983317594739197E1);
const_f64_as_f64x4!(P0asin, -8.198089802484824371615E0);
const_f64_as_f64x4!(Q4asin, -1.474091372988853791896E1);
const_f64_as_f64x4!(Q3asin, 7.049610280856842141659E1);
const_f64_as_f64x4!(Q2asin, -1.471791292232726029859E2);
const_f64_as_f64x4!(Q1asin, 1.395105614657485689735E2);
const_f64_as_f64x4!(Q0asin, -4.918853881490881290097E1);
let xa = self.abs();
let big = xa.cmp_ge(f64x4::splat(0.625));
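    // |x| >= 0.625 uses the identity asin(x) = pi/2 - 2*asin(sqrt((1-x)/2))
    // via the rational approximation R/S; smaller inputs use P/Q directly.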
let x1 = big.blend(f64x4::splat(1.0) - xa, xa * xa);
let x2 = x1 * x1;
let x3 = x2 * x1;
let x4 = x2 * x2;
let x5 = x4 * x1;
let dobig = big.any();
let dosmall = !big.all();
let mut rx = f64x4::default();
let mut sx = f64x4::default();
let mut px = f64x4::default();
let mut qx = f64x4::default();
if dobig {
rx = x3.mul_add(R3asin, x2 * R2asin)
+ x4.mul_add(R4asin, x1.mul_add(R1asin, R0asin));
sx =
x3.mul_add(S3asin, x4) + x2.mul_add(S2asin, x1.mul_add(S1asin, S0asin));
}
if dosmall {
px = x3.mul_add(P3asin, P0asin)
+ x4.mul_add(P4asin, x1 * P1asin)
+ x5.mul_add(P5asin, x2 * P2asin);
qx = x4.mul_add(Q4asin, x5)
+ x3.mul_add(Q3asin, x1 * Q1asin)
+ x2.mul_add(Q2asin, Q0asin);
};
let vx = big.blend(rx, px);
let wx = big.blend(sx, qx);
let y1 = vx / wx * x1;
let mut z1 = f64x4::default();
let mut z2 = f64x4::default();
if dobig {
let xb = (x1 + x1).sqrt();
z1 = xb.mul_add(y1, xb);
}
if dosmall {
z2 = xa.mul_add(y1, xa);
}
// asin
let z3 = f64x4::FRAC_PI_2 - z1;
let asin = big.blend(z3, z2);
let asin = asin.flip_signs(self);
// acos
let z3 = self.cmp_lt(f64x4::ZERO).blend(f64x4::PI - z1, z1);
let z4 = f64x4::FRAC_PI_2 - z2.flip_signs(self);
let acos = big.blend(z3, z4);
(asin, acos)
}
#[allow(non_upper_case_globals)]
pub fn acos(self) -> Self {
// Based on the Agner Fog "vector class library":
// https://github.com/vectorclass/version2/blob/master/vectormath_trig.h
const_f64_as_f64x4!(R4asin, 2.967721961301243206100E-3);
const_f64_as_f64x4!(R3asin, -5.634242780008963776856E-1);
const_f64_as_f64x4!(R2asin, 6.968710824104713396794E0);
const_f64_as_f64x4!(R1asin, -2.556901049652824852289E1);
const_f64_as_f64x4!(R0asin, 2.853665548261061424989E1);
const_f64_as_f64x4!(S3asin, -2.194779531642920639778E1);
const_f64_as_f64x4!(S2asin, 1.470656354026814941758E2);
const_f64_as_f64x4!(S1asin, -3.838770957603691357202E2);
const_f64_as_f64x4!(S0asin, 3.424398657913078477438E2);
const_f64_as_f64x4!(P5asin, 4.253011369004428248960E-3);
const_f64_as_f64x4!(P4asin, -6.019598008014123785661E-1);
const_f64_as_f64x4!(P3asin, 5.444622390564711410273E0);
const_f64_as_f64x4!(P2asin, -1.626247967210700244449E1);
const_f64_as_f64x4!(P1asin, 1.956261983317594739197E1);
const_f64_as_f64x4!(P0asin, -8.198089802484824371615E0);
const_f64_as_f64x4!(Q4asin, -1.474091372988853791896E1);
const_f64_as_f64x4!(Q3asin, 7.049610280856842141659E1);
const_f64_as_f64x4!(Q2asin, -1.471791292232726029859E2);
const_f64_as_f64x4!(Q1asin, 1.395105614657485689735E2);
const_f64_as_f64x4!(Q0asin, -4.918853881490881290097E1);
let xa = self.abs();
let big = xa.cmp_ge(f64x4::splat(0.625));
let x1 = big.blend(f64x4::splat(1.0) - xa, xa * xa);
let x2 = x1 * x1;
let x3 = x2 * x1;
let x4 = x2 * x2;
let x5 = x4 * x1;
let dobig = big.any();
let dosmall = !big.all();
let mut rx = f64x4::default();
let mut sx = f64x4::default();
let mut px = f64x4::default();
let mut qx = f64x4::default();
if dobig {
rx = x3.mul_add(R3asin, x2 * R2asin)
+ x4.mul_add(R4asin, x1.mul_add(R1asin, R0asin));
sx =
x3.mul_add(S3asin, x4) + x2.mul_add(S2asin, x1.mul_add(S1asin, S0asin));
}
if dosmall {
px = x3.mul_add(P3asin, P0asin)
+ x4.mul_add(P4asin, x1 * P1asin)
+ x5.mul_add(P5asin, x2 * P2asin);
qx = x4.mul_add(Q4asin, x5)
+ x3.mul_add(Q3asin, x1 * Q1asin)
+ x2.mul_add(Q2asin, Q0asin);
};
let vx = big.blend(rx, px);
let wx = big.blend(sx, qx);
let y1 = vx / wx * x1;
let mut z1 = f64x4::default();
let mut z2 = f64x4::default();
if dobig {
let xb = (x1 + x1).sqrt();
z1 = xb.mul_add(y1, xb);
}
if dosmall {
z2 = xa.mul_add(y1, xa);
}
// acos
let z3 = self.cmp_lt(f64x4::ZERO).blend(f64x4::PI - z1, z1);
let z4 = f64x4::FRAC_PI_2 - z2.flip_signs(self);
let acos = big.blend(z3, z4);
acos
}
#[inline]
#[must_use]
#[allow(non_upper_case_globals)]
pub fn asin(self) -> Self {
// Based on the Agner Fog "vector class library":
// https://github.com/vectorclass/version2/blob/master/vectormath_trig.h
const_f64_as_f64x4!(R4asin, 2.967721961301243206100E-3);
const_f64_as_f64x4!(R3asin, -5.634242780008963776856E-1);
const_f64_as_f64x4!(R2asin, 6.968710824104713396794E0);
const_f64_as_f64x4!(R1asin, -2.556901049652824852289E1);
const_f64_as_f64x4!(R0asin, 2.853665548261061424989E1);
const_f64_as_f64x4!(S3asin, -2.194779531642920639778E1);
const_f64_as_f64x4!(S2asin, 1.470656354026814941758E2);
const_f64_as_f64x4!(S1asin, -3.838770957603691357202E2);
const_f64_as_f64x4!(S0asin, 3.424398657913078477438E2);
const_f64_as_f64x4!(P5asin, 4.253011369004428248960E-3);
const_f64_as_f64x4!(P4asin, -6.019598008014123785661E-1);
const_f64_as_f64x4!(P3asin, 5.444622390564711410273E0);
const_f64_as_f64x4!(P2asin, -1.626247967210700244449E1);
const_f64_as_f64x4!(P1asin, 1.956261983317594739197E1);
const_f64_as_f64x4!(P0asin, -8.198089802484824371615E0);
const_f64_as_f64x4!(Q4asin, -1.474091372988853791896E1);
const_f64_as_f64x4!(Q3asin, 7.049610280856842141659E1);
const_f64_as_f64x4!(Q2asin, -1.471791292232726029859E2);
const_f64_as_f64x4!(Q1asin, 1.395105614657485689735E2);
const_f64_as_f64x4!(Q0asin, -4.918853881490881290097E1);
let xa = self.abs();
let big = xa.cmp_ge(f64x4::splat(0.625));
let x1 = big.blend(f64x4::splat(1.0) - xa, xa * xa);
let x2 = x1 * x1;
let x3 = x2 * x1;
let x4 = x2 * x2;
let x5 = x4 * x1;
let dobig = big.any();
let dosmall = !big.all();
let mut rx = f64x4::default();
let mut sx = f64x4::default();
let mut px = f64x4::default();
let mut qx = f64x4::default();
if dobig {
rx = x3.mul_add(R3asin, x2 * R2asin)
+ x4.mul_add(R4asin, x1.mul_add(R1asin, R0asin));
sx =
x3.mul_add(S3asin, x4) + x2.mul_add(S2asin, x1.mul_add(S1asin, S0asin));
}
if dosmall {
px = x3.mul_add(P3asin, P0asin)
+ x4.mul_add(P4asin, x1 * P1asin)
+ x5.mul_add(P5asin, x2 * P2asin);
qx = x4.mul_add(Q4asin, x5)
+ x3.mul_add(Q3asin, x1 * Q1asin)
+ x2.mul_add(Q2asin, Q0asin);
};
let vx = big.blend(rx, px);
let wx = big.blend(sx, qx);
let y1 = vx / wx * x1;
let mut z1 = f64x4::default();
let mut z2 = f64x4::default();
if dobig {
let xb = (x1 + x1).sqrt();
z1 = xb.mul_add(y1, xb);
}
if dosmall {
z2 = xa.mul_add(y1, xa);
}
// asin
let z3 = f64x4::FRAC_PI_2 - z1;
let asin = big.blend(z3, z2);
let asin = asin.flip_signs(self);
asin
}
#[allow(non_upper_case_globals)]
pub fn atan(self) -> Self {
// Based on the Agner Fog "vector class library":
// https://github.com/vectorclass/version2/blob/master/vectormath_trig.h
const_f64_as_f64x4!(MOREBITS, 6.123233995736765886130E-17);
const_f64_as_f64x4!(MOREBITSO2, 6.123233995736765886130E-17 * 0.5);
const_f64_as_f64x4!(T3PO8, core::f64::consts::SQRT_2 + 1.0);
const_f64_as_f64x4!(P4atan, -8.750608600031904122785E-1);
const_f64_as_f64x4!(P3atan, -1.615753718733365076637E1);
const_f64_as_f64x4!(P2atan, -7.500855792314704667340E1);
const_f64_as_f64x4!(P1atan, -1.228866684490136173410E2);
const_f64_as_f64x4!(P0atan, -6.485021904942025371773E1);
const_f64_as_f64x4!(Q4atan, 2.485846490142306297962E1);
const_f64_as_f64x4!(Q3atan, 1.650270098316988542046E2);
const_f64_as_f64x4!(Q2atan, 4.328810604912902668951E2);
const_f64_as_f64x4!(Q1atan, 4.853903996359136964868E2);
const_f64_as_f64x4!(Q0atan, 1.945506571482613964425E2);
let t = self.abs();
// small: t < 0.66
    // medium: 0.66 <= t <= 2.4142 (1+sqrt(2))
// big: t > 2.4142
let notbig = t.cmp_le(T3PO8);
let notsmal = t.cmp_ge(Self::splat(0.66));
let mut s = notbig.blend(Self::FRAC_PI_4, Self::FRAC_PI_2);
s = notsmal & s;
let mut fac = notbig.blend(MOREBITSO2, MOREBITS);
fac = notsmal & fac;
// small: z = t / 1.0;
// medium: z = (t-1.0) / (t+1.0);
// big: z = -1.0 / t;
let mut a = notbig & t;
a = notsmal.blend(a - Self::ONE, a);
let mut b = notbig & Self::ONE;
b = notsmal.blend(b + t, b);
let z = a / b;
let zz = z * z;
let px = polynomial_4!(zz, P0atan, P1atan, P2atan, P3atan, P4atan);
let qx = polynomial_5n!(zz, Q0atan, Q1atan, Q2atan, Q3atan, Q4atan);
let mut re = (px / qx).mul_add(z * zz, z);
re += s + fac;
// get sign bit
re = (self.sign_bit()).blend(-re, re);
re
}
#[allow(non_upper_case_globals)]
pub fn atan2(self, x: Self) -> Self {
// Based on the Agner Fog "vector class library":
// https://github.com/vectorclass/version2/blob/master/vectormath_trig.h
const_f64_as_f64x4!(MOREBITS, 6.123233995736765886130E-17);
const_f64_as_f64x4!(MOREBITSO2, 6.123233995736765886130E-17 * 0.5);
const_f64_as_f64x4!(T3PO8, core::f64::consts::SQRT_2 + 1.0);
const_f64_as_f64x4!(P4atan, -8.750608600031904122785E-1);
const_f64_as_f64x4!(P3atan, -1.615753718733365076637E1);
const_f64_as_f64x4!(P2atan, -7.500855792314704667340E1);
const_f64_as_f64x4!(P1atan, -1.228866684490136173410E2);
const_f64_as_f64x4!(P0atan, -6.485021904942025371773E1);
const_f64_as_f64x4!(Q4atan, 2.485846490142306297962E1);
const_f64_as_f64x4!(Q3atan, 1.650270098316988542046E2);
const_f64_as_f64x4!(Q2atan, 4.328810604912902668951E2);
const_f64_as_f64x4!(Q1atan, 4.853903996359136964868E2);
const_f64_as_f64x4!(Q0atan, 1.945506571482613964425E2);
let y = self;
// move in first octant
let x1 = x.abs();
let y1 = y.abs();
let swapxy = y1.cmp_gt(x1);
// swap x and y if y1 > x1
let mut x2 = swapxy.blend(y1, x1);
let mut y2 = swapxy.blend(x1, y1);
// check for special case: x and y are both +/- INF
let both_infinite = x.is_inf() & y.is_inf();
if both_infinite.any() {
let mone = -Self::ONE;
x2 = both_infinite.blend(x2 & mone, x2);
y2 = both_infinite.blend(y2 & mone, y2);
}
// x = y = 0 gives NAN here
let t = y2 / x2;
// small: t < 0.66
    // medium: 0.66 <= t <= 2.4142 (1+sqrt(2))
// big: t > 2.4142
let notbig = t.cmp_le(T3PO8);
let notsmal = t.cmp_ge(Self::splat(0.66));
let mut s = notbig.blend(Self::FRAC_PI_4, Self::FRAC_PI_2);
s = notsmal & s;
let mut fac = notbig.blend(MOREBITSO2, MOREBITS);
fac = notsmal & fac;
// small: z = t / 1.0;
// medium: z = (t-1.0) / (t+1.0);
// big: z = -1.0 / t;
let mut a = notbig & t;
a = notsmal.blend(a - Self::ONE, a);
let mut b = notbig & Self::ONE;
b = notsmal.blend(b + t, b);
let z = a / b;
let zz = z * z;
let px = polynomial_4!(zz, P0atan, P1atan, P2atan, P3atan, P4atan);
let qx = polynomial_5n!(zz, Q0atan, Q1atan, Q2atan, Q3atan, Q4atan);
let mut re = (px / qx).mul_add(z * zz, z);
re += s + fac;
// move back in place
re = swapxy.blend(Self::FRAC_PI_2 - re, re);
re = ((x | y).cmp_eq(Self::ZERO)).blend(Self::ZERO, re);
re = (x.sign_bit()).blend(Self::PI - re, re);
// get sign bit
re = (y.sign_bit()).blend(-re, re);
re
}
#[inline]
#[must_use]
#[allow(non_upper_case_globals)]
pub fn sin_cos(self) -> (Self, Self) {
// Based on the Agner Fog "vector class library":
// https://github.com/vectorclass/version2/blob/master/vectormath_trig.h
const_f64_as_f64x4!(P0sin, -1.66666666666666307295E-1);
const_f64_as_f64x4!(P1sin, 8.33333333332211858878E-3);
const_f64_as_f64x4!(P2sin, -1.98412698295895385996E-4);
const_f64_as_f64x4!(P3sin, 2.75573136213857245213E-6);
const_f64_as_f64x4!(P4sin, -2.50507477628578072866E-8);
const_f64_as_f64x4!(P5sin, 1.58962301576546568060E-10);
const_f64_as_f64x4!(P0cos, 4.16666666666665929218E-2);
const_f64_as_f64x4!(P1cos, -1.38888888888730564116E-3);
const_f64_as_f64x4!(P2cos, 2.48015872888517045348E-5);
const_f64_as_f64x4!(P3cos, -2.75573141792967388112E-7);
const_f64_as_f64x4!(P4cos, 2.08757008419747316778E-9);
const_f64_as_f64x4!(P5cos, -1.13585365213876817300E-11);
const_f64_as_f64x4!(DP1, 7.853981554508209228515625E-1 * 2.);
const_f64_as_f64x4!(DP2, 7.94662735614792836714E-9 * 2.);
const_f64_as_f64x4!(DP3, 3.06161699786838294307E-17 * 2.);
const_f64_as_f64x4!(TWO_OVER_PI, 2.0 / core::f64::consts::PI);
let xa = self.abs();
let y = (xa * TWO_OVER_PI).round();
let q = y.round_int();
let x = y.mul_neg_add(DP3, y.mul_neg_add(DP2, y.mul_neg_add(DP1, xa)));
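    // Extended-precision argument reduction: x = xa - y*(pi/2), with pi/2
    // split into three parts (DP1 + DP2 + DP3) to avoid cancellation error
    // for large arguments.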
let x2 = x * x;
let mut s = polynomial_5!(x2, P0sin, P1sin, P2sin, P3sin, P4sin, P5sin);
let mut c = polynomial_5!(x2, P0cos, P1cos, P2cos, P3cos, P4cos, P5cos);
s = (x * x2).mul_add(s, x);
c =
(x2 * x2).mul_add(c, x2.mul_neg_add(f64x4::from(0.5), f64x4::from(1.0)));
let swap = !((q & i64x4::from(1)).cmp_eq(i64x4::from(0)));
let mut overflow: f64x4 = cast(q.cmp_gt(i64x4::from(0x80000000000000)));
overflow &= xa.is_finite();
s = overflow.blend(f64x4::from(0.0), s);
c = overflow.blend(f64x4::from(1.0), c);
// calc sin
let mut sin1 = cast::<_, f64x4>(swap).blend(c, s);
let sign_sin: i64x4 = (q << 62) ^ cast::<_, i64x4>(self);
sin1 = sin1.flip_signs(cast(sign_sin));
// calc cos
let mut cos1 = cast::<_, f64x4>(swap).blend(s, c);
let sign_cos: i64x4 = ((q + i64x4::from(1)) & i64x4::from(2)) << 62;
cos1 ^= cast::<_, f64x4>(sign_cos);
(sin1, cos1)
}
#[inline]
#[must_use]
pub fn sin(self) -> Self {
let (s, _) = self.sin_cos();
s
}
#[inline]
#[must_use]
pub fn cos(self) -> Self {
let (_, c) = self.sin_cos();
c
}
#[inline]
#[must_use]
pub fn tan(self) -> Self {
let (s, c) = self.sin_cos();
s / c
}
#[inline]
#[must_use]
pub fn to_degrees(self) -> Self {
const_f64_as_f64x4!(RAD_TO_DEG_RATIO, 180.0_f64 / core::f64::consts::PI);
self * RAD_TO_DEG_RATIO
}
#[inline]
#[must_use]
pub fn to_radians(self) -> Self {
const_f64_as_f64x4!(DEG_TO_RAD_RATIO, core::f64::consts::PI / 180.0_f64);
self * DEG_TO_RAD_RATIO
}
#[inline]
#[must_use]
pub fn sqrt(self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: sqrt_m256d(self.avx) }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: sqrt_m128d(self.sse0), sse1: sqrt_m128d(self.sse1) }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: f64x2_sqrt(self.simd0), simd1: f64x2_sqrt(self.simd1) }
} else if #[cfg(feature="std")] {
Self { arr: [
self.arr[0].sqrt(),
self.arr[1].sqrt(),
self.arr[2].sqrt(),
self.arr[3].sqrt(),
]}
} else {
Self { arr: [
software_sqrt(self.arr[0] as f64) as f64,
software_sqrt(self.arr[1] as f64) as f64,
software_sqrt(self.arr[2] as f64) as f64,
software_sqrt(self.arr[3] as f64) as f64,
]}
}
}
}
#[inline]
#[must_use]
pub fn move_mask(self) -> i32 {
pick! {
if #[cfg(target_feature="avx")] {
move_mask_m256d(self.avx)
} else if #[cfg(target_feature="sse2")] {
(move_mask_m128d(self.sse1) << 2) ^ move_mask_m128d(self.sse0)
} else if #[cfg(target_feature="simd128")] {
((u64x2_bitmask(self.simd1) as i32) << 2) ^ u64x2_bitmask(self.simd0) as i32
} else {
(((self.arr[0].to_bits() as i64) < 0) as i32) << 0 |
(((self.arr[1].to_bits() as i64) < 0) as i32) << 1 |
(((self.arr[2].to_bits() as i64) < 0) as i32) << 2 |
(((self.arr[3].to_bits() as i64) < 0) as i32) << 3
}
}
}
#[inline]
#[must_use]
pub fn any(self) -> bool {
pick! {
if #[cfg(target_feature="simd128")] {
v128_any_true(self.simd0) | v128_any_true(self.simd1)
} else {
self.move_mask() != 0
}
}
}
#[inline]
#[must_use]
pub fn all(self) -> bool {
pick! {
if #[cfg(target_feature="simd128")] {
u64x2_all_true(self.simd0) & u64x2_all_true(self.simd1)
} else {
// four lanes
self.move_mask() == 0b1111
}
}
}
#[inline]
#[must_use]
pub fn none(self) -> bool {
!self.any()
}
#[inline]
#[allow(non_upper_case_globals)]
fn vm_pow2n(self) -> Self {
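    // Computes 2^n for integer-valued lanes by constructing the f64 bit
    // pattern directly: (n + 1023) ends up in the exponent field after the
    // shift below.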
const_f64_as_f64x4!(pow2_52, 4503599627370496.0);
const_f64_as_f64x4!(bias, 1023.0);
let a = self + (bias + pow2_52);
let c = cast::<_, i64x4>(a) << 52;
cast::<_, f64x4>(c)
}
  /// Calculates `e^x` (the exponential function) for each lane of a packed f64x4
#[inline]
#[must_use]
#[allow(non_upper_case_globals)]
pub fn exp(self) -> Self {
const_f64_as_f64x4!(P2, 1.0 / 2.0);
const_f64_as_f64x4!(P3, 1.0 / 6.0);
const_f64_as_f64x4!(P4, 1. / 24.);
const_f64_as_f64x4!(P5, 1. / 120.);
const_f64_as_f64x4!(P6, 1. / 720.);
const_f64_as_f64x4!(P7, 1. / 5040.);
const_f64_as_f64x4!(P8, 1. / 40320.);
const_f64_as_f64x4!(P9, 1. / 362880.);
const_f64_as_f64x4!(P10, 1. / 3628800.);
const_f64_as_f64x4!(P11, 1. / 39916800.);
const_f64_as_f64x4!(P12, 1. / 479001600.);
const_f64_as_f64x4!(P13, 1. / 6227020800.);
const_f64_as_f64x4!(LN2D_HI, 0.693145751953125);
const_f64_as_f64x4!(LN2D_LO, 1.42860682030941723212E-6);
let max_x = f64x4::from(708.39);
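    // exp(x) overflows f64 just above ln(f64::MAX) ~= 709.78, so inputs whose
    // magnitude reaches ~708.39 are flushed to zero by the range check below.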
let r = (self * Self::LOG2_E).round();
let x = r.mul_neg_add(LN2D_HI, self);
let x = r.mul_neg_add(LN2D_LO, x);
let z =
polynomial_13!(x, P2, P3, P4, P5, P6, P7, P8, P9, P10, P11, P12, P13);
let n2 = Self::vm_pow2n(r);
let z = (z + Self::ONE) * n2;
// check for overflow
let in_range = self.abs().cmp_lt(max_x);
let in_range = in_range & self.is_finite();
in_range.blend(z, Self::ZERO)
}
#[inline]
#[allow(non_upper_case_globals)]
fn exponent(self) -> f64x4 {
const_f64_as_f64x4!(pow2_52, 4503599627370496.0);
const_f64_as_f64x4!(bias, 1023.0);
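    // Shift the raw exponent bits down, overlay them onto the bit pattern of
    // 2^52, then subtract (2^52 + bias) as an f64 to recover the unbiased
    // exponent exactly.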
let a = cast::<_, u64x4>(self);
let b = a >> 52;
let c = b | cast::<_, u64x4>(pow2_52);
let d = cast::<_, f64x4>(c);
let e = d - (pow2_52 + bias);
e
}
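// Returns each lane's significand rescaled into [0.5, 1.0): keep the
// fraction bits and force the exponent field to that of 0.5 (0x3FE).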
#[inline]
#[allow(non_upper_case_globals)]
fn fraction_2(self) -> Self {
let t1 = cast::<_, u64x4>(self);
let t2 = cast::<_, u64x4>(
(t1 & u64x4::from(0x000FFFFFFFFFFFFF)) | u64x4::from(0x3FE0000000000000),
);
cast::<_, f64x4>(t2)
}
fn is_zero_or_subnormal(self) -> Self {
let t = cast::<_, i64x4>(self);
let t = t & i64x4::splat(0x7FF0000000000000);
i64x4::round_float(t.cmp_eq(i64x4::splat(0)))
}
fn infinity() -> Self
|
fn nan_log() -> Self {
cast::<_, f64x4>(i64x4::splat(0x7FF8000000000000 | 0x101 << 29))
}
fn nan_pow() -> Self {
cast::<_, f64x4>(i64x4::splat(0x7FF8000000000000 | 0x101 << 29))
}
fn sign_bit(self) -> Self {
let t1 = cast::<_, i64x4>(self);
let t2 = t1 >> 63;
!cast::<_, f64x4>(t2).cmp_eq(f64x4::ZERO)
}
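/// Horizontal sum: adds the four lanes together into a single `f64`.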
pub fn reduce_add(self) -> f64 {
pick! {
if #[cfg(target_feature="avx")] {
// From https://stackoverflow.com/questions/49941645/get-sum-of-values-stored-in-m256d-with-sse-avx
let lo = cast_to_m128d_from_m256d(self.avx);
let hi = extract_m128d_from_m256d::<1>(self.avx);
let lo = add_m128d(lo,hi);
let hi64 = unpack_high_m128d(lo,lo);
let sum = add_m128d_s(lo,hi64);
get_f64_from_m128d_s(sum)
} else if #[cfg(target_feature="ssse3")] {
let a = add_horizontal_m128d(self.sse0, self.sse0);
let b = add_horizontal_m128d(self.sse1, self.sse1);
get_f64_from_m128d_s(a) + get_f64_from_m128d_s(b)
} else {
let arr: [f64; 4] = cast(self);
arr.iter().sum()
}
}
}
/// Natural log (ln(x))
#[inline]
#[must_use]
#[allow(non_upper_case_globals)]
pub fn ln(self) -> Self {
const_f64_as_f64x4!(HALF, 0.5);
const_f64_as_f64x4!(P0, 7.70838733755885391666E0);
const_f64_as_f64x4!(P1, 1.79368678507819816313E1);
const_f64_as_f64x4!(P2, 1.44989225341610930846E1);
const_f64_as_f64x4!(P3, 4.70579119878881725854E0);
const_f64_as_f64x4!(P4, 4.97494994976747001425E-1);
const_f64_as_f64x4!(P5, 1.01875663804580931796E-4);
const_f64_as_f64x4!(Q0, 2.31251620126765340583E1);
const_f64_as_f64x4!(Q1, 7.11544750618563894466E1);
const_f64_as_f64x4!(Q2, 8.29875266912776603211E1);
const_f64_as_f64x4!(Q3, 4.52279145837532221105E1);
const_f64_as_f64x4!(Q4, 1.12873587189167450590E1);
const_f64_as_f64x4!(LN2F_HI, 0.693359375);
const_f64_as_f64x4!(LN2F_LO, -2.12194440e-4);
const_f64_as_f64x4!(VM_SQRT2, 1.414213562373095048801);
const_f64_as_f64x4!(VM_SMALLEST_NORMAL, 1.17549435E-38);
let x1 = self;
let x = Self::fraction_2(x1);
let e = Self::exponent(x1);
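// Normalize the significand to lie roughly in [sqrt(2)/2, sqrt(2)] so the
// ln(1 + x) polynomial below is evaluated near zero; the integer exponent
// `fe` absorbs the remaining powers of two.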
let mask = x.cmp_gt(VM_SQRT2 * HALF);
let x = (!mask).blend(x + x, x);
let fe = mask.blend(e + Self::ONE, e);
let x = x - Self::ONE;
let px = polynomial_5!(x, P0, P1, P2, P3, P4, P5);
let x2 = x * x;
let px = x2 * x * px;
let qx = polynomial_5n!(x, Q0, Q1, Q2, Q3, Q4);
let res = px / qx;
let res = fe.mul_add(LN2F_LO, res);
let res = res + x2.mul_neg_add(HALF, x);
let res = fe.mul_add(LN2F_HI, res);
let overflow = !self.is_finite();
let underflow = x1.cmp_lt(VM_SMALLEST_NORMAL);
let mask = overflow | underflow;
if !mask.any() {
res
} else {
let iszero = self.is_zero_or_subnormal();
let res = underflow.blend(Self::nan_log(), res);
let res = iszero.blend(Self::infinity(), res);
let res = overflow.blend(self, res);
res
}
}
#[inline]
#[must_use]
pub fn log2(self) -> Self {
Self::ln(self) * Self::LOG2_E
}
#[inline]
#[must_use]
pub fn log10(self) -> Self {
Self::ln(self) * Self::LOG10_E
}
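/// Lane-wise power function `self^y`, evaluated as `exp(y * ln(self))` with
/// extra error-compensation terms; zero or negative bases and non-finite
/// inputs are patched up afterwards.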
#[inline]
#[must_use]
#[allow(non_upper_case_globals)]
pub fn pow_f64x4(self, y: Self) -> Self {
const_f64_as_f64x4!(ln2d_hi, 0.693145751953125);
const_f64_as_f64x4!(ln2d_lo, 1.42860682030941723212E-6);
const_f64_as_f64x4!(P0log, 2.0039553499201281259648E1);
const_f64_as_f64x4!(P1log, 5.7112963590585538103336E1);
const_f64_as_f64x4!(P2log, 6.0949667980987787057556E1);
const_f64_as_f64x4!(P3log, 2.9911919328553073277375E1);
const_f64_as_f64x4!(P4log, 6.5787325942061044846969E0);
const_f64_as_f64x4!(P5log, 4.9854102823193375972212E-1);
const_f64_as_f64x4!(P6log, 4.5270000862445199635215E-5);
const_f64_as_f64x4!(Q0log, 6.0118660497603843919306E1);
const_f64_as_f64x4!(Q1log, 2.1642788614495947685003E2);
const_f64_as_f64x4!(Q2log, 3.0909872225312059774938E2);
const_f64_as_f64x4!(Q3log, 2.2176239823732856465394E2);
const_f64_as_f64x4!(Q4log, 8.3047565967967209469434E1);
const_f64_as_f64x4!(Q5log, 1.5062909083469192043167E1);
// Taylor expansion constants
const_f64_as_f64x4!(p2, 1.0 / 2.0); // coefficients for Taylor expansion of exp
const_f64_as_f64x4!(p3, 1.0 / 6.0);
const_f64_as_f64x4!(p4, 1.0 / 24.0);
const_f64_as_f64x4!(p5, 1.0 / 120.0);
const_f64_as_f64x4!(p6, 1.0 / 720.0);
const_f64_as_f64x4!(p7, 1.0 / 5040.0);
const_f64_as_f64x4!(p8, 1.0 / 40320.0);
const_f64_as_f64x4!(p9, 1.0 / 362880.0);
const_f64_as_f64x4!(p10, 1.0 / 3628800.0);
const_f64_as_f64x4!(p11, 1.0 / 39916800.0);
const_f64_as_f64x4!(p12, 1.0 / 479001600.0);
const_f64_as_f64x4!(p13, 1.0 / 6227020800.0);
let x1 = self.abs();
let x = x1.fraction_2();
let mask = x.cmp_gt(f64x4::SQRT_2 * f64x4::HALF);
let x = (!mask).blend(x + x, x);
let x = x - f64x4::ONE;
let x2 = x * x;
let px = polynomial_6!(x, P0log, P1log, P2log, P3log, P4log, P5log, P6log);
let px = px * x * x2;
let qx = polynomial_6n!(x, Q0log, Q1log, Q2log, Q3log, Q4log, Q5log);
let lg1 = px / qx;
let ef = x1.exponent();
let ef = mask.blend(ef + f64x4::ONE, ef);
let e1 = (ef * y).round();
let yr = ef.mul_sub(y, e1);
let lg = f64x4::HALF.mul_neg_add(x2, x) + lg1;
let x2err = (f64x4::HALF * x).mul_sub(x, f64x4::HALF * x2);
let lgerr = f64x4::HALF.mul_add(x2, lg - x) - lg1;
let e2 = (lg * y * f64x4::LOG2_E).round();
let v = lg.mul_sub(y, e2 * ln2d_hi);
let v = e2.mul_neg_add(ln2d_lo, v);
let v = v - (lgerr + x2err).mul_sub(y, yr * f64x4::LN_2);
let x = v;
let e3 = (x * f64x4::LOG2_E).round();
let x = e3.mul_neg_add(f64x4::LN_2, x);
let z =
polynomial_13m!(x, p2, p3, p4, p5, p6, p7, p8, p9, p10, p11, p12, p13)
+ f64x4::ONE;
let ee = e1 + e2 + e3;
let ei = cast::<_, i64x4>(ee.round_int());
let ej = cast::<_, i64x4>(ei + (cast::<_, i64x4>(z) >> 52));
let overflow = cast::<_, f64x4>(!ej.cmp_lt(i64x4::splat(0x07FF)))
| ee.cmp_gt(f64x4::splat(3000.0));
let underflow = cast::<_, f64x4>(!ej.cmp_gt(i64x4::splat(0x000)))
| ee.cmp_lt(f64x4::splat(-3000.0));
// Add exponent by integer addition
let z = cast::<_, f64x4>(cast::<_, i64x4>(z) + (ei << 52));
// Check for overflow/underflow
let z = if (overflow | underflow).any() {
let z = underflow.blend(f64x4::ZERO, z);
overflow.blend(Self::infinity(), z)
} else {
z
};
// Check for self == 0
let xzero = self.is_zero_or_subnormal();
let z = xzero.blend(
y.cmp_lt(f64x4::ZERO).blend(
Self::infinity(),
y.cmp_eq(f64x4::ZERO).blend(f64x4::ONE, f64x4::ZERO),
),
z,
);
let xsign = self.sign_bit();
let z = if xsign.any() {
// Is y an integer?
let yi = y.cmp_eq(y.round());
// Is y odd?
let yodd = cast::<_, i64x4>(y.round_int() << 63).round_float();
let z1 =
yi.blend(z | yodd, self.cmp_eq(Self::ZERO).blend(z, Self::nan_pow()));
xsign.blend(z1, z)
} else {
z
};
let xfinite = self.is_finite();
let yfinite = y.is_finite();
let efinite = ee.is_finite();
if (xfinite & yfinite & (efinite | xzero)).all() {
return z;
}
(self.is_nan() | y.is_nan()).blend(self + y, z)
}
pub fn powf(self, y: f64) -> Self {
Self::pow_f64x4(self, f64x4::splat(y))
}
pub fn to_array(self) -> [f64; 4] {
cast(self)
}
}
impl Not for f64x4 {
type Output = Self;
fn not(self) -> Self {
pick! {
if #[cfg(target_feature="avx")] {
Self { avx: self.avx.not() }
} else if #[cfg(target_feature="sse2")] {
Self { sse0: self.sse0.not() , sse1: self.sse1.not() }
} else if #[cfg(target_feature="simd128")] {
Self { simd0: v128_not(self.simd0), simd1: v128_not(self.simd1) }
} else {
Self { arr: [
f64::from_bits(!self.arr[0].to_bits()),
f64::from_bits(!self.arr[1].to_bits()),
f64::from_bits(!self.arr[2].to_bits()),
f64::from_bits(!self.arr[3].to_bits()),
]}
}
}
}
}
|
{
cast::<_, f64x4>(i64x4::splat(0x7FF0000000000000))
}
|
witnet.settings.js
|
const packageJson = require("../package.json")
module.exports = {
artifacts: {
default: {
WitnetDecoderLib: "WitnetDecoderLib",
WitnetParserLib: "WitnetParserLib",
WitnetPriceRouter: "WitnetPriceRouter",
WitnetProxy: "WitnetProxy",
WitnetRandomness: "WitnetRandomness",
WitnetRequestBoard: "WitnetRequestBoardTrustableDefault",
},
},
constructorParams: {
default: {
WitnetRequestBoard: [
/* _isUpgradable */ true,
/* _versionTag */ fromAscii(packageJson.version + "-trustable"),
/* _reportResultGasLimit */ 133000,
],
},
boba: {
WitnetRequestBoard: [
/* _isUpgradable */ true,
/* _versionTag */ fromAscii(packageJson.version + "-trustable"),
/* _reportResultGasLimit */ 97000,
],
},
celo: {
WitnetRequestBoard: [
/* _isUpgradable */ true,
/* _versionTag */ fromAscii(packageJson.version + "-trustable"),
/* _reportResultGasLimit */ 114000,
],
},
conflux: {
WitnetRequestBoard: [
/* _isUpgradable */ true,
/* _versionTag */ fromAscii(packageJson.version + "-trustable"),
/* _reportResultGasLimit */ 78500,
],
},
harmony: {
WitnetRequestBoard: [
/* _isUpgradable */ true,
/* _versionTag */ fromAscii(packageJson.version + "-trustable"),
/* _reportResultGasLimit */ 530000,
],
},
kcc: {
WitnetRequestBoard: [
/* _isUpgradable */ true,
/* _versionTag */ fromAscii(packageJson.version + "-trustable"),
/* _reportResultGasLimit */ 92500,
],
},
metis: {
WitnetRequestBoard: [
/* _isUpgradable */ true,
/* _versionTag */ fromAscii(packageJson.version + "-trustable"),
/* _reportResultGasLimit */ 134800,
],
},
},
compilers: {
default: {
solc: {
version: "0.8.11",
settings: {
optimizer: {
enabled: true,
runs: 200,
},
},
outputSelection: {
"*": {
"*": ["evm.bytecode"],
},
},
},
},
conflux: {
solc: {
evmVersion: "petersburg",
},
},
},
networks: {
default: {
"ethereum.goerli": {
network_id: 5,
host: "localhost",
port: 8545,
skipDryRun: true,
},
"ethereum.kovan": {
network_id: 42,
host: "localhost",
port: 8542,
skipDryRun: true,
},
"ethereum.mainnet": {
network_id: 1,
host: "localhost",
port: 9545,
},
"ethereum.rinkeby": {
network_id: 4,
host: "localhost",
port: 8544,
skipDryRun: true,
},
"ethereum.ropsten": {
network_id: 3,
host: "localhost",
port: 8543,
},
},
avalanche: {
"avalanche.mainnet": {
network_id: 43114,
host: "localhost",
port: 9533,
skipDryRun: true,
},
"avalanche.testnet": {
network_id: 43113,
host: "localhost",
port: 8533,
skipDryRun: true,
gasPrice: 30 * 10 ** 9,
},
},
boba: {
"boba.mainnet": {
network_id: 288,
host: "localhost",
port: 9539,
skipDryRun: true,
},
"boba.rinkeby": {
network_id: 28,
host: "localhost",
port: 8539,
skipDryRun: true,
},
},
celo: {
"celo.alfajores": {
network_id: 44787,
host: "localhost",
port: 8538,
skipDryRun: true,
},
"celo.mainnet": {
network_id: 42220,
host: "localhost",
port: 9538,
skipDryRun: true,
},
},
conflux: {
"conflux.testnet": {
host: "localhost",
port: 8540,
network_id: 1,
gasPrice: 10,
skipDryRun: true,
},
"conflux.mainnet": {
host: "localhost",
port: 9540,
network_id: 1029,
gasPrice: 10,
skipDryRun: true,
},
},
harmony: {
"harmony.testnet#0": {
host: "localhost",
port: 8534,
network_id: 1666700000,
skipDryRun: true,
},
},
kcc: {
"kcc.testnet": {
host: "localhost",
port: 8537,
network_id: 322,
gasPrice: 10 ** 10,
skipDryRun: true,
},
"kcc.mainnet": {
host: "localhost",
port: 9537,
network_id: 321,
gasPrice: 10 ** 10,
skipDryRun: true,
},
},
metis: {
"metis.mainnet": {
host: "localhost",
port: 9536,
network_id: 1088,
skipDryRun: true,
},
"metis.rinkeby": {
host: "localhost",
port: 8536,
network_id: 588,
skipDryRun: true,
gas: 30000000,
},
},
polygon: {
"polygon.goerli": {
host: "localhost",
port: 8535,
network_id: 80001,
skipDryRun: true,
gasPrice: 30 * 10 ** 9,
},
"polygon.mainnet": {
host: "localhost",
port: 9535,
network_id: 137,
skipDryRun: true,
gasPrice: 30 * 10 ** 9,
},
},
},
}
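// Hex-encodes an ASCII string with a "0x" prefix (e.g. "1.0" -> "0x312e30");
// used above to derive the on-chain version tag from package.json. Note that
// code points below 0x10 are not zero-padded.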
function
|
(str) {
const arr1 = []
for (let n = 0, l = str.length; n < l; n++) {
const hex = Number(str.charCodeAt(n)).toString(16)
arr1.push(hex)
}
return "0x" + arr1.join("")
}
|
fromAscii
|
config.py
|
import os
class Config:
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL')
SQLALCHEMY_TRACK_MODIFICATIONS = True
SECRET_KEY = os.environ.get('SECRET_KEY')
UPLOADED_PHOTOS_DEST = 'app/static/photos'
MAIL_SERVER = 'smtp.gmail.com'
MAIL_PORT = 465  # standard port for SMTP over SSL
MAIL_USE_TLS = False
MAIL_USE_SSL = True
MAIL_USERNAME = os.environ.get("MAIL_USERNAME")
MAIL_PASSWORD = os.environ.get("MAIL_PASSWORD")
class ProdConfig(Config):
SQLALCHEMY_DATABASE_URI = os.environ.get("DATABASE_URL")
|
class DevConfig(Config):
DEBUG = True
config_options = {
'development': DevConfig,
'production': ProdConfig
}
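# A minimal usage sketch (assuming a Flask app factory; `app` is hypothetical
# and not defined in this module):
#
#   config_name = os.environ.get('FLASK_ENV', 'development')
#   app.config.from_object(config_options[config_name])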
| |
val_factory.go
|
package ndlvr
import (
"context"
"github.com/teawithsand/ndlvr/value"
)
// Defines what this validation is running on.
type ValidationTarget struct {
FunctionalTarget func(v value.Value, recv func(child value.Value) (err error)) (err error)
IsOrphanValue bool // ignored when FunctionalTarget is set
IsListValue bool // ignored when FunctionalTarget or IsOrphanValue is set
FieldName string // ignored when FunctionalTarget or IsOrphanValue or IsListValue is set
}
// func(vt *ValidationTarget) GetIterator(v value.Value, recv func(child value.Value) (err error)) (err error)
type ValidationBuildData struct {
Target ValidationTarget
ValidationName string
Argument interface{}
}
type ValidationBuildContext struct {
Ctx context.Context
Options *Options
Parser *Parser
OPs value.OPs
ArgumentParser ArgumentParser
Data ValidationBuildData
}
type ValidationFactory interface {
BuildValidation(bctx ValidationBuildContext) (val Validation, err error)
}
type ValidationFactoryFunc func(bctx ValidationBuildContext) (val Validation, err error)
func (f ValidationFactoryFunc) BuildValidation(bctx ValidationBuildContext) (val Validation, err error) {
return f(bctx)
}
type ValidationAsFactory func(bctx ValidationBuildContext, value value.Value) (err error)
func (f ValidationAsFactory) BuildValidation(bctx ValidationBuildContext) (val Validation, err error) {
val = ValidationFunc(func(ctx context.Context, value value.Value) (err error) {
bctx.Ctx = ctx
return f(bctx, value)
})
return
}
// A ValidationFactory that returns an error when the name provided in the build data does not match the given one.
type namedValidationFactory struct {
Name string
Factory ValidationFactory
}
func (nvf *namedValidationFactory) BuildValidation(bctx ValidationBuildContext) (val Validation, err error) {
if bctx.Data.ValidationName != nvf.Name {
err = &ValidationNameMismatchError{
Name: bctx.Data.ValidationName,
ExpectedName: nvf.Name,
}
return
}
return nvf.Factory.BuildValidation(bctx)
}
// WrapNamed wraps the specified factory in one that verifies the validation name and returns an error on mismatch.
func WrapNamed(name string, factory ValidationFactory) ValidationFactory {
// TODO(teawithsand): make this wrapper return errors, which contain validator name
return &namedValidationFactory{
Name: name,
Factory: factory,
}
}
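// A minimal usage sketch (the "required" validation below is hypothetical and
// not part of this package; it only illustrates the factory wiring):
//
//	factory := WrapNamed("required", ValidationAsFactory(
//		func(bctx ValidationBuildContext, v value.Value) (err error) {
//			if v == nil {
//				err = errors.New("value is required")
//			}
//			return
//		}))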
// SimpleFieldValidation builds a validation that accesses the value of the field passed in the build data.
func SimpleFieldValidation(
require bool,
inner func(bctx ValidationBuildContext, parentValue value.Value, fieldValue value.Value) (err error),
) ValidationFactory
|
{
return ValidationAsFactory(func(bctx ValidationBuildContext, vv value.Value) (err error) {
// A functional target supplies its own child values (e.g. list items), so the usual parent/field access does not apply
// TODO(teawithsand): make list valid parent value(?) instead of doing this no-parent hack
if bctx.Data.Target.FunctionalTarget != nil {
selector := bctx.Data.Target.FunctionalTarget
err = selector(vv, func(child value.Value) (err error) {
err = inner(bctx, vv, child)
return
})
if err != nil {
return
}
} else if bctx.Data.Target.IsOrphanValue {
err = inner(bctx, nil, vv)
if err != nil {
return
}
} else if bctx.Data.Target.IsListValue {
var listValue value.ListValue
listValue, err = value.ExpectListValue(vv)
if err != nil {
return
}
length := listValue.Len()
for i := 0; i < length; i++ {
var nth value.Value
nth, err = listValue.GetIndex(i)
if err != nil {
return
}
err = inner(bctx, vv, nth)
if err != nil {
return
}
}
} else {
var fieldValue value.Value
fieldValue, err = value.ExpectKeyedValueField(vv, bctx.Data.Target.FieldName, require)
if err != nil {
return
}
err = inner(bctx, vv, fieldValue)
}
return
})
}
|
|
get.go
|
package main
import (
"fmt"
"log"
"os"
"time"
"github.com/crowi/go-crowi"
"github.com/k0kubun/pp"
"golang.org/x/net/context"
)
func
|
() {
config := crowi.Config{
URL: "http://localhost:3000",
Token: os.Getenv("CROWI_ACCESS_TOKEN"),
}
client, err := crowi.NewClient(config)
if err != nil {
panic(err)
}
ctx, cancel := context.WithTimeout(context.Background(), 2*time.Second)
defer cancel()
var (
path = fmt.Sprintf("/user/%s/go-crowi-test-%d", os.Getenv("USER"), time.Now().UnixNano())
body = "# this is a sample\n\ntest"
)
res, err := client.Pages.Create(ctx, path, body)
if err != nil {
panic(err)
}
if !res.OK {
log.Printf("[ERROR] %s", res.Error)
os.Exit(1)
}
res2, err := client.Pages.Get(ctx, path)
if err != nil {
panic(err)
}
pp.Println(res2)
}
|
main
|
clipboard.rs
|
// Copyright 2019 The Druid Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//! Interacting with the system pasteboard/clipboard.
pub use crate::platform::clipboard as platform;
/// A handle to the system clipboard.
///
/// To get access to the global clipboard, call [`Application::clipboard()`].
///
/// # Working with text
///
/// Copying and pasting text is simple, using [`Clipboard::put_string`] and
/// [`Clipboard::get_string`]. If this is all you need, you're in luck.
///
/// # Advanced usage
///
/// When working with data more complicated than plaintext, you will generally
/// want to make that data available in multiple formats.
///
/// For instance, if you are writing an image editor, you may have a preferred
/// private format, that preserves metadata or layer information; but in order
/// to interoperate with your user's other programs, you might also make your
/// data available as an SVG, for other editors, and a bitmap image for applications
/// that can accept general image data.
///
/// ## `FormatId`entifiers
///
/// In order for other applications to find data we put on the clipboard,
/// (and for us to use data from other applications) we need to use agreed-upon
/// identifiers for our data types. On macOS, these should be
/// [`Universal Type Identifier`]s; on other platforms they appear to be
/// mostly [MIME types]. Several common types are exposed as constants on
/// [`ClipboardFormat`], these `const`s are set per-platform.
///
/// When defining custom formats, you should use the correct identifier for
/// the current platform.
///
/// ## Setting custom data
///
/// To put custom data on the clipboard, you create a [`ClipboardFormat`] for
/// each type of data you support. You are responsible for ensuring that the
/// data is already correctly serialized.
///
/// ### `ClipboardFormat` for text
///
/// If you wish to put text on the clipboard in addition to other formats,
/// take special care to use `ClipboardFormat::TEXT` as the [`FormatId`]. On
/// windows, we treat this identifier specially, and make sure the data is
/// encoded as a wide string; all other data going into and out of the
/// clipboard is treated as an array of bytes.
///
/// # Examples
///
/// ## Getting and setting text:
///
/// ```no_run
/// use druid_shell::{Application, Clipboard};
///
/// let mut clipboard = Application::global().clipboard();
/// clipboard.put_string("watch it there pal");
/// if let Some(contents) = clipboard.get_string() {
/// assert_eq!("what it there pal", contents.as_str());
/// }
///
/// ```
///
/// ## Copying multi-format data
///
/// ```no_run
/// use druid_shell::{Application, Clipboard, ClipboardFormat};
///
/// let mut clipboard = Application::global().clipboard();
///
/// let custom_type_id = "io.xieditor.path-clipboard-type";
///
/// let formats = [
/// ClipboardFormat::new(custom_type_id, make_custom_data()),
/// ClipboardFormat::new(ClipboardFormat::SVG, make_svg_data()),
/// ClipboardFormat::new(ClipboardFormat::PDF, make_pdf_data()),
/// ];
///
/// clipboard.put_formats(&formats);
///
/// # fn make_custom_data() -> Vec<u8> { unimplemented!() }
/// # fn make_svg_data() -> Vec<u8> { unimplemented!() }
/// # fn make_pdf_data() -> Vec<u8> { unimplemented!() }
/// ```
/// ## Supporting multi-format paste
///
/// ```no_run
/// use druid_shell::{Application, Clipboard, ClipboardFormat};
///
/// let clipboard = Application::global().clipboard();
///
/// let custom_type_id = "io.xieditor.path-clipboard-type";
/// let supported_types = &[custom_type_id, ClipboardFormat::SVG, ClipboardFormat::PDF];
/// let best_available_type = clipboard.preferred_format(supported_types);
///
/// if let Some(format) = best_available_type {
/// let data = clipboard.get_format(format).expect("I promise not to unwrap in production");
/// do_something_with_data(format, data)
/// }
///
/// # fn do_something_with_data(_: &str, _: Vec<u8>) {}
/// ```
///
/// [`Application::clipboard()`]: struct.Application.html#method.clipboard
/// [`Clipboard::put_string`]: struct.Clipboard.html#method.put_string
/// [`Clipboard::get_string`]: struct.Clipboard.html#method.get_string
/// [`FormatId`]: type.FormatId.html
/// [`Universal Type Identifier`]: https://escapetech.eu/manuals/qdrop/uti.html
/// [MIME types]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types
/// [`ClipboardFormat`]: struct.ClipboardFormat.html
#[derive(Debug, Clone)]
pub struct Clipboard(platform::Clipboard);
impl Clipboard {
/// Put a string onto the system clipboard.
pub fn put_string(&mut self, s: impl AsRef<str>) {
self.0.put_string(s);
}
/// Put multi-format data on the system clipboard.
pub fn put_formats(&mut self, formats: &[ClipboardFormat]) {
self.0.put_formats(formats)
}
/// Get a string from the system clipboard, if one is available.
pub fn get_string(&self) -> Option<String> {
self.0.get_string()
}
/// Given a list of supported clipboard types, returns the supported type which has
/// highest priority on the system clipboard, or `None` if no types are supported.
pub fn preferred_format(&self, formats: &[FormatId]) -> Option<FormatId> {
self.0.preferred_format(formats)
}
/// Return data in a given format, if available.
///
/// It is recommended that the [`FormatId`] argument be a format returned by
/// [`Clipboard::preferred_format`].
///
/// [`Clipboard::preferred_format`]: struct.Clipboard.html#method.preferred_format
/// [`FormatId`]: type.FormatId.html
pub fn get_format(&self, format: FormatId) -> Option<Vec<u8>> {
self.0.get_format(format)
}
/// For debugging: print the resolved identifiers for each type currently
/// on the clipboard.
#[doc(hidden)]
pub fn available_type_names(&self) -> Vec<String> {
self.0.available_type_names()
}
}
/// A type identifier for the system clipboard.
///
/// These should be [`UTI` strings] on macOS, and (by convention?) [MIME types] elsewhere.
///
/// [`UTI` strings]: https://escapetech.eu/manuals/qdrop/uti.html
/// [MIME types]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Basics_of_HTTP/MIME_types
pub type FormatId = &'static str;
/// Data coupled with a type identifier.
#[derive(Debug, Clone)]
pub struct ClipboardFormat {
pub(crate) identifier: FormatId,
pub(crate) data: Vec<u8>,
}
impl ClipboardFormat {
/// Create a new `ClipboardFormat` with the given `FormatId` and bytes.
///
/// You are responsible for ensuring that this data can be interpreted
/// as the provided format.
pub fn new(identifier: FormatId, data: impl Into<Vec<u8>>) -> Self {
let data = data.into();
ClipboardFormat { identifier, data }
}
}
impl From<String> for ClipboardFormat {
fn from(src: String) -> ClipboardFormat {
let data = src.into_bytes();
ClipboardFormat::new(ClipboardFormat::TEXT, data)
}
}
impl From<&str> for ClipboardFormat {
fn from(src: &str) -> ClipboardFormat {
src.to_string().into()
}
}
impl From<platform::Clipboard> for Clipboard {
fn from(src: platform::Clipboard) -> Clipboard
|
}
cfg_if::cfg_if! {
if #[cfg(target_os = "macos")] {
impl ClipboardFormat {
pub const PDF: &'static str = "com.adobe.pdf";
pub const TEXT: &'static str = "public.utf8-plain-text";
pub const SVG: &'static str = "public.svg-image";
}
} else {
impl ClipboardFormat {
cfg_if::cfg_if! {
if #[cfg(target_os = "linux")] {
// trial and error; this is the most supported string type for gtk?
pub const TEXT: &'static str = "UTF8_STRING";
} else {
pub const TEXT: &'static str = "text/plain";
}
}
pub const PDF: &'static str = "application/pdf";
pub const SVG: &'static str = "image/svg+xml";
}
}
}
|
{
Clipboard(src)
}
|
server.go
|
/*
Copyright The Helm Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package chartmuseum
import (
"strings"
"github.com/helm/chartmuseum/pkg/cache"
cm_logger "github.com/helm/chartmuseum/pkg/chartmuseum/logger"
cm_router "github.com/helm/chartmuseum/pkg/chartmuseum/router"
mt "github.com/helm/chartmuseum/pkg/chartmuseum/server/multitenant"
"github.com/helm/chartmuseum/pkg/storage"
)
type (
// ServerOptions are options for constructing a Server
ServerOptions struct {
StorageBackend storage.Backend
ExternalCacheStore cache.Store
ChartURL string
TlsCert string
TlsKey string
Username string
Password string
ChartPostFormFieldName string
ProvPostFormFieldName string
ContextPath string
LogJSON bool
LogHealth bool
Debug bool
EnableAPI bool
UseStatefiles bool
AllowOverwrite bool
AllowForceOverwrite bool
EnableMetrics bool
AnonymousGet bool
GenIndex bool
MaxStorageObjects int
IndexLimit int
Depth int
MaxUploadSize int
BearerAuth bool
AuthType string
AuthRealm string
AuthService string
AuthIssuer string
AuthCertPath string
}
// Server is a generic interface for web servers
Server interface {
Listen(port int)
}
)
// NewServer creates a new Server instance
func NewServer(options ServerOptions) (Server, error) {
logger, err := cm_logger.NewLogger(cm_logger.LoggerOptions{
Debug: options.Debug,
LogJSON: options.LogJSON,
})
if err != nil {
|
}
contextPath := strings.TrimSuffix(options.ContextPath, "/")
if contextPath != "" && !strings.HasPrefix(contextPath, "/") {
contextPath = "/" + contextPath
}
router := cm_router.NewRouter(cm_router.RouterOptions{
Logger: logger,
Username: options.Username,
Password: options.Password,
ContextPath: contextPath,
TlsCert: options.TlsCert,
TlsKey: options.TlsKey,
LogHealth: options.LogHealth,
EnableMetrics: options.EnableMetrics,
AnonymousGet: options.AnonymousGet,
Depth: options.Depth,
MaxUploadSize: options.MaxUploadSize,
BearerAuth: options.BearerAuth,
AuthType: options.AuthType,
AuthRealm: options.AuthRealm,
AuthService: options.AuthService,
AuthIssuer: options.AuthIssuer,
AuthCertPath: options.AuthCertPath,
})
server, err := mt.NewMultiTenantServer(mt.MultiTenantServerOptions{
Logger: logger,
Router: router,
StorageBackend: options.StorageBackend,
ExternalCacheStore: options.ExternalCacheStore,
ChartURL: strings.TrimSuffix(options.ChartURL, "/"),
ChartPostFormFieldName: options.ChartPostFormFieldName,
ProvPostFormFieldName: options.ProvPostFormFieldName,
MaxStorageObjects: options.MaxStorageObjects,
IndexLimit: options.IndexLimit,
GenIndex: options.GenIndex,
EnableAPI: options.EnableAPI,
UseStatefiles: options.UseStatefiles,
AllowOverwrite: options.AllowOverwrite,
AllowForceOverwrite: options.AllowForceOverwrite,
})
return server, err
}
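// A minimal usage sketch (assuming the local filesystem backend from this
// repo's storage package; constructor names may differ between versions):
//
//	backend := storage.NewLocalFilesystemBackend("/tmp/charts")
//	server, err := NewServer(ServerOptions{StorageBackend: backend})
//	if err != nil {
//		log.Fatal(err)
//	}
//	server.Listen(8080)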
|
return nil, err
|
faqUpdate.ts
|
import { Client, RichEmbed, TextChannel } from "discord.js";
import { Constants } from "../../constants";
export class
|
{
public static async handle(serverBot: Client, faq: any) {
// Get FAQ channel
let faqChannel = serverBot.channels.find(
x => x.type === "text" && (x as TextChannel).name == "f-a-q"
);
// If FAQ channel is found
if (faqChannel) {
// Get as text channel
let channel = faqChannel as TextChannel;
// Try to find discordMessage with id of updated faq item
let message = await channel.fetchMessage(faq.discordMessage.messageId);
// Create faq embed
let faqEmbed = new RichEmbed()
.setTitle("-Q: " + faq.question)
.setDescription("-A: " + faq.answer)
.setColor(Constants.EmbedColors.GREEN);
// Check if resource link is present
if (faq.resourceLink != null) {
// Add resource link to faq embed
faqEmbed.addField(
"Useful Resource: ",
`[${faq.resourceLink.displayName}](${faq.resourceLink.link})`
);
}
// Edit the existing message with the updated embed
message
.edit(faqEmbed)
.then(console.log)
.catch(console.error);
return true;
}
}
}
|
FaqUpdateEvent
|
func_test_helpers.py
|
import sys
offset = "\t\t"
woms_keys = {
"title": "title",
"composer": "composer",
"mm_uid": "mm-uid"
}
def
|
(element, one, another):
print(f"{offset} asserting that {element} is {one} ...")
try:
assert one == another
print(f"{offset}\u2713 {one} is {element}!")
except AssertionError as e:
sys.exit(f"\nERROR: {element} {one} does not equal {another}!")
# TODO: report a failure gracefully
def assert_same_title(test_case_name, whatson, playout, system):
response_title = whatson.get(woms_keys["title"])
expected_title = playout.get(system.norm_keys["title"])
assert_and_report(test_case_name, response_title, expected_title)
def assert_same_composer(test_case_name, whatson, playout, system):
response_composer = whatson.get(woms_keys["composer"])
expected_composer = playout.get(system.norm_keys["composer"])
assert_and_report(test_case_name, response_composer, expected_composer)
def assert_same_id(test_case_name, whatson, playout, system):
response_mmid = whatson.get(woms_keys["mm_uid"])
expected_mmid = playout.get(system.norm_keys["mm_uid"])
assert_and_report(test_case_name, response_mmid, expected_mmid)
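# A minimal usage sketch (the `system` object is hypothetical; it only needs a
# `norm_keys` mapping analogous to `woms_keys` above):
#
#   whatson = {"title": "Adagio", "composer": "Albinoni", "mm-uid": "42"}
#   playout = {"title": "Adagio", "composer": "Albinoni", "mm_uid": "42"}
#   assert_same_title("smoke test", whatson, playout, system)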
|
assert_and_report
|
datepicker.js
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
import { Injectable, NgModule, ChangeDetectionStrategy, ChangeDetectorRef, Component, EventEmitter, forwardRef, Inject, Input, Optional, Output, ViewChild, ViewEncapsulation, ElementRef, NgZone, InjectionToken, ViewContainerRef, Directive, ContentChild, defineInjectable } from '@angular/core';
import { Subject, merge, Subscription, of } from 'rxjs';
import { take, filter } from 'rxjs/operators';
import { DOWN_ARROW, END, ENTER, HOME, LEFT_ARROW, PAGE_DOWN, PAGE_UP, RIGHT_ARROW, UP_ARROW, ESCAPE } from '@angular/cdk/keycodes';
import { DateAdapter, MAT_DATE_FORMATS, mixinColor } from '@angular/material/core';
import { Directionality } from '@angular/cdk/bidi';
import { ComponentPortal, PortalModule } from '@angular/cdk/portal';
import { animate, state, style, transition, trigger, group, query, animateChild } from '@angular/animations';
import { coerceBooleanProperty } from '@angular/cdk/coercion';
import { Overlay, OverlayConfig, OverlayModule } from '@angular/cdk/overlay';
import { DOCUMENT, CommonModule } from '@angular/common';
import { MatDialog, MatDialogModule } from '@angular/material/dialog';
import { NG_VALIDATORS, NG_VALUE_ACCESSOR, Validators } from '@angular/forms';
import { MatFormField } from '@angular/material/form-field';
import { MAT_INPUT_VALUE_ACCESSOR } from '@angular/material/input';
import { A11yModule } from '@angular/cdk/a11y';
import { MatButtonModule } from '@angular/material/button';
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* \@docs-private
* @param {?} provider
* @return {?}
*/
function createMissingDateImplError(provider) {
return Error(`MatDatepicker: No provider found for ${provider}. You must import one of the following ` +
`modules at your application root: MatNativeDateModule, MatMomentDateModule, or provide a ` +
`custom implementation.`);
}
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* Datepicker data that requires internationalization.
*/
class MatDatepickerIntl {
constructor() {
/**
* Stream that emits whenever the labels here are changed. Use this to notify
* components if the labels have changed after initialization.
*/
this.changes = new Subject();
/**
* A label for the calendar popup (used by screen readers).
*/
this.calendarLabel = 'Calendar';
/**
* A label for the button used to open the calendar popup (used by screen readers).
*/
this.openCalendarLabel = 'Open calendar';
/**
* A label for the previous month button (used by screen readers).
*/
this.prevMonthLabel = 'Previous month';
/**
* A label for the next month button (used by screen readers).
*/
this.nextMonthLabel = 'Next month';
/**
* A label for the previous year button (used by screen readers).
*/
this.prevYearLabel = 'Previous year';
/**
* A label for the next year button (used by screen readers).
*/
this.nextYearLabel = 'Next year';
/**
* A label for the previous multi-year button (used by screen readers).
*/
this.prevMultiYearLabel = 'Previous 20 years';
/**
* A label for the next multi-year button (used by screen readers).
*/
this.nextMultiYearLabel = 'Next 20 years';
/**
* A label for the 'switch to month view' button (used by screen readers).
*/
this.switchToMonthViewLabel = 'Choose date';
/**
* A label for the 'switch to year view' button (used by screen readers).
*/
this.switchToMultiYearViewLabel = 'Choose month and year';
}
}
MatDatepickerIntl.decorators = [
{ type: Injectable, args: [{ providedIn: 'root' },] },
];
/** @nocollapse */ MatDatepickerIntl.ngInjectableDef = defineInjectable({ factory: function MatDatepickerIntl_Factory() { return new MatDatepickerIntl(); }, token: MatDatepickerIntl, providedIn: "root" });
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* An internal class that represents the data corresponding to a single calendar cell.
* \@docs-private
*/
class MatCalendarCell {
/**
* @param {?} value
* @param {?} displayValue
* @param {?} ariaLabel
* @param {?} enabled
*/
constructor(value, displayValue, ariaLabel, enabled) {
this.value = value;
this.displayValue = displayValue;
this.ariaLabel = ariaLabel;
this.enabled = enabled;
}
}
/**
* An internal component used to display calendar data in a table.
* \@docs-private
*/
class MatCalendarBody {
/**
* @param {?} _elementRef
* @param {?} _ngZone
*/
constructor(_elementRef, _ngZone) {
this._elementRef = _elementRef;
this._ngZone = _ngZone;
/**
* The number of columns in the table.
*/
this.numCols = 7;
/**
* Whether to allow selection of disabled cells.
*/
this.allowDisabledSelection = false;
/**
* The cell number of the active cell in the table.
*/
this.activeCell = 0;
/**
* The aspect ratio (width / height) to use for the cells in the table. This aspect ratio will be
* maintained even as the table resizes.
*/
this.cellAspectRatio = 1;
/**
* Emits when a new value is selected.
*/
this.selectedValueChange = new EventEmitter();
}
/**
* @param {?} cell
* @return {?}
*/
_cellClicked(cell) {
if (!this.allowDisabledSelection && !cell.enabled) {
return;
}
this.selectedValueChange.emit(cell.value);
}
/**
* The number of blank cells to put at the beginning for the first row.
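* For example, with 7 columns and a 4-cell first row, the offset is 3.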
* @return {?}
*/
get _firstRowOffset() {
return this.rows && this.rows.length && this.rows[0].length ?
this.numCols - this.rows[0].length : 0;
}
/**
* @param {?} rowIndex
* @param {?} colIndex
* @return {?}
*/
_isActiveCell(rowIndex, colIndex) {
let /** @type {?} */ cellNumber = rowIndex * this.numCols + colIndex;
// Account for the fact that the first row may not have as many cells.
if (rowIndex) {
cellNumber -= this._firstRowOffset;
}
return cellNumber == this.activeCell;
}
/**
* Focuses the active cell after the microtask queue is empty.
* @return {?}
*/
_focusActiveCell() {
this._ngZone.runOutsideAngular(() => {
this._ngZone.onStable.asObservable().pipe(take(1)).subscribe(() => {
this._elementRef.nativeElement.querySelector('.mat-calendar-body-active').focus();
});
});
}
}
MatCalendarBody.decorators = [
{ type: Component, args: [{selector: '[mat-calendar-body]',
template: "<tr *ngIf=\"_firstRowOffset < labelMinRequiredCells\" aria-hidden=\"true\"><td class=\"mat-calendar-body-label\" [attr.colspan]=\"numCols\" [style.paddingTop.%]=\"50 * cellAspectRatio / numCols\" [style.paddingBottom.%]=\"50 * cellAspectRatio / numCols\">{{label}}</td></tr><tr *ngFor=\"let row of rows; let rowIndex = index\" role=\"row\"><td *ngIf=\"rowIndex === 0 && _firstRowOffset\" aria-hidden=\"true\" class=\"mat-calendar-body-label\" [attr.colspan]=\"_firstRowOffset\" [style.paddingTop.%]=\"50 * cellAspectRatio / numCols\" [style.paddingBottom.%]=\"50 * cellAspectRatio / numCols\">{{_firstRowOffset >= labelMinRequiredCells ? label : ''}}</td><td *ngFor=\"let item of row; let colIndex = index\" role=\"gridcell\" class=\"mat-calendar-body-cell\" [tabindex]=\"_isActiveCell(rowIndex, colIndex) ? 0 : -1\" [class.mat-calendar-body-disabled]=\"!item.enabled\" [class.mat-calendar-body-active]=\"_isActiveCell(rowIndex, colIndex)\" [attr.aria-label]=\"item.ariaLabel\" [attr.aria-disabled]=\"!item.enabled || null\" [attr.aria-selected]=\"selectedValue === item.value\" (click)=\"_cellClicked(item)\" [style.width.%]=\"100 / numCols\" [style.paddingTop.%]=\"50 * cellAspectRatio / numCols\" [style.paddingBottom.%]=\"50 * cellAspectRatio / numCols\"><div class=\"mat-calendar-body-cell-content\" [class.mat-calendar-body-selected]=\"selectedValue === item.value\" [class.mat-calendar-body-today]=\"todayValue === item.value\">{{item.displayValue}}</div></td></tr>",
styles: [".mat-calendar-body{min-width:224px}.mat-calendar-body-label{height:0;line-height:0;text-align:left;padding-left:4.71429%;padding-right:4.71429%}.mat-calendar-body-cell{position:relative;height:0;line-height:0;text-align:center;outline:0;cursor:pointer}.mat-calendar-body-disabled{cursor:default}.mat-calendar-body-cell-content{position:absolute;top:5%;left:5%;display:flex;align-items:center;justify-content:center;box-sizing:border-box;width:90%;height:90%;line-height:1;border-width:1px;border-style:solid;border-radius:999px}@media screen and (-ms-high-contrast:active){.mat-calendar-body-cell-content{border:none}}@media screen and (-ms-high-contrast:active){.mat-calendar-body-selected,.mat-datepicker-popup:not(:empty){outline:solid 1px}.mat-calendar-body-today{outline:dotted 1px}}[dir=rtl] .mat-calendar-body-label{text-align:right}"],
host: {
'class': 'mat-calendar-body',
'role': 'grid',
'attr.aria-readonly': 'true'
},
exportAs: 'matCalendarBody',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush,
},] },
];
/** @nocollapse */
MatCalendarBody.ctorParameters = () => [
{ type: ElementRef, },
{ type: NgZone, },
];
MatCalendarBody.propDecorators = {
"label": [{ type: Input },],
"rows": [{ type: Input },],
"todayValue": [{ type: Input },],
"selectedValue": [{ type: Input },],
"labelMinRequiredCells": [{ type: Input },],
"numCols": [{ type: Input },],
"allowDisabledSelection": [{ type: Input },],
"activeCell": [{ type: Input },],
"cellAspectRatio": [{ type: Input },],
"selectedValueChange": [{ type: Output },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
const /** @type {?} */ DAYS_PER_WEEK = 7;
/**
* An internal component used to display a single month in the datepicker.
* \@docs-private
* @template D
*/
class MatMonthView {
/**
* @param {?} _changeDetectorRef
* @param {?} _dateFormats
* @param {?} _dateAdapter
* @param {?=} _dir
*/
constructor(_changeDetectorRef, _dateFormats, _dateAdapter, _dir) {
this._changeDetectorRef = _changeDetectorRef;
this._dateFormats = _dateFormats;
this._dateAdapter = _dateAdapter;
this._dir = _dir;
/**
* Emits when a new date is selected.
*/
this.selectedChange = new EventEmitter();
/**
* Emits when any date is selected.
*/
this._userSelection = new EventEmitter();
/**
* Emits when any date is activated.
*/
this.activeDateChange = new EventEmitter();
if (!this._dateAdapter) {
throw createMissingDateImplError('DateAdapter');
}
if (!this._dateFormats) {
throw createMissingDateImplError('MAT_DATE_FORMATS');
}
const /** @type {?} */ firstDayOfWeek = this._dateAdapter.getFirstDayOfWeek();
const /** @type {?} */ narrowWeekdays = this._dateAdapter.getDayOfWeekNames('narrow');
const /** @type {?} */ longWeekdays = this._dateAdapter.getDayOfWeekNames('long');
// Rotate the labels for days of the week based on the configured first day of the week.
let /** @type {?} */ weekdays = longWeekdays.map((long, i) => {
return { long, narrow: narrowWeekdays[i] };
});
this._weekdays = weekdays.slice(firstDayOfWeek).concat(weekdays.slice(0, firstDayOfWeek));
this._activeDate = this._dateAdapter.today();
}
/**
* The date to display in this month view (everything other than the month and year is ignored).
* @return {?}
*/
get activeDate() { return this._activeDate; }
/**
* @param {?} value
* @return {?}
*/
set activeDate(value) {
const /** @type {?} */ oldActiveDate = this._activeDate;
const /** @type {?} */ validDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value)) || this._dateAdapter.today();
this._activeDate = this._dateAdapter.clampDate(validDate, this.minDate, this.maxDate);
if (!this._hasSameMonthAndYear(oldActiveDate, this._activeDate)) {
this._init();
}
}
/**
* The currently selected date.
* @return {?}
*/
get selected() { return this._selected; }
/**
* @param {?} value
* @return {?}
*/
set selected(value) {
this._selected = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
this._selectedDate = this._getDateInCurrentMonth(this._selected);
}
/**
* The minimum selectable date.
* @return {?}
*/
get minDate() { return this._minDate; }
/**
* @param {?} value
* @return {?}
*/
set minDate(value) {
this._minDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* The maximum selectable date.
* @return {?}
*/
get maxDate() { return this._maxDate; }
/**
* @param {?} value
* @return {?}
*/
set maxDate(value) {
this._maxDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* @return {?}
*/
ngAfterContentInit() {
this._init();
}
/**
* Handles when a new date is selected.
* @param {?} date
* @return {?}
*/
_dateSelected(date) {
if (this._selectedDate != date) {
const /** @type {?} */ selectedYear = this._dateAdapter.getYear(this.activeDate);
const /** @type {?} */ selectedMonth = this._dateAdapter.getMonth(this.activeDate);
const /** @type {?} */ selectedDate = this._dateAdapter.createDate(selectedYear, selectedMonth, date);
this.selectedChange.emit(selectedDate);
}
this._userSelection.emit();
}
/**
* Handles keydown events on the calendar body when calendar is in month view.
* @param {?} event
* @return {?}
*/
_handleCalendarBodyKeydown(event) {
// TODO(mmalerba): We currently allow keyboard navigation to disabled dates, but just prevent
// disabled ones from being selected. This may not be ideal, we should look into whether
// navigation should skip over disabled dates, and if so, how to implement that efficiently.
const /** @type {?} */ oldActiveDate = this._activeDate;
const /** @type {?} */ isRtl = this._isRtl();
switch (event.keyCode) {
case LEFT_ARROW:
this.activeDate = this._dateAdapter.addCalendarDays(this._activeDate, isRtl ? 1 : -1);
break;
case RIGHT_ARROW:
this.activeDate = this._dateAdapter.addCalendarDays(this._activeDate, isRtl ? -1 : 1);
break;
case UP_ARROW:
this.activeDate = this._dateAdapter.addCalendarDays(this._activeDate, -7);
break;
case DOWN_ARROW:
this.activeDate = this._dateAdapter.addCalendarDays(this._activeDate, 7);
break;
case HOME:
this.activeDate = this._dateAdapter.addCalendarDays(this._activeDate, 1 - this._dateAdapter.getDate(this._activeDate));
break;
case END:
this.activeDate = this._dateAdapter.addCalendarDays(this._activeDate, (this._dateAdapter.getNumDaysInMonth(this._activeDate) -
this._dateAdapter.getDate(this._activeDate)));
break;
case PAGE_UP:
this.activeDate = event.altKey ?
this._dateAdapter.addCalendarYears(this._activeDate, -1) :
this._dateAdapter.addCalendarMonths(this._activeDate, -1);
break;
case PAGE_DOWN:
this.activeDate = event.altKey ?
this._dateAdapter.addCalendarYears(this._activeDate, 1) :
this._dateAdapter.addCalendarMonths(this._activeDate, 1);
break;
case ENTER:
if (!this.dateFilter || this.dateFilter(this._activeDate)) {
this._dateSelected(this._dateAdapter.getDate(this._activeDate));
this._userSelection.emit();
// Prevent unexpected default actions such as form submission.
event.preventDefault();
}
return;
default:
// Don't prevent default or focus active cell on keys that we don't explicitly handle.
return;
}
if (this._dateAdapter.compareDate(oldActiveDate, this.activeDate)) {
this.activeDateChange.emit(this.activeDate);
}
this._focusActiveCell();
// Prevent unexpected default actions such as form submission.
event.preventDefault();
}
/**
* Initializes this month view.
* @return {?}
*/
_init() {
this._selectedDate = this._getDateInCurrentMonth(this.selected);
this._todayDate = this._getDateInCurrentMonth(this._dateAdapter.today());
this._monthLabel =
this._dateAdapter.getMonthNames('short')[this._dateAdapter.getMonth(this.activeDate)]
.toLocaleUpperCase();
let /** @type {?} */ firstOfMonth = this._dateAdapter.createDate(this._dateAdapter.getYear(this.activeDate), this._dateAdapter.getMonth(this.activeDate), 1);
this._firstWeekOffset =
(DAYS_PER_WEEK + this._dateAdapter.getDayOfWeek(firstOfMonth) -
this._dateAdapter.getFirstDayOfWeek()) % DAYS_PER_WEEK;
this._createWeekCells();
this._changeDetectorRef.markForCheck();
}
/**
* Focuses the active cell after the microtask queue is empty.
* @return {?}
*/
_focusActiveCell() {
this._matCalendarBody._focusActiveCell();
}
/**
* Creates MatCalendarCells for the dates in this month.
* @return {?}
*/
_createWeekCells() {
const /** @type {?} */ daysInMonth = this._dateAdapter.getNumDaysInMonth(this.activeDate);
const /** @type {?} */ dateNames = this._dateAdapter.getDateNames();
this._weeks = [[]];
for (let /** @type {?} */ i = 0, /** @type {?} */ cell = this._firstWeekOffset; i < daysInMonth; i++, cell++) {
if (cell == DAYS_PER_WEEK) {
this._weeks.push([]);
cell = 0;
}
const /** @type {?} */ date = this._dateAdapter.createDate(this._dateAdapter.getYear(this.activeDate), this._dateAdapter.getMonth(this.activeDate), i + 1);
const /** @type {?} */ enabled = this._shouldEnableDate(date);
const /** @type {?} */ ariaLabel = this._dateAdapter.format(date, this._dateFormats.display.dateA11yLabel);
this._weeks[this._weeks.length - 1]
.push(new MatCalendarCell(i + 1, dateNames[i], ariaLabel, enabled));
}
}
/**
* Date filter for the month
* @param {?} date
* @return {?}
*/
_shouldEnableDate(date) {
return !!date &&
(!this.dateFilter || this.dateFilter(date)) &&
(!this.minDate || this._dateAdapter.compareDate(date, this.minDate) >= 0) &&
(!this.maxDate || this._dateAdapter.compareDate(date, this.maxDate) <= 0);
}
/**
* Gets the date in this month that the given Date falls on.
* Returns null if the given Date is in another month.
* @param {?} date
* @return {?}
*/
_getDateInCurrentMonth(date) {
return date && this._hasSameMonthAndYear(date, this.activeDate) ?
this._dateAdapter.getDate(date) : null;
}
/**
* Checks whether the 2 dates are non-null and fall within the same month of the same year.
* @param {?} d1
* @param {?} d2
* @return {?}
*/
_hasSameMonthAndYear(d1, d2) {
return !!(d1 && d2 && this._dateAdapter.getMonth(d1) == this._dateAdapter.getMonth(d2) &&
this._dateAdapter.getYear(d1) == this._dateAdapter.getYear(d2));
}
/**
* @param {?} obj The object to check.
* @return {?} The given object if it is both a date instance and valid, otherwise null.
*/
_getValidDateOrNull(obj) {
return (this._dateAdapter.isDateInstance(obj) && this._dateAdapter.isValid(obj)) ? obj : null;
}
/**
* Determines whether the user has the RTL layout direction.
* @return {?}
*/
_isRtl() {
return this._dir && this._dir.value === 'rtl';
}
}
MatMonthView.decorators = [
{ type: Component, args: [{selector: 'mat-month-view',
template: "<table class=\"mat-calendar-table\"><thead class=\"mat-calendar-table-header\"><tr><th *ngFor=\"let day of _weekdays\" [attr.aria-label]=\"day.long\">{{day.narrow}}</th></tr><tr><th class=\"mat-calendar-table-header-divider\" colspan=\"7\" aria-hidden=\"true\"></th></tr></thead><tbody mat-calendar-body [label]=\"_monthLabel\" [rows]=\"_weeks\" [todayValue]=\"_todayDate\" [selectedValue]=\"_selectedDate\" [labelMinRequiredCells]=\"3\" [activeCell]=\"_dateAdapter.getDate(activeDate) - 1\" (selectedValueChange)=\"_dateSelected($event)\" (keydown)=\"_handleCalendarBodyKeydown($event)\"></tbody></table>",
exportAs: 'matMonthView',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush
},] },
];
/** @nocollapse */
MatMonthView.ctorParameters = () => [
{ type: ChangeDetectorRef, },
{ type: undefined, decorators: [{ type: Optional }, { type: Inject, args: [MAT_DATE_FORMATS,] },] },
{ type: DateAdapter, decorators: [{ type: Optional },] },
{ type: Directionality, decorators: [{ type: Optional },] },
];
MatMonthView.propDecorators = {
"activeDate": [{ type: Input },],
"selected": [{ type: Input },],
"minDate": [{ type: Input },],
"maxDate": [{ type: Input },],
"dateFilter": [{ type: Input },],
"selectedChange": [{ type: Output },],
"_userSelection": [{ type: Output },],
"activeDateChange": [{ type: Output },],
"_matCalendarBody": [{ type: ViewChild, args: [MatCalendarBody,] },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
const /** @type {?} */ yearsPerPage = 24;
const /** @type {?} */ yearsPerRow = 4;
/**
* An internal component used to display a year selector in the datepicker.
* \@docs-private
* @template D
*/
class MatMultiYearView {
/**
* @param {?} _changeDetectorRef
* @param {?} _dateAdapter
* @param {?=} _dir
*/
constructor(_changeDetectorRef, _dateAdapter, _dir) {
this._changeDetectorRef = _changeDetectorRef;
this._dateAdapter = _dateAdapter;
this._dir = _dir;
/**
* Emits when a new year is selected.
*/
this.selectedChange = new EventEmitter();
/**
* Emits the selected year. This doesn't imply a change to the selected date.
*/
this.yearSelected = new EventEmitter();
/**
* Emits when any date is activated.
*/
this.activeDateChange = new EventEmitter();
if (!this._dateAdapter) {
throw createMissingDateImplError('DateAdapter');
}
this._activeDate = this._dateAdapter.today();
}
/**
* The date to display in this multi-year view (everything other than the year is ignored).
* @return {?}
*/
get activeDate() { return this._activeDate; }
/**
* @param {?} value
* @return {?}
*/
set activeDate(value) {
let /** @type {?} */ oldActiveDate = this._activeDate;
const /** @type {?} */ validDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value)) || this._dateAdapter.today();
this._activeDate = this._dateAdapter.clampDate(validDate, this.minDate, this.maxDate);
if (Math.floor(this._dateAdapter.getYear(oldActiveDate) / yearsPerPage) !=
Math.floor(this._dateAdapter.getYear(this._activeDate) / yearsPerPage)) {
this._init();
}
}
/**
* The currently selected date.
* @return {?}
*/
get selected() { return this._selected; }
/**
* @param {?} value
* @return {?}
*/
set selected(value) {
this._selected = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
this._selectedYear = this._selected && this._dateAdapter.getYear(this._selected);
}
/**
* The minimum selectable date.
* @return {?}
*/
get minDate() { return this._minDate; }
/**
* @param {?} value
* @return {?}
*/
set minDate(value) {
this._minDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* The maximum selectable date.
* @return {?}
*/
get maxDate() { return this._maxDate; }
/**
* @param {?} value
* @return {?}
*/
set maxDate(value) {
this._maxDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* @return {?}
*/
ngAfterContentInit() {
this._init();
}
/**
* Initializes this multi-year view.
* @return {?}
*/
_init() {
this._todayYear = this._dateAdapter.getYear(this._dateAdapter.today());
let /** @type {?} */ activeYear = this._dateAdapter.getYear(this._activeDate);
let /** @type {?} */ activeOffset = activeYear % yearsPerPage;
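// e.g. an active year of 2024 with 24 years per page gives offset 8, so
// this page spans 2016..2039.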
this._years = [];
for (let /** @type {?} */ i = 0, /** @type {?} */ row = []; i < yearsPerPage; i++) {
row.push(activeYear - activeOffset + i);
if (row.length == yearsPerRow) {
this._years.push(row.map(year => this._createCellForYear(year)));
row = [];
}
}
this._changeDetectorRef.markForCheck();
}
/**
* Handles when a new year is selected.
* @param {?} year
* @return {?}
*/
_yearSelected(year) {
this.yearSelected.emit(this._dateAdapter.createDate(year, 0, 1));
let /** @type {?} */ month = this._dateAdapter.getMonth(this.activeDate);
let /** @type {?} */ daysInMonth = this._dateAdapter.getNumDaysInMonth(this._dateAdapter.createDate(year, month, 1));
this.selectedChange.emit(this._dateAdapter.createDate(year, month, Math.min(this._dateAdapter.getDate(this.activeDate), daysInMonth)));
}
/**
* Handles keydown events on the calendar body when calendar is in multi-year view.
* @param {?} event
* @return {?}
*/
_handleCalendarBodyKeydown(event) {
// TODO(mmalerba): We currently allow keyboard navigation to disabled dates, but just prevent
// disabled ones from being selected. This may not be ideal, we should look into whether
// navigation should skip over disabled dates, and if so, how to implement that efficiently.
const /** @type {?} */ oldActiveDate = this._activeDate;
const /** @type {?} */ isRtl = this._isRtl();
switch (event.keyCode) {
case LEFT_ARROW:
this.activeDate = this._dateAdapter.addCalendarYears(this._activeDate, isRtl ? 1 : -1);
break;
case RIGHT_ARROW:
this.activeDate = this._dateAdapter.addCalendarYears(this._activeDate, isRtl ? -1 : 1);
break;
case UP_ARROW:
this.activeDate = this._dateAdapter.addCalendarYears(this._activeDate, -yearsPerRow);
break;
case DOWN_ARROW:
this.activeDate = this._dateAdapter.addCalendarYears(this._activeDate, yearsPerRow);
break;
case HOME:
this.activeDate = this._dateAdapter.addCalendarYears(this._activeDate, -this._dateAdapter.getYear(this._activeDate) % yearsPerPage);
break;
case END:
this.activeDate = this._dateAdapter.addCalendarYears(this._activeDate, yearsPerPage - this._dateAdapter.getYear(this._activeDate) % yearsPerPage - 1);
break;
case PAGE_UP:
this.activeDate =
this._dateAdapter.addCalendarYears(this._activeDate, event.altKey ? -yearsPerPage * 10 : -yearsPerPage);
break;
case PAGE_DOWN:
this.activeDate =
this._dateAdapter.addCalendarYears(this._activeDate, event.altKey ? yearsPerPage * 10 : yearsPerPage);
break;
case ENTER:
this._yearSelected(this._dateAdapter.getYear(this._activeDate));
break;
default:
// Don't prevent default or focus active cell on keys that we don't explicitly handle.
return;
}
if (this._dateAdapter.compareDate(oldActiveDate, this.activeDate)) {
this.activeDateChange.emit(this.activeDate);
}
this._focusActiveCell();
// Prevent unexpected default actions such as form submission.
event.preventDefault();
}
/**
* @return {?}
*/
_getActiveCell() {
return this._dateAdapter.getYear(this.activeDate) % yearsPerPage;
}
/**
* Focuses the active cell after the microtask queue is empty.
* @return {?}
*/
_focusActiveCell() {
this._matCalendarBody._focusActiveCell();
}
/**
* Creates an MatCalendarCell for the given year.
* @param {?} year
* @return {?}
*/
_createCellForYear(year) {
let /** @type {?} */ yearName = this._dateAdapter.getYearName(this._dateAdapter.createDate(year, 0, 1));
return new MatCalendarCell(year, yearName, yearName, this._shouldEnableYear(year));
}
/**
* Whether the given year is enabled.
* @param {?} year
* @return {?}
*/
_shouldEnableYear(year) {
// disable if the year is greater than maxDate or lower than minDate
if (year === undefined || year === null ||
(this.maxDate && year > this._dateAdapter.getYear(this.maxDate)) ||
(this.minDate && year < this._dateAdapter.getYear(this.minDate))) {
return false;
}
// enable if it reaches here and there's no filter defined
if (!this.dateFilter) {
return true;
}
const /** @type {?} */ firstOfYear = this._dateAdapter.createDate(year, 0, 1);
// If any date in the year is enabled count the year as enabled.
for (let /** @type {?} */ date = firstOfYear; this._dateAdapter.getYear(date) == year; date = this._dateAdapter.addCalendarDays(date, 1)) {
if (this.dateFilter(date)) {
return true;
}
}
return false;
}
/**
* @param {?} obj The object to check.
* @return {?} The given object if it is both a date instance and valid, otherwise null.
*/
_getValidDateOrNull(obj) {
return (this._dateAdapter.isDateInstance(obj) && this._dateAdapter.isValid(obj)) ? obj : null;
}
/**
* Determines whether the user has the RTL layout direction.
* @return {?}
*/
_isRtl() {
return this._dir && this._dir.value === 'rtl';
}
}
MatMultiYearView.decorators = [
{ type: Component, args: [{selector: 'mat-multi-year-view',
template: "<table class=\"mat-calendar-table\"><thead class=\"mat-calendar-table-header\"><tr><th class=\"mat-calendar-table-header-divider\" colspan=\"4\"></th></tr></thead><tbody mat-calendar-body allowDisabledSelection=\"true\" [rows]=\"_years\" [todayValue]=\"_todayYear\" [selectedValue]=\"_selectedYear\" [numCols]=\"4\" [cellAspectRatio]=\"4 / 7\" [activeCell]=\"_getActiveCell()\" (selectedValueChange)=\"_yearSelected($event)\" (keydown)=\"_handleCalendarBodyKeydown($event)\"></tbody></table>",
exportAs: 'matMultiYearView',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush
},] },
];
/** @nocollapse */
MatMultiYearView.ctorParameters = () => [
{ type: ChangeDetectorRef, },
{ type: DateAdapter, decorators: [{ type: Optional },] },
{ type: Directionality, decorators: [{ type: Optional },] },
];
MatMultiYearView.propDecorators = {
"activeDate": [{ type: Input },],
"selected": [{ type: Input },],
"minDate": [{ type: Input },],
"maxDate": [{ type: Input },],
"dateFilter": [{ type: Input },],
"selectedChange": [{ type: Output },],
"yearSelected": [{ type: Output },],
"activeDateChange": [{ type: Output },],
"_matCalendarBody": [{ type: ViewChild, args: [MatCalendarBody,] },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* An internal component used to display a single year in the datepicker.
* \@docs-private
* @template D
*/
class
|
{
/**
* @param {?} _changeDetectorRef
* @param {?} _dateFormats
* @param {?} _dateAdapter
* @param {?=} _dir
*/
constructor(_changeDetectorRef, _dateFormats, _dateAdapter, _dir) {
this._changeDetectorRef = _changeDetectorRef;
this._dateFormats = _dateFormats;
this._dateAdapter = _dateAdapter;
this._dir = _dir;
/**
* Emits when a new month is selected.
*/
this.selectedChange = new EventEmitter();
/**
* Emits the selected month. This doesn't imply a change on the selected date
*/
this.monthSelected = new EventEmitter();
/**
* Emits when any date is activated.
*/
this.activeDateChange = new EventEmitter();
if (!this._dateAdapter) {
throw createMissingDateImplError('DateAdapter');
}
if (!this._dateFormats) {
throw createMissingDateImplError('MAT_DATE_FORMATS');
}
this._activeDate = this._dateAdapter.today();
}
/**
* The date to display in this year view (everything other than the year is ignored).
* @return {?}
*/
get activeDate() { return this._activeDate; }
/**
* @param {?} value
* @return {?}
*/
set activeDate(value) {
let /** @type {?} */ oldActiveDate = this._activeDate;
const /** @type {?} */ validDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value)) || this._dateAdapter.today();
this._activeDate = this._dateAdapter.clampDate(validDate, this.minDate, this.maxDate);
if (this._dateAdapter.getYear(oldActiveDate) !== this._dateAdapter.getYear(this._activeDate)) {
this._init();
}
}
/**
* The currently selected date.
* @return {?}
*/
get selected() { return this._selected; }
/**
* @param {?} value
* @return {?}
*/
set selected(value) {
this._selected = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
this._selectedMonth = this._getMonthInCurrentYear(this._selected);
}
/**
* The minimum selectable date.
* @return {?}
*/
get minDate() { return this._minDate; }
/**
* @param {?} value
* @return {?}
*/
set minDate(value) {
this._minDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* The maximum selectable date.
* @return {?}
*/
get maxDate() { return this._maxDate; }
/**
* @param {?} value
* @return {?}
*/
set maxDate(value) {
this._maxDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* @return {?}
*/
ngAfterContentInit() {
this._init();
}
/**
* Handles when a new month is selected.
* @param {?} month
* @return {?}
*/
_monthSelected(month) {
const /** @type {?} */ normalizedDate = this._dateAdapter.createDate(this._dateAdapter.getYear(this.activeDate), month, 1);
this.monthSelected.emit(normalizedDate);
const /** @type {?} */ daysInMonth = this._dateAdapter.getNumDaysInMonth(normalizedDate);
this.selectedChange.emit(this._dateAdapter.createDate(this._dateAdapter.getYear(this.activeDate), month, Math.min(this._dateAdapter.getDate(this.activeDate), daysInMonth)));
}
/**
* Handles keydown events on the calendar body when calendar is in year view.
* @param {?} event
* @return {?}
*/
_handleCalendarBodyKeydown(event) {
// TODO(mmalerba): We currently allow keyboard navigation to disabled dates, but just prevent
        // disabled ones from being selected. This may not be ideal; we should look into whether
// navigation should skip over disabled dates, and if so, how to implement that efficiently.
const /** @type {?} */ oldActiveDate = this._activeDate;
const /** @type {?} */ isRtl = this._isRtl();
switch (event.keyCode) {
case LEFT_ARROW:
this.activeDate = this._dateAdapter.addCalendarMonths(this._activeDate, isRtl ? 1 : -1);
break;
case RIGHT_ARROW:
this.activeDate = this._dateAdapter.addCalendarMonths(this._activeDate, isRtl ? -1 : 1);
break;
case UP_ARROW:
this.activeDate = this._dateAdapter.addCalendarMonths(this._activeDate, -4);
break;
case DOWN_ARROW:
this.activeDate = this._dateAdapter.addCalendarMonths(this._activeDate, 4);
break;
case HOME:
this.activeDate = this._dateAdapter.addCalendarMonths(this._activeDate, -this._dateAdapter.getMonth(this._activeDate));
break;
case END:
this.activeDate = this._dateAdapter.addCalendarMonths(this._activeDate, 11 - this._dateAdapter.getMonth(this._activeDate));
break;
case PAGE_UP:
this.activeDate =
this._dateAdapter.addCalendarYears(this._activeDate, event.altKey ? -10 : -1);
break;
case PAGE_DOWN:
this.activeDate =
this._dateAdapter.addCalendarYears(this._activeDate, event.altKey ? 10 : 1);
break;
case ENTER:
this._monthSelected(this._dateAdapter.getMonth(this._activeDate));
break;
default:
// Don't prevent default or focus active cell on keys that we don't explicitly handle.
return;
}
if (this._dateAdapter.compareDate(oldActiveDate, this.activeDate)) {
this.activeDateChange.emit(this.activeDate);
}
this._focusActiveCell();
// Prevent unexpected default actions such as form submission.
event.preventDefault();
}
/**
* Initializes this year view.
* @return {?}
*/
_init() {
this._selectedMonth = this._getMonthInCurrentYear(this.selected);
this._todayMonth = this._getMonthInCurrentYear(this._dateAdapter.today());
this._yearLabel = this._dateAdapter.getYearName(this.activeDate);
let /** @type {?} */ monthNames = this._dateAdapter.getMonthNames('short');
// First row of months only contains 5 elements so we can fit the year label on the same row.
this._months = [[0, 1, 2, 3], [4, 5, 6, 7], [8, 9, 10, 11]].map(row => row.map(month => this._createCellForMonth(month, monthNames[month])));
this._changeDetectorRef.markForCheck();
}
/**
* Focuses the active cell after the microtask queue is empty.
* @return {?}
*/
_focusActiveCell() {
this._matCalendarBody._focusActiveCell();
}
/**
* Gets the month in this year that the given Date falls on.
* Returns null if the given Date is in another year.
* @param {?} date
* @return {?}
*/
_getMonthInCurrentYear(date) {
return date && this._dateAdapter.getYear(date) == this._dateAdapter.getYear(this.activeDate) ?
this._dateAdapter.getMonth(date) : null;
}
/**
     * Creates a MatCalendarCell for the given month.
* @param {?} month
* @param {?} monthName
* @return {?}
*/
_createCellForMonth(month, monthName) {
let /** @type {?} */ ariaLabel = this._dateAdapter.format(this._dateAdapter.createDate(this._dateAdapter.getYear(this.activeDate), month, 1), this._dateFormats.display.monthYearA11yLabel);
return new MatCalendarCell(month, monthName.toLocaleUpperCase(), ariaLabel, this._shouldEnableMonth(month));
}
/**
* Whether the given month is enabled.
* @param {?} month
* @return {?}
*/
_shouldEnableMonth(month) {
const /** @type {?} */ activeYear = this._dateAdapter.getYear(this.activeDate);
if (month === undefined || month === null ||
this._isYearAndMonthAfterMaxDate(activeYear, month) ||
this._isYearAndMonthBeforeMinDate(activeYear, month)) {
return false;
}
if (!this.dateFilter) {
return true;
}
const /** @type {?} */ firstOfMonth = this._dateAdapter.createDate(activeYear, month, 1);
// If any date in the month is enabled count the month as enabled.
for (let /** @type {?} */ date = firstOfMonth; this._dateAdapter.getMonth(date) == month; date = this._dateAdapter.addCalendarDays(date, 1)) {
if (this.dateFilter(date)) {
return true;
}
}
return false;
}
/**
* Tests whether the combination month/year is after this.maxDate, considering
* just the month and year of this.maxDate
* @param {?} year
* @param {?} month
* @return {?}
*/
_isYearAndMonthAfterMaxDate(year, month) {
if (this.maxDate) {
const /** @type {?} */ maxYear = this._dateAdapter.getYear(this.maxDate);
const /** @type {?} */ maxMonth = this._dateAdapter.getMonth(this.maxDate);
return year > maxYear || (year === maxYear && month > maxMonth);
}
return false;
}
/**
* Tests whether the combination month/year is before this.minDate, considering
* just the month and year of this.minDate
* @param {?} year
* @param {?} month
* @return {?}
*/
_isYearAndMonthBeforeMinDate(year, month) {
if (this.minDate) {
const /** @type {?} */ minYear = this._dateAdapter.getYear(this.minDate);
const /** @type {?} */ minMonth = this._dateAdapter.getMonth(this.minDate);
return year < minYear || (year === minYear && month < minMonth);
}
return false;
}
/**
* @param {?} obj The object to check.
* @return {?} The given object if it is both a date instance and valid, otherwise null.
*/
_getValidDateOrNull(obj) {
return (this._dateAdapter.isDateInstance(obj) && this._dateAdapter.isValid(obj)) ? obj : null;
}
/**
* Determines whether the user has the RTL layout direction.
* @return {?}
*/
_isRtl() {
return this._dir && this._dir.value === 'rtl';
}
}
MatYearView.decorators = [
{ type: Component, args: [{selector: 'mat-year-view',
template: "<table class=\"mat-calendar-table\"><thead class=\"mat-calendar-table-header\"><tr><th class=\"mat-calendar-table-header-divider\" colspan=\"4\"></th></tr></thead><tbody mat-calendar-body allowDisabledSelection=\"true\" [label]=\"_yearLabel\" [rows]=\"_months\" [todayValue]=\"_todayMonth\" [selectedValue]=\"_selectedMonth\" [labelMinRequiredCells]=\"2\" [numCols]=\"4\" [cellAspectRatio]=\"4 / 7\" [activeCell]=\"_dateAdapter.getMonth(activeDate)\" (selectedValueChange)=\"_monthSelected($event)\" (keydown)=\"_handleCalendarBodyKeydown($event)\"></tbody></table>",
exportAs: 'matYearView',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush
},] },
];
/** @nocollapse */
MatYearView.ctorParameters = () => [
{ type: ChangeDetectorRef, },
{ type: undefined, decorators: [{ type: Optional }, { type: Inject, args: [MAT_DATE_FORMATS,] },] },
{ type: DateAdapter, decorators: [{ type: Optional },] },
{ type: Directionality, decorators: [{ type: Optional },] },
];
MatYearView.propDecorators = {
"activeDate": [{ type: Input },],
"selected": [{ type: Input },],
"minDate": [{ type: Input },],
"maxDate": [{ type: Input },],
"dateFilter": [{ type: Input },],
"selectedChange": [{ type: Output },],
"monthSelected": [{ type: Output },],
"activeDateChange": [{ type: Output },],
"_matCalendarBody": [{ type: ViewChild, args: [MatCalendarBody,] },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* Default header for MatCalendar
* @template D
*/
class MatCalendarHeader {
/**
* @param {?} _intl
* @param {?} calendar
* @param {?} _dateAdapter
* @param {?} _dateFormats
* @param {?} changeDetectorRef
*/
constructor(_intl, calendar, _dateAdapter, _dateFormats, changeDetectorRef) {
this._intl = _intl;
this.calendar = calendar;
this._dateAdapter = _dateAdapter;
this._dateFormats = _dateFormats;
this.calendar.stateChanges.subscribe(() => changeDetectorRef.markForCheck());
}
/**
* The label for the current calendar view.
* @return {?}
*/
get periodButtonText() {
if (this.calendar.currentView == 'month') {
return this._dateAdapter
.format(this.calendar.activeDate, this._dateFormats.display.monthYearLabel)
.toLocaleUpperCase();
}
if (this.calendar.currentView == 'year') {
return this._dateAdapter.getYearName(this.calendar.activeDate);
}
const /** @type {?} */ activeYear = this._dateAdapter.getYear(this.calendar.activeDate);
        const /** @type {?} */ firstYearInView = this._dateAdapter.getYearName(this._dateAdapter.createDate(activeYear - activeYear % yearsPerPage, 0, 1));
        const /** @type {?} */ lastYearInView = this._dateAdapter.getYearName(this._dateAdapter.createDate(activeYear + yearsPerPage - 1 - activeYear % yearsPerPage, 0, 1));
return `${firstYearInView} \u2013 ${lastYearInView}`;
}
/**
* @return {?}
*/
get periodButtonLabel() {
return this.calendar.currentView == 'month' ?
this._intl.switchToMultiYearViewLabel : this._intl.switchToMonthViewLabel;
}
/**
     * The label for the previous button.
* @return {?}
*/
get prevButtonLabel() {
return {
'month': this._intl.prevMonthLabel,
'year': this._intl.prevYearLabel,
'multi-year': this._intl.prevMultiYearLabel
}[this.calendar.currentView];
}
/**
     * The label for the next button.
* @return {?}
*/
get nextButtonLabel() {
return {
'month': this._intl.nextMonthLabel,
'year': this._intl.nextYearLabel,
'multi-year': this._intl.nextMultiYearLabel
}[this.calendar.currentView];
}
/**
* Handles user clicks on the period label.
* @return {?}
*/
currentPeriodClicked() {
this.calendar.currentView = this.calendar.currentView == 'month' ? 'multi-year' : 'month';
}
/**
* Handles user clicks on the previous button.
* @return {?}
*/
previousClicked() {
this.calendar.activeDate = this.calendar.currentView == 'month' ?
this._dateAdapter.addCalendarMonths(this.calendar.activeDate, -1) :
this._dateAdapter.addCalendarYears(this.calendar.activeDate, this.calendar.currentView == 'year' ? -1 : -yearsPerPage);
}
/**
* Handles user clicks on the next button.
* @return {?}
*/
nextClicked() {
this.calendar.activeDate = this.calendar.currentView == 'month' ?
this._dateAdapter.addCalendarMonths(this.calendar.activeDate, 1) :
this._dateAdapter.addCalendarYears(this.calendar.activeDate, this.calendar.currentView == 'year' ? 1 : yearsPerPage);
}
/**
* Whether the previous period button is enabled.
* @return {?}
*/
    previousEnabled() {
        if (!this.calendar.minDate) {
            return true;
        }
        return !this._isSameView(this.calendar.activeDate, this.calendar.minDate);
    }
/**
* Whether the next period button is enabled.
* @return {?}
*/
nextEnabled() {
return !this.calendar.maxDate ||
!this._isSameView(this.calendar.activeDate, this.calendar.maxDate);
}
/**
     * Whether the two dates represent the same view in the current view mode (month, year or multi-year).
* @param {?} date1
* @param {?} date2
* @return {?}
*/
_isSameView(date1, date2) {
if (this.calendar.currentView == 'month') {
return this._dateAdapter.getYear(date1) == this._dateAdapter.getYear(date2) &&
this._dateAdapter.getMonth(date1) == this._dateAdapter.getMonth(date2);
}
if (this.calendar.currentView == 'year') {
return this._dateAdapter.getYear(date1) == this._dateAdapter.getYear(date2);
}
// Otherwise we are in 'multi-year' view.
return Math.floor(this._dateAdapter.getYear(date1) / yearsPerPage) ==
Math.floor(this._dateAdapter.getYear(date2) / yearsPerPage);
}
}
MatCalendarHeader.decorators = [
{ type: Component, args: [{selector: 'mat-calendar-header',
template: "<div class=\"mat-calendar-header\"><div class=\"mat-calendar-controls\"><button mat-button type=\"button\" class=\"mat-calendar-period-button\" (click)=\"currentPeriodClicked()\" [attr.aria-label]=\"periodButtonLabel\" cdkAriaLive=\"polite\">{{periodButtonText}}<div class=\"mat-calendar-arrow\" [class.mat-calendar-invert]=\"calendar.currentView != 'month'\"></div></button><div class=\"mat-calendar-spacer\"></div><button mat-icon-button type=\"button\" class=\"mat-calendar-previous-button\" [disabled]=\"!previousEnabled()\" (click)=\"previousClicked()\" [attr.aria-label]=\"prevButtonLabel\"></button> <button mat-icon-button type=\"button\" class=\"mat-calendar-next-button\" [disabled]=\"!nextEnabled()\" (click)=\"nextClicked()\" [attr.aria-label]=\"nextButtonLabel\"></button></div></div>",
exportAs: 'matCalendarHeader',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush,
},] },
];
/** @nocollapse */
MatCalendarHeader.ctorParameters = () => [
{ type: MatDatepickerIntl, },
{ type: MatCalendar, decorators: [{ type: Inject, args: [forwardRef(() => MatCalendar),] },] },
{ type: DateAdapter, decorators: [{ type: Optional },] },
{ type: undefined, decorators: [{ type: Optional }, { type: Inject, args: [MAT_DATE_FORMATS,] },] },
{ type: ChangeDetectorRef, },
];
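/*
 * Illustrative sketch (not part of the shipped file; the component name is hypothetical):
 * since `MatCalendar` takes a `headerComponent` input and `MatDatepicker` forwards its
 * `calendarHeaderComponent` input to it, an application can swap this default header
 * for its own component:
 *
 *   @Component({selector: 'my-header', template: '...custom controls...'})
 *   class MyCustomHeader {}
 *
 *   // template: <mat-datepicker #picker [calendarHeaderComponent]="customHeader"></mat-datepicker>
 *   // host component: customHeader = MyCustomHeader;
 */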
/**
* A calendar that is used as part of the datepicker.
* \@docs-private
* @template D
*/
class MatCalendar {
/**
* @param {?} _intl
* @param {?} _dateAdapter
* @param {?} _dateFormats
* @param {?} _changeDetectorRef
*/
constructor(_intl, _dateAdapter, _dateFormats, _changeDetectorRef) {
this._dateAdapter = _dateAdapter;
this._dateFormats = _dateFormats;
this._changeDetectorRef = _changeDetectorRef;
/**
* Used for scheduling that focus should be moved to the active cell on the next tick.
* We need to schedule it, rather than do it immediately, because we have to wait
* for Angular to re-evaluate the view children.
*/
this._moveFocusOnNextTick = false;
/**
* Whether the calendar should be started in month or year view.
*/
this.startView = 'month';
/**
* Emits when the currently selected date changes.
*/
this.selectedChange = new EventEmitter();
/**
* Emits the year chosen in multiyear view.
* This doesn't imply a change on the selected date.
*/
this.yearSelected = new EventEmitter();
/**
* Emits the month chosen in year view.
* This doesn't imply a change on the selected date.
*/
this.monthSelected = new EventEmitter();
/**
* Emits when any date is selected.
*/
this._userSelection = new EventEmitter();
/**
* Emits whenever there is a state change that the header may need to respond to.
*/
this.stateChanges = new Subject();
if (!this._dateAdapter) {
throw createMissingDateImplError('DateAdapter');
}
if (!this._dateFormats) {
throw createMissingDateImplError('MAT_DATE_FORMATS');
}
this._intlChanges = _intl.changes.subscribe(() => {
_changeDetectorRef.markForCheck();
this.stateChanges.next();
});
}
/**
* A date representing the period (month or year) to start the calendar in.
* @return {?}
*/
get startAt() { return this._startAt; }
/**
* @param {?} value
* @return {?}
*/
set startAt(value) {
this._startAt = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* The currently selected date.
* @return {?}
*/
get selected() { return this._selected; }
/**
* @param {?} value
* @return {?}
*/
set selected(value) {
this._selected = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* The minimum selectable date.
* @return {?}
*/
get minDate() { return this._minDate; }
/**
* @param {?} value
* @return {?}
*/
set minDate(value) {
this._minDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* The maximum selectable date.
* @return {?}
*/
get maxDate() { return this._maxDate; }
/**
* @param {?} value
* @return {?}
*/
set maxDate(value) {
this._maxDate = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* The current active date. This determines which time period is shown and which date is
* highlighted when using keyboard navigation.
* @return {?}
*/
get activeDate() { return this._clampedActiveDate; }
/**
* @param {?} value
* @return {?}
*/
set activeDate(value) {
this._clampedActiveDate = this._dateAdapter.clampDate(value, this.minDate, this.maxDate);
this.stateChanges.next();
}
/**
     * The view that the calendar is currently in ('month', 'year' or 'multi-year').
* @return {?}
*/
get currentView() { return this._currentView; }
/**
* @param {?} value
* @return {?}
*/
set currentView(value) {
this._currentView = value;
this._moveFocusOnNextTick = true;
}
/**
* @return {?}
*/
ngAfterContentInit() {
this._calendarHeaderPortal = new ComponentPortal(this.headerComponent || MatCalendarHeader);
this.activeDate = this.startAt || this._dateAdapter.today();
// Assign to the private property since we don't want to move focus on init.
this._currentView = this.startView;
}
/**
* @return {?}
*/
ngAfterViewChecked() {
if (this._moveFocusOnNextTick) {
this._moveFocusOnNextTick = false;
this.focusActiveCell();
}
}
/**
* @return {?}
*/
ngOnDestroy() {
this._intlChanges.unsubscribe();
this.stateChanges.complete();
}
/**
* @param {?} changes
* @return {?}
*/
ngOnChanges(changes) {
const /** @type {?} */ change = changes["minDate"] || changes["maxDate"] || changes["dateFilter"];
if (change && !change.firstChange) {
const /** @type {?} */ view = this._getCurrentViewComponent();
if (view) {
// We need to `detectChanges` manually here, because the `minDate`, `maxDate` etc. are
// passed down to the view via data bindings which won't be up-to-date when we call `_init`.
this._changeDetectorRef.detectChanges();
view._init();
}
}
this.stateChanges.next();
}
/**
* @return {?}
*/
focusActiveCell() {
this._getCurrentViewComponent()._focusActiveCell();
}
/**
* Updates today's date after an update of the active date
* @return {?}
*/
updateTodaysDate() {
let /** @type {?} */ view = this.currentView == 'month' ? this.monthView :
(this.currentView == 'year' ? this.yearView : this.multiYearView);
view.ngAfterContentInit();
}
/**
* Handles date selection in the month view.
* @param {?} date
* @return {?}
*/
_dateSelected(date) {
if (!this._dateAdapter.sameDate(date, this.selected)) {
this.selectedChange.emit(date);
}
}
/**
* Handles year selection in the multiyear view.
* @param {?} normalizedYear
* @return {?}
*/
_yearSelectedInMultiYearView(normalizedYear) {
this.yearSelected.emit(normalizedYear);
}
/**
* Handles month selection in the year view.
* @param {?} normalizedMonth
* @return {?}
*/
_monthSelectedInYearView(normalizedMonth) {
this.monthSelected.emit(normalizedMonth);
}
/**
* @return {?}
*/
_userSelected() {
this._userSelection.emit();
}
/**
* Handles year/month selection in the multi-year/year views.
* @param {?} date
* @param {?} view
* @return {?}
*/
_goToDateInView(date, view) {
this.activeDate = date;
this.currentView = view;
}
/**
* @param {?} obj The object to check.
* @return {?} The given object if it is both a date instance and valid, otherwise null.
*/
_getValidDateOrNull(obj) {
return (this._dateAdapter.isDateInstance(obj) && this._dateAdapter.isValid(obj)) ? obj : null;
}
/**
* Returns the component instance that corresponds to the current calendar view.
* @return {?}
*/
_getCurrentViewComponent() {
return this.monthView || this.yearView || this.multiYearView;
}
}
MatCalendar.decorators = [
{ type: Component, args: [{selector: 'mat-calendar',
template: "<ng-template [cdkPortalOutlet]=\"_calendarHeaderPortal\"></ng-template><div class=\"mat-calendar-content\" [ngSwitch]=\"currentView\" cdkMonitorSubtreeFocus tabindex=\"-1\"><mat-month-view *ngSwitchCase=\"'month'\" [(activeDate)]=\"activeDate\" [selected]=\"selected\" [dateFilter]=\"dateFilter\" [maxDate]=\"maxDate\" [minDate]=\"minDate\" (selectedChange)=\"_dateSelected($event)\" (_userSelection)=\"_userSelected()\"></mat-month-view><mat-year-view *ngSwitchCase=\"'year'\" [(activeDate)]=\"activeDate\" [selected]=\"selected\" [dateFilter]=\"dateFilter\" [maxDate]=\"maxDate\" [minDate]=\"minDate\" (monthSelected)=\"_monthSelectedInYearView($event)\" (selectedChange)=\"_goToDateInView($event, 'month')\"></mat-year-view><mat-multi-year-view *ngSwitchCase=\"'multi-year'\" [(activeDate)]=\"activeDate\" [selected]=\"selected\" [dateFilter]=\"dateFilter\" [maxDate]=\"maxDate\" [minDate]=\"minDate\" (yearSelected)=\"_yearSelectedInMultiYearView($event)\" (selectedChange)=\"_goToDateInView($event, 'year')\"></mat-multi-year-view></div>",
styles: [".mat-calendar{display:block}.mat-calendar-header{padding:8px 8px 0 8px}.mat-calendar-content{padding:0 8px 8px 8px;outline:0}.mat-calendar-controls{display:flex;margin:5% calc(33% / 7 - 16px)}.mat-calendar-spacer{flex:1 1 auto}.mat-calendar-period-button{min-width:0}.mat-calendar-arrow{display:inline-block;width:0;height:0;border-left:5px solid transparent;border-right:5px solid transparent;border-top-width:5px;border-top-style:solid;margin:0 0 0 5px;vertical-align:middle}.mat-calendar-arrow.mat-calendar-invert{transform:rotate(180deg)}[dir=rtl] .mat-calendar-arrow{margin:0 5px 0 0}.mat-calendar-next-button,.mat-calendar-previous-button{position:relative}.mat-calendar-next-button::after,.mat-calendar-previous-button::after{top:0;left:0;right:0;bottom:0;position:absolute;content:'';margin:15.5px;border:0 solid currentColor;border-top-width:2px}[dir=rtl] .mat-calendar-next-button,[dir=rtl] .mat-calendar-previous-button{transform:rotate(180deg)}.mat-calendar-previous-button::after{border-left-width:2px;transform:translateX(2px) rotate(-45deg)}.mat-calendar-next-button::after{border-right-width:2px;transform:translateX(-2px) rotate(45deg)}.mat-calendar-table{border-spacing:0;border-collapse:collapse;width:100%}.mat-calendar-table-header th{text-align:center;padding:0 0 8px 0}.mat-calendar-table-header-divider{position:relative;height:1px}.mat-calendar-table-header-divider::after{content:'';position:absolute;top:0;left:-8px;right:-8px;height:1px}"],
host: {
'class': 'mat-calendar',
},
exportAs: 'matCalendar',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush,
},] },
];
/** @nocollapse */
MatCalendar.ctorParameters = () => [
{ type: MatDatepickerIntl, },
{ type: DateAdapter, decorators: [{ type: Optional },] },
{ type: undefined, decorators: [{ type: Optional }, { type: Inject, args: [MAT_DATE_FORMATS,] },] },
{ type: ChangeDetectorRef, },
];
MatCalendar.propDecorators = {
"headerComponent": [{ type: Input },],
"startAt": [{ type: Input },],
"startView": [{ type: Input },],
"selected": [{ type: Input },],
"minDate": [{ type: Input },],
"maxDate": [{ type: Input },],
"dateFilter": [{ type: Input },],
"selectedChange": [{ type: Output },],
"yearSelected": [{ type: Output },],
"monthSelected": [{ type: Output },],
"_userSelection": [{ type: Output },],
"monthView": [{ type: ViewChild, args: [MatMonthView,] },],
"yearView": [{ type: ViewChild, args: [MatYearView,] },],
"multiYearView": [{ type: ViewChild, args: [MatMultiYearView,] },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* Animations used by the Material datepicker.
*/
const /** @type {?} */ matDatepickerAnimations = {
/** Transforms the height of the datepicker's calendar. */
transformPanel: trigger('transformPanel', [
state('void', style({ opacity: 0, transform: 'scale(1, 0)' })),
state('enter', style({ opacity: 1, transform: 'scale(1, 1)' })),
transition('void => enter', group([
query('@fadeInCalendar', animateChild()),
animate('400ms cubic-bezier(0.25, 0.8, 0.25, 1)')
])),
transition('* => void', animate('100ms linear', style({ opacity: 0 })))
]),
/** Fades in the content of the calendar. */
fadeInCalendar: trigger('fadeInCalendar', [
state('void', style({ opacity: 0 })),
state('enter', style({ opacity: 1 })),
transition('void => *', animate('400ms 100ms cubic-bezier(0.55, 0, 0.55, 0.2)'))
])
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* Used to generate a unique ID for each datepicker instance.
*/
let /** @type {?} */ datepickerUid = 0;
/**
* Injection token that determines the scroll handling while the calendar is open.
*/
const /** @type {?} */ MAT_DATEPICKER_SCROLL_STRATEGY = new InjectionToken('mat-datepicker-scroll-strategy');
/**
* \@docs-private
* @param {?} overlay
* @return {?}
*/
function MAT_DATEPICKER_SCROLL_STRATEGY_FACTORY(overlay) {
return () => overlay.scrollStrategies.reposition();
}
/**
* \@docs-private
*/
const /** @type {?} */ MAT_DATEPICKER_SCROLL_STRATEGY_FACTORY_PROVIDER = {
provide: MAT_DATEPICKER_SCROLL_STRATEGY,
deps: [Overlay],
useFactory: MAT_DATEPICKER_SCROLL_STRATEGY_FACTORY,
};
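/*
 * Illustrative sketch (an assumption, not part of this file): an application can
 * re-provide MAT_DATEPICKER_SCROLL_STRATEGY to change how scrolling is handled while
 * the calendar is open, e.g. blocking page scroll instead of repositioning:
 *
 *   function blockScrollStrategyFactory(overlay) {
 *     // `block` is another strategy shipped with the CDK overlay.
 *     return () => overlay.scrollStrategies.block();
 *   }
 *   // providers: [{provide: MAT_DATEPICKER_SCROLL_STRATEGY, deps: [Overlay], useFactory: blockScrollStrategyFactory}]
 */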
/**
* \@docs-private
*/
class MatDatepickerContentBase {
/**
* @param {?} _elementRef
*/
constructor(_elementRef) {
this._elementRef = _elementRef;
}
}
const /** @type {?} */ _MatDatepickerContentMixinBase = mixinColor(MatDatepickerContentBase);
/**
* Component used as the content for the datepicker dialog and popup. We use this instead of using
* MatCalendar directly as the content so we can control the initial focus. This also gives us a
* place to put additional features of the popup that are not part of the calendar itself in the
* future. (e.g. confirmation buttons).
* \@docs-private
* @template D
*/
class MatDatepickerContent extends _MatDatepickerContentMixinBase {
/**
* @param {?} elementRef
*/
constructor(elementRef) {
super(elementRef);
}
/**
* @return {?}
*/
ngAfterViewInit() {
this._calendar.focusActiveCell();
}
}
MatDatepickerContent.decorators = [
{ type: Component, args: [{selector: 'mat-datepicker-content',
template: "<mat-calendar cdkTrapFocus [id]=\"datepicker.id\" [ngClass]=\"datepicker.panelClass\" [startAt]=\"datepicker.startAt\" [startView]=\"datepicker.startView\" [minDate]=\"datepicker._minDate\" [maxDate]=\"datepicker._maxDate\" [dateFilter]=\"datepicker._dateFilter\" [headerComponent]=\"datepicker.calendarHeaderComponent\" [selected]=\"datepicker._selected\" [@fadeInCalendar]=\"'enter'\" (selectedChange)=\"datepicker.select($event)\" (yearSelected)=\"datepicker._selectYear($event)\" (monthSelected)=\"datepicker._selectMonth($event)\" (_userSelection)=\"datepicker.close()\"></mat-calendar>",
styles: [".mat-datepicker-content{box-shadow:0 5px 5px -3px rgba(0,0,0,.2),0 8px 10px 1px rgba(0,0,0,.14),0 3px 14px 2px rgba(0,0,0,.12);display:block;border-radius:2px}.mat-datepicker-content .mat-calendar{width:296px;height:354px}.mat-datepicker-content-touch{box-shadow:0 0 0 0 rgba(0,0,0,.2),0 0 0 0 rgba(0,0,0,.14),0 0 0 0 rgba(0,0,0,.12);display:block;max-height:80vh;overflow:auto;margin:-24px}.mat-datepicker-content-touch .mat-calendar{min-width:250px;min-height:312px;max-width:750px;max-height:788px}@media all and (orientation:landscape){.mat-datepicker-content-touch .mat-calendar{width:64vh;height:80vh}}@media all and (orientation:portrait){.mat-datepicker-content-touch .mat-calendar{width:80vw;height:100vw}}"],
host: {
'class': 'mat-datepicker-content',
'[@transformPanel]': '"enter"',
'[class.mat-datepicker-content-touch]': 'datepicker.touchUi',
},
animations: [
matDatepickerAnimations.transformPanel,
matDatepickerAnimations.fadeInCalendar,
],
exportAs: 'matDatepickerContent',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush,
inputs: ['color'],
},] },
];
/** @nocollapse */
MatDatepickerContent.ctorParameters = () => [
{ type: ElementRef, },
];
MatDatepickerContent.propDecorators = {
"_calendar": [{ type: ViewChild, args: [MatCalendar,] },],
};
/**
* Component responsible for managing the datepicker popup/dialog.
* @template D
*/
class MatDatepicker {
/**
* @param {?} _dialog
* @param {?} _overlay
* @param {?} _ngZone
* @param {?} _viewContainerRef
* @param {?} _scrollStrategy
* @param {?} _dateAdapter
* @param {?} _dir
* @param {?} _document
*/
constructor(_dialog, _overlay, _ngZone, _viewContainerRef, _scrollStrategy, _dateAdapter, _dir, _document) {
this._dialog = _dialog;
this._overlay = _overlay;
this._ngZone = _ngZone;
this._viewContainerRef = _viewContainerRef;
this._scrollStrategy = _scrollStrategy;
this._dateAdapter = _dateAdapter;
this._dir = _dir;
this._document = _document;
/**
* The view that the calendar should start in.
*/
this.startView = 'month';
this._touchUi = false;
/**
* Emits selected year in multiyear view.
* This doesn't imply a change on the selected date.
*/
this.yearSelected = new EventEmitter();
/**
* Emits selected month in year view.
* This doesn't imply a change on the selected date.
*/
this.monthSelected = new EventEmitter();
/**
* Emits when the datepicker has been opened.
*/
this.openedStream = new EventEmitter();
/**
* Emits when the datepicker has been closed.
*/
this.closedStream = new EventEmitter();
this._opened = false;
/**
* The id for the datepicker calendar.
*/
this.id = `mat-datepicker-${datepickerUid++}`;
this._validSelected = null;
/**
* The element that was focused before the datepicker was opened.
*/
this._focusedElementBeforeOpen = null;
/**
* Subscription to value changes in the associated input element.
*/
this._inputSubscription = Subscription.EMPTY;
/**
* Emits when the datepicker is disabled.
*/
this._disabledChange = new Subject();
/**
* Emits new selected date when selected date changes.
*/
this._selectedChanged = new Subject();
if (!this._dateAdapter) {
throw createMissingDateImplError('DateAdapter');
}
}
/**
* The date to open the calendar to initially.
* @return {?}
*/
get startAt() {
// If an explicit startAt is set we start there, otherwise we start at whatever the currently
// selected value is.
return this._startAt || (this._datepickerInput ? this._datepickerInput.value : null);
}
/**
* @param {?} value
* @return {?}
*/
set startAt(value) {
this._startAt = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
}
/**
* Color palette to use on the datepicker's calendar.
* @return {?}
*/
get color() {
return this._color ||
(this._datepickerInput ? this._datepickerInput._getThemePalette() : undefined);
}
/**
* @param {?} value
* @return {?}
*/
set color(value) {
this._color = value;
}
/**
* Whether the calendar UI is in touch mode. In touch mode the calendar opens in a dialog rather
* than a popup and elements have more padding to allow for bigger touch targets.
* @return {?}
*/
get touchUi() { return this._touchUi; }
/**
* @param {?} value
* @return {?}
*/
set touchUi(value) {
this._touchUi = coerceBooleanProperty(value);
}
/**
* Whether the datepicker pop-up should be disabled.
* @return {?}
*/
get disabled() {
return this._disabled === undefined && this._datepickerInput ?
this._datepickerInput.disabled : !!this._disabled;
}
/**
* @param {?} value
* @return {?}
*/
set disabled(value) {
const /** @type {?} */ newValue = coerceBooleanProperty(value);
if (newValue !== this._disabled) {
this._disabled = newValue;
this._disabledChange.next(newValue);
}
}
/**
* Whether the calendar is open.
* @return {?}
*/
get opened() { return this._opened; }
/**
* @param {?} value
* @return {?}
*/
set opened(value) { value ? this.open() : this.close(); }
/**
* The currently selected date.
* @return {?}
*/
get _selected() { return this._validSelected; }
/**
* @param {?} value
* @return {?}
*/
set _selected(value) { this._validSelected = value; }
/**
* The minimum selectable date.
* @return {?}
*/
get _minDate() {
return this._datepickerInput && this._datepickerInput.min;
}
/**
* The maximum selectable date.
* @return {?}
*/
get _maxDate() {
return this._datepickerInput && this._datepickerInput.max;
}
/**
* @return {?}
*/
get _dateFilter() {
return this._datepickerInput && this._datepickerInput._dateFilter;
}
/**
* @return {?}
*/
ngOnDestroy() {
this.close();
this._inputSubscription.unsubscribe();
this._disabledChange.complete();
if (this._popupRef) {
this._popupRef.dispose();
this._popupComponentRef = null;
}
}
/**
* Selects the given date
* @param {?} date
* @return {?}
*/
select(date) {
let /** @type {?} */ oldValue = this._selected;
this._selected = date;
if (!this._dateAdapter.sameDate(oldValue, this._selected)) {
this._selectedChanged.next(date);
}
}
/**
* Emits the selected year in multiyear view
* @param {?} normalizedYear
* @return {?}
*/
_selectYear(normalizedYear) {
this.yearSelected.emit(normalizedYear);
}
/**
* Emits selected month in year view
* @param {?} normalizedMonth
* @return {?}
*/
_selectMonth(normalizedMonth) {
this.monthSelected.emit(normalizedMonth);
}
/**
* Register an input with this datepicker.
* @param {?} input The datepicker input to register with this datepicker.
* @return {?}
*/
_registerInput(input) {
if (this._datepickerInput) {
throw Error('A MatDatepicker can only be associated with a single input.');
}
this._datepickerInput = input;
this._inputSubscription =
this._datepickerInput._valueChange.subscribe((value) => this._selected = value);
}
/**
* Open the calendar.
* @return {?}
*/
open() {
if (this._opened || this.disabled) {
return;
}
if (!this._datepickerInput) {
            throw Error('Attempted to open a MatDatepicker with no associated input.');
}
if (this._document) {
this._focusedElementBeforeOpen = this._document.activeElement;
}
this.touchUi ? this._openAsDialog() : this._openAsPopup();
this._opened = true;
this.openedStream.emit();
}
/**
* Close the calendar.
* @return {?}
*/
close() {
if (!this._opened) {
return;
}
if (this._popupRef && this._popupRef.hasAttached()) {
this._popupRef.detach();
}
if (this._dialogRef) {
this._dialogRef.close();
this._dialogRef = null;
}
if (this._calendarPortal && this._calendarPortal.isAttached) {
this._calendarPortal.detach();
}
const /** @type {?} */ completeClose = () => {
// The `_opened` could've been reset already if
// we got two events in quick succession.
if (this._opened) {
this._opened = false;
this.closedStream.emit();
this._focusedElementBeforeOpen = null;
}
};
if (this._focusedElementBeforeOpen &&
typeof this._focusedElementBeforeOpen.focus === 'function') {
// Because IE moves focus asynchronously, we can't count on it being restored before we've
// marked the datepicker as closed. If the event fires out of sequence and the element that
// we're refocusing opens the datepicker on focus, the user could be stuck with not being
            // able to close the calendar at all. We work around it by making the logic that marks
            // the datepicker as closed asynchronous as well.
this._focusedElementBeforeOpen.focus();
setTimeout(completeClose);
}
else {
completeClose();
}
}
/**
* Open the calendar as a dialog.
* @return {?}
*/
_openAsDialog() {
this._dialogRef = this._dialog.open(MatDatepickerContent, {
direction: this._dir ? this._dir.value : 'ltr',
viewContainerRef: this._viewContainerRef,
panelClass: 'mat-datepicker-dialog',
});
this._dialogRef.afterClosed().subscribe(() => this.close());
this._dialogRef.componentInstance.datepicker = this;
this._setColor();
}
/**
* Open the calendar as a popup.
* @return {?}
*/
_openAsPopup() {
if (!this._calendarPortal) {
this._calendarPortal = new ComponentPortal(MatDatepickerContent, this._viewContainerRef);
}
if (!this._popupRef) {
this._createPopup();
}
if (!this._popupRef.hasAttached()) {
this._popupComponentRef = this._popupRef.attach(this._calendarPortal);
this._popupComponentRef.instance.datepicker = this;
this._setColor();
// Update the position once the calendar has rendered.
this._ngZone.onStable.asObservable().pipe(take(1)).subscribe(() => {
this._popupRef.updatePosition();
});
}
}
/**
* Create the popup.
* @return {?}
*/
_createPopup() {
const /** @type {?} */ overlayConfig = new OverlayConfig({
positionStrategy: this._createPopupPositionStrategy(),
hasBackdrop: true,
backdropClass: 'mat-overlay-transparent-backdrop',
direction: this._dir,
scrollStrategy: this._scrollStrategy(),
panelClass: 'mat-datepicker-popup',
});
this._popupRef = this._overlay.create(overlayConfig);
this._popupRef.overlayElement.setAttribute('role', 'dialog');
merge(this._popupRef.backdropClick(), this._popupRef.detachments(), this._popupRef.keydownEvents().pipe(filter(event => {
// Closing on alt + up is only valid when there's an input associated with the datepicker.
return event.keyCode === ESCAPE ||
(this._datepickerInput && event.altKey && event.keyCode === UP_ARROW);
}))).subscribe(() => this.close());
}
/**
* Create the popup PositionStrategy.
* @return {?}
*/
_createPopupPositionStrategy() {
return this._overlay.position()
.flexibleConnectedTo(this._datepickerInput.getConnectedOverlayOrigin())
.withTransformOriginOn('.mat-datepicker-content')
.withFlexibleDimensions(false)
.withViewportMargin(8)
.withPush(false)
.withPositions([
{
originX: 'start',
originY: 'bottom',
overlayX: 'start',
overlayY: 'top'
},
{
originX: 'start',
originY: 'top',
overlayX: 'start',
overlayY: 'bottom'
},
{
originX: 'end',
originY: 'bottom',
overlayX: 'end',
overlayY: 'top'
},
{
originX: 'end',
originY: 'top',
overlayX: 'end',
overlayY: 'bottom'
}
]);
}
/**
* @param {?} obj The object to check.
* @return {?} The given object if it is both a date instance and valid, otherwise null.
*/
_getValidDateOrNull(obj) {
return (this._dateAdapter.isDateInstance(obj) && this._dateAdapter.isValid(obj)) ? obj : null;
}
/**
* Passes the current theme color along to the calendar overlay.
* @return {?}
*/
_setColor() {
const /** @type {?} */ color = this.color;
if (this._popupComponentRef) {
this._popupComponentRef.instance.color = color;
}
if (this._dialogRef) {
this._dialogRef.componentInstance.color = color;
}
}
}
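/*
 * Illustrative usage sketch (app-side, not part of this file): `openedStream` and
 * `closedStream` are exposed under the `opened`/`closed` output aliases (see the
 * property decorators below), so a template can react to popup visibility:
 *
 *   <mat-datepicker #picker (opened)="onOpened()" (closed)="onClosed()"></mat-datepicker>
 */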
MatDatepicker.decorators = [
{ type: Component, args: [{selector: 'mat-datepicker',
template: '',
exportAs: 'matDatepicker',
changeDetection: ChangeDetectionStrategy.OnPush,
encapsulation: ViewEncapsulation.None,
},] },
];
/** @nocollapse */
MatDatepicker.ctorParameters = () => [
{ type: MatDialog, },
{ type: Overlay, },
{ type: NgZone, },
{ type: ViewContainerRef, },
{ type: undefined, decorators: [{ type: Inject, args: [MAT_DATEPICKER_SCROLL_STRATEGY,] },] },
{ type: DateAdapter, decorators: [{ type: Optional },] },
{ type: Directionality, decorators: [{ type: Optional },] },
{ type: undefined, decorators: [{ type: Optional }, { type: Inject, args: [DOCUMENT,] },] },
];
MatDatepicker.propDecorators = {
"calendarHeaderComponent": [{ type: Input },],
"startAt": [{ type: Input },],
"startView": [{ type: Input },],
"color": [{ type: Input },],
"touchUi": [{ type: Input },],
"disabled": [{ type: Input },],
"yearSelected": [{ type: Output },],
"monthSelected": [{ type: Output },],
"panelClass": [{ type: Input },],
"openedStream": [{ type: Output, args: ['opened',] },],
"closedStream": [{ type: Output, args: ['closed',] },],
"opened": [{ type: Input },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
const /** @type {?} */ MAT_DATEPICKER_VALUE_ACCESSOR = {
provide: NG_VALUE_ACCESSOR,
useExisting: forwardRef(() => MatDatepickerInput),
multi: true
};
const /** @type {?} */ MAT_DATEPICKER_VALIDATORS = {
provide: NG_VALIDATORS,
useExisting: forwardRef(() => MatDatepickerInput),
multi: true
};
/**
* An event used for datepicker input and change events. We don't always have access to a native
* input or change event because the event may have been triggered by the user clicking on the
* calendar popup. For consistency, we always use MatDatepickerInputEvent instead.
* @template D
*/
class MatDatepickerInputEvent {
/**
* @param {?} target
* @param {?} targetElement
*/
constructor(target, targetElement) {
this.target = target;
this.targetElement = targetElement;
this.value = this.target.value;
}
}
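/*
 * Illustrative usage sketch (app-side; the handler name is hypothetical): both the
 * `dateInput` and `dateChange` outputs of the input directive below emit
 * MatDatepickerInputEvent, so typed input and calendar clicks are handled uniformly:
 *
 *   <input matInput [matDatepicker]="picker" (dateChange)="onDateChange($event)">
 *
 *   onDateChange(event) {
 *     // event.value is the parsed date (or null); event.targetElement is the <input>.
 *     console.log(event.value, event.targetElement);
 *   }
 */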
/**
* Directive used to connect an input to a MatDatepicker.
* @template D
*/
class MatDatepickerInput {
/**
* @param {?} _elementRef
* @param {?} _dateAdapter
* @param {?} _dateFormats
* @param {?} _formField
*/
constructor(_elementRef, _dateAdapter, _dateFormats, _formField) {
this._elementRef = _elementRef;
this._dateAdapter = _dateAdapter;
this._dateFormats = _dateFormats;
this._formField = _formField;
/**
* Emits when a `change` event is fired on this `<input>`.
*/
this.dateChange = new EventEmitter();
/**
* Emits when an `input` event is fired on this `<input>`.
*/
this.dateInput = new EventEmitter();
/**
* Emits when the value changes (either due to user input or programmatic change).
*/
this._valueChange = new EventEmitter();
/**
* Emits when the disabled state has changed
*/
this._disabledChange = new EventEmitter();
this._onTouched = () => { };
this._cvaOnChange = () => { };
this._validatorOnChange = () => { };
this._datepickerSubscription = Subscription.EMPTY;
this._localeSubscription = Subscription.EMPTY;
/**
* The form control validator for whether the input parses.
*/
this._parseValidator = () => {
return this._lastValueValid ?
null : { 'matDatepickerParse': { 'text': this._elementRef.nativeElement.value } };
};
/**
* The form control validator for the min date.
*/
this._minValidator = (control) => {
const /** @type {?} */ controlValue = this._getValidDateOrNull(this._dateAdapter.deserialize(control.value));
return (!this.min || !controlValue ||
this._dateAdapter.compareDate(this.min, controlValue) <= 0) ?
null : { 'matDatepickerMin': { 'min': this.min, 'actual': controlValue } };
};
/**
* The form control validator for the max date.
*/
this._maxValidator = (control) => {
const /** @type {?} */ controlValue = this._getValidDateOrNull(this._dateAdapter.deserialize(control.value));
return (!this.max || !controlValue ||
this._dateAdapter.compareDate(this.max, controlValue) >= 0) ?
null : { 'matDatepickerMax': { 'max': this.max, 'actual': controlValue } };
};
/**
* The form control validator for the date filter.
*/
this._filterValidator = (control) => {
const /** @type {?} */ controlValue = this._getValidDateOrNull(this._dateAdapter.deserialize(control.value));
return !this._dateFilter || !controlValue || this._dateFilter(controlValue) ?
null : { 'matDatepickerFilter': true };
};
/**
* The combined form control validator for this input.
*/
this._validator = Validators.compose([this._parseValidator, this._minValidator, this._maxValidator, this._filterValidator]);
/**
* Whether the last value set on the input was valid.
*/
this._lastValueValid = false;
if (!this._dateAdapter) {
throw createMissingDateImplError('DateAdapter');
}
if (!this._dateFormats) {
throw createMissingDateImplError('MAT_DATE_FORMATS');
}
// Update the displayed date when the locale changes.
this._localeSubscription = _dateAdapter.localeChanges.subscribe(() => {
this.value = this.value;
});
}
/**
* The datepicker that this input is associated with.
* @param {?} value
* @return {?}
*/
set matDatepicker(value) {
if (!value) {
return;
}
this._datepicker = value;
this._datepicker._registerInput(this);
this._datepickerSubscription.unsubscribe();
this._datepickerSubscription = this._datepicker._selectedChanged.subscribe((selected) => {
this.value = selected;
this._cvaOnChange(selected);
this._onTouched();
this.dateInput.emit(new MatDatepickerInputEvent(this, this._elementRef.nativeElement));
this.dateChange.emit(new MatDatepickerInputEvent(this, this._elementRef.nativeElement));
});
}
/**
* Function that can be used to filter out dates within the datepicker.
* @param {?} value
* @return {?}
*/
set matDatepickerFilter(value) {
this._dateFilter = value;
this._validatorOnChange();
}
/**
* The value of the input.
* @return {?}
*/
get value() { return this._value; }
/**
* @param {?} value
* @return {?}
*/
set value(value) {
value = this._dateAdapter.deserialize(value);
this._lastValueValid = !value || this._dateAdapter.isValid(value);
value = this._getValidDateOrNull(value);
const /** @type {?} */ oldDate = this.value;
this._value = value;
this._formatValue(value);
if (!this._dateAdapter.sameDate(oldDate, value)) {
this._valueChange.emit(value);
}
}
/**
* The minimum valid date.
* @return {?}
*/
get min() { return this._min; }
/**
* @param {?} value
* @return {?}
*/
set min(value) {
this._min = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
this._validatorOnChange();
}
/**
* The maximum valid date.
* @return {?}
*/
get max() { return this._max; }
/**
* @param {?} value
* @return {?}
*/
set max(value) {
this._max = this._getValidDateOrNull(this._dateAdapter.deserialize(value));
this._validatorOnChange();
}
/**
* Whether the datepicker-input is disabled.
* @return {?}
*/
get disabled() { return !!this._disabled; }
/**
* @param {?} value
* @return {?}
*/
set disabled(value) {
const /** @type {?} */ newValue = coerceBooleanProperty(value);
const /** @type {?} */ element = this._elementRef.nativeElement;
if (this._disabled !== newValue) {
this._disabled = newValue;
this._disabledChange.emit(newValue);
}
// We need to null check the `blur` method, because it's undefined during SSR.
if (newValue && element.blur) {
// Normally, native input elements automatically blur if they turn disabled. This behavior
// is problematic, because it would mean that it triggers another change detection cycle,
// which then causes a changed after checked error if the input element was focused before.
element.blur();
}
}
/**
* @return {?}
*/
ngOnDestroy() {
this._datepickerSubscription.unsubscribe();
this._localeSubscription.unsubscribe();
this._valueChange.complete();
this._disabledChange.complete();
}
/**
* \@docs-private
* @param {?} fn
* @return {?}
*/
registerOnValidatorChange(fn) {
this._validatorOnChange = fn;
}
/**
* \@docs-private
* @param {?} c
* @return {?}
*/
validate(c) {
return this._validator ? this._validator(c) : null;
}
/**
* @deprecated
* \@breaking-change 7.0.0 Use `getConnectedOverlayOrigin` instead
* @return {?}
*/
getPopupConnectionElementRef() {
return this.getConnectedOverlayOrigin();
}
/**
* Gets the element that the datepicker popup should be connected to.
* @return {?} The element to connect the popup to.
*/
getConnectedOverlayOrigin() {
return this._formField ? this._formField.getConnectedOverlayOrigin() : this._elementRef;
}
/**
* @param {?} value
* @return {?}
*/
writeValue(value) {
this.value = value;
}
/**
* @param {?} fn
* @return {?}
*/
registerOnChange(fn) {
this._cvaOnChange = fn;
}
/**
* @param {?} fn
* @return {?}
*/
registerOnTouched(fn) {
this._onTouched = fn;
}
/**
* @param {?} isDisabled
* @return {?}
*/
setDisabledState(isDisabled) {
this.disabled = isDisabled;
}
/**
* @param {?} event
* @return {?}
*/
_onKeydown(event) {
if (this._datepicker && event.altKey && event.keyCode === DOWN_ARROW) {
this._datepicker.open();
event.preventDefault();
}
}
/**
* @param {?} value
* @return {?}
*/
_onInput(value) {
let /** @type {?} */ date = this._dateAdapter.parse(value, this._dateFormats.parse.dateInput);
this._lastValueValid = !date || this._dateAdapter.isValid(date);
date = this._getValidDateOrNull(date);
if (!this._dateAdapter.sameDate(date, this._value)) {
this._value = date;
this._cvaOnChange(date);
this._valueChange.emit(date);
this.dateInput.emit(new MatDatepickerInputEvent(this, this._elementRef.nativeElement));
}
}
/**
* @return {?}
*/
_onChange() {
this.dateChange.emit(new MatDatepickerInputEvent(this, this._elementRef.nativeElement));
}
/**
* Returns the palette used by the input's form field, if any.
* @return {?}
*/
_getThemePalette() {
return this._formField ? this._formField.color : undefined;
}
/**
* Handles blur events on the input.
* @return {?}
*/
_onBlur() {
// Reformat the input only if we have a valid value.
if (this.value) {
this._formatValue(this.value);
}
this._onTouched();
}
/**
* Formats a value and sets it on the input element.
* @param {?} value
* @return {?}
*/
_formatValue(value) {
this._elementRef.nativeElement.value =
value ? this._dateAdapter.format(value, this._dateFormats.display.dateInput) : '';
}
/**
* @param {?} obj The object to check.
* @return {?} The given object if it is both a date instance and valid, otherwise null.
*/
_getValidDateOrNull(obj) {
return (this._dateAdapter.isDateInstance(obj) && this._dateAdapter.isValid(obj)) ? obj : null;
}
}
MatDatepickerInput.decorators = [
{ type: Directive, args: [{
selector: 'input[matDatepicker]',
providers: [
MAT_DATEPICKER_VALUE_ACCESSOR,
MAT_DATEPICKER_VALIDATORS,
{ provide: MAT_INPUT_VALUE_ACCESSOR, useExisting: MatDatepickerInput },
],
host: {
'[attr.aria-haspopup]': 'true',
'[attr.aria-owns]': '(_datepicker?.opened && _datepicker.id) || null',
'[attr.min]': 'min ? _dateAdapter.toIso8601(min) : null',
'[attr.max]': 'max ? _dateAdapter.toIso8601(max) : null',
'[disabled]': 'disabled',
'(input)': '_onInput($event.target.value)',
'(change)': '_onChange()',
'(blur)': '_onBlur()',
'(keydown)': '_onKeydown($event)',
},
exportAs: 'matDatepickerInput',
},] },
];
/** @nocollapse */
MatDatepickerInput.ctorParameters = () => [
{ type: ElementRef, },
{ type: DateAdapter, decorators: [{ type: Optional },] },
{ type: undefined, decorators: [{ type: Optional }, { type: Inject, args: [MAT_DATE_FORMATS,] },] },
{ type: MatFormField, decorators: [{ type: Optional },] },
];
MatDatepickerInput.propDecorators = {
"matDatepicker": [{ type: Input },],
"matDatepickerFilter": [{ type: Input },],
"value": [{ type: Input },],
"min": [{ type: Input },],
"max": [{ type: Input },],
"disabled": [{ type: Input },],
"dateChange": [{ type: Output },],
"dateInput": [{ type: Output },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* Can be used to override the icon of a `matDatepickerToggle`.
*/
class MatDatepickerToggleIcon {
}
MatDatepickerToggleIcon.decorators = [
{ type: Directive, args: [{
selector: '[matDatepickerToggleIcon]'
},] },
];
/**
* @template D
*/
class MatDatepickerToggle {
/**
* @param {?} _intl
* @param {?} _changeDetectorRef
*/
constructor(_intl, _changeDetectorRef) {
this._intl = _intl;
this._changeDetectorRef = _changeDetectorRef;
this._stateChanges = Subscription.EMPTY;
}
/**
* Whether the toggle button is disabled.
* @return {?}
*/
get disabled() {
return this._disabled === undefined ? this.datepicker.disabled : !!this._disabled;
}
/**
* @param {?} value
* @return {?}
*/
set disabled(value) {
this._disabled = coerceBooleanProperty(value);
}
/**
* @param {?} changes
* @return {?}
*/
ngOnChanges(changes) {
if (changes["datepicker"]) {
this._watchStateChanges();
}
}
/**
* @return {?}
*/
ngOnDestroy() {
this._stateChanges.unsubscribe();
}
/**
* @return {?}
*/
ngAfterContentInit() {
this._watchStateChanges();
}
/**
* @param {?} event
* @return {?}
*/
_open(event) {
if (this.datepicker && !this.disabled) {
this.datepicker.open();
event.stopPropagation();
}
}
/**
* @return {?}
*/
_watchStateChanges() {
const /** @type {?} */ datepickerDisabled = this.datepicker ? this.datepicker._disabledChange : of();
const /** @type {?} */ inputDisabled = this.datepicker && this.datepicker._datepickerInput ?
this.datepicker._datepickerInput._disabledChange : of();
const /** @type {?} */ datepickerToggled = this.datepicker ?
merge(this.datepicker.openedStream, this.datepicker.closedStream) :
of();
this._stateChanges.unsubscribe();
this._stateChanges = merge(this._intl.changes, datepickerDisabled, inputDisabled, datepickerToggled).subscribe(() => this._changeDetectorRef.markForCheck());
}
}
MatDatepickerToggle.decorators = [
{ type: Component, args: [{selector: 'mat-datepicker-toggle',
template: "<button mat-icon-button type=\"button\" aria-haspopup=\"true\" [attr.aria-label]=\"_intl.openCalendarLabel\" [disabled]=\"disabled\" (click)=\"_open($event)\"><svg *ngIf=\"!_customIcon\" class=\"mat-datepicker-toggle-default-icon\" viewBox=\"0 0 24 24\" width=\"24px\" height=\"24px\" fill=\"currentColor\" focusable=\"false\"><path d=\"M0 0h24v24H0z\" fill=\"none\"/><path d=\"M19 3h-1V1h-2v2H8V1H6v2H5c-1.11 0-1.99.9-1.99 2L3 19c0 1.1.89 2 2 2h14c1.1 0 2-.9 2-2V5c0-1.1-.9-2-2-2zm0 16H5V8h14v11zM7 10h5v5H7z\"/></svg><ng-content select=\"[matDatepickerToggleIcon]\"></ng-content></button>",
styles: [".mat-form-field-appearance-legacy .mat-form-field-prefix .mat-datepicker-toggle-default-icon,.mat-form-field-appearance-legacy .mat-form-field-suffix .mat-datepicker-toggle-default-icon{width:1em}.mat-form-field:not(.mat-form-field-appearance-legacy) .mat-form-field-prefix .mat-datepicker-toggle-default-icon,.mat-form-field:not(.mat-form-field-appearance-legacy) .mat-form-field-suffix .mat-datepicker-toggle-default-icon{display:block;width:1.5em;height:1.5em}.mat-form-field:not(.mat-form-field-appearance-legacy) .mat-form-field-prefix .mat-icon-button .mat-datepicker-toggle-default-icon,.mat-form-field:not(.mat-form-field-appearance-legacy) .mat-form-field-suffix .mat-icon-button .mat-datepicker-toggle-default-icon{margin:auto}"],
host: {
'class': 'mat-datepicker-toggle',
'[class.mat-datepicker-toggle-active]': 'datepicker && datepicker.opened',
'[class.mat-accent]': 'datepicker && datepicker.color === "accent"',
'[class.mat-warn]': 'datepicker && datepicker.color === "warn"',
},
exportAs: 'matDatepickerToggle',
encapsulation: ViewEncapsulation.None,
changeDetection: ChangeDetectionStrategy.OnPush,
},] },
];
/** @nocollapse */
MatDatepickerToggle.ctorParameters = () => [
{ type: MatDatepickerIntl, },
{ type: ChangeDetectorRef, },
];
MatDatepickerToggle.propDecorators = {
"datepicker": [{ type: Input, args: ['for',] },],
"disabled": [{ type: Input },],
"_customIcon": [{ type: ContentChild, args: [MatDatepickerToggleIcon,] },],
};
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
class MatDatepickerModule {
}
MatDatepickerModule.decorators = [
{ type: NgModule, args: [{
imports: [
CommonModule,
MatButtonModule,
MatDialogModule,
OverlayModule,
A11yModule,
PortalModule,
],
exports: [
MatCalendar,
MatCalendarBody,
MatDatepicker,
MatDatepickerContent,
MatDatepickerInput,
MatDatepickerToggle,
MatDatepickerToggleIcon,
MatMonthView,
MatYearView,
MatMultiYearView,
MatCalendarHeader,
],
declarations: [
MatCalendar,
MatCalendarBody,
MatDatepicker,
MatDatepickerContent,
MatDatepickerInput,
MatDatepickerToggle,
MatDatepickerToggleIcon,
MatMonthView,
MatYearView,
MatMultiYearView,
MatCalendarHeader,
],
providers: [
MatDatepickerIntl,
MAT_DATEPICKER_SCROLL_STRATEGY_FACTORY_PROVIDER,
],
entryComponents: [
MatDatepickerContent,
MatCalendarHeader,
]
},] },
];
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
/**
* @fileoverview added by tsickle
* @suppress {checkTypes} checked by tsc
*/
export { MatDatepickerModule, MatCalendarHeader, MatCalendar, MatCalendarCell, MatCalendarBody, MAT_DATEPICKER_SCROLL_STRATEGY, MAT_DATEPICKER_SCROLL_STRATEGY_FACTORY, MAT_DATEPICKER_SCROLL_STRATEGY_FACTORY_PROVIDER, MatDatepickerContentBase, _MatDatepickerContentMixinBase, MatDatepickerContent, MatDatepicker, matDatepickerAnimations, MAT_DATEPICKER_VALUE_ACCESSOR, MAT_DATEPICKER_VALIDATORS, MatDatepickerInputEvent, MatDatepickerInput, MatDatepickerIntl, MatDatepickerToggleIcon, MatDatepickerToggle, MatMonthView, MatYearView, MatMultiYearView as ɵa34 };
//# sourceMappingURL=datepicker.js.map
|
MatYearView
|
test.py
|
from tic_tac_toe.Board import Board, GameResult
from tic_tac_toe.RandomPlayer import RandomPlayer
from tic_tac_toe.MinMaxAgent import MinMaxAgent
from tic_tac_toe.RndMinMaxAgent import RndMinMaxAgent
from tic_tac_toe.HumanPlayer import HumanPlayer
from tic_tac_toe.TQPlayer import TQPlayer
from tic_tac_toe.VFPlayer import VFPlayer
from util import *
# battle(RandomPlayer("RandomPlayer1"), RandomPlayer("RandomPlayer2"), num_games=10000)
# battle(MinMaxAgent(), RandomPlayer(), num_games=10000)
# battle(RandomPlayer(), MinMaxAgent(), num_games=10000)
# battle(MinMaxAgent(), RndMinMaxAgent(), num_games=10000)
#play_game(Board(), RndMinMaxAgent(), HumanPlayer(), silent=False)
#play_game(Board(), VFPlayer(), MinMaxAgent(), silent=False)
|
player1.set_exploration_rate(0.0)
eval_players(player1, RndMinMaxAgent(), 50)
while True:
play_game(Board(), player1, HumanPlayer(), silent=False)
|
player1 = VFPlayer("VFPlayer1", learning_rate=0.1, exploration_rate=0.01, v_init=0.6)
#player1 = TQPlayer()
eval_players(player1, RndMinMaxAgent(), 50)
|
JoiTestHelper.ts
|
// SPDX-License-Identifier: Apache-2.0
// Licensed to the Ed-Fi Alliance under one or more agreements.
// The Ed-Fi Alliance licenses this file to you under the Apache License, Version 2.0.
// See the LICENSE and NOTICES files in the project root for more information.
/* eslint-disable no-underscore-dangle */
type SchemaProperty = {
name: string;
presence: string;
type: string;
};
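// Asserts that a Joi object schema has exactly the given properties, in order,
// and returns their subschemas for further inspection.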
export function expectSubschemas(joiSchema: any, schemaProperties: SchemaProperty[]): any[] {
const subschemas: any[] = [...joiSchema._ids._byKey.values()];
  if (subschemas.length !== schemaProperties.length) throw new Error(`expected ${schemaProperties.length} schema properties but found ${subschemas.length}`);
subschemas.forEach((subschema, index) => {
expect(subschema.id).toBe(schemaProperties[index].name);
expect(subschema.schema._flags.presence).toBe(schemaProperties[index].presence);
expect(subschema.schema.type).toBe(schemaProperties[index].type);
});
return subschemas.map((s) => s.schema);
}
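// Asserts that a Joi array schema's single item matches the given reference
// schema property and returns that item's subschema.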
export function expectSubschemaReferenceArray(joiSchema: any, schemaProperty: SchemaProperty) {
const subschema: any = [...joiSchema.$_terms.items[0]._ids._byKey.values()][0];
expect(subschema.id).toBe(schemaProperty.name);
expect(subschema.schema._flags.presence).toBe(schemaProperty.presence);
expect(subschema.schema.type).toBe(schemaProperty.type);
return subschema.schema;
}
|
expect(subschema.id).toBe(schemaProperty.name);
expect(subschema.schema._flags.presence).toBeUndefined();
expect(subschema.schema.type).toBe(schemaProperty.type);
return subschema.schema;
}
|
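// Asserts that a Joi array schema's single item is a scalar of the given type
// (with no presence flag) and returns that item's subschema.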
export function expectSubschemaScalarArray(joiSchema: any, schemaProperty: SchemaProperty) {
const subschema: any = [...joiSchema.$_terms.items[0]._ids._byKey.values()][0];
|
addonctl.go
|
/*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package addonctl
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strings"
"k8s.io/apimachinery/pkg/runtime/schema"
"k8s.io/client-go/dynamic"
"sigs.k8s.io/controller-runtime/pkg/client/config"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/apis/meta/v1/unstructured"
addonmgrv1alpha1 "github.com/keikoproj/addon-manager/api/v1alpha1"
"github.com/keikoproj/addon-manager/pkg/common"
"github.com/keikoproj/addon-manager/pkg/version"
"github.com/keikoproj/addon-manager/pkg/workflows"
)
var addonName string
var clusterName string
var clusterRegion string
var debug bool
var dryRun bool
var description string
var dependencies string
var install string
var namespace string
var pkgChannel string
var pkgType string
var pkgVersion string
var paramsRaw string
var prereqs string
var secretsRaw string
var selector string
// certain variables parsed into these below
var dependenciesMap = make(map[string]string)
var params = make(map[string]string)
var prereqResources = make([]string, 0)
var prereqScripts = make(map[string]string)
var installResources = make([]string, 0)
var installScripts = make(map[string]string)
var secretsList = make([]string, 0)
var selectorMap = make(map[string]string)
var addonMgrSystemNamespace = "addon-manager-system"
// Execute the command
func Execute() {
root := newRootCommand()
if err := root.Execute(); err != nil {
log.Errorf("%s", err)
os.Exit(1)
}
}
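// parseAllArgs validates and parses every raw flag value into the
// package-level maps and slices used when building the Addon spec.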
func parseAllArgs(md *cobra.Command, args []string) error {
fmt.Println("Parsing all args")
err := extractResources(prereqs, install)
if err != nil {
return err
}
err = parseAddonParams(paramsRaw)
if err != nil {
return err
}
err = parseDependencies(dependencies)
if err != nil {
return err
}
err = parseSecrets(secretsRaw)
if err != nil {
return err
}
err = parseSelector(selector)
if err != nil {
return err
}
err = validatePkgType(pkgType)
if err != nil {
return err
}
return nil
}
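// newRootCommand builds the addonctl root command, registers all flags,
// and attaches the 'create' subcommand.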
func newRootCommand() *cobra.Command {
rootCmd := &cobra.Command{
Use: "addonctl",
Short: "A control plane for managing addons",
Version: version.ToString(),
}
cfg, err := config.GetConfig()
if err != nil {
fmt.Println(err)
return rootCmd
}
rootCmd.PersistentFlags().StringVar(&clusterName, "cluster-name", "", "Name of the cluster context being used")
rootCmd.MarkFlagRequired("cluster-name")
rootCmd.PersistentFlags().StringVar(&clusterRegion, "cluster-region", "", "Cluster region")
rootCmd.MarkFlagRequired("cluster-region")
rootCmd.PersistentFlags().StringVar(&description, "desc", "", "Description of the addon")
rootCmd.PersistentFlags().StringVar(&dependencies, "deps", "", "Comma separated dependencies list in the format 'pkgName:pkgVersion'")
rootCmd.PersistentFlags().StringVarP(&pkgChannel, "channel", "c", "", "Channel for the addon package")
rootCmd.MarkFlagRequired("channel")
rootCmd.PersistentFlags().StringVarP(&pkgType, "type", "t", "", "Addon package type")
rootCmd.MarkFlagRequired("type")
rootCmd.PersistentFlags().StringVarP(&pkgVersion, "version", "v", "", "Addon package version")
rootCmd.MarkFlagRequired("version")
    rootCmd.PersistentFlags().StringVar(&secretsRaw, "secrets", "", "Comma separated list of secret names which are validated as part of the addon-manager-system namespace")
    rootCmd.PersistentFlags().StringVar(&selector, "selector", "", "Selector applied to all resources")
rootCmd.PersistentFlags().StringVarP(&namespace, "namespace", "n", "", "Namespace where the addon will be deployed")
rootCmd.MarkFlagRequired("namespace")
// TODO P3 --v verbose
rootCmd.PersistentFlags().StringVarP(¶msRaw, "params", "p", "", "Params to supply to the resource yaml")
rootCmd.PersistentFlags().StringVar(&prereqs, "prereqs", "", "File or directory of resource yaml to submit as prereqs step")
rootCmd.PersistentFlags().StringVar(&install, "install", "", "File or directory of resource yaml to submit as install step")
rootCmd.PersistentFlags().BoolVar(&dryRun, "dryrun", false, "Outputs the addon spec but doesn't submit")
// add commands
rootCmd.AddCommand(&cobra.Command{
Use: "create",
Short: "Create the addon resource with the supplied arguments",
PreRunE: parseAllArgs,
Args: func(cmd *cobra.Command, args []string) error {
if len(args) < 1 {
                return errors.New("requires the addon name as the first argument")
}
// Ensure Addon name is first positional argument
addonName = args[0]
re := regexp.MustCompile("^[a-zA-Z][a-zA-Z0-9-.]*$")
if !re.MatchString(addonName)
|
return nil
},
Run: func(cmd *cobra.Command, args []string) {
instance := &addonmgrv1alpha1.Addon{}
instance.SetGroupVersionKind(schema.GroupVersionKind{
Group: "addonmgr.keikoproj.io",
Version: "v1alpha1",
Kind: "Addon",
})
instance.SetName(addonName)
instance.SetNamespace(addonMgrSystemNamespace)
//assume all args are validated
instance.Spec.PkgChannel = pkgChannel
instance.Spec.PkgDeps = dependenciesMap
instance.Spec.PkgDescription = description
instance.Spec.PkgName = addonName
instance.Spec.PkgType = addonmgrv1alpha1.PackageType(pkgType)
instance.Spec.PkgVersion = pkgVersion
instance.Spec.Selector = metav1.LabelSelector{MatchLabels: selectorMap}
instance.Spec.Secrets = []addonmgrv1alpha1.SecretCmdSpec{}
for _, secret := range secretsList {
scs := addonmgrv1alpha1.SecretCmdSpec{Name: secret}
instance.Spec.Secrets = append(instance.Spec.Secrets, scs)
}
instance.Spec.Params.Namespace = namespace
instance.Spec.Params.Context.ClusterName = clusterName
instance.Spec.Params.Context.ClusterRegion = clusterRegion
// instance.Spec.Params.Context.AdditionalConfigs
            // set params as string params; they will be copied over in workflow.go
for name, val := range params {
instance.Spec.Params.Data[name] = addonmgrv1alpha1.FlexString(val)
}
prereqWorkflowBuilder := workflows.New()
prereqWf := prereqWorkflowBuilder.Scripts(prereqScripts).Resources(prereqResources).Build() // Removed SetName(n) because it depends on checksum, addon_controller must set it
instance.Spec.Lifecycle.Prereqs.Template = workflows.ConvertUnstructuredWorkflowToString(prereqWf)
// instance.Spec.Lifecycle.Prereqs.NamePrefix
// instance.Spec.Lifecycle.Prereqs.Role
installWorkflowBuilder := workflows.New()
installWf := installWorkflowBuilder.Scripts(installScripts).Resources(installResources).Build() // Removed SetName(n) because it depends on checksum, addon_controller must set it
instance.Spec.Lifecycle.Install.Template = workflows.ConvertUnstructuredWorkflowToString(installWf)
// instance.Spec.Lifecycle.Install.NamePrefix
// instance.Spec.Lifecycle.Install.Role
deleteWorkflowBuilder := workflows.New()
deleteWf := deleteWorkflowBuilder.Delete().Build()
instance.Spec.Lifecycle.Delete.Template = workflows.ConvertUnstructuredWorkflowToString(deleteWf)
if dryRun {
fmt.Println("Printing workflow to stdout without submitting:")
prettyPrint(instance)
// TODO output to file
return
}
kubeClient := dynamic.NewForConfigOrDie(cfg)
addonMap := make(map[string]interface{})
jsonInstance, _ := json.Marshal(instance)
err = json.Unmarshal(jsonInstance, &addonMap)
if err != nil {
fmt.Println(err)
return
}
addon := unstructured.Unstructured{}
addon.SetUnstructuredContent(addonMap)
addonObject, err := kubeClient.Resource(common.AddonGVR()).Namespace(addonMgrSystemNamespace).Get(addonName, metav1.GetOptions{})
if err == nil {
fmt.Printf("Updating addon %s...\n", addonName)
resourceVersion := addonObject.GetResourceVersion()
addon.SetResourceVersion(resourceVersion)
_, err = kubeClient.Resource(common.AddonGVR()).Namespace(addonMgrSystemNamespace).Update(&addon, metav1.UpdateOptions{})
if err != nil {
fmt.Println(err)
return
}
} else {
fmt.Printf("Creating addon %s...\n", addonName)
_, err = kubeClient.Resource(common.AddonGVR()).Namespace(addonMgrSystemNamespace).Create(&addon, metav1.CreateOptions{})
if err != nil {
fmt.Println(err)
return
}
}
},
})
return rootCmd
}
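// prettyPrint writes v to stdout as indented JSON.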
func prettyPrint(v interface{}) (err error) {
b, err := json.MarshalIndent(v, "", " ")
if err == nil {
fmt.Println(string(b))
}
return
}
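// parseSelector parses a single 'key:value' selector into selectorMap.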
func parseSelector(sel string) error {
fmt.Println("Parsing selector...")
if sel == "" {
return nil
}
s := strings.Split(sel, ":")
    if len(s) == 1 {
        return errors.New("Missing ':' separator in selector")
    } else if len(s) != 2 {
        return errors.New("Selector had multiple separators")
}
selectorMap[s[0]] = s[1]
return nil
}
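// parseAddonParams parses a comma-separated list of 'key=value' pairs into params.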
func parseAddonParams(raw string) error {
fmt.Println("Parsing addon params...")
if raw == "" {
params = nil
return nil
}
rawList := strings.Split(raw, ",")
for _, item := range rawList {
kv := strings.Split(item, "=")
        if len(kv) != 2 {
            return fmt.Errorf("Unable to parse addon params: '%s'. Key-value pair %s does not have a single separator '='", raw, item)
        }
params[kv[0]] = kv[1]
}
return nil
}
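// validatePkgType checks that pt is one of the supported addon package types.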
func validatePkgType(pt string) error {
fmt.Println("Validating pkgType...")
t := addonmgrv1alpha1.PackageType(pt)
if t != addonmgrv1alpha1.HelmPkg && t != addonmgrv1alpha1.ShipPkg && t != addonmgrv1alpha1.KustomizePkg && t != addonmgrv1alpha1.CnabPkg && t != addonmgrv1alpha1.CompositePkg {
return errors.New("unsupported package type")
}
return nil
}
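// parseDependencies parses a comma-separated 'pkgName:pkgVersion' list into dependenciesMap.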
func parseDependencies(deps string) error {
fmt.Println("Parsing dependencies...")
if deps == "" {
return nil
}
for _, dep := range strings.Split(deps, ",") {
d := strings.Split(dep, ":")
        if len(d) == 1 {
log.Fatal("Missing ':' separator in dependency")
} else if len(d) != 2 {
log.Fatal("Dependency had multiple separators")
}
dependenciesMap[d[0]] = d[1]
}
return nil
}
// TODO: decide whether parsing flags into package-level globals is the best approach
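// extractResources walks the prereqs and install paths and splits their
// contents into Python scripts and YAML resource manifests for each step.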
func extractResources(prereqsPath, installPath string) error {
lifecycleSteps := map[string]string{"prereqs": prereqsPath, "install": installPath}
for stepName, path := range lifecycleSteps {
if path != "" {
path = filepath.Join(os.Getenv("PWD"), path)
fi, err := os.Stat(path)
switch {
case err != nil:
log.Fatal(err)
case fi.IsDir():
files, err := ioutil.ReadDir(path)
if err != nil {
log.Fatal(err)
}
for _, f := range files {
switch {
case strings.HasSuffix(f.Name(), ".py"):
                        data, err := ioutil.ReadFile(filepath.Join(path, f.Name()))
if err != nil {
log.Fatal(fmt.Sprintf("Unable to read file %s", f.Name()))
}
if stepName == "prereqs" {
prereqScripts[f.Name()] = string(data)
} else if stepName == "install" {
installScripts[f.Name()] = string(data)
}
case strings.HasSuffix(f.Name(), ".yaml") || strings.HasSuffix(f.Name(), ".yml"):
                        parseResources(filepath.Join(path, f.Name()), stepName)
default:
// simply ignore the file
continue
}
}
default:
// it's a file
switch {
case strings.HasSuffix(fi.Name(), ".py"):
data, err := ioutil.ReadFile(path)
if err != nil {
log.Fatal(fmt.Sprintf("Unable to read file %s", fi.Name()))
}
if stepName == "prereqs" {
prereqScripts[fi.Name()] = string(data)
} else if stepName == "install" {
installScripts[fi.Name()] = string(data)
}
case strings.HasSuffix(fi.Name(), ".yaml") || strings.HasSuffix(fi.Name(), ".yml"):
parseResources(path, stepName)
default:
// simply ignore the file
continue
}
}
}
}
return nil
}
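// parseSecrets splits a comma-separated list of secret names into secretsList.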
func parseSecrets(raw string) error {
if raw == "" {
return nil
}
    secrets := strings.Split(raw, ",")
    for _, item := range secrets {
        if strings.TrimSpace(item) == "" {
            return fmt.Errorf("Error parsing secrets %s", raw)
        }
        secretsList = append(secretsList, item)
    }
return nil
}
// TODO: decide whether parsing functions should work on package-level globals or take pointers to the maps they fill
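// parseResources splits a multi-document YAML file on '---' separators and
// appends each non-empty document to the prereq or install resource list.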
func parseResources(filename, stepName string) {
rawBytes, err := ioutil.ReadFile(filename)
if err != nil {
log.Fatal(err)
}
resources := strings.Split(string(rawBytes), "---\n")
for _, resource := range resources {
if strings.TrimSpace(resource) == "" {
continue
}
if stepName == "prereqs" {
prereqResources = append(prereqResources, resource)
} else if stepName == "install" {
installResources = append(installResources, resource)
}
}
}
|
{
return errors.New("Invalid addon name")
}
|
sketch.js
|
var canvas;
var backgroundImage, car1_img, car2_img, track;
var database, gameState, game;
var form, player, playerCount;
var allPlayers, car1, car2;
var cars = [];
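// p5.js hook: loads all image assets before setup runs.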
function preload() {
backgroundImage = loadImage("./assets/planodefundo.png");
|
car2_img = loadImage("../assets/car2.png");
track = loadImage("../assets/PISTA.png");
}
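// p5.js hook: creates the canvas, connects to Firebase, and starts the game.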
function setup() {
canvas = createCanvas(windowWidth, windowHeight);
database = firebase.database();
game = new Game();
game.getState();
game.start();
}
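// p5.js render loop: updates the game once both players have joined.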
function draw() {
background(backgroundImage);
if (playerCount === 2) {
game.update(1);
}
if (gameState === 1) {
game.play();
}
}
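// Keeps the canvas sized to the browser window.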
function windowResized() {
resizeCanvas(windowWidth, windowHeight);
}
|
car1_img = loadImage("../assets/car1.png");
|
challenge_1.ts
|
import { readFileSync, writeFileSync } from 'fs'
type Axis = 'x'|'y'
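// A dot position parsed from an 'x,y' input line.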
class Coordinate {
x: number
y: number
constructor(line: string) {
[this.x, this.y] = line.split(',').map(str => parseInt(str))
}
}
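// A fold instruction parsed from a 'fold along x=N' or 'fold along y=N' line.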
class
|
{
axis: Axis
coordinate: number
constructor(line: string) {
const lastToken = line.split(' ')[2] // "fold along x=2".split(' ')[2] gives "x=2"
const axis = lastToken.split('=')[0]
        if(axis !== 'x' && axis !== 'y') throw new Error('axis is not x or y')
this.axis = axis
this.coordinate = parseInt(lastToken.split('=')[1])
}
}
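// The dot grid; a fold mirrors one half onto the other and ORs the dots together.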
class Paper {
dots: boolean[][]
constructor(coordinates: Coordinate[]) {
const xMax = coordinates.reduce((max, curr) => Math.max(max, curr.x), 0)
const yMax = coordinates.reduce((max, curr) => Math.max(max, curr.y), 0)
this.dots = (new Array(yMax + 1)).fill(0).map(_ => (new Array(xMax + 1)).fill(false))
for(const coordinate of coordinates) {
this.dots[coordinate.y][coordinate.x] = true
}
}
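    // Mirrors the half beyond the fold line back onto the kept half.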
fold(fold: Fold): void {
let newDots: boolean[][]
if(fold.axis === 'x') {
const leftDots = this.dots.map(row => row.filter((dot, index) => index < fold.coordinate))
const rightDotsReversed = this.dots.map(row => row.filter((dot, index) => index > fold.coordinate).reverse())
newDots = [...leftDots]
for(const [rowIndex, row] of rightDotsReversed.entries()) {
for(const [dotIndex, dot] of row.entries()) {
newDots[rowIndex][dotIndex] = newDots[rowIndex][dotIndex] || dot
}
}
}
else {
            const topDots = this.dots.filter((row, index) => index < fold.coordinate)
            const bottomDotsReversed = this.dots.filter((row, index) => index > fold.coordinate).reverse()
            newDots = [...topDots]
            for(const [rowIndex, row] of bottomDotsReversed.entries()) {
for(const [dotIndex, dot] of row.entries()) {
newDots[rowIndex][dotIndex] = newDots[rowIndex][dotIndex] || dot
}
}
}
this.dots = newDots
}
writeToFile(): void {
const outputString = this.dots.map(row => row.map(dot => dot ? '#' : ' ').join('')).join('\n')
writeFileSync('./output.txt', outputString)
}
getTrueCount(): number {
return this.dots.reduce((outerSum, row) => outerSum + row.reduce((innerSum, dot) => dot ? innerSum + 1 : innerSum, 0), 0)
}
}
const lines: string[] = String(readFileSync('./input.txt')).split('\n').map(line => line.trim())
const coordinates = lines.filter(line => /^\d+,\d+$/.test(line)).map(line => new Coordinate(line))
const folds = lines.filter(line => /^fold along [xy]=\d+$/.test(line)).map(line => new Fold(line))
const paper = new Paper(coordinates)
for(const fold of folds) {
paper.fold(fold)
console.log(paper.getTrueCount())
}
paper.writeToFile()
|
Fold
|