file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
collections.component.ts
|
import { Component, Input } from '@angular/core';
import { CollectionViewModel } from '@nakedobjects/view-models';
@Component({
selector: 'nof-collections',
templateUrl: 'collections.component.html',
styleUrls: ['collections.component.css']
})
export class Co
|
@Input()
collections: CollectionViewModel[];
classes(coll: CollectionViewModel) {
const hint = coll.presentationHint ?? '';
return `collection ${hint}`.trim();
}
}
|
llectionsComponent {
|
jwt_test.go
|
package mw_test
import (
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/labstack/echo"
"github.com/stretchr/testify/assert"
"github.com/ribice/gorsk/internal"
"github.com/ribice/gorsk/cmd/api/config"
"github.com/ribice/gorsk/internal/mock"
"github.com/ribice/gorsk/cmd/api/mw"
)
func hwHandler(c echo.Context) error {
return c.String(200, "Hello World")
}
func echoHandler(mw ...echo.MiddlewareFunc) *echo.Echo {
e := echo.New()
for _, v := range mw {
e.Use(v)
}
e.GET("/hello", hwHandler)
return e
}
func TestMWFunc(t *testing.T) {
cases := []struct {
name string
wantStatus int
header string
}{
{
name: "Empty header",
wantStatus: http.StatusUnauthorized,
},
{
name: "Header not containing Bearer",
header: "notBearer",
wantStatus: http.StatusUnauthorized,
},
{
name: "Invalid header",
header: mock.HeaderInvalid(),
wantStatus: http.StatusUnauthorized,
},
{
name: "Success",
header: mock.HeaderValid(),
wantStatus: http.StatusOK,
},
}
jwtCfg := &config.JWT{Realm: "testRealm", Secret: "jwtsecret", Duration: 60, SigningAlgorithm: "HS256"}
jwtMW := mw.NewJWT(jwtCfg)
ts := httptest.NewServer(echoHandler(jwtMW.MWFunc()))
defer ts.Close()
path := ts.URL + "/hello"
client := &http.Client{}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
req, _ := http.NewRequest("GET", path, nil)
req.Header.Set("Authorization", tt.header)
res, err := client.Do(req)
if err != nil {
t.Fatal("Cannot create http request")
}
assert.Equal(t, tt.wantStatus, res.StatusCode)
})
}
}
func TestGenerateToken(t *testing.T)
|
{
cases := []struct {
name string
wantToken string
req *model.User
}{
{
name: "Success",
req: &model.User{
Base: model.Base{
ID: 1,
},
Username: "johndoe",
Email: "johndoe@mail.com",
Role: &model.Role{
AccessLevel: model.SuperAdminRole,
},
CompanyID: 1,
LocationID: 1,
},
wantToken: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9",
},
}
jwtCfg := &config.JWT{Realm: "testRealm", Secret: "jwtsecret", Duration: 60, SigningAlgorithm: "HS256"}
for _, tt := range cases {
t.Run(tt.name, func(t *testing.T) {
jwt := mw.NewJWT(jwtCfg)
str, _, err := jwt.GenerateToken(tt.req)
assert.Nil(t, err)
assert.Equal(t, tt.wantToken, strings.Split(str, ".")[0])
})
}
}
|
|
styles.ts
|
import { StyleSheet } from "react-native";
import { Sizing, Colors, Typography } from "../../styles";
const styles = StyleSheet.create({
container: {
justifyContent: "space-between",
flexDirection: "row",
alignItems: "center",
paddingHorizontal: Sizing.layout.x30,
paddingVertical: Sizing.layout.x20,
},
infoWrapper: {
flexDirection: "row",
alignItems: "center",
},
titleWrapper: {
marginLeft: Sizing.layout.x30,
},
titleText: {
color: Colors.neutral.s900,
fontSize: Typography.fontSize.x20,
|
subTitleText: {
color: Colors.neutral.s400,
fontSize: Typography.fontSize.x10,
fontFamily: Typography.fontWeight.medium,
},
});
export default styles;
|
fontFamily: Typography.fontWeight.medium,
marginBottom: Sizing.layout.x10,
},
|
test_repository.py
|
# coding: utf-8
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from __future__ import unicode_literals
from __future__ import print_function
from __future__ import absolute_import
import os
import shutil
import unittest
from unittest import skipUnless
import pkg_resources
from itertools import count, product
from datetime import datetime
from zipfile import ZipFile
from io import BytesIO
from collections import defaultdict
from tg import tmpl_context as c, app_globals as g
import mock
from alluratest.tools import assert_equal, assert_in
from datadiff.tools import assert_equals
import tg
import ming
from ming.base import Object
from ming.orm import session, ThreadLocalORMSession
from testfixtures import TempDirectory
from alluratest.controller import setup_basic_test, setup_global_objects
from allura import model as M
from allura.model.repo_refresh import send_notifications
from allura.lib import helpers as h
from allura.webhooks import RepoPushWebhookSender
from allura.tests.model.test_repo import RepoImplTestBase
from forgesvn import model as SM
from forgesvn.model.svn import svn_path_exists
from forgesvn.tests import with_svn
from allura.tests.decorators import with_tool
import six
from io import open
from six.moves import range
class TestNewRepo(unittest.TestCase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
def setup_with_tools(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
c.app.repo.name = 'testsvn'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
self.rev = self.repo.commit('HEAD')
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_last_commit_for(self):
tree = self.rev.tree
for row in tree.ls():
assert row['last_commit']['author'] is not None
def test_commit(self):
latest_rev = 7
assert self.rev.primary() is self.rev
assert self.rev.index_id().startswith('allura/model/repo/Commit#')
self.rev.author_url
self.rev.committer_url
assert_equal(self.rev.tree._id, self.rev.tree_id)
assert_equal(self.rev.shorthand_id(), '[r{}]'.format(latest_rev))
assert_equal(self.rev.symbolic_ids, ([], []))
assert_equal(self.rev.url(), '/p/test/src/{}/'.format(latest_rev))
all_cis = list(self.repo.log(self.rev._id, limit=25))
assert_equal(len(all_cis), latest_rev)
self.rev.tree.ls()
assert_equal(self.rev.tree.readme(), ('README', 'This is readme\nAnother Line\n'))
assert_equal(self.rev.tree.path(), '/')
assert_equal(self.rev.tree.url(), '/p/test/src/{}/tree/'.format(latest_rev))
self.rev.tree.by_name['README']
assert self.rev.tree.is_blob('README') is True
assert_equal(self.rev.tree['a']['b']['c'].ls(), [])
self.assertRaises(KeyError, lambda: self.rev.tree['a']['b']['d'])
assert_equal(self.rev.authored_user, None)
assert_equal(self.rev.committed_user, None)
assert_equal(
sorted(self.rev.webhook_info.keys()),
sorted(['id', 'url', 'timestamp', 'message', 'author',
'committer', 'added', 'removed', 'renamed', 'modified', 'copied']))
class TestSVNRepo(unittest.TestCase, RepoImplTestBase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
@with_tool('test', 'SVN', 'svn-tags', 'SVN with tags')
def setup_with_tools(self):
setup_global_objects()
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
with h.push_context('test', 'src', neighborhood='Projects'):
c.app.repo.name = 'testsvn'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
with h.push_context('test', 'svn-tags', neighborhood='Projects'):
c.app.repo.name = 'testsvn-trunk-tags-branches'
c.app.repo.fs_path = repo_dir
self.svn_tags = c.app.repo
self.svn_tags.refresh()
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
h.set_context('test', 'src', neighborhood='Projects')
def test_init(self):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
shutil.rmtree(dirname)
def test_fork(self):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
repo._impl.clone_from('file://' + repo_path)
assert not os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
assert os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
assert os.access(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
hook_data = f.read()
self.assertIn(
'curl -s http://localhost/auth/refresh_repo/p/test/src/\n',
hook_data)
self.assertIn('exec $DIR/post-commit-user "$@"\n', hook_data)
repo.refresh(notify=False)
assert len(list(repo.log(limit=100)))
shutil.rmtree(dirname)
@mock.patch('forgesvn.model.svn.tg')
def test_can_hotcopy(self, tg):
from forgesvn.model.svn import SVNImplementation
func = SVNImplementation.can_hotcopy
obj = mock.Mock(spec=SVNImplementation)
for combo in product(
['file:///myfile', 'http://myfile'],
[True, False],
['version 1.7', 'version 1.6', 'version 2.0.3']):
source_url = combo[0]
tg.config = {'scm.svn.hotcopy': combo[1]}
stdout = combo[2]
obj.check_call.return_value = stdout, '', 0
expected = (source_url.startswith('file://') and
tg.config['scm.svn.hotcopy'] and
stdout != 'version 1.6')
result = func(obj, source_url)
assert result == expected
@mock.patch('forgesvn.model.svn.g.post_event')
def test_clone(self, post_event):
repo = SM.Repository(
name='testsvn',
fs_path=g.tmpdir + '/',
url_path='/test/',
tool='svn',
status='creating')
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
dirname = os.path.join(repo.fs_path, repo.name)
if os.path.exists(dirname):
shutil.rmtree(dirname)
repo.init()
repo._impl.clone_from('file://' + repo_path)
assert not os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/pre-revprop-change'))
assert os.path.exists(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'))
assert os.access(
os.path.join(g.tmpdir, 'testsvn/hooks/post-commit'), os.X_OK)
with open(os.path.join(g.tmpdir, 'testsvn/hooks/post-commit')) as f:
c = f.read()
self.assertIn(
'curl -s http://localhost/auth/refresh_repo/p/test/src/\n', c)
self.assertIn('exec $DIR/post-commit-user "$@"\n', c)
repo.refresh(notify=False)
assert len(list(repo.log(limit=100)))
shutil.rmtree(dirname)
def test_index(self):
i = self.repo.index()
assert i['type_s'] == 'SVN Repository', i
def test_log_id_only(self):
entries = list(self.repo.log(id_only=True, limit=25))
assert_equal(entries, [7, 6, 5, 4, 3, 2, 1])
def test_log(self):
entries = list(self.repo.log(id_only=False, limit=25))
assert_equal(entries[len(entries)-6:], # only 6, so this test doesn't have to change when commits added
[
{'parents': [5],
'refs': [],
'committed': {
'date': datetime(2013, 11, 8, 13, 38, 11, 152821),
'name': 'coldmind', 'email': ''},
'message': '',
'rename_details': {},
'id': 6,
'authored': {
'date': datetime(2013, 11, 8, 13, 38, 11, 152821),
'name': 'coldmind',
'email': ''
}, 'size': None},
{'parents': [4],
'refs': [],
'committed': {
'date': datetime(2010, 11, 18, 20, 14, 21, 515743),
'name': 'rick446',
'email': ''},
'message': 'Copied a => b',
'rename_details': {},
'id': 5,
'authored': {
'date': datetime(2010, 11, 18, 20, 14, 21, 515743),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [3],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 59, 383719),
'name': 'rick446',
'email': ''},
'message': 'Remove hello.txt',
'rename_details': {},
'id': 4,
'authored': {
'date': datetime(2010, 10, 8, 15, 32, 59, 383719),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [2],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'name': 'rick446',
'email': ''},
'message': 'Modify readme',
'rename_details': {},
'id': 3,
'authored':
{'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [1],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 36, 221863),
'name': 'rick446',
'email': ''},
'message': 'Add path',
'rename_details': {},
'id': 2,
'authored': {
'date': datetime(2010, 10, 8, 15, 32, 36, 221863),
'name': 'rick446',
'email': ''},
'size': None},
{'parents': [],
'refs': [],
'committed': {
'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'name': 'rick446',
'email': ''},
'message': 'Create readme',
'rename_details': {},
'id': 1,
'authored': {
'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'name': 'rick446',
'email': ''},
'size': None}])
def test_log_file(self):
entries = list(self.repo.log(path='/README', id_only=False, limit=25))
assert_equal(entries, [
{'authored': {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'email': '',
'name': 'rick446'},
'committed': {'date': datetime(2010, 10, 8, 15, 32, 48, 272296),
'email': '',
'name': 'rick446'},
'id': 3,
'message': 'Modify readme',
'parents': [2],
'refs': [],
'size': 28,
'rename_details': {}},
{'authored': {'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'email': '',
'name': 'rick446'},
'committed': {'date': datetime(2010, 10, 8, 15, 32, 7, 238375),
'email': '',
'name': 'rick446'},
'id': 1,
'message': 'Create readme',
'parents': [],
'refs': [],
'size': 15,
'rename_details': {}},
])
def test_is_file(self):
assert self.repo.is_file('/README')
assert not self.repo.is_file('/a')
def test_paged_diffs(self):
entry = self.repo.commit(next(self.repo.log(2, id_only=True, limit=1)))
self.assertEqual(entry.diffs, entry.paged_diffs())
self.assertEqual(entry.diffs, entry.paged_diffs(start=0))
added_expected = entry.diffs.added[1:3]
expected = dict(
copied=[], changed=[], removed=[], renamed=[],
added=added_expected, total=4)
actual = entry.paged_diffs(start=1, end=3)
self.assertEqual(expected, actual)
fake_id = self.repo._impl._oid(100)
empty = M.repository.Commit(_id=fake_id, repo=self.repo).paged_diffs()
self.assertEqual(sorted(actual.keys()), sorted(empty.keys()))
def test_diff_create_file(self):
entry = self.repo.commit(next(self.repo.log(1, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], renamed=[],
removed=[], added=['/README'], total=1))
def test_diff_create_path(self):
entry = self.repo.commit(next(self.repo.log(2, id_only=True, limit=1)))
actual = entry.diffs
actual.added = sorted(actual.added)
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], removed=[], renamed=[],
added=sorted([
'/a', '/a/b', '/a/b/c',
'/a/b/c/hello.txt']), total=4))
def test_diff_modify_file(self):
entry = self.repo.commit(next(self.repo.log(3, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=['/README'], renamed=[],
removed=[], added=[], total=1))
def test_diff_delete(self):
entry = self.repo.commit(next(self.repo.log(4, id_only=True, limit=1)))
self.assertEqual(
entry.diffs, dict(
copied=[], changed=[], renamed=[],
removed=['/a/b/c/hello.txt'], added=[], total=1))
def test_diff_copy(self):
entry = self.repo.commit(next(self.repo.log(5, id_only=True, limit=1)))
assert_equals(dict(entry.diffs), dict(
copied=[{'new': '/b', 'old': '/a', 'ratio': 1}], renamed=[],
changed=[], removed=[], added=[], total=1))
def test_commit(self):
entry = self.repo.commit(1)
assert entry.committed.name == 'rick446'
assert entry.message
def test_svn_path_exists(self):
repo_path = pkg_resources.resource_filename(
'forgesvn', 'tests/data/testsvn')
assert svn_path_exists("file://%s/a" % repo_path)
assert svn_path_exists("file://%s" % repo_path)
assert not svn_path_exists("file://%s/badpath" % repo_path)
with mock.patch('forgesvn.model.svn.pysvn') as pysvn:
svn_path_exists('dummy')
pysvn.Client.return_value.info2.assert_called_once_with(
'dummy',
revision=pysvn.Revision.return_value,
recurse=False)
@skipUnless(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
def test_tarball(self):
tmpdir = tg.config['scm.repos.tarball.root']
assert_equal(self.repo.tarball_path,
os.path.join(tmpdir, 'svn/t/te/test/testsvn'))
assert_equal(self.repo.tarball_url('1'),
'file:///svn/t/te/test/testsvn/test-src-r1.zip')
self.repo.tarball('1')
assert os.path.isfile(
os.path.join(tmpdir, "svn/t/te/test/testsvn/test-src-r1.zip"))
tarball_zip = ZipFile(
os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-r1.zip'), 'r')
assert_equal(tarball_zip.namelist(),
['test-src-r1/', 'test-src-r1/README'])
shutil.rmtree(self.repo.tarball_path.encode('utf-8'),
ignore_errors=True)
@skipUnless(os.path.exists(tg.config.get('scm.repos.tarball.zip_binary', '/usr/bin/zip')), 'zip binary is missing')
def test_tarball_paths(self):
rev = '19'
h.set_context('test', 'svn-tags', neighborhood='Projects')
tmpdir = tg.config['scm.repos.tarball.root']
tarball_path = os.path.join(tmpdir, 'svn/t/te/test/testsvn-trunk-tags-branches/')
# a tag
self.svn_tags.tarball(rev, '/tags/tag-1.0/')
fn = tarball_path + 'test-svn-tags-r19-tags-tag-1.0.zip'
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
tag_content = sorted(['test-svn-tags-r19-tags-tag-1.0/',
'test-svn-tags-r19-tags-tag-1.0/svn-commit.tmp',
'test-svn-tags-r19-tags-tag-1.0/README'])
assert_equal(sorted(snapshot.namelist()), tag_content)
os.remove(fn)
# a directory (of tags)
self.svn_tags.tarball(rev, '/tags/')
fn = tarball_path + 'test-svn-tags-r19-tags.zip'
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
tags_content = sorted(['test-svn-tags-r19-tags/',
'test-svn-tags-r19-tags/tag-1.0/',
'test-svn-tags-r19-tags/tag-1.0/svn-commit.tmp',
'test-svn-tags-r19-tags/tag-1.0/README'])
assert_equal(sorted(snapshot.namelist()), tags_content)
os.remove(fn)
# no path, but there are trunk in the repo
# expect snapshot of trunk
self.svn_tags.tarball(rev)
fn = tarball_path + 'test-svn-tags-r19-trunk.zip'
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
trunk_content = sorted(['test-svn-tags-r19-trunk/',
'test-svn-tags-r19-trunk/aaa.txt',
'test-svn-tags-r19-trunk/bbb.txt',
'test-svn-tags-r19-trunk/ccc.txt',
'test-svn-tags-r19-trunk/README'])
assert_equal(sorted(snapshot.namelist()), trunk_content)
os.remove(fn)
# no path, and no trunk dir
# expect snapshot of repo root
h.set_context('test', 'src', neighborhood='Projects')
fn = os.path.join(tmpdir, 'svn/t/te/test/testsvn/test-src-r1.zip')
self.repo.tarball('1')
assert os.path.isfile(fn), fn
snapshot = ZipFile(fn, 'r')
assert_equal(snapshot.namelist(), ['test-src-r1/', 'test-src-r1/README'])
shutil.rmtree(os.path.join(tmpdir, 'svn/t/te/test/testsvn/'),
ignore_errors=True)
shutil.rmtree(tarball_path, ignore_errors=True)
def test_is_empty(self):
assert not self.repo.is_empty()
with TempDirectory() as d:
repo2 = SM.Repository(
name='test',
fs_path=d.path,
url_path='/test/',
tool='svn',
status='creating')
repo2.init()
assert repo2.is_empty()
repo2.refresh()
ThreadLocalORMSession.flush_all()
assert repo2.is_empty()
def test_webhook_payload(self):
sender = RepoPushWebhookSender()
all_commits = list(self.repo.all_commit_ids())
start = len(all_commits) - 6 # only get a few so test doesn't have to change after new testdata commits
cids = all_commits[start:start+2]
payload = sender.get_payload(commit_ids=cids)
expected_payload = {
'size': 2,
'after': 'r6',
'before': 'r4',
'commits': [{
'id': 'r6',
'url': 'http://localhost/p/test/src/6/',
'timestamp': datetime(2013, 11, 8, 13, 38, 11, 152000),
'message': '',
'author': {'name': 'coldmind',
'email': '',
'username': ''},
'committer': {'name': 'coldmind',
'email': '',
'username': ''},
'added': ['/ЗРЯЧИЙ_ТА_ПОБАЧИТЬ'],
'removed': [],
'modified': [],
'copied': [],
'renamed': [],
}, {
'id': 'r5',
'url': 'http://localhost/p/test/src/5/',
'timestamp': datetime(2010, 11, 18, 20, 14, 21, 515000),
'message': 'Copied a => b',
'author': {'name': 'rick446',
'email': '',
'username': ''},
'committer': {'name': 'rick446',
'email': '',
'username': ''},
'added': [],
'removed': [],
'modified': [],
'copied': [
{'new': '/b', 'old': '/a', 'ratio': 1},
],
'renamed': [],
}],
'repository': {
'name': 'SVN',
'full_name': '/p/test/src/',
'url': 'http://localhost/p/test/src/',
},
}
assert_equals(payload, expected_payload)
class TestSVNRev(unittest.TestCase):
def setUp(self):
setup_basic_test()
self.setup_with_tools()
@with_svn
def setup_with_tools(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
c.app.repo.name = 'testsvn'
c.app.repo.fs_path = repo_dir
self.repo = c.app.repo
self.repo.refresh()
self.rev = self.repo.commit(1)
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
def test_url(self):
assert self.rev.url().endswith('/1/')
def test_primary(self):
assert self.rev.primary() == self.rev
def test_shorthand(self):
assert self.rev.shorthand_id() == '[r1]'
def test_diff(self):
diffs = (self.rev.diffs.added
+ self.rev.diffs.removed
+ self.rev.diffs.changed
+ self.rev.diffs.copied)
for d in diffs:
print(d)
def _oid(self, rev_id):
return '%s:%s' % (self.repo._id, rev_id)
def test_log(self):
# path only
commits = list(self.repo.log(self.repo.head, id_only=True, limit=25))
assert_equal(commits, [7, 6, 5, 4, 3, 2, 1])
commits = list(self.repo.log(self.repo.head, 'README', id_only=True, limit=25))
assert_equal(commits, [3, 1])
commits = list(self.repo.log(1, 'README', id_only=True, limit=25))
assert_equal(commits, [1])
commits = list(self.repo.log(self.repo.head, 'a/b/c/', id_only=True, limit=25))
assert_equal(commits, [4, 2])
commits = list(self.repo.log(3, 'a/b/c/', id_only=True, limit=25))
assert_equal(commits, [2])
assert_equal(
list(self.repo.log(self.repo.head, 'does/not/exist', id_only=True, limit=25)), [])
def test_notification_email(self):
setup_global_objects()
h.set_context('test', 'src', neighborhood='Projects')
repo_dir = pkg_resources.resource_filename(
'forgesvn', 'tests/data/')
self.repo = SM.Repository(
name='testsvn',
fs_path=repo_dir,
url_path='/test/',
tool='svn',
status='creating')
self.repo.refresh()
ThreadLocalORMSession.flush_all()
send_notifications(self.repo, [self.repo.rev_to_commit_id(1)])
ThreadLocalORMSession.flush_all()
n = M.Notification.query.find({'subject': '[test:src] New commit [r1] by rick446'}).first()
assert n
assert_in('By rick446', n.text)
assert_in('Create readme', n.text)
class _Test(unittest.TestCase):
idgen = ('obj_%d' % i for i in count())
def _make_tree(self, object_id, **kwargs):
t, isnew = M.repository.Tree.upsert(object_id)
repo = getattr(self, 'repo', None)
t.repo = repo
for k, v in six.iteritems(kwargs):
if isinstance(v, six.string_types):
obj = M.repository.Blob(
t, k, next(self.idgen))
t.blob_ids.append(Object(
name=k, id=obj._id))
else:
obj = self._make_tree(next(self.idgen), **v)
t.tree_ids.append(Object(
name=k, id=obj._id))
session(t).flush()
return t
def _make_commit(self, object_id, **tree_parts):
ci, isnew = M.repository.Commit.upsert(object_id)
if isnew:
ci.committed.email = c.user.email_addresses[0]
ci.authored.email = c.user.email_addresses[0]
dt = datetime.utcnow()
# BSON datetime resolution is to 1 millisecond, not 1 microsecond
# like Python. Round this now so it'll match the value that's
# pulled from MongoDB in the tests.
ci.authored.date = dt.replace(microsecond=dt.microsecond // 1000 * 1000)
ci.message = 'summary\n\nddescription'
ci.set_context(self.repo)
ci.tree_id = 't_' + object_id
ci.tree = self._make_tree(ci.tree_id, **tree_parts)
return ci, isnew
def _make_log(self, ci):
session(ci).flush(ci)
def setUp(self):
setup_basic_test()
setup_global_objects()
ThreadLocalORMSession.flush_all()
ThreadLocalORMSession.close_all()
self.prefix = tg.config.get('scm.repos.root', '/')
class _TestWithRepo(_Test):
def setUp(self):
super(_TestWithRepo, self).setUp()
h.set_context('test', neighborhood='Projects')
c.project.install_app('svn', 'test1')
h.set_context('test', 'test1', neighborhood='Projects')
self.repo = M.Repository(name='test1', tool='svn')
self.repo._impl = mock.Mock(spec=M.RepositoryImplementation())
self.repo._impl.shorthand_for_commit = M.RepositoryImplementation.shorthand_for_commit
self.repo._impl.url_for_commit = (
lambda *a, **kw: M.RepositoryImplementation.url_for_commit(
self.repo._impl, *a, **kw))
self.repo._impl._repo = self.repo
self.repo._impl.all_commit_ids = lambda *a, **kw: []
self.repo._impl.commit().symbolic_ids = None
ThreadLocalORMSession.flush_all()
class _TestWithRepoAndCommit(_TestWithRepo):
def setUp(self):
super(_TestWithRepoAndCommit, self).setUp()
self.ci, isnew = self._make_commit('foo')
ThreadLocalORMSession.flush_all()
# ThreadLocalORMSession.close_all()
class TestRepo(_TestWithRepo):
def test_create(self):
assert self.repo.fs_path == os.path.join(self.prefix, 'svn/p/test/')
assert self.repo.url_path == '/p/test/'
assert self.repo.full_fs_path == os.path.join(
self.prefix, 'svn/p/test/test1')
def test_passthrough(self):
argless = ['init']
for fn in argless:
getattr(self.repo, fn)()
getattr(self.repo._impl, fn).assert_called_with()
unary = ['commit', 'open_blob']
for fn in unary:
getattr(self.repo, fn)('foo')
getattr(self.repo._impl, fn).assert_called_with('foo')
def test_shorthand_for_commit(self):
self.assertEqual(
self.repo.shorthand_for_commit('a' * 40),
'[aaaaaa]')
def test_url_for_commit(self):
self.assertEqual(
self.repo.url_for_commit('a' * 40),
'/p/test/test1/ci/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/')
@mock.patch('allura.model.repository.g.post_event')
def test_init_as_clone(self, post_event):
self.repo.init_as_clone('srcpath', 'srcname', 'srcurl')
assert self.repo.upstream_repo.name == 'srcname'
assert self.repo.upstream_repo.url == 'srcurl'
assert self.repo._impl.clone_from.called_with('srcpath')
post_event.assert_called_once_with('repo_cloned', 'srcurl', 'srcpath')
def test_latest(self):
ci = mock.Mock()
self.repo._impl.commit = mock.Mock(return_value=ci)
assert self.repo.latest() is ci
def test_index(self):
i = self.repo.index()
assert i['type_s'] == 'Repository', i
assert i['name_s'] == 'test1', i
def test_scm_host_url(self):
assert_equal(self.repo.clone_url('rw', 'nobody'),
'svn+ssh://nobody@localhost:8022/scm-repo/p/test/test1/')
assert_equal(self.repo.clone_url('https', 'nobody'),
'https://nobody@localhost:8022/scm-repo/p/test/test1/')
with h.push_config(self.repo.app.config.options, external_checkout_url='https://$username@foo.com/'):
assert_equal(self.repo.clone_url('https', 'user'),
'https://user@foo.com/')
def test_guess_type(self):
assert self.repo.guess_type('foo.txt') == ('text/plain', None)
assert self.repo.guess_type('foo.gbaer') == (
'application/octet-stream', None)
assert self.repo.guess_type('foo.html') == ('text/html', None)
assert self.repo.guess_type('.gitignore') == ('text/plain', None)
def test_refresh(self):
committer_name = 'Test Committer'
committer_email = 'test@example.com'
ci = mock.Mock()
ci.authored.name = committer_name
ci.committed.name = committer_name
ci.committed.email = committer_email
ci.author_url = '/u/test-committer/'
ci.activity_name = '[deadbeef]'
ci.activity_url = 'url'
ci.activity_extras = {}
del ci.node_id
self.repo._impl.commit = mock.Mock(return_value=ci)
self.repo._impl.new_commits = mock.Mock(
return_value=['foo%d' % i for i in range(100)])
self.repo._impl.all_commit_ids = mock.Mock(
return_value=['foo%d' % i for i in range(100)])
self.repo.symbolics_for_commit = mock.Mock(
return_value=[['master', 'branch'], []])
def refresh_commit_info(oid, seen, lazy=False):
M.repository.CommitDoc(dict(
authored=dict(
name=committer_name,
date=datetime(2010, 10, 8, 15, 32, 48, 0),
email=committer_email),
_id=oid)).m.insert()
self.repo._impl.refresh_commit_info = refresh_commit_info
_id = lambda oid: getattr(oid, '_id', str(oid))
self.repo.shorthand_for_commit = lambda oid: '[' + _id(oid) + ']'
self.repo.url_for_commit = lambda oid: '/ci/' + _id(oid) + '/'
self.repo.refresh()
ThreadLocalORMSession.flush_all()
notifications = M.Notification.query.find().all()
for n in notifications:
if '100 new commits' in n.subject:
assert_in('By Test Committer on 10/08/2010 15:32', n.text)
assert_in('http://localhost/ci/foo99/', n.text)
break
else:
assert False, 'Did not find notification'
assert M.Feed.query.find(dict(
author_name=committer_name)).count() == 100
def test_refresh_private(self):
ci = mock.Mock()
self.repo._impl.commit = mock.Mock(return_value=ci)
self.repo._impl.new_commits = mock.Mock(
return_value=['foo%d' % i for i in range(100)])
# make unreadable by *anonymous, so additional notification logic
# executes
self.repo.acl = []
c.project.acl = []
self.repo.refresh()
def test_push_upstream_context(self):
self.repo.init_as_clone('srcpath', '/p/test/svn/', '/p/test/svn/')
old_app_instance = M.Project.app_instance
try:
M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
config=ming.base.Object(_id=None)))
with self.repo.push_upstream_context():
assert c.project.shortname == 'test'
finally:
M.Project.app_instance = old_app_instance
def test_pending_upstream_merges(self):
self.repo.init_as_clone('srcpath', '/p/test/svn/', '/p/test/svn/')
old_app_instance = M.Project.app_instance
try:
M.Project.app_instance = mock.Mock(return_value=ming.base.Object(
config=ming.base.Object(_id=None)))
self.repo.pending_upstream_merges()
finally:
M.Project.app_instance = old_app_instance
class TestRepoObject(_TestWithRepoAndCommit):
def test_upsert(self):
obj0, isnew0 = M.repository.Tree.upsert('foo1')
obj1, isnew1 = M.repository.Tree.upsert('foo1')
assert obj0 is obj1
assert isnew0 and not isnew1
def test_artifact_methods(self):
assert self.ci.index_id(
) == 'allura/model/repo/Commit#foo', self.ci.index_id()
assert self.ci.primary() is self.ci, self.ci.primary()
class TestCommit(_TestWithRepo):
    """Tests for Commit model behaviour: upsert, urls, tree access, diffs."""

    def setUp(self):
        super(TestCommit, self).setUp()
        # Commit 'foo' with tree layout: a/ -> (a/ -> (a, b), b)
        self.ci, isnew = self._make_commit(
            'foo',
            a=dict(
                a=dict(
                    a='',
                    b='',),
                b=''))
        self.tree = self.ci.tree
        # Use the base implementation for shorthand/url formatting so the
        # assertions below exercise the generic formatting code paths.
        impl = M.RepositoryImplementation()
        impl._repo = self.repo
        self.repo._impl.shorthand_for_commit = impl.shorthand_for_commit
        self.repo._impl.url_for_commit = impl.url_for_commit

    def test_upsert(self):
        # Same oid upserted twice yields the same object, new only once.
        obj0, isnew0 = M.repository.Commit.upsert('foo')
        obj1, isnew1 = M.repository.Commit.upsert('foo')
        assert obj0 is obj1
        assert not isnew1
        u = M.User.by_username('test-admin')
        assert self.ci.author_url == u.url()
        assert self.ci.committer_url == u.url()
        assert self.ci.tree is self.tree
        assert self.ci.summary == 'summary'
        assert self.ci.shorthand_id() == '[foo]'
        assert self.ci.url() == '/p/test/test1/ci/foo/'

    def test_get_path(self):
        # Deepest path resolves to a Blob, an intermediate path to a Tree.
        b = self.ci.get_path('a/a/a')
        assert isinstance(b, M.repository.Blob)
        x = self.ci.get_path('a/a')
        assert isinstance(x, M.repository.Tree)

    def _unique_blobs(self):
        # Returns an open_blob stand-in giving each distinct blob path a
        # distinct, stable integer payload.
        def counter():
            counter.i += 1
            return counter.i
        counter.i = 0
        blobs = defaultdict(counter)
        return lambda blob: BytesIO(str(blobs[blob.path()]))

    def test_diffs_file_renames(self):
        def open_blob(blob):
            blobs = {
                'a': 'Leia',
                '/b/a/a': 'Darth Vader',
                '/b/a/b': 'Luke Skywalker',
                '/b/b': 'Death Star will destroy you',
                '/b/c': 'Luke Skywalker',  # moved from /b/a/b
                # moved from /b/b and modified
                '/b/a/z': 'Death Star will destroy you\nALL',
            }
            return BytesIO(blobs.get(blob.path(), ''))
        self.repo._impl.open_blob = open_blob
        self.repo._impl.commit = mock.Mock(return_value=self.ci)

        # First commit: everything added, nothing else.
        self.repo._impl.paged_diffs.return_value = {
            'added': ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'],
            'changed': [],
            'copied': [],
            'renamed': [],
            'removed': [],
            'total': 5,
        }
        assert_equal(self.ci.diffs.added,
                     ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
        assert (self.ci.diffs.copied
                == self.ci.diffs.changed
                == self.ci.diffs.removed
                == [])

        # Second commit: old tree removed, new tree added.
        ci, isnew = self._make_commit(
            'bar',
            b=dict(
                a=dict(
                    a='',
                    b='',),
                b=''))
        ci.parent_ids = ['foo']
        self._make_log(ci)
        self.repo._impl.paged_diffs.return_value = {
            'added': ['b', 'b/a', 'b/a/a', 'b/a/b', 'b/b'],
            'renamed': [],
            'copied': [],
            'changed': [],
            'removed': ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'],
            'total': 10,
        }
        assert_equal(ci.diffs.added, ['b', 'b/a', 'b/a/a', 'b/a/b', 'b/b'])
        assert_equal(ci.diffs.removed, ['a', 'a/a', 'a/a/a', 'a/a/b', 'a/b'])
        assert (ci.diffs.copied
                == ci.diffs.changed
                == [])

        # Third commit: files moved; copies reported with ratio 1 / empty diff.
        ci, isnew = self._make_commit(
            'baz',
            b=dict(
                a=dict(
                    z=''),
                c=''))
        ci.parent_ids = ['bar']
        self._make_log(ci)
        self.repo._impl.paged_diffs.return_value = {
            'added': ['b/c', 'b/a/z'],
            'removed': ['/b/a/b', 'b/b'],
            'changed': [],
            'copied': [
                {
                    'new': 'b/c',
                    'old': 'b/a/b',
                    # 'ratio'/'diff' restored here (they were dropped from
                    # this first entry); the copied[1] assertions below
                    # require both keys to be present.
                    'ratio': 1,
                    'diff': '',
                },
                {
                    'new': 'b/a/z',
                    'old': 'b/b',
                    'ratio': 1,
                    'diff': '',
                },
            ],
            'renamed': [],
            'total': 2
        }
        assert_equal(ci.diffs.added, ['b/a/z', 'b/c'])
        assert_equal(ci.diffs.changed, [])
        assert_equal(ci.diffs.removed, ['/b/a/b', 'b/b'])
        # see mock for open_blob
        assert_equal(len(ci.diffs.copied), 2)
        assert_equal(ci.diffs.copied[1]['old'], 'b/a/b')
        assert_equal(ci.diffs.copied[1]['new'], 'b/c')
        assert_equal(ci.diffs.copied[1]['ratio'], 1)
        assert_equal(ci.diffs.copied[1]['diff'], '')
        assert_equal(ci.diffs.copied[0]['old'], 'b/b')
        assert_equal(ci.diffs.copied[0]['new'], 'b/a/z')

    def test_context(self):
        # Smoke test: context() must not raise.
        self.ci.context()
class TestRename(unittest.TestCase):
    """SVN log should surface rename details for renamed files."""

    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()

    @with_svn
    def setup_with_tools(self):
        setup_global_objects()
        h.set_context('test', 'src', neighborhood='Projects')
        # Point the tool at the bundled fixture repository on disk.
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        c.app.repo.name = 'testsvn-rename'
        c.app.repo.fs_path = repo_dir
        self.repo = c.app.repo
        self.repo.refresh()
        self.rev = self.repo.commit('HEAD')
        # Flush/close ORM sessions so later queries see refreshed state.
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

    def test_log_file_with_rename(self):
        # In the fixture, /dir/a.txt was renamed to /dir/b.txt in r3.
        entry = list(self.repo.log(path='/dir/b.txt', id_only=False, limit=1))[0]
        assert_equal(entry['id'], 3)
        assert_equal(entry['rename_details']['path'], '/dir/a.txt')
        assert_equal(
            entry['rename_details']['commit_url'],
            self.repo.url_for_commit(2)  # previous revision
        )

    def test_check_changed_path(self):
        # A copy of a directory maps files under it to the copy source.
        changed_path = {'copyfrom_path': '/test/path', 'path': '/test/path2'}
        result = self.repo._impl._check_changed_path(
            changed_path, '/test/path2/file.txt')
        assert_equal({'path': '/test/path2/file.txt',
                      'copyfrom_path': '/test/path/file.txt'}, result)
class TestDirectRepoAccess(object):
    """Verify diffs computed directly from the on-disk SVN fixture repo."""

    def setUp(self):
        setup_basic_test()
        self.setup_with_tools()

    @with_svn
    def setup_with_tools(self):
        setup_global_objects()
        h.set_context('test', 'src', neighborhood='Projects')
        # Point the tool at the bundled fixture repository on disk.
        repo_dir = pkg_resources.resource_filename(
            'forgesvn', 'tests/data/')
        c.app.repo.name = 'testsvn'
        c.app.repo.fs_path = repo_dir
        self.repo = c.app.repo
        self.repo.refresh()
        self.rev = self.repo.commit('HEAD')
        ThreadLocalORMSession.flush_all()
        ThreadLocalORMSession.close_all()

    def test_paged_diffs(self):
        # r6: single added file with a non-ASCII name.
        _id = self.repo._impl._oid(6)
        diffs = self.repo.commit(_id).diffs
        expected = {
            'added': ['/ЗРЯЧИЙ_ТА_ПОБАЧИТЬ'],
            'removed': [],
            'changed': [],
            'copied': [],
            'renamed': [],
            'total': 1,
        }
        assert_equals(diffs, expected)

        # r2: nested directories plus a file, all added.
        _id = self.repo._impl._oid(2)
        diffs = self.repo.commit(_id).diffs
        expected = {
            'added': ['/a', '/a/b', '/a/b/c', '/a/b/c/hello.txt'],
            'removed': [],
            'changed': [],
            'renamed': [],
            'copied': [],
            'total': 4,
        }
        assert_equals(diffs, expected)

        # r3: single modified file.
        _id = self.repo._impl._oid(3)
        diffs = self.repo.commit(_id).diffs
        expected = {
            'added': [],
            'removed': [],
            'renamed': [],
            'changed': ['/README'],
            'copied': [],
            'total': 1,
        }
        assert_equals(diffs, expected)

        # r4: single removed file.
        _id = self.repo._impl._oid(4)
        diffs = self.repo.commit(_id).diffs
        expected = {
            'added': [],
            'removed': ['/a/b/c/hello.txt'],
            'changed': [],
            'renamed': [],
            'copied': [],
            'total': 1,
        }
        assert_equals(diffs, expected)
|
'ratio': 1,
'diff': '',
|
on-demand-entry-handler.ts
|
import { EventEmitter } from 'events'
import { IncomingMessage, ServerResponse } from 'http'
import { join, posix } from 'path'
import { parse } from 'url'
import { webpack, isWebpack5 } from 'next/dist/compiled/webpack/webpack'
import * as Log from '../../build/output/log'
import { normalizePagePath, normalizePathSep } from '../normalize-page-path'
import { pageNotFoundError } from '../require'
import { findPageFile } from '../lib/find-page-file'
import getRouteFromEntrypoint from '../get-route-from-entrypoint'
import { convertPageFilePathToRoutePath } from '../../build/utils'
// Lifecycle states for an on-demand page entry.
export const ADDED = Symbol('added')
export const BUILDING = Symbol('building')
export const BUILT = Symbol('built')

// Registry of pages currently managed by the on-demand-entry handler,
// keyed by normalized route path.
// NOTE(review): exported as a mutable binding and reassigned/mutated at
// runtime — confirm external readers before refactoring to a const Map.
export let entries: {
  [page: string]: {
    serverBundlePath: string
    clientBundlePath: string
    absolutePagePath: string
    status?: typeof ADDED | typeof BUILDING | typeof BUILT
    lastActiveTime?: number
  }
} = {}
/**
 * Wires on-demand page compilation into the dev-server webpack watcher:
 * pages are added to `entries` when first requested (ensurePage), marked
 * BUILT when webpack finishes them, kept alive by client pings, and
 * disposed after `maxInactiveAge` of inactivity.
 */
export default function onDemandEntryHandler(
  watcher: any,
  multiCompiler: webpack.MultiCompiler,
  {
    pagesDir,
    pageExtensions,
    maxInactiveAge,
    pagesBufferLength,
  }: {
    pagesDir: string
    pageExtensions: string[]
    maxInactiveAge: number
    pagesBufferLength: number
  }
) {
  const { compilers } = multiCompiler
  const invalidator = new Invalidator(watcher, multiCompiler)

  // Most-recently-pinged pages (bounded by pagesBufferLength); these are
  // exempt from disposal.
  let lastAccessPages = ['']
  // Emits the page path once its build completes; ensurePage awaits this.
  let doneCallbacks: EventEmitter | null = new EventEmitter()

  // Any compiler starting a build flips the invalidator into "building".
  for (const compiler of compilers) {
    compiler.hooks.make.tap(
      'NextJsOnDemandEntries',
      (_compilation: webpack.compilation.Compilation) => {
        invalidator.startBuilding()
      }
    )
  }

  // Map webpack entrypoint names back to route paths.
  function getPagePathsFromEntrypoints(entrypoints: any): string[] {
    const pagePaths = []
    for (const entrypoint of entrypoints.values()) {
      const page = getRouteFromEntrypoint(entrypoint.name)
      if (page) {
        pagePaths.push(page)
      }
    }
    return pagePaths
  }

  // After each build, mark the pages that were BUILDING as BUILT and
  // notify any waiters.
  multiCompiler.hooks.done.tap('NextJsOnDemandEntries', (multiStats) => {
    const [clientStats, serverStats] = multiStats.stats
    const pagePaths = new Set([
      ...getPagePathsFromEntrypoints(clientStats.compilation.entrypoints),
      ...getPagePathsFromEntrypoints(serverStats.compilation.entrypoints),
    ])

    for (const page of pagePaths) {
      const entry = entries[page]
      if (!entry) {
        continue
      }

      if (entry.status !== BUILDING) {
        continue
      }

      entry.status = BUILT
      entry.lastActiveTime = Date.now()
      doneCallbacks!.emit(page)
    }

    invalidator.doneBuilding()
  })

  // Periodically evict pages that have not been pinged recently; unref so
  // this timer does not keep the process alive.
  const disposeHandler = setInterval(function () {
    disposeInactiveEntries(watcher, lastAccessPages, maxInactiveAge)
  }, 5000)

  disposeHandler.unref()

  // Handle a keep-alive ping from the client for page `pg`.
  function handlePing(pg: string) {
    const page = normalizePathSep(pg)
    const entryInfo = entries[page]
    let toSend

    // If there's no entry, it may have been invalidated and needs to be re-built.
    if (!entryInfo) {
      // if (page !== lastEntry) client pings, but there's no entry for page
      return { invalid: true }
    }

    // 404 is an on demand entry but when a new page is added we have to refresh the page
    if (page === '/_error') {
      toSend = { invalid: true }
    } else {
      toSend = { success: true }
    }

    // We don't need to maintain active state of anything other than BUILT entries
    if (entryInfo.status !== BUILT) return

    // If there's an entryInfo
    if (!lastAccessPages.includes(page)) {
      lastAccessPages.unshift(page)

      // Maintain the buffer max length
      if (lastAccessPages.length > pagesBufferLength) {
        lastAccessPages.pop()
      }
    }
    entryInfo.lastActiveTime = Date.now()
    return toSend
  }

  return {
    /**
     * Ensure `page` has a webpack entry and is built; resolves when the
     * build completes. Rejects with a not-found error when no page file
     * matches.
     */
    async ensurePage(page: string) {
      let normalizedPagePath: string
      try {
        normalizedPagePath = normalizePagePath(page)
      } catch (err) {
        console.error(err)
        throw pageNotFoundError(page)
      }

      let pagePath = await findPageFile(
        pagesDir,
        normalizedPagePath,
        pageExtensions
      )

      // Default the /_error route to the Next.js provided default page
      if (page === '/_error' && pagePath === null) {
        pagePath = 'next/dist/pages/_error'
      }

      if (pagePath === null) {
        throw pageNotFoundError(normalizedPagePath)
      }

      // Derive the canonical route URL from the file path: strip the
      // extension, collapse trailing /index, ensure a leading slash.
      let pageUrl = convertPageFilePathToRoutePath(
        pagePath.replace(/\\/g, '/'),
        pageExtensions
      )

      pageUrl = `${pageUrl[0] !== '/' ? '/' : ''}${pageUrl
        .replace(new RegExp(`\\.+(?:${pageExtensions.join('|')})$`), '')
        .replace(/\/index$/, '')}`
      pageUrl = pageUrl === '' ? '/' : pageUrl
      const bundleFile = normalizePagePath(pageUrl)
      const serverBundlePath = posix.join('pages', bundleFile)
      const clientBundlePath = posix.join('pages', bundleFile)
      const absolutePagePath = pagePath.startsWith('next/dist/pages')
        ? require.resolve(pagePath)
        : join(pagesDir, pagePath)

      page = posix.normalize(pageUrl)

      return new Promise<void>((resolve, reject) => {
        // Makes sure the page that is being kept in on-demand-entries matches the webpack output
        const normalizedPage = normalizePathSep(page)
        const entryInfo = entries[normalizedPage]

        if (entryInfo) {
          // Already built: nothing to wait for.
          if (entryInfo.status === BUILT) {
            resolve()
            return
          }

          // Build in flight: wait for its done event.
          if (entryInfo.status === BUILDING) {
            doneCallbacks!.once(normalizedPage, handleCallback)
            return
          }
        }

        Log.event(`build page: ${normalizedPage}`)

        // Register the entry and kick off a (queued) rebuild.
        entries[normalizedPage] = {
          serverBundlePath,
          clientBundlePath,
          absolutePagePath,
          status: ADDED,
        }
        doneCallbacks!.once(normalizedPage, handleCallback)

        invalidator.invalidate()

        function handleCallback(err: Error) {
          if (err) return reject(err)
          resolve()
        }
      })
    },

    /**
     * SSE middleware for /_next/webpack-hmr: replies to client pings every
     * 5s to keep the pinged page's entry alive.
     */
    middleware(req: IncomingMessage, res: ServerResponse, next: Function) {
      if (!req.url?.startsWith('/_next/webpack-hmr')) return next()

      const { query } = parse(req.url!, true)
      const page = query.page
      if (!page) return next()

      const runPing = () => {
        const data = handlePing(query.page as string)
        if (!data) return
        res.write('data: ' + JSON.stringify(data) + '\n\n')
      }
      const pingInterval = setInterval(() => runPing(), 5000)

      // Stop pinging once the client disconnects.
      req.on('close', () => {
        clearInterval(pingInterval)
      })
      next()
    },
  }
}
// Evict BUILT entries that have not been pinged within `maxInactiveAge`,
// then invalidate the watcher so webpack drops the disposed entrypoints.
function disposeInactiveEntries(
  watcher: any,
  lastAccessPages: any,
  maxInactiveAge: number
) {
  const staleKeys: any = []

  for (const pageKey of Object.keys(entries)) {
    const { lastActiveTime, status } = entries[pageKey]

    // Entries that are ADDED or BUILDING must never be disposed.
    if (status !== BUILT) continue

    // Keep the most recently accessed pages alive even without a fresh
    // ping — the ping may simply be delayed behind other requests.
    if (lastAccessPages.includes(pageKey)) continue

    if (lastActiveTime && Date.now() - lastActiveTime > maxInactiveAge) {
      staleKeys.push(pageKey)
    }
  }

  if (staleKeys.length > 0) {
    for (const pageKey of staleKeys) {
      delete entries[pageKey]
    }
    // disposing inactive page(s)
    watcher.invalidate()
  }
}
// Make sure only one invalidation happens at a time
// Otherwise, webpack hash gets changed and it'll force the client to reload.
class Invalidator {
private multiCompiler: webpack.MultiCompiler
private watcher: any
private building: boolean
private rebuildAgain: boolean
constructor(watcher: any, multiCompiler: webpack.MultiCompiler) {
this.multiCompiler = multiCompiler
this.watcher = watcher
// contains an array of types of compilers currently building
this.building = false
this.rebuildAgain = false
}
invalidate() {
// If there's a current build is processing, we won't abort it by invalidating.
// (If aborted, it'll cause a client side hard reload)
// But let it to invalidate just after the completion.
// So, it can re-build the queued pages at once.
if (this.building) {
this.rebuildAgain = true
return
}
this.building = true
if (!isWebpack5) {
// Work around a bug in webpack, calling `invalidate` on Watching.js
// doesn't trigger the invalid call used to keep track of the `.done` hook on multiCompiler
for (const compiler of this.multiCompiler.compilers) {
compiler.hooks.invalid.call()
}
}
this.watcher.invalidate()
}
startBuilding() {
this.building = true
}
doneBuilding() {
this.building = false
if (this.rebuildAgain) {
this.rebuildAgain = false
this.invalidate()
}
}
}
| |
28.cache.js
|
function Lrb(){}
function Frb(){}
function Krb(){var b;while(Grb){b=Grb;Grb=Grb.c;!Grb&&(Hrb=null);rEb(b.b.b,y0b())}}
function
|
(){var b,c,d;c=new wAc;HPc(c.N,Rgd,cWd);for(d=0;d<30;++d){b=new Bsc(PTd+d);_u(b.N,dWd,true);Kqc(c,b,c.N)}return c}
function Nrb(){Irb=new Lrb;OB((LB(),KB),28);!!$stats&&$stats(FC(bWd,_gd,-1,-1));Irb.ld();!!$stats&&$stats(FC(bWd,KEd,-1,-1))}
var eWd='AsyncLoader28',dWd='cw-FlowPanel-checkBox',cWd='cwFlowPanel',bWd='runCallbacks28';_=Lrb.prototype=Frb.prototype=new ir;_.gC=function Mrb(){return S4};_.ld=function Qrb(){Krb()};_.cM={};var S4=t_c(gxd,eWd);$entry(Nrb)();
|
y0b
|
es-mx.js
|
/*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/license
*/
|
CKEDITOR.lang['es-mx']={"editor":"Editor de texto enriquecido","editorPanel":"Panel del editor de texto","common":{"editorHelp":"Presiona ALT + 0 para ayuda","browseServer":"Examinar servidor","url":"URL","protocol":"Protocolo","upload":"Subir","uploadSubmit":"Enviar al servidor","image":"Imagen","flash":"Flash","form":"Formulario","checkbox":"Casilla de verificación","radio":"Botón de opción","textField":"Campo de texto","textarea":"Área de texto","hiddenField":"Campo oculto","button":"Botón","select":"Campo de selección","imageButton":"Botón de imagen","notSet":"<not set>","id":"Id","name":"Nombre","langDir":"Dirección de idiomas","langDirLtr":"Izquierda a derecha (LTR)","langDirRtl":"Derecha a izquierda (RTL)","langCode":"Código de lenguaje","longDescr":"URL descripción larga","cssClass":"Clases de hoja de estilo","advisoryTitle":"Título del anuncio","cssStyle":"Estilo","ok":"OK","cancel":"Cancelar","close":"Cerrar","preview":"Vista previa","resize":"Redimensionar","generalTab":"General","advancedTab":"Avanzada","validateNumberFailed":"Este valor no es un número.","confirmNewPage":"Se perderán todos los cambios no guardados en este contenido. ¿Seguro que quieres cargar nueva página?","confirmCancel":"Ha cambiado algunas opciones. 
¿Está seguro de que desea cerrar la ventana de diálogo?","options":"Opciones","target":"Objetivo","targetNew":"Nueva ventana (_blank)","targetTop":"Ventana superior (_top)","targetSelf":"Misma ventana (_self)","targetParent":"Ventana principal (_parent)","langDirLTR":"Izquierda a Derecha (LTR)","langDirRTL":"Derecha a Izquierda (RTL)","styles":"Estilo","cssClasses":"Clases de hojas de estilo","width":"Ancho","height":"Alto","align":"Alineación","left":"Izquierda","right":"Derecha","center":"Centrado","justify":"Justificado","alignLeft":"Alinear a la izquierda","alignRight":"Alinear a la derecha","alignCenter":"Align Center","alignTop":"Arriba","alignMiddle":"En medio","alignBottom":"Abajo","alignNone":"Ninguno","invalidValue":"Valor inválido","invalidHeight":"La altura debe ser un número.","invalidWidth":"La anchura debe ser un número.","invalidLength":"Value specified for the \"%1\" field must be a positive number with or without a valid measurement unit (%2).","invalidCssLength":"El valor especificado para el campo \"% 1\" debe ser un número positivo con o sin una unidad de medida CSS válida (px, %, in, cm, mm, em, ex, pt, or pc).","invalidHtmlLength":"El valor especificado para el campo \"% 1\" debe ser un número positivo con o sin una unidad de medición HTML válida (px or %).","invalidInlineStyle":"El valor especificado para el estilo en línea debe constar de una o más tuplas con el formato de \"nombre: valor\", separados por punto y coma","cssLengthTooltip":"Introduzca un número para un valor en píxeles o un número con una unidad CSS válida (px, %, in, cm, mm, em, ex, pt, or pc).","unavailable":"%1<span class=\"cke_accessibility\">, no 
disponible</span>","keyboard":{"8":"Retroceso","13":"Intro","16":"Shift","17":"Ctrl","18":"Alt","32":"Espacio","35":"Fin","36":"Inicio","46":"Borrar","112":"F1","113":"F2","114":"F3","115":"F4","116":"F5","117":"F6","118":"F7","119":"F8","120":"F9","121":"F10","122":"F11","123":"F12","124":"F13","125":"F14","126":"F15","127":"F16","128":"F17","129":"F18","130":"F19","131":"F20","132":"F21","133":"F22","134":"F23","135":"F24","224":"Comando"},"keyboardShortcut":"Atajo de teclado","optionDefault":"Default"},"about":{"copy":"Derechos reservados © $1. Todos los derechos reservados","dlgTitle":"Acerca de CKEditor 4","moreInfo":"Para información sobre la licencia por favor visita nuestro sitio web:"},"basicstyles":{"bold":"Negrita","italic":"Cursiva","strike":"Tachado","subscript":"subíndice","superscript":"Sobrescrito","underline":"Subrayada"},"blockquote":{"toolbar":"Entrecomillado"},"notification":{"closed":"Notificación cerrada."},"toolbar":{"toolbarCollapse":"Colapsar barra de herramientas","toolbarExpand":"Expandir barra de herramientas","toolbarGroups":{"document":"Documento","clipboard":"Portapapeles/deshacer","editing":"Editando","forms":"Formularios","basicstyles":"Estilo básico","paragraph":"Párrafo","links":"Enlaces","insert":"Insertar","styles":"Estilos","colors":"Colores","tools":"Herramientas"},"toolbars":"Editor de barra de herramientas"},"clipboard":{"copy":"Copiar","copyError":"La configuración de seguridad de su navegador no permite al editor ejecutar automáticamente operaciones de copiado. Por favor, utilice el teclado para (Ctrl/Cmd+C).","cut":"Cortar","cutError":"La configuración de seguridad de su navegador no permite al editor ejecutar automáticamente operaciones de corte. Por favor, utilice el teclado para (Ctrl/Cmd+X).","paste":"Pegar","pasteNotification":"Press %1 to paste. 
Your browser doesn‘t support pasting with the toolbar button or context menu option.","pasteArea":"Paste Area","pasteMsg":"Paste your content inside the area below and press OK."},"contextmenu":{"options":"Opciones del menú contextual"},"elementspath":{"eleLabel":"Ruta de los elementos","eleTitle":"%1 elemento"},"filetools":{"loadError":"Ha ocurrido un error al leer el archivo","networkError":"Ha ocurrido un error de red durante la carga del archivo.","httpError404":"Se ha producido un error HTTP durante la subida de archivos (404: archivo no encontrado).","httpError403":"Se ha producido un error HTTP durante la subida de archivos (403: Prohibido).","httpError":"Se ha producido un error HTTP durante la subida de archivos (error: %1).","noUrlError":"La URL de subida no está definida.","responseError":"Respuesta incorrecta del servidor."},"format":{"label":"Formato","panelTitle":"Formato de párrafo","tag_address":"Dirección","tag_div":"Normal (DIV)","tag_h1":"Encabezado 1","tag_h2":"Encabezado 2","tag_h3":"Encabezado 3","tag_h4":"Encabezado 4","tag_h5":"Encabezado 5","tag_h6":"Encabezado 6","tag_p":"Normal","tag_pre":"Formateado"},"horizontalrule":{"toolbar":"Insertar una línea horizontal"},"image":{"alt":"Texto alternativo","border":"Borde","btnUpload":"Enviar al servidor","button2Img":"¿Desea transformar el botón de imagen seleccionado en una imagen simple?","hSpace":"Espacio horizontal","img2Button":"¿Desea transformar la imagen seleccionada en un botón de imagen?","infoTab":"Información de imagen","linkTab":"Enlace","lockRatio":"Bloquear aspecto","menu":"Propiedades de la imagen","resetSize":"Reiniciar tamaño","title":"Propiedades de la imagen","titleButton":"Propiedades del botón de imagen","upload":"Cargar","urlMissing":"Falta la URL de origen de la imagen.","vSpace":"Espacio vertical","validateBorder":"El borde debe ser un número entero.","validateHSpace":"El espacio horizontal debe ser un número entero.","validateVSpace":"El espacio vertical debe ser un 
número entero."},"indent":{"indent":"Incrementar sangría","outdent":"Decrementar sangría"},"fakeobjects":{"anchor":"Ancla","flash":"Animación flash","hiddenfield":"Campo oculto","iframe":"IFrame","unknown":"Objeto desconocido"},"link":{"acccessKey":"Llave de acceso","advanced":"Avanzada","advisoryContentType":"Tipo de contenido consultivo","advisoryTitle":"Título asesor","anchor":{"toolbar":"Ancla","menu":"Editar ancla","title":"Propiedades del ancla","name":"Nombre del ancla","errorName":"Escriba el nombre del ancla","remove":"Remover ancla"},"anchorId":"Por Id del elemento","anchorName":"Por nombre del ancla","charset":"Recurso relacionado Charset","cssClasses":"Clases de estilo de hoja","download":"Forzar la descarga","displayText":"Mostrar texto","emailAddress":"Dirección de correo electrónico","emailBody":"Cuerpo del mensaje","emailSubject":"Asunto del mensaje","id":"Id","info":"Información del enlace","langCode":"Código del idioma","langDir":"Dirección del idioma","langDirLTR":"Izquierda a Derecha (LTR)","langDirRTL":"Derecha a Izquierda (RTL)","menu":"Editar enlace","name":"Nombre","noAnchors":"(No hay anclas disponibles en el documento)","noEmail":"Escriba la dirección de correo electrónico","noUrl":"Escriba la URL del enlace","noTel":"Please type the phone number","other":"<other>","phoneNumber":"Phone number","popupDependent":"Dependiente (Netscape)","popupFeatures":"Ventana emergente","popupFullScreen":"Pantalla completa (IE)","popupLeft":"Posición Izquierda","popupLocationBar":"Ubicación de la barra","popupMenuBar":"Barra de menú","popupResizable":"Redimensionable","popupScrollBars":"Barras de desplazamiento","popupStatusBar":"Barra de estado","popupToolbar":"Barra de herramienta","popupTop":"Posición superior","rel":"Relación","selectAnchor":"Selecciona un ancla","styles":"Estilo","tabIndex":"Indice de tabulación","target":"Objetivo","targetFrame":"<frame>","targetFrameName":"Nombre del marco de destino","targetPopup":"<popup 
window>","targetPopupName":"Nombre de ventana emergente","title":"Enlace","toAnchor":"Enlace al ancla en el texto","toEmail":"Correo electrónico","toUrl":"URL","toPhone":"Phone","toolbar":"Enlace","type":"Tipo de enlace","unlink":"Desconectar","upload":"Subir"},"list":{"bulletedlist":"Insertar/Remover Lista con viñetas","numberedlist":"Insertar/Remover Lista numerada"},"magicline":{"title":"Insertar un párrafo aquí"},"maximize":{"maximize":"Maximizar","minimize":"Minimizar"},"pastetext":{"button":"Pegar como texto plano","pasteNotification":"Press %1 to paste. Your browser doesn‘t support pasting with the toolbar button or context menu option.","title":"Paste as Plain Text"},"pastefromword":{"confirmCleanup":"El texto que desea pegar parece estar copiado de Word. ¿Quieres limpiarlo antes de pegarlo?","error":"No fue posible limpiar los datos pegados debido a un error interno","title":"Pegar desde word","toolbar":"Pegar desde word"},"removeformat":{"toolbar":"Remover formato"},"sourcearea":{"toolbar":"Fuente"},"specialchar":{"options":"Opciones de carácteres especiales","title":"Seleccione un carácter especial","toolbar":"Inserta un carácter especial"},"scayt":{"btn_about":"About SCAYT","btn_dictionaries":"Dictionaries","btn_disable":"Disable SCAYT","btn_enable":"Enable SCAYT","btn_langs":"Languages","btn_options":"Options","text_title":"Spell Check As You Type"},"stylescombo":{"label":"Estilos","panelTitle":"Estilos de formatos","panelTitle1":"Estilos de bloques","panelTitle2":"Estilos de líneas","panelTitle3":"Estilo de objetos"},"table":{"border":"Tamaño del borde","caption":"Subtítulo","cell":{"menu":"Celda","insertBefore":"Insertar una celda antes","insertAfter":"Insertar una celda despues","deleteCell":"Borrar celdas","merge":"Unir celdas","mergeRight":"Unir a la derecha","mergeDown":"Unir abajo","splitHorizontal":"Dividir celda horizontalmente","splitVertical":"Dividir celda verticalmente","title":"Propiedades de la celda","cellType":"Tipo de 
celda","rowSpan":"Extensión de las filas","colSpan":"Extensión de las columnas","wordWrap":"Ajuste de línea","hAlign":"Alineación horizontal","vAlign":"Alineación vertical","alignBaseline":"Base","bgColor":"Color de fondo","borderColor":"Color de borde","data":"Datos","header":"Encabezado","yes":"Si","no":"No","invalidWidth":"El ancho de la celda debe ser un número entero.","invalidHeight":"El alto de la celda debe ser un número entero.","invalidRowSpan":"El intervalo de filas debe ser un número entero.","invalidColSpan":"El intervalo de columnas debe ser un número entero.","chooseColor":"Escoger"},"cellPad":"relleno de celda","cellSpace":"Espacio de celda","column":{"menu":"Columna","insertBefore":"Insertar columna antes","insertAfter":"Insertar columna después","deleteColumn":"Borrar columnas"},"columns":"Columnas","deleteTable":"Borrar tabla","headers":"Encabezados","headersBoth":"Ambos","headersColumn":"Primera columna","headersNone":"Ninguna","headersRow":"Primera fila","heightUnit":"height unit","invalidBorder":"El tamaño del borde debe ser un número entero.","invalidCellPadding":"El relleno de la celda debe ser un número positivo.","invalidCellSpacing":"El espacio de la celda debe ser un número positivo.","invalidCols":"El número de columnas debe ser un número mayo que 0.","invalidHeight":"La altura de la tabla debe ser un número.","invalidRows":"El número de filas debe ser mayor a 0.","invalidWidth":"El ancho de la tabla debe ser un número.","menu":"Propiedades de la tabla","row":{"menu":"Fila","insertBefore":"Inserta una fila antes","insertAfter":"Inserta una fila después","deleteRow":"Borrar filas"},"rows":"Filas","summary":"Resumen","title":"Propiedades de la tabla","toolbar":"Tabla","widthPc":"porcentaje","widthPx":"pixeles","widthUnit":"Unidad de ancho"},"undo":{"redo":"Rehacer","undo":"Deshacer"},"widget":{"move":"Presiona y arrastra para mover","label":"%1 widget"},"uploadwidget":{"abort":"La carga ha sido abortada por el usuario.","doneOne":"El 
archivo ha sido cargado completamente.","doneMany":"%1 archivos cargados completamente.","uploadOne":"Cargando archivo ({percentage}%)...","uploadMany":"Cargando archivos, {current} de {max} listo ({percentage}%)..."},"wsc":{"btnIgnore":"Ignore","btnIgnoreAll":"Ignore All","btnReplace":"Replace","btnReplaceAll":"Replace All","btnUndo":"Undo","changeTo":"Change to","errorLoading":"Error loading application service host: %s.","ieSpellDownload":"Spell checker not installed. Do you want to download it now?","manyChanges":"Spell check complete: %1 words changed","noChanges":"Spell check complete: No words changed","noMispell":"Spell check complete: No misspellings found","noSuggestions":"- No suggestions -","notAvailable":"Sorry, but service is unavailable now.","notInDic":"Not in dictionary","oneChange":"Spell check complete: One word changed","progress":"Spell check in progress...","title":"Spell Checker","toolbar":"Check Spelling"},"imagebase":{"captionPlaceholder":"Enter image caption"},"easyimage":{"commands":{"fullImage":"Full Size Image","sideImage":"Side Image","altText":"Change image alternative text","upload":"Upload Image"},"uploadFailed":"Your image could not be uploaded due to a network error."}};
|
|
services.ts
|
// Node imports
import { Request, Response } from "express";
// Own imports
import Client from "../database";
export default {
products: async (req: Request, res: Response): Promise<Response> => {
try {
const conn = await Client.connect();
const sql = ` SELECT *
FROM products
WHERE category=($1)
`;
const result = await conn.query(sql, [req.query["category"]]);
conn.release();
|
return res.status(200).json(result.rows);
} catch (error) {
return res.status(501).send(error);
}
},
topProducts: async (req: Request, res: Response): Promise<Response> => {
try {
const conn = await Client.connect();
const sql = ` SELECT productid as product, p.name, SUM(quantity) as sales
FROM orderproducts
INNER JOIN products as p ON p.id = orderproducts.productid
GROUP BY productid, p.name
ORDER BY SUM(quantity) DESC
LIMIT ($1)
`;
const result = await conn.query(sql, [req.query["top"]]);
conn.release();
return res.status(200).json(result.rows);
} catch (error) {
return res.status(501).send(error);
}
},
};
| |
ReplicationLink.js
|
/* eslint-disable new-cap */
import {Record, List} from 'immutable'
// Immutable Record base defining the fields (and defaults) of a
// replication link.
const ReplicationLinkRecord = Record({
  uuid: '',
  displayName: '',
  link: '',
  slaves: List(),
  messageBoxes: List(),
  memo: '',
})
/**
 * ReplicationLink model built on ReplicationLinkRecord.
 */
export default class ReplicationLink extends ReplicationLinkRecord {
  /**
   * @override
   */
  constructor(...args) {
    super(...args)
    // Convenience display label, falling back to the uuid when no
    // displayName is set. NOTE(review): assigned as a plain instance
    // property on an Immutable Record subclass — confirm this is intended
    // rather than a getter.
    this.label = this.displayName || this.uuid
  }

  /**
   * Build a ReplicationLink from a raw API object.
   * @param {object} rawReplicationLink
   * @param {Map<UUID, MessageBox>} messageBoxes
   * @return {ReplicationLink}
   */
  static fromObject(rawReplicationLink, messageBoxes) {
    // Sort a copy — the previous in-place sort() mutated the caller's
    // rawReplicationLink.messageBoxes array.
    const boxUuids = [...(rawReplicationLink.messageBoxes || [])]
    boxUuids.sort()
    const boxes = boxUuids.map((boxUuid) => messageBoxes.get(boxUuid))
    return new ReplicationLink({
      uuid: rawReplicationLink.uuid || '',
      displayName: rawReplicationLink.displayName || '',
      link: rawReplicationLink.link || '',
      slaves: List(rawReplicationLink.slaves || []),
      messageBoxes: List(boxes),
      memo: rawReplicationLink.memo || '',
    })
  }

  /**
   * get Replication endpoint
   * @return {str} url
   */
  get url() {
    return this.link
  }

  /**
   * @param {string} value
   * @return {ReplicationLink}
   */
  updateDisplayName(value) {
    return this.set('displayName', value)
  }

  /**
   * Append a message box, ignoring duplicates.
   * @param {string} value
   * @return {ReplicationLink}
   */
  addMessageBox(value) {
    if (this.messageBoxes.includes(value)) {
      return this
    }
    const newMessageBox = this.messageBoxes.push(value)
    return this.set('messageBoxes', newMessageBox)
  }

  /**
   * @param {string} value
   * @return {ReplicationLink}
   */
  deleteMessageBox(value) {
    const newMessageBox = this.messageBoxes.filter((messageBox) => messageBox !== value)
    return this.set('messageBoxes', newMessageBox)
  }

  /**
   * @param {array} valueList
   * @return {ReplicationLink}
   */
  updateSlaves(valueList) {
    return this.set('slaves', new List(valueList))
  }

  /**
   * @param {array} valueList
   * @return {ReplicationLink}
   */
  updateMessageBoxes(valueList) {
    return this.set('messageBoxes', new List(valueList))
  }

  /**
   * @return {ReplicationLink}
   */
  clearMessageBoxes() {
    return this.set('messageBoxes', new List())
  }

  /**
   * @param {string} value
   * @return {ReplicationLink}
   */
  updateMemo(value) {
    return this.set('memo', value)
  }

  /**
   * A link is creatable once it has a display name and at least one
   * message box.
   * @return {bool}
   */
  isReadyToCreate() {
    if (this.messageBoxes.size === 0) {
      return false
    }
    if (this.displayName.length === 0) {
      return false
    }
    return true
  }
}
|
*/
|
bit.rs
|
//! Type-level bits.
//!
//! These are rather simple and are used as the building blocks of the
//! other number types in this crate.
//!
//!
//! **Type operators** implemented:
//!
//! - From `core::ops`: `BitAnd`, `BitOr`, `BitXor`, and `Not`.
//! - From `typenum`: `Same` and `Cmp`.
//!
use core::ops::{BitAnd, BitOr, BitXor, Not};
use {Cmp, Equal, Greater, Less, NonZero, PowerOfTwo};
pub use marker_traits::Bit;
/// The type-level bit 0.
///
/// A zero-sized marker type; all information is carried by the type itself.
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash, Debug, Default)]
pub struct B0;

impl B0 {
    /// Instantiates a singleton representing this bit.
    #[inline]
    pub fn new() -> B0 {
        B0
    }
}

/// The type-level bit 1.
///
/// A zero-sized marker type; all information is carried by the type itself.
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy, Hash, Debug, Default)]
pub struct B1;

impl B1 {
    /// Instantiates a singleton representing this bit.
    #[inline]
    pub fn new() -> B1 {
        B1
    }
}
// Term-level view of the bit 0: associated constants plus runtime getters.
impl Bit for B0 {
    const U8: u8 = 0;
    const BOOL: bool = false;

    #[inline]
    fn to_u8() -> u8 {
        0
    }
    #[inline]
    fn to_bool() -> bool {
        false
    }
}

// Term-level view of the bit 1.
impl Bit for B1 {
    const U8: u8 = 1;
    const BOOL: bool = true;

    #[inline]
    fn to_u8() -> u8 {
        1
    }
    #[inline]
    fn to_bool() -> bool {
        true
    }
}

// Only the 1 bit is non-zero, and 1 (= 2^0) is a power of two.
impl NonZero for B1 {}
impl PowerOfTwo for B1 {}
// Type-level boolean algebra on B0/B1. Each operator method is a trivial
// constant constructor; `#[inline]` added for consistency with the
// inherent `new()`/`to_u8()`/`to_bool()` methods above (and to allow
// cross-crate inlining without LTO).

/// Not of 0 (!0 = 1)
impl Not for B0 {
    type Output = B1;
    #[inline]
    fn not(self) -> Self::Output {
        B1
    }
}

/// Not of 1 (!1 = 0)
impl Not for B1 {
    type Output = B0;
    #[inline]
    fn not(self) -> Self::Output {
        B0
    }
}

/// And with 0 ( 0 & B = 0)
impl<Rhs: Bit> BitAnd<Rhs> for B0 {
    type Output = B0;
    #[inline]
    fn bitand(self, _: Rhs) -> Self::Output {
        B0
    }
}

/// And with 1 ( 1 & 0 = 0)
impl BitAnd<B0> for B1 {
    type Output = B0;
    #[inline]
    fn bitand(self, _: B0) -> Self::Output {
        B0
    }
}

/// And with 1 ( 1 & 1 = 1)
impl BitAnd<B1> for B1 {
    type Output = B1;
    #[inline]
    fn bitand(self, _: B1) -> Self::Output {
        B1
    }
}

/// Or with 0 ( 0 | 0 = 0)
impl BitOr<B0> for B0 {
    type Output = B0;
    #[inline]
    fn bitor(self, _: B0) -> Self::Output {
        B0
    }
}

/// Or with 0 ( 0 | 1 = 1)
impl BitOr<B1> for B0 {
    type Output = B1;
    #[inline]
    fn bitor(self, _: B1) -> Self::Output {
        B1
    }
}

/// Or with 1 ( 1 | B = 1)
impl<Rhs: Bit> BitOr<Rhs> for B1 {
    type Output = B1;
    #[inline]
    fn bitor(self, _: Rhs) -> Self::Output {
        B1
    }
}

/// Xor between 0 and 0 ( 0 ^ 0 = 0)
impl BitXor<B0> for B0 {
    type Output = B0;
    #[inline]
    fn bitxor(self, _: B0) -> Self::Output {
        B0
    }
}

/// Xor between 1 and 0 ( 1 ^ 0 = 1)
impl BitXor<B0> for B1 {
    type Output = B1;
    #[inline]
    fn bitxor(self, _: B0) -> Self::Output {
        B1
    }
}

/// Xor between 0 and 1 ( 0 ^ 1 = 1)
impl BitXor<B1> for B0 {
    type Output = B1;
    #[inline]
    fn bitxor(self, _: B1) -> Self::Output {
        B1
    }
}

/// Xor between 1 and 1 ( 1 ^ 1 = 0)
impl BitXor<B1> for B1 {
    type Output = B0;
    #[inline]
    fn bitxor(self, _: B1) -> Self::Output {
        B0
    }
}
// Fixed: the attribute was `#[cfg(tests)]`. The compiler only sets the `test`
// cfg flag, so the module — and every assertion in it — was silently never
// compiled or run. The imports below are added so the now-active module builds.
#[cfg(test)]
mod tests {
    use super::*;
    use core::ops::{BitAnd, BitOr, BitXor, Not};
    // macro for testing operation results. Uses `Same` to ensure the types are equal and
    // not just the values they evaluate to.
    macro_rules! test_bit_op {
        ($op:ident $Lhs:ident = $Answer:ident) => (
            {
                type Test = <<$Lhs as $op>::Output as ::Same<$Answer>>::Output;
                assert_eq!(<$Answer as Bit>::to_u8(), <Test as Bit>::to_u8());
            }
        );
        ($Lhs:ident $op:ident $Rhs:ident = $Answer:ident) => (
            {
                type Test = <<$Lhs as $op<$Rhs>>::Output as ::Same<$Answer>>::Output;
                assert_eq!(<$Answer as Bit>::to_u8(), <Test as Bit>::to_u8());
            }
        );
    }
    /// Exercises every row of the Not/And/Or/Xor truth tables at the type level.
    #[test]
    fn bit_operations() {
        test_bit_op!(Not B0 = B1);
        test_bit_op!(Not B1 = B0);
        test_bit_op!(B0 BitAnd B0 = B0);
        test_bit_op!(B0 BitAnd B1 = B0);
        test_bit_op!(B1 BitAnd B0 = B0);
        test_bit_op!(B1 BitAnd B1 = B1);
        test_bit_op!(B0 BitOr B0 = B0);
        test_bit_op!(B0 BitOr B1 = B1);
        test_bit_op!(B1 BitOr B0 = B1);
        test_bit_op!(B1 BitOr B1 = B1);
        test_bit_op!(B0 BitXor B0 = B0);
        test_bit_op!(B0 BitXor B1 = B1);
        test_bit_op!(B1 BitXor B0 = B1);
        test_bit_op!(B1 BitXor B1 = B0);
    }
}
/// 0 == 0
impl Cmp<B0> for B0 {
    type Output = Equal;
}
/// 0 < 1
impl Cmp<B1> for B0 {
    type Output = Less;
}
/// 1 > 0
impl Cmp<B0> for B1 {
    type Output = Greater;
}
/// 1 == 1
impl Cmp<B1> for B1 {
    type Output = Equal;
}
use Min;
/// min(0, 0) = 0
impl Min<B0> for B0 {
    type Output = B0;
    fn min(self, _: B0) -> B0 {
        self
    }
}
/// min(0, 1) = 0
impl Min<B1> for B0 {
    type Output = B0;
    fn min(self, _: B1) -> B0 {
        self
    }
}
/// min(1, 0) = 0
impl Min<B0> for B1 {
    type Output = B0;
    fn min(self, rhs: B0) -> B0 {
        rhs
    }
}
/// min(1, 1) = 1
impl Min<B1> for B1 {
    type Output = B1;
    fn min(self, _: B1) -> B1 {
        self
    }
}
use Max;
/// max(0, 0) = 0
impl Max<B0> for B0 {
    type Output = B0;
    fn max(self, _: B0) -> B0 {
        self
    }
}
/// max(0, 1) = 1
impl Max<B1> for B0 {
    type Output = B1;
    fn max(self, rhs: B1) -> B1 {
        rhs
    }
}
/// max(1, 0) = 1
impl Max<B0> for B1 {
    type Output = B1;
    fn max(self, _: B0) -> B1 {
        self
    }
}
/// max(1, 1) = 1
impl Max<B1> for B1 {
    type Output = B1;
    fn max(self, _: B1) -> B1 {
        self
    }
}
| |
file_format_common.rs
|
// Copyright (c) The Libra Core Contributors
// SPDX-License-Identifier: Apache-2.0
//! Constants for the binary format.
//!
//! Definition for the constants of the binary format, used by the serializer and the deserializer.
//! This module also offers helpers for the serialization and deserialization of certain
//! integer indexes.
//!
//! We use LEB128 for integer compression. LEB128 is a representation from the DWARF3 spec,
//! http://dwarfstd.org/Dwarf3Std.php or https://en.wikipedia.org/wiki/LEB128.
//! It's used to compress mostly indexes into the main binary tables.
use byteorder::ReadBytesExt;
use failure::*;
use std::{io::Cursor, mem::size_of};
/// Constant values for the binary format header.
///
/// The binary header is magic + version info + table count.
/// (Uninhabited enum: it has no variants and is used purely as a namespace
/// for the associated constants below.)
pub enum BinaryConstants {}
impl BinaryConstants {
    /// The blob that must start a binary.
    pub const LIBRA_MAGIC_SIZE: usize = 8;
    pub const LIBRA_MAGIC: [u8; BinaryConstants::LIBRA_MAGIC_SIZE] =
        [b'L', b'I', b'B', b'R', b'A', b'V', b'M', b'\n'];
    /// The `LIBRA_MAGIC` size, 1 byte for major version, 1 byte for minor version and 1 byte
    /// for table count.
    pub const HEADER_SIZE: usize = BinaryConstants::LIBRA_MAGIC_SIZE + 3;
    /// A (Table Type, Start Offset, Byte Count) size, which is 1 byte for the type and
    /// 4 bytes for the offset/count.
    pub const TABLE_HEADER_SIZE: u32 = size_of::<u32>() as u32 * 2 + 1;
}
/// Constants for table types in the binary.
///
/// The binary contains a subset of those tables. A table specification is a tuple (table type,
/// start offset, byte count) for a given table.
///
/// The discriminants are the on-disk byte tags: do not renumber or reorder
/// without a format version bump.
#[rustfmt::skip]
#[allow(non_camel_case_types)]
#[repr(u8)]
#[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)]
pub enum TableType {
    MODULE_HANDLES          = 0x1,
    STRUCT_HANDLES          = 0x2,
    FUNCTION_HANDLES        = 0x3,
    ADDRESS_POOL            = 0x4,
    STRING_POOL             = 0x5,
    BYTE_ARRAY_POOL         = 0x6,
    MAIN                    = 0x7,
    STRUCT_DEFS             = 0x8,
    FIELD_DEFS              = 0x9,
    FUNCTION_DEFS           = 0xA,
    TYPE_SIGNATURES         = 0xB,
    FUNCTION_SIGNATURES     = 0xC,
    LOCALS_SIGNATURES       = 0xD,
}
/// Constants for signature kinds (type, function, locals). Those values start a signature blob.
#[rustfmt::skip]
#[allow(non_camel_case_types)]
#[repr(u8)]
#[derive(Clone, Copy, Debug)]
pub enum SignatureType {
    TYPE_SIGNATURE          = 0x1,
    FUNCTION_SIGNATURE      = 0x2,
    LOCAL_SIGNATURE         = 0x3,
}
/// Constants for signature blob values.
///
/// Each variant is the on-disk byte tag for one value type inside a
/// signature blob.
///
/// Fixed: the enum's identifier was missing/garbled in the source
/// (`pub enum` followed by a stray `|` and a bare `{`). The name
/// `SerializedType` is restored, matching how the rest of the format code
/// refers to this type.
#[rustfmt::skip]
#[allow(non_camel_case_types)]
#[repr(u8)]
#[derive(Clone, Copy, Debug)]
pub enum SerializedType {
    BOOL                    = 0x1,
    INTEGER                 = 0x2,
    STRING                  = 0x3,
    ADDRESS                 = 0x4,
    REFERENCE               = 0x5,
    MUTABLE_REFERENCE       = 0x6,
    STRUCT                  = 0x7,
    BYTEARRAY               = 0x8,
    TYPE_PARAMETER          = 0x9,
}
/// Byte tags distinguishing resource kinds from copyable kinds in the binary.
#[rustfmt::skip]
#[allow(non_camel_case_types)]
#[repr(u8)]
#[derive(Clone, Copy, Debug)]
pub enum SerializedKind {
    RESOURCE                = 0x1,
    COPYABLE                = 0x2,
}
/// List of opcodes constants.
///
/// Each variant's discriminant is the serialized bytecode byte; values must
/// stay stable across releases that share a format version.
#[rustfmt::skip]
#[allow(non_camel_case_types)]
#[repr(u8)]
#[derive(Clone, Copy, Debug)]
pub enum Opcodes {
    POP                     = 0x01,
    RET                     = 0x02,
    BR_TRUE                 = 0x03,
    BR_FALSE                = 0x04,
    BRANCH                  = 0x05,
    LD_CONST                = 0x06,
    LD_ADDR                 = 0x07,
    LD_STR                  = 0x08,
    LD_TRUE                 = 0x09,
    LD_FALSE                = 0x0A,
    COPY_LOC                = 0x0B,
    MOVE_LOC                = 0x0C,
    ST_LOC                  = 0x0D,
    LD_REF_LOC              = 0x0E,
    LD_REF_FIELD            = 0x0F,
    LD_BYTEARRAY            = 0x10,
    CALL                    = 0x11,
    PACK                    = 0x12,
    UNPACK                  = 0x13,
    READ_REF                = 0x14,
    WRITE_REF               = 0x15,
    ADD                     = 0x16,
    SUB                     = 0x17,
    MUL                     = 0x18,
    MOD                     = 0x19,
    DIV                     = 0x1A,
    BIT_OR                  = 0x1B,
    BIT_AND                 = 0x1C,
    XOR                     = 0x1D,
    OR                      = 0x1E,
    AND                     = 0x1F,
    NOT                     = 0x20,
    EQ                      = 0x21,
    NEQ                     = 0x22,
    LT                      = 0x23,
    GT                      = 0x24,
    LE                      = 0x25,
    GE                      = 0x26,
    ABORT                   = 0x27,
    GET_TXN_GAS_UNIT_PRICE  = 0x28,
    GET_TXN_MAX_GAS_UNITS   = 0x29,
    GET_GAS_REMAINING       = 0x2A,
    GET_TXN_SENDER          = 0x2B,
    EXISTS                  = 0x2C,
    BORROW_REF              = 0x2D,
    RELEASE_REF             = 0x2E,
    MOVE_FROM               = 0x2F,
    MOVE_TO                 = 0x30,
    CREATE_ACCOUNT          = 0x31,
    EMIT_EVENT              = 0x32,
    GET_TXN_SEQUENCE_NUMBER = 0x33,
    GET_TXN_PUBLIC_KEY      = 0x34,
    FREEZE_REF              = 0x35,
}
/// LEB128-encodes a `u16` and appends the bytes to `binary`.
///
/// A `u16` always fits in a `u32`, so this widens losslessly and delegates
/// to the `u32` encoder.
pub fn write_u16_as_uleb128(binary: &mut Vec<u8>, value: u16) {
    let widened = u32::from(value);
    write_u32_as_uleb128(binary, widened);
}
/// LEB128-encodes a `u32` and appends the bytes to `binary`.
///
/// Emits 7 payload bits per byte, least-significant group first; the high
/// bit is set on every byte except the final one. Zero encodes as a single
/// `0x00` byte.
pub fn write_u32_as_uleb128(binary: &mut Vec<u8>, value: u32) {
    let mut remaining = value;
    // While more than 7 bits remain, emit a continuation byte.
    while remaining >= 0x80 {
        binary.push((remaining as u8 & 0x7f) | 0x80);
        remaining >>= 7;
    }
    // Final byte: high bit clear signals end of the encoding.
    binary.push(remaining as u8);
}
/// Write a `u16` in Little Endian format.
pub fn write_u16(binary: &mut Vec<u8>, value: u16) {
    binary.extend_from_slice(&value.to_le_bytes());
}
/// Write a `u32` in Little Endian format.
pub fn write_u32(binary: &mut Vec<u8>, value: u32) {
    binary.extend_from_slice(&value.to_le_bytes());
}
/// Write a `u64` in Little Endian format.
pub fn write_u64(binary: &mut Vec<u8>, value: u64) {
    binary.extend_from_slice(&value.to_le_bytes());
}
/// Reads a `u16` in ULEB128 format from a `binary`.
///
/// Takes a `&mut Cursor<&[u8]>` and returns a pair:
///
/// u16 - value read
///
/// Return an error on an invalid representation.
pub fn read_uleb128_as_u16(cursor: &mut Cursor<&[u8]>) -> Result<u16> {
    let mut value: u16 = 0;
    let mut shift: u8 = 0;
    while let Ok(byte) = cursor.read_u8() {
        // Low 7 bits are payload; the high bit is the continuation flag.
        let val = byte & 0x7f;
        value |= u16::from(val) << shift;
        // `val == byte` means the high bit was clear: this was the last byte.
        if val == byte {
            return Ok(value);
        }
        shift += 7;
        // A u16 needs at most 3 groups (shifts 0, 7, 14); anything longer is invalid.
        if shift > 14 {
            break;
        }
    }
    // NOTE(review): at shift == 14 a third byte may carry bits above bit 15;
    // those bits are silently shifted out rather than rejected — confirm this
    // is the intended leniency for over-long encodings.
    bail!("invalid ULEB128 representation for u16")
}
/// Reads a `u32` in ULEB128 format from a `binary`.
///
/// Takes a `&mut Cursor<&[u8]>` and returns a pair:
///
/// u32 - value read
///
/// Return an error on an invalid representation.
pub fn read_uleb128_as_u32(cursor: &mut Cursor<&[u8]>) -> Result<u32> {
    let mut value: u32 = 0;
    let mut shift: u8 = 0;
    while let Ok(byte) = cursor.read_u8() {
        // Low 7 bits are payload; the high bit is the continuation flag.
        let val = byte & 0x7f;
        value |= u32::from(val) << shift;
        // High bit clear: this was the final byte of the encoding.
        if val == byte {
            return Ok(value);
        }
        shift += 7;
        // A u32 needs at most 5 groups (shifts 0..=28); anything longer is invalid.
        if shift > 28 {
            break;
        }
    }
    // NOTE(review): at shift == 28 a fifth byte may carry bits above bit 31;
    // as above, those are silently dropped — confirm intended.
    bail!("invalid ULEB128 representation for u32")
}
|
SerializedType
|
sugg.rs
|
//! Contains utility functions to generate suggestions.
#![deny(clippy::missing_docs_in_private_items)]
use crate::source::{snippet, snippet_opt, snippet_with_applicability, snippet_with_macro_callsite};
use crate::{get_parent_expr_for_hir, higher};
use rustc_ast::util::parser::AssocOp;
use rustc_ast::{ast, token};
use rustc_ast_pretty::pprust::token_kind_to_string;
use rustc_errors::Applicability;
use rustc_hir as hir;
use rustc_hir::{ExprKind, HirId, MutTy, TyKind};
use rustc_infer::infer::TyCtxtInferExt;
use rustc_lint::{EarlyContext, LateContext, LintContext};
use rustc_middle::hir::place::ProjectionKind;
use rustc_middle::mir::{FakeReadCause, Mutability};
use rustc_middle::ty;
use rustc_span::source_map::{BytePos, CharPos, Pos, Span, SyntaxContext};
use rustc_typeck::expr_use_visitor::{Delegate, ExprUseVisitor, PlaceBase, PlaceWithHirId};
use std::borrow::Cow;
use std::convert::TryInto;
use std::fmt::Display;
use std::iter;
use std::ops::{Add, Neg, Not, Sub};
/// A helper type to build suggestion correctly handling parentheses.
#[derive(Clone, PartialEq)]
pub enum Sugg<'a> {
    /// An expression that never needs parentheses such as `1337` or `[0; 42]`.
    NonParen(Cow<'a, str>),
    /// An expression that does not fit in other variants.
    MaybeParen(Cow<'a, str>),
    /// A binary operator expression, including `as`-casts and explicit type
    /// coercion.
    BinOp(AssocOp, Cow<'a, str>, Cow<'a, str>),
}
/// Literal constant `0`, for convenience.
pub const ZERO: Sugg<'static> = Sugg::NonParen(Cow::Borrowed("0"));
/// Literal constant `1`, for convenience.
pub const ONE: Sugg<'static> = Sugg::NonParen(Cow::Borrowed("1"));
/// a constant represents an empty string, for convenience.
pub const EMPTY: Sugg<'static> = Sugg::NonParen(Cow::Borrowed(""));
impl Display for Sugg<'_> {
    // `BinOp` re-renders from its parts so the operator spacing is canonical;
    // the other variants carry their snippet verbatim.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        match *self {
            Sugg::NonParen(ref s) | Sugg::MaybeParen(ref s) => s.fmt(f),
            Sugg::BinOp(op, ref lhs, ref rhs) => binop_to_string(op, lhs, rhs).fmt(f),
        }
    }
}
#[allow(clippy::wrong_self_convention)] // ok, because of the function `as_ty` method
impl<'a> Sugg<'a> {
    /// Prepare a suggestion from an expression.
    ///
    /// Returns `None` only when no snippet is available for the span.
    pub fn hir_opt(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> Option<Self> {
        let get_snippet = |span| snippet(cx, span, "");
        snippet_opt(cx, expr.span).map(|_| Self::hir_from_snippet(expr, get_snippet))
    }
    /// Convenience function around `hir_opt` for suggestions with a default
    /// text.
    pub fn hir(cx: &LateContext<'_>, expr: &hir::Expr<'_>, default: &'a str) -> Self {
        Self::hir_opt(cx, expr).unwrap_or(Sugg::NonParen(Cow::Borrowed(default)))
    }
    /// Same as `hir`, but it adapts the applicability level by following rules:
    ///
    /// - Applicability level `Unspecified` will never be changed.
    /// - If the span is inside a macro, change the applicability level to `MaybeIncorrect`.
    /// - If the default value is used and the applicability level is `MachineApplicable`, change it
    /// to
    /// `HasPlaceholders`
    pub fn hir_with_applicability(
        cx: &LateContext<'_>,
        expr: &hir::Expr<'_>,
        default: &'a str,
        applicability: &mut Applicability,
    ) -> Self {
        if *applicability != Applicability::Unspecified && expr.span.from_expansion() {
            *applicability = Applicability::MaybeIncorrect;
        }
        Self::hir_opt(cx, expr).unwrap_or_else(|| {
            if *applicability == Applicability::MachineApplicable {
                *applicability = Applicability::HasPlaceholders;
            }
            Sugg::NonParen(Cow::Borrowed(default))
        })
    }
    /// Same as `hir`, but will use the pre expansion span if the `expr` was in a macro.
    pub fn hir_with_macro_callsite(cx: &LateContext<'_>, expr: &hir::Expr<'_>, default: &'a str) -> Self {
        let get_snippet = |span| snippet_with_macro_callsite(cx, span, default);
        Self::hir_from_snippet(expr, get_snippet)
    }
    /// Same as `hir`, but first walks the span up to the given context. This will result in the
    /// macro call, rather then the expansion, if the span is from a child context. If the span is
    /// not from a child context, it will be used directly instead.
    ///
    /// e.g. Given the expression `&vec![]`, getting a snippet from the span for `vec![]` as a HIR
    /// node would result in `box []`. If given the context of the address of expression, this
    /// function will correctly get a snippet of `vec![]`.
    pub fn hir_with_context(
        cx: &LateContext<'_>,
        expr: &hir::Expr<'_>,
        ctxt: SyntaxContext,
        default: &'a str,
        applicability: &mut Applicability,
    ) -> Self {
        if expr.span.ctxt() == ctxt {
            Self::hir_from_snippet(expr, |span| snippet(cx, span, default))
        } else {
            let snip = snippet_with_applicability(cx, expr.span, default, applicability);
            Sugg::NonParen(snip)
        }
    }
    /// Generate a suggestion for an expression with the given snippet. This is used by the `hir_*`
    /// function variants of `Sugg`, since these use different snippet functions.
    fn hir_from_snippet(expr: &hir::Expr<'_>, get_snippet: impl Fn(Span) -> Cow<'a, str>) -> Self {
        // Ranges have dedicated handling: they render as `start..end` binops.
        if let Some(range) = higher::Range::hir(expr) {
            let op = match range.limits {
                ast::RangeLimits::HalfOpen => AssocOp::DotDot,
                ast::RangeLimits::Closed => AssocOp::DotDotEq,
            };
            let start = range.start.map_or("".into(), |expr| get_snippet(expr.span));
            let end = range.end.map_or("".into(), |expr| get_snippet(expr.span));
            return Sugg::BinOp(op, start, end);
        }
        // Every remaining ExprKind is classified into exactly one Sugg variant;
        // the match is intentionally exhaustive so new kinds force a decision.
        match expr.kind {
            hir::ExprKind::AddrOf(..)
            | hir::ExprKind::Box(..)
            | hir::ExprKind::If(..)
            | hir::ExprKind::Let(..)
            | hir::ExprKind::Closure(..)
            | hir::ExprKind::Unary(..)
            | hir::ExprKind::Match(..) => Sugg::MaybeParen(get_snippet(expr.span)),
            hir::ExprKind::Continue(..)
            | hir::ExprKind::Yield(..)
            | hir::ExprKind::Array(..)
            | hir::ExprKind::Block(..)
            | hir::ExprKind::Break(..)
            | hir::ExprKind::Call(..)
            | hir::ExprKind::Field(..)
            | hir::ExprKind::Index(..)
            | hir::ExprKind::InlineAsm(..)
            | hir::ExprKind::ConstBlock(..)
            | hir::ExprKind::Lit(..)
            | hir::ExprKind::Loop(..)
            | hir::ExprKind::MethodCall(..)
            | hir::ExprKind::Path(..)
            | hir::ExprKind::Repeat(..)
            | hir::ExprKind::Ret(..)
            | hir::ExprKind::Struct(..)
            | hir::ExprKind::Tup(..)
            | hir::ExprKind::DropTemps(_)
            | hir::ExprKind::Err => Sugg::NonParen(get_snippet(expr.span)),
            hir::ExprKind::Assign(lhs, rhs, _) => {
                Sugg::BinOp(AssocOp::Assign, get_snippet(lhs.span), get_snippet(rhs.span))
            },
            hir::ExprKind::AssignOp(op, lhs, rhs) => {
                Sugg::BinOp(hirbinop2assignop(op), get_snippet(lhs.span), get_snippet(rhs.span))
            },
            hir::ExprKind::Binary(op, lhs, rhs) => Sugg::BinOp(
                AssocOp::from_ast_binop(op.node.into()),
                get_snippet(lhs.span),
                get_snippet(rhs.span),
            ),
            hir::ExprKind::Cast(lhs, ty) => Sugg::BinOp(AssocOp::As, get_snippet(lhs.span), get_snippet(ty.span)),
            hir::ExprKind::Type(lhs, ty) => Sugg::BinOp(AssocOp::Colon, get_snippet(lhs.span), get_snippet(ty.span)),
        }
    }
    /// Prepare a suggestion from an expression.
    pub fn ast(cx: &EarlyContext<'_>, expr: &ast::Expr, default: &'a str) -> Self {
        use rustc_ast::ast::RangeLimits;
        let get_whole_snippet = || {
            if expr.span.from_expansion() {
                snippet_with_macro_callsite(cx, expr.span, default)
            } else {
                snippet(cx, expr.span, default)
            }
        };
        // AST-side mirror of `hir_from_snippet`: same variant classification,
        // but ranges are ordinary ExprKind::Range arms here.
        match expr.kind {
            ast::ExprKind::AddrOf(..)
            | ast::ExprKind::Box(..)
            | ast::ExprKind::Closure(..)
            | ast::ExprKind::If(..)
            | ast::ExprKind::Let(..)
            | ast::ExprKind::Unary(..)
            | ast::ExprKind::Match(..) => Sugg::MaybeParen(get_whole_snippet()),
            ast::ExprKind::Async(..)
            | ast::ExprKind::Block(..)
            | ast::ExprKind::Break(..)
            | ast::ExprKind::Call(..)
            | ast::ExprKind::Continue(..)
            | ast::ExprKind::Yield(..)
            | ast::ExprKind::Field(..)
            | ast::ExprKind::ForLoop(..)
            | ast::ExprKind::Index(..)
            | ast::ExprKind::InlineAsm(..)
            | ast::ExprKind::ConstBlock(..)
            | ast::ExprKind::Lit(..)
            | ast::ExprKind::Loop(..)
            | ast::ExprKind::MacCall(..)
            | ast::ExprKind::MethodCall(..)
            | ast::ExprKind::Paren(..)
            | ast::ExprKind::Underscore
            | ast::ExprKind::Path(..)
            | ast::ExprKind::Repeat(..)
            | ast::ExprKind::Ret(..)
            | ast::ExprKind::Yeet(..)
            | ast::ExprKind::Struct(..)
            | ast::ExprKind::Try(..)
            | ast::ExprKind::TryBlock(..)
            | ast::ExprKind::Tup(..)
            | ast::ExprKind::Array(..)
            | ast::ExprKind::While(..)
            | ast::ExprKind::Await(..)
            | ast::ExprKind::Err => Sugg::NonParen(get_whole_snippet()),
            ast::ExprKind::Range(ref lhs, ref rhs, RangeLimits::HalfOpen) => Sugg::BinOp(
                AssocOp::DotDot,
                lhs.as_ref().map_or("".into(), |lhs| snippet(cx, lhs.span, default)),
                rhs.as_ref().map_or("".into(), |rhs| snippet(cx, rhs.span, default)),
            ),
            ast::ExprKind::Range(ref lhs, ref rhs, RangeLimits::Closed) => Sugg::BinOp(
                AssocOp::DotDotEq,
                lhs.as_ref().map_or("".into(), |lhs| snippet(cx, lhs.span, default)),
                rhs.as_ref().map_or("".into(), |rhs| snippet(cx, rhs.span, default)),
            ),
            ast::ExprKind::Assign(ref lhs, ref rhs, _) => Sugg::BinOp(
                AssocOp::Assign,
                snippet(cx, lhs.span, default),
                snippet(cx, rhs.span, default),
            ),
            ast::ExprKind::AssignOp(op, ref lhs, ref rhs) => Sugg::BinOp(
                astbinop2assignop(op),
                snippet(cx, lhs.span, default),
                snippet(cx, rhs.span, default),
            ),
            ast::ExprKind::Binary(op, ref lhs, ref rhs) => Sugg::BinOp(
                AssocOp::from_ast_binop(op.node),
                snippet(cx, lhs.span, default),
                snippet(cx, rhs.span, default),
            ),
            ast::ExprKind::Cast(ref lhs, ref ty) => Sugg::BinOp(
                AssocOp::As,
                snippet(cx, lhs.span, default),
                snippet(cx, ty.span, default),
            ),
            ast::ExprKind::Type(ref lhs, ref ty) => Sugg::BinOp(
                AssocOp::Colon,
                snippet(cx, lhs.span, default),
                snippet(cx, ty.span, default),
            ),
        }
    }
    /// Convenience method to create the `<lhs> && <rhs>` suggestion.
    pub fn and(self, rhs: &Self) -> Sugg<'static> {
        make_binop(ast::BinOpKind::And, &self, rhs)
    }
    /// Convenience method to create the `<lhs> & <rhs>` suggestion.
    pub fn bit_and(self, rhs: &Self) -> Sugg<'static> {
        make_binop(ast::BinOpKind::BitAnd, &self, rhs)
    }
    /// Convenience method to create the `<lhs> as <rhs>` suggestion.
    pub fn as_ty<R: Display>(self, rhs: R) -> Sugg<'static> {
        make_assoc(AssocOp::As, &self, &Sugg::NonParen(rhs.to_string().into()))
    }
    /// Convenience method to create the `&<expr>` suggestion.
    pub fn addr(self) -> Sugg<'static> {
        make_unop("&", self)
    }
    /// Convenience method to create the `&mut <expr>` suggestion.
    pub fn mut_addr(self) -> Sugg<'static> {
        make_unop("&mut ", self)
    }
    /// Convenience method to create the `*<expr>` suggestion.
    pub fn deref(self) -> Sugg<'static> {
        make_unop("*", self)
    }
    /// Convenience method to create the `&*<expr>` suggestion. Currently this
    /// is needed because `sugg.deref().addr()` produces an unnecessary set of
    /// parentheses around the deref.
    pub fn addr_deref(self) -> Sugg<'static> {
        make_unop("&*", self)
    }
    /// Convenience method to create the `&mut *<expr>` suggestion. Currently
    /// this is needed because `sugg.deref().mut_addr()` produces an unnecessary
    /// set of parentheses around the deref.
    pub fn mut_addr_deref(self) -> Sugg<'static> {
        make_unop("&mut *", self)
    }
    /// Convenience method to transform suggestion into a return call
    pub fn make_return(self) -> Sugg<'static> {
        Sugg::NonParen(Cow::Owned(format!("return {}", self)))
    }
    /// Convenience method to transform suggestion into a block
    /// where the suggestion is a trailing expression
    pub fn blockify(self) -> Sugg<'static> {
        Sugg::NonParen(Cow::Owned(format!("{{ {} }}", self)))
    }
    /// Convenience method to create the `<lhs>..<rhs>` or `<lhs>...<rhs>`
    /// suggestion.
    #[allow(dead_code)]
    pub fn range(self, end: &Self, limit: ast::RangeLimits) -> Sugg<'static> {
        match limit {
            ast::RangeLimits::HalfOpen => make_assoc(AssocOp::DotDot, &self, end),
            ast::RangeLimits::Closed => make_assoc(AssocOp::DotDotEq, &self, end),
        }
    }
    /// Adds parentheses to any expression that might need them. Suitable to the
    /// `self` argument of a method call
    /// (e.g., to build `bar.foo()` or `(1 + 2).foo()`).
    #[must_use]
    pub fn maybe_par(self) -> Self {
        match self {
            Sugg::NonParen(..) => self,
            // `(x)` and `(x).y()` both don't need additional parens.
            Sugg::MaybeParen(sugg) => {
                if has_enclosing_paren(&sugg) {
                    Sugg::MaybeParen(sugg)
                } else {
                    Sugg::NonParen(format!("({})", sugg).into())
                }
            },
            // A binary operation as a receiver always needs parentheses.
            Sugg::BinOp(op, lhs, rhs) => {
                let sugg = binop_to_string(op, &lhs, &rhs);
                Sugg::NonParen(format!("({})", sugg).into())
            },
        }
    }
}
/// Generates a string from the operator and both sides.
///
/// Callers are expected to have already parenthesized `lhs`/`rhs` as needed
/// (see `make_assoc`); this only inserts the operator text.
fn binop_to_string(op: AssocOp, lhs: &str, rhs: &str) -> String {
    match op {
        // All ordinary binary operators share the `lhs <op> rhs` shape and a
        // direct AST-operator rendering.
        AssocOp::Add
        | AssocOp::Subtract
        | AssocOp::Multiply
        | AssocOp::Divide
        | AssocOp::Modulus
        | AssocOp::LAnd
        | AssocOp::LOr
        | AssocOp::BitXor
        | AssocOp::BitAnd
        | AssocOp::BitOr
        | AssocOp::ShiftLeft
        | AssocOp::ShiftRight
        | AssocOp::Equal
        | AssocOp::Less
        | AssocOp::LessEqual
        | AssocOp::NotEqual
        | AssocOp::Greater
        | AssocOp::GreaterEqual => format!(
            "{} {} {}",
            lhs,
            op.to_ast_binop().expect("Those are AST ops").to_string(),
            rhs
        ),
        AssocOp::Assign => format!("{} = {}", lhs, rhs),
        AssocOp::AssignOp(op) => {
            format!("{} {}= {}", lhs, token_kind_to_string(&token::BinOp(op)), rhs)
        },
        AssocOp::As => format!("{} as {}", lhs, rhs),
        AssocOp::DotDot => format!("{}..{}", lhs, rhs),
        AssocOp::DotDotEq => format!("{}..={}", lhs, rhs),
        AssocOp::Colon => format!("{}: {}", lhs, rhs),
    }
}
/// Return `true` if `sugg` is enclosed in parenthesis.
///
/// I.e. the `(` at position 0 is matched by the `)` at the very end of the
/// snippet; `(a)(b)` is therefore *not* enclosed.
pub fn has_enclosing_paren(sugg: impl AsRef<str>) -> bool {
    let mut chars = sugg.as_ref().chars();
    match chars.next() {
        Some('(') => {
            // Walk until the paren opened above is balanced out...
            let mut depth = 1_usize;
            while let Some(c) = chars.next() {
                match c {
                    '(' => depth += 1,
                    ')' => depth -= 1,
                    _ => {},
                }
                if depth == 0 {
                    break;
                }
            }
            // ...then the snippet is enclosed only if nothing follows it.
            chars.next().is_none()
        },
        _ => false,
    }
}
/// Copied from the rust standard library, and then edited
///
/// Generates the `&T op T`, `T op &T` and `T op T` impls by forwarding to
/// the manually written `&T op &T` impl.
macro_rules! forward_binop_impls_to_ref {
    (impl $imp:ident, $method:ident for $t:ty, type Output = $o:ty) => {
        impl $imp<$t> for &$t {
            type Output = $o;
            fn $method(self, other: $t) -> $o {
                $imp::$method(self, &other)
            }
        }
        impl $imp<&$t> for $t {
            type Output = $o;
            fn $method(self, other: &$t) -> $o {
                $imp::$method(&self, other)
            }
        }
        impl $imp for $t {
            type Output = $o;
            fn $method(self, other: $t) -> $o {
                $imp::$method(&self, &other)
            }
        }
    };
}
// Base `&Sugg op &Sugg` impls; the macro above derives the owned variants.
impl Add for &Sugg<'_> {
    type Output = Sugg<'static>;
    fn add(self, rhs: &Sugg<'_>) -> Sugg<'static> {
        make_binop(ast::BinOpKind::Add, self, rhs)
    }
}
impl Sub for &Sugg<'_> {
    type Output = Sugg<'static>;
    fn sub(self, rhs: &Sugg<'_>) -> Sugg<'static> {
        make_binop(ast::BinOpKind::Sub, self, rhs)
    }
}
forward_binop_impls_to_ref!(impl Add, add for Sugg<'_>, type Output = Sugg<'static>);
forward_binop_impls_to_ref!(impl Sub, sub for Sugg<'_>, type Output = Sugg<'static>);
impl Neg for Sugg<'_> {
    type Output = Sugg<'static>;
    fn neg(self) -> Sugg<'static> {
        make_unop("-", self)
    }
}
impl<'a> Not for Sugg<'a> {
    type Output = Sugg<'a>;
    // `!` on a comparison flips the comparison operator instead of wrapping
    // the whole expression; everything else gets a literal `!` prefix.
    fn not(self) -> Sugg<'a> {
        use AssocOp::{Equal, Greater, GreaterEqual, Less, LessEqual, NotEqual};
        if let Sugg::BinOp(op, lhs, rhs) = self {
            let to_op = match op {
                Equal => NotEqual,
                NotEqual => Equal,
                Less => GreaterEqual,
                GreaterEqual => Less,
                Greater => LessEqual,
                LessEqual => Greater,
                _ => return make_unop("!", Sugg::BinOp(op, lhs, rhs)),
            };
            Sugg::BinOp(to_op, lhs, rhs)
        } else {
            make_unop("!", self)
        }
    }
}
/// Helper type to display either `foo` or `(foo)`.
struct ParenHelper<T> {
    /// `true` if parentheses are needed.
    paren: bool,
    /// The main thing to display.
    wrapped: T,
}
impl<T> ParenHelper<T> {
    /// Builds a `ParenHelper`.
    fn new(paren: bool, wrapped: T) -> Self {
        ParenHelper { paren, wrapped }
    }
}
impl<T: Display> Display for ParenHelper<T> {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> Result<(), std::fmt::Error> {
        if !self.paren {
            // No parens wanted: forward directly, keeping the caller's formatter.
            return self.wrapped.fmt(f);
        }
        write!(f, "({})", self.wrapped)
    }
}
/// Builds the string for `<op><expr>` adding parenthesis when necessary.
///
/// For convenience, the operator is taken as a string because all unary
/// operators have the same
/// precedence.
pub fn make_unop(op: &str, expr: Sugg<'_>) -> Sugg<'static> {
    Sugg::MaybeParen(format!("{}{}", op, expr.maybe_par()).into())
}
/// Builds the string for `<lhs> <op> <rhs>` adding parenthesis when necessary.
///
/// Precedence of shift operator relative to other arithmetic operation is
/// often confusing so
/// parenthesis will always be added for a mix of these.
pub fn make_assoc(op: AssocOp, lhs: &Sugg<'_>, rhs: &Sugg<'_>) -> Sugg<'static> {
    /// Returns `true` if the operator is a shift operator `<<` or `>>`.
    fn is_shift(op: AssocOp) -> bool {
        matches!(op, AssocOp::ShiftLeft | AssocOp::ShiftRight)
    }
    /// Returns `true` if the operator is an arithmetic operator
    /// (i.e., `+`, `-`, `*`, `/`, `%`).
    fn is_arith(op: AssocOp) -> bool {
        matches!(
            op,
            AssocOp::Add | AssocOp::Subtract | AssocOp::Multiply | AssocOp::Divide | AssocOp::Modulus
        )
    }
    /// Returns `true` if the operator `op` needs parenthesis with the operator
    /// `other` in the direction `dir`.
    ///
    /// Parens are needed when the child binds looser, when equal precedence
    /// conflicts with associativity in this direction, or for any shift/arith mix.
    fn needs_paren(op: AssocOp, other: AssocOp, dir: Associativity) -> bool {
        other.precedence() < op.precedence()
            || (other.precedence() == op.precedence()
                && ((op != other && associativity(op) != dir)
                    || (op == other && associativity(op) != Associativity::Both)))
            || is_shift(op) && is_arith(other)
            || is_shift(other) && is_arith(op)
    }
    let lhs_paren = if let Sugg::BinOp(lop, _, _) = *lhs {
        needs_paren(op, lop, Associativity::Left)
    } else {
        false
    };
    let rhs_paren = if let Sugg::BinOp(rop, _, _) = *rhs {
        needs_paren(op, rop, Associativity::Right)
    } else {
        false
    };
    let lhs = ParenHelper::new(lhs_paren, lhs).to_string();
    let rhs = ParenHelper::new(rhs_paren, rhs).to_string();
    Sugg::BinOp(op, lhs.into(), rhs.into())
}
/// Convenience wrapper around `make_assoc` and `AssocOp::from_ast_binop`.
pub fn make_binop(op: ast::BinOpKind, lhs: &Sugg<'_>, rhs: &Sugg<'_>) -> Sugg<'static> {
    make_assoc(AssocOp::from_ast_binop(op), lhs, rhs)
}
#[derive(PartialEq, Eq, Clone, Copy)]
/// Operator associativity.
enum Associativity {
    /// The operator is both left-associative and right-associative.
    Both,
    /// The operator is left-associative.
    Left,
    /// The operator is not associative.
    None,
    /// The operator is right-associative.
    Right,
}
/// Returns the associativity/fixity of an operator. The difference with
/// `AssocOp::fixity` is that an operator can be both left and right associative
/// (such as `+`: `a + b + c == (a + b) + c == a + (b + c)`.
///
/// Chained `as` and explicit `:` type coercion never need inner parenthesis so
/// they are considered
/// associative.
#[must_use]
fn associativity(op: AssocOp) -> Associativity {
    use rustc_ast::util::parser::AssocOp::{
        Add, As, Assign, AssignOp, BitAnd, BitOr, BitXor, Colon, Divide, DotDot, DotDotEq, Equal, Greater,
        GreaterEqual, LAnd, LOr, Less, LessEqual, Modulus, Multiply, NotEqual, ShiftLeft, ShiftRight, Subtract,
    };
    match op {
        Assign | AssignOp(_) => Associativity::Right,
        Add | BitAnd | BitOr | BitXor | LAnd | LOr | Multiply | As | Colon => Associativity::Both,
        Divide | Equal | Greater | GreaterEqual | Less | LessEqual | Modulus | NotEqual | ShiftLeft | ShiftRight
            | Subtract => Associativity::Left,
        DotDot | DotDotEq => Associativity::None,
    }
}
/// Converts a `hir::BinOp` to the corresponding assigning binary operator.
///
/// E.g. `+` becomes the token behind `+=`. Panics for comparison/logical
/// operators, which have no assigning form in Rust.
fn hirbinop2assignop(op: hir::BinOp) -> AssocOp {
    use rustc_ast::token::BinOpToken::{And, Caret, Minus, Or, Percent, Plus, Shl, Shr, Slash, Star};
    AssocOp::AssignOp(match op.node {
        hir::BinOpKind::Add => Plus,
        hir::BinOpKind::BitAnd => And,
        hir::BinOpKind::BitOr => Or,
        hir::BinOpKind::BitXor => Caret,
        hir::BinOpKind::Div => Slash,
        hir::BinOpKind::Mul => Star,
        hir::BinOpKind::Rem => Percent,
        hir::BinOpKind::Shl => Shl,
        hir::BinOpKind::Shr => Shr,
        hir::BinOpKind::Sub => Minus,
        hir::BinOpKind::And
        | hir::BinOpKind::Eq
        | hir::BinOpKind::Ge
        | hir::BinOpKind::Gt
        | hir::BinOpKind::Le
        | hir::BinOpKind::Lt
        | hir::BinOpKind::Ne
        | hir::BinOpKind::Or => panic!("This operator does not exist"),
    })
}
/// Converts an `ast::BinOp` to the corresponding assigning binary operator.
///
/// AST-side twin of `hirbinop2assignop`; same panic contract.
fn astbinop2assignop(op: ast::BinOp) -> AssocOp {
    use rustc_ast::ast::BinOpKind::{
        Add, And, BitAnd, BitOr, BitXor, Div, Eq, Ge, Gt, Le, Lt, Mul, Ne, Or, Rem, Shl, Shr, Sub,
    };
    use rustc_ast::token::BinOpToken;
    AssocOp::AssignOp(match op.node {
        Add => BinOpToken::Plus,
        BitAnd => BinOpToken::And,
        BitOr => BinOpToken::Or,
        BitXor => BinOpToken::Caret,
        Div => BinOpToken::Slash,
        Mul => BinOpToken::Star,
        Rem => BinOpToken::Percent,
        Shl => BinOpToken::Shl,
        Shr => BinOpToken::Shr,
        Sub => BinOpToken::Minus,
        And | Eq | Ge | Gt | Le | Lt | Ne | Or => panic!("This operator does not exist"),
    })
}
/// Returns the indentation before `span` if there are nothing but `[ \t]`
/// before it on its line.
///
/// Returns `None` when any non-whitespace character precedes `span` on the
/// line, or when the line is all whitespace.
fn indentation<T: LintContext>(cx: &T, span: Span) -> Option<String> {
    let lo = cx.sess().source_map().lookup_char_pos(span.lo());
    lo.file
        .get_line(lo.line - 1 /* line numbers in `Loc` are 1-based */)
        .and_then(|line| {
            if let Some((pos, _)) = line.char_indices().find(|&(_, c)| c != ' ' && c != '\t') {
                // We can mix char and byte positions here because we only consider `[ \t]`.
                if lo.col == CharPos(pos) {
                    Some(line[..pos].into())
                } else {
                    None
                }
            } else {
                None
            }
        })
}
/// Convenience extension trait for `Diagnostic`.
pub trait DiagnosticExt<T: LintContext> {
    /// Suggests to add an attribute to an item.
    ///
    /// Correctly handles indentation of the attribute and item.
    ///
    /// # Example
    ///
    /// ```rust,ignore
    /// diag.suggest_item_with_attr(cx, item, "#[derive(Default)]");
    /// ```
    fn suggest_item_with_attr<D: Display + ?Sized>(
        &mut self,
        cx: &T,
        item: Span,
        msg: &str,
        attr: &D,
        applicability: Applicability,
    );
    /// Suggest to add an item before another.
    ///
    /// The item should not be indented (except for inner indentation).
    ///
    /// # Example
    ///
    /// ```rust,ignore
    /// diag.suggest_prepend_item(cx, item,
    /// "fn foo() {
    ///     bar();
    /// }");
    /// ```
    fn suggest_prepend_item(&mut self, cx: &T, item: Span, msg: &str, new_item: &str, applicability: Applicability);
    /// Suggest to completely remove an item.
    ///
    /// This will remove an item and all following whitespace until the next non-whitespace
    /// character. This should work correctly if item is on the same indentation level as the
    /// following item.
    ///
    /// # Example
    ///
    /// ```rust,ignore
    /// diag.suggest_remove_item(cx, item, "remove this")
    /// ```
    fn suggest_remove_item(&mut self, cx: &T, item: Span, msg: &str, applicability: Applicability);
}
impl<T: LintContext> DiagnosticExt<T> for rustc_errors::Diagnostic {
    fn suggest_item_with_attr<D: Display + ?Sized>(
        &mut self,
        cx: &T,
        item: Span,
        msg: &str,
        attr: &D,
        applicability: Applicability,
    ) {
        // Only emit when the item starts its own line, so the inserted
        // attribute can copy the item's indentation.
        if let Some(indent) = indentation(cx, item) {
            // Zero-width span at the item's start: insertion, not replacement.
            let span = item.with_hi(item.lo());
            self.span_suggestion(span, msg, format!("{}\n{}", attr, indent), applicability);
        }
    }
    fn suggest_prepend_item(&mut self, cx: &T, item: Span, msg: &str, new_item: &str, applicability: Applicability) {
        if let Some(indent) = indentation(cx, item) {
            let span = item.with_hi(item.lo());
            // Re-indent every line of the new item except the first, which
            // inherits the insertion point's column.
            let mut first = true;
            let new_item = new_item
                .lines()
                .map(|l| {
                    if first {
                        first = false;
                        format!("{}\n", l)
                    } else {
                        format!("{}{}\n", indent, l)
                    }
                })
                .collect::<String>();
            self.span_suggestion(span, msg, format!("{}\n{}", new_item, indent), applicability);
        }
    }
    fn suggest_remove_item(&mut self, cx: &T, item: Span, msg: &str, applicability: Applicability) {
        let mut remove_span = item;
        let hi = cx.sess().source_map().next_point(remove_span).hi();
        let fmpos = cx.sess().source_map().lookup_byte_offset(hi);
        // Extend the removal over trailing whitespace up to the next
        // non-whitespace character, so no blank gap is left behind.
        if let Some(ref src) = fmpos.sf.src {
            let non_whitespace_offset = src[fmpos.pos.to_usize()..].find(|c| c != ' ' && c != '\t' && c != '\n');
            if let Some(non_whitespace_offset) = non_whitespace_offset {
                remove_span = remove_span
                    .with_hi(remove_span.hi() + BytePos(non_whitespace_offset.try_into().expect("offset too large")));
            }
        }
        self.span_suggestion(remove_span, msg, String::new(), applicability);
    }
}
/// Suggestion results for handling closure
/// args dereferencing and borrowing
///
/// Produced by [`deref_closure_args`]: `suggestion` holds the rewritten
/// closure text and `applicability` how confident that rewrite is.
pub struct DerefClosure {
    /// confidence on the built suggestion
    pub applicability: Applicability,
    /// gradually built suggestion
    pub suggestion: String,
}
/// Build suggestion gradually by handling closure arg specific usages,
/// such as explicit deref and borrowing cases.
/// Returns `None` if no such use cases have been triggered in closure body
///
/// note: this only works on single line immutable closures with exactly one input parameter.
pub fn deref_closure_args<'tcx>(cx: &LateContext<'_>, closure: &'tcx hir::Expr<'_>) -> Option<DerefClosure> {
    if let hir::ExprKind::Closure(_, fn_decl, body_id, ..) = closure.kind {
        let closure_body = cx.tcx.hir().body(body_id);
        // is closure arg a type annotated double reference (i.e.: `|x: &&i32| ...`)
        // a type annotation is present if param `kind` is different from `TyKind::Infer`
        let closure_arg_is_type_annotated_double_ref = if let TyKind::Rptr(_, MutTy { ty, .. }) = fn_decl.inputs[0].kind
        {
            matches!(ty.kind, TyKind::Rptr(_, MutTy { .. }))
        } else {
            false
        };

        // The delegate accumulates the rewritten closure text as the
        // expression-use visitor walks the body.
        let mut visitor = DerefDelegate {
            cx,
            closure_span: closure.span,
            closure_arg_is_type_annotated_double_ref,
            next_pos: closure.span.lo(),
            suggestion_start: String::new(),
            applicability: Applicability::MachineApplicable,
        };

        let fn_def_id = cx.tcx.hir().local_def_id(closure.hir_id);
        cx.tcx.infer_ctxt().enter(|infcx| {
            ExprUseVisitor::new(&mut visitor, &infcx, fn_def_id, cx.param_env, cx.typeck_results())
                .consume_body(closure_body);
        });

        // An empty `suggestion_start` means no borrow/deref adjustment was
        // ever recorded, so there is nothing to suggest.
        if !visitor.suggestion_start.is_empty() {
            return Some(DerefClosure {
                applicability: visitor.applicability,
                suggestion: visitor.finish(),
            });
        }
    }
    None
}
/// Visitor struct used for tracking down
/// dereferencing and borrowing of closure's args
struct DerefDelegate<'a, 'tcx> {
    /// The late context of the lint
    cx: &'a LateContext<'tcx>,
    /// The span of the input closure to adapt
    closure_span: Span,
    /// Indicates if the arg of the closure is a type annotated double reference
    closure_arg_is_type_annotated_double_ref: bool,
    /// last position of the span to gradually build the suggestion
    /// (everything before this point has already been copied into `suggestion_start`)
    next_pos: BytePos,
    /// starting part of the gradually built suggestion
    suggestion_start: String,
    /// confidence on the built suggestion
    applicability: Applicability,
}
impl<'tcx> DerefDelegate<'_, 'tcx> {
    /// build final suggestion:
    /// - create the ending part of suggestion
    /// - concatenate starting and ending parts
    /// - potentially remove needless borrowing
    pub fn finish(&mut self) -> String {
        // Copy the untouched tail of the closure (from the last edit point to its end).
        let end_span = Span::new(self.next_pos, self.closure_span.hi(), self.closure_span.ctxt(), None);
        let end_snip = snippet_with_applicability(self.cx, end_span, "..", &mut self.applicability);
        let sugg = format!("{}{}", self.suggestion_start, end_snip);
        if self.closure_arg_is_type_annotated_double_ref {
            // Remove the first `&` so the annotated `&&T` argument loses one
            // level of indirection in the suggestion.
            sugg.replacen('&', "", 1)
        } else {
            sugg
        }
    }

    /// indicates whether the function from `parent_expr` takes its args by double reference
    fn func_takes_arg_by_double_ref(&self, parent_expr: &'tcx hir::Expr<'_>, cmt_hir_id: HirId) -> bool {
        // Collect the call's argument expressions together with the callee's
        // declared input types (method call vs. plain call).
        let (call_args, inputs) = match parent_expr.kind {
            ExprKind::MethodCall(_, call_args, _) => {
                if let Some(method_did) = self.cx.typeck_results().type_dependent_def_id(parent_expr.hir_id) {
                    (call_args, self.cx.tcx.fn_sig(method_did).skip_binder().inputs())
                } else {
                    return false;
                }
            },
            ExprKind::Call(func, call_args) => {
                let typ = self.cx.typeck_results().expr_ty(func);
                (call_args, typ.fn_sig(self.cx.tcx).skip_binder().inputs())
            },
            _ => return false,
        };

        // True when the argument matching `cmt_hir_id` is declared as a
        // reference to a reference (`&&_`).
        iter::zip(call_args, inputs)
            .any(|(arg, ty)| arg.hir_id == cmt_hir_id && matches!(ty.kind(), ty::Ref(_, inner, _) if inner.is_ref()))
    }
}
impl<'tcx> Delegate<'tcx> for DerefDelegate<'_, 'tcx> {
fn consume(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
#[allow(clippy::too_many_lines)]
fn borrow(&mut self, cmt: &PlaceWithHirId<'tcx>, _: HirId, _: ty::BorrowKind) {
if let PlaceBase::Local(id) = cmt.place.base {
let map = self.cx.tcx.hir();
let span = map.span(cmt.hir_id);
let start_span = Span::new(self.next_pos, span.lo(), span.ctxt(), None);
let mut start_snip = snippet_with_applicability(self.cx, start_span, "..", &mut self.applicability);
// identifier referring to the variable currently triggered (i.e.: `fp`)
let ident_str = map.name(id).to_string();
// full identifier that includes projection (i.e.: `fp.field`)
let ident_str_with_proj = snippet(self.cx, span, "..").to_string();
if cmt.place.projections.is_empty() {
// handle item without any projection, that needs an explicit borrowing
// i.e.: suggest `&x` instead of `x`
self.suggestion_start.push_str(&format!("{}&{}", start_snip, ident_str));
} else {
// cases where a parent `Call` or `MethodCall` is using the item
// i.e.: suggest `.contains(&x)` for `.find(|x| [1, 2, 3].contains(x)).is_none()`
//
// Note about method calls:
// - compiler automatically dereference references if the target type is a reference (works also for
// function call)
// - `self` arguments in the case of `x.is_something()` are also automatically (de)referenced, and
// no projection should be suggested
if let Some(parent_expr) = get_parent_expr_for_hir(self.cx, cmt.hir_id) {
match &parent_expr.kind {
// given expression is the self argument and will be handled completely by the compiler
// i.e.: `|x| x.is_something()`
ExprKind::MethodCall(_, [self_expr, ..], _) if self_expr.hir_id == cmt.hir_id => {
self.suggestion_start
.push_str(&format!("{}{}", start_snip, ident_str_with_proj));
self.next_pos = span.hi();
return;
},
// item is used in a call
// i.e.: `Call`: `|x| please(x)` or `MethodCall`: `|x| [1, 2, 3].contains(x)`
ExprKind::Call(_, [call_args @ ..]) | ExprKind::MethodCall(_, [_, call_args @ ..], _) => {
let expr = self.cx.tcx.hir().expect_expr(cmt.hir_id);
let arg_ty_kind = self.cx.typeck_results().expr_ty(expr).kind();
if matches!(arg_ty_kind, ty::Ref(_, _, Mutability::Not)) {
// suggest ampersand if call function is taking args by double reference
let takes_arg_by_double_ref =
self.func_takes_arg_by_double_ref(parent_expr, cmt.hir_id);
// compiler will automatically dereference field or index projection, so no need
// to suggest ampersand, but full identifier that includes projection is required
let has_field_or_index_projection =
cmt.place.projections.iter().any(|proj| {
matches!(proj.kind, ProjectionKind::Field(..) | ProjectionKind::Index)
});
// no need to bind again if the function doesn't take arg by double ref
// and if the item is already a double ref
let ident_sugg = if !call_args.is_empty()
&& !takes_arg_by_double_ref
&& (self.closure_arg_is_type_annotated_double_ref || has_field_or_index_projection)
{
let ident = if has_field_or_index_projection {
ident_str_with_proj
} else {
ident_str
};
format!("{}{}", start_snip, ident)
} else
|
;
self.suggestion_start.push_str(&ident_sugg);
self.next_pos = span.hi();
return;
}
self.applicability = Applicability::Unspecified;
},
_ => (),
}
}
let mut replacement_str = ident_str;
let mut projections_handled = false;
cmt.place.projections.iter().enumerate().for_each(|(i, proj)| {
match proj.kind {
// Field projection like `|v| v.foo`
// no adjustment needed here, as field projections are handled by the compiler
ProjectionKind::Field(..) => match cmt.place.ty_before_projection(i).kind() {
ty::Adt(..) | ty::Tuple(_) => {
replacement_str = ident_str_with_proj.clone();
projections_handled = true;
},
_ => (),
},
// Index projection like `|x| foo[x]`
// the index is dropped so we can't get it to build the suggestion,
// so the span is set-up again to get more code, using `span.hi()` (i.e.: `foo[x]`)
// instead of `span.lo()` (i.e.: `foo`)
ProjectionKind::Index => {
let start_span = Span::new(self.next_pos, span.hi(), span.ctxt(), None);
start_snip = snippet_with_applicability(self.cx, start_span, "..", &mut self.applicability);
replacement_str.clear();
projections_handled = true;
},
// note: unable to trigger `Subslice` kind in tests
ProjectionKind::Subslice => (),
ProjectionKind::Deref => {
// Explicit derefs are typically handled later on, but
// some items do not need explicit deref, such as array accesses,
// so we mark them as already processed
// i.e.: don't suggest `*sub[1..4].len()` for `|sub| sub[1..4].len() == 3`
if let ty::Ref(_, inner, _) = cmt.place.ty_before_projection(i).kind() {
if matches!(inner.kind(), ty::Ref(_, innermost, _) if innermost.is_array()) {
projections_handled = true;
}
}
},
}
});
// handle `ProjectionKind::Deref` by removing one explicit deref
// if no special case was detected (i.e.: suggest `*x` instead of `**x`)
if !projections_handled {
let last_deref = cmt
.place
.projections
.iter()
.rposition(|proj| proj.kind == ProjectionKind::Deref);
if let Some(pos) = last_deref {
let mut projections = cmt.place.projections.clone();
projections.truncate(pos);
for item in projections {
if item.kind == ProjectionKind::Deref {
replacement_str = format!("*{}", replacement_str);
}
}
}
}
self.suggestion_start
.push_str(&format!("{}{}", start_snip, replacement_str));
}
self.next_pos = span.hi();
}
}
fn mutate(&mut self, _: &PlaceWithHirId<'tcx>, _: HirId) {}
fn fake_read(&mut self, _: rustc_typeck::expr_use_visitor::Place<'tcx>, _: FakeReadCause, _: HirId) {}
}
#[cfg(test)]
mod test {
    use super::Sugg;

    use rustc_ast::util::parser::AssocOp;
    use std::borrow::Cow;

    // Reusable non-parenthesized suggestion exercised by the transform tests below.
    const SUGGESTION: Sugg<'static> = Sugg::NonParen(Cow::Borrowed("function_call()"));

    #[test]
    fn make_return_transform_sugg_into_a_return_call() {
        assert_eq!("return function_call()", SUGGESTION.make_return().to_string());
    }

    #[test]
    fn blockify_transforms_sugg_into_a_block() {
        assert_eq!("{ function_call() }", SUGGESTION.blockify().to_string());
    }

    #[test]
    fn binop_maybe_par() {
        // maybe_par adds exactly one outer layer of parentheses, even when
        // the operands are themselves parenthesized.
        let sugg = Sugg::BinOp(AssocOp::Add, "1".into(), "1".into());
        assert_eq!("(1 + 1)", sugg.maybe_par().to_string());

        let sugg = Sugg::BinOp(AssocOp::Add, "(1 + 1)".into(), "(1 + 1)".into());
        assert_eq!("((1 + 1) + (1 + 1))", sugg.maybe_par().to_string());
    }

    #[test]
    fn not_op() {
        use AssocOp::{Add, Equal, Greater, GreaterEqual, LAnd, LOr, Less, LessEqual, NotEqual};

        // Applies `!` to a binary suggestion and checks the rendered result.
        fn test_not(op: AssocOp, correct: &str) {
            let sugg = Sugg::BinOp(op, "x".into(), "y".into());
            assert_eq!((!sugg).to_string(), correct);
        }

        // Invert the comparison operator.
        test_not(Equal, "x != y");
        test_not(NotEqual, "x == y");
        test_not(Less, "x >= y");
        test_not(LessEqual, "x > y");
        test_not(Greater, "x <= y");
        test_not(GreaterEqual, "x < y");

        // Other operators are inverted like !(..).
        test_not(Add, "!(x + y)");
        test_not(LAnd, "!(x && y)");
        test_not(LOr, "!(x || y)");
    }
}
|
{
format!("{}&{}", start_snip, ident_str)
}
|
sort.js
|
import { request } from './request'
export function
|
() {
return request({
url: '/category'
})
}
// Fetch one page of goods for a category tab.
// `type` selects the tab, `page` the 1-based page of results.
export function getCategoryGoods(type, page) {
  const config = {
    url: '/home/data',
    params: {type, page}
  }
  return request(config)
}
|
getCategoryDatas
|
cycle-collection4.rs
|
// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
struct foo { mut z : fn@() }
fn nop() { }
fn nop_foo(_y: ~[int], _x : @foo) { }
|
w.z = x;
}
|
fn main() {
let w = @foo{ z: || nop() };
let x : fn@() = || nop_foo(~[], w);
|
creep_manager.js
|
var builder = require("creep_builder");
var repairer = require("creep_repairer");
var transporter = require("creep_transporter");
var bootstrapper = require("creep_bootstrapper");
var miner = require("creep_miner");
var system_constants = require("system_constants");
var creep_helpers = require("creep_helpers");
Memory.bootstrapping = false;
// Top-level creep controller: each call maintains the population, runs every
// creep's role logic, and re-evaluates bootstrap mode. run() is the entry
// point, intended to be invoked once per game tick.
var creep_manager = {
    run: function() {
        maintain_population();
        run_creeps();
        check_bootstrapping();
    }
}
function run_creeps() {
for(var name in Game.creeps){
var creep = Game.creeps[name];
switch (creep.memory.role) {
case "miner":
miner.run(creep);
break;
case "builder":
builder.run(creep);
break;
case "transporter":
|
break;
case "repairer":
repairer.run(creep);
break;
case "bootstrapper":
bootstrapper.run(creep);
break;
default:
console.log("Creep role not recognized");
}
}
}
// Spawns at most one creep per call, in priority order.
// While bootstrapping, a single bootstrapper is kept alive first; otherwise
// miners > transporters > repairers > builders, where repairers and builders
// are skipped while bootstrapping is active.
function maintain_population() {
    const byRole = (role) => _.filter(Game.creeps, (creep) => creep.memory.role == role);

    // Bootstrap mode with no bootstrapper alive: that's the only spawn we do.
    if (Memory.bootstrapping && byRole("bootstrapper").length < 1) {
        bootstrapper.spawn_creep();
        return;
    }

    const targets = system_constants.population_targets;
    if (byRole("miner").length < targets.miners) {
        miner.spawn_creep();
    }
    else if (byRole("transporter").length < targets.transporters) {
        transporter.spawn_creep();
    }
    else if (!Memory.bootstrapping && byRole("repairer").length < targets.repairers) {
        repairer.spawn_creep();
    }
    else if (!Memory.bootstrapping && byRole("builder").length < targets.builders) {
        builder.spawn_creep();
    }
}
// Flips Memory.bootstrapping based on the current worker population:
// leave bootstrap mode once miners and transporters are back at target,
// enter it when both populations have collapsed to zero.
function check_bootstrapping() {
    const targets = system_constants.population_targets;
    const miners = _.filter(Game.creeps, (creep) => creep.memory.role == "miner").length;
    const transporters = _.filter(Game.creeps, (creep) => creep.memory.role == "transporter").length;

    if (Memory.bootstrapping) {
        // Fully staffed again: bootstrap phase is over.
        if (miners == targets.miners && transporters == targets.transporters) {
            Memory.bootstrapping = false;
            console.log("Bootstrapping set to false");
        }
    }
    else if (miners == 0 && transporters == 0) {
        // Economy collapsed: restart from a bootstrapper.
        Memory.bootstrapping = true;
        console.log("Bootstrapping set to true");
    }
}
module.exports = creep_manager;
|
transporter.run(creep);
|
pattern.py
|
# -*- coding: utf-8 -*-
"""
chemdataextractor.relex.pattern.py
Extraction pattern object
"""
"""
Modify generate_cde_element() function to adapt the changes of phrase.py.
If any prefix/middle/suffix are empty (blank), do not add it to the resulting phrase.
Modified by jz449
"""
import re
from ..parse.elements import I, W, R, Any, And, Start, OneOrMore, Group
class Pattern:
    """Extraction pattern: structurally a phrase (prefix tokens, entities
    separated by middle tokens, suffix tokens) with an attached confidence
    score and a pre-built ChemDataExtractor parse expression.
    """

    def __init__(self, entities=None,
                 elements=None,
                 label=None,
                 sentences=None,
                 order=None,
                 relations=None, confidence=0):
        # NOTE(review): `order` must not be None (len() is taken immediately) — confirm callers.
        self.cluster_label = label
        self.elements = elements
        self.entities = entities
        self.number_of_entities = len(order)
        self.order = order
        self.relations = relations
        self.confidence = confidence
        self.parse_expression = self.generate_cde_parse_expression()

    def __repr__(self):
        return self.to_string()

    def to_string(self):
        """Render the pattern as '<prefix> <tag0> <middle_1> <tag1> ... <suffix>'."""
        output_string = ''
        output_string += ' '.join(self.elements['prefix']['tokens']) + ' '
        output_string += self.entities[0].tag + ' '
        for i in range(0, self.number_of_entities - 1):
            output_string += ' '.join(self.elements['middle_' + str(i + 1)]['tokens']) + ' '
            output_string += self.entities[i + 1].tag + ' '
        # (removed a dead no-op `output_string = output_string` statement)
        output_string += ' '.join(self.elements['suffix']['tokens'])
        return output_string

    def generate_cde_parse_expression(self):
        """Create a CDE parse expression for this extraction pattern.

        '<Blank>' placeholder tokens are skipped, so empty prefix/middle/suffix
        sections contribute nothing to the resulting expression.
        """
        elements = []
        for token in self.elements['prefix']['tokens']:
            if token == '<Blank>':
                continue
            elements.append(I(token))
        elements.append(self.entities[0].parse_expression)
        for middle in range(0, self.number_of_entities - 1):
            for token in self.elements['middle_' + str(middle + 1)]['tokens']:
                if token == '<Blank>':
                    continue
                elements.append(I(token))
            elements.append(self.entities[middle + 1].parse_expression)
        for token in self.elements['suffix']['tokens']:
            if token == '<Blank>':
                continue
            elements.append(I(token))

        # Combine all elements in order and tag the match as 'phrase'.
        final_phrase = And(exprs=elements)
        parse_expression = (final_phrase)('phrase')
        return parse_expression
| ||
nginx.go
|
/*
* Tencent is pleased to support the open source community by making Blueking Container Service available.
* Copyright (C) 2019 THL A29 Limited, a Tencent company. All rights reserved.
* Licensed under the MIT License (the "License"); you may not use this file except
* in compliance with the License. You may obtain a copy of the License at
* http://opensource.org/licenses/MIT
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
* either express or implied. See the License for the specific language governing permissions and
* limitations under the License.
*
*/
package nginx
import (
"fmt"
"html/template"
"math/rand"
"os"
"path/filepath"
"strconv"
"strings"
"sync"
"bk-bcs/bcs-common/common/blog"
"bk-bcs/bcs-common/common/metric"
conf "bk-bcs/bcs-services/bcs-loadbalance/template"
"bk-bcs/bcs-services/bcs-loadbalance/types"
"bk-bcs/bcs-services/bcs-loadbalance/util"
)
//NewManager create nginx config file manager
//binPath: nginx binary; cfgPath: live config file; generatePath: directory
//for newly generated configs; backupPath: directory for config backups;
//templatePath: directory containing nginx.conf.template
func NewManager(binPath, cfgPath, generatePath, backupPath, templatePath string) conf.Manager {
	return &Manager{
		nginxBin:     binPath,
		cfgFile:      cfgPath,
		tmpDir:       generatePath,
		backupDir:    backupPath,
		templateFile: filepath.Join(templatePath, "nginx.conf.template"),
		//initial health state: OK, slave role
		healthInfo: metric.HealthMeta{
			IsHealthy:   conf.HealthStatusOK,
			Message:     conf.HealthStatusOKMsg,
			CurrentRole: metric.SlaveRole,
		},
	}
}
//Manager implements TemplateManager interface, control
//nginx config file generating, validation, backup and reloading
type Manager struct {
	nginxBin     string            //absolute path for nginx executable binary
	cfgFile      string            //absolute path for nginx cfg file
	backupDir    string            //absolute path for cfg file backup storage
	tmpDir       string            //temporary directory where new config files are generated
	templateFile string            //absolute path of nginx.conf.template
	healthInfo   metric.HealthMeta //Health information
	healthLock   sync.RWMutex      //guards healthInfo
}
//Start point, do not block
//verifies the nginx binary and template exist, then prepares working directories
func (m *Manager) Start() error {
	//check nginx binary exists
	if !conf.IsFileExist(m.nginxBin) {
		blog.Error("nginx executable file lost")
		return fmt.Errorf("nginx executable file lost")
	}
	//check template exist
	if !conf.IsFileExist(m.templateFile) {
		blog.Error("nginx.conf.template do not exist")
		return fmt.Errorf("nginx.conf.template do not exist")
	}
	//create other file directory; mkdir failures are logged but not fatal
	err := os.MkdirAll(m.backupDir, os.ModePerm)
	if err != nil {
		blog.Warnf("mkdir %s failed, err %s", m.backupDir, err.Error())
	}
	err = os.MkdirAll(m.tmpDir, os.ModePerm)
	if err != nil {
		blog.Warnf("mkdir %s failed, err %s", m.tmpDir, err.Error())
	}
	return nil
}
//Stop stop the manager; this implementation has nothing to release, so it is a no-op
func (m *Manager) Stop() {
}
//Create renders the nginx config template with tmpData into a fresh file in
//tmpDir and returns the generated file's absolute path.
func (m *Manager) Create(tmpData *types.TemplateData) (string, error) {
	//loading template file
	t, err := template.ParseFiles(m.templateFile)
	if err != nil {
		blog.Errorf("Parse template file %s failed: %s", m.templateFile, err.Error())
		return "", err
	}
	//create new config file with a random name to avoid collisions
	fileName := "nginx." + strconv.Itoa(rand.Int()) + ".conf"
	absName := filepath.Join(m.tmpDir, fileName)
	writer, wErr := os.Create(absName)
	if wErr != nil {
		blog.Errorf("Create tempory new config file %s failed: %s", absName, wErr.Error())
		return "", wErr
	}
	//fix: the file handle was previously never closed (resource leak)
	defer writer.Close()
	//fix nginx vhost bug, 2018-09-26 12:12:41
	//keep only the host part of a "host:port" vhost value
	for i := range tmpData.HTTP {
		if len(tmpData.HTTP[i].BCSVHost) == 0 {
			blog.Warnf("nginx got empty http vhost info, %s", tmpData.HTTP[i].Name)
			continue
		}
		domains := strings.Split(tmpData.HTTP[i].BCSVHost, ":")
		tmpData.HTTP[i].BCSVHost = domains[0]
	}
	exErr := t.Execute(writer, tmpData)
	if exErr != nil {
		blog.Errorf("Template Execute Err: %s", exErr.Error())
		return "", exErr
	}
	blog.Infof("Create new nginx.conf %s success", absName)
	return absName, nil
}
//CheckDifference two file are difference, true is difference
//compares the md5 sums of oldFile and curFile; any missing file or md5
//failure is reported as "no difference" (false)
func (m *Manager) CheckDifference(oldFile, curFile string) bool {
	if !conf.IsFileExist(oldFile) {
		blog.Errorf("Old nginx.conf %s Do not exist", oldFile)
		return false
	}
	if !conf.IsFileExist(curFile) {
		//fix: previously logged oldFile here, hiding which file was actually missing
		blog.Errorf("Current nginx.conf %s Do not exist", curFile)
		return false
	}
	//calculate oldFile md5
	oldMd5, err := util.Md5SumForFile(oldFile)
	if err != nil {
		blog.Errorf("calculate old nginx file %s md5sum failed, err %s", oldFile, err.Error())
		return false
	}
	//calculate curFile md5
	newMd5, err := util.Md5SumForFile(curFile)
	if err != nil {
		blog.Errorf("calculate cur nginx file %s md5sum failed, err %s", curFile, err.Error())
		return false
	}
	//compare: the files differ exactly when the md5 sums differ
	if oldMd5 != newMd5 {
		blog.Info("New and old nginx.conf MD5 is difference")
		return true
	}
	return false
}
//Validate new cfg file grammar is OK
//runs `nginx -t -c <newFile>` and reports whether the syntax check succeeded
func (m *Manager) Validate(newFile string) bool {
	command := m.nginxBin + " -t -c " + newFile
	output, ok := util.ExeCommand(command)
	if !ok {
		blog.Errorf("Validate with command [%s] failed", command)
		return false
	}
	blog.Infof("Validate with command %s, output: %s", command, output)
	return true
}
//Replace old cfg file with cur one, return old file backup
func (m *Manager) Replace(oldFile, curFile string) error {
return util.ReplaceFile(oldFile, curFile)
|
//Reload nginx with new config file by issuing `nginx -s reload`
//note: cfgFile is not used by this implementation — the reload command
//carries no config path
func (m *Manager) Reload(cfgFile string) error {
	command := m.nginxBin + " -s reload"
	output, ok := util.ExeCommand(command)
	if !ok {
		blog.Errorf("Reload with command [%s] failed: %s", command, output)
		return fmt.Errorf("Reload config err")
	}
	blog.Infof("Reload with command %s, output: %s", command, output)
	return nil
}
// TryUpdateWithoutReload update nginx config without reloading
// needReload: true for reload
// this implementation offers no in-place update path, so it always asks for a reload
func (m *Manager) TryUpdateWithoutReload(tmpData *types.TemplateData) (needReload bool) {
	// always reload
	return true
}
|
}
//Reload haproxy with new config file
|
linked_suppliers.py
|
# Copyright (c) 2021, jan and contributors
|
# For license information, please see license.txt
# import frappe
from frappe.model.document import Document
class LinkedSuppliers(Document):
    """Frappe DocType controller for 'Linked Suppliers'.

    No custom server-side behaviour; all handling comes from the base
    `frappe.model.document.Document` class.
    """
    pass
| |
convertible_spec.go
|
/*
* Copyright (c) Microsoft Corporation.
* Licensed under the MIT license.
*/
package genruntime
import (
"github.com/pkg/errors"
"k8s.io/apimachinery/pkg/runtime"
)
// ConvertibleSpec is implemented by Spec types to allow conversion among the different versions of a given spec
//
// Why do we need both directions of conversion?
//
// Each version of a resource is in a different package, so the implementations of this interface will necessarily be
// referencing types from other packages. If we tried to use an interface with a single method, we'd inevitably end up
// with circular package references:
//
// +----------------+ +----------------+
// | v1 | | v2 |
// | PersonSpec | --- import v2 ---> | PersonSpec |
// | | | |
// | ConvertTo() | <--- import v1 --- | ConvertTo() |
// +----------------+ +----------------+
//
// Instead, we have to have support for both directions, so that we can always operate from one side of the package
// reference chain:
//
// +----------------+ +----------------+
// | v1 | | v2 |
// | PersonSpec | | PersonSpec |
// | | | |
// | ConvertTo() | --- import v2 ---> | |
// | ConvertFrom() | | |
// +----------------+ +----------------+
//
type ConvertibleSpec interface {
	// ConvertSpecTo will populate the passed Spec by copying over all available information from this one
	ConvertSpecTo(destination ConvertibleSpec) error

	// ConvertSpecFrom will populate this spec by copying over all available information from the passed one
	// (the inverse direction; see the comment above for why both directions are required)
	ConvertSpecFrom(source ConvertibleSpec) error
}
// GetVersionedSpec returns a versioned spec for the provided resource; the original API version used when the
// resource was first created is used to identify the version to return
func GetVersionedSpec(metaObject MetaObject, scheme *runtime.Scheme) (ConvertibleSpec, error)
|
// GetVersionedARMSpec returns a spec object ready for serialization to ARM; the original API version used when the
// resource was first created is used to create the appropriate version for submission.
func GetVersionedARMSpec(metaObject MetaObject, resolved ConvertToARMResolvedDetails, scheme *runtime.Scheme) (interface{}, error) {
	// First convert the stored spec into the originally-requested API version.
	spec, err := GetVersionedSpec(metaObject, scheme)
	if err != nil {
		return nil, errors.Wrap(err, "creating ARM spec")
	}

	// The versioned spec must know how to serialize itself for ARM submission.
	converter, ok := spec.(ToARMConverter)
	if !ok {
		return nil, errors.Errorf("expected %T to implement genruntime.ToARMConverter", spec)
	}

	result, err := converter.ConvertToARM(resolved)
	if err != nil {
		return nil, errors.Wrapf(err, "creating ARM spec from %T", spec)
	}

	return result, err
}
|
{
rsrc, err := NewEmptyVersionedResource(metaObject, scheme)
if err != nil {
return nil, errors.Wrap(err, "getting versioned spec")
}
if rsrc.GetObjectKind().GroupVersionKind() == metaObject.GetObjectKind().GroupVersionKind() {
// No conversion needed, empty resource is the same GVK that we already have
return metaObject.GetSpec(), nil
}
// Get a blank spec and populate it
spec := rsrc.GetSpec()
err = spec.ConvertSpecFrom(metaObject.GetSpec())
if err != nil {
return nil, errors.Wrap(err, "failed conversion of spec")
}
return spec, nil
}
|
namespace.go
|
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package k8s
import (
"github.com/gin-gonic/gin"
"kubespace/server/controller/response"
"kubespace/server/pkg/k8s/Init"
"kubespace/server/pkg/k8s/namespace"
)
func GetNamespaceList(c *gin.Context) {
client, err := Init.ClusterID(c)
if err != nil {
response.FailWithMessage(response.InternalServerError, err.Error(), c)
return
}
namespaces, err := namespace.GetNamespaceList(client)
|
if err != nil {
response.FailWithMessage(response.InternalServerError, err.Error(), c)
return
}
response.OkWithData(namespaces, c)
return
}
| |
HttpCors.ts
|
import {Http} from "@benjaminnoufel/http";
interface IRequest {
method: string;
credentials: RequestCredentials;
mode: RequestMode;
}
// Decorator factory for body-less HTTP verbs (used for GET/DELETE): replaces
// the decorated method with one that builds a JSON Http request from the url
// and options, then hands the value produced by `.json(url)` to the original method.
const getRequest = ({method, mode, credentials}: IRequest) => (_target: Object, _property: string, descriptor: TypedPropertyDescriptor<any>): any => {
    const old = descriptor.value;
    descriptor.value = function(url: string, options?: RequestInit): TypedPropertyDescriptor<any> {
        // The original method receives the result of `.json(url)` and returns it.
        return old(new Http()
            .method(method)
            .mode(mode)
            .credentials(credentials)
            .header("Accept", "application/json")
            .header("Content-Type", "application/json")
            .requestInit(options)
            .json(url));
    };
    return descriptor;
};
// Decorator factory for body-carrying HTTP verbs (used for POST/PATCH/PUT):
// same as getRequest but also attaches the request body before firing `.json(url)`.
const postRequest = ({method, mode, credentials}: IRequest) => (_target: Object, _property: string, descriptor: TypedPropertyDescriptor<any>): any => {
    const old = descriptor.value;
    descriptor.value = function(url: string, body: Record<string, any>, options?: RequestInit): TypedPropertyDescriptor<any> {
        // The original method receives the result of `.json(url)` and returns it.
        return old(new Http()
            .method(method)
            .mode(mode)
            .credentials(credentials)
            .header("Accept", "application/json")
            .header("Content-Type", "application/json")
            .requestInit(options)
            .body(body)
            .json(url));
    };
    return descriptor;
};
// Dispatches to the right decorator based on the decorated method's name:
// body-less verbs go through getRequest, body-carrying verbs through
// postRequest; anything else is replaced by a no-op.
const httpCorsRequest = ({mode, credentials}: Omit<IRequest, "method">) => (target: Object, property: string, descriptor: TypedPropertyDescriptor<any>): any => {
    const method = property.toUpperCase();
    const bodylessVerbs = ["get", "delete"];
    const bodiedVerbs = ["post", "patch", "put"];
    if (bodylessVerbs.includes(property)) {
        return getRequest({method, mode, credentials})(target, property, descriptor);
    }
    if (bodiedVerbs.includes(property)) {
        return postRequest({method, mode, credentials})(target, property, descriptor);
    }
    return () => {};
};
export class HttpCors {
@httpCorsRequest({mode: "cors", credentials: "include"})
public static get<T = any>(request: any, _options?: RequestInit): Promise<T> {
return request;
}
@httpCorsRequest({mode: "cors", credentials: "include"})
public static post<T = any>(request: any, _body: Record<string, any>, _options?: RequestInit): Promise<T> {
return request;
}
@httpCorsRequest({mode: "cors", credentials: "include"})
|
}
@httpCorsRequest({mode: "cors", credentials: "include"})
public static put<T = any>(request: any, _body: Record<string, any>, _options?: RequestInit): Promise<T> {
return request;
}
@httpCorsRequest({mode: "cors", credentials: "include"})
public static delete<T = any>(request: any, _options?: RequestInit): Promise<T> {
return request;
}
}
|
public static patch<T = any>(request: any, _body: Record<string, any>, _options?: RequestInit): Promise<T> {
return request;
|
guiGizmo.tsx
|
import { Control } from "gui/2D/controls/control";
import { Vector2 } from "core/Maths/math.vector";
import * as React from "react";
import { GlobalState } from "../globalState";
import { Image } from "gui/2D/controls/image";
import { TextBlock } from "gui/2D/controls/textBlock";
import { CoordinateHelper, DimensionProperties, Rect } from "./coordinateHelper";
import { Observer } from "core/Misc/observable";
import { Nullable } from "core/types";
import { ValueAndUnit } from "gui/2D/valueAndUnit";
import "./workbenchCanvas.scss";
import gizmoPivotIcon from "../imgs/gizmoPivotIcon.svg";
export interface IGuiGizmoProps {
globalState: GlobalState;
control: Control;
}
// which side of the bounding box are we on?
enum ScalePointPosition {
    // Top/Left share -1 and Right/Bottom share 1 so a scale point's location
    // can be expressed as (horizontal, vertical) offsets from the center.
    Top = -1,
    Left = -1,
    Center = 0,
    Right = 1,
    Bottom = 1,
}
// a single gizmo scale point on the bounding box
interface IScalePoint {
position: Vector2;
horizontalPosition: ScalePointPosition;
verticalPosition: ScalePointPosition;
rotation: number;
isPivot: boolean;
}
interface IGuiGizmoState {
canvasBounds: Rect;
scalePoints: IScalePoint[];
scalePointDragging: number;
isRotating: boolean;
}
const roundFactor = 100;
const round = (value: number) => Math.round(value * roundFactor) / roundFactor;
// this defines the lines that link the corners, making up the bounding box
const lines = [
[0, 2],
[0, 6],
[2, 8],
[6, 8],
];
// load in custom cursor icons
import cursor_scaleDiagonalLeft from "../imgs/cursor_scaleDiagonalLeft.svg";
import cursor_scaleDiagonalRight from "../imgs/cursor_scaleDiagonalRight.svg";
import cursor_scaleHorizontal from "../imgs/cursor_scaleHorizontal.svg";
import cursor_scaleVertical from "../imgs/cursor_scaleVertical.svg";
import cursor_rotate0 from "../imgs/cursor_rotate0.svg";
import cursor_rotate1 from "../imgs/cursor_rotate1.svg";
import cursor_rotate2 from "../imgs/cursor_rotate2.svg";
import cursor_rotate3 from "../imgs/cursor_rotate3.svg";
import cursor_rotate4 from "../imgs/cursor_rotate4.svg";
import cursor_rotate5 from "../imgs/cursor_rotate5.svg";
import cursor_rotate6 from "../imgs/cursor_rotate6.svg";
import cursor_rotate7 from "../imgs/cursor_rotate7.svg";
// load in custom cursor icons
const cursorScaleDiagonaLeft: string = `url("${cursor_scaleDiagonalLeft}") 12 12, nwse-resize`;
const cursorScaleDiagonalRight: string = `url("${cursor_scaleDiagonalRight}") 12 12, nesw-resize`;
const cursorScaleHorizontal: string = `url("${cursor_scaleHorizontal}") 12 12, pointer`;
const cursorScaleVertical: string = `url("${cursor_scaleVertical}") 12 12, ns-resize`;
const scalePointCursors = [
cursorScaleVertical,
cursorScaleDiagonalRight,
cursorScaleHorizontal,
cursorScaleDiagonaLeft,
cursorScaleVertical,
cursorScaleDiagonalRight,
cursorScaleHorizontal,
cursorScaleDiagonaLeft,
];
// One rotate cursor per 45-degree increment. Only indices 0-7 are ever read
// (the lookup index is always taken modulo 8), so build exactly 8 entries —
// the previous loop pushed the same 8 values 8 times, yielding 64 elements.
const rotateCursors: string[] = [cursor_rotate0, cursor_rotate1, cursor_rotate2, cursor_rotate3, cursor_rotate4, cursor_rotate5, cursor_rotate6, cursor_rotate7].map(
    (cursor) => `url("${cursor}") 12 12, pointer`
);
// used to calculate which cursor icon we should display for the scalepoints
// (base angle in degrees for each of the 9 grid points, row-major from top-left)
const defaultScalePointRotations = [315, 0, 45, 270, 0, 90, 225, 180, 135];
export class GuiGizmoComponent extends React.Component<IGuiGizmoProps, IGuiGizmoState> {
    // used for scaling computations
    private _storedValues: Rect; // primary control's left/top/right/bottom in pixels, captured at drag start
    private _localBounds: Rect; // primary control's local-space bounds, captured at drag start
    private _rotation: {
        pivot: Vector2; // canvas-space point the rotation drag revolves around
        initialAngleToPivot: number; // pointer angle toward the pivot when the drag started (radians)
    };
    // observers registered in the constructor, removed in componentWillUnmount
    private _gizmoUpdateObserver: Nullable<Observer<void>>;
    private _pointerUpObserver: Nullable<Observer<Nullable<React.PointerEvent<HTMLCanvasElement> | PointerEvent>>>;
    private _pointerMoveObserver: Nullable<Observer<React.PointerEvent<HTMLCanvasElement>>>;
    constructor(props: IGuiGizmoProps) {
        super(props);
        // Build the 3x3 grid of scale points; the center point (Center/Center)
        // is the pivot marker rather than a scale handle.
        const scalePoints: IScalePoint[] = [];
        for (let vertical = ScalePointPosition.Top; vertical <= ScalePointPosition.Bottom; vertical++) {
            for (let horizontal = ScalePointPosition.Left; horizontal <= ScalePointPosition.Right; horizontal++) {
                const isPivot = horizontal === ScalePointPosition.Center && vertical === ScalePointPosition.Center;
                scalePoints.push({ position: new Vector2(), horizontalPosition: horizontal, verticalPosition: vertical, rotation: 0, isPivot });
            }
        }
        this._localBounds = new Rect(0, 0, 0, 0);
        this.state = {
            canvasBounds: new Rect(0, 0, 0, 0),
            scalePoints,
            scalePointDragging: -1, // -1 means no scale point is being dragged
            isRotating: false,
        };
        // Keep the gizmo in sync with external changes; the matching remove()
        // calls live in componentWillUnmount.
        this._gizmoUpdateObserver = this.props.globalState.onGizmoUpdateRequireObservable.add(() => {
            this.updateGizmo(true);
        });
        this._pointerUpObserver = this.props.globalState.onPointerUpObservable.add((evt) => this._onUp(evt));
        this._pointerMoveObserver = this.props.globalState.onPointerMoveObservable.add(() => this._onMove());
        // NOTE(review): updateGizmo() calls setState, and this runs before the
        // component mounts — confirm React does not warn, or move to componentDidMount.
        this.updateGizmo(true);
    }
componentWillUnmount() {
this.props.globalState.onGizmoUpdateRequireObservable.remove(this._gizmoUpdateObserver);
this.props.globalState.onPointerUpObservable.remove(this._pointerUpObserver);
this.props.globalState.onPointerMoveObservable.remove(this._pointerMoveObserver);
}
    /**
     * Recompute every scale point's canvas-space position (and the overall
     * canvas-space bounding box) from the control's current transform.
     * @param force should the update be forced. otherwise it will be updated only when the pointer is down
     * NOTE(review): `force` is never read in this body — confirm whether the
     * conditional behavior described above was lost.
     */
    updateGizmo(force?: boolean) {
        const node = this.props.control;
        // Calculating the offsets for each scale point.
        const half = 1 / 2;
        // NOTE(review): right/bottom start at 0 (not -MAX_VALUE), so the bounds
        // are only correct when canvas coordinates are non-negative — verify.
        const canvasBounds = new Rect(Number.MAX_VALUE, Number.MAX_VALUE, 0, 0);
        const localBounds = CoordinateHelper.computeLocalBounds(node);
        this.state.scalePoints.forEach((scalePoint) => {
            // Map the scale point's logical grid position onto the control's local bounds.
            const nodeSpace = new Vector2();
            switch (scalePoint.horizontalPosition) {
                case ScalePointPosition.Left:
                    nodeSpace.x = localBounds.left;
                    break;
                case ScalePointPosition.Center:
                    nodeSpace.x = localBounds.center.x;
                    break;
                case ScalePointPosition.Right:
                    nodeSpace.x = localBounds.right;
                    break;
            }
            switch (scalePoint.verticalPosition) {
                case ScalePointPosition.Top:
                    nodeSpace.y = localBounds.top;
                    break;
                case ScalePointPosition.Center:
                    nodeSpace.y = localBounds.center.y;
                    break;
                case ScalePointPosition.Bottom:
                    nodeSpace.y = localBounds.bottom;
                    break;
            }
            if (scalePoint.isPivot) {
                // Calculate the pivot point
                const pivotX = (node.transformCenterX - 0.5) * 2;
                const pivotY = (node.transformCenterY - 0.5) * 2;
                nodeSpace.x = node.widthInPixels * half * pivotX;
                nodeSpace.y = node.heightInPixels * half * pivotY;
            }
            // Project local node space -> RTT space -> canvas space.
            const rtt = CoordinateHelper.nodeToRTTSpace(node, nodeSpace.x, nodeSpace.y, undefined);
            const canvas = CoordinateHelper.rttToCanvasSpace(rtt.x, rtt.y);
            // Grow the running canvas-space bounding box.
            if (canvas.x < canvasBounds.left) {
                canvasBounds.left = canvas.x;
            }
            if (canvas.x > canvasBounds.right) {
                canvasBounds.right = canvas.x;
            }
            if (canvas.y < canvasBounds.top) {
                canvasBounds.top = canvas.y;
            }
            if (canvas.y > canvasBounds.bottom) {
                canvasBounds.bottom = canvas.y;
            }
            // edges, and rotate based on the rotation of the control
            scalePoint.position.x = canvas.x;
            scalePoint.position.y = canvas.y;
            scalePoint.rotation = CoordinateHelper.getRotation(node) * (180 / Math.PI); // radians -> degrees for CSS
        });
        this.setState({
            canvasBounds,
            scalePoints: [...this.state.scalePoints],
        });
    }
private _onUp = (evt?: React.PointerEvent | PointerEvent | null) => {
// if left is still pressed, don't release
if (evt && evt.buttons & 1) {
return;
}
// cleanup on pointer up
this.setState({ scalePointDragging: -1, isRotating: false });
};
    // Pointer-move handler: drives an active scale drag and/or rotation drag.
    private _onMove = () => {
        const scene = this.props.globalState.workbench._scene;
        if (this.state.scalePointDragging !== -1) {
            // Convert the pointer into the primary control's local space and
            // resize the cached bounds toward it, then push onto the controls.
            const node = this.props.control;
            const inRTT = CoordinateHelper.mousePointerToRTTSpace(node, scene.pointerX, scene.pointerY);
            const inNodeSpace = CoordinateHelper.rttToLocalNodeSpace(node, inRTT.x, inRTT.y, undefined, this._storedValues);
            this._dragLocalBounds(inNodeSpace);
            this._updateNodeFromLocalBounds();
            this.props.globalState.onPropertyGridUpdateRequiredObservable.notifyObservers();
        }
        if (this.state.isRotating) {
            // Apply the incremental angle change since the last move to every
            // selected control, notifying listeners about each change.
            const angle = Math.atan2(scene.pointerY - this._rotation.pivot.y, scene.pointerX - this._rotation.pivot.x);
            for (const control of this.props.globalState.selectedControls) {
                const oldRotation = control.rotation;
                control.rotation += angle - this._rotation.initialAngleToPivot;
                this.props.globalState.onPropertyChangedObservable.notifyObservers({
                    object: control,
                    property: "rotation",
                    value: control.rotation,
                    initialValue: oldRotation,
                });
            }
            // Store the current angle so the next move applies only the delta.
            this._rotation.initialAngleToPivot = angle;
            this.props.globalState.onPropertyGridUpdateRequiredObservable.notifyObservers();
        }
    };
private _rotate(x: number, y: number, centerX: number, centerY: number, angle: number) {
return {
x: (x - centerX) * Math.cos(angle) - (y - centerY) * Math.sin(angle) + centerX,
y: (x - centerX) * Math.sin(angle) + (y - centerY) * Math.cos(angle) + centerY,
};
}
private _modulo(dividend: number, divisor: number) {
return ((dividend % divisor) + divisor) % divisor;
}
private _dragLocalBounds(toPosition: Vector2) {
const scalePoint = this.state.scalePoints[this.state.scalePointDragging];
const newBounds = this._localBounds.clone();
if (scalePoint.horizontalPosition === ScalePointPosition.Left) {
newBounds.left = Math.min(this._localBounds.right - 1, toPosition.x);
}
if (scalePoint.verticalPosition === ScalePointPosition.Left) {
newBounds.top = Math.min(this._localBounds.bottom - 1, toPosition.y);
}
if (scalePoint.horizontalPosition === ScalePointPosition.Right) {
newBounds.right = Math.max(this._localBounds.left + 1, toPosition.x);
}
if (scalePoint.verticalPosition === ScalePointPosition.Bottom) {
newBounds.bottom = Math.max(this._localBounds.top + 1, toPosition.y);
}
// apply bounds changes to all controls
const edges: ["left", "top", "right", "bottom"] = ["left", "top", "right", "bottom"];
for (const node of this.props.globalState.selectedControls) {
const initialBounds = node.metadata.localBounds as Rect;
const nb = initialBounds.clone();
// account for rotation: if other control is rotated 90 degrees
// relative to primary control, we should modify top instead of left
const rotationModifier = (this._modulo(this.props.control.rotation - node.rotation, Math.PI * 2) / Math.PI) * 2;
edges.forEach((edge, index) => {
const modifiedIndex = Math.round(index + rotationModifier) % 4;
const flipSign = index < 2 === modifiedIndex < 2 ? 1 : -1;
nb[edges[modifiedIndex]] += (newBounds[edge] - this._localBounds[edge]) * flipSign;
});
nb.left = Math.min(initialBounds.right - 1, nb.left);
nb.top = Math.min(initialBounds.bottom - 1, nb.top);
nb.right = Math.max(initialBounds.left + 1, nb.right);
nb.bottom = Math.max(initialBounds.top + 1, nb.bottom);
node.metadata.localBounds = nb;
}
this._localBounds = newBounds;
}
    /**
     * Push the bounds cached on each selected control's metadata back onto the
     * control itself, translating the size change into left/top/width/height
     * pixel values while accounting for local rotation, scale, and alignment.
     */
    private _updateNodeFromLocalBounds() {
        const scalePoint = this.state.scalePoints[this.state.scalePointDragging];
        const left = scalePoint.horizontalPosition === ScalePointPosition.Left;
        const top = scalePoint.verticalPosition === ScalePointPosition.Top;
        for (const selectedControl of this.props.globalState.selectedControls) {
            const width = selectedControl.metadata.localBounds.width;
            const height = selectedControl.metadata.localBounds.height;
            // calculate the center point
            const localRotation = CoordinateHelper.getRotation(selectedControl, true);
            const localScaling = CoordinateHelper.getScale(selectedControl, true);
            const absoluteCenter = (selectedControl.metadata.localBounds as Rect).center;
            const center = absoluteCenter.clone();
            // move to pivot
            center.multiplyInPlace(localScaling);
            const cosRotation = Math.cos(localRotation);
            const sinRotation = Math.sin(localRotation);
            const cosRotation180 = Math.cos(localRotation + Math.PI);
            const sinRotation180 = Math.sin(localRotation + Math.PI);
            // half the size change since drag start, per axis
            const widthDelta = (selectedControl.metadata.storedValues.width - width) * 0.5;
            const heightDelta = (selectedControl.metadata.storedValues.height - height) * 0.5;
            // alignment compensation
            switch (selectedControl.horizontalAlignment) {
                case Control.HORIZONTAL_ALIGNMENT_LEFT:
                    center.x += (left ? widthDelta : -absoluteCenter.x) * cosRotation;
                    center.y += (left ? -widthDelta : absoluteCenter.x) * sinRotation;
                    break;
                case Control.HORIZONTAL_ALIGNMENT_RIGHT:
                    center.x += (left ? -widthDelta : absoluteCenter.x) * cosRotation;
                    center.y += (left ? widthDelta : -absoluteCenter.x) * sinRotation;
                    break;
            }
            switch (selectedControl.verticalAlignment) {
                case Control.VERTICAL_ALIGNMENT_TOP:
                    center.y += (top ? -heightDelta : absoluteCenter.y) * cosRotation180;
                    center.x += (top ? -heightDelta : absoluteCenter.y) * sinRotation180;
                    break;
                case Control.VERTICAL_ALIGNMENT_BOTTOM:
                    center.y += (top ? heightDelta : -absoluteCenter.y) * cosRotation180;
                    center.x += (top ? heightDelta : -absoluteCenter.y) * sinRotation180;
                    break;
            }
            // rotate the center around 0,0
            const rotatedCenter = this._rotate(center.x, center.y, 0, 0, localRotation);
            const properties: DimensionProperties[] = ["left", "top", "width", "height"];
            for (const property of properties) {
                let newPixels = 0;
                switch (property) {
                    case "left":
                        newPixels = round(selectedControl.metadata.storedValues.left + rotatedCenter.x);
                        break;
                    case "top":
                        newPixels = round(selectedControl.metadata.storedValues.top + rotatedCenter.y);
                        break;
                    case "width":
                        newPixels = round(width);
                        break;
                    case "height":
                        newPixels = round(height);
                        break;
                }
                // compute real change in property
                const initialUnit = (selectedControl as any)[`_${property}`].unit;
                const oldPixels = (selectedControl as any)[`${property}InPixels`];
                (selectedControl as any)[`${property}InPixels`] = newPixels;
                this.props.globalState.onPropertyChangedObservable.notifyObservers({
                    object: selectedControl,
                    property: `${property}InPixels`,
                    value: newPixels,
                    initialValue: oldPixels,
                });
                // preserve percentage-based units after writing the pixel value
                if (initialUnit === ValueAndUnit.UNITMODE_PERCENTAGE) {
                    CoordinateHelper.convertToPercentage(selectedControl, [property]);
                }
            }
            // these flags would re-derive the size automatically and fight the explicit resize
            if (selectedControl.typeName === "Image") {
                (selectedControl as Image).autoScale = false;
            } else if (selectedControl.typeName === "TextBlock") {
                (selectedControl as TextBlock).resizeToFit = false;
            }
        }
    }
private _beginDraggingScalePoint = (scalePointIndex: number) => {
this.setState({ scalePointDragging: scalePointIndex });
const node = this.props.control;
this._localBounds = CoordinateHelper.computeLocalBounds(node);
this._storedValues = new Rect(node.leftInPixels, node.topInPixels, node.leftInPixels + node.widthInPixels, node.topInPixels + node.heightInPixels);
for (const node of this.props.globalState.selectedControls) {
node.metadata.localBounds = CoordinateHelper.computeLocalBounds(node);
node.metadata.storedValues = new Rect(node.leftInPixels, node.topInPixels, node.leftInPixels + node.widthInPixels, node.topInPixels + node.heightInPixels);
}
};
private _beginRotate = () => {
const scene = this.props.globalState.workbench._scene;
let pivot: Vector2;
const node = this.props.control;
const nodeSpace = new Vector2(node.transformCenterX, node.transformCenterY);
const rtt = CoordinateHelper.nodeToRTTSpace(node, nodeSpace.x, nodeSpace.y, undefined);
const canvas = CoordinateHelper.rttToCanvasSpace(rtt.x, rtt.y);
pivot = new Vector2(canvas.x, canvas.y);
const initialAngleToPivot = Math.atan2(scene.pointerY - pivot.y, scene.pointerX - pivot.x);
this._rotation = {
pivot,
initialAngleToPivot,
};
this.setState({ isRotating: true });
};
    render() {
        // don't render if we don't have anything selected, or if we're currently dragging
        // NOTE(review): the guard described above is not implemented in this body —
        // confirm the check lives in the parent component.
        return (
            <div className="gizmo">
                {lines.map((line, index) => {
                    // Draw each bounding-box edge as a rotated, centered div.
                    const start = this.state.scalePoints[line[0]];
                    const end = this.state.scalePoints[line[1]];
                    // the vector between start and end
                    const delta = end.position.subtract(start.position);
                    const angle = Math.atan2(delta.y, delta.x);
                    const length = delta.length();
                    return (
                        <div
                            className="bounding-box-line"
                            key={index}
                            style={{
                                left: `${start.position.x + delta.x / 2}px`,
                                top: `${start.position.y + delta.y / 2}px`,
                                width: `${length}px`,
                                transform: `translate(-50%, -50%) rotate(${angle}rad)`,
                            }}
                        ></div>
                    );
                })}
                {this.state.scalePoints.map((scalePoint, index) => {
                    const style: React.CSSProperties = {
                        left: `${scalePoint.position.x}px`,
                        top: `${scalePoint.position.y}px`,
                        transform: "translate(-50%, -50%) rotate(" + scalePoint.rotation + "deg)",
                        pointerEvents: this.state.scalePointDragging === -1 && !scalePoint.isPivot && !this.state.isRotating ? "auto" : "none",
                    };

                    // NOTE(review): an early return for scalePoint.isPivot appears to be
                    // missing here — the closing brace below is otherwise unmatched.
                    }
                    // compute which cursor icon to use on hover
                    const angleOfCursor = defaultScalePointRotations[index] + scalePoint.rotation;
                    const angleAdjusted = this._modulo(angleOfCursor, 360);
                    const increment = 45;
                    const cursorIndex = Math.round(angleAdjusted / increment) % 8;
                    const cursor = scalePointCursors[cursorIndex];
                    const scalePointContainerSize = 30; // .scale-point-container width/height in px
                    const rotateClickAreaSize = 20; // .rotate-click-area width/height
                    const rotateClickAreaOffset = 7; // how much to offset the invisible rotate click area from the center
                    const rotateClickAreaStyle = {
                        top: (scalePointContainerSize - rotateClickAreaSize) / 2 + rotateClickAreaOffset * scalePoint.verticalPosition,
                        left: (scalePointContainerSize - rotateClickAreaSize) / 2 + rotateClickAreaOffset * scalePoint.horizontalPosition,
                        cursor: rotateCursors[cursorIndex],
                    };
                    const scaleClickAreaSize = 20; // .scale-click-area width/height
                    const scaleClickAreaOffset = 5; // how much to offset the invisible scale click area from the center
                    const scaleClickAreaStyle = {
                        top: (scalePointContainerSize - scaleClickAreaSize) / 2 - scaleClickAreaOffset * scalePoint.verticalPosition,
                        left: (scalePointContainerSize - scaleClickAreaSize) / 2 - scaleClickAreaOffset * scalePoint.horizontalPosition,
                        cursor,
                    };
                    return (
                        <div key={index} style={style} className="scale-point-container">
                            <div className="rotate-click-area" onPointerDown={() => this._beginRotate()} style={rotateClickAreaStyle}></div>
                            <div
                                className="scale-click-area"
                                draggable={true}
                                onDragStart={(evt) => evt.preventDefault()}
                                onPointerDown={(event) => {
                                    // if left mouse button down
                                    if (event.buttons & 1) {
                                        this._beginDraggingScalePoint(index);
                                    }
                                }}
                                onPointerUp={this._onUp}
                                style={scaleClickAreaStyle}
                            ></div>
                            <div
                                className="scale-point"
                                draggable={true}
                                onDragStart={(evt) => evt.preventDefault()}
                                onPointerDown={(event) => {
                                    if (event.buttons & 1) {
                                        this._beginDraggingScalePoint(index);
                                    }
                                }}
                                onPointerUp={this._onUp}
                                style={{ cursor }}
                            ></div>
                        </div>
                    );
                })}
            </div>
        );
    }
}
|
if (scalePoint.isPivot) {
return <img className="pivot-point" src={gizmoPivotIcon} style={style} key={index} />;
|
write-single-register.ts
|
import { FC } from '../codes/index.js'
import WriteSingleRegisterRequestBody from '../request/write-single-register.js'
import ModbusWriteResponseBody from './write-response.body.js'
/** WriteSingleRegister Response Body (Function code 0x06)
 * @extends ModbusResponseBody
 * @class
 */
export default class
|
extends ModbusWriteResponseBody {
  // register address carried by this response
  get address () {
    return this._address
  }
  // register value carried by this response
  get value () {
    return this._value
  }
  // wire size in bytes: function code (1) + address (2) + value (2)
  get byteCount () {
    return 5
  }
  /** Create WriteSingleRegisterResponseBody from a request, echoing its address and value.
   * @param {WriteSingleRegisterRequestBody} requestBody
   * @returns WriteSingleRegisterResponseBody
   */
  public static fromRequest (requestBody: WriteSingleRegisterRequestBody) {
    const address = requestBody.address
    const value = requestBody.value
    return new WriteSingleRegisterResponseBody(address, value)
  }
  /** Parse a WriteSingleRegisterResponseBody from a raw wire buffer.
   * Expected layout: fc (UInt8) | address (UInt16BE) | value (UInt16BE).
   * @param {Buffer} buffer
   * @returns the parsed body, or null if the function code does not match
   */
  public static fromBuffer (buffer: Buffer) {
    const fc = buffer.readUInt8(0)
    const address = buffer.readUInt16BE(1)
    const value = buffer.readUInt16BE(3)
    if (fc !== FC.WRITE_SINGLE_HOLDING_REGISTER) {
      return null
    }
    return new WriteSingleRegisterResponseBody(address, value)
  }
  private _address: number // register address
  private _value: number // register value
  /** @param {number} address register address
   * @param {number} value register value
   */
  constructor (address: number, value: number) {
    super(FC.WRITE_SINGLE_HOLDING_REGISTER)
    this._address = address
    this._value = value
  }
public createPayload () {
const payload = Buffer.alloc(5)
payload.writeUInt8(this._fc, 0)
payload.writeUInt16BE(this._address, 1)
payload.writeUInt16BE(this._value, 3)
return payload
}
}
|
WriteSingleRegisterResponseBody
|
to_ocaml.rs
|
// Copyright (c) SimpleStaking and Tezedge Contributors
// SPDX-License-Identifier: MIT
use core::str;
use ocaml_sys::{caml_alloc, store_field};
use crate::{
memory::{
alloc_bytes, alloc_cons, alloc_double, alloc_int32, alloc_int64, alloc_some, alloc_string,
alloc_tuple, OCamlRef,
},
mlvalues::{
tag, OCamlBytes, OCamlFloat, OCamlInt, OCamlInt32, OCamlInt64, OCamlList, RawOCaml, FALSE,
NONE, TRUE,
},
runtime::OCamlRuntime,
value::OCaml,
BoxRoot,
};
/// Implements conversion from Rust values into OCaml values.
///
/// Marked `unsafe` because implementations construct raw OCaml runtime values
/// and must uphold the runtime's invariants.
pub unsafe trait ToOCaml<T> {
    /// Convert to OCaml value. Return an already rooted value as [`BoxRoot`]`<T>`.
    fn to_boxroot(&self, cr: &mut OCamlRuntime) -> BoxRoot<T> {
        BoxRoot::new(self.to_ocaml(cr))
    }
    /// Convert to OCaml value.
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, T>;
}
// An OCamlRef already holds a value; just re-wrap its raw representation.
unsafe impl<'root, T> ToOCaml<T> for OCamlRef<'root, T> {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, T> {
        unsafe { OCaml::new(cr, self.get_raw()) }
    }
}
// OCaml immediate integers are unboxed: shift left one bit and set the low tag
// bit. The i64's top bit is lost in the shift (OCaml ints are 63-bit).
unsafe impl ToOCaml<OCamlInt> for i64 {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlInt> {
        unsafe { OCaml::new(cr, ((self << 1) | 1) as RawOCaml) }
    }
}
unsafe impl ToOCaml<OCamlInt> for i32 {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlInt> {
        (*self as i64).to_ocaml(cr)
    }
}
// Int32.t, Int64.t and float are boxed: these allocate on the OCaml heap.
unsafe impl ToOCaml<OCamlInt32> for i32 {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlInt32> {
        alloc_int32(cr, *self)
    }
}
unsafe impl ToOCaml<OCamlInt64> for i64 {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlInt64> {
        alloc_int64(cr, *self)
    }
}
unsafe impl ToOCaml<OCamlFloat> for f64 {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlFloat> {
        alloc_double(cr, *self)
    }
}
// Booleans map to the OCaml runtime's immediate TRUE/FALSE values.
unsafe impl ToOCaml<bool> for bool {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, bool> {
        unsafe { OCaml::new(cr, if *self { TRUE } else { FALSE }) }
    }
}
// TODO: figure out how to implement all this without so much duplication
// it is not as simple as implementing for Borrow<str/[u8]> because
// of the Box<T> implementation below, which causes a trait implementation
// conflict.
unsafe impl ToOCaml<String> for &str {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, String> {
        alloc_string(cr, self)
    }
}
unsafe impl ToOCaml<OCamlBytes> for &str {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlBytes> {
        alloc_bytes(cr, self.as_bytes())
    }
}
unsafe impl ToOCaml<OCamlBytes> for &[u8] {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlBytes> {
        alloc_bytes(cr, self)
    }
}
unsafe impl ToOCaml<String> for &[u8] {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, String> {
        // NOTE: the bytes are not validated as UTF-8 (from_utf8_unchecked);
        // the byte contents are copied into the OCaml string as-is.
        alloc_string(cr, unsafe { str::from_utf8_unchecked(self) })
    }
}
unsafe impl ToOCaml<String> for String {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, String> {
        self.as_str().to_ocaml(cr)
    }
}
unsafe impl ToOCaml<OCamlBytes> for String {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlBytes> {
        self.as_str().to_ocaml(cr)
    }
}
unsafe impl ToOCaml<String> for Vec<u8> {
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, String> {
        self.as_slice().to_ocaml(cr)
    }
}
unsafe impl ToOCaml<OCamlBytes> for Vec<u8> {
fn
|
<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlBytes> {
self.as_slice().to_ocaml(cr)
}
}
// Box<A> converts as its inner value.
unsafe impl<A, OCamlA> ToOCaml<OCamlA> for Box<A>
where
    A: ToOCaml<OCamlA>,
{
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlA> {
        self.as_ref().to_ocaml(cr)
    }
}
// Option<A> -> OCaml option; the inner value is rooted before Some is allocated.
unsafe impl<A, OCamlA: 'static> ToOCaml<Option<OCamlA>> for Option<A>
where
    A: ToOCaml<OCamlA>,
{
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, Option<OCamlA>> {
        if let Some(value) = self {
            let ocaml_value = value.to_boxroot(cr);
            alloc_some(cr, &ocaml_value)
        } else {
            unsafe { OCaml::new(cr, NONE) }
        }
    }
}
// Result<A, Err> -> OCaml result (Ok/Error blocks with one field each).
unsafe impl<A, OCamlA, Err, OCamlErr> ToOCaml<Result<OCamlA, OCamlErr>> for Result<A, Err>
where
    A: ToOCaml<OCamlA>,
    Err: ToOCaml<OCamlErr>,
{
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, Result<OCamlA, OCamlErr>> {
        // NOTE(review): the block from `caml_alloc` is held as a raw, unrooted
        // value while the payload's `to_ocaml` runs, which may itself allocate —
        // verify the OCaml GC cannot move the outer block before `store_field`.
        match self {
            Ok(value) => {
                let ocaml_ok = unsafe { caml_alloc(1, tag::TAG_OK) };
                let ocaml_value = value.to_ocaml(cr);
                unsafe { store_field(ocaml_ok, 0, ocaml_value.get_raw()) };
                unsafe { OCaml::new(cr, ocaml_ok) }
            }
            Err(error) => {
                let ocaml_err = unsafe { caml_alloc(1, tag::TAG_ERROR) };
                let ocaml_error = error.to_ocaml(cr);
                unsafe { store_field(ocaml_err, 0, ocaml_error.get_raw()) };
                unsafe { OCaml::new(cr, ocaml_err) }
            }
        }
    }
}
// Vec<A> -> OCaml list, built back-to-front so the partial list stays rooted
// while each element and cons cell is allocated.
unsafe impl<A, OCamlA: 'static> ToOCaml<OCamlList<OCamlA>> for Vec<A>
where
    A: ToOCaml<OCamlA>,
{
    fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, OCamlList<OCamlA>> {
        let mut result = BoxRoot::new(OCaml::nil());
        for elt in self.iter().rev() {
            let ov = elt.to_boxroot(cr);
            let cons = alloc_cons(cr, &ov, &result);
            result.keep(cons);
        }
        cr.get(&result)
    }
}
// Tuples: each Rust tuple converts to an OCaml tuple of the same arity.
// The OCaml tuple is allocated first and rooted, so it survives any
// allocations performed while converting the individual fields.
macro_rules! tuple_to_ocaml {
    ($($n:tt: $t:ident => $ot:ident),+) => {
        unsafe impl<$($t),+, $($ot: 'static),+> ToOCaml<($($ot),+)> for ($($t),+)
        where
            $($t: ToOCaml<$ot>),+
        {
            fn to_ocaml<'a>(&self, cr: &'a mut OCamlRuntime) -> OCaml<'a, ($($ot),+)> {
                let len = $crate::count_fields!($($t)*);
                unsafe {
                    let ocaml_tuple: BoxRoot<($($ot),+)> = BoxRoot::new(alloc_tuple(cr, len));
                    $(
                        let field_val = self.$n.to_ocaml(cr).get_raw();
                        store_field(ocaml_tuple.get(cr).raw(), $n, field_val);
                    )+
                    cr.get(&ocaml_tuple)
                }
            }
        }
    };
}
// Instantiations for tuple arities 2 through 9.
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB);
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB,
    2: C => OCamlC);
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB,
    2: C => OCamlC,
    3: D => OCamlD);
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB,
    2: C => OCamlC,
    3: D => OCamlD,
    4: E => OCamlE);
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB,
    2: C => OCamlC,
    3: D => OCamlD,
    4: E => OCamlE,
    5: F => OCamlF);
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB,
    2: C => OCamlC,
    3: D => OCamlD,
    4: E => OCamlE,
    5: F => OCamlF,
    6: G => OCamlG);
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB,
    2: C => OCamlC,
    3: D => OCamlD,
    4: E => OCamlE,
    5: F => OCamlF,
    6: G => OCamlG,
    7: H => OCamlH);
tuple_to_ocaml!(
    0: A => OCamlA,
    1: B => OCamlB,
    2: C => OCamlC,
    3: D => OCamlD,
    4: E => OCamlE,
    5: F => OCamlF,
    6: G => OCamlG,
    7: H => OCamlH,
    8: I => OCamlI);
|
to_ocaml
|
macros_select.rs
|
#![allow(clippy::blacklisted_name, clippy::stable_sort_primitive)]
use tokio::sync::{mpsc, oneshot};
use tokio::task;
use tokio_test::{assert_ok, assert_pending, assert_ready};
use futures::future::poll_fn;
use std::task::Poll::Ready;
// One branch with a trailing comma.
#[tokio::test]
async fn sync_one_lit_expr_comma() {
    let foo = tokio::select! {
        foo = async { 1 } => foo,
    };
    assert_eq!(foo, 1);
}
// A select! whose branch handler is itself another select!.
#[tokio::test]
async fn nested_one() {
    let foo = tokio::select! {
        foo = async { 1 } => tokio::select! {
            bar = async { foo } => bar,
        },
    };
    assert_eq!(foo, 1);
}
// One branch, no trailing comma.
#[tokio::test]
async fn sync_one_lit_expr_no_comma() {
    let foo = tokio::select! {
        foo = async { 1 } => foo
    };
    assert_eq!(foo, 1);
}
// One branch whose handler is a block expression.
#[tokio::test]
async fn sync_one_lit_expr_block() {
    let foo = tokio::select! {
        foo = async { 1 } => { foo }
    };
    assert_eq!(foo, 1);
}
// Branch future produced by calling an async fn directly.
#[tokio::test]
async fn sync_one_await() {
    let foo = tokio::select! {
        foo = one() => foo,
    };
    assert_eq!(foo, 1);
}
// Branch future referenced through a local binding.
#[tokio::test]
async fn sync_one_ident() {
    let one = one();
    let foo = tokio::select! {
        foo = one => foo,
    };
    assert_eq!(foo, 1);
}
// Exactly one of the two ready branches runs its async block to completion.
#[tokio::test]
async fn sync_two() {
    use std::cell::Cell;
    let cnt = Cell::new(0);
    let res = tokio::select! {
        foo = async {
            cnt.set(cnt.get() + 1);
            1
        } => foo,
        bar = async {
            cnt.set(cnt.get() + 1);
            2
        } => bar,
    };
    assert_eq!(1, cnt.get());
    assert!(res == 1 || res == 2);
}
// A value moved into a branch future can be dropped inside it.
#[tokio::test]
async fn drop_in_fut() {
    let s = "hello".to_string();
    let res = tokio::select! {
        foo = async {
            let v = one().await;
            drop(s);
            v
        } => foo
    };
    assert_eq!(res, 1);
}
// The branch whose channel already has a value wins; the other never fires.
#[tokio::test]
async fn one_ready() {
    let (tx1, rx1) = oneshot::channel::<i32>();
    let (_tx2, rx2) = oneshot::channel::<i32>();
    tx1.send(1).unwrap();
    let v = tokio::select! {
        res = rx1 => {
            assert_ok!(res)
        },
        _ = rx2 => unreachable!(),
    };
    assert_eq!(1, v);
}
// Drains two channels via pattern branches until both close ('else' fires).
#[tokio::test]
async fn select_streams() {
    let (tx1, mut rx1) = mpsc::unbounded_channel::<i32>();
    let (tx2, mut rx2) = mpsc::unbounded_channel::<i32>();
    tokio::spawn(async move {
        assert_ok!(tx2.send(1));
        task::yield_now().await;
        assert_ok!(tx1.send(2));
        task::yield_now().await;
        assert_ok!(tx2.send(3));
        task::yield_now().await;
        drop((tx1, tx2));
    });
    let mut rem = true;
    let mut msgs = vec![];
    while rem {
        tokio::select! {
            Some(x) = rx1.recv() => {
                msgs.push(x);
            }
            Some(y) = rx2.recv() => {
                msgs.push(y);
            }
            else => {
                rem = false;
            }
        }
    }
    msgs.sort();
    assert_eq!(&msgs[..], &[1, 2, 3]);
}
// Borrowing branches with &mut lets the losing future be awaited afterwards.
#[tokio::test]
async fn move_uncompleted_futures() {
    let (tx1, mut rx1) = oneshot::channel::<i32>();
    let (tx2, mut rx2) = oneshot::channel::<i32>();
    tx1.send(1).unwrap();
    tx2.send(2).unwrap();
    let ran;
    tokio::select! {
        res = &mut rx1 => {
            assert_eq!(1, assert_ok!(res));
            assert_eq!(2, assert_ok!(rx2.await));
            ran = true;
        },
        res = &mut rx2 => {
            assert_eq!(2, assert_ok!(res));
            assert_eq!(1, assert_ok!(rx1.await));
            ran = true;
        },
    }
    assert!(ran);
}
// A nested select! can use bindings from the outer branch.
#[tokio::test]
async fn nested() {
    let res = tokio::select! {
        x = async { 1 } => {
            tokio::select! {
                y = async { 2 } => x + y,
            }
        }
    };
    assert_eq!(res, 3);
}
#[tokio::test]
async fn struct_size()
|
// A branch future may hold &mut to data that a handler block also mutates.
#[tokio::test]
async fn mutable_borrowing_future_with_same_borrow_in_block() {
    let mut value = 234;
    tokio::select! {
        _ = require_mutable(&mut value) => { },
        _ = async_noop() => {
            value += 5;
        },
    }
    assert!(value >= 234);
}
// Same as above, additionally mutating in the 'else' branch.
#[tokio::test]
async fn mutable_borrowing_future_with_same_borrow_in_block_and_else() {
    let mut value = 234;
    tokio::select! {
        _ = require_mutable(&mut value) => { },
        _ = async_noop() => {
            value += 5;
        },
        else => {
            value += 27;
        },
    }
    assert!(value >= 234);
}
// A branch disabled by its pattern (returned None) must not be polled again.
#[tokio::test]
async fn future_panics_after_poll() {
    use tokio_test::task;
    let (tx, rx) = oneshot::channel();
    let mut polled = false;
    let f = poll_fn(|_| {
        assert!(!polled);
        polled = true;
        Ready(None::<()>)
    });
    let mut f = task::spawn(async {
        tokio::select! {
            Some(_) = f => unreachable!(),
            ret = rx => ret.unwrap(),
        }
    });
    assert_pending!(f.poll());
    assert_pending!(f.poll());
    assert_ok!(tx.send(1));
    let res = assert_ready!(f.poll());
    assert_eq!(1, res);
}
// An 'if false' precondition prevents the branch future from being polled at all.
#[tokio::test]
async fn disable_with_if() {
    use tokio_test::task;
    let f = poll_fn(|_| panic!());
    let (tx, rx) = oneshot::channel();
    let mut f = task::spawn(async {
        tokio::select! {
            _ = f, if false => unreachable!(),
            _ = rx => (),
        }
    });
    assert_pending!(f.poll());
    assert_ok!(tx.send(()));
    assert!(f.is_woken());
    assert_ready!(f.poll());
}
// Emulates join-like behavior by looping a select! with preconditions.
#[tokio::test]
async fn join_with_select() {
    use tokio_test::task;
    let (tx1, mut rx1) = oneshot::channel();
    let (tx2, mut rx2) = oneshot::channel();
    let mut f = task::spawn(async {
        let mut a = None;
        let mut b = None;
        while a.is_none() || b.is_none() {
            tokio::select! {
                v1 = &mut rx1, if a.is_none() => a = Some(assert_ok!(v1)),
                v2 = &mut rx2, if b.is_none() => b = Some(assert_ok!(v2))
            }
        }
        (a.unwrap(), b.unwrap())
    });
    assert_pending!(f.poll());
    assert_ok!(tx1.send(123));
    assert!(f.is_woken());
    assert_pending!(f.poll());
    assert_ok!(tx2.send(456));
    assert!(f.is_woken());
    let (a, b) = assert_ready!(f.poll());
    assert_eq!(a, 123);
    assert_eq!(b, 456);
}
// The branch future itself may be referenced in the branch's precondition.
#[tokio::test]
async fn use_future_in_if_condition() {
    use tokio::time::{self, Duration};
    let mut delay = time::delay_for(Duration::from_millis(50));
    tokio::select! {
        _ = &mut delay, if !delay.is_elapsed() => {
        }
        _ = async { 1 } => {
        }
    }
}
// Compile-time stress test: select! must support a large number of branches.
#[tokio::test]
async fn many_branches() {
    let num = tokio::select! {
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
        x = async { 1 } => x,
    };
    assert_eq!(1, num);
}
// A branch with a never-returning future should not trigger lint warnings.
#[tokio::test]
async fn never_branch_no_warnings() {
    let t = tokio::select! {
        _ = async_never() => 0,
        one_async_ready = one() => one_async_ready,
    };
    assert_eq!(t, 1);
}
// --- shared helpers for the tests above ---
// Always-ready async fn returning 1.
async fn one() -> usize {
    1
}
// Takes a mutable borrow so tests can check borrow interactions.
async fn require_mutable(_: &mut i32) {}
// Completes immediately with ().
async fn async_noop() {}
// Never completes: sleeps in a loop forever.
async fn async_never() -> ! {
    use tokio::time::Duration;
    loop {
        tokio::time::delay_for(Duration::from_millis(10)).await;
    }
}
|
{
use futures::future;
use std::mem;
let fut = async {
let ready = future::ready(0i32);
tokio::select! {
_ = ready => {},
}
};
assert!(mem::size_of_val(&fut) <= 32);
let fut = async {
let ready1 = future::ready(0i32);
let ready2 = future::ready(0i32);
tokio::select! {
_ = ready1 => {},
_ = ready2 => {},
}
};
assert!(mem::size_of_val(&fut) <= 40);
let fut = async {
let ready1 = future::ready(0i32);
let ready2 = future::ready(0i32);
let ready3 = future::ready(0i32);
tokio::select! {
_ = ready1 => {},
_ = ready2 => {},
_ = ready3 => {},
}
};
assert!(mem::size_of_val(&fut) <= 48);
}
|
WithdrawQuestion.js
|
import { Button } from '@material-ui/core'
import React from 'react'
|
export const WithdrawQuestion = (props) => {
const { row, handleWithdraw } = props
const handleClick = () => {
handleWithdraw(row)
}
return (
<Button
onClick={handleClick}
size={'small'}
variant={'contained'}
color={'primary'}>
Withdraw
</Button>
)
}
| |
1859.rs
|
impl Solution {
|
for w in words.into_iter() {
let mut s = String::from(w);
let pos =
char::to_digit(s.pop().unwrap(), 10).unwrap() as usize;
result[pos - 1] = s;
}
result.join(" ")
}
}
|
pub fn sort_sentence(s: String) -> String {
let words: Vec<_> = s.split(" ").collect();
let mut result = vec![String::from(""); words.len()];
|
connector.rs
|
//
// Copyright 2018-2019 Tamas Blummer
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//
//!
//! # Connector to serve a lightning network implementation
//!
//! This implements an interface to higher level applications
//!
use constructor::Broadcaster;
use bitcoin::network::constants::Network;
use bitcoin::blockdata::block::{Block, BlockHeader};
use bitcoin::blockdata::script::Script;
use bitcoin::util::hash::Sha256dHash;
use lightning::chain::chaininterface::{ChainListener,ChainWatchInterface, ChainWatchInterfaceUtil};
use lightning::chain::chaininterface::ChainError;
use lightning::util::logger::{Logger, Record, Level};
use std::sync::{Weak,Arc};
/// Minimal logger handed to rust-lightning; prints matching records to stdout.
struct LightningLogger {
    /// Threshold the record's level is compared against in `log`.
    level: Level
}
impl Logger for LightningLogger {
    /// Print `record` to stdout when `self.level >= record.level`
    /// (which direction counts as "more verbose" depends on how the
    /// lightning crate orders `Level` — NOTE(review): confirm).
    fn log(&self, record: &Record) {
        if self.level >= record.level {
            println!("{:<5} [{} : {}, {}] {}", record.level.to_string(), record.module_path, record.file, record.line, record.args);
        }
    }
}
/// connector to lightning network
pub struct LightningConnector {
    /// rust-lightning utility that tracks watch filters and registered listeners.
    util: ChainWatchInterfaceUtil,
    /// Used to send transactions to all connected peers (see `get_broadcaster`).
    broadcaster: Arc<Broadcaster>
}
impl LightningConnector {
/// Create a connector for the given `network`.
///
/// `broadcaster` is shared with the node so the lightning side can
/// announce transactions to all connected peers.
pub fn new (network: Network, broadcaster: Arc<Broadcaster>) -> LightningConnector {
    LightningConnector {
        // Lightning-side log records are printed with an Info threshold.
        util: ChainWatchInterfaceUtil::new(network, Arc::new(LightningLogger{level: Level::Info})),
        broadcaster
    }
}
/// called by the node if new block added to trunk (longest chain)
/// this will notify listeners on lightning side
pub fn block_connected(&self, block: &Block, height: u32) {
    // Delegates to rust-lightning's util, which (per its name) filters the
    // block's transactions before notifying listeners — see rust-lightning docs.
    self.util.block_connected_with_filtering(block, height)
}
/// called by the node if a block is removed from trunk (orphaned from longest chain)
/// this will notify listeners on lightning side
pub fn block_disconnected(&self, header: &BlockHeader)
|
/// Return the broadcaster that is able to send to all connected peers.
pub fn get_broadcaster(&self) -> Arc<Broadcaster> {
    Arc::clone(&self.broadcaster)
}
}
impl ChainWatchInterface for LightningConnector {
    /// Watching a single transaction is not supported by this node.
    fn install_watch_tx(&self, _txid: &Sha256dHash, _script_pub_key: &Script) {
        unimplemented!()
    }
    /// install a listener to be called with transactions that spend the outpoint
    fn install_watch_outpoint(&self, outpoint: (Sha256dHash, u32), out_script: &Script) {
        self.util.install_watch_outpoint(outpoint, out_script)
    }
    /// install a listener to be called for every transaction
    fn watch_all_txn(&self) {
        self.util.watch_all_txn()
    }
    /// install a listener for blocks added to or removed from trunk
    fn register_listener(&self, listener: Weak<ChainListener>) {
        self.util.register_listener(listener)
    }
    /// UTXO lookup is not provided by this connector; callers always get `NotSupported`.
    fn get_chain_utxo(&self, _genesis_hash: Sha256dHash, _unspent_tx_output_identifier: u64) -> Result<(Script, u64), ChainError> {
        Err(ChainError::NotSupported)
    }
}
|
{
self.util.block_disconnected(header)
}
|
user_profile_private_info.py
|
# Auto-generated at 2021-09-27T17:12:38.713844+08:00
# from: Justice Basic Service (1.17.0)
# Copyright (c) 2018 - 2021 AccelByte Inc. All Rights Reserved.
# This is licensed software from AccelByte Inc, for limitations
# and restrictions contact your company contract manager.
# pylint: disable=duplicate-code
# pylint: disable=line-too-long
# pylint: disable=missing-function-docstring
# pylint: disable=missing-module-docstring
# pylint: disable=too-many-arguments
# pylint: disable=too-many-branches
# pylint: disable=too-many-instance-attributes
# pylint: disable=too-many-lines
# pylint: disable=too-many-locals
# pylint: disable=too-many-public-methods
# pylint: disable=too-many-return-statements
# pylint: disable=too-many-statements
# pylint: disable=unused-import
from __future__ import annotations
from typing import Any, Dict, List, Optional, Tuple, Union
from ....core import Model
class UserProfilePrivateInfo(Model):
"""User profile private info
Properties:
user_id: (userId) OPTIONAL str
namespace: (namespace) OPTIONAL str
first_name: (firstName) OPTIONAL str
last_name: (lastName) OPTIONAL str
avatar_small_url: (avatarSmallUrl) OPTIONAL str
avatar_url: (avatarUrl) OPTIONAL str
avatar_large_url: (avatarLargeUrl) OPTIONAL str
status: (status) OPTIONAL str
language: (language) OPTIONAL str
time_zone: (timeZone) OPTIONAL str
date_of_birth: (dateOfBirth) OPTIONAL str
custom_attributes: (customAttributes) OPTIONAL Dict[str, Any]
zip_code: (zipCode) OPTIONAL str
private_custom_attributes: (privateCustomAttributes) OPTIONAL Dict[str, Any]
"""
# region fields
user_id: str # OPTIONAL
namespace: str # OPTIONAL
first_name: str # OPTIONAL
last_name: str # OPTIONAL
avatar_small_url: str # OPTIONAL
avatar_url: str # OPTIONAL
avatar_large_url: str # OPTIONAL
status: str # OPTIONAL
language: str # OPTIONAL
time_zone: str # OPTIONAL
date_of_birth: str # OPTIONAL
custom_attributes: Dict[str, Any] # OPTIONAL
zip_code: str # OPTIONAL
private_custom_attributes: Dict[str, Any] # OPTIONAL
# endregion fields
# region with_x methods
    # Fluent setters: each assigns the given value to the corresponding
    # attribute and returns `self` so calls can be chained, e.g.
    #   UserProfilePrivateInfo().with_user_id("u").with_status("ACTIVE")
    def with_user_id(self, value: str) -> UserProfilePrivateInfo:
        self.user_id = value
        return self
    def with_namespace(self, value: str) -> UserProfilePrivateInfo:
        self.namespace = value
        return self
    def with_first_name(self, value: str) -> UserProfilePrivateInfo:
        self.first_name = value
        return self
    def with_last_name(self, value: str) -> UserProfilePrivateInfo:
        self.last_name = value
        return self
    def with_avatar_small_url(self, value: str) -> UserProfilePrivateInfo:
        self.avatar_small_url = value
        return self
    def with_avatar_url(self, value: str) -> UserProfilePrivateInfo:
        self.avatar_url = value
        return self
    def with_avatar_large_url(self, value: str) -> UserProfilePrivateInfo:
        self.avatar_large_url = value
        return self
    def with_status(self, value: str) -> UserProfilePrivateInfo:
        self.status = value
        return self
    def with_language(self, value: str) -> UserProfilePrivateInfo:
        self.language = value
        return self
    def with_time_zone(self, value: str) -> UserProfilePrivateInfo:
        self.time_zone = value
        return self
    def with_date_of_birth(self, value: str) -> UserProfilePrivateInfo:
        self.date_of_birth = value
        return self
    def with_custom_attributes(self, value: Dict[str, Any]) -> UserProfilePrivateInfo:
        self.custom_attributes = value
        return self
    def with_zip_code(self, value: str) -> UserProfilePrivateInfo:
        self.zip_code = value
        return self
    def with_private_custom_attributes(self, value: Dict[str, Any]) -> UserProfilePrivateInfo:
        self.private_custom_attributes = value
        return self
# endregion with_x methods
# region to methods
def to_dict(self, include_empty: bool = False) -> dict:
result = {}
if hasattr(self, "user_id") and self.user_id:
result["userId"] = str(self.user_id)
elif include_empty:
result["userId"] = str()
if hasattr(self, "namespace") and self.namespace:
result["namespace"] = str(self.namespace)
elif include_empty:
result["namespace"] = str()
if hasattr(self, "first_name") and self.first_name:
result["firstName"] = str(self.first_name)
elif include_empty:
result["firstName"] = str()
if hasattr(self, "last_name") and self.last_name:
result["lastName"] = str(self.last_name)
elif include_empty:
result["lastName"] = str()
if hasattr(self, "avatar_small_url") and self.avatar_small_url:
result["avatarSmallUrl"] = str(self.avatar_small_url)
elif include_empty:
result["avatarSmallUrl"] = str()
if hasattr(self, "avatar_url") and self.avatar_url:
result["avatarUrl"] = str(self.avatar_url)
elif include_empty:
result["avatarUrl"] = str()
if hasattr(self, "avatar_large_url") and self.avatar_large_url:
result["avatarLargeUrl"] = str(self.avatar_large_url)
elif include_empty:
result["avatarLargeUrl"] = str()
if hasattr(self, "status") and self.status:
result["status"] = str(self.status)
elif include_empty:
result["status"] = str()
if hasattr(self, "language") and self.language:
result["language"] = str(self.language)
elif include_empty:
result["language"] = str()
if hasattr(self, "time_zone") and self.time_zone:
result["timeZone"] = str(self.time_zone)
elif include_empty:
result["timeZone"] = str()
if hasattr(self, "date_of_birth") and self.date_of_birth:
result["dateOfBirth"] = str(self.date_of_birth)
elif include_empty:
result["dateOfBirth"] = str()
if hasattr(self, "custom_attributes") and self.custom_attributes:
result["customAttributes"] = {str(k0): v0 for k0, v0 in self.custom_attributes.items()}
elif include_empty:
result["customAttributes"] = {}
if hasattr(self, "zip_code") and self.zip_code:
result["zipCode"] = str(self.zip_code)
elif include_empty:
result["zipCode"] = str()
if hasattr(self, "private_custom_attributes") and self.private_custom_attributes:
result["privateCustomAttributes"] = {str(k0): v0 for k0, v0 in self.private_custom_attributes.items()}
elif include_empty:
result["privateCustomAttributes"] = {}
return result
# endregion to methods
# region static methods
@classmethod
def create(
cls,
user_id: Optional[str] = None,
namespace: Optional[str] = None,
first_name: Optional[str] = None,
last_name: Optional[str] = None,
avatar_small_url: Optional[str] = None,
avatar_url: Optional[str] = None,
avatar_large_url: Optional[str] = None,
status: Optional[str] = None,
language: Optional[str] = None,
time_zone: Optional[str] = None,
date_of_birth: Optional[str] = None,
custom_attributes: Optional[Dict[str, Any]] = None,
zip_code: Optional[str] = None,
private_custom_attributes: Optional[Dict[str, Any]] = None,
) -> UserProfilePrivateInfo:
instance = cls()
if user_id is not None:
instance.user_id = user_id
if namespace is not None:
instance.namespace = namespace
if first_name is not None:
instance.first_name = first_name
if last_name is not None:
instance.last_name = last_name
if avatar_small_url is not None:
instance.avatar_small_url = avatar_small_url
if avatar_url is not None:
instance.avatar_url = avatar_url
if avatar_large_url is not None:
instance.avatar_large_url = avatar_large_url
if status is not None:
instance.status = status
if language is not None:
instance.language = language
if time_zone is not None:
instance.time_zone = time_zone
if date_of_birth is not None:
instance.date_of_birth = date_of_birth
if custom_attributes is not None:
instance.custom_attributes = custom_attributes
if zip_code is not None:
instance.zip_code = zip_code
if private_custom_attributes is not None:
instance.private_custom_attributes = private_custom_attributes
return instance
@classmethod
def create_from_dict(cls, dict_: dict, include_empty: bool = False) -> UserProfilePrivateInfo:
instance = cls()
if not dict_:
return instance
if "userId" in dict_ and dict_["userId"] is not None:
instance.user_id = str(dict_["userId"])
elif include_empty:
instance.user_id = str()
if "namespace" in dict_ and dict_["namespace"] is not None:
instance.namespace = str(dict_["namespace"])
elif include_empty:
instance.namespace = str()
if "firstName" in dict_ and dict_["firstName"] is not None:
instance.first_name = str(dict_["firstName"])
elif include_empty:
instance.first_name = str()
if "lastName" in dict_ and dict_["lastName"] is not None:
instance.last_name = str(dict_["lastName"])
elif include_empty:
instance.last_name = str()
if "avatarSmallUrl" in dict_ and dict_["avatarSmallUrl"] is not None:
instance.avatar_small_url = str(dict_["avatarSmallUrl"])
elif include_empty:
instance.avatar_small_url = str()
if "avatarUrl" in dict_ and dict_["avatarUrl"] is not None:
instance.avatar_url = str(dict_["avatarUrl"])
elif include_empty:
instance.avatar_url = str()
if "avatarLargeUrl" in dict_ and dict_["avatarLargeUrl"] is not None:
instance.avatar_large_url = str(dict_["avatarLargeUrl"])
elif include_empty:
instance.avatar_large_url = str()
if "status" in dict_ and dict_["status"] is not None:
instance.status = str(dict_["status"])
elif include_empty:
instance.status = str()
if "language" in dict_ and dict_["language"] is not None:
instance.language = str(dict_["language"])
elif include_empty:
instance.language = str()
if "timeZone" in dict_ and dict_["timeZone"] is not None:
instance.time_zone = str(dict_["timeZone"])
elif include_empty:
instance.time_zone = str()
if "dateOfBirth" in dict_ and dict_["dateOfBirth"] is not None:
|
elif include_empty:
instance.date_of_birth = str()
if "customAttributes" in dict_ and dict_["customAttributes"] is not None:
instance.custom_attributes = {str(k0): v0 for k0, v0 in dict_["customAttributes"].items()}
elif include_empty:
instance.custom_attributes = {}
if "zipCode" in dict_ and dict_["zipCode"] is not None:
instance.zip_code = str(dict_["zipCode"])
elif include_empty:
instance.zip_code = str()
if "privateCustomAttributes" in dict_ and dict_["privateCustomAttributes"] is not None:
instance.private_custom_attributes = {str(k0): v0 for k0, v0 in dict_["privateCustomAttributes"].items()}
elif include_empty:
instance.private_custom_attributes = {}
return instance
    @staticmethod
    def get_field_info() -> Dict[str, str]:
        """Return the mapping from wire (camelCase) names to model attribute names."""
        return {
            "userId": "user_id",
            "namespace": "namespace",
            "firstName": "first_name",
            "lastName": "last_name",
            "avatarSmallUrl": "avatar_small_url",
            "avatarUrl": "avatar_url",
            "avatarLargeUrl": "avatar_large_url",
            "status": "status",
            "language": "language",
            "timeZone": "time_zone",
            "dateOfBirth": "date_of_birth",
            "customAttributes": "custom_attributes",
            "zipCode": "zip_code",
            "privateCustomAttributes": "private_custom_attributes",
        }
# endregion static methods
|
instance.date_of_birth = str(dict_["dateOfBirth"])
|
record.py
|
import ast
import inspect
import json
import logging
import os
import sys
import tempfile
from functools import wraps
from collections import deque
from execution_trace.constants import RECORD_FN_NAME, RETVAL_NAME, MANGLED_FN_NAME
from execution_trace.utils import strip_indent
# Init logging.
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
# Will be initialized in `record`. See `init_recorded_state`.
_record_store_hidden_123 = None
# To guard against decorating more than one function.
num_fns_recorded = 0
# To know when to print out the source code of the function.
first_dump_call = True
|
def _record_state_fn_hidden_123(lineno, f_locals):
    """Stores local line data for the line `lineno` of the traced function."""
    # Make sure we have just primitive types (repr everything).
    # NOTE: `iteritems` makes this module Python 2-only.
    f_locals = {k: repr(v) for k, v in f_locals.iteritems()}

    data = {
        'lineno': lineno,
        'state': f_locals,
    }
    # This is a stack so always append in the top frame.
    _record_store_hidden_123[-1]['data'].append(data)
# http://stackoverflow.com/a/12240419
# TL;DR need this because the decorator would
# recursively apply on the new generated function.
_blocked = False
def record(num_executions=1):
    """Decorator factory: record the first `num_executions` executions of the
    decorated function, line by line, into a temp JSON file (see `_get_dump_file`).
    """
    def _record(f):
        """Transforms `f` such that after every line record_state is called.

        *** HERE BE DRAGONS ***
        """
        global num_fns_recorded

        # Make sure this is not a recursive decorator application.
        global _blocked
        if _blocked:
            return f

        # We only support recording one fn's executions at the moment.
        if num_fns_recorded:
            raise ValueError('Cannot `record` more than one function at a time.')
        num_fns_recorded += 1

        # Re-parse the function's source and splice record_state calls
        # after every statement of its body.
        source = inspect.getsource(f)
        parsed = ast.parse(strip_indent(source))
        original_body = list(parsed.body[0].body)

        # Update body
        parsed.body[0].body = _fill_body_with_record(original_body)

        # Compile and inject modified function back into its env.
        new_f_compiled = compile(parsed, '<string>', 'exec')
        env = sys.modules[f.__module__].__dict__
        # We also need to inject our stuff in there.
        env[RECORD_FN_NAME] = globals()[RECORD_FN_NAME]
        # _blocked prevents this decorator from re-applying to the
        # exec'd copy of the function (Python 2 exec statement).
        _blocked = True
        exec new_f_compiled in env
        _blocked = False

        # Keep a reference to the (original) mangled function, because our decorator
        # will end up replacing it with `wrapped`. Then, whenever `wrapped` ends up
        # calling the original function, it would end up calling itself, leading
        # to an infinite recursion. Thus, we keep the fn we want to call under
        # a separate key which `wrapped` can call without a problem.
        # We are doing this instead of simply changing the recorded fn's name because
        # we have to support recursive calls (which would lead to NameError if we changed
        # the fn's name).
        env[MANGLED_FN_NAME] = env[f.__name__]

        init_recorded_state()
        file, path = _get_dump_file()
        logger.info("Will record execution of %s in %s . "
                    "Use `view_trace %s` to view it.",
                    f.__name__, path, path)

        # Wrap in our own function such that we can dump the recorded state at the end.
        @wraps(f)
        def wrapped(*args, **kwargs):
            # Write source to file the first time we are called.
            global first_dump_call
            if first_dump_call:
                dump_fn_source(file, source)
                first_dump_call = False

            global num_recorded_executions
            # Are we still recording?
            if num_recorded_executions < num_executions:
                # New stack frame -> new state.
                push_recorded_state()
                # Call the recorded function. This might throw.
                try:
                    ret = env[MANGLED_FN_NAME](*args, **kwargs)
                except:
                    # Re-raise, let the user handle this.
                    raise
                finally:
                    # But still log what we captured.
                    dump_recorded_state(file)
                    # Done recording on this frame.
                    pop_recorded_state()
                    num_recorded_executions += 1
            # If not, just call the original function.
            else:
                ret = f(*args, **kwargs)

            return ret
        return wrapped
    return _record
def _make_record_state_call_expr(lineno):
    """Build the AST for `RECORD_FN_NAME(lineno, locals())` as an
    expression statement (synthetic nodes all get line/col 0)."""
    # Create locals() call.
    name = ast.Name(ctx=ast.Load(), id='locals', lineno=0, col_offset=0)
    locals_call = ast.Call(func=name, lineno=0, col_offset=0, args=[], keywords=[])
    # Create lineno constant arg.
    num = ast.Num(n=lineno, lineno=0, col_offset=0)
    # Create record_state call.
    name = ast.Name(ctx=ast.Load(), id=RECORD_FN_NAME, lineno=0, col_offset=0)
    call = ast.Call(func=name, lineno=0, col_offset=0,
                    args=[num, locals_call],
                    keywords=[])
    expr = ast.Expr(value=call, lineno=0, col_offset=0)
    return expr
def _make_return_trace_call_exprs(item):
    """Rewrite `return <expr>` into three statements: assign <expr> to an
    aux variable, record state (so the retval is captured too), then
    return the aux variable."""
    # Store retval in an aux var and return that instead.
    store_name = ast.Name(ctx=ast.Store(), id=RETVAL_NAME, col_offset=0, lineno=0)
    load_name = ast.Name(ctx=ast.Load(), id=RETVAL_NAME, col_offset=0, lineno=0)
    assign = ast.Assign(col_offset=0, targets=[store_name], value=item.value, lineno=0)
    ret = ast.Return(lineno=0, value=load_name, col_offset=0)
    return [
        assign,
        _make_record_state_call_expr(item.lineno),
        ret
    ]
def _fill_body_with_record(original_body, prepend=False, lineno=None):
    """Adds a record_state call after every item in the block.

    Recursive, works for nested bodies (e.g. if statements).

    `prepend` inserts a record_state call right at the start. We need this for
    recording the state on lines introducing nested blocks (`if`, `while` etc.)
    """
    new_body = []
    if prepend:
        assert lineno is not None, "Should've called prepend with a lineno."
        new_body.append(_make_record_state_call_expr(lineno))

    for item in original_body:
        # Handle return statements separately such that we capture retval as well.
        if isinstance(item, ast.Return):
            new_body.extend(_make_return_trace_call_exprs(item))
            continue

        has_nested = False
        # Look out for nested bodies.
        if hasattr(item, 'body'):
            has_nested = True
            new_nested_body = _fill_body_with_record(item.body, prepend=True, lineno=item.lineno)
            item.body = new_nested_body
        if hasattr(item, 'orelse'):
            has_nested = True
            # Don't want to prepend call for try/except, but we want for the others.
            # NOTE: `ast.TryExcept` exists only on Python 2 (py3 uses `ast.Try`).
            if isinstance(item, ast.TryExcept):
                prepend = False
            else:
                prepend = True
            # `else` does not have a lineno, using `if`'s lineno.
            new_nested_body = _fill_body_with_record(item.orelse, prepend=prepend, lineno=item.lineno)
            item.orelse = new_nested_body
        # Except blocks.
        if hasattr(item, 'handlers'):
            has_nested = True
            for handler in item.handlers:
                new_nested_body = _fill_body_with_record(handler.body, prepend=False, lineno=handler.lineno)
                handler.body = new_nested_body

        new_body.append(item)
        # Don't append a call after the end of the nested body, it's redundant.
        if not has_nested:
            new_body.append(_make_record_state_call_expr(item.lineno))

    return new_body
def _get_dump_file():
"""Returns file object and its path."""
fd, path = tempfile.mkstemp(prefix='record_', suffix='.json')
# Will never be `close`d because we don't know when user stops the program.
# We'll live with this.
file = os.fdopen(fd, 'w')
return file, path
def init_recorded_state():
    """Reset the global store of recorded frame states."""
    global _record_store_hidden_123
    # Using a stack for the frames' states to support recursive fns.
    _record_store_hidden_123 = deque()


def push_recorded_state():
    """Open a fresh state entry for a new stack frame."""
    global _record_store_hidden_123
    _record_store_hidden_123.append({'data': []})


def pop_recorded_state():
    """Discard the state entry of the frame that just finished."""
    global _record_store_hidden_123
    _record_store_hidden_123.pop()
def dump_recorded_state(file):
    """Append the current top frame's recorded data to `file` as one JSON line."""
    # This is a stack so always dump top call.
    json.dump(_record_store_hidden_123[-1], file)
    file.write('\n')
def dump_fn_source(file, source):
    """Write the traced function's source to `file` as one JSON line."""
    file.write(json.dumps({'source': source}))
    file.write('\n')
|
# To know how many executions were recorded.
num_recorded_executions = 0
|
simple_statement.go
|
package sources
import (
"github.com/ericr/solanalyzer/parser"
)
const (
// SimpleStatementVarDec is a simple statement with a variable declaration.
SimpleStatementVarDec = iota
// SimpleStatementExpr is a simple statement with an expression.
SimpleStatementExpr
)
// SimpleStatement represents a simple statement in Solidity.
// It is a tagged union: SubType selects which of the two variant
// fields below is populated (see Visit).
type SimpleStatement struct {
	Tokens
	SubType int
	// Set when SubType == SimpleStatementVarDec.
	VariableDeclaration *VariableDeclarationStatement
	// Set when SubType == SimpleStatementExpr.
	Expression *Expression
}
// NewSimpleStatement returns a new instance of SimpleStatement.
func NewSimpleStatement() *SimpleStatement
|
// Visit is called by a visitor.
// It records the statement's token span and populates exactly one
// variant: a variable declaration when the parse tree has one,
// otherwise an expression statement.
func (ss *SimpleStatement) Visit(ctx *parser.SimpleStatementContext) {
	ss.Start = ctx.GetStart()
	ss.Stop = ctx.GetStop()

	if ctx.VariableDeclarationStatement() != nil {
		varDecStmtCtx := ctx.VariableDeclarationStatement()
		varDecStmt := NewVariableDeclarationStatement()
		varDecStmt.Visit(varDecStmtCtx.(*parser.VariableDeclarationStatementContext))

		ss.SubType = SimpleStatementVarDec
		ss.VariableDeclaration = varDecStmt
		return
	}

	exprStmtCtx := ctx.ExpressionStatement().(*parser.ExpressionStatementContext)
	expr := NewExpression()
	expr.Visit(exprStmtCtx.Expression().(*parser.ExpressionContext))

	ss.SubType = SimpleStatementExpr
	ss.Expression = expr
}
// String returns the Solidity source text of the statement, delegating
// to whichever variant this statement holds.
func (ss *SimpleStatement) String() string {
	switch ss.SubType {
	case SimpleStatementVarDec:
		return ss.VariableDeclaration.String()
	default:
		return ss.Expression.String()
	}
}
|
{
return &SimpleStatement{}
}
|
iosxr_lldp_interfaces.py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The module file for iosxr_lldp_interfaces
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
DOCUMENTATION = """
module: iosxr_lldp_interfaces
short_description: LLDP interfaces resource module
description:
- This module manages Link Layer Discovery Protocol (LLDP) attributes of interfaces
on IOS-XR devices.
version_added: 1.0.0
notes:
- Tested against IOS-XR 6.1.3.
- This module works with connection C(network_cli). See L(the IOS-XR Platform Options,../network/user_guide/platform_iosxr.html).
author: Nilashish Chakraborty (@nilashishc)
options:
config:
description: A dictionary of LLDP interfaces options.
type: list
elements: dict
suboptions:
name:
description:
- Name/Identifier of the interface or Ether-Bundle.
type: str
destination:
description:
- Specifies LLDP destination configuration on the interface.
suboptions:
mac_address:
description:
- Specifies the LLDP destination mac address on the interface.
type: str
choices:
- ieee-nearest-bridge
- ieee-nearest-non-tmpr-bridge
type: dict
receive:
description:
- Enable/disable LLDP RX on an interface.
type: bool
transmit:
description:
- Enable/disable LLDP TX on an interface.
type: bool
running_config:
description:
- This option is used only with state I(parsed).
- The value of this option should be the output received from the IOS-XR device
by executing the command B(show running-config int).
- The state I(parsed) reads the configuration from C(running_config) option and
transforms it into Ansible structured data as per the resource module's argspec
and the value is then returned in the I(parsed) key within the result.
type: str
state:
description:
- The state of the configuration after module completion.
type: str
choices:
- merged
- replaced
- overridden
- deleted
- parsed
- rendered
- gathered
default: merged
"""
EXAMPLES = """
# Using merged
#
#
# ------------
# Before state
# ------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 12:40:23.104 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# !
# interface preconfigure GigabitEthernet0/0/0/2
# !
#
#
- name: Merge provided configuration with running configuration
cisco.iosxr.iosxr_lldp_interfaces:
config:
- name: GigabitEthernet0/0/0/1
destination:
mac_address: ieee-nearest-non-tmpr-bridge
transmit: false
- name: GigabitEthernet0/0/0/2
destination:
mac_address: ieee-nearest-bridge
receive: false
state: merged
#
#
# ------------------------
# Module Execution Result
# ------------------------
#
#
# "before": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "name": "GigabitEthernet0/0/0/1"
# },
# {
# "name": "GigabitEthernet0/0/0/2"
# }
# ]
#
# "commands": [
# "interface GigabitEthernet0/0/0/2",
# "lldp destination mac-address ieee-nearest-non-tmpr-bridge",
# "lldp transmit disable",
# "interface GigabitEthernet0/0/0/1",
# "lldp receive disable",
# "lldp destination mac-address ieee-nearest-bridge"
# ]
#
# "after": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-bridge"
# },
# "name": "GigabitEthernet0/0/0/1",
# "receive": false
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-non-tmpr-bridge"
# },
# "name": "GigabitEthernet0/0/0/2",
# "transmit": false
# }
# ]
#
#
# ------------
# After state
# ------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 12:49:51.517 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# receive disable
# destination mac-address
# ieee-nearest-bridge
# !
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# lldp
# transmit disable
# destination mac-address
# ieee-nearest-non-tmpr-bridge
# !
# !
# !
#
#
# Using replaced
#
#
# -------------
# Before state
# -------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 12:49:51.517 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# receive disable
# destination mac-address
# ieee-nearest-bridge
# !
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# lldp
# transmit disable
# destination mac-address
# ieee-nearest-non-tmpr-bridge
# !
# !
# !
#
#
- name: Replace existing LLDP configurations of specified interfaces with provided
configuration
cisco.iosxr.iosxr_lldp_interfaces:
config:
- name: GigabitEthernet0/0/0/1
destination:
mac_address: ieee-nearest-non-tmpr-bridge
state: replaced
#
#
# ------------------------
# Module Execution Result
# ------------------------
#
# "before": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-bridge"
# },
# "name": "GigabitEthernet0/0/0/1",
# "receive": false
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-non-tmpr-bridge"
# },
# "name": "GigabitEthernet0/0/0/2",
# "transmit": false
# }
# ]
#
#
# "commands": [
# "interface GigabitEthernet0/0/0/1",
# "no lldp receive disable",
# "lldp destination mac-address ieee-nearest-non-tmpr-bridge"
# ]
#
#
# "after": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-non-tmpr-bridge"
# },
# "name": "GigabitEthernet0/0/0/1"
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-non-tmpr-bridge"
# },
# "name": "GigabitEthernet0/0/0/2",
# "transmit": false
# }
# ]
#
#
# ------------
# After state
# ------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 13:02:57.062 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# destination mac-address
# ieee-nearest-non-tmpr-bridge
# !
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# lldp
# transmit disable
# destination mac-address
# ieee-nearest-non-tmpr-bridge
# !
# !
# !
#
#
# Using overridden
#
#
# -------------
# Before state
# -------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 13:15:40.465 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# receive disable
# destination mac-address
# ieee-nearest-bridge
# !
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# lldp
# transmit disable
# destination mac-address
# ieee-nearest-non-tmpr-bridge
# !
# !
# !
#
#
- name: Override the LLDP configurations of all the interfaces with provided configurations
cisco.iosxr.iosxr_lldp_interfaces:
config:
- name: GigabitEthernet0/0/0/1
transmit: false
state: overridden
#
#
# ------------------------
# Module Execution Result
# ------------------------
#
#
# "before": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-bridge"
# },
# "name": "GigabitEthernet0/0/0/1",
# "receive": false
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-non-tmpr-bridge"
# },
# "name": "GigabitEthernet0/0/0/2",
# "transmit": false
# }
# ]
#
# "commands": [
# "interface GigabitEthernet0/0/0/2",
# "no lldp destination mac-address ieee-nearest-non-tmpr-bridge",
# "no lldp transmit disable",
# "interface GigabitEthernet0/0/0/1",
# "no lldp destination mac-address ieee-nearest-bridge",
# "no lldp receive disable",
# "lldp transmit disable"
# ]
#
#
# "after": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "name": "GigabitEthernet0/0/0/1",
# "transmit": false
# },
# {
# "name": "GigabitEthernet0/0/0/2"
# }
# ]
#
#
# ------------
# After state
# ------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 13:22:25.604 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# transmit disable
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# !
#
#
# Using deleted
#
#
# -------------
# Before state
# -------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 13:26:21.498 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# receive disable
# destination mac-address
# ieee-nearest-bridge
# !
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# lldp
# transmit disable
# destination mac-address
# ieee-nearest-non-tmpr-bridge
# !
# !
# !
#
#
- name: Delete LLDP configurations of all interfaces (Note - This won't delete the
interfaces themselves)
cisco.iosxr.iosxr_lldp_interfaces:
state: deleted
#
#
#
# ------------------------
# Module Execution Result
# ------------------------
#
#
# "before": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-bridge"
# },
# "name": "GigabitEthernet0/0/0/1",
# "receive": false
# },
# {
# "destination": {
# "mac_address": "ieee-nearest-non-tmpr-bridge"
# },
# "name": "GigabitEthernet0/0/0/2",
# "transmit": false
# }
# ]
#
#
# "commands": [
# "interface GigabitEthernet0/0/0/1",
# "no lldp destination mac-address ieee-nearest-bridge",
# "no lldp receive disable",
# "interface GigabitEthernet0/0/0/2",
# "no lldp destination mac-address ieee-nearest-non-tmpr-bridge",
# "no lldp transmit disable"
# ]
#
#
# "after": [
# {
# "name": "TenGigE0/0/0/0"
# },
# {
# "name": "GigabitEthernet0/0/0/1"
# },
# {
# "name": "GigabitEthernet0/0/0/2"
# }
# ]
#
#
# ------------
# After state
# ------------
#
#
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 13:30:14.618 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# !
# interface preconfigure GigabitEthernet0/0/0/2
# !
#
#
# Using parsed:
# parsed.cfg
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# receive disable
# destination mac-address
# ieee-nearest-bridge
# !
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# lldp
# transmit disable
# destination mac-address
# ieee-nearest-non-tmpr-bridge
- name: Convert lldp interfaces config to argspec without connecting to the appliance
cisco.iosxr.iosxr_lldp_interfaces:
running_config: "{{ lookup('file', './parsed.cfg') }}"
state: parsed
# ------------------------
# Module Execution Result
# ------------------------
# parsed: [
# - name: GigabitEthernet0/0/0/1
# destination:
# mac_address: ieee-nearest-non-tmpr-bridge
# transmit: False
# - name: GigabitEthernet0/0/0/2
# destination:
# mac_address: ieee-nearest-bridge
# receive: False
# ]
# Using gathered:
# Device config:
# RP/0/RP0/CPU0:ios#sh run int
# Mon Aug 12 12:49:51.517 UTC
# interface TenGigE0/0/0/0
# ipv4 address 192.0.2.11 255.255.255.192
# !
# interface preconfigure GigabitEthernet0/0/0/1
# lldp
# receive disable
# destination mac-address
# ieee-nearest-bridge
# !
# !
# !
# interface preconfigure GigabitEthernet0/0/0/2
# lldp
# transmit disable
# destination mac-address
# ieee-nearest-non-tmpr-bridge
- name: Gather IOSXR lldp interfaces configuration
cisco.iosxr.iosxr_lldp_interfaces:
config:
state: gathered
# ------------------------
# Module Execution Result
# ------------------------
# gathered:
# - name: GigabitEthernet0/0/0/1
# destination:
# mac_address: ieee-nearest-non-tmpr-bridge
# transmit: False
# - name: GigabitEthernet0/0/0/2
# destination:
# mac_address: ieee-nearest-bridge
# receive: False
# Using rendered:
- name: Render platform specific commands from task input using rendered state
cisco.iosxr.iosxr_lldp_interfaces:
config:
- name: GigabitEthernet0/0/0/1
destination:
mac_address: ieee-nearest-non-tmpr-bridge
transmit: false
- name: GigabitEthernet0/0/0/2
destination:
mac_address: ieee-nearest-bridge
receive: false
state: rendered
# ------------------------
# Module Execution Result
# ------------------------
# "rendered": [
# "interface GigabitEthernet0/0/0/2",
# "lldp destination mac-address ieee-nearest-non-tmpr-bridge",
# "lldp transmit disable",
# "interface GigabitEthernet0/0/0/1",
# "lldp receive disable",
# "lldp destination mac-address ieee-nearest-bridge"
# ]
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
commands:
description: The set of commands pushed to the remote device.
returned: always
type: list
sample: ['interface GigabitEthernet0/0/0/1', 'lldp destination mac-address ieee-nearest-non-tmpr-bridge', 'no lldp transmit disable']
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.argspec.lldp_interfaces.lldp_interfaces import (
Lldp_interfacesArgs,
)
from ansible_collections.cisco.iosxr.plugins.module_utils.network.iosxr.config.lldp_interfaces.lldp_interfaces import (
Lldp_interfaces,
)
def
|
():
"""
Main entry point for module execution
:returns: the result form module invocation
"""
required_if = [
("state", "merged", ("config",)),
("state", "replaced", ("config",)),
("state", "overridden", ("config",)),
("state", "rendered", ("config",)),
("state", "parsed", ("running_config",)),
]
mutually_exclusive = [("config", "running_config")]
module = AnsibleModule(
argument_spec=Lldp_interfacesArgs.argument_spec,
required_if=required_if,
supports_check_mode=True,
mutually_exclusive=mutually_exclusive,
)
result = Lldp_interfaces(module).execute_module()
module.exit_json(**result)
if __name__ == "__main__":
main()
|
main
|
newcsr.go
|
package cmd
import (
"crypto/x509/pkix"
"encoding/asn1"
"github.com/Hyperledger-TWGC/tjfoc-gm/sm2"
"strings"
x509GM "github.com/Hyperledger-TWGC/tjfoc-gm/x509"
"github.com/cloudflare/cfssl/csr"
"github.com/google/certificate-transparency-go/x509"
"github.com/pkg/errors"
"github.com/spf13/cobra"
kms "github.com/tw-bc-group/aliyun-kms/sm2"
)
const subjectFormatError = "Subject 格式不正确,比如: /C=CN/ST=TJ/L=TJ/O=TEST/OU=TEST/CN=TEST CA"
// appendIfNotEmpty appends s to the slice pointed to by a,
// silently ignoring empty strings so subject fields stay compact.
func appendIfNotEmpty(s string, a *[]string) {
	if s == "" {
		return
	}
	*a = append(*a, s)
}
// NewCsrTemplate builds an SM2 certificate-request template from a
// slash-separated subject string such as
// "/C=CN/ST=TJ/L=TJ/O=TEST/OU=TEST/CN=TEST CA" and the given SM2 public key.
//
// Recognized subject keys are C, ST, L, O, OU and CN (key is upper-cased and
// trimmed before matching); unknown keys are skipped. The request carries a
// critical CA basic-constraints extension and is set up for SM2-with-SM3
// signing. A field that is not of the form KEY=VALUE yields an error.
func NewCsrTemplate(subj string, publicKey *sm2.PublicKey) (*x509GM.CertificateRequest, error) {
	var name pkix.Name
	fields := strings.Split(subj, "/")
	if len(fields) == 0 {
		// Defensive only: strings.Split never returns an empty slice.
		return nil, errors.New(subjectFormatError)
	}
	for _, field := range fields {
		subFields := strings.Split(field, "=")
		if len(subFields) == 1 {
			// Segments without '=' (e.g. the empty one before a leading '/')
			// are silently skipped.
			continue
		} else if len(subFields) != 2 {
			return nil, errors.New(subjectFormatError)
		}
		switch strings.ToUpper(strings.TrimSpace(subFields[0])) {
		case "C":
			appendIfNotEmpty(subFields[1], &name.Country)
		case "ST":
			appendIfNotEmpty(subFields[1], &name.Province)
		case "L":
			appendIfNotEmpty(subFields[1], &name.Locality)
		case "O":
			appendIfNotEmpty(subFields[1], &name.Organization)
		case "OU":
			appendIfNotEmpty(subFields[1], &name.OrganizationalUnit)
		case "CN":
			name.CommonName = subFields[1]
		default:
			break
		}
	}
	// BUG FIX: this used to be make([]pkix.Extension, 8), which pre-filled
	// the slice with eight zero-valued extensions before appending the real
	// one. Allocate empty so only the basic-constraints extension is emitted.
	extensions := make([]pkix.Extension, 0, 1)
	basicConstraints, err := asn1.Marshal(csr.BasicConstraints{IsCA: true, MaxPathLen: -1})
	if err != nil {
		return nil, err
	}
	extensions = append(extensions, pkix.Extension{
		Id:       asn1.ObjectIdentifier(x509.OIDExtensionBasicConstraints),
		Value:    basicConstraints,
		Critical: true,
	})
	return &x509GM.CertificateRequest{
		Subject:            name,
		SignatureAlgorithm: x509GM.SM2WithSM3,
		PublicKeyAlgorithm: x509GM.SM2,
		PublicKey:          publicKey,
		Extensions:         extensions,
	}, nil
}
func newCsr(subj, key
|
ing) error {
keyAdapter, err := kms.CreateSm2KeyAdapter(keyID, kms.SignAndVerify)
if err != nil {
return err
}
pubKey := keyAdapter.PublicKey()
csrTemp, err := NewCsrTemplate(subj, pubKey)
if err != nil {
return err
}
csrPem, err := x509GM.CreateCertificateRequestToPem(csrTemp, keyAdapter)
if err != nil {
return err
}
printOutput(keyAdapter.KeyID(), string(csrPem))
return nil
}
// NewCsrCmd builds the `newcsr` cobra sub-command, which generates a new
// Zhonghuan ICA CSR file from a subject string and an optional KMS key ID.
func NewCsrCmd() *cobra.Command {
	var (
		subj  string
		keyID string
	)
	cmd := &cobra.Command{
		Use:   "newcsr",
		Short: "Generate new csr file",
		Long:  "Generate new zhong huan ica csr file",
		RunE: func(cmd *cobra.Command, args []string) error {
			return newCsr(subj, keyID)
		},
	}
	cmd.Flags().StringVarP(&subj, "subj", "s", "", "设置 CSR 请求中的 Subject 字段(必选)")
	_ = cmd.MarkFlagRequired("subj")
	cmd.Flags().StringVarP(&keyID, "key", "k", "", "设置使用的 KMS 密钥 ID(可选)")
	return cmd
}
|
ID str
|
mod.rs
|
//! `NcBlitter`
// functions already exported by bindgen: 4
// ------------------------------------------
// (#) test: 0
// (W) wrap: 4
// ------------------------------------------
//W+ ncblit_bgrx
//W+ ncblit_rgb_loose
//W+ ncblit_rgb_packed
//W+ ncblit_rgba
mod methods;
/// The blitter mode to use for rasterizing an [`NcVisual`][crate::NcVisual].
///
/// We never blit full blocks, but instead spaces (more efficient) with the
/// background set to the desired foreground.
///
/// # Default
/// *[`NcBlitter::Default`]*
///
/// # Degradation
///
/// There is a mechanism of graceful degradation, that works as follows:
/// - without braille support, [`Braille`] decays to [`Sextant`].
/// - without bitmap support, [`Pixel`] decays to [`Sextant`].
/// - without sextant support, [`Sextant`] decays to [`Quadrant`].
/// - without quadrant support, [`Quadrant`] decays to [`Half`].
/// - the only viable blitters in ASCII are [`Ascii`] and [`Pixel`].
|
/// *[`degrade(false)`]* on [`NcVisualOptionsBuilder`].
///
/// [`Braille`]: NcBlitter::Braille
/// [`Pixel`]: NcBlitter::Pixel
/// [`Ascii`]: NcBlitter::Ascii
/// [`Half`]: NcBlitter::Half
/// [`Quadrant`]: NcBlitter::Quadrant
/// [`Sextant`]: NcBlitter::Sextant
/// [`NcVisualFlag::noDegrade`]: crate::NcVisualFlag#associatedconstant.noDegrade
/// [`NcVisualOptions`]: crate::NcVisualOptions
/// [`degrade(false)`]: crate::NcVisualOptionsBuilder#method.degrade
/// [`NcVisualOptionsBuilder`]: crate::NcVisualOptionsBuilder
#[repr(u32)]
#[non_exhaustive]
#[derive(Clone, Copy, PartialEq, Eq)]
pub enum NcBlitter {
    /// Blitter mode chosen automatically by the rendering engine.
    Default = c_api::NCBLIT_DEFAULT,
    /// Blitter mode using only spaces, compatible with ASCII (1x1).
    Ascii = c_api::NCBLIT_1x1,
    /// Blitter mode using halves + `Ascii` (2x1).
    /// ▄▀
    Half = c_api::NCBLIT_2x1,
    /// Blitter mode using quadrants + `Half` (2x2).
    /// ▗▐ ▖▀▟▌▙
    Quadrant = c_api::NCBLIT_2x2,
    /// Blitter mode using sextants + `Quadrant` (3x2).
    /// 🬀🬁🬂🬃🬄🬅🬆🬇🬈🬉🬊🬋🬌🬍🬎🬏🬐🬑🬒🬓🬔🬕🬖🬗🬘🬙🬚🬛🬜🬝🬞🬟🬠🬡🬢🬣🬤🬥🬦🬧🬨🬩🬪🬫🬬🬭🬮🬯🬰🬱🬲🬳🬴🬵🬶🬷🬸🬹🬺🬻
    Sextant = c_api::NCBLIT_3x2,
    /// Blitter mode using braille (4x2).
    /// ⡀⡄⡆⡇⢀⣀⣄⣆⣇⢠⣠⣤⣦⣧⢰⣰⣴⣶⣷⢸⣸⣼⣾⣿
    Braille = c_api::NCBLIT_BRAILLE,
    /// Blitter mode using Pixels/Sixels.
    Pixel = c_api::NCBLIT_PIXEL,
    /// [`NcBlitter`] mode using: four vertical levels (4x1).
    /// █▆▄▂
    _4x1 = c_api::NCBLIT_4x1,
    /// [`NcBlitter`] mode using: eight vertical levels (8x1).
    /// █▇▆▅▄▃▂▁
    _8x1 = c_api::NCBLIT_8x1,
}
/// # Aliases
impl NcBlitter {
    /// Alias of [`NcBlitter::Ascii`] (1x1 cell geometry).
    pub const _1x1: NcBlitter = NcBlitter::Ascii;
    /// Alias of [`NcBlitter::Half`] (2x1 cell geometry).
    pub const _2x1: NcBlitter = NcBlitter::Half;
    /// Alias of [`NcBlitter::Quadrant`] (2x2 cell geometry).
    pub const _2x2: NcBlitter = NcBlitter::Quadrant;
    /// Alias of [`NcBlitter::Sextant`] (3x2 cell geometry).
    pub const _3x2: NcBlitter = NcBlitter::Sextant;
}
mod std_impls {
use super::{c_api, NcBlitter};
use std::fmt;
impl Default for NcBlitter {
fn default() -> Self {
Self::Default
}
}
impl fmt::Display for NcBlitter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
use NcBlitter::*;
write!(
f,
"{}",
match self {
Default => "Default",
Ascii => "Ascii",
Half => "Half",
Quadrant => "Quadrant",
Sextant => "Sextant",
Braille => "Braille",
Pixel => "Pixel",
_4x1 => "4x1",
_8x1 => "8x1",
}
)
}
}
impl fmt::Debug for NcBlitter {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "Blitter::{}", self)
}
}
impl From<c_api::NcBlitter_u32> for NcBlitter {
fn from(blitter: c_api::NcBlitter_u32) -> Self {
use {c_api::*, NcBlitter::*};
match blitter {
NCBLIT_DEFAULT => Default,
NCBLIT_1x1 => Ascii,
NCBLIT_2x1 => Half,
NCBLIT_2x2 => Quadrant,
NCBLIT_3x2 => Sextant,
NCBLIT_BRAILLE => Braille,
NCBLIT_PIXEL => Pixel,
NCBLIT_4x1 => _4x1,
NCBLIT_8x1 => _8x1,
_ => Self::default(),
}
}
}
impl From<NcBlitter> for c_api::NcBlitter_u32 {
fn from(blitter: NcBlitter) -> Self {
use {c_api::*, NcBlitter::*};
match blitter {
Default => NCBLIT_DEFAULT,
Ascii => NCBLIT_1x1,
Half => NCBLIT_2x1,
Quadrant => NCBLIT_2x2,
Sextant => NCBLIT_3x2,
Braille => NCBLIT_BRAILLE,
Pixel => NCBLIT_PIXEL,
_4x1 => NCBLIT_4x1,
_8x1 => NCBLIT_8x1,
}
}
}
}
pub(crate) mod c_api {
    use crate::c_api::ffi;
    /// The blitter mode to use for rasterizing an [`NcVisual`][crate::NcVisual].
    ///
    /// It's recommended to use [`NcBlitter`][crate::NcBlitter] instead.
    ///
    /// The constants below simply re-export the raw `ffi::ncblitter_e_*`
    /// values generated by bindgen.
    ///
    /// # Associated `c_api` constants
    ///
    /// - [`NCBLIT_DEFAULT`]
    /// - [`NCBLIT_1x1`]
    /// - [`NCBLIT_2x1`]
    /// - [`NCBLIT_2x2`]
    /// - [`NCBLIT_3x2`]
    /// - [`NCBLIT_4x1`]
    /// - [`NCBLIT_8x1`]
    /// - [`NCBLIT_BRAILLE`]
    /// - [`NCBLIT_PIXEL`]
    pub type NcBlitter_u32 = ffi::ncblitter_e;
    /// [`NcBlitter_u32`] mode where the blitter is automatically chosen.
    pub const NCBLIT_DEFAULT: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_DEFAULT;
    /// [`NcBlitter_u32`] mode using: space, compatible with ASCII.
    pub const NCBLIT_1x1: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_1x1;
    /// [`NcBlitter_u32`] mode using: halves + 1x1 (space).
    /// ▄▀
    pub const NCBLIT_2x1: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_2x1;
    /// [`NcBlitter_u32`] mode using: quadrants + 2x1.
    /// ▗▐ ▖▀▟▌▙
    pub const NCBLIT_2x2: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_2x2;
    /// [`NcBlitter_u32`] mode using: sextants.
    /// 🬀🬁🬂🬃🬄🬅🬆🬇🬈🬉🬊🬋🬌🬍🬎🬏🬐🬑🬒🬓🬔🬕🬖🬗🬘🬙🬚🬛🬜🬝🬞🬟🬠🬡🬢🬣🬤🬥🬦🬧🬨🬩🬪🬫🬬🬭🬮🬯🬰🬱🬲🬳🬴🬵🬶🬷🬸🬹🬺🬻
    pub const NCBLIT_3x2: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_3x2;
    /// [`NcBlitter_u32`] mode using: four vertical levels.
    /// █▆▄▂
    pub const NCBLIT_4x1: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_4x1;
    /// [`NcBlitter_u32`] mode using: eight vertical levels.
    /// █▇▆▅▄▃▂▁
    pub const NCBLIT_8x1: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_8x1;
    /// [`NcBlitter_u32`] mode using: 4 rows, 2 cols (braille).
    /// ⡀⡄⡆⡇⢀⣀⣄⣆⣇⢠⣠⣤⣦⣧⢰⣰⣴⣶⣷⢸⣸⣼⣾⣿
    pub const NCBLIT_BRAILLE: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_BRAILLE;
    /// Sixel/Pixel mode.
    ///
    /// See [Sixel in Wikipedia](https://en.wikipedia.org/wiki/Sixel).
    pub const NCBLIT_PIXEL: NcBlitter_u32 = ffi::ncblitter_e_NCBLIT_PIXEL;
}
|
///
/// If you don't want this behaviour you have to set the
/// *[`NcVisualFlag::NoDegrade`]* on [`NcVisualOptions`] or call
|
abs.go
|
// Copyright 2010 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
// Copyright ©2017 The Gonum Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package cmplx64
import math "github.com/gopherd/gonum/internal/math32"
|
// Abs returns the absolute value (also called the modulus) of x.
// NOTE(review): `math` here is the internal float32 math32 package; it
// presumably mirrors stdlib math.Hypot's overflow-safe behavior — confirm.
func Abs(x complex64) float32 { return math.Hypot(real(x), imag(x)) }
|
|
index.tsx
|
import React, { useState } from "react";
import { Alert } from "react-native";
import firestore from "@react-native-firebase/firestore";
import { Container } from "./styles";
import { ButtonIcon } from "../ButtonIcon";
import { Input } from "../Input";
export function
|
() {
const [description, setDescription] = useState("");
const [quantity, setQuantity] = useState(0);
async function handleAddProduct() {
firestore()
.collection("products")
.add({
description,
quantity,
done: false,
createdAt: firestore.FieldValue.serverTimestamp(),
})
.then(() => {
Alert.alert("Produto adicionado com sucesso");
})
.catch(() => {
Alert.alert("Opa", "Não conseguimos adicionar seu produto :/");
});
}
async function handleAddProductWithCustomId() {
firestore()
.collection("products")
.doc("my-custom-id")
.set({
description,
quantity,
done: false,
createdAt: firestore.FieldValue.serverTimestamp(),
})
.then(() => {
Alert.alert("Produto adicionado com sucesso");
})
.catch(() => {
Alert.alert("Opa", "Não conseguimos adicionar seu produto :/");
});
}
return (
<Container>
<Input placeholder="Nome do produto" size="medium" onChangeText={setDescription} />
<Input
placeholder="0"
keyboardType="numeric"
size="small"
style={{ marginHorizontal: 8 }}
onChangeText={(value) => setQuantity(Number(value))}
/>
<ButtonIcon size="large" icon="add-shopping-cart" onPress={handleAddProduct} />
</Container>
);
}
|
FormBox
|
colors.rs
|
//! Common color structures used in vga programming.
/// Represents the size of the vga palette in bytes.
/// (768 = 256 palette entries × 3 channels per entry.)
pub const PALETTE_SIZE: usize = 768;
/// Represents a 16 bit color used for vga display.
///
/// Each variant is a 4-bit palette index (fits in one nibble).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[repr(u8)]
pub enum Color16 {
    /// Represents the color `Black (0x0)`.
    Black = 0x0,
    /// Represents the color `Blue (0x1)`.
    Blue = 0x1,
    /// Represents the color `Green (0x2)`.
    Green = 0x2,
    /// Represents the color `Cyan (0x3)`.
    Cyan = 0x3,
    /// Represents the color `Red (0x4)`.
    Red = 0x4,
    /// Represents the color `Magenta (0x5)`.
    Magenta = 0x5,
    /// Represents the color `Brown (0x6)`.
    Brown = 0x6,
    /// Represents the color `LightGrey (0x7)`.
    LightGrey = 0x7,
    /// Represents the color `DarkGrey (0x8)`.
    DarkGrey = 0x8,
    /// Represents the color `LightBlue (0x9)`.
    LightBlue = 0x9,
    /// Represents the color `LightGreen (0xA)`.
    LightGreen = 0xA,
    /// Represents the color `LightCyan (0xB)`.
    LightCyan = 0xB,
    /// Represents the color `LightRed (0xC)`.
    LightRed = 0xC,
    /// Represents the color `Pink (0xD)`.
    Pink = 0xD,
    /// Represents the color `Yellow (0xE)`.
    Yellow = 0xE,
    /// Represents the color `White (0xF)`.
    White = 0xF,
}
impl From<Color16> for u8 {
    // A `Color16` converts directly to its 4-bit palette index.
    fn from(value: Color16) -> u8 {
        value as u8
    }
}
/// Represents a color for vga text modes.
///
/// Packed as a single attribute byte: the high nibble holds the background
/// color and the low nibble the foreground color.
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
#[repr(transparent)]
pub struct TextModeColor(u8);
impl TextModeColor {
    /// Returns a new `TextModeColor` given the specified `foreground`
    /// and `background` color.
    pub const fn new(foreground: Color16, background: Color16) -> TextModeColor {
        TextModeColor((background as u8) << 4 | (foreground as u8))
    }
    /// Sets the background color given the specified `background`;
    /// the foreground (low nibble) is preserved.
    pub fn set_background(&mut self, background: Color16) {
        self.0 = (background as u8) << 4 | (self.0 & 0x0F);
    }
    /// Sets the foreground color given the specified `foreground`;
    /// the background (high nibble) is preserved.
    ///
    /// BUG FIX: this previously overwrote the whole attribute byte
    /// (`self.0 = foreground as u8`), silently resetting the background to
    /// `Black`. Only the low nibble is replaced now, mirroring
    /// `set_background`.
    pub fn set_foreground(&mut self, foreground: Color16) {
        self.0 = (self.0 & 0xF0) | (foreground as u8);
    }
}
|
0x00, 0x2a, 0x2a, 0x15, 0x00, 0x2a, 0x2a, 0x2a, 0x15, 0x15, 0x15, 0x15, 0x15, 0x3f, 0x15, 0x3f,
0x15, 0x15, 0x3f, 0x3f, 0x3f, 0x15, 0x15, 0x3f, 0x15, 0x3f, 0x3f, 0x3f, 0x15, 0x3f, 0x3f, 0x3f,
0x00, 0x00, 0x00, 0x05, 0x05, 0x05, 0x08, 0x08, 0x08, 0x0b, 0x0b, 0x0b, 0x0e, 0x0e, 0x0e, 0x11,
0x11, 0x11, 0x14, 0x14, 0x14, 0x18, 0x18, 0x18, 0x1c, 0x1c, 0x1c, 0x20, 0x20, 0x20, 0x24, 0x24,
0x24, 0x28, 0x28, 0x28, 0x2d, 0x2d, 0x2d, 0x32, 0x32, 0x32, 0x38, 0x38, 0x38, 0x3f, 0x3f, 0x3f,
0x00, 0x00, 0x3f, 0x10, 0x00, 0x3f, 0x1f, 0x00, 0x3f, 0x2f, 0x00, 0x3f, 0x3f, 0x00, 0x3f, 0x3f,
0x00, 0x2f, 0x3f, 0x00, 0x1f, 0x3f, 0x00, 0x10, 0x3f, 0x00, 0x00, 0x3f, 0x10, 0x00, 0x3f, 0x1f,
0x00, 0x3f, 0x2f, 0x00, 0x3f, 0x3f, 0x00, 0x2f, 0x3f, 0x00, 0x1f, 0x3f, 0x00, 0x10, 0x3f, 0x00,
0x00, 0x3f, 0x00, 0x00, 0x3f, 0x10, 0x00, 0x3f, 0x1f, 0x00, 0x3f, 0x2f, 0x00, 0x3f, 0x3f, 0x00,
0x2f, 0x3f, 0x00, 0x1f, 0x3f, 0x00, 0x10, 0x3f, 0x1f, 0x1f, 0x3f, 0x27, 0x1f, 0x3f, 0x2f, 0x1f,
0x3f, 0x37, 0x1f, 0x3f, 0x3f, 0x1f, 0x3f, 0x3f, 0x1f, 0x37, 0x3f, 0x1f, 0x2f, 0x3f, 0x1f, 0x27,
0x3f, 0x1f, 0x1f, 0x3f, 0x27, 0x1f, 0x3f, 0x2f, 0x1f, 0x3f, 0x37, 0x1f, 0x3f, 0x3f, 0x1f, 0x37,
0x3f, 0x1f, 0x2f, 0x3f, 0x1f, 0x27, 0x3f, 0x1f, 0x1f, 0x3f, 0x1f, 0x1f, 0x3f, 0x27, 0x1f, 0x3f,
0x2f, 0x1f, 0x3f, 0x37, 0x1f, 0x3f, 0x3f, 0x1f, 0x37, 0x3f, 0x1f, 0x2f, 0x3f, 0x1f, 0x27, 0x3f,
0x2d, 0x2d, 0x3f, 0x31, 0x2d, 0x3f, 0x36, 0x2d, 0x3f, 0x3a, 0x2d, 0x3f, 0x3f, 0x2d, 0x3f, 0x3f,
0x2d, 0x3a, 0x3f, 0x2d, 0x36, 0x3f, 0x2d, 0x31, 0x3f, 0x2d, 0x2d, 0x3f, 0x31, 0x2d, 0x3f, 0x36,
0x2d, 0x3f, 0x3a, 0x2d, 0x3f, 0x3f, 0x2d, 0x3a, 0x3f, 0x2d, 0x36, 0x3f, 0x2d, 0x31, 0x3f, 0x2d,
0x2d, 0x3f, 0x2d, 0x2d, 0x3f, 0x31, 0x2d, 0x3f, 0x36, 0x2d, 0x3f, 0x3a, 0x2d, 0x3f, 0x3f, 0x2d,
0x3a, 0x3f, 0x2d, 0x36, 0x3f, 0x2d, 0x31, 0x3f, 0x00, 0x00, 0x1c, 0x07, 0x00, 0x1c, 0x0e, 0x00,
0x1c, 0x15, 0x00, 0x1c, 0x1c, 0x00, 0x1c, 0x1c, 0x00, 0x15, 0x1c, 0x00, 0x0e, 0x1c, 0x00, 0x07,
0x1c, 0x00, 0x00, 0x1c, 0x07, 0x00, 0x1c, 0x0e, 0x00, 0x1c, 0x15, 0x00, 0x1c, 0x1c, 0x00, 0x15,
0x1c, 0x00, 0x0e, 0x1c, 0x00, 0x07, 0x1c, 0x00, 0x00, 0x1c, 0x00, 0x00, 0x1c, 0x07, 0x00, 0x1c,
0x0e, 0x00, 0x1c, 0x15, 0x00, 0x1c, 0x1c, 0x00, 0x15, 0x1c, 0x00, 0x0e, 0x1c, 0x00, 0x07, 0x1c,
0x0e, 0x0e, 0x1c, 0x11, 0x0e, 0x1c, 0x15, 0x0e, 0x1c, 0x18, 0x0e, 0x1c, 0x1c, 0x0e, 0x1c, 0x1c,
0x0e, 0x18, 0x1c, 0x0e, 0x15, 0x1c, 0x0e, 0x11, 0x1c, 0x0e, 0x0e, 0x1c, 0x11, 0x0e, 0x1c, 0x15,
0x0e, 0x1c, 0x18, 0x0e, 0x1c, 0x1c, 0x0e, 0x18, 0x1c, 0x0e, 0x15, 0x1c, 0x0e, 0x11, 0x1c, 0x0e,
0x0e, 0x1c, 0x0e, 0x0e, 0x1c, 0x11, 0x0e, 0x1c, 0x15, 0x0e, 0x1c, 0x18, 0x0e, 0x1c, 0x1c, 0x0e,
0x18, 0x1c, 0x0e, 0x15, 0x1c, 0x0e, 0x11, 0x1c, 0x14, 0x14, 0x1c, 0x16, 0x14, 0x1c, 0x18, 0x14,
0x1c, 0x1a, 0x14, 0x1c, 0x1c, 0x14, 0x1c, 0x1c, 0x14, 0x1a, 0x1c, 0x14, 0x18, 0x1c, 0x14, 0x16,
0x1c, 0x14, 0x14, 0x1c, 0x16, 0x14, 0x1c, 0x18, 0x14, 0x1c, 0x1a, 0x14, 0x1c, 0x1c, 0x14, 0x1a,
0x1c, 0x14, 0x18, 0x1c, 0x14, 0x16, 0x1c, 0x14, 0x14, 0x1c, 0x14, 0x14, 0x1c, 0x16, 0x14, 0x1c,
0x18, 0x14, 0x1c, 0x1a, 0x14, 0x1c, 0x1c, 0x14, 0x1a, 0x1c, 0x14, 0x18, 0x1c, 0x14, 0x16, 0x1c,
0x00, 0x00, 0x10, 0x04, 0x00, 0x10, 0x08, 0x00, 0x10, 0x0c, 0x00, 0x10, 0x10, 0x00, 0x10, 0x10,
0x00, 0x0c, 0x10, 0x00, 0x08, 0x10, 0x00, 0x04, 0x10, 0x00, 0x00, 0x10, 0x04, 0x00, 0x10, 0x08,
0x00, 0x10, 0x0c, 0x00, 0x10, 0x10, 0x00, 0x0c, 0x10, 0x00, 0x08, 0x10, 0x00, 0x04, 0x10, 0x00,
0x00, 0x10, 0x00, 0x00, 0x10, 0x04, 0x00, 0x10, 0x08, 0x00, 0x10, 0x0c, 0x00, 0x10, 0x10, 0x00,
0x0c, 0x10, 0x00, 0x08, 0x10, 0x00, 0x04, 0x10, 0x08, 0x08, 0x10, 0x0a, 0x08, 0x10, 0x0c, 0x08,
0x10, 0x0e, 0x08, 0x10, 0x10, 0x08, 0x10, 0x10, 0x08, 0x0e, 0x10, 0x08, 0x0c, 0x10, 0x08, 0x0a,
0x10, 0x08, 0x08, 0x10, 0x0a, 0x08, 0x10, 0x0c, 0x08, 0x10, 0x0e, 0x08, 0x10, 0x10, 0x08, 0x0e,
0x10, 0x08, 0x0c, 0x10, 0x08, 0x0a, 0x10, 0x08, 0x08, 0x10, 0x08, 0x08, 0x10, 0x0a, 0x08, 0x10,
0x0c, 0x08, 0x10, 0x0e, 0x08, 0x10, 0x10, 0x08, 0x0e, 0x10, 0x08, 0x0c, 0x10, 0x08, 0x0a, 0x10,
0x0b, 0x0b, 0x10, 0x0c, 0x0b, 0x10, 0x0d, 0x0b, 0x10, 0x0f, 0x0b, 0x10, 0x10, 0x0b, 0x10, 0x10,
0x0b, 0x0f, 0x10, 0x0b, 0x0d, 0x10, 0x0b, 0x0c, 0x10, 0x0b, 0x0b, 0x10, 0x0c, 0x0b, 0x10, 0x0d,
0x0b, 0x10, 0x0f, 0x0b, 0x10, 0x10, 0x0b, 0x0f, 0x10, 0x0b, 0x0d, 0x10, 0x0b, 0x0c, 0x10, 0x0b,
0x0b, 0x10, 0x0b, 0x0b, 0x10, 0x0c, 0x0b, 0x10, 0x0d, 0x0b, 0x10, 0x0f, 0x0b, 0x10, 0x10, 0x0b,
0x0f, 0x10, 0x0b, 0x0d, 0x10, 0x0b, 0x0c, 0x10, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
];
#[cfg(test)]
mod test {
    use super::*;
    // The foreground color occupies the low nibble of the attribute byte.
    #[test]
    fn test_set_foreground() {
        let mut color = TextModeColor::new(Color16::Yellow, Color16::Black);
        color.set_foreground(Color16::Red);
        assert_eq!(color.0 & 0x0F, Color16::Red as u8);
    }
    // The background color occupies the high nibble of the attribute byte.
    #[test]
    fn test_set_background() {
        let mut color = TextModeColor::new(Color16::Yellow, Color16::Black);
        color.set_background(Color16::DarkGrey);
        assert_eq!(color.0 >> 4, Color16::DarkGrey as u8);
    }
}
|
/// Represents the default vga 256 color palette.
pub const DEFAULT_PALETTE: [u8; PALETTE_SIZE] = [
0x00, 0x00, 0x00, 0x00, 0x00, 0x2a, 0x00, 0x2a, 0x00, 0x00, 0x2a, 0x2a, 0x2a, 0x00, 0x00, 0x2a,
|
search-box.component.ts
|
import { Component } from '@angular/core';
import { User } from '../../users/user.model';
import { Sign } from '../../signs/sign.model';
// NOTE(review): hard-coded fixture data — searchUsers() below assigns these
// to the component instead of querying a backend; presumably placeholders
// until the commented-out /search HTTP call is restored — confirm.
const FOUND_USERS: User[] = [
  { username: "Jen", picUrl: "https://upload.wikimedia.org/wikipedia/commons/e/e9/Official_portrait_of_Barack_Obama.jpg", status: "active" },
  { username: "Clint", picUrl: "https://upload.wikimedia.org/wikipedia/commons/thumb/4/49/44_Bill_Clinton_3x4.jpg/220px-44_Bill_Clinton_3x4.jpg", status: "active"},
  { username: "Eilee", picUrl: "", status: "active" },
  { username: "Clara", picUrl: "", status: "active" },
];
// Fixture: an OAuth-type sign (Facebook).
const OAUTH_FACEBOOK_SIGN: Sign = {
  _id: '12345',
  bgColor: '#3b5998',
  description: 'some sign',
  icon: 'facebook',
  knownAs: 'my name',
  linkUrl: 'http://facebook.com',
  picUrl: '',
  signName: 'facebook',
  signType: 'oauth',
  username: 'myactualusername',
  owner: 'signowner'
};
// Fixture: a custom-type sign (Etsy).
const CUSTOM_ETSY_SIGN: Sign = {
  _id: '54321',
  bgColor: 'orange',
  description: 'etsy sign',
  icon: 'etsy',
  knownAs: 'my etsy name',
  linkUrl: 'http://etsy.com',
  picUrl: 'https://il5.picdn.net/shutterstock/videos/3178849/thumb/1.jpg',
  signName: 'etsy',
  signType: 'custom',
  username: 'myetsyusername',
  owner: 'signowner'
};
// Fixture list combining both sign kinds above.
const FOUND_SIGNS: Sign[] = [
  OAUTH_FACEBOOK_SIGN,
  CUSTOM_ETSY_SIGN
];
@Component({
moduleId: module.id,
selector: 'search-box',
templateUrl: 'search-box.component.html',
styleUrls: ['search-box.component.css']
})
export class
|
{
searchStr: string;
foundUsers: User[]; // users found by search
foundSigns: Sign[]; // signs found by search
searchUsers() {
console.log("SEARCH USERS TRIGGERED");
this.foundUsers = FOUND_USERS;
this.foundSigns = FOUND_SIGNS;
// console.log("SEARCHING CLICKED!");
// console.log("Search string is: ", $scope.searchStr);
// databaseSearch($scope.searchStr);
// function databaseSearch(queryStr) {
// $http.get('/search', {params: {'searchStr': queryStr} })
// .success(function(data) {
// console.log("SUCCESSFUL SEARCH. DATA IS: ", data);
// $scope.users = data.users;
// $scope.signs = data.signs;
// })
// .error(function(err) {
// // TODO: SHOW ERROR MSG TO USER
// console.log("Error searching.");
// });
// }
}
}
// 'use strict';
// module.exports = function(app) {
// app.controller('searchController', [
// '$scope',
// '$routeParams',
// '$http',
// function($scope, $routeParams, $http) {
// console.log("ROUTE PARAMS IS: ", $routeParams);
// ************ AFTER CLICKING SEARCH, THERE WILL BE SEARCH PARAMS IN THE ROUTE
// ************ IF THEY EXIST, THIS MEANS WE NEED TO GRAB THOSE RESULTS & DISPLAY
// var init = function() {
// var paramsQuery = $routeParams.searchStr;
// if(paramsQuery) {
// databaseSearch(paramsQuery);
// }
// };
// init();
// $scope.searchStr = '';
// $scope.users = []; // found users
// $scope.signs = []; // found signs
// $scope.searchUsers = function() {
// console.log("SEARCHING CLICKED!");
// console.log("Search string is: ", $scope.searchStr);
// databaseSearch($scope.searchStr);
// };
// function databaseSearch(queryStr) {
// $http.get('/search', {params: {'searchStr': queryStr} })
// .success(function(data) {
// console.log("SUCCESSFUL SEARCH. DATA IS: ", data);
// $scope.users = data.users;
// $scope.signs = data.signs;
// })
// .error(function(err) {
// // TODO: SHOW ERROR MSG TO USER
// console.log("Error searching.");
// });
// }
// }
// ]);
// };
|
SearchBoxComponent
|
list_detector_recipes_request_response.go
|
// Copyright (c) 2016, 2018, 2022, Oracle and/or its affiliates. All rights reserved.
// This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
// Code generated. DO NOT EDIT.
package cloudguard
import (
"fmt"
"github.com/oracle/oci-go-sdk/v58/common"
"net/http"
"strings"
)
// ListDetectorRecipesRequest wrapper for the ListDetectorRecipes operation
//
// See also
//
// Click https://docs.cloud.oracle.com/en-us/iaas/tools/go-sdk-examples/latest/cloudguard/ListDetectorRecipes.go.html to see an example of how to use ListDetectorRecipesRequest.
type ListDetectorRecipesRequest struct {
	// The ID of the compartment in which to list resources.
	CompartmentId *string `mandatory:"true" contributesTo:"query" name:"compartmentId"`
	// A filter to return only resources that match the entire display name given.
	DisplayName *string `mandatory:"false" contributesTo:"query" name:"displayName"`
	// Default is false.
	// When set to true, the list of all Oracle Managed Resources
	// Metadata supported by Cloud Guard are returned.
	ResourceMetadataOnly *bool `mandatory:"false" contributesTo:"query" name:"resourceMetadataOnly"`
	// The field life cycle state. Only one state can be provided. Default value for state is active. If no value is specified state is active.
	LifecycleState ListDetectorRecipesLifecycleStateEnum `mandatory:"false" contributesTo:"query" name:"lifecycleState" omitEmpty:"true"`
	// The maximum number of items to return.
	Limit *int `mandatory:"false" contributesTo:"query" name:"limit"`
	// The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
	Page *string `mandatory:"false" contributesTo:"query" name:"page"`
	// Default is false.
	// When set to true, the hierarchy of compartments is traversed
	// and all compartments and subcompartments in the tenancy are
	// returned depending on the setting of `accessLevel`.
	CompartmentIdInSubtree *bool `mandatory:"false" contributesTo:"query" name:"compartmentIdInSubtree"`
	// Valid values are `RESTRICTED` and `ACCESSIBLE`. Default is `RESTRICTED`.
	// Setting this to `ACCESSIBLE` returns only those compartments for which the
	// user has INSPECT permissions directly or indirectly (permissions can be on a
	// resource in a subcompartment).
	// When set to `RESTRICTED` permissions are checked and no partial results are displayed.
	AccessLevel ListDetectorRecipesAccessLevelEnum `mandatory:"false" contributesTo:"query" name:"accessLevel" omitEmpty:"true"`
	// The sort order to use, either 'asc' or 'desc'.
	SortOrder ListDetectorRecipesSortOrderEnum `mandatory:"false" contributesTo:"query" name:"sortOrder" omitEmpty:"true"`
	// The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending. Default order for displayName is ascending. If no value is specified timeCreated is default.
	SortBy ListDetectorRecipesSortByEnum `mandatory:"false" contributesTo:"query" name:"sortBy" omitEmpty:"true"`
	// The client request ID for tracing.
	OpcRequestId *string `mandatory:"false" contributesTo:"header" name:"opc-request-id"`
	// Metadata about the request. This information will not be transmitted to the service, but
	// represents information that the SDK will consume to drive retry behavior.
	RequestMetadata common.RequestMetadata
}
// String returns the string representation of the request
// (via common.PointerString).
func (request ListDetectorRecipesRequest) String() string {
	return common.PointerString(request)
}
// HTTPRequest implements the OCIRequest interface.
// Enum fields are validated before the HTTP request is assembled.
func (request ListDetectorRecipesRequest) HTTPRequest(method, path string, binaryRequestBody *common.OCIReadSeekCloser, extraHeaders map[string]string) (http.Request, error) {
	if _, err := request.ValidateEnumValue(); err != nil {
		return http.Request{}, err
	}
	return common.MakeDefaultHTTPRequestWithTaggedStructAndExtraHeaders(method, path, request, extraHeaders)
}
// BinaryRequestBody implements the OCIRequest interface
// ListDetectorRecipes sends no binary payload, so no body is provided.
func (request ListDetectorRecipesRequest) BinaryRequestBody() (*common.OCIReadSeekCloser, bool) {
	return nil, false
}
// RetryPolicy implements the OCIRetryableRequest interface. This retrieves the specified retry policy.
// NOTE(review): a nil policy presumably falls back to the SDK default — confirm in the common package.
func (request ListDetectorRecipesRequest) RetryPolicy() *common.RetryPolicy {
	return request.RequestMetadata.RetryPolicy
}
// ValidateEnumValue returns an error when providing an unsupported enum value
// This function is being called during constructing API request process
// Not recommended for calling this function directly
func (request ListDetectorRecipesRequest) ValidateEnumValue() (bool, error) {
	errMessage := []string{}
	if _, ok := GetMappingListDetectorRecipesLifecycleStateEnum(string(request.LifecycleState)); !ok && request.LifecycleState != "" {
		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for LifecycleState: %s. Supported values are: %s.", request.LifecycleState, strings.Join(GetListDetectorRecipesLifecycleStateEnumStringValues(), ",")))
	}
	if _, ok := GetMappingListDetectorRecipesAccessLevelEnum(string(request.AccessLevel)); !ok && request.AccessLevel != "" {
		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for AccessLevel: %s. Supported values are: %s.", request.AccessLevel, strings.Join(GetListDetectorRecipesAccessLevelEnumStringValues(), ",")))
	}
	if _, ok := GetMappingListDetectorRecipesSortOrderEnum(string(request.SortOrder)); !ok && request.SortOrder != "" {
		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for SortOrder: %s. Supported values are: %s.", request.SortOrder, strings.Join(GetListDetectorRecipesSortOrderEnumStringValues(), ",")))
	}
	if _, ok := GetMappingListDetectorRecipesSortByEnum(string(request.SortBy)); !ok && request.SortBy != "" {
		errMessage = append(errMessage, fmt.Sprintf("unsupported enum value for SortBy: %s. Supported values are: %s.", request.SortBy, strings.Join(GetListDetectorRecipesSortByEnumStringValues(), ",")))
	}
	if len(errMessage) > 0 {
		// BUG FIX: pass the joined message through a constant "%s" verb so any
		// '%' in an enum value is not misinterpreted as a format directive
		// (previously fmt.Errorf received a non-constant format string).
		return true, fmt.Errorf("%s", strings.Join(errMessage, "\n"))
	}
	return false, nil
}
// ListDetectorRecipesResponse wrapper for the ListDetectorRecipes operation
type ListDetectorRecipesResponse struct {
// The underlying http response
RawResponse *http.Response
// A list of DetectorRecipeCollection instances
DetectorRecipeCollection `presentIn:"body"`
// Unique Oracle-assigned identifier for the request. If you need to contact
// Oracle about a particular request, please provide the request ID.
|
OpcRequestId *string `presentIn:"header" name:"opc-request-id"`
// For pagination of a list of items. When paging through a list, if this header appears in the response,
// then a partial list might have been returned. Include this value as the `page` parameter for the
// subsequent GET request to get the next batch of items.
OpcNextPage *string `presentIn:"header" name:"opc-next-page"`
}
// String returns the string representation of the response
// (via common.PointerString).
func (response ListDetectorRecipesResponse) String() string {
	return common.PointerString(response)
}
// HTTPResponse implements the OCIResponse interface
// by exposing the raw *http.Response received from the service.
func (response ListDetectorRecipesResponse) HTTPResponse() *http.Response {
	return response.RawResponse
}
// ListDetectorRecipesLifecycleStateEnum Enum with underlying type: string
type ListDetectorRecipesLifecycleStateEnum string
// Set of constants representing the allowable values for ListDetectorRecipesLifecycleStateEnum
const (
	ListDetectorRecipesLifecycleStateCreating ListDetectorRecipesLifecycleStateEnum = "CREATING"
	ListDetectorRecipesLifecycleStateUpdating ListDetectorRecipesLifecycleStateEnum = "UPDATING"
	ListDetectorRecipesLifecycleStateActive   ListDetectorRecipesLifecycleStateEnum = "ACTIVE"
	ListDetectorRecipesLifecycleStateInactive ListDetectorRecipesLifecycleStateEnum = "INACTIVE"
	ListDetectorRecipesLifecycleStateDeleting ListDetectorRecipesLifecycleStateEnum = "DELETING"
	ListDetectorRecipesLifecycleStateDeleted  ListDetectorRecipesLifecycleStateEnum = "DELETED"
	ListDetectorRecipesLifecycleStateFailed   ListDetectorRecipesLifecycleStateEnum = "FAILED"
)
// mappingListDetectorRecipesLifecycleStateEnum maps the service's wire strings
// to their typed constants; used for validation and case-insensitive lookup.
var mappingListDetectorRecipesLifecycleStateEnum = map[string]ListDetectorRecipesLifecycleStateEnum{
	"CREATING": ListDetectorRecipesLifecycleStateCreating,
	"UPDATING": ListDetectorRecipesLifecycleStateUpdating,
	"ACTIVE":   ListDetectorRecipesLifecycleStateActive,
	"INACTIVE": ListDetectorRecipesLifecycleStateInactive,
	"DELETING": ListDetectorRecipesLifecycleStateDeleting,
	"DELETED":  ListDetectorRecipesLifecycleStateDeleted,
	"FAILED":   ListDetectorRecipesLifecycleStateFailed,
}
// GetListDetectorRecipesLifecycleStateEnumValues Enumerates the set of values for ListDetectorRecipesLifecycleStateEnum
// Note: iterating over a map makes the returned order nondeterministic.
func GetListDetectorRecipesLifecycleStateEnumValues() []ListDetectorRecipesLifecycleStateEnum {
	values := make([]ListDetectorRecipesLifecycleStateEnum, 0)
	for _, v := range mappingListDetectorRecipesLifecycleStateEnum {
		values = append(values, v)
	}
	return values
}
// GetListDetectorRecipesLifecycleStateEnumStringValues Enumerates the set of values in String for ListDetectorRecipesLifecycleStateEnum
// Values are returned in declaration order.
func GetListDetectorRecipesLifecycleStateEnumStringValues() []string {
	return []string{
		"CREATING",
		"UPDATING",
		"ACTIVE",
		"INACTIVE",
		"DELETING",
		"DELETED",
		"FAILED",
	}
}
// GetMappingListDetectorRecipesLifecycleStateEnum performs case Insensitive comparison on enum value and return the desired enum
func GetMappingListDetectorRecipesLifecycleStateEnum(val string) (ListDetectorRecipesLifecycleStateEnum, bool) {
	// PERF: compare case-insensitively against the canonical mapping instead
	// of materializing a lowercased copy of the whole map on every call.
	for k, v := range mappingListDetectorRecipesLifecycleStateEnum {
		if strings.EqualFold(k, val) {
			return v, true
		}
	}
	// Zero value + false, matching the previous map-lookup miss behavior.
	return "", false
}
// ListDetectorRecipesAccessLevelEnum Enum with underlying type: string
type ListDetectorRecipesAccessLevelEnum string
// Set of constants representing the allowable values for ListDetectorRecipesAccessLevelEnum
const (
	ListDetectorRecipesAccessLevelRestricted ListDetectorRecipesAccessLevelEnum = "RESTRICTED"
	ListDetectorRecipesAccessLevelAccessible ListDetectorRecipesAccessLevelEnum = "ACCESSIBLE"
)
// mappingListDetectorRecipesAccessLevelEnum maps the service's wire strings
// to their typed constants; used for validation and case-insensitive lookup.
var mappingListDetectorRecipesAccessLevelEnum = map[string]ListDetectorRecipesAccessLevelEnum{
	"RESTRICTED": ListDetectorRecipesAccessLevelRestricted,
	"ACCESSIBLE": ListDetectorRecipesAccessLevelAccessible,
}
// GetListDetectorRecipesAccessLevelEnumValues Enumerates the set of values for ListDetectorRecipesAccessLevelEnum
func GetListDetectorRecipesAccessLevelEnumValues() []ListDetectorRecipesAccessLevelEnum {
	values := make([]ListDetectorRecipesAccessLevelEnum, 0, len(mappingListDetectorRecipesAccessLevelEnum))
	for _, member := range mappingListDetectorRecipesAccessLevelEnum {
		values = append(values, member)
	}
	return values
}
// GetListDetectorRecipesAccessLevelEnumStringValues Enumerates the set of values in String for ListDetectorRecipesAccessLevelEnum
func GetListDetectorRecipesAccessLevelEnumStringValues() []string {
	return []string{"RESTRICTED", "ACCESSIBLE"}
}
// GetMappingListDetectorRecipesAccessLevelEnum performs case Insensitive comparison on enum value and return the desired enum
func GetMappingListDetectorRecipesAccessLevelEnum(val string) (ListDetectorRecipesAccessLevelEnum, bool) {
	lowered := make(map[string]ListDetectorRecipesAccessLevelEnum, len(mappingListDetectorRecipesAccessLevelEnum))
	for name, member := range mappingListDetectorRecipesAccessLevelEnum {
		lowered[strings.ToLower(name)] = member
	}
	enum, ok := lowered[strings.ToLower(val)]
	return enum, ok
}
// ListDetectorRecipesSortOrderEnum Enum with underlying type: string
type ListDetectorRecipesSortOrderEnum string

// Set of constants representing the allowable values for ListDetectorRecipesSortOrderEnum
const (
	ListDetectorRecipesSortOrderAsc  ListDetectorRecipesSortOrderEnum = "ASC"
	ListDetectorRecipesSortOrderDesc ListDetectorRecipesSortOrderEnum = "DESC"
)

// mappingListDetectorRecipesSortOrderEnum maps raw API strings to their typed constants.
var mappingListDetectorRecipesSortOrderEnum = map[string]ListDetectorRecipesSortOrderEnum{
	"ASC":  ListDetectorRecipesSortOrderAsc,
	"DESC": ListDetectorRecipesSortOrderDesc,
}
// GetListDetectorRecipesSortOrderEnumValues Enumerates the set of values for ListDetectorRecipesSortOrderEnum
func GetListDetectorRecipesSortOrderEnumValues() []ListDetectorRecipesSortOrderEnum {
	values := make([]ListDetectorRecipesSortOrderEnum, 0, len(mappingListDetectorRecipesSortOrderEnum))
	for _, member := range mappingListDetectorRecipesSortOrderEnum {
		values = append(values, member)
	}
	return values
}
// GetListDetectorRecipesSortOrderEnumStringValues Enumerates the set of values in String for ListDetectorRecipesSortOrderEnum
func GetListDetectorRecipesSortOrderEnumStringValues() []string {
	return []string{"ASC", "DESC"}
}
// GetMappingListDetectorRecipesSortOrderEnum performs case Insensitive comparison on enum value and return the desired enum
func GetMappingListDetectorRecipesSortOrderEnum(val string) (ListDetectorRecipesSortOrderEnum, bool) {
	lowered := make(map[string]ListDetectorRecipesSortOrderEnum, len(mappingListDetectorRecipesSortOrderEnum))
	for name, member := range mappingListDetectorRecipesSortOrderEnum {
		lowered[strings.ToLower(name)] = member
	}
	enum, ok := lowered[strings.ToLower(val)]
	return enum, ok
}
// ListDetectorRecipesSortByEnum Enum with underlying type: string
type ListDetectorRecipesSortByEnum string

// Set of constants representing the allowable values for ListDetectorRecipesSortByEnum
const (
	ListDetectorRecipesSortByTimecreated ListDetectorRecipesSortByEnum = "timeCreated"
	ListDetectorRecipesSortByDisplayname ListDetectorRecipesSortByEnum = "displayName"
)

// mappingListDetectorRecipesSortByEnum maps raw API strings to their typed constants.
var mappingListDetectorRecipesSortByEnum = map[string]ListDetectorRecipesSortByEnum{
	"timeCreated": ListDetectorRecipesSortByTimecreated,
	"displayName": ListDetectorRecipesSortByDisplayname,
}
// GetListDetectorRecipesSortByEnumValues Enumerates the set of values for ListDetectorRecipesSortByEnum
func GetListDetectorRecipesSortByEnumValues() []ListDetectorRecipesSortByEnum {
	values := make([]ListDetectorRecipesSortByEnum, 0, len(mappingListDetectorRecipesSortByEnum))
	for _, member := range mappingListDetectorRecipesSortByEnum {
		values = append(values, member)
	}
	return values
}
// GetListDetectorRecipesSortByEnumStringValues Enumerates the set of values in String for ListDetectorRecipesSortByEnum
func GetListDetectorRecipesSortByEnumStringValues() []string {
	return []string{"timeCreated", "displayName"}
}
// GetMappingListDetectorRecipesSortByEnum performs case Insensitive comparison on enum value and return the desired enum
func GetMappingListDetectorRecipesSortByEnum(val string) (ListDetectorRecipesSortByEnum, bool) {
	lowered := make(map[string]ListDetectorRecipesSortByEnum, len(mappingListDetectorRecipesSortByEnum))
	for name, member := range mappingListDetectorRecipesSortByEnum {
		lowered[strings.ToLower(name)] = member
	}
	enum, ok := lowered[strings.ToLower(val)]
	return enum, ok
}
| |
endpoint_targets.go
|
package service
import (
"context"
"time"
"github.com/fleetdm/fleet/server/kolide"
"github.com/go-kit/kit/endpoint"
)
////////////////////////////////////////////////////////////////////////////////
// Search Targets
////////////////////////////////////////////////////////////////////////////////
// searchTargetsRequest is the JSON body for a target search: a free-text
// query plus the label/host IDs the user has already selected.
type searchTargetsRequest struct {
	Query    string `json:"query"`
	Selected struct {
		Labels []uint `json:"labels"`
		Hosts  []uint `json:"hosts"`
	} `json:"selected"`
}
// hostSearchResult decorates a host response with the text shown for it in
// search results.
type hostSearchResult struct {
	HostResponse
	DisplayText string `json:"display_text"`
}

// labelSearchResult decorates a label with its display text and host count.
type labelSearchResult struct {
	kolide.Label
	DisplayText string `json:"display_text"`
	Count       int    `json:"count"`
}
// targetsData groups the host and label results of a target search.
type targetsData struct {
	Hosts  []hostSearchResult `json:"hosts"`
	Labels []labelSearchResult `json:"labels"`
}

// searchTargetsResponse is the endpoint response: the matching targets plus
// aggregate online/offline/missing-in-action counts.
type searchTargetsResponse struct {
	Targets                *targetsData `json:"targets,omitempty"`
	TargetsCount           uint         `json:"targets_count"`
	TargetsOnline          uint         `json:"targets_online"`
	TargetsOffline         uint         `json:"targets_offline"`
	TargetsMissingInAction uint         `json:"targets_missing_in_action"`
	Err                    error        `json:"error,omitempty"`
}
func (r searchTargetsResponse) error() error { return r.Err }
func makeSearchTargetsEndpoint(svc kolide.Service) endpoint.Endpoint {
return func(ctx context.Context, request interface{}) (interface{}, error) {
req := request.(searchTargetsRequest)
results, err := svc.SearchTargets(ctx, req.Query, req.Selected.Hosts, req.Selected.Labels)
if err != nil
|
targets := &targetsData{
Hosts: []hostSearchResult{},
Labels: []labelSearchResult{},
}
for _, host := range results.Hosts {
targets.Hosts = append(targets.Hosts,
hostSearchResult{
HostResponse{
Host: host,
Status: host.Status(time.Now()),
},
host.HostName,
},
)
}
for _, label := range results.Labels {
targets.Labels = append(targets.Labels,
labelSearchResult{
Label: label,
DisplayText: label.Name,
Count: label.HostCount,
},
)
}
metrics, err := svc.CountHostsInTargets(ctx, req.Selected.Hosts, req.Selected.Labels)
if err != nil {
return searchTargetsResponse{Err: err}, nil
}
return searchTargetsResponse{
Targets: targets,
TargetsCount: metrics.TotalHosts,
TargetsOnline: metrics.OnlineHosts,
TargetsOffline: metrics.OfflineHosts,
TargetsMissingInAction: metrics.MissingInActionHosts,
}, nil
}
}
|
{
return searchTargetsResponse{Err: err}, nil
}
|
model.go
|
// Package schema defines JSON schema types.
//
// Code borrowed from https://github.com/alecthomas/jsonschema/
//
// Copyright (C) 2014 Alec Thomas
//
// Permission is hereby granted, free of charge, to any person obtaining a copy of
// this software and associated documentation files (the "Software"), to deal in
// the Software without restriction, including without limitation the rights to
// use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
// of the Software, and to permit persons to whom the Software is furnished to do
// so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.
package schemas
import (
"encoding/json"
)
// Schema is the root schema.
// RFC draft-wright-json-schema-00, section 4.5
type Schema struct {
	*Type
	// ID is the schema's identifier.
	ID string `json:"id"`
	// Definitions holds named subschemas (section 5.26).
	Definitions Definitions `json:"definitions,omitempty"`
}
// TypeList is a list of type names.
type TypeList []string
// UnmarshalJSON implements json.Unmarshaler.
func (t *TypeList) UnmarshalJSON(b []byte) error {
if len(b) > 0 && b[0] == '[' {
var s []string
if err := json.Unmarshal(b, &s); err != nil {
return err
}
*t = TypeList(s)
return nil
}
var s string
if err := json.Unmarshal(b, &s); err != nil {
return err
}
if s != "" {
*t = TypeList([]string{s})
} else {
*t = nil
}
return nil
}
// Definitions hold schema definitions.
// http://json-schema.org/latest/json-schema-validation.html#rfc.section.5.26
// RFC draft-wright-json-schema-validation-00, section 5.26
// Each entry maps a definition name to its schema.
type Definitions map[string]*Type
// Type represents a JSON Schema object type.
type Type struct {
// RFC draft-wright-json-schema-00
Version string `json:"$schema,omitempty"` // section 6.1
Ref string `json:"$ref,omitempty"` // section 7
// RFC draft-wright-json-schema-validation-00, section 5
MultipleOf int `json:"multipleOf,omitempty"` // section 5.1
Maximum float64 `json:"maximum,omitempty"` // section 5.2
ExclusiveMaximum bool `json:"exclusiveMaximum,omitempty"` // section 5.3
Minimum float64 `json:"minimum,omitempty"` // section 5.4
ExclusiveMinimum bool `json:"exclusiveMinimum,omitempty"` // section 5.5
MaxLength int `json:"maxLength,omitempty"` // section 5.6
MinLength int `json:"minLength,omitempty"` // section 5.7
Pattern string `json:"pattern,omitempty"` // section 5.8
AdditionalItems *Type `json:"additionalItems,omitempty"` // section 5.9
Items *Type `json:"items,omitempty"` // section 5.9
MaxItems int `json:"maxItems,omitempty"` // section 5.10
MinItems int `json:"minItems,omitempty"` // section 5.11
UniqueItems bool `json:"uniqueItems,omitempty"` // section 5.12
MaxProperties int `json:"maxProperties,omitempty"` // section 5.13
MinProperties int `json:"minProperties,omitempty"` // section 5.14
Required []string `json:"required,omitempty"` // section 5.15
Properties map[string]*Type `json:"properties,omitempty"` // section 5.16
PatternProperties map[string]*Type `json:"patternProperties,omitempty"` // section 5.17
AdditionalProperties json.RawMessage `json:"additionalProperties,omitempty"` // section 5.18
Dependencies map[string]*Type `json:"dependencies,omitempty"` // section 5.19
Enum []interface{} `json:"enum,omitempty"` // section 5.20
Type TypeList `json:"type,omitempty"` // section 5.21
AllOf []*Type `json:"allOf,omitempty"` // section 5.22
AnyOf []*Type `json:"anyOf,omitempty"` // section 5.23
OneOf []*Type `json:"oneOf,omitempty"` // section 5.24
Not *Type `json:"not,omitempty"` // section 5.25
|
// RFC draft-wright-json-schema-validation-00, section 6, 7
Title string `json:"title,omitempty"` // section 6.1
Description string `json:"description,omitempty"` // section 6.1
Default interface{} `json:"default,omitempty"` // section 6.2
Format string `json:"format,omitempty"` // section 7
// RFC draft-wright-json-schema-hyperschema-00, section 4
Media *Type `json:"media,omitempty"` // section 4.3
BinaryEncoding string `json:"binaryEncoding,omitempty"` // section 4.3
// ExtGoCustomType is the name of a (qualified or not) custom Go type
// to use for the field.
GoJSONSchemaExtension *GoJSONSchemaExtension `json:"goJSONSchema,omitempty"`
}
// GoJSONSchemaExtension carries the custom "goJSONSchema" extension fields:
// an optional override Go type and/or identifier for a field.
type GoJSONSchemaExtension struct {
	Type       *string `json:"type,omitempty"`
	Identifier *string `json:"identifier,omitempty"`
}
|
Definitions Definitions `json:"definitions,omitempty"` // section 5.26
|
HiwinRT605_test_20190619121634.py
|
#!/usr/bin/env python3
# license removed for brevity
#策略 機械手臂 四點來回跑
import rospy
import os
import numpy as np
from std_msgs.msg import String
from ROS_Socket.srv import *
from ROS_Socket.msg import *
import math
import enum
import Hiwin_RT605_ROS as strategy
# Bookkeeping counters for the strategy node.
# NOTE(review): these appear unused in this part of the file — presumably
# feedback counters for the arm driver; confirm against the rest of the file.
pos_feedback_times = 0
mode_feedback_times = 0
msg_feedback = 1
#Arm_state_flag = 0
#strategy_flag = 0
arm_move_times = 1
##-----------switch define------------##
class switch(object):
    """C-style switch/case helper: `for case in switch(v): if case(x): ...`.

    `case(x)` returns True when v matches x (or always, after a prior match,
    to emulate fall-through; `case()` with no args is the default case).
    """
    def __init__(self, value):
        self.value = value
        self.fall = False  # set once a case matches, enabling fall-through
    def __iter__(self):
        """Return the match method once, then stop."""
        yield self.match
        # Bug fix: `raise StopIteration` inside a generator becomes a
        # RuntimeError under PEP 479 (Python 3.7+); a bare return ends the
        # generator cleanly with identical iteration behavior.
        return
    def match(self, *args):
        """Indicate whether or not to enter a case suite."""
        if self.fall or not args:
            return True
        elif self.value in args:  # changed for v1.5, see below
            self.fall = True
            return True
        else:
            return False
##------------class-------
class point():
    """A 6-DOF pose: cartesian position plus pitch/roll/yaw orientation."""
    def __init__(self, x, y, z, pitch, roll, yaw):
        self.x, self.y, self.z = x, y, z
        self.pitch, self.roll, self.yaw = pitch, roll, yaw
##-------------------------strategy---------------------
##----- Mission parameters (state-machine phase flags)
GetInfoFlag = False      # phase 1: gather info (vision)
ExecuteFlag = False      # phase 3: execute the queued motion commands
GetKeyFlag = False       # phase 2: build the motion-command sequence
MotionSerialKey = []     # snapshot of MotionKey taken by GetKey_Mission()
MissionType_Flag = 0     # toggled 0 <-> 1 by Get_MissionType()
MotionStep = 0           # index of the next command in MotionSerialKey
##----- Arm motion / pose parameters
angle_SubCue = 0
LinePtpFlag = False      # True -> line move (mode 3), False -> PTP move (mode 2)
MoveFlag = False
PushBallHeight = 6       # z height used for Arm_LineDown (units unverified)
ObjAboveHeight = 10      # z height used for Arm_LineUp (units unverified)
SpeedValue = 10          # speed parameter passed to the strategy client
MissionEndFlag = False   # set once the final motion sequence has been queued
CurrentMissionType = 0   # last MissionType returned by Get_MissionType()
##---------------Enum---------------##
class ArmMotionCommand(enum.IntEnum):
    # Primitive arm motion commands sequenced by MissionItem().
    Arm_Stop = 0
    Arm_MoveToTargetUpside = 1
    Arm_MoveFowardDown = 2
    Arm_MoveVision = 3
    Arm_PushBall = 4
    Arm_LineUp = 5
    Arm_LineDown = 6
    Arm_Angle = 7   # NOTE(review): not handled by MotionItem() — hits its default case
    Arm_StopPush = 8
class MissionType(enum.IntEnum):
    # High-level mission phases cycled by Get_MissionType().
    Get_Img = 0
    PushBall = 1
    Pushback = 2
    Mission_End = 3
##-----------switch define------------##
class pos():
    # Pose holder used as a class-level record: MotionItem() writes target
    # poses onto the CLASS attributes (pos.x = ...); no instance is created.
    # NOTE(review): the constructor ignores its arguments and resets every
    # field to the fixed home pose — likely a bug if instances are ever
    # constructed with real values; confirm intent before changing.
    def __init__(self, x, y, z, pitch, roll, yaw):
        self.x = 0
        self.y = 36.8
        self.z = 11.35
        self.pitch = -90
        self.roll = 0
        self.yaw = 0
class Target_pos():
    # Same pattern (and same ignored-arguments caveat) as `pos` above:
    # constructor arguments are discarded and the home pose is assigned.
    def __init__(self, x, y, z, pitch, roll, yaw):
        self.x = 0
        self.y = 36.8
        self.z = 11.35
        self.pitch = -90
        self.roll = 0
        self.yaw = 0
class TargetPush_pos():
    # Same pattern (and same ignored-arguments caveat) as `pos` above.
    def __init__(self, x, y, z, pitch, roll, yaw):
        self.x = 0
        self.y = 36.8
        self.z = 11.35
        self.pitch = -90
        self.roll = 0
        self.yaw = 0
class Item():
    """A detected object: planar coordinates plus its classification label."""
    def __init__(self, x, y, label):
        self.x, self.y, self.label = x, y, label
def Mission_Trigger():
if GetInfoFlag == True and GetKeyFlag == F
|
Flag,GetKeyFlag,ExecuteFlag
#Billiards_Calculation()
GetInfoFlag = False
GetKeyFlag = True
ExecuteFlag = False
def GetKey_Mission():
    # Phase 2 of the mission state machine: pick the next mission type,
    # build its motion-command sequence, and advance to the Execute phase.
    global GetInfoFlag,GetKeyFlag,ExecuteFlag,MotionKey,MotionSerialKey
    Mission = Get_MissionType()
    MissionItem(Mission)            # side effect: sets global MotionKey
    MotionSerialKey = MotionKey     # snapshot consumed by Execute_Mission()
    GetInfoFlag = False
    GetKeyFlag = False
    ExecuteFlag = True
def Get_MissionType():
    """Alternate between PushBall and Pushback, toggling MissionType_Flag."""
    global MissionType_Flag, CurrentMissionType
    if MissionType_Flag == 0:
        Type = MissionType.PushBall
        MissionType_Flag += 1
    elif MissionType_Flag == 1:
        Type = MissionType.Pushback
        MissionType_Flag -= 1
    CurrentMissionType = Type
    return Type
def MissionItem(ItemNo):
    """Select the motion-command sequence for the given mission type.

    Sets the global MotionKey as a side effect and returns it.
    """
    global MotionKey
    Key_PushBallCommand = [
        ArmMotionCommand.Arm_MoveToTargetUpside,
        ArmMotionCommand.Arm_LineDown,
        ArmMotionCommand.Arm_PushBall,
        ArmMotionCommand.Arm_LineUp,
        ArmMotionCommand.Arm_Stop,
    ]
    Key_PushBackCommand = [
        ArmMotionCommand.Arm_MoveVision,
        ArmMotionCommand.Arm_Stop,
        ArmMotionCommand.Arm_StopPush,
    ]
    if ItemNo == MissionType.PushBall:
        MotionKey = Key_PushBallCommand
    elif ItemNo == MissionType.Pushback:
        MotionKey = Key_PushBackCommand
    return MotionKey
def Execute_Mission():
    # Phase 3: when the arm is idle (Arm == 0) and the strategy is armed
    # (Strategy == 1), issue the next queued motion command. On Arm_Stop,
    # either finish the whole mission, loop back to rebuild the key sequence
    # (after PushBall), or go back to the info-gathering phase.
    global GetInfoFlag,GetKeyFlag,ExecuteFlag,MotionKey,MotionStep,MotionSerialKey,MissionEndFlag,CurrentMissionType
    print("strategy :" ,strategy.state_flag.Arm)
    if strategy.state_flag.Arm == 0 and strategy.state_flag.Strategy == 1:
        strategy.state_flag.Strategy = 0    # consume the strategy-ready flag
        if MotionKey[MotionStep] == ArmMotionCommand.Arm_Stop:
            if MissionEndFlag == True:
                # Final sequence done: terminate the state machine.
                CurrentMissionType = MissionType.Mission_End
                GetInfoFlag = False
                GetKeyFlag = False
                ExecuteFlag = False
                print("Mission_End")
            elif CurrentMissionType == MissionType.PushBall:
                # PushBall finished: build the next (Pushback) sequence.
                GetInfoFlag = False
                GetKeyFlag = True
                ExecuteFlag = False
                MotionStep = 0
                print("PushBall")
            else:
                # Otherwise go back to gathering info.
                GetInfoFlag = True
                GetKeyFlag = False
                ExecuteFlag = False
                MotionStep = 0
        else:
            MotionItem(MotionSerialKey[MotionStep])
            MotionStep += 1
def MotionItem(ItemNo):
    """Translate one ArmMotionCommand into a target pose and issue the move.

    Writes the target pose onto the class-level `pos` attributes, then sends
    either a PTP move (Arm_Mode action 2) or a line move (action 3) through
    the strategy client.

    Args:
        ItemNo: An ArmMotionCommand value from the current MotionKey sequence.
    """
    global angle_SubCue,SpeedValue,PushFlag,LinePtpFlag,MissionEndFlag
    SpeedValue = 5
    # Bug fix: MoveFlag must be initialized before the dispatch below.
    # Previously an unhandled command (e.g. Arm_Angle, which falls to the
    # default case) left it unbound, so `if MoveFlag == True` raised a
    # NameError. Defaulting to False means "no move" for unknown commands.
    MoveFlag = False
    for case in switch(ItemNo):  # dispatch on the arm motion command
        if case(ArmMotionCommand.Arm_Stop):
            MoveFlag = False
            print("Arm_Stop")
            break
        if case(ArmMotionCommand.Arm_StopPush):
            MoveFlag = False
            PushFlag = True  # trigger a rescan of the objects
            print("Arm_StopPush")
            break
        if case(ArmMotionCommand.Arm_MoveToTargetUpside):
            pos.x = 10
            pos.y = 36.8
            pos.z = 11.35
            pos.pitch = -90
            pos.roll = 0
            pos.yaw = 10
            MoveFlag = True
            LinePtpFlag = False
            SpeedValue = 10
            print("Arm_MoveToTargetUpside")
            break
        if case(ArmMotionCommand.Arm_LineUp):
            pos.z = ObjAboveHeight
            MoveFlag = True
            LinePtpFlag = True
            SpeedValue = 5
            print("Arm_LineUp")
            break
        if case(ArmMotionCommand.Arm_LineDown):
            pos.z = PushBallHeight
            MoveFlag = True
            LinePtpFlag = True
            SpeedValue = 5
            print("Arm_LineDown")
            break
        if case(ArmMotionCommand.Arm_PushBall):
            pos.x = -10
            pos.y = 36.8
            pos.z = 11.35
            pos.pitch = -90
            pos.roll = 0
            pos.yaw = -10
            SpeedValue = 10  # to be tuned (untested)
            MoveFlag = True
            LinePtpFlag = False
            print("Arm_PushBall")
            break
        if case(ArmMotionCommand.Arm_MoveVision):
            pos.x = 0
            pos.y = 36.8
            pos.z = 11.35
            pos.pitch = -90
            pos.roll = 0
            pos.yaw = 0
            SpeedValue = 10
            MoveFlag = True
            LinePtpFlag = False
            ## mission-end flag
            MissionEndFlag = True
            print("Arm_MoveVision")
            break
        if case(ArmMotionCommand.Arm_MoveFowardDown):
            pos.x = 0
            pos.y = 36.8
            pos.z = 11.35
            pos.pitch = -90
            pos.roll = 0
            pos.yaw = 0
            MoveFlag = True
            LinePtpFlag = False
            print("Arm_MoveFowardDown")
            break
        if case():  # default, could also just omit condition or 'if True'
            print ("something else!")
            # No need to break here, it'll stop anyway
    if MoveFlag == True:
        if LinePtpFlag == False:
            print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
            #strategy_client_Arm_Mode(0,1,0,30,2)#action,ra,grip,vel,both
            strategy.strategy_client_Arm_Mode(2,1,0,SpeedValue,2)#action,ra,grip,vel,both
            strategy.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
        elif LinePtpFlag == True:
            #strategy_client_Arm_Mode(0,1,0,40,2)#action,ra,grip,vel,both
            print('x: ',pos.x,' y: ',pos.y,' z: ',pos.z,' pitch: ',pos.pitch,' roll: ',pos.roll,' yaw: ',pos.yaw)
            strategy.strategy_client_Arm_Mode(3,1,0,SpeedValue,2)#action,ra,grip,vel,both
            strategy.strategy_client_pos_move(pos.x,pos.y,pos.z,pos.pitch,pos.roll,pos.yaw)
    #action: ptp line
    #ra : abs rel
    #grip gripper
    #vel speed
    #both : Ctrl_Mode
##-------------strategy end ------------
def myhook():
    """rospy shutdown callback: announce that the node is going down."""
    print("shutdown time!")
if __name__ == '__main__':
    argv = rospy.myargv()   # ROS-filtered argv (currently unused)
    rospy.init_node('strategy', anonymous=True)
    GetInfoFlag = True #Test no data
    strategy.strategy_client_Arm_Mode(0,1,0,20,2)#action,ra,grip,vel,both
    # Drive the mission state machine; on completion, register the shutdown
    # hook and block in spin().
    while 1:
        Mission_Trigger()
        if CurrentMissionType == MissionType.Mission_End:
            strategy.rospy.on_shutdown(myhook)
            strategy.rospy.spin()
    rospy.spin()  # NOTE(review): unreachable — the loop above never breaks
|
alse and ExecuteFlag == False:
GetInfo_Mission()
if GetInfoFlag == False and GetKeyFlag == True and ExecuteFlag == False:
GetKey_Mission()
if GetInfoFlag == False and GetKeyFlag == False and ExecuteFlag == True:
Execute_Mission()
def GetInfo_Mission():
global GetInfo
|
ramdisk.rs
|
// vim: tw=80
// Copyright (C) 2020 Axcient
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
//
// * Redistributions in binary form must reproduce the above copyright
// notice, this list of conditions and the following disclaimer in the
// documentation and/or other materials provided with the distribution.
//
// THIS SOFTWARE IS PROVIDED BY RED HAT AND CONTRIBUTORS ''AS IS'' AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
// THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
// PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL RED HAT OR
// CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF
// USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
use lazy_static::lazy_static;
use std::sync::Mutex;
use nbdkit::*;
// The RAM disk: a mutex-guarded 100 MiB byte vector shared by all handles.
lazy_static! {
    static ref DISK: Mutex<Vec<u8>> = Mutex::new (vec![0; 100 * 1024 * 1024]);
}
// Per-connection handle type; all real state lives in the shared DISK static.
#[derive(Default)]
struct RamDisk {
    // Box::new doesn't allocate anything unless we put some dummy
    // fields here. In a real implementation you would put per-handle
    // data here as required.
    _not_used: i32,
}
impl Server for RamDisk {
    /// Exported device size: the full length of the backing vector.
    fn get_size(&self) -> Result<i64> {
        Ok(DISK.lock().unwrap().len() as i64)
    }

    /// Plugin name registered with nbdkit.
    fn name() -> &'static str {
        "ramdisk"
    }

    /// Open a new handle; read-only vs read-write is irrelevant here.
    fn open(_readonly: bool) -> Box<dyn Server> {
        Box::new(RamDisk::default())
    }

    fn read_at(&self, buf: &mut [u8], offset: u64) -> Result<()> {
        // NOTE(review): the slice indexing panics if offset + buf.len()
        // exceeds the disk size — presumably nbdkit clamps requests to
        // get_size(); confirm before relying on this.
        let disk = DISK.lock().unwrap();
        let ofs = offset as usize;
        let end = ofs + buf.len();
        buf.copy_from_slice(&disk[ofs..end]);
        Ok(())
    }

    /// Parallel is safe: every access serializes on the DISK mutex.
    fn thread_model() -> Result<ThreadModel> where Self: Sized {
        Ok(ThreadModel::Parallel)
    }

    fn write_at(&self, buf: &[u8], offset: u64, _flags: Flags) -> Result<()> {
        // Same out-of-range panic caveat as read_at.
        let mut disk = DISK.lock().unwrap();
        let ofs = offset as usize;
        let end = ofs + buf.len();
        disk[ofs..end].copy_from_slice(buf);
        Ok(())
    }
}
plugin!(RamDisk {thread_model, write_at});
|
// OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
// OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
// SUCH DAMAGE.
|
exx.py
|
#_*_ coding: utf-8 _*_
def needsclap(x):
    # True when the digit is one of the "clap" digits 2, 3, 5, 7.
    # Membership test replaces the chained equality comparisons.
    return x in (2, 3, 5, 7)
# Count 1..100 and print a clap mark per "clap" digit (ones and tens place).
for i in range(1,101):
    one = needsclap(i%10)
    # Python 2 integer division (this file uses print statements throughout).
    ten = needsclap(i/10)
    if one and ten: print i,"짝짝"
    elif one or ten: print i,"짝"
    else: print i
# List concatenation, repetition, indexing and slicing demos.
print [1,2,3] + ["ff"]
print [1,2] * 3
a = [1,2,3,4,5,6,7,8,9]
a[1] = 5
print a[-2]     # negative index: second-to-last element
print a[3:5]    # slice
print a[::-1]   # reversed copy
# (name, year, month, day) records.
birthdays = [
    ("한재규", 1981, 8, 26),
    ("박현규", 1981, 6,6),
    ("장기호", 1980, 9, 2)
]
print birthdays
for name, year, month, day in birthdays:
    print name, year,"년", month, "월", day, "일생"
    print "%s - %s월 %s일생" % (name,month,day)
    print "==========================="
# A bound str.format method vs. a plain template string.
formatfun = "{0} - {1}월 {2}일생".format
macro = "{0} - {1}월 {2}일생"
for name, _, month, day in birthdays:
    print formatfun(name, month, day)
    print macro.format(name, month, day)
# type() demos for various builtin values.
print "4",type(4)
print "hello?", type("hello")
print "[1,2,3]", type([1,2,3])
print "(1,2,3)", type((1,2,3))
print type(formatfun)
print type(macro)
print type(birthdays)
print type(needsclap)
# String method demos.
print "hello".upper()
print "".join(["i"," ","am"])
print "i am".split(" ")
for i in "i am a boy".split(" "):
    print i
# repr() vs str() of a float.
print repr(1/5.0)
print str(1/5.0)
print str(1/5.0)
def names(birthdays):
result = []
for name,_,_,_ in
|
days:
result.append(name)
result.sort()
return result
# Print the sorted name list (names() is defined above) and demo
# tuple unpacking from a list.
print names(birthdays)
for name in names(birthdays):
    print name
a,b,c = [1,2,3]
print a,b,c
|
birth
|
analyze.py
|
"""Code for checking and inferring types."""
import collections
import logging
import re
import subprocess
from typing import Any, Dict, Union
from pytype import abstract
from pytype import abstract_utils
from pytype import convert_structural
from pytype import debug
from pytype import function
from pytype import metrics
from pytype import special_builtins
from pytype import state as frame_state
from pytype import vm
from pytype.overlays import typing_overlay
from pytype.pytd import builtins
from pytype.pytd import escape
from pytype.pytd import optimize
from pytype.pytd import pytd
from pytype.pytd import pytd_utils
from pytype.pytd import visitors
from pytype.typegraph import cfg
log = logging.getLogger(__name__)

# Most interpreter functions (including lambdas) need to be analyzed as
# stand-alone functions. The exceptions are comprehensions and generators, which
# have names like "<listcomp>" and "<genexpr>".
_SKIP_FUNCTION_RE = re.compile("<(?!lambda).+>$")

# One recorded invocation of a function, captured during analysis.
CallRecord = collections.namedtuple(
    "CallRecord", ["node", "function", "signatures", "positional_arguments",
                   "keyword_arguments", "return_value"])

# How deep to follow call chains:
INIT_MAXIMUM_DEPTH = 4  # during module loading
MAXIMUM_DEPTH = 3  # during non-quick analysis
QUICK_CHECK_MAXIMUM_DEPTH = 2  # during quick checking
QUICK_INFER_MAXIMUM_DEPTH = 1  # during quick inference
class _Initializing:
  # Sentinel stored in CallTracer._instance_cache while a class is being
  # instantiated, to detect recursive instantiation (see init_class).
  pass
class CallTracer(vm.VirtualMachine):
"""Virtual machine that records all function calls.
Attributes:
exitpoint: A CFG node representing the program exit. Needs to be set before
analyze_types.
"""
_CONSTRUCTORS = ("__new__", "__init__")
  def __init__(self, *args, **kwargs):
    """Initialize the tracer's bookkeeping on top of the base VirtualMachine."""
    super().__init__(*args, **kwargs)
    self._unknowns = {}
    # Recorded function and method calls.
    self._calls = set()
    self._method_calls = set()
    # Used by init_class.
    self._instance_cache: Dict[Any, Union[_Initializing, cfg.Variable]] = {}
    # Used by call_init. Can differ from _instance_cache because we also call
    # __init__ on classes not initialized via init_class.
    self._initialized_instances = set()
    self._interpreter_functions = []
    self._interpreter_classes = []
    # Opcodes of functions / classes already analyzed, to avoid repeats.
    self._analyzed_functions = set()
    self._analyzed_classes = set()
    self._generated_classes = {}
    # CFG node for program exit; must be set before analyze_types (see class doc).
    self.exitpoint = None
  def create_varargs(self, node):
    """Create a tuple instance parameterized with an unknown, for use as *args."""
    value = abstract.Instance(self.convert.tuple_type, self)
    value.merge_instance_type_parameter(
        node, abstract_utils.T, self.convert.create_new_unknown(node))
    return value.to_variable(node)
  def create_kwargs(self, node):
    """Create a dict instance with str keys and unknown values, for **kwargs."""
    key_type = self.convert.primitive_class_instances[str].to_variable(node)
    value_type = self.convert.create_new_unknown(node)
    kwargs = abstract.Instance(self.convert.dict_type, self)
    kwargs.merge_instance_type_parameter(node, abstract_utils.K, key_type)
    kwargs.merge_instance_type_parameter(node, abstract_utils.V, value_type)
    return kwargs.to_variable(node)
  def create_method_arguments(self, node, method, use_defaults=False):
    """Create arguments for the given method.

    Creates Unknown objects as arguments for the given method. Note that we
    don't need to take parameter annotations into account as
    InterpreterFunction.call() will take care of that.

    Args:
      node: The current node.
      method: An abstract.InterpreterFunction.
      use_defaults: Whether to use parameter defaults for arguments. When True,
        unknown arguments are created with force=False, as it is fine to use
        Unsolvable rather than Unknown objects for type-checking defaults.

    Returns:
      A tuple of a node and a function.Args object.
    """
    args = []
    num_posargs = method.argcount(node)
    num_posargs_no_default = num_posargs - len(method.defaults)
    for i in range(num_posargs):
      # default_idx < 0 means this positional parameter has no default.
      default_idx = i - num_posargs_no_default
      if use_defaults and default_idx >= 0:
        arg = method.defaults[default_idx]
      else:
        arg = self.convert.create_new_unknown(node, force=not use_defaults)
      args.append(arg)
    kws = {}
    # Keyword-only parameters get their declared default or a fresh unknown.
    for key in method.signature.kwonly_params:
      if use_defaults and key in method.kw_defaults:
        kws[key] = method.kw_defaults[key]
      else:
        kws[key] = self.convert.create_new_unknown(node, force=not use_defaults)
    starargs = self.create_varargs(node) if method.has_varargs() else None
    starstarargs = self.create_kwargs(node) if method.has_kwargs() else None
    return node, function.Args(posargs=tuple(args),
                               namedargs=kws,
                               starargs=starargs,
                               starstarargs=starstarargs)
  def call_function_with_args(self, node, val, args):
    """Call a function.

    Args:
      node: The given node.
      val: A cfg.Binding containing the function.
      args: A function.Args object.

    Returns:
      A tuple of (1) a node and (2) a cfg.Variable of the return value.
    """
    fvar = val.AssignToNewVariable(node)
    # record_calls captures the invocations made while the call runs.
    with val.data.record_calls():
      new_node, ret = self.call_function_in_frame(node, fvar, *args)
    return new_node, ret
  def call_function_in_frame(self, node, var, args, kwargs,
                             starargs, starstarargs):
    """Call the function(s) in `var` inside a fresh simple frame.

    Returns a (node, return value variable) tuple.
    """
    frame = frame_state.SimpleFrame(node=node)
    self.push_frame(frame)
    log.info("Analyzing %r", [v.name for v in var.data])
    state = frame_state.FrameState.init(node, self)
    state, ret = self.call_function_with_state(
        state, var, args, kwargs, starargs, starstarargs)
    self.pop_frame(frame)
    return state.node, ret
  def _maybe_fix_classmethod_cls_arg(self, node, cls, func, args):
    """Replace the first positional arg with `cls` when it is unannotated."""
    sig = func.signature
    if (args.posargs and sig.param_names and
        (sig.param_names[0] not in sig.annotations)):
      # fix "cls" parameter
      return args._replace(
          posargs=(cls.AssignToNewVariable(node),) + args.posargs[1:])
    else:
      return args
  def maybe_analyze_method(self, node, val, cls=None):
    """Analyze the function bound in `val`, unless annotations let us skip it.

    Fully annotated (or overloaded) functions are skipped when
    --analyze-annotated is off, except constructors (_CONSTRUCTORS), which
    are always analyzed.
    """
    method = val.data
    fname = val.data.name
    if isinstance(method, abstract.INTERPRETER_FUNCTION_TYPES):
      self._analyzed_functions.add(method.get_first_opcode())
      if (not self.options.analyze_annotated and
          (method.signature.has_return_annotation or method.has_overloads) and
          fname.rsplit(".", 1)[-1] not in self._CONSTRUCTORS):
        log.info("%r has annotations, not analyzing further.", fname)
      else:
        for f in method.iter_signature_functions():
          node, args = self.create_method_arguments(node, f)
          if f.is_classmethod and cls:
            args = self._maybe_fix_classmethod_cls_arg(node, cls, f, args)
          node, _ = self.call_function_with_args(node, val, args)
    return node
  def _call_with_fake_args(self, node0, funcv):
    """Attempt to call the given function with made-up arguments.

    Returns a (node, return value variable) tuple; the return value is
    unsolvable when no call produced any bindings.
    """
    # TODO(tsudol): If expand this beyond __init__, need to handle
    # DictKeyMissing
    nodes = []
    rets = []
    for funcb in funcv.bindings:
      func = funcb.data
      log.info("Trying %s with fake arguments", func)
      if isinstance(func, abstract.INTERPRETER_FUNCTION_TYPES):
        node1, args = self.create_method_arguments(node0, func)
        # Once the args are generated, try calling the function.
        # call_function will check fallback_to_unsolvable if a DictKeyMissing or
        # FailedFunctionCall error is raised when the target function is called.
        # DictKeyMissing doesn't trigger call_with_fake_args, so that shouldn't
        # be raised again, and generating fake arguments should avoid any
        # FailedFunctionCall errors. To prevent an infinite recursion loop, set
        # fallback_to_unsolvable to False just in case.
        # This means any additional errors that may be raised will be passed to
        # the call_function that called this method in the first place.
        node2, ret = self.call_function(node1,
                                        funcb.AssignToNewVariable(),
                                        args,
                                        fallback_to_unsolvable=False)
        nodes.append(node2)
        rets.append(ret)
    if nodes:
      ret = self.join_variables(node0, rets)
      node = self.join_cfg_nodes(nodes)
      if ret.bindings:
        return node, ret
    else:
      node = node0
    log.info("Unable to generate fake arguments for %s", funcv)
    return node, self.new_unsolvable(node)
  def analyze_method_var(self, node0, name, var, cls=None):
    """Analyze every binding of a method variable on a new connected node."""
    log.info("Analyzing %s", name)
    node1 = node0.ConnectNew(name)
    for val in var.bindings:
      node2 = self.maybe_analyze_method(node1, val, cls)
      # Reconnect so later analysis continues from node0.
      node2.ConnectTo(node0)
    return node0
  def bind_method(self, node, name, methodvar, instance_var):
    """Bind each function in methodvar to instance_var (or its class).

    NOTE(review): the `name` parameter is unused in this body.
    """
    bound = self.program.NewVariable()
    for m in methodvar.Data(node):
      if isinstance(m, special_builtins.ClassMethodInstance):
        # Unwrap the classmethod wrapper to its underlying function.
        m = m.func.data[0]
        is_cls = True
      else:
        is_cls = (m.isinstance_InterpreterFunction() and m.is_classmethod)
      bound.AddBinding(m.property_get(instance_var, is_cls), [], node)
    return bound
  def _instantiate_binding(self, node0, cls, container):
    """Instantiate a class binding, calling its own __new__ when present."""
    node1, new = cls.data.get_own_new(node0, cls)
    if not new or (
        any(not isinstance(f, abstract.InterpreterFunction) for f in new.data)):
      # This assumes that any inherited __new__ method defined in a pyi file
      # returns an instance of the current class.
      return node0, cls.data.instantiate(node0, container=container)
    instance = self.program.NewVariable()
    nodes = []
    # Call each binding of __new__ with generated arguments and collect the
    # resulting instances.
    for b in new.bindings:
      self._analyzed_functions.add(b.data.get_first_opcode())
      node2, args = self.create_method_arguments(node1, b.data)
      args = self._maybe_fix_classmethod_cls_arg(node0, cls, b.data, args)
      node3 = node2.ConnectNew()
      node4, ret = self.call_function_with_args(node3, b, args)
      instance.PasteVariable(ret)
      nodes.append(node4)
    return self.join_cfg_nodes(nodes), instance
def _instantiate_var(self, node, clsv, container):
"""Build an (dummy) instance from a class, for analyzing it."""
n = self.program.NewVariable()
for cls in clsv.Bindings(node, strict=False):
node, var = self._instantiate_binding(node, cls, container)
n.PasteVariable(var)
return node, n
def _mark_maybe_missing_members(self, values):
"""Set maybe_missing_members to True on these values and their type params.
Args:
values: A list of BaseValue objects. On every instance among
the values, recursively set maybe_missing_members to True on the
instance and its type parameters.
"""
values = list(values)
seen = set()
while values:
v = values.pop(0)
if v not in seen:
seen.add(v)
if isinstance(v, abstract.SimpleValue):
v.maybe_missing_members = True
for child in v.instance_type_parameters.values():
values.extend(child.data)
  def init_class(self, node, cls, container=None, extra_key=None):
    """Instantiate a class, and also call __init__.

    Calling __init__ can be expensive, so this method caches its created
    instances. If you don't need __init__ called, use cls.instantiate instead.

    Args:
      node: The current node.
      cls: The class to instantiate.
      container: Optionally, a container to pass to the class's instantiate()
        method, so that type parameters in the container's template are
        instantiated to TypeParameterInstance.
      extra_key: Optionally, extra information about the location at which the
        instantiation occurs. By default, this method keys on the current
        opcode and the class, which sometimes isn't enough to disambiguate
        callers that shouldn't get back the same cached instance.

    Returns:
      A tuple of node and instance variable.
    """
    # Cache key: (current opcode, caller-supplied key, class).
    key = (self.frame and self.frame.current_opcode, extra_key, cls)
    instance = self._instance_cache.get(key)
    if not instance or isinstance(instance, _Initializing):
      clsvar = cls.to_variable(node)
      node, instance = self._instantiate_var(node, clsvar, container)
      if key in self._instance_cache:
        # We've encountered a recursive pattern such as
        # class A:
        #   def __init__(self, x: "A"): ...
        # Calling __init__ again would lead to an infinite loop, so
        # we instead create an incomplete instance that will be
        # overwritten later. Note that we have to create a new
        # instance rather than using the one that we're already in
        # the process of initializing - otherwise, setting
        # maybe_missing_members to True would cause pytype to ignore
        # all attribute errors on self in __init__.
        self._mark_maybe_missing_members(instance.data)
      else:
        # Mark the key as in-progress before calling __init__, so a
        # recursive init_class call on the same key hits the branch above.
        self._instance_cache[key] = _Initializing()
        node = self.call_init(node, instance)
      self._instance_cache[key] = instance
    return node, instance
def _call_method(self, node, binding, method_name):
node, method = self.attribute_handler.get_attribute(
node, binding.data.get_class(), method_name, binding)
if method:
bound_method = self.bind_method(
node, method_name, method, binding.AssignToNewVariable())
node = self.analyze_method_var(node, method_name, bound_method)
return node
  def _call_init_on_binding(self, node, b):
    """Call __init__ (plus any registered extra initializers) on binding `b`."""
    # Initialize the instance's type parameters before the instance itself.
    if isinstance(b.data, abstract.SimpleValue):
      for param in b.data.instance_type_parameters.values():
        node = self.call_init(node, param)
    node = self._call_method(node, b, "__init__")
    cls = b.data.get_class()
    if isinstance(cls, abstract.InterpreterClass):
      # Call any additional initializers the class has registered.
      for method in cls.additional_init_methods:
        node = self._call_method(node, b, method)
    return node
def call_init(self, node, instance):
# Call __init__ on each binding.
for b in instance.bindings:
if b.data in self._initialized_instances:
continue
self._initialized_instances.add(b.data)
node = self._call_init_on_binding(node, b)
return node
  def reinitialize_if_initialized(self, node, instance):
    # Re-run __init__ for a value that has already been initialized once;
    # a no-op for values that were never initialized.
    if instance in self._initialized_instances:
      self._call_init_on_binding(node, instance.to_binding(node))
  def analyze_class(self, node, val):
    """Instantiate the class in binding `val` and analyze all of its methods."""
    self._analyzed_classes.add(val.data)
    node, instance = self.init_class(node, val.data)
    # Keep only instances whose class is actually this class.
    good_instances = [b for b in instance.bindings if val.data == b.data.cls]
    if not good_instances:
      # __new__ returned something that's not an instance of our class.
      instance = val.data.instantiate(node)
      node = self.call_init(node, instance)
    elif len(good_instances) != len(instance.bindings):
      # __new__ returned some extra possibilities we don't need.
      instance = self.join_bindings(node, good_instances)
    for instance_value in instance.data:
      val.data.register_canonical_instance(instance_value)
    # Worklist of (name, method variable) pairs still to analyze.
    methods = sorted(val.data.members.items())
    while methods:
      name, methodvar = methods.pop(0)
      if name in self._CONSTRUCTORS:
        continue  # We already called this method during initialization.
      for v in methodvar.data:
        if (self.options.bind_properties and
            isinstance(v, special_builtins.PropertyInstance)):
          # Queue the property's accessor functions for analysis as well.
          for m in (v.fget, v.fset, v.fdel):
            if m:
              methods.insert(0, (name, m))
      b = self.bind_method(node, name, methodvar, instance)
      node = self.analyze_method_var(node, name, b, val)
    return node
def analyze_function(self, node0, val):
if val.data.is_attribute_of_class:
# We'll analyze this function as part of a class.
log.info("Analyze functions: Skipping class method %s", val.data.name)
else:
node1 = node0.ConnectNew(val.data.name)
node2 = self.maybe_analyze_method(node1, val)
node2.ConnectTo(node0)
return node0
def _should_analyze_as_interpreter_function(self, data):
# We record analyzed functions by opcode rather than function object. The
# two ways of recording are equivalent except for closures, which are
# re-generated when the variables they close over change, but we don't want
# to re-analyze them.
return (isinstance(data, abstract.InterpreterFunction) and
not data.is_overload and
not data.is_class_builder and
data.get_first_opcode() not in self._analyzed_functions and
not _SKIP_FUNCTION_RE.search(data.name))
  def analyze_toplevel(self, node, defs):
    """Analyze all top-level classes and functions in `defs`."""
    for name, var in sorted(defs.items()):  # sort, for determinism
      if not self._is_typing_member(name, var):
        for value in var.bindings:
          if isinstance(value.data, abstract.InterpreterClass):
            new_node = self.analyze_class(node, value)
          elif (isinstance(value.data, abstract.INTERPRETER_FUNCTION_TYPES) and
                not value.data.is_overload):
            new_node = self.analyze_function(node, value)
          else:
            continue
          if new_node is not node:
            new_node.ConnectTo(node)
    # Now go through all functions and classes we haven't analyzed yet.
    # These are typically hidden under a decorator.
    # Go through classes first so that the `is_attribute_of_class` will
    # be set for all functions in class.
    for c in self._interpreter_classes:
      for value in c.bindings:
        if (isinstance(value.data, abstract.InterpreterClass) and
            value.data not in self._analyzed_classes):
          node = self.analyze_class(node, value)
    for f in self._interpreter_functions:
      for value in f.bindings:
        if self._should_analyze_as_interpreter_function(value.data):
          node = self.analyze_function(node, value)
    return node
def analyze(self, node, defs, maximum_depth):
assert not self.frame
self.maximum_depth = maximum_depth
self._analyzing = True
node = node.ConnectNew(name="Analyze")
return self.analyze_toplevel(node, defs)
  def trace_unknown(self, name, unknown_binding):
    # Record the binding of an unknown value so it can be emitted later.
    self._unknowns[name] = unknown_binding
def trace_call(self, node, func, sigs, posargs, namedargs, result):
"""Add an entry into the call trace.
Args:
node: The CFG node right after this function call.
func: A cfg.Binding of a function that was called.
sigs: The signatures that the function might have been called with.
posargs: The positional arguments, an iterable over cfg.Value.
namedargs: The keyword arguments, a dict mapping str to cfg.Value.
result: A Variable of the possible result values.
"""
log.debug("Logging call to %r with %d args, return %r",
func, len(posargs), result)
args = tuple(posargs)
kwargs = tuple((namedargs or {}).items())
record = CallRecord(node, func, sigs, args, kwargs, result)
if isinstance(func.data, abstract.BoundPyTDFunction):
self._method_calls.add(record)
elif isinstance(func.data, abstract.PyTDFunction):
self._calls.add(record)
  def trace_functiondef(self, f):
    # Remember defined functions so un-analyzed ones get picked up later.
    self._interpreter_functions.append(f)
  def trace_classdef(self, c):
    # Remember defined classes so un-analyzed ones get picked up later.
    self._interpreter_classes.append(c)
  def trace_namedtuple(self, nt):
    # Record the generated class, keyed by name, for later pytd emission.
    # All namedtuple instances with the same name are equal, so it's fine to
    # overwrite previous instances.
    self._generated_classes[nt.name] = nt
def pytd_classes_for_unknowns(self):
classes = []
for name, val in self._unknowns.items():
if val in val.variable.Filter(self.exitpoint, strict=False):
classes.append(val.data.to_structural_def(self.exitpoint, name))
return classes
def
|
(self, defs):
# If a variable is annotated, we'll always output that type.
annotated_names = set()
data = []
pytd_convert = self.convert.pytd_convert
annots = abstract_utils.get_annotations_dict(defs)
for name, t in pytd_convert.annotations_to_instance_types(
self.exitpoint, annots):
annotated_names.add(name)
data.append(pytd.Constant(name, t))
for name, var in defs.items():
if (name in abstract_utils.TOP_LEVEL_IGNORE or name in annotated_names or
self._is_typing_member(name, var)):
continue
options = var.FilteredData(self.exitpoint, strict=False)
if (len(options) > 1 and
not all(isinstance(o, abstract.FUNCTION_TYPES) for o in options)):
if all(isinstance(o, (abstract.ParameterizedClass,
abstract.TypeParameter,
abstract.Union)) for o in options
) and self.options.preserve_union_macros: # type alias
data.append(pytd_utils.JoinTypes(t.to_pytd_def(self.exitpoint, name)
for t in options))
else:
# It's ambiguous whether this is a type, a function or something
# else, so encode it as a constant.
combined_types = pytd_utils.JoinTypes(t.to_type(self.exitpoint)
for t in options)
data.append(pytd.Constant(name, combined_types))
elif options:
for option in options:
try:
d = option.to_pytd_def(self.exitpoint, name) # Deep definition
except NotImplementedError:
d = option.to_type(self.exitpoint) # Type only
if isinstance(d, pytd.NothingType):
if isinstance(option, abstract.Empty):
d = pytd.AnythingType()
else:
assert isinstance(option, typing_overlay.NoReturn)
if isinstance(d, pytd.Type) and not isinstance(d, pytd.TypeParameter):
data.append(pytd.Constant(name, d))
else:
data.append(d)
else:
log.error("No visible options for %s", name)
data.append(pytd.Constant(name, pytd.AnythingType()))
return pytd_utils.WrapTypeDeclUnit("inferred", data)
  @staticmethod
  def _call_traces_to_function(call_traces, name_transform=lambda x: x):
    """Convert call records into pytd.Function signatures, grouped by name."""
    funcs = collections.defaultdict(pytd_utils.OrderedSet)
    for node, func, sigs, args, kws, retvar in call_traces:
      # The lengths may be different in the presence of optional and kw args.
      arg_names = max((sig.get_positional_names() for sig in sigs), key=len)
      for i in range(len(arg_names)):
        # Keep the first (self/cls) name for bound methods; synthesize the
        # rest positionally.
        if not isinstance(func.data, abstract.BoundFunction) or i > 0:
          arg_names[i] = function.argname(i)
      arg_types = (a.data.to_type(node) for a in args)
      ret = pytd_utils.JoinTypes(t.to_type(node) for t in retvar.data)
      # *args/**kwargs are not reconstructed here; leave them unset.
      starargs = None
      starstarargs = None
      funcs[func.data.name].add(pytd.Signature(
          tuple(pytd.Parameter(n, t, False, False, None)
                for n, t in zip(arg_names, arg_types)) +
          tuple(pytd.Parameter(name, a.data.to_type(node), False, False, None)
                for name, a in kws),
          starargs, starstarargs,
          ret, exceptions=(), template=()))
    functions = []
    for name, signatures in funcs.items():
      functions.append(pytd.Function(name_transform(name), tuple(signatures),
                                     pytd.MethodTypes.METHOD))
    return functions
def _is_typing_member(self, name, var):
for module_name in ("typing", "typing_extensions"):
if module_name not in self.loaded_overlays:
continue
module = self.loaded_overlays[module_name].get_module(name)
if name in module.members and module.members[name].data == var.data:
return True
return False
  def pytd_functions_for_call_traces(self):
    # Function names are wrapped with escape.pack_partial before emission.
    return self._call_traces_to_function(self._calls, escape.pack_partial)
  def pytd_classes_for_call_traces(self):
    """Generate partial pytd classes from method calls involving unknowns."""
    class_to_records = collections.defaultdict(list)
    for call_record in self._method_calls:
      args = call_record.positional_arguments
      if not any(isinstance(a.data, abstract.Unknown) for a in args):
        # We don't need to record call signatures that don't involve
        # unknowns - there's nothing to solve for.
        continue
      cls = args[0].data.get_class()
      if isinstance(cls, abstract.PyTDClass):
        class_to_records[cls].append(call_record)
    classes = []
    for cls, call_records in class_to_records.items():
      full_name = cls.module + "." + cls.name if cls.module else cls.name
      classes.append(pytd.Class(
          name=escape.pack_partial(full_name),
          metaclass=None,
          parents=(pytd.NamedType("builtins.object"),),  # not used in solver
          methods=tuple(self._call_traces_to_function(call_records)),
          constants=(),
          classes=(),
          decorators=(),
          slots=None,
          template=(),
      ))
    return classes
def pytd_classes_for_namedtuple_instances(self):
return tuple(v.generate_ast() for v in self._generated_classes.values())
  def compute_types(self, defs):
    """Compute the final pytd AST for the analyzed module."""
    # Synthesized classes/functions for unknowns go into a separate
    # "unknowns" module, concatenated with the inferred definitions.
    classes = (tuple(self.pytd_classes_for_unknowns()) +
               tuple(self.pytd_classes_for_call_traces()) +
               self.pytd_classes_for_namedtuple_instances())
    functions = tuple(self.pytd_functions_for_call_traces())
    aliases = ()  # aliases are instead recorded as constants
    ty = pytd_utils.Concat(
        self.pytd_for_types(defs),
        pytd_utils.CreateModule("unknowns", classes=classes,
                                functions=functions, aliases=aliases))
    # Post-processing passes; order is significant.
    ty = ty.Visit(optimize.CombineReturnsAndExceptions())
    ty = ty.Visit(optimize.PullInMethodClasses())
    ty = ty.Visit(visitors.DefaceUnresolved(
        [ty, self.loader.concat_all()], escape.UNKNOWN))
    return ty.Visit(visitors.AdjustTypeParameters())
  def _check_return(self, node, actual, formal):
    """Check `actual` against the `formal` return type, logging any errors.

    Returns:
      True if the check passed (or error reporting is disabled).
    """
    if not self.options.report_errors:
      return True
    views = abstract_utils.get_views([actual], node)
    # Check for typevars in the return value first, since bad_matches
    # expects not to get any.
    bad = [view for view in views
           if actual in view and view[actual].data.formal]
    if not bad:
      bad = self.matcher(node).bad_matches(actual, formal)
    if bad:
      self.errorlog.bad_return_type(
          self.frames, node, formal, actual, bad)
    return not bad
def check_types(src, filename, errorlog, options, loader,
                deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
                maximum_depth=None, **kwargs):
  """Verify the Python code.

  Args:
    src: A string containing Python source code.
    filename: Filename of the program being checked.
    errorlog: Where error messages go. Instance of errors.ErrorLog.
    options: config.Options object.
    loader: A load_pytd.Loader instance to load PYI information.
    deep: If True, analyze all functions, even the ones not called by the
      main execution flow.
    init_maximum_depth: Depth of analysis during module loading.
    maximum_depth: Depth of the deep analysis; defaults to the regular or
      quick-mode limit depending on options.quick.
    **kwargs: Additional parameters passed on to the CallTracer.
  """
  tracer = CallTracer(errorlog=errorlog, options=options,
                      generate_unknowns=False, loader=loader, **kwargs)
  loc, defs = tracer.run_program(src, filename, init_maximum_depth)
  snapshotter = metrics.get_metric("memory", metrics.Snapshot)
  snapshotter.take_snapshot("analyze:check_types:tracer")
  if deep:
    if maximum_depth is None:
      maximum_depth = (
          QUICK_CHECK_MAXIMUM_DEPTH if options.quick else MAXIMUM_DEPTH)
    tracer.analyze(loc, defs, maximum_depth=maximum_depth)
  snapshotter.take_snapshot("analyze:check_types:post")
  _maybe_output_debug(options, tracer.program)
def infer_types(src, errorlog, options, loader,
                filename=None, deep=True, init_maximum_depth=INIT_MAXIMUM_DEPTH,
                show_library_calls=False, maximum_depth=None, tracer_vm=None,
                **kwargs):
  """Given Python source return its types.

  Args:
    src: A string containing Python source code.
    errorlog: Where error messages go. Instance of errors.ErrorLog.
    options: config.Options object
    loader: A load_pytd.Loader instance to load PYI information.
    filename: Filename of the program we're parsing.
    deep: If True, analyze all functions, even the ones not called by the main
      execution flow.
    init_maximum_depth: Depth of analysis during module loading.
    show_library_calls: If True, call traces are kept in the output.
    maximum_depth: Depth of the analysis. Default: unlimited.
    tracer_vm: An instance of CallTracer, in case the caller wants to
      instantiate and retain the vm used for type inference.
    **kwargs: Additional parameters to pass to vm.VirtualMachine

  Returns:
    A tuple of (ast: TypeDeclUnit, builtins: TypeDeclUnit)

  Raises:
    AssertionError: In case of a bad parameter combination.
  """
  # If the caller has passed in a vm, use that.
  if tracer_vm:
    assert isinstance(tracer_vm, CallTracer)
    tracer = tracer_vm
  else:
    tracer = CallTracer(errorlog=errorlog, options=options,
                        generate_unknowns=options.protocols,
                        store_all_calls=not deep, loader=loader, **kwargs)
  loc, defs = tracer.run_program(src, filename, init_maximum_depth)
  log.info("===Done running definitions and module-level code===")
  snapshotter = metrics.get_metric("memory", metrics.Snapshot)
  snapshotter.take_snapshot("analyze:infer_types:tracer")
  if deep:
    # Pick a depth limit appropriate for the requested mode.
    if maximum_depth is None:
      if not options.quick:
        maximum_depth = MAXIMUM_DEPTH
      elif options.analyze_annotated:
        # Since there's no point in analyzing annotated functions for inference,
        # the presence of this option means that the user wants checking, too.
        maximum_depth = QUICK_CHECK_MAXIMUM_DEPTH
      else:
        maximum_depth = QUICK_INFER_MAXIMUM_DEPTH
    tracer.exitpoint = tracer.analyze(loc, defs, maximum_depth)
  else:
    tracer.exitpoint = loc
  snapshotter.take_snapshot("analyze:infer_types:post")
  ast = tracer.compute_types(defs)
  ast = tracer.loader.resolve_ast(ast)
  if tracer.has_unknown_wildcard_imports or any(
      a in defs for a in abstract_utils.DYNAMIC_ATTRIBUTE_MARKERS):
    # Add a default __getattr__ so missing attributes don't error.
    if "__getattr__" not in ast:
      ast = pytd_utils.Concat(
          ast, builtins.GetDefaultAst(options.python_version))
  # If merged with other if statement, triggers a ValueError: Unresolved class
  # when attempts to load from the protocols file
  if options.protocols:
    protocols_pytd = tracer.loader.import_name("protocols")
  else:
    protocols_pytd = None
  builtins_pytd = tracer.loader.concat_all()
  # Insert type parameters, where appropriate
  ast = ast.Visit(visitors.CreateTypeParametersForSignatures())
  if options.protocols:
    log.info("=========== PyTD to solve =============\n%s",
             pytd_utils.Print(ast))
    ast = convert_structural.convert_pytd(ast, builtins_pytd, protocols_pytd)
  elif not show_library_calls:
    log.info("Solving is turned off. Discarding call traces.")
    # Rename remaining "~unknown" to "?"
    ast = ast.Visit(visitors.RemoveUnknownClasses())
    # Remove "~list" etc.:
    ast = convert_structural.extract_local(ast)
  _maybe_output_debug(options, tracer.program)
  return ast, builtins_pytd
def _maybe_output_debug(options, program):
  """Maybe emit debugging output."""
  if options.output_cfg or options.output_typegraph:
    dot = debug.program_to_dot(program, set([]), bool(options.output_cfg))
    svg_file = options.output_cfg or options.output_typegraph
    # Render the dot graph to SVG via graphviz. NOTE(review): assumes the
    # binary lives at /usr/bin/dot — confirm there's no fallback needed.
    with subprocess.Popen(
        ["/usr/bin/dot", "-T", "svg", "-o", svg_file],
        stdin=subprocess.PIPE, universal_newlines=True) as proc:
      (_, stderr) = proc.communicate(dot)
      if stderr:
        log.info("Failed to create %s: %s", svg_file, stderr)
  if options.output_debug:
    text = debug.program_to_text(program)
    # "-" sends the dump to the log; anything else is treated as a file path.
    if options.output_debug == "-":
      log.info("=========== Program Dump =============\n%s", text)
    else:
      with options.open_function(options.output_debug, "w") as fi:
        fi.write(text)
|
pytd_for_types
|
pat.rs
|
use crate::check::FnCtxt;
use rustc_ast as ast;
use rustc_data_structures::fx::FxHashMap;
use rustc_errors::{pluralize, struct_span_err, Applicability, DiagnosticBuilder};
use rustc_hir as hir;
use rustc_hir::def::{CtorKind, DefKind, Res};
use rustc_hir::pat_util::EnumerateAndAdjustIterator;
use rustc_hir::{HirId, Pat, PatKind};
use rustc_infer::infer;
use rustc_infer::infer::type_variable::{TypeVariableOrigin, TypeVariableOriginKind};
use rustc_middle::ty::subst::GenericArg;
use rustc_middle::ty::{self, Adt, BindingMode, Ty, TypeFoldable};
use rustc_span::hygiene::DesugaringKind;
use rustc_span::lev_distance::find_best_match_for_name;
use rustc_span::source_map::{Span, Spanned};
use rustc_span::symbol::Ident;
use rustc_span::{BytePos, DUMMY_SP};
use rustc_trait_selection::traits::{ObligationCause, Pattern};
use std::cmp;
use std::collections::hash_map::Entry::{Occupied, Vacant};
use super::report_unexpected_variant_res;
/// Extended note attached to errors where a pattern tries to dereference a
/// pointer to a trait object (which is unsized, so the deref is rejected).
const CANNOT_IMPLICITLY_DEREF_POINTER_TRAIT_OBJ: &str = "\
This error indicates that a pointer to a trait type cannot be implicitly dereferenced by a \
pattern. Every trait defines a type, but because the size of trait implementors isn't fixed, \
this type has no compile-time size. Therefore, all accesses to trait types must be through \
pointers. If you encounter this error you should try to avoid dereferencing the pointer.

You can read more about trait objects in the Trait Objects section of the Reference: \
https://doc.rust-lang.org/reference/types.html#trait-objects";
/// Information about the expected type at the top level of type checking a pattern.
///
/// ("Top level" here means the pattern passed to `check_pat_top`, which threads
/// this struct down through all nested subpatterns unchanged except for
/// `parent_pat`.)
///
/// **NOTE:** This is only for use by diagnostics. Do NOT use for type checking logic!
#[derive(Copy, Clone)]
struct TopInfo<'tcx> {
    /// The `expected` type at the top level of type checking a pattern.
    expected: Ty<'tcx>,
    /// Was the origin of the `span` from a scrutinee expression?
    ///
    /// Otherwise there is no scrutinee and it could be e.g. from the type of a formal parameter.
    origin_expr: bool,
    /// The span giving rise to the `expected` type, if one could be provided.
    ///
    /// If `origin_expr` is `true`, then this is the span of the scrutinee as in:
    ///
    /// - `match scrutinee { ... }`
    /// - `let _ = scrutinee;`
    ///
    /// This is used to point to add context in type errors.
    /// In the following example, `span` corresponds to the `a + b` expression:
    ///
    /// ```text
    /// error[E0308]: mismatched types
    ///   --> src/main.rs:L:C
    ///    |
    /// L  |    let temp: usize = match a + b {
    ///    |                            ----- this expression has type `usize`
    /// L  |         Ok(num) => num,
    ///    |         ^^^^^^^ expected `usize`, found enum `std::result::Result`
    ///    |
    ///    = note: expected type `usize`
    ///               found type `std::result::Result<_, _>`
    /// ```
    span: Option<Span>,
    /// This refers to the parent pattern. Used to provide extra diagnostic information on errors.
    /// ```text
    /// error[E0308]: mismatched types
    ///   --> $DIR/const-in-struct-pat.rs:8:17
    ///   |
    /// L | struct f;
    ///   | --------- unit struct defined here
    /// ...
    /// L |     let Thing { f } = t;
    ///   |                 ^
    ///   |                 |
    ///   |                 expected struct `std::string::String`, found struct `f`
    ///   |                 `f` is interpreted as a unit struct, not a new binding
    ///   |                 help: bind the struct field to a different name instead: `f: other_f`
    /// ```
    parent_pat: Option<&'tcx Pat<'tcx>>,
}
impl<'tcx> FnCtxt<'_, 'tcx> {
    /// Build the `ObligationCause` used for pattern type mismatches at `cause_span`.
    fn pattern_cause(&self, ti: TopInfo<'tcx>, cause_span: Span) -> ObligationCause<'tcx> {
        self.cause(
            cause_span,
            Pattern { span: ti.span, root_ty: ti.expected, origin_expr: ti.origin_expr },
        )
    }

    /// Demand that `expected` and `actual` are equal for this pattern,
    /// returning the diagnostic (if any) without emitting it.
    fn demand_eqtype_pat_diag(
        &self,
        cause_span: Span,
        expected: Ty<'tcx>,
        actual: Ty<'tcx>,
        ti: TopInfo<'tcx>,
    ) -> Option<DiagnosticBuilder<'tcx>> {
        let cause = self.pattern_cause(ti, cause_span);
        self.demand_eqtype_with_origin(&cause, expected, actual)
    }

    /// Demand that `expected` and `actual` are equal for this pattern,
    /// emitting any resulting error.
    fn demand_eqtype_pat(
        &self,
        cause_span: Span,
        expected: Ty<'tcx>,
        actual: Ty<'tcx>,
        ti: TopInfo<'tcx>,
    ) {
        match self.demand_eqtype_pat_diag(cause_span, expected, actual, ti) {
            Some(mut err) => {
                err.emit();
            }
            None => {}
        }
    }
}
const INITIAL_BM: BindingMode = BindingMode::BindByValue(hir::Mutability::Not);
/// Mode for adjusting the expected type and binding mode.
/// Selected per pattern form by `calc_adjust_mode` and applied by
/// `calc_default_binding_mode`.
enum AdjustMode {
    /// Peel off all immediate reference types.
    Peel,
    /// Reset binding mode to the initial mode.
    Reset,
    /// Pass on the input binding mode and expected type.
    Pass,
}
impl<'a, 'tcx> FnCtxt<'a, 'tcx> {
/// Type check the given top level pattern against the `expected` type.
///
/// If a `Some(span)` is provided and `origin_expr` holds,
/// then the `span` represents the scrutinee's span.
/// The scrutinee is found in e.g. `match scrutinee { ... }` and `let pat = scrutinee;`.
///
/// Otherwise, `Some(span)` represents the span of a type expression
/// which originated the `expected` type.
pub fn check_pat_top(
&self,
pat: &'tcx Pat<'tcx>,
expected: Ty<'tcx>,
span: Option<Span>,
origin_expr: bool,
) {
let info = TopInfo { expected, origin_expr, span, parent_pat: None };
self.check_pat(pat, expected, INITIAL_BM, info);
}
    /// Type check the given `pat` against the `expected` type
    /// with the provided `def_bm` (default binding mode).
    ///
    /// Outside of this module, `check_pat_top` should always be used.
    /// Conversely, inside this module, `check_pat_top` should never be used.
    #[instrument(skip(self, ti))]
    fn check_pat(
        &self,
        pat: &'tcx Pat<'tcx>,
        expected: Ty<'tcx>,
        def_bm: BindingMode,
        ti: TopInfo<'tcx>,
    ) {
        // Resolve path patterns up front; the resolution feeds both the
        // adjust-mode computation and the `PatKind::Path` arm below.
        let path_res = match &pat.kind {
            PatKind::Path(qpath) => Some(self.resolve_ty_and_res_ufcs(qpath, pat.hir_id, pat.span)),
            _ => None,
        };
        let adjust_mode = self.calc_adjust_mode(pat, path_res.map(|(res, ..)| res));
        let (expected, def_bm) = self.calc_default_binding_mode(pat, expected, def_bm, adjust_mode);
        // Dispatch on the pattern form; each helper returns the pattern's type.
        let ty = match pat.kind {
            PatKind::Wild => expected,
            PatKind::Lit(lt) => self.check_pat_lit(pat.span, lt, expected, ti),
            PatKind::Range(lhs, rhs, _) => self.check_pat_range(pat.span, lhs, rhs, expected, ti),
            PatKind::Binding(ba, var_id, _, sub) => {
                self.check_pat_ident(pat, ba, var_id, sub, expected, def_bm, ti)
            }
            PatKind::TupleStruct(ref qpath, subpats, ddpos) => {
                self.check_pat_tuple_struct(pat, qpath, subpats, ddpos, expected, def_bm, ti)
            }
            PatKind::Path(_) => self.check_pat_path(pat, path_res.unwrap(), expected, ti),
            PatKind::Struct(ref qpath, fields, etc) => {
                self.check_pat_struct(pat, qpath, fields, etc, expected, def_bm, ti)
            }
            PatKind::Or(pats) => {
                let parent_pat = Some(pat);
                for pat in pats {
                    self.check_pat(pat, expected, def_bm, TopInfo { parent_pat, ..ti });
                }
                expected
            }
            PatKind::Tuple(elements, ddpos) => {
                self.check_pat_tuple(pat.span, elements, ddpos, expected, def_bm, ti)
            }
            PatKind::Box(inner) => self.check_pat_box(pat.span, inner, expected, def_bm, ti),
            PatKind::Ref(inner, mutbl) => {
                self.check_pat_ref(pat, inner, mutbl, expected, def_bm, ti)
            }
            PatKind::Slice(before, slice, after) => {
                self.check_pat_slice(pat.span, before, slice, after, expected, def_bm, ti)
            }
        };
        self.write_ty(pat.hir_id, ty);

        // (note_1): In most of the cases where (note_1) is referenced
        // (literals and constants being the exception), we relate types
        // using strict equality, even though subtyping would be sufficient.
        // There are a few reasons for this, some of which are fairly subtle
        // and which cost me (nmatsakis) an hour or two debugging to remember,
        // so I thought I'd write them down this time.
        //
        // 1. There is no loss of expressiveness here, though it does
        // cause some inconvenience. What we are saying is that the type
        // of `x` becomes *exactly* what is expected. This can cause unnecessary
        // errors in some cases, such as this one:
        //
        // ```
        // fn foo<'x>(x: &'x i32) {
        //    let a = 1;
        //    let mut z = x;
        //    z = &a;
        // }
        // ```
        //
        // The reason we might get an error is that `z` might be
        // assigned a type like `&'x i32`, and then we would have
        // a problem when we try to assign `&a` to `z`, because
        // the lifetime of `&a` (i.e., the enclosing block) is
        // shorter than `'x`.
        //
        // HOWEVER, this code works fine. The reason is that the
        // expected type here is whatever type the user wrote, not
        // the initializer's type. In this case the user wrote
        // nothing, so we are going to create a type variable `Z`.
        // Then we will assign the type of the initializer (`&'x i32`)
        // as a subtype of `Z`: `&'x i32 <: Z`. And hence we
        // will instantiate `Z` as a type `&'0 i32` where `'0` is
        // a fresh region variable, with the constraint that `'x : '0`.
        // So basically we're all set.
        //
        // Note that there are two tests to check that this remains true
        // (`regions-reassign-{match,let}-bound-pointer.rs`).
        //
        // 2. Things go horribly wrong if we use subtype. The reason for
        // THIS is a fairly subtle case involving bound regions. See the
        // `givens` field in `region_constraints`, as well as the test
        // `regions-relate-bound-regions-on-closures-to-inference-variables.rs`,
        // for details. Short version is that we must sometimes detect
        // relationships between specific region variables and regions
        // bound in a closure signature, and that detection gets thrown
        // off when we substitute fresh region variables here to enable
        // subtyping.
    }
/// Compute the new expected type and default binding mode from the old ones
/// as well as the pattern form we are currently checking.
fn calc_default_binding_mode(
&self,
pat: &'tcx Pat<'tcx>,
expected: Ty<'tcx>,
def_bm: BindingMode,
adjust_mode: AdjustMode,
) -> (Ty<'tcx>, BindingMode) {
match adjust_mode {
AdjustMode::Pass => (expected, def_bm),
AdjustMode::Reset => (expected, INITIAL_BM),
AdjustMode::Peel => self.peel_off_references(pat, expected, def_bm),
}
}
    /// How should the binding mode and expected type be adjusted?
    ///
    /// When the pattern is a path pattern, `opt_path_res` must be `Some(res)`.
    fn calc_adjust_mode(&self, pat: &'tcx Pat<'tcx>, opt_path_res: Option<Res>) -> AdjustMode {
        // When we perform destructuring assignment, we disable default match bindings, which are
        // unintuitive in this context.
        if !pat.default_binding_modes {
            return AdjustMode::Reset;
        }
        match &pat.kind {
            // Type checking these product-like types successfully always require
            // that the expected type be of those types and not reference types.
            PatKind::Struct(..)
            | PatKind::TupleStruct(..)
            | PatKind::Tuple(..)
            | PatKind::Box(_)
            | PatKind::Range(..)
            | PatKind::Slice(..) => AdjustMode::Peel,
            // String and byte-string literals result in types `&str` and `&[u8]` respectively.
            // All other literals result in non-reference types.
            // As a result, we allow `if let 0 = &&0 {}` but not `if let "foo" = &&"foo" {}`.
            PatKind::Lit(lt) => match self.check_expr(lt).kind() {
                ty::Ref(..) => AdjustMode::Pass,
                _ => AdjustMode::Peel,
            },
            PatKind::Path(_) => match opt_path_res.unwrap() {
                // These constants can be of a reference type, e.g. `const X: &u8 = &0;`.
                // Peeling the reference types too early will cause type checking failures.
                // Although it would be possible to *also* peel the types of the constants too.
                Res::Def(DefKind::Const | DefKind::AssocConst, _) => AdjustMode::Pass,
                // In the `ValueNS`, we have `SelfCtor(..) | Ctor(_, Const), _)` remaining which
                // could successfully compile. The former being `Self` requires a unit struct.
                // In either case, and unlike constants, the pattern itself cannot be
                // a reference type wherefore peeling doesn't give up any expressivity.
                _ => AdjustMode::Peel,
            },
            // When encountering a `& mut? pat` pattern, reset to "by value".
            // This is so that `x` and `y` here are by value, as they appear to be:
            //
            // ```
            // match &(&22, &44) {
            //   (&x, &y) => ...
            // }
            // ```
            //
            // See issue #46688.
            PatKind::Ref(..) => AdjustMode::Reset,
            // A `_` pattern works with any expected type, so there's no need to do anything.
            PatKind::Wild
            // Bindings also work with whatever the expected type is,
            // and moreover if we peel references off, that will give us the wrong binding type.
            // Also, we can have a subpattern `binding @ pat`.
            // Each side of the `@` should be treated independently (like with OR-patterns).
            | PatKind::Binding(..)
            // An OR-pattern just propagates to each individual alternative.
            // This is maximally flexible, allowing e.g., `Some(mut x) | &Some(mut x)`.
            // In that example, `Some(mut x)` results in `Peel` whereas `&Some(mut x)` in `Reset`.
            | PatKind::Or(_) => AdjustMode::Pass,
        }
    }
    /// Peel off as many immediately nested `& mut?` from the expected type as possible
    /// and return the new expected type and binding default binding mode.
    /// The adjustments vector, if non-empty is stored in a table.
    fn peel_off_references(
        &self,
        pat: &'tcx Pat<'tcx>,
        expected: Ty<'tcx>,
        mut def_bm: BindingMode,
    ) -> (Ty<'tcx>, BindingMode) {
        let mut expected = self.resolve_vars_with_obligations(&expected);
        // Peel off as many `&` or `&mut` from the scrutinee type as possible. For example,
        // for `match &&&mut Some(5)` the loop runs three times, aborting when it reaches
        // the `Some(5)` which is not of type Ref.
        //
        // For each ampersand peeled off, update the binding mode and push the original
        // type into the adjustments vector.
        //
        // See the examples in `ui/match-defbm*.rs`.
        let mut pat_adjustments = vec![];
        while let ty::Ref(_, inner_ty, inner_mutability) = *expected.kind() {
            debug!("inspecting {:?}", expected);

            debug!("current discriminant is Ref, inserting implicit deref");
            // Preserve the reference type. We'll need it later during THIR lowering.
            pat_adjustments.push(expected);

            expected = inner_ty;
            def_bm = ty::BindByReference(match def_bm {
                // If default binding mode is by value, make it `ref` or `ref mut`
                // (depending on whether we observe `&` or `&mut`).
                ty::BindByValue(_) |
                // When `ref mut`, stay a `ref mut` (on `&mut`) or downgrade to `ref` (on `&`).
                ty::BindByReference(hir::Mutability::Mut) => inner_mutability,
                // Once a `ref`, always a `ref`.
                // This is because a `& &mut` cannot mutate the underlying value.
                ty::BindByReference(m @ hir::Mutability::Not) => m,
            });
        }

        if !pat_adjustments.is_empty() {
            debug!("default binding mode is now {:?}", def_bm);
            // Record the peeled reference types for later THIR lowering.
            self.inh
                .typeck_results
                .borrow_mut()
                .pat_adjustments_mut()
                .insert(pat.hir_id, pat_adjustments);
        }

        (expected, def_bm)
    }
    /// Type-checks a literal pattern (e.g. `42`, `"foo"`, `b"bytes"`) against `expected`.
    ///
    /// The literal's type was already computed earlier (when checking for a non-ref
    /// pattern), so it is read back via `node_ty` rather than recomputed. Byte-string
    /// literals are special-cased so they can also match slice scrutinees (`&[u8]`).
    fn check_pat_lit(
        &self,
        span: Span,
        lt: &hir::Expr<'tcx>,
        expected: Ty<'tcx>,
        ti: TopInfo<'tcx>,
    ) -> Ty<'tcx> {
        // We've already computed the type above (when checking for a non-ref pat),
        // so avoid computing it again.
        let ty = self.node_ty(lt.hir_id);
        // Byte string patterns behave the same way as array patterns
        // They can denote both statically and dynamically-sized byte arrays.
        let mut pat_ty = ty;
        if let hir::ExprKind::Lit(Spanned { node: ast::LitKind::ByteStr(_), .. }) = lt.kind {
            let expected = self.structurally_resolved_type(span, expected);
            if let ty::Ref(_, inner_ty, _) = expected.kind() {
                if matches!(inner_ty.kind(), ty::Slice(_)) {
                    let tcx = self.tcx;
                    trace!(?lt.hir_id.local_id, "polymorphic byte string lit");
                    // Record the decision so later phases treat this literal as a
                    // slice, and retype the pattern as `&'static [u8]`.
                    self.typeck_results
                        .borrow_mut()
                        .treat_byte_string_as_slice
                        .insert(lt.hir_id.local_id);
                    pat_ty = tcx.mk_imm_ref(tcx.lifetimes.re_static, tcx.mk_slice(tcx.types.u8));
                }
            }
        }
        // Somewhat surprising: in this case, the subtyping relation goes the
        // opposite way as the other cases. Actually what we really want is not
        // a subtyping relation at all but rather that there exists a LUB
        // (so that they can be compared). However, in practice, constants are
        // always scalars or strings. For scalars subtyping is irrelevant,
        // and for strings the type is `&'static str`, so if we say that
        //
        //     &'static str <: expected
        //
        // then that's equivalent to there existing a LUB.
        let cause = self.pattern_cause(ti, span);
        if let Some(mut err) = self.demand_suptype_with_origin(&cause, expected, pat_ty) {
            err.emit_unless(
                ti.span
                    .filter(|&s| {
                        // In the case of `if`- and `while`-expressions we've already checked
                        // that `scrutinee: bool`. We know that the pattern is `true`,
                        // so an error here would be a duplicate and from the wrong POV.
                        s.is_desugaring(DesugaringKind::CondTemporary)
                    })
                    .is_some(),
            );
        }
        pat_ty
    }
    /// Type-checks a range pattern `lo..=hi` (either endpoint may be absent for
    /// half-open ranges) against `expected`.
    ///
    /// Each present endpoint must be of numeric or `char` type; otherwise an
    /// error is emitted via `emit_err_pat_range` and an error type is returned.
    fn check_pat_range(
        &self,
        span: Span,
        lhs: Option<&'tcx hir::Expr<'tcx>>,
        rhs: Option<&'tcx hir::Expr<'tcx>>,
        expected: Ty<'tcx>,
        ti: TopInfo<'tcx>,
    ) -> Ty<'tcx> {
        // For each present endpoint, produce `(its type, (failed?, type, span))`.
        let calc_side = |opt_expr: Option<&'tcx hir::Expr<'tcx>>| match opt_expr {
            None => (None, None),
            Some(expr) => {
                let ty = self.check_expr(expr);
                // Check that the end-point is of numeric or char type.
                let fail = !(ty.is_numeric() || ty.is_char() || ty.references_error());
                (Some(ty), Some((fail, ty, expr.span)))
            }
        };
        let (lhs_ty, lhs) = calc_side(lhs);
        let (rhs_ty, rhs) = calc_side(rhs);
        if let (Some((true, ..)), _) | (_, Some((true, ..))) = (lhs, rhs) {
            // There exists a side that didn't meet our criteria that the end-point
            // be of a numeric or char type, as checked in `calc_side` above.
            self.emit_err_pat_range(span, lhs, rhs);
            return self.tcx.ty_error();
        }
        // Now that we know the types can be unified we find the unified type
        // and use it to type the entire expression.
        let common_type = self.resolve_vars_if_possible(lhs_ty.or(rhs_ty).unwrap_or(expected));
        // Subtyping doesn't matter here, as the value is some kind of scalar.
        let demand_eqtype = |x, y| {
            if let Some((_, x_ty, x_span)) = x {
                if let Some(mut err) = self.demand_eqtype_pat_diag(x_span, expected, x_ty, ti) {
                    if let Some((_, y_ty, y_span)) = y {
                        // Mention the other endpoint's type to clarify the mismatch.
                        self.endpoint_has_type(&mut err, y_span, y_ty);
                    }
                    err.emit();
                };
            }
        };
        demand_eqtype(lhs, rhs);
        demand_eqtype(rhs, lhs);
        common_type
    }
fn endpoint_has_type(&self, err: &mut DiagnosticBuilder<'_>, span: Span, ty: Ty<'_>) {
if !ty.references_error() {
err.span_label(span, &format!("this is of type `{}`", ty));
}
}
    /// Emits E0029: a range-pattern endpoint is not of `char` or numeric type.
    ///
    /// Each endpoint tuple is `(failed, type, span)` as built by `check_pat_range`;
    /// the primary span and the labels point at whichever side(s) actually failed.
    fn emit_err_pat_range(
        &self,
        span: Span,
        lhs: Option<(bool, Ty<'tcx>, Span)>,
        rhs: Option<(bool, Ty<'tcx>, Span)>,
    ) {
        // Narrow the primary span to the failing side when only one side failed.
        let span = match (lhs, rhs) {
            (Some((true, ..)), Some((true, ..))) => span,
            (Some((true, _, sp)), _) => sp,
            (_, Some((true, _, sp))) => sp,
            _ => span_bug!(span, "emit_err_pat_range: no side failed or exists but still error?"),
        };
        let mut err = struct_span_err!(
            self.tcx.sess,
            span,
            E0029,
            "only `char` and numeric types are allowed in range patterns"
        );
        let msg = |ty| format!("this is of type `{}` but it should be `char` or numeric", ty);
        // Label one failing endpoint and, when the other endpoint exists,
        // mention its (valid) type for contrast.
        let mut one_side_err = |first_span, first_ty, second: Option<(bool, Ty<'tcx>, Span)>| {
            err.span_label(first_span, &msg(first_ty));
            if let Some((_, ty, sp)) = second {
                self.endpoint_has_type(&mut err, sp, ty);
            }
        };
        match (lhs, rhs) {
            (Some((true, lhs_ty, lhs_sp)), Some((true, rhs_ty, rhs_sp))) => {
                err.span_label(lhs_sp, &msg(lhs_ty));
                err.span_label(rhs_sp, &msg(rhs_ty));
            }
            (Some((true, lhs_ty, lhs_sp)), rhs) => one_side_err(lhs_sp, lhs_ty, rhs),
            (lhs, Some((true, rhs_ty, rhs_sp))) => one_side_err(rhs_sp, rhs_ty, lhs),
            _ => span_bug!(span, "Impossible, verified above."),
        }
        if self.tcx.sess.teach(&err.get_code().unwrap()) {
            err.note(
                "In a match expression, only numbers and characters can be matched \
                 against a range. This is because the compiler checks that the range \
                 is non-empty at compile-time, and is unable to evaluate arbitrary \
                 comparison functions. If you want to capture values of an orderable \
                 type between two end-points, you can use a guard.",
            );
        }
        err.emit();
    }
    /// Type-checks a binding pattern (`x`, `ref x`, `mut x`, possibly with an
    /// `@ subpattern`).
    ///
    /// Resolves the effective binding mode (an explicit annotation wins over the
    /// inherited default `def_bm`), records it in the typeck side tables, and
    /// equates the binding's declared type with `expected` (or `&M expected` for
    /// by-reference bindings). Any `@` subpattern is checked recursively.
    fn check_pat_ident(
        &self,
        pat: &'tcx Pat<'tcx>,
        ba: hir::BindingAnnotation,
        var_id: HirId,
        sub: Option<&'tcx Pat<'tcx>>,
        expected: Ty<'tcx>,
        def_bm: BindingMode,
        ti: TopInfo<'tcx>,
    ) -> Ty<'tcx> {
        // Determine the binding mode...
        let bm = match ba {
            hir::BindingAnnotation::Unannotated => def_bm,
            _ => BindingMode::convert(ba),
        };
        // ...and store it in a side table:
        self.inh.typeck_results.borrow_mut().pat_binding_modes_mut().insert(pat.hir_id, bm);
        debug!("check_pat_ident: pat.hir_id={:?} bm={:?}", pat.hir_id, bm);
        let local_ty = self.local_ty(pat.span, pat.hir_id).decl_ty;
        let eq_ty = match bm {
            ty::BindByReference(mutbl) => {
                // If the binding is like `ref x | ref mut x`,
                // then `x` is assigned a value of type `&M T` where M is the
                // mutability and T is the expected type.
                //
                // `x` is assigned a value of type `&M T`, hence `&M T <: typeof(x)`
                // is required. However, we use equality, which is stronger.
                // See (note_1) for an explanation.
                self.new_ref_ty(pat.span, mutbl, expected)
            }
            // Otherwise, the type of x is the expected type `T`.
            ty::BindByValue(_) => {
                // As above, `T <: typeof(x)` is required, but we use equality, see (note_1).
                expected
            }
        };
        self.demand_eqtype_pat(pat.span, eq_ty, local_ty, ti);
        // If there are multiple arms, make sure they all agree on
        // what the type of the binding `x` ought to be.
        if var_id != pat.hir_id {
            self.check_binding_alt_eq_ty(pat.span, var_id, local_ty, ti);
        }
        if let Some(p) = sub {
            self.check_pat(&p, expected, def_bm, TopInfo { parent_pat: Some(&pat), ..ti });
        }
        local_ty
    }
    /// Enforces that a binding appearing in several alternatives (or-patterns or
    /// multiple match arms) has the same type everywhere; `var_id` identifies
    /// the canonical first occurrence. Emits a labeled mismatch error otherwise.
    fn check_binding_alt_eq_ty(&self, span: Span, var_id: HirId, ty: Ty<'tcx>, ti: TopInfo<'tcx>) {
        let var_ty = self.local_ty(span, var_id).decl_ty;
        if let Some(mut err) = self.demand_eqtype_pat_diag(span, var_ty, ty, ti) {
            let hir = self.tcx.hir();
            let var_ty = self.resolve_vars_with_obligations(var_ty);
            let msg = format!("first introduced with type `{}` here", var_ty);
            err.span_label(hir.span(var_id), msg);
            // Tailor the wording: inside a `match`, the alternatives are
            // or-pattern alternatives of the same arm.
            let in_match = hir.parent_iter(var_id).any(|(_, n)| {
                matches!(
                    n,
                    hir::Node::Expr(hir::Expr {
                        kind: hir::ExprKind::Match(.., hir::MatchSource::Normal),
                        ..
                    })
                )
            });
            let pre = if in_match { "in the same arm, " } else { "" };
            err.note(&format!("{}a binding must have the same type in all alternatives", pre));
            err.emit();
        }
    }
    /// When a `&pat` pattern with a binding inside fails to match the expected
    /// type, suggests a fix tailored to where the pattern appears: annotate the
    /// parameter for fn params, or drop the explicit `&` inside match arms and
    /// nested patterns (match ergonomics usually make it redundant there).
    fn borrow_pat_suggestion(
        &self,
        err: &mut DiagnosticBuilder<'_>,
        pat: &Pat<'_>,
        inner: &Pat<'_>,
        expected: Ty<'tcx>,
    ) {
        let tcx = self.tcx;
        if let PatKind::Binding(..) = inner.kind {
            let binding_parent_id = tcx.hir().get_parent_node(pat.hir_id);
            let binding_parent = tcx.hir().get(binding_parent_id);
            debug!("inner {:?} pat {:?} parent {:?}", inner, pat, binding_parent);
            match binding_parent {
                hir::Node::Param(hir::Param { span, .. }) => {
                    if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(inner.span) {
                        err.span_suggestion(
                            *span,
                            &format!("did you mean `{}`", snippet),
                            format!(" &{}", expected),
                            Applicability::MachineApplicable,
                        );
                    }
                }
                hir::Node::Arm(_) | hir::Node::Pat(_) => {
                    // rely on match ergonomics or it might be nested `&&pat`
                    if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(inner.span) {
                        err.span_suggestion(
                            pat.span,
                            "you can probably remove the explicit borrow",
                            snippet,
                            Applicability::MaybeIncorrect,
                        );
                    }
                }
                _ => {} // don't provide suggestions in other cases #55175
            }
        }
    }
pub fn check_dereferenceable(&self, span: Span, expected: Ty<'tcx>, inner: &Pat<'_>) -> bool {
if let PatKind::Binding(..) = inner.kind {
if let Some(mt) = self.shallow_resolve(expected).builtin_deref(true) {
if let ty::Dynamic(..) = mt.ty.kind() {
// This is "x = SomeTrait" being reduced from
// "let &x = &SomeTrait" or "let box x = Box<SomeTrait>", an error.
let type_str = self.ty_to_string(expected);
let mut err = struct_span_err!(
self.tcx.sess,
span,
E0033,
"type `{}` cannot be dereferenced",
type_str
);
err.span_label(span, format!("type `{}` cannot be dereferenced", type_str));
if self.tcx.sess.teach(&err.get_code().unwrap()) {
err.note(CANNOT_IMPLICITLY_DEREF_POINTER_TRAIT_OBJ);
}
err.emit();
return false;
}
}
}
true
}
    /// Type-checks a struct pattern `Path { field: pat, ... }` (with optional `..`)
    /// against `expected`.
    ///
    /// If path resolution fails, every field subpattern is still checked against
    /// the error type so their bindings get types and follow-on errors are avoided.
    fn check_pat_struct(
        &self,
        pat: &'tcx Pat<'tcx>,
        qpath: &hir::QPath<'_>,
        fields: &'tcx [hir::FieldPat<'tcx>],
        etc: bool,
        expected: Ty<'tcx>,
        def_bm: BindingMode,
        ti: TopInfo<'tcx>,
    ) -> Ty<'tcx> {
        // Resolve the path and check the definition for errors.
        let (variant, pat_ty) = if let Some(variant_ty) = self.check_struct_path(qpath, pat.hir_id)
        {
            variant_ty
        } else {
            let err = self.tcx.ty_error();
            for field in fields {
                let ti = TopInfo { parent_pat: Some(&pat), ..ti };
                self.check_pat(&field.pat, err, def_bm, ti);
            }
            return err;
        };
        // Type-check the path.
        self.demand_eqtype_pat(pat.span, expected, pat_ty, ti);
        // Type-check subpatterns.
        if self.check_struct_pat_fields(pat_ty, &pat, variant, fields, etc, def_bm, ti) {
            pat_ty
        } else {
            self.tcx.ty_error()
        }
    }
    /// Type-checks a path pattern: a unit struct/variant, `const`, associated
    /// const, or const parameter. Fn-like constructors and associated fns are
    /// reported via `report_unexpected_variant_res`; any other resolution
    /// reaching here is a compiler bug.
    fn check_pat_path(
        &self,
        pat: &Pat<'_>,
        path_resolution: (Res, Option<Ty<'tcx>>, &'b [hir::PathSegment<'b>]),
        expected: Ty<'tcx>,
        ti: TopInfo<'tcx>,
    ) -> Ty<'tcx> {
        let tcx = self.tcx;
        // We have already resolved the path.
        let (res, opt_ty, segments) = path_resolution;
        match res {
            Res::Err => {
                self.set_tainted_by_errors();
                return tcx.ty_error();
            }
            Res::Def(DefKind::AssocFn | DefKind::Ctor(_, CtorKind::Fictive | CtorKind::Fn), _) => {
                report_unexpected_variant_res(tcx, res, pat.span);
                return tcx.ty_error();
            }
            Res::SelfCtor(..)
            | Res::Def(
                DefKind::Ctor(_, CtorKind::Const)
                | DefKind::Const
                | DefKind::AssocConst
                | DefKind::ConstParam,
                _,
            ) => {} // OK
            _ => bug!("unexpected pattern resolution: {:?}", res),
        }
        // Type-check the path.
        let (pat_ty, pat_res) =
            self.instantiate_value_path(segments, opt_ty, res, pat.span, pat.hir_id);
        if let Some(err) =
            self.demand_suptype_with_origin(&self.pattern_cause(ti, pat.span), expected, pat_ty)
        {
            self.emit_bad_pat_path(err, pat.span, res, pat_res, pat_ty, segments, ti.parent_pat);
        }
        pat_ty
    }
    /// If `ident` resolved to a local `const` whose body is a range literal
    /// (e.g. `const FOO: ... = 0..10;` used as a pattern), suggests moving the
    /// range directly into the match block. Returns `true` iff a suggestion
    /// was emitted.
    fn maybe_suggest_range_literal(
        &self,
        e: &mut DiagnosticBuilder<'_>,
        opt_def_id: Option<hir::def_id::DefId>,
        ident: Ident,
    ) -> bool {
        // Walk: def-id -> local const item -> its body expression; bail out
        // (returning false) at any step that doesn't match.
        match opt_def_id {
            Some(def_id) => match self.tcx.hir().get_if_local(def_id) {
                Some(hir::Node::Item(hir::Item {
                    kind: hir::ItemKind::Const(_, body_id), ..
                })) => match self.tcx.hir().get(body_id.hir_id) {
                    hir::Node::Expr(expr) => {
                        if hir::is_range_literal(expr) {
                            let span = self.tcx.hir().span(body_id.hir_id);
                            if let Ok(snip) = self.tcx.sess.source_map().span_to_snippet(span) {
                                e.span_suggestion_verbose(
                                    ident.span,
                                    "you may want to move the range into the match block",
                                    snip,
                                    Applicability::MachineApplicable,
                                );
                                return true;
                            }
                        }
                    }
                    _ => (),
                },
                _ => (),
            },
            _ => (),
        }
        false
    }
    /// Emits the diagnostic for a path pattern whose type did not match the
    /// scrutinee (built by `check_pat_path`), enriching it with a suggestion:
    /// rename a struct-field binding, inline a range constant into the match,
    /// or introduce a fresh binding name — depending on what the path resolved to.
    fn emit_bad_pat_path(
        &self,
        mut e: DiagnosticBuilder<'_>,
        pat_span: Span,
        res: Res,
        pat_res: Res,
        pat_ty: Ty<'tcx>,
        segments: &'b [hir::PathSegment<'b>],
        parent_pat: Option<&Pat<'_>>,
    ) {
        if let Some(span) = self.tcx.hir().res_span(pat_res) {
            e.span_label(span, &format!("{} defined here", res.descr()));
            // Suggestions only make sense for a single-segment path that could
            // plausibly have been intended as a fresh binding.
            if let [hir::PathSegment { ident, .. }] = &*segments {
                e.span_label(
                    pat_span,
                    &format!(
                        "`{}` is interpreted as {} {}, not a new binding",
                        ident,
                        res.article(),
                        res.descr(),
                    ),
                );
                match parent_pat {
                    Some(Pat { kind: hir::PatKind::Struct(..), .. }) => {
                        e.span_suggestion_verbose(
                            ident.span.shrink_to_hi(),
                            "bind the struct field to a different name instead",
                            format!(": other_{}", ident.as_str().to_lowercase()),
                            Applicability::HasPlaceholders,
                        );
                    }
                    _ => {
                        // Constants of one of the standard range types get a
                        // range-pattern note/suggestion instead of a rename.
                        let (type_def_id, item_def_id) = match pat_ty.kind() {
                            Adt(def, _) => match res {
                                Res::Def(DefKind::Const, def_id) => (Some(def.did), Some(def_id)),
                                _ => (None, None),
                            },
                            _ => (None, None),
                        };
                        let ranges = &[
                            self.tcx.lang_items().range_struct(),
                            self.tcx.lang_items().range_from_struct(),
                            self.tcx.lang_items().range_to_struct(),
                            self.tcx.lang_items().range_full_struct(),
                            self.tcx.lang_items().range_inclusive_struct(),
                            self.tcx.lang_items().range_to_inclusive_struct(),
                        ];
                        if type_def_id != None && ranges.contains(&type_def_id) {
                            if !self.maybe_suggest_range_literal(&mut e, item_def_id, *ident) {
                                let msg = "constants only support matching by type, \
                                    if you meant to match against a range of values, \
                                    consider using a range pattern like `min ..= max` in the match block";
                                e.note(msg);
                            }
                        } else {
                            let msg = "introduce a new binding instead";
                            let sugg = format!("other_{}", ident.as_str().to_lowercase());
                            e.span_suggestion(
                                ident.span,
                                msg,
                                sugg,
                                Applicability::HasPlaceholders,
                            );
                        }
                    }
                };
            }
        }
        e.emit();
    }
    /// Type-checks a tuple-struct/tuple-variant pattern `Path(p_0, ..., p_n)`
    /// (with an optional `..` rest pattern at position `ddpos`).
    ///
    /// Resolves the path, verifies it names a tuple constructor, equates the
    /// constructed type with `expected`, then checks each subpattern against its
    /// field type. Arity mismatches are reported via `e0023`. On any failure the
    /// subpatterns are still checked (against the error type) to avoid cascades.
    fn check_pat_tuple_struct(
        &self,
        pat: &'tcx Pat<'tcx>,
        qpath: &hir::QPath<'_>,
        subpats: &'tcx [&'tcx Pat<'tcx>],
        ddpos: Option<usize>,
        expected: Ty<'tcx>,
        def_bm: BindingMode,
        ti: TopInfo<'tcx>,
    ) -> Ty<'tcx> {
        let tcx = self.tcx;
        // Error path: still walk every subpattern so bindings get (error) types.
        let on_error = || {
            let parent_pat = Some(pat);
            for pat in subpats {
                self.check_pat(&pat, tcx.ty_error(), def_bm, TopInfo { parent_pat, ..ti });
            }
        };
        let report_unexpected_res = |res: Res| {
            let sm = tcx.sess.source_map();
            let path_str = sm
                .span_to_snippet(sm.span_until_char(pat.span, '('))
                .map_or(String::new(), |s| format!(" `{}`", s.trim_end()));
            let msg = format!(
                "expected tuple struct or tuple variant, found {}{}",
                res.descr(),
                path_str
            );
            let mut err = struct_span_err!(tcx.sess, pat.span, E0164, "{}", msg);
            match res {
                Res::Def(DefKind::Fn | DefKind::AssocFn, _) => {
                    err.span_label(pat.span, "`fn` calls are not allowed in patterns");
                    err.help(
                        "for more information, visit \
                         https://doc.rust-lang.org/book/ch18-00-patterns.html",
                    );
                }
                _ => {
                    err.span_label(pat.span, "not a tuple variant or struct");
                }
            }
            err.emit();
            on_error();
        };
        // Resolve the path and check the definition for errors.
        let (res, opt_ty, segments) = self.resolve_ty_and_res_ufcs(qpath, pat.hir_id, pat.span);
        if res == Res::Err {
            self.set_tainted_by_errors();
            on_error();
            return self.tcx.ty_error();
        }
        // Type-check the path.
        let (pat_ty, res) =
            self.instantiate_value_path(segments, opt_ty, res, pat.span, pat.hir_id);
        if !pat_ty.is_fn() {
            report_unexpected_res(res);
            return tcx.ty_error();
        }
        let variant = match res {
            Res::Err => {
                self.set_tainted_by_errors();
                on_error();
                return tcx.ty_error();
            }
            Res::Def(DefKind::AssocConst | DefKind::AssocFn, _) => {
                report_unexpected_res(res);
                return tcx.ty_error();
            }
            Res::Def(DefKind::Ctor(_, CtorKind::Fn), _) => tcx.expect_variant_res(res),
            _ => bug!("unexpected pattern resolution: {:?}", res),
        };
        // Replace constructor type with constructed type for tuple struct patterns.
        let pat_ty = pat_ty.fn_sig(tcx).output();
        let pat_ty = pat_ty.no_bound_vars().expect("expected fn type");
        // Type-check the tuple struct pattern against the expected type.
        let diag = self.demand_eqtype_pat_diag(pat.span, expected, pat_ty, ti);
        let had_err = if let Some(mut err) = diag {
            err.emit();
            true
        } else {
            false
        };
        // Type-check subpatterns.
        if subpats.len() == variant.fields.len()
            || subpats.len() < variant.fields.len() && ddpos.is_some()
        {
            let substs = match pat_ty.kind() {
                ty::Adt(_, substs) => substs,
                _ => bug!("unexpected pattern type {:?}", pat_ty),
            };
            for (i, subpat) in subpats.iter().enumerate_and_adjust(variant.fields.len(), ddpos) {
                let field_ty = self.field_ty(subpat.span, &variant.fields[i], substs);
                self.check_pat(&subpat, field_ty, def_bm, TopInfo { parent_pat: Some(&pat), ..ti });
                self.tcx.check_stability(variant.fields[i].did, Some(pat.hir_id), subpat.span);
            }
        } else {
            // Pattern has wrong number of fields.
            self.e0023(pat.span, res, qpath, subpats, &variant.fields, expected, had_err);
            on_error();
            return tcx.ty_error();
        }
        pat_ty
    }
    /// Emits E0023: a tuple pattern has a different number of fields than the
    /// tuple struct/variant it matches.
    ///
    /// Also suggests likely fixes: wrapping the subpatterns in parentheses when
    /// the single field is itself a tuple, `_` placeholders for missing fields,
    /// or `..` to ignore the rest.
    fn e0023(
        &self,
        pat_span: Span,
        res: Res,
        qpath: &hir::QPath<'_>,
        subpats: &'tcx [&'tcx Pat<'tcx>],
        fields: &'tcx [ty::FieldDef],
        expected: Ty<'tcx>,
        had_err: bool,
    ) {
        let subpats_ending = pluralize!(subpats.len());
        let fields_ending = pluralize!(fields.len());
        let res_span = self.tcx.def_span(res.def_id());
        let mut err = struct_span_err!(
            self.tcx.sess,
            pat_span,
            E0023,
            "this pattern has {} field{}, but the corresponding {} has {} field{}",
            subpats.len(),
            subpats_ending,
            res.descr(),
            fields.len(),
            fields_ending,
        );
        err.span_label(
            pat_span,
            format!("expected {} field{}, found {}", fields.len(), fields_ending, subpats.len(),),
        )
        .span_label(res_span, format!("{} defined here", res.descr()));
        // Identify the case `Some(x, y)` where the expected type is e.g. `Option<(T, U)>`.
        // More generally, the expected type wants a tuple variant with one field of an
        // N-arity-tuple, e.g., `V_i((p_0, .., p_N))`. Meanwhile, the user supplied a pattern
        // with the subpatterns directly in the tuple variant pattern, e.g., `V_i(p_0, .., p_N)`.
        let missing_parentheses = match (&expected.kind(), fields, had_err) {
            // #67037: only do this if we could successfully type-check the expected type against
            // the tuple struct pattern. Otherwise the substs could get out of range on e.g.,
            // `let P() = U;` where `P != U` with `struct P<T>(T);`.
            (ty::Adt(_, substs), [field], false) => {
                let field_ty = self.field_ty(pat_span, field, substs);
                match field_ty.kind() {
                    ty::Tuple(_) => field_ty.tuple_fields().count() == subpats.len(),
                    _ => false,
                }
            }
            _ => false,
        };
        if missing_parentheses {
            let (left, right) = match subpats {
                // This is the zero case; we aim to get the "hi" part of the `QPath`'s
                // span as the "lo" and then the "hi" part of the pattern's span as the "hi".
                // This looks like:
                //
                // help: missing parentheses
                //   |
                // L |     let A(()) = A(());
                //   |          ^  ^
                [] => (qpath.span().shrink_to_hi(), pat_span),
                // Easy case. Just take the "lo" of the first sub-pattern and the "hi" of the
                // last sub-pattern. In the case of `A(x)` the first and last may coincide.
                // This looks like:
                //
                // help: missing parentheses
                //   |
                // L |     let A((x, y)) = A((1, 2));
                //   |           ^    ^
                [first, ..] => (first.span.shrink_to_lo(), subpats.last().unwrap().span),
            };
            err.multipart_suggestion(
                "missing parentheses",
                vec![(left, "(".to_string()), (right.shrink_to_hi(), ")".to_string())],
                Applicability::MachineApplicable,
            );
        } else if fields.len() > subpats.len() {
            // Place the wildcard suggestion just before the closing `)`.
            let after_fields_span = if pat_span == DUMMY_SP {
                pat_span
            } else {
                pat_span.with_hi(pat_span.hi() - BytePos(1)).shrink_to_hi()
            };
            let all_fields_span = match subpats {
                [] => after_fields_span,
                [field] => field.span,
                [first, .., last] => first.span.to(last.span),
            };
            // Check if all the fields in the pattern are wildcards.
            let all_wildcards = subpats.iter().all(|pat| matches!(pat.kind, PatKind::Wild));
            let mut wildcard_sugg = vec!["_"; fields.len() - subpats.len()].join(", ");
            if !subpats.is_empty() {
                wildcard_sugg = String::from(", ") + &wildcard_sugg;
            }
            err.span_suggestion_verbose(
                after_fields_span,
                "use `_` to explicitly ignore each field",
                wildcard_sugg,
                Applicability::MaybeIncorrect,
            );
            // Only suggest `..` if more than one field is missing
            // or the pattern consists of all wildcards.
            if fields.len() - subpats.len() > 1 || all_wildcards {
                if subpats.is_empty() || all_wildcards {
                    err.span_suggestion_verbose(
                        all_fields_span,
                        "use `..` to ignore all fields",
                        String::from(".."),
                        Applicability::MaybeIncorrect,
                    );
                } else {
                    err.span_suggestion_verbose(
                        after_fields_span,
                        "use `..` to ignore the rest of the fields",
                        String::from(", .."),
                        Applicability::MaybeIncorrect,
                    );
                }
            }
        }
        err.emit();
    }
fn check_pat_tuple(
&self,
span: Span,
elements: &'tcx [&'tcx Pat<'tcx>],
ddpos: Option<usize>,
expected: Ty<'tcx>,
def_bm: BindingMode,
ti: TopInfo<'tcx>,
) -> Ty<'tcx> {
let tcx = self.tcx;
let mut expected_len = elements.len();
if ddpos.is_some()
|
let max_len = cmp::max(expected_len, elements.len());
let element_tys_iter = (0..max_len).map(|_| {
GenericArg::from(self.next_ty_var(
// FIXME: `MiscVariable` for now -- obtaining the span and name information
// from all tuple elements isn't trivial.
TypeVariableOrigin { kind: TypeVariableOriginKind::TypeInference, span },
))
});
let element_tys = tcx.mk_substs(element_tys_iter);
let pat_ty = tcx.mk_ty(ty::Tuple(element_tys));
if let Some(mut err) = self.demand_eqtype_pat_diag(span, expected, pat_ty, ti) {
err.emit();
// Walk subpatterns with an expected type of `err` in this case to silence
// further errors being emitted when using the bindings. #50333
let element_tys_iter = (0..max_len).map(|_| tcx.ty_error());
for (_, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) {
self.check_pat(elem, &tcx.ty_error(), def_bm, ti);
}
tcx.mk_tup(element_tys_iter)
} else {
for (i, elem) in elements.iter().enumerate_and_adjust(max_len, ddpos) {
self.check_pat(elem, &element_tys[i].expect_ty(), def_bm, ti);
}
pat_ty
}
}
    /// Checks the fields of a struct pattern against `variant`, reporting
    /// duplicate, nonexistent, and unmentioned fields (and the union/`..`
    /// special rules). Returns `true` iff no field-related errors were found,
    /// in which case the caller keeps the ADT type.
    fn check_struct_pat_fields(
        &self,
        adt_ty: Ty<'tcx>,
        pat: &'tcx Pat<'tcx>,
        variant: &'tcx ty::VariantDef,
        fields: &'tcx [hir::FieldPat<'tcx>],
        etc: bool,
        def_bm: BindingMode,
        ti: TopInfo<'tcx>,
    ) -> bool {
        let tcx = self.tcx;
        let (substs, adt) = match adt_ty.kind() {
            ty::Adt(adt, substs) => (substs, adt),
            _ => span_bug!(pat.span, "struct pattern is not an ADT"),
        };
        // Index the struct fields' types.
        let field_map = variant
            .fields
            .iter()
            .enumerate()
            .map(|(i, field)| (field.ident.normalize_to_macros_2_0(), (i, field)))
            .collect::<FxHashMap<_, _>>();
        // Keep track of which fields have already appeared in the pattern.
        let mut used_fields = FxHashMap::default();
        let mut no_field_errors = true;
        let mut inexistent_fields = vec![];
        // Typecheck each field.
        for field in fields {
            let span = field.span;
            let ident = tcx.adjust_ident(field.ident, variant.def_id);
            let field_ty = match used_fields.entry(ident) {
                Occupied(occupied) => {
                    // Field bound twice in the same pattern (E0025).
                    self.error_field_already_bound(span, field.ident, *occupied.get())
;
                    no_field_errors = false;
                    tcx.ty_error()
                }
                Vacant(vacant) => {
                    vacant.insert(span);
                    field_map
                        .get(&ident)
                        .map(|(i, f)| {
                            self.write_field_index(field.hir_id, *i);
                            self.tcx.check_stability(f.did, Some(pat.hir_id), span);
                            self.field_ty(span, f, substs)
                        })
                        .unwrap_or_else(|| {
                            // No such field on this variant; error reported below.
                            inexistent_fields.push(field.ident);
                            no_field_errors = false;
                            tcx.ty_error()
                        })
                }
            };
            self.check_pat(&field.pat, field_ty, def_bm, TopInfo { parent_pat: Some(&pat), ..ti });
        }
        let mut unmentioned_fields = variant
            .fields
            .iter()
            .map(|field| (field, field.ident.normalize_to_macros_2_0()))
            .filter(|(_, ident)| !used_fields.contains_key(&ident))
            .collect::<Vec<_>>();
        let inexistent_fields_err = if !(inexistent_fields.is_empty() || variant.is_recovered()) {
            Some(self.error_inexistent_fields(
                adt.variant_descr(),
                &inexistent_fields,
                &mut unmentioned_fields,
                variant,
            ))
        } else {
            None
        };
        // Require `..` if struct has non_exhaustive attribute.
        if variant.is_field_list_non_exhaustive() && !adt.did.is_local() && !etc {
            self.error_foreign_non_exhaustive_spat(pat, adt.variant_descr(), fields.is_empty());
        }
        let mut unmentioned_err = None;
        // Report an error if an incorrect number of fields was specified.
        if adt.is_union() {
            if fields.len() != 1 {
                tcx.sess
                    .struct_span_err(pat.span, "union patterns should have exactly one field")
                    .emit();
            }
            if etc {
                tcx.sess.struct_span_err(pat.span, "`..` cannot be used in union patterns").emit();
            }
        } else if !etc && !unmentioned_fields.is_empty() {
            let no_accessible_unmentioned_fields = !unmentioned_fields.iter().any(|(field, _)| {
                field.vis.is_accessible_from(tcx.parent_module(pat.hir_id).to_def_id(), tcx)
            });
            if no_accessible_unmentioned_fields {
                unmentioned_err = Some(self.error_no_accessible_fields(pat, &fields));
            } else {
                unmentioned_err =
                    Some(self.error_unmentioned_fields(pat, &unmentioned_fields, &fields));
            }
        }
        match (inexistent_fields_err, unmentioned_err) {
            (Some(mut i), Some(mut u)) => {
                if let Some(mut e) = self.error_tuple_variant_as_struct_pat(pat, fields, variant) {
                    // We don't want to show the inexistent fields error when this was
                    // `Foo { a, b }` when it should have been `Foo(a, b)`.
                    i.delay_as_bug();
                    u.delay_as_bug();
                    e.emit();
                } else {
                    i.emit();
                    u.emit();
                }
            }
            (None, Some(mut err)) | (Some(mut err), None) => {
                err.emit();
            }
            (None, None) => {}
        }
        no_field_errors
    }
    /// Emits E0638: a pattern for a `#[non_exhaustive]` struct/variant defined
    /// in another crate must contain `..`; suggests where to insert it.
    fn error_foreign_non_exhaustive_spat(&self, pat: &Pat<'_>, descr: &str, no_fields: bool) {
        let sess = self.tcx.sess;
        let sm = sess.source_map();
        let sp_brace = sm.end_point(pat.span);
        let sp_comma = sm.end_point(pat.span.with_hi(sp_brace.hi()));
        // Prepend a comma to the suggestion when existing fields precede the
        // closing brace.
        let sugg = if no_fields || sp_brace != sp_comma { ".. }" } else { ", .. }" };
        let mut err = struct_span_err!(
            sess,
            pat.span,
            E0638,
            "`..` required with {} marked as non-exhaustive",
            descr
        );
        err.span_suggestion_verbose(
            sp_comma,
            "add `..` at the end of the field list to ignore all other fields",
            sugg.to_string(),
            Applicability::MachineApplicable,
        );
        err.emit();
    }
fn error_field_already_bound(&self, span: Span, ident: Ident, other_field: Span) {
struct_span_err!(
self.tcx.sess,
span,
E0025,
"field `{}` bound multiple times in the pattern",
ident
)
.span_label(span, format!("multiple uses of `{}` in pattern", ident))
.span_label(other_field, format!("first use of `{}`", ident))
.emit();
}
    /// Builds (without emitting) E0026 for pattern fields that do not exist on
    /// `variant`.
    ///
    /// May suggest a similarly-named unmentioned field; when it does, that field
    /// is removed from `unmentioned_fields` so a follow-up E0027 is not emitted
    /// for the same field.
    fn error_inexistent_fields(
        &self,
        kind_name: &str,
        inexistent_fields: &[Ident],
        unmentioned_fields: &mut Vec<(&ty::FieldDef, Ident)>,
        variant: &ty::VariantDef,
    ) -> DiagnosticBuilder<'tcx> {
        let tcx = self.tcx;
        let (field_names, t, plural) = if inexistent_fields.len() == 1 {
            (format!("a field named `{}`", inexistent_fields[0]), "this", "")
        } else {
            (
                format!(
                    "fields named {}",
                    inexistent_fields
                        .iter()
                        .map(|ident| format!("`{}`", ident))
                        .collect::<Vec<String>>()
                        .join(", ")
                ),
                "these",
                "s",
            )
        };
        let spans = inexistent_fields.iter().map(|ident| ident.span).collect::<Vec<_>>();
        let mut err = struct_span_err!(
            tcx.sess,
            spans,
            E0026,
            "{} `{}` does not have {}",
            kind_name,
            tcx.def_path_str(variant.def_id),
            field_names
        );
        if let Some(ident) = inexistent_fields.last() {
            err.span_label(
                ident.span,
                format!(
                    "{} `{}` does not have {} field{}",
                    kind_name,
                    tcx.def_path_str(variant.def_id),
                    t,
                    plural
                ),
            );
            // Only attempt a typo suggestion for the single-field case.
            if plural == "" {
                let input =
                    unmentioned_fields.iter().map(|(_, field)| field.name).collect::<Vec<_>>();
                let suggested_name = find_best_match_for_name(&input, ident.name, None);
                if let Some(suggested_name) = suggested_name {
                    err.span_suggestion(
                        ident.span,
                        "a field with a similar name exists",
                        suggested_name.to_string(),
                        Applicability::MaybeIncorrect,
                    );
                    // When we have a tuple struct used with struct we don't want to suggest using
                    // the (valid) struct syntax with numeric field names. Instead we want to
                    // suggest the expected syntax. We infer that this is the case by parsing the
                    // `Ident` into an unsized integer. The suggestion will be emitted elsewhere in
                    // `smart_resolve_context_dependent_help`.
                    if suggested_name.to_ident_string().parse::<usize>().is_err() {
                        // We don't want to throw `E0027` in case we have thrown `E0026` for them.
                        unmentioned_fields.retain(|&(_, x)| x.name != suggested_name);
                    }
                }
            }
        }
        if tcx.sess.teach(&err.get_code().unwrap()) {
            err.note(
                "This error indicates that a struct pattern attempted to \
                 extract a non-existent field from a struct. Struct fields \
                 are identified by the name used before the colon : so struct \
                 patterns should resemble the declaration of the struct type \
                 being matched.\n\n\
                 If you are using shorthand field patterns but want to refer \
                 to the struct field by a different name, you should rename \
                 it explicitly.",
            );
        }
        err
    }
    /// Builds (without emitting) E0769 when a tuple variant is written with
    /// struct-pattern syntax, e.g. `V { a, b }` where `V(a, b)` was meant,
    /// suggesting the tuple form. Returns `None` for genuine struct variants.
    fn error_tuple_variant_as_struct_pat(
        &self,
        pat: &Pat<'_>,
        fields: &'tcx [hir::FieldPat<'tcx>],
        variant: &ty::VariantDef,
    ) -> Option<DiagnosticBuilder<'tcx>> {
        if let (CtorKind::Fn, PatKind::Struct(qpath, ..)) = (variant.ctor_kind, &pat.kind) {
            let path = rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| {
                s.print_qpath(qpath, false)
            });
            let mut err = struct_span_err!(
                self.tcx.sess,
                pat.span,
                E0769,
                "tuple variant `{}` written as struct variant",
                path
            );
            // When all fields are present we can reuse the written subpatterns
            // verbatim; otherwise fall back to `_` placeholders.
            let (sugg, appl) = if fields.len() == variant.fields.len() {
                (
                    fields
                        .iter()
                        .map(|f| match self.tcx.sess.source_map().span_to_snippet(f.pat.span) {
                            Ok(f) => f,
                            Err(_) => rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| {
                                s.print_pat(f.pat)
                            }),
                        })
                        .collect::<Vec<String>>()
                        .join(", "),
                    Applicability::MachineApplicable,
                )
            } else {
                (
                    variant.fields.iter().map(|_| "_").collect::<Vec<&str>>().join(", "),
                    Applicability::MaybeIncorrect,
                )
            };
            err.span_suggestion(
                pat.span,
                "use the tuple variant pattern syntax instead",
                format!("{}({})", path, sugg),
                appl,
            );
            return Some(err);
        }
        None
    }
    /// Returns a diagnostic reporting a struct pattern which is missing an `..` due to
    /// inaccessible fields.
    ///
    /// ```text
    /// error: pattern requires `..` due to inaccessible fields
    ///   --> src/main.rs:10:9
    ///    |
    /// LL |     let foo::Foo {} = foo::Foo::default();
    ///    |         ^^^^^^^^^^^
    ///    |
    /// help: add a `..`
    ///    |
    /// LL |     let foo::Foo { .. } = foo::Foo::default();
    ///    |                  ^^^^^^
    /// ```
    fn error_no_accessible_fields(
        &self,
        pat: &Pat<'_>,
        fields: &'tcx [hir::FieldPat<'tcx>],
    ) -> DiagnosticBuilder<'tcx> {
        let mut err = self
            .tcx
            .sess
            .struct_span_err(pat.span, "pattern requires `..` due to inaccessible fields");
        if let Some(field) = fields.last() {
            // There are written fields: append `, ..` after the last one.
            err.span_suggestion_verbose(
                field.span.shrink_to_hi(),
                "ignore the inaccessible and unused fields",
                ", ..".to_string(),
                Applicability::MachineApplicable,
            );
        } else {
            let qpath_span = if let PatKind::Struct(qpath, ..) = &pat.kind {
                qpath.span()
            } else {
                bug!("`error_no_accessible_fields` called on non-struct pattern");
            };
            // Shrink the span to exclude the `foo::Foo` in `foo::Foo { }`.
            let span = pat.span.with_lo(qpath_span.shrink_to_hi().hi());
            err.span_suggestion_verbose(
                span,
                "ignore the inaccessible and unused fields",
                " { .. }".to_string(),
                Applicability::MachineApplicable,
            );
        }
        err
    }
    /// Returns a diagnostic reporting a struct pattern which does not mention some fields.
    ///
    /// ```text
    /// error[E0027]: pattern does not mention field `bar`
    ///   --> src/main.rs:15:9
    ///    |
    /// LL |     let foo::Foo {} = foo::Foo::new();
    ///    |         ^^^^^^^^^^^ missing field `bar`
    /// ```
    fn error_unmentioned_fields(
        &self,
        pat: &Pat<'_>,
        unmentioned_fields: &[(&ty::FieldDef, Ident)],
        fields: &'tcx [hir::FieldPat<'tcx>],
    ) -> DiagnosticBuilder<'tcx> {
        let field_names = if unmentioned_fields.len() == 1 {
            format!("field `{}`", unmentioned_fields[0].1)
        } else {
            let fields = unmentioned_fields
                .iter()
                .map(|(_, name)| format!("`{}`", name))
                .collect::<Vec<String>>()
                .join(", ");
            format!("fields {}", fields)
        };
        let mut err = struct_span_err!(
            self.tcx.sess,
            pat.span,
            E0027,
            "pattern does not mention {}",
            field_names
        );
        err.span_label(pat.span, format!("missing {}", field_names));
        let len = unmentioned_fields.len();
        // Compute where and how to splice the suggestion into the pattern.
        let (prefix, postfix, sp) = match fields {
            [] => match &pat.kind {
                PatKind::Struct(path, [], false) => {
                    (" { ", " }", path.span().shrink_to_hi().until(pat.span.shrink_to_hi()))
                }
                _ => return err,
            },
            [.., field] => {
                // if last field has a trailing comma, use the comma
                // as the span to avoid trailing comma in ultimate
                // suggestion (Issue #78511)
                let tail = field.span.shrink_to_hi().until(pat.span.shrink_to_hi());
                let tail_through_comma = self.tcx.sess.source_map().span_through_char(tail, ',');
                let sp = if tail_through_comma == tail {
                    field.span.shrink_to_hi()
                } else {
                    tail_through_comma
                };
                (
                    match pat.kind {
                        PatKind::Struct(_, [_, ..], _) => ", ",
                        _ => "",
                    },
                    "",
                    sp,
                )
            }
        };
        // Offer both fixes: spell out the missing fields, or ignore them with `..`.
        err.span_suggestion(
            sp,
            &format!(
                "include the missing field{} in the pattern",
                if len == 1 { "" } else { "s" },
            ),
            format!(
                "{}{}{}",
                prefix,
                unmentioned_fields
                    .iter()
                    .map(|(_, name)| name.to_string())
                    .collect::<Vec<_>>()
                    .join(", "),
                postfix,
            ),
            Applicability::MachineApplicable,
        );
        err.span_suggestion(
            sp,
            &format!(
                "if you don't care about {} missing field{}, you can explicitly ignore {}",
                if len == 1 { "this" } else { "these" },
                if len == 1 { "" } else { "s" },
                if len == 1 { "it" } else { "them" },
            ),
            format!("{}..{}", prefix, postfix),
            Applicability::MachineApplicable,
        );
        err
    }
/// Type checks a box pattern `box <inner>` against `expected`, returning the
/// resulting `Box<inner_ty>` type (or an error type on failure).
fn check_pat_box(
    &self,
    span: Span,
    inner: &'tcx Pat<'tcx>,
    expected: Ty<'tcx>,
    def_bm: BindingMode,
    ti: TopInfo<'tcx>,
) -> Ty<'tcx> {
    let tcx = self.tcx;
    let (box_ty, inner_ty) = if self.check_dereferenceable(span, expected, &inner) {
        // Here, `demand::subtype` is good enough, but I don't
        // think any errors can be introduced by using `demand::eqtype`.
        let inner_ty = self.next_ty_var(TypeVariableOrigin {
            kind: TypeVariableOriginKind::TypeInference,
            span: inner.span,
        });
        let box_ty = tcx.mk_box(inner_ty);
        self.demand_eqtype_pat(span, expected, box_ty, ti);
        (box_ty, inner_ty)
    } else {
        // `expected` cannot be dereferenced (error already reported); recover
        // with error types so checking can continue.
        let err = tcx.ty_error();
        (err, err)
    };
    // Still check the inner pattern so its bindings get (error) types.
    self.check_pat(&inner, inner_ty, def_bm, ti);
    box_ty
}
/// Type checks a reference pattern `&<inner>` / `&mut <inner>` against `expected`,
/// returning the reference type the pattern matched as.
fn check_pat_ref(
    &self,
    pat: &'tcx Pat<'tcx>,
    inner: &'tcx Pat<'tcx>,
    mutbl: hir::Mutability,
    expected: Ty<'tcx>,
    def_bm: BindingMode,
    ti: TopInfo<'tcx>,
) -> Ty<'tcx> {
    let tcx = self.tcx;
    let expected = self.shallow_resolve(expected);
    let (rptr_ty, inner_ty) = if self.check_dereferenceable(pat.span, expected, &inner) {
        // `demand::subtype` would be good enough, but using `eqtype` turns
        // out to be equally general. See (note_1) for details.
        // Take region, inner-type from expected type if we can,
        // to avoid creating needless variables. This also helps with
        // the bad interactions of the given hack detailed in (note_1).
        debug!("check_pat_ref: expected={:?}", expected);
        match *expected.kind() {
            // Fast path: `expected` is already a reference with matching mutability.
            ty::Ref(_, r_ty, r_mutbl) if r_mutbl == mutbl => (expected, r_ty),
            _ => {
                // Otherwise invent a fresh inner type variable and demand
                // `expected == &'r <mutbl> inner_ty`.
                let inner_ty = self.next_ty_var(TypeVariableOrigin {
                    kind: TypeVariableOriginKind::TypeInference,
                    span: inner.span,
                });
                let rptr_ty = self.new_ref_ty(pat.span, mutbl, inner_ty);
                debug!("check_pat_ref: demanding {:?} = {:?}", expected, rptr_ty);
                let err = self.demand_eqtype_pat_diag(pat.span, expected, rptr_ty, ti);
                // Look for a case like `fn foo(&foo: u32)` and suggest
                // `fn foo(foo: &u32)`
                if let Some(mut err) = err {
                    self.borrow_pat_suggestion(&mut err, &pat, &inner, &expected);
                    err.emit();
                }
                (rptr_ty, inner_ty)
            }
        }
    } else {
        // Not dereferenceable (error already reported); recover with error types.
        let err = tcx.ty_error();
        (err, err)
    };
    self.check_pat(&inner, inner_ty, def_bm, TopInfo { parent_pat: Some(&pat), ..ti });
    rptr_ty
}
/// Builds a `&'r T` / `&'r mut T` type whose region `'r` is a fresh
/// inference variable originating from this pattern's span.
fn new_ref_ty(&self, span: Span, mutbl: hir::Mutability, ty: Ty<'tcx>) -> Ty<'tcx> {
    self.tcx.mk_ref(
        self.next_region_var(infer::PatternRegion(span)),
        ty::TypeAndMut { ty, mutbl },
    )
}
/// Type check a slice pattern.
///
/// Syntactically, these look like `[pat_0, ..., pat_n]`.
/// Semantically, we are type checking a pattern with structure:
/// ```
/// [before_0, ..., before_n, (slice, after_0, ... after_n)?]
/// ```
/// The type of `slice`, if it is present, depends on the `expected` type.
/// If `slice` is missing, then so is `after_i`.
/// If `slice` is present, it can still represent 0 elements.
fn check_pat_slice(
    &self,
    span: Span,
    before: &'tcx [&'tcx Pat<'tcx>],
    slice: Option<&'tcx Pat<'tcx>>,
    after: &'tcx [&'tcx Pat<'tcx>],
    expected: Ty<'tcx>,
    def_bm: BindingMode,
    ti: TopInfo<'tcx>,
) -> Ty<'tcx> {
    let expected = self.structurally_resolved_type(span, expected);
    let (element_ty, opt_slice_ty, inferred) = match *expected.kind() {
        // An array, so we might have something like `let [a, b, c] = [0, 1, 2];`.
        ty::Array(element_ty, len) => {
            // Minimum number of elements the pattern demands (everything but the `..`).
            let min = before.len() as u64 + after.len() as u64;
            let (opt_slice_ty, expected) =
                self.check_array_pat_len(span, element_ty, expected, slice, len, min);
            // `opt_slice_ty.is_none()` => `slice.is_none()`.
            // Note, though, that opt_slice_ty could be `Some(error_ty)`.
            assert!(opt_slice_ty.is_some() || slice.is_none());
            (element_ty, opt_slice_ty, expected)
        }
        // For a slice type, the `..` subpattern (if any) has the slice type itself.
        ty::Slice(element_ty) => (element_ty, Some(expected), expected),
        // The expected type must be an array or slice, but was neither, so error.
        _ => {
            if !expected.references_error() {
                self.error_expected_array_or_slice(span, expected);
            }
            let err = self.tcx.ty_error();
            (err, Some(err), err)
        }
    };
    // Type check all the patterns before `slice`.
    for elt in before {
        self.check_pat(&elt, element_ty, def_bm, ti);
    }
    // Type check the `slice`, if present, against its expected type.
    if let Some(slice) = slice {
        self.check_pat(&slice, opt_slice_ty.unwrap(), def_bm, ti);
    }
    // Type check the elements after `slice`, if present.
    for elt in after {
        self.check_pat(&elt, element_ty, def_bm, ti);
    }
    inferred
}
/// Type check the length of an array pattern.
///
/// Returns both the type of the variable length pattern (or `None`), and the potentially
/// inferred array type. We only return `None` for the slice type if `slice.is_none()`.
fn check_array_pat_len(
    &self,
    span: Span,
    element_ty: Ty<'tcx>,
    arr_ty: Ty<'tcx>,
    slice: Option<&'tcx Pat<'tcx>>,
    len: &ty::Const<'tcx>,
    min_len: u64,
) -> (Option<Ty<'tcx>>, Ty<'tcx>) {
    // `len` is a const; it may or may not be evaluatable (e.g. generic `const N`).
    if let Some(len) = len.try_eval_usize(self.tcx, self.param_env) {
        // Now we know the length...
        if slice.is_none() {
            // ...and since there is no variable-length pattern,
            // we require an exact match between the number of elements
            // in the array pattern and as provided by the matched type.
            if min_len == len {
                return (None, arr_ty);
            }
            self.error_scrutinee_inconsistent_length(span, min_len, len);
        } else if let Some(pat_len) = len.checked_sub(min_len) {
            // The variable-length pattern was there,
            // so it has an array type with the remaining elements left as its size...
            return (Some(self.tcx.mk_array(element_ty, pat_len)), arr_ty);
        } else {
            // ...however, in this case, there were no remaining elements.
            // That is, the slice pattern requires more than the array type offers.
            self.error_scrutinee_with_rest_inconsistent_length(span, min_len, len);
        }
    } else if slice.is_none() {
        // We have a pattern with a fixed length,
        // which we can use to infer the length of the array.
        let updated_arr_ty = self.tcx.mk_array(element_ty, min_len);
        self.demand_eqtype(span, updated_arr_ty, arr_ty);
        return (None, updated_arr_ty);
    } else {
        // We have a variable-length pattern and don't know the array length.
        // This happens if we have e.g.,
        // `let [a, b, ..] = arr` where `arr: [T; N]` where `const N: usize`.
        self.error_scrutinee_unfixed_length(span);
    }
    // If we get here, we must have emitted an error.
    (Some(self.tcx.ty_error()), arr_ty)
}
/// Emits E0527: a fixed-length array pattern (no `..`) whose element count
/// differs from the array's known length.
fn error_scrutinee_inconsistent_length(&self, span: Span, min_len: u64, size: u64) {
    struct_span_err!(
        self.tcx.sess,
        span,
        E0527,
        "pattern requires {} element{} but array has {}",
        min_len,
        pluralize!(min_len),
        size,
    )
    .span_label(span, format!("expected {} element{}", size, pluralize!(size)))
    .emit();
}
/// Emits E0528: a slice pattern with `..` that still demands more elements
/// than the matched array provides.
fn error_scrutinee_with_rest_inconsistent_length(&self, span: Span, min_len: u64, size: u64) {
    struct_span_err!(
        self.tcx.sess,
        span,
        E0528,
        "pattern requires at least {} element{} but array has {}",
        min_len,
        pluralize!(min_len),
        size,
    )
    .span_label(
        span,
        format!("pattern cannot match array of {} element{}", size, pluralize!(size),),
    )
    .emit();
}
/// Emits E0730: a `..` pattern against an array whose length could not be
/// evaluated (e.g. a generic `const N: usize`).
fn error_scrutinee_unfixed_length(&self, span: Span) {
    struct_span_err!(
        self.tcx.sess,
        span,
        E0730,
        "cannot pattern-match on an array without a fixed length",
    )
    .emit();
}
/// Emits E0529: a slice pattern applied to a type that is neither an array
/// nor a slice. Adds a help note for `&[..]`/`&[T; N]` scrutinees, whose
/// handling changed with issue #62254.
fn error_expected_array_or_slice(&self, span: Span, expected_ty: Ty<'tcx>) {
    let mut err = struct_span_err!(
        self.tcx.sess,
        span,
        E0529,
        "expected an array or slice, found `{}`",
        expected_ty
    );
    if let ty::Ref(_, ty, _) = expected_ty.kind() {
        if let ty::Array(..) | ty::Slice(..) = ty.kind() {
            err.help("the semantics of slice patterns changed recently; see issue #62254");
        }
    }
    err.span_label(span, format!("pattern cannot match with input type `{}`", expected_ty));
    err.emit();
}
}
|
{
// Require known type only when `..` is present.
if let ty::Tuple(ref tys) = self.structurally_resolved_type(span, expected).kind() {
expected_len = tys.len();
}
}
|
index.d.ts
|
declare module '*.svg' {
import * as React from 'react';
export const ReactComponent: React.FunctionComponent<
React.SVGProps<SVGSVGElement>
|
const src: string;
export default src;
}
|
>;
|
ecc-tools.js
|
// Wire up every `.ecc-tools` widget: clicking its toggle button opens/closes
// the panel and keeps `aria-expanded` in sync for assistive technology.
$(document).ready(function () {
  // NOTE(review): querySelectorAll returns a (possibly empty) NodeList, never
  // null, so this guard is always true; forEach on an empty list is a no-op
  // anyway. Kept for behavioral parity — consider checking `.length` instead.
  if (document.querySelectorAll('.ecc-tools') !== null) {
    var toolsArea = document.querySelectorAll('.ecc-tools')
    toolsArea.forEach(function (b) {
      var toggleButton = b.querySelector('.ecc-tools__toggle');
      toggleButton.addEventListener('click', function (e) {
        b.classList.toggle('open');
        // Flip the ARIA state to mirror the visual open/closed state.
        toggleButton.setAttribute(
          'aria-expanded',
          toggleButton.getAttribute('aria-expanded') === 'true'
            ? 'false'
            : 'true'
        );
      })
    })
  }
})
|
volatile_memory.rs
|
// Copyright 2017 The Chromium OS Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//! Types for volatile access to memory.
//!
//! Two of the core rules for safe rust is no data races and no aliased mutable references.
//! `VolatileRef` and `VolatileSlice`, along with types that produce those which implement
//! `VolatileMemory`, allow us to sidestep that rule by wrapping pointers that absolutely have to be
//! accessed volatile. Some systems really do need to operate on shared memory and can't have the
//! compiler reordering or eliding access because it has no visibility into what other systems are
//! doing with that hunk of memory.
//!
//! For the purposes of maintaining safety, volatile memory has some rules of its own:
//! 1. No references or slices to volatile memory (`&` or `&mut`).
//! 2. Access should always been done with a volatile read or write.
//! The First rule is because having references of any kind to memory considered volatile would
//! violate pointer aliasing. The second is because unvolatile accesses are inherently undefined if
//! done concurrently without synchronization. With volatile access we know that the compiler has
//! not reordered or elided the access.
use std::cmp::min;
use std::fmt::{self, Display};
use std::io::Result as IoResult;
use std::io::{Read, Write};
use std::marker::PhantomData;
use std::mem::size_of;
use std::ptr::copy;
use std::ptr::{null_mut, read_volatile, write_volatile};
use std::result;
use std::slice::{from_raw_parts, from_raw_parts_mut};
use std::{isize, usize};
use DataInit;
/// Errors that can occur when computing or validating a volatile memory range.
#[derive(Eq, PartialEq, Debug)]
pub enum VolatileMemoryError {
    /// `addr` is out of bounds of the volatile memory slice.
    OutOfBounds { addr: u64 },
    /// Taking a slice at `base` with `offset` would overflow `u64`.
    Overflow { base: u64, offset: u64 },
}
// Human-readable rendering of memory-range errors, with addresses in hex.
impl Display for VolatileMemoryError {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        use self::VolatileMemoryError::*;
        match self {
            OutOfBounds { addr } => write!(f, "address 0x{:x} is out of bounds", addr),
            Overflow { base, offset } => write!(
                f,
                "address 0x{:x} offset by 0x{:x} would overflow",
                base, offset
            ),
        }
    }
}
/// Result type returned by fallible volatile-memory operations.
pub type VolatileMemoryResult<T> = result::Result<T, VolatileMemoryError>;

// Module-local shorthands used throughout this file.
use VolatileMemoryError as Error;
type Result<T> = VolatileMemoryResult<T>;
/// Convenience function for computing `base + offset` which returns
/// `Err(VolatileMemoryError::Overflow)` instead of panicking in the case `base + offset` exceeds
/// `u64::MAX`.
///
/// # Examples
///
/// ```
/// # use data_model::*;
/// # fn get_slice(offset: u64, count: u64) -> VolatileMemoryResult<()> {
/// let mem_end = calc_offset(offset, count)?;
/// if mem_end > 100 {
/// return Err(VolatileMemoryError::OutOfBounds{addr: mem_end});
/// }
/// # Ok(())
/// # }
/// ```
pub fn calc_offset(base: u64, offset: u64) -> Result<u64> {
match base.checked_add(offset) {
None => Err(Error::Overflow { base, offset }),
Some(m) => Ok(m),
}
}
/// Trait for types that support raw volatile access to their data.
pub trait VolatileMemory {
    /// Gets a slice of memory at `offset` that is `count` bytes in length and supports volatile
    /// access.
    fn get_slice(&self, offset: u64, count: u64) -> Result<VolatileSlice>;

    /// Gets a `VolatileRef` at `offset`.
    ///
    /// Provided in terms of `get_slice`, so the implementor's bounds check covers
    /// the full `size_of::<T>()` bytes starting at `offset`.
    fn get_ref<T: DataInit>(&self, offset: u64) -> Result<VolatileRef<T>> {
        let slice = self.get_slice(offset, size_of::<T>() as u64)?;
        Ok(VolatileRef {
            addr: slice.addr as *mut T,
            phantom: PhantomData,
        })
    }
}
// A mutable byte slice is trivially usable as volatile memory: bounds are
// checked against its length and the resulting VolatileSlice borrows it.
impl<'a> VolatileMemory for &'a mut [u8] {
    fn get_slice(&self, offset: u64, count: u64) -> Result<VolatileSlice> {
        // Reject ranges whose end overflows u64 or exceeds the slice length.
        let mem_end = calc_offset(offset, count)?;
        if mem_end > self.len() as u64 {
            return Err(Error::OutOfBounds { addr: mem_end });
        }
        Ok(unsafe { VolatileSlice::new((self.as_ptr() as u64 + offset) as *mut _, count) })
    }
}
/// A slice of raw memory that supports volatile access.
#[derive(Copy, Clone, Debug)]
pub struct VolatileSlice<'a> {
    // Start address of the region; validity is guaranteed by the unsafe constructor.
    addr: *mut u8,
    // Length of the region in bytes.
    size: u64,
    // Ties the raw pointer to the lifetime of the memory it was created from.
    phantom: PhantomData<&'a u8>,
}
// The default slice is empty (size 0) at the null address; it is never
// dereferenced because every access is bounds-checked against `size`.
impl<'a> Default for VolatileSlice<'a> {
    fn default() -> VolatileSlice<'a> {
        VolatileSlice {
            addr: null_mut(),
            size: 0,
            phantom: PhantomData,
        }
    }
}
impl<'a> VolatileSlice<'a> {
/// Creates a slice of raw memory that must support volatile access.
///
/// # Safety
///
/// To use this safely, the caller must guarantee that the memory at `addr` is `size` bytes long
/// and is available for the duration of the lifetime of the new `VolatileSlice`. The caller
/// must also guarantee that all other users of the given chunk of memory are using volatile
/// accesses.
pub unsafe fn new(addr: *mut u8, size: u64) -> VolatileSlice<'a> {
    VolatileSlice {
        addr,
        size,
        phantom: PhantomData,
    }
}
/// Gets the address of this slice's memory.
pub fn as_ptr(&self) -> *mut u8 {
    self.addr
}
/// Gets the size of this slice, in bytes.
pub fn size(&self) -> u64 {
    self.size
}
/// Creates a copy of this slice with the address increased by `count` bytes, and the size
/// reduced by `count` bytes.
///
/// Fails with `Overflow` if the new address would wrap `u64` or exceed `usize::MAX`,
/// and with `OutOfBounds` if `count` is larger than the current size.
pub fn offset(self, count: u64) -> Result<VolatileSlice<'a>> {
    let new_addr =
        (self.addr as u64)
            .checked_add(count)
            .ok_or(VolatileMemoryError::Overflow {
                base: self.addr as u64,
                offset: count,
            })?;
    if new_addr > usize::MAX as u64 {
        // The address must remain representable as a pointer on this platform.
        // (Was `return Err(...)?;` — the trailing `?` on an explicit `return Err`
        // was a redundant double conversion.)
        return Err(VolatileMemoryError::Overflow {
            base: self.addr as u64,
            offset: count,
        });
    }
    let new_size = self
        .size
        .checked_sub(count)
        .ok_or(VolatileMemoryError::OutOfBounds { addr: new_addr })?;
    // Safe because the memory has the same lifetime and points to a subset of the memory of the
    // original slice.
    unsafe { Ok(VolatileSlice::new(new_addr as *mut u8, new_size)) }
}
/// Copies `self.size()` or `buf.len()` times the size of `T` bytes, whichever is smaller, to
/// `buf`.
///
/// The copy happens from smallest to largest address in `T` sized chunks using volatile reads.
///
/// # Examples
///
/// ```
/// # use std::fs::File;
/// # use std::path::Path;
/// # use data_model::VolatileMemory;
/// # fn test_write_null() -> Result<(), ()> {
/// let mut mem = [0u8; 32];
/// let mem_ref = &mut mem[..];
/// let vslice = mem_ref.get_slice(0, 32).map_err(|_| ())?;
/// let mut buf = [5u8; 16];
/// vslice.copy_to(&mut buf[..]);
/// for v in &buf[..] {
///     assert_eq!(*v, 0);
/// }
/// # Ok(())
/// # }
/// ```
pub fn copy_to<T>(&self, buf: &mut [T])
where
    T: DataInit,
{
    let mut addr = self.addr;
    // `take` caps the iteration at however many whole `T`s fit in this slice.
    for v in buf.iter_mut().take(self.size as usize / size_of::<T>()) {
        unsafe {
            *v = read_volatile(addr as *const T);
            addr = addr.add(size_of::<T>());
        }
    }
}
/// Copies `self.size()` or `slice.size()` bytes, whichever is smaller, to `slice`.
///
/// The copies happen in an undefined order.
/// # Examples
///
/// ```
/// # use data_model::VolatileMemory;
/// # fn test_write_null() -> Result<(), ()> {
/// let mut mem = [0u8; 32];
/// let mem_ref = &mut mem[..];
/// let vslice = mem_ref.get_slice(0, 32).map_err(|_| ())?;
/// vslice.copy_to_volatile_slice(vslice.get_slice(16, 16).map_err(|_| ())?);
/// # Ok(())
/// # }
/// ```
pub fn copy_to_volatile_slice(&self, slice: VolatileSlice) {
    // NOTE(review): this uses `ptr::copy` (non-volatile, memmove-style), unlike
    // the element-wise volatile loops above — presumably intentional for bulk
    // copies between two volatile regions; confirm against callers.
    unsafe {
        copy(self.addr, slice.addr, min(self.size, slice.size) as usize);
    }
}
/// Copies `self.size()` or `buf.len()` times the size of `T` bytes, whichever is smaller, to
/// this slice's memory.
///
/// The copy happens from smallest to largest address in `T` sized chunks using volatile writes.
///
/// # Examples
///
/// ```
/// # use std::fs::File;
/// # use std::path::Path;
/// # use data_model::VolatileMemory;
/// # fn test_write_null() -> Result<(), ()> {
/// let mut mem = [0u8; 32];
/// let mem_ref = &mut mem[..];
/// let vslice = mem_ref.get_slice(0, 32).map_err(|_| ())?;
/// let buf = [5u8; 64];
/// vslice.copy_from(&buf[..]);
/// for i in 0..4 {
///     assert_eq!(vslice.get_ref::<u32>(i * 4).map_err(|_| ())?.load(), 0x05050505);
/// }
/// # Ok(())
/// # }
/// ```
pub fn copy_from<T>(&self, buf: &[T])
where
    T: DataInit,
{
    let mut addr = self.addr;
    // `take` caps the iteration at however many whole `T`s fit in this slice.
    for &v in buf.iter().take(self.size as usize / size_of::<T>()) {
        unsafe {
            write_volatile(addr as *mut T, v);
            addr = addr.add(size_of::<T>());
        }
    }
}
/// Attempt to write all data from memory to a writable object and returns how many bytes were
/// actually written on success.
///
/// # Arguments
/// * `w` - Write from memory to `w`.
///
/// # Examples
///
/// * Write some bytes to /dev/null
///
/// ```
/// # use std::fs::File;
/// # use std::path::Path;
/// # use data_model::VolatileMemory;
/// # fn test_write_null() -> Result<(), ()> {
/// # let mut mem = [0u8; 32];
/// # let mem_ref = &mut mem[..];
/// # let vslice = mem_ref.get_slice(0, 32).map_err(|_| ())?;
/// let mut file = File::open(Path::new("/dev/null")).map_err(|_| ())?;
/// vslice.write_to(&mut file).map_err(|_| ())?;
/// # Ok(())
/// # }
/// ```
pub fn write_to<T: Write>(&self, w: &mut T) -> IoResult<usize> {
    // May write fewer than `self.size` bytes, mirroring `Write::write` semantics.
    w.write(unsafe { self.as_slice() })
}
/// Writes all data from memory to a writable object via `Write::write_all`.
///
/// # Arguments
/// * `w` - Write from memory to `w`.
///
/// # Examples
///
/// * Write some bytes to /dev/null
///
/// ```
/// # use std::fs::File;
/// # use std::path::Path;
/// # use data_model::VolatileMemory;
/// # fn test_write_null() -> Result<(), ()> {
/// # let mut mem = [0u8; 32];
/// # let mem_ref = &mut mem[..];
/// # let vslice = mem_ref.get_slice(0, 32).map_err(|_| ())?;
/// let mut file = File::open(Path::new("/dev/null")).map_err(|_| ())?;
/// vslice.write_all_to(&mut file).map_err(|_| ())?;
/// # Ok(())
/// # }
/// ```
pub fn write_all_to<T: Write>(&self, w: &mut T) -> IoResult<()> {
    // Retries short writes until the whole slice is written (or an error occurs).
    w.write_all(unsafe { self.as_slice() })
}
/// Reads up to this slice's size to memory from a readable object and returns how many bytes
/// were actually read on success.
///
/// # Arguments
/// * `r` - Read from `r` into this slice's memory.
///
/// # Examples
///
/// * Read some bytes from /dev/null
///
/// ```
/// # use std::fs::File;
/// # use std::path::Path;
/// # use data_model::VolatileMemory;
/// # fn test_write_null() -> Result<(), ()> {
/// # let mut mem = [0u8; 32];
/// # let mem_ref = &mut mem[..];
/// # let vslice = mem_ref.get_slice(0, 32).map_err(|_| ())?;
/// let mut file = File::open(Path::new("/dev/null")).map_err(|_| ())?;
/// vslice.read_from(&mut file).map_err(|_| ())?;
/// # Ok(())
/// # }
/// ```
pub fn read_from<T: Read>(&self, r: &mut T) -> IoResult<usize> {
    // May read fewer than `self.size` bytes, mirroring `Read::read` semantics.
    r.read(unsafe { self.as_mut_slice() })
}
/// Read exactly this slice's size into memory from a readable object via `Read::read_exact`.
///
/// # Arguments
/// * `r` - Read from `r` into this slice's memory.
///
/// # Examples
///
/// * Read some bytes from /dev/zero
///
/// ```
/// # use std::fs::File;
/// # use std::path::Path;
/// # use data_model::VolatileMemory;
/// # fn test_write_null() -> Result<(), ()> {
/// # let mut mem = [0u8; 32];
/// # let mem_ref = &mut mem[..];
/// # let vslice = mem_ref.get_slice(0, 32).map_err(|_| ())?;
/// let mut file = File::open(Path::new("/dev/zero")).map_err(|_| ())?;
/// vslice.read_exact_from(&mut file).map_err(|_| ())?;
/// # Ok(())
/// # }
/// ```
pub fn read_exact_from<T: Read>(&self, r: &mut T) -> IoResult<()> {
    r.read_exact(unsafe { self.as_mut_slice() })
}
// These functions are private and only used for the read/write functions. It is not valid in
// general to take slices of volatile memory — doing so would alias memory that
// other parties may be mutating concurrently (see the module-level rules).
unsafe fn as_slice(&self) -> &[u8] {
    from_raw_parts(self.addr, self.size as usize)
}
// TODO(zachr) - refactor this so the mut from non-mut isn't necessary (bug: 938767)
#[allow(clippy::mut_from_ref)]
unsafe fn as_mut_slice(&self) -> &mut [u8] {
|
from_raw_parts_mut(self.addr, self.size as usize)
}
}
// A VolatileSlice can itself hand out sub-slices, bounds-checked against its size.
impl<'a> VolatileMemory for VolatileSlice<'a> {
    fn get_slice(&self, offset: u64, count: u64) -> Result<VolatileSlice> {
        // Reject ranges whose end overflows u64 or exceeds this slice's size.
        let mem_end = calc_offset(offset, count)?;
        if mem_end > self.size {
            return Err(Error::OutOfBounds { addr: mem_end });
        }
        Ok(VolatileSlice {
            addr: (self.addr as u64 + offset) as *mut _,
            size: count,
            phantom: PhantomData,
        })
    }
}
/// A memory location that supports volatile access of a `T`.
///
/// # Examples
///
/// ```
/// # use data_model::VolatileRef;
/// let mut v = 5u32;
/// assert_eq!(v, 5);
/// let v_ref = unsafe { VolatileRef::new(&mut v as *mut u32) };
/// assert_eq!(v_ref.load(), 5);
/// v_ref.store(500);
/// assert_eq!(v, 500);
/// ```
#[derive(Debug)]
pub struct VolatileRef<'a, T: DataInit>
where
    T: 'a,
{
    // Address of the referenced `T`; validity is guaranteed by the unsafe constructor.
    addr: *mut T,
    // Ties the raw pointer to the lifetime of the memory it points into.
    phantom: PhantomData<&'a T>,
}
impl<'a, T: DataInit> VolatileRef<'a, T> {
    /// Creates a reference to raw memory that must support volatile access of `T` sized chunks.
    ///
    /// # Safety
    ///
    /// To use this safely, the caller must guarantee that the memory at `addr` is big enough for a
    /// `T` and is available for the duration of the lifetime of the new `VolatileRef`. The caller
    /// must also guarantee that all other users of the given chunk of memory are using volatile
    /// accesses.
    pub unsafe fn new(addr: *mut T) -> VolatileRef<'a, T> {
        VolatileRef {
            addr,
            phantom: PhantomData,
        }
    }

    /// Gets the address of this slice's memory.
    pub fn as_ptr(&self) -> *mut T {
        self.addr
    }

    /// Gets the size of this slice.
    ///
    /// # Examples
    ///
    /// ```
    /// # use std::mem::size_of;
    /// # use data_model::VolatileRef;
    /// let v_ref = unsafe { VolatileRef::new(0 as *mut u32) };
    /// assert_eq!(v_ref.size(), size_of::<u32>() as u64);
    /// ```
    pub fn size(&self) -> u64 {
        size_of::<T>() as u64
    }

    /// Does a volatile write of the value `v` to the address of this ref.
    #[inline(always)]
    pub fn store(&self, v: T) {
        unsafe { write_volatile(self.addr, v) };
    }

    /// Does a volatile read of the value at the address of this ref.
    #[inline(always)]
    pub fn load(&self) -> T {
        // For the purposes of demonstrating why read_volatile is necessary, try replacing the code
        // in this function with the commented code below and running `cargo test --release`.
        // unsafe { *(self.addr as *const T) }
        unsafe { read_volatile(self.addr) }
    }

    /// Converts this `T` reference to a raw slice with the same size and address.
    pub fn to_slice(&self) -> VolatileSlice<'a> {
        unsafe { VolatileSlice::new(self.addr as *mut u8, size_of::<T>() as u64) }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    use std::sync::Arc;
    use std::thread::{sleep, spawn};
    use std::time::Duration;

    // Heap-backed, clonable byte buffer used to exercise `VolatileMemory`,
    // including sharing a region across threads.
    #[derive(Clone)]
    struct VecMem {
        mem: Arc<Vec<u8>>,
    }

    impl VecMem {
        fn new(size: usize) -> VecMem {
            let mut mem = Vec::new();
            mem.resize(size, 0);
            VecMem { mem: Arc::new(mem) }
        }
    }

    impl VolatileMemory for VecMem {
        fn get_slice(&self, offset: u64, count: u64) -> Result<VolatileSlice> {
            let mem_end = calc_offset(offset, count)?;
            if mem_end > self.mem.len() as u64 {
                return Err(Error::OutOfBounds { addr: mem_end });
            }
            Ok(unsafe { VolatileSlice::new((self.mem.as_ptr() as u64 + offset) as *mut _, count) })
        }
    }

    #[test]
    fn ref_store() {
        let mut a = [0u8; 1];
        {
            let a_ref = &mut a[..];
            let v_ref = a_ref.get_ref(0).unwrap();
            v_ref.store(2u8);
        }
        assert_eq!(a[0], 2);
    }

    #[test]
    fn ref_load() {
        let mut a = [5u8; 1];
        {
            let a_ref = &mut a[..];
            let c = {
                let v_ref = a_ref.get_ref::<u8>(0).unwrap();
                assert_eq!(v_ref.load(), 5u8);
                v_ref
            };
            // To make sure we can take a v_ref out of the scope we made it in:
            c.load();
            // but not too far:
            // c
        } //.load()
        ;
    }

    #[test]
    fn ref_to_slice() {
        let mut a = [1u8; 5];
        let a_ref = &mut a[..];
        let v_ref = a_ref.get_ref(1).unwrap();
        v_ref.store(0x12345678u32);
        let ref_slice = v_ref.to_slice();
        assert_eq!(v_ref.as_ptr() as u64, ref_slice.as_ptr() as u64);
        assert_eq!(v_ref.size(), ref_slice.size());
    }

    #[test]
    fn observe_mutate() {
        let a = VecMem::new(1);
        let a_clone = a.clone();
        let v_ref = a.get_ref::<u8>(0).unwrap();
        v_ref.store(99);
        // A second thread zeroes the same byte through a cloned handle.
        spawn(move || {
            sleep(Duration::from_millis(10));
            let clone_v_ref = a_clone.get_ref::<u8>(0).unwrap();
            clone_v_ref.store(0);
        });

        // Technically this is a race condition but we have to observe the v_ref's value changing
        // somehow and this helps to ensure the sleep actually happens before the store rather then
        // being reordered by the compiler.
        assert_eq!(v_ref.load(), 99);

        // Granted we could have a machine that manages to perform this many volatile loads in the
        // amount of time the spawned thread sleeps, but the most likely reason the retry limit will
        // get reached is because v_ref.load() is not actually performing the required volatile read
        // or v_ref.store() is not doing a volatile write. A timer based solution was avoided
        // because that might use a syscall which could hint the optimizer to reload v_ref's pointer
        // regardless of volatile status. Note that we use a longer retry duration for optimized
        // builds.
        #[cfg(debug_assertions)]
        const RETRY_MAX: u64 = 500_000_000;
        #[cfg(not(debug_assertions))]
        const RETRY_MAX: u64 = 10_000_000_000;

        let mut retry = 0;
        while v_ref.load() == 99 && retry < RETRY_MAX {
            retry += 1;
        }

        assert_ne!(retry, RETRY_MAX, "maximum retry exceeded");
        assert_eq!(v_ref.load(), 0);
    }

    #[test]
    fn slice_size() {
        let a = VecMem::new(100);
        let s = a.get_slice(0, 27).unwrap();
        assert_eq!(s.size(), 27);

        let s = a.get_slice(34, 27).unwrap();
        assert_eq!(s.size(), 27);

        // Sub-slicing a slice is bounds-checked against the sub-slice's size.
        let s = s.get_slice(20, 5).unwrap();
        assert_eq!(s.size(), 5);
    }

    #[test]
    fn slice_overflow_error() {
        use std::u64::MAX;
        let a = VecMem::new(1);
        let res = a.get_slice(MAX, 1).unwrap_err();
        assert_eq!(
            res,
            Error::Overflow {
                base: MAX,
                offset: 1,
            }
        );
    }

    #[test]
    fn slice_oob_error() {
        let a = VecMem::new(100);
        a.get_slice(50, 50).unwrap();
        let res = a.get_slice(55, 50).unwrap_err();
        assert_eq!(res, Error::OutOfBounds { addr: 105 });
    }

    #[test]
    fn ref_overflow_error() {
        use std::u64::MAX;
        let a = VecMem::new(1);
        let res = a.get_ref::<u8>(MAX).unwrap_err();
        assert_eq!(
            res,
            Error::Overflow {
                base: MAX,
                offset: 1,
            }
        );
    }

    #[test]
    fn ref_oob_error() {
        let a = VecMem::new(100);
        a.get_ref::<u8>(99).unwrap();
        // A u16 starting at the last byte would end one past the buffer.
        let res = a.get_ref::<u16>(99).unwrap_err();
        assert_eq!(res, Error::OutOfBounds { addr: 101 });
    }

    #[test]
    fn ref_oob_too_large() {
        let a = VecMem::new(3);
        let res = a.get_ref::<u32>(0).unwrap_err();
        assert_eq!(res, Error::OutOfBounds { addr: 4 });
    }
}
| |
main.rs
|
mod interop;
mod window_subclass;
mod window_target;
use {
bindings::{
microsoft::graphics::canvas::{
effects::{
BlendEffect, BlendEffectMode, BorderEffect, ColorSourceEffect, CompositeEffect,
EffectBorderMode, GaussianBlurEffect, OpacityEffect, SaturationEffect,
},
ui::composition::CanvasComposition,
CanvasBitmap, CanvasComposite, CanvasDevice, CanvasEdgeBehavior,
},
windows::{
foundation::{numerics::Vector2, Size},
graphics::directx::{DirectXAlphaMode, DirectXPixelFormat},
storage::streams::{DataWriter, InMemoryRandomAccessStream},
ui::{
composition::{CompositionEffectSourceParameter, CompositionStretch, Compositor},
Color, Colors,
},
win32::winrt::{RoInitialize, RO_INIT_TYPE},
},
},
futures_executor::block_on,
interop::create_dispatcher_queue_controller_for_current_thread,
window_subclass::WindowSubclass,
window_target::CompositionDesktopWindowTargetSource,
winit::{
event::{Event, WindowEvent},
event_loop::{ControlFlow, EventLoop},
platform::windows::WindowBuilderExtWindows,
window::WindowBuilder,
},
};
fn main() -> windows::Result<()>
|
{
unsafe {
RoInitialize(RO_INIT_TYPE::RO_INIT_SINGLETHREADED).ok()?;
}
let _controller = create_dispatcher_queue_controller_for_current_thread()?;
let event_loop = EventLoop::new();
let window = WindowBuilder::new()
.with_visible(false)
.with_no_redirection_bitmap(true)
.build(&event_loop)
.unwrap();
unsafe {
window.apply_subclass();
}
window.set_visible(true);
let compositor = Compositor::new()?;
let target = window.create_window_target(&compositor, false)?;
let root = compositor.create_sprite_visual()?;
let clip = compositor.create_inset_clip()?;
clip.set_top_inset(1.)?;
root.set_clip(&clip)?;
let acrylic_effect = {
let effect = BlendEffect::new()?;
effect.set_mode(BlendEffectMode::Overlay)?;
effect.set_background({
let effect = CompositeEffect::new()?;
effect.set_mode(CanvasComposite::SourceOver)?;
let sources = effect.sources()?;
sources.append({
let effect = BlendEffect::new()?;
effect.set_mode(BlendEffectMode::Exclusion)?;
effect.set_background({
let effect = SaturationEffect::new()?;
effect.set_saturation(2.)?;
effect.set_source({
let effect = GaussianBlurEffect::new()?;
effect.set_source(CompositionEffectSourceParameter::create("Backdrop")?)?;
effect.set_border_mode(EffectBorderMode::Hard)?;
effect.set_blur_amount(30.)?;
effect
})?;
effect
})?;
effect.set_foreground({
let effect = ColorSourceEffect::new()?;
effect.set_color(Color {
a: 26,
r: 24,
g: 24,
b: 24,
})?;
effect
})?;
effect
})?;
sources.append({
let effect = ColorSourceEffect::new()?;
effect.set_color(Color {
a: 128,
r: 24,
g: 24,
b: 24,
})?;
effect
})?;
effect
})?;
effect.set_foreground({
let effect = OpacityEffect::new()?;
effect.set_opacity(0.02)?;
effect.set_source({
let effect = BorderEffect::new()?;
effect.set_extendx(CanvasEdgeBehavior::Wrap)?;
effect.set_extendy(CanvasEdgeBehavior::Wrap)?;
effect.set_source(CompositionEffectSourceParameter::create("Noise")?)?;
effect
})?;
effect
})?;
effect
};
let canvas_device = CanvasDevice::get_shared_device()?;
let composition_graphics_device =
CanvasComposition::create_composition_graphics_device(&compositor, &canvas_device)?;
let noise_drawing_surface = composition_graphics_device.create_drawing_surface(
Size {
width: 256.,
height: 256.,
},
DirectXPixelFormat::B8G8R8A8UIntNormalized,
DirectXAlphaMode::Premultiplied,
)?;
let noise_bytes = include_bytes!("noise.png");
let noise_stream = InMemoryRandomAccessStream::new()?;
let data_writer = DataWriter::create_data_writer(&noise_stream)?;
data_writer.write_bytes(noise_bytes)?;
block_on(data_writer.store_async()?)?;
let bitmap = block_on(CanvasBitmap::load_async_from_stream(
&canvas_device,
&noise_stream,
)?)?;
{
let ds = CanvasComposition::create_drawing_session(&noise_drawing_surface)?;
ds.clear(Colors::transparent()?)?;
ds.draw_image_at_origin(&bitmap)?;
}
let noise_brush = compositor.create_surface_brush_with_surface(&noise_drawing_surface)?;
noise_brush.set_stretch(CompositionStretch::None)?;
noise_brush.set_horizontal_alignment_ratio(0.)?;
noise_brush.set_vertical_alignment_ratio(0.)?;
let effect_factory = compositor.create_effect_factory(&acrylic_effect)?;
let acrylic_effect_brush = effect_factory.create_brush()?;
let destination_brush = compositor.create_backdrop_brush()?;
acrylic_effect_brush.set_source_parameter("Backdrop", &destination_brush)?;
acrylic_effect_brush.set_source_parameter("Noise", &noise_brush)?;
root.set_brush(acrylic_effect_brush)?;
root.set_relative_size_adjustment(Vector2 { x: 1., y: 1. })?;
target.set_root(&root)?;
event_loop.run(move |event, _target, control_flow| match event {
Event::WindowEvent { event, window_id } if window_id == window.id() => match event {
WindowEvent::CloseRequested => {
*control_flow = ControlFlow::Exit;
}
_ => {}
},
_ => {}
});
}
|
|
util.py
|
import torch
import random
import collections
import networkx as nx
from rdkit.Chem import AllChem
import numpy as np
from loader import graph_data_obj_to_nx_simple, nx_to_graph_data_obj_simple
from loader import MoleculeDataset
def get_filtered_fingerprint(smiles):
    """ Get filtered PubChem fingerprint. The digits related to elements other than C,
    H, O, N, S, F, Cl, and Br are discarded.
    Args:
        smiles (str): SMILES string.
    Return:
        fp (np.ndarray): The filtered PubChem fingerprint as a vector.
    """
    # Imported lazily so the rest of this module is usable without PyFingerprint.
    from PyFingerprint.All_Fingerprint import get_fingerprint
    fp = get_fingerprint(smiles, fp_type="pubchem", output="vector")
    # Bit positions dropped from the 881-bit PubChem fingerprint — presumably the
    # digits describing elements outside {C, H, O, N, S, F, Cl, Br} per the
    # PubChem fingerprint spec; TODO confirm against the spec document.
    # Note ranges (318,327)+(327,332) are contiguous (318..331).
    del_pos = (
        [
            26,
            27,
            28,
            29,
            30,
            31,
            32,
            41,
            42,
            46,
            47,
            48,
            295,
            296,
            298,
            303,
            304,
            348,
            354,
            369,
            407,
            411,
            415,
            456,
            525,
            627,
        ]
        + list(range(49, 115))
        + list(range(263, 283))
        + list(range(288, 293))
        + list(range(310, 317))
        + list(range(318, 327))
        + list(range(327, 332))
        + list(range(424, 427))
    )
    fp = np.delete(fp, del_pos)
    return fp
def check_same_molecules(s1, s2):
    """Return True iff the two SMILES strings describe the same molecule.

    Both inputs are canonicalised via their InChI representation, so
    different-but-equivalent SMILES spellings compare equal.
    """
    inchi1 = AllChem.MolToInchi(AllChem.MolFromSmiles(s1))
    inchi2 = AllChem.MolToInchi(AllChem.MolFromSmiles(s2))
    return inchi1 == inchi2
class NegativeEdge:
    """Transform that samples negative (non-existent) edges for a graph.

    On call, adds ``data.negative_edge_index``: up to ``num_edges / 2``
    directed edges that are neither present in ``data.edge_index`` nor
    self-loops.
    """

    def __init__(self):
        """
        Randomly sample negative edges
        """
        pass

    def __call__(self, data):
        """
        :param data: pytorch geometric data object with ``edge_index``,
            ``num_nodes`` and ``num_edges`` attributes.
        :return: the same data object with ``negative_edge_index`` added.
        """
        num_nodes = data.num_nodes
        num_edges = data.num_edges
        # Existing directed edges encoded as "u,v" strings for O(1) lookup.
        edge_set = set(
            [
                str(data.edge_index[0, i].cpu().item())
                + ","
                + str(data.edge_index[1, i].cpu().item())
                for i in range(data.edge_index.shape[1])
            ]
        )
        # Over-sample candidate endpoints (5x) so enough valid negative edges
        # survive the filtering below.
        redandunt_sample = torch.randint(0, num_nodes, (2, 5 * num_edges))
        sampled_ind = []
        sampled_edge_set = set([])
        for i in range(5 * num_edges):
            node1 = redandunt_sample[0, i].cpu().item()
            node2 = redandunt_sample[1, i].cpu().item()
            edge_str = str(node1) + "," + str(node2)
            # Reject real edges, duplicates, and self-loops.
            if not any(
                [edge_str in edge_set, edge_str in sampled_edge_set, node1 == node2]
            ):
                sampled_edge_set.add(edge_str)
                sampled_ind.append(i)
            # BUG FIX: was "== num_edges / 2". len(sampled_ind) is an int, so
            # for odd num_edges the equality with x.5 never held and the loop
            # kept collecting up to 5 * num_edges negatives instead of ~half.
            if len(sampled_ind) >= num_edges / 2:
                break
        data.negative_edge_index = redandunt_sample[:, sampled_ind]
        return data
class ExtractSubstructureContextPair:
    # Transform producing (substructure, context) subgraph pairs for
    # context-prediction pretraining.
    def __init__(self, k, l1, l2):
        """
        Randomly selects a node from the data object, and adds attributes
        that contain the substructure that corresponds to k hop neighbours
        rooted at the node, and the context substructures that corresponds to
        the subgraph that is between l1 and l2 hops away from the
        root node.
        :param k: hop radius of the substructure around the root atom
        :param l1: inner hop radius of the context ring
        :param l2: outer hop radius of the context ring
        """
        self.k = k
        self.l1 = l1
        self.l2 = l2
        # for the special case of 0, addresses the quirk with
        # single_source_shortest_path_length (a radius of 0 is mapped to -1
        # so the BFS returns no nodes rather than just the root)
        if self.k == 0:
            self.k = -1
        if self.l1 == 0:
            self.l1 = -1
        if self.l2 == 0:
            self.l2 = -1
    def __call__(self, data, root_idx=None):
        """
        :param data: pytorch geometric data object
        :param root_idx: If None, then randomly samples an atom idx.
        Otherwise sets atom idx of root (for debugging only)
        :return: the data object. Creates new attributes in original data object
        (each group is only set when the corresponding subgraph is non-empty):
        data.center_substruct_idx
        data.x_substruct
        data.edge_attr_substruct
        data.edge_index_substruct
        data.x_context
        data.edge_attr_context
        data.edge_index_context
        data.overlap_context_substruct_idx
        """
        num_atoms = data.x.size(0)
        if root_idx is None:
            root_idx = random.sample(range(num_atoms), 1)[0]
        G = graph_data_obj_to_nx_simple(data)  # same ordering as input data obj
        # Get k-hop subgraph rooted at specified atom idx
        substruct_node_idxes = nx.single_source_shortest_path_length(
            G, root_idx, self.k
        ).keys()
        if len(substruct_node_idxes) > 0:
            substruct_G = G.subgraph(substruct_node_idxes)
            # Reset node idx to 0 -> num_nodes - 1; otherwise the data obj is
            # invalid, since node indices in a data obj must start at 0.
            substruct_G, substruct_node_map = reset_idxes(substruct_G)
            substruct_data = nx_to_graph_data_obj_simple(substruct_G)
            data.x_substruct = substruct_data.x
            data.edge_attr_substruct = substruct_data.edge_attr
            data.edge_index_substruct = substruct_data.edge_index
            # Convert the center idx from the original graph node ordering to
            # the new substruct node ordering.
            data.center_substruct_idx = torch.tensor(
                [substruct_node_map[root_idx]]
            )
        # Get subgraphs that is between l1 and l2 hops away from the root node
        l1_node_idxes = nx.single_source_shortest_path_length(
            G, root_idx, self.l1
        ).keys()
        l2_node_idxes = nx.single_source_shortest_path_length(
            G, root_idx, self.l2
        ).keys()
        # Ring of nodes whose distance d satisfies l1 < d <= l2 (symmetric
        # difference of the two BFS ball node sets).
        context_node_idxes = set(l1_node_idxes).symmetric_difference(set(l2_node_idxes))
        if len(context_node_idxes) > 0:
            context_G = G.subgraph(context_node_idxes)
            # Reset node idx to 0 -> num_nodes - 1 (same reason as above).
            context_G, context_node_map = reset_idxes(context_G)
            context_data = nx_to_graph_data_obj_simple(context_G)
            data.x_context = context_data.x
            data.edge_attr_context = context_data.edge_attr
            data.edge_index_context = context_data.edge_index
        # Get indices of overlapping nodes between substruct and context,
        # WRT context ordering
        context_substruct_overlap_idxes = list(
            set(context_node_idxes).intersection(set(substruct_node_idxes))
        )
        if len(context_substruct_overlap_idxes) > 0:
            # Convert the overlap node idxes from the original graph node
            # ordering to the new context node ordering.
            context_substruct_overlap_idxes_reorder = [
                context_node_map[old_idx] for old_idx in context_substruct_overlap_idxes
            ]
            data.overlap_context_substruct_idx = torch.tensor(
                context_substruct_overlap_idxes_reorder
            )
        return data
    # ### For debugging ###
    # if len(substruct_node_idxes) > 0:
    #     substruct_mol = graph_data_obj_to_mol_simple(data.x_substruct,
    #                                                  data.edge_index_substruct,
    #                                                  data.edge_attr_substruct)
    #     print(AllChem.MolToSmiles(substruct_mol))
    # if len(context_node_idxes) > 0:
    #     context_mol = graph_data_obj_to_mol_simple(data.x_context,
    #                                                data.edge_index_context,
    #                                                data.edge_attr_context)
    #     print(AllChem.MolToSmiles(context_mol))
    #
    # print(list(context_node_idxes))
    # print(list(substruct_node_idxes))
    # print(context_substruct_overlap_idxes)
    # ### End debugging ###
    def __repr__(self):
        return "{}(k={},l1={}, l2={})".format(
            self.__class__.__name__, self.k, self.l1, self.l2
        )
def reset_idxes(G):
    """
    Relabels the nodes of G to consecutive integers 0 .. num_nodes - 1.
    :param G: networkx graph
    :return: (new_G, mapping) — a relabelled copy of G, and the dict mapping
        each original node index to its new index.
    """
    mapping = {old_idx: new_idx for new_idx, old_idx in enumerate(G.nodes())}
    new_G = nx.relabel_nodes(G, mapping, copy=True)
    return new_G, mapping
# TODO(Bowen): more unittests
class MaskAtom:
    def __init__(self, num_atom_features, num_edge_type, mask_rate, mask_edge=True):
        """
        Randomly masks atoms, and optionally masks edges connecting to them.
        A masked atom has its feature vector replaced by zeros; a masked edge
        gets edge attribute [num_edge_type, 0].
        :param num_atom_features: length of an atom feature vector (masked
            atoms are set to a zero vector of this length)
        :param num_edge_type: edge type index used to mark masked edges
        :param mask_rate: % of atoms to be masked
        :param mask_edge: If True, also mask the edges that connect to the
        masked atoms
        """
        self.num_atom_features = num_atom_features
        self.num_edge_type = num_edge_type
        self.mask_rate = mask_rate
        self.mask_edge = mask_edge
    def __call__(self, data, masked_atom_indices=None):
        """
        :param data: pytorch geometric data object. Assume that the edge
        ordering is the default pytorch geometric ordering, where the two
        directions of a single edge occur in pairs.
        Eg. data.edge_index = tensor([[0, 1, 1, 2, 2, 3],
                                     [1, 0, 2, 1, 3, 2]])
        :param masked_atom_indices: If None, then randomly samples num_atoms
        * mask rate number of atom indices
        Otherwise a list of atom idx that sets the atoms to be masked (for
        debugging only)
        :return: the data object. Creates new attributes in original data object:
        data.mask_node_label
        data.masked_atom_indices
        data.mask_edge_label (only when mask_edge)
        data.connected_edge_indices (only when mask_edge)
        """
        if masked_atom_indices is None:
            # sample x distinct atoms to be masked, based on mask rate. But
            # will sample at least 1 atom
            num_atoms = data.x.size()[0]
            sample_size = int(round(num_atoms * self.mask_rate))
            if sample_size == 0:
                sample_size = 1
            masked_atom_indices = random.sample(range(num_atoms), sample_size)
        # create mask node label by copying atom feature of mask atom
        mask_node_labels_list = []
        for atom_idx in masked_atom_indices:
            mask_node_labels_list.append(data.x[atom_idx].view(1, -1))
        data.mask_node_label = torch.cat(mask_node_labels_list, dim=0)
        data.masked_atom_indices = torch.tensor(masked_atom_indices)
        # modify the original node feature of the masked node (zero vector)
        for atom_idx in masked_atom_indices:
            data.x[atom_idx] = torch.tensor([0] * self.num_atom_features)
        if self.mask_edge:
            # collect indices of all edges incident to a masked atom
            connected_edge_indices = []
            for bond_idx, (u, v) in enumerate(data.edge_index.cpu().numpy().T):
                for atom_idx in masked_atom_indices:
                    if (
                        atom_idx in set((u, v))
                        and bond_idx not in connected_edge_indices
                    ):
                        connected_edge_indices.append(bond_idx)
            if len(connected_edge_indices) > 0:
                # create mask edge labels by copying bond features of the bonds
                # connected to the mask atoms
                mask_edge_labels_list = []
                for bond_idx in connected_edge_indices[::2]:  # because the
                    # edge ordering is such that two directions of a single
                    # edge occur in pairs, so to get the unique undirected
                    # edge indices, we take every 2nd edge index from list
                    mask_edge_labels_list.append(data.edge_attr[bond_idx].view(1, -1))
                data.mask_edge_label = torch.cat(mask_edge_labels_list, dim=0)
                # modify the original bond features of the bonds connected to the mask
                # atoms
                for bond_idx in connected_edge_indices:
                    data.edge_attr[bond_idx] = torch.tensor([self.num_edge_type, 0])
                data.connected_edge_indices = torch.tensor(connected_edge_indices[::2])
            else:
                # no incident edges: emit empty (0, 2) label tensors so the
                # attributes always exist when mask_edge is True
                data.mask_edge_label = torch.empty((0, 2)).to(torch.int64)
                data.connected_edge_indices = torch.tensor(connected_edge_indices).to(
                    torch.int64
                )
        # data.x = data.x[2:]
        return data
    def __repr__(self):
        reprs = "{}(num_atom_features={}, num_edge_type={}, mask_rate={}, mask_edge={})"
        return reprs.format(
            self.__class__.__name__,
            self.num_atom_features,
            self.num_edge_type,
            self.mask_rate,
            self.mask_edge,
        )
class ONEHOT_ContextPair(object):
    # Same (substructure, context) extraction as ExtractSubstructureContextPair,
    # but additionally one-hot encodes the node features of both subgraphs.
    # Class-level codebook: list of admissible values per feature column.
    ONEHOTENCODING_CODEBOOKS = {
        "atom_type": list(range(119)),
        "degree": list(range(11)),
        "formal_charge": list(range(11)),
        "hybridization_type": list(range(7)),
        "aromatic": [0, 1],
        "chirality_type": [0, 1, 2, 3],
    }
    def __init__(self, dataset, k, l1, l2):
        # :param dataset: dataset whose data.x matrices would be scanned to
        #     rebuild the codebooks (only if the class codebook were emptied)
        # :param k: hop radius of the substructure
        # :param l1: inner hop radius of the context ring
        # :param l2: outer hop radius of the context ring
        self.dataset = dataset
        self.k = k
        self.l1 = l1
        self.l2 = l2
        # for the special case of 0, addresses the quirk with
        # single_source_shortest_path_length
        if self.k == 0:
            self.k = -1
        if self.l1 == 0:
            self.l1 = -1
        if self.l2 == 0:
            self.l2 = -1
        self.FEATURE_NAMES = [
            "atom_type",
            "degree",
            "formal_charge",
            "hybridization_type",
            "aromatic",
            "chirality_type",
        ]
        self.ONEHOTENCODING = [0, 1, 2, 3, 4, 5]
    def get_CODEBOOKS(self):
        # NOTE(review): the class attribute above is non-empty, so this early
        # return is always taken; the regeneration below only runs if the
        # codebook is cleared explicitly — presumably intentional, confirm.
        if self.ONEHOTENCODING_CODEBOOKS:
            # print("ONEHOTENCODING_CODEBOOKS is available already, do not need to
            # regenerate ONEHOTENCODING_CODEBOOKS")
            # print(ONEHOTENCODING_CODEBOOKS)
            return
        # print(f"generating ONEHOTENCODING_CODEBOOKS......")
        features_all = [data.x.numpy() for data in self.dataset]
        features = np.vstack(features_all)
        node_attributes_cnt = {}
        # Count observed values per feature column across the whole dataset.
        for j, col in enumerate(zip(*features)):
            node_attributes_cnt[self.FEATURE_NAMES[j]] = collections.Counter(col)
        self.ONEHOTENCODING_CODEBOOKS.update(
            {
                feature_name: sorted(node_attributes_cnt[feature_name].keys())
                for feature_name in self.FEATURE_NAMES
            }
        )
    def get_onehot_features(self, features):
        # One-hot encode a (num_nodes, num_features) integer matrix; each row
        # becomes the concatenation of one one-hot segment per feature column.
        feature_one_hot = []
        # print(f'input features{features}')
        for row in features.tolist():
            this_row = []
            for j, feature_val_before_onehot in enumerate(row):
                onehot_code = self.ONEHOTENCODING_CODEBOOKS[self.FEATURE_NAMES[j]]
                onehot_val = [0.0] * len(onehot_code)
                assert feature_val_before_onehot in onehot_code
                onehot_val[onehot_code.index(feature_val_before_onehot)] = 1.0
                this_row += onehot_val
            feature_one_hot.append(this_row)
        return torch.Tensor(feature_one_hot)
    def __call__(self, data, root_idx=None):
        # Extract substructure and context subgraphs (see
        # ExtractSubstructureContextPair), then one-hot encode their features.
        self.get_CODEBOOKS()
        # print(f'before onehot data {data.x.numpy()}')
        num_atoms = data.x.size(0)
        if root_idx is None:
            root_idx = random.sample(range(num_atoms), 1)[0]
        G = graph_data_obj_to_nx_simple(data)  # same ordering as input data obj
        # Get k-hop subgraph rooted at specified atom idx
        substruct_node_idxes = nx.single_source_shortest_path_length(
            G, root_idx, self.k
        ).keys()
        if len(substruct_node_idxes) > 0:
            substruct_G = G.subgraph(substruct_node_idxes)
            # Reset node idx to 0 -> num_nodes - 1; otherwise the data obj is
            # invalid, since node indices in a data obj must start at 0.
            substruct_G, substruct_node_map = reset_idxes(substruct_G)
            substruct_data = nx_to_graph_data_obj_simple(substruct_G)
            data.x_substruct = substruct_data.x
            data.edge_attr_substruct = substruct_data.edge_attr
            data.edge_index_substruct = substruct_data.edge_index
            # Convert center idx from original ordering to substruct ordering.
            data.center_substruct_idx = torch.tensor(
                [substruct_node_map[root_idx]]
            )
            data.x_substruct = self.get_onehot_features(data.x_substruct.numpy())
        # Get subgraphs that is between l1 and l2 hops away from the root node
        l1_node_idxes = nx.single_source_shortest_path_length(
            G, root_idx, self.l1
        ).keys()
        l2_node_idxes = nx.single_source_shortest_path_length(
            G, root_idx, self.l2
        ).keys()
        context_node_idxes = set(l1_node_idxes).symmetric_difference(set(l2_node_idxes))
        if len(context_node_idxes) > 0:
            context_G = G.subgraph(context_node_idxes)
            # Reset node idx to 0 -> num_nodes - 1 (same reason as above).
            context_G, context_node_map = reset_idxes(context_G)
            context_data = nx_to_graph_data_obj_simple(context_G)
            data.x_context = context_data.x
            data.edge_attr_context = context_data.edge_attr
            data.edge_index_context = context_data.edge_index
            data.x_context = self.get_onehot_features(data.x_context.numpy())
        # Get indices of overlapping nodes between substruct and context,
        # WRT context ordering
        context_substruct_overlap_idxes = list(
            set(context_node_idxes).intersection(set(substruct_node_idxes))
        )
        if len(context_substruct_overlap_idxes) > 0:
            # Convert overlap idxes from original ordering to context ordering.
            context_substruct_overlap_idxes_reorder = [
                context_node_map[old_idx] for old_idx in context_substruct_overlap_idxes
            ]
            data.overlap_context_substruct_idx = torch.tensor(
                context_substruct_overlap_idxes_reorder
            )
        # print(f'after onehot data{onehot_features.size()}')
        # print()
        # print ( data )
        return data
    def __repr__(self):
        return "{}(k={},l1={}, l2={})".format(
            self.__class__.__name__, self.k, self.l1, self.l2
        )
    # def __repr__(self):
    #     return f'{self.__class__.__name__}'
class ONEHOT_ENCODING(object):
    """Transform that one-hot encodes the node feature matrix of a data object."""

    # Class-level codebook: list of admissible values per feature column.
    ONEHOTENCODING_CODEBOOKS = {
        "atom_type": list(range(119)),
        "degree": list(range(11)),
        "formal_charge": list(range(11)),
        "hybridization_type": list(range(7)),
        "aromatic": [0, 1],
        "chirality_type": [0, 1, 2, 3],
    }

    def __init__(self, dataset):
        """
        :param dataset: dataset whose data.x matrices are scanned to rebuild
            the codebooks (only if the class codebook has been emptied).
        """
        self.dataset = dataset
        self.FEATURE_NAMES = [
            "atom_type",
            "degree",
            "formal_charge",
            "hybridization_type",
            "aromatic",
            "chirality_type",
        ]
        self.ONEHOTENCODING = [0, 1, 2, 3, 4, 5]

    def get_CODEBOOKS(self):
        # The class-level codebook above is non-empty, so this early return is
        # normally taken; the scan below only runs if the codebook is cleared.
        if self.ONEHOTENCODING_CODEBOOKS:
            return
        features_all = [data.x.numpy() for data in self.dataset]
        features = np.vstack(features_all)
        node_attributes_cnt = {}
        # Count observed values per feature column across the whole dataset.
        for j, col in enumerate(zip(*features)):
            node_attributes_cnt[self.FEATURE_NAMES[j]] = collections.Counter(col)
        # BUG FIX: the original referenced the bare name ONEHOTENCODING_CODEBOOKS
        # here (NameError when this path runs); qualify with self., matching
        # ONEHOT_ContextPair.get_CODEBOOKS.
        self.ONEHOTENCODING_CODEBOOKS.update(
            {
                feature_name: sorted(node_attributes_cnt[feature_name].keys())
                for feature_name in self.FEATURE_NAMES
            }
        )

    def get_onehot_features(self, features):
        """One-hot encode a (num_nodes, num_features) integer feature matrix.

        Each row becomes the concatenation of one one-hot segment per feature
        column; asserts every value is present in its codebook.
        :return: torch.Tensor of shape (num_nodes, total codebook size).
        """
        # NOTE: the method name was restored here; the call site in __call__
        # requires it to be get_onehot_features.
        feature_one_hot = []
        for row in features.tolist():
            this_row = []
            for j, feature_val_before_onehot in enumerate(row):
                onehot_code = self.ONEHOTENCODING_CODEBOOKS[self.FEATURE_NAMES[j]]
                onehot_val = [0.0] * len(onehot_code)
                assert feature_val_before_onehot in onehot_code
                onehot_val[onehot_code.index(feature_val_before_onehot)] = 1.0
                this_row += onehot_val
            feature_one_hot.append(this_row)
        return torch.Tensor(feature_one_hot)

    def __call__(self, data):
        """Replace data.x with its one-hot encoded form and return data."""
        self.get_CODEBOOKS()
        onehot_features = self.get_onehot_features(data.x.numpy())
        data.x = onehot_features
        return data

    def __repr__(self):
        return f'{self.__class__.__name__}'
if __name__ == "__main__":
    # Smoke test: sample negative edges for the first molecule in Tox21.
    transform = NegativeEdge()
    dataset = MoleculeDataset("dataset/tox21", dataset="tox21")
    transform(dataset[0])
"""
# TODO(Bowen): more unit tests
# test ExtractSubstructureContextPair
smiles = 'C#Cc1c(O)c(Cl)cc(/C=C/N)c1S'
m = AllChem.MolFromSmiles(smiles)
data = mol_to_graph_data_obj_simple(m)
root_idx = 13
# 0 hops: no substructure or context. We just test the absence of x attr
transform = ExtractSubstructureContextPair(0, 0, 0)
transform(data, root_idx)
assert not hasattr(data, 'x_substruct')
assert not hasattr(data, 'x_context')
# k > n_nodes, l1 = 0 and l2 > n_nodes: substructure and context same as
# molecule
data = mol_to_graph_data_obj_simple(m)
transform = ExtractSubstructureContextPair(100000, 0, 100000)
transform(data, root_idx)
substruct_mol = graph_data_obj_to_mol_simple(data.x_substruct,
data.edge_index_substruct,
data.edge_attr_substruct)
context_mol = graph_data_obj_to_mol_simple(data.x_context,
data.edge_index_context,
data.edge_attr_context)
assert check_same_molecules(AllChem.MolToSmiles(substruct_mol),
AllChem.MolToSmiles(context_mol))
transform = ExtractSubstructureContextPair(1, 1, 10000)
transform(data, root_idx)
# increase k from 0, and increase l1 from 1 while keeping l2 > n_nodes: the
# total number of atoms should be n_atoms
for i in range(len(m.GetAtoms())):
data = mol_to_graph_data_obj_simple(m)
print('i: {}'.format(i))
transform = ExtractSubstructureContextPair(i, i, 100000)
transform(data, root_idx)
if hasattr(data, 'x_substruct'):
n_substruct_atoms = data.x_substruct.size()[0]
else:
n_substruct_atoms = 0
print('n_substruct_atoms: {}'.format(n_substruct_atoms))
if hasattr(data, 'x_context'):
n_context_atoms = data.x_context.size()[0]
else:
n_context_atoms = 0
print('n_context_atoms: {}'.format(n_context_atoms))
assert n_substruct_atoms + n_context_atoms == len(m.GetAtoms())
# l1 < k and l2 >= k, so an overlap exists between context and substruct
data = mol_to_graph_data_obj_simple(m)
transform = ExtractSubstructureContextPair(2, 1, 3)
transform(data, root_idx)
assert hasattr(data, 'center_substruct_idx')
# check correct overlap atoms between context and substruct
# m = AllChem.MolFromSmiles('COC1=CC2=C(NC(=N2)[S@@](=O)CC2=NC=C(C)C(OC)=C2C)C=C1')
# data = mol_to_graph_data_obj_simple(m)
# root_idx = 9
# k = 1
# l1 = 1
# l2 = 2
# transform = ExtractSubstructureContextPaidata =
# mol_to_graph_data_obj_simple(m)r(k, l1, l2)
# transform(data, root_idx)
pass
# TODO(Bowen): more unit tests
# test MaskAtom
from loader import mol_to_graph_data_obj_simple, \
graph_data_obj_to_mol_simple
smiles = 'C#Cc1c(O)c(Cl)cc(/C=C/N)c1S'
m = AllChem.MolFromSmiles(smiles)
original_data = mol_to_graph_data_obj_simple(m)
num_atom_type = 118
num_edge_type = 5
# manually specify masked atom indices, don't mask edge
masked_atom_indices = [13, 12]
data = mol_to_graph_data_obj_simple(m)
transform = MaskAtom(num_atom_type, num_edge_type, 0.1, mask_edge=False)
transform(data, masked_atom_indices)
assert data.mask_node_label.size() == torch.Size(
(len(masked_atom_indices), 2))
assert not hasattr(data, 'mask_edge_label')
# check that the correct rows in x have been modified to be mask atom type
assert (data.x[masked_atom_indices] == torch.tensor(([num_atom_type,
0]))).all()
assert (data.mask_node_label == original_data.x[masked_atom_indices]).all()
# manually specify masked atom indices, mask edge
masked_atom_indices = [13, 12]
data = mol_to_graph_data_obj_simple(m)
transform = MaskAtom(num_atom_type, num_edge_type, 0.1, mask_edge=True)
transform(data, masked_atom_indices)
assert data.mask_node_label.size() == torch.Size(
(len(masked_atom_indices), 2))
# check that the correct rows in x have been modified to be mask atom type
assert (data.x[masked_atom_indices] == torch.tensor(([num_atom_type,
0]))).all()
assert (data.mask_node_label == original_data.x[masked_atom_indices]).all()
# check that the correct rows in edge_attr have been modified to be mask edge
# type, and the mask_edge_label are correct
rdkit_bonds = []
for atom_idx in masked_atom_indices:
bond_indices = list(AllChem.FindAtomEnvironmentOfRadiusN(m, radius=1,
rootedAtAtom=atom_idx))
for bond_idx in bond_indices:
rdkit_bonds.append(
(m.GetBonds()[bond_idx].GetBeginAtomIdx(), m.GetBonds()[
bond_idx].GetEndAtomIdx()))
rdkit_bonds.append(
(m.GetBonds()[bond_idx].GetEndAtomIdx(), m.GetBonds()[
bond_idx].GetBeginAtomIdx()))
rdkit_bonds = set(rdkit_bonds)
connected_edge_indices = []
for i in range(data.edge_index.size()[1]):
if tuple(data.edge_index.numpy().T[i].tolist()) in rdkit_bonds:
connected_edge_indices.append(i)
assert (data.edge_attr[connected_edge_indices] ==
torch.tensor(([num_edge_type, 0]))).all()
assert (data.mask_edge_label == original_data.edge_attr[
connected_edge_indices[::2]]).all() # data.mask_edge_label contains
# the unique edges (ignoring direction). The data obj has edge ordering
# such that two directions of a single edge occur in pairs, so to get the
# unique undirected edge indices, we take every 2nd edge index from list
"""
|
get_onehot_features
|
joswig_dijkstra.py
|
#Attempt to route using the Joswig Algorithm described here: https://arxiv.org/pdf/1904.01082.pdf
#using object oriented programming.
class Vertex:
    """A graph vertex for the recursive Dijkstra-style search below.

    Class-level registries track every vertex created so that shortestpath
    can scan the whole graph for the next closest unknown vertex.
    """

    num_vert = 0   # total number of vertices created
    vertices = []  # registry of all vertices

    def __init__(self, lab=""):
        self.label = lab
        self.adj = []       # adjacency list (neighbouring Vertex objects)
        self.weight = []    # weights parallel to self.adj
        self.known = False  # shortest path to self is known
        self.pv = None      # previous node in shortest path tree
        self.dv = 0         # current distance on best known path
        self.help = False   # True once dv has been set by the search
        Vertex.num_vert += 1
        Vertex.vertices.append(self)

    # links self to vert with weight cost (no duplicate edges)
    def link(self, vert, cost):
        if (vert in self.adj) == False:
            self.adj.append(vert)
            self.weight.append(cost)

    # updates the weight of an existing edge self -> vert; no-op if absent.
    # (Name restored: Tree.editlink delegates to this method.)
    def editlink(self, vert, cost):
        if (vert in self.adj) == True:
            self.weight[self.adj.index(vert)] = cost

    # resets the class-level vertex registry
    def clear(self):
        Vertex.num_vert = 0
        Vertex.vertices = []

    # prints each neighbour's label and distance (plus predecessor if lab)
    def printadj(self, lab):
        for v in self.adj:
            result = v.label, v.dv
            if lab == True:
                result = v.label, v.pv.label, v.dv
            print(result)

    # reset vertex boolean/search state on every registered vertex
    # must be called before each shortestpath run
    def vert_false(self):
        for v in Vertex.vertices:
            v.known = False
            v.dv = 0
            v.pv = None
            v.help = False

    def shortestpath(self):
        """Dijkstra relaxation rooted at self (recursive).

        A weight of -1 is treated as 0. Updates dv/pv on each neighbour,
        then recurses into the closest not-yet-known vertex.
        """
        num_edge = 0
        if self.adj != []:
            num_edge = len(self.adj)
        self.known = True
        if num_edge > 0:
            for i in range(0, num_edge):
                weight = self.weight[i]
                if weight == -1:
                    weight = 0
                # relax: first touch, or a strictly shorter path found
                if (self.adj[i].help == False) | (self.adj[i].dv > weight + self.dv):
                    self.adj[i].dv = weight + self.dv
                    self.adj[i].pv = self
                    self.adj[i].help = True
        # pick the closest reached-but-unknown vertex to recurse into
        min = -1
        next = None
        done = True
        for v in Vertex.vertices:
            if v.known == False:
                done = False
                if v.help == True:
                    if (min == -1) | (min > v.dv):
                        min = v.dv
                        next = v
        if done == False:
            if next != None:
                next.shortestpath()
class Tree:
    """Wrapper that owns a collection of Vertex objects addressed by 1-based index."""

    num_trees = 0  # total number of trees created
    trees = []     # registry of all trees

    def __init__(self, numvert):
        # Create numvert vertices labelled "v1" .. "v<numvert>".
        self.vertices = [Vertex("v" + str(i + 1)) for i in range(numvert)]
        Tree.num_trees += 1
        Tree.trees.append(self)

    def link(self, init, final, weight):
        """Add a directed edge init -> final (1-based indices) with the given weight."""
        numvert = len(self.vertices)
        init, final = init - 1, final - 1
        if (init < numvert) & (final < numvert):
            self.vertices[init].link(self.vertices[final], weight)

    def editlink(self, init, final, newweight):
        """Change the weight of an existing edge init -> final (1-based indices)."""
        numvert = len(self.vertices)
        init, final = init - 1, final - 1
        if (init < numvert) & (final < numvert):
            self.vertices[init].editlink(self.vertices[final], newweight)

    def shortestpath(self, vert):
        """Run the search from 1-based index vert; return [label, pv label, dv] per neighbour."""
        root = self.vertices[vert - 1]
        root.shortestpath()
        return [[x.label, x.pv.label, x.dv] for x in root.adj]

    def add_vertex(self, vert):
        """Append an externally created Vertex to this tree."""
        self.vertices.append(vert)

    def vert_false(self):
        """Reset search state on all vertices (delegates to Vertex.vert_false)."""
        self.vertices[0].vert_false()

    def printadj(self, vert, lab):
        """Print the adjacency of the 1-based vertex index."""
        self.vertices[vert - 1].printadj(lab)
|
editlink
|
timepicker.tpl.js
|
/**
* angular-strap
* @version v2.3.6 - 2015-11-14
* @link http://mgcrea.github.io/angular-strap
* @author Olivier Louvignes <olivier@mg-crea.com> (https://github.com/mgcrea)
 * @license MIT License, http://www.opensource.org/licenses/MIT
 */
|
'use strict';

// Pre-populates $templateCache with the (minified) timepicker dropdown template
// so the directive can render without an extra HTTP request.
// NOTE(review): the template string below appears line-wrapped in this copy;
// in the upstream angular-strap source it is a single line — verify before shipping.
angular.module('mgcrea.ngStrap.timepicker').run([ '$templateCache', function($templateCache) {
  $templateCache.put('timepicker/timepicker.tpl.html', '<div class="dropdown-menu timepicker" style="min-width: 0px;width: auto"><table height="100%"><thead><tr class="text-center"><th><button tabindex="-1" type="button" class="btn btn-default pull-left" ng-click="$arrowAction(-1, 0)"><i class="{{ $iconUp }}"></i></button></th><th> </th><th><button tabindex="-1" type="button" class="btn btn-default pull-left" ng-click="$arrowAction(-1, 1)"><i class="{{ $iconUp }}"></i></button></th><th> </th><th><button ng-if="showSeconds" tabindex="-1" type="button" class="btn btn-default pull-left" ng-click="$arrowAction(-1, 2)"><i class="{{ $iconUp }}"></i></button></th></tr></thead><tbody><tr ng-repeat="(i, row) in rows"><td class="text-center"><button tabindex="-1" style="width: 100%" type="button" class="btn btn-default" ng-class="{\'btn-primary\': row[0].selected}" ng-click="$select(row[0].date, 0)" ng-disabled="row[0].disabled"><span ng-class="{\'text-muted\': row[0].muted}" ng-bind="row[0].label"></span></button></td><td><span ng-bind="i == midIndex ? timeSeparator : \' \'"></span></td><td class="text-center"><button tabindex="-1" ng-if="row[1].date" style="width: 100%" type="button" class="btn btn-default" ng-class="{\'btn-primary\': row[1].selected}" ng-click="$select(row[1].date, 1)" ng-disabled="row[1].disabled"><span ng-class="{\'text-muted\': row[1].muted}" ng-bind="row[1].label"></span></button></td><td><span ng-bind="i == midIndex ? 
timeSeparator : \' \'"></span></td><td class="text-center"><button tabindex="-1" ng-if="showSeconds && row[2].date" style="width: 100%" type="button" class="btn btn-default" ng-class="{\'btn-primary\': row[2].selected}" ng-click="$select(row[2].date, 2)" ng-disabled="row[2].disabled"><span ng-class="{\'text-muted\': row[2].muted}" ng-bind="row[2].label"></span></button></td><td ng-if="showAM"> </td><td ng-if="showAM"><button tabindex="-1" ng-show="i == midIndex - !isAM * 1" style="width: 100%" type="button" ng-class="{\'btn-primary\': !!isAM}" class="btn btn-default" ng-click="$switchMeridian()" ng-disabled="el.disabled">AM</button> <button tabindex="-1" ng-show="i == midIndex + 1 - !isAM * 1" style="width: 100%" type="button" ng-class="{\'btn-primary\': !isAM}" class="btn btn-default" ng-click="$switchMeridian()" ng-disabled="el.disabled">PM</button></td></tr></tbody><tfoot><tr class="text-center"><th><button tabindex="-1" type="button" class="btn btn-default pull-left" ng-click="$arrowAction(1, 0)"><i class="{{ $iconDown }}"></i></button></th><th> </th><th><button tabindex="-1" type="button" class="btn btn-default pull-left" ng-click="$arrowAction(1, 1)"><i class="{{ $iconDown }}"></i></button></th><th> </th><th><button ng-if="showSeconds" tabindex="-1" type="button" class="btn btn-default pull-left" ng-click="$arrowAction(1, 2)"><i class="{{ $iconDown }}"></i></button></th></tr></tfoot></table></div>');
} ]);
|
*/
|
securityPartnerProvider.go
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20200801
import (
"context"
"reflect"
"github.com/pkg/errors"
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
// SecurityPartnerProvider is a Security Partner Provider resource
// (network API version 2020-08-01). All fields are output properties
// resolved by the Pulumi engine after creation or lookup.
type SecurityPartnerProvider struct {
	pulumi.CustomResourceState

	// The connection status with the Security Partner Provider.
	ConnectionStatus pulumi.StringOutput `pulumi:"connectionStatus"`
	// A unique read-only string that changes whenever the resource is updated.
	Etag pulumi.StringOutput `pulumi:"etag"`
	// Resource location.
	Location pulumi.StringPtrOutput `pulumi:"location"`
	// Resource name.
	Name pulumi.StringOutput `pulumi:"name"`
	// The provisioning state of the Security Partner Provider resource.
	ProvisioningState pulumi.StringOutput `pulumi:"provisioningState"`
	// The security provider name.
	SecurityProviderName pulumi.StringPtrOutput `pulumi:"securityProviderName"`
	// Resource tags.
	Tags pulumi.StringMapOutput `pulumi:"tags"`
	// Resource type.
	Type pulumi.StringOutput `pulumi:"type"`
	// The virtualHub to which the Security Partner Provider belongs.
	VirtualHub SubResourceResponsePtrOutput `pulumi:"virtualHub"`
}
// NewSecurityPartnerProvider registers a new resource with the given unique name, arguments, and options.
func NewSecurityPartnerProvider(ctx *pulumi.Context,
name string, args *SecurityPartnerProviderArgs, opts ...pulumi.ResourceOption) (*SecurityPartnerProvider, error)
|
// GetSecurityPartnerProvider gets an existing SecurityPartnerProvider resource's state with the given name, ID, and optional
// state properties that are used to uniquely qualify the lookup (nil if not required).
func GetSecurityPartnerProvider(ctx *pulumi.Context,
	name string, id pulumi.IDInput, state *SecurityPartnerProviderState, opts ...pulumi.ResourceOption) (*SecurityPartnerProvider, error) {
	resource := &SecurityPartnerProvider{}
	if err := ctx.ReadResource("azure-native:network/v20200801:SecurityPartnerProvider", name, id, state, resource, opts...); err != nil {
		return nil, err
	}
	return resource, nil
}
// Input properties used for looking up and filtering SecurityPartnerProvider resources.
type securityPartnerProviderState struct {
	// The connection status with the Security Partner Provider.
	ConnectionStatus *string `pulumi:"connectionStatus"`
	// A unique read-only string that changes whenever the resource is updated.
	Etag *string `pulumi:"etag"`
	// Resource location.
	Location *string `pulumi:"location"`
	// Resource name.
	Name *string `pulumi:"name"`
	// The provisioning state of the Security Partner Provider resource.
	ProvisioningState *string `pulumi:"provisioningState"`
	// The security provider name.
	SecurityProviderName *string `pulumi:"securityProviderName"`
	// Resource tags.
	Tags map[string]string `pulumi:"tags"`
	// Resource type.
	Type *string `pulumi:"type"`
	// The virtualHub to which the Security Partner Provider belongs.
	VirtualHub *SubResourceResponse `pulumi:"virtualHub"`
}

// SecurityPartnerProviderState holds the same lookup/filter properties as
// securityPartnerProviderState, expressed as Pulumi input types.
type SecurityPartnerProviderState struct {
	// The connection status with the Security Partner Provider.
	ConnectionStatus pulumi.StringPtrInput
	// A unique read-only string that changes whenever the resource is updated.
	Etag pulumi.StringPtrInput
	// Resource location.
	Location pulumi.StringPtrInput
	// Resource name.
	Name pulumi.StringPtrInput
	// The provisioning state of the Security Partner Provider resource.
	ProvisioningState pulumi.StringPtrInput
	// The security provider name.
	SecurityProviderName pulumi.StringPtrInput
	// Resource tags.
	Tags pulumi.StringMapInput
	// Resource type.
	Type pulumi.StringPtrInput
	// The virtualHub to which the Security Partner Provider belongs.
	VirtualHub SubResourceResponsePtrInput
}

// ElementType maps the input type to its plain (non-input) struct form for the
// Pulumi runtime.
func (SecurityPartnerProviderState) ElementType() reflect.Type {
	return reflect.TypeOf((*securityPartnerProviderState)(nil)).Elem()
}
// securityPartnerProviderArgs is the plain-value form of the constructor arguments.
type securityPartnerProviderArgs struct {
	// Resource ID.
	Id *string `pulumi:"id"`
	// Resource location.
	Location *string `pulumi:"location"`
	// The name of the resource group.
	ResourceGroupName string `pulumi:"resourceGroupName"`
	// The name of the Security Partner Provider.
	SecurityPartnerProviderName *string `pulumi:"securityPartnerProviderName"`
	// The security provider name.
	SecurityProviderName *string `pulumi:"securityProviderName"`
	// Resource tags.
	Tags map[string]string `pulumi:"tags"`
	// The virtualHub to which the Security Partner Provider belongs.
	VirtualHub *SubResource `pulumi:"virtualHub"`
}

// The set of arguments for constructing a SecurityPartnerProvider resource.
type SecurityPartnerProviderArgs struct {
	// Resource ID.
	Id pulumi.StringPtrInput
	// Resource location.
	Location pulumi.StringPtrInput
	// The name of the resource group.
	ResourceGroupName pulumi.StringInput
	// The name of the Security Partner Provider.
	SecurityPartnerProviderName pulumi.StringPtrInput
	// The security provider name.
	SecurityProviderName pulumi.StringPtrInput
	// Resource tags.
	Tags pulumi.StringMapInput
	// The virtualHub to which the Security Partner Provider belongs.
	VirtualHub SubResourcePtrInput
}

// ElementType maps the input type to its plain (non-input) struct form for the
// Pulumi runtime.
func (SecurityPartnerProviderArgs) ElementType() reflect.Type {
	return reflect.TypeOf((*securityPartnerProviderArgs)(nil)).Elem()
}
// SecurityPartnerProviderInput is implemented by values that can be converted
// to a SecurityPartnerProviderOutput (generated SDK plumbing).
type SecurityPartnerProviderInput interface {
	pulumi.Input

	ToSecurityPartnerProviderOutput() SecurityPartnerProviderOutput
	ToSecurityPartnerProviderOutputWithContext(ctx context.Context) SecurityPartnerProviderOutput
}

func (*SecurityPartnerProvider) ElementType() reflect.Type {
	return reflect.TypeOf((*SecurityPartnerProvider)(nil))
}

func (i *SecurityPartnerProvider) ToSecurityPartnerProviderOutput() SecurityPartnerProviderOutput {
	return i.ToSecurityPartnerProviderOutputWithContext(context.Background())
}

func (i *SecurityPartnerProvider) ToSecurityPartnerProviderOutputWithContext(ctx context.Context) SecurityPartnerProviderOutput {
	return pulumi.ToOutputWithContext(ctx, i).(SecurityPartnerProviderOutput)
}

// SecurityPartnerProviderOutput wraps a SecurityPartnerProvider as a pulumi Output.
type SecurityPartnerProviderOutput struct {
	*pulumi.OutputState
}

func (SecurityPartnerProviderOutput) ElementType() reflect.Type {
	return reflect.TypeOf((*SecurityPartnerProvider)(nil))
}

func (o SecurityPartnerProviderOutput) ToSecurityPartnerProviderOutput() SecurityPartnerProviderOutput {
	return o
}

func (o SecurityPartnerProviderOutput) ToSecurityPartnerProviderOutputWithContext(ctx context.Context) SecurityPartnerProviderOutput {
	return o
}

// init registers the output type with the pulumi runtime type registry.
func init() {
	pulumi.RegisterOutputType(SecurityPartnerProviderOutput{})
}
|
{
if args == nil {
return nil, errors.New("missing one or more required arguments")
}
if args.ResourceGroupName == nil {
return nil, errors.New("invalid value for required argument 'ResourceGroupName'")
}
aliases := pulumi.Aliases([]pulumi.Alias{
{
Type: pulumi.String("azure-nextgen:network/v20200801:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-native:network:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-nextgen:network:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-native:network/latest:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-nextgen:network/latest:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-native:network/v20200301:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200301:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-native:network/v20200401:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200401:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-native:network/v20200501:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200501:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-native:network/v20200601:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200601:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-native:network/v20200701:SecurityPartnerProvider"),
},
{
Type: pulumi.String("azure-nextgen:network/v20200701:SecurityPartnerProvider"),
},
})
opts = append(opts, aliases)
var resource SecurityPartnerProvider
err := ctx.RegisterResource("azure-native:network/v20200801:SecurityPartnerProvider", name, args, &resource, opts...)
if err != nil {
return nil, err
}
return &resource, nil
}
|
metric_runner.go
|
package mgr
import (
"errors"
"fmt"
"runtime/debug"
"strconv"
"strings"
"sync"
"sync/atomic"
"time"
"github.com/json-iterator/go"
"github.com/qiniu/log"
"github.com/qiniu/logkit/conf"
"github.com/qiniu/logkit/metric"
"github.com/qiniu/logkit/metric/curl"
"github.com/qiniu/logkit/reader"
"github.com/qiniu/logkit/sender"
"github.com/qiniu/logkit/transforms"
. "github.com/qiniu/logkit/utils/models"
)
const (
	// KeyMetricType is the config key selecting the collector type of a metric entry.
	KeyMetricType = "type"
)

const (
	// defaultCollectInterval is the fallback collection interval, in seconds.
	defaultCollectInterval = 3
)
// MetricConfig is one metric entry in the runner configuration.
type MetricConfig struct {
	// Collector type; must be registered in metric.Collectors.
	MetricType string `json:"type"`
	// Attribute name -> keep flag; attributes explicitly set to false get a
	// "discard" transformer attached (see NewMetricRunner).
	Attributes map[string]bool `json:"attributes"`
	// Collector-specific settings, synced into the collector instance.
	Config map[string]interface{} `json:"config"`
}
// MetricRunner periodically runs a set of metric collectors and forwards the
// collected data to the configured senders.
type MetricRunner struct {
	RunnerName string `json:"name"`

	envTag          string // env var spec merged into tags via MergeEnvTags (see Run)
	collectors      []metric.Collector
	senders         []sender.Sender
	transformers    map[string][]transforms.Transformer // per-metric discard transformers
	collectInterval time.Duration

	rs      *RunnerStatus // live status; guarded by rsMutex
	lastRs  *RunnerStatus // snapshot of rs taken by the previous Status() call
	rsMutex *sync.RWMutex

	meta     *reader.Meta
	lastSend time.Time
	stopped  int32 // set non-zero (atomically) by Stop()
	exitChan chan struct{}
}
// NewMetric instantiates the collector registered under type tp, or returns an
// error when no such collector exists.
func NewMetric(tp string) (metric.Collector, error) {
	factory, registered := metric.Collectors[tp]
	if !registered {
		return nil, fmt.Errorf("metric <%v> is not support now", tp)
	}
	return factory(), nil
}
// NewMetricRunner assembles a MetricRunner from its config: builds the
// collectors (syncing each one's config), derives per-metric "discard"
// transformers for attributes disabled in the config, creates the senders
// and restores persisted counters.
func NewMetricRunner(rc RunnerConfig, sr *sender.Registry) (runner *MetricRunner, err error) {
	if rc.CollectInterval <= 0 {
		rc.CollectInterval = defaultCollectInterval
	}
	interval := time.Duration(rc.CollectInterval) * time.Second
	cf := conf.MapConf{
		GlobalKeyName:  rc.RunnerName,
		KeyRunnerName:  rc.RunnerName,
		reader.KeyMode: reader.ModeMetrics,
	}
	if rc.ExtraInfo {
		cf[ExtraInfo] = Bool2String(rc.ExtraInfo)
	}
	meta, err := reader.NewMetaWithConf(cf)
	if err != nil {
		return nil, fmt.Errorf("Runner "+rc.RunnerName+" add failed, err is %v", err)
	}
	for i := range rc.SendersConfig {
		rc.SendersConfig[i][KeyRunnerName] = rc.RunnerName
	}
	collectors := make([]metric.Collector, 0)
	transformers := make(map[string][]transforms.Transformer)
	if len(rc.MetricConfig) == 0 {
		return nil, fmt.Errorf("Runner " + rc.RunnerName + " has zero metric, ignore it")
	}
	for _, m := range rc.MetricConfig {
		tp := m.MetricType
		c, err := NewMetric(tp)
		if err != nil {
			// Unknown collector types are skipped, not fatal.
			log.Errorf("%v ignore it...", err)
			err = nil
			continue
		}
		// sync config to ExtCollector
		ec, ok := c.(metric.ExtCollector)
		if ok {
			if err := ec.SyncConfig(m.Config); err != nil {
				return nil, fmt.Errorf("metric %v sync config error %v", tp, err)
			}
		} else {
			// sync config to buildin Collector (via JSON round-trip)
			configBytes, err := jsoniter.Marshal(m.Config)
			if err != nil {
				return nil, fmt.Errorf("metric %v marshal config error %v", tp, err)
			}
			err = jsoniter.Unmarshal(configBytes, c)
			if err != nil {
				return nil, fmt.Errorf("metric %v unmarshal config error %v", tp, err)
			}
		}
		collectors = append(collectors, c)
		// Attributes explicitly marked false in the config get a discard transformer.
		config := c.Config()
		metricName := c.Name()
		trans := make([]transforms.Transformer, 0)
		if attributes, ex := config[metric.AttributesString]; ex {
			if attrs, ok := attributes.([]KeyValue); ok {
				for _, attr := range attrs {
					val, exist := m.Attributes[attr.Key]
					if exist && !val {
						if m.MetricType == curl.TypeMetricHttp {
							var httpDataArr []curl.HttpDataReq
// NOTE(review): a fragment appears to be missing at the marker below — the
// `if _, ok := m.Config["http_datas"]; !ok { ... }` presence check found later
// in this dump likely belongs here. Confirm against the upstream source.
|
							// HTTP metrics: one discard transformer per configured request,
							// keyed "<attr>_<n>" (1-based).
							httpData, ok := m.Config["http_datas"].(string)
							if ok {
								err = jsoniter.Unmarshal([]byte(httpData), &httpDataArr)
								if err != nil {
									return nil, fmt.Errorf("metric %v unmarshal config error %v", curl.TypeMetricHttp, err)
								}
								length := len(httpDataArr)
								for i := 0; i < length; i++ {
									key := attr.Key + "_" + strconv.Itoa(i+1)
									DisTrans, err := createDiscardTransformer(key)
									if err != nil {
										return nil, fmt.Errorf("metric %v key %v, transform add failed, %v", tp, attr.Key, err)
									}
									trans = append(trans, DisTrans)
								}
							} else {
								return nil, fmt.Errorf("http_datas need to be string")
							}
						} else {
							DisTrans, err := createDiscardTransformer(attr.Key)
							if err != nil {
								return nil, fmt.Errorf("metric %v key %v, transform add failed, %v", tp, attr.Key, err)
							}
							trans = append(trans, DisTrans)
						}
					}
				}
			}
		}
		transformers[metricName] = trans
	}
	if len(collectors) < 1 {
		err = errors.New("no collectors were added")
		return
	}
	senders := make([]sender.Sender, 0)
	for _, senderConfig := range rc.SendersConfig {
		senderConfig[sender.KeyIsMetrics] = "true"
		senderConfig[sender.KeyPandoraTSDBTimeStamp] = metric.Timestamp
		if rc.ExtraInfo && senderConfig[sender.KeySenderType] == sender.TypePandora {
			// Extra info is already enabled globally; do not add it twice.
			senderConfig[sender.KeyPandoraExtraInfo] = "false"
		}
		s, err := sr.NewSender(senderConfig, meta.FtSaveLogPath())
		if err != nil {
			return nil, err
		}
		senders = append(senders, s)
	}
	runner = &MetricRunner{
		RunnerName: rc.RunnerName,
		exitChan:   make(chan struct{}),
		lastSend:   time.Now(), // time of the last send
		meta:       meta,
		rs: &RunnerStatus{
			ReaderStats:   StatsInfo{},
			SenderStats:   make(map[string]StatsInfo),
			lastState:     time.Now(),
			Name:          rc.RunnerName,
			RunningStatus: RunnerRunning,
		},
		lastRs: &RunnerStatus{
			ReaderStats:   StatsInfo{},
			SenderStats:   make(map[string]StatsInfo),
			lastState:     time.Now(),
			Name:          rc.RunnerName,
			RunningStatus: RunnerRunning,
		},
		rsMutex:         new(sync.RWMutex),
		collectInterval: interval,
		collectors:      collectors,
		transformers:    transformers,
		senders:         senders,
		envTag:          rc.EnvTag,
	}
	runner.StatusRestore()
	return
}
// Name returns the runner's configured name.
func (mr *MetricRunner) Name() string {
	return mr.RunnerName
}
// Run is the collection loop: every collectInterval it gathers data from all
// collectors, applies the per-metric discard transformers, prefixes field names
// with the metric name, tags the batch and pushes it to every sender. It
// returns once Stop() has raised the stopped flag.
func (r *MetricRunner) Run() {
	defer close(r.exitChan)
	defer func() {
		// recover when runner is stopped
		if atomic.LoadInt32(&r.stopped) <= 0 {
			return
		}
		if r := recover(); r != nil {
			log.Errorf("recover when runner is stopped\npanic: %v\nstack: %s", r, debug.Stack())
		}
	}()
	tags := r.meta.GetTags()
	tags = MergeEnvTags(r.envTag, tags)
	tags = MergeExtraInfoTags(r.meta, tags)
	for {
		if atomic.LoadInt32(&r.stopped) > 0 {
			log.Debugf("runner %v exited from run", r.RunnerName)
			// Handshake with Stop(): it waits on exitChan.
			r.exitChan <- struct{}{}
			return
		}
		// collect data
		dataCnt := 0
		datas := make([]Data, 0)
		tags[metric.Timestamp] = time.Now().Format(time.RFC3339Nano)
		for _, c := range r.collectors {
			metricName := c.Name()
			tmpdatas, err := c.Collect()
			if err != nil {
				log.Errorf("collecter <%v> collect data error: %v", c.Name(), err)
				continue
			}
			dataLen := len(tmpdatas)
			nameLen := len(metricName)
			if dataLen == 0 {
				log.Debugf("MetricRunner %v collect No data", c.Name())
				continue
			}
			tmpDatas := make([]Data, dataLen)
			for i, d := range tmpdatas {
				tmpDatas[i] = d
			}
			if trans, ok := r.transformers[metricName]; ok {
				for _, t := range trans {
					// Transform errors are logged but do not drop the batch.
					tmpDatas, err = t.Transform(tmpDatas)
					if err != nil {
						log.Error(err)
					}
				}
			}
			for _, metricData := range tmpDatas {
				if len(metricData) == 0 {
					continue
				}
				data := Data{}
				// Rename fields so they carry the metric prefix, e.g.
				// cpu_time_user --> cpu__time_user
				for m, d := range metricData {
					newName := m
					if strings.HasPrefix(m, metricName) {
						newName = metricName + "_" + m[nameLen:]
					}
					data[newName] = d
				}
				datas = append(datas, data)
				dataCnt++
			}
		}
		if len(datas) == 0 {
			log.Warnf("metrics collect no data")
			time.Sleep(r.collectInterval)
			continue
		}
		if len(tags) > 0 {
			datas = addTagsToData(tags, datas, r.Name())
		}
		r.rsMutex.Lock()
		r.rs.ReadDataCount += int64(dataCnt)
		r.rsMutex.Unlock()
		r.lastSend = time.Now()
		for _, s := range r.senders {
			// trySend returns false only when the runner was stopped mid-retry.
			if !r.trySend(s, datas, 3) {
				log.Errorf("failed to send metricData: << %v >>", datas)
				break
			}
		}
		time.Sleep(r.collectInterval)
	}
}
// trySend attempts to deliver datas through sender s, retrying up to times
// attempts (times <= 0 retries forever). It returns false only when the runner
// is stopped while retrying; otherwise true, whether the send eventually
// succeeded or the retries were exhausted.
func (r *MetricRunner) trySend(s sender.Sender, datas []Data, times int) bool {
	if len(datas) <= 0 {
		return true
	}
	// Initialize and read this sender's stats while holding the lock: the
	// previous check-then-insert mutated r.rs.SenderStats without rsMutex,
	// racing with Status()/StatusRestore() which guard the same map.
	r.rsMutex.Lock()
	if _, ok := r.rs.SenderStats[s.Name()]; !ok {
		r.rs.SenderStats[s.Name()] = StatsInfo{}
	}
	info := r.rs.SenderStats[s.Name()]
	r.rsMutex.Unlock()
	cnt := 1
	for {
		// Always attempt at least once; once the runner is stopped, stop retrying.
		if cnt > 1 && atomic.LoadInt32(&r.stopped) > 0 {
			return false
		}
		err := s.Send(datas)
		if se, ok := err.(*StatsError); ok {
			err = se.ErrorDetail
			if se.Ft {
				// Fault-tolerant senders report queue lag instead of per-batch counts.
				r.rsMutex.Lock()
				r.rs.Lag.Ftlags = se.FtQueueLag
				r.rsMutex.Unlock()
			} else {
				if cnt > 1 {
					// A retry that (partially) succeeded: reclassify previously
					// counted errors as successes.
					info.Errors -= se.Success
				} else {
					info.Errors += se.Errors
				}
				info.Success += se.Success
			}
		} else if err != nil {
			// Plain error: count the whole batch as failed, but only once.
			if cnt <= 1 {
				info.Errors += int64(len(datas))
			}
		} else {
			info.Success += int64(len(datas))
		}
		if err != nil {
			log.Error(err)
			time.Sleep(time.Second)
			if times <= 0 || cnt < times {
				cnt++
				continue
			}
			log.Errorf("retry send %v times, but still error %v, discard datas %v ... total %v lines", cnt, err, datas[0], len(datas))
		}
		break
	}
	r.rsMutex.Lock()
	r.rs.SenderStats[s.Name()] = info
	r.rsMutex.Unlock()
	return true
}
// Stop signals the collection loop to exit, waits up to ten seconds for it to
// acknowledge on exitChan, then closes every sender.
func (mr *MetricRunner) Stop() {
	atomic.AddInt32(&mr.stopped, 1)
	log.Warnf("wait for MetricRunner " + mr.Name() + " stopped")
	select {
	case <-mr.exitChan:
		log.Warnf("MetricRunner " + mr.Name() + " has been stopped ")
	case <-time.After(time.Second * 10):
		log.Warnf("MetricRunner " + mr.Name() + " exited timeout ")
	}
	for _, snd := range mr.senders {
		if cerr := snd.Close(); cerr != nil {
			log.Errorf("cannot close sender name: %s, err: %v", snd.Name(), cerr)
		} else {
			log.Warnf("sender %v of MetricRunner %v closed", snd.Name(), mr.Name())
		}
	}
}
// Reset clears the runner's meta state and every resettable sender, collecting
// all failures into a single newline-separated error.
func (mr *MetricRunner) Reset() (err error) {
	var combined string
	if merr := mr.meta.Reset(); merr != nil {
		combined += merr.Error() + "\n"
	}
	for _, snd := range mr.senders {
		resettable, ok := snd.(Resetable)
		if !ok {
			continue
		}
		if rerr := resettable.Reset(); rerr != nil {
			combined += rerr.Error() + "\n"
		}
	}
	if combined != "" {
		return errors.New(combined)
	}
	return nil
}
// Cleaner reports that metric runners have no cleanup to perform.
func (_ *MetricRunner) Cleaner() CleanInfo {
	return CleanInfo{
		enable: false,
	}
}
// getStatusFrequently returns (true, elapsed, cached status) when the last
// status computation happened no more than 3 seconds ago, so Status() can
// serve the cached snapshot instead of recomputing.
func (mr *MetricRunner) getStatusFrequently(now time.Time) (bool, float64, RunnerStatus) {
	mr.rsMutex.RLock()
	defer mr.rsMutex.RUnlock()
	elapsed := now.Sub(mr.rs.lastState).Seconds()
	if elapsed > 3 {
		return false, elapsed, RunnerStatus{}
	}
	return true, elapsed, mr.lastRs.Clone()
}
// Status recomputes and returns the runner status snapshot. Calls made within
// 3 seconds of the previous computation are answered from the cached lastRs
// (see getStatusFrequently).
func (mr *MetricRunner) Status() (rs RunnerStatus) {
	var isFre bool
	var elaspedtime float64
	now := time.Now()
	if isFre, elaspedtime, rs = mr.getStatusFrequently(now); isFre {
		return rs
	}
	mr.rsMutex.Lock()
	defer mr.rsMutex.Unlock()
	mr.rs.Elaspedtime += elaspedtime
	mr.rs.lastState = now
	durationTime := float64(mr.collectInterval.Seconds())
	// Read speed/trend are derived against the previous snapshot (lastRs).
	mr.rs.ReadSpeed = float64(mr.rs.ReadDataCount-mr.lastRs.ReadDataCount) / durationTime
	mr.rs.ReadSpeedTrend = getTrend(mr.lastRs.ReadSpeed, mr.rs.ReadSpeed)
	for i := range mr.senders {
		sts, ok := mr.senders[i].(sender.StatsSender)
		if ok {
			mr.rs.SenderStats[mr.senders[i].Name()] = sts.Stats()
		}
	}
	for k, v := range mr.rs.SenderStats {
		if lv, ok := mr.lastRs.SenderStats[k]; ok {
			v.Speed, v.Trend = calcSpeedTrend(lv, v, durationTime)
		} else {
			v.Speed, v.Trend = calcSpeedTrend(StatsInfo{}, v, durationTime)
		}
		mr.rs.SenderStats[k] = v
	}
	mr.rs.RunningStatus = RunnerRunning
	// Cache the freshly computed snapshot for the 3-second fast path.
	*mr.lastRs = mr.rs.Clone()
	return *mr.lastRs
}
// TokenRefresh forwards refreshed auth tokens to the addressed sender, when
// that sender supports token refreshing; mismatched runner names are an error.
func (mr *MetricRunner) TokenRefresh(tokens AuthTokens) error {
	if tokens.RunnerName != mr.RunnerName {
		return fmt.Errorf("tokens.RunnerName[%v] is not match %v", tokens.RunnerName, mr.RunnerName)
	}
	if tokens.SenderIndex >= len(mr.senders) {
		return nil
	}
	if refresher, ok := mr.senders[tokens.SenderIndex].(sender.TokenRefreshable); ok {
		return refresher.TokenRefresh(tokens.SenderTokens)
	}
	return nil
}
// StatusRestore reloads the persisted reader/parser/sender counters from the
// meta statistics file so totals survive a restart.
func (mr *MetricRunner) StatusRestore() {
	rStat, err := mr.meta.ReadStatistic()
	if err != nil {
		log.Warnf("runner %v, restore status failed", mr.RunnerName)
		return
	}
	mr.rs.ReadDataCount = rStat.ReaderCnt
	mr.rs.ParserStats.Success = rStat.ParserCnt[0]
	mr.rs.ParserStats.Errors = rStat.ParserCnt[1]
	for _, s := range mr.senders {
		name := s.Name()
		info, exist := rStat.SenderCnt[name]
		if !exist {
			continue
		}
		// Senders that track their own stats get the restored counters too.
		sStatus, ok := s.(sender.StatsSender)
		if ok {
			sStatus.Restore(&StatsInfo{
				Success: info[0],
				Errors:  info[1],
			})
		}
		status, ext := mr.rs.SenderStats[name]
		if !ext {
			status = StatsInfo{}
		}
		status.Success = info[0]
		status.Errors = info[1]
		mr.rs.SenderStats[name] = status
	}
	*mr.lastRs = mr.rs.Clone()
	log.Infof("runner %v restore status %v", mr.RunnerName, rStat)
}
// StatusBackup persists the current reader/parser/sender counters to the meta
// statistics file (the counterpart of StatusRestore).
func (mr *MetricRunner) StatusBackup() {
	status := mr.Status()
	bStart := &reader.Statistic{
		ReaderCnt: status.ReadDataCount,
		ParserCnt: [2]int64{
			status.ParserStats.Success,
			status.ParserStats.Errors,
		},
		SenderCnt: map[string][2]int64{},
	}
	for _, s := range mr.senders {
		name := s.Name()
		// Prefer the sender's own stats when it tracks them.
		sStatus, ok := s.(sender.StatsSender)
		if ok {
			status.SenderStats[name] = sStatus.Stats()
		}
		if sta, exist := status.SenderStats[name]; exist {
			bStart.SenderCnt[name] = [2]int64{
				sta.Success,
				sta.Errors,
			}
		}
	}
	err := mr.meta.WriteStatistic(bStart)
	if err != nil {
		log.Warnf("runner %v, backup status failed", mr.RunnerName)
	} else {
		log.Infof("runner %v, backup status %v", mr.RunnerName, bStart)
	}
}
// createDiscardTransformer builds a "discard" transformer that drops the given
// key from the data after parsing.
func createDiscardTransformer(key string) (transforms.Transformer, error) {
	const strTP = "discard"
	factory, registered := transforms.Transformers[strTP]
	if !registered {
		return nil, fmt.Errorf("type %v of transformer not exist", strTP)
	}
	settings := map[string]string{
		"key":   key,
		"type":  strTP,
		"stage": "after_parser",
	}
	transformer := factory()
	raw, err := jsoniter.Marshal(settings)
	if err != nil {
		return nil, fmt.Errorf("type %v of transformer marshal config error %v", strTP, err)
	}
	if err = jsoniter.Unmarshal(raw, transformer); err != nil {
		return nil, fmt.Errorf("type %v of transformer unmarshal config error %v", strTP, err)
	}
	return transformer, nil
}
|
if _, ok := m.Config["http_datas"]; !ok {
return nil, fmt.Errorf("metric %v http_datas can't be empty", curl.TypeMetricHttp)
}
|
paramunittest.py
|
# Copyright 2012 Enrico Franchi
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
# PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import copy
import unittest
import collections
import importlib
# Public API: the decorator plus the base class whose hooks it patches.
__all__ = [
    'parametrized',
    'ParametrizedTestCase',
]
def _process_parameters(parameters_seq):
processed_parameters_seq = []
for parameters in parameters_seq:
if isinstance(parameters, collections.Mapping):
processed_parameters_seq.append((tuple(),
dict(parameters)))
elif (len(parameters) == 2
and isinstance(parameters[0], collections.Sequence)
and isinstance(parameters[1], collections.Mapping)):
processed_parameters_seq.append((tuple(parameters[0]),
dict(parameters[1])))
else:
processed_parameters_seq.append((tuple(parameters),
dict()))
return processed_parameters_seq
def _build_name(name, index):
return '%s_%d' % (name, index)
def strclass(cls):
|
class ParametrizedTestCase(unittest.TestCase):
    """Base class for parametrized test cases.

    ``parametrized`` patches ``getParameters``, ``getTestCaseIndex`` and
    ``getFullParametersSequence`` on every generated subclass; concrete test
    classes must implement ``setParameters`` themselves.
    """
    # Fix: the original defined getFullParametersSequence twice; the first
    # (identical) definition was dead code shadowed by the second.

    def setParameters(self, *args, **kwargs):
        raise NotImplementedError(
            ('setParameters must be implemented '
             'because it receives the parameters.'))

    def getParameters(self):
        """
        Return the parameters with which this test case was instantiated.
        """
        raise NotImplementedError(
            'getParameters should have been patched by parametrized.')

    def getTestCaseIndex(self):
        """
        Return the index of the current test case according to the list of
        parameters passed to parametrized.
        """
        raise NotImplementedError(
            'getTestCaseIndex should have been patched by parametrized.')

    def getFullParametersSequence(self):
        """
        Return the full normalized list of parameters passed to parametrized.
        """
        raise NotImplementedError(
            'getFullParametersSequence should have been patched by parametrized.')

    def __str__(self):
        try:
            return "%s[%d](%s) (%s)" % (self._testMethodName,
                                        self.getTestCaseIndex(),
                                        self.getParameters(),
                                        strclass(self.__class__))
        except NotImplementedError:
            return "%s[...](...) (%s)" % (self._testMethodName,
                                          strclass(self.__class__))

    def __repr__(self):
        try:
            return "<%s[%d](%s) testMethod=%s>" % (strclass(self.__class__),
                                                   self.getTestCaseIndex(),
                                                   self.getParameters(),
                                                   self._testMethodName)
        except NotImplementedError:
            return "<%s[...](...) testMethod=%s>" % (strclass(self.__class__),
                                                     self._testMethodName)
class PropagateSetAttr(type):
    # Metaclass that broadcasts class-attribute assignments to a list of
    # observer classes (the generated parametrized test cases).

    def __new__(mcs, name, bases, dct):
        # Each class created with this metaclass gets its own observer list.
        dct['setattr_observers'] = []
        cls = super(PropagateSetAttr, mcs).__new__(mcs, name, bases, dct)
        return cls

    def __setattr__(cls, key, value):
        # NOTE(review): deliberately does NOT call super().__setattr__, so the
        # attribute is only forwarded to the observers and never set on the
        # propagator class itself — confirm this is the intended behavior.
        for observer in cls.setattr_observers:
            setattr(observer, key, value)
def make_propagator(cls, setattr_observers):
    """Build a stand-in TestCase class whose class-attribute writes are
    forwarded to every class in ``setattr_observers``."""
    propagator = PropagateSetAttr('SkippableTest', (unittest.TestCase,), {})
    for observer in setattr_observers:
        propagator.setattr_observers.append(observer)
    return propagator
def parametrized(*parameters_seq):
    # Class decorator factory: for every parameter set it generates a subclass
    # of the decorated class named "<Class>_<i>", installs it in the class's
    # module (so unittest discovery finds it), and returns a propagator class
    # that forwards attribute writes to every generated subclass.
    parameters_seq = _process_parameters(parameters_seq)

    def magic_module_set_test_case(cls):
        if not hasattr(cls, 'setParameters'):
            raise TypeError('%s does not have a setParameters method.' % (
                cls.__name__, ))
        module = importlib.import_module(cls.__module__)
        generated_test_cases = []
        for index, parameters in enumerate(parameters_seq):
            name = _build_name(cls.__name__, index)

            # Default arguments pin the current loop values inside the closure.
            def closing_over(parameters=parameters, index=index):
                def setUp(self):
                    # Feed the parameters in before the original setUp runs.
                    self.setParameters(*parameters[0], **parameters[1])
                    cls.setUp(self)

                def getParameters(self):
                    """
                    Return the parameters with which this test case was instantiated.
                    """
                    return parameters

                def getTestCaseIndex(self):
                    """
                    Return the index of the current test case according to the list of
                    parameters passed to parametrized.
                    """
                    return index

                def getFullParametersSequence(self):
                    """
                    Return the full normalized list of parameters passed to parametrized.
                    """
                    return copy.copy(parameters_seq)
                return setUp, getParameters, getTestCaseIndex, getFullParametersSequence
            (set_up, get_parameters,
             get_test_case_index,
             get_full_parameters_sequence) = closing_over()
            new_class = type(name, (cls, ),
                             {'setUp': set_up,
                              'getParameters': get_parameters,
                              'getTestCaseIndex': get_test_case_index,
                              'getFullParametersSequence': get_full_parameters_sequence})
            generated_test_cases.append(new_class)
            setattr(module, name, new_class)
        return make_propagator(cls, generated_test_cases)
    return magic_module_set_test_case
|
return "%s.%s" % (cls.__module__, cls.__name__)
|
routes.ts
|
// Top-level (non server-scoped) routes.
export const INDEX_PAGE = '/';
export const SEARCH_PAGE = '/search';
// Route patterns for all server-scoped pages; ':key' is the server key and
// ':id' the tribe/player/village id.
export const SERVER_PAGE = {
  INDEX_PAGE: '/server/:key',
  TRIBE_PAGE: {
    INDEX_PAGE: '/server/:key/tribe/:id',
    MEMBERS_PAGE: '/server/:key/tribe/:id/members',
    HISTORY_PAGE: '/server/:key/tribe/:id/history',
    TRIBE_CHANGES_PAGE: '/server/:key/tribe/:id/tribe-changes',
    ENNOBLEMENTS_PAGE: '/server/:key/tribe/:id/ennoblements',
  },
  PLAYER_PAGE: {
    INDEX_PAGE: '/server/:key/player/:id',
    HISTORY_PAGE: '/server/:key/player/:id/history',
    TRIBE_CHANGES_PAGE: '/server/:key/player/:id/tribe-changes',
    ENNOBLEMENTS_PAGE: '/server/:key/player/:id/ennoblements',
  },
  VILLAGE_PAGE: {
    INDEX_PAGE: '/server/:key/village/:id',
  },
  RANKING_PAGE: {
    BASE: '/server/:key/ranking',
    PLAYER_PAGE: {
      INDEX_PAGE: '/server/:key/ranking/player',
      OD_PAGE: '/server/:key/ranking/player/od',
      DAILY_PAGE: '/server/:key/ranking/player/daily',
      ARCHIVE_PAGE: '/server/:key/ranking/player/archive',
    },
    // NOTE(review): a fragment is missing at the marker below — the ranking
    // TRIBE_PAGE opening ('/server/:key/ranking/tribe' INDEX/OD entries, found
    // later in this dump) apparently belongs here; confirm against upstream.
|
    DAILY_PAGE: '/server/:key/ranking/tribe/daily',
    ARCHIVE_PAGE: '/server/:key/ranking/tribe/archive',
    },
  },
  ENNOBLEMENTS_PAGE: '/server/:key/ennoblements',
  MAP_PAGE: '/server/:key/map',
  WAR_STATS_PAGE: '/server/:key/war-stats'
};
|
TRIBE_PAGE: {
INDEX_PAGE: '/server/:key/ranking/tribe',
OD_PAGE: '/server/:key/ranking/tribe/od',
|
input.js
|
(function($){
	// Wire up one ACF star-rating field instance found inside $el.
	function initialiseField( $el ) {
		var container = $el;
		var starList = $("ul", container);
		var starListItems = $("li", starList);
		var starListItemStars = $("i", starListItems);
		var starField = $("input", container);
		var clearButton = $("a.clear-button", container);
		// data-allow-half == 1 enables half-star selection.
		var allowHalf = (starField.data('allow-half') == 1);
		starListItems.bind("click", function(e){
			e.preventDefault();
			var starValue = $(this).index();
			starField.val(starValue + 1);
			if (allowHalf) {
				// Clicking the left half of a star subtracts 0.5 from the value.
				var width = $(this).innerWidth();
				var offset = $(this).offset();
				var leftSideClicked = (width / 2) > (e.pageX - offset.left);
				if (leftSideClicked) {
					starField.val(starField.val() - 0.5);
				}
			}
			clearActiveStarClassesFromList();
			// Repaint: full star below the value, half star on a .5 boundary.
			starListItems.each(function(index){
				var icon = $('i', $(this));
				var starValue = starField.val();
				if (index < starValue) {
					icon.removeClass('fa-star-o')
						.removeClass('fa-star-half-o')
						.addClass('fa-star');
					if (allowHalf && (index + .5 == starValue)) {
						icon.addClass('fa-star-half-o')
					}
				}
			});
		});
		clearButton.bind("click", function(e){
			e.preventDefault();
			clearActiveStarClassesFromList();
			starField.val(0);
		});
		// Reset every star icon to the empty outline state.
		function clearActiveStarClassesFromList()
		{
			starListItemStars
				.removeClass('fa-star')
				.removeClass('fa-star-half-o')
				.addClass('fa-star-o');
		}
	}
	// Instantiate
	acf.add_action('ready append', function($el) {
		acf.get_fields({
			type: 'star_rating_field'
		// NOTE(review): a fragment is missing at the marker below — the
		// `}, $el).each(...)` continuation found later in this dump apparently
		// belongs here; confirm against upstream.
|
		});
	})(jQuery);
|
}, $el).each(function(){
initialiseField($(this));
});
|
misc.py
|
# -*- coding: utf-8 -*-
# Copyright (c) 2020-2021 Salvador E. Tropea
# Copyright (c) 2020-2021 Instituto Nacional de Tecnologïa Industrial
# License: Apache 2.0
# Project: KiAuto (formerly kicad-automation-scripts)
import os
import re
import json
import configparser
from contextlib import contextmanager
from sys import exit, path
# Default W,H for recording
REC_W = 1366
REC_H = 960
# Return error codes
# Positive values are ERC/DRC errors
NO_SCHEMATIC = 1
WRONG_ARGUMENTS = 2   # This is what argsparse uses
# NOTE(review): EESCHEMA_CFG_PRESENT collides with NO_PCBNEW_MODULE (both 11)
# — confirm which exit code callers actually expect for each condition.
EESCHEMA_CFG_PRESENT = 11
KICAD_CFG_PRESENT = 3
NO_PCB = 4
PCBNEW_CFG_PRESENT = 5
WRONG_LAYER_NAME = 6
WRONG_PCB_NAME = 7
WRONG_SCH_NAME = 8
PCBNEW_ERROR = 9
EESCHEMA_ERROR = 10
NO_PCBNEW_MODULE = 11
USER_HOTKEYS_PRESENT = 12
CORRUPTED_PCB = 13
# Wait 60 s for the pcbnew/eeschema window to be present
WAIT_START = 60
# Name for testing versions
NIGHTLY = 'nightly'
# Scale factor for the timeouts
TIME_OUT_MULT = 1.0
# KiCad development (6.x) versions report as 5.99.x; encoded major*1e6+minor*1e3+patch
KICAD_VERSION_5_99 = 5099000
KICAD_SHARE = '/usr/share/kicad/'
KICAD_NIGHTLY_SHARE = '/usr/share/kicad-nightly/'
@contextmanager
def hide_stderr():
    """ Low level stderr supression, used to hide KiCad bugs.

    Redirects fd 2 to /dev/null for the duration of the ``with`` body and
    restores it afterwards, even when the body raises (the original version
    skipped restoration on exceptions and leaked the duplicated fd). """
    saved_stderr = os.dup(2)
    devnull = os.open('/dev/null', os.O_WRONLY)
    os.dup2(devnull, 2)
    os.close(devnull)
    try:
        yield
    finally:
        os.dup2(saved_stderr, 2)
        os.close(saved_stderr)
class Config(object):
    def __init__(self, logger, input_file=None, args=None):
        """Collect session options, detect the installed KiCad version and
        resolve every KiCad configuration-file path used by the automation."""
        self.export_format = 'pdf'
        if input_file:
            self.input_file = input_file
            self.input_no_ext = os.path.splitext(input_file)[0]
            #
            # As soon as we init pcbnew the following files are modified:
            #
            if os.path.isfile(self.input_no_ext+'.pro'):
                self.start_pro_stat = os.stat(self.input_no_ext+'.pro')
            else:
                self.start_pro_stat = None
            if os.path.isfile(self.input_no_ext+'.kicad_pro'):
                self.start_kicad_pro_stat = os.stat(self.input_no_ext+'.kicad_pro')
            else:
                self.start_kicad_pro_stat = None
            if os.path.isfile(self.input_no_ext+'.kicad_prl'):
                self.start_kicad_prl_stat = os.stat(self.input_no_ext+'.kicad_prl')
            else:
                self.start_kicad_prl_stat = None
        if args:
            # Session debug
            self.use_wm = args.use_wm  # Use a Window Manager, dialogs behaves in a different way
            self.start_x11vnc = args.start_x11vnc
            self.rec_width = args.rec_width
            self.rec_height = args.rec_height
            self.record = args.record
            self.video_dir = args.output_dir
            self.wait_for_key = args.wait_key
            self.time_out_scale = args.time_out_scale
            # Others
            if hasattr(args, 'file_format'):
                self.export_format = args.file_format.lower()
        else:
            # Session debug
            self.use_wm = False
            self.start_x11vnc = False
            self.rec_width = REC_W
            self.rec_height = REC_H
            self.record = False
            self.video_dir = None
            self.wait_for_key = False
            self.time_out_scale = 1.0
        self.colordepth = 24
        self.video_name = None
        # NOTE(review): this unconditionally overwrites the video_dir assigned
        # from args.output_dir above — confirm against upstream whether these
        # defaults belong in the no-args branch only.
        self.video_dir = self.output_dir = ''
        # Executable and dirs
        self.eeschema = 'eeschema'
        self.pcbnew = 'pcbnew'
        self.kicad_conf_dir = 'kicad'
        ng_ver = os.environ.get('KIAUS_USE_NIGHTLY')
        if ng_ver:
            # Nightly builds use suffixed binaries and their own config dir.
            self.eeschema += '-'+NIGHTLY
            self.pcbnew += '-'+NIGHTLY
            self.kicad_conf_dir += os.path.join(NIGHTLY, ng_ver)
            # Path to the Python module
            path.insert(0, '/usr/lib/kicad-nightly/lib/python3/dist-packages')
        # Detect KiCad version
        try:
            import pcbnew
        except ImportError:
            logger.error("Failed to import pcbnew Python module."
                         " Is KiCad installed?"
                         " Do you need to add it to PYTHONPATH?")
            exit(NO_PCBNEW_MODULE)
        kicad_version = pcbnew.GetBuildVersion()
        m = re.match(r'(\d+)\.(\d+)\.(\d+)', kicad_version)
        self.kicad_version_major = int(m.group(1))
        self.kicad_version_minor = int(m.group(2))
        self.kicad_version_patch = int(m.group(3))
        # Encoded as major*1e6 + minor*1e3 + patch for easy comparisons.
        self.kicad_version = self.kicad_version_major*1000000+self.kicad_version_minor*1000+self.kicad_version_patch
        logger.debug('Detected KiCad v{}.{}.{} ({} {})'.format(self.kicad_version_major, self.kicad_version_minor,
                                                               self.kicad_version_patch, kicad_version, self.kicad_version))
        # Config file names
        if self.kicad_version >= KICAD_VERSION_5_99:
            self.kicad_conf_path = pcbnew.GetSettingsManager().GetUserSettingsPath()
            if ng_ver:
                self.kicad_conf_path = self.kicad_conf_path.replace('/kicad/', '/kicadnightly/')
        else:
            # Bug in KiCad (#6989), prints to stderr:
            # `../src/common/stdpbase.cpp(62): assert "traits" failed in Get(test_dir): create wxApp before calling this`
            # Found in KiCad 5.1.8, 5.1.9
            # So we temporarily supress stderr
            with hide_stderr():
                self.kicad_conf_path = pcbnew.GetKicadConfigPath()
        logger.debug('Config path {}'.format(self.kicad_conf_path))
        # First we solve kicad_common because it can redirect to another config dir
        self.conf_kicad = os.path.join(self.kicad_conf_path, 'kicad_common')
        self.conf_kicad_bkp = None
        if self.kicad_version >= KICAD_VERSION_5_99:
            self.conf_kicad += '.json'
            self.conf_kicad_json = True
        else:
            self.conf_kicad_json = False
        # Read the environment redefinitions used by KiCad
        if os.path.isfile(self.conf_kicad):
            self.load_kicad_environment(logger)
            if 'KICAD_CONFIG_HOME' in self.env and self.kicad_version < KICAD_VERSION_5_99:
                # The user is redirecting the configuration
                # KiCad 5 unintentionally allows it, is a bug, and won't be fixed:
                # https://forum.kicad.info/t/kicad-config-home-inconsistencies-and-detail/26875
                self.kicad_conf_path = self.env['KICAD_CONFIG_HOME']
                logger.debug('Redirecting KiCad config path to: '+self.kicad_conf_path)
        else:
            logger.warning('Missing KiCad main config file '+self.conf_kicad)
        # - eeschema config
        self.conf_eeschema = os.path.join(self.kicad_conf_path, 'eeschema')
        self.conf_eeschema_bkp = None
        # - pcbnew config
        self.conf_pcbnew = os.path.join(self.kicad_conf_path, 'pcbnew')
        self.conf_pcbnew_bkp = None
        # Config files that migrated to JSON
        # Note that they remain in the old format until saved
        if self.kicad_version >= KICAD_VERSION_5_99:
            self.conf_eeschema += '.json'
            self.conf_pcbnew += '.json'
            self.conf_eeschema_json = True
            self.conf_pcbnew_json = True
            self.pro_ext = 'kicad_pro'
            self.prl_ext = 'kicad_prl'
        else:
            self.conf_eeschema_json = False
            self.conf_pcbnew_json = False
            self.pro_ext = 'pro'
            self.prl_ext = None
        # - hotkeys
        self.conf_hotkeys = os.path.join(self.kicad_conf_path, 'user.hotkeys')
        self.conf_hotkeys_bkp = None
        # - sym-lib-table
        self.user_sym_lib_table = os.path.join(self.kicad_conf_path, 'sym-lib-table')
        self.user_fp_lib_table = os.path.join(self.kicad_conf_path, 'fp-lib-table')
        self.sys_sym_lib_table = [KICAD_SHARE+'template/sym-lib-table']
        self.sys_fp_lib_table = [KICAD_SHARE+'template/fp-lib-table']
        if ng_ver:
            # 20200912: sym-lib-table is missing
            self.sys_sym_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/sym-lib-table')
            self.sys_fp_lib_table.insert(0, KICAD_NIGHTLY_SHARE+'template/fp-lib-table')
        # Some details about the UI
        if self.kicad_version >= KICAD_VERSION_5_99:
            # KiCad 5.99.0
            self.ee_window_title = r'\[.*\] — Eeschema$'  # "PROJECT [HIERARCHY_PATH] - Eeschema"
        else:
            # KiCad 5.1.6
            self.ee_window_title = r'Eeschema.*\.sch'  # "Eeschema - file.sch"
        # Collected errors and unconnecteds (warnings)
        self.errs = []
        self.wrns = []
        # Error filters
        self.err_filters = []
def load_kicad_environment(self, logger):
sel
|
@staticmethod
def get_config_vars_json(file):
with open(file, "rt") as f:
data = json.load(f)
if 'environment' in data and 'vars' in data['environment']:
return data['environment']['vars']
return None
@staticmethod
def get_config_vars_ini(file):
config = configparser.ConfigParser()
with open(file, "rt") as f:
data = f.read()
config.read_string('[Various]\n'+data)
if 'EnvironmentVariables' in config:
return config['EnvironmentVariables']
return None
# Module metadata (author/license/version info for the KiAuto package).
__author__ = 'Salvador E. Tropea'
__copyright__ = 'Copyright 2018-2021, INTI/Productize SPRL'
__credits__ = ['Salvador E. Tropea', 'Seppe Stas', 'Jesse Vincent', 'Scott Bezek']
__license__ = 'Apache 2.0'
__email__ = 'stropea@inti.gob.ar'
__status__ = 'beta'
__url__ = 'https://github.com/INTI-CMNB/KiAuto/'
__version__ = '1.5.8'
|
f.env = {}
if self.conf_kicad_json:
env = self.get_config_vars_json(self.conf_kicad)
if env:
self.env = env
else:
env = self.get_config_vars_ini(self.conf_kicad)
if env:
for k, v in env.items():
self.env[k.upper()] = v
logger.debug('KiCad environment: '+str(self.env))
|
en-ZA.ts
|
/**
* @license
* Copyright Google LLC All Rights Reserved.
*
* Use of this source code is governed by an MIT-style license that can be
* found in the LICENSE file at https://angular.io/license
*/
// THIS CODE IS GENERATED - DO NOT MODIFY
// See angular/tools/gulp-tasks/cldr/extract.js
const u = undefined;
function
|
(n: number): number {
let i = Math.floor(Math.abs(n)), v = n.toString().replace(/^[^.]*\.?/, '').length;
if (i === 1 && v === 0) return 1;
return 5;
}
export default [
'en-ZA',
[['a', 'p'], ['am', 'pm'], u],
[['am', 'pm'], u, u],
[
['S', 'M', 'T', 'W', 'T', 'F', 'S'], ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat'],
['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'],
['Su', 'Mo', 'Tu', 'We', 'Th', 'Fr', 'Sa']
],
u,
[
['J', 'F', 'M', 'A', 'M', 'J', 'J', 'A', 'S', 'O', 'N', 'D'],
['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'],
[
'January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September',
'October', 'November', 'December'
]
],
u,
[['B', 'A'], ['BC', 'AD'], ['Before Christ', 'Anno Domini']],
0,
[6, 0],
['y/MM/dd', 'dd MMM y', 'dd MMMM y', 'EEEE, dd MMMM y'],
['HH:mm', 'HH:mm:ss', 'HH:mm:ss z', 'HH:mm:ss zzzz'],
['{1}, {0}', u, '{1} \'at\' {0}', u],
[',', ' ', ';', '%', '+', '-', 'E', '×', '‰', '∞', 'NaN', ':'],
['#,##0.###', '#,##0%', '¤#,##0.00', '#E0'],
'ZAR',
'R',
'South African Rand',
{'JPY': ['JP¥', '¥'], 'USD': ['US$', '$'], 'ZAR': ['R']},
'ltr',
plural
];
|
plural
|
loops.rs
|
use Context::*;
use rustc_errors::{struct_span_err, Applicability};
use rustc_hir as hir;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, Visitor};
use rustc_hir::{Destination, Movability, Node};
use rustc_middle::hir::map::Map;
use rustc_middle::hir::nested_filter;
use rustc_middle::ty::query::Providers;
use rustc_middle::ty::TyCtxt;
use rustc_session::Session;
use rustc_span::hygiene::DesugaringKind;
use rustc_span::Span;
/// Where the visitor currently is while walking the HIR; determines whether a
/// `break`/`continue` encountered at this point is legal.
#[derive(Clone, Copy, Debug, PartialEq)]
enum Context {
    /// Not inside any loop, closure or labeled block.
    Normal,
    /// Inside a loop of the given kind (`loop`, `while`, `for`, ...).
    Loop(hir::LoopSource),
    /// Inside a (non-async) closure; the span points at the closure.
    Closure(Span),
    /// Inside an `async` block/closure (movability is `Static`).
    AsyncClosure(Span),
    /// Inside a labeled block (`'label: { ... }`).
    LabeledBlock,
    /// Inside an anonymous constant (e.g. array length expression).
    AnonConst,
}
/// HIR visitor that checks `break`/`continue` are only used where valid.
#[derive(Copy, Clone)]
struct CheckLoopVisitor<'a, 'hir> {
    sess: &'a Session,
    hir_map: Map<'hir>,
    // Context of the innermost enclosing construct; updated via `with_context`.
    cx: Context,
}
/// Entry point of the pass: walk every item in `module_def_id` and validate
/// `break`/`continue` placement, starting from the `Normal` context.
fn check_mod_loops(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
    let mut visitor = CheckLoopVisitor { sess: &tcx.sess, hir_map: tcx.hir(), cx: Normal };
    tcx.hir().deep_visit_item_likes_in_module(module_def_id, &mut visitor);
}
/// Register this pass's query with the compiler's provider table.
pub(crate) fn provide(providers: &mut Providers) {
    providers.check_mod_loops = check_mod_loops;
}
// The walk tracks the innermost `Context` and emits E0571/E0590/E0696 (and,
// via the helpers, E0267/E0268/E0695) when a `break`/`continue` is misplaced.
impl<'a, 'hir> Visitor<'hir> for CheckLoopVisitor<'a, 'hir> {
    type NestedFilter = nested_filter::OnlyBodies;
    fn nested_visit_map(&mut self) -> Self::Map {
        self.hir_map
    }
    fn visit_anon_const(&mut self, c: &'hir hir::AnonConst) {
        // Anonymous constants form their own control-flow boundary.
        self.with_context(AnonConst, |v| intravisit::walk_anon_const(v, c));
    }
    fn visit_expr(&mut self, e: &'hir hir::Expr<'hir>) {
        match e.kind {
            hir::ExprKind::Loop(ref b, _, source, _) => {
                self.with_context(Loop(source), |v| v.visit_block(&b));
            }
            hir::ExprKind::Closure(_, ref function_decl, b, span, movability) => {
                // `Movability::Static` distinguishes async blocks from plain closures.
                let cx = if let Some(Movability::Static) = movability {
                    AsyncClosure(span)
                } else {
                    Closure(span)
                };
                self.visit_fn_decl(&function_decl);
                self.with_context(cx, |v| v.visit_nested_body(b));
            }
            hir::ExprKind::Block(ref b, Some(_label)) => {
                self.with_context(LabeledBlock, |v| v.visit_block(&b));
            }
            hir::ExprKind::Break(break_label, ref opt_expr) => {
                if let Some(e) = opt_expr {
                    self.visit_expr(e);
                }
                if self.require_label_in_labeled_block(e.span, &break_label, "break") {
                    // If we emitted an error about an unlabeled break in a labeled
                    // block, we don't need any further checking for this break any more
                    return;
                }
                let loop_id = match break_label.target_id {
                    Ok(loop_id) => Some(loop_id),
                    Err(hir::LoopIdError::OutsideLoopScope) => None,
                    Err(hir::LoopIdError::UnlabeledCfInWhileCondition) => {
                        self.emit_unlabled_cf_in_while_condition(e.span, "break");
                        None
                    }
                    Err(hir::LoopIdError::UnresolvedLabel) => None,
                };
                // A break targeting a labeled block is always fine; nothing to check.
                if let Some(Node::Block(_)) = loop_id.and_then(|id| self.hir_map.find(id)) {
                    return;
                }
                if let Some(break_expr) = opt_expr {
                    // `break <value>` is only allowed from a plain `loop`;
                    // figure out what kind of loop we actually target.
                    let (head, loop_label, loop_kind) = if let Some(loop_id) = loop_id {
                        match self.hir_map.expect_expr(loop_id).kind {
                            hir::ExprKind::Loop(_, label, source, sp) => {
                                (Some(sp), label, Some(source))
                            }
                            ref r => {
                                span_bug!(e.span, "break label resolved to a non-loop: {:?}", r)
                            }
                        }
                    } else {
                        (None, None, None)
                    };
                    match loop_kind {
                        None | Some(hir::LoopSource::Loop) => (),
                        Some(kind) => {
                            // E0571: `break` with value from `while`/`for`.
                            let mut err = struct_span_err!(
                                self.sess,
                                e.span,
                                E0571,
                                "`break` with value from a `{}` loop",
                                kind.name()
                            );
                            err.span_label(
                                e.span,
                                "can only break with a value inside `loop` or breakable block",
                            );
                            if let Some(head) = head {
                                err.span_label(
                                    head,
                                    &format!(
                                        "you can't `break` with a value in a `{}` loop",
                                        kind.name()
                                    ),
                                );
                            }
                            err.span_suggestion(
                                e.span,
                                &format!(
                                    "use `break` on its own without a value inside this `{}` loop",
                                    kind.name(),
                                ),
                                format!(
                                    "break{}",
                                    break_label
                                        .label
                                        .map_or_else(String::new, |l| format!(" {}", l.ident))
                                ),
                                Applicability::MaybeIncorrect,
                            );
                            // The "value" may actually be a misspelled loop label
                            // (`break 'lbl` written as `break lbl`): suggest the label.
                            if let (Some(label), None) = (loop_label, break_label.label) {
                                match break_expr.kind {
                                    hir::ExprKind::Path(hir::QPath::Resolved(
                                        None,
                                        hir::Path {
                                            segments: [segment],
                                            res: hir::def::Res::Err,
                                            ..
                                        },
                                    )) if label.ident.to_string()
                                        == format!("'{}", segment.ident) =>
                                    {
                                        // This error is redundant, we will have already emitted a
                                        // suggestion to use the label when `segment` wasn't found
                                        // (hence the `Res::Err` check).
                                        err.delay_as_bug();
                                    }
                                    _ => {
                                        err.span_suggestion(
                                            break_expr.span,
                                            "alternatively, you might have meant to use the \
                                             available loop label",
                                            label.ident,
                                            Applicability::MaybeIncorrect,
                                        );
                                    }
                                }
                            }
                            err.emit();
                        }
                    }
                }
                self.require_break_cx("break", e.span);
            }
            hir::ExprKind::Continue(destination) => {
                self.require_label_in_labeled_block(e.span, &destination, "continue");
                match destination.target_id {
                    Ok(loop_id) => {
                        // E0696: `continue` cannot target a labeled block.
                        if let Node::Block(block) = self.hir_map.find(loop_id).unwrap() {
                            struct_span_err!(
                                self.sess,
                                e.span,
                                E0696,
                                "`continue` pointing to a labeled block"
                            )
                            .span_label(e.span, "labeled blocks cannot be `continue`'d")
                            .span_label(block.span, "labeled block the `continue` points to")
                            .emit();
                        }
                    }
                    Err(hir::LoopIdError::UnlabeledCfInWhileCondition) => {
                        self.emit_unlabled_cf_in_while_condition(e.span, "continue");
                    }
                    Err(_) => {}
                }
                self.require_break_cx("continue", e.span)
            }
            _ => intravisit::walk_expr(self, e),
        }
    }
}
impl<'a, 'hir> CheckLoopVisitor<'a, 'hir> {
    /// Runs `f` with `self.cx` temporarily set to `cx`, restoring the
    /// previous context afterwards (stack discipline without a stack).
    fn with_context<F>(&mut self, cx: Context, f: F)
    where
        F: FnOnce(&mut CheckLoopVisitor<'a, 'hir>),
    {
        let old_cx = self.cx;
        self.cx = cx;
        f(self);
        self.cx = old_cx;
    }
    /// Emits E0267/E0268 when the current context cannot host a
    /// `break`/`continue` (`name` is the keyword, for the message).
    fn require_break_cx(&self, name: &str, span: Span) {
        let err_inside_of = |article, ty, closure_span| {
            struct_span_err!(self.sess, span, E0267, "`{}` inside of {} {}", name, article, ty)
                .span_label(span, format!("cannot `{}` inside of {} {}", name, article, ty))
                .span_label(closure_span, &format!("enclosing {}", ty))
                .emit();
        };
        match self.cx {
            LabeledBlock | Loop(_) => {}
            Closure(closure_span) => err_inside_of("a", "closure", closure_span),
            AsyncClosure(closure_span) => err_inside_of("an", "`async` block", closure_span),
            Normal | AnonConst => {
                struct_span_err!(self.sess, span, E0268, "`{}` outside of a loop", name)
                    .span_label(span, format!("cannot `{}` outside of a loop", name))
                    .emit();
            }
        }
    }
    /// Emits E0695 (and returns true) for an unlabeled `break`/`continue`
    /// directly inside a labeled block. `?`-desugared jumps are exempt.
    fn require_label_in_labeled_block(
        &mut self,
        span: Span,
        label: &Destination,
        cf_type: &str,
    ) -> bool {
        if !span.is_desugaring(DesugaringKind::QuestionMark) && self.cx == LabeledBlock {
            if label.label.is_none() {
                struct_span_err!(
                    self.sess,
                    span,
                    E0695,
                    "unlabeled `{}` inside of a labeled block",
                    cf_type
                )
                .span_label(
                    span,
                    format!(
                        "`{}` statements that would diverge to or through \
                        a labeled block need to bear a label",
                        cf_type
                    ),
                )
                .emit();
                return true;
            }
        }
        false
    }
    /// Emits E0590 for an unlabeled `break`/`continue` inside a `while`
    /// condition. (Method name keeps its historical "unlabled" typo.)
    fn emit_unlabled_cf_in_while_condition(&mut self, span: Span, cf_type: &str) {
        struct_span_err!(
            self.sess,
            span,
            E0590,
            "`break` or `continue` with no label in the condition of a `while` loop"
        )
        .span_label(span, format!("unlabeled `{}` in the condition of a `while` loop", cf_type))
        .emit();
    }
}
| |
opml.go
|
// Copyright 2014 The project AUTHORS. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
/*
Package opml provides all the required structures and functions for parsing
OPML files, as defined by the specification of the OPML format:
[OPML 1.0] http://dev.opml.org/spec1.html
[OPML 2.0] http://dev.opml.org/spec2.html
It is able to parse both, OPML 1.0 and OPML 2.0, files.
*/
package opml
import (
"encoding/xml"
"io/ioutil"
"net/http"
)
// OPML is the root node of an OPML document. It only has a single required
// attribute: the version.
// NOTE: field order determines the element order produced by XML().
type OPML struct {
	XMLName xml.Name `xml:"opml"`
	Version string   `xml:"version,attr"` // "1.0" or "2.0" per the OPML spec
	Head    Head     `xml:"head"`
	Body    Body     `xml:"body"`
}
// Head holds some meta information about the document.
// All fields except Title are optional (omitempty) per the OPML spec.
type Head struct {
	Title           string `xml:"title"`
	DateCreated     string `xml:"dateCreated,omitempty"`
	DateModified    string `xml:"dateModified,omitempty"`
	OwnerName       string `xml:"ownerName,omitempty"`
	OwnerEmail      string `xml:"ownerEmail,omitempty"`
	OwnerID         string `xml:"ownerId,omitempty"`
	Docs            string `xml:"docs,omitempty"`
	ExpansionState  string `xml:"expansionState,omitempty"`
	VertScrollState string `xml:"vertScrollState,omitempty"`
	WindowTop       string `xml:"windowTop,omitempty"`
	WindowBottom    string `xml:"windowBottom,omitempty"`
	WindowLeft      string `xml:"windowLeft,omitempty"`
	WindowRight     string `xml:"windowRight,omitempty"`
}
// Body is the parent structure of all outlines.
type Body struct {
	Outlines []Outline `xml:"outline"`
}
// Outline holds all information about an outline.
// Outlines nest recursively; all attributes are kept as strings, as the
// OPML spec does not constrain their types.
type Outline struct {
	Outlines     []Outline `xml:"outline"`
	Text         string    `xml:"text,attr"`
	Type         string    `xml:"type,attr,omitempty"`
	IsComment    string    `xml:"isComment,attr,omitempty"`
	IsBreakpoint string    `xml:"isBreakpoint,attr,omitempty"`
	Created      string    `xml:"created,attr,omitempty"`
	Category     string    `xml:"category,attr,omitempty"`
	XMLURL       string    `xml:"xmlUrl,attr,omitempty"`
	HTMLURL      string    `xml:"htmlUrl,attr,omitempty"`
	URL          string    `xml:"url,attr,omitempty"`
	Language     string    `xml:"language,attr,omitempty"`
	Title        string    `xml:"title,attr,omitempty"`
	Version      string    `xml:"version,attr,omitempty"`
	Description  string    `xml:"description,attr,omitempty"`
}
// NewOPML creates a new OPML structure from a slice of bytes.
func NewOPML(b []byte) (*OPML, error) {
	doc := new(OPML)
	if err := xml.Unmarshal(b, doc); err != nil {
		return nil, err
	}
	return doc, nil
}
// NewOPMLFromURL creates a new OPML structure from an URL.
func NewOPMLFromURL(url string) (*OPML, error) {
resp, err := http.Get(url)
if err != nil {
return nil, err
}
defer resp.Body.Close()
b, err := ioutil.ReadAll(resp.Body)
if err != nil
|
return NewOPML(b)
}
// NewOPMLFromFile creates a new OPML structure from a file.
func NewOPMLFromFile(filePath string) (*OPML, error) {
	content, readErr := ioutil.ReadFile(filePath)
	if readErr != nil {
		return nil, readErr
	}
	return NewOPML(content)
}
// Outlines returns a slice of the outlines.
// These are the top-level outlines of the document body; nested outlines
// are reachable through each element's Outlines field.
func (doc OPML) Outlines() []Outline {
	return doc.Body.Outlines
}
// XML exports the OPML document to a XML string.
// The output is tab-indented and prefixed with the standard XML header;
// on marshal failure the header plus empty body is returned with the error.
func (doc OPML) XML() (string, error) {
	raw, err := xml.MarshalIndent(doc, "", "\t")
	return xml.Header + string(raw), err
}
|
{
return nil, err
}
|
diskspace.py
|
# coding=utf-8
"""
Uses /proc/mounts and os.statvfs() to get disk space usage
#### Dependencies
* /proc/mounts
#### Examples
# no exclude filters at all
exclude_filters =,
# exclude everything that begins /boot or /mnt
exclude_filters = ^/boot, ^/mnt
# exclude everything that includes the letter 'm'
exclude_filters = m,
"""
import diamond.collector
import diamond.convertor
import os
import re
try:
import psutil
except ImportError:
psutil = None
class DiskSpaceCollector(diamond.collector.Collector):
def get_default_config_help(self):
config_help = super(DiskSpaceCollector, self).get_default_config_help()
config_help.update({
'filesystems': "filesystems to examine",
'exclude_filters':
"A list of regex patterns. Any filesystem" +
" matching any of these patterns will be excluded from disk" +
" space metrics collection",
})
return config_help
def get_default_config(self):
"""
Returns the default collector settings
"""
config = super(DiskSpaceCollector, self).get_default_config()
config.update({
'path': 'diskspace',
# filesystems to examine
'filesystems': 'ext2, ext3, ext4, xfs, glusterfs, nfs, nfs4, ' +
' ntfs, hfs, fat32, fat16, btrfs',
# exclude_filters
# A list of regex patterns
# A filesystem matching any of these patterns will be excluded
# from disk space metrics collection.
#
# Examples:
# exclude_filters =,
# no exclude filters at all
# exclude_filters = ^/boot, ^/mnt
# exclude everything that begins /boot or /mnt
# exclude_filters = m,
# exclude everything that includes the letter "m"
'exclude_filters': ['^/export/home'],
# Default numeric output
'byte_unit': ['byte']
})
return config
def process_config(self):
super(DiskSpaceCollector, self).process_config()
# Precompile things
self.exclude_filters = self.config['exclude_filters']
if isinstance(self.exclude_filters, basestring):
self.exclude_filters = [self.exclude_filters]
if not self.exclude_filters:
self.exclude_reg = re.compile('!.*')
else:
self.exclude_reg = re.compile('|'.join(self.exclude_filters))
self.filesystems = []
if isinstance(self.config['filesystems'], basestring):
for filesystem in self.config['filesystems'].split(','):
self.filesystems.append(filesystem.strip())
elif isinstance(self.config['filesystems'], list):
self.filesystems = self.config['filesystems']
def get_disk_labels(self):
|
def get_file_systems(self):
"""
Creates a map of mounted filesystems on the machine.
iostat(1): Each sector has size of 512 bytes.
Returns:
st_dev -> FileSystem(device, mount_point)
"""
result = {}
if os.access('/proc/mounts', os.R_OK):
file = open('/proc/mounts')
for line in file:
try:
mount = line.split()
device = mount[0]
mount_point = mount[1]
fs_type = mount[2]
except (IndexError, ValueError):
continue
# Skip the filesystem if it is not in the list of valid
# filesystems
if fs_type not in self.filesystems:
self.log.debug("Ignoring %s since it is of type %s " +
" which is not in the list of filesystems.",
mount_point, fs_type)
continue
# Process the filters
if self.exclude_reg.search(mount_point):
self.log.debug("Ignoring %s since it is in the " +
"exclude_filter list.", mount_point)
continue
if ((('/' in device or ':' in device or device == 'tmpfs') and
mount_point.startswith('/'))):
try:
stat = os.stat(mount_point)
except OSError:
self.log.debug("Path %s is not mounted - skipping.",
mount_point)
continue
if stat.st_dev in result:
continue
result[stat.st_dev] = {
'device': os.path.realpath(device),
'mount_point': mount_point,
'fs_type': fs_type
}
file.close()
else:
if not psutil:
self.log.error('Unable to import psutil')
return None
partitions = psutil.disk_partitions(False)
for partition in partitions:
result[len(result)] = {
'device': os.path.realpath(partition.device),
'mount_point': partition.mountpoint,
'fs_type': partition.fstype
}
pass
return result
    def collect(self):
        # Publish free/used/avail and inode metrics for each mounted
        # filesystem discovered by get_file_systems().
        labels = self.get_disk_labels()
        results = self.get_file_systems()
        if not results:
            self.log.error('No diskspace metrics retrieved')
            return None
        # Python 2 API (itervalues).
        for info in results.itervalues():
            if info['device'] in labels:
                name = labels[info['device']]
            else:
                # Derive a metric-safe name from the mount point
                # (e.g. '/var/log' -> '_var_log', '/' -> 'root').
                name = info['mount_point'].replace('/', '_')
                name = name.replace('.', '_').replace('\\', '')
                if name == '_':
                    name = 'root'
                if name == '_tmp':
                    name = 'tmp'
            if hasattr(os, 'statvfs'): # POSIX
                try:
                    data = os.statvfs(info['mount_point'])
                except OSError as e:
                    self.log.exception(e)
                    continue
                # Changed from data.f_bsize as f_frsize seems to be a more
                # accurate representation of block size on multiple POSIX
                # operating systems.
                block_size = data.f_frsize
                blocks_total = data.f_blocks
                blocks_free = data.f_bfree
                blocks_avail = data.f_bavail
                inodes_total = data.f_files
                inodes_free = data.f_ffree
                inodes_avail = data.f_favail
            elif os.name == 'nt': # Windows
                # fixme: used still not exact compared to disk_usage.py
                # from psutil
                raw_data = psutil.disk_usage(info['mount_point'])
                block_size = 1 # fixme: ?
                blocks_total = raw_data.total
                blocks_free = raw_data.free
            else:
                raise NotImplementedError("platform not supported")
            for unit in self.config['byte_unit']:
                metric_name = '%s.%s_percentfree' % (name, unit)
                try:
                    metric_value = float(blocks_free) / float(
                        blocks_free + (blocks_total - blocks_free)) * 100
                except ZeroDivisionError:
                    metric_value = 0
                self.publish_gauge(metric_name, metric_value, 2)
                metric_name = '%s.%s_used' % (name, unit)
                metric_value = float(block_size) * float(
                    blocks_total - blocks_free)
                metric_value = diamond.convertor.binary.convert(
                    value=metric_value, oldUnit='byte', newUnit=unit)
                self.publish_gauge(metric_name, metric_value, 2)
                metric_name = '%s.%s_free' % (name, unit)
                metric_value = float(block_size) * float(blocks_free)
                metric_value = diamond.convertor.binary.convert(
                    value=metric_value, oldUnit='byte', newUnit=unit)
                self.publish_gauge(metric_name, metric_value, 2)
                # blocks_avail is only set on the POSIX branch above.
                if os.name != 'nt':
                    metric_name = '%s.%s_avail' % (name, unit)
                    metric_value = float(block_size) * float(blocks_avail)
                    metric_value = diamond.convertor.binary.convert(
                        value=metric_value, oldUnit='byte', newUnit=unit)
                    self.publish_gauge(metric_name, metric_value, 2)
            # Inode metrics are likewise POSIX-only.
            if os.name != 'nt':
                if float(inodes_total) > 0:
                    self.publish_gauge(
                        '%s.inodes_percentfree' % name,
                        float(inodes_free) / float(inodes_total) * 100)
                self.publish_gauge('%s.inodes_used' % name,
                                   inodes_total - inodes_free)
                self.publish_gauge('%s.inodes_free' % name, inodes_free)
                self.publish_gauge('%s.inodes_avail' % name, inodes_avail)
|
"""
Creates a mapping of device nodes to filesystem labels
"""
path = '/dev/disk/by-label/'
labels = {}
if not os.path.isdir(path):
return labels
for label in os.listdir(path):
label = label.replace('\\x2f', '/')
device = os.path.realpath(path + '/' + label)
labels[device] = label
return labels
|
bitcoin_hr.ts
|
<?xml version="1.0" ?><!DOCTYPE TS><TS language="hr" version="2.1">
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About VITAL</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+39"/>
<source><b>VITAL</b> version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Copyright © 2009-2014 The Bitcoin developers
Copyright © 2012-2014 The NovaCoin developers
Copyright © 2014 The VITAL developers</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<source>Double-click to edit address or label</source>
<translation>Dvostruki klik za uređivanje adrese ili oznake</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>Dodajte novu adresu</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopiraj trenutno odabranu adresu u međuspremnik</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-46"/>
<source>These are your VITAL addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+60"/>
<source>&Copy Address</source>
<translation>&Kopirati adresu</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a VITAL address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Verify a message to ensure it was signed with a specified VITAL address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>&Brisanje</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+65"/>
<source>Copy &Label</source>
<translation>Kopirati &oznaku</translation>
</message>
<message>
<location line="+2"/>
<source>&Edit</source>
<translation>&Izmjeniti</translation>
</message>
<message>
<location line="+250"/>
<source>Export Address Book Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Datoteka vrijednosti odvojenih zarezom (*. csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>Oznaka</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(bez oznake)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>Unesite lozinku</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>Nova lozinka</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>Ponovite novu lozinku</translation>
</message>
<message>
<location line="+33"/>
<source>Serves to disable the trivial sendmoney when OS account compromised. Provides no real security.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>For staking only</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+35"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Unesite novi lozinku za novčanik. <br/> Molimo Vas da koristite zaporku od <b>10 ili više slučajnih znakova,</b> ili <b>osam ili više riječi.</b></translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>Šifriranje novčanika</translation>
</message>
<message>
<location line="+7"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Ova operacija treba lozinku vašeg novčanika kako bi se novčanik otključao.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>Otključaj novčanik</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Ova operacija treba lozinku vašeg novčanika kako bi se novčanik dešifrirao.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>Dešifriranje novčanika.</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>Promjena lozinke</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Unesite staru i novu lozinku za novčanik.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>Potvrdi šifriranje novčanika</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR COINS</b>!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Jeste li sigurni da želite šifrirati svoj novčanik?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+103"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>Upozorenje: Tipka Caps Lock je uključena!</translation>
</message>
<message>
<location line="-133"/>
<location line="+60"/>
<source>Wallet encrypted</source>
<translation>Novčanik šifriran</translation>
</message>
<message>
<location line="-58"/>
<source>VITAL will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your coins from being stolen by malware infecting your computer.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+44"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>Šifriranje novčanika nije uspjelo</translation>
</message>
<message>
<location line="-56"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Šifriranje novčanika nije uspjelo zbog interne pogreške. Vaš novčanik nije šifriran.</translation>
</message>
<message>
<location line="+7"/>
<location line="+50"/>
<source>The supplied passphrases do not match.</source>
<translation>Priložene lozinke se ne podudaraju.</translation>
</message>
<message>
<location line="-38"/>
<source>Wallet unlock failed</source>
<translation>Otključavanje novčanika nije uspjelo</translation>
</message>
<message>
<location line="+1"/>
<location line="+12"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Lozinka za dešifriranje novčanika nije točna.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>Dešifriranje novčanika nije uspjelo</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>Lozinka novčanika je uspješno promijenjena.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+280"/>
<source>Sign &message...</source>
<translation>&Potpišite poruku...</translation>
</message>
<message>
<location line="+242"/>
<source>Synchronizing with network...</source>
<translation>Usklađivanje s mrežom ...</translation>
</message>
<message>
<location line="-308"/>
<source>&Overview</source>
<translation>&Pregled</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>Prikaži opći pregled novčanika</translation>
</message>
<message>
<location line="+17"/>
<source>&Transactions</source>
<translation>&Transakcije</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>Pretraži povijest transakcija</translation>
</message>
<message>
<location line="+5"/>
<source>&Address Book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit the list of stored addresses and labels</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-13"/>
<source>&Receive coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show the list of addresses for receiving payments</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-7"/>
<source>&Send coins</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>E&xit</source>
<translation>&Izlaz</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>Izlazak iz programa</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about VITAL</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>Više o &Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>Prikaži informacije o Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&Postavke</translation>
</message>
<message>
<location line="+4"/>
<source>&Encrypt Wallet...</source>
<translation>&Šifriraj novčanik...</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>&Backup novčanika...</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>&Promijena lozinke...</translation>
</message>
<message numerus="yes">
<location line="+250"/>
<source>~%n block(s) remaining</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Downloaded %1 of %2 blocks of transaction history (%3% done).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-247"/>
<source>&Export...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-62"/>
<source>Send coins to a VITAL address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Modify configuration options for VITAL</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Export the data in the current tab to a file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-14"/>
<source>Encrypt or decrypt wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup wallet to another location</source>
<translation>Napravite sigurnosnu kopiju novčanika na drugoj lokaciji</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>Promijenite lozinku za šifriranje novčanika</translation>
</message>
<message>
<location line="+10"/>
<source>&Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>&Verify message...</source>
<translation>&Potvrdite poruku...</translation>
</message>
<message>
<location line="-200"/>
<source>VITAL</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet</source>
<translation>Novčanik</translation>
</message>
<message>
<location line="+178"/>
<source>&About VITAL</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Unlock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>&Lock Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Lock wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+34"/>
<source>&File</source>
<translation>&Datoteka</translation>
</message>
<message>
<location line="+8"/>
<source>&Settings</source>
<translation>&Konfiguracija</translation>
</message>
<message>
<location line="+8"/>
<source>&Help</source>
<translation>&Pomoć</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
<translation>Traka kartica</translation>
</message>
<message>
<location line="+8"/>
<source>Actions toolbar</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+9"/>
<source>[testnet]</source>
<translation>[testnet]</translation>
</message>
<message>
<location line="+0"/>
<location line="+60"/>
<source>VITAL client</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+70"/>
<source>%n active connection(s) to VITAL network</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+40"/>
<source>Downloaded %1 blocks of transaction history.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+413"/>
|
<location line="+6"/>
<source>Not staking because wallet is locked</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because wallet is syncing</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Not staking because you don't have mature coins</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="-403"/>
<source>%n second(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="-284"/>
<source>&Unlock Wallet...</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+288"/>
<source>%n minute(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s) ago</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Up to date</source>
<translation>Ažurno</translation>
</message>
<message>
<location line="+7"/>
<source>Catching up...</source>
<translation>Ažuriranje...</translation>
</message>
<message>
<location line="+10"/>
<source>Last received block was generated %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm transaction fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+27"/>
<source>Sent transaction</source>
<translation>Poslana transakcija</translation>
</message>
<message>
<location line="+1"/>
<source>Incoming transaction</source>
<translation>Dolazna transakcija</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
        <translation>Datum: %1
Iznos: %2
Tip: %3
Adresa: %4
</translation>
</message>
<message>
<location line="+100"/>
<location line="+15"/>
<source>URI handling</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-15"/>
<location line="+15"/>
<source>URI can not be parsed! This can be caused by an invalid VITAL address or malformed URI parameters.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Novčanik je <b>šifriran</b> i trenutno <b>otključan</b></translation>
</message>
<message>
<location line="+10"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Novčanik je <b>šifriran</b> i trenutno <b>zaključan</b></translation>
</message>
<message>
<location line="+25"/>
<source>Backup Wallet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation type="unfinished"/>
</message>
<message numerus="yes">
<location line="+76"/>
<source>%n second(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n minute(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n hour(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+18"/>
<source>Not staking</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../bitcoin.cpp" line="+109"/>
<source>A fatal error occurred. VITAL can no longer continue safely and will quit.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+90"/>
<source>Network Alert</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<location filename="../forms/coincontroldialog.ui" line="+14"/>
<source>Coin Control</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Amount:</source>
<translation>Iznos:</translation>
</message>
<message>
<location line="+32"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+48"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="+551"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../forms/coincontroldialog.ui" line="+51"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+69"/>
<source>(un)select all</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Tree mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>List mode</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+45"/>
<source>Amount</source>
<translation>Iznos</translation>
</message>
<message>
<location line="+5"/>
<source>Label</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+5"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+5"/>
<source>Confirmations</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirmed</source>
<translation>Potvrđeno</translation>
</message>
<message>
<location line="+5"/>
<source>Priority</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../coincontroldialog.cpp" line="-515"/>
<source>Copy address</source>
<translation>Kopirati adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopirati oznaku</translation>
</message>
<message>
<location line="+1"/>
<location line="+26"/>
<source>Copy amount</source>
<translation>Kopiraj iznos</translation>
</message>
<message>
<location line="-25"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+317"/>
<source>highest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium-high</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>low-medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>low</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>lowest</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+155"/>
<source>DUST</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>yes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>This label turns red, if the transaction size is bigger than 10000 bytes.
This means a fee of at least %1 per kb is required.
Can vary +/- 1 Byte per input.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transactions with higher priority get more likely into a block.
This label turns red, if the priority is smaller than "medium".
This means a fee of at least %1 per kb is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if any recipient receives an amount smaller than %1.
This means a fee of at least %2 is required.
Amounts below 0.546 times the minimum relay fee are shown as DUST.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>This label turns red, if the change is smaller than %1.
This means a fee of at least %2 is required.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+37"/>
<location line="+66"/>
<source>(no label)</source>
<translation>(bez oznake)</translation>
</message>
<message>
<location line="-9"/>
<source>change from %1 (%2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>(change)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>Izmjeni adresu</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>&Oznaka</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&Adresa</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+20"/>
<source>New receiving address</source>
<translation>Nova adresa za primanje</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>Nova adresa za slanje</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>Uredi adresu za primanje</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>Uredi adresu za slanje</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>Upisana adresa "%1" je već u adresaru.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid VITAL address.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>Ne mogu otključati novčanik.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>Stvaranje novog ključa nije uspjelo.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+420"/>
<location line="+12"/>
<source>VITAL-Qt</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>Postavke</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>&Glavno</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB. Fee 0.01 recommended.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>Plati &naknadu za transakciju</translation>
</message>
<message>
<location line="+31"/>
<source>Reserved amount does not participate in staking and is therefore spendable at any time.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Reserve</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Automatically start VITAL after logging in to the system.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Start VITAL on system login</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Detach block and address databases at shutdown. This means they can be moved to another data directory, but it slows down shutdown. The wallet is always detached.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Detach databases at shutdown</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>&Network</source>
<translation>&Mreža</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the VITAL client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>Mapiraj port koristeći &UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the VITAL network through a SOCKS proxy (e.g. when connecting through Tor).</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>Proxy &IP:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port od proxy-a (npr. 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>SOCKS &Verzija:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>&Prozor</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Prikaži samo ikonu u sistemskoj traci nakon minimiziranja prozora</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>&Minimiziraj u sistemsku traku umjesto u traku programa</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimizirati umjesto izaći iz aplikacije kada je prozor zatvoren. Kada je ova opcija omogućena, aplikacija će biti zatvorena tek nakon odabira Izlaz u izborniku.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>M&inimiziraj kod zatvaranja</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>&Prikaz</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting VITAL.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>&Jedinica za prikazivanje iznosa:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
        <translation>Izaberite željenu najmanju podjedinicu koja će biti prikazana u sučelju i koja će se koristiti za plaćanje.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show VITAL addresses in the transaction list or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>&Prikaži adrese u popisu transakcija</translation>
</message>
<message>
<location line="+7"/>
<source>Whether to show coin control features or not.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Display coin &control features (experts only!)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>&U redu</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>&Odustani</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+55"/>
<source>default</source>
<translation>standardne vrijednosti</translation>
</message>
<message>
<location line="+149"/>
<location line="+9"/>
<source>Warning</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting VITAL.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>Oblik</translation>
</message>
<message>
<location line="+33"/>
<location line="+231"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the VITAL network after a connection is established, but this process has not completed yet.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-160"/>
<source>Stake:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-107"/>
<source>Wallet</source>
<translation>Novčanik</translation>
</message>
<message>
<location line="+49"/>
<source>Spendable:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Your current spendable balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>Immature:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Total:</source>
<translation>Ukupno:</translation>
</message>
<message>
<location line="+16"/>
<source>Your current total balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>Nedavne transakcije</b></translation>
</message>
<message>
<location line="-108"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-29"/>
<source>Total of coins that was staked, and do not yet count toward the current balance</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../overviewpage.cpp" line="+113"/>
<location line="+1"/>
<source>out of sync</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+59"/>
<source>Request Payment</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
<translation>Ime klijenta</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+348"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
<translation>Verzija klijenta</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>&Informacija</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>Koristim OpenSSL verziju</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>Mreža</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>Broj konekcija</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>Lanac blokova</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>Trenutni broj blokova</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>Procjenjeni ukupni broj blokova</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>Posljednje vrijeme bloka</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>&Otvori</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Show the VITAL-Qt help message to get a list with possible VITAL command-line options.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>&Konzola</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-104"/>
<source>VITAL - Debug window</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>VITAL Core</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Open the VITAL debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>Očisti konzolu</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-33"/>
<source>Welcome to the VITAL RPC console.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+182"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>Slanje novca</translation>
</message>
<message>
<location line="+76"/>
<source>Coin Control Features</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+20"/>
<source>Inputs...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>automatically selected</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>Insufficient funds!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Quantity:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+22"/>
<location line="+35"/>
<source>0</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-19"/>
<source>Bytes:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Amount:</source>
<translation>Iznos:</translation>
</message>
<message>
<location line="+22"/>
<location line="+86"/>
<location line="+86"/>
<location line="+32"/>
<source>0.00 BC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-191"/>
<source>Priority:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>medium</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Low Output:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+19"/>
<source>no</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+32"/>
<source>After Fee:</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>Change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+50"/>
<source>custom change address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+106"/>
<source>Send to multiple recipients at once</source>
<translation>Pošalji k nekoliko primatelja odjednom</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>&Dodaj primatelja</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>Obriši &sve</translation>
</message>
<message>
<location line="+28"/>
<source>Balance:</source>
<translation>Stanje:</translation>
</message>
<message>
<location line="+16"/>
<source>123.456 BC</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>Potvrdi akciju slanja</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>&Pošalji</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-173"/>
<source>Enter a VITAL address (e.g. 18gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>Copy quantity</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopiraj iznos</translation>
</message>
<message>
<location line="+1"/>
<source>Copy fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy after fee</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy bytes</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy priority</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy low output</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Copy change</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+86"/>
<source><b>%1</b> to %2 (%3)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>Potvrdi slanje novca</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation type="unfinished"/>
</message>
<message>
<location line="+29"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>Adresa primatelja je nevaljala, molimo provjerite je ponovo.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>Iznos mora biti veći od 0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>Iznos je veći od stanja računa.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Iznos je veći od stanja računa kad se doda naknada za transakcije od %1.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Pronašli smo adresu koja se ponavlja. U svakom plaćanju program može svaku adresu koristiti samo jedanput.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+251"/>
<source>WARNING: Invalid VITAL address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<source>(no label)</source>
<translation>(bez oznake)</translation>
</message>
<message>
<location line="+4"/>
<source>WARNING: unknown change address</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>&Iznos:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>&Primatelj plaćanja:</translation>
</message>
<message>
<location line="+24"/>
<location filename="../sendcoinsentry.cpp" line="+25"/>
<source>Enter a label for this address to add it to your address book</source>
<translation>Unesite oznaku za ovu adresu kako bi ju dodali u vaš adresar</translation>
</message>
<message>
<location line="+9"/>
<source>&Label:</source>
<translation>&Oznaka:</translation>
</message>
<message>
<location line="+18"/>
<source>The address to send the payment to (e.g. 18gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Choose address from address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
<translation>Zalijepi adresu iz međuspremnika</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a VITAL address (e.g. 18gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+13"/>
<location line="+124"/>
<source>&Sign Message</source>
<translation>&Potpišite poruku</translation>
</message>
<message>
<location line="-118"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Možete potpisati poruke sa svojom adresom kako bi dokazali da ih posjedujete. Budite oprezni da ne potpisujete ništa mutno, jer bi vas phishing napadi mogli na prevaru natjerati da prepišete svoj identitet njima. Potpisujte samo detaljno objašnjene izjave sa kojima se slažete.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. 18gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+10"/>
<location line="+203"/>
<source>Choose an address from the address book</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-193"/>
<location line="+203"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-193"/>
<source>Paste address from clipboard</source>
<translation>Zalijepi adresu iz međuspremnika</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
<translation>Upišite poruku koju želite potpisati ovdje</translation>
</message>
<message>
<location line="+24"/>
<source>Copy the current signature to the system clipboard</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this VITAL address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all sign message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>Obriši &sve</translation>
</message>
<message>
<location line="-87"/>
<location line="+70"/>
<source>&Verify Message</source>
<translation>&Potvrdite poruku</translation>
</message>
<message>
<location line="-64"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. 18gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified VITAL address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+17"/>
<source>Reset all verify message fields</source>
<translation type="unfinished"/>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a VITAL address (e.g. 18gZqgY4r2RoEdqYk3QsAqFckyf9pRHN6i)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Enter VITAL signature</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>Otključavanje novčanika je otkazano.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>Poruka je potpisana.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+19"/>
<source>Open until %1</source>
<translation>Otvoren do %1</translation>
</message>
<message numerus="yes">
<location line="-2"/>
<source>Open for %n block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+8"/>
<source>conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>%1/offline</source>
<translation>%1 nije dostupan</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/nepotvrđeno</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 potvrda</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>Izvor</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>Generiran</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>Od</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>Za</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>vlastita adresa</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>oznaka</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>Uplaćeno</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>Nije prihvaćeno</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>Zaduženje</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>Naknada za transakciju</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>Neto iznos</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>Poruka</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>Komentar</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>ID transakcije</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 35 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>Transakcija</translation>
</message>
<message>
<location line="+5"/>
<source>Inputs</source>
<translation>Unosi</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>Iznos</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-211"/>
<source>, has not been successfully broadcast yet</source>
<translation>, još nije bio uspješno emitiran</translation>
</message>
<message>
<location line="+35"/>
<source>unknown</source>
<translation>nepoznato</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>Detalji transakcije</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>Ova panela prikazuje detaljni opis transakcije</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+226"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>Tip</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>Iznos</translation>
</message>
<message>
<location line="+60"/>
<source>Open until %1</source>
<translation>Otvoren do %1</translation>
</message>
<message>
<location line="+12"/>
<source>Confirmed (%1 confirmations)</source>
<translation>Potvrđen (%1 potvrda)</translation>
</message>
<message numerus="yes">
<location line="-15"/>
<source>Open for %n more block(s)</source>
<translation type="unfinished"><numerusform></numerusform><numerusform></numerusform><numerusform></numerusform></translation>
</message>
<message>
<location line="+6"/>
<source>Offline</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Conflicted</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Immature (%1 confirmations, will be available after %2)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Generirano - Upozorenje: ovaj blok nije bio primljen od strane bilo kojeg drugog noda i vjerojatno neće biti prihvaćen!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>Generirano, ali nije prihvaćeno</translation>
</message>
<message>
<location line="+42"/>
<source>Received with</source>
<translation>Primljeno s</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>Primljeno od</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>Poslano za</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>Plaćanje samom sebi</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>Rudareno</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/d)</translation>
</message>
<message>
<location line="+190"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Status transakcije. Zadržite pokazivač iznad ovog polja za prikaz broja potvrda.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>Datum i vrijeme kada je transakcija primljena.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>Vrsta transakcije.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>Odredišna adresa transakcije.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>Iznos odbijen od ili dodan k saldu.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+55"/>
<location line="+16"/>
<source>All</source>
<translation>Sve</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>Danas</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>Ovaj tjedan</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>Ovaj mjesec</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>Prošli mjesec</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>Ove godine</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>Raspon...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>Primljeno s</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>Poslano za</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>Samom sebi</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>Rudareno</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>Ostalo</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
<translation>Unesite adresu ili oznaku za pretraživanje</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>Min iznos</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>Kopirati adresu</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>Kopirati oznaku</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>Kopiraj iznos</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>Izmjeniti oznaku</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+144"/>
<source>Export Transaction Data</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>Datoteka podataka odvojenih zarezima (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>Potvrđeno</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>Tip</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>Oznaka</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>Adresa</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>Iznos</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>Raspon:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
<translation>do</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+206"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+33"/>
<source>VITAL version</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Usage:</source>
<translation>Upotreba:</translation>
</message>
<message>
<location line="+1"/>
<source>Send command to -server or vitald</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>List commands</source>
<translation>Prikaži komande</translation>
</message>
<message>
<location line="+1"/>
<source>Get help for a command</source>
<translation>Potraži pomoć za komandu</translation>
</message>
<message>
<location line="+2"/>
<source>Options:</source>
<translation>Postavke:</translation>
</message>
<message>
<location line="+2"/>
<source>Specify configuration file (default: vital.conf)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify pid file (default: vitald.pid)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify wallet file (within data directory)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>Odredi direktorij za datoteke</translation>
</message>
<message>
<location line="+2"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>Postavi cache za bazu podataka u MB (zadano:25)</translation>
</message>
<message>
<location line="+1"/>
<source>Set database disk log size in megabytes (default: 100)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+6"/>
<source>Listen for connections on <port> (default: 37385 or testnet: 47385)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>Održavaj najviše <n> veza sa članovima (default: 125)</translation>
</message>
<message>
<location line="+3"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Specify your own public address</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Bind to given address. Use [host]:port notation for IPv6</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Stake your coins to support network and gain reward (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>Prag za odspajanje članova koji se čudno ponašaju (default: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>Broj sekundi koliko se članovima koji se čudno ponašaju neće dopustiti da se opet spoje (default: 86400)</translation>
</message>
<message>
<location line="-44"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Detach block and address databases. Increases shutdown time (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+109"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-87"/>
<source>Listen for JSON-RPC connections on <port> (default: 37386 or testnet: 47386)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-11"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>Prihvati komande iz tekst moda i JSON-RPC</translation>
</message>
<message>
<location line="+101"/>
<source>Error: Transaction creation failed </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-5"/>
<source>Error: Wallet locked, unable to create transaction </source>
<translation type="unfinished"/>
</message>
<message>
<location line="-8"/>
<source>Importing blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Importing bootstrap blockchain data file.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-88"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>Izvršavaj u pozadini kao uslužnik i prihvaćaj komande</translation>
</message>
<message>
<location line="+1"/>
<source>Use the test network</source>
<translation>Koristi test mrežu</translation>
</message>
<message>
<location line="-24"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-38"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+117"/>
<source>Error initializing database environment %s! To recover, BACKUP THAT DIRECTORY, then remove everything from it except for wallet.dat.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-20"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Upozorenje: -paytxfee je podešen na preveliki iznos. To je iznos koji ćete platiti za obradu transakcije.</translation>
</message>
<message>
<location line="+61"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong VITAL will not work properly.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-31"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-18"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-30"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Block creation options:</source>
<translation>Opcije za kreiranje bloka:</translation>
</message>
<message>
<location line="-62"/>
<source>Connect only to the specified node(s)</source>
<translation>Poveži se samo sa određenim nodom</translation>
</message>
<message>
<location line="+4"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+94"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-90"/>
<source>Find peers using DNS lookup (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync checkpoints policy (default: strict)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+83"/>
<source>Invalid -tor address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Invalid amount for -reservebalance=<amount></source>
<translation type="unfinished"/>
</message>
<message>
<location line="-82"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-16"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Prepend debug output with timestamp</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+35"/>
<source>SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>SSL postavke: (za detalje o podešavanju SSL opcija vidi Bitcoin Wiki)</translation>
</message>
<message>
<location line="-74"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+41"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Šalji trace/debug informacije na konzolu umjesto u debug.log datoteku</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+28"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>Podesite minimalnu veličinu bloka u bajtovima (default: 0)</translation>
</message>
<message>
<location line="-29"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-42"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>Odredi vremenski prozor za spajanje na mrežu u milisekundama (ugrađeni izbor: 5000)</translation>
</message>
<message>
<location line="+109"/>
<source>Unable to sign checkpoint, wrong checkpointkey?
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-80"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>Pokušaj koristiti UPnP da otvoriš port za uslugu (default: 0)</translation>
</message>
<message>
<location line="-1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>Pokušaj koristiti UPnP da otvoriš port za uslugu (default: 1 when listening)</translation>
</message>
<message>
<location line="-25"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+42"/>
<source>Username for JSON-RPC connections</source>
<translation>Korisničko ime za JSON-RPC veze</translation>
</message>
<message>
<location line="+47"/>
<source>Verifying database integrity...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+57"/>
<source>WARNING: syncronized checkpoint violation detected, but skipped!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Warning: Disk space is low!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-2"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-48"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-54"/>
<source>Password for JSON-RPC connections</source>
<translation>Lozinka za JSON-RPC veze</translation>
</message>
<message>
<location line="-84"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=vitalrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "VITAL Alert" admin@foo.com
</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+51"/>
<source>Find peers using internet relay chat (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Sync time with other nodes. Disable if time on your system is precise e.g. syncing with NTP (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+15"/>
<source>When creating transactions, ignore inputs with value less than this (default: 0.01)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+16"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>Dozvoli JSON-RPC povezivanje s određene IP adrese</translation>
</message>
<message>
<location line="+1"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>Pošalji komande nodu na adresi <ip> (ugrađeni izbor: 127.0.0.1)</translation>
</message>
<message>
<location line="+1"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Izvršite naredbu kada se najbolji blok promjeni (%s u cmd je zamjenjen sa block hash)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Require a confirmations for change (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Enforce transaction scripts to use canonical PUSH operators (default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Upgrade wallet to latest format</source>
<translation>Nadogradite novčanik u posljednji format.</translation>
</message>
<message>
<location line="+1"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>Podesi memorijski prostor za ključeve na <n> (ugrađeni izbor: 100)</translation>
</message>
<message>
<location line="+1"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Ponovno pretraži lanac blokova za transakcije koje nedostaju</translation>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 2500, 0 = all)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-6, default: 1)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Imports blocks from external blk000?.dat file</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>Koristi OpenSSL (https) za JSON-RPC povezivanje</translation>
</message>
<message>
<location line="+1"/>
<source>Server certificate file (default: server.cert)</source>
<translation>Uslužnikov SSL certifikat (ugrađeni izbor: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>Uslužnikov privatni ključ (ugrađeni izbor: server.pem)</translation>
</message>
<message>
<location line="+1"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+53"/>
<source>Error: Wallet unlocked for staking only, unable to create transaction.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+18"/>
<source>WARNING: Invalid checkpoint found! Displayed transactions may not be correct! You may need to upgrade, or notify developers.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-158"/>
<source>This help message</source>
<translation>Ova poruka za pomoć</translation>
</message>
<message>
<location line="+95"/>
<source>Wallet %s resides outside data directory %s.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot obtain a lock on data directory %s. VITAL is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-98"/>
<source>VITAL</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+140"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>Program ne može koristiti %s na ovom računalu (bind returned error %d, %s)</translation>
</message>
<message>
<location line="-130"/>
<source>Connect through socks proxy</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+3"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Dozvoli DNS upite za dodavanje nodova i povezivanje</translation>
</message>
<message>
<location line="+122"/>
<source>Loading addresses...</source>
<translation>Učitavanje adresa...</translation>
</message>
<message>
<location line="-15"/>
<source>Error loading blkindex.dat</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Greška kod učitavanja wallet.dat: Novčanik pokvaren</translation>
</message>
<message>
<location line="+4"/>
<source>Error loading wallet.dat: Wallet requires newer version of VITAL</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Wallet needed to be rewritten: restart VITAL to complete</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat</source>
<translation>Greška kod učitavanja wallet.dat</translation>
</message>
<message>
<location line="-16"/>
<source>Invalid -proxy address: '%s'</source>
<translation>Nevaljala -proxy adresa: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+4"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-24"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Nevaljali iznos za opciju -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Error: could not start node</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+11"/>
<source>Sending...</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+5"/>
<source>Invalid amount</source>
<translation>Nevaljali iznos za opciju</translation>
</message>
<message>
<location line="+1"/>
<source>Insufficient funds</source>
<translation>Nedovoljna sredstva</translation>
</message>
<message>
<location line="-34"/>
<source>Loading block index...</source>
<translation>Učitavanje indeksa blokova...</translation>
</message>
<message>
<location line="-103"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Unesite nod s kojim se želite spojiti and attempt to keep the connection open</translation>
</message>
<message>
<location line="+122"/>
<source>Unable to bind to %s on this computer. VITAL is probably already running.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-97"/>
<source>Fee per KB to add to transactions you send</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+55"/>
<source>Invalid amount for -mininput=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+25"/>
<source>Loading wallet...</source>
<translation>Učitavanje novčanika...</translation>
</message>
<message>
<location line="+8"/>
<source>Cannot downgrade wallet</source>
<translation>Nije moguće novčanik vratiti na prijašnju verziju.</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot initialize keypool</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Cannot write default address</source>
<translation>Nije moguće upisati zadanu adresu.</translation>
</message>
<message>
<location line="+1"/>
<source>Rescanning...</source>
<translation>Rescaniranje</translation>
</message>
<message>
<location line="+5"/>
<source>Done loading</source>
<translation>Učitavanje gotovo</translation>
</message>
<message>
<location line="-167"/>
<source>To use the %s option</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+14"/>
<source>Error</source>
<translation>Greška</translation>
</message>
<message>
<location line="+6"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation type="unfinished"/>
</message>
</context>
</TS>
|
<source>Staking.<br>Your weight is %1<br>Network weight is %2<br>Expected time to earn reward is %3</source>
<translation type="unfinished"/>
</message>
<message>
|
scripts.js
|
export const delimiters = {
startDelimiter: '{',
endDelimiter: '}'
}
export const delimit = (text) => {
const { startDelimiter, endDelimiter } = delimiters
return `${startDelimiter}${text}${endDelimiter}`
}
// const REQUIRED_UPLOAD_FIELDS = ['firstName', 'lastName', 'cell']
const TOP_LEVEL_UPLOAD_FIELDS = ['firstName', 'lastName', 'cell', 'zip', 'external_id']
const TEXTER_SCRIPT_FIELDS = ['texterFirstName', 'texterLastName']
// TODO: This will include zipCode even if you ddin't upload it
export const allScriptFields = (customFields) => TOP_LEVEL_UPLOAD_FIELDS.concat(TEXTER_SCRIPT_FIELDS).concat(customFields)
const capitalize = str => { return str.charAt(0).toUpperCase() + str.slice(1).toLowerCase() }
const getScriptFieldValue = (contact, texter, fieldName) => {
let result
if (fieldName === 'texterFirstName') {
const formatFirstName = capitalize(texter.firstName)
result = formatFirstName
} else if (fieldName === 'texterLastName') {
const formatLastName = capitalize(texter.lastName)
result = formatLastName
} else if (TOP_LEVEL_UPLOAD_FIELDS.indexOf(fieldName) !== -1) {
result = contact[fieldName]
} else {
const customFieldNames = JSON.parse(contact.customFields)
|
}
export const applyScript = ({ script, contact, customFields, texter }) => {
const scriptFields = allScriptFields(customFields)
let appliedScript = script
for (const field of scriptFields) {
const re = new RegExp(`${delimit(field)}`, 'g')
appliedScript = appliedScript.replace(re, getScriptFieldValue(contact, texter, field))
}
return appliedScript
}
|
result = customFieldNames[fieldName]
}
return result
|
static_dataset_test.go
|
package cantabular_test
import (
"context"
"errors"
"net/http"
"testing"
"github.com/ONSdigital/dp-api-clients-go/v2/cantabular"
"github.com/ONSdigital/dp-api-clients-go/v2/cantabular/mock"
dperrors "github.com/ONSdigital/dp-api-clients-go/v2/errors"
dphttp "github.com/ONSdigital/dp-net/http"
. "github.com/smartystreets/goconvey/convey"
)
func TestStaticDatasetQueryHappy(t *testing.T) {
Convey("Given a correct response from the /graphql endpoint", t, func() {
testCtx := context.Background()
mockHttpClient := &dphttp.ClienterMock{}
mockGQLClient := &mock.GraphQLClientMock{
QueryFunc: func(ctx context.Context, query interface{}, vars map[string]interface{}) error {
return nil
},
}
cantabularClient := cantabular.NewClient(
cantabular.Config{
Host: "cantabular.host",
ExtApiHost: "cantabular.ext.host",
},
mockHttpClient,
mockGQLClient,
)
Convey("When the StaticDatasetQuery method is called", func() {
req := cantabular.StaticDatasetQueryRequest{}
_, err := cantabularClient.StaticDatasetQuery(testCtx, req)
Convey("No error should be returned", func() {
So(err, ShouldBeNil)
})
})
})
}
func TestStaticDatasetQueryUnHappy(t *testing.T) {
Convey("Given the graphQL Client is not configured", t, func() {
testCtx := context.Background()
mockHttpClient := &dphttp.ClienterMock{}
cantabularClient := cantabular.NewClient(
cantabular.Config{
Host: "cantabular.host",
},
mockHttpClient,
nil,
)
Convey("When the StaticDatasetQuery method is called", func() {
req := cantabular.StaticDatasetQueryRequest{}
_, err := cantabularClient.StaticDatasetQuery(testCtx, req)
So(err, ShouldNotBeNil)
Convey("Status Code 503 Service Unavailable should be recoverable from error", func() {
_, err := cantabularClient.StaticDatasetQuery(testCtx, req)
So(dperrors.StatusCode(err), ShouldEqual, http.StatusServiceUnavailable)
})
})
})
Convey("Given a GraphQL error from the /graphql endpoint", t, func() {
testCtx := context.Background()
mockHttpClient := &dphttp.ClienterMock{}
mockGQLClient := &mock.GraphQLClientMock{
QueryFunc: func(ctx context.Context, query interface{}, vars map[string]interface{}) error {
if q, ok := query.(*cantabular.StaticDatasetQuery); ok {
q.Dataset.Table.Error = "I am error response"
return nil
}
return errors.New("query could not be cast to correct type")
},
}
cantabularClient := cantabular.NewClient(
cantabular.Config{
Host: "cantabular.host",
ExtApiHost: "cantabular.ext.host",
},
mockHttpClient,
mockGQLClient,
)
Convey("When the StaticDatasetQuery method is called", func() {
req := cantabular.StaticDatasetQueryRequest{}
_, err := cantabularClient.StaticDatasetQuery(testCtx, req)
Convey("An error should be returned with status code 400 Bad Request", func() {
So(err, ShouldNotBeNil)
|
})
})
}
|
So(dperrors.StatusCode(err), ShouldEqual, http.StatusBadRequest)
})
|
menu.ts
|
import { Menu, MenuItemConstructorOptions, BrowserWindow } from 'electron';
import { createMineOptionsWindow } from './windows/optionsWindow';
const isMac = process.platform === 'darwin'
function buildTemplate (win: BrowserWindow): MenuItemConstructorOptions[] {
const gameSubMenu: MenuItemConstructorOptions = {
label: '游戏',
accelerator: 'Command+G',
submenu: [
{
label: '难度选择',
accelerator: isMac ? 'Command+M' : 'Ctrl+M',
click () {
// ipcRenderer.send('mine-select-mode');
createMineOptionsWindow(win);
}
}
]
}
const helpSubMenu: MenuItemConstructorOptions = {
label: '帮助',
click () {}
}
return [gameSubMenu, helpSubMenu];
}
export function buildMenu (win:
|
ndow) {
const menu = Menu.buildFromTemplate(buildTemplate(win));
win.setMenu(menu);
return menu;
}
|
BrowserWi
|
bitcoin_nb_NO.ts
|
<TS language="nb_NO" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Høyreklikk for å redigere adresse, eller beskrivelse</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Opprett en ny adresse</translation>
</message>
<message>
<source>&New</source>
<translation>&Ny</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Kopier den valgte adressen til utklippstavlen</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopier</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Lukk</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Slett den valgte adressen fra listen</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Eksporter data i den valgte fliken til en fil</translation>
</message>
<message>
<source>&Export</source>
<translation>&Eksport</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Slett</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Velg en adresse å sende mynter til</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Velg adressen som skal motta myntene</translation>
</message>
<message>
<source>C&hoose</source>
<translation>&Velg</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Avsender adresser</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Mottager adresser</translation>
</message>
<message>
<source>These are your DemoCoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Dette er dine DemoCoin adresser for å sende å sende betalinger. Husk å sjekke beløp og mottager adresser før du sender mynter.</translation>
</message>
<message>
<source>These are your DemoCoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Dette er dine DemoCoin adresse for å motta betalinger. Det er anbefalt å bruke en ny mottager adresse for hver transaksjon.</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>&Kopier adresse</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>Kopier &beskrivelse</translation>
</message>
<message>
<source>&Edit</source>
<translation>R&ediger</translation>
</message>
<message>
<source>Export Address List</source>
<translation>Eksporter adresse listen</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Komma separert fil (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Eksporten feilet</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Fet oppstod en feil ved lagring av adresselisten til %1. Vennligst prøv igjen.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Beskrivelse</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(ingen beskrivelse)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Passord dialog</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Oppgi passord setning</translation>
</message>
<message>
|
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Repeter passorsetningen</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Oppgi passordsetningen for lommeboken. <br/>Vennligst bruk en passordsetninge med <b>ti, eller flere tilfeldige tegn </b>, eller <b>åtte, eller flere ord</b>.</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Krypter lommeboken</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Denne operasjonen krever passordsetningen for å låse opp lommeboken.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Lås opp lommeboken</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Denne operasjonen krever passordsetningen for å dekryptere lommeboken.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Dekrypter lommeboken</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Endre passordsetningen</translation>
</message>
<message>
<source>Enter the old passphrase and new passphrase to the wallet.</source>
<translation>Oppgi den gamle og den nye passordsetningen for lommeboken.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Bekreft kryptering av lommeboken</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR LITECOINS</b>!</source>
<translation>Advarsel: Dersom du krypterer lommeboken og mister passordsetningen vil du <b>MISTE ALLE DINE LITECOIN</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Er du sikker på at du vil kryptere lommeboken?</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Lommeboken er kryptert</translation>
</message>
<message>
<source>%1 will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your democoins from being stolen by malware infecting your computer.</source>
<translation>%1 vil nå lukkes for å fullføre krypteringsprosessen. Husk at å kryptere lommeboken ikke kan beskytte dine democoin fullstendig fra å bli stjålet av skadelig programvare som har infisert datamaskinen din.</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>VIKTIG: Alle tidligere sikkerhetskopier du har tatt av lommebokfilen bør erstattes med den nye krypterte lommebokfilen. Av sikkerhetsgrunner vil tidligere sikkerhetskopier av lommebokfilen bli ubrukelige når du begynner å bruke den ny kypterte lommeboken.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Lommebokkrypteringen feilet</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Lommebokkrypteringen feilet pga. en intern feil. Lommeboken din ble ikke kryptert.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>De oppgitte passordsetningene er forskjellige.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Opplåsing av lommeboken feilet</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Passordsetningen som ble oppgitt for å dekryptere lommeboken var feil.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Dekryptering av lommeboken feilet</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Passordsetningen for lommeboken ble endret</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Advarsel: Caps Lock er på!</translation>
</message>
</context>
<context>
<name>BanTableModel</name>
<message>
<source>IP/Netmask</source>
<translation>IP/Nettmaske</translation>
</message>
<message>
<source>Banned Until</source>
<translation>Utestengt Til</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Signer &melding</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Synkroniserer med nettverket</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Oversikt</translation>
</message>
<message>
<source>Node</source>
<translation>Node</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Vis generell oversikt over lommeboken</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transaksjoner</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Bla gjennom transaksjoner</translation>
</message>
<message>
<source>E&xit</source>
<translation>&Avslutt</translation>
</message>
<message>
<source>Quit application</source>
<translation>Avslutt program</translation>
</message>
<message>
<source>&About %1</source>
<translation>&Om %1</translation>
</message>
<message>
<source>Show information about %1</source>
<translation>Vis informasjon om %1</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Om &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Vis informasjon om Qt</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Valg</translation>
</message>
<message>
<source>Modify configuration options for %1</source>
<translation>Endre konfigurasjonsalternativer for %1</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>&Krypter lommebok...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>&Sikkerhetskopier lommebok</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>&Endre passordsetning</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Avsender adresser</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Mottager adresser</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>Åpne &URI</translation>
</message>
<message>
<source>Click to disable network activity.</source>
<translation>Klikk for å slå av nettverksaktivitet.</translation>
</message>
<message>
<source>Network activity disabled.</source>
<translation>Nettverksaktivitet er slått av</translation>
</message>
<message>
<source>Click to enable network activity again.</source>
<translation>Klikk for å slå på nettverksaktivitet igjen.</translation>
</message>
<message>
<source>Syncing Headers (%1%)...</source>
<translation>Synkroniserer Headers (%1%)...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindekserer blokker på disken</translation>
</message>
<message>
<source>Send coins to a DemoCoin address</source>
<translation>Send mynter til en DemoCoin adresse</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Sikkerhetskopier lommeboken til en annen lokasjon</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Endre passordsetningen for kryptering av lommeboken</translation>
</message>
<message>
<source>&Debug window</source>
<translation>&Feilsøkingsvindu</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Åpne konsoll for feilsøking og diagnostisering</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>&Verifiser meldingen...</translation>
</message>
<message>
<source>DemoCoin</source>
<translation>DemoCoin</translation>
</message>
<message>
<source>Wallet</source>
<translation>Lommebok</translation>
</message>
<message>
<source>&Send</source>
<translation>&Sende</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Motta</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>Vi&s / Skjul</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Vis, eller skjul, hovedvinduet</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Krypter de private nøklene som tilhører lommeboken din</translation>
</message>
<message>
<source>Sign messages with your DemoCoin addresses to prove you own them</source>
<translation>Signer meldingene med DemoCoin adresse for å bevise at du eier dem</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified DemoCoin addresses</source>
<translation>Verifiser meldinger for å sikre at de ble signert med en angitt DemoCoin adresse</translation>
</message>
<message>
<source>&File</source>
<translation>&Fil</translation>
</message>
<message>
<source>&Settings</source>
<translation>In&stillinger</translation>
</message>
<message>
<source>&Help</source>
<translation>&Hjelp</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Hjelpelinje for fliker</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>(no label)</source>
<translation>(ingen beskrivelse)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
</context>
<context>
<name>FreespaceChecker</name>
</context>
<context>
<name>HelpMessageDialog</name>
</context>
<context>
<name>Intro</name>
</context>
<context>
<name>ModalOverlay</name>
</context>
<context>
<name>OpenURIDialog</name>
</context>
<context>
<name>OptionsDialog</name>
</context>
<context>
<name>OverviewPage</name>
</context>
<context>
<name>PaymentServer</name>
</context>
<context>
<name>PeerTableModel</name>
</context>
<context>
<name>QObject</name>
</context>
<context>
<name>QObject::QObject</name>
</context>
<context>
<name>QRImageWidget</name>
</context>
<context>
<name>RPCConsole</name>
</context>
<context>
<name>ReceiveCoinsDialog</name>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>Label</source>
<translation>Beskrivelse</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Label</source>
<translation>Beskrivelse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(ingen beskrivelse)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>(no label)</source>
<translation>(ingen beskrivelse)</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
</context>
<context>
<name>SendConfirmationDialog</name>
</context>
<context>
<name>ShutdownWindow</name>
</context>
<context>
<name>SignVerifyMessageDialog</name>
</context>
<context>
<name>SplashScreen</name>
</context>
<context>
<name>TrafficGraphWidget</name>
</context>
<context>
<name>TransactionDesc</name>
</context>
<context>
<name>TransactionDescDialog</name>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Label</source>
<translation>Beskrivelse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(ingen beskrivelse)</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>Comma separated file (*.csv)</source>
<translation>Komma separert fil (*.csv)</translation>
</message>
<message>
<source>Label</source>
<translation>Beskrivelse</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Eksporten feilet</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
</context>
<context>
<name>WalletFrame</name>
</context>
<context>
<name>WalletModel</name>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>&Eksport</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Eksporter data i den valgte fliken til en fil</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
</context>
</TS>
|
<source>New passphrase</source>
<translation>Ny passordsetning</translation>
|
update_task_parameters.go
|
package tasks
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"net/http"
"time"
"golang.org/x/net/context"
"github.com/go-openapi/errors"
"github.com/go-openapi/runtime"
cr "github.com/go-openapi/runtime/client"
"github.com/go-openapi/swag"
strfmt "github.com/go-openapi/strfmt"
"github.com/go-swagger/go-swagger/examples/task-tracker/models"
)
// NewUpdateTaskParams creates a new UpdateTaskParams object
// with the default values initialized.
func NewUpdateTaskParams() *UpdateTaskParams {
	// Seed the timeout with the shared client default; callers can override
	// it via WithTimeout/SetTimeout. (The generated empty `var ()` block was
	// dead scaffolding and has been removed.)
	return &UpdateTaskParams{
		timeout: cr.DefaultTimeout,
	}
}
// NewUpdateTaskParamsWithTimeout creates a new UpdateTaskParams object
// with the default values initialized, and the ability to set a timeout on a request
func NewUpdateTaskParamsWithTimeout(timeout time.Duration) *UpdateTaskParams {
	// Like NewUpdateTaskParams, but with an explicit request timeout.
	// (Removed the dead generated `var ()` declaration.)
	return &UpdateTaskParams{
		timeout: timeout,
	}
}
// NewUpdateTaskParamsWithContext creates a new UpdateTaskParams object
// with the default values initialized, and the ability to set a context for a request
func NewUpdateTaskParamsWithContext(ctx context.Context) *UpdateTaskParams {
	// Like NewUpdateTaskParams, but bound to the given context. Note that,
	// as in the original generated code, the timeout is left at its zero
	// value here. (Removed the dead generated `var ()` declaration.)
	return &UpdateTaskParams{
		Context: ctx,
	}
}
// NewUpdateTaskParamsWithHTTPClient creates a new UpdateTaskParams object
// with the default values initialized, and the ability to set a custom HTTPClient for a request
func NewUpdateTaskParamsWithHTTPClient(client *http.Client) *UpdateTaskParams
|
/*UpdateTaskParams contains all the parameters to send to the API endpoint
for the update task operation. Typically these are written to a http.Request.
*/
type UpdateTaskParams struct {
	/*Body
	  The task to update
	*/
	Body *models.Task
	/*ID
	  The id of the item
	*/
	ID int64

	timeout    time.Duration   // per-request timeout handed to the transport
	Context    context.Context // request context used for cancellation
	HTTPClient *http.Client    // optional custom HTTP client for this request
}
// WithTimeout sets the request timeout and returns the params for chaining.
func (o *UpdateTaskParams) WithTimeout(timeout time.Duration) *UpdateTaskParams {
	o.timeout = timeout
	return o
}

// SetTimeout sets the request timeout.
func (o *UpdateTaskParams) SetTimeout(timeout time.Duration) {
	o.timeout = timeout
}
// WithContext sets the request context and returns the params for chaining.
func (o *UpdateTaskParams) WithContext(ctx context.Context) *UpdateTaskParams {
	o.Context = ctx
	return o
}

// SetContext sets the request context.
func (o *UpdateTaskParams) SetContext(ctx context.Context) {
	o.Context = ctx
}
// WithHTTPClient sets a custom HTTP client and returns the params for chaining.
func (o *UpdateTaskParams) WithHTTPClient(client *http.Client) *UpdateTaskParams {
	o.HTTPClient = client
	return o
}

// SetHTTPClient sets a custom HTTP client for this request.
func (o *UpdateTaskParams) SetHTTPClient(client *http.Client) {
	o.HTTPClient = client
}
// WithBody sets the task payload and returns the params for chaining.
func (o *UpdateTaskParams) WithBody(body *models.Task) *UpdateTaskParams {
	o.Body = body
	return o
}

// SetBody sets the task payload.
func (o *UpdateTaskParams) SetBody(body *models.Task) {
	o.Body = body
}
// WithID sets the item id and returns the params for chaining.
func (o *UpdateTaskParams) WithID(id int64) *UpdateTaskParams {
	o.ID = id
	return o
}

// SetID sets the item id.
func (o *UpdateTaskParams) SetID(id int64) {
	o.ID = id
}
// WriteToRequest writes these params to a swagger request: it applies the
// timeout, serializes the body, and sets the `id` path parameter.
func (o *UpdateTaskParams) WriteToRequest(r runtime.ClientRequest, reg strfmt.Registry) error {
	if err := r.SetTimeout(o.timeout); err != nil {
		return err
	}
	// NOTE(review): `res` is generated scaffolding — nothing ever appends to
	// it, so the CompositeValidationError branch below is currently
	// unreachable. Kept byte-identical because the generator owns this file.
	var res []error
	// Always send a body object, even when the caller supplied none.
	if o.Body == nil {
		o.Body = new(models.Task)
	}
	if err := r.SetBodyParam(o.Body); err != nil {
		return err
	}
	// path param id
	if err := r.SetPathParam("id", swag.FormatInt64(o.ID)); err != nil {
		return err
	}
	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
|
{
var ()
return &UpdateTaskParams{
HTTPClient: client,
}
}
|
prometheus.go
|
package js
import (
|
"io/ioutil"
"net/http"
"strconv"
"time"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
log "github.com/sirupsen/logrus"
"github.com/andrewflash/lorawan/backend"
)
// Prometheus collectors for the join-server API. Both are partitioned by the
// backend message type and the HTTP status code of the response.
var (
	reqCount = promauto.NewCounterVec(prometheus.CounterOpts{
		Name: "api_joinserver_request_count",
		Help: "The number of join-server API requests (per message-type and status code)",
	}, []string{"message_type", "status_code"})
	reqTimer = promauto.NewHistogramVec(prometheus.HistogramOpts{
		Name: "api_joinserver_request_duration_seconds",
		Help: "The duration of serving join-server API requests (per message-type and status code)",
	}, []string{"message_type", "status_code"})
)
// prometheusMiddleware wraps an http.Handler and records request-count (and
// optionally request-duration) metrics for every request it serves.
type prometheusMiddleware struct {
	handler         http.Handler // the wrapped join-server API handler
	timingHistogram bool         // when true, also observe durations in reqTimer
}
// ServeHTTP buffers the request body to extract the backend message type,
// delegates to the wrapped handler, and records count/duration metrics
// labeled by message type and response status code.
func (h *prometheusMiddleware) ServeHTTP(w http.ResponseWriter, r *http.Request) {
	start := time.Now()

	// Buffer the body so it can be inspected here and still be readable by
	// the wrapped handler.
	var buf bytes.Buffer
	if r.Body != nil {
		if _, err := buf.ReadFrom(r.Body); err != nil {
			log.WithError(err).Error("api/js: read request body error")
		}
		r.Body = ioutil.NopCloser(&buf)
	}

	// Only the base payload is needed for the message_type label.
	var basePL backend.BasePayload
	if err := json.Unmarshal(buf.Bytes(), &basePL); err != nil {
		log.WithError(err).Error("api/js: unmarshal base payload error")
	}

	// Fix: default the recorded status to 200. net/http responds with
	// StatusOK when a handler never calls WriteHeader, but the zero-valued
	// statusWriter would previously report status_code="0" for handlers
	// that write no body at all.
	sw := statusWriter{ResponseWriter: w, status: http.StatusOK}
	h.handler.ServeHTTP(&sw, r)

	labels := prometheus.Labels{"message_type": string(basePL.MessageType), "status_code": strconv.FormatInt(int64(sw.status), 10)}
	reqCount.With(labels).Inc()
	if h.timingHistogram {
		reqTimer.With(labels).Observe(float64(time.Since(start)) / float64(time.Second))
	}
}
// statusWriter records the HTTP status code written through the wrapped
// ResponseWriter so the middleware can label metrics with it.
type statusWriter struct {
	http.ResponseWriter
	status int // last status written; 0 until WriteHeader/Write is called
}
// WriteHeader records the status code before delegating to the wrapped writer.
func (w *statusWriter) WriteHeader(code int) {
	w.status = code
	w.ResponseWriter.WriteHeader(code)
}
// Write treats the first body write without an explicit header as an implicit
// 200 (mirroring net/http), then delegates to the wrapped writer.
func (w *statusWriter) Write(b []byte) (int, error) {
	if w.status == 0 {
		w.status = http.StatusOK
	}
	return w.ResponseWriter.Write(b)
}
|
"bytes"
"encoding/json"
|
item_const.rs
|
use crate::priv_prelude::*;
/// A `const` item: `[pub] const NAME[: Ty] = EXPR;`.
#[derive(Clone, Debug)]
pub struct ItemConst {
    /// Optional `pub` visibility qualifier.
    pub visibility: Option<PubToken>,
    pub const_token: ConstToken,
    /// The constant's name.
    pub name: Ident,
    /// Optional type ascription: the `:` token and the ascribed type.
    pub ty_opt: Option<(ColonToken, Ty)>,
    pub eq_token: EqToken,
    /// The initializer expression.
    pub expr: Expr,
    pub semicolon_token: SemicolonToken,
}
impl ItemConst {
    /// Source span of the whole item, from `pub`/`const` through the `;`.
    pub fn span(&self) -> Span {
        // The item starts at `pub` when present, otherwise at `const`.
        let start = self
            .visibility
            .as_ref()
            .map(|pub_token| pub_token.span())
            .unwrap_or_else(|| self.const_token.span());
        Span::join(start, self.semicolon_token.span())
    }
}
impl Parse for ItemConst {
fn parse(parser: &mut Parser) -> ParseResult<ItemConst> {
let visibility = parser.take();
let const_token = parser.parse()?;
let name = parser.parse()?;
let ty_opt = match parser.take() {
Some(colon_token) =>
|
None => None,
};
let eq_token = parser.parse()?;
let expr = parser.parse()?;
let semicolon_token = parser.parse()?;
Ok(ItemConst {
visibility,
const_token,
name,
ty_opt,
eq_token,
expr,
semicolon_token,
})
}
}
|
{
let ty = parser.parse()?;
Some((colon_token, ty))
}
|
server.js
|
/**
* NOTE: This file must be run with babel-node as Node is not yet compatible
* with all of ES6 and we also use JSX.
*/
import url from 'url';
import React from 'react';
import express from 'express';
import webpack from 'webpack';
import { renderToStaticMarkup } from 'react-dom/server';
import config from './webpack.config.dev';
import Html from './template';
/**
* Render the entire web page to a string. We use render to static markup here
* to avoid react hooking on to the document HTML that will not be managed by
* React. The body prop is a string that contains the actual document body,
* which react will hook on to.
*
* We also take this opportunity to prepend the doctype string onto the
* document.
*
* @param {object} props
|
*/
const renderDocumentToString = props =>
`<!doctype html>${renderToStaticMarkup(<Html {...props} />)}`;
const app = express();
const compiler = webpack(config);

// Static assets are served straight from the template build output.
app.use('/css', express.static('buildTemplate/css'));
app.use('/images', express.static('buildTemplate/images'));
app.use('/data', express.static('buildTemplate/data'));

// Webpack dev middleware compiles the bundle in-memory; hot middleware
// pushes updates to the browser over an event stream.
app.use(
  require('webpack-dev-middleware')(compiler, {
    noInfo: true,
    publicPath: config.output.publicPath,
  })
);
app.use(require('webpack-hot-middleware')(compiler));
// Send the boilerplate HTML payload down for all get requests. Routing will be
// handled entirely client side and we don't make an effort to pre-render pages
// before they are served when in dev mode.
app.get('*', (req, res) => {
  // Same boilerplate page for every route; the client-side router takes
  // over once the bundle loads.
  res.send(
    renderDocumentToString({ bundle: `${config.output.publicPath}app.js` })
  );
});
// NOTE: url.parse can't handle URLs without a protocol explicitly defined. So
// if we parse '//localhost:8888' it doesn't work. We manually add a protocol even
// though we are only interested in the port.
const { port } = url.parse(`http:${config.output.publicPath}`);

app.listen(port, 'localhost', err => {
  if (err) {
    // The dev server could not bind the port.
    console.error(err); // eslint-disable-line no-console
  } else {
    console.log(`Dev server listening at http://localhost:${port}`); // eslint-disable-line no-console
  }
});
|
* @return {string}
|
tr.rs
|
#[doc = "Register `TR` reader"]
pub struct R(crate::R<TR_SPEC>);
impl core::ops::Deref for R {
type Target = crate::R<TR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl From<crate::R<TR_SPEC>> for R {
#[inline(always)]
fn from(reader: crate::R<TR_SPEC>) -> Self {
R(reader)
}
}
#[doc = "Register `TR` writer"]
pub struct W(crate::W<TR_SPEC>);
impl core::ops::Deref for W {
type Target = crate::W<TR_SPEC>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl core::ops::DerefMut for W {
#[inline(always)]
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<crate::W<TR_SPEC>> for W {
#[inline(always)]
fn from(writer: crate::W<TR_SPEC>) -> Self {
W(writer)
}
}
#[doc = "AM/PM notation\n\nValue on reset: 0"]
#[derive(Clone, Copy, Debug, PartialEq)]
pub enum PM_A {
#[doc = "0: AM or 24-hour format"]
AM = 0,
#[doc = "1: PM"]
PM = 1,
}
impl From<PM_A> for bool {
#[inline(always)]
fn from(variant: PM_A) -> Self {
variant as u8 != 0
}
}
#[doc = "Field `PM` reader - AM/PM notation"]
pub struct PM_R(crate::FieldReader<bool, PM_A>);
impl PM_R {
    #[inline(always)]
    pub(crate) fn new(bits: bool) -> Self {
        Self(crate::FieldReader::new(bits))
    }
    #[doc = r"Get enumerated values variant"]
    #[inline(always)]
    pub fn variant(&self) -> PM_A {
        // A set bit means PM; a clear bit means AM / 24-hour format.
        if self.bits {
            PM_A::PM
        } else {
            PM_A::AM
        }
    }
    #[doc = "Checks if the value of the field is `AM`"]
    #[inline(always)]
    pub fn is_am(&self) -> bool {
        **self == PM_A::AM
    }
    #[doc = "Checks if the value of the field is `PM`"]
    #[inline(always)]
    pub fn is_pm(&self) -> bool {
        **self == PM_A::PM
    }
}
impl core::ops::Deref for PM_R {
type Target = crate::FieldReader<bool, PM_A>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `PM` writer - AM/PM notation"]
pub struct PM_W<'a> {
w: &'a mut W,
}
impl<'a> PM_W<'a> {
#[doc = r"Writes `variant` to the field"]
#[inline(always)]
pub fn variant(self, variant: PM_A) -> &'a mut W {
self.bit(variant.into())
}
#[doc = "AM or 24-hour format"]
#[inline(always)]
pub fn am(self) -> &'a mut W {
self.variant(PM_A::AM)
}
#[doc = "PM"]
#[inline(always)]
pub fn pm(self) -> &'a mut W {
self.variant(PM_A::PM)
}
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 22)) | ((value as u32 & 0x01) << 22);
self.w
}
}
#[doc = "Field `HT` reader - Hour tens in BCD format"]
pub struct HT_R(crate::FieldReader<u8, u8>);
impl HT_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
HT_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for HT_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `HT` writer - Hour tens in BCD format"]
pub struct HT_W<'a> {
    w: &'a mut W,
}
impl<'a> HT_W<'a> {
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bits(self, value: u8) -> &'a mut W {
        // HT is the 2-bit field at bits 20:21 of TR.
        const OFFSET: u32 = 20;
        const MASK: u32 = 0x03;
        self.w.bits = (self.w.bits & !(MASK << OFFSET)) | (((value as u32) & MASK) << OFFSET);
        self.w
    }
}
#[doc = "Field `HU` reader - Hour units in BCD format"]
pub struct HU_R(crate::FieldReader<u8, u8>);
impl HU_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
HU_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for HU_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `HU` writer - Hour units in BCD format"]
pub struct HU_W<'a> {
w: &'a mut W,
}
impl<'a> HU_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 16)) | ((value as u32 & 0x0f) << 16);
self.w
}
}
#[doc = "Field `MNT` reader - Minute tens in BCD format"]
pub struct MNT_R(crate::FieldReader<u8, u8>);
impl MNT_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
MNT_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for MNT_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `MNT` writer - Minute tens in BCD format"]
pub struct MNT_W<'a> {
w: &'a mut W,
}
impl<'a> MNT_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 12)) | ((value as u32 & 0x07) << 12);
self.w
}
}
#[doc = "Field `MNU` reader - Minute units in BCD format"]
pub struct MNU_R(crate::FieldReader<u8, u8>);
impl MNU_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
MNU_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for MNU_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `MNU` writer - Minute units in BCD format"]
pub struct MNU_W<'a> {
w: &'a mut W,
}
impl<'a> MNU_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x0f << 8)) | ((value as u32 & 0x0f) << 8);
self.w
}
}
#[doc = "Field `ST` reader - Second tens in BCD format"]
pub struct ST_R(crate::FieldReader<u8, u8>);
impl ST_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
ST_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for ST_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `ST` writer - Second tens in BCD format"]
pub struct ST_W<'a> {
w: &'a mut W,
}
impl<'a> ST_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x07 << 4)) | ((value as u32 & 0x07) << 4);
self.w
}
}
#[doc = "Field `SU` reader - Second units in BCD format"]
pub struct SU_R(crate::FieldReader<u8, u8>);
impl SU_R {
#[inline(always)]
pub(crate) fn new(bits: u8) -> Self {
SU_R(crate::FieldReader::new(bits))
}
}
impl core::ops::Deref for SU_R {
type Target = crate::FieldReader<u8, u8>;
#[inline(always)]
fn deref(&self) -> &Self::Target {
&self.0
}
}
#[doc = "Field `SU` writer - Second units in BCD format"]
pub struct SU_W<'a> {
w: &'a mut W,
}
impl<'a> SU_W<'a> {
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bits(self, value: u8) -> &'a mut W {
self.w.bits = (self.w.bits & !0x0f) | (value as u32 & 0x0f);
self.w
}
}
impl R {
#[doc = "Bit 22 - AM/PM notation"]
#[inline(always)]
pub fn pm(&self) -> PM_R {
PM_R::new(((self.bits >> 22) & 0x01) != 0)
}
#[doc = "Bits 20:21 - Hour tens in BCD format"]
#[inline(always)]
pub fn ht(&self) -> HT_R {
HT_R::new(((self.bits >> 20) & 0x03) as u8)
}
#[doc = "Bits 16:19 - Hour units in BCD format"]
#[inline(always)]
pub fn hu(&self) -> HU_R {
HU_R::new(((self.bits >> 16) & 0x0f) as u8)
}
#[doc = "Bits 12:14 - Minute tens in BCD format"]
#[inline(always)]
pub fn mnt(&self) -> MNT_R {
MNT_R::new(((self.bits >> 12) & 0x07) as u8)
}
#[doc = "Bits 8:11 - Minute units in BCD format"]
#[inline(always)]
pub fn
|
(&self) -> MNU_R {
MNU_R::new(((self.bits >> 8) & 0x0f) as u8)
}
#[doc = "Bits 4:6 - Second tens in BCD format"]
#[inline(always)]
pub fn st(&self) -> ST_R {
ST_R::new(((self.bits >> 4) & 0x07) as u8)
}
#[doc = "Bits 0:3 - Second units in BCD format"]
#[inline(always)]
pub fn su(&self) -> SU_R {
SU_R::new((self.bits & 0x0f) as u8)
}
}
impl W {
#[doc = "Bit 22 - AM/PM notation"]
#[inline(always)]
pub fn pm(&mut self) -> PM_W {
PM_W { w: self }
}
#[doc = "Bits 20:21 - Hour tens in BCD format"]
#[inline(always)]
pub fn ht(&mut self) -> HT_W {
HT_W { w: self }
}
#[doc = "Bits 16:19 - Hour units in BCD format"]
#[inline(always)]
pub fn hu(&mut self) -> HU_W {
HU_W { w: self }
}
#[doc = "Bits 12:14 - Minute tens in BCD format"]
#[inline(always)]
pub fn mnt(&mut self) -> MNT_W {
MNT_W { w: self }
}
#[doc = "Bits 8:11 - Minute units in BCD format"]
#[inline(always)]
pub fn mnu(&mut self) -> MNU_W {
MNU_W { w: self }
}
#[doc = "Bits 4:6 - Second tens in BCD format"]
#[inline(always)]
pub fn st(&mut self) -> ST_W {
ST_W { w: self }
}
#[doc = "Bits 0:3 - Second units in BCD format"]
#[inline(always)]
pub fn su(&mut self) -> SU_W {
SU_W { w: self }
}
#[doc = "Writes raw bits to the register."]
#[inline(always)]
pub unsafe fn bits(&mut self, bits: u32) -> &mut Self {
self.0.bits(bits);
self
}
}
#[doc = "time register\n\nThis register you can [`read`](crate::generic::Reg::read), [`write_with_zero`](crate::generic::Reg::write_with_zero), [`reset`](crate::generic::Reg::reset), [`write`](crate::generic::Reg::write), [`modify`](crate::generic::Reg::modify). See [API](https://docs.rs/svd2rust/#read--modify--write-api).\n\nFor information about available fields see [tr](index.html) module"]
pub struct TR_SPEC;
impl crate::RegisterSpec for TR_SPEC {
type Ux = u32;
}
#[doc = "`read()` method returns [tr::R](R) reader structure"]
impl crate::Readable for TR_SPEC {
type Reader = R;
}
#[doc = "`write(|w| ..)` method takes [tr::W](W) writer structure"]
impl crate::Writable for TR_SPEC {
type Writer = W;
}
#[doc = "`reset()` method sets TR to value 0"]
impl crate::Resettable for TR_SPEC {
#[inline(always)]
fn reset_value() -> Self::Ux {
0
}
}
|
mnu
|
mod.rs
|
/*
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*/
use common::{ConsoleLogger, FeatureFlag, FeatureFlags, NamedItem, SourceLocationKey};
use fixture_tests::Fixture;
use graphql_ir::{
build_ir_with_extra_features, BuilderOptions, FragmentDefinition, FragmentVariablesSemantic,
OperationDefinition, Program, RelayMode,
};
use graphql_syntax::parse_executable;
use graphql_test_helpers::diagnostics_to_sorted_string;
use graphql_text_printer::print_full_operation;
use intern::string_key::Intern;
use relay_codegen::{
build_request_params, print_fragment, print_operation, print_request, JsModuleFormat,
};
use relay_compiler::{validate, ProjectConfig};
use relay_config::SchemaConfig;
use relay_test_schema::{
get_test_schema_with_custom_id, get_test_schema_with_custom_id_with_extensions,
};
use relay_transforms::{apply_transforms, DIRECTIVE_SPLIT_OPERATION};
use std::sync::Arc;
pub fn transform_fixture(fixture: &Fixture<'_>) -> Result<String, String> {
let source_location = SourceLocationKey::standalone(fixture.file_name);
if fixture.content.contains("%TODO%") {
if fixture.content.contains("expected-to-throw")
|
return Ok("TODO".to_string());
}
let parts: Vec<_> = fixture.content.split("%extensions%").collect();
let (base, schema) = match parts.as_slice() {
[base, extensions] => (
base,
get_test_schema_with_custom_id_with_extensions(extensions),
),
[base] => (base, get_test_schema_with_custom_id()),
_ => panic!("Invalid fixture input {}", fixture.content),
};
let ast = parse_executable(base, source_location)
.map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?;
let ir_result = build_ir_with_extra_features(
&schema,
&ast.definitions,
&BuilderOptions {
allow_undefined_fragment_spreads: false,
fragment_variables_semantic: FragmentVariablesSemantic::PassedValue,
relay_mode: Some(RelayMode {
enable_provided_variables: &FeatureFlag::Enabled,
}),
default_anonymous_operation_name: None,
},
);
let ir = ir_result
.map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?;
let program = Program::from_definitions(Arc::clone(&schema), ir);
let feature_flags = FeatureFlags {
enable_flight_transform: true,
hash_supported_argument: FeatureFlag::Disabled,
no_inline: FeatureFlag::Enabled,
enable_relay_resolver_transform: true,
enable_3d_branch_arg_generation: true,
actor_change_support: FeatureFlag::Enabled,
text_artifacts: FeatureFlag::Disabled,
enable_client_edges: FeatureFlag::Enabled,
enable_provided_variables: FeatureFlag::Enabled,
skip_printing_nulls: FeatureFlag::Disabled,
};
let project_config = ProjectConfig {
name: "test".intern(),
feature_flags: Arc::new(feature_flags),
schema_config: SchemaConfig {
node_interface_id_field: "global_id".intern(),
..Default::default()
},
js_module_format: JsModuleFormat::Haste,
..Default::default()
};
validate(&program, &project_config, &None)
.map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?;
// TODO pass base fragment names
let programs = apply_transforms(
&project_config,
Arc::new(program),
Default::default(),
Arc::new(ConsoleLogger),
None,
None,
)
.map_err(|diagnostics| diagnostics_to_sorted_string(fixture.content, &diagnostics))?;
let mut operations: Vec<&std::sync::Arc<OperationDefinition>> =
programs.normalization.operations().collect();
operations.sort_by_key(|operation| operation.name.item);
let result = operations
.into_iter()
.map(|operation| {
if operation
.directives
.named(*DIRECTIVE_SPLIT_OPERATION)
.is_some()
{
let mut import_statements = Default::default();
let operation =
print_operation(&schema, operation, &project_config, &mut import_statements);
format!("{}{}", import_statements, operation)
} else {
let name = operation.name.item;
let print_operation_node = programs
.operation_text
.operation(name)
.expect("a query text operation should be generated for this operation");
let text = print_full_operation(&programs.operation_text, print_operation_node);
let reader_operation = programs
.reader
.operation(name)
.expect("a reader fragment should be generated for this operation");
let operation_fragment = FragmentDefinition {
name: reader_operation.name,
variable_definitions: reader_operation.variable_definitions.clone(),
selections: reader_operation.selections.clone(),
used_global_variables: Default::default(),
directives: reader_operation.directives.clone(),
type_condition: reader_operation.type_,
};
let request_parameters = build_request_params(operation);
let mut import_statements = Default::default();
let request = print_request(
&schema,
operation,
&operation_fragment,
request_parameters,
&project_config,
&mut import_statements,
);
format!("{}{}\n\nQUERY:\n\n{}", import_statements, request, text)
}
})
.chain({
let mut fragments: Vec<&std::sync::Arc<FragmentDefinition>> =
programs.reader.fragments().collect();
fragments.sort_by_key(|fragment| fragment.name.item);
fragments.into_iter().map(|fragment| {
let mut import_statements = Default::default();
let fragment =
print_fragment(&schema, fragment, &project_config, &mut import_statements);
format!("{}{}", import_statements, fragment)
})
})
.collect::<Vec<_>>();
Ok(result.join("\n\n"))
}
|
{
return Err("TODO".to_string());
}
|
peerconnection_renegotation_test.go
|
// +build !js
package webrtc
import (
"context"
"io"
"math/rand"
"strconv"
"strings"
"sync/atomic"
"testing"
"time"
"github.com/pion/transport/test"
"github.com/pion/webrtc/v2/internal/util"
"github.com/pion/webrtc/v2/pkg/media"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
// sendVideoUntilDone writes a dummy sample to every track roughly every 20ms
// until the done channel is closed (or receives).
func sendVideoUntilDone(done <-chan struct{}, t *testing.T, tracks []*Track) {
	for {
		select {
		case <-done:
			return
		case <-time.After(20 * time.Millisecond):
			for _, track := range tracks {
				sample := media.Sample{Data: []byte{0x00}, Samples: 1}
				assert.NoError(t, track.WriteSample(sample))
			}
		}
	}
}
func sdpMidHasSsrc(offer SessionDescription, mid string, ssrc uint32) bool {
for _, media := range offer.parsed.MediaDescriptions {
cmid, ok := media.Attribute("mid")
if !ok {
continue
}
if cmid != mid {
continue
}
cssrc, ok := media.Attribute("ssrc")
if !ok {
continue
}
parts := strings.Split(cssrc, " ")
ssrcInt64, err := strconv.ParseUint(parts[0], 10, 32)
if err != nil {
continue
}
if uint32(ssrcInt64) == ssrc
|
}
return false
}
/*
* Assert the following behaviors
* - We are able to call AddTrack after signaling
* - OnTrack is NOT called on the other side until after SetRemoteDescription
* - We are able to re-negotiate and AddTrack is properly called
*/
func TestPeerConnection_Renegotation_AddTrack(t *testing.T) {
	api := NewAPI()

	// Fail the test on deadlock and check for leaked goroutines.
	lim := test.TimeOut(time.Second * 30)
	defer lim.Stop()
	report := test.CheckRoutines(t)
	defer report()

	api.mediaEngine.RegisterDefaultCodecs()
	pcOffer, pcAnswer, err := api.newPair(Configuration{})
	if err != nil {
		t.Fatal(err)
	}

	// OnTrack must not fire until after the second (re)negotiation.
	haveRenegotiated := &atomicBool{}
	onTrackFired, onTrackFiredFunc := context.WithCancel(context.Background())
	pcAnswer.OnTrack(func(track *Track, r *RTPReceiver) {
		if !haveRenegotiated.get() {
			t.Fatal("OnTrack was called before renegotation")
		}
		onTrackFiredFunc()
	})

	// First negotiation happens with no outbound track attached.
	assert.NoError(t, signalPair(pcOffer, pcAnswer))

	_, err = pcAnswer.AddTransceiverFromKind(RTPCodecTypeVideo, RtpTransceiverInit{Direction: RTPTransceiverDirectionRecvonly})
	assert.NoError(t, err)

	vp8Track, err := pcOffer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "foo", "bar")
	assert.NoError(t, err)
	_, err = pcOffer.AddTrack(vp8Track)
	assert.NoError(t, err)

	// Send 10 packets, OnTrack MUST not be fired
	for i := 0; i <= 10; i++ {
		assert.NoError(t, vp8Track.WriteSample(media.Sample{Data: []byte{0x00}, Samples: 1}))
		time.Sleep(20 * time.Millisecond)
	}

	// Renegotiate; only now may OnTrack fire on the answering side.
	haveRenegotiated.set(true)
	assert.NoError(t, signalPair(pcOffer, pcAnswer))
	sendVideoUntilDone(onTrackFired.Done(), t, []*Track{vp8Track})

	assert.NoError(t, pcOffer.Close())
	assert.NoError(t, pcAnswer.Close())
}
// Assert that adding tracks across multiple renegotations performs as expected
func TestPeerConnection_Renegotation_AddTrack_Multiple(t *testing.T) {
	// addTrackWithLabel wires a recvonly transceiver on the answerer and a
	// new sending track (id == label == trackName) on the offerer.
	addTrackWithLabel := func(trackName string, pcOffer, pcAnswer *PeerConnection) *Track {
		_, err := pcAnswer.AddTransceiverFromKind(RTPCodecTypeVideo, RtpTransceiverInit{Direction: RTPTransceiverDirectionRecvonly})
		assert.NoError(t, err)
		track, err := pcOffer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), trackName, trackName)
		assert.NoError(t, err)
		_, err = pcOffer.AddTrack(track)
		assert.NoError(t, err)
		return track
	}
	trackNames := []string{util.RandSeq(trackDefaultIDLength), util.RandSeq(trackDefaultIDLength), util.RandSeq(trackDefaultIDLength)}
	outboundTracks := []*Track{}
	onTrackCount := map[string]int{}
	onTrackChan := make(chan struct{}, 1)
	api := NewAPI()
	lim := test.TimeOut(time.Second * 30)
	defer lim.Stop()
	report := test.CheckRoutines(t)
	defer report()
	api.mediaEngine.RegisterDefaultCodecs()
	pcOffer, pcAnswer, err := api.newPair(Configuration{})
	if err != nil {
		t.Fatal(err)
	}
	pcAnswer.OnTrack(func(track *Track, r *RTPReceiver) {
		onTrackCount[track.Label()]++
		onTrackChan <- struct{}{}
	})
	assert.NoError(t, signalPair(pcOffer, pcAnswer))
	// Renegotiate once per added track; each must fire OnTrack exactly once
	// (checked via onTrackCount after teardown).
	for i := range trackNames {
		outboundTracks = append(outboundTracks, addTrackWithLabel(trackNames[i], pcOffer, pcAnswer))
		assert.NoError(t, signalPair(pcOffer, pcAnswer))
		sendVideoUntilDone(onTrackChan, t, outboundTracks)
	}
	assert.NoError(t, pcOffer.Close())
	assert.NoError(t, pcAnswer.Close())
	assert.Equal(t, onTrackCount[trackNames[0]], 1)
	assert.Equal(t, onTrackCount[trackNames[1]], 1)
	assert.Equal(t, onTrackCount[trackNames[2]], 1)
}
// Assert that renegotiation triggers OnTrack() with correct ID and label from
// remote side, even when a transceiver was added before the actual track data
// was received. This happens when we add a transceiver on the server, create
// an offer on the server and the browser's answer contains the same SSRC, but
// a track hasn't been added on the browser side yet. The browser can add a
// track later and renegotiate, and track ID and label will be set by the time
// first packets are received.
func TestPeerConnection_Renegotiation_AddTrack_Rename(t *testing.T) {
	api := NewAPI()
	lim := test.TimeOut(time.Second * 30)
	defer lim.Stop()
	report := test.CheckRoutines(t)
	defer report()
	api.mediaEngine.RegisterDefaultCodecs()
	pcOffer, pcAnswer, err := api.newPair(Configuration{})
	if err != nil {
		t.Fatal(err)
	}
	haveRenegotiated := &atomicBool{}
	onTrackFired, onTrackFiredFunc := context.WithCancel(context.Background())
	// stored atomically because OnTrack runs on a different goroutine
	var atomicRemoteTrack atomic.Value
	pcOffer.OnTrack(func(track *Track, r *RTPReceiver) {
		if !haveRenegotiated.get() {
			t.Fatal("OnTrack was called before renegotation")
		}
		onTrackFiredFunc()
		atomicRemoteTrack.Store(track)
	})
	_, err = pcOffer.AddTransceiverFromKind(RTPCodecTypeVideo, RtpTransceiverInit{Direction: RTPTransceiverDirectionRecvonly})
	assert.NoError(t, err)
	vp8Track, err := pcAnswer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "foo1", "bar1")
	assert.NoError(t, err)
	_, err = pcAnswer.AddTrack(vp8Track)
	assert.NoError(t, err)
	assert.NoError(t, signalPair(pcOffer, pcAnswer))
	// rename after the first exchange; the renegotiated answer must carry
	// the new id/label to the remote side
	vp8Track.id = "foo2"
	vp8Track.label = "bar2"
	haveRenegotiated.set(true)
	assert.NoError(t, signalPair(pcOffer, pcAnswer))
	sendVideoUntilDone(onTrackFired.Done(), t, []*Track{vp8Track})
	assert.NoError(t, pcOffer.Close())
	assert.NoError(t, pcAnswer.Close())
	remoteTrack, ok := atomicRemoteTrack.Load().(*Track)
	require.True(t, ok)
	require.NotNil(t, remoteTrack)
	assert.Equal(t, vp8Track.SSRC(), remoteTrack.SSRC())
	assert.Equal(t, "foo2", remoteTrack.ID())
	assert.Equal(t, "bar2", remoteTrack.Label())
}
// TestPeerConnection_Transceiver_Mid tests that we'll provide the same
// transceiver for a media id on successive offer/answer
func TestPeerConnection_Transceiver_Mid(t *testing.T) {
	lim := test.TimeOut(time.Second * 30)
	defer lim.Stop()
	report := test.CheckRoutines(t)
	defer report()
	pcOffer, err := NewPeerConnection(Configuration{})
	assert.NoError(t, err)
	pcAnswer, err := NewPeerConnection(Configuration{})
	assert.NoError(t, err)
	track1, err := pcOffer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "video", "pion1")
	require.NoError(t, err)
	sender1, err := pcOffer.AddTrack(track1)
	require.NoError(t, err)
	track2, err := pcOffer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "video", "pion2")
	require.NoError(t, err)
	_, err = pcOffer.AddTrack(track2)
	require.NoError(t, err)
	// this will create the initial offer using generateUnmatchedSDP
	offer, err := pcOffer.CreateOffer(nil)
	assert.NoError(t, err)
	assert.NoError(t, pcOffer.SetLocalDescription(offer))
	assert.NoError(t, pcAnswer.SetRemoteDescription(offer))
	answer, err := pcAnswer.CreateAnswer(nil)
	assert.NoError(t, err)
	assert.NoError(t, pcAnswer.SetLocalDescription(answer))
	// apply answer so we'll test generateMatchedSDP
	assert.NoError(t, pcOffer.SetRemoteDescription(answer))
	// Must have 3 media descriptions (2 video and 1 datachannel)
	assert.Equal(t, len(offer.parsed.MediaDescriptions), 3)
	assert.True(t, sdpMidHasSsrc(offer, "0", track1.SSRC()), "Expected mid %q with ssrc %d, offer.SDP: %s", "0", track1.SSRC(), offer.SDP)
	// Remove first track, must keep same number of media
	// descriptions and same track ssrc for mid 1 as previous
	err = pcOffer.RemoveTrack(sender1)
	assert.NoError(t, err)
	offer, err = pcOffer.CreateOffer(nil)
	assert.NoError(t, err)
	assert.Equal(t, len(offer.parsed.MediaDescriptions), 3)
	assert.True(t, sdpMidHasSsrc(offer, "1", track2.SSRC()), "Expected mid %q with ssrc %d, offer.SDP: %s", "1", track2.SSRC(), offer.SDP)
	answer, err = pcAnswer.CreateAnswer(nil)
	assert.NoError(t, err)
	assert.NoError(t, pcAnswer.SetLocalDescription(answer))
	// apply answer so we'll test generateMatchedSDP
	assert.NoError(t, pcOffer.SetRemoteDescription(answer))
	// a new track must land on the freed-up (non-sending) mid "0"
	track3, err := pcOffer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "video", "pion3")
	require.NoError(t, err)
	_, err = pcOffer.AddTrack(track3)
	require.NoError(t, err)
	offer, err = pcOffer.CreateOffer(nil)
	assert.NoError(t, err)
	// We reuse the existing non-sending transceiver
	assert.Equal(t, len(offer.parsed.MediaDescriptions), 3)
	assert.True(t, sdpMidHasSsrc(offer, "0", track3.SSRC()), "Expected mid %q with ssrc %d, offer.sdp: %s", "0", track3.SSRC(), offer.SDP)
	assert.True(t, sdpMidHasSsrc(offer, "1", track2.SSRC()), "Expected mid %q with ssrc %d, offer.sdp: %s", "1", track2.SSRC(), offer.SDP)
	assert.NoError(t, pcOffer.Close())
	assert.NoError(t, pcAnswer.Close())
}
// TestPeerConnection_Renegotiation_CodecChange asserts that a transceiver's
// mid stays stable ("0") while the track it carries is swapped from VP8 to
// VP9 across renegotiations, and that track metadata follows each swap.
func TestPeerConnection_Renegotiation_CodecChange(t *testing.T) {
	lim := test.TimeOut(time.Second * 30)
	defer lim.Stop()
	report := test.CheckRoutines(t)
	defer report()
	pcOffer, err := NewPeerConnection(Configuration{})
	assert.NoError(t, err)
	pcAnswer, err := NewPeerConnection(Configuration{})
	assert.NoError(t, err)
	track1, err := pcOffer.NewTrack(DefaultPayloadTypeVP8, 123, "video1", "pion1")
	require.NoError(t, err)
	track2, err := pcOffer.NewTrack(DefaultPayloadTypeVP9, 456, "video2", "pion2")
	require.NoError(t, err)
	sender1, err := pcOffer.AddTrack(track1)
	require.NoError(t, err)
	_, err = pcAnswer.AddTransceiverFromKind(RTPCodecTypeVideo, RtpTransceiverInit{Direction: RTPTransceiverDirectionRecvonly})
	require.NoError(t, err)
	tracksCh := make(chan *Track)
	tracksClosed := make(chan struct{})
	pcAnswer.OnTrack(func(track *Track, r *RTPReceiver) {
		tracksCh <- track
		// drain until the sender removes the track (EOF)
		for {
			if _, readErr := track.ReadRTP(); readErr == io.EOF {
				tracksClosed <- struct{}{}
				return
			}
		}
	})
	err = signalPair(pcOffer, pcAnswer)
	require.NoError(t, err)
	transceivers := pcOffer.GetTransceivers()
	require.Equal(t, 1, len(transceivers))
	require.Equal(t, "0", transceivers[0].Mid())
	transceivers = pcAnswer.GetTransceivers()
	require.Equal(t, 1, len(transceivers))
	require.Equal(t, "0", transceivers[0].Mid())
	ctx, cancel := context.WithCancel(context.Background())
	go sendVideoUntilDone(ctx.Done(), t, []*Track{track1})
	remoteTrack1 := <-tracksCh
	cancel()
	assert.Equal(t, uint32(123), remoteTrack1.SSRC())
	assert.Equal(t, "video1", remoteTrack1.ID())
	assert.Equal(t, "pion1", remoteTrack1.Label())
	// swap VP8 track for the VP9 one on the same transceiver
	err = pcOffer.RemoveTrack(sender1)
	require.NoError(t, err)
	sender2, err := pcOffer.AddTrack(track2)
	require.NoError(t, err)
	err = signalPair(pcOffer, pcAnswer)
	require.NoError(t, err)
	<-tracksClosed
	transceivers = pcOffer.GetTransceivers()
	require.Equal(t, 1, len(transceivers))
	require.Equal(t, "0", transceivers[0].Mid())
	transceivers = pcAnswer.GetTransceivers()
	require.Equal(t, 1, len(transceivers))
	require.Equal(t, "0", transceivers[0].Mid())
	ctx, cancel = context.WithCancel(context.Background())
	go sendVideoUntilDone(ctx.Done(), t, []*Track{track2})
	remoteTrack2 := <-tracksCh
	cancel()
	err = pcOffer.RemoveTrack(sender2)
	require.NoError(t, err)
	err = signalPair(pcOffer, pcAnswer)
	require.NoError(t, err)
	<-tracksClosed
	assert.Equal(t, uint32(456), remoteTrack2.SSRC())
	assert.Equal(t, "video2", remoteTrack2.ID())
	assert.Equal(t, "pion2", remoteTrack2.Label())
	require.NoError(t, pcOffer.Close())
	require.NoError(t, pcAnswer.Close())
}
// TestPeerConnection_Renegotation_RemoveTrack asserts that removing a track
// and renegotiating causes the remote track reader to observe io.EOF.
func TestPeerConnection_Renegotation_RemoveTrack(t *testing.T) {
	api := NewAPI()
	lim := test.TimeOut(time.Second * 30)
	defer lim.Stop()
	report := test.CheckRoutines(t)
	defer report()
	api.mediaEngine.RegisterDefaultCodecs()
	pcOffer, pcAnswer, err := api.newPair(Configuration{})
	if err != nil {
		t.Fatal(err)
	}
	_, err = pcAnswer.AddTransceiverFromKind(RTPCodecTypeVideo, RtpTransceiverInit{Direction: RTPTransceiverDirectionRecvonly})
	assert.NoError(t, err)
	vp8Track, err := pcOffer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "foo", "bar")
	assert.NoError(t, err)
	rtpSender, err := pcOffer.AddTrack(vp8Track)
	assert.NoError(t, err)
	onTrackFired, onTrackFiredFunc := context.WithCancel(context.Background())
	trackClosed, trackClosedFunc := context.WithCancel(context.Background())
	pcAnswer.OnTrack(func(track *Track, r *RTPReceiver) {
		onTrackFiredFunc()
		// read until the removed track closes with EOF
		for {
			if _, err := track.ReadRTP(); err == io.EOF {
				trackClosedFunc()
				return
			}
		}
	})
	assert.NoError(t, signalPair(pcOffer, pcAnswer))
	sendVideoUntilDone(onTrackFired.Done(), t, []*Track{vp8Track})
	assert.NoError(t, pcOffer.RemoveTrack(rtpSender))
	assert.NoError(t, signalPair(pcOffer, pcAnswer))
	<-trackClosed.Done()
	assert.NoError(t, pcOffer.Close())
	assert.NoError(t, pcAnswer.Close())
}
// TestPeerConnection_RoleSwitch asserts that after an initial exchange, the
// former answerer can become the offerer and successfully add a track.
func TestPeerConnection_RoleSwitch(t *testing.T) {
	api := NewAPI()
	lim := test.TimeOut(time.Second * 30)
	defer lim.Stop()
	report := test.CheckRoutines(t)
	defer report()
	api.mediaEngine.RegisterDefaultCodecs()
	pcFirstOfferer, pcSecondOfferer, err := api.newPair(Configuration{})
	if err != nil {
		t.Fatal(err)
	}
	onTrackFired, onTrackFiredFunc := context.WithCancel(context.Background())
	pcFirstOfferer.OnTrack(func(track *Track, r *RTPReceiver) {
		onTrackFiredFunc()
	})
	assert.NoError(t, signalPair(pcFirstOfferer, pcSecondOfferer))
	// Add a new Track to the second offerer
	// This asserts that it will match the ordering of the last RemoteDescription, but then also add new Transceivers to the end
	_, err = pcFirstOfferer.AddTransceiverFromKind(RTPCodecTypeVideo, RtpTransceiverInit{Direction: RTPTransceiverDirectionRecvonly})
	assert.NoError(t, err)
	vp8Track, err := pcSecondOfferer.NewTrack(DefaultPayloadTypeVP8, rand.Uint32(), "foo", "bar")
	assert.NoError(t, err)
	_, err = pcSecondOfferer.AddTrack(vp8Track)
	assert.NoError(t, err)
	// note the reversed argument order: the second peer now offers
	assert.NoError(t, signalPair(pcSecondOfferer, pcFirstOfferer))
	sendVideoUntilDone(onTrackFired.Done(), t, []*Track{vp8Track})
	assert.NoError(t, pcFirstOfferer.Close())
	assert.NoError(t, pcSecondOfferer.Close())
}
// Assert that renegotiation doesn't attempt to gather ICE twice
// Before we would attempt to gather multiple times and would put
// the PeerConnection into a broken state
func TestPeerConnection_Renegotation_Trickle(t *testing.T) {
	settingEngine := SettingEngine{}
	settingEngine.SetTrickle(true)
	api := NewAPI(WithSettingEngine(settingEngine))
	api.mediaEngine.RegisterDefaultCodecs()
	// Invalid STUN server on purpose, will stop ICE Gathering from completing in time
	pcOffer, pcAnswer, err := api.newPair(Configuration{
		ICEServers: []ICEServer{
			{
				URLs: []string{"stun:127.0.0.1:5000"},
			},
		},
	})
	if err != nil {
		t.Fatal(err)
	}
	// exchange trickled candidates in both directions (nil marks end-of-candidates)
	pcOffer.OnICECandidate(func(c *ICECandidate) {
		if c != nil {
			assert.NoError(t, pcAnswer.AddICECandidate(c.ToJSON()))
		}
	})
	pcAnswer.OnICECandidate(func(c *ICECandidate) {
		if c != nil {
			assert.NoError(t, pcOffer.AddICECandidate(c.ToJSON()))
		}
	})
	negotiate := func() {
		offer, err := pcOffer.CreateOffer(nil)
		assert.NoError(t, err)
		assert.NoError(t, pcOffer.SetLocalDescription(offer))
		assert.NoError(t, pcAnswer.SetRemoteDescription(offer))
		answer, err := pcAnswer.CreateAnswer(nil)
		assert.NoError(t, err)
		assert.NoError(t, pcAnswer.SetLocalDescription(answer))
		assert.NoError(t, pcOffer.SetRemoteDescription(answer))
	}
	// negotiating twice must not trigger a second ICE gathering pass
	negotiate()
	negotiate()
	assert.NoError(t, pcOffer.Close())
	assert.NoError(t, pcAnswer.Close())
}
|
{
return true
}
|
user-data.ts
|
import { Injectable } from '@angular/core';
import { Events } from 'ionic-angular';
import { Storage } from '@ionic/storage';
import { HttpClient } from './httpClient';
import { ServiceConfig } from './service.config';
import {ToastController} from 'ionic-angular'
@Injectable()
export class UserData {
_favorites: string[] = [];
HAS_LOGGED_IN = 'hasLoggedIn';
HAS_SEEN_TUTORIAL = 'hasSeenTutorial';
constructor(
public events: Events,
public http: HttpClient,
public toast: ToastController,
public storage: Storage
) {}
hasFavorite(sessionName: string): boolean {
return (this._favorites.indexOf(sessionName) > -1);
};
addFavorite(sessionName: string): void {
this._favorites.push(sessionName);
};
|
if (index > -1) {
this._favorites.splice(index, 1);
}
};
  /**
   * POST credentials to the token-auth endpoint.
   * On a non-empty token: shows a success toast, persists the login flag,
   * the token and the username, and publishes the 'user:login' event.
   * Otherwise shows a failure toast.
   * NOTE(review): the 'success'/'fail' strings returned inside the callback
   * appear to go nowhere — confirm whether HttpClient.post uses the
   * callback's return value.
   */
  login(data:any) {
    let self = this;
    this.http.post(ServiceConfig.TOKENAUTH, {
      username: data.username,
      password: data.password,
    }, function(data){
      console.log(data)
      if(data.token != '' && data.token != null){
        let toast = self.toast.create({
          message: '登录成功',
          position: 'top',
          duration: 2000
        });
        toast.present();
        self.storage.set(self.HAS_LOGGED_IN, true);
        self.storage.set('token', data.token);
        self.setUsername(data.username);
        self.events.publish('user:login');
        return 'success';
      }else {
        let toast = self.toast.create({
          message: '登录失败',
          position: 'top',
          duration: 2000
        });
        toast.present();
      }
      return 'fail';
    });
  };
  /**
   * POST registration data to the signup endpoint.
   * On success: shows a toast, persists the login flag, stores the username
   * and publishes 'user:signup'; otherwise shows a failure toast.
   * NOTE(review): unlike login(), no token is stored here — confirm the
   * backend logs the user in on signup.
   */
  signup(data: any) {
    let self = this;
    this.http.post(ServiceConfig.SIGNUP, {
      phone_number: data.phone,
      gender: data.gender,
      username: data.username,
      password: data.password,
    }, function(data){
      if(data.success == true || data.success == 'true'){
        let toast = self.toast.create({
          message: '注册成功',
          position: 'top',
          duration: 2000
        });
        toast.present();
        self.storage.set(self.HAS_LOGGED_IN, true);
        self.setUsername(data.username);
        self.events.publish('user:signup');
        return 'success';
      }else {
        let toast = self.toast.create({
          message: '注册失败',
          position: 'top',
          duration: 2000
        });
        toast.present();
      }
      return 'fail';
    });
  };
  /**
   * Call the logout endpoint; on success clears the login flag and stored
   * username and publishes 'user:logout', otherwise shows a failure toast.
   * NOTE(review): the stored 'token' key is NOT removed here — verify
   * whether that is intentional.
   */
  logout() {
    let self = this;
    this.http.get(ServiceConfig.LOGOUT,function(data){
      if(data.success == true || data.success == 'true'){
        let toast = self.toast.create({
          message: '注销成功',
          position: 'top',
          duration: 2000
        });
        toast.present();
        self.storage.remove(self.HAS_LOGGED_IN);
        self.storage.remove('username');
        self.events.publish('user:logout');
        return 'success';
      }else {
        let toast = self.toast.create({
          message: '注销失败',
          position: 'top',
          duration: 2000
        });
        toast.present();
      }
      return 'fail';
    });
  };
  // Persist the username under the 'username' key; the Storage promise is
  // not awaited (fire-and-forget).
  setUsername(username: string): void {
    this.storage.set('username', username);
  };
getUsername(): Promise<string> {
return this.storage.get('username').then((value) => {
return value;
});
};
hasLoggedIn(): Promise<boolean> {
return this.storage.get(this.HAS_LOGGED_IN).then((value) => {
return value === true;
});
};
checkHasSeenTutorial(): Promise<string> {
return this.storage.get(this.HAS_SEEN_TUTORIAL).then((value) => {
return value;
});
};
}
|
removeFavorite(sessionName: string): void {
let index = this._favorites.indexOf(sessionName);
|
optinterface.py
|
import pickle
def write_params_file(params_file,type_file,lower_file,upper_file,obj_file,max_f_eval):
    """
    assembles known problem info and settings for NOMAD
    into a plain text file
    ---Inputs---
    params_file : path of the NOMAD parameters file to write
    type_file : pickle file holding the list of variable type strings
    lower_file : pickle file holding the list of lower bound strings
    upper_file : pickle file holding the list of upper bound strings
    obj_file : path of the objective script handed to NOMAD as BB_EXE
    max_f_eval : maximum number of blackbox evaluations
    ---Outputs---
    NONE, writes params_file
    """
    # load problem data from pickles; with-blocks close the files
    # (the previous pickle.load(open(...)) pattern leaked the handles)
    with open(type_file, 'rb') as f:
        type_list = pickle.load(f)
    with open(lower_file, 'rb') as f:
        lower_bound_list = pickle.load(f)
    with open(upper_file, 'rb') as f:
        upper_bound_list = pickle.load(f)
    # convert each list to a single space-separated string
    type_string = ' '.join(type_list)
    lower_bound_string = ' '.join(lower_bound_list)
    upper_bound_string = ' '.join(upper_bound_list)
    dimension = len(type_string.split())  # number of decision variables
    lines = []
    lines.append('BB_OUTPUT_TYPE OBJ\n')  # plain strings where no placeholder
    lines.append(f'BB_EXE \"$python {obj_file}\"\n\n')
    lines.append(f'DIMENSION {dimension}\n')
    lines.append(f'BB_INPUT_TYPE ( {type_string} )\n')
    lines.append(f'LOWER_BOUND ( {lower_bound_string} )\n')
    lines.append(f'UPPER_BOUND ( {upper_bound_string} )\n\n')
    lines.append('LH_SEARCH 1 0\n\n')  # use latin hypercube search to set X0
    lines.append(f'MAX_BB_EVAL {max_f_eval}\n\n')
    lines.append('DISPLAY_DEGREE 2')
    with open(params_file, 'w') as f:
        f.writelines(lines)
def get_parameter_strings(x,options_list,param_types,obj_dir):
|
def overwrite_tags_with_parameters(tagged_lines,parameter_strings):
    """
    Takes tagged lines and the respective options and creates a
    valid Python source file.
    ---Inputs---
    tagged_lines : {list}
        list of all lines making up the tagged source code of the
        user's solver (all lines included, some tagged)
    parameter_strings : {list}
        list containing strings of source code to insert at each
        corresponding tag point
    ---Outputs---
    running_lines : {list}
        list of lines corresponding to a version of user's
        solver with arguments specified by parameter_strings
    """
    # Bug fix: the previous split-based loop hung forever when a tag string
    # occurred more than once in a line (split returned >2 parts and neither
    # branch ran). partition() always replaces exactly the first occurrence.
    # NOTE(review): tag names are matched as raw substrings, so
    # 'mgtune_tag_1' would also match inside 'mgtune_tag_10' — confirm tag
    # ordering guarantees this cannot happen.
    running_lines = [0]*len(tagged_lines)
    num_tags_found = 0
    for i_line, tagged_line in enumerate(tagged_lines):
        cur_line = tagged_line
        while True:
            tag = f'mgtune_tag_{num_tags_found}'
            before, found, after = cur_line.partition(tag)
            if not found:
                # current tag not present; this line is finished
                break
            cur_line = before + parameter_strings[num_tags_found] + after
            num_tags_found += 1
        running_lines[i_line] = cur_line
    return running_lines
def iterate_to_running_solver(x,obj_dir):
    """
    takes the current NOMAD iterate (a vector of numbers largely
    corresponding to option value indices) and uses them to write a
    runnable version of the user's solver
    ---Inputs---
    x : {list}
        current NOMAD iterate, made of integers and floats
    obj_dir : {path or string}
        directory which houses the NOMAD objective function
        likely something like .../user_running_dir/mgtune_working/
    ---Outputs---
    NONE, writes a file
    """
    #absolute paths of hardcoded files with relevant information
    #these variables should match those in mgtune.ttune.tune
    #limitations of NOMAD IO make it difficult to get around
    # such hardcoding
    options_file = obj_dir + '/options.p'
    types_file = obj_dir + '/param_types.txt'
    tagged_solver_file = obj_dir + '/tagged_solver.py'
    running_solver_file = obj_dir + '/running_solver.py'
    # get possible options; with-blocks close the pickle files (the
    # previous pickle.load(open(...)) pattern leaked the handles)
    with open(options_file, 'rb') as f:
        options_list = pickle.load(f)
    #options_list is a list of two element lists
    #options_list[i][0] is the parameter's name
    #options_list[i][1] is a list of its possible values (or bounds if the
    #variable is a float, etc.)
    #extract data type of each option from file
    with open(types_file, 'rb') as f:
        type_list = pickle.load(f)
    #get lines of tagged version of user's solver
    with open(tagged_solver_file) as f:
        tagged_lines = f.readlines()
    #get source code snippets corresponding to each entry of iterate
    parameter_strings = get_parameter_strings(x, options_list, type_list, obj_dir)
    #replace tags in source code with respective parameters
    running_lines = overwrite_tags_with_parameters(tagged_lines, parameter_strings)
    #write running solver
    with open(running_solver_file, 'w') as f:
        f.writelines(running_lines)
def params_file_to_list(params_file):
    """Read a NOMAD problem-setup file and return one stripped string per
    non-blank line (each element sets one NOMAD option)."""
    params = []
    with open(params_file, 'r') as f:
        for line in f:
            # skip whitespace-only lines, strip the rest
            if not line.isspace():
                params.append(line.strip())
    return params
def get_types_and_bounds(parameter_options_list):
    """
    ---Inputs---
    parameter_options_list : {list}
        list keeping track of all free parameters (those that are inserted with tag)
        and their respective possible values
        len(parameter_options_list) = number of free parameters
        parameter_options_list[i] = [ith option name, ith argument options (or keywords)]
    ---Outputs---
    types_list : {list}
        list of "types" for each of the decision variables
        I -> integer, R -> real (float), B -> binary
    lower_bounds_list : {list}
        lower bounds for each of the decision variables (as strings)
    upper_bounds_list : {list}
        upper bounds for each of the decision variables (as strings)
    """
    n_params = len(parameter_options_list)
    types_list = [0] * n_params
    lower_bounds_list = [0] * n_params
    upper_bounds_list = [0] * n_params
    for idx in range(n_params):
        # each entry looks like [option_name, [option_1, option_2, ...]]
        choices = parameter_options_list[idx][1]
        if isinstance(choices, list):
            unbounded = isinstance(choices[0], str) and ('unbounded' in choices[0])
            if unbounded:
                print('ERROR: parameters which belong to set of unbounded size not yet implemented')
            else:
                # variable drawn from a bounded set -> integer index into it
                types_list[idx] = 'I'
                lower_bounds_list[idx] = 0
                upper_bounds_list[idx] = int(len(choices) - 1)
    # numerics -> strings
    types_list = [str(elem) for elem in types_list]
    lower_bounds_list = [str(elem) for elem in lower_bounds_list]
    upper_bounds_list = [str(elem) for elem in upper_bounds_list]
    return types_list, lower_bounds_list, upper_bounds_list
|
"""
gives the source code snippets corresponding to the current iterate
---Inputs---
x : {list}
current NOMAD iterate, made of integers and floats
param_types : {list}
list of strings corresponding to NOMAD parameters types
'I' -> integer
'R' -> real
'B' -> binary
options_list : {list}
list keeping track of all free parameters (those that are inserted with tag)
and their respective possible values
len(parameter_options_list) = number of parameters NOMAD is optimizing
given by string
len(parameter_options_list[i]) = 2
parameter_options_list[i] = [ith option name, list of ith argument options (or keywords)]
obj_dir : {path or string}
directory which houses the NOMAD objective function
likely something like .../user_running_dir/mgtune_working/
---Outputs---
parameter_strings : {list}
list of strings, each corresponding to a parameter option
"""
parameter_strings = [0]*len(x) #list to hold parameter strings
for i_p,(x_cur,type_cur) in enumerate(zip(x,param_types)):
if (type_cur == 'R'):
print('ERROR: mgtune does not yet support real (non-integer) parameters')
elif (type_cur == 'B'):
print('ERROR: mgtune does not yet support binary parameters (though it easily could)')
elif (type_cur == 'I'):
parameter_strings[i_p] = str(options_list[i_p][1][x_cur])
return parameter_strings
|
paths.py
|
from rest_framework import viewsets, request
from rest_framework.response import Response
from rest_framework.decorators import action
from posthog.models import Event, Filter
from posthog.utils import request_to_date_query, dict_from_cursor_fetchall
from django.db.models import OuterRef
from django.db import connection
from typing import Optional
from django.db.models.expressions import Window
from django.db.models.functions import Lag
from django.db.models import F, Q
from django.db import connection
import json
# At the moment, paths don't support users changing distinct_ids midway through.
# See: https://github.com/PostHog/posthog/issues/185
class PathsViewSet(viewsets.ViewSet):
    def _event_subquery(self, event: str, key: str):
        # Single-row subquery: value of column `key` on the Event whose pk
        # equals the outer query's `event` column (OuterRef).
        return Event.objects.filter(pk=OuterRef(event)).values(key)[:1]
def
|
(self, request):
requested_type = request.GET.get('type', None)
# Default
event: Optional[str] = "$pageview"
event_filter = {"event":event}
path_type = "properties->> \'$current_url\'"
start_comparator = "{} ~".format(path_type)
# determine requested type
if requested_type:
if requested_type == "$screen":
event = "$screen"
event_filter = {"event":event}
path_type = "properties->> \'$screen_name\'"
start_comparator = "{} ~".format(path_type)
elif requested_type == "$autocapture":
event = "$autocapture"
event_filter = {"event":event}
path_type = "tag_name_source"
start_comparator = "group_id ="
elif requested_type == "custom_event":
event = None
event_filter = {}
path_type = "event"
start_comparator = "event ="
return event, path_type, event_filter, start_comparator
    @action(methods=['GET'], detail=False)
    def elements(self, request: request.Request):
        # Top 20 autocapture elements for the team, rendered as
        # '<tag> text' plus the element group id, ordered by event count.
        team = request.user.team_set.get()
        all_events = Event.objects.filter(team=team, event="$autocapture")
        all_events_SQL, sql_params = all_events.query.sql_with_params()
        # raw SQL: pick each group's lowest-`order` element, join against
        # per-elements_hash event counts, and drop empty-text elements
        elements_readble = '\
SELECT tag_name_source as name, group_id as id FROM (SELECT \'<\' || e."tag_name" || \'> \' || e."text" as tag_name_source, e."text" as text_source, e.group_id FROM "posthog_element" e\
JOIN ( SELECT group_id, MIN("posthog_element"."order") as minOrder FROM "posthog_element" GROUP BY group_id) e2 ON e.order = e2.minOrder AND e.group_id = e2.group_id) as element\
JOIN (SELECT id, hash, count FROM posthog_elementgroup as g JOIN (SELECT count(*), elements_hash from ({}) as a group by elements_hash) as e on g.hash = e.elements_hash) as outer_group ON element.group_id = outer_group.id where text_source <> \'\' order by count DESC limit 20\
'.format(all_events_SQL)
        cursor = connection.cursor()
        cursor.execute(elements_readble, sql_params)
        rows = dict_from_cursor_fetchall(cursor)
        return Response(rows)
    def _apply_start_point(self, start_comparator: str, query_string: str, start_point:str) -> str:
        # Keep only the portion of each session at/after the first event
        # matching start_point: mark matching rows with their timestamp,
        # take the per-(distinct_id, session) running minimum, then filter.
        # NOTE(review): start_point is interpolated directly into the SQL
        # string — injection risk if it can contain quotes; confirm
        # upstream sanitization.
        marked = '\
SELECT *, CASE WHEN {} \'{}\' THEN timestamp ELSE NULL END as mark from ({}) as sessionified\
'.format(start_comparator, start_point, query_string)
        marked_plus = '\
SELECT *, MIN(mark) OVER (\
PARTITION BY distinct_id\
, session ORDER BY timestamp\
) AS max from ({}) as marked order by session\
'.format(marked)
        sessionified = '\
SELECT * FROM ({}) as something where timestamp >= max \
'.format(marked_plus)
        return sessionified
    def _add_elements(self, query_string: str) -> str:
        # Lateral-join the event query (v1) with its element group (v2) and
        # the group's lowest-`order` element (v3) so autocapture rows carry
        # a readable '<tag> text' column (tag_name_source).
        element = 'SELECT \'<\'|| e."tag_name" || \'> \' || e."text" as tag_name_source, e."text" as text_source FROM "posthog_element" e JOIN \
( SELECT group_id, MIN("posthog_element"."order") as minOrder FROM "posthog_element" GROUP BY group_id) e2 ON e.order = e2.minOrder AND e.group_id = e2.group_id where e.group_id = v2.group_id'
        element_group = 'SELECT g."id" as group_id FROM "posthog_elementgroup" g where v1."elements_hash" = g."hash"'
        sessions_sql = 'SELECT * FROM ({}) as v1 JOIN LATERAL ({}) as v2 on true JOIN LATERAL ({}) as v3 on true'.format(query_string, element_group, element)
        return sessions_sql
# FIXME: Timestamp is timezone aware timestamp, date range uses naive date.
# To avoid unexpected results should convert date range to timestamps with timezone.
    def list(self, request):
        # Build the paths graph: sessionize the team's events, number events
        # within each session, pair each event (target) with its predecessor
        # (source), and return the 20 most frequent source->target edges.
        team = request.user.team_set.get()
        resp = []
        date_query = request_to_date_query(request.GET, exact=False)
        event, path_type, event_filter, start_comparator = self._determine_path_type(request)
        properties = request.GET.get('properties')
        start_point = request.GET.get('start')
        # base queryset: each event annotated with the previous timestamp of
        # the same distinct_id (Lag window) to detect session boundaries
        sessions = Event.objects.add_person_id(team.pk).filter(
            team=team,
            **(event_filter),
            **date_query
        )\
            .filter(~Q(event__in=['$autocapture', '$pageview', '$identify', '$pageleave']) if event is None else Q())\
            .filter(Filter(data={'properties': json.loads(properties)}).properties_to_Q(team_id=team.pk) if properties else Q())\
            .annotate(previous_timestamp=Window(
                expression=Lag('timestamp', default=None),
                partition_by=F('distinct_id'),
                order_by=F('timestamp').asc()
            ))
        sessions_sql, sessions_sql_params = sessions.query.sql_with_params()
        if event == "$autocapture":
            sessions_sql = self._add_elements(query_string=sessions_sql)
        # new_session = 1 when >= 30 minutes passed since the previous event
        events_notated = '\
SELECT *, CASE WHEN EXTRACT(\'EPOCH\' FROM (timestamp - previous_timestamp)) >= (60 * 30) OR previous_timestamp IS NULL THEN 1 ELSE 0 END AS new_session\
FROM ({}) AS inner_sessions\
'.format(sessions_sql)
        # running sum of new_session gives each row its session number
        sessionified = '\
SELECT events_notated.*, SUM(new_session) OVER (\
ORDER BY distinct_id\
,timestamp\
) AS session\
FROM ({}) as events_notated\
'.format(events_notated)
        if start_point:
            sessionified = self._apply_start_point(start_comparator=start_comparator, query_string=sessionified, start_point=start_point)
        # number events within each (distinct_id, session)
        final = '\
SELECT {} as path_type, id, sessionified.session\
,ROW_NUMBER() OVER (\
PARTITION BY distinct_id\
,session ORDER BY timestamp\
) AS event_number\
FROM ({}) as sessionified\
'.format(path_type, sessionified)
        # pair each event (target) with its predecessor (source); only the
        # first 4 steps of every session are considered
        counts = '\
SELECT event_number || \'_\' || path_type as target_event, id as target_id, LAG(event_number || \'_\' || path_type, 1) OVER (\
PARTITION BY session\
) AS source_event , LAG(id, 1) OVER (\
PARTITION BY session\
) AS source_id from \
({}) as final\
where event_number <= 4\
'.format(final)
        cursor = connection.cursor()
        cursor.execute('\
SELECT source_event, target_event, MAX(target_id), MAX(source_id), count(*) from ({}) as counts\
where source_event is not null and target_event is not null\
group by source_event, target_event order by count desc limit 20\
'.format(counts), sessions_sql_params)
        rows = cursor.fetchall()
        for row in rows:
            resp.append({
                'source': row[0],
                'target': row[1],
                'target_id': row[2],
                'source_id': row[3],
                'value': row[4]
            })
        resp = sorted(resp, key=lambda x: x['value'], reverse=True)
        return Response(resp)
|
_determine_path_type
|
Projects.ts
|
import Fs from 'fs';
import Path from 'path';
import { BaseService, RequestHelper } from '../infrastructure';
import { assertEventOptions } from './Events';
import { RequestOptions } from '../infrastructure/RequestHelper';
/** TODO annotate options */
type ProjectOptions = temporaryAny;
class Projects extends BaseService {
  /** List all projects visible to the authenticated user. */
  all(options?: RequestOptions) {
    return RequestHelper.get(this, 'projects', options);
  }
archive(projectId: ProjectId) {
const pId = encodeURIComponent(projectId);
return RequestHelper.post(this, `projects/${pId}/archive`);
}
/**
* @see https://docs.gitlab.com/ee/api/projects.html#create-project-for-user
*/
create(options: temporaryAny) {
const url = options.userId ? `projects/user/${encodeURIComponent(options.userId)}` : 'projects';
return RequestHelper.post(this, url, options);
}
edit(projectId: ProjectId, options: temporaryAny) {
const pId = encodeURIComponent(projectId);
return RequestHelper.put(this, `projects/${pId}`, options);
}
events(projectId: ProjectId, options: ProjectOptions) {
assertEventOptions(options.action, options.targetType);
const pId = encodeURIComponent(projectId);
return RequestHelper.get(this, `projects/${pId}/events`, options);
}
fork(projectId: ProjectId, options: ProjectOptions) {
const pId = encodeURIComponent(projectId);
return RequestHelper.post(this, `projects/${pId}/fork`, options);
}
forks(projectId: ProjectId, options: ProjectOptions) {
const pId = encodeURIComponent(projectId);
return RequestHelper.get(this, `projects/${pId}/forks`, options);
}
languages(projectId: ProjectId) {
const pId = encodeURIComponent(projectId);
return RequestHelper.get(this, `projects/${pId}/languages`);
}
mirrorPull(projectId: ProjectId) {
const pId = encodeURIComponent(projectId);
return RequestHelper.post(this, `projects/${pId}/mirror/pull`);
}
remove(projectId: ProjectId) {
const pId = encodeURIComponent(projectId);
return RequestHelper.delete(this, `projects/${pId}`);
}
  /** Search projects by name. */
  search(projectName: string) {
    return RequestHelper.get(this, 'projects', { search: projectName });
  }
share(projectId: ProjectId, groupId: GroupId, groupAccess: GroupAccess, options: ProjectOptions) {
const pId = encodeURIComponent(projectId);
if (!groupId || !groupAccess) throw new Error('Missing required arguments');
return RequestHelper.post(this, `projects/${pId}/share`, { groupId, groupAccess, ...options });
}
show(projectId: ProjectId, options: ProjectOptions) {
const pId = encodeURIComponent(projectId);
return RequestHelper.get(this, `projects/${pId}`, options);
}
star(projectId: ProjectId) {
const pId = encodeURIComponent(projectId);
return RequestHelper.post(this, `projects/${pId}/star`);
}
statuses(projectId: ProjectId, sha: string, state: string, options: ProjectOptions) {
const pId = encodeURIComponent(projectId);
return RequestHelper.post(this, `projects/${pId}/statuses/${sha}`, { state, ...options });
}
transfer(projectId: ProjectId, namespace: string) {
const pId = encodeURIComponent(projectId);
return RequestHelper.put(this, `projects/${pId}/transfer`, { namespace });
}
unarchive(projectId: ProjectId) {
const pId = encodeURIComponent(projectId);
return RequestHelper.post(this, `projects/${pId}/unarchive`);
}
unshare(projectId: ProjectId, groupId: GroupId) {
const [pId, gId] = [projectId, groupId].map(encodeURIComponent);
return RequestHelper.delete(this, `projects/${pId}/share/${gId}`);
}
unstar(projectId: ProjectId) {
const pId = encodeURIComponent(projectId);
return RequestHelper.post(this, `projects/${pId}/unstar`);
}
updatePushRule(projectId: ProjectId, options: ProjectOptions) {
const pId = encodeURIComponent(projectId);
return RequestHelper.put(this, `projects/${pId}/push_rule`, options);
}
upload(projectId: ProjectId, filePath: string, { fileName = Path.basename(filePath) } = {}) {
const pId = encodeURIComponent(projectId);
const file = Fs.readFileSync(filePath);
return RequestHelper.post(
this,
`projects/${pId}/uploads`,
{
file: {
value: file,
options: {
filename: fileName,
contentType: 'application/octet-stream',
},
},
},
true,
);
}
|
}
export default Projects;
| |
utc.go
|
/*
Copyright 2022 QuanxiangCloud Authors
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package time
import (
"errors"
"strconv"
"strings"
"time"
)
// UTC timezone
//go:generate stringer -type UTC
type UTC int
// UTC timezone
const (
UTC_12 UTC = iota - 12
UTC_11
UTC_10
|
UTC_5
UTC_4
UTC_3
UTC_2
UTC_1
UTC0
UTC1
UTC2
UTC3
UTC4
UTC5
UTC6
UTC7
UTC8
UTC9
UTC10
UTC11
UTC12
)
// predefined errors returned by this package
var (
	// ErrFormat reports an offset string with no '+' or '-' sign to parse.
	ErrFormat = errors.New("error format")
)
// Tolerant extracts the trailing UTC offset from str (e.g. "...+8",
// "...-05", "...+08:00") in a best-effort way. It returns UTC0 with
// ErrFormat when no '+' or '-' is present at all; a sign that is present
// but not followed by a parseable number yields UTC0 with a nil error,
// preserving the function's tolerant contract.
func Tolerant(str string) (UTC, error) {
	// Use whichever sign occurs last so date separators such as
	// "2022-01-02" cannot shadow a later "+08" timezone suffix.
	l := strings.LastIndex(str, "-")
	if p := strings.LastIndex(str, "+"); p > l {
		l = p
	}
	if l == -1 {
		return UTC0, ErrFormat
	}
	offset := str[l:]
	// Drop a minutes part ("+08:00" -> "+08") so strconv.Atoi can parse it.
	if c := strings.Index(offset, ":"); c != -1 {
		offset = offset[:c]
	}
	i, err := strconv.Atoi(offset)
	if err != nil {
		return UTC0, nil
	}
	return UTC(i), nil
}
// Revise shifts the ISO8601 timestamp ts (expected to be in the 0/UTC
// timezone) into the given timezone by adding `timezone` hours, and
// returns the result formatted as ISO8601.
// NOTE(review): assumes ISO8601 is a time layout constant defined
// elsewhere in this package — confirm.
func Revise(ts string, timezone UTC) (string, error) {
	t, err := time.Parse(ISO8601, ts)
	if err != nil {
		return "", err
	}
	// One UTC step equals one hour of offset.
	return t.Add(time.Hour * time.Duration(timezone)).Format(ISO8601), nil
}
// Regular converts ts, expressed in the given timezone, back to the
// 0 (UTC) timezone. It is the inverse of Revise.
func Regular(ts string, timezone UTC) (string, error) {
	return Revise(ts, -timezone)
}
|
UTC_9
UTC_8
UTC_7
UTC_6
|
configuration.py
|
"""
Configuration of libweasyl.
libweasyl depends on some global state to be set up in order for e.g. database
access to work correctly. This might be nicer if python had a way of
parameterizing modules, but we can't, so this is what we have. It does mean
that only one libweasyl configuration can exist in a running python process.
"""
from libweasyl.models.media import DiskMediaItem, MediaItem
from libweasyl.models.meta import BaseQuery, _configure_dbsession
from libweasyl.staff import _init_staff
def configure_libweasyl(
        dbsession, not_found_exception, base_file_path,
        staff_config_dict, media_link_formatter_callback):
    """
    Configure libweasyl for the current application. This sets up some
    global state around libweasyl.

    This function can be called multiple times without issues; each call will
    replace the values set by the previous call.

    Parameters:
        dbsession: A SQLAlchemy ``scoped_session`` instance configured for the
            application's database usage.
        not_found_exception: An exception to be raised on the ``*_or_404``
            methods of queries.
        base_file_path: The path to where static content lives on disk.
        staff_config_dict: A dictionary of staff levels and user IDs.
        media_link_formatter_callback: A callback to format the URL for a media
            link. The callback will be called as ``callback(media_item, link)``
            and is expected to return a URL or ``None`` to use the default.
    """
    _configure_dbsession(dbsession)
    # Install the application's hooks/values onto the libweasyl model classes.
    BaseQuery._not_found_exception = staticmethod(not_found_exception)
    DiskMediaItem._base_file_path = staticmethod(base_file_path)
    _init_staff(**staff_config_dict)
    MediaItem._media_link_formatter_callback = staticmethod(media_link_formatter_callback)
tokentransport.go
|
package registry
import (
"encoding/json"
"fmt"
"net/http"
"net/url"
)
// TokenTransport is an http.RoundTripper that transparently performs
// registry bearer-token authentication: when a response demands a token
// it fetches one from the advertised auth service (using Username and
// Password if set) and retries the original request with it.
type TokenTransport struct {
	// Transport is the underlying RoundTripper used for every request,
	// including the token fetch itself.
	Transport http.RoundTripper
	Username string
	Password string
}
// RoundTrip executes req; if the response is a token challenge, it
// authenticates against the advertised service and replays the request.
func (t *TokenTransport) RoundTrip(req *http.Request) (*http.Response, error) {
	resp, err := t.Transport.RoundTrip(req)
	if err != nil {
		return resp, err
	}
	authService := isTokenDemand(resp)
	if authService == nil {
		// No challenge: hand the response straight back.
		return resp, nil
	}
	return t.authAndRetry(authService, req)
}
// authToken mirrors the JSON body returned by the registry auth service;
// only the bearer token itself is needed.
type authToken struct {
	Token string `json:"token"`
}
// authAndRetry obtains a bearer token from authService and replays req
// with it. Any auth response accompanying an error is handed back to the
// caller unchanged.
func (t *TokenTransport) authAndRetry(authService *authService, req *http.Request) (*http.Response, error) {
	token, authResp, err := t.auth(authService)
	if err != nil {
		return authResp, err
	}
	// auth can return a non-nil response with a nil error (non-200 auth
	// status); close its body here so it does not leak once we retry.
	if authResp != nil {
		authResp.Body.Close()
	}
	return t.retry(req, token)
}
// auth requests a bearer token from the auth service using the
// transport's credentials. On a non-200 auth response it returns that
// response with a nil error (ownership of the body passes to the
// caller), matching the original contract.
func (t *TokenTransport) auth(authService *authService) (string, *http.Response, error) {
	authReq, err := authService.Request(t.Username, t.Password)
	if err != nil {
		return "", nil, err
	}
	client := http.Client{
		Transport: t.Transport,
	}
	response, err := client.Do(authReq)
	if err != nil {
		return "", nil, err
	}
	if response.StatusCode != http.StatusOK {
		return "", response, nil
	}
	// Close exactly once on the success path. The original code both
	// deferred a close and explicitly closed the body after decoding.
	defer response.Body.Close()
	var authToken authToken
	if err := json.NewDecoder(response.Body).Decode(&authToken); err != nil {
		return "", nil, err
	}
	return authToken.Token, nil, nil
}
// retry replays req once with the freshly acquired bearer token attached.
func (t *TokenTransport) retry(req *http.Request, token string) (*http.Response, error) {
	bearer := fmt.Sprintf("Bearer %s", token)
	req.Header.Set("Authorization", bearer)
	return t.Transport.RoundTrip(req)
}
// authService describes the token endpoint advertised by a registry's
// bearer challenge (realm URL plus service/scope query parameters).
type authService struct {
	Realm string
	Service string
	Scope string
}
// Request builds the GET request against the auth realm, attaching the
// service/scope query parameters and basic-auth credentials when given.
func (authService *authService) Request(username, password string) (*http.Request, error) {
	// Avoid shadowing the net/url package name.
	reqURL, err := url.Parse(authService.Realm)
	if err != nil {
		return nil, err
	}
	q := reqURL.Query()
	q.Set("service", authService.Service)
	if authService.Scope != "" {
		q.Set("scope", authService.Scope)
	}
	reqURL.RawQuery = q.Encode()
	request, err := http.NewRequest("GET", reqURL.String(), nil)
	// Check the error before touching request: the original called
	// SetBasicAuth on a possibly-nil request.
	if err != nil {
		return nil, err
	}
	if username != "" || password != "" {
		request.SetBasicAuth(username, password)
	}
	return request, nil
}
func isTokenDemand(resp *http.Response) *authService {
if resp == nil {
return nil
}
if resp.StatusCode != http.StatusUnauthorized
|
return parseOauthHeader(resp)
}
func parseOauthHeader(resp *http.Response) *authService {
challenges := parseAuthHeader(resp.Header)
for _, challenge := range challenges {
if challenge.Scheme == "bearer" {
return &authService{
Realm: challenge.Parameters["realm"],
Service: challenge.Parameters["service"],
Scope: challenge.Parameters["scope"],
}
}
}
return nil
}
|
{
return nil
}
|
tx_builder.rs
|
use super::*;
use super::fees;
use super::utils;
use std::collections::BTreeSet;
// comes from witsVKeyNeeded in the Ledger spec
/// Collects into `keys` every Ed25519 key hash that must witness the
/// given certificate, so the fee estimator knows how many vkey
/// witnesses the final transaction will carry.
fn witness_keys_for_cert(cert_enum: &Certificate, keys: &mut BTreeSet<Ed25519KeyHash>) {
    match &cert_enum.0 {
        // stake key registrations do not require a witness
        CertificateEnum::StakeRegistration(_cert) => {},
        CertificateEnum::StakeDeregistration(cert) => {
            // NOTE(review): unwrap assumes the stake credential is a key
            // hash, not a script hash — confirm callers guarantee this.
            keys.insert(cert.stake_credential().to_keyhash().unwrap());
        },
        CertificateEnum::StakeDelegation(cert) => {
            keys.insert(cert.stake_credential().to_keyhash().unwrap());
        },
        CertificateEnum::PoolRegistration(cert) => {
            // Every declared pool owner must witness, plus the operator.
            for owner in &cert.pool_params().pool_owners().0 {
                keys.insert(owner.clone());
            }
            keys.insert(
                Ed25519KeyHash::from_bytes(cert.pool_params().operator().to_bytes()).unwrap()
            );
        },
        CertificateEnum::PoolRetirement(cert) => {
            keys.insert(
                Ed25519KeyHash::from_bytes(cert.pool_keyhash().to_bytes()).unwrap()
            );
        },
        CertificateEnum::GenesisKeyDelegation(cert) => {
            keys.insert(
                Ed25519KeyHash::from_bytes(cert.genesis_delegate_hash().to_bytes()).unwrap()
            );
        },
        // not witness as there is no single core node or genesis key that posts the certificate
        CertificateEnum::MoveInstantaneousRewardsCert(_cert) => {},
    }
}
/// Estimates the minimum fee for the transaction currently described by
/// `tx_builder` by building the body, attaching *fake* witnesses of the
/// right kinds and counts (only their serialized size matters), and
/// running the linear fee algorithm over the full transaction.
fn min_fee(tx_builder: &TransactionBuilder) -> Result<Coin, JsError> {
    let body = tx_builder.build()?;
    // Deterministic throwaway root key used to produce correctly-sized
    // fake signatures; its actual value is irrelevant to the fee.
    let fake_key_root = Bip32PrivateKey::from_bip39_entropy(
        // art forum devote street sure rather head chuckle guard poverty release quote oak craft enemy
        &[0x0c, 0xcb, 0x74, 0xf3, 0x6b, 0x7d, 0xa1, 0x64, 0x9a, 0x81, 0x44, 0x67, 0x55, 0x22, 0xd4, 0xd8, 0x09, 0x7c, 0x64, 0x12],
        &[]
    );
    // recall: this includes keys for input, certs and withdrawals
    let vkeys = match tx_builder.input_types.vkeys.len() {
        0 => None,
        x => {
            let mut result = Vkeywitnesses::new();
            let raw_key = fake_key_root.to_raw_key();
            for _i in 0..x {
                result.add(&Vkeywitness::new(
                    &Vkey::new(&raw_key.to_public()),
                    &raw_key.sign([1u8; 100].as_ref())
                ));
            }
            Some(result)
        },
    };
    let script_keys = match tx_builder.input_types.scripts.len() {
        0 => None,
        _x => {
            // TODO: figure out how to populate fake witnesses for these
            return Err(JsError::from_str("Script inputs not supported yet"))
        },
    };
    let bootstrap_keys = match tx_builder.input_types.bootstraps.len() {
        0 => None,
        _x => {
            let mut result = BootstrapWitnesses::new();
            for addr in &tx_builder.input_types.bootstraps {
                // picking icarus over daedalus for fake witness generation shouldn't matter
                result.add(&make_icarus_bootstrap_witness(
                    &hash_transaction(&body),
                    &ByronAddress::from_bytes(addr.clone()).unwrap(),
                    &fake_key_root
                ));
            }
            Some(result)
        },
    };
    let witness_set = TransactionWitnessSet {
        vkeys: vkeys,
        scripts: script_keys,
        bootstraps: bootstrap_keys,
    };
    let full_tx = Transaction {
        body,
        witness_set,
        metadata: tx_builder.metadata.clone(),
    };
    fees::min_fee(&full_tx, &tx_builder.fee_algo)
}
// We need to know how many of each type of witness will be in the transaction so we can calculate the tx fee
#[derive(Clone, Debug)]
struct MockWitnessSet {
    // Distinct key hashes needing vkey witnesses (inputs, certs, withdrawals).
    vkeys: BTreeSet<Ed25519KeyHash>,
    // Distinct script witnesses (currently rejected by the fee estimator).
    scripts: BTreeSet<ScriptHash>,
    // Serialized Byron addresses that need bootstrap witnesses.
    bootstraps: BTreeSet<Vec<u8>>,
}
// A transaction input paired with the value it carries.
#[derive(Clone, Debug)]
struct TxBuilderInput {
    input: TransactionInput,
    amount: Coin, // we need to keep track of the amount in the inputs for input selection
}
// Incrementally accumulates inputs/outputs/certs and computes fees and
// change; see the `new` constructor for the protocol parameters.
#[wasm_bindgen]
#[derive(Clone, Debug)]
pub struct TransactionBuilder {
    // protocol parameter: minimum value a newly created UTXO can contain
    minimum_utxo_val: BigNum,
    // protocol parameters: deposits charged by registration certificates
    pool_deposit: BigNum,
    key_deposit: BigNum,
    fee_algo: fees::LinearFee,
    inputs: Vec<TxBuilderInput>,
    outputs: TransactionOutputs,
    fee: Option<Coin>,
    ttl: Option<u32>, // absolute slot number
    certs: Option<Certificates>,
    withdrawals: Option<Withdrawals>,
    metadata: Option<TransactionMetadata>,
    // tracks which witness kinds will be needed so min_fee can fake them
    input_types: MockWitnessSet,
}
#[wasm_bindgen]
impl TransactionBuilder {
// We have to know what kind of inputs these are to know what kind of mock witnesses to create since
// 1) mock witnesses have different lengths depending on the type which changes the expecting fee
// 2) Witnesses are a set so we need to get rid of duplicates to avoid over-estimating the fee
pub fn add_key_input(&mut self, hash: &Ed25519KeyHash, input: &TransactionInput, amount: &Coin) {
self.inputs.push(TxBuilderInput {
input: input.clone(),
amount: amount.clone(),
});
self.input_types.vkeys.insert(hash.clone());
}
pub fn add_script_input(&mut self, hash: &ScriptHash, input: &TransactionInput, amount: &Coin) {
self.inputs.push(TxBuilderInput {
input: input.clone(),
amount: amount.clone(),
});
self.input_types.scripts.insert(hash.clone());
}
pub fn add_bootstrap_input(&mut self, hash: &ByronAddress, input: &TransactionInput, amount: &Coin) {
self.inputs.push(TxBuilderInput {
input: input.clone(),
amount: amount.clone(),
});
self.input_types.bootstraps.insert(hash.to_bytes());
}
pub fn add_input(&mut self, address: &Address, input: &TransactionInput, amount: &Coin) {
match &BaseAddress::from_address(address) {
Some(addr) => {
match &addr.payment_cred().to_keyhash() {
Some(hash) => return self.add_key_input(hash, input, amount),
None => (),
}
match &addr.payment_cred().to_scripthash() {
Some(hash) => return self.add_script_input(hash, input, amount),
None => (),
}
},
None => (),
}
match &EnterpriseAddress::from_address(address) {
Some(addr) => {
match &addr.payment_cred().to_keyhash() {
Some(hash) => return self.add_key_input(hash, input, amount),
None => (),
}
match &addr.payment_cred().to_scripthash() {
Some(hash) => return self.add_script_input(hash, input, amount),
None => (),
}
},
None => (),
}
match &PointerAddress::from_address(address) {
Some(addr) => {
match &addr.payment_cred().to_keyhash() {
Some(hash) => return self.add_key_input(hash, input, amount),
None => (),
}
match &addr.payment_cred().to_scripthash() {
Some(hash) => return self.add_script_input(hash, input, amount),
None => (),
}
},
None => (),
}
match &ByronAddress::from_address(address) {
Some(addr) => {
return self.add_bootstrap_input(addr, input, amount);
},
None => (),
}
}
    /// calculates how much the fee would increase if you added a given input
    pub fn fee_for_input(&mut self, address: &Address, input: &TransactionInput, amount: &Coin) -> Result<Coin, JsError> {
        let mut self_copy = self.clone();
        // we need some value for these for it to be a valid transaction
        // but since we're only calculating the difference between the fee of two transactions
        // it doesn't matter what these are set as, since it cancels out
        self_copy.set_ttl(0);
        self_copy.set_fee(&to_bignum(0));
        let fee_before = min_fee(&self_copy)?;
        self_copy.add_input(&address, &input, &amount);
        let fee_after = min_fee(&self_copy)?;
        fee_after.checked_sub(&fee_before)
    }
pub fn add_output(&mut self, output: &TransactionOutput) -> Result<(), JsError> {
if output.amount() < self.minimum_utxo_val {
Err(JsError::from_str(&format!(
"Value {} less than the minimum UTXO value {}",
from_bignum(&output.amount()),
from_bignum(&self.minimum_utxo_val)
)))
} else {
self.outputs.add(output);
Ok(())
}
}
    /// calculates how much the fee would increase if you added a given output
    pub fn fee_for_output(&mut self, output: &TransactionOutput) -> Result<Coin, JsError> {
        let mut self_copy = self.clone();
        // we need some value for these for it to be a valid transaction
        // but since we're only calculating the difference between the fee of two transactions
        // it doesn't matter what these are set as, since it cancels out
        self_copy.set_ttl(0);
        self_copy.set_fee(&to_bignum(0));
        let fee_before = min_fee(&self_copy)?;
        self_copy.add_output(&output)?;
        let fee_after = min_fee(&self_copy)?;
        fee_after.checked_sub(&fee_before)
    }
pub fn set_fee(&mut self, fee: &Coin) {
self.fee = Some(fee.clone())
}
pub fn set_ttl(&mut self, ttl: u32) {
self.ttl = Some(ttl)
}
pub fn set_certs(&mut self, certs: &Certificates) {
self.certs = Some(certs.clone());
for cert in &certs.0 {
witness_keys_for_cert(cert, &mut self.input_types.vkeys);
};
}
pub fn set_withdrawals(&mut self, withdrawals: &Withdrawals) {
self.withdrawals = Some(withdrawals.clone());
for (withdrawal, _coin) in &withdrawals.0 {
self.input_types.vkeys.insert(withdrawal.payment_cred().to_keyhash().unwrap().clone());
};
}
pub fn set_metadata(&mut self, metadata: &TransactionMetadata) {
self.metadata = Some(metadata.clone())
}
pub fn new(
linear_fee: &fees::LinearFee,
// protocol parameter that defines the minimum value a newly created UTXO can contain
minimum_utxo_val: &Coin,
pool_deposit: &BigNum, // protocol parameter
key_deposit: &BigNum, // protocol parameter
) -> Self {
Self {
minimum_utxo_val: minimum_utxo_val.clone(),
key_deposit: key_deposit.clone(),
pool_deposit: pool_deposit.clone(),
fee_algo: linear_fee.clone(),
inputs: Vec::new(),
outputs: TransactionOutputs::new(),
fee: None,
ttl: None,
certs: None,
withdrawals: None,
metadata: None,
input_types: MockWitnessSet {
vkeys: BTreeSet::new(),
scripts: BTreeSet::new(),
bootstraps: BTreeSet::new(),
},
}
}
/// does not include refunds or withdrawals
pub fn get_explicit_input(&self) -> Result<Coin, JsError> {
self
.inputs
.iter()
.try_fold(
to_bignum(0),
|acc, ref tx_builder_input| acc.checked_add(&tx_builder_input.amount)
)
}
/// withdrawals and refunds
pub fn get_implicit_input(&self) -> Result<Coin, JsError> {
internal_get_implicit_input(
&self.withdrawals,
&self.certs,
&self.pool_deposit,
&self.key_deposit,
)
}
/// does not include fee
pub fn get_explicit_output(&self) -> Result<Coin, JsError> {
self
.outputs.0
.iter()
.try_fold(
to_bignum(0),
|acc, ref output| acc.checked_add(&output.amount)
)
}
pub fn get_deposit(&self) -> Result<Coin, JsError> {
internal_get_deposit(
&self.certs,
&self.pool_deposit,
&self.key_deposit,
)
}
pub fn get_fee_if_set(&self) -> Option<Coin> {
self.fee.clone()
}
    /// Warning: this function will mutate the /fee/ field
    /// Sends any surplus input (beyond outputs + deposits + fee) to
    /// `address` as a change output when the surplus is large enough to
    /// form a valid UTXO; otherwise the surplus is burned into the fee.
    /// Returns Ok(true) when a change output was added.
    pub fn add_change_if_needed(&mut self, address: &Address) -> Result<bool, JsError> {
        let fee = match &self.fee {
            None => self.min_fee(),
            // generating the change output involves changing the fee
            Some(_x) => return Err(JsError::from_str("Cannot calculate change if fee was explicitly specified")),
        }?;
        let input_total = self.get_explicit_input()?.checked_add(&self.get_implicit_input()?)?;
        let output_total = self.get_explicit_output()?.checked_add(&self.get_deposit()?)?;
        match &input_total >= &output_total.checked_add(&fee)? {
            false => return Err(JsError::from_str("Insufficient input in transaction")),
            true => {
                // check how much the fee would increase if we added a change output
                let fee_for_change = self.fee_for_output(&TransactionOutput {
                    address: address.clone(),
                    // maximum possible output to maximize fee from adding this output
                    // this may over-estimate the fee by a few bytes but that's okay
                    amount: to_bignum(0x1_00_00_00_00),
                })?;
                let new_fee = fee.checked_add(&fee_for_change)?;
                // needs to have at least minimum_utxo_val leftover for the change to be a valid UTXO entry
                match input_total >= output_total.checked_add(&new_fee)?.checked_add(&self.minimum_utxo_val)? {
                    false => {
                        // recall: we originally assumed the fee was the maximum possible so we definitely have enough input to cover whatever fee it ends up being
                        self.set_fee(&input_total.checked_sub(&output_total)?);
                        return Ok(false) // not enough input to cover the extra fee from adding an output so we just burn whatever is left
                    },
                    true => {
                        // recall: we originally assumed the fee was the maximum possible so we definitely have enough input to cover whatever fee it ends up being
                        self.set_fee(&new_fee);
                        self.add_output(&TransactionOutput {
                            address: address.clone(),
                            amount: input_total.checked_sub(&output_total)?.checked_sub(&new_fee)?,
                        })?;
                    },
                };
            },
        };
        Ok(true)
    }
    /// Assembles the final TransactionBody; `fee` and `ttl` must have been
    /// set beforehand (directly or via add_change_if_needed).
    pub fn build(&self) -> Result<TransactionBody, JsError> {
        let fee = self.fee.ok_or_else(|| JsError::from_str("Fee not specified"))?;
        let ttl = self.ttl.ok_or_else(|| JsError::from_str("ttl not specified"))?;
        Ok(TransactionBody {
            inputs: TransactionInputs(self.inputs.iter().map(|ref tx_builder_input| tx_builder_input.input.clone()).collect()),
            outputs: self.outputs.clone(),
            fee: fee,
            ttl: ttl,
            certs: self.certs.clone(),
            withdrawals: self.withdrawals.clone(),
            update: None,
            // the body commits only to the metadata hash; the metadata
            // itself travels alongside the transaction
            metadata_hash: match &self.metadata {
                None => None,
                Some(x) => Some(utils::hash_metadata(x)),
            },
        })
    }
/// warning: sum of all parts of a transaction must equal 0. You cannot just set the fee to the min value and forget about it
/// warning: min_fee may be slightly larger than the actual minimum fee (ex: a few lovelaces)
/// this is done to simplify the library code, but can be fixed later
pub fn min_fee(&self) -> Result<Coin, JsError> {
let mut self_copy = self.clone();
self_copy.set_fee(&to_bignum(0x1_00_00_00_00));
min_fee(&self_copy)
}
}
#[cfg(test)]
mod tests {
use super::*;
use fees::*;
fn genesis_id() -> TransactionHash {
TransactionHash::from([0u8; TransactionHash::BYTE_COUNT])
}
fn root_key_15() -> Bip32PrivateKey {
// art forum devote street sure rather head chuckle guard poverty release quote oak craft enemy
let entropy = [0x0c, 0xcb, 0x74, 0xf3, 0x6b, 0x7d, 0xa1, 0x64, 0x9a, 0x81, 0x44, 0x67, 0x55, 0x22, 0xd4, 0xd8, 0x09, 0x7c, 0x64, 0x12];
Bip32PrivateKey::from_bip39_entropy(&entropy, &[])
}
fn harden(index: u32) -> u32 {
index | 0x80_00_00_00
}
#[test]
fn build_tx_with_change() {
let linear_fee = LinearFee::new(&to_bignum(500), &to_bignum(2));
let mut tx_builder = TransactionBuilder::new(&linear_fee, &to_bignum(1), &to_bignum(1), &to_bignum(1));
let spend = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(0)
.derive(0)
.to_public();
let change_key = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(1)
.derive(0)
.to_public();
let stake = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(2)
.derive(0)
.to_public();
let spend_cred = StakeCredential::from_keyhash(&spend.to_raw_key().hash());
let stake_cred = StakeCredential::from_keyhash(&stake.to_raw_key().hash());
let addr_net_0 = BaseAddress::new(NetworkInfo::testnet().network_id(), &spend_cred, &stake_cred).to_address();
tx_builder.add_key_input(
&spend.to_raw_key().hash(),
&TransactionInput::new(&genesis_id(), 0),
&to_bignum(1_000_000)
);
tx_builder.add_output(&TransactionOutput::new(
&addr_net_0,
&to_bignum(10)
)).unwrap();
tx_builder.set_ttl(1000);
let change_cred = StakeCredential::from_keyhash(&change_key.to_raw_key().hash());
let change_addr = BaseAddress::new(NetworkInfo::testnet().network_id(), &change_cred, &stake_cred).to_address();
let added_change = tx_builder.add_change_if_needed(
&change_addr
);
assert!(added_change.unwrap());
assert_eq!(tx_builder.outputs.len(), 2);
assert_eq!(
tx_builder.get_explicit_input().unwrap().checked_add(&tx_builder.get_implicit_input().unwrap()).unwrap(),
tx_builder.get_explicit_output().unwrap().checked_add(&tx_builder.get_fee_if_set().unwrap()).unwrap()
);
let _final_tx = tx_builder.build(); // just test that it doesn't throw
}
#[test]
fn build_tx_without_change() {
let linear_fee = LinearFee::new(&to_bignum(500), &to_bignum(2));
let mut tx_builder = TransactionBuilder::new(&linear_fee, &to_bignum(1), &to_bignum(1), &to_bignum(1));
let spend = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(0)
.derive(0)
.to_public();
let change_key = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(1)
.derive(0)
.to_public();
let stake = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(2)
.derive(0)
.to_public();
let spend_cred = StakeCredential::from_keyhash(&spend.to_raw_key().hash());
let stake_cred = StakeCredential::from_keyhash(&stake.to_raw_key().hash());
let addr_net_0 = BaseAddress::new(NetworkInfo::testnet().network_id(), &spend_cred, &stake_cred).to_address();
tx_builder.add_key_input(
&spend.to_raw_key().hash(),
&TransactionInput::new(&genesis_id(), 0),
&to_bignum(1_000_000)
);
tx_builder.add_output(&TransactionOutput::new(
&addr_net_0,
&to_bignum(880_000)
)).unwrap();
tx_builder.set_ttl(1000);
let change_cred = StakeCredential::from_keyhash(&change_key.to_raw_key().hash());
let change_addr = BaseAddress::new(NetworkInfo::testnet().network_id(), &change_cred, &stake_cred).to_address();
let added_change = tx_builder.add_change_if_needed(
&change_addr
);
assert!(!added_change.unwrap());
assert_eq!(tx_builder.outputs.len(), 1);
assert_eq!(
tx_builder.get_explicit_input().unwrap().checked_add(&tx_builder.get_implicit_input().unwrap()).unwrap(),
tx_builder.get_explicit_output().unwrap().checked_add(&tx_builder.get_fee_if_set().unwrap()).unwrap()
);
let _final_tx = tx_builder.build(); // just test that it doesn't throw
}
#[test]
fn build_tx_with_certs() {
let linear_fee = LinearFee::new(&to_bignum(500), &to_bignum(2));
let mut tx_builder = TransactionBuilder::new(&linear_fee, &to_bignum(1), &to_bignum(1), &to_bignum(1_000_000));
let spend = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(0)
.derive(0)
.to_public();
let change_key = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(1)
.derive(0)
.to_public();
let stake = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(2)
.derive(0)
.to_public();
let stake_cred = StakeCredential::from_keyhash(&stake.to_raw_key().hash());
tx_builder.add_key_input(
&spend.to_raw_key().hash(),
&TransactionInput::new(&genesis_id(), 0),
&to_bignum(5_000_000)
);
tx_builder.set_ttl(1000);
let mut certs = Certificates::new();
certs.add(&Certificate::new_stake_registration(&StakeRegistration::new(&stake_cred)));
certs.add(&Certificate::new_stake_delegation(&StakeDelegation::new(
&stake_cred,
&stake.to_raw_key().hash(), // in reality, this should be the pool owner's key, not ours
)));
tx_builder.set_certs(&certs);
let change_cred = StakeCredential::from_keyhash(&change_key.to_raw_key().hash());
let change_addr = BaseAddress::new(NetworkInfo::testnet().network_id(), &change_cred, &stake_cred).to_address();
tx_builder.add_change_if_needed(
&change_addr
).unwrap();
assert_eq!(tx_builder.min_fee().unwrap().to_str(), "213502");
assert_eq!(tx_builder.get_fee_if_set().unwrap().to_str(), "215502");
assert_eq!(tx_builder.get_deposit().unwrap().to_str(), "1000000");
assert_eq!(tx_builder.outputs.len(), 1);
assert_eq!(
tx_builder.get_explicit_input().unwrap().checked_add(&tx_builder.get_implicit_input().unwrap()).unwrap(),
tx_builder
.get_explicit_output().unwrap()
.checked_add(&tx_builder.get_fee_if_set().unwrap()).unwrap()
.checked_add(&tx_builder.get_deposit().unwrap()).unwrap()
);
let _final_tx = tx_builder.build(); // just test that it doesn't throw
}
#[test]
fn build_tx_exact_amount() {
// transactions where sum(input) == sum(output) exact should pass
let linear_fee = LinearFee::new(&to_bignum(0), &to_bignum(0));
let mut tx_builder = TransactionBuilder::new(&linear_fee, &to_bignum(1), &to_bignum(0), &to_bignum(0));
let spend = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(0)
.derive(0)
.to_public();
let change_key = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(1)
.derive(0)
.to_public();
let stake = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(2)
.derive(0)
.to_public();
tx_builder.add_key_input(
&&spend.to_raw_key().hash(),
&TransactionInput::new(&genesis_id(), 0),
&to_bignum(5)
);
let spend_cred = StakeCredential::from_keyhash(&spend.to_raw_key().hash());
let stake_cred = StakeCredential::from_keyhash(&stake.to_raw_key().hash());
let addr_net_0 = BaseAddress::new(NetworkInfo::testnet().network_id(), &spend_cred, &stake_cred).to_address();
tx_builder.add_output(&TransactionOutput::new(
&addr_net_0,
&to_bignum(5)
)).unwrap();
tx_builder.set_ttl(0);
let change_cred = StakeCredential::from_keyhash(&change_key.to_raw_key().hash());
let change_addr = BaseAddress::new(NetworkInfo::testnet().network_id(), &change_cred, &stake_cred).to_address();
|
let added_change = tx_builder.add_change_if_needed(
&change_addr
).unwrap();
assert_eq!(added_change, false);
let final_tx = tx_builder.build().unwrap();
assert_eq!(final_tx.outputs().len(), 1);
}
#[test]
fn build_tx_exact_change() {
// transactions where we have exactly enough ADA to add change should pass
let linear_fee = LinearFee::new(&to_bignum(0), &to_bignum(0));
let mut tx_builder = TransactionBuilder::new(&linear_fee, &to_bignum(1), &to_bignum(0), &to_bignum(0));
let spend = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(0)
.derive(0)
.to_public();
let change_key = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(1)
.derive(0)
.to_public();
let stake = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(2)
.derive(0)
.to_public();
tx_builder.add_key_input(
&&spend.to_raw_key().hash(),
&TransactionInput::new(&genesis_id(), 0),
&to_bignum(6)
);
let spend_cred = StakeCredential::from_keyhash(&spend.to_raw_key().hash());
let stake_cred = StakeCredential::from_keyhash(&stake.to_raw_key().hash());
let addr_net_0 = BaseAddress::new(NetworkInfo::testnet().network_id(), &spend_cred, &stake_cred).to_address();
tx_builder.add_output(&TransactionOutput::new(
&addr_net_0,
&to_bignum(5)
)).unwrap();
tx_builder.set_ttl(0);
let change_cred = StakeCredential::from_keyhash(&change_key.to_raw_key().hash());
let change_addr = BaseAddress::new(NetworkInfo::testnet().network_id(), &change_cred, &stake_cred).to_address();
let added_change = tx_builder.add_change_if_needed(
&change_addr
).unwrap();
assert_eq!(added_change, true);
let final_tx = tx_builder.build().unwrap();
assert_eq!(final_tx.outputs().len(), 2);
assert_eq!(final_tx.outputs().get(1).amount().to_str(), "1");
}
#[test]
#[should_panic]
fn build_tx_insufficient_deposit() {
// transactions should fail with insufficient fees if a deposit is required
let linear_fee = LinearFee::new(&to_bignum(0), &to_bignum(0));
let mut tx_builder = TransactionBuilder::new(&linear_fee, &to_bignum(1), &to_bignum(0), &to_bignum(5));
let spend = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(0)
.derive(0)
.to_public();
let change_key = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(1)
.derive(0)
.to_public();
let stake = root_key_15()
.derive(harden(1852))
.derive(harden(1815))
.derive(harden(0))
.derive(2)
.derive(0)
.to_public();
tx_builder.add_key_input(
&&spend.to_raw_key().hash(),
&TransactionInput::new(&genesis_id(), 0),
&to_bignum(5)
);
let spend_cred = StakeCredential::from_keyhash(&spend.to_raw_key().hash());
let stake_cred = StakeCredential::from_keyhash(&stake.to_raw_key().hash());
let addr_net_0 = BaseAddress::new(NetworkInfo::testnet().network_id(), &spend_cred, &stake_cred).to_address();
tx_builder.add_output(&TransactionOutput::new(
&addr_net_0,
&to_bignum(5)
)).unwrap();
tx_builder.set_ttl(0);
// add a cert which requires a deposit
let mut certs = Certificates::new();
certs.add(&Certificate::new_stake_registration(&StakeRegistration::new(&stake_cred)));
tx_builder.set_certs(&certs);
let change_cred = StakeCredential::from_keyhash(&change_key.to_raw_key().hash());
let change_addr = BaseAddress::new(NetworkInfo::testnet().network_id(), &change_cred, &stake_cred).to_address();
tx_builder.add_change_if_needed(
&change_addr
).unwrap();
}
#[test]
fn build_tx_with_inputs() {
    // Linear fee: coefficient 500 (per byte) plus constant 2.
    let linear_fee = LinearFee::new(&to_bignum(500), &to_bignum(2));
    let mut tx_builder = TransactionBuilder::new(&linear_fee, &to_bignum(1), &to_bignum(1), &to_bignum(1));
    // Payment key derived at m/1852'/1815'/0'/0/0 from the fixed test root key.
    let spend = root_key_15()
        .derive(harden(1852))
        .derive(harden(1815))
        .derive(harden(0))
        .derive(0)
        .derive(0)
        .to_public();
    // Staking key derived at m/1852'/1815'/0'/2/0.
    let stake = root_key_15()
        .derive(harden(1852))
        .derive(harden(1815))
        .derive(harden(0))
        .derive(2)
        .derive(0)
        .to_public();
    let spend_cred = StakeCredential::from_keyhash(&spend.to_raw_key().hash());
    let stake_cred = StakeCredential::from_keyhash(&stake.to_raw_key().hash());
    {
        // The quoted marginal fee for adding this enterprise-address input
        // under the fee policy above is expected to be exactly 69500.
        assert_eq!(tx_builder.fee_for_input(
            &EnterpriseAddress::new(
                NetworkInfo::testnet().network_id(),
                &spend_cred
            ).to_address(),
            &TransactionInput::new(&genesis_id(), 0),
            &to_bignum(1_000_000)
        ).unwrap().to_str(), "69500");
        // Input 1: enterprise address (payment credential only).
        tx_builder.add_input(
            &EnterpriseAddress::new(
                NetworkInfo::testnet().network_id(),
                &spend_cred
            ).to_address(),
            &TransactionInput::new(&genesis_id(), 0),
            &to_bignum(1_000_000)
        );
    }
    // Input 2: base address (payment + staking credentials).
    tx_builder.add_input(
        &BaseAddress::new(
            NetworkInfo::testnet().network_id(),
            &spend_cred,
            &stake_cred
        ).to_address(),
        &TransactionInput::new(&genesis_id(), 0),
        &to_bignum(1_000_000)
    );
    // Input 3: pointer address (payment credential + zeroed chain pointer).
    tx_builder.add_input(
        &PointerAddress::new(
            NetworkInfo::testnet().network_id(),
            &spend_cred,
            &Pointer::new(
                0,
                0,
                0
            )
        ).to_address(),
        &TransactionInput::new(&genesis_id(), 0),
        &to_bignum(1_000_000)
    );
    // Input 4: legacy Byron (Icarus-style) address.
    tx_builder.add_input(
        &ByronAddress::icarus_from_key(
            &spend, NetworkInfo::testnet().protocol_magic()
        ).to_address(),
        &TransactionInput::new(&genesis_id(), 0),
        &to_bignum(1_000_000)
    );
    // All four address flavors must be accepted as inputs.
    assert_eq!(tx_builder.inputs.len(), 4);
}
}
| |
screensharers.rs
|
use futures::future::join_all;
use serenity::{
async_trait,
client::Context,
model::{id::UserId, interactions::application_command::ApplicationCommandInteraction},
};
use crate::consts::{self, CONFIG};
use super::Command;
pub struct Screensharers;
#[async_trait]
impl Command for Screensharers {
    /// Command key; this is also the slash-command name registered with
    /// Discord in `register`.
    fn name(&self) -> String {
        String::from("screensharers")
    }
    /// Registers the `/screensharers` application command on the guild
    /// configured in the global `CONFIG`.
    ///
    /// Propagates any serenity HTTP error raised while creating the command.
    async fn register(&self, ctx: &Context) -> crate::Result<()> {
        CONFIG
            .guild
            .create_application_command(&ctx.http, |cmd| {
                cmd.name(self.name()).description(
                    "Lists the screenshare team and how much they've unfrozen someone.",
                )
            })
            .await?;
        Ok(())
    }
async fn run(
&self,
ctx: &Context,
command: &ApplicationCommandInteraction,
) -> crate::Result<()> {
let screensharers = join_all(consts::DATABASE.get_screensharers().into_iter().map(
|x| async move {
let user = UserId(x.id).to_user(&ctx.http).await;
if let Ok(user) = user {
Some((user.tag(), format!("{} freezes", x.freezes), false))
} else {
None
}
},
))
.await
.into_iter()
.flatten();
command.create_interaction_response(&ctx.http, |resp| {
resp.interaction_response_data(|data| {
data.create_embed(|embed| {
embed.title("Unfreeze leaderboard")
.description("List of every screenshare member that has unfrozen someone before and how many times they did it.")
.fields(screensharers)
})
|
}
fn new() -> Box<Self> {
Box::new(Self)
}
}
|
})
}).await?;
Ok(())
|
int_macros.rs
|
// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![macro_escape]
macro_rules! int_module (($T:ty, $T_i:ident) => (
#[cfg(test)]
mod tests {
use core::$T_i::*;
use core::int;
use num;
use core::num::CheckedDiv;
#[test]
fn test_overflows() {
assert!(MAX > 0);
assert!(MIN <= 0);
assert!(MIN + MAX + 1 == 0);
}
#[test]
fn test_num() {
num::test_num(10 as $T, 2 as $T);
}
#[test]
pub fn test_abs() {
assert!((1 as $T).abs() == 1 as $T);
assert!((0 as $T).abs() == 0 as $T);
assert!((-1 as $T).abs() == 1 as $T);
}
#[test]
fn test_abs_sub() {
assert!((-1 as $T).abs_sub(&(1 as $T)) == 0 as $T);
assert!((1 as $T).abs_sub(&(1 as $T)) == 0 as $T);
assert!((1 as $T).abs_sub(&(0 as $T)) == 1 as $T);
assert!((1 as $T).abs_sub(&(-1 as $T)) == 2 as $T);
}
#[test]
fn test_signum() {
assert!((1 as $T).signum() == 1 as $T);
assert!((0 as $T).signum() == 0 as $T);
assert!((-0 as $T).signum() == 0 as $T);
assert!((-1 as $T).signum() == -1 as $T);
}
#[test]
fn test_is_positive() {
assert!((1 as $T).is_positive());
assert!(!(0 as $T).is_positive());
assert!(!(-0 as $T).is_positive());
assert!(!(-1 as $T).is_positive());
}
#[test]
fn test_is_negative() {
assert!(!(1 as $T).is_negative());
assert!(!(0 as $T).is_negative());
assert!(!(-0 as $T).is_negative());
assert!((-1 as $T).is_negative());
}
#[test]
fn test_bitwise_operators() {
assert!(0b1110 as $T == (0b1100 as $T).bitor(&(0b1010 as $T)));
assert!(0b1000 as $T == (0b1100 as $T).bitand(&(0b1010 as $T)));
assert!(0b0110 as $T == (0b1100 as $T).bitxor(&(0b1010 as $T)));
assert!(0b1110 as $T == (0b0111 as $T).shl(&1));
assert!(0b0111 as $T == (0b1110 as $T).shr(&1));
assert!(-(0b11 as $T) - (1 as $T) == (0b11 as $T).not());
}
static A: $T = 0b0101100;
static B: $T = 0b0100001;
static C: $T = 0b1111001;
static _0: $T = 0;
static _1: $T = !0;
#[test]
fn test_count_ones() {
assert!(A.count_ones() == 3);
assert!(B.count_ones() == 2);
assert!(C.count_ones() == 5);
}
#[test]
fn test_count_zeros() {
assert!(A.count_zeros() == BITS - 3);
assert!(B.count_zeros() == BITS - 2);
|
#[test]
fn test_rotate() {
assert_eq!(A.rotate_left(6).rotate_right(2).rotate_right(4), A);
assert_eq!(B.rotate_left(3).rotate_left(2).rotate_right(5), B);
assert_eq!(C.rotate_left(6).rotate_right(2).rotate_right(4), C);
// Rotating these should make no difference
//
// We test using 124 bits because to ensure that overlong bit shifts do
// not cause undefined behaviour. See #10183.
assert_eq!(_0.rotate_left(124), _0);
assert_eq!(_1.rotate_left(124), _1);
assert_eq!(_0.rotate_right(124), _0);
assert_eq!(_1.rotate_right(124), _1);
// Rotating by 0 should have no effect
assert_eq!(A.rotate_left(0), A);
assert_eq!(B.rotate_left(0), B);
assert_eq!(C.rotate_left(0), C);
// Rotating by a multiple of word size should also have no effect
assert_eq!(A.rotate_left(64), A);
assert_eq!(B.rotate_left(64), B);
assert_eq!(C.rotate_left(64), C);
}
#[test]
fn test_swap_bytes() {
assert_eq!(A.swap_bytes().swap_bytes(), A);
assert_eq!(B.swap_bytes().swap_bytes(), B);
assert_eq!(C.swap_bytes().swap_bytes(), C);
// Swapping these should make no difference
assert_eq!(_0.swap_bytes(), _0);
assert_eq!(_1.swap_bytes(), _1);
}
#[test]
fn test_le() {
assert_eq!(Int::from_le(A.to_le()), A);
assert_eq!(Int::from_le(B.to_le()), B);
assert_eq!(Int::from_le(C.to_le()), C);
assert_eq!(Int::from_le(_0), _0);
assert_eq!(Int::from_le(_1), _1);
assert_eq!(_0.to_le(), _0);
assert_eq!(_1.to_le(), _1);
}
#[test]
fn test_be() {
assert_eq!(Int::from_be(A.to_be()), A);
assert_eq!(Int::from_be(B.to_be()), B);
assert_eq!(Int::from_be(C.to_be()), C);
assert_eq!(Int::from_be(_0), _0);
assert_eq!(Int::from_be(_1), _1);
assert_eq!(_0.to_be(), _0);
assert_eq!(_1.to_be(), _1);
}
#[test]
fn test_signed_checked_div() {
assert!(10i.checked_div(&2) == Some(5));
assert!(5i.checked_div(&0) == None);
assert!(int::MIN.checked_div(&-1) == None);
}
}
))
|
assert!(C.count_zeros() == BITS - 5);
}
|
test_cli_integration.py
|
from contextlib import contextmanager
import json
import os
import logging
import sys
import subprocess
from typing import Optional, Tuple
import pytest
logger = logging.getLogger(__name__)
@contextmanager
def set_env_var(key: str, val: Optional[str] = None):
    """Temporarily set (or unset, when ``val`` is None) ``os.environ[key]``.

    The previous value is restored on exit. The restore runs in a
    ``finally`` block so that an exception raised inside the ``with`` body
    can no longer leak the temporary value into subsequent tests (the
    original implementation skipped the restore on exceptions).
    """
    old_val = os.environ.get(key, None)
    if val is not None:
        os.environ[key] = val
    elif key in os.environ:
        del os.environ[key]
    try:
        yield
    finally:
        # Restore the pre-existing state: drop the temporary value, then
        # put back the old one if there was any.
        if key in os.environ:
            del os.environ[key]
        if old_val is not None:
            os.environ[key] = old_val
@pytest.fixture
def ray_start_stop():
    # Boot a local Ray head node for one test and point the job CLI at its
    # dashboard via RAY_ADDRESS; the cluster is always torn down afterwards,
    # even if the test fails.
    subprocess.check_output(["ray", "start", "--head"])
    try:
        with set_env_var("RAY_ADDRESS", "http://127.0.0.1:8265"):
            yield
    finally:
        subprocess.check_output(["ray", "stop", "--force"])
@contextmanager
def ray_cluster_manager():
    """
    Used not as fixture in case we want to set RAY_ADDRESS first.

    Starts a local Ray head node and always stops it on exit, even if the
    body raises. Unlike ray_start_stop, it does not touch RAY_ADDRESS.
    """
    subprocess.check_output(["ray", "start", "--head"])
    try:
        yield
    finally:
        subprocess.check_output(["ray", "stop", "--force"])
def _run_cmd(cmd: str, should_fail=False) -> Tuple[str, str]:
"""Convenience wrapper for subprocess.run.
We always run with shell=True to simulate the CLI.
Asserts that the process succeeds/fails depending on should_fail.
Returns (stdout, stderr).
"""
print(f"Running command: '{cmd}'")
p: subprocess.CompletedProcess = subprocess.run(
cmd, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
if p.returncode == 0:
print("Command succeeded.")
if should_fail:
raise RuntimeError(
f"Expected command to fail, but got exit code: {p.returncode}."
)
else:
print(f"Command failed with exit code: {p.returncode}.")
if not should_fail:
raise RuntimeError(
f"Expected command to succeed, but got exit code: {p.returncode}."
)
return p.stdout.decode("utf-8"), p.stderr.decode("utf-8")
class TestJobSubmitHook:
"""Tests the RAY_JOB_SUBMIT_HOOK env var."""
def
|
(self, ray_start_stop):
with set_env_var("RAY_JOB_SUBMIT_HOOK", "ray._private.test_utils.job_hook"):
stdout, _ = _run_cmd("ray job submit -- echo hello")
assert "hook intercepted: echo hello" in stdout
class TestRayAddress:
    """
    Integration version of job CLI test that ensures interaction with the
    following components are working as expected:

    1) Ray client: use of RAY_ADDRESS and ray.init() in job_head.py
    2) Ray dashboard: `ray start --head`
    """
    def test_empty_ray_address(self, ray_start_stop):
        # With RAY_ADDRESS unset, submission should still reach the local
        # cluster started by the fixture.
        with set_env_var("RAY_ADDRESS", None):
            stdout, _ = _run_cmd("ray job submit -- echo hello")
            assert "hello" in stdout
            assert "succeeded" in stdout

    @pytest.mark.parametrize(
        "ray_client_address", ["127.0.0.1:8265", "ray://127.0.0.1:8265"]
    )
    def test_ray_client_address(self, ray_start_stop, ray_client_address: str):
        # Ray-client style addresses (no http:// scheme) must be rejected by
        # the job CLI.
        with set_env_var("RAY_ADDRESS", ray_client_address):
            _run_cmd("ray job submit -- echo hello", should_fail=True)

    def test_valid_http_ray_address(self, ray_start_stop):
        # Uses the http://127.0.0.1:8265 address exported by ray_start_stop.
        stdout, _ = _run_cmd("ray job submit -- echo hello")
        assert "hello" in stdout
        assert "succeeded" in stdout
class TestJobSubmit:
    """`ray job submit` log-tailing behavior with and without --no-wait."""
    def test_basic_submit(self, ray_start_stop):
        """Should tail logs and wait for process to exit."""
        cmd = "sleep 1 && echo hello && sleep 1 && echo hello"
        stdout, _ = _run_cmd(f"ray job submit -- bash -c '{cmd}'")
        # Both echoes appear, proving logs were tailed to completion.
        assert "hello\nhello" in stdout
        assert "succeeded" in stdout

    def test_submit_no_wait(self, ray_start_stop):
        """Should exit immediately w/o printing logs."""
        cmd = "echo hello && sleep 1000"
        stdout, _ = _run_cmd(f"ray job submit --no-wait -- bash -c '{cmd}'")
        assert "hello" not in stdout
        assert "Tailing logs until the job exits" not in stdout
class TestJobStop:
    """`ray job stop` behavior with and without --no-wait."""
    def test_basic_stop(self, ray_start_stop):
        """Should wait until the job is stopped."""
        cmd = "sleep 1000"
        job_id = "test_basic_stop"
        _run_cmd(f"ray job submit --no-wait --job-id={job_id} -- {cmd}")

        stdout, _ = _run_cmd(f"ray job stop {job_id}")
        assert "Waiting for job" in stdout
        assert f"Job '{job_id}' was stopped" in stdout

    def test_stop_no_wait(self, ray_start_stop):
        """Should not wait until the job is stopped."""
        cmd = "echo hello && sleep 1000"
        job_id = "test_stop_no_wait"
        _run_cmd(f"ray job submit --no-wait --job-id={job_id} -- bash -c '{cmd}'")
        stdout, _ = _run_cmd(f"ray job stop --no-wait {job_id}")
        assert "Waiting for job" not in stdout
        assert f"Job '{job_id}' was stopped" not in stdout
class TestJobList:
    """`ray job list` output formatting."""
    def test_empty(self, ray_start_stop):
        # No jobs submitted yet: the listing is an empty dict.
        stdout, _ = _run_cmd("ray job list")
        assert "{}" in stdout

    def test_list(self, ray_start_stop):
        _run_cmd("ray job submit --job-id='hello_id' -- echo hello")

        runtime_env = {"env_vars": {"TEST": "123"}}
        _run_cmd(
            "ray job submit --job-id='hi_id' "
            f"--runtime-env-json='{json.dumps(runtime_env)}' -- echo hi"
        )
        stdout, _ = _run_cmd("ray job list")
        # Listing shows JobInfo entries including runtime_env contents and
        # both submitted job ids.
        assert "JobInfo" in stdout
        assert "123" in stdout
        assert "hello_id" in stdout
        assert "hi_id" in stdout
def test_quote_escaping(ray_start_stop):
    # Single quotes inside a double-quoted argument must survive the CLI's
    # re-quoting of the entrypoint.
    cmd = "echo \"hello 'world'\""
    job_id = "test_quote_escaping"
    stdout, _ = _run_cmd(
        f"ray job submit --job-id={job_id} -- {cmd}",
    )
    assert "hello 'world'" in stdout
if __name__ == "__main__":
sys.exit(pytest.main(["-v", __file__]))
|
test_hook
|
Polygon.d.ts
|
import LngLat from '../common/LngLat';
import IPolygonOption from '../interface/IPolygonOption';
import Overlayerbase from './Overlayerbase';
export default class Polygon extends Overlayerbase {
paths: LngLat[];
|
strokeWeight: number;
url: string;
picwidth: number;
picheight: number;
strokestyle: string;
constructor(ploption: IPolygonOption);
}
|
symboltype: string;
fillColor: string;
strokeColor: string;
style: string;
|
storage.py
|
#!/usr/bin/env python
from io import BufferedWriter, FileIO
import os
from ajaxuploader.backends.base import AbstractUploadBackend
from django.conf import settings
from panda.api import DataUploadResource, RelatedUploadResource, UserResource
from panda.models import Dataset, DataUpload, RelatedUpload, UserProxy
class PANDAAbstractUploadBackend(AbstractUploadBackend):
    """
    Customized backend to handle AJAX uploads.

    Streams chunks to a file under ``settings.MEDIA_ROOT``; subclasses
    override ``upload_complete`` to create the matching model instance.
    """
    def update_filename(self, request, filename):
        """
        Verify that the filename is unique, if it isn't append and iterate
        a counter until it is.

        The client-supplied name is kept on ``self._original_filename`` so
        subclasses can record it alongside the deduplicated name.

        NOTE(review): this check-then-write is racy if two uploads pick the
        same name concurrently -- confirm uploads are serialized upstream.
        """
        self._original_filename = filename
        filename = self._original_filename
        root, ext = os.path.splitext(self._original_filename)
        path = os.path.join(settings.MEDIA_ROOT, filename)

        i = 1

        while os.path.exists(path):
            filename = '%s%i%s' % (root, i, ext)
            path = os.path.join(settings.MEDIA_ROOT, filename)
            i += 1

        return filename

    def setup(self, filename):
        """
        Open the destination file for writing.
        """
        self._path = os.path.join(settings.MEDIA_ROOT, filename)

        try:
            os.makedirs(os.path.realpath(os.path.dirname(self._path)))
        except OSError:
            # Creating the directory is best-effort: it usually already
            # exists. Previously this was a bare ``except:``, which would
            # also have swallowed KeyboardInterrupt/SystemExit.
            pass

        self._dest = BufferedWriter(FileIO(self._path, "w"))

    def upload_chunk(self, chunk):
        """
        Write a chunk of data to the destination.
        """
        self._dest.write(chunk)

    def upload_complete(self, request, filename):
        """
        Close the destination file.
        """
        self._dest.close()
class PANDADataUploadBackend(PANDAAbstractUploadBackend):
    """
    Backend specifically for DataUploads.
    """
    def upload_complete(self, request, filename):
        """
        Create a DataUpload object.

        Returns the dehydrated DataUpload bundle on success, or an
        ``{'error_message': ..., 'success': False}`` dict on any failure.
        """
        try:
            super(PANDADataUploadBackend, self).upload_complete(request, filename)
            # NOTE(review): root/ext are computed but unused here.
            root, ext = os.path.splitext(filename)
            path = os.path.join(settings.MEDIA_ROOT, filename)
            size = os.path.getsize(path)
            # The dataset association is optional for data uploads.
            if 'dataset_slug' in request.REQUEST:
                dataset = Dataset.objects.get(slug=request.REQUEST['dataset_slug'])
            else:
                dataset = None
            # Fall back to utf-8 when the param is missing or empty.
            encoding = request.REQUEST.get('encoding', 'utf-8')
            if not encoding:
                encoding = 'utf-8'
            # Because users may have authenticated via headers the request.user may
            # not be a full User instance. To be sure, we fetch one.
            creator = UserProxy.objects.get(id=request.user.id)
            upload = DataUpload.objects.create(
                filename=filename,
                original_filename=self._original_filename,
                size=size,
                creator=creator,
                dataset=dataset,
                encoding=encoding)
            if dataset:
                dataset.update_full_text()
            resource = DataUploadResource()
            bundle = resource.build_bundle(obj=upload, request=request)
            data = resource.full_dehydrate(bundle).data
            # django-ajax-upoader does not use the Tastypie serializer
            # so we must 'manually' serialize the embedded resource bundle
            resource = UserResource()
            bundle = data['creator']
            user_data = resource.full_dehydrate(bundle).data
            data['creator'] = user_data
        except Exception, e:
            # This global error handler is a kludge to ensure IE8 can properly handle the responses
            # (Python 2 syntax; e.message is also Python-2-only.)
            return { 'error_message': e.message, 'success': False }
        return data
class PANDARelatedUploadBackend(PANDAAbstractUploadBackend):
|
"""
Backend specifically for RelatedUploads.
"""
def upload_complete(self, request, filename):
"""
Create a RelatedUpload object.
"""
try:
super(PANDARelatedUploadBackend, self).upload_complete(request, filename)
root, ext = os.path.splitext(filename)
path = os.path.join(settings.MEDIA_ROOT, filename)
size = os.path.getsize(path)
dataset = Dataset.objects.get(slug=request.REQUEST['dataset_slug'])
# Because users may have authenticated via headers the request.user may
# not be a full User instance. To be sure, we fetch one.
creator = UserProxy.objects.get(id=request.user.id)
upload = RelatedUpload.objects.create(
filename=filename,
original_filename=self._original_filename,
size=size,
creator=creator,
dataset=dataset)
dataset.update_full_text()
resource = RelatedUploadResource()
bundle = resource.build_bundle(obj=upload, request=request)
data = resource.full_dehydrate(bundle).data
# django-ajax-upoader does not use the Tastypie serializer
# so we must 'manually' serialize the embedded resource bundle
resource = UserResource()
bundle = data['creator']
user_data = resource.full_dehydrate(bundle).data
data['creator'] = user_data
except Exception, e:
# This global error handler is a kludge to ensure IE8 can properly handle the responses
return { 'error_message': e.message, 'success': False }
return data
|
|
account.rs
|
use std::convert::{TryFrom, TryInto};
use serde::{Deserialize, Serialize};
use thiserror::Error;
use url::Url;
use ratpack::prelude::*;
use super::{uri_to_url, HandlerState, ServiceState};
use crate::{
errors::{acme::JWSError, ACMEValidationError},
models::{
account::{new_accounts, JWK},
Record,
},
};
/// RFC8555 7.1.2
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Account {
status: AccountStatus,
contact: Option<Vec<AccountUrl>>,
terms_of_service_agreed: Option<bool>,
external_account_binding: Option<ExternalBinding>,
orders: Option<Url>,
}
impl Default for Account {
fn default() -> Self {
Self {
status: AccountStatus::Revoked,
contact: None,
terms_of_service_agreed: None,
external_account_binding: None,
orders: None,
}
}
}
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub enum AccountStatus {
Valid,
Deactivated,
Revoked,
}
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct AccountUrl(Url);
#[derive(Debug, Clone, Error)]
pub enum AccountUrlError {
#[error("invalid url scheme for account")]
InvalidScheme,
#[error("unknown error: {0}")]
Other(String),
}
impl TryFrom<&str> for AccountUrl {
    type Error = AccountUrlError;

    /// Parse the string as a URL, then apply the scheme validation
    /// implemented by `TryFrom<Url>`.
    fn try_from(s: &str) -> Result<Self, Self::Error> {
        Url::parse(s)
            .map_err(|e| AccountUrlError::Other(e.to_string()))
            .and_then(TryInto::try_into)
    }
}
impl TryFrom<Url> for AccountUrl {
    type Error = AccountUrlError;

    /// Accepts only `mailto:` URLs as account contacts.
    fn try_from(url: Url) -> Result<Self, Self::Error> {
        // RFC8555 7.3
        if url.scheme() != "mailto" {
            return Err(AccountUrlError::InvalidScheme);
        }
        Ok(Self(url))
    }
}
impl Into<String> for AccountUrl {
fn into(self) -> String
|
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ExternalBinding {}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct NewAccount {
pub contact: Option<Vec<AccountUrl>>,
pub terms_of_service_agreed: Option<bool>,
pub only_return_existing: Option<bool>,
pub external_account_binding: Option<ExternalBinding>,
}
impl NewAccount {
pub fn contacts(&self) -> Option<Vec<AccountUrl>> {
self.contact.clone()
}
pub fn to_account(&self) -> Account {
Account {
status: AccountStatus::Valid,
contact: self.contact.clone(),
terms_of_service_agreed: self.terms_of_service_agreed,
external_account_binding: None,
orders: None, // FIXME needs to be populated with a slug for user orders
}
}
}
impl Default for NewAccount {
fn default() -> Self {
Self {
contact: None,
terms_of_service_agreed: None,
only_return_existing: None,
external_account_binding: None,
}
}
}
/// ACME new-account handler (see RFC 8555 7.3).
///
/// With a `kid` and `onlyReturnExisting` set: look up the stored JWK and
/// return 200 with its record, or `AccountDoesNotExist`. Otherwise persist
/// the request's JWK and account and respond 201 Created. Requests that
/// carry no JWS in `state` get 404.
pub(crate) async fn new_account(
    req: Request<Body>,
    _resp: Option<Response<Body>>,
    _params: Params,
    app: App<ServiceState, HandlerState>,
    state: HandlerState,
) -> HTTPResult<HandlerState> {
    let appstate_opt = app.state().await.clone().unwrap();
    let appstate = appstate_opt.lock().await;
    match state.clone().jws {
        Some(mut jws) => {
            let newacct = jws.clone().payload::<NewAccount>()?;
            let uri = req.uri().clone();
            let url = uri_to_url(appstate.clone().baseurl, uri).await?;
            let protected = jws.protected()?;
            // "only return existing" path: never create, just look up by kid.
            if protected.kid().is_some() && newacct.only_return_existing.unwrap_or_default() {
                let rec =
                    match JWK::find_by_kid(protected.kid().unwrap(), appstate.db.clone()).await {
                        Ok(rec) => rec,
                        Err(_) => return Err(ACMEValidationError::AccountDoesNotExist.to_status()),
                    };
                // 200 OK with a Location pointing at the existing account URL.
                let resp = state
                    .decorate_response(url.clone(), Response::builder())?
                    .status(StatusCode::OK)
                    .header(
                        "Location",
                        url.clone()
                            .join(&format!("./account/{}", &rec.clone().nonce_key()))?
                            .to_string(),
                    )
                    .body(Body::from(serde_json::to_string(&rec)?))
                    .unwrap();
                return Ok((req, Some(resp), state));
            } else {
                // Creation path: store the JWK first, then the account row
                // derived from it.
                let mut jwk = jws.into_db_jwk()?;
                jwk.create(appstate.db.clone()).await?;
                let mut acct = new_accounts(newacct.clone(), jwk.clone(), appstate.db.clone())?;
                acct.create(appstate.db.clone()).await?;
                let resp = state
                    .decorate_response(url.clone(), Response::builder())?
                    .status(StatusCode::CREATED)
                    .header(
                        "Location",
                        url.join(&format!("./account/{}", &jwk.nonce_key()))?
                            .to_string(),
                    )
                    .body(Body::from(serde_json::to_string(&newacct.to_account())?))
                    .unwrap();
                return Ok((req, Some(resp), state));
            }
        }
        // No JWS attached to this request: treat as not found.
        None => {
            return Err(ratpack::Error::StatusCode(
                StatusCode::NOT_FOUND,
                String::default(),
            ))
        }
    }
}
/// Handles POSTs to an existing account URL.
///
/// Only deactivation is implemented so far: when the posted Account has
/// status `deactivated`, the JWS is re-verified against the key stored for
/// its `kid`, that key record is deleted, and the deleted record is
/// returned with 200. All other payloads fall through to `InvalidRequest`.
pub(crate) async fn post_account(
    req: Request<Body>,
    _resp: Option<Response<Body>>,
    _params: Params,
    app: App<ServiceState, HandlerState>,
    state: HandlerState,
) -> HTTPResult<HandlerState> {
    let appstate_opt = app.state().await.clone().unwrap();
    let appstate = appstate_opt.lock().await;
    // FIXME this still needs code to update contact lists; see 7.3.2.
    match state.clone().jws {
        Some(mut jws) => {
            let acct: Account = jws.payload()?;
            match acct.status {
                AccountStatus::Deactivated => {
                    // Deactivation requires an account URL (kid), not an
                    // embedded public key.
                    let aph = jws.protected()?;
                    let kid = aph.kid();
                    if kid.is_none() {
                        return Err(JWSError::InvalidPublicKey.to_status());
                    }
                    let kid = kid.unwrap();
                    let target = JWK::find_by_kid(kid, appstate.db.clone()).await?;
                    // Re-verify the JWS signature against the *stored* key
                    // before honoring the deactivation.
                    let target_jwk: crate::acme::jose::JWK = target.clone().try_into()?;
                    match target_jwk.try_into() {
                        Ok(key) => match jws.verify(key) {
                            Ok(b) => {
                                if !b {
                                    return Err(ACMEValidationError::InvalidSignature.to_status());
                                }
                            }
                            Err(e) => return Err(e.into()),
                        },
                        Err(e) => return Err(e.into()),
                    }
                    target.delete(appstate.db.clone()).await?;
                    let url = uri_to_url(appstate.clone().baseurl, req.uri().clone()).await?;
                    return Ok((
                        req,
                        Some(
                            state
                                .decorate_response(url.clone(), Response::builder())?
                                .status(StatusCode::OK)
                                .body(Body::from(serde_json::to_string(&target)?))
                                .unwrap(),
                        ),
                        state,
                    ));
                }
                // Other statuses are not handled yet; fall through below.
                _ => {}
            }
        }
        None => {
            return Err(ratpack::Error::StatusCode(
                StatusCode::NOT_FOUND,
                String::default(),
            ))
        }
    }
    return Err(ACMEValidationError::InvalidRequest.to_status());
}
// NOTE(review): added #[cfg(test)] so this module -- and its test-only
// dependencies (`spectral`, `crate::test::TestService`) -- is compiled
// only for `cargo test` builds instead of being built into the binary.
#[cfg(test)]
mod tests {
    #[tokio::test(flavor = "multi_thread")]
    async fn new_account_failures() {
        use crate::test::TestService;
        use http::StatusCode;
        use hyper::Body;
        use spectral::prelude::*;
        let srv = TestService::new("new_account_failures").await;
        // GET /account is expected to be rejected outright.
        let res = srv.clone().app.get("/account").await;
        assert_that!(res.status()).is_equal_to(StatusCode::METHOD_NOT_ALLOWED);
        // POSTs without a JWS body are forbidden.
        let res = srv.clone().app.post("/account", Body::default()).await;
        assert_that!(res.status()).is_equal_to(StatusCode::FORBIDDEN);
        let res = srv.clone().app.post("/account/herp", Body::default()).await;
        assert_that!(res.status()).is_equal_to(StatusCode::FORBIDDEN);
    }
    #[tokio::test(flavor = "multi_thread")]
    async fn account_register_with_certbot() {
        use crate::test::TestService;
        use spectral::prelude::*;
        let srv = TestService::new("account_register_with_certbot").await;
        // Register/unregister repeatedly to check that account creation and
        // deactivation round-trip cleanly.
        for _ in 0..10 {
            let res = srv
                .clone()
                .certbot(
                    None,
                    "register -m 'erik@hollensbe.org' --agree-tos".to_string(),
                )
                .await;
            assert_that!(res).is_ok();
            let dir = res.unwrap();
            let res = srv
                .clone()
                .certbot(
                    Some(dir.clone()),
                    "unregister -m 'erik@hollensbe.org'".to_string(),
                )
                .await;
            assert_that!(res).is_ok();
        }
    }
}
|
{
self.0.to_string()
}
|
feature_test.go
|
package wave_test
import (
"testing"
"github.com/aisola/wave"
)
// TestNewFeatureGroups checks that the NewFeatureGroups constructor fills
// Name and Groups and leaves Users nil.
func TestNewFeatureGroups(t *testing.T) {
	expected := &wave.Feature{Name: "test", Groups: []string{"test"}, Users: nil}
	got := wave.NewFeatureGroups("test", []string{"test"})
	if got.Name != expected.Name {
		t.Errorf("Feature names do not match")
	}
	if got.Groups[0] != expected.Groups[0] {
		t.Errorf("Feature groups do not match")
	}
	if got.Users != nil {
		t.Errorf("Feature users not nil")
	}
}
func
|
(t *testing.T) {
featureA := &wave.Feature{Name: "test", Groups: nil, Users: []string{"test"}}
featureB := wave.NewFeatureUsers("test", []string{"test"})
if featureA.Name != featureB.Name {
t.Errorf("Feature names do not match")
}
if featureA.Users[0] != featureB.Users[0] {
t.Errorf("Feature users do not match")
}
if featureB.Groups != nil {
t.Errorf("Feature groups not nil")
}
}
// TestFeatureCan exercises Feature.Can across nil / NONE / ALL / named
// group lists and an explicit user list.
func TestFeatureCan(t *testing.T) {
	user1 := newTestUser(make([]string, 0)) // user with an empty group list
	user2 := newTestUser([]string{"test"})  // user in group "test"
	featureUsersNil := &wave.Feature{Name: "feature_users_nil", Groups: nil, Users: nil}
	featureUsers := &wave.Feature{Name: "feature_users", Groups: nil, Users: []string{user1.UUID}}
	featureGroupsNil := &wave.Feature{Name: "feature_groups_nil", Groups: nil, Users: nil}
	featureGroupsNone := &wave.Feature{Name: "feature_groups_none", Groups: wave.NONE, Users: nil}
	featureGroupsAll := &wave.Feature{Name: "feature_groups_all", Groups: wave.ALL, Users: nil}
	featureGroupsTest := &wave.Feature{Name: "feature_groups_test", Groups: []string{"test"}, Users: nil}
	// nil groups and nil users: nobody may access.
	if featureUsersNil.Can(user1) || featureUsersNil.Can(user2) {
		t.Errorf("Users CAN access feature_users_nil")
	}
	// Explicit user list: only the listed UUID may access.
	if !featureUsers.Can(user1) {
		t.Errorf("User1 CANNOT access feature_users")
	}
	if featureUsers.Can(user2) {
		t.Errorf("User2 CAN access feature_users")
	}
	if featureGroupsNil.Can(user1) || featureGroupsNil.Can(user2) {
		t.Errorf("Users CAN access feature_groups_nil")
	}
	// NONE denies everyone; ALL admits everyone.
	if featureGroupsNone.Can(user1) || featureGroupsNone.Can(user2) {
		t.Errorf("Users CAN access feature_groups_none")
	}
	if !featureGroupsAll.Can(user1) || !featureGroupsAll.Can(user2) {
		t.Errorf("Users CANNOT access feature_groups_all")
	}
	// Named group: only members of "test" may access.
	if !featureGroupsTest.Can(user2) {
		t.Errorf("User2 CANNOT access feature_groups_test")
	}
	if featureGroupsTest.Can(user1) {
		t.Errorf("User1 CAN access feature_groups_test")
	}
}
|
TestNewFeatureUsers
|
session.ts
|
/*
Copyright 2021 Eduworks Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
import { XApiStatement, ActivityObject } from "./xapiStatement";
import { PREFIX_PEBL_EXTENSION, PREFIX_PEBL, PREFIX_PEBL_THREAD } from "../utils/constants";
export class
|
extends XApiStatement {
readonly activityId: string;
readonly book: string;
readonly activityName?: string;
readonly activityDescription?: string;
readonly type: string;
readonly currentTeam?: string;
readonly currentClass?: string;
    /**
     * Builds a session statement from a raw xAPI statement object.
     *
     * Derives `book` from the activity object's id by stripping the PeBL
     * (or PeBL thread) URI prefix, then lets an explicit `bookId`
     * extension override it.
     */
    constructor(raw: { [key: string]: any }) {
        super(raw);
        let object = this.object as ActivityObject;
        // Strip the PeBL / PeBL-thread prefix from the id to get the bare book id.
        this.book = object.id;
        if (this.book.indexOf(PREFIX_PEBL) != -1)
            this.book = this.book.substring(this.book.indexOf(PREFIX_PEBL) + PREFIX_PEBL.length);
        else if (this.book.indexOf(PREFIX_PEBL_THREAD) != -1)
            this.book = this.book.substring(this.book.indexOf(PREFIX_PEBL_THREAD) + PREFIX_PEBL_THREAD.length);
        this.activityId = object.id;
        if (object.definition) {
            this.activityName = object.definition.name && object.definition.name["en-US"];
            this.activityDescription = object.definition.description && object.definition.description["en-US"];
            let extensions = object.definition.extensions;
            if (extensions) {
                // An explicit bookId extension overrides the id-derived book.
                if (extensions[PREFIX_PEBL_EXTENSION + "bookId"])
                    this.book = extensions[PREFIX_PEBL_EXTENSION + "bookId"];
                this.currentTeam = extensions[PREFIX_PEBL_EXTENSION + "currentTeam"];
                this.currentClass = extensions[PREFIX_PEBL_EXTENSION + "currentClass"];
            }
        }
        // e.g. "entered" / "logged-in" -- see is() for the accepted verbs.
        this.type = this.verb.display["en-US"];
    }
    /**
     * True when `x` is a valid XApiStatement whose en-US verb display is
     * one of the session lifecycle verbs.
     */
    static is(x: XApiStatement): boolean {
        if (!XApiStatement.is(x))
            return false;
        let verb = x.verb.display["en-US"];
        return (verb == "entered") || (verb == "exited") || (verb == "logged-in") ||
            (verb == "logged-out") || (verb == "terminated") || (verb == "initialized") || (verb == "launched");
    }
static isLogin(x: any): boolean {
let verb = x.verb.display["en-US"];
if (verb === 'logged-in')
return true;
return false;
}
}
|
Session
|
securitygroup_fitask.go
|
/*
Copyright 2019 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
|
limitations under the License.
*/
// Code generated by ""fitask" -type=SecurityGroup"; DO NOT EDIT
package awstasks
import (
"encoding/json"
"k8s.io/kops/upup/pkg/fi"
)
// SecurityGroup
// JSON marshaling boilerplate
type realSecurityGroup SecurityGroup
// UnmarshalJSON implements conversion from JSON, supporting an alternate
// specification of the object as a plain string (which is used as the Name).
// NOTE(review): this file is generated ("DO NOT EDIT"); the original comment
// said "to JSON" -- this wording fix belongs in the fitask generator too.
func (o *SecurityGroup) UnmarshalJSON(data []byte) error {
	var jsonName string
	if err := json.Unmarshal(data, &jsonName); err == nil {
		o.Name = &jsonName
		return nil
	}

	var r realSecurityGroup
	if err := json.Unmarshal(data, &r); err != nil {
		return err
	}
	*o = SecurityGroup(r)
	return nil
}
var _ fi.HasLifecycle = &SecurityGroup{}
// GetLifecycle returns the Lifecycle of the object, implementing fi.HasLifecycle
func (o *SecurityGroup) GetLifecycle() *fi.Lifecycle {
return o.Lifecycle
}
// SetLifecycle sets the Lifecycle of the object, implementing fi.SetLifecycle
func (o *SecurityGroup) SetLifecycle(lifecycle fi.Lifecycle) {
o.Lifecycle = &lifecycle
}
var _ fi.HasName = &SecurityGroup{}
// GetName returns the Name of the object, implementing fi.HasName
func (o *SecurityGroup) GetName() *string {
return o.Name
}
// SetName sets the Name of the object, implementing fi.SetName
func (o *SecurityGroup) SetName(name string) {
o.Name = &name
}
// String is the stringer function for the task, producing readable output using fi.TaskAsString
func (o *SecurityGroup) String() string {
return fi.TaskAsString(o)
}
|
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
|
api.js
|
const path = require('path'),
Config = require('../config'),
compose = require('koa-compose'),
{ normalizePrefix } = require('../util'),
jsonApi = require('reed-json-api')
module.exports = () => {
const rules = Config.serveApi.endpoints,
mws = rules.map(({ endpoint, filePath, options }) => {
path.isAbsolute(filePath) || (filePath = path.join(process.cwd(), filePath))
endpoint = normalizePrefix(endpoint)
console.info(`[Reed Mock] loading template file at ${filePath}`)
return jsonApi({ urlPrefix: endpoint, filePath, options })
})
|
return compose(mws)
}
|
|
TokenOwnerOfContainer.js
|
import TokenOwnerOf from './TokenOwnerOf'
import { drizzleConnect } from 'drizzle-react'
// May still need this even with data function to refresh component on updates for this contract.
const mapStateToProps = state => {
return {
accounts: state.accounts,
ethritage: state.contracts.ethritageToken,
|
const TokenOwnerOfContainer = drizzleConnect(TokenOwnerOf, mapStateToProps);
export default TokenOwnerOfContainer
|
drizzleStatus: state.drizzleStatus,
}
}
|
appendable_block.rs
|
use std::collections::HashSet;
use casper_execution_engine::shared::gas::Gas;
use datasize::DataSize;
use num_traits::Zero;
use thiserror::Error;
use crate::{
components::block_proposer::DeployType,
types::{chainspec::DeployConfig, DeployHash, ProtoBlock, Timestamp},
};
/// Reasons `AppendableBlock::add` can reject a deploy; the `#[error]`
/// strings double as the user-facing messages.
#[derive(Debug, Error)]
pub(crate) enum AddError {
    #[error("would exceed maximum transfer count per block")]
    TransferCount,
    #[error("would exceed maximum deploy count per block")]
    DeployCount,
    #[error("would exceed maximum gas per block")]
    GasLimit,
    #[error("would exceed maximum block size")]
    BlockSize,
    #[error("duplicate deploy")]
    Duplicate,
    #[error("payment amount could not be converted to gas")]
    InvalidGasAmount,
    #[error("deploy is not valid in this context")]
    InvalidDeploy,
}
/// A block that is still being added to. It keeps track of and enforces block limits.
#[derive(Clone, DataSize, Debug)]
pub struct AppendableBlock {
    /// Limits (counts, gas, size) enforced when adding deploys.
    deploy_config: DeployConfig,
    /// Hashes of non-transfer deploys added so far.
    deploy_hashes: Vec<DeployHash>,
    /// Hashes of transfer deploys added so far.
    transfer_hashes: Vec<DeployHash>,
    /// Union of both hash lists, used for constant-time duplicate detection.
    deploy_and_transfer_set: HashSet<DeployHash>,
    /// Timestamp the block is being built for; used when validating deploy headers.
    timestamp: Timestamp,
    /// Running gas total of non-transfer deploys.
    #[data_size(skip)]
    total_gas: Gas,
    /// Running size total of non-transfer deploys (in `DeployType::size` units).
    total_size: usize,
}
impl AppendableBlock {
/// Creates an empty `AppendableBlock`.
pub(crate) fn new(deploy_config: DeployConfig, timestamp: Timestamp) -> Self {
AppendableBlock {
deploy_config,
deploy_hashes: Vec::new(),
transfer_hashes: Vec::new(),
timestamp,
deploy_and_transfer_set: HashSet::new(),
total_gas: Gas::zero(),
total_size: 0,
}
}
/// Returns the total size of all deploys so far.
|
self.total_size
}
    /// Attempts to add a deploy to the block; returns an error if that would violate a validity
    /// condition.
    ///
    /// Checks run in a fixed order — duplicate, header validity, then the per-kind
    /// limits — so the first violated condition determines which `AddError` is
    /// returned. On error, no state is modified.
    pub(crate) fn add(
        &mut self,
        hash: DeployHash,
        deploy_type: &DeployType,
    ) -> Result<(), AddError> {
        if self.deploy_and_transfer_set.contains(&hash) {
            return Err(AddError::Duplicate);
        }
        // The deploy header must be valid for this block's config and timestamp.
        if !deploy_type
            .header()
            .is_valid(&self.deploy_config, self.timestamp)
        {
            return Err(AddError::InvalidDeploy);
        }
        if deploy_type.is_transfer() {
            if self.has_max_transfer_count() {
                return Err(AddError::TransferCount);
            }
            self.transfer_hashes.push(hash);
        } else {
            if self.has_max_deploy_count() {
                return Err(AddError::DeployCount);
            }
            // Only deploys count towards the size and gas limits.
            // `checked_add` + `filter` rejects both overflow and exceeding the
            // configured maximum block size in a single step.
            let new_total_size = self
                .total_size
                .checked_add(deploy_type.size())
                .filter(|size| *size <= self.deploy_config.max_block_size as usize)
                .ok_or(AddError::BlockSize)?;
            let payment_amount = deploy_type.payment_amount();
            let gas_price = deploy_type.header().gas_price();
            let gas =
                Gas::from_motes(payment_amount, gas_price).ok_or(AddError::InvalidGasAmount)?;
            let new_total_gas = self.total_gas.checked_add(gas).ok_or(AddError::GasLimit)?;
            if new_total_gas > Gas::from(self.deploy_config.block_gas_limit) {
                return Err(AddError::GasLimit);
            }
            // All checks passed: commit the new totals and record the deploy.
            self.deploy_hashes.push(hash);
            self.total_gas = new_total_gas;
            self.total_size = new_total_size;
        }
        self.deploy_and_transfer_set.insert(hash);
        Ok(())
    }
/// Creates a `ProtoBlock` with the `AppendableBlock`s deploys and transfers, and the given
/// random bit.
pub(crate) fn into_proto_block(self, random_bit: bool) -> ProtoBlock {
let AppendableBlock {
deploy_hashes,
transfer_hashes,
timestamp,
..
} = self;
ProtoBlock::new(deploy_hashes, transfer_hashes, timestamp, random_bit)
}
/// Returns `true` if the number of transfers is already the maximum allowed count, i.e. no
/// more transfers can be added to this block.
fn has_max_transfer_count(&self) -> bool {
self.transfer_hashes.len() == self.deploy_config.block_max_transfer_count as usize
}
/// Returns `true` if the number of deploys is already the maximum allowed count, i.e. no more
/// deploys can be added to this block.
fn has_max_deploy_count(&self) -> bool {
self.deploy_hashes.len() == self.deploy_config.block_max_deploy_count as usize
}
}
|
pub(crate) fn total_size(&self) -> usize {
|
gevent_consumer.py
|
#!/usr/bin/env python
"""
An example consumer that uses a greenlet pool to accept incoming market
messages. This example offers a high degree of concurrency.
"""
import zlib
# This can be replaced with the built-in json module, if desired.
import simplejson
import gevent
from gevent.pool import Pool
from gevent import monkey; gevent.monkey.patch_all()
import zmq.green as zmq
# The maximum number of greenlet workers in the greenlet pool. This is not one
# per processor, a decent machine can support hundreds or thousands of greenlets.
# I recommend setting this to the maximum number of connections your database
# backend can accept, if you must open one connection per save op.
MAX_NUM_POOL_WORKERS = 200
def main():
    """
    Entry point: connect to an EMDR market-data relay and hand every
    incoming message to a greenlet worker pool.
    """
    ctx = zmq.Context()
    subscriber = ctx.socket(zmq.SUB)
    # Connect to the first publicly available relay.
    subscriber.connect('tcp://relay-us-central-1.eve-emdr.com:8050')
    # An empty SUBSCRIBE prefix disables filtering (receive everything).
    subscriber.setsockopt(zmq.SUBSCRIBE, "")
    # Cap concurrency so downstream resources (e.g. DB connections) are not exhausted.
    pool = Pool(size=MAX_NUM_POOL_WORKERS)
    print("Consumer daemon started, waiting for jobs...")
    print("Worker pool size: %d" % pool.size)
    while True:
        # recv() blocks until a message arrives, and spawn() blocks while the
        # pool is full, so this loop naturally applies backpressure.
        pool.spawn(worker, subscriber.recv())
def worker(job_json):
|
if __name__ == '__main__':
main()
|
"""
For every incoming message, this worker function is called. Be extremely
careful not to do anything CPU-intensive here, or you will see blocking.
Sockets are async under gevent, so those are fair game.
"""
# Receive raw market JSON strings.
market_json = zlib.decompress(job_json)
# Un-serialize the JSON data to a Python dict.
market_data = simplejson.loads(market_json)
# Save to your choice of DB here.
print market_data
|
load_subclip_audio.py
|
import argparse
import librosa
import numpy as np
def make_subclips(audio, sr, clip_size, pad=True):
    """Split each audio signal into consecutive subclips of clip_size seconds.

    Args:
        audio: list of 1-D sample arrays.
        sr: list of sample rates (Hz), one per entry in `audio`.
        clip_size: subclip duration in seconds.
        pad: if True, zero-pad each signal so a trailing partial clip is
            kept; if False, the partial tail is dropped.

    Returns:
        A 2-D list: one list of equal-length subclips per input signal.
        (May exceed `len(audio)` in total clips; each subclip has
        `int(sr[i] * clip_size)` samples.)
    """
    clips = []
    for idx, a in enumerate(audio):
        # Size of a single clip in samples.
        step = int(sr[idx] * clip_size)
        # Optional zero-padding so signals that do not divide evenly still
        # yield a final (padded) clip.
        overhang = len(a) % step
        if overhang != 0 and pad:
            a = np.concatenate([a, np.zeros(step - overhang)])
        subclips = []
        for start in range(0, len(a), step):
            end = start + step
            if end > len(a):
                # Without padding, drop the trailing partial clip.
                break
            subclips.append(a[start:end])
        # BUG FIX: `clips` was never populated and the function returned only
        # the LAST file's `subclips`, contradicting the documented contract of
        # returning a 2-D list (one entry per input signal).
        clips.append(subclips)
    return clips
def main(audio_file, clip_size):
    """Load `audio_file` at its native sample rate and split it into
    `clip_size`-second subclips (the result is currently unused; demo only)."""
    # In python 2.7, librosa.load does not correctly handle 24-bit wav files.
    # This is resolved in python 3.x
    #
    # If the sr parameter is set to None, loads the actual sampling rate
    # from the audio file. Otherwise, will load the audio file and resample
    # it to the given sample rate. This is good if you want all audio at the
    # same sample rate, but can be slow. Default is 22050 Hz.
    audio, sr = librosa.load(audio_file, sr=None)
    # We just have one audio file here, but this should work for any number.
    # BUG FIX: the clip size was hard-coded to 1.0, silently ignoring the
    # `clip_size` argument parsed from the command line.
    audio_subclips = make_subclips([audio], [sr], clip_size)
# CLI entry point: split one audio file into fixed-size subclips.
if __name__ == '__main__':
    parser = argparse.ArgumentParser()
    parser.add_argument('--audio_file', type=str, required=True)
    # NOTE(review): a default of 0 would produce a zero-sample step if it were
    # ever passed to make_subclips -- confirm a positive default (e.g. 1.0) is
    # intended.
    parser.add_argument('--clip_size', type=float, default=0)
    args = parser.parse_args()
    main(args.audio_file, args.clip_size)
|
make_subclips
|
expressRoutePort.ts
|
// *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
import * as pulumi from "@pulumi/pulumi";
import { input as inputs, output as outputs, enums } from "../../types";
import * as utilities from "../../utilities";
/**
* ExpressRoutePort resource definition.
*/
export class
|
extends pulumi.CustomResource {
/**
* Get an existing ExpressRoutePort resource's state with the given name, ID, and optional extra
* properties used to qualify the lookup.
*
* @param name The _unique_ name of the resulting resource.
* @param id The _unique_ provider ID of the resource to lookup.
* @param opts Optional settings to control the behavior of the CustomResource.
*/
public static get(name: string, id: pulumi.Input<pulumi.ID>, opts?: pulumi.CustomResourceOptions): ExpressRoutePort {
return new ExpressRoutePort(name, undefined as any, { ...opts, id: id });
}
/** @internal */
public static readonly __pulumiType = 'azure-native:network/v20191101:ExpressRoutePort';
/**
* Returns true if the given object is an instance of ExpressRoutePort. This is designed to work even
* when multiple copies of the Pulumi SDK have been loaded into the same process.
*/
public static isInstance(obj: any): obj is ExpressRoutePort {
if (obj === undefined || obj === null) {
return false;
}
return obj['__pulumiType'] === ExpressRoutePort.__pulumiType;
}
/**
* Date of the physical port allocation to be used in Letter of Authorization.
*/
public /*out*/ readonly allocationDate!: pulumi.Output<string>;
/**
* Bandwidth of procured ports in Gbps.
*/
public readonly bandwidthInGbps!: pulumi.Output<number | undefined>;
/**
* Reference the ExpressRoute circuit(s) that are provisioned on this ExpressRoutePort resource.
*/
public /*out*/ readonly circuits!: pulumi.Output<outputs.network.v20191101.SubResourceResponse[]>;
/**
* Encapsulation method on physical ports.
*/
public readonly encapsulation!: pulumi.Output<string | undefined>;
/**
* A unique read-only string that changes whenever the resource is updated.
*/
public /*out*/ readonly etag!: pulumi.Output<string>;
/**
* Ether type of the physical port.
*/
public /*out*/ readonly etherType!: pulumi.Output<string>;
/**
* The identity of ExpressRoutePort, if configured.
*/
public readonly identity!: pulumi.Output<outputs.network.v20191101.ManagedServiceIdentityResponse | undefined>;
/**
* The set of physical links of the ExpressRoutePort resource.
*/
public readonly links!: pulumi.Output<outputs.network.v20191101.ExpressRouteLinkResponse[] | undefined>;
/**
* Resource location.
*/
public readonly location!: pulumi.Output<string | undefined>;
/**
* Maximum transmission unit of the physical port pair(s).
*/
public /*out*/ readonly mtu!: pulumi.Output<string>;
/**
* Resource name.
*/
public /*out*/ readonly name!: pulumi.Output<string>;
/**
* The name of the peering location that the ExpressRoutePort is mapped to physically.
*/
public readonly peeringLocation!: pulumi.Output<string | undefined>;
/**
* Aggregate Gbps of associated circuit bandwidths.
*/
public /*out*/ readonly provisionedBandwidthInGbps!: pulumi.Output<number>;
/**
* The provisioning state of the express route port resource.
*/
public /*out*/ readonly provisioningState!: pulumi.Output<string>;
/**
* The resource GUID property of the express route port resource.
*/
public /*out*/ readonly resourceGuid!: pulumi.Output<string>;
/**
* Resource tags.
*/
public readonly tags!: pulumi.Output<{[key: string]: string} | undefined>;
/**
* Resource type.
*/
public /*out*/ readonly type!: pulumi.Output<string>;
/**
* Create a ExpressRoutePort resource with the given unique name, arguments, and options.
*
* @param name The _unique_ name of the resource.
* @param args The arguments to use to populate this resource's properties.
* @param opts A bag of options that control this resource's behavior.
*/
constructor(name: string, args: ExpressRoutePortArgs, opts?: pulumi.CustomResourceOptions) {
let inputs: pulumi.Inputs = {};
opts = opts || {};
if (!opts.id) {
if ((!args || args.resourceGroupName === undefined) && !opts.urn) {
throw new Error("Missing required property 'resourceGroupName'");
}
inputs["bandwidthInGbps"] = args ? args.bandwidthInGbps : undefined;
inputs["encapsulation"] = args ? args.encapsulation : undefined;
inputs["expressRoutePortName"] = args ? args.expressRoutePortName : undefined;
inputs["id"] = args ? args.id : undefined;
inputs["identity"] = args ? args.identity : undefined;
inputs["links"] = args ? args.links : undefined;
inputs["location"] = args ? args.location : undefined;
inputs["peeringLocation"] = args ? args.peeringLocation : undefined;
inputs["resourceGroupName"] = args ? args.resourceGroupName : undefined;
inputs["tags"] = args ? args.tags : undefined;
inputs["allocationDate"] = undefined /*out*/;
inputs["circuits"] = undefined /*out*/;
inputs["etag"] = undefined /*out*/;
inputs["etherType"] = undefined /*out*/;
inputs["mtu"] = undefined /*out*/;
inputs["name"] = undefined /*out*/;
inputs["provisionedBandwidthInGbps"] = undefined /*out*/;
inputs["provisioningState"] = undefined /*out*/;
inputs["resourceGuid"] = undefined /*out*/;
inputs["type"] = undefined /*out*/;
} else {
inputs["allocationDate"] = undefined /*out*/;
inputs["bandwidthInGbps"] = undefined /*out*/;
inputs["circuits"] = undefined /*out*/;
inputs["encapsulation"] = undefined /*out*/;
inputs["etag"] = undefined /*out*/;
inputs["etherType"] = undefined /*out*/;
inputs["identity"] = undefined /*out*/;
inputs["links"] = undefined /*out*/;
inputs["location"] = undefined /*out*/;
inputs["mtu"] = undefined /*out*/;
inputs["name"] = undefined /*out*/;
inputs["peeringLocation"] = undefined /*out*/;
inputs["provisionedBandwidthInGbps"] = undefined /*out*/;
inputs["provisioningState"] = undefined /*out*/;
inputs["resourceGuid"] = undefined /*out*/;
inputs["tags"] = undefined /*out*/;
inputs["type"] = undefined /*out*/;
}
if (!opts.version) {
opts = pulumi.mergeOptions(opts, { version: utilities.getVersion()});
}
const aliasOpts = { aliases: [{ type: "azure-nextgen:network/v20191101:ExpressRoutePort" }, { type: "azure-native:network:ExpressRoutePort" }, { type: "azure-nextgen:network:ExpressRoutePort" }, { type: "azure-native:network/v20180801:ExpressRoutePort" }, { type: "azure-nextgen:network/v20180801:ExpressRoutePort" }, { type: "azure-native:network/v20181001:ExpressRoutePort" }, { type: "azure-nextgen:network/v20181001:ExpressRoutePort" }, { type: "azure-native:network/v20181101:ExpressRoutePort" }, { type: "azure-nextgen:network/v20181101:ExpressRoutePort" }, { type: "azure-native:network/v20181201:ExpressRoutePort" }, { type: "azure-nextgen:network/v20181201:ExpressRoutePort" }, { type: "azure-native:network/v20190201:ExpressRoutePort" }, { type: "azure-nextgen:network/v20190201:ExpressRoutePort" }, { type: "azure-native:network/v20190401:ExpressRoutePort" }, { type: "azure-nextgen:network/v20190401:ExpressRoutePort" }, { type: "azure-native:network/v20190601:ExpressRoutePort" }, { type: "azure-nextgen:network/v20190601:ExpressRoutePort" }, { type: "azure-native:network/v20190701:ExpressRoutePort" }, { type: "azure-nextgen:network/v20190701:ExpressRoutePort" }, { type: "azure-native:network/v20190801:ExpressRoutePort" }, { type: "azure-nextgen:network/v20190801:ExpressRoutePort" }, { type: "azure-native:network/v20190901:ExpressRoutePort" }, { type: "azure-nextgen:network/v20190901:ExpressRoutePort" }, { type: "azure-native:network/v20191201:ExpressRoutePort" }, { type: "azure-nextgen:network/v20191201:ExpressRoutePort" }, { type: "azure-native:network/v20200301:ExpressRoutePort" }, { type: "azure-nextgen:network/v20200301:ExpressRoutePort" }, { type: "azure-native:network/v20200401:ExpressRoutePort" }, { type: "azure-nextgen:network/v20200401:ExpressRoutePort" }, { type: "azure-native:network/v20200501:ExpressRoutePort" }, { type: "azure-nextgen:network/v20200501:ExpressRoutePort" }, { type: "azure-native:network/v20200601:ExpressRoutePort" }, { type: 
"azure-nextgen:network/v20200601:ExpressRoutePort" }, { type: "azure-native:network/v20200701:ExpressRoutePort" }, { type: "azure-nextgen:network/v20200701:ExpressRoutePort" }, { type: "azure-native:network/v20200801:ExpressRoutePort" }, { type: "azure-nextgen:network/v20200801:ExpressRoutePort" }, { type: "azure-native:network/v20201101:ExpressRoutePort" }, { type: "azure-nextgen:network/v20201101:ExpressRoutePort" }] };
opts = pulumi.mergeOptions(opts, aliasOpts);
super(ExpressRoutePort.__pulumiType, name, inputs, opts);
}
}
/**
* The set of arguments for constructing a ExpressRoutePort resource.
*/
export interface ExpressRoutePortArgs {
/**
* Bandwidth of procured ports in Gbps.
*/
readonly bandwidthInGbps?: pulumi.Input<number>;
/**
* Encapsulation method on physical ports.
*/
readonly encapsulation?: pulumi.Input<string | enums.network.v20191101.ExpressRoutePortsEncapsulation>;
/**
* The name of the ExpressRoutePort resource.
*/
readonly expressRoutePortName?: pulumi.Input<string>;
/**
* Resource ID.
*/
readonly id?: pulumi.Input<string>;
/**
* The identity of ExpressRoutePort, if configured.
*/
readonly identity?: pulumi.Input<inputs.network.v20191101.ManagedServiceIdentityArgs>;
/**
* The set of physical links of the ExpressRoutePort resource.
*/
readonly links?: pulumi.Input<pulumi.Input<inputs.network.v20191101.ExpressRouteLinkArgs>[]>;
/**
* Resource location.
*/
readonly location?: pulumi.Input<string>;
/**
* The name of the peering location that the ExpressRoutePort is mapped to physically.
*/
readonly peeringLocation?: pulumi.Input<string>;
/**
* The name of the resource group.
*/
readonly resourceGroupName: pulumi.Input<string>;
/**
* Resource tags.
*/
readonly tags?: pulumi.Input<{[key: string]: pulumi.Input<string>}>;
}
|
ExpressRoutePort
|
searchIndex.js
|
const { searchIndex } = require("../algolia");
const path = require("path");
const db = require("../db");
/**
 * Truncate `str` to roughly `len` characters by keeping the start and end and
 * replacing the middle with a ` ########### ` marker (so the result can exceed
 * `len` by the marker's length). Strings at or under `len` are returned as-is.
 * (Name kept as-is — "tuncate" — for existing callers.)
 *
 * @param {string} str
 * @param {number} len target length budget
 * @returns {string}
 */
function tuncate(str, len) {
  const overflow = str.length - len;
  // BUG FIX: was `overflow < 0`, which mangled strings exactly at the limit.
  if (overflow <= 0) {
    return str;
  }
  // Spend half of the budget on each side of the marker.
  const half = Math.floor((len - 1) / 2);
  const start = str.slice(0, half);
  // BUG FIX: `str.slice(-0)` returns the WHOLE string, so when `half` was 0
  // the "truncated" result was longer than the input. Guard explicitly.
  const end = half > 0 ? str.slice(-half) : "";
  return `${start} ########### ${end}`;
}
/**
 * Build an Algolia index record for a skin document, or return `undefined`
 * (after logging a warning) when the skin has no file paths.
 *
 * Most fields are intentionally commented out while the index schema is being
 * trimmed; only `objectID` and `nsfw` are written at the moment. The dead
 * `fileName`/`readmeText` computations that backed two of the disabled fields
 * have been removed with them.
 */
function buildSkinIndex(skin) {
  const { filePaths, nsfw } = skin;
  if (!filePaths || filePaths.length === 0) {
    // BUG FIX: this previously referenced an undeclared `md5` variable,
    // which threw a ReferenceError instead of logging the warning.
    console.warn("no file name for ", skin.md5);
    return;
  }
  return {
    objectID: skin.md5,
    nsfw,
    //md5,
    //fileName,
    // emails: skin.emails || null,
    // readmeText
    // color: skin.averageColor
    // twitterLikes: Number(skin.twitterLikes || 0)
  };
}
/**
 * Re-index all classic skins into Algolia. With `dry: true` (the default)
 * the records are built but nothing is written to the index.
 */
async function go({ dry = true }) {
  const skinDocs = await db.get("skins").find(
    { type: "CLASSIC" },
    {
      fields: {
        md5: 1,
        averageColor: 1,
        nsfw: 1,
        twitterLikes: 1,
        readmeText: 1,
        filePaths: 1,
      },
    }
  );
  // Skins without file paths yield undefined records; drop them.
  const records = skinDocs.map(buildSkinIndex).filter(Boolean);
  // .filter((index) => index.nsfw);
  db.close();
  if (dry) {
    console.log("Index turned off. Turn it on if you really mean it");
    return;
  }
  console.log("Writing index");
  const results = await new Promise((resolve, reject) => {
    searchIndex.partialUpdateObjects(records, (err, content) => {
      if (err != null) reject(err);
      resolve(content);
    });
  });
  console.log("done!", results);
}
// Dry run by default; change to `{ dry: false }` only when you really mean to write the index.
go({ dry: true }); // .then(content => console.log("Updated index for:", content.length));
|
go
|
FormatHorizontalAlignLeftIcon.d.ts
|
export default FormatHorizontalAlignLeftIcon;
|
import { MdiReactIconComponentType } from './dist/typings';
declare const FormatHorizontalAlignLeftIcon: MdiReactIconComponentType;
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.