code
stringlengths 22
1.05M
| apis
listlengths 1
3.31k
| extract_api
stringlengths 75
3.25M
|
---|---|---|
from io import TextIOWrapper
import os
from typing import List
# Default file that the writer helpers below append to.
OUTPUT = "files/output.csv"
# Sample input folder; not referenced by the functions in this chunk.
FOLDER = "modules/week2/folders"
def get_file_names(folderpath, out=OUTPUT):
    """Write the name of every entry directly inside *folderpath* to *out*, one per line."""
    entries = os.listdir(folderpath)
    with open(out, "w") as sink:
        for entry in entries:
            sink.write(f"{entry}\n")
def get_all_file_names(folderpath, out=OUTPUT):
    """Recursively write every file name under *folderpath* (including sub folders) to *out*."""
    def _descend(sink: TextIOWrapper, entries: List[str], root: str):
        # Depth-first, pre-order walk in os.listdir order; only file names
        # (not full paths) are written, matching the flat variant above.
        for entry in entries:
            full_path = f"{root}/{entry}"
            if os.path.isdir(full_path):
                _descend(sink, os.listdir(full_path), full_path)
            else:
                sink.write(entry + "\n")
    with open(out, "w") as sink:
        _descend(sink, os.listdir(folderpath), folderpath)
def print_line_one(file_names: List[str]):
    """Print the first line of each listed file (trailing newline kept, so a blank line follows each)."""
    for path in file_names:
        with open(path) as handle:
            first = handle.readline()
            print(first)
def print_emails(file_names: List[str]):
    """takes a list of filenames and print each line that contains an email (just look for @)"""
    for file_name in file_names:
        with open(file_name) as file:
            # Iterate the file object lazily instead of materializing every
            # line with readlines() -- identical output, constant memory.
            for line in file:
                if "@" in line:
                    print(line)
def write_headlines(md_files: List[str], out=OUTPUT):
    """takes a list of md files and writes all headlines (lines starting with #) to a file"""
    with open(out, "w") as output_file:
        for md_file in md_files:
            with open(md_file) as file:
                # Iterate the file object lazily instead of loading the whole
                # file with readlines() -- identical output, constant memory.
                for line in file:
                    if line.startswith("#"):
                        output_file.write(line)
|
[
"os.listdir",
"os.path.isdir"
] |
[((287, 309), 'os.listdir', 'os.listdir', (['folderpath'], {}), '(folderpath)\n', (297, 309), False, 'import os\n'), ((732, 759), 'os.path.isdir', 'os.path.isdir', (['path_to_file'], {}), '(path_to_file)\n', (745, 759), False, 'import os\n'), ((968, 990), 'os.listdir', 'os.listdir', (['folderpath'], {}), '(folderpath)\n', (978, 990), False, 'import os\n'), ((801, 825), 'os.listdir', 'os.listdir', (['path_to_file'], {}), '(path_to_file)\n', (811, 825), False, 'import os\n')]
|
import math
import re
import unittest
import urllib.error
import urllib.request
from .core import Quantity
from .define import defined_systems
# Shortcuts for the four unit systems exercised by the tests below.
si = defined_systems['si']
esu = defined_systems['esu']
emu = defined_systems['emu']
gauss = defined_systems['gauss']
class PhysicalQuantitiesTest(unittest.TestCase):
    """Tests for Quantity arithmetic, comparisons, unit expansion and the
    consistency of physical constants across unit systems."""
    def assert_quantity_equal(self, first, second):
        """Assert that two quantities agree in value, error, units and system."""
        self.assertAlmostEqual(first.value, second.value)
        self.assertAlmostEqual(first.error, second.error)
        self.assertEqual(first.units, second.units)
        self.assertEqual(first.system, second.system)
    def test_sign(self):
        """Unary +, - and abs() on positive and negative quantities."""
        a = Quantity(1, 0.2, {'Kilogram': 1}, si)
        b = Quantity(-1, 0.2, {'Kilogram': 1}, si)
        self.assert_quantity_equal(+a, a)
        self.assert_quantity_equal(+b, b)
        self.assert_quantity_equal(-a, b)
        self.assert_quantity_equal(-b, a)
        self.assert_quantity_equal(abs(a), a)
        self.assert_quantity_equal(abs(b), a)
    def test_add(self):
        """Addition of compatible units; mismatched units and plain numbers raise."""
        a = Quantity(1, 0.2, {'Newton': 1}, si)
        b = Quantity(3, 0.4, {'Kilogram': 1, 'Meter': 1, 'Second': -2}, si)
        # 1/sqrt(5) == sqrt(0.2**2 + 0.4**2): errors combined in quadrature.
        c = Quantity(4, 1 / math.sqrt(5), {'Newton': 1}, si)
        d = Quantity(1, 0.2, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a + b, c.expand())
        with self.assertRaises(TypeError): a + d
        with self.assertRaises(TypeError): a + 1
    def test_subtract(self):
        """Subtraction mirrors addition: same quadrature error, same TypeErrors."""
        a = Quantity(1, 0.2, {'Newton': 1}, si)
        b = Quantity(3, 0.4, {'Kilogram': 1, 'Meter': 1, 'Second': -2}, si)
        c = Quantity(-2, 1 / math.sqrt(5), {'Newton': 1}, si)
        d = Quantity(1, 0.2, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a - b, c.expand())
        with self.assertRaises(TypeError): a - d
        with self.assertRaises(TypeError): a - 1
    def test_multiply(self):
        """Quantity*quantity and quantity*scalar (both orders, both signs)."""
        a = Quantity(1, 0.2, {'Kilogram': 1}, si)
        b = Quantity(3, 0.4, {'Meter': -2}, si)
        c = Quantity(3, math.sqrt(13) / 5, {'Kilogram': 1, 'Meter': -2}, si)
        self.assert_quantity_equal(a * b, c)
        a = Quantity(1, 0.2, {'Kilogram': 1}, si) * 5
        b = Quantity(5, 1, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a, b)
        a = Quantity(1, 0.2, {'Kilogram': 1}, si) * -5
        b = Quantity(-5, 1, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a, b)
        a = 5 * Quantity(3, 0.4, {'Kilogram': 1}, si)
        b = Quantity(15, 2, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a, b)
        a = -5 * Quantity(3, 0.4, {'Kilogram': 1}, si)
        b = Quantity(-15, 2, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a, b)
    def test_divide(self):
        """Quantity/quantity, quantity/scalar and scalar/quantity (unit inversion)."""
        a = Quantity(2, 0.1, {'Kilogram': 1}, si)
        b = Quantity(4, 0.3, {'Meter': -2}, si)
        c = Quantity(0.5, math.sqrt(13) / 80,
                {'Kilogram': 1, 'Meter': 2}, si)
        self.assert_quantity_equal(a / b, c)
        a = Quantity(1, 0.2, {'Kilogram': 1}, si) / 5
        b = Quantity(0.2, 0.04, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a, b)
        a = Quantity(1, 0.2, {'Kilogram': 1}, si) / -5
        b = Quantity(-0.2, 0.04, {'Kilogram': 1}, si)
        self.assert_quantity_equal(a, b)
        a = 5 / Quantity(3, 0.4, {'Kilogram': 1}, si)
        b = Quantity(5/3, 2/9, {'Kilogram': -1}, si)
        self.assert_quantity_equal(a, b)
        a = -5 / Quantity(3, 0.4, {'Kilogram': 1}, si)
        b = Quantity(-5/3, 2/9, {'Kilogram': -1}, si)
        self.assert_quantity_equal(a, b)
    def test_power(self):
        """Integer powers scale units' exponents and the relative error."""
        a = Quantity(3, 0.4, {'Kilogram': 1, 'Meter': 1}, si) ** 5
        b = Quantity(243, 162, {'Kilogram': 5, 'Meter': 5}, si)
        self.assert_quantity_equal(a, b)
    def test_almost_equals(self):
        """almost_equals: overlap within errors; dimensionless quantities may be
        compared to plain numbers, dimensioned ones may not."""
        a = Quantity(1, 0.5, {'Kilogram': 1}, si)
        b = Quantity(2, 0.7, {'Kilogram': 1}, si)
        c = Quantity(3, 0.9, {'Kilogram': 1}, si)
        d = Quantity(1, 0.5, {'Meter': 1}, si)
        e = Quantity(1, 0.5, {}, si)
        f = Quantity(2, 0.7, {}, si)
        self.assertTrue(a.almost_equals(b))
        self.assertFalse(a.almost_equals(c))
        self.assertRaises(TypeError, a.almost_equals, d)
        for x in [a, b, c, d]:
            self.assertRaises(TypeError, x.almost_equals, 1)
        self.assertTrue(e.almost_equals(1))
        self.assertTrue(f.almost_equals(2))
        self.assertFalse(e.almost_equals(2))
        self.assertFalse(f.almost_equals(1))
        self.assertTrue(e.almost_equals(f))
    def test_float(self):
        """Dimensionless-equivalent quantities can be fed to math functions."""
        a = Quantity(1, 0, {'Second': 1, 'Hertz': 1}, si)
        b = Quantity(365.25 * 86400, 0, {'Second': 1, 'JulianYear': -1}, si)
        self.assertEqual(math.cos(a), math.cos(1))
        self.assertEqual(math.cos(b), math.cos(1))
    def test_expand(self):
        """expand() reduces derived units to a common base representation."""
        # Lorentz force
        a = Quantity(1, 0,
            {'Coulomb': 1, 'Meter': 1, 'Second': -1, 'Tesla': 1}, si)
        b = Quantity(1, 0, {'Newton': 1}, si)
        self.assert_quantity_equal(a.expand(), b.expand())
        # Faraday's law
        a = Quantity(1, 0, {'Weber': 1, 'Second': -1}, si)
        b = Quantity(1, 0, {'Volt': 1}, si)
        self.assert_quantity_equal(a.expand(), b.expand())
        # torque of a motor
        a = Quantity(1, 0, {'Ampere': 1, 'Tesla': 1, 'Meter': 2}, si)
        b = Quantity(1, 0, {'Newton': 1, 'Meter': 1}, si)
        self.assert_quantity_equal(a.expand(), b.expand())
        # resonance frequency of an RLC circuit
        a = Quantity(1, 0, {'Henry': -1/2, 'Farad': -1/2}, si)
        b = Quantity(1, 0, {'Hertz': 1}, si)
        self.assert_quantity_equal(a.expand(), b.expand())
    def test_simple_constants(self):
        """Well-known constant ratios (1 Ry ~ 13.6 eV, alpha ~ 1/137) in every system."""
        for system in defined_systems.values():
            a = Quantity(13.6, 0,
                {'ElectronVolt': 1, 'RydbergEnergy': -1}, system).expand()
            self.assertAlmostEqual(a.value, 1, places=3)
            self.assertEqual(a.units, {})
            a = system.get_constant('FineStructureConstant').expand() * 137
            self.assertAlmostEqual(a.value, 1, places=3)
            self.assertEqual(a.units, {})
    def test_electromagnetic_constants(self):
        """The same electromagnetic energies agree across SI, ESU, EMU and Gaussian systems."""
        # Shadows the module-level shortcuts with the package's system modules.
        from . import si, esu, emu, gauss
        a = (si.e**2 / si.a0 / (4*math.pi*si.epsilon0) / (1e-7*si.J)).expand()
        b = (esu.e**2 / esu.a0 / esu.erg).expand()
        c = (emu.e**2 / emu.a0 * emu.c**2 / emu.erg).expand()
        d = (gauss.e**2 / gauss.a0 / gauss.erg).expand()
        self.assertAlmostEqual(a.value * 1e11, b.value * 1e11)
        self.assertAlmostEqual(a.value * 1e11, c.value * 1e11)
        self.assertAlmostEqual(a.value * 1e11, d.value * 1e11)
        a = (si.muB**2 / si.a0**3 * si.mu0 / (1e-7*si.J)).expand()
        b = (esu.muB**2 / esu.a0**3 / esu.c**2 / esu.erg).expand()
        c = (emu.muB**2 / emu.a0**3 / emu.erg).expand()
        d = (gauss.muB**2 / gauss.a0**3 / gauss.erg).expand()
        self.assertAlmostEqual(a.value * 1e3, b.value * 1e3)
        self.assertAlmostEqual(a.value * 1e3, c.value * 1e3)
        self.assertAlmostEqual(a.value * 1e3, d.value * 1e3)
    def test_codata(self):
        """Compare local units/constants against the CODATA table downloaded
        from NIST. NOTE(review): requires network access; it fails with
        ValueError when the table cannot be fetched."""
        url = 'http://physics.nist.gov/cuu/Constants/Table/allascii.txt'
        # Local name -> CODATA row name, for units...
        units = {
            'AtomicMassUnit': 'unified atomic mass unit'}
        # ...and for constants.
        constants = {
            'AvogadroConstant': 'Avogadro constant',
            'ElectronGFactor': 'electron g factor',
            'ProtonGFactor': 'proton g factor',
            'NeutronGFactor': 'neutron g factor',
            'MuonGFactor': 'muon g factor',
            'LightSpeed': 'speed of light in vacuum',
            'ElementaryCharge': 'atomic unit of charge',
            'PlanckConstant': 'Planck constant',
            'BoltzmannConstant': 'Boltzmann constant',
            'GravitationalConstant': 'Newtonian constant of gravitation',
            'VacuumPermeability': 'vacuum mag. permeability',
            'ElectronMass': 'electron mass',
            'ProtonMass': 'proton mass',
            'NeutronMass': 'neutron mass',
            'MuonMass': 'muon mass'}
        try:
            response = urllib.request.urlopen(url)
        except urllib.error.URLError:
            raise ValueError('Cannot download data.')
        data = iter(response.read().decode('ascii').rstrip('\n').split('\n'))
        # Skip the header up to and including the '---' separator line.
        while not next(data).startswith('--'):
            pass
        # Columns in the ASCII table are separated by runs of 2+ spaces.
        data = (re.split(' {2,}', x) for x in data)
        def parse_value(x):
            # Values may contain grouping spaces and a trailing '...' marker.
            return float(x.replace(' ', '').replace('...', ''))
        def parse_error(x):
            return 0 if x == '(exact)' else float(x.replace(' ', ''))
        data = {x: (parse_value(y), parse_error(z)) for x, y, z, *_ in data}
        for local_name, codata_name in units.items():
            quantity = Quantity(1, 0, {local_name: 1}, si).expand()
            x, y = data[codata_name]
            assert math.isclose(quantity.value, x)
            assert math.isclose(quantity.error, y)
        for local_name, codata_name in constants.items():
            quantity = si.get_constant(local_name).expand()
            x, y = data[codata_name]
            assert math.isclose(quantity.value, x)
            assert math.isclose(quantity.error, y)
# Allow running the test module directly as a script.
if __name__ == '__main__':
    unittest.main()
|
[
"re.split",
"math.isclose",
"math.sqrt",
"math.cos",
"unittest.main"
] |
[((8506, 8521), 'unittest.main', 'unittest.main', ([], {}), '()\n', (8519, 8521), False, 'import unittest\n'), ((4296, 4307), 'math.cos', 'math.cos', (['a'], {}), '(a)\n', (4304, 4307), False, 'import math\n'), ((4309, 4320), 'math.cos', 'math.cos', (['(1)'], {}), '(1)\n', (4317, 4320), False, 'import math\n'), ((4343, 4354), 'math.cos', 'math.cos', (['b'], {}), '(b)\n', (4351, 4354), False, 'import math\n'), ((4356, 4367), 'math.cos', 'math.cos', (['(1)'], {}), '(1)\n', (4364, 4367), False, 'import math\n'), ((7734, 7754), 're.split', 're.split', (['""" {2,}"""', 'x'], {}), "(' {2,}', x)\n", (7742, 7754), False, 'import re\n'), ((8170, 8201), 'math.isclose', 'math.isclose', (['quantity.value', 'x'], {}), '(quantity.value, x)\n', (8182, 8201), False, 'import math\n'), ((8215, 8246), 'math.isclose', 'math.isclose', (['quantity.error', 'y'], {}), '(quantity.error, y)\n', (8227, 8246), False, 'import math\n'), ((8399, 8430), 'math.isclose', 'math.isclose', (['quantity.value', 'x'], {}), '(quantity.value, x)\n', (8411, 8430), False, 'import math\n'), ((8444, 8475), 'math.isclose', 'math.isclose', (['quantity.error', 'y'], {}), '(quantity.error, y)\n', (8456, 8475), False, 'import math\n'), ((1085, 1097), 'math.sqrt', 'math.sqrt', (['(5)'], {}), '(5)\n', (1094, 1097), False, 'import math\n'), ((1473, 1485), 'math.sqrt', 'math.sqrt', (['(5)'], {}), '(5)\n', (1482, 1485), False, 'import math\n'), ((1830, 1843), 'math.sqrt', 'math.sqrt', (['(13)'], {}), '(13)\n', (1839, 1843), False, 'import math\n'), ((2592, 2605), 'math.sqrt', 'math.sqrt', (['(13)'], {}), '(13)\n', (2601, 2605), False, 'import math\n')]
|
import unittest
import sys
import os
sys.path.append(os.getcwd().replace("test", "src"))
import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager
import paramiko
import tempfile
import re
##THIS TEST WILL NOT WORK##
class test_ConnectionManager(unittest.TestCase):
    """Integration tests for ConnectionManager. They need a live cluster host
    and a real private key (the empty placeholders below must be filled in),
    so they do not pass as checked in."""
    def test_paramiko(self):
        """A garbage key file must raise SSHException; a real one must load."""
        key_file = tempfile.NamedTemporaryFile()
        key_file.write(b"notakey")
        self.assertRaises(paramiko.SSHException, paramiko.RSAKey.from_private_key_file, key_file.name)
        key_file.close()
        #key path
        new_key = ""
        #checks to make sure a real key file works. will not be portable
        #leaving my ssh key for users to download for tests seems not smart
        # NOTE(review): new_key is empty here, so this call fails unless a
        # local key path is filled in above.
        paramiko.RSAKey.from_private_key_file(new_key)
    def test_connect_master(self):
        """connect_master rejects a bogus key and prints 'connecting'/'connected'
        exactly once each on a successful connection."""
        #ip
        hostname = ""
        username = "ec2-user"
        key_file = tempfile.NamedTemporaryFile()
        key_file.write(b"not_a_key")
        key_file.seek(0)
        self.assertRaises(paramiko.SSHException, ConnectionManager.connect_master, hostname, username, key_file.name)
        key_file.close()
        #this won't even work elsewhere but I don't want to put my keyfile into the eepo
        #key path
        new_key = ""
        ConnectionManager.connect_master(hostname, username, new_key)
        #checks if last line in the standard output is "connected"
        # sys.stdout.getvalue() works because unittest.main runs with buffer=True.
        out = sys.stdout.getvalue().strip()
        last_line = out.split()[-1]
        self.assertEqual(last_line, "connected")
        #checks that connected and connecting only are printed once exactly
        num_connected = len(re.findall("connected", out))
        self.assertEqual(1, num_connected)
        num_connecting = len(re.findall("connecting", out))
        self.assertEqual(1, num_connecting)
    def test_execute_command(self):
        """execute_command returns the remote stdout; non-client objects raise."""
        #ip
        hostname = ""
        username = "ec2-user"
        #key path
        key = ""
        ssh_client = ConnectionManager.connect_master(hostname, username, key)
        command = "pwd"
        #checks that the pwd command worked
        self.assertEqual(ConnectionManager.execute_command(ssh_client, command), "/home/ec2-user\n")
        ssh_client = "not an ssh_client"
        #makes sure that an error is raised when a non sshclient is passed in
        self.assertRaises(AttributeError, ConnectionManager.execute_command, ssh_client, command)
    def test_copy_file(self):
        """copy_file uploads a local temp file, prints both paths, and raises
        FileNotFoundError for a missing local file."""
        #ip
        hostname = ""
        username = "ec2-user"
        #key path
        key = ""
        ssh_client = ConnectionManager.connect_master(hostname, username, key)
        temp = tempfile.NamedTemporaryFile()
        localpath = temp.name
        remotepath = "/home/ec2-user"
        ConnectionManager.copy_file(ssh_client, localpath, remotepath)
        out = sys.stdout.getvalue().strip().split()[-2:]
        #checks that the copy file prints the local and remote paths
        self.assertEqual(out, [localpath, remotepath])
        ls_output = ConnectionManager.execute_command(ssh_client,
                "ls tmp* | wc -l")
        ConnectionManager.execute_command(ssh_client, "rm tmp*")
        #checks that there is exactly 1 tempfile in the home directory of the server
        self.assertEqual(ls_output.strip(), "1")
        #makes sure it doesn't work with a nonfile
        self.assertRaises(FileNotFoundError, ConnectionManager.copy_file,
                ssh_client, "fakefile", "/home/ec2-user")
# buffer=True is required: the tests above read sys.stdout.getvalue().
if __name__ == "__main__":
    unittest.main(module=__name__, buffer=True, exit=False)
|
[
"cirrus_ngs.cfnCluster.ConnectionManager.copy_file",
"paramiko.RSAKey.from_private_key_file",
"cirrus_ngs.cfnCluster.ConnectionManager.connect_master",
"cirrus_ngs.cfnCluster.ConnectionManager.execute_command",
"os.getcwd",
"tempfile.NamedTemporaryFile",
"unittest.main",
"sys.stdout.getvalue",
"re.findall"
] |
[((3772, 3827), 'unittest.main', 'unittest.main', ([], {'module': '__name__', 'buffer': '(True)', 'exit': '(False)'}), '(module=__name__, buffer=True, exit=False)\n', (3785, 3827), False, 'import unittest\n'), ((326, 355), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (353, 355), False, 'import tempfile\n'), ((718, 764), 'paramiko.RSAKey.from_private_key_file', 'paramiko.RSAKey.from_private_key_file', (['new_key'], {}), '(new_key)\n', (755, 764), False, 'import paramiko\n'), ((884, 913), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (911, 913), False, 'import tempfile\n'), ((1257, 1318), 'cirrus_ngs.cfnCluster.ConnectionManager.connect_master', 'ConnectionManager.connect_master', (['hostname', 'username', 'new_key'], {}), '(hostname, username, new_key)\n', (1289, 1318), True, 'import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager\n'), ((1956, 2013), 'cirrus_ngs.cfnCluster.ConnectionManager.connect_master', 'ConnectionManager.connect_master', (['hostname', 'username', 'key'], {}), '(hostname, username, key)\n', (1988, 2013), True, 'import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager\n'), ((2554, 2611), 'cirrus_ngs.cfnCluster.ConnectionManager.connect_master', 'ConnectionManager.connect_master', (['hostname', 'username', 'key'], {}), '(hostname, username, key)\n', (2586, 2611), True, 'import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager\n'), ((2636, 2665), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {}), '()\n', (2663, 2665), False, 'import tempfile\n'), ((2743, 2805), 'cirrus_ngs.cfnCluster.ConnectionManager.copy_file', 'ConnectionManager.copy_file', (['ssh_client', 'localpath', 'remotepath'], {}), '(ssh_client, localpath, remotepath)\n', (2770, 2805), True, 'import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager\n'), ((3009, 3073), 'cirrus_ngs.cfnCluster.ConnectionManager.execute_command', 
'ConnectionManager.execute_command', (['ssh_client', '"""ls tmp* | wc -l"""'], {}), "(ssh_client, 'ls tmp* | wc -l')\n", (3042, 3073), True, 'import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager\n'), ((3099, 3155), 'cirrus_ngs.cfnCluster.ConnectionManager.execute_command', 'ConnectionManager.execute_command', (['ssh_client', '"""rm tmp*"""'], {}), "(ssh_client, 'rm tmp*')\n", (3132, 3155), True, 'import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager\n'), ((53, 64), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (62, 64), False, 'import os\n'), ((1621, 1649), 're.findall', 're.findall', (['"""connected"""', 'out'], {}), "('connected', out)\n", (1631, 1649), False, 'import re\n'), ((1724, 1753), 're.findall', 're.findall', (['"""connecting"""', 'out'], {}), "('connecting', out)\n", (1734, 1753), False, 'import re\n'), ((2108, 2162), 'cirrus_ngs.cfnCluster.ConnectionManager.execute_command', 'ConnectionManager.execute_command', (['ssh_client', 'command'], {}), '(ssh_client, command)\n', (2141, 2162), True, 'import cirrus_ngs.cfnCluster.ConnectionManager as ConnectionManager\n'), ((1401, 1422), 'sys.stdout.getvalue', 'sys.stdout.getvalue', ([], {}), '()\n', (1420, 1422), False, 'import sys\n'), ((2820, 2841), 'sys.stdout.getvalue', 'sys.stdout.getvalue', ([], {}), '()\n', (2839, 2841), False, 'import sys\n')]
|
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# $Id$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
from openerp.tools.translate import _
class crm_lead_forward_to_partner(osv.TransientModel):
    """ Forward info history to partners.

    Transient wizard built on top of mail.compose.message that lets a user
    forward a crm.lead (with a chosen slice of its history) to additional
    partners.
    """
    _name = 'crm.lead.forward.to.partner'
    _inherit = "mail.compose.message"
    def _get_composition_mode_selection(self, cr, uid, context=None):
        """Extend the inherited composition modes with a 'forward' entry."""
        composition_mode = super(crm_lead_forward_to_partner, self)._get_composition_mode_selection(cr, uid, context=context)
        composition_mode.append(('forward', 'Forward'))
        return composition_mode
    _columns = {
        'partner_ids': fields.many2many('res.partner',
            'lead_forward_to_partner_res_partner_rel',
            'wizard_id', 'partner_id', 'Additional contacts'),
        'attachment_ids': fields.many2many('ir.attachment',
            'lead_forward_to_partner_attachment_rel',
            'wizard_id', 'attachment_id', 'Attachments'),
        'history_mode': fields.selection([('info', 'Internal notes'),
            ('latest', 'Latest email'), ('whole', 'Whole Story')],
            'Send history', required=True),
    }
    _defaults = {
        'history_mode': 'info',
    }
    def default_get(self, cr, uid, fields, context=None):
        """Compute defaults in 'comment' mode (to trigger the document-like
        get_record_data path), then restore the forward mode on the result."""
        if context is None:
            context = {}
        # set as comment, perform overrided document-like action that calls get_record_data
        old_mode = context.get('default_composition_mode', 'forward')
        context['default_composition_mode'] = 'comment'
        res = super(crm_lead_forward_to_partner, self).default_get(cr, uid, fields, context=context)
        # back to forward mode
        context['default_composition_mode'] = old_mode
        res['composition_mode'] = context['default_composition_mode']
        return res
    def get_record_data(self, cr, uid, model, res_id, context=None):
        """ Override of mail.compose.message, to add default values coming
            form the related lead: subject and body are rendered from the
            crm_partner_assign email template.
        """
        if context is None:
            context = {}
        res = super(crm_lead_forward_to_partner, self).get_record_data(cr, uid, model, res_id, context=context)
        # BUGFIX: the original `model not in ('crm.lead')` tested substring
        # membership in the *string* 'crm.lead'; a one-element tuple makes it
        # a real membership test on whole model names.
        if model not in ('crm.lead',) or not res_id:
            return res
        template_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'crm_partner_assign', 'crm_partner_assign_email_template')[1]
        context['history_mode'] = context.get('history_mode','whole')
        mail_body_fields = ['partner_id', 'partner_name', 'title', 'function', 'street', 'street2', 'zip', 'city', 'country_id', 'state_id', 'email_from', 'phone', 'fax', 'mobile', 'description']
        lead = self.pool.get('crm.lead').browse(cr, uid, res_id, context=context)
        context['mail_body'] = self.pool.get('crm.lead')._mail_body(cr, uid, lead, mail_body_fields, context=context)
        template = self.generate_email_for_composer(cr, uid, template_id, res_id, context)
        res['subject'] = template['subject']
        res['body'] = template['body']
        return res
    def on_change_history_mode(self, cr, uid, ids, history_mode, model, res_id, context=None):
        """ Update body when changing history_mode """
        if context is None:
            context = {}
        if model and model == 'crm.lead' and res_id:
            lead = self.pool.get(model).browse(cr, uid, res_id, context=context)
            context['history_mode'] = history_mode
            body = self.get_record_data(cr, uid, 'crm.lead', res_id, context=context)['body']
            return {'value': {'body': body}}
        # BUGFIX: previously `body` was referenced unconditionally and raised
        # NameError whenever the condition above was false; return an empty
        # onchange result instead.
        return {}
    def create(self, cr, uid, values, context=None):
        """ TDE-HACK: remove 'type' from context, because when viewing an
            opportunity form view, a default_type is set and propagated
            to the wizard, that has a not matching type field. """
        # BUGFIX: guard against context=None (the other methods do); the
        # original crashed on `None.pop(...)`.
        if context is None:
            context = {}
        default_type = context.pop('default_type', None)
        new_id = super(crm_lead_forward_to_partner, self).create(cr, uid, values, context=context)
        if default_type:
            context['default_type'] = default_type
        return new_id
    def action_forward(self, cr, uid, ids, context=None):
        """ Forward the lead to a partner: recompute the wizard defaults,
        store them and send the mail. """
        if context is None:
            context = {}
        res = {'type': 'ir.actions.act_window_close'}
        wizard = self.browse(cr, uid, ids[0], context=context)
        # BUGFIX: tuple instead of the original string-membership test.
        if wizard.model not in ('crm.lead',):
            return res
        # NOTE(review): lead/lead_ids are computed but never used below;
        # kept for parity with the original behavior.
        lead = self.pool.get(wizard.model)
        lead_ids = wizard.res_id and [wizard.res_id] or []
        if wizard.composition_mode == 'mass_mail':
            lead_ids = context and context.get('active_ids', []) or []
        value = self.default_get(cr, uid, ['body', 'email_to', 'email_cc', 'subject', 'history_mode'], context=context)
        self.write(cr, uid, ids, value, context=context)
        return self.send_mail(cr, uid, ids, context=context)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
|
[
"openerp.osv.fields.selection",
"openerp.osv.fields.many2many"
] |
[((1592, 1720), 'openerp.osv.fields.many2many', 'fields.many2many', (['"""res.partner"""', '"""lead_forward_to_partner_res_partner_rel"""', '"""wizard_id"""', '"""partner_id"""', '"""Additional contacts"""'], {}), "('res.partner', 'lead_forward_to_partner_res_partner_rel',\n 'wizard_id', 'partner_id', 'Additional contacts')\n", (1608, 1720), False, 'from openerp.osv import fields, osv\n'), ((1768, 1892), 'openerp.osv.fields.many2many', 'fields.many2many', (['"""ir.attachment"""', '"""lead_forward_to_partner_attachment_rel"""', '"""wizard_id"""', '"""attachment_id"""', '"""Attachments"""'], {}), "('ir.attachment', 'lead_forward_to_partner_attachment_rel',\n 'wizard_id', 'attachment_id', 'Attachments')\n", (1784, 1892), False, 'from openerp.osv import fields, osv\n'), ((1938, 2074), 'openerp.osv.fields.selection', 'fields.selection', (["[('info', 'Internal notes'), ('latest', 'Latest email'), ('whole',\n 'Whole Story')]", '"""Send history"""'], {'required': '(True)'}), "([('info', 'Internal notes'), ('latest', 'Latest email'), (\n 'whole', 'Whole Story')], 'Send history', required=True)\n", (1954, 2074), False, 'from openerp.osv import fields, osv\n')]
|
import tensorflow as tf
from tensorflow.keras.losses import binary_crossentropy,sparse_categorical_crossentropy
from config import Configuration
cfg = Configuration()
class YOLOLoss(tf.losses.Loss):
    """YOLO detection loss for one output scale: combines box-center (xy),
    box-size (wh), objectness and class losses, decoded with the anchors
    given at construction time."""
    def __init__(self, anchors):
        # reduction="none": the caller is responsible for reducing the
        # per-sample losses returned by call().
        super(YOLOLoss, self).__init__(reduction="none", name="YOLOLoss")
        # Anchor sizes used to decode predicted widths/heights in yolo_boxes.
        self.anchors = tf.constant(anchors)
    def _meshgrid(self, n_a, n_b):
        # Builds the two (n_b, n_a) coordinate grids, like tf.meshgrid on
        # tf.range(n_a) / tf.range(n_b).
        return [
            tf.reshape(tf.tile(tf.range(n_a), [n_b]), (n_b, n_a)),
            tf.reshape(tf.repeat(tf.range(n_b), n_a), (n_b, n_a))
        ]
    def broadcast_iou(self, box_1, box_2):
        """Pairwise IoU between every box in box_1 and every box in box_2."""
        # box_1: (..., (x1, y1, x2, y2))
        # box_2: (N, (x1, y1, x2, y2))
        # broadcast boxes
        box_1 = tf.expand_dims(box_1, -2)
        box_2 = tf.expand_dims(box_2, 0)
        # new_shape: (..., N, (x1, y1, x2, y2))
        new_shape = tf.broadcast_dynamic_shape(tf.shape(box_1), tf.shape(box_2))
        box_1 = tf.broadcast_to(box_1, new_shape)
        box_2 = tf.broadcast_to(box_2, new_shape)
        # Intersection extents are clamped at 0 for non-overlapping boxes.
        int_w = tf.maximum(tf.minimum(box_1[..., 2], box_2[..., 2]) -
                           tf.maximum(box_1[..., 0], box_2[..., 0]), 0)
        int_h = tf.maximum(tf.minimum(box_1[..., 3], box_2[..., 3]) -
                           tf.maximum(box_1[..., 1], box_2[..., 1]), 0)
        int_area = int_w * int_h
        box_1_area = (box_1[..., 2] - box_1[..., 0]) * \
            (box_1[..., 3] - box_1[..., 1])
        box_2_area = (box_2[..., 2] - box_2[..., 0]) * \
            (box_2[..., 3] - box_2[..., 1])
        # IoU = intersection / union.
        return int_area / (box_1_area + box_2_area - int_area)
    def yolo_boxes(self, pred, classes):
        """Decode raw network output into (bbox, objectness, class_probs,
        pred_box), where bbox is in (x1, y1, x2, y2) grid-relative form and
        pred_box keeps the raw xywh used by the loss."""
        # pred: (batch_size, grid, grid, anchors, (x, y, w, h, obj, ...classes))
        grid_size = tf.shape(pred)[1:3]
        box_xy, box_wh, objectness, class_probs = tf.split(pred, (2, 2, 1, classes), axis=-1)
        box_xy = tf.sigmoid(box_xy)
        objectness = tf.sigmoid(objectness)
        class_probs = tf.sigmoid(class_probs)
        pred_box = tf.concat((box_xy, box_wh), axis=-1)  # original xywh for loss
        # !!! grid[x][y] == (y, x)
        grid = self._meshgrid(grid_size[1],grid_size[0])
        grid = tf.expand_dims(tf.stack(grid, axis=-1), axis=2)  # [gx, gy, 1, 2]
        # Shift sigmoid offsets by the cell index and normalize to [0, 1].
        box_xy = (box_xy + tf.cast(grid, tf.float32)) / tf.cast(grid_size, tf.float32)
        # Exponential decode of sizes, scaled by the anchors.
        box_wh = tf.exp(box_wh) * self.anchors
        box_x1y1 = box_xy - box_wh / 2
        box_x2y2 = box_xy + box_wh / 2
        bbox = tf.concat([box_x1y1, box_x2y2], axis=-1)
        return bbox, objectness, class_probs, pred_box
    def call(self, y_true, y_pred):
        """Compute the per-sample YOLO loss (shape (batch,)) from ground truth
        y_true and raw predictions y_pred."""
        # 1. transform all pred outputs
        # y_pred: (batch_size, grid, grid, anchors, (x, y, w, h, obj, ...cls))
        pred_box, pred_obj, pred_class, pred_xywh = self.yolo_boxes(y_pred, cfg.num_classes)
        pred_xy = pred_xywh[..., 0:2]
        pred_wh = pred_xywh[..., 2:4]
        # 2. transform all true outputs
        # y_true: (batch_size, grid, grid, anchors, (x1, y1, x2, y2, obj, cls))
        true_box, true_obj, true_class_idx = tf.split(y_true, (4, 1, 1), axis=-1)
        true_xy = (true_box[..., 0:2] + true_box[..., 2:4]) / 2
        true_wh = true_box[..., 2:4] - true_box[..., 0:2]
        # give higher weights to small boxes
        box_loss_scale = 2 - true_wh[..., 0] * true_wh[..., 1]
        # 3. inverting the pred box equations
        grid_size = tf.shape(y_true)[1]
        grid = tf.meshgrid(tf.range(grid_size), tf.range(grid_size))
        grid = tf.expand_dims(tf.stack(grid, axis=-1), axis=2)
        true_xy = true_xy * tf.cast(grid_size, tf.float32) - tf.cast(grid, tf.float32)
        true_wh = tf.math.log(true_wh / self.anchors)
        # log(0 / anchor) = -inf for empty cells; zero those entries out.
        true_wh = tf.where(tf.math.is_inf(true_wh),tf.zeros_like(true_wh), true_wh)
        # 4. calculate all masks
        obj_mask = tf.squeeze(true_obj, -1)
        # ignore false positive when iou is over threshold
        best_iou = tf.map_fn(
            lambda x: tf.reduce_max(self.broadcast_iou(x[0], tf.boolean_mask(
                x[1], tf.cast(x[2], tf.bool))), axis=-1),
            (pred_box, true_box, obj_mask),
            tf.float32)
        ignore_mask = tf.cast(best_iou < cfg.train_iou_threshold, tf.float32)
        # 5. calculate all losses
        xy_loss = obj_mask * box_loss_scale * tf.reduce_sum(tf.square(true_xy - pred_xy), axis=-1)
        wh_loss = obj_mask * box_loss_scale * tf.reduce_sum(tf.square(true_wh - pred_wh), axis=-1)
        obj_loss = binary_crossentropy(true_obj, pred_obj)
        # Positive cells use the plain BCE; negative cells only contribute
        # when their best IoU is below the ignore threshold.
        obj_loss = obj_mask * obj_loss + (1 - obj_mask) * ignore_mask * obj_loss
        class_loss = obj_mask * sparse_categorical_crossentropy(true_class_idx, pred_class)
        # 6. sum over (batch, gridx, gridy, anchors) => (batch, 1)
        xy_loss = tf.reduce_sum(xy_loss, axis=(1, 2, 3))
        wh_loss = tf.reduce_sum(wh_loss, axis=(1, 2, 3))
        obj_loss = tf.reduce_sum(obj_loss, axis=(1, 2, 3))
        class_loss = tf.reduce_sum(class_loss, axis=(1, 2, 3))
        return xy_loss + wh_loss + obj_loss + class_loss
|
[
"tensorflow.shape",
"tensorflow.math.log",
"tensorflow.reduce_sum",
"tensorflow.split",
"tensorflow.keras.losses.binary_crossentropy",
"tensorflow.cast",
"tensorflow.math.is_inf",
"tensorflow.concat",
"tensorflow.maximum",
"tensorflow.zeros_like",
"tensorflow.square",
"tensorflow.stack",
"config.Configuration",
"tensorflow.range",
"tensorflow.keras.losses.sparse_categorical_crossentropy",
"tensorflow.sigmoid",
"tensorflow.squeeze",
"tensorflow.expand_dims",
"tensorflow.minimum",
"tensorflow.broadcast_to",
"tensorflow.constant",
"tensorflow.exp"
] |
[((151, 166), 'config.Configuration', 'Configuration', ([], {}), '()\n', (164, 166), False, 'from config import Configuration\n'), ((330, 350), 'tensorflow.constant', 'tf.constant', (['anchors'], {}), '(anchors)\n', (341, 350), True, 'import tensorflow as tf\n'), ((715, 740), 'tensorflow.expand_dims', 'tf.expand_dims', (['box_1', '(-2)'], {}), '(box_1, -2)\n', (729, 740), True, 'import tensorflow as tf\n'), ((757, 781), 'tensorflow.expand_dims', 'tf.expand_dims', (['box_2', '(0)'], {}), '(box_2, 0)\n', (771, 781), True, 'import tensorflow as tf\n'), ((927, 960), 'tensorflow.broadcast_to', 'tf.broadcast_to', (['box_1', 'new_shape'], {}), '(box_1, new_shape)\n', (942, 960), True, 'import tensorflow as tf\n'), ((977, 1010), 'tensorflow.broadcast_to', 'tf.broadcast_to', (['box_2', 'new_shape'], {}), '(box_2, new_shape)\n', (992, 1010), True, 'import tensorflow as tf\n'), ((1801, 1844), 'tensorflow.split', 'tf.split', (['pred', '(2, 2, 1, classes)'], {'axis': '(-1)'}), '(pred, (2, 2, 1, classes), axis=-1)\n', (1809, 1844), True, 'import tensorflow as tf\n'), ((1863, 1881), 'tensorflow.sigmoid', 'tf.sigmoid', (['box_xy'], {}), '(box_xy)\n', (1873, 1881), True, 'import tensorflow as tf\n'), ((1903, 1925), 'tensorflow.sigmoid', 'tf.sigmoid', (['objectness'], {}), '(objectness)\n', (1913, 1925), True, 'import tensorflow as tf\n'), ((1948, 1971), 'tensorflow.sigmoid', 'tf.sigmoid', (['class_probs'], {}), '(class_probs)\n', (1958, 1971), True, 'import tensorflow as tf\n'), ((1991, 2027), 'tensorflow.concat', 'tf.concat', (['(box_xy, box_wh)'], {'axis': '(-1)'}), '((box_xy, box_wh), axis=-1)\n', (2000, 2027), True, 'import tensorflow as tf\n'), ((2457, 2497), 'tensorflow.concat', 'tf.concat', (['[box_x1y1, box_x2y2]'], {'axis': '(-1)'}), '([box_x1y1, box_x2y2], axis=-1)\n', (2466, 2497), True, 'import tensorflow as tf\n'), ((3045, 3081), 'tensorflow.split', 'tf.split', (['y_true', '(4, 1, 1)'], {'axis': '(-1)'}), '(y_true, (4, 1, 1), axis=-1)\n', (3053, 3081), True, 'import 
tensorflow as tf\n'), ((3637, 3672), 'tensorflow.math.log', 'tf.math.log', (['(true_wh / self.anchors)'], {}), '(true_wh / self.anchors)\n', (3648, 3672), True, 'import tensorflow as tf\n'), ((3810, 3834), 'tensorflow.squeeze', 'tf.squeeze', (['true_obj', '(-1)'], {}), '(true_obj, -1)\n', (3820, 3834), True, 'import tensorflow as tf\n'), ((4150, 4205), 'tensorflow.cast', 'tf.cast', (['(best_iou < cfg.train_iou_threshold)', 'tf.float32'], {}), '(best_iou < cfg.train_iou_threshold, tf.float32)\n', (4157, 4205), True, 'import tensorflow as tf\n'), ((4458, 4497), 'tensorflow.keras.losses.binary_crossentropy', 'binary_crossentropy', (['true_obj', 'pred_obj'], {}), '(true_obj, pred_obj)\n', (4477, 4497), False, 'from tensorflow.keras.losses import binary_crossentropy, sparse_categorical_crossentropy\n'), ((4757, 4795), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['xy_loss'], {'axis': '(1, 2, 3)'}), '(xy_loss, axis=(1, 2, 3))\n', (4770, 4795), True, 'import tensorflow as tf\n'), ((4814, 4852), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['wh_loss'], {'axis': '(1, 2, 3)'}), '(wh_loss, axis=(1, 2, 3))\n', (4827, 4852), True, 'import tensorflow as tf\n'), ((4872, 4911), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['obj_loss'], {'axis': '(1, 2, 3)'}), '(obj_loss, axis=(1, 2, 3))\n', (4885, 4911), True, 'import tensorflow as tf\n'), ((4933, 4974), 'tensorflow.reduce_sum', 'tf.reduce_sum', (['class_loss'], {'axis': '(1, 2, 3)'}), '(class_loss, axis=(1, 2, 3))\n', (4946, 4974), True, 'import tensorflow as tf\n'), ((877, 892), 'tensorflow.shape', 'tf.shape', (['box_1'], {}), '(box_1)\n', (885, 892), True, 'import tensorflow as tf\n'), ((894, 909), 'tensorflow.shape', 'tf.shape', (['box_2'], {}), '(box_2)\n', (902, 909), True, 'import tensorflow as tf\n'), ((1731, 1745), 'tensorflow.shape', 'tf.shape', (['pred'], {}), '(pred)\n', (1739, 1745), True, 'import tensorflow as tf\n'), ((2177, 2200), 'tensorflow.stack', 'tf.stack', (['grid'], {'axis': '(-1)'}), '(grid, axis=-1)\n', (2185, 
2200), True, 'import tensorflow as tf\n'), ((2285, 2315), 'tensorflow.cast', 'tf.cast', (['grid_size', 'tf.float32'], {}), '(grid_size, tf.float32)\n', (2292, 2315), True, 'import tensorflow as tf\n'), ((2333, 2347), 'tensorflow.exp', 'tf.exp', (['box_wh'], {}), '(box_wh)\n', (2339, 2347), True, 'import tensorflow as tf\n'), ((3380, 3396), 'tensorflow.shape', 'tf.shape', (['y_true'], {}), '(y_true)\n', (3388, 3396), True, 'import tensorflow as tf\n'), ((3427, 3446), 'tensorflow.range', 'tf.range', (['grid_size'], {}), '(grid_size)\n', (3435, 3446), True, 'import tensorflow as tf\n'), ((3448, 3467), 'tensorflow.range', 'tf.range', (['grid_size'], {}), '(grid_size)\n', (3456, 3467), True, 'import tensorflow as tf\n'), ((3499, 3522), 'tensorflow.stack', 'tf.stack', (['grid'], {'axis': '(-1)'}), '(grid, axis=-1)\n', (3507, 3522), True, 'import tensorflow as tf\n'), ((3593, 3618), 'tensorflow.cast', 'tf.cast', (['grid', 'tf.float32'], {}), '(grid, tf.float32)\n', (3600, 3618), True, 'import tensorflow as tf\n'), ((3700, 3723), 'tensorflow.math.is_inf', 'tf.math.is_inf', (['true_wh'], {}), '(true_wh)\n', (3714, 3723), True, 'import tensorflow as tf\n'), ((3724, 3746), 'tensorflow.zeros_like', 'tf.zeros_like', (['true_wh'], {}), '(true_wh)\n', (3737, 3746), True, 'import tensorflow as tf\n'), ((4611, 4670), 'tensorflow.keras.losses.sparse_categorical_crossentropy', 'sparse_categorical_crossentropy', (['true_class_idx', 'pred_class'], {}), '(true_class_idx, pred_class)\n', (4642, 4670), False, 'from tensorflow.keras.losses import binary_crossentropy, sparse_categorical_crossentropy\n'), ((1039, 1079), 'tensorflow.minimum', 'tf.minimum', (['box_1[..., 2]', 'box_2[..., 2]'], {}), '(box_1[..., 2], box_2[..., 2])\n', (1049, 1079), True, 'import tensorflow as tf\n'), ((1106, 1146), 'tensorflow.maximum', 'tf.maximum', (['box_1[..., 0]', 'box_2[..., 0]'], {}), '(box_1[..., 0], box_2[..., 0])\n', (1116, 1146), True, 'import tensorflow as tf\n'), ((1178, 1218), 
'tensorflow.minimum', 'tf.minimum', (['box_1[..., 3]', 'box_2[..., 3]'], {}), '(box_1[..., 3], box_2[..., 3])\n', (1188, 1218), True, 'import tensorflow as tf\n'), ((1245, 1285), 'tensorflow.maximum', 'tf.maximum', (['box_1[..., 1]', 'box_2[..., 1]'], {}), '(box_1[..., 1], box_2[..., 1])\n', (1255, 1285), True, 'import tensorflow as tf\n'), ((2256, 2281), 'tensorflow.cast', 'tf.cast', (['grid', 'tf.float32'], {}), '(grid, tf.float32)\n', (2263, 2281), True, 'import tensorflow as tf\n'), ((3560, 3590), 'tensorflow.cast', 'tf.cast', (['grid_size', 'tf.float32'], {}), '(grid_size, tf.float32)\n', (3567, 3590), True, 'import tensorflow as tf\n'), ((4301, 4329), 'tensorflow.square', 'tf.square', (['(true_xy - pred_xy)'], {}), '(true_xy - pred_xy)\n', (4310, 4329), True, 'import tensorflow as tf\n'), ((4400, 4428), 'tensorflow.square', 'tf.square', (['(true_wh - pred_wh)'], {}), '(true_wh - pred_wh)\n', (4409, 4428), True, 'import tensorflow as tf\n'), ((436, 449), 'tensorflow.range', 'tf.range', (['n_a'], {}), '(n_a)\n', (444, 449), True, 'import tensorflow as tf\n'), ((505, 518), 'tensorflow.range', 'tf.range', (['n_b'], {}), '(n_b)\n', (513, 518), True, 'import tensorflow as tf\n'), ((4024, 4046), 'tensorflow.cast', 'tf.cast', (['x[2]', 'tf.bool'], {}), '(x[2], tf.bool)\n', (4031, 4046), True, 'import tensorflow as tf\n')]
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.13 on 2017-08-15 16:23
from __future__ import unicode_literals
import django.contrib.postgres.fields.jsonb
from django.db import migrations, models
import django.db.models.deletion
import djangoplicity.archives.base
import djangoplicity.archives.fields
class Migration(migrations.Migration):
    """Initial schema for the blog app: Author, AuthorDescription, Category, Post, Tag.

    Auto-generated by Django 1.9.13 (see file header) — do not hand-edit the
    operations below; schema changes belong in a new migration.
    """

    initial = True

    dependencies = [
        ('media', '0021_auto_20170207_1749'),
    ]

    operations = [
        # Blog post author, with an optional portrait from the media app.
        migrations.CreateModel(
            name='Author',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('biography', models.TextField(blank=True)),
                ('photo', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='media.Image')),
            ],
        ),
        # Through-model linking an Author to a Post with a per-post byline label.
        migrations.CreateModel(
            name='AuthorDescription',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('description', models.CharField(blank=True, help_text='Optional description, e.g.: "Author: ", or "Interview with"', max_length=100)),
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Author')),
            ],
        ),
        # Post category; `footer` text is appended to posts in this category.
        migrations.CreateModel(
            name='Category',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100)),
                ('footer', models.TextField(blank=True, help_text='Optional footer added to the bottom of posts')),
            ],
        ),
        # The blog post itself; slug doubles as the primary key and URL component.
        # Release/embargo fields come from the djangoplicity archives framework.
        migrations.CreateModel(
            name='Post',
            fields=[
                ('slug', models.SlugField(help_text='Used for the URL', primary_key=True, serialize=False)),
                ('title', models.CharField(max_length=255)),
                ('subtitle', models.CharField(blank=True, help_text='Optional subtitle', max_length=255)),
                ('lede', models.TextField()),
                ('body', models.TextField()),
                ('discover_box', models.TextField(blank=True)),
                ('numbers_box', models.TextField(blank=True)),
                ('links', models.TextField(blank=True)),
                ('release_date', djangoplicity.archives.fields.ReleaseDateTimeField(blank=True, db_index=True, null=True)),
                ('embargo_date', djangoplicity.archives.fields.ReleaseDateTimeField(blank=True, db_index=True, null=True)),
                ('published', models.BooleanField(db_index=True, default=False, verbose_name='Published')),
                ('last_modified', models.DateTimeField(auto_now=True, verbose_name='Last modified')),
                ('created', models.DateTimeField(auto_now_add=True, verbose_name='Created')),
                ('release_task_id', models.CharField(blank=True, max_length=64, null=True)),
                ('embargo_task_id', models.CharField(blank=True, max_length=64, null=True)),
                ('checksums', django.contrib.postgres.fields.jsonb.JSONField(blank=True, null=True)),
                ('authors', models.ManyToManyField(through='blog.AuthorDescription', to='blog.Author')),
                ('banner', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='media.Image', verbose_name='Banner Image')),
                ('category', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Category')),
            ],
            options={
                'ordering': ('-release_date',),
            },
            bases=(djangoplicity.archives.base.ArchiveModel, models.Model),
        ),
        # Free-form tags attached to posts.
        migrations.CreateModel(
            name='Tag',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
            ],
        ),
        # M2M added after both models exist to avoid a forward reference.
        migrations.AddField(
            model_name='post',
            name='tags',
            field=models.ManyToManyField(to='blog.Tag'),
        ),
        # Completes the Author<->Post through-model with its Post side.
        migrations.AddField(
            model_name='authordescription',
            name='post',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='blog.Post'),
        ),
    ]
|
[
"django.db.models.TextField",
"django.db.models.ForeignKey",
"django.db.models.ManyToManyField",
"django.db.models.BooleanField",
"django.db.models.SlugField",
"django.db.models.AutoField",
"django.db.models.DateTimeField",
"django.db.models.CharField"
] |
[((4222, 4259), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'to': '"""blog.Tag"""'}), "(to='blog.Tag')\n", (4244, 4259), False, 'from django.db import migrations, models\n'), ((4388, 4466), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""blog.Post"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='blog.Post')\n", (4405, 4466), False, 'from django.db import migrations, models\n'), ((562, 655), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (578, 655), False, 'from django.db import migrations, models\n'), ((679, 711), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (695, 711), False, 'from django.db import migrations, models\n'), ((744, 772), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (760, 772), False, 'from django.db import migrations, models\n'), ((801, 909), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""media.Image"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='media.Image')\n", (818, 909), False, 'from django.db import migrations, models\n'), ((1047, 1140), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1063, 1140), False, 'from django.db import migrations, models\n'), ((1171, 1297), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""Optional description, e.g.: "Author: ", or "Interview 
with\\""""', 'max_length': '(100)'}), '(blank=True, help_text=\n \'Optional description, e.g.: "Author: ", or "Interview with"\',\n max_length=100)\n', (1187, 1297), False, 'from django.db import migrations, models\n'), ((1318, 1403), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""blog.Author"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='blog.Author'\n )\n", (1335, 1403), False, 'from django.db import migrations, models\n'), ((1532, 1625), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (1548, 1625), False, 'from django.db import migrations, models\n'), ((1649, 1681), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(100)'}), '(max_length=100)\n', (1665, 1681), False, 'from django.db import migrations, models\n'), ((1711, 1802), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)', 'help_text': '"""Optional footer added to the bottom of posts"""'}), "(blank=True, help_text=\n 'Optional footer added to the bottom of posts')\n", (1727, 1802), False, 'from django.db import migrations, models\n'), ((1929, 2015), 'django.db.models.SlugField', 'models.SlugField', ([], {'help_text': '"""Used for the URL"""', 'primary_key': '(True)', 'serialize': '(False)'}), "(help_text='Used for the URL', primary_key=True, serialize=\n False)\n", (1945, 2015), False, 'from django.db import migrations, models\n'), ((2039, 2071), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(255)'}), '(max_length=255)\n', (2055, 2071), False, 'from django.db import migrations, models\n'), ((2103, 2178), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'help_text': '"""Optional subtitle"""', 'max_length': '(255)'}), "(blank=True, 
help_text='Optional subtitle', max_length=255)\n", (2119, 2178), False, 'from django.db import migrations, models\n'), ((2206, 2224), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2222, 2224), False, 'from django.db import migrations, models\n'), ((2252, 2270), 'django.db.models.TextField', 'models.TextField', ([], {}), '()\n', (2268, 2270), False, 'from django.db import migrations, models\n'), ((2306, 2334), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (2322, 2334), False, 'from django.db import migrations, models\n'), ((2369, 2397), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (2385, 2397), False, 'from django.db import migrations, models\n'), ((2426, 2454), 'django.db.models.TextField', 'models.TextField', ([], {'blank': '(True)'}), '(blank=True)\n', (2442, 2454), False, 'from django.db import migrations, models\n'), ((2735, 2810), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'db_index': '(True)', 'default': '(False)', 'verbose_name': '"""Published"""'}), "(db_index=True, default=False, verbose_name='Published')\n", (2754, 2810), False, 'from django.db import migrations, models\n'), ((2847, 2912), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)', 'verbose_name': '"""Last modified"""'}), "(auto_now=True, verbose_name='Last modified')\n", (2867, 2912), False, 'from django.db import migrations, models\n'), ((2943, 3006), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)', 'verbose_name': '"""Created"""'}), "(auto_now_add=True, verbose_name='Created')\n", (2963, 3006), False, 'from django.db import migrations, models\n'), ((3045, 3099), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(64)', 'null': '(True)'}), '(blank=True, max_length=64, null=True)\n', (3061, 3099), False, 'from django.db import migrations, 
models\n'), ((3138, 3192), 'django.db.models.CharField', 'models.CharField', ([], {'blank': '(True)', 'max_length': '(64)', 'null': '(True)'}), '(blank=True, max_length=64, null=True)\n', (3154, 3192), False, 'from django.db import migrations, models\n'), ((3325, 3399), 'django.db.models.ManyToManyField', 'models.ManyToManyField', ([], {'through': '"""blog.AuthorDescription"""', 'to': '"""blog.Author"""'}), "(through='blog.AuthorDescription', to='blog.Author')\n", (3347, 3399), False, 'from django.db import migrations, models\n'), ((3429, 3543), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""media.Image"""', 'verbose_name': '"""Banner Image"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'media.Image', verbose_name='Banner Image')\n", (3446, 3543), False, 'from django.db import migrations, models\n'), ((3570, 3657), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""blog.Category"""'}), "(on_delete=django.db.models.deletion.CASCADE, to=\n 'blog.Category')\n", (3587, 3657), False, 'from django.db import migrations, models\n'), ((3942, 4035), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (3958, 4035), False, 'from django.db import migrations, models\n'), ((4059, 4090), 'django.db.models.CharField', 'models.CharField', ([], {'max_length': '(50)'}), '(max_length=50)\n', (4075, 4090), False, 'from django.db import migrations, models\n')]
|
# Copyright (c) 2019-2020 steelpy
# Python stdlib imports
# package imports
#from steelpy.codes.aisc.aisc360 import AISC_360_16
#from steelpy.codes.aisc.aisc335 import AISC_335_89
#from steelpy.codes.iso.ISO19902 import ISOCodeCheck
from steelpy.codes.piping.pipeline import Pipeline_Assessment
#from steelpy.codes.api.wsd_22ed import APIwsd22ed
from steelpy.codes.dnv.pannel import CodeCheckPanel
#
#from steelpy.process.units.main import Units
#from steelpy.material.material import Material
#from steelpy.sections.tubular import Tubular
from steelpy.codes.api.main import API_design
class CodeCheck:
    """Facade over the structural code-check implementations shipped with steelpy.

    Each accessor builds a fresh checker object on demand; the facade itself
    holds no state.
    """

    def __init__(self):
        """Create the facade; nothing is instantiated until a check is requested."""
        pass

    @property
    def API(self):
        """Return a new API design code-check object."""
        return API_design()

    @property
    def pipe(self):
        """Return a new pipeline assessment object."""
        return Pipeline_Assessment()

    def DNV_pannel(self):
        """Return a new DNV panel code-check object."""
        return CodeCheckPanel()
|
[
"steelpy.codes.dnv.pannel.CodeCheckPanel",
"steelpy.codes.piping.pipeline.Pipeline_Assessment",
"steelpy.codes.api.main.API_design"
] |
[((884, 896), 'steelpy.codes.api.main.API_design', 'API_design', ([], {}), '()\n', (894, 896), False, 'from steelpy.codes.api.main import API_design\n'), ((968, 989), 'steelpy.codes.piping.pipeline.Pipeline_Assessment', 'Pipeline_Assessment', ([], {}), '()\n', (987, 989), False, 'from steelpy.codes.piping.pipeline import Pipeline_Assessment\n'), ((1053, 1069), 'steelpy.codes.dnv.pannel.CodeCheckPanel', 'CodeCheckPanel', ([], {}), '()\n', (1067, 1069), False, 'from steelpy.codes.dnv.pannel import CodeCheckPanel\n')]
|
import os

# Install the transformers dependency before anything imports it.
os.system("pip install pytorch_transformers")

import nsml

print(nsml.DATASET_PATH)

# Launch semi-supervised training against the NSML-mounted dataset.
train_command = (
    'python ./code/train.py --n-labeled 10 --data-path '
    + nsml.DATASET_PATH
    + '/train/ --batch-size 4 --batch-size-u 8 --epochs 20'
      ' --val-iteration 1000 --lambda-u 1 --T 0.5 --alpha 16'
      ' --mix-layers-set 7 9 12 --lrmain 0.000005 --lrlast 0.00005'
)
os.system(train_command)
|
[
"os.system"
] |
[((10, 55), 'os.system', 'os.system', (['"""pip install pytorch_transformers"""'], {}), "('pip install pytorch_transformers')\n", (19, 55), False, 'import os\n'), ((94, 359), 'os.system', 'os.system', (["('python ./code/train.py --n-labeled 10 --data-path ' + nsml.DATASET_PATH +\n '/train/ --batch-size 4 --batch-size-u 8 --epochs 20 --val-iteration 1000 --lambda-u 1 --T 0.5 --alpha 16 --mix-layers-set 7 9 12 --lrmain 0.000005 --lrlast 0.00005'\n )"], {}), "('python ./code/train.py --n-labeled 10 --data-path ' + nsml.\n DATASET_PATH +\n '/train/ --batch-size 4 --batch-size-u 8 --epochs 20 --val-iteration 1000 --lambda-u 1 --T 0.5 --alpha 16 --mix-layers-set 7 9 12 --lrmain 0.000005 --lrlast 0.00005'\n )\n", (103, 359), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
import math
from selenium.webdriver.support.ui import Select
import os
import time
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
# Training-exercise page; presumably the price text updates after a delay — TODO confirm.
link = "http://suninjuly.github.io/explicit_wait2.html"
opt = webdriver.ChromeOptions()
# Run ChromeDriver with W3C mode disabled (legacy protocol).
opt.add_experimental_option('w3c', False)
browser = webdriver.Chrome(chrome_options=opt)
# Implicit wait of up to 5 for element lookups.
# NOTE(review): modern selenium's implicitly_wait takes a single argument;
# the extra 0.5 here relies on an older API — verify against the pinned version.
browser.implicitly_wait(5, 0.5)
browser.get(link)
button = browser.find_element_by_id("book")
# Explicit wait: block up to 12 seconds until the price element reads "10000 RUR".
price = WebDriverWait(browser, 12).until(EC.text_to_be_present_in_element((By.ID, "price"),"10000 RUR"))
button.click()
def calc(x):
    """Return the page's required answer, ln(|12 * sin(x)|), as a string."""
    sine_term = 12 * math.sin(int(x))
    return str(math.log(abs(sine_term)))
# Reveal the task input by clicking the primary button.
browser.find_element_by_class_name("btn-primary").click()
# new_window = browser.window_handles[1]
# browser.switch_to.window(new_window)
# Read the displayed number, compute the answer, and submit it.
x_element = browser.find_element_by_id("input_value")
x = x_element.text
y = calc(x)
browser.find_element_by_id("answer").click()
browser.find_element_by_id("answer").send_keys(y)
browser.find_element_by_id("solve").click()
|
[
"selenium.webdriver.Chrome",
"selenium.webdriver.ChromeOptions",
"selenium.webdriver.support.expected_conditions.text_to_be_present_in_element",
"selenium.webdriver.support.ui.WebDriverWait"
] |
[((367, 392), 'selenium.webdriver.ChromeOptions', 'webdriver.ChromeOptions', ([], {}), '()\n', (390, 392), False, 'from selenium import webdriver\n'), ((445, 481), 'selenium.webdriver.Chrome', 'webdriver.Chrome', ([], {'chrome_options': 'opt'}), '(chrome_options=opt)\n', (461, 481), False, 'from selenium import webdriver\n'), ((618, 681), 'selenium.webdriver.support.expected_conditions.text_to_be_present_in_element', 'EC.text_to_be_present_in_element', (["(By.ID, 'price')", '"""10000 RUR"""'], {}), "((By.ID, 'price'), '10000 RUR')\n", (650, 681), True, 'from selenium.webdriver.support import expected_conditions as EC\n'), ((585, 611), 'selenium.webdriver.support.ui.WebDriverWait', 'WebDriverWait', (['browser', '(12)'], {}), '(browser, 12)\n', (598, 611), False, 'from selenium.webdriver.support.ui import WebDriverWait\n')]
|
#!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: import_workload_create_instance
short_description: Create NBD exports of OpenStack volumes
extends_documentation_fragment: openstack
version_added: "2.9.0"
author: "OpenStack tenant migration tools (@os-migrate)"
description:
- "Take an instance from an OS-Migrate YAML structure, and export its volumes over NBD."
options:
auth:
description:
- Dictionary with parameters for chosen auth type on the destination cloud.
required: true
type: dict
auth_type:
description:
- Auth type plugin for destination OpenStack cloud. Can be omitted if using password authentication.
required: false
type: str
region_name:
description:
- Destination OpenStack region name. Can be omitted if using default region.
required: false
type: str
availability_zone:
description:
- Availability zone.
required: false
type: str
cloud:
description:
- Ignored. Present for backwards compatibility.
required: false
type: raw
validate_certs:
description:
- Validate HTTPS certificates when logging in to OpenStack.
required: false
type: bool
data:
description:
- Data structure with server parameters as loaded from OS-Migrate workloads YAML file.
required: true
type: dict
block_device_mapping:
description:
- A block_device_mapping_v2 structure from the transfer_volumes module.
- Used to attach destination volumes to the new instance in the right order.
required: true
type: list
elements: dict
'''
EXAMPLES = '''
main.yml:
- name: validate loaded resources
os_migrate.os_migrate.validate_resource_files:
paths:
- "{{ os_migrate_data_dir }}/workloads.yml"
register: workloads_file_validation
when: import_workloads_validate_file
- name: read workloads resource file
os_migrate.os_migrate.read_resources:
path: "{{ os_migrate_data_dir }}/workloads.yml"
register: read_workloads
- name: get source conversion host address
os_migrate.os_migrate.os_conversion_host_info:
auth:
auth_url: https://src-osp:13000/v3
username: migrate
password: <PASSWORD>
project_domain_id: default
project_name: migration-source
user_domain_id: default
server_id: ce4dda96-5d8e-4b67-aee2-9845cdc943fe
register: os_src_conversion_host_info
- name: get destination conversion host address
os_migrate.os_migrate.os_conversion_host_info:
auth:
auth_url: https://dest-osp:13000/v3
username: migrate
password: <PASSWORD>
project_domain_id: default
project_name: migration-destination
user_domain_id: default
server_id: 2d2afe57-ace5-4187-8fca-5f10f9059ba1
register: os_dst_conversion_host_info
- name: import workloads
include_tasks: workload.yml
loop: "{{ read_workloads.resources }}"
workload.yml:
- block:
- name: preliminary setup for workload import
os_migrate.os_migrate.import_workload_prelim:
auth:
auth_url: https://dest-osp:13000/v3
username: migrate
password: <PASSWORD>
project_domain_id: default
project_name: migration-destination
user_domain_id: default
validate_certs: False
src_conversion_host: "{{ os_src_conversion_host_info.openstack_conversion_host }}"
src_auth:
auth_url: https://src-osp:13000/v3
username: migrate
password: <PASSWORD>
project_domain_id: default
project_name: migration-source
user_domain_id: default
src_validate_certs: False
data: "{{ item }}"
data_dir: "{{ os_migrate_data_dir }}"
register: prelim
- debug:
msg:
- "{{ prelim.server_name }} log file: {{ prelim.log_file }}"
- "{{ prelim.server_name }} progress file: {{ prelim.state_file }}"
when: prelim.changed
- name: expose source volumes
os_migrate.os_migrate.import_workload_export_volumes:
auth: "{{ os_migrate_src_auth }}"
auth_type: "{{ os_migrate_src_auth_type|default(omit) }}"
region_name: "{{ os_migrate_src_region_name|default(omit) }}"
validate_certs: "{{ os_migrate_src_validate_certs|default(omit) }}"
ca_cert: "{{ os_migrate_src_ca_cert|default(omit) }}"
client_cert: "{{ os_migrate_src_client_cert|default(omit) }}"
client_key: "{{ os_migrate_src_client_key|default(omit) }}"
conversion_host:
"{{ os_src_conversion_host_info.openstack_conversion_host }}"
data: "{{ item }}"
log_file: "{{ os_migrate_data_dir }}/{{ prelim.server_name }}.log"
state_file: "{{ os_migrate_data_dir }}/{{ prelim.server_name }}.state"
ssh_key_path: "{{ os_migrate_conversion_keypair_private_path }}"
register: exports
when: prelim.changed
- name: transfer volumes to destination
os_migrate.os_migrate.import_workload_transfer_volumes:
auth: "{{ os_migrate_dst_auth }}"
auth_type: "{{ os_migrate_dst_auth_type|default(omit) }}"
region_name: "{{ os_migrate_dst_region_name|default(omit) }}"
validate_certs: "{{ os_migrate_dst_validate_certs|default(omit) }}"
ca_cert: "{{ os_migrate_dst_ca_cert|default(omit) }}"
client_cert: "{{ os_migrate_dst_client_cert|default(omit) }}"
client_key: "{{ os_migrate_dst_client_key|default(omit) }}"
data: "{{ item }}"
conversion_host:
"{{ os_dst_conversion_host_info.openstack_conversion_host }}"
ssh_key_path: "{{ os_migrate_conversion_keypair_private_path }}"
transfer_uuid: "{{ exports.transfer_uuid }}"
src_conversion_host_address:
"{{ os_src_conversion_host_info.openstack_conversion_host.address }}"
volume_map: "{{ exports.volume_map }}"
state_file: "{{ os_migrate_data_dir }}/{{ prelim.server_name }}.state"
log_file: "{{ os_migrate_data_dir }}/{{ prelim.server_name }}.log"
register: transfer
when: prelim.changed
- name: create destination instance
os_migrate.os_migrate.import_workload_create_instance:
auth: "{{ os_migrate_dst_auth }}"
auth_type: "{{ os_migrate_dst_auth_type|default(omit) }}"
region_name: "{{ os_migrate_dst_region_name|default(omit) }}"
validate_certs: "{{ os_migrate_dst_validate_certs|default(omit) }}"
ca_cert: "{{ os_migrate_dst_ca_cert|default(omit) }}"
client_cert: "{{ os_migrate_dst_client_cert|default(omit) }}"
client_key: "{{ os_migrate_dst_client_key|default(omit) }}"
data: "{{ item }}"
block_device_mapping: "{{ transfer.block_device_mapping }}"
register: os_migrate_destination_instance
when: prelim.changed
rescue:
- fail:
msg: "Failed to import {{ item.params.name }}!"
'''
RETURN = '''
server_id:
description: The ID of the newly created server.
returned: On successful creation of migrated server on destination cloud.
type: str
sample: 059635b7-451f-4a64-978a-7c2e9e4c15ff
'''
from ansible.module_utils.basic import AnsibleModule
# Import openstack module utils from ansible_collections.openstack.cloud.plugins as per ansible 3+
try:
from ansible_collections.openstack.cloud.plugins.module_utils.openstack \
import openstack_full_argument_spec, openstack_cloud_from_module
except ImportError:
# If this fails fall back to ansible < 3 imports
from ansible.module_utils.openstack \
import openstack_full_argument_spec, openstack_cloud_from_module
from ansible_collections.os_migrate.os_migrate.plugins.module_utils import server
def run_module():
    """Create the destination server from serialized data and report its ID.

    Reads ``data`` (an OS-Migrate server serialization) and
    ``block_device_mapping`` (produced by the volume-transfer step) from the
    module parameters, creates the server on the destination cloud, waits for
    it to reach ACTIVE state, and exits the module with the result.
    """
    argument_spec = openstack_full_argument_spec(
        auth=dict(type='dict', no_log=True, required=True),
        data=dict(type='dict', required=True),
        block_device_mapping=dict(type='list', required=True, elements='dict'),
    )
    module = AnsibleModule(argument_spec=argument_spec)
    result = dict(changed=False)

    sdk, conn = openstack_cloud_from_module(module)

    source_serialization = server.Server.from_data(module.params['data'])
    created = source_serialization.create(conn, module.params['block_device_mapping'])

    # Some info (e.g. flavor ID) only becomes available once the server is
    # ACTIVE, so block until then (up to 600s) or fail on ERROR state.
    created = conn.compute.wait_for_server(created, failures=['ERROR'], wait=600)

    destination_serialization = server.Server.from_sdk(conn, created)
    if created:
        result['changed'] = True
        result['server'] = destination_serialization.data
        result['server_id'] = created.id

    module.exit_json(**result)
def main():
    """Module entry point: delegate to run_module()."""
    run_module()


if __name__ == '__main__':
    main()
|
[
"ansible.module_utils.basic.AnsibleModule",
"ansible_collections.os_migrate.os_migrate.plugins.module_utils.server.Server.from_sdk",
"ansible_collections.os_migrate.os_migrate.plugins.module_utils.server.Server.from_data",
"ansible.module_utils.openstack.openstack_cloud_from_module"
] |
[((8102, 8144), 'ansible.module_utils.basic.AnsibleModule', 'AnsibleModule', ([], {'argument_spec': 'argument_spec'}), '(argument_spec=argument_spec)\n', (8115, 8144), False, 'from ansible.module_utils.basic import AnsibleModule\n'), ((8177, 8212), 'ansible.module_utils.openstack.openstack_cloud_from_module', 'openstack_cloud_from_module', (['module'], {}), '(module)\n', (8204, 8212), False, 'from ansible.module_utils.openstack import openstack_full_argument_spec, openstack_cloud_from_module\n'), ((8296, 8342), 'ansible_collections.os_migrate.os_migrate.plugins.module_utils.server.Server.from_data', 'server.Server.from_data', (["module.params['data']"], {}), "(module.params['data'])\n", (8319, 8342), False, 'from ansible_collections.os_migrate.os_migrate.plugins.module_utils import server\n'), ((8642, 8682), 'ansible_collections.os_migrate.os_migrate.plugins.module_utils.server.Server.from_sdk', 'server.Server.from_sdk', (['conn', 'sdk_server'], {}), '(conn, sdk_server)\n', (8664, 8682), False, 'from ansible_collections.os_migrate.os_migrate.plugins.module_utils import server\n')]
|
from abc import ABC
from typing import List, Optional, Union
import numpy as np
from allopy import OptData
from allopy.penalty import NoPenalty, Penalty
__all__ = ["AbstractObjectiveBuilder", "AbstractConstraintBuilder"]
class AbstractObjectiveBuilder(ABC):
    """Base class for objective builders operating over multiple scenarios.

    Validates that ``data`` and ``cvar_data`` agree in scenario count and
    asset count, and manages one penalty object per scenario (defaulting to
    :class:`NoPenalty`).

    Parameters
    ----------
    data
        One :class:`OptData` (or array coercible to one) per scenario.
    cvar_data
        CVaR counterpart of ``data``; must have the same number of scenarios
        and assets.
    rebalance
        Flag stored for use by concrete builders.
    time_unit
        Periodicity forwarded to :class:`OptData` when coercing raw arrays.
    """

    def __init__(self, data: List[OptData], cvar_data: List[OptData], rebalance: bool, time_unit):
        self.data, self.cvar_data = format_inputs(data, cvar_data, time_unit)
        self.rebalance = rebalance

        self.num_scenarios = len(data)
        assert self.num_scenarios > 0, "Provide data to the optimizer"
        assert self.num_scenarios == len(cvar_data), "data and cvar data must have same number of scenarios"

        self.num_assets = data[0].n_assets
        assert all(d.n_assets == self.num_assets for d in data), \
            f"number of assets in data should equal {self.num_assets}"
        assert all(d.n_assets == self.num_assets for d in cvar_data), \
            f"number of assets in cvar data should equal {self.num_assets}"

        # Default: no penalty applied in any scenario.
        self._penalties = [NoPenalty(self.num_assets)] * self.num_scenarios

    @property
    def penalties(self):
        """One penalty object per scenario."""
        return self._penalties

    @penalties.setter
    def penalties(self, penalties):
        assert penalties is None or isinstance(penalties, Penalty) or hasattr(penalties, "__iter__"), \
            "penalties can be None, a subclass of the Penalty class or a list which subclasses the Penalty class"

        if penalties is None:
            self._penalties = [NoPenalty(self.num_assets)] * self.num_scenarios
        elif isinstance(penalties, Penalty):
            # BUG FIX: the original tested isinstance(penalties, penalties),
            # which raises TypeError (second argument must be a type), so a
            # single Penalty instance could never be broadcast to all scenarios.
            self._penalties = [penalties] * self.num_scenarios
        else:
            penalties = list(penalties)
            assert len(penalties) == self.num_scenarios, "number of penalties given must match number of scenarios"
            assert all(isinstance(p, Penalty) for p in penalties), "non-Penalty instance detected"
            self._penalties = penalties
class AbstractConstraintBuilder(ABC):
    """Base class for constraint builders operating over multiple scenarios."""

    def __init__(self, data: List[OptData], cvar_data: List[OptData], rebalance: bool, time_unit):
        formatted_data, formatted_cvar = format_inputs(data, cvar_data, time_unit)
        self.data = formatted_data
        self.cvar_data = formatted_cvar
        self.rebalance = rebalance
        self.num_scenarios = len(self.data)
def format_inputs(data: List[Union[OptData, np.ndarray]],
                  cvar_data: Optional[List[Union[OptData, np.ndarray]]],
                  time_unit: int):
    """Normalize optimizer inputs to lists of :class:`OptData`.

    Every element of ``data`` (and of ``cvar_data``, when given) that is not
    already an :class:`OptData` is wrapped as ``OptData(item, time_unit)``.
    When ``cvar_data`` is None it is derived from ``data`` via
    ``d.cut_by_horizon(3)`` for each scenario.

    Returns
    -------
    tuple
        The normalized ``(data, cvar_data)`` pair.
    """
    # BUG FIX: the original tested isinstance(data, OptData) — the whole list
    # rather than each element — so raw arrays were never converted.
    data = [d if isinstance(d, OptData) else OptData(d, time_unit) for d in data]

    if cvar_data is None:
        # BUG FIX: the original returned only this derived list here, breaking
        # every caller that unpacks ``data, cvar_data = format_inputs(...)``.
        cvar_data = [d.cut_by_horizon(3) for d in data]
    else:
        cvar_data = [c if isinstance(c, OptData) else OptData(c, time_unit) for c in cvar_data]

    return data, cvar_data
|
[
"allopy.penalty.NoPenalty",
"allopy.OptData"
] |
[((2494, 2515), 'allopy.OptData', 'OptData', (['d', 'time_unit'], {}), '(d, time_unit)\n', (2501, 2515), False, 'from allopy import OptData\n'), ((1054, 1080), 'allopy.penalty.NoPenalty', 'NoPenalty', (['self.num_assets'], {}), '(self.num_assets)\n', (1063, 1080), False, 'from allopy.penalty import NoPenalty, Penalty\n'), ((2673, 2694), 'allopy.OptData', 'OptData', (['c', 'time_unit'], {}), '(c, time_unit)\n', (2680, 2694), False, 'from allopy import OptData\n'), ((1514, 1540), 'allopy.penalty.NoPenalty', 'NoPenalty', (['self.num_assets'], {}), '(self.num_assets)\n', (1523, 1540), False, 'from allopy.penalty import NoPenalty, Penalty\n')]
|
import os

import pandas as pd
import matplotlib.pyplot as plt

# Load the UCI wine data set; the raw file ships without a header row.
wine_df = pd.read_csv(filepath_or_buffer='~/class5-homework/wine.data',
                      sep=',',
                      header=None)
wine_df.columns = ['Class', 'Alcohol', 'Malic_Acid', 'Ash', 'Alcalinity_of_Ash', 'Magnesium',
                   'Total_Phenols', 'Flavanoids', 'Nonflavanoid_Phenols', 'Proanthocyanins',
                   'Color_Intensity', 'Hue', 'OD280_OD315_of_Diluted_Wines', 'Proline']
# Drop the class label; only the numeric features get plotted.
wine_B = wine_df.drop(['Class'], axis=1)
os.makedirs('graphs', exist_ok=True)

# One line plot per feature, saved as graphs/<feature>_by_index_plot.png.
# Replaces 13 copy-pasted plotting stanzas with a single loop that produces
# byte-identical titles, labels, and output file names.
for column in wine_B.columns:
    plt.plot(wine_B[column], color='g')
    plt.title(f'{column} by Index')
    plt.xlabel('Index')
    plt.ylabel(column)
    plt.savefig(f'graphs/{column}_by_index_plot.png', format='png')
    plt.clf()
|
[
"matplotlib.pyplot.savefig",
"os.makedirs",
"pandas.read_csv",
"matplotlib.pyplot.ylabel",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.clf",
"matplotlib.pyplot.title"
] |
[((73, 160), 'pandas.read_csv', 'pd.read_csv', ([], {'filepath_or_buffer': '"""~/class5-homework/wine.data"""', 'sep': '""","""', 'header': 'None'}), "(filepath_or_buffer='~/class5-homework/wine.data', sep=',',\n header=None)\n", (84, 160), True, 'import pandas as pd\n'), ((493, 529), 'os.makedirs', 'os.makedirs', (['"""graphs"""'], {'exist_ok': '(True)'}), "('graphs', exist_ok=True)\n", (504, 529), False, 'import os\n'), ((557, 595), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Alcohol']"], {'color': '"""g"""'}), "(wine_B['Alcohol'], color='g')\n", (565, 595), True, 'import matplotlib.pyplot as plt\n'), ((596, 625), 'matplotlib.pyplot.title', 'plt.title', (['"""Alcohol by Index"""'], {}), "('Alcohol by Index')\n", (605, 625), True, 'import matplotlib.pyplot as plt\n'), ((626, 645), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (636, 645), True, 'import matplotlib.pyplot as plt\n'), ((646, 667), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Alcohol"""'], {}), "('Alcohol')\n", (656, 667), True, 'import matplotlib.pyplot as plt\n'), ((668, 730), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Alcohol_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Alcohol_by_index_plot.png', format='png')\n", (679, 730), True, 'import matplotlib.pyplot as plt\n'), ((731, 740), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (738, 740), True, 'import matplotlib.pyplot as plt\n'), ((771, 812), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Malic_Acid']"], {'color': '"""g"""'}), "(wine_B['Malic_Acid'], color='g')\n", (779, 812), True, 'import matplotlib.pyplot as plt\n'), ((813, 845), 'matplotlib.pyplot.title', 'plt.title', (['"""Malic_Acid by Index"""'], {}), "('Malic_Acid by Index')\n", (822, 845), True, 'import matplotlib.pyplot as plt\n'), ((846, 865), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (856, 865), True, 'import matplotlib.pyplot as plt\n'), ((866, 890), 
'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Malic_Acid"""'], {}), "('Malic_Acid')\n", (876, 890), True, 'import matplotlib.pyplot as plt\n'), ((891, 956), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Malic_Acid_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Malic_Acid_by_index_plot.png', format='png')\n", (902, 956), True, 'import matplotlib.pyplot as plt\n'), ((957, 966), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (964, 966), True, 'import matplotlib.pyplot as plt\n'), ((990, 1024), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Ash']"], {'color': '"""g"""'}), "(wine_B['Ash'], color='g')\n", (998, 1024), True, 'import matplotlib.pyplot as plt\n'), ((1025, 1050), 'matplotlib.pyplot.title', 'plt.title', (['"""Ash by Index"""'], {}), "('Ash by Index')\n", (1034, 1050), True, 'import matplotlib.pyplot as plt\n'), ((1051, 1070), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (1061, 1070), True, 'import matplotlib.pyplot as plt\n'), ((1071, 1088), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Ash"""'], {}), "('Ash')\n", (1081, 1088), True, 'import matplotlib.pyplot as plt\n'), ((1089, 1147), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Ash_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Ash_by_index_plot.png', format='png')\n", (1100, 1147), True, 'import matplotlib.pyplot as plt\n'), ((1148, 1157), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1155, 1157), True, 'import matplotlib.pyplot as plt\n'), ((1195, 1243), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Alcalinity_of_Ash']"], {'color': '"""g"""'}), "(wine_B['Alcalinity_of_Ash'], color='g')\n", (1203, 1243), True, 'import matplotlib.pyplot as plt\n'), ((1244, 1283), 'matplotlib.pyplot.title', 'plt.title', (['"""Alcalinity_of_Ash by Index"""'], {}), "('Alcalinity_of_Ash by Index')\n", (1253, 1283), True, 'import matplotlib.pyplot as plt\n'), ((1284, 1303), 'matplotlib.pyplot.xlabel', 'plt.xlabel', 
(['"""Index"""'], {}), "('Index')\n", (1294, 1303), True, 'import matplotlib.pyplot as plt\n'), ((1304, 1335), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Alcalinity_of_Ash"""'], {}), "('Alcalinity_of_Ash')\n", (1314, 1335), True, 'import matplotlib.pyplot as plt\n'), ((1336, 1408), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Alcalinity_of_Ash_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Alcalinity_of_Ash_by_index_plot.png', format='png')\n", (1347, 1408), True, 'import matplotlib.pyplot as plt\n'), ((1409, 1418), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1416, 1418), True, 'import matplotlib.pyplot as plt\n'), ((1448, 1488), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Magnesium']"], {'color': '"""g"""'}), "(wine_B['Magnesium'], color='g')\n", (1456, 1488), True, 'import matplotlib.pyplot as plt\n'), ((1489, 1520), 'matplotlib.pyplot.title', 'plt.title', (['"""Magnesium by Index"""'], {}), "('Magnesium by Index')\n", (1498, 1520), True, 'import matplotlib.pyplot as plt\n'), ((1521, 1540), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (1531, 1540), True, 'import matplotlib.pyplot as plt\n'), ((1541, 1564), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Magnesium"""'], {}), "('Magnesium')\n", (1551, 1564), True, 'import matplotlib.pyplot as plt\n'), ((1565, 1629), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Magnesium_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Magnesium_by_index_plot.png', format='png')\n", (1576, 1629), True, 'import matplotlib.pyplot as plt\n'), ((1630, 1639), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1637, 1639), True, 'import matplotlib.pyplot as plt\n'), ((1673, 1717), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Total_Phenols']"], {'color': '"""g"""'}), "(wine_B['Total_Phenols'], color='g')\n", (1681, 1717), True, 'import matplotlib.pyplot as plt\n'), ((1718, 1753), 'matplotlib.pyplot.title', 'plt.title', 
(['"""Total_Phenols by Index"""'], {}), "('Total_Phenols by Index')\n", (1727, 1753), True, 'import matplotlib.pyplot as plt\n'), ((1754, 1773), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (1764, 1773), True, 'import matplotlib.pyplot as plt\n'), ((1774, 1801), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Total_Phenols"""'], {}), "('Total_Phenols')\n", (1784, 1801), True, 'import matplotlib.pyplot as plt\n'), ((1802, 1870), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Total_Phenols_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Total_Phenols_by_index_plot.png', format='png')\n", (1813, 1870), True, 'import matplotlib.pyplot as plt\n'), ((1871, 1880), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (1878, 1880), True, 'import matplotlib.pyplot as plt\n'), ((1911, 1952), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Flavanoids']"], {'color': '"""g"""'}), "(wine_B['Flavanoids'], color='g')\n", (1919, 1952), True, 'import matplotlib.pyplot as plt\n'), ((1953, 1985), 'matplotlib.pyplot.title', 'plt.title', (['"""Flavanoids by Index"""'], {}), "('Flavanoids by Index')\n", (1962, 1985), True, 'import matplotlib.pyplot as plt\n'), ((1986, 2005), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (1996, 2005), True, 'import matplotlib.pyplot as plt\n'), ((2006, 2030), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Flavanoids"""'], {}), "('Flavanoids')\n", (2016, 2030), True, 'import matplotlib.pyplot as plt\n'), ((2031, 2096), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Flavanoids_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Flavanoids_by_index_plot.png', format='png')\n", (2042, 2096), True, 'import matplotlib.pyplot as plt\n'), ((2097, 2106), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2104, 2106), True, 'import matplotlib.pyplot as plt\n'), ((2147, 2198), 'matplotlib.pyplot.plot', 'plt.plot', 
(["wine_B['Nonflavanoid_Phenols']"], {'color': '"""g"""'}), "(wine_B['Nonflavanoid_Phenols'], color='g')\n", (2155, 2198), True, 'import matplotlib.pyplot as plt\n'), ((2199, 2241), 'matplotlib.pyplot.title', 'plt.title', (['"""Nonflavanoid_Phenols by Index"""'], {}), "('Nonflavanoid_Phenols by Index')\n", (2208, 2241), True, 'import matplotlib.pyplot as plt\n'), ((2242, 2261), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (2252, 2261), True, 'import matplotlib.pyplot as plt\n'), ((2262, 2296), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Nonflavanoid_Phenols"""'], {}), "('Nonflavanoid_Phenols')\n", (2272, 2296), True, 'import matplotlib.pyplot as plt\n'), ((2297, 2372), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Nonflavanoid_Phenols_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Nonflavanoid_Phenols_by_index_plot.png', format='png')\n", (2308, 2372), True, 'import matplotlib.pyplot as plt\n'), ((2373, 2382), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2380, 2382), True, 'import matplotlib.pyplot as plt\n'), ((2418, 2464), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Proanthocyanins']"], {'color': '"""g"""'}), "(wine_B['Proanthocyanins'], color='g')\n", (2426, 2464), True, 'import matplotlib.pyplot as plt\n'), ((2465, 2502), 'matplotlib.pyplot.title', 'plt.title', (['"""Proanthocyanins by Index"""'], {}), "('Proanthocyanins by Index')\n", (2474, 2502), True, 'import matplotlib.pyplot as plt\n'), ((2503, 2522), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (2513, 2522), True, 'import matplotlib.pyplot as plt\n'), ((2523, 2552), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Proanthocyanins"""'], {}), "('Proanthocyanins')\n", (2533, 2552), True, 'import matplotlib.pyplot as plt\n'), ((2553, 2623), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Proanthocyanins_by_index_plot.png"""'], {'format': '"""png"""'}), 
"(f'graphs/Proanthocyanins_by_index_plot.png', format='png')\n", (2564, 2623), True, 'import matplotlib.pyplot as plt\n'), ((2624, 2633), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2631, 2633), True, 'import matplotlib.pyplot as plt\n'), ((2669, 2715), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Color_Intensity']"], {'color': '"""g"""'}), "(wine_B['Color_Intensity'], color='g')\n", (2677, 2715), True, 'import matplotlib.pyplot as plt\n'), ((2716, 2753), 'matplotlib.pyplot.title', 'plt.title', (['"""Color_Intensity by Index"""'], {}), "('Color_Intensity by Index')\n", (2725, 2753), True, 'import matplotlib.pyplot as plt\n'), ((2754, 2773), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (2764, 2773), True, 'import matplotlib.pyplot as plt\n'), ((2774, 2803), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Color_Intensity"""'], {}), "('Color_Intensity')\n", (2784, 2803), True, 'import matplotlib.pyplot as plt\n'), ((2804, 2874), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Color_Intensity_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Color_Intensity_by_index_plot.png', format='png')\n", (2815, 2874), True, 'import matplotlib.pyplot as plt\n'), ((2875, 2884), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (2882, 2884), True, 'import matplotlib.pyplot as plt\n'), ((2908, 2942), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Hue']"], {'color': '"""g"""'}), "(wine_B['Hue'], color='g')\n", (2916, 2942), True, 'import matplotlib.pyplot as plt\n'), ((2943, 2968), 'matplotlib.pyplot.title', 'plt.title', (['"""Hue by Index"""'], {}), "('Hue by Index')\n", (2952, 2968), True, 'import matplotlib.pyplot as plt\n'), ((2969, 2988), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (2979, 2988), True, 'import matplotlib.pyplot as plt\n'), ((2989, 3006), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Hue"""'], {}), "('Hue')\n", (2999, 3006), True, 'import 
matplotlib.pyplot as plt\n'), ((3007, 3065), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Hue_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Hue_by_index_plot.png', format='png')\n", (3018, 3065), True, 'import matplotlib.pyplot as plt\n'), ((3066, 3075), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3073, 3075), True, 'import matplotlib.pyplot as plt\n'), ((3124, 3183), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['OD280_OD315_of_Diluted_Wines']"], {'color': '"""g"""'}), "(wine_B['OD280_OD315_of_Diluted_Wines'], color='g')\n", (3132, 3183), True, 'import matplotlib.pyplot as plt\n'), ((3184, 3234), 'matplotlib.pyplot.title', 'plt.title', (['"""OD280_OD315_of_Diluted_Wines by Index"""'], {}), "('OD280_OD315_of_Diluted_Wines by Index')\n", (3193, 3234), True, 'import matplotlib.pyplot as plt\n'), ((3235, 3254), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (3245, 3254), True, 'import matplotlib.pyplot as plt\n'), ((3255, 3297), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""OD280_OD315_of_Diluted_Wines"""'], {}), "('OD280_OD315_of_Diluted_Wines')\n", (3265, 3297), True, 'import matplotlib.pyplot as plt\n'), ((3298, 3385), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/OD280_OD315_of_Diluted_Wines_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/OD280_OD315_of_Diluted_Wines_by_index_plot.png',\n format='png')\n", (3309, 3385), True, 'import matplotlib.pyplot as plt\n'), ((3382, 3391), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3389, 3391), True, 'import matplotlib.pyplot as plt\n'), ((3419, 3457), 'matplotlib.pyplot.plot', 'plt.plot', (["wine_B['Proline']"], {'color': '"""g"""'}), "(wine_B['Proline'], color='g')\n", (3427, 3457), True, 'import matplotlib.pyplot as plt\n'), ((3458, 3487), 'matplotlib.pyplot.title', 'plt.title', (['"""Proline by Index"""'], {}), "('Proline by Index')\n", (3467, 3487), True, 'import matplotlib.pyplot as plt\n'), ((3488, 3507), 
'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Index"""'], {}), "('Index')\n", (3498, 3507), True, 'import matplotlib.pyplot as plt\n'), ((3508, 3529), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Proline"""'], {}), "('Proline')\n", (3518, 3529), True, 'import matplotlib.pyplot as plt\n'), ((3530, 3592), 'matplotlib.pyplot.savefig', 'plt.savefig', (['f"""graphs/Proline_by_index_plot.png"""'], {'format': '"""png"""'}), "(f'graphs/Proline_by_index_plot.png', format='png')\n", (3541, 3592), True, 'import matplotlib.pyplot as plt\n'), ((3593, 3602), 'matplotlib.pyplot.clf', 'plt.clf', ([], {}), '()\n', (3600, 3602), True, 'import matplotlib.pyplot as plt\n')]
|
import numpy as np
"""
Contains preprocessing code for creating additional information based on MRI volumes and true segmentation maps (asegs).
Eg. weight masks for median frequency class weighing, edge weighing etc.
"""
def create_weight_mask(aseg):
    """
    Main function for calculating the weight mask of a segmentation map for the
    loss function. Currently only Median Frequency Weighing is implemented;
    other weight types can be added additively to the `weights` variable.

    Args:
        aseg (numpy.ndarray): Segmentation map with shape h x w x d, or with an
            extra leading axis (shape _ x h x w x d).

    Returns:
        numpy.ndarray: Weight mask of shape h x w x d.

    Raises:
        ValueError: if `aseg` is neither 3- nor 4-dimensional.
    """
    if len(aseg.shape) == 4:
        _, h, w, d = aseg.shape
    elif len(aseg.shape) == 3:
        h, w, d = aseg.shape
    else:
        # Fail fast with a clear message instead of an UnboundLocalError on h/w/d.
        raise ValueError(f"aseg must be 3D or 4D, got shape {aseg.shape}")
    weights = np.zeros((h, w, d), dtype=float)  # Container ndarray of zeros for weights
    weights += median_freq_class_weighing(aseg)  # Add median frequency weights
    # Further weights (eg. extra weights for region borders) can be added here
    # Eg. weights += edge_weights(aseg)
    return weights
def median_freq_class_weighing(aseg):
    """
    Median Frequency Weighing, robust to classes that are absent from the sample.

    Args:
        aseg (numpy.ndarray): Segmentation map with shape l x w x d

    Returns:
        numpy.ndarray: Median frequency weighted mask of same shape as aseg
    """
    classes, class_counts = np.unique(aseg, return_counts=True)
    if len(aseg.shape) == 4:
        _, h, w, d = aseg.shape
    elif len(aseg.shape) == 3:
        h, w, d = aseg.shape
    # Per-class weight: median class frequency divided by the class frequency.
    per_class_weight = np.median(class_counts) / class_counts
    labels = aseg.astype(int)
    # Look-up table guarding against absent classes: labels that do not occur
    # in the sample map to -1 instead of shifting subsequent class indices.
    lut = np.full(int(classes.max()) + 1, -1, dtype=int)
    lut[classes.astype(int)] = np.arange(len(classes))
    # Translate every voxel label to its class weight and restore the shape.
    w_mask = per_class_weight[lut[labels.ravel()]].reshape(h, w, d)
    return w_mask
# Label mapping functions (to aparc (eval) and to label (train))
def map_label2aparc_aseg(mapped_aseg):
    """
    Look-up-table mapping from compact label space back to the
    aparc.DKTatlas+aseg label space.

    :param np.ndarray mapped_aseg: label space segmentation (aparc.DKTatlas + aseg)
    :return: segmentation volume with FreeSurfer label values, same shape
    """
    # Index i in the compact label space corresponds to FreeSurfer label lut[i].
    lut = np.array([
        0, 2, 4, 5, 7, 8, 10, 11, 12, 13, 14,
        15, 16, 17, 18, 24, 26, 28, 31, 41, 43, 44,
        46, 47, 49, 50, 51, 52, 53, 54, 58, 60, 63,
        77, 1002, 1003, 1005, 1006, 1007, 1008, 1009, 1010, 1011,
        1012, 1013, 1014, 1015, 1016, 1017, 1018, 1019, 1020, 1021, 1022,
        1023, 1024, 1025, 1026, 1027, 1028, 1029, 1030, 1031, 1034, 1035,
        2002, 2005, 2010, 2012, 2013, 2014, 2016, 2017, 2021, 2022, 2023,
        2024, 2025, 2028,
    ])
    h, w, d = mapped_aseg.shape
    return lut[mapped_aseg.ravel()].reshape((h, w, d))
# if __name__ == "__main__":
# #a = np.random.randint(0, 5, size=(10,10,10))
# #b = np.random.randint(5, 10, size=(10000))
#
# #map_masks_into_5_classes(np.random.randint(0, 250, size=(256, 256, 256)))
#
# import nibabel as nib
# from data_utils.process_mgz_into_hdf5 import map_aparc_aseg2label, map_aseg2label
# path = r"abide_ii/sub-28675/mri/aparc.DKTatlas+aseg.mgz"
# aseg = nib.load(path).get_data()
# labels_full, _ = map_aparc_aseg2label(aseg) # only for 79 classes case
# # labels_full, _ = map_aseg2label(aseg) # only for 37 classes case
# aseg = labels_full
# # print(aseg.shape)
# median_freq_class_weighing(aseg)
# # print(edge_weighing(aseg, 1.5))
|
[
"numpy.median",
"numpy.unique",
"numpy.array",
"numpy.zeros",
"numpy.zeros_like"
] |
[((756, 788), 'numpy.zeros', 'np.zeros', (['(h, w, d)'], {'dtype': 'float'}), '((h, w, d), dtype=float)\n', (764, 788), True, 'import numpy as np\n'), ((1441, 1476), 'numpy.unique', 'np.unique', (['aseg'], {'return_counts': '(True)'}), '(aseg, return_counts=True)\n', (1450, 1476), True, 'import numpy as np\n'), ((2410, 2436), 'numpy.zeros_like', 'np.zeros_like', (['mapped_aseg'], {}), '(mapped_aseg)\n', (2423, 2436), True, 'import numpy as np\n'), ((2450, 2881), 'numpy.array', 'np.array', (['[0, 2, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 24, 26, 28, 31, 41, \n 43, 44, 46, 47, 49, 50, 51, 52, 53, 54, 58, 60, 63, 77, 1002, 1003, \n 1005, 1006, 1007, 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015, 1016,\n 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027, 1028,\n 1029, 1030, 1031, 1034, 1035, 2002, 2005, 2010, 2012, 2013, 2014, 2016,\n 2017, 2021, 2022, 2023, 2024, 2025, 2028]'], {}), '([0, 2, 4, 5, 7, 8, 10, 11, 12, 13, 14, 15, 16, 17, 18, 24, 26, 28,\n 31, 41, 43, 44, 46, 47, 49, 50, 51, 52, 53, 54, 58, 60, 63, 77, 1002, \n 1003, 1005, 1006, 1007, 1008, 1009, 1010, 1011, 1012, 1013, 1014, 1015,\n 1016, 1017, 1018, 1019, 1020, 1021, 1022, 1023, 1024, 1025, 1026, 1027,\n 1028, 1029, 1030, 1031, 1034, 1035, 2002, 2005, 2010, 2012, 2013, 2014,\n 2016, 2017, 2021, 2022, 2023, 2024, 2025, 2028])\n', (2458, 2881), True, 'import numpy as np\n'), ((1616, 1633), 'numpy.median', 'np.median', (['counts'], {}), '(counts)\n', (1625, 1633), True, 'import numpy as np\n')]
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
###
# Copyright (2016-2020) Hewlett Packard Enterprise Development LP
#
# Licensed under the Apache License, Version 2.0 (the "License");
# You may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
###
import pytest
import mock
from copy import deepcopy
from hpe_test_utils import OneViewBaseFactsTest
from oneview_module_loader import HypervisorClusterProfileFactsModule
# URI of the cluster profile fixture used by the option tests.
PROFILE_URI = '/rest/hypervisor-cluster-profiles/57d3af2a-b6d2-4446-8645-f38dd808ea4d'

# Ansible module parameter fixtures for the different lookup modes.
PARAMS_GET_ALL = {'config': 'config.json'}

PARAMS_GET_BY_NAME = {
    'config': 'config.json',
    'name': 'Test Cluster Profile',
}

PARAMS_GET_BY_URI = {
    'config': 'config.json',
    'uri': '/rest/test/123',
}

PARAMS_WITH_OPTIONS = {
    'config': 'config.json',
    'name': 'Test Cluster Profile',
    'options': ['compliancePreview'],
}
@pytest.mark.resource(TestHypervisorClusterProfileFactsModule='hypervisor_cluster_profiles')
class TestHypervisorClusterProfileFactsModule(OneViewBaseFactsTest):
    """Unit tests for the hypervisor cluster profile facts module.

    FactsParamsTestCase has common tests for the parameters support.
    """

    def test_should_get_all_cluster_profiles(self):
        all_profiles = [{"name": "Cluster Profile Name 1"},
                        {"name": "Cluster Profile Name 2"}]
        self.mock_ov_client.hypervisor_cluster_profiles.get_all.return_value = all_profiles
        self.mock_ansible_module.params = deepcopy(PARAMS_GET_ALL)

        HypervisorClusterProfileFactsModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts={'hypervisor_cluster_profiles': all_profiles})

    def test_should_get_by_name(self):
        profile_data = {"name": "Test Cluster Profile", 'uri': '/rest/test/123'}
        resource = mock.Mock()
        resource.data = profile_data
        self.mock_ov_client.hypervisor_cluster_profiles.get_by_name.return_value = resource
        self.mock_ansible_module.params = deepcopy(PARAMS_GET_BY_NAME)

        HypervisorClusterProfileFactsModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts={'hypervisor_cluster_profiles': [profile_data]})

    def test_should_get_by_uri(self):
        profile_data = {"name": "Test Cluster Profile", 'uri': '/rest/test/123'}
        resource = mock.Mock()
        resource.data = profile_data
        self.mock_ov_client.hypervisor_cluster_profiles.get_by_uri.return_value = resource
        self.mock_ansible_module.params = deepcopy(PARAMS_GET_BY_URI)

        HypervisorClusterProfileFactsModule().run()

        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts={'hypervisor_cluster_profiles': [profile_data]})

    def test_should_get_cluster_profile_by_name_with_all_options(self):
        compliance_preview = {'subresource': 'value'}
        profiles_client = self.mock_ov_client.hypervisor_cluster_profiles
        profiles_client.data = {"name": "Test Cluster Profile", "uri": PROFILE_URI}
        # get_by_name returns the client itself so the module reads `.data` back.
        profiles_client.get_by_name.return_value = profiles_client
        profiles_client.get_compliance_preview.return_value = compliance_preview
        self.mock_ansible_module.params = deepcopy(PARAMS_WITH_OPTIONS)

        HypervisorClusterProfileFactsModule().run()

        profiles_client.get_compliance_preview.assert_called_once_with()
        self.mock_ansible_module.exit_json.assert_called_once_with(
            changed=False,
            ansible_facts={'hypervisor_cluster_profiles': [{'name': 'Test Cluster Profile', 'uri': PROFILE_URI}],
                           'hypervisor_cluster_profile_compliance_preview': compliance_preview,
                           }
        )
# Allow running this test module directly with `python <file>` (outside pytest).
if __name__ == '__main__':
    pytest.main([__file__])
|
[
"oneview_module_loader.HypervisorClusterProfileFactsModule",
"mock.Mock",
"pytest.mark.resource",
"pytest.main",
"copy.deepcopy"
] |
[((1278, 1374), 'pytest.mark.resource', 'pytest.mark.resource', ([], {'TestHypervisorClusterProfileFactsModule': '"""hypervisor_cluster_profiles"""'}), "(TestHypervisorClusterProfileFactsModule=\n 'hypervisor_cluster_profiles')\n", (1298, 1374), False, 'import pytest\n'), ((4357, 4380), 'pytest.main', 'pytest.main', (['[__file__]'], {}), '([__file__])\n', (4368, 4380), False, 'import pytest\n'), ((1850, 1874), 'copy.deepcopy', 'deepcopy', (['PARAMS_GET_ALL'], {}), '(PARAMS_GET_ALL)\n', (1858, 1874), False, 'from copy import deepcopy\n'), ((2241, 2252), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2250, 2252), False, 'import mock\n'), ((2410, 2438), 'copy.deepcopy', 'deepcopy', (['PARAMS_GET_BY_NAME'], {}), '(PARAMS_GET_BY_NAME)\n', (2418, 2438), False, 'from copy import deepcopy\n'), ((2805, 2816), 'mock.Mock', 'mock.Mock', ([], {}), '()\n', (2814, 2816), False, 'import mock\n'), ((2981, 3008), 'copy.deepcopy', 'deepcopy', (['PARAMS_GET_BY_URI'], {}), '(PARAMS_GET_BY_URI)\n', (2989, 3008), False, 'from copy import deepcopy\n'), ((3790, 3819), 'copy.deepcopy', 'deepcopy', (['PARAMS_WITH_OPTIONS'], {}), '(PARAMS_WITH_OPTIONS)\n', (3798, 3819), False, 'from copy import deepcopy\n'), ((1884, 1921), 'oneview_module_loader.HypervisorClusterProfileFactsModule', 'HypervisorClusterProfileFactsModule', ([], {}), '()\n', (1919, 1921), False, 'from oneview_module_loader import HypervisorClusterProfileFactsModule\n'), ((2448, 2485), 'oneview_module_loader.HypervisorClusterProfileFactsModule', 'HypervisorClusterProfileFactsModule', ([], {}), '()\n', (2483, 2485), False, 'from oneview_module_loader import HypervisorClusterProfileFactsModule\n'), ((3018, 3055), 'oneview_module_loader.HypervisorClusterProfileFactsModule', 'HypervisorClusterProfileFactsModule', ([], {}), '()\n', (3053, 3055), False, 'from oneview_module_loader import HypervisorClusterProfileFactsModule\n'), ((3829, 3866), 'oneview_module_loader.HypervisorClusterProfileFactsModule', 
'HypervisorClusterProfileFactsModule', ([], {}), '()\n', (3864, 3866), False, 'from oneview_module_loader import HypervisorClusterProfileFactsModule\n')]
|
import os
import scipy
import numpy as np
import pandas as pd
import torch
from torch.autograd import Variable
def predict_batch(net, inputs):
    # Forward one batch through the network and return the output as a numpy array.
    # NOTE(review): uses the legacy autograd API (`Variable` with `volatile=True`,
    # removed in modern PyTorch) and unconditionally moves inputs to CUDA --
    # confirm the target environment before reuse.
    v = Variable(inputs.cuda(), volatile=True)
    return net(v).data.cpu().numpy()
def get_probabilities(model, loader):
    """Run *model* in eval mode over every batch in *loader* and stack the outputs.

    :param model: network with an ``eval()`` method, consumed via ``predict_batch``
    :param loader: iterable of batches whose first element is the input tensor
    :return: numpy array of vertically stacked per-batch predictions
    """
    model.eval()
    # BUG fix: np.vstack requires a sequence; passing a generator expression is
    # deprecated since NumPy 1.16 and rejected by current NumPy releases.
    return np.vstack([predict_batch(model, data[0]) for data in loader])
def get_predictions(probs, thresholds):
    """Binarize probabilities against per-class thresholds.

    Entries with ``probs >= thresholds`` become 1, the rest 0 (dtype uint8).
    """
    return (probs >= thresholds).astype('uint8')
def get_argmax(output):
    """Return the per-row argmax indices of *output* as a flat numpy array."""
    indices = torch.max(output, dim=1)[1]
    return indices.data.cpu().view(-1).numpy()
def get_targets(loader):
    """Collect every batch's targets from *loader* into one stacked numpy array.

    Targets are taken from the second element of each batch; 1-D batches are
    reshaped to column vectors before stacking.
    """
    collected = None
    for batch in loader:
        labels = batch[1]
        if collected is None:
            # Seed with an empty array matching the batch shape, but zero rows.
            seed_shape = list(labels.size())
            seed_shape[0] = 0
            collected = np.empty(seed_shape)
        if len(labels.size()) == 1:
            labels = labels.view(-1, 1)
        collected = np.vstack([collected, labels.numpy()])
    return collected
def ensemble_with_method(arr, method):
    """Combine stacked model predictions (axis 0 = model) into one array.

    NOTE(review): `c` is not imported anywhere in this file -- presumably a
    constants module defining MEAN/GMEAN/VOTE; confirm it is in scope at call
    time, otherwise this raises NameError.
    """
    if method == c.MEAN:
        return np.mean(arr, axis=0)
    elif method == c.GMEAN:
        return scipy.stats.mstats.gmean(arr, axis=0)
    elif method == c.VOTE:
        # Majority vote: mode over models, unwrap the mode values from the result.
        return scipy.stats.mode(arr, axis=0)[0][0]
    raise Exception("Operation not found")
|
[
"numpy.copy",
"numpy.mean",
"scipy.stats.mode",
"torch.max",
"numpy.empty",
"numpy.vstack",
"scipy.stats.mstats.gmean"
] |
[((411, 425), 'numpy.copy', 'np.copy', (['probs'], {}), '(probs)\n', (418, 425), True, 'import numpy as np\n'), ((568, 592), 'torch.max', 'torch.max', (['output'], {'dim': '(1)'}), '(output, dim=1)\n', (577, 592), False, 'import torch\n'), ((988, 1016), 'numpy.vstack', 'np.vstack', (['[targets, target]'], {}), '([targets, target])\n', (997, 1016), True, 'import numpy as np\n'), ((1117, 1137), 'numpy.mean', 'np.mean', (['arr'], {'axis': '(0)'}), '(arr, axis=0)\n', (1124, 1137), True, 'import numpy as np\n'), ((822, 837), 'numpy.empty', 'np.empty', (['shape'], {}), '(shape)\n', (830, 837), True, 'import numpy as np\n'), ((1181, 1218), 'scipy.stats.mstats.gmean', 'scipy.stats.mstats.gmean', (['arr'], {'axis': '(0)'}), '(arr, axis=0)\n', (1205, 1218), False, 'import scipy\n'), ((1261, 1290), 'scipy.stats.mode', 'scipy.stats.mode', (['arr'], {'axis': '(0)'}), '(arr, axis=0)\n', (1277, 1290), False, 'import scipy\n')]
|
import click
import logging
import matplotlib
import matplotlib.pyplot as plt
import joblib
import fact.io
from ..configuration import AICTConfig
from ..plotting import (
plot_regressor_confusion,
plot_bias_resolution,
plot_feature_importances,
)
if matplotlib.get_backend() == 'pgf':
from matplotlib.backends.backend_pgf import PdfPages
else:
from matplotlib.backends.backend_pdf import PdfPages
@click.command()
@click.argument('configuration_path', type=click.Path(exists=True, dir_okay=False))
@click.argument('performance_path', type=click.Path(exists=True, dir_okay=False))
@click.argument('model_path', type=click.Path(exists=True, dir_okay=False))
@click.option('-o', '--output', type=click.Path(exists=False, dir_okay=False))
@click.option('-k', '--key', help='HDF5 key for hdf5', default='data')
def main(configuration_path, performance_path, model_path, output, key):
    ''' Create some performance evaluation plots for the separator '''
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger()

    # typo fixed: was 'Loading perfomance data'
    log.info('Loading performance data')
    df = fact.io.read_data(performance_path, key=key)

    log.info('Loading model')
    model = joblib.load(model_path)

    config = AICTConfig.from_yaml(configuration_path)
    model_config = config.energy
    energy_unit = config.energy_unit

    figures = []

    # Confusion matrix of reconstructed vs. true energy, log colour scale.
    figures.append(plt.figure())
    ax = figures[-1].add_subplot(1, 1, 1)
    ax.set_title('Reconstructed vs. True Energy (log color scale)')
    plot_regressor_confusion(
        df, ax=ax,
        label_column=model_config.target_column,
        prediction_column=model_config.output_name,
        energy_unit=energy_unit,
    )

    # Same confusion matrix with a linear colour scale.
    figures.append(plt.figure())
    ax = figures[-1].add_subplot(1, 1, 1)
    ax.set_title('Reconstructed vs. True Energy (linear color scale)')
    plot_regressor_confusion(
        df, log_z=False, ax=ax,
        label_column=model_config.target_column,
        prediction_column=model_config.output_name,
        energy_unit=energy_unit,
    )

    # Energy-dependent bias and resolution of the regressor.
    figures.append(plt.figure())
    ax = figures[-1].add_subplot(1, 1, 1)
    ax.set_title('Bias and Resolution')
    plot_bias_resolution(
        df, bins=15, ax=ax,
        label_column=model_config.target_column,
        prediction_column=model_config.output_name,
        energy_unit=energy_unit,
    )

    if hasattr(model, 'feature_importances_'):
        # Only tree-based models expose feature importances.
        figures.append(plt.figure())
        ax = figures[-1].add_subplot(1, 1, 1)
        features = model_config.features
        plot_feature_importances(model, features, ax=ax)

    if output is None:
        plt.show()
    else:
        # Write all collected figures into one multi-page PDF.
        with PdfPages(output) as pdf:
            for fig in figures:
                fig.tight_layout(pad=0)
                pdf.savefig(fig)
|
[
"logging.getLogger",
"logging.basicConfig",
"click.option",
"matplotlib.get_backend",
"matplotlib.pyplot.figure",
"click.Path",
"joblib.load",
"click.command",
"matplotlib.backends.backend_pdf.PdfPages",
"matplotlib.pyplot.show"
] |
[((422, 437), 'click.command', 'click.command', ([], {}), '()\n', (435, 437), False, 'import click\n'), ((760, 829), 'click.option', 'click.option', (['"""-k"""', '"""--key"""'], {'help': '"""HDF5 key for hdf5"""', 'default': '"""data"""'}), "('-k', '--key', help='HDF5 key for hdf5', default='data')\n", (772, 829), False, 'import click\n'), ((264, 288), 'matplotlib.get_backend', 'matplotlib.get_backend', ([], {}), '()\n', (286, 288), False, 'import matplotlib\n'), ((978, 1017), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'logging.INFO'}), '(level=logging.INFO)\n', (997, 1017), False, 'import logging\n'), ((1028, 1047), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (1045, 1047), False, 'import logging\n'), ((1186, 1209), 'joblib.load', 'joblib.load', (['model_path'], {}), '(model_path)\n', (1197, 1209), False, 'import joblib\n'), ((1394, 1406), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1404, 1406), True, 'import matplotlib.pyplot as plt\n'), ((1748, 1760), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (1758, 1760), True, 'import matplotlib.pyplot as plt\n'), ((2124, 2136), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2134, 2136), True, 'import matplotlib.pyplot as plt\n'), ((2712, 2722), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (2720, 2722), True, 'import matplotlib.pyplot as plt\n'), ((481, 520), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'dir_okay': '(False)'}), '(exists=True, dir_okay=False)\n', (491, 520), False, 'import click\n'), ((563, 602), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'dir_okay': '(False)'}), '(exists=True, dir_okay=False)\n', (573, 602), False, 'import click\n'), ((639, 678), 'click.Path', 'click.Path', ([], {'exists': '(True)', 'dir_okay': '(False)'}), '(exists=True, dir_okay=False)\n', (649, 678), False, 'import click\n'), ((717, 757), 'click.Path', 'click.Path', ([], {'exists': '(False)', 'dir_okay': '(False)'}), 
'(exists=False, dir_okay=False)\n', (727, 757), False, 'import click\n'), ((2520, 2532), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (2530, 2532), True, 'import matplotlib.pyplot as plt\n'), ((2746, 2762), 'matplotlib.backends.backend_pdf.PdfPages', 'PdfPages', (['output'], {}), '(output)\n', (2754, 2762), False, 'from matplotlib.backends.backend_pdf import PdfPages\n')]
|
##########################################################################
#
# MRC FGU Computational Genomics Group
#
# $Id$
#
# Copyright (C) 2009 <NAME>
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
##########################################################################
'''
Sra.py - Methods for dealing with short read archive files
==========================================================
Utility functions for dealing with :term:`SRA` formatted files from
the Short Read Archive.
Requirements:
* fastq-dump >= 2.1.7
Code
----
'''
import os
import glob
import tempfile
import shutil
import CGAT.Experiment as E
import CGAT.Fastq as Fastq
import CGAT.IOTools as IOTools
def peek(sra, outdir=None):
    """Extract a small sample of *sra* and return the resulting file names.

    Only the first 1000 spots are dumped (``-X 1000``), so this is a cheap
    way to inspect what an archive contains.

    Parameters
    ----------
    sra : path
        Path to the :term:`SRA` formatted archive.
    outdir : path
        Perform extraction in outdir. If outdir is None, the extraction
        will take place in a temporary directory, which will be deleted
        afterwards.

    Returns
    -------
    files : list
        A list of fastq formatted files that are contained in the archive.
    format : string
        The quality score format in the :term:`fastq` formatted files.
    datatype : string
        The data type of the reads as guessed by ``Fastq.guessDataType``.
    """
    if outdir is None:
        workdir = tempfile.mkdtemp()
    else:
        workdir = outdir
    # --split-files creates files called prefix_#.fastq.gz,
    # where # is the read number.
    # If file contains paired end data:
    # output = prefix_1.fastq.gz, prefix_2.fastq.gz
    # *special case: unpaired reads in a paired end --> prefix.fastq.gz
    # *special case: if paired reads are stored in a single read,
    #                fastq-dump will split. There might be a joining
    #                sequence. The output would thus be:
    #                prefix_1.fastq.gz, prefix_2.fastq.gz, prefix_3.fastq.gz
    #                You want files 1 and 3.
    E.run("""fastq-dump --split-files --gzip -X 1000
    --outdir %(workdir)s %(sra)s""" % locals())
    # Gather everything fastq-dump produced, in deterministic order.
    f = sorted(glob.glob(os.path.join(workdir, "*.fastq.gz")))
    ff = [os.path.basename(x) for x in f]
    if len(f) == 1:
        # sra file contains one read: output = prefix.fastq.gz
        pass
    elif len(f) == 2:
        # sra file contains read pairs:
        # output = prefix_1.fastq.gz, prefix_2.fastq.gz
        assert ff[0].endswith(
            "_1.fastq.gz") and ff[1].endswith("_2.fastq.gz")
    elif len(f) == 3:
        if ff[2].endswith("_3.fastq.gz"):
            # Three files with a "_3" suffix: keep reads 1 and 3 (2 is the
            # joining sequence, see comment above).
            f = glob.glob(os.path.join(workdir, "*_[13].fastq.gz"))
        else:
            # NOTE(review): this branch globs the exact same "*_[13]" pattern
            # as the branch above -- looks like a copy/paste remnant; verify
            # the intended pattern for archives without a "_3" file.
            f = glob.glob(os.path.join(workdir, "*_[13].fastq.gz"))
    # check format of fastqs in .sra
    fastq_format = Fastq.guessFormat(IOTools.openFile(f[0], "r"), raises=False)
    fastq_datatype = Fastq.guessDataType(IOTools.openFile(f[0], "r"), raises=True)
    # Clean up only if we created the temporary directory ourselves.
    if outdir is None:
        shutil.rmtree(workdir)
    return f, fastq_format, fastq_datatype
def extract(sra, outdir, tool="fastq-dump"):
    """Build the shell statement that extracts `sra` into `outdir`.

    Possible tools are fastq-dump and abi-dump. Use abi-dump for
    colorspace data.
    """
    command = tool
    if command == "fastq-dump":
        # Paired-end archives must be split into one file per read.
        command += " --split-files"
    return "%s --gzip --outdir %s %s" % (command, outdir, sra)
|
[
"os.path.join",
"tempfile.mkdtemp",
"os.path.basename",
"shutil.rmtree",
"CGAT.IOTools.openFile"
] |
[((1943, 1961), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {}), '()\n', (1959, 1961), False, 'import tempfile\n'), ((2775, 2794), 'os.path.basename', 'os.path.basename', (['x'], {}), '(x)\n', (2791, 2794), False, 'import os\n'), ((3405, 3432), 'CGAT.IOTools.openFile', 'IOTools.openFile', (['f[0]', '"""r"""'], {}), "(f[0], 'r')\n", (3421, 3432), True, 'import CGAT.IOTools as IOTools\n'), ((3489, 3516), 'CGAT.IOTools.openFile', 'IOTools.openFile', (['f[0]', '"""r"""'], {}), "(f[0], 'r')\n", (3505, 3516), True, 'import CGAT.IOTools as IOTools\n'), ((3563, 3585), 'shutil.rmtree', 'shutil.rmtree', (['workdir'], {}), '(workdir)\n', (3576, 3585), False, 'import shutil\n'), ((2727, 2762), 'os.path.join', 'os.path.join', (['workdir', '"""*.fastq.gz"""'], {}), "(workdir, '*.fastq.gz')\n", (2739, 2762), False, 'import os\n'), ((3206, 3246), 'os.path.join', 'os.path.join', (['workdir', '"""*_[13].fastq.gz"""'], {}), "(workdir, '*_[13].fastq.gz')\n", (3218, 3246), False, 'import os\n'), ((3288, 3328), 'os.path.join', 'os.path.join', (['workdir', '"""*_[13].fastq.gz"""'], {}), "(workdir, '*_[13].fastq.gz')\n", (3300, 3328), False, 'import os\n')]
|
# Copyright (c) 2019 <NAME> and <NAME>
#
# This file is part of the LipidFinder software tool and governed by the
# 'MIT License'. Please see the LICENSE file that should have been
# included as part of this software.
"""Represent a DataFrame to be processed with LipidFinder's workflow."""
import glob
import logging
import os
import pandas
class LFDataFrame(pandas.core.frame.DataFrame):
    """A LFDataFrame object stores a dataframe to be used as input data
    in LipidFinder.
    The input data file(s) must comply with the following requirements:
      - The format must be: CSV, TSV, XLS or XLSX. For the last two the
        user can also specify the sheet to be read (or the list of
        sheets if a folder is given as 'src').
      - The first column contains an identifier for each row that is
        unique throughout every file.
      - There is one column named as "mzCol" parameter and another one
        as "rtCol" parameter.
      - Starting from the column index in "firstSampleIndex" parameter,
        every intensity column must follow. For instance, for 2 samples
        with 2 technical replicates, 1 quality control sample and 2
        solvents, the columns would be as follows:
            sample11 , sample12 , sample21 , sample22 , QC1 , sol1, sol2
        Ensure that samples with multiple technical replicates are given
        names in the format name1, name2, etc. such that each name is
        unique for each column. Replicates should be suffixed 1, 2, etc.
    Attributes:
        src  (Public[str])
            Source path where the data was loaded from.
        _resolution  (Private[int])
            Number of digits after the radix point in floats.
    Examples:
        LFDataFrame objects can be created in two different ways:
        >>> from Configuration import LFParameters
        >>> from LFDataFrame import LFDataFrame
        >>> params = LFParameters(module='peakfilter')
        >>> csvData = LFDataFrame('input_data.csv', params)
        >>> xlsData = LFDataFrame('input_data.xls', params, sheet=2)
        >>> folderData = LFDataFrame('/home/user/data/', params)
        After loading the required set of parameters, the data can be
        loaded from a single file ('csvData' and 'xlsData' examples) or
        from multiple files located in the same folder ('folderData'
        example). The latter is meant to be used to merge multiple files
        split by time ranges that represent a single run. The first and
        last retention time (RT) minutes of every file are trimmed as
        they are considered unreliable (except for the first and last
        minutes of the first and last files, respectively). The method
        supports overlap (after trimming), and the frames retained will
        be those from the file with the most frames for each overlapping
        minute.
        The number of decimal places to keep from the input m/z column
        can be changed assigning a value to 'resolution' variable. It
        has been predefined to 6, a standard value in high-resolution
        liquid-chromatography coupled to mass-spectrometry.
    """

    def __init__(self, src, parameters, resolution=6, sheet=0):
        # type: (str, LFParameters, int, object) -> LFDataFrame
        """Constructor of the class LFDataFrame.
        Keyword Arguments:
            src        -- source path where to load the data from
            parameters -- LipidFinder's parameters instance (can be for
                          any module)
            resolution -- number of decimal places to keep from m/z
                          column [default: 6]
            sheet      -- sheet number or list of sheet numbers to read
                          when input file(s) have XLS or XLSX extension
                          (zero-indexed position) [default: 0]
        """
        rtCol = parameters['rtCol']
        if (not os.path.isdir(src)):
            data = self._read_file(src, parameters, sheet)
        else:
            # NOTE(review): in the folder branch 'sheet' is indexed
            # (sheet[0], sheet[index]), so callers must pass a list here --
            # the scalar default of 0 would raise; confirm against callers.
            # Create a list of the input files in the source folder (in
            # alphabetical order)
            fileList = sorted(glob.iglob(os.path.join(src, '*.*')))
            if (len(fileList) == 0):
                raise FileNotFoundError("No files found in '{0}'".format(src))
            data = self._read_file(fileList[0], parameters, sheet[0])
            if (len(fileList) > 1):
                # Sort first dataframe by RT
                data.sort_values([rtCol], inplace=True, kind='mergesort')
                # Append "minute" column to the dataframe with the
                # integer part of the float values of its RT column
                timeCol = 'minute'
                data = data.assign(minute=data[rtCol].astype(int))
                # Since it is the first file, remove the frames
                # corresponding to the last minute
                data = data[data[timeCol] != data.iloc[-1][timeCol]]
                for index, filePath in enumerate(fileList[1:], start=1):
                    chunk = self._read_file(filePath, parameters, sheet[index])
                    # Sort next chunk dataframe by RT
                    chunk.sort_values([rtCol], inplace=True, kind='mergesort')
                    # Append "minute" column to the dataframe with the
                    # integer part of the float values of its RT column
                    chunk = chunk.assign(minute=chunk[rtCol].astype(int))
                    # Remove the frames of the first minute
                    chunk = chunk[chunk[timeCol] != chunk.iloc[0][timeCol]]
                    if (index < (len(fileList) - 1)):
                        # Since it is not the last file, remove the
                        # frames corresponding to the last minute
                        chunk = chunk[chunk[timeCol] != chunk.iloc[-1][timeCol]]
                    # Create a dataframe with the number of frames per
                    # minute for both the dataframe and the next chunk
                    overlap = pandas.DataFrame(
                            {'data': data.groupby(timeCol).size(),
                             'chunk': chunk.groupby(timeCol).size()}
                            ).fillna(0)
                    # Keep the minutes where the number of frames in the
                    # next chunk is higher than in the current dataframe
                    overlap = overlap[overlap['chunk'] > overlap['data']]
                    minutesToReplace = overlap.index.tolist()
                    if (minutesToReplace):
                        # Remove the dataframe frames to be replaced
                        data = data[~data[timeCol].isin(minutesToReplace)]
                        # Append chunk frames preserving the column
                        # order of the main dataframe
                        data = data.append(
                                chunk[chunk[timeCol].isin(minutesToReplace)],
                                ignore_index=True
                                )[data.columns.tolist()]
                # Drop "minute" column as it will be no longer necessary
                data.drop(timeCol, axis=1, inplace=True)
        # Rename first column if no name was given in the input file(s)
        data.rename(columns={'Unnamed: 0': 'id'}, inplace=True)
        # Sort dataframe by m/z and RT, and reset the indexing
        mzCol = parameters['mzCol']
        data.sort_values([mzCol, rtCol], inplace=True, kind='mergesort')
        data.reset_index(drop=True, inplace=True)
        # Adjust m/z column values to the machine's maximum float
        # resolution
        data[mzCol] = data[mzCol].apply(round, ndigits=resolution)
        super(LFDataFrame, self).__init__(data=data)
        self.src = src
        self._resolution = resolution

    def drop_empty_frames(self, module, parameters, means=False):
        # type: (str, LFParameters, bool) -> None
        """Remove empty frames from the dataframe and reset the index.
        An empty frame is a row for which every sample replicate or
        sample mean has a zero intensity.
        Keyword Arguments:
            module     -- module name to write in the logging file
            parameters -- LipidFinder's parameters instance (can be for
                          any module)
            means      -- check sample means instead of each sample
                          replicate? [default: False]
        """
        if (means):
            # Operate on the "*_mean" summary columns instead of replicates.
            meanColIndexes = [i for i, col in enumerate(self.columns)
                              if col.endswith('_mean')]
            if (parameters['numSolventReps'] > 0):
                # The first mean column is for the solvents
                firstIndex = meanColIndexes[1]
            else:
                firstIndex = meanColIndexes[0]
            lastIndex = meanColIndexes[-1]
        else:
            # Intensity columns are a contiguous range starting at
            # "firstSampleIndex" (1-based in the parameters file).
            firstIndex = parameters['firstSampleIndex'] - 1
            lastIndex = firstIndex \
                    + (parameters['numSamples'] * parameters['numTechReps'])
        # Get the indices of all empty frames
        emptyFrames = self.iloc[:, firstIndex : lastIndex].eq(0).all(axis=1)
        indices = self[emptyFrames].index.tolist()
        if (indices):
            # Drop empty frames and reset the index
            self.drop(module, labels=indices, axis=0, inplace=True)
            self.reset_index(drop=True, inplace=True)

    def drop(self, module, **kwargs):
        # type: (str, ...) -> LFDataFrame
        """Wrapper of pandas.DataFrame.drop() with logging report.
        The report will be updated only if the labels correspond to
        rows, i.e. kwargs['axis'] == 0 (default value).
        Keyword Arguments:
            module  -- module name to write in the logging file
            *kwargs -- arguments to pass to pandas.DataFrame.drop()
        """
        # Create logger to print message to the log file
        logger = logging.getLogger(module)
        logger.setLevel(logging.INFO)
        if ((len(kwargs['labels']) > 0) and (kwargs.get('axis', 0) == 0)):
            # Report the unique IDs (first column) of the removed rows.
            idCol = self.columns[0]
            idList = [str(x) for x in sorted(self.loc[kwargs['labels'], idCol])]
            logger.info('%s: removed %d rows. IDs: %s', module, len(idList),
                        ','.join(idList))
        return super(LFDataFrame, self).drop(**kwargs)

    @staticmethod
    def _read_file(src, parameters, sheet):
        # type: (str, LFParameters, int) -> pandas.core.frame.DataFrame
        """Return a dataframe with the same content as the source file,
        but with retention time in minutes.
        The read function will be configured based on the file's
        extension. Accepted extensions: CSV, TSV, XLS, XLSX.
        Keyword Arguments:
            src        -- source file path
            parameters -- LipidFinder's parameters instance (can be for
                          any module)
            sheet      -- sheet number to read when the input file has
                          XLS or XLSX extension (zero-indexed position)
        """
        extension = os.path.splitext(src)[1].lower()[1:]
        # Load file based on its extension
        if (extension == 'csv'):
            data = pandas.read_csv(src, float_precision='high')
        elif (extension == 'tsv'):
            data = pandas.read_csv(src, sep='\t', float_precision='high')
        elif (extension in ['xls', 'xlsx']):
            data = pandas.read_excel(src, sheet_name=sheet)
        else:
            raise IOError(("Unknown file extension '{0}'. Expected: csv, tsv, "
                           "xls, xlsx").format(extension))
        if (('timeUnit' in parameters) and
            (parameters['timeUnit'] == 'Seconds')):
            # Normalize retention time to minutes, rounded to 2 decimals.
            rtCol = parameters['rtCol']
            data[rtCol] = data[rtCol].apply(lambda x: round(x / 60.0, 2))
        return data
|
[
"logging.getLogger",
"pandas.read_csv",
"os.path.join",
"os.path.splitext",
"os.path.isdir",
"pandas.read_excel"
] |
[((9947, 9972), 'logging.getLogger', 'logging.getLogger', (['module'], {}), '(module)\n', (9964, 9972), False, 'import logging\n'), ((3932, 3950), 'os.path.isdir', 'os.path.isdir', (['src'], {}), '(src)\n', (3945, 3950), False, 'import os\n'), ((11243, 11287), 'pandas.read_csv', 'pandas.read_csv', (['src'], {'float_precision': '"""high"""'}), "(src, float_precision='high')\n", (11258, 11287), False, 'import pandas\n'), ((11342, 11396), 'pandas.read_csv', 'pandas.read_csv', (['src'], {'sep': '"""\t"""', 'float_precision': '"""high"""'}), "(src, sep='\\t', float_precision='high')\n", (11357, 11396), False, 'import pandas\n'), ((4173, 4197), 'os.path.join', 'os.path.join', (['src', '"""*.*"""'], {}), "(src, '*.*')\n", (4185, 4197), False, 'import os\n'), ((11461, 11501), 'pandas.read_excel', 'pandas.read_excel', (['src'], {'sheet_name': 'sheet'}), '(src, sheet_name=sheet)\n', (11478, 11501), False, 'import pandas\n'), ((11111, 11132), 'os.path.splitext', 'os.path.splitext', (['src'], {}), '(src)\n', (11127, 11132), False, 'import os\n')]
|
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.ops import variables
from tensorflow.python.ops import array_ops
from tensorflow.python.framework import sparse_tensor
from tensorflow.python.ops import gen_fused_embedding_ops
from tensorflow.python.ops.gen_fused_embedding_ops import fused_embedding_local_sparse_look_up_grad
from tensorflow.python.ops.gen_fused_embedding_ops import fused_embedding_local_sparse_look_up
from tensorflow.python.ops.gen_fused_embedding_ops import fused_embedding_sparse_pre_look_up
from tensorflow.python.ops.gen_fused_embedding_ops import fused_embedding_sparse_post_look_up
from tensorflow.python.ops.gen_fused_embedding_ops import fused_embedding_sparse_post_look_up_grad
from tensorflow.python.util.tf_export import tf_export
def fused_embedding_lookup_sparse(embedding_weights,
                                  sparse_ids,
                                  combiner=None,
                                  name=None,
                                  max_norm=None):
  """Look up `sparse_ids` in (possibly partitioned) `embedding_weights`.

  Args:
    embedding_weights: a single embedding variable, a list of shard
      variables, or a `PartitionedVariable`.
    sparse_ids: `SparseTensor` of ids to look up.
    combiner: one of "mean", "sqrtn" or "sum"; defaults to "mean".
    name: optional name scope for the created ops.
    max_norm: if not None, embedding vectors are clipped to this l2-norm
      before combining.

  Returns:
    A dense tensor with the combined embedding vector per sample.

  Raises:
    ValueError: if `embedding_weights` is None/empty or `combiner` invalid.
    TypeError: if `sparse_ids` is not a `SparseTensor`.
  """
  # Function-scope stdlib import: this module referenced `logging` without
  # ever importing it, which raised NameError whenever combiner was None.
  import logging

  if embedding_weights is None:
    raise ValueError("Missing embedding_weights %s." % embedding_weights)
  if isinstance(embedding_weights, variables.PartitionedVariable):
    # get underlying Variables.
    embedding_weights = list(embedding_weights)
  if not isinstance(embedding_weights, list):
    embedding_weights = [embedding_weights]
  if len(embedding_weights) < 1:
    raise ValueError("Missing embedding_weights %s." % embedding_weights)

  with ops.name_scope(name, "fused_embedding_lookup", embedding_weights +
                      [sparse_ids]):
    if combiner is None:
      # logging.warn is a deprecated alias; warning() emits the same record.
      logging.warning("The default value of combiner will change from \"mean\" "
                      "to \"sqrtn\" after 2016/11/01.")
      combiner = "mean"
    if combiner not in ("mean", "sqrtn", "sum"):
      raise ValueError("combiner must be one of 'mean', 'sqrtn' or 'sum'")
    if not isinstance(sparse_ids, sparse_tensor.SparseTensor):
      raise TypeError("sparse_ids must be SparseTensor")

    partition_nums = len(embedding_weights)
    partition_shapes = [w.shape for w in embedding_weights]

    # Step 1: route each id to the shard that owns it.
    partitioned_values, partitioned_indices = fused_embedding_sparse_pre_look_up(
      partition_shapes=partition_shapes,
      sp_values=sparse_ids.values,
      sp_indices=sparse_ids.indices,
    )

    # Step 2: gather the embedding rows from each shard, colocated with
    # the shard's variable to avoid cross-device copies of the table.
    emb_shards = []
    for i in range(partition_nums):
      embedding = embedding_weights[i]
      sub_partition_values = partitioned_values[i]
      with ops.colocate_with(embedding):
        shard = array_ops.gather(embedding, sub_partition_values)
        emb_shards.append(shard)

    # Step 3: scatter the gathered rows back to their original positions
    # and combine them per sample.
    emb_vectors, _ = fused_embedding_sparse_post_look_up(
      emb_shards=emb_shards, partitioned_indices=partitioned_indices,
      sp_dense_shape=sparse_ids.dense_shape,
      partitioned_values=partitioned_values,
      combiner=combiner, max_norm=max_norm
    )
    return emb_vectors
@ops.RegisterGradient("FusedEmbeddingLocalSparseLookUp")
def fused_embedding_local_sparse_look_up_grad(op, top_grad_emb_vec, _):
  # Gradient for the local (single-variable) fused lookup: compute the
  # per-id gradient values with the dedicated kernel and wrap them as
  # IndexedSlices keyed by the original sparse ids (op.inputs[0]).
  grad_sp_values = gen_fused_embedding_ops.fused_embedding_local_sparse_look_up_grad(
    top_grad=top_grad_emb_vec, emb_variable=op.inputs[3],
    sp_values=op.inputs[0], sp_values_offset=op.outputs[1],
    combiner=op.get_attr("combiner"),
    max_norm=op.get_attr("max_norm")
  )
  grads = ops.IndexedSlices(values=grad_sp_values,
                    indices=op.inputs[0])
  # Only the embedding variable (4th input) receives a gradient; the
  # sparse values/indices/dense-shape inputs get None.
  return [None, None, None, grads]
@ops.RegisterGradient("FusedEmbeddingSparsePostLookUp")
def fused_embedding_sparse_post_look_up_grad(op, top_grad_emb_vec, _):
  # Gradient for the post-lookup combine step. The op's inputs are laid
  # out as: [emb_shard_0..emb_shard_{P-1}, partitioned_indices_0..{P-1},
  # sp_dense_shape], so one gradient tensor is produced per shard and
  # None for every remaining input.
  num_partitions = op.get_attr("num_partitions")
  grad_shards = gen_fused_embedding_ops.fused_embedding_sparse_post_look_up_grad(
    top_grad=top_grad_emb_vec, emb_shards=[op.inputs[i] for i in range(0, num_partitions)],
    partitioned_indices=[op.inputs[i] for i in range(num_partitions, 2 * num_partitions)],
    feature_nums=op.outputs[1], combiner=op.get_attr("combiner"),
    max_norm=op.get_attr("max_norm")
  )
  return grad_shards + [None for _ in range(0, 2 * num_partitions + 1)]
|
[
"tensorflow.python.framework.ops.RegisterGradient",
"tensorflow.python.ops.gen_fused_embedding_ops.fused_embedding_sparse_post_look_up",
"tensorflow.python.ops.array_ops.gather",
"tensorflow.python.framework.ops.colocate_with",
"tensorflow.python.ops.gen_fused_embedding_ops.fused_embedding_sparse_pre_look_up",
"tensorflow.python.framework.ops.IndexedSlices",
"tensorflow.python.framework.ops.name_scope"
] |
[((3667, 3722), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', (['"""FusedEmbeddingLocalSparseLookUp"""'], {}), "('FusedEmbeddingLocalSparseLookUp')\n", (3687, 3722), False, 'from tensorflow.python.framework import ops\n'), ((4217, 4271), 'tensorflow.python.framework.ops.RegisterGradient', 'ops.RegisterGradient', (['"""FusedEmbeddingSparsePostLookUp"""'], {}), "('FusedEmbeddingSparsePostLookUp')\n", (4237, 4271), False, 'from tensorflow.python.framework import ops\n'), ((4088, 4150), 'tensorflow.python.framework.ops.IndexedSlices', 'ops.IndexedSlices', ([], {'values': 'grad_sp_values', 'indices': 'op.inputs[0]'}), '(values=grad_sp_values, indices=op.inputs[0])\n', (4105, 4150), False, 'from tensorflow.python.framework import ops\n'), ((1593, 1678), 'tensorflow.python.framework.ops.name_scope', 'ops.name_scope', (['name', '"""fused_embedding_lookup"""', '(embedding_weights + [sparse_ids])'], {}), "(name, 'fused_embedding_lookup', embedding_weights + [sparse_ids]\n )\n", (1607, 1678), False, 'from tensorflow.python.framework import ops\n'), ((2933, 3066), 'tensorflow.python.ops.gen_fused_embedding_ops.fused_embedding_sparse_pre_look_up', 'fused_embedding_sparse_pre_look_up', ([], {'partition_shapes': 'partition_shapes', 'sp_values': 'sparse_ids.values', 'sp_indices': 'sparse_ids.indices'}), '(partition_shapes=partition_shapes,\n sp_values=sparse_ids.values, sp_indices=sparse_ids.indices)\n', (2967, 3066), False, 'from tensorflow.python.ops.gen_fused_embedding_ops import fused_embedding_sparse_pre_look_up\n'), ((3395, 3623), 'tensorflow.python.ops.gen_fused_embedding_ops.fused_embedding_sparse_post_look_up', 'fused_embedding_sparse_post_look_up', ([], {'emb_shards': 'emb_shards', 'partitioned_indices': 'partitioned_indices', 'sp_dense_shape': 'sparse_ids.dense_shape', 'partitioned_values': 'partitioned_values', 'combiner': 'combiner', 'max_norm': 'max_norm'}), '(emb_shards=emb_shards,\n partitioned_indices=partitioned_indices, 
sp_dense_shape=sparse_ids.\n dense_shape, partitioned_values=partitioned_values, combiner=combiner,\n max_norm=max_norm)\n', (3430, 3623), False, 'from tensorflow.python.ops.gen_fused_embedding_ops import fused_embedding_sparse_post_look_up\n'), ((3245, 3273), 'tensorflow.python.framework.ops.colocate_with', 'ops.colocate_with', (['embedding'], {}), '(embedding)\n', (3262, 3273), False, 'from tensorflow.python.framework import ops\n'), ((3291, 3340), 'tensorflow.python.ops.array_ops.gather', 'array_ops.gather', (['embedding', 'sub_partition_values'], {}), '(embedding, sub_partition_values)\n', (3307, 3340), False, 'from tensorflow.python.ops import array_ops\n')]
|
#!/usr/bin/env python3
#
# Author: <NAME>
# License: BSD 2-clause
# Last Change: Sun May 09, 2021 at 02:52 AM +0200
import numpy as np
ARRAY_TYPE = 'np'


def read_branch(ntp, tree, branch, idx=None):
    """Read a single branch from *tree*, optionally selecting entries.

    Keyword Arguments:
        idx -- index or boolean mask applied to the array; None means
               return everything.
    """
    data = ntp[tree][branch].array(library=ARRAY_TYPE)
    # Test against None (not truthiness): the previous 'if not idx' check
    # silently ignored idx=0 and raised on non-trivial boolean masks.
    # Sibling read_branches already used 'is not None'.
    return data if idx is None else data[idx]
def read_branches_dict(ntp, tree, branches):
    """Read several branches from *tree* at once as a mapping of branch name to array."""
    return ntp[tree].arrays(branches, library=ARRAY_TYPE)
def read_branches(ntp, tree, branches, idx=None, transpose=False):
    """Read several branches as a list of arrays.

    Keyword Arguments:
        idx       -- index or boolean mask applied to every array.
        transpose -- stack the arrays column-wise into one 2-D array.
    """
    columns = list(ntp[tree].arrays(branches, library=ARRAY_TYPE).values())
    if idx is not None:
        columns = [col[idx] for col in columns]
    if transpose:
        return np.column_stack(columns)
    return columns
|
[
"numpy.column_stack"
] |
[((623, 644), 'numpy.column_stack', 'np.column_stack', (['data'], {}), '(data)\n', (638, 644), True, 'import numpy as np\n')]
|
#!/usr/bin/env python
"""
Test code for the BBox Object
"""
import numpy as np
import pytest
from geometry_utils.bound_box import (BBox,
asBBox,
NullBBox,
InfBBox,
fromBBArray,
from_points,
)
class TestConstructors():
    """Construction and input validation of BBox objects."""

    def test_creates(self):
        B = BBox(((0, 0), (5, 5)))
        assert isinstance(B, BBox)

    def test_type(self):
        # A plain ndarray must not be mistaken for a BBox.
        B = np.array(((0, 0), (5, 5)))
        assert not isinstance(B, BBox)

    def testDataType(self):
        B = BBox(((0, 0), (5, 5)))
        # np.float was removed from NumPy (deprecated 1.20, removed 1.24);
        # compare against the concrete dtype instead.
        assert B.dtype == np.float64

    def testShape(self):
        B = BBox((0, 0, 5, 5))
        assert B.shape == (2, 2)

    def testShape2(self):
        with pytest.raises(ValueError):
            BBox((0, 0, 5))

    def testShape3(self):
        with pytest.raises(ValueError):
            BBox((0, 0, 5, 6, 7))

    def testArrayConstruction(self):
        # np.float_ is gone in NumPy 2.0; np.float64 is the same dtype.
        A = np.array(((4, 5), (10, 12)), np.float64)
        B = BBox(A)
        assert isinstance(B, BBox)

    def testMinMax(self):
        with pytest.raises(ValueError):
            BBox((0, 0, -1, 6))

    def testMinMax2(self):
        with pytest.raises(ValueError):
            BBox((0, 0, 1, -6))

    def testMinMax3(self):
        # OK to have a zero-sized BB
        B = BBox(((0, 0), (0, 5)))
        assert isinstance(B, BBox)

    def testMinMax4(self):
        # OK to have a zero-sized BB
        B = BBox(((10., -34), (10., -34.0)))
        assert isinstance(B, BBox)

    def testMinMax5(self):
        # OK to have a tiny BB
        B = BBox(((0, 0), (1e-20, 5)))
        assert isinstance(B, BBox)

    def testMinMax6(self):
        # Should catch tiny difference
        with pytest.raises(ValueError):
            BBox(((0, 0), (-1e-20, 5)))
class TestAsBBox():
    """asBBox passes BBox instances through and wraps arrays as views."""

    def testPassThrough(self):
        box = BBox(((0, 0), (5, 5)))
        result = asBBox(box)
        assert result is box

    def testPassThrough2(self):
        raw = ((0, 0), (5, 5))
        result = asBBox(raw)
        assert result is not raw

    def testPassArray(self):
        # A non-float array must be copied (different data type).
        arr = np.array(((0, 0), (5, 5)))
        result = asBBox(arr)
        assert result is not arr

    def testPassArray2(self):
        # Same data type -- should be a view sharing the buffer.
        arr = np.array(((0, 0), (5, 5)), np.float_)
        result = asBBox(arr)
        arr[0, 0] = -10
        assert result[0, 0] == arr[0, 0]
class TestIntersect():
    """Tests of BBox.Overlaps against boxes in every relative position.

    Overlap is inclusive: touching edges, shared corners and zero-sized
    boxes on the boundary all count as overlapping.
    """
    def testSame(self):
        B = BBox(((-23.5, 456), (56, 532.0)))
        C = BBox(((-23.5, 456), (56, 532.0)))
        assert B.Overlaps(C)
    def testUpperLeft(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((0, 12), (10, 32.0)))
        assert B.Overlaps(C)
    def testUpperRight(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((12, 12), (25, 32.0)))
        assert B.Overlaps(C)
    def testLowerRight(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((12, 5), (25, 15)))
        assert B.Overlaps(C)
    def testLowerLeft(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((-10, 5), (8.5, 15)))
        assert B.Overlaps(C)
    def testBelow(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((-10, 5), (8.5, 9.2)))
        assert not B.Overlaps(C)
    def testAbove(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((-10, 25.001), (8.5, 32)))
        assert not B.Overlaps(C)
    def testLeft(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((4, 8), (4.95, 32)))
        assert not B.Overlaps(C)
    def testRight(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((17.1, 8), (17.95, 32)))
        assert not B.Overlaps(C)
    def testInside(self):
        B = BBox(((-15, -25), (-5, -10)))
        C = BBox(((-12, -22), (-6, -8)))
        assert B.Overlaps(C)
    def testOutside(self):
        B = BBox(((-15, -25), (-5, -10)))
        C = BBox(((-17, -26), (3, 0)))
        assert B.Overlaps(C)
    def testTouch(self):
        # Boxes sharing only an edge still overlap.
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((15, 8), (17.95, 32)))
        assert B.Overlaps(C)
    def testCorner(self):
        # Boxes sharing only a corner still overlap.
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((15, 25), (17.95, 32)))
        assert B.Overlaps(C)
    def testZeroSize(self):
        B = BBox(((5, 10), (15, 25)))
        C = BBox(((15, 25), (15, 25)))
        assert B.Overlaps(C)
    def testZeroSize2(self):
        B = BBox(((5, 10), (5, 10)))
        C = BBox(((15, 25), (15, 25)))
        assert not B.Overlaps(C)
    def testZeroSize3(self):
        B = BBox(((5, 10), (5, 10)))
        C = BBox(((0, 8), (10, 12)))
        assert B.Overlaps(C)
    def testZeroSize4(self):
        B = BBox(((5, 1), (10, 25)))
        C = BBox(((8, 8), (8, 8)))
        assert B.Overlaps(C)
class TestEquality():
    """Equality comparisons between BBox objects and plain arrays."""

    def testSame(self):
        left = BBox(((1.0, 2.0), (5., 10.)))
        right = BBox(((1.0, 2.0), (5., 10.)))
        assert left == right

    def testIdentical(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        assert box == box

    def testNotSame(self):
        left = BBox(((1.0, 2.0), (5., 10.)))
        right = BBox(((1.0, 2.0), (5., 10.1)))
        assert not left == right

    def testWithArray(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        arr = np.array(((1.0, 2.0), (5., 10.)))
        assert box == arr

    def testWithArray2(self):
        # Equality must also hold with the array on the left-hand side.
        box = BBox(((1.0, 2.0), (5., 10.)))
        arr = np.array(((1.0, 2.0), (5., 10.)))
        assert arr == box

    def testWithArray3(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        arr = np.array(((1.01, 2.0), (5., 10.)))
        assert not arr == box
class TestInside():
    """BBox.Inside(other): is *other* entirely contained in this box?"""
    def testSame(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        other = BBox(((1.0, 2.0), (5., 10.)))
        assert box.Inside(other)
    def testPoint(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        other = BBox(((3.0, 4.0), (3.0, 4.0)))
        assert box.Inside(other)
    def testPointOutside(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        other = BBox(((-3.0, 4.0), (0.10, 4.0)))
        assert not box.Inside(other)
    def testUpperLeft(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((0, 12), (10, 32.0)))
        assert not box.Inside(other)
    def testUpperRight(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((12, 12), (25, 32.0)))
        assert not box.Inside(other)
    def testLowerRight(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((12, 5), (25, 15)))
        assert not box.Inside(other)
    def testLowerLeft(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((-10, 5), (8.5, 15)))
        assert not box.Inside(other)
    def testBelow(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((-10, 5), (8.5, 9.2)))
        assert not box.Inside(other)
    def testAbove(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((-10, 25.001), (8.5, 32)))
        assert not box.Inside(other)
    def testLeft(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((4, 8), (4.95, 32)))
        assert not box.Inside(other)
    def testRight(self):
        box = BBox(((5, 10), (15, 25)))
        other = BBox(((17.1, 8), (17.95, 32)))
        assert not box.Inside(other)
class TestPointInside():
    """BBox.PointInside(p): point-in-box test (edge points count as in)."""
    def testPointIn(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        assert box.PointInside((3.0, 4.0))
    def testUpperLeft(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((4, 30))
    def testUpperRight(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((16, 30))
    def testLowerRight(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((16, 4))
    def testLowerLeft(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((-10, 5))
    def testBelow(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((10, 5))
    def testAbove(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((10, 25.001))
    def testLeft(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((4, 12))
    def testRight(self):
        box = BBox(((5, 10), (15, 25)))
        assert not box.PointInside((17.1, 12.3))
    def testPointOnTopLine(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        assert box.PointInside((3.0, 10.))
    def testPointLeftTopLine(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        assert not box.PointInside((-3.0, 10.))
    def testPointOnBottomLine(self):
        box = BBox(((1.0, 2.0), (5., 10.)))
        assert box.PointInside((3.0, 5.))
    def testPointOnLeft(self):
        box = BBox(((-10., -10.), (-1.0, -1.0)))
        assert box.PointInside((-10, -5.))
    def testPointOnRight(self):
        box = BBox(((-10., -10.), (-1.0, -1.0)))
        assert box.PointInside((-1, -5.))
    def testPointOnBottomRight(self):
        box = BBox(((-10., -10.), (-1.0, -1.0)))
        assert box.PointInside((-1, -10.))
class Test_from_points():
    """Tests for from_points(): bounding box of a collection of points."""
    def testCreate(self):
        """A float ndarray of points yields the min/max corners."""
        Pts = np.array(((5, 2), (3, 4), (1, 6)), np.float64)
        B = from_points(Pts)
        assert (B[0, 0] == 1.0 and
                B[0, 1] == 2.0 and
                B[1, 0] == 5.0 and
                B[1, 1] == 6.0)
    def testCreateInts(self):
        """Integer input points are accepted as well."""
        Pts = np.array(((5, 2), (3, 4), (1, 6)))
        B = from_points(Pts)
        assert (B[0, 0] == 1.0 and
                B[0, 1] == 2.0 and
                B[1, 0] == 5.0 and
                B[1, 1] == 6.0)
    def testSinglePoint(self):
        """A single point yields a zero-size box at that point."""
        # np.float_ was removed in NumPy 2.0 -- np.float64 is the
        # explicit, forward-compatible spelling of the same dtype.
        Pts = np.array((5, 2), np.float64)
        B = from_points(Pts)
        assert (B[0, 0] == 5. and
                B[0, 1] == 2.0 and
                B[1, 0] == 5. and
                B[1, 1] == 2.0)
    def testListTuples(self):
        """A plain list of (x, y) tuples works too."""
        Pts = [(3, 6.5), (13, 43.2), (-4.32, -4), (65, -23), (-0.0001,
                                                             23.432)]
        B = from_points(Pts)
        assert (B[0, 0] == -4.32 and
                B[0, 1] == -23.0 and
                B[1, 0] == 65.0 and
                B[1, 1] == 43.2)
class TestMerge():
    """Tests for BBox.Merge(): in-place union with another box."""
    A = BBox(((-23.5, 456), (56, 532.0)))
    B = BBox(((-20.3, 460), (54, 465)))  # completely inside A
    C = BBox(((-23.5, 456), (58, 540.)))  # extends up and to the right of A
    D = BBox(((-26.5, 12), (56, 532.0)))
    def testInside(self):
        # Merging a contained box is a no-op.
        merged = self.A.copy()
        merged.Merge(self.B)
        assert merged == self.A
    def testFullOutside(self):
        # Merging a containing box replaces the extent.
        merged = self.B.copy()
        merged.Merge(self.A)
        assert merged == self.A
    def testUpRight(self):
        merged = self.A.copy()
        merged.Merge(self.C)
        assert merged[0] == self.A[0] and merged[1] == self.C[1]
    def testDownLeft(self):
        merged = self.A.copy()
        merged.Merge(self.D)
        assert merged[0] == self.D[0] and merged[1] == self.A[1]
class TestWidthHeight():
    """Width and Height are read-only derived properties."""
    B = BBox(((1.0, 2.0), (5., 10.)))
    def testWidth(self):
        box = self.B
        assert box.Width == 4.0
    def testWidth2(self):
        box = self.B
        assert box.Height == 8.0
    def testSetW(self):
        # Derived properties must reject assignment.
        with pytest.raises(AttributeError):
            self.B.Height = 6
    def testSetH(self):
        with pytest.raises(AttributeError):
            self.B.Width = 6
class TestCenter():
    """Center is a read-only derived property."""
    B = BBox(((1.0, 2.0), (5., 10.)))
    def testCenter(self):
        assert (self.B.Center == (3.0, 6.0)).all()
    def testSetCenter(self):
        # Center is computed from the corners; assignment must fail.
        with pytest.raises(AttributeError):
            self.B.Center = (6, 5)
class TestBBarray():
    """fromBBArray(): merge an array of bounding boxes into one BBox."""
    # dtype=np.float was deprecated in NumPy 1.20 and removed in 1.24;
    # np.float64 is the equivalent explicit dtype.
    BBarray = np.array((((-23.5, 456), (56, 532.0)), ((-20.3, 460),
              (54, 465)), ((-23.5, 456), (58, 540.)), ((-26.5,
              12), (56, 532.0))), dtype=np.float64)
    BB = asBBox(((-26.5, 12.), (58., 540.)))  # expected union of the four
    def testJoin(self):
        BB = fromBBArray(self.BBarray)
        assert BB == self.BB
class TestNullBBox():
    """NullBBox: an all-NaN box that acts as the identity for Merge."""
    B1 = NullBBox()
    B2 = NullBBox()
    B3 = BBox(((1.0, 2.0), (5., 10.)))
    def testValues(self):
        # np.alltrue was removed in NumPy 2.0; np.all is the replacement.
        assert (np.all(np.isnan(self.B1)))
    def testIsNull(self):
        assert (self.B1.IsNull)
    def testEquals(self):
        # The comparison must return the actual bool True, not just
        # something truthy -- hence the identity check.
        assert ((self.B1 == self.B2) is True)
    def testNotEquals(self):
        assert not self.B1 == self.B3
    def testNotEquals2(self):
        assert not self.B3 == self.B1
    def testMerge(self):
        # Merging a real box into a null box yields that box.
        C = self.B1.copy()
        C.Merge(self.B3)
        assert C == self.B3, 'merge failed, got: %s' % C
    def testOverlaps(self):
        assert self.B1.Overlaps(self.B3) is False
    def testOverlaps2(self):
        assert self.B3.Overlaps(self.B1) is False
class TestInfBBox():
    """InfBBox: an infinite box -- absorbs merges and overlaps everything."""
    B1 = InfBBox()
    B2 = InfBBox()
    B3 = BBox(((1.0, 2.0), (5., 10.)))
    NB = NullBBox()
    def testValues(self):
        # np.alltrue was removed in NumPy 2.0; np.all is the replacement.
        assert (np.all(np.isinf(self.B1)))
    # def testIsNull(self):
    #     assert ( self.B1.IsNull )
    def testEquals(self):
        assert self.B1 == self.B2
    def testNotEquals(self):
        assert not self.B1 == self.B3
    def testNotEquals2(self):
        assert self.B1 != self.B3
    def testNotEquals3(self):
        assert not self.B3 == self.B1
    def testMerge(self):
        # Merging anything into an infinite box leaves it infinite.
        C = self.B1.copy()
        C.Merge(self.B3)
        assert C == self.B2, 'merge failed, got: %s' % C
    def testMerge2(self):
        # Merging an infinite box into a finite one makes it infinite.
        C = self.B3.copy()
        C.Merge(self.B1)
        assert C == self.B1, 'merge failed, got: %s' % C
    def testOverlaps(self):
        assert (self.B1.Overlaps(self.B2) is True)
    def testOverlaps2(self):
        assert (self.B3.Overlaps(self.B1) is True)
    def testOverlaps3(self):
        assert (self.B1.Overlaps(self.B3) is True)
    def testOverlaps4(self):
        assert (self.B1.Overlaps(self.NB) is True)
    def testOverlaps5(self):
        assert (self.NB.Overlaps(self.B1) is True)
class TestSides():
    """Left/Right/Bottom/Top edge accessors."""
    B = BBox(((1.0, 2.0), (5., 10.)))
    def testLeft(self):
        box = self.B
        assert box.Left == 1.0
    def testRight(self):
        box = self.B
        assert box.Right == 5.0
    def testBottom(self):
        box = self.B
        assert box.Bottom == 2.0
    def testTop(self):
        box = self.B
        assert box.Top == 10.0
class TestAsPoly():
    """AsPoly(): the box expanded into its four corner points."""
    B = BBox(((5, 0), (10, 20)))
    corners = np.array([(5., 0.), (5., 20.), (10., 20.), (10., 0.)],
                       dtype=np.float64)
    def testCorners(self):
        poly = self.B.AsPoly()
        print(poly)
        assert np.array_equal(poly, self.corners)
|
[
"geometry_utils.bound_box.fromBBArray",
"geometry_utils.bound_box.InfBBox",
"geometry_utils.bound_box.BBox",
"numpy.array",
"geometry_utils.bound_box.from_points",
"pytest.raises",
"numpy.isnan",
"numpy.isinf",
"geometry_utils.bound_box.NullBBox",
"geometry_utils.bound_box.asBBox"
] |
[((10354, 10387), 'geometry_utils.bound_box.BBox', 'BBox', (['((-23.5, 456), (56, 532.0))'], {}), '(((-23.5, 456), (56, 532.0)))\n', (10358, 10387), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((10396, 10427), 'geometry_utils.bound_box.BBox', 'BBox', (['((-20.3, 460), (54, 465))'], {}), '(((-20.3, 460), (54, 465)))\n', (10400, 10427), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((10472, 10505), 'geometry_utils.bound_box.BBox', 'BBox', (['((-23.5, 456), (58, 540.0))'], {}), '(((-23.5, 456), (58, 540.0)))\n', (10476, 10505), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((10541, 10573), 'geometry_utils.bound_box.BBox', 'BBox', (['((-26.5, 12), (56, 532.0))'], {}), '(((-26.5, 12), (56, 532.0)))\n', (10545, 10573), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((11098, 11129), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (11102, 11129), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((11484, 11515), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (11488, 11515), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((11741, 11885), 'numpy.array', 'np.array', (['(((-23.5, 456), (56, 532.0)), ((-20.3, 460), (54, 465)), ((-23.5, 456), (58,\n 540.0)), ((-26.5, 12), (56, 532.0)))'], {'dtype': 'np.float'}), '((((-23.5, 456), (56, 532.0)), ((-20.3, 460), (54, 465)), ((-23.5, \n 456), (58, 540.0)), ((-26.5, 12), (56, 532.0))), dtype=np.float)\n', (11749, 11885), True, 'import numpy as np\n'), ((11935, 11973), 'geometry_utils.bound_box.asBBox', 'asBBox', (['((-26.5, 12.0), (58.0, 540.0))'], 
{}), '(((-26.5, 12.0), (58.0, 540.0)))\n', (11941, 11973), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12098, 12108), 'geometry_utils.bound_box.NullBBox', 'NullBBox', ([], {}), '()\n', (12106, 12108), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12118, 12128), 'geometry_utils.bound_box.NullBBox', 'NullBBox', ([], {}), '()\n', (12126, 12128), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12138, 12169), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (12142, 12169), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12838, 12847), 'geometry_utils.bound_box.InfBBox', 'InfBBox', ([], {}), '()\n', (12845, 12847), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12857, 12866), 'geometry_utils.bound_box.InfBBox', 'InfBBox', ([], {}), '()\n', (12864, 12866), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12876, 12907), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (12880, 12907), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12915, 12925), 'geometry_utils.bound_box.NullBBox', 'NullBBox', ([], {}), '()\n', (12923, 12925), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((14031, 14062), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (14035, 14062), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((14333, 14357), 
'geometry_utils.bound_box.BBox', 'BBox', (['((5, 0), (10, 20))'], {}), '(((5, 0), (10, 20)))\n', (14337, 14357), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((14372, 14457), 'numpy.array', 'np.array', (['[(5.0, 0.0), (5.0, 20.0), (10.0, 20.0), (10.0, 0.0)]'], {'dtype': 'np.float64'}), '([(5.0, 0.0), (5.0, 20.0), (10.0, 20.0), (10.0, 0.0)], dtype=np.float64\n )\n', (14380, 14457), True, 'import numpy as np\n'), ((492, 514), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 0), (5, 5))'], {}), '(((0, 0), (5, 5)))\n', (496, 514), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((588, 614), 'numpy.array', 'np.array', (['((0, 0), (5, 5))'], {}), '(((0, 0), (5, 5)))\n', (596, 614), True, 'import numpy as np\n'), ((695, 717), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 0), (5, 5))'], {}), '(((0, 0), (5, 5)))\n', (699, 717), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((791, 809), 'geometry_utils.bound_box.BBox', 'BBox', (['(0, 0, 5, 5)'], {}), '((0, 0, 5, 5))\n', (795, 809), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((1089, 1128), 'numpy.array', 'np.array', (['((4, 5), (10, 12))', 'np.float_'], {}), '(((4, 5), (10, 12)), np.float_)\n', (1097, 1128), True, 'import numpy as np\n'), ((1141, 1148), 'geometry_utils.bound_box.BBox', 'BBox', (['A'], {}), '(A)\n', (1145, 1148), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((1461, 1483), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 0), (0, 5))'], {}), '(((0, 0), (0, 5)))\n', (1465, 1483), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((1596, 1630), 'geometry_utils.bound_box.BBox', 'BBox', (['((10.0, -34), (10.0, -34.0))'], {}), '(((10.0, -34), (10.0, 
-34.0)))\n', (1600, 1630), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((1735, 1761), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 0), (1e-20, 5))'], {}), '(((0, 0), (1e-20, 5)))\n', (1739, 1761), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2010, 2032), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 0), (5, 5))'], {}), '(((0, 0), (5, 5)))\n', (2014, 2032), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2045, 2054), 'geometry_utils.bound_box.asBBox', 'asBBox', (['B'], {}), '(B)\n', (2051, 2054), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2151, 2160), 'geometry_utils.bound_box.asBBox', 'asBBox', (['B'], {}), '(B)\n', (2157, 2160), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2259, 2285), 'numpy.array', 'np.array', (['((0, 0), (5, 5))'], {}), '(((0, 0), (5, 5)))\n', (2267, 2285), True, 'import numpy as np\n'), ((2298, 2307), 'geometry_utils.bound_box.asBBox', 'asBBox', (['A'], {}), '(A)\n', (2304, 2307), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2422, 2459), 'numpy.array', 'np.array', (['((0, 0), (5, 5))', 'np.float_'], {}), '(((0, 0), (5, 5)), np.float_)\n', (2430, 2459), True, 'import numpy as np\n'), ((2472, 2481), 'geometry_utils.bound_box.asBBox', 'asBBox', (['A'], {}), '(A)\n', (2478, 2481), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2600, 2633), 'geometry_utils.bound_box.BBox', 'BBox', (['((-23.5, 456), (56, 532.0))'], {}), '(((-23.5, 456), (56, 532.0)))\n', (2604, 2633), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2646, 2679), 
'geometry_utils.bound_box.BBox', 'BBox', (['((-23.5, 456), (56, 532.0))'], {}), '(((-23.5, 456), (56, 532.0)))\n', (2650, 2679), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2751, 2776), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (2755, 2776), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2789, 2816), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 12), (10, 32.0))'], {}), '(((0, 12), (10, 32.0)))\n', (2793, 2816), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2889, 2914), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (2893, 2914), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((2927, 2955), 'geometry_utils.bound_box.BBox', 'BBox', (['((12, 12), (25, 32.0))'], {}), '(((12, 12), (25, 32.0)))\n', (2931, 2955), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3028, 3053), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (3032, 3053), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3066, 3091), 'geometry_utils.bound_box.BBox', 'BBox', (['((12, 5), (25, 15))'], {}), '(((12, 5), (25, 15)))\n', (3070, 3091), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3163, 3188), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (3167, 3188), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3201, 3228), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10, 5), (8.5, 15))'], {}), '(((-10, 
5), (8.5, 15)))\n', (3205, 3228), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3296, 3321), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (3300, 3321), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3334, 3362), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10, 5), (8.5, 9.2))'], {}), '(((-10, 5), (8.5, 9.2)))\n', (3338, 3362), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3434, 3459), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (3438, 3459), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3472, 3504), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10, 25.001), (8.5, 32))'], {}), '(((-10, 25.001), (8.5, 32)))\n', (3476, 3504), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3575, 3600), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (3579, 3600), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3613, 3639), 'geometry_utils.bound_box.BBox', 'BBox', (['((4, 8), (4.95, 32))'], {}), '(((4, 8), (4.95, 32)))\n', (3617, 3639), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3711, 3736), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (3715, 3736), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3749, 3779), 'geometry_utils.bound_box.BBox', 'BBox', (['((17.1, 8), (17.95, 32))'], {}), '(((17.1, 8), (17.95, 32)))\n', (3753, 3779), False, 'from geometry_utils.bound_box import 
BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3852, 3881), 'geometry_utils.bound_box.BBox', 'BBox', (['((-15, -25), (-5, -10))'], {}), '(((-15, -25), (-5, -10)))\n', (3856, 3881), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3894, 3922), 'geometry_utils.bound_box.BBox', 'BBox', (['((-12, -22), (-6, -8))'], {}), '(((-12, -22), (-6, -8)))\n', (3898, 3922), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((3992, 4021), 'geometry_utils.bound_box.BBox', 'BBox', (['((-15, -25), (-5, -10))'], {}), '(((-15, -25), (-5, -10)))\n', (3996, 4021), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4034, 4060), 'geometry_utils.bound_box.BBox', 'BBox', (['((-17, -26), (3, 0))'], {}), '(((-17, -26), (3, 0)))\n', (4038, 4060), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4128, 4153), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (4132, 4153), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4166, 4194), 'geometry_utils.bound_box.BBox', 'BBox', (['((15, 8), (17.95, 32))'], {}), '(((15, 8), (17.95, 32)))\n', (4170, 4194), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4263, 4288), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (4267, 4288), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4301, 4330), 'geometry_utils.bound_box.BBox', 'BBox', (['((15, 25), (17.95, 32))'], {}), '(((15, 25), (17.95, 32)))\n', (4305, 4330), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4401, 
4426), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (4405, 4426), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4439, 4465), 'geometry_utils.bound_box.BBox', 'BBox', (['((15, 25), (15, 25))'], {}), '(((15, 25), (15, 25)))\n', (4443, 4465), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4537, 4561), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (5, 10))'], {}), '(((5, 10), (5, 10)))\n', (4541, 4561), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4574, 4600), 'geometry_utils.bound_box.BBox', 'BBox', (['((15, 25), (15, 25))'], {}), '(((15, 25), (15, 25)))\n', (4578, 4600), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4676, 4700), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (5, 10))'], {}), '(((5, 10), (5, 10)))\n', (4680, 4700), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4713, 4737), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 8), (10, 12))'], {}), '(((0, 8), (10, 12)))\n', (4717, 4737), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4809, 4833), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 1), (10, 25))'], {}), '(((5, 1), (10, 25)))\n', (4813, 4833), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4846, 4868), 'geometry_utils.bound_box.BBox', 'BBox', (['((8, 8), (8, 8))'], {}), '(((8, 8), (8, 8)))\n', (4850, 4868), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((4959, 4990), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', 
(4963, 4990), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5001, 5032), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5005, 5032), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5095, 5126), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5099, 5126), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5187, 5218), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5191, 5218), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5229, 5260), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.1))'], {}), '(((1.0, 2.0), (5.0, 10.1)))\n', (5233, 5260), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5328, 5359), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5332, 5359), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5370, 5405), 'numpy.array', 'np.array', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5378, 5405), True, 'import numpy as np\n'), ((5469, 5500), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5473, 5500), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5511, 5546), 'numpy.array', 'np.array', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5519, 5546), True, 'import numpy as np\n'), ((5610, 5641), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), 
(5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5614, 5641), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5652, 5688), 'numpy.array', 'np.array', (['((1.01, 2.0), (5.0, 10.0))'], {}), '(((1.01, 2.0), (5.0, 10.0)))\n', (5660, 5688), True, 'import numpy as np\n'), ((5772, 5803), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5776, 5803), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5814, 5845), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5818, 5845), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5909, 5940), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (5913, 5940), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((5951, 5981), 'geometry_utils.bound_box.BBox', 'BBox', (['((3.0, 4.0), (3.0, 4.0))'], {}), '(((3.0, 4.0), (3.0, 4.0)))\n', (5955, 5981), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6054, 6085), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (6058, 6085), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6096, 6127), 'geometry_utils.bound_box.BBox', 'BBox', (['((-3.0, 4.0), (0.1, 4.0))'], {}), '(((-3.0, 4.0), (0.1, 4.0)))\n', (6100, 6127), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6202, 6227), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (6206, 6227), False, 'from geometry_utils.bound_box import BBox, 
asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6240, 6267), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 12), (10, 32.0))'], {}), '(((0, 12), (10, 32.0)))\n', (6244, 6267), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6342, 6367), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (6346, 6367), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6380, 6408), 'geometry_utils.bound_box.BBox', 'BBox', (['((12, 12), (25, 32.0))'], {}), '(((12, 12), (25, 32.0)))\n', (6384, 6408), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6483, 6508), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (6487, 6508), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6521, 6546), 'geometry_utils.bound_box.BBox', 'BBox', (['((12, 5), (25, 15))'], {}), '(((12, 5), (25, 15)))\n', (6525, 6546), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6620, 6645), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (6624, 6645), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6658, 6685), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10, 5), (8.5, 15))'], {}), '(((-10, 5), (8.5, 15)))\n', (6662, 6685), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6757, 6782), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (6761, 6782), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6795, 6823), 
'geometry_utils.bound_box.BBox', 'BBox', (['((-10, 5), (8.5, 9.2))'], {}), '(((-10, 5), (8.5, 9.2)))\n', (6799, 6823), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6895, 6920), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (6899, 6920), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((6933, 6965), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10, 25.001), (8.5, 32))'], {}), '(((-10, 25.001), (8.5, 32)))\n', (6937, 6965), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7036, 7061), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (7040, 7061), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7074, 7100), 'geometry_utils.bound_box.BBox', 'BBox', (['((4, 8), (4.95, 32))'], {}), '(((4, 8), (4.95, 32)))\n', (7078, 7100), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7172, 7197), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (7176, 7197), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7210, 7240), 'geometry_utils.bound_box.BBox', 'BBox', (['((17.1, 8), (17.95, 32))'], {}), '(((17.1, 8), (17.95, 32)))\n', (7214, 7240), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7341, 7372), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (7345, 7372), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7470, 7495), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], 
{}), '(((5, 10), (15, 25)))\n', (7474, 7495), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7597, 7622), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (7601, 7622), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7725, 7750), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (7729, 7750), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7851, 7876), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (7855, 7876), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((7974, 7999), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (7978, 7999), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((8096, 8121), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (8100, 8121), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((8222, 8247), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (8226, 8247), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((8344, 8369), 'geometry_utils.bound_box.BBox', 'BBox', (['((5, 10), (15, 25))'], {}), '(((5, 10), (15, 25)))\n', (8348, 8369), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((8480, 8511), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (8484, 8511), False, 'from geometry_utils.bound_box import BBox, 
asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((8616, 8647), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (8620, 8647), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((8758, 8789), 'geometry_utils.bound_box.BBox', 'BBox', (['((1.0, 2.0), (5.0, 10.0))'], {}), '(((1.0, 2.0), (5.0, 10.0)))\n', (8762, 8789), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((8888, 8924), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10.0, -10.0), (-1.0, -1.0))'], {}), '(((-10.0, -10.0), (-1.0, -1.0)))\n', (8892, 8924), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((9025, 9061), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10.0, -10.0), (-1.0, -1.0))'], {}), '(((-10.0, -10.0), (-1.0, -1.0)))\n', (9029, 9061), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((9167, 9203), 'geometry_utils.bound_box.BBox', 'BBox', (['((-10.0, -10.0), (-1.0, -1.0))'], {}), '(((-10.0, -10.0), (-1.0, -1.0)))\n', (9171, 9203), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((9328, 9374), 'numpy.array', 'np.array', (['((5, 2), (3, 4), (1, 6))', 'np.float64'], {}), '(((5, 2), (3, 4), (1, 6)), np.float64)\n', (9336, 9374), True, 'import numpy as np\n'), ((9387, 9403), 'geometry_utils.bound_box.from_points', 'from_points', (['Pts'], {}), '(Pts)\n', (9398, 9403), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((9587, 9621), 'numpy.array', 'np.array', (['((5, 2), (3, 4), (1, 6))'], {}), '(((5, 2), (3, 4), (1, 6)))\n', (9595, 9621), True, 'import numpy as np\n'), ((9634, 9650), 'geometry_utils.bound_box.from_points', 'from_points', (['Pts'], {}), '(Pts)\n', (9645, 9650), False, 
'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((9834, 9861), 'numpy.array', 'np.array', (['(5, 2)', 'np.float_'], {}), '((5, 2), np.float_)\n', (9842, 9861), True, 'import numpy as np\n'), ((9874, 9890), 'geometry_utils.bound_box.from_points', 'from_points', (['Pts'], {}), '(Pts)\n', (9885, 9890), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((10164, 10180), 'geometry_utils.bound_box.from_points', 'from_points', (['Pts'], {}), '(Pts)\n', (10175, 10180), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((12009, 12034), 'geometry_utils.bound_box.fromBBArray', 'fromBBArray', (['self.BBarray'], {}), '(self.BBarray)\n', (12020, 12034), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((883, 908), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (896, 908), False, 'import pytest\n'), ((922, 937), 'geometry_utils.bound_box.BBox', 'BBox', (['(0, 0, 5)'], {}), '((0, 0, 5))\n', (926, 937), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((978, 1003), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (991, 1003), False, 'import pytest\n'), ((1017, 1038), 'geometry_utils.bound_box.BBox', 'BBox', (['(0, 0, 5, 6, 7)'], {}), '((0, 0, 5, 6, 7))\n', (1021, 1038), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((1224, 1249), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1237, 1249), False, 'import pytest\n'), ((1263, 1282), 'geometry_utils.bound_box.BBox', 'BBox', (['(0, 0, -1, 6)'], {}), '((0, 0, -1, 6))\n', (1267, 1282), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((1324, 1349), 
'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1337, 1349), False, 'import pytest\n'), ((1363, 1382), 'geometry_utils.bound_box.BBox', 'BBox', (['(0, 0, 1, -6)'], {}), '((0, 0, 1, -6))\n', (1367, 1382), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((1877, 1902), 'pytest.raises', 'pytest.raises', (['ValueError'], {}), '(ValueError)\n', (1890, 1902), False, 'import pytest\n'), ((1916, 1943), 'geometry_utils.bound_box.BBox', 'BBox', (['((0, 0), (-1e-20, 5))'], {}), '(((0, 0), (-1e-20, 5)))\n', (1920, 1943), False, 'from geometry_utils.bound_box import BBox, asBBox, NullBBox, InfBBox, fromBBArray, from_points\n'), ((11294, 11323), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (11307, 11323), False, 'import pytest\n'), ((11393, 11422), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (11406, 11422), False, 'import pytest\n'), ((11637, 11666), 'pytest.raises', 'pytest.raises', (['AttributeError'], {}), '(AttributeError)\n', (11650, 11666), False, 'import pytest\n'), ((12222, 12239), 'numpy.isnan', 'np.isnan', (['self.B1'], {}), '(self.B1)\n', (12230, 12239), True, 'import numpy as np\n'), ((12980, 12997), 'numpy.isinf', 'np.isinf', (['self.B1'], {}), '(self.B1)\n', (12988, 12997), True, 'import numpy as np\n')]
|
import cv2
# Disable OpenCV's internal threading and OpenCL — commonly done when these
# transforms run inside multi-process data-loader workers, where nested
# parallelism degrades performance (assumption; confirm against the loader).
cv2.setNumThreads(0)
cv2.ocl.setUseOpenCL(False)
import numpy as np
import math
from functools import wraps
def clip(img, dtype, maxval):
    """Clamp *img* to the range [0, maxval] and cast the result to *dtype*."""
    clamped = np.clip(img, 0, maxval)
    return clamped.astype(dtype)
def clipped(func):
    """Decorator: clamp a transform's output back into the value range of
    its input image (same dtype, values capped at the input's maximum)."""
    @wraps(func)
    def wrapped_function(img, *args, **kwargs):
        original_dtype = img.dtype
        value_ceiling = np.max(img)
        transformed = func(img, *args, **kwargs)
        return clip(transformed, original_dtype, value_ceiling)
    return wrapped_function
def fix_shift_values(img, *args):
    """Remap uint-style shift amounts for float images.

    Shift values are normally given on a 0-255 scale; for float32 images
    they are rescaled to [0, 1].  Returns a list for float32 input and the
    original args tuple unchanged otherwise.
    """
    if img.dtype != np.float32:
        return args
    return [value / 255 for value in args]
def vflip(img):
    # Mirror the image vertically (flip around the horizontal axis).
    return cv2.flip(img, 0)
def hflip(img):
    # Mirror the image horizontally (flip around the vertical axis).
    return cv2.flip(img, 1)
def flip(img, code):
    # code follows the cv2.flip convention: 0 = vertical, 1 = horizontal,
    # negative = both axes.
    return cv2.flip(img, code)
def transpose(img):
    """Swap the two spatial axes; a trailing channel axis (if any) stays put."""
    axes = (1, 0, 2) if img.ndim > 2 else (1, 0)
    return img.transpose(*axes)
def rot90(img, times):
    """Rotate the image by 90 degrees *times* times (counter-clockwise),
    returning a C-contiguous array."""
    return np.ascontiguousarray(np.rot90(img, times))
def rotate(img, angle):
    """
    rotate image on specified angle
    :param angle: angle in degrees
    """
    height, width = img.shape[0:2]
    # Affine rotation around the image center, no scaling (factor 1.0).
    mat = cv2.getRotationMatrix2D((width/2, height/2), angle, 1.0)
    # BORDER_REFLECT_101 pads the revealed corners by mirroring the image
    # instead of filling them with black.
    img = cv2.warpAffine(img, mat, (width, height),
                         flags=cv2.INTER_LINEAR,
                         borderMode=cv2.BORDER_REFLECT_101)
    return img
def shift_scale_rotate(img, angle, scale, dx, dy):
    """
    :param angle: in degrees
    :param scale: relative scale
    """
    height, width = img.shape[:2]
    # Components of the scaled 2x2 rotation matrix.
    cc = math.cos(angle/180*math.pi) * scale
    ss = math.sin(angle/180*math.pi) * scale
    rotate_matrix = np.array([[cc, -ss], [ss, cc]])
    # Corners of the original image; box1 is box0 rotated/scaled around the
    # center and then shifted by (dx, dy) given as fractions of width/height.
    box0 = np.array([[0, 0], [width, 0], [width, height], [0, height], ])
    box1 = box0 - np.array([width/2, height/2])
    box1 = np.dot(box1, rotate_matrix.T) + np.array([width/2+dx*width, height/2+dy*height])
    box0 = box0.astype(np.float32)
    box1 = box1.astype(np.float32)
    # Map the original corner box onto the transformed one.
    mat = cv2.getPerspectiveTransform(box0, box1)
    img = cv2.warpPerspective(img, mat, (width, height),
                              flags=cv2.INTER_LINEAR,
                              borderMode=cv2.BORDER_REFLECT_101)
    return img
def center_crop(img, height, width):
    """Crop the central ``height`` x ``width`` region of the image.

    Generalized over the original: works for both 2-D (grayscale/mask)
    and 3-D (H, W, C) arrays — the previous version unpacked exactly
    three dimensions and crashed on single-channel input.

    :param img: array of shape (H, W) or (H, W, C) with H >= height, W >= width
    :param height: crop height
    :param width: crop width
    :return: the centered crop (a view into ``img``)
    """
    h, w = img.shape[:2]
    y1 = (h - height) // 2
    x1 = (w - width) // 2
    # Slicing without an explicit channel index preserves any trailing axes.
    return img[y1:y1 + height, x1:x1 + width]
def shift_hsv(img, hue_shift, sat_shift, val_shift):
    # Shift hue/saturation/value of an RGB image.  Hue wraps around the
    # input's maximum value, while saturation and value are clipped.
    dtype = img.dtype
    maxval = np.max(img)
    # int32 intermediate so the additions below cannot overflow.
    img = cv2.cvtColor(img, cv2.COLOR_RGB2HSV).astype(np.int32)
    h, s, v = cv2.split(img)
    h = cv2.add(h, hue_shift)
    # Wrap hue values that fall outside [0, maxval].
    h = np.where(h < 0, maxval - h, h)
    h = np.where(h > maxval, h - maxval, h)
    h = h.astype(dtype)
    s = clip(cv2.add(s, sat_shift), dtype, maxval)
    v = clip(cv2.add(v, val_shift), dtype, maxval)
    img = cv2.merge((h, s, v)).astype(dtype)
    img = cv2.cvtColor(img, cv2.COLOR_HSV2RGB)
    return img
def shift_channels(img, r_shift, g_shift, b_shift):
    """Add a per-channel offset to an RGB uint8 image in place, clipping
    each channel to [0, 255]; returns the (mutated) image."""
    for channel, shift in enumerate((r_shift, g_shift, b_shift)):
        img[..., channel] = clip(img[..., channel] + shift, np.uint8, 255)
    return img
def clahe(img, clipLimit=2.0, tileGridSize=(8,8)):
    # Contrast Limited Adaptive Histogram Equalization applied to the
    # lightness channel in LAB space (despite the *_yuv variable names).
    img_yuv = cv2.cvtColor(img, cv2.COLOR_RGB2LAB)
    clahe = cv2.createCLAHE(clipLimit=clipLimit, tileGridSize=tileGridSize)  # NOTE(review): local shadows the function name
    img_yuv[:, :, 0] = clahe.apply(img_yuv[:, :, 0])
    img_output = cv2.cvtColor(img_yuv, cv2.COLOR_LAB2RGB)
    return img_output
def blur(img, ksize):
    # Box (mean) filter with a square ksize x ksize kernel.
    return cv2.blur(img, (ksize, ksize))
def invert(img):
    """Photographic negative for 8-bit-range images: v -> 255 - v."""
    negated = 255 - img
    return negated
def channel_shuffle(img):
    """Randomly permute the order of the three channels in the last axis."""
    order = [0, 1, 2]
    np.random.shuffle(order)
    return img[..., order]
def img_to_tensor(im, verbose=False):
    """Convert an HWC image to a CHW float32 tensor, scaling uint8 input
    to [0, 1] (float input is passed through unscaled)."""
    scale = 255. if im.dtype == np.uint8 else 1
    im_out = np.moveaxis(im / scale, -1, 0).astype(np.float32)
    if verbose:
        print("augmentations.functiona.py.img_to_tensor(): im_out.shape:", im_out.shape)
        print("im_out.unique:", np.unique(im_out))
    return im_out
def mask_to_tensor(mask, num_classes, verbose=False):
    """Convert a segmentation mask to a float32 tensor.

    Multi-class masks (HWC) are routed through img_to_tensor; binary masks
    get a leading channel axis, with uint8 input scaled to [0, 1].
    """
    if num_classes > 1:
        mask = img_to_tensor(mask)
    else:
        scale = 255. if mask.dtype == np.uint8 else 1
        mask = np.expand_dims(mask / scale, 0).astype(np.float32)
    if verbose:
        print("augmentations.functiona.py.img_to_tensor(): mask.shape:", mask.shape)
        print("mask.unique:", np.unique(mask))
    return mask
|
[
"numpy.clip",
"numpy.ascontiguousarray",
"math.cos",
"numpy.array",
"cv2.warpPerspective",
"numpy.rot90",
"numpy.moveaxis",
"cv2.ocl.setUseOpenCL",
"numpy.where",
"functools.wraps",
"numpy.max",
"numpy.dot",
"cv2.blur",
"cv2.add",
"cv2.merge",
"cv2.warpAffine",
"cv2.getPerspectiveTransform",
"cv2.split",
"cv2.cvtColor",
"cv2.getRotationMatrix2D",
"cv2.setNumThreads",
"cv2.flip",
"numpy.unique",
"cv2.createCLAHE",
"numpy.expand_dims",
"math.sin",
"numpy.random.shuffle"
] |
[((12, 32), 'cv2.setNumThreads', 'cv2.setNumThreads', (['(0)'], {}), '(0)\n', (29, 32), False, 'import cv2\n'), ((34, 61), 'cv2.ocl.setUseOpenCL', 'cv2.ocl.setUseOpenCL', (['(False)'], {}), '(False)\n', (54, 61), False, 'import cv2\n'), ((326, 337), 'functools.wraps', 'wraps', (['func'], {}), '(func)\n', (331, 337), False, 'from functools import wraps\n'), ((822, 838), 'cv2.flip', 'cv2.flip', (['img', '(0)'], {}), '(img, 0)\n', (830, 838), False, 'import cv2\n'), ((872, 888), 'cv2.flip', 'cv2.flip', (['img', '(1)'], {}), '(img, 1)\n', (880, 888), False, 'import cv2\n'), ((927, 946), 'cv2.flip', 'cv2.flip', (['img', 'code'], {}), '(img, code)\n', (935, 946), False, 'import cv2\n'), ((1093, 1113), 'numpy.rot90', 'np.rot90', (['img', 'times'], {}), '(img, times)\n', (1101, 1113), True, 'import numpy as np\n'), ((1126, 1151), 'numpy.ascontiguousarray', 'np.ascontiguousarray', (['img'], {}), '(img)\n', (1146, 1151), True, 'import numpy as np\n'), ((1319, 1379), 'cv2.getRotationMatrix2D', 'cv2.getRotationMatrix2D', (['(width / 2, height / 2)', 'angle', '(1.0)'], {}), '((width / 2, height / 2), angle, 1.0)\n', (1342, 1379), False, 'import cv2\n'), ((1387, 1491), 'cv2.warpAffine', 'cv2.warpAffine', (['img', 'mat', '(width, height)'], {'flags': 'cv2.INTER_LINEAR', 'borderMode': 'cv2.BORDER_REFLECT_101'}), '(img, mat, (width, height), flags=cv2.INTER_LINEAR,\n borderMode=cv2.BORDER_REFLECT_101)\n', (1401, 1491), False, 'import cv2\n'), ((1844, 1875), 'numpy.array', 'np.array', (['[[cc, -ss], [ss, cc]]'], {}), '([[cc, -ss], [ss, cc]])\n', (1852, 1875), True, 'import numpy as np\n'), ((1890, 1950), 'numpy.array', 'np.array', (['[[0, 0], [width, 0], [width, height], [0, height]]'], {}), '([[0, 0], [width, 0], [width, height], [0, height]])\n', (1898, 1950), True, 'import numpy as np\n'), ((2181, 2220), 'cv2.getPerspectiveTransform', 'cv2.getPerspectiveTransform', (['box0', 'box1'], {}), '(box0, box1)\n', (2208, 2220), False, 'import cv2\n'), ((2232, 2341), 
'cv2.warpPerspective', 'cv2.warpPerspective', (['img', 'mat', '(width, height)'], {'flags': 'cv2.INTER_LINEAR', 'borderMode': 'cv2.BORDER_REFLECT_101'}), '(img, mat, (width, height), flags=cv2.INTER_LINEAR,\n borderMode=cv2.BORDER_REFLECT_101)\n', (2251, 2341), False, 'import cv2\n'), ((2744, 2755), 'numpy.max', 'np.max', (['img'], {}), '(img)\n', (2750, 2755), True, 'import numpy as np\n'), ((2836, 2850), 'cv2.split', 'cv2.split', (['img'], {}), '(img)\n', (2845, 2850), False, 'import cv2\n'), ((2860, 2881), 'cv2.add', 'cv2.add', (['h', 'hue_shift'], {}), '(h, hue_shift)\n', (2867, 2881), False, 'import cv2\n'), ((2891, 2921), 'numpy.where', 'np.where', (['(h < 0)', '(maxval - h)', 'h'], {}), '(h < 0, maxval - h, h)\n', (2899, 2921), True, 'import numpy as np\n'), ((2931, 2966), 'numpy.where', 'np.where', (['(h > maxval)', '(h - maxval)', 'h'], {}), '(h > maxval, h - maxval, h)\n', (2939, 2966), True, 'import numpy as np\n'), ((3153, 3189), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_HSV2RGB'], {}), '(img, cv2.COLOR_HSV2RGB)\n', (3165, 3189), False, 'import cv2\n'), ((3530, 3566), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_RGB2LAB'], {}), '(img, cv2.COLOR_RGB2LAB)\n', (3542, 3566), False, 'import cv2\n'), ((3580, 3643), 'cv2.createCLAHE', 'cv2.createCLAHE', ([], {'clipLimit': 'clipLimit', 'tileGridSize': 'tileGridSize'}), '(clipLimit=clipLimit, tileGridSize=tileGridSize)\n', (3595, 3643), False, 'import cv2\n'), ((3716, 3756), 'cv2.cvtColor', 'cv2.cvtColor', (['img_yuv', 'cv2.COLOR_LAB2RGB'], {}), '(img_yuv, cv2.COLOR_LAB2RGB)\n', (3728, 3756), False, 'import cv2\n'), ((3819, 3848), 'cv2.blur', 'cv2.blur', (['img', '(ksize, ksize)'], {}), '(img, (ksize, ksize))\n', (3827, 3848), False, 'import cv2\n'), ((3953, 3978), 'numpy.random.shuffle', 'np.random.shuffle', (['ch_arr'], {}), '(ch_arr)\n', (3970, 3978), True, 'import numpy as np\n'), ((1741, 1772), 'math.cos', 'math.cos', (['(angle / 180 * math.pi)'], {}), '(angle / 180 * math.pi)\n', (1749, 
1772), False, 'import math\n'), ((1787, 1818), 'math.sin', 'math.sin', (['(angle / 180 * math.pi)'], {}), '(angle / 180 * math.pi)\n', (1795, 1818), False, 'import math\n'), ((1973, 2006), 'numpy.array', 'np.array', (['[width / 2, height / 2]'], {}), '([width / 2, height / 2])\n', (1981, 2006), True, 'import numpy as np\n'), ((2015, 2044), 'numpy.dot', 'np.dot', (['box1', 'rotate_matrix.T'], {}), '(box1, rotate_matrix.T)\n', (2021, 2044), True, 'import numpy as np\n'), ((2047, 2107), 'numpy.array', 'np.array', (['[width / 2 + dx * width, height / 2 + dy * height]'], {}), '([width / 2 + dx * width, height / 2 + dy * height])\n', (2055, 2107), True, 'import numpy as np\n'), ((3006, 3027), 'cv2.add', 'cv2.add', (['s', 'sat_shift'], {}), '(s, sat_shift)\n', (3013, 3027), False, 'import cv2\n'), ((3058, 3079), 'cv2.add', 'cv2.add', (['v', 'val_shift'], {}), '(v, val_shift)\n', (3065, 3079), False, 'import cv2\n'), ((171, 194), 'numpy.clip', 'np.clip', (['img', '(0)', 'maxval'], {}), '(img, 0, maxval)\n', (178, 194), True, 'import numpy as np\n'), ((423, 434), 'numpy.max', 'np.max', (['img'], {}), '(img)\n', (429, 434), True, 'import numpy as np\n'), ((2767, 2803), 'cv2.cvtColor', 'cv2.cvtColor', (['img', 'cv2.COLOR_RGB2HSV'], {}), '(img, cv2.COLOR_RGB2HSV)\n', (2779, 2803), False, 'import cv2\n'), ((3107, 3127), 'cv2.merge', 'cv2.merge', (['(h, s, v)'], {}), '((h, s, v))\n', (3116, 3127), False, 'import cv2\n'), ((4100, 4163), 'numpy.moveaxis', 'np.moveaxis', (['(im / (255.0 if im.dtype == np.uint8 else 1))', '(-1)', '(0)'], {}), '(im / (255.0 if im.dtype == np.uint8 else 1), -1, 0)\n', (4111, 4163), True, 'import numpy as np\n'), ((4324, 4341), 'numpy.unique', 'np.unique', (['im_out'], {}), '(im_out)\n', (4333, 4341), True, 'import numpy as np\n'), ((4750, 4765), 'numpy.unique', 'np.unique', (['mask'], {}), '(mask)\n', (4759, 4765), True, 'import numpy as np\n'), ((4529, 4595), 'numpy.expand_dims', 'np.expand_dims', (['(mask / (255.0 if mask.dtype == np.uint8 else 
1))', '(0)'], {}), '(mask / (255.0 if mask.dtype == np.uint8 else 1), 0)\n', (4543, 4595), True, 'import numpy as np\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 14 09:49:13 2017
@author: vmg
"""
import os
import buildingspy.development.regressiontest as r
# Regression tester for the TRANSFORM Modelica library; HTML syntax
# validation is disabled (the commented-out argument shows how a specific
# simulation tool could be forced instead of the default).
rt = r.Tester(check_html=False)#,tool="dymola")
LibPath = os.path.join("TRANSFORM")
ResPath = LibPath
# Show the simulator GUI, point the tester at the library root, and run
# single-threaded (results land next to the library).
rt.showGUI(True)
rt.setLibraryRoot(LibPath, ResPath)
rt.setNumberOfThreads(1)
#rt.TestSinglePackage('Media.Solids.Examples.Hastelloy_N_Haynes', SinglePack=True)
rt.run()
|
[
"os.path.join",
"buildingspy.development.regressiontest.Tester"
] |
[((149, 175), 'buildingspy.development.regressiontest.Tester', 'r.Tester', ([], {'check_html': '(False)'}), '(check_html=False)\n', (157, 175), True, 'import buildingspy.development.regressiontest as r\n'), ((203, 228), 'os.path.join', 'os.path.join', (['"""TRANSFORM"""'], {}), "('TRANSFORM')\n", (215, 228), False, 'import os\n')]
|
'''
Configuration generation for running Pancreas datasets
'''
import os, argparse
from pipelines import method_utils, dataloading_utils
from preprocess.process_train_test_data import *
if __name__ == "__main__":
    data_dir = "~/gpu/data"
    ## parse arguments
    import argparse
    parser = argparse.ArgumentParser(description="Celltyping pipeline.")
    parser.add_argument('data_source', help="Load which dataset",
            choices=[
                'pancreas', 'pancreas_seg_cond', 'pancreas_custom',
                'pancreas_seg_mix', 'pancreas_multi_to_multi'
                ])
    parser.add_argument('-m', '--method', help="Run which method",
            choices=['MLP', 'MLP_GO', 'MLP_CP', 'GEDFN', 'ItClust', 'SVM_RBF', 'SVM_linear', 'RF'], ## remove DFN
            required=True)
    parser.add_argument('--select_on', help="Feature selection on train or test, or None of them",
            choices=['train', 'test'])
    parser.add_argument('--select_method', help="Feature selection method, Seurat/FEAST or None",
            choices=['Seurat', 'FEAST', 'F-test'])
    parser.add_argument('--n_features', help="Number of features selected",
            default=1000, type=int)
    parser.add_argument('--train', help="Specify which as train", required=True)
    parser.add_argument('--test', help="Specify which as test", required=True)
    parser.add_argument('--sample_seed', help="Downsample seed in combined individual effect",
            default=0, type=int)
    args = parser.parse_args()
    # Result directory encodes the dataset and the train->test direction.
    pipeline_dir = "pipelines/result_Pancreas_collections"
    result_prefix = pipeline_dir+os.sep+"result_"+args.data_source+'_'+\
            args.train+'_to_'+args.test
    os.makedirs(result_prefix, exist_ok=True)
    ## create file directory
    # Subdirectory name records the feature-selection configuration.
    if args.select_on is None and args.select_method is None:
        result_dir = result_prefix+os.sep+"no_feature"
    else:
        result_dir = result_prefix+os.sep+args.select_method+'_'+\
                str(args.n_features)+'_on_'+args.select_on
    os.makedirs(result_dir, exist_ok=True)
    # Reuse previously prepared AnnData objects when available; otherwise
    # load the Pancreas data from scratch.
    load_ind, train_adata, test_adata = load_adata(result_dir)
    if not load_ind:
        train_adata, test_adata = dataloading_utils.load_Pancreas_adata(
                data_dir, result_dir, args=args)
    ## whether to purify reference dataset
    purify_method = ""
    if "purify_dist" in args.data_source:
        purify_method = "distance"
    elif "purify_SVM" in args.data_source:
        purify_method = "SVM"
    train_adata, test_adata = dataloading_utils.process_loaded_data(
        train_adata, test_adata, result_dir, args=args, purify_method=purify_method)
    print("Train anndata: \n", train_adata)
    print("Test anndata: \n", test_adata)
    # Run the selected classification method end to end.
    method_utils.run_pipeline(args, train_adata, test_adata, data_dir, result_dir)
|
[
"os.makedirs",
"argparse.ArgumentParser",
"pipelines.dataloading_utils.load_Pancreas_adata",
"pipelines.dataloading_utils.process_loaded_data",
"pipelines.method_utils.run_pipeline"
] |
[((301, 360), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Celltyping pipeline."""'}), "(description='Celltyping pipeline.')\n", (324, 360), False, 'import argparse\n'), ((1672, 1713), 'os.makedirs', 'os.makedirs', (['result_prefix'], {'exist_ok': '(True)'}), '(result_prefix, exist_ok=True)\n', (1683, 1713), False, 'import os, argparse\n'), ((2002, 2040), 'os.makedirs', 'os.makedirs', (['result_dir'], {'exist_ok': '(True)'}), '(result_dir, exist_ok=True)\n', (2013, 2040), False, 'import os, argparse\n'), ((2751, 2829), 'pipelines.method_utils.run_pipeline', 'method_utils.run_pipeline', (['args', 'train_adata', 'test_adata', 'data_dir', 'result_dir'], {}), '(args, train_adata, test_adata, data_dir, result_dir)\n', (2776, 2829), False, 'from pipelines import method_utils, dataloading_utils\n'), ((2160, 2230), 'pipelines.dataloading_utils.load_Pancreas_adata', 'dataloading_utils.load_Pancreas_adata', (['data_dir', 'result_dir'], {'args': 'args'}), '(data_dir, result_dir, args=args)\n', (2197, 2230), False, 'from pipelines import method_utils, dataloading_utils\n'), ((2520, 2638), 'pipelines.dataloading_utils.process_loaded_data', 'dataloading_utils.process_loaded_data', (['train_adata', 'test_adata', 'result_dir'], {'args': 'args', 'purify_method': 'purify_method'}), '(train_adata, test_adata, result_dir,\n args=args, purify_method=purify_method)\n', (2557, 2638), False, 'from pipelines import method_utils, dataloading_utils\n')]
|
import pandas as pd
from sklearn.preprocessing import StandardScaler
def stand_demo(path="dating.txt", columns=("milage", "Liters", "Consumtime")):
    """Standardize (zero mean, unit variance) selected columns of a CSV file.

    Generalized from the original, which hard-coded both the file name and
    the column list; the defaults preserve the original behaviour exactly.

    :param path: CSV file to load
    :param columns: column names to standardize
    :return: None (data, means and variances are printed)
    """
    data = pd.read_csv(path)
    print(data)
    transfer = StandardScaler()
    data = transfer.fit_transform(data[list(columns)])
    print("Standardization result: \n", data)
    print("Mean of each figure: \n", transfer.mean_)
    print("Variance of each figure: \n", transfer.var_)
    return None
stand_demo()
|
[
"sklearn.preprocessing.StandardScaler",
"pandas.read_csv"
] |
[((99, 124), 'pandas.read_csv', 'pd.read_csv', (['"""dating.txt"""'], {}), "('dating.txt')\n", (110, 124), True, 'import pandas as pd\n'), ((157, 173), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (171, 173), False, 'from sklearn.preprocessing import StandardScaler\n')]
|
# -*- coding: utf-8 -*-
"""
Created on Mon Aug 18 22:20:01 2014
@author: baki
"""
import shlex
from subprocess import Popen, PIPE
from .Log import Log
class Shell:
    """Convenience wrapper around :mod:`subprocess` for shell commands.

    Synchronous commands go through :meth:`runcmd`; at most one managed
    background process per instance is supported via :meth:`runcmdBgrnd`.

    Fixes over the original implementation:
      * identity checks (``is None``) instead of ``== None``;
      * :meth:`kill`/:meth:`terminate` now reset the tracked state, so a new
        background process can be started afterwards (previously the
        assertion in :meth:`runcmdBgrnd` failed forever after cleanup);
      * the output handle is only ``close()``d when it is a real file —
        calling ``close()`` on the ``PIPE`` sentinel (an int) raised
        AttributeError.
    """

    def __init__(self, TAG=""):
        self.log = Log(TAG=TAG)
        self.current_process = None  # the single managed background process
        self.process_output = None   # its redirect target (file object or PIPE)

    def setTag(self, tag):
        """Change the logger tag."""
        self.log.setTag(tag)

    def runcmd(self, cmd, cwd=None, shell=False):
        """Run *cmd* synchronously and return ``(stdout, stderr)``.

        Non-empty streams are decoded as ASCII; empty ones are returned
        as-is (empty bytes), matching the original behaviour.
        """
        args = shlex.split(cmd)
        p = Popen(args, stdout=PIPE, stderr=PIPE, cwd=cwd, shell=shell)
        out, err = p.communicate()
        if out:
            out = out.decode("ascii")
        if err:
            err = err.decode("ascii")
        return out, err

    def runcmdBgrnd(self, cmd, out=PIPE, cwd=None, shell=False):
        """Start *cmd* in the background and return its Popen object.

        :param out: ``PIPE`` or a filename that stdout/stderr are written to
        """
        assert self.current_process is None, \
            "currently, one shell object supports only one background process"
        self.log.v("cmd: {}\n with params: out={}, cwd={}, shell={}".format(cmd, out, cwd, shell))
        redirect_to = out
        if out is not PIPE:
            assert self.process_output is None, \
                "currently, one shell object supports only one background process"
            redirect_to = open(out, "w")
        args = shlex.split(cmd)
        p = Popen(args, stdout=redirect_to, stderr=redirect_to, cwd=cwd, shell=shell)
        self.current_process = p
        self.process_output = redirect_to
        return p

    def _stop(self, process, force):
        """Stop *process* (or the tracked one) and release the bookkeeping."""
        if process is None:
            process = self.current_process
        if process is not None:
            if force:
                process.kill()
            else:
                process.terminate()
        # Close the redirect file, but never the PIPE sentinel (an int).
        if self.process_output is not None and self.process_output is not PIPE:
            self.process_output.close()
        if process is self.current_process:
            self.current_process = None
        self.process_output = None

    def kill(self, process=None):
        """Forcefully kill the given (or the tracked) background process."""
        self._stop(process, force=True)

    def terminate(self, process=None):
        """Gracefully terminate the given (or the tracked) background process."""
        self._stop(process, force=False)

    def runGrep(self, search, subject, options):
        """Run ``grep`` with *options* for *search* inside *subject*."""
        cmd = "grep {} \"{}\" {}".format(options, search, subject)
        return self.runcmd(cmd)

    def rm(self, name):
        """Remove a file."""
        cmd = "rm {}".format(name)
        return self.runcmd(cmd)

    def rmdir(self, name):
        """Remove an empty directory."""
        cmd = "rmdir {}".format(name)
        return self.runcmd(cmd)

    def rmrdir(self, name):
        """Remove a directory tree recursively."""
        cmd = "rm -r {}".format(name)
        return self.runcmd(cmd)

    def mv(self, src, dst):
        """Move/rename *src* to *dst*."""
        cmd = "mv {} {}".format(src, dst)
        return self.runcmd(cmd)

    def cp(self, src, dst):
        """Copy *src* to *dst* recursively."""
        cmd = "cp -r {} {}".format(src, dst)
        return self.runcmd(cmd)

    def mkdir(self, name):
        """Create a directory (with parents) if missing."""
        cmd = "mkdir {} -p".format(name)
        return self.runcmd(cmd)

    def clean(self, name):
        """Remove and recreate a directory."""
        self.rmrdir(name)
        self.mkdir(name)
|
[
"shlex.split",
"subprocess.Popen"
] |
[((523, 539), 'shlex.split', 'shlex.split', (['cmd'], {}), '(cmd)\n', (534, 539), False, 'import shlex\n'), ((552, 611), 'subprocess.Popen', 'Popen', (['args'], {'stdout': 'PIPE', 'stderr': 'PIPE', 'cwd': 'cwd', 'shell': 'shell'}), '(args, stdout=PIPE, stderr=PIPE, cwd=cwd, shell=shell)\n', (557, 611), False, 'from subprocess import Popen, PIPE\n'), ((1437, 1453), 'shlex.split', 'shlex.split', (['cmd'], {}), '(cmd)\n', (1448, 1453), False, 'import shlex\n'), ((1466, 1539), 'subprocess.Popen', 'Popen', (['args'], {'stdout': 'redirect_to', 'stderr': 'redirect_to', 'cwd': 'cwd', 'shell': 'shell'}), '(args, stdout=redirect_to, stderr=redirect_to, cwd=cwd, shell=shell)\n', (1471, 1539), False, 'from subprocess import Popen, PIPE\n')]
|
import os
import six
import copy
import pickle
import random
import logging
from scrapy.http import Request
from scrapy.exceptions import NotConfigured
from scrapy.commands.genspider import sanitize_module_name
from scrapy.spiders import CrawlSpider
from .utils import (
add_sample,
response_to_dict,
get_or_create_test_dir,
parse_request,
parse_object,
get_project_dir,
get_middlewares,
create_dir,
)
logger = logging.getLogger(__name__)
def _copy_settings(settings):
out = {}
for name in settings.getlist('AUTOUNIT_INCLUDED_SETTINGS', []):
out[name] = settings.get(name)
return out
class AutounitMiddleware:
    """Spider middleware that records each callback's input (request,
    response, spider state) and output (requests/items) as fixtures on
    disk, so scrapy-autounit can later replay them as unit tests."""
    def __init__(self, settings):
        # Fail loudly if the middleware is configured in the wrong slot.
        if not any(
            self.__class__.__name__ in s
            for s in settings.getwithbase('SPIDER_MIDDLEWARES').keys()
        ):
            raise ValueError(
                '%s must be in SPIDER_MIDDLEWARES' % (
                    self.__class__.__name__,))
        if not settings.getbool('AUTOUNIT_ENABLED'):
            raise NotConfigured('scrapy-autounit is not enabled')
        if settings.getint('CONCURRENT_REQUESTS') > 1:
            logger.warn(
                'Recording with concurrency > 1! '
                'Data races in shared object modification may create broken '
                'tests.'
            )
        # At least 10 fixtures per callback are always kept.
        self.max_fixtures = settings.getint(
            'AUTOUNIT_MAX_FIXTURES_PER_CALLBACK',
            default=10
        )
        self.max_fixtures = \
            self.max_fixtures if self.max_fixtures >= 10 else 10
        self.base_path = settings.get(
            'AUTOUNIT_BASE_PATH',
            default=os.path.join(get_project_dir(), 'autounit')
        )
        create_dir(self.base_path, exist_ok=True)
        # Per-callback count of results seen so far.
        self.fixture_counters = {}
    @classmethod
    def from_crawler(cls, crawler):
        # Standard Scrapy extension hook.
        return cls(crawler.settings)
    def process_spider_input(self, response, spider):
        # Snapshot the callback input before the spider can mutate anything;
        # the pickle travels with the response via response.meta.
        filter_args = {'crawler', 'settings', 'start_urls'}
        if isinstance(spider, CrawlSpider):
            filter_args |= {'rules', '_rules'}
        response.meta['_autounit'] = pickle.dumps({
            'request': parse_request(response.request, spider),
            'response': response_to_dict(response),
            'spider_args': {
                k: v for k, v in spider.__dict__.items()
                if k not in filter_args
            },
            'middlewares': get_middlewares(spider),
        })
        return None
    def process_spider_output(self, response, result, spider):
        settings = spider.settings
        processed_result = []
        out = []
        for elem in result:
            out.append(elem)
            is_request = isinstance(elem, Request)
            if is_request:
                _data = parse_request(elem, spider)
            else:
                # Deep-copy items so later pipeline mutations don't leak
                # into the recorded fixture.
                _data = parse_object(copy.deepcopy(elem), spider)
            processed_result.append({
                'type': 'request' if is_request else 'item',
                'data': _data
            })
        input_data = pickle.loads(response.meta.pop('_autounit'))
        request = input_data['request']
        callback_name = request['callback']
        spider_attr_out = {
            k: v for k, v in spider.__dict__.items()
            if k not in ('crawler', 'settings', 'start_urls')
        }
        data = {
            'spider_name': spider.name,
            'request': request,
            'response': input_data['response'],
            'spider_args_out': spider_attr_out,
            'result': processed_result,
            'spider_args_in': input_data['spider_args'],
            'settings': _copy_settings(settings),
            'middlewares': input_data['middlewares'],
            'python_version': 2 if six.PY2 else 3,
        }
        callback_counter = self.fixture_counters.setdefault(callback_name, 0)
        self.fixture_counters[callback_name] += 1
        test_dir, test_name = get_or_create_test_dir(
            self.base_path,
            sanitize_module_name(spider.name),
            callback_name,
            settings.get('AUTOUNIT_EXTRA_PATH'),
        )
        # Record the first max_fixtures results directly; afterwards use
        # random replacement so late results still have a chance to be kept.
        if callback_counter < self.max_fixtures:
            add_sample(callback_counter + 1, test_dir, test_name, data)
        else:
            r = random.randint(0, callback_counter)
            if r < self.max_fixtures:
                add_sample(r + 1, test_dir, test_name, data)
        return out
|
[
"logging.getLogger",
"scrapy.commands.genspider.sanitize_module_name",
"scrapy.exceptions.NotConfigured",
"copy.deepcopy",
"random.randint"
] |
[((446, 473), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (463, 473), False, 'import logging\n'), ((1049, 1096), 'scrapy.exceptions.NotConfigured', 'NotConfigured', (['"""scrapy-autounit is not enabled"""'], {}), "('scrapy-autounit is not enabled')\n", (1062, 1096), False, 'from scrapy.exceptions import NotConfigured\n'), ((4030, 4063), 'scrapy.commands.genspider.sanitize_module_name', 'sanitize_module_name', (['spider.name'], {}), '(spider.name)\n', (4050, 4063), False, 'from scrapy.commands.genspider import sanitize_module_name\n'), ((4303, 4338), 'random.randint', 'random.randint', (['(0)', 'callback_counter'], {}), '(0, callback_counter)\n', (4317, 4338), False, 'import random\n'), ((2880, 2899), 'copy.deepcopy', 'copy.deepcopy', (['elem'], {}), '(elem)\n', (2893, 2899), False, 'import copy\n')]
|
import asyncio
import discord
from discord.ext import commands
from discord.ext.commands.core import has_permissions
class cog(commands.Cog):
    """Moderation cog providing a bulk message-deletion command."""

    def __init__(self, client):
        self.client = client

    @commands.command(aliases=["clear"])
    @has_permissions(ban_members=True)
    async def purge(self, ctx, count: int):
        """Delete the last *count* messages (plus the invoking command),
        then show a short-lived confirmation.

        Fixes two bugs in the original:
        * ``count`` arrived as ``str`` (no converter annotation), so
          ``count + 1`` raised TypeError; annotating it as ``int`` lets
          discord.py convert the argument.
        * ``asyncio.sleep(2)`` was never awaited, so the confirmation
          message was deleted immediately instead of after two seconds.
        """
        await ctx.channel.purge(limit=count + 1)
        message = await ctx.send(f"Deleted {count} messages.")
        await asyncio.sleep(2)
        await message.delete()
def setup(client):
    # Entry point used by discord.py's extension loader (bot.load_extension).
    client.add_cog(cog(client))
|
[
"discord.ext.commands.core.has_permissions",
"discord.ext.commands.command",
"asyncio.sleep"
] |
[((211, 246), 'discord.ext.commands.command', 'commands.command', ([], {'aliases': "['clear']"}), "(aliases=['clear'])\n", (227, 246), False, 'from discord.ext import commands\n'), ((252, 285), 'discord.ext.commands.core.has_permissions', 'has_permissions', ([], {'ban_members': '(True)'}), '(ban_members=True)\n', (267, 285), False, 'from discord.ext.commands.core import has_permissions\n'), ((444, 460), 'asyncio.sleep', 'asyncio.sleep', (['(2)'], {}), '(2)\n', (457, 460), False, 'import asyncio\n')]
|
from unittest import TestCase
from ..helpers import (
create_web3,
create_contract,
get_future_execution_start_at_timestamp,
proceed_time,
get_prediction_time_shift,
get_purchase_time_shift,
get_shipping_time_shift,
get_publication_time_shift,
get_tournament_id,
get_chain_id,
create_store,
generate_redis_namespace,
BaseHardhatTestCase
)
from src.web3 import get_account_address
# Shared fixtures for every test in this module: a common round start time,
# a dummy prediction payload, and the two model identifiers used below.
execution_start_at = get_future_execution_start_at_timestamp()
content = 'abc'.encode()
model_id = 'model1'
model_id_other = 'model_other'
class TestStoreFetchPurchasesToShip(BaseHardhatTestCase):
    """Tests for ``Store.fetch_purchases_to_ship``: only unshipped purchases
    matching both the tournament id and execution time should be returned."""
    def setUp(self):
        """Two sellers publish one prediction each; a third account buys
        both, leaving both purchases unshipped."""
        super().setUp()
        w3 = create_web3()
        contract = create_contract(w3)
        store = create_store(w3, contract)
        self.store = store
        self.w3 = w3
        w3_other = create_web3(account_index=1)
        contract_other = create_contract(w3_other)
        store_other = create_store(w3_other, contract_other)
        w3_purchaser = create_web3(account_index=2)
        contract_purchaser = create_contract(w3_purchaser)
        store_purchaser = create_store(w3_purchaser, contract_purchaser)
        self.store_purchaser = store_purchaser
        self.w3_purchaser = w3_purchaser
        # predict
        proceed_time(w3, execution_start_at + get_prediction_time_shift())
        store.create_models_if_not_exist([dict(
            model_id=model_id,
            tournament_id=get_tournament_id(),
            prediction_license='CC0-1.0',
        )])
        store.create_predictions([dict(
            model_id=model_id,
            execution_start_at=execution_start_at,
            content=content,
            price=1,
        )])
        # other predict
        store_other.create_models_if_not_exist([dict(
            model_id=model_id_other,
            tournament_id=get_tournament_id(),
            prediction_license='CC0-1.0',
        )])
        store_other.create_predictions([dict(
            model_id=model_id_other,
            execution_start_at=execution_start_at,
            content=content,
            price=1,
        )])
        # purchase
        proceed_time(w3, execution_start_at + get_purchase_time_shift())
        store_purchaser.create_purchases([dict(
            model_id=model_id,
            execution_start_at=execution_start_at,
        ), dict(
            model_id=model_id_other,
            execution_start_at=execution_start_at,
        )])
    def test_ok(self):
        """The seller sees exactly its own pending (unshipped) purchase."""
        purchases = self.store.fetch_purchases_to_ship(
            tournament_id=get_tournament_id(),
            execution_start_at=execution_start_at
        )
        self.assertEqual(purchases, [{
            **purchases[0],
            'model_id': model_id,
            'execution_start_at': execution_start_at,
            'purchaser': get_account_address(self.w3_purchaser.eth.default_account),
        }])
    def test_different_tournament_id(self):
        """A non-matching tournament id yields no purchases."""
        purchases = self.store.fetch_purchases_to_ship(
            tournament_id='different',
            execution_start_at=execution_start_at
        )
        self.assertEqual(purchases, [])
    def test_different_execution_start_at(self):
        """A non-matching execution time yields no purchases."""
        purchases = self.store.fetch_purchases_to_ship(
            tournament_id=get_tournament_id(),
            execution_start_at=execution_start_at + 1,
        )
        self.assertEqual(purchases, [])
    def test_already_shipped(self):
        """Once shipped, a purchase no longer appears in the to-ship list."""
        store = self.store
        # ship
        proceed_time(self.w3, execution_start_at + get_shipping_time_shift())
        store.ship_purchases([dict(
            model_id=model_id,
            execution_start_at=execution_start_at,
            purchaser=get_account_address(self.w3_purchaser.eth.default_account),
        )])
        purchases = store.fetch_purchases_to_ship(
            tournament_id=get_tournament_id(),
            execution_start_at=execution_start_at,
        )
        self.assertEqual(purchases, [])
|
[
"src.web3.get_account_address"
] |
[((2847, 2905), 'src.web3.get_account_address', 'get_account_address', (['self.w3_purchaser.eth.default_account'], {}), '(self.w3_purchaser.eth.default_account)\n', (2866, 2905), False, 'from src.web3 import get_account_address\n'), ((3715, 3773), 'src.web3.get_account_address', 'get_account_address', (['self.w3_purchaser.eth.default_account'], {}), '(self.w3_purchaser.eth.default_account)\n', (3734, 3773), False, 'from src.web3 import get_account_address\n')]
|
from scapy.fields import ByteField, ShortField
from scapy.packet import Packet
class TPKT(Packet):
name = "TPKT"
fields_desc = [ByteField("version", 3),
ByteField("reserved", 0),
ShortField("length", 0x0000)]
|
[
"scapy.fields.ShortField",
"scapy.fields.ByteField"
] |
[((138, 161), 'scapy.fields.ByteField', 'ByteField', (['"""version"""', '(3)'], {}), "('version', 3)\n", (147, 161), False, 'from scapy.fields import ByteField, ShortField\n'), ((182, 206), 'scapy.fields.ByteField', 'ByteField', (['"""reserved"""', '(0)'], {}), "('reserved', 0)\n", (191, 206), False, 'from scapy.fields import ByteField, ShortField\n'), ((227, 250), 'scapy.fields.ShortField', 'ShortField', (['"""length"""', '(0)'], {}), "('length', 0)\n", (237, 250), False, 'from scapy.fields import ByteField, ShortField\n')]
|
import argparse
import datetime
def get_last_elapsed_tax_year() -> int:
now = datetime.datetime.now()
if now.date() >= datetime.date(now.year, 4, 6):
return now.year - 1
else:
return now.year - 2
def create_parser() -> argparse.ArgumentParser:
# Schwab transactions
# Montly GBP/USD history from
# https://www.gov.uk/government/collections/exchange-rates-for-customs-and-vat
default_gbp_history_file = "GBP_USD_monthly_history.csv"
# Initial vesting and spin-off prices
default_initial_prices_file = "initial_prices.csv"
default_pdf_report = "calculations.pdf"
parser = argparse.ArgumentParser(
description="Calculate capital gains from stock transactions.",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument(
"--tax_year",
type=int,
default=get_last_elapsed_tax_year(),
nargs="?",
help="First year of the tax year to calculate gains on",
)
parser.add_argument(
"--schwab",
type=str,
nargs="?",
help="file containing the exported transactions from Schwab",
)
parser.add_argument(
"--trading212",
type=str,
nargs="?",
help="folder containing the exported transaction files from Trading212",
)
parser.add_argument(
"--gbp_history",
type=str,
default=default_gbp_history_file,
nargs="?",
help="monthly GBP/USD prices from HMRC",
)
parser.add_argument(
"--initial_prices",
type=str,
default=default_initial_prices_file,
nargs="?",
help="file cointaining stock prices in USD at the moment of vesting, split, etc.",
)
parser.add_argument(
"--report",
type=str,
default=default_pdf_report,
nargs="?",
help="where to save the generated pdf report",
)
return parser
|
[
"datetime.datetime.now",
"datetime.date",
"argparse.ArgumentParser"
] |
[((84, 107), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (105, 107), False, 'import datetime\n'), ((635, 788), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Calculate capital gains from stock transactions."""', 'formatter_class': 'argparse.ArgumentDefaultsHelpFormatter'}), "(description=\n 'Calculate capital gains from stock transactions.', formatter_class=\n argparse.ArgumentDefaultsHelpFormatter)\n", (658, 788), False, 'import argparse\n'), ((129, 158), 'datetime.date', 'datetime.date', (['now.year', '(4)', '(6)'], {}), '(now.year, 4, 6)\n', (142, 158), False, 'import datetime\n')]
|
"""Submit a batch task to livy server."""
import argparse
import datetime
import importlib
import json
import logging
import re
import typing
import livy
import livy.cli.config
import livy.cli.logging
logger = logging.getLogger(__name__)
class PreSubmitArguments(argparse.Namespace):
"""Typed :py:class:`~argparse.Namespace` for arguments before task submission."""
# task
script: str
args: typing.List[str]
class_name: str
jars: typing.List[str]
py_files: typing.List[str]
files: typing.List[str]
archives: typing.List[str]
queue_name: str
session_name: str
api_url: str
driver_memory: str
driver_cores: int
executor_memory: str
executor_cores: int
num_executors: int
spark_conf: typing.List[typing.Tuple[str, str]]
# log
watch_log: bool
# time
time_prog_start: datetime.datetime
"Local time this script is called"
class TaskEndedArguments(PreSubmitArguments):
"""Typed :py:class:`~argparse.Namespace` for arguments when task is ended.
It contains all attributes from :py:class:`~livy.cli.submit.PreSubmitArguments`.
"""
# task
batch_id: int
"Batch ID response by livy server"
state: str
"Task ended state"
# time
time_task_submit: datetime.datetime
"Local time before task is submitted"
time_task_ended: datetime.datetime
"Local time that detected task is ended"
def main(argv=None):
"""CLI entrypoint"""
# parse argument
cfg = livy.cli.config.load()
parser = argparse.ArgumentParser(
prog="livy submit",
description=__doc__,
)
parser.add_argument(
"script",
help="Path to the script that contains the application to be executed",
)
parser.add_argument(
"args",
nargs="*",
help="Arguments for the task script",
)
parser.add_argument(
"--class-name",
metavar="COM.EXAMPLE.FOO",
help="Application Java/Spark main class (for Java/Scala task)",
)
parser.add_argument(
"--jars",
nargs="+",
metavar="FOO.JAR",
help="Java dependencies to be used in this batch",
)
parser.add_argument(
"--py-files",
nargs="+",
metavar="FOO.ZIP",
help="Python dependencies to be used in this batch",
)
parser.add_argument(
"--files",
nargs="+",
metavar="FOO.TXT",
help="Files to be used in this batch",
)
parser.add_argument(
"--archives",
nargs="+",
metavar="FOO.TAR",
help="Archives to be used in this batch",
)
parser.add_argument(
"--queue-name",
metavar="DEFAULT",
help="The name of the YARN queue to which submitted",
)
parser.add_argument(
"--session-name",
metavar="HELLO",
help="The session name to execute this batch",
)
group = parser.add_argument_group("pre-submit actions")
group.add_argument(
"--on-pre-submit",
metavar="PLUG",
nargs="+",
default=cfg.submit.pre_submit,
help="Run plugin(s) before submit",
)
group = parser.add_argument_group("livy server configuration")
group.add_argument(
"--api-url",
required=cfg.root.api_url is None,
default=cfg.root.api_url,
help="Base-URL for Livy API server",
)
group.add_argument(
"--driver-memory",
metavar="10G",
default=cfg.submit.driver_memory,
type=argmem,
help="Amount of memory to use for the driver process.",
)
group.add_argument(
"--driver-cores",
metavar="N",
default=cfg.submit.driver_cores,
type=int,
help="Number of cores to use for the driver process.",
)
group.add_argument(
"--executor-memory",
metavar="10G",
default=cfg.submit.executor_memory,
type=argmem,
help="Amount of memory to use for the executor process.",
)
group.add_argument(
"--executor-cores",
metavar="N",
default=cfg.submit.executor_cores,
type=int,
help="Number of cores to use for each executor.",
)
group.add_argument(
"--num-executors",
metavar="N",
default=cfg.submit.num_executors,
type=int,
help="Number of executors to launch for this batch.",
)
group.add_argument(
"--spark-conf",
metavar="CONF_NAME=VALUE",
nargs="+",
default=cfg.submit.spark_conf,
type=argkvpair,
help="Spark configuration properties.",
)
group = parser.add_argument_group("post-submit actions")
g = group.add_mutually_exclusive_group()
g.set_defaults(watch_log=cfg.submit.watch_log)
g.add_argument(
"--watch-log",
dest="watch_log",
action="store_true",
help="Watching for logs until it is finished",
)
g.add_argument(
"--no-watch-log",
dest="watch_log",
action="store_false",
help="Not to watch for logs. Only submit the task and quit.",
)
group = parser.add_argument_group("after-task-finish actions")
group.add_argument(
"--on-task-success",
metavar="PLUG",
nargs="+",
default=cfg.submit.task_success,
help="Run plugin(s) on task is finished and success",
)
group.add_argument(
"--on-task-failed",
metavar="PLUG",
nargs="+",
default=cfg.submit.task_fail,
help="Run plugin(s) on task is ended and failed",
)
group.add_argument(
"--on-task-ended",
metavar="PLUG",
nargs="+",
default=cfg.submit.task_fail,
help="Run plugin(s) on task is ended and ended and regardless to its state",
)
livy.cli.logging.setup_argparse(parser)
args: PreSubmitArguments = parser.parse_args(argv)
# time stamping
tzlocal = datetime.datetime.now(datetime.timezone.utc).astimezone().tzinfo
def now() -> datetime.datetime:
return datetime.datetime.now().astimezone(tzlocal)
args.time_prog_start = now()
# setup logger
livy.cli.logging.init(args)
console = livy.cli.logging.get("livy-read-log.main")
console.info("Submission task started")
# run pre-submit actions
args: TaskEndedArguments = run_hook(console, "PRE-SUBMIT", args, args.on_pre_submit)
# check server state
client = livy.LivyClient(url=args.api_url)
try:
client.check(False)
except livy.RequestError as e:
console.error("Failed to connect to server: %s", e)
return 1
# build request payload
submit_parameter = {}
for key, value in [
("file", args.script),
("class_name", args.class_name),
("args", args.args),
("jars", args.jars),
("py_files", args.py_files),
("files", args.files),
("driver_memory", args.driver_memory),
("driver_cores", args.driver_cores),
("executor_memory", args.executor_memory),
("executor_cores", args.executor_cores),
("num_executors", args.num_executors),
("archives", args.archives),
("queue", args.queue_name),
("name", args.session_name),
("conf", {k: v for k, v in args.spark_conf}),
]:
if value:
submit_parameter[key] = value
console.info(
"Creating batch with parameters: %s",
json.dumps(submit_parameter, indent=2),
)
# timing
args.time_task_submit = now()
console.debug("Batch submission time= %s", args.time_task_submit)
# submit
try:
submit_resp = client.create_batch(**submit_parameter)
except livy.RequestError as e:
console.error("Failed to connect to server: %s", e)
return 1
console.info("Server response: %s", json.dumps(submit_resp, indent=2))
args.batch_id = submit_resp.get("id", None)
if not isinstance(args.batch_id, int) or args.batch_id < 0:
console.error("Failed to get batch id. Something goes wrong.")
return 1
# watch log
if not args.watch_log:
console.info("Batch %d created.", args.batch_id)
return 0
console.info("Start reading logs of batch %d", args.batch_id)
reader = livy.LivyBatchLogReader(client, args.batch_id)
try:
reader.read_until_finish()
except livy.RequestError as e:
console.error(
"Error occurs during read log. HTTP code=%d, Reason=%s", e.code, e.reason
)
return 1
except KeyboardInterrupt:
msg_args = args.batch_id, args.api_url # just for shorten
console.warning("Keyboard interrupt. Local livy-submit process terminating.")
console.warning("Your task might be still running on the server.")
console.warning("For reading the logs, call:")
console.warning(" livy read-log %d --api-url %s", *msg_args)
console.warning("For stopping the task, call:")
console.warning(" livy kill %d --api-url %s", *msg_args)
return 1
# timing
args.time_task_ended = now()
console.debug("Batch finishing time= %s", args.time_task_ended)
# get ending state
try:
args.state = client.get_batch_state(args.batch_id)
except livy.RequestError:
console.error("Error during query batch ending state.")
return 1
if args.state == "success":
exit_code = 0
state_level = logging.INFO
else:
exit_code = 1
state_level = logging.WARNING
console.log(state_level, "Batch#%d ended with state= %s", args.batch_id, args.state)
elapsed_time = args.time_task_ended - args.time_task_submit
console.info(
"Batch execution time: %dsec (%s)",
elapsed_time.total_seconds(),
human_readable_timeperiod(elapsed_time),
)
# run task-end actions
if args.state == "success":
args = run_hook(console, "TASK-SUCCESS", args, args.on_task_success)
else:
args = run_hook(console, "TASK-FAILED", args, args.on_task_failed)
args = run_hook(console, "TASK", args, args.on_task_ended)
return exit_code
def argmem(s: str):
"""Validate input for memory size"""
if not re.fullmatch(r"\d+[gm]b?", s, re.RegexFlag.IGNORECASE):
raise argparse.ArgumentTypeError(
"please specific memory size in format '1234mb'"
)
return s
def argkvpair(val):
"""Splitting key value pair"""
k, v = val.split("=", 1)
return k, v
def run_hook(
logger: logging.Logger,
identifier: str,
args: argparse.Namespace,
actions: typing.List[str],
) -> argparse.Namespace:
"""Run hook actions"""
for action_name in actions:
logger.info("Run %s action %s", identifier.lower(), action_name)
func = get_function(action_name)
if not func:
logger.warning("Failed to get action function instance. Stop process.")
exit(1)
try:
args = func(identifier, args)
except:
logger.exception(
"Error occurs during %s action. Stop process.", identifier.lower()
)
exit(1)
if not isinstance(args, argparse.Namespace):
logger.error(
"Expect namespace object from %s's return value. Got %s",
action_name,
type(args).__name__,
)
exit(1)
return args
def get_function(name: str) -> typing.Callable:
"""Get function by module name"""
m = re.fullmatch(r"([\w.]+):(\w+)", name, re.RegexFlag.I)
if not m:
logger.error("Failed to resolve function name: %s", name)
logger.error("Please specific it in module:func format")
return
module_name, func_name = m.groups()
try:
module = importlib.import_module(module_name)
except ImportError:
logger.error("Failed to find module: %s", module_name)
return
try:
func = getattr(module, func_name)
except AttributeError:
logger.error("Failed to find function %s in %s", func_name, module_name)
return
return func
def human_readable_timeperiod(period: datetime.timedelta):
"""Convert time period to human readable format"""
total_seconds = int(period.total_seconds())
terms = []
days = total_seconds // 86400
if days:
terms.append(f"{days}d")
hours = total_seconds // 3600 % 24
if hours:
terms.append(f"{hours}h")
minutes = total_seconds // 60 % 60
if minutes:
terms.append(f"{minutes}m")
seconds = total_seconds % 60
if seconds:
terms.append(f"{seconds}s")
return " ".join(terms)
if __name__ == "__main__":
exit(main())
|
[
"logging.getLogger",
"importlib.import_module",
"argparse.ArgumentParser",
"livy.cli.config.load",
"json.dumps",
"argparse.ArgumentTypeError",
"livy.cli.logging.init",
"re.fullmatch",
"datetime.datetime.now",
"livy.cli.logging.get",
"livy.cli.logging.setup_argparse",
"livy.LivyClient",
"livy.LivyBatchLogReader"
] |
[((212, 239), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (229, 239), False, 'import logging\n'), ((1499, 1521), 'livy.cli.config.load', 'livy.cli.config.load', ([], {}), '()\n', (1519, 1521), False, 'import livy\n'), ((1535, 1599), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'prog': '"""livy submit"""', 'description': '__doc__'}), "(prog='livy submit', description=__doc__)\n", (1558, 1599), False, 'import argparse\n'), ((5829, 5868), 'livy.cli.logging.setup_argparse', 'livy.cli.logging.setup_argparse', (['parser'], {}), '(parser)\n', (5860, 5868), False, 'import livy\n'), ((6179, 6206), 'livy.cli.logging.init', 'livy.cli.logging.init', (['args'], {}), '(args)\n', (6200, 6206), False, 'import livy\n'), ((6221, 6263), 'livy.cli.logging.get', 'livy.cli.logging.get', (['"""livy-read-log.main"""'], {}), "('livy-read-log.main')\n", (6241, 6263), False, 'import livy\n'), ((6466, 6499), 'livy.LivyClient', 'livy.LivyClient', ([], {'url': 'args.api_url'}), '(url=args.api_url)\n', (6481, 6499), False, 'import livy\n'), ((8308, 8354), 'livy.LivyBatchLogReader', 'livy.LivyBatchLogReader', (['client', 'args.batch_id'], {}), '(client, args.batch_id)\n', (8331, 8354), False, 'import livy\n'), ((11586, 11640), 're.fullmatch', 're.fullmatch', (['"""([\\\\w.]+):(\\\\w+)"""', 'name', 're.RegexFlag.I'], {}), "('([\\\\w.]+):(\\\\w+)', name, re.RegexFlag.I)\n", (11598, 11640), False, 'import re\n'), ((7471, 7509), 'json.dumps', 'json.dumps', (['submit_parameter'], {'indent': '(2)'}), '(submit_parameter, indent=2)\n', (7481, 7509), False, 'import json\n'), ((7873, 7906), 'json.dumps', 'json.dumps', (['submit_resp'], {'indent': '(2)'}), '(submit_resp, indent=2)\n', (7883, 7906), False, 'import json\n'), ((10266, 10320), 're.fullmatch', 're.fullmatch', (['"""\\\\d+[gm]b?"""', 's', 're.RegexFlag.IGNORECASE'], {}), "('\\\\d+[gm]b?', s, re.RegexFlag.IGNORECASE)\n", (10278, 10320), False, 'import re\n'), ((10336, 10412), 
'argparse.ArgumentTypeError', 'argparse.ArgumentTypeError', (['"""please specific memory size in format \'1234mb\'"""'], {}), '("please specific memory size in format \'1234mb\'")\n', (10362, 10412), False, 'import argparse\n'), ((11868, 11904), 'importlib.import_module', 'importlib.import_module', (['module_name'], {}), '(module_name)\n', (11891, 11904), False, 'import importlib\n'), ((5960, 6004), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (5981, 6004), False, 'import datetime\n'), ((6077, 6100), 'datetime.datetime.now', 'datetime.datetime.now', ([], {}), '()\n', (6098, 6100), False, 'import datetime\n')]
|
"""Minimal setup file for learn project."""
import pathlib
from setuptools import setup, find_packages
# The directory containing this file
HERE = pathlib.Path(__file__).parent
# The text of the README file
README = (HERE / "README.md").read_text()
setup(
name = 'premoji',
version = '0.1.4',
description = 'predict emoji on given text',
long_description = README,
long_description_content_type = "text/markdown",
license = "MIT",
author = '<NAME>',
author_email = '<EMAIL>',
url = 'https://macworks.io',
download_url = 'https://github.com/nickyfoto/premoji/archive/v0.1.3-alpha.tar.gz',
packages = find_packages(where='src'),
package_dir = {'': 'src'},
include_package_data=True,
install_requires = [
'numpy',
'scikit-learn',
],
classifiers = [
'Development Status :: 3 - Alpha', # Chose either "3 - Alpha", "4 - Beta" or "5 - Production/Stable" as the current state of your package
'Intended Audience :: Developers', # Define that your audience are developers
'Topic :: Software Development :: Build Tools',
'License :: OSI Approved :: MIT License', # Again, pick a license
'Programming Language :: Python :: 3.7',
]
)
|
[
"setuptools.find_packages",
"pathlib.Path"
] |
[((149, 171), 'pathlib.Path', 'pathlib.Path', (['__file__'], {}), '(__file__)\n', (161, 171), False, 'import pathlib\n'), ((648, 674), 'setuptools.find_packages', 'find_packages', ([], {'where': '"""src"""'}), "(where='src')\n", (661, 674), False, 'from setuptools import setup, find_packages\n')]
|
"""
Class that holds the results: used for evaluating model performance on activity cliff compounds
<NAME>, Eindhoven University of Technology, March 2022
"""
import os
import numpy as np
from MoleculeACE.benchmark.utils.const import Algorithms
from .metrics import calc_rmse, calc_q2f3
class Results:
def __init__(self, predictions=None, reference=None, y_train=None, data=None,
tanimoto_cliff_compounds=None, scaffold_cliff_compounds=None, levenshtein_cliff_compounds=None,
soft_consensus_cliff_compounds=None):
self.predictions = predictions
self.reference = reference
self.y_train = y_train
self.tanimoto_cliff_compounds = tanimoto_cliff_compounds
self.scaffold_cliff_compounds = scaffold_cliff_compounds
self.levenshtein_cliff_compounds = levenshtein_cliff_compounds
self.soft_consensus_cliff_compounds = soft_consensus_cliff_compounds
self.data = data
self.rmse = np.inf
self.q2f3 = 0
self.tanimoto_cliff_rmse = np.inf
self.scaffold_cliff_rmse = np.inf
self.levenshtein_cliff_rmse = np.inf
self.soft_consensus_cliff_rmse = np.inf
def calc_rmse(self, reference=None, predictions=None):
""" Calculate the rmse from two lists of reference and predicted bioactivity"""
if reference is not None:
self.reference = reference
if predictions is not None:
self.predictions = predictions
# calculate the rmsd
self.rmse = calc_rmse(self.reference, self.predictions)
return self.rmse
def calc_q2f3(self, reference=None, predictions=None, y_train=None):
""" Calculates the Q2 F3 score (best according to Todeschini et al. 2016)
Args:
reference: (1d array-like shape) true test values (float)
predictions: (1d array-like shape) predicted test values (float)
y_train: (1d array-like shape) true train values (float)
Returns: Q2F3 score
"""
if reference is not None:
self.reference = reference
if predictions is not None:
self.predictions = predictions
if y_train is not None:
self.y_train = y_train
# calculate the q2f3
self.q2f3 = calc_q2f3(self.reference, self.predictions, self.y_train)
return self.q2f3
def calc_cliff_rmse(self, reference=None, predictions=None, tanimoto_cliff_compounds=None,
scaffold_cliff_compounds=None, levenshtein_cliff_compounds=None,
soft_consensus_cliff_compounds=None):
""" Calculate the rmse of only cliff compounds
Args:
levenshtein_cliff_compounds: (lst) Binary list of cliff compounds (same length as predictions)
tanimoto_cliff_compounds: (lst) Binary list of cliff compounds (same length as predictions)
scaffold_cliff_compounds: (lst) Binary list of cliff compounds (same length as predictions)
consensus_cliff_compounds: (lst) Binary list of cliff compounds (same length as predictions)
soft_consensus_cliff_compounds: (lst) Binary list of cliff compounds (same length as predictions)
reference: (lst) true bioactivity values
predictions: (lst) predicted bioactivity values
cliff_compounds: (lst) binary list describing if a compound is a cliff compound (1 == cliff, 0 == no cliff)
Returns: (float) rmse
"""
if reference is not None:
self.reference = reference
if predictions is not None:
self.predictions = predictions
if tanimoto_cliff_compounds is not None:
self.tanimoto_cliff_compounds = tanimoto_cliff_compounds
if scaffold_cliff_compounds is not None:
self.scaffold_cliff_compounds = scaffold_cliff_compounds
if levenshtein_cliff_compounds is not None:
self.levenshtein_cliff_compounds = levenshtein_cliff_compounds
if soft_consensus_cliff_compounds is not None:
self.soft_consensus_cliff_compounds = soft_consensus_cliff_compounds
if self.tanimoto_cliff_compounds is not None:
# Subset only reference and predicted values of the cliff compounds, then calculate cliff rmse
clf_ref = [self.reference[idx] for idx, clf in enumerate(self.tanimoto_cliff_compounds) if clf == 1]
clf_prd = [self.predictions[idx] for idx, clf in enumerate(self.tanimoto_cliff_compounds) if clf == 1]
self.tanimoto_cliff_rmse = calc_rmse(clf_ref, clf_prd)
if self.scaffold_cliff_compounds is not None:
# Subset only reference and predicted values of the cliff compounds, then calculate cliff rmse
clf_ref = [self.reference[idx] for idx, clf in enumerate(self.scaffold_cliff_compounds) if clf == 1]
clf_prd = [self.predictions[idx] for idx, clf in enumerate(self.scaffold_cliff_compounds) if clf == 1]
self.scaffold_cliff_rmse = calc_rmse(clf_ref, clf_prd)
if self.levenshtein_cliff_compounds is not None:
# Subset only reference and predicted values of the cliff compounds, then calculate cliff rmse
clf_ref = [self.reference[idx] for idx, clf in enumerate(self.levenshtein_cliff_compounds) if clf == 1]
clf_prd = [self.predictions[idx] for idx, clf in enumerate(self.levenshtein_cliff_compounds) if clf == 1]
self.levenshtein_cliff_rmse = calc_rmse(clf_ref, clf_prd)
if self.soft_consensus_cliff_compounds is not None:
# Subset only reference and predicted values of the cliff compounds, then calculate cliff rmse
clf_ref = [self.reference[idx] for idx, clf in enumerate(self.soft_consensus_cliff_compounds) if clf == 1]
clf_prd = [self.predictions[idx] for idx, clf in enumerate(self.soft_consensus_cliff_compounds) if clf == 1]
self.soft_consensus_cliff_rmse = calc_rmse(clf_ref, clf_prd)
return {'tanimoto_cliff_rmse': self.tanimoto_cliff_rmse, 'scaffold_cliff_rmse': self.scaffold_cliff_rmse,
'levenshtein_cliff_rmse': self.levenshtein_cliff_rmse,
'soft_consensus_cliff_rmse': self.soft_consensus_cliff_rmse}
def to_csv(self, filename, algorithm: Algorithms = None):
# Create output file if it doesnt exist
if self.data is not None:
if not os.path.isfile(filename):
with open(filename, 'w') as f:
f.write('dataset,'
'algorithm,'
'descriptor,'
'augmentation,'
'rmse,'
'cliff_rmse,'
'n_compounds,'
'n_cliff_compounds,'
'n_compounds_train,'
'n_cliff_compounds_train,'
'n_compounds_test,'
'n_cliff_compounds_test\n')
with open(filename, 'a') as f:
f.write(f'{self.data.name},'
f'{algorithm.value},'
f'{self.data.descriptor.value},'
f'{self.data.augmentation},'
f'{self.rmse},'
f'{self.soft_consensus_cliff_rmse},'
f'{self.data.cliffs.stats["n_compounds"]},'
f'{self.data.cliffs.stats["n_soft_consensus_cliff_compounds"]},'
f'{self.data.cliffs.stats["n_compounds_train"]},'
f'{self.data.cliffs.stats["n_soft_consensus_cliff_compounds_train"]},'
f'{self.data.cliffs.stats["n_compounds_test"]},'
f'{self.data.cliffs.stats["n_soft_consensus_cliff_compounds_test"]}\n')
def __repr__(self):
return f"RMSE: {self.rmse:.4f}\n" \
f"Q2F3: {self.q2f3:.4f}\n" \
f"AC-RMSE: {self.soft_consensus_cliff_rmse:.4f}\n"
|
[
"os.path.isfile"
] |
[((6462, 6486), 'os.path.isfile', 'os.path.isfile', (['filename'], {}), '(filename)\n', (6476, 6486), False, 'import os\n')]
|
from pathlib import Path
import weakref
import warnings
from typing import Union, Optional, List
from .merger import select_merge_algorithm
from .constants import DIR_HANGAR
from .remotes import Remotes
from .context import Environments
from .diagnostics import ecosystem, integrity
from .records import heads, parsing, summarize, vcompat, commiting
from .checkout import ReaderCheckout, WriterCheckout
from .diff import DiffAndConflicts, ReaderUserDiff
from .utils import (
is_valid_directory_path,
is_suitable_user_key,
is_ascii,
folder_size,
format_bytes
)
class Repository(object):
"""Launching point for all user operations in a Hangar repository.
All interaction, including the ability to initialize a repo, checkout a
commit (for either reading or writing), create a branch, merge branches, or
generally view the contents or state of the local repository starts here.
Just provide this class instance with a path to an existing Hangar
repository, or to a directory one should be initialized, and all required
data for starting your work on the repo will automatically be populated.
>>> from hangar import Repository
>>> repo = Repository('foo/path/to/dir')
Parameters
----------
path : Union[str, os.PathLike]
local directory path where the Hangar repository exists (or initialized)
exists : bool, optional
True if a Hangar repository should exist at the given directory path.
Should no Hangar repository exists at that location, a UserWarning will
be raised indicating that the :meth:`init` method needs to be called.
False if the provided path does not need to (but optionally can) contain a
Hangar repository. if a Hangar repository does not exist at that path, the
usual UserWarning will be suppressed.
In both cases, the path must exist and the user must have sufficient OS
permissions to write to that location. Default = True
"""
def __init__(self, path: Union[str, Path], exists: bool = True):
if isinstance(path, (str, bytes)):
path = Path(path)
try:
usr_path = is_valid_directory_path(path)
except (TypeError, NotADirectoryError, PermissionError) as e:
raise e from None
repo_pth = usr_path.joinpath(DIR_HANGAR)
if exists is False:
with warnings.catch_warnings():
warnings.simplefilter('ignore', UserWarning)
envs = Environments(pth=repo_pth)
else:
envs = Environments(pth=repo_pth)
self._repo_path: Path = repo_pth
self._env: Environments = envs
self._remote: Remotes = Remotes(self._env)
def _repr_pretty_(self, p, cycle):
"""provide a pretty-printed repr for ipython based user interaction.
Parameters
----------
p : printer
io stream printer type object which is provided via ipython
cycle : bool
if the pretty-printer detects a cycle or infinite loop. Not a
concern here since we just output the text and return, no looping
required.
"""
self.__verify_repo_initialized()
res = f'Hangar {self.__class__.__name__}\
\n Repository Path : {self.path}\
\n Writer-Lock Free : {heads.writer_lock_held(self._env.branchenv)}\n'
p.text(res)
def __repr__(self):
"""Override the default repr to show useful information to developers.
Note: the pprint repr (ipython enabled) is separately defined in
:py:meth:`_repr_pretty_`. We specialize because we assume that anyone
operating in a terminal-based interpreter is probably a more advanced
developer-type, and expects traditional repr information instead of a
user facing summary of the repo. Though if we're wrong, go ahead and
feel free to reassign the attribute :) won't hurt our feelings, promise.
Returns
-------
string
formatted representation of the object
"""
res = f'{self.__class__}(path={self._repo_path})'
return res
def __verify_repo_initialized(self):
"""Internal method to verify repo initialized before operations occur
Raises
------
RuntimeError
If the repository db environments have not been initialized at the
specified repo path.
"""
if not self._env.repo_is_initialized:
msg = f'Repository at path: {self._repo_path} has not been initialized. '\
f'Please run the `init_repo()` function'
raise RuntimeError(msg)
@property
def remote(self) -> Remotes:
"""Accessor to the methods controlling remote interactions.
.. seealso::
:class:`Remotes` for available methods of this property
Returns
-------
Remotes
Accessor object methods for controlling remote interactions.
"""
proxy = weakref.proxy(self._remote)
return proxy
@property
def path(self) -> str:
"""Return the path to the repository on disk, read-only attribute
Returns
-------
str
path to the specified repository, not including `.hangar` directory
"""
self.__verify_repo_initialized()
return str(self._repo_path.parent)
@property
def writer_lock_held(self) -> bool:
"""Check if the writer lock is currently marked as held. Read-only attribute.
Returns
-------
bool
True is writer-lock is held, False if writer-lock is free.
"""
self.__verify_repo_initialized()
return not heads.writer_lock_held(self._env.branchenv)
@property
def version(self) -> str:
"""Find the version of Hangar software the repository is written with
Returns
-------
str
semantic version of major, minor, micro version of repo software version.
"""
self.__verify_repo_initialized()
res = vcompat.get_repository_software_version_spec(self._env.branchenv)
return str(res)
@property
def initialized(self) -> bool:
"""
Check if the repository has been initialized or not
Returns
-------
bool
True if repository has been initialized.
"""
return self._env.repo_is_initialized
@property
def size_nbytes(self) -> int:
"""Disk space used by the repository returned in number of bytes.
>>> repo.size_nbytes
1234567890
>>> print(type(repo.size_nbytes))
<class 'int'>
Returns
-------
int
number of bytes used by the repository on disk.
"""
self.__verify_repo_initialized()
return folder_size(self._repo_path, recurse=True)
@property
def size_human(self) -> str:
"""Disk space used by the repository returned in human readable string.
>>> repo.size_human
'1.23 GB'
>>> print(type(repo.size_human))
<class 'str'>
Returns
-------
str
disk space used by the repository formated in human readable text.
"""
self.__verify_repo_initialized()
nbytes = folder_size(self._repo_path, recurse=True)
return format_bytes(nbytes)
def checkout(self,
write: bool = False,
*,
branch: str = '',
commit: str = '') -> Union[ReaderCheckout, WriterCheckout]:
"""Checkout the repo at some point in time in either `read` or `write` mode.
Only one writer instance can exist at a time. Write enabled checkout
must must create a staging area from the ``HEAD`` commit of a branch. On
the contrary, any number of reader checkouts can exist at the same time
and can specify either a branch name or a commit hash.
Parameters
----------
write : bool, optional
Specify if the checkout is write capable, defaults to False
branch : str, optional
name of the branch to checkout. This utilizes the state of the repo
as it existed at the branch ``HEAD`` commit when this checkout object
was instantiated, defaults to ''
commit : str, optional
specific hash of a commit to use for the checkout (instead of a
branch ``HEAD`` commit). This argument takes precedent over a branch
name parameter if it is set. Note: this only will be used in
non-writeable checkouts, defaults to ''
Raises
------
ValueError
If the value of `write` argument is not boolean
ValueError
If ``commit`` argument is set to any value when ``write=True``.
Only ``branch`` argument is allowed.
Returns
-------
Union[ReaderCheckout, WriterCheckout]
Checkout object which can be used to interact with the repository
data
"""
self.__verify_repo_initialized()
try:
if write is True:
if commit != '':
raise ValueError(
f'Only `branch` argument can be set if `write=True`. '
f'Setting `commit={commit}` not allowed.')
if branch == '':
branch = heads.get_staging_branch_head(self._env.branchenv)
co = WriterCheckout(
repo_pth=self._repo_path,
branch_name=branch,
hashenv=self._env.hashenv,
refenv=self._env.refenv,
stageenv=self._env.stageenv,
branchenv=self._env.branchenv,
stagehashenv=self._env.stagehashenv)
return co
elif write is False:
commit_hash = self._env.checkout_commit(
branch_name=branch, commit=commit)
co = ReaderCheckout(
base_path=self._repo_path,
dataenv=self._env.cmtenv[commit_hash],
hashenv=self._env.hashenv,
branchenv=self._env.branchenv,
refenv=self._env.refenv,
commit=commit_hash)
return co
else:
raise ValueError("Argument `write` only takes True or False as value")
except (RuntimeError, ValueError) as e:
raise e from None
def clone(self, user_name: str, user_email: str, remote_address: str,
*, remove_old: bool = False) -> str:
"""Download a remote repository to the local disk.
The clone method implemented here is very similar to a `git clone`
operation. This method will pull all commit records, history, and data
which are parents of the remote's `master` branch head commit. If a
:class:`Repository` exists at the specified directory,
the operation will fail.
Parameters
----------
user_name : str
Name of the person who will make commits to the repository. This
information is recorded permanently in the commit records.
user_email : str
Email address of the repository user. This information is recorded
permanently in any commits created.
remote_address : str
location where the
:class:`hangar.remote.server.HangarServer` process is
running and accessible by the clone user.
remove_old : bool, optional, kwarg only
DANGER! DEVELOPMENT USE ONLY! If enabled, a
:class:`hangar.repository.Repository` existing on disk at the same
path as the requested clone location will be completely removed and
replaced with the newly cloned repo. (the default is False, which
will not modify any contents on disk and which will refuse to create
a repository at a given location if one already exists there.)
Returns
-------
str
Name of the master branch for the newly cloned repository.
"""
self.init(user_name=user_name, user_email=user_email, remove_old=remove_old)
self._remote.add(name='origin', address=remote_address)
branch = self._remote.fetch(remote='origin', branch='master')
HEAD = heads.get_branch_head_commit(self._env.branchenv, branch_name=branch)
heads.set_branch_head_commit(self._env.branchenv, 'master', HEAD)
with warnings.catch_warnings(record=False):
warnings.simplefilter('ignore', category=UserWarning)
co = self.checkout(write=True, branch='master')
co.reset_staging_area()
co.close()
return 'master'
def init(self,
user_name: str,
user_email: str,
*,
remove_old: bool = False) -> str:
"""Initialize a Hangar repository at the specified directory path.
This function must be called before a checkout can be performed.
Parameters
----------
user_name : str
Name of the repository user account.
user_email : str
Email address of the repository user account.
remove_old : bool, kwarg-only
DEVELOPER USE ONLY -- remove and reinitialize a Hangar
repository at the given path, Default = False
Returns
-------
str
the full directory path where the Hangar repository was
initialized on disk.
"""
pth = self._env.init_repo(user_name=user_name,
user_email=user_email,
remove_old=remove_old)
return str(pth)
def log(self,
branch: str = None,
commit: str = None,
*,
return_contents: bool = False,
show_time: bool = False,
show_user: bool = False) -> Optional[dict]:
"""Displays a pretty printed commit log graph to the terminal.
.. note::
For programatic access, the return_contents value can be set to true
which will retrieve relevant commit specifications as dictionary
elements.
Parameters
----------
branch : str, optional
The name of the branch to start the log process from. (Default value
= None)
commit : str, optional
The commit hash to start the log process from. (Default value = None)
return_contents : bool, optional, kwarg only
If true, return the commit graph specifications in a dictionary
suitable for programatic access/evaluation.
show_time : bool, optional, kwarg only
If true and return_contents is False, show the time of each commit
on the printed log graph
show_user : bool, optional, kwarg only
If true and return_contents is False, show the committer of each
commit on the printed log graph
Returns
-------
Optional[dict]
Dict containing the commit ancestor graph, and all specifications.
"""
self.__verify_repo_initialized()
res = summarize.log(branchenv=self._env.branchenv,
refenv=self._env.refenv,
branch=branch,
commit=commit,
return_contents=return_contents,
show_time=show_time,
show_user=show_user)
return res
def summary(self, *, branch: str = '', commit: str = '') -> None:
"""Print a summary of the repository contents to the terminal
Parameters
----------
branch : str, optional
A specific branch name whose head commit will be used as the summary
point (Default value = '')
commit : str, optional
A specific commit hash which should be used as the summary point.
(Default value = '')
"""
self.__verify_repo_initialized()
ppbuf = summarize.summary(self._env, branch=branch, commit=commit)
print(ppbuf.getvalue())
return None
def _details(self, *, line_limit=100, line_length=100) -> None: # pragma: no cover
"""DEVELOPER USE ONLY: Dump some details about the underlying db structure to disk.
"""
print(summarize.details(
self._env.branchenv, line_limit=line_limit, line_length=line_length).getvalue())
print(summarize.details(
self._env.refenv, line_limit=line_limit, line_length=line_length).getvalue())
print(summarize.details(
self._env.hashenv, line_limit=line_limit, line_length=line_length).getvalue())
print(summarize.details(
self._env.stageenv, line_limit=line_limit, line_length=line_length).getvalue())
print(summarize.details(
self._env.stagehashenv, line_limit=line_limit, line_length=line_length).getvalue())
for commit, commitenv in self._env.cmtenv.items():
print(summarize.details(
commitenv, line_limit=line_limit, line_length=line_length).getvalue())
return
def _ecosystem_details(self) -> dict:
"""DEVELOPER USER ONLY: log and return package versions on the system.
"""
eco = ecosystem.get_versions()
return eco
def diff(self, master: str, dev: str) -> DiffAndConflicts:
"""Calculate diff between master and dev branch/commits.
Diff is calculated as if we are to merge "dev" into "master"
Parameters
----------
master: str
branch name or commit hash digest to use as the "master" which
changes made in "dev" are compared to.
dev: str
branch name or commit hash digest to use as the "dev"
(ie. "feature") branch which changes have been made to
which are to be compared to the contents of "master".
Returns
-------
DiffAndConflicts
Standard output diff structure.
"""
current_branches = self.list_branches()
# assert branch / commit specified by "master" exists and
# standardize into "digest" rather than "branch name" arg type
if master in current_branches:
masterHEAD = heads.get_branch_head_commit(
branchenv=self._env.branchenv, branch_name=master)
else:
cmtExists = commiting.check_commit_hash_in_history(
refenv=self._env.refenv, commit_hash=master)
if not cmtExists:
raise ValueError(f'`master` {master} is not valid branch/commit.')
masterHEAD = master
# same check & transform for "dev" branch/commit arg.
if dev in current_branches:
devHEAD = heads.get_branch_head_commit(
branchenv=self._env.branchenv, branch_name=dev)
else:
cmtExists = commiting.check_commit_hash_in_history(
refenv=self._env.refenv, commit_hash=dev)
if not cmtExists:
raise ValueError(f'`dev` {dev} is not valid branch/commit.')
devHEAD = dev
# create differ object and generate results...
diff = ReaderUserDiff(commit_hash=masterHEAD,
branchenv=self._env.branchenv,
refenv=self._env.refenv)
res = diff.commit(dev_commit_hash=devHEAD)
return res
def merge(self, message: str, master_branch: str, dev_branch: str) -> str:
"""Perform a merge of the changes made on two branches.
Parameters
----------
message: str
Commit message to use for this merge.
master_branch : str
name of the master branch to merge into
dev_branch : str
name of the dev/feature branch to merge
Returns
-------
str
Hash of the commit which is written if possible.
"""
self.__verify_repo_initialized()
commit_hash = select_merge_algorithm(
message=message,
branchenv=self._env.branchenv,
stageenv=self._env.stageenv,
refenv=self._env.refenv,
stagehashenv=self._env.stagehashenv,
master_branch=master_branch,
dev_branch=dev_branch,
repo_path=self._repo_path)
return commit_hash
def create_branch(self, name: str, base_commit: str = None) -> heads.BranchHead:
"""create a branch with the provided name from a certain commit.
If no base commit hash is specified, the current writer branch ``HEAD``
commit is used as the ``base_commit`` hash for the branch. Note that
creating a branch does not actually create a checkout object for
interaction with the data. to interact you must use the repository
checkout method to properly initialize a read (or write) enabled
checkout object.
>>> from hangar import Repository
>>> repo = Repository('foo/path/to/dir')
>>> repo.create_branch('testbranch')
BranchHead(name='testbranch', digest='b66b...a8cc')
>>> repo.list_branches()
['master', 'testbranch']
>>> co = repo.checkout(write=True, branch='testbranch')
>>> # add data ...
>>> newDigest = co.commit('added some stuff')
>>> repo.create_branch('new-changes', base_commit=newDigest)
BranchHead(name='new-changes', digest='35kd...3254')
>>> repo.list_branches()
['master', 'new-changes', 'testbranch']
Parameters
----------
name : str
name to assign to the new branch
base_commit : str, optional
commit hash to start the branch root at. if not specified, the
writer branch ``HEAD`` commit at the time of execution will be used,
defaults to None
Returns
-------
:class:`~.heads.BranchHead`
NamedTuple[str, str] with fields for ``name`` and ``digest`` of the
branch created (if the operation was successful)
Raises
------
ValueError
If the branch name provided contains characters outside of alpha-numeric
ascii characters and ".", "_", "-" (no whitespace), or is > 64 characters.
ValueError
If the branch already exists.
RuntimeError
If the repository does not have at-least one commit on the "default"
(ie. ``master``) branch.
"""
self.__verify_repo_initialized()
if (not is_ascii(name)) or (not is_suitable_user_key(name)):
err = ValueError(
f'Branch name provided: {name} invalid. Must contain only alpha-numeric '
f'or "." "_" "-" ascii characters. And be <= 64 Characters')
raise err from None
createdBranch = heads.create_branch(
branchenv=self._env.branchenv,
name=name,
base_commit=base_commit)
return createdBranch
def remove_branch(self, name: str, *, force_delete: bool = False) -> heads.BranchHead:
"""Permanently delete a branch pointer from the repository history.
Since a branch (by definition) is the name associated with the HEAD
commit of a historical path, the default behavior of this method is to
throw an exception (no-op) should the ``HEAD`` not be referenced as an
ancestor (or at least as a twin) of a separate branch which is
currently *ALIVE*. If referenced in another branch's history, we are
assured that all changes have been merged and recorded, and that this
pointer can be safely deleted without risk of damage to historical
provenance or (eventual) loss to garbage collection.
>>> from hangar import Repository
>>> repo = Repository('foo/path/to/dir')
>>> repo.create_branch('first-testbranch')
BranchHead(name='first-testbranch', digest='9785...56da')
>>> repo.create_branch('second-testbranch')
BranchHead(name='second-testbranch', digest='9785...56da')
>>> repo.list_branches()
['master', 'first-testbranch', 'second-testbranch']
>>> # Make a commit to advance a branch
>>> co = repo.checkout(write=True, branch='first-testbranch')
>>> # add data ...
>>> co.commit('added some stuff')
'3l253la5hna3k3a553256nak35hq5q534kq35532'
>>> co.close()
>>> repo.remove_branch('second-testbranch')
BranchHead(name='second-testbranch', digest='9785...56da')
A user may manually specify to delete an un-merged branch, in which
case the ``force_delete`` keyword-only argument should be set to
``True``.
>>> # check out master and try to remove 'first-testbranch'
>>> co = repo.checkout(write=True, branch='master')
>>> co.close()
>>> repo.remove_branch('first-testbranch')
Traceback (most recent call last):
...
RuntimeError: ("The branch first-testbranch is not fully merged. "
"If you are sure you want to delete it, re-run with "
"force-remove parameter set.")
>>> # Now set the `force_delete` parameter
>>> repo.remove_branch('first-testbranch', force_delete=True)
BranchHead(name='first-testbranch', digest='9785...56da')
It is important to note that *while this method will handle all safety
checks, argument validation, and performs the operation to permanently
delete a branch name/digest pointer, **no commit refs along the history
will be deleted from the Hangar database**.* Most of the history contains
commit refs which must be safe in other branch histories, and recent
commits may have been used as the base for some new history. As such, even
if some of the latest commits leading up to a deleted branch ``HEAD`` are
orphaned (unreachable), the records (and all data added in those commits)
will remain on the disk.
In the future, we intend to implement a garbage collector which will remove
orphan commits which have not been modified for some set amount of time
(probably on the order of a few months), but this is not implemented at the
moment.
Should an accidental forced branch deletion occur, *it is possible to
recover* and create a new branch head pointing to the same commit. If
the commit digest of the removed branch ``HEAD`` is known, its as simple as
specifying a name and the ``base_digest`` in the normal
:meth:`create_branch` method. If the digest is unknown, it will be a
bit more work, but some of the developer facing introspection tools /
routines could be used to either manually or (with minimal effort)
programmatically find the orphan commit candidates. If you find
yourself having accidentally deleted a branch, and must get it back,
please reach out on the `Github Issues
<https://github.com/tensorwerk/hangar-py/issues>`__ page. We'll gladly
explain more in depth and walk you through the process in any way we
can help!
Parameters
----------
name : str
name of the branch which should be deleted. This branch must exist, and
cannot refer to a remote tracked branch (ie. origin/devbranch), please
see exception descriptions for other parameters determining validity of
argument
force_delete : bool, optional
If True, remove the branch pointer even if the changes are un-merged in
other branch histories. May result in orphaned commits which may be
time-consuming to recover if needed, by default False
Returns
-------
:class:`~.heads.BranchHead`
NamedTuple[str, str] with fields for `name` and `digest` of the branch
pointer deleted.
Raises
------
ValueError
If a branch with the provided name does not exist locally
PermissionError
If removal of the branch would result in a repository with zero local
branches.
PermissionError
If a write enabled checkout is holding the writer-lock at time of this
call.
PermissionError
If the branch to be removed was the last used in a write-enabled
checkout, and whose contents form the base of the staging area.
RuntimeError
If the branch has not been fully merged into other branch histories,
and ``force_delete`` option is not ``True``.
"""
self.__verify_repo_initialized()
res = heads.remove_branch(branchenv=self._env.branchenv,
refenv=self._env.refenv,
name=name,
force_delete=force_delete)
return res
def list_branches(self) -> List[str]:
"""list all branch names created in the repository.
Returns
-------
List[str]
the branch names recorded in the repository
"""
self.__verify_repo_initialized()
branches = heads.get_branch_names(self._env.branchenv)
return branches
    def verify_repo_integrity(self) -> bool:
        """Verify the integrity of the repository data on disk.

        Runs a full cryptographic verification of repository contents in order
        to ensure the integrity of all data and history recorded on disk.

        .. note::

            This proof may take a significant amount of time to run for
            repositories which:

            1. store significant quantities of data on disk.
            2. have a very large number of commits in their history.

            As a brief explanation for why these are the driving factors behind
            processing time:

            1. Every single piece of data in the repositories history must be read
               from disk, cryptographically hashed, and compared to the expected
               value. There is no exception to this rule; regardless of when a piece
               of data was added / removed from an column, or for how many (or how
               few) commits some sample exists in. The integrity of the commit tree at
               any point after some piece of data is added to the repo can only be
               validated if it - and all earlier data pieces - are proven to be intact
               and unchanged.

               Note: This does not mean that the verification is repeatedly
               performed for every commit some piece of data is stored in. Each
               data piece is read from disk and verified only once, regardless of
               how many commits some piece of data is referenced in.

            2. Each commit reference (defining names / contents of a commit) must be
               decompressed and parsed into a usable data structure. We scan across
               all data digests referenced in the commit and ensure that the
               corresponding data piece is known to hangar (and validated as
               unchanged). The commit refs (along with the corresponding user records,
               message, and parent map), are then re-serialized and cryptographically
               hashed for comparison to the expected value. While this process is
               fairly efficient for a single commit, it must be repeated for each
               commit in the repository history, and may take a non-trivial amount of
               time for repositories with thousands of commits.

            While the two points above are the most time consuming operations,
            there are many more checks which are performed alongside them as part
            of the full verification run.

        Returns
        -------
        bool
            True if integrity verification is successful, otherwise False; in
            this case, a message describing the offending component will be
            printed to stdout.
        """
        self.__verify_repo_initialized()
        # take the writer lock so no write-enabled checkout can mutate state
        # while the verification scan is in flight.
        heads.acquire_writer_lock(self._env.branchenv, 'VERIFY_PROCESS')
        try:
            integrity.run_verification(
                branchenv=self._env.branchenv,
                hashenv=self._env.hashenv,
                refenv=self._env.refenv,
                repo_path=self._env.repo_path)
        finally:
            # always release the lock, even if verification raised, so the
            # repository is never left permanently locked to writers.
            heads.release_writer_lock(self._env.branchenv, 'VERIFY_PROCESS')
        return True
def force_release_writer_lock(self) -> bool:
"""Force release the lock left behind by an unclosed writer-checkout
.. warning::
*NEVER USE THIS METHOD IF WRITER PROCESS IS CURRENTLY ACTIVE.* At the time
of writing, the implications of improper/malicious use of this are not
understood, and there is a a risk of of undefined behavior or (potentially)
data corruption.
At the moment, the responsibility to close a write-enabled checkout is
placed entirely on the user. If the `close()` method is not called
before the program terminates, a new checkout with write=True will fail.
The lock can only be released via a call to this method.
.. note::
This entire mechanism is subject to review/replacement in the future.
Returns
-------
bool
if the operation was successful.
"""
self.__verify_repo_initialized()
forceReleaseSentinal = parsing.repo_writer_lock_force_release_sentinal()
success = heads.release_writer_lock(self._env.branchenv, forceReleaseSentinal)
return success
|
[
"warnings.simplefilter",
"weakref.proxy",
"warnings.catch_warnings",
"pathlib.Path"
] |
[((5092, 5119), 'weakref.proxy', 'weakref.proxy', (['self._remote'], {}), '(self._remote)\n', (5105, 5119), False, 'import weakref\n'), ((2142, 2152), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (2146, 2152), False, 'from pathlib import Path\n'), ((12813, 12850), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {'record': '(False)'}), '(record=False)\n', (12836, 12850), False, 'import warnings\n'), ((12864, 12917), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {'category': 'UserWarning'}), "('ignore', category=UserWarning)\n", (12885, 12917), False, 'import warnings\n'), ((2415, 2440), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (2438, 2440), False, 'import warnings\n'), ((2458, 2502), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""', 'UserWarning'], {}), "('ignore', UserWarning)\n", (2479, 2502), False, 'import warnings\n')]
|
from pathlib import Path
from re import sub
from shutil import rmtree
from unittest import TestCase
from dotify import Dotify, models
class BaseNameResolverMixin(object):
    """Mixin resolving the on-disk download basename for dotify models."""

    @classmethod
    def get_download_basename(cls, obj):
        """Dispatch to the basename resolver matching the type of `obj`."""
        if isinstance(obj, models.Track):
            return cls.get_download_basename_track(obj)
        if isinstance(obj, models.Playlist):
            return cls.get_download_basename_playlist(obj)
        if isinstance(obj, models.Album):
            return cls.get_download_basename_album(obj)
        raise RuntimeError("`{0}` is an instance of {1}".format(obj, type(obj)))

    @classmethod
    def get_download_basename_track(cls, track):
        """Return `Artist - Title.mp3` with runs of whitespace collapsed to `_`."""
        artist = sub(r"\s+", "_", track.artist.name.strip())
        title = sub(r"\s+", "_", track.name.strip())
        return "{0} - {1}.mp3".format(artist, title)

    @classmethod
    def get_download_basename_playlist(cls, playlist):
        """Return the playlist name, trimmed and with whitespace runs collapsed."""
        trimmed = playlist.name.strip()
        return sub(r"\s+", " ", trimmed)

    @classmethod
    def get_download_basename_album(cls, album):
        """Return `Artist - Album` with runs of whitespace collapsed to spaces."""
        artist = sub(r"\s+", " ", album.artist.name.strip())
        title = sub(r"\s+", " ", album.name.strip())
        return "{0} - {1}".format(artist, title)
class DotifyBaseTestCase(TestCase, BaseNameResolverMixin):
    """Shared fixture: a Dotify client plus a throwaway download directory."""

    def setUp(self):
        self.client = Dotify()
        self.test_directory = Path(__file__).parent / "tmp"
        self.test_directory.mkdir(parents=True, exist_ok=True)

    def tearDown(self):
        # remove the scratch directory and everything downloaded into it
        rmtree(self.test_directory)

    def download(self, cls_name, url):
        """Download the model at `url` and assert the file landed on disk."""
        with self.client:
            obj = getattr(models, cls_name).from_url(url)
            target = self.test_directory / self.get_download_basename(obj)
            obj.download(target)
            self.assertTrue(target.exists())

    def search(self, cls_name, query, metadata_list, limit=1):
        """Search for `query` and compare each hit against expected metadata."""
        with self.client:
            self.assertEqual(len(metadata_list), limit)
            hits = getattr(models, cls_name).search(query, limit=limit)
            for hit, metadata in zip(hits, metadata_list):
                for attr_path, expected in metadata.items():
                    self._test_search_result_metadata_equality(hit, attr_path, expected)

    @classmethod
    def get_value(cls, obj, attribute_path):
        """Resolve a dotted attribute path (e.g. ``artist.name``) on `obj`."""
        parts = [part for part in attribute_path.split(".") if part]
        return cls._get_value_recursive(obj, parts)

    @classmethod
    def _get_value_recursive(cls, obj, paths):
        if not paths:
            return obj
        return cls._get_value_recursive(getattr(obj, paths[0]), paths[1:])

    def _test_search_result_metadata_equality(self, result, name, value):
        with self.subTest("Asserting metadata equality", **{name: value}):
            self.assertEqual(self.get_value(result, name), value)
|
[
"re.sub",
"dotify.Dotify",
"pathlib.Path",
"shutil.rmtree"
] |
[((1440, 1448), 'dotify.Dotify', 'Dotify', ([], {}), '()\n', (1446, 1448), False, 'from dotify import Dotify, models\n'), ((1606, 1633), 'shutil.rmtree', 'rmtree', (['self.test_directory'], {}), '(self.test_directory)\n', (1612, 1633), False, 'from shutil import rmtree\n'), ((813, 837), 're.sub', 'sub', (['"""\\\\s+"""', '"""_"""', 'artist'], {}), "('\\\\s+', '_', artist)\n", (816, 837), False, 'from re import sub\n'), ((839, 861), 're.sub', 'sub', (['"""\\\\s+"""', '"""_"""', 'name'], {}), "('\\\\s+', '_', name)\n", (842, 861), False, 'from re import sub\n'), ((1238, 1262), 're.sub', 'sub', (['"""\\\\s+"""', '""" """', 'artist'], {}), "('\\\\s+', ' ', artist)\n", (1241, 1262), False, 'from re import sub\n'), ((1264, 1286), 're.sub', 'sub', (['"""\\\\s+"""', '""" """', 'name'], {}), "('\\\\s+', ' ', name)\n", (1267, 1286), False, 'from re import sub\n'), ((1480, 1494), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (1484, 1494), False, 'from pathlib import Path\n')]
|
import random
import requests
from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd
from userbot.cmdhelp import CmdHelp
LOVESTR = [
"The best and most beautiful things in this world cannot be seen or even heard, but must be felt with the heart.",
"You know you're in love when you can't fall asleep because reality is finally better than your dreams.",
"Love recognizes no barriers. It jumps hurdles, leaps fences, penetrates walls to arrive at its destination full of hope.",
"Being deeply loved by someone gives you strength, while loving someone deeply gives you courage.",
"The real lover is the man who can thrill you by kissing your forehead or smiling into your eyes or just staring into space.",
"I swear I couldn't love you more than I do right now, and yet I know I will tomorrow.",
"When I saw you I fell in love, and you smiled because you knew it.",
"In all the world, there is no heart for me like yours. / In all the world, there is no love for you like mine.",
"To love or have loved, that is enough. Ask nothing further. There is no other pearl to be found in the dark folds of life.",
"If you live to be a hundred, I want to live to be a hundred minus one day, so I never have to live without you.",
"Some love stories aren't epic novels. Some are short stories. But that doesn't make them any less filled with love.",
"As he read, I fell in love the way you fall asleep: slowly, and then all at once.",
"I've never had a moment's doubt. I love you. I believe in you completely. You are my dearest one. My reason for life.",
"Do I love you? My god, if your love were a grain of sand, mine would be a universe of beaches.",
"I am who I am because of you.",
"I just want you to know that you're very special... and the only reason I'm telling you is that I don't know if anyone else ever has.",
"Remember, we're madly in love, so it's all right to kiss me any time you feel like it.",
"I love you. I knew it the minute I met you.",
"I loved her against reason, against promise, against peace, against hope, against happiness, against all discouragement that could be.",
"I love you not because of who you are, but because of who I am when I am with you.",
]
DHOKA = [
"Humne Unse Wafa Ki, Aur Dil Bhi Gya Toot, Wo Bhi Chinaal Nikli, Uski Maa ki Chut.",
"Dabbe Me Dabba, Dabbe Me Cake ..Tu Chutiya Hai Zara Seesha To Dekh.",
"Kaam Se Kaam Rakhoge Toh Naam Hoga, Randi Log Ke Chakkkar Me Padoge to Naam Badnaam Hoga.",
"Usne Kaha- Mah Lyf maH Rule, Maine Kaha Bhag BSDK , Tujhy Paida Karna hi Teri Baap ki Sabse Badi Vul.",
"Humse Ulajhna Mat, BSDK Teri Hasi Mita Dunga, Muh Me Land Daal Ke..Sari Hosiyaari Gand Se Nikal Dunga.",
"Aur Sunau Bhosdiwalo ..Kya Haal Hai?..Tumhare Sakal Se Zayda Toh Tumhare Gand Laal Hai!!",
"Pata Nhi Kya Kashish Hai Tumhare Mohabbat Me,Jab Bhi Tumhe Yaad Karta Hu Mera Land Khada Ho Jata Hai.",
"Konsa Mohabbat Kounsi Story, Gand Faad Dunga Agr Bolne Aayi Sorry!",
"Naam Banta Hai Risk Se, Chutiya Banta Hai IshQ Se.",
"Sun Be, Ab Tujhy Mere Zindegi Me Ane ka Koi Haq Nhi,,Aur Tu 1 Number Ki Randi Hai Isme KOi Saq Nhi.",
"Beta Tu Chugli Karna Chor De , Hum Ungli Karna Chor Dengy.",
]
METOOSTR = [
"Me too thanks",
"Haha yes, me too",
"Same lol",
"Me irl",
"Same here",
"Haha yes",
"Me rn",
]
GDNOON = [
"`My wishes will always be with you, Morning wish to make you feel fresh, Afternoon wish to accompany you, Evening wish to refresh you, Night wish to comfort you with sleep, Good Afternoon Dear!`",
"`With a deep blue sky over my head and a relaxing wind around me, the only thing I am missing right now is the company of you. I wish you a refreshing afternoon!`",
"`The day has come a halt realizing that I am yet to wish you a great afternoon. My dear, if you thought you were forgotten, you’re so wrong. Good afternoon!`",
"`Good afternoon! May the sweet peace be part of your heart today and always and there is life shining through your sigh. May you have much light and peace.`",
"`With you, every part of a day is beautiful. I live every day to love you more than yesterday. Wishing you an enjoyable afternoon my love!`",
"`This bright afternoon sun always reminds me of how you brighten my life with all the happiness. I miss you a lot this afternoon. Have a good time`!",
"`Nature looks quieter and more beautiful at this time of the day! You really don’t want to miss the beauty of this time! Wishing you a happy afternoon!`",
"`What a wonderful afternoon to finish you day with! I hope you’re having a great time sitting on your balcony, enjoying this afternoon beauty!`",
"`I wish I were with you this time of the day. We hardly have a beautiful afternoon like this nowadays. Wishing you a peaceful afternoon!`",
"`As you prepare yourself to wave goodbye to another wonderful day, I want you to know that, I am thinking of you all the time. Good afternoon!`",
"`This afternoon is here to calm your dog-tired mind after a hectic day. Enjoy the blessings it offers you and be thankful always. Good afternoon!`",
"`The gentle afternoon wind feels like a sweet hug from you. You are in my every thought in this wonderful afternoon. Hope you are enjoying the time!`",
"`Wishing an amazingly good afternoon to the most beautiful soul I have ever met. I hope you are having a good time relaxing and enjoying the beauty of this time!`",
"`Afternoon has come to indicate you, Half of your day’s work is over, Just another half a day to go, Be brisk and keep enjoying your works, Have a happy noon!`",
"`Mornings are for starting a new work, Afternoons are for remembering, Evenings are for refreshing, Nights are for relaxing, So remember people, who are remembering you, Have a happy noon!`",
"`If you feel tired and sleepy you could use a nap, you will see that it will help you recover your energy and feel much better to finish the day. Have a beautiful afternoon!`",
"`Time to remember sweet persons in your life, I know I will be first on the list, Thanks for that, Good afternoon my dear!`",
"`May this afternoon bring a lot of pleasant surprises for you and fills you heart with infinite joy. Wishing you a very warm and love filled afternoon!`",
"`Good, better, best. Never let it rest. Til your good is better and your better is best. “Good Afternoon`”",
"`May this beautiful afternoon fill your heart boundless happiness and gives you new hopes to start yours with. May you have lot of fun! Good afternoon dear!`",
"`As the blazing sun slowly starts making its way to the west, I want you to know that this beautiful afternoon is here to bless your life with success and peace. Good afternoon!`",
"`The deep blue sky of this bright afternoon reminds me of the deepness of your heart and the brightness of your soul. May you have a memorable afternoon!`",
"`Your presence could make this afternoon much more pleasurable for me. Your company is what I cherish all the time. Good afternoon!`",
"`A relaxing afternoon wind and the sweet pleasure of your company can make my day complete. Missing you so badly during this time of the day! Good afternoon!`",
"`Wishing you an afternoon experience so sweet and pleasant that feel thankful to be alive today. May you have the best afternoon of your life today!`",
"`My wishes will always be with you, Morning wish to make you feel fresh, Afternoon wish to accompany you, Evening wish to refresh you, Night wish to comfort you with sleep, Good afternoon dear!`",
"`Noon time – it’s time to have a little break, Take time to breathe the warmth of the sun, Who is shining up in between the clouds, Good afternoon!`",
"`You are the cure that I need to take three times a day, in the morning, at the night and in the afternoon. I am missing you a lot right now. Good afternoon!`",
"`I want you when I wake up in the morning, I want you when I go to sleep at night and I want you when I relax under the sun in the afternoon!`",
"`I pray to god that he keeps me close to you so we can enjoy these beautiful afternoons together forever! Wishing you a good time this afternoon!`",
"`You are every bit of special to me just like a relaxing afternoon is special after a toiling noon. Thinking of my special one in this special time of the day!`",
"`May your Good afternoon be light, blessed, enlightened, productive and happy.`",
"`Thinking of you is my most favorite hobby every afternoon. Your love is all I desire in life. Wishing my beloved an amazing afternoon!`",
"`I have tasted things that are so sweet, heard words that are soothing to the soul, but comparing the joy that they both bring, I’ll rather choose to see a smile from your cheeks. You are sweet. I love you.`",
"`How I wish the sun could obey me for a second, to stop its scorching ride on my angel. So sorry it will be hot there. Don’t worry, the evening will soon come. I love you.`",
"`I want you when I wake up in the morning, I want you when I go to sleep at night and I want you when I relax under the sun in the afternoon!`",
"`With you every day is my lucky day. So lucky being your love and don’t know what else to say. Morning night and noon, you make my day.`",
"`Your love is sweeter than what I read in romantic novels and fulfilling more than I see in epic films. I couldn’t have been me, without you. Good afternoon honey, I love you!`",
"`No matter what time of the day it is, No matter what I am doing, No matter what is right and what is wrong, I still remember you like this time, Good Afternoon!`",
"`Things are changing. I see everything turning around for my favor. And the last time I checked, it’s courtesy of your love. 1000 kisses from me to you. I love you dearly and wishing you a very happy noon.`",
"`You are sometimes my greatest weakness, you are sometimes my biggest strength. I do not have a lot of words to say but let you make sure, you make my day, Good Afternoon!`",
"`Every afternoon is to remember the one whom my heart beats for. The one I live and sure can die for. Hope you doing good there my love. Missing your face.`",
"`My love, I hope you are doing well at work and that you remember that I will be waiting for you at home with my arms open to pamper you and give you all my love. I wish you a good afternoon!`",
"`Afternoons like this makes me think about you more. I desire so deeply to be with you in one of these afternoons just to tell you how much I love you. Good afternoon my love!`",
"`My heart craves for your company all the time. A beautiful afternoon like this can be made more enjoyable if you just decide to spend it with me. Good afternoon!`",
]
CHASE_STR = [
"Where do you think you're going?",
"Huh? what? did they get away?",
"ZZzzZZzz... Huh? what? oh, just them again, nevermind.",
"`Get back here!`",
"`Not so fast...`",
"Look out for the wall!",
"Don't leave me alone with them!!",
"You run, you die.",
"`Jokes on you, I'm everywhere`",
"You're gonna regret that...",
"You could also try /kickme, I hear that's fun.",
"`Go bother someone else, no-one here cares.`",
"You can run, but you can't hide.",
"Is that all you've got?",
"I'm behind you...",
"You've got company!",
"We can do this the easy way, or the hard way.",
"You just don't get it, do you?",
"Yeah, you better run!",
"Please, remind me how much I care?",
"I'd run faster if I were you.",
"That's definitely the droid we're looking for.",
"May the odds be ever in your favour.",
"Famous last words.",
"And they disappeared forever, never to be seen again.",
'"Oh, look at me! I\'m so cool, I can run from a bot!" - this person',
"Yeah yeah, just tap /kickme already.",
"Here, take this ring and head to Mordor while you're at it.",
"eviral has it, they're still running...",
"Unlike Harry Potter, your parents can't protect you from me.",
"Fear leads to anger. Anger leads to hate. Hate leads to suffering. If you keep running in fear, you might "
"be the next Vader.",
"Multiple calculations later, I have decided my interest in your shenanigans is exactly 0.",
"eviral has it, they're still running.",
"Keep it up, not sure we want you here anyway.",
"You're a wiza- Oh. Wait. You're not Harry, keep moving.",
"NO RUNNING IN THE HALLWAYS!",
"Hasta la vista, baby.",
"Who let the dogs out?",
"It's funny, because no one cares.",
"Ah, what a waste. I liked that one.",
"Frankly, my dear, I don't give a damn.",
"My milkshake brings all the boys to yard... So run faster!",
"You can't HANDLE the truth!",
"A long time ago, in a galaxy far far away... Someone would've cared about that. Not anymore though.",
"Hey, look at them! They're running from the inevitable banhammer... Cute.",
"Han shot first. So will I.",
"What are you running after, a white rabbit?",
"As The Doctor would say... RUN!",
]
eviralOSTR = [
"Hi !",
"‘Ello, gov'nor!",
"What’s crackin’?",
"Howdy, howdy ,howdy!",
"hello, who's there, I'm talking.",
"You know who this is.",
"Yo!",
"Whaddup.",
"Greetings and salutations!",
"hello, sunshine!",
"`Hey, howdy, hi!`",
"What’s kickin’, little chicken?",
"Peek-a-boo!",
"Howdy-doody!",
"`Hey there, freshman!`",
"`I come in peace!`",
"`I come for peace!`",
"Ahoy, matey!",
"`Hi !`",
]
CONGRATULATION = [
"`Congratulations and BRAVO!`",
"`You did it! So proud of you!`",
"`This calls for celebrating! Congratulations!`",
"`I knew it was only a matter of time. Well done!`",
"`Congratulations on your well-deserved success.`",
"`Heartfelt congratulations to you.`",
"`Warmest congratulations on your achievement.`",
"`Congratulations and best wishes for your next adventure!”`",
"`So pleased to see you accomplishing great things.`",
"`Feeling so much joy for you today. What an impressive achievement!`",
]
BYESTR = [
"`Nice talking with you`",
"`I've gotta go!`",
"`I've gotta run!`",
"`I've gotta split`",
"`I'm off!`",
"`Great to see you,bye`",
"`See you soon`",
"`Farewell!`",
]
GDNIGHT = [
"`Good night keep your dreams alive`",
"`Night, night, to a dear friend! May you sleep well!`",
"`May the night fill with stars for you. May counting every one, give you contentment!`",
"`Wishing you comfort, happiness, and a good night’s sleep!`",
"`Now relax. The day is over. You did your best. And tomorrow you’ll do better. Good Night!`",
"`Good night to a friend who is the best! Get your forty winks!`",
"`May your pillow be soft, and your rest be long! Good night, friend!`",
"`Let there be no troubles, dear friend! Have a Good Night!`",
"`Rest soundly tonight, friend!`",
"`Have the best night’s sleep, friend! Sleep well!`",
"`Have a very, good night, friend! You are wonderful!`",
"`Relaxation is in order for you! Good night, friend!`",
"`Good night. May you have sweet dreams tonight.`",
"`Sleep well, dear friend and have sweet dreams.`",
"`As we wait for a brand new day, good night and have beautiful dreams.`",
"`Dear friend, I wish you a night of peace and bliss. Good night.`",
"`Darkness cannot last forever. Keep the hope alive. Good night.`",
"`By hook or crook you shall have sweet dreams tonight. Have a good night, buddy!`",
"`Good night, my friend. I pray that the good Lord watches over you as you sleep. Sweet dreams.`",
"`Good night, friend! May you be filled with tranquility!`",
"`Wishing you a calm night, friend! I hope it is good!`",
"`Wishing you a night where you can recharge for tomorrow!`",
"`Slumber tonight, good friend, and feel well rested, tomorrow!`",
"`Wishing my good friend relief from a hard day’s work! Good Night!`",
"`Good night, friend! May you have silence for sleep!`",
"`Sleep tonight, friend and be well! Know that you have done your very best today, and that you will do your very best, tomorrow!`",
"`Friend, you do not hesitate to get things done! Take tonight to relax and do more, tomorrow!`",
"`Friend, I want to remind you that your strong mind has brought you peace, before. May it do that again, tonight! May you hold acknowledgment of this with you!`",
"`Wishing you a calm, night, friend! Hoping everything winds down to your liking and that the following day meets your standards!`",
"`May the darkness of the night cloak you in a sleep that is sound and good! Dear friend, may this feeling carry you through the next day!`",
"`Friend, may the quietude you experience tonight move you to have many more nights like it! May you find your peace and hold on to it!`",
"`May there be no activity for you tonight, friend! May the rest that you have coming to you arrive swiftly! May the activity that you do tomorrow match your pace and be all of your own making!`",
"`When the day is done, friend, may you know that you have done well! When you sleep tonight, friend, may you view all the you hope for, tomorrow!`",
"`When everything is brought to a standstill, friend, I hope that your thoughts are good, as you drift to sleep! May those thoughts remain with you, during all of your days!`",
"`Every day, you encourage me to do new things, friend! May tonight’s rest bring a new day that overflows with courage and exciting events!`",
]
GDMORNING = [
"`Life is full of uncertainties. But there will always be a sunrise after every sunset. Good morning!`",
"`It doesn’t matter how bad was your yesterday. Today, you are going to make it a good one. Wishing you a good morning!`",
"`If you want to gain health and beauty, you should wake up early. Good morning!`",
"`May this morning offer you new hope for life! May you be happy and enjoy every moment of it. Good morning!`",
"`May the sun shower you with blessings and prosperity in the days ahead. Good morning!`",
"`Every sunrise marks the rise of life over death, hope over despair and happiness over suffering. Wishing you a very enjoyable morning today!`",
"`Wake up and make yourself a part of this beautiful morning. A beautiful world is waiting outside your door. Have an enjoyable time!`",
"`Welcome this beautiful morning with a smile on your face. I hope you’ll have a great day today. Wishing you a very good morning!`",
"`You have been blessed with yet another day. What a wonderful way of welcoming the blessing with such a beautiful morning! Good morning to you!`",
"`Waking up in such a beautiful morning is a guaranty for a day that’s beyond amazing. I hope you’ll make the best of it. Good morning!`",
"`Nothing is more refreshing than a beautiful morning that calms your mind and gives you reasons to smile. Good morning! Wishing you a great day.`",
"`Another day has just started. Welcome the blessings of this beautiful morning. Rise and shine like you always do. Wishing you a wonderful morning!`",
"`Wake up like the sun every morning and light up the world your awesomeness. You have so many great things to achieve today. Good morning!`",
"`A new day has come with so many new opportunities for you. Grab them all and make the best out of your day. Here’s me wishing you a good morning!`",
"`The darkness of night has ended. A new sun is up there to guide you towards a life so bright and blissful. Good morning dear!`",
"`Wake up, have your cup of morning tea and let the morning wind freshen you up like a happiness pill. Wishing you a good morning and a good day ahead!`",
"`Sunrises are the best; enjoy a cup of coffee or tea with yourself because this day is yours, good morning! Have a wonderful day ahead.`",
"`A bad day will always have a good morning, hope all your worries are gone and everything you wish could find a place. Good morning!`",
"`A great end may not be decided but a good creative beginning can be planned and achieved. Good morning, have a productive day!`",
"`Having a sweet morning, a cup of coffee, a day with your loved ones is what sets your “Good Morning” have a nice day!`",
"`Anything can go wrong in the day but the morning has to be beautiful, so I am making sure your morning starts beautiful. Good morning!`",
"`Open your eyes with a smile, pray and thank god that you are waking up to a new beginning. Good morning!`",
"`Morning is not only sunrise but A Beautiful Miracle of God that defeats the darkness and spread light. Good Morning.`",
"`Life never gives you a second chance. So, enjoy every bit of it. Why not start with this beautiful morning. Good Morning!`",
"`If you want to gain health and beauty, you should wake up early. Good Morning!`",
"`Birds are singing sweet melodies and a gentle breeze is blowing through the trees, what a perfect morning to wake you up. Good morning!`",
"`This morning is so relaxing and beautiful that I really don’t want you to miss it in any way. So, wake up dear friend. A hearty good morning to you!`",
"`Mornings come with a blank canvas. Paint it as you like and call it a day. Wake up now and start creating your perfect day. Good morning!`",
"`Every morning brings you new hopes and new opportunities. Don’t miss any one of them while you’re sleeping. Good morning!`",
"`Start your day with solid determination and great attitude. You’re going to have a good day today. Good morning my friend!`",
"`Friendship is what makes life worth living. I want to thank you for being such a special friend of mine. Good morning to you!`",
"`A friend like you is pretty hard to come by in life. I must consider myself lucky enough to have you. Good morning. Wish you an amazing day ahead!`",
"`The more you count yourself as blessed, the more blessed you will be. Thank God for this beautiful morning and let friendship and love prevail this morning.`",
"`Wake up and sip a cup of loving friendship. Eat your heart out from a plate of hope. To top it up, a fork full of kindness and love. Enough for a happy good morning!`",
"`It is easy to imagine the world coming to an end. But it is difficult to imagine spending a day without my friends. Good morning.`",
]
@bot.on(admin_cmd(pattern=f"love$", outgoing=True))
@bot.on(sudo_cmd(pattern='love$', allow_sudo=True))
async def love(e):
txt = random.choice(LOVESTR)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"dhoka$", outgoing=True))
@bot.on(sudo_cmd(pattern='dhoka$', allow_sudo=True))
async def katgya(e):
txt = random.choice(DHOKA)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"metoo$", outgoing=True))
@bot.on(sudo_cmd(pattern='metoo$', allow_sudo=True))
async def metoo(e):
txt = random.choice(METOOSTR)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"gdnoon$", outgoing=True))
@bot.on(sudo_cmd(pattern='gdnoon$', allow_sudo=True))
async def noon(e):
txt = random.choice(GDNOON)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"chase$", outgoing=True))
@bot.on(sudo_cmd(pattern='chase$', allow_sudo=True))
async def police(e):
txt = random.choice(CHASE_STR)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"congo$", outgoing=True))
@bot.on(sudo_cmd(pattern='congo$', allow_sudo=True))
async def Sahih(e):
txt = random.choice(CONGRATULATION)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"qhi$", outgoing=True))
@bot.on(sudo_cmd(pattern='qhi$', allow_sudo=True))
async def hoi(e):
txt = random.choice(eviralOSTR)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"gdbye$", outgoing=True))
@bot.on(sudo_cmd(pattern='gdbye$', allow_sudo=True))
async def bhago(e):
txt = random.choice(BYESTR)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"gdnyt$", outgoing=True))
@bot.on(sudo_cmd(pattern='gdnyt$', allow_sudo=True))
async def night(e):
txt = random.choice(GDNIGHT)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern=f"gdmng$", outgoing=True))
@bot.on(sudo_cmd(pattern='gdmng$', allow_sudo=True))
async def morning(e):
txt = random.choice(GDMORNING)
await edit_or_reply(e, txt)
@bot.on(admin_cmd(pattern="quote ?(.*)", outgoing=True))
@bot.on(sudo_cmd(pattern="quote ?(.*)", allow_sudo=True))
async def quote_search(event):
if event.fwd_from:
return
catevent = await edit_or_reply(event, "`Processing...`")
input_str = event.pattern_match.group(1)
if not input_str:
api_url = "https://quotes.cwprojects.live/random"
try:
response = requests.get(api_url).json()
except:
response = None
else:
api_url = f"https://quotes.cwprojects.live/search/query={input_str}"
try:
response = random.choice(requests.get(api_url).json())
except:
response = None
if response is not None:
await catevent.edit(f"`{response['text']}`")
else:
await edit_or_reply(catevent, "`Sorry Zero results found`", 5)
CmdHelp("quotes").add_command(
"quote", None, "Sends a random mind-blowing quote"
).add_command("gdmng", None, "Sends a random Good Morning Quote").add_command(
"gdnyt", None, "Sends a random Good Night Quote"
).add_command(
"gdbye", None, "Sends a random Good Byee Quote"
).add_command(
"qhi", None, "Sends a random hello msg"
).add_command(
"congo", None, "Sends a random congratulations quote"
).add_command(
"chase", None, "Sends a random Chase quote"
).add_command(
"gdnoon", None, "Sends a random Good Afternoon quote"
).add_command(
"metoo", None, 'Sends a text saying "Mee too"'
).add_command(
"dhoka", None, "Sends a random Dhoka quote(katt gya bc)"
).add_command(
"love", None, "Sends a random love quote🥰. (A stage before .dhoka)"
).add()
|
[
"userbot.cmdhelp.CmdHelp",
"random.choice",
"requests.get",
"FIREX.utils.sudo_cmd",
"FIREX.utils.edit_or_reply",
"FIREX.utils.admin_cmd"
] |
[((22598, 22620), 'random.choice', 'random.choice', (['LOVESTR'], {}), '(LOVESTR)\n', (22611, 22620), False, 'import random\n'), ((22473, 22515), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""love$"""', 'outgoing': '(True)'}), "(pattern=f'love$', outgoing=True)\n", (22482, 22515), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((22525, 22567), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""love$"""', 'allow_sudo': '(True)'}), "(pattern='love$', allow_sudo=True)\n", (22533, 22567), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((22792, 22812), 'random.choice', 'random.choice', (['DHOKA'], {}), '(DHOKA)\n', (22805, 22812), False, 'import random\n'), ((22663, 22706), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""dhoka$"""', 'outgoing': '(True)'}), "(pattern=f'dhoka$', outgoing=True)\n", (22672, 22706), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((22716, 22759), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""dhoka$"""', 'allow_sudo': '(True)'}), "(pattern='dhoka$', allow_sudo=True)\n", (22724, 22759), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((22983, 23006), 'random.choice', 'random.choice', (['METOOSTR'], {}), '(METOOSTR)\n', (22996, 23006), False, 'import random\n'), ((22855, 22898), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""metoo$"""', 'outgoing': '(True)'}), "(pattern=f'metoo$', outgoing=True)\n", (22864, 22898), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((22908, 22951), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""metoo$"""', 'allow_sudo': '(True)'}), "(pattern='metoo$', allow_sudo=True)\n", (22916, 22951), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23178, 23199), 'random.choice', 'random.choice', (['GDNOON'], {}), '(GDNOON)\n', (23191, 23199), False, 'import random\n'), ((23049, 23093), 'FIREX.utils.admin_cmd', 
'admin_cmd', ([], {'pattern': 'f"""gdnoon$"""', 'outgoing': '(True)'}), "(pattern=f'gdnoon$', outgoing=True)\n", (23058, 23093), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23103, 23147), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""gdnoon$"""', 'allow_sudo': '(True)'}), "(pattern='gdnoon$', allow_sudo=True)\n", (23111, 23147), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23371, 23395), 'random.choice', 'random.choice', (['CHASE_STR'], {}), '(CHASE_STR)\n', (23384, 23395), False, 'import random\n'), ((23242, 23285), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""chase$"""', 'outgoing': '(True)'}), "(pattern=f'chase$', outgoing=True)\n", (23251, 23285), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23295, 23338), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""chase$"""', 'allow_sudo': '(True)'}), "(pattern='chase$', allow_sudo=True)\n", (23303, 23338), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23566, 23595), 'random.choice', 'random.choice', (['CONGRATULATION'], {}), '(CONGRATULATION)\n', (23579, 23595), False, 'import random\n'), ((23438, 23481), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""congo$"""', 'outgoing': '(True)'}), "(pattern=f'congo$', outgoing=True)\n", (23447, 23481), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23491, 23534), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""congo$"""', 'allow_sudo': '(True)'}), "(pattern='congo$', allow_sudo=True)\n", (23499, 23534), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23760, 23785), 'random.choice', 'random.choice', (['eviralOSTR'], {}), '(eviralOSTR)\n', (23773, 23785), False, 'import random\n'), ((23638, 23679), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""qhi$"""', 'outgoing': '(True)'}), "(pattern=f'qhi$', outgoing=True)\n", (23647, 23679), False, 'from 
FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23689, 23730), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""qhi$"""', 'allow_sudo': '(True)'}), "(pattern='qhi$', allow_sudo=True)\n", (23697, 23730), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23956, 23977), 'random.choice', 'random.choice', (['BYESTR'], {}), '(BYESTR)\n', (23969, 23977), False, 'import random\n'), ((23828, 23871), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""gdbye$"""', 'outgoing': '(True)'}), "(pattern=f'gdbye$', outgoing=True)\n", (23837, 23871), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23881, 23924), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""gdbye$"""', 'allow_sudo': '(True)'}), "(pattern='gdbye$', allow_sudo=True)\n", (23889, 23924), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24148, 24170), 'random.choice', 'random.choice', (['GDNIGHT'], {}), '(GDNIGHT)\n', (24161, 24170), False, 'import random\n'), ((24020, 24063), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""gdnyt$"""', 'outgoing': '(True)'}), "(pattern=f'gdnyt$', outgoing=True)\n", (24029, 24063), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24073, 24116), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""gdnyt$"""', 'allow_sudo': '(True)'}), "(pattern='gdnyt$', allow_sudo=True)\n", (24081, 24116), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24343, 24367), 'random.choice', 'random.choice', (['GDMORNING'], {}), '(GDMORNING)\n', (24356, 24367), False, 'import random\n'), ((24213, 24256), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': 'f"""gdmng$"""', 'outgoing': '(True)'}), "(pattern=f'gdmng$', outgoing=True)\n", (24222, 24256), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24266, 24309), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""gdmng$"""', 'allow_sudo': '(True)'}), 
"(pattern='gdmng$', allow_sudo=True)\n", (24274, 24309), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24410, 24457), 'FIREX.utils.admin_cmd', 'admin_cmd', ([], {'pattern': '"""quote ?(.*)"""', 'outgoing': '(True)'}), "(pattern='quote ?(.*)', outgoing=True)\n", (24419, 24457), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24467, 24515), 'FIREX.utils.sudo_cmd', 'sudo_cmd', ([], {'pattern': '"""quote ?(.*)"""', 'allow_sudo': '(True)'}), "(pattern='quote ?(.*)', allow_sudo=True)\n", (24475, 24515), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((22631, 22652), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (22644, 22652), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((22823, 22844), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (22836, 22844), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23017, 23038), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (23030, 23038), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23210, 23231), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (23223, 23231), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23406, 23427), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (23419, 23427), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23606, 23627), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (23619, 23627), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23796, 23817), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (23809, 23817), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((23988, 24009), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 
'txt'], {}), '(e, txt)\n', (24001, 24009), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24181, 24202), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (24194, 24202), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24378, 24399), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['e', 'txt'], {}), '(e, txt)\n', (24391, 24399), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24607, 24646), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['event', '"""`Processing...`"""'], {}), "(event, '`Processing...`')\n", (24620, 24646), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((25198, 25254), 'FIREX.utils.edit_or_reply', 'edit_or_reply', (['catevent', '"""`Sorry Zero results found`"""', '(5)'], {}), "(catevent, '`Sorry Zero results found`', 5)\n", (25211, 25254), False, 'from FIREX.utils import admin_cmd, edit_or_reply, sudo_cmd\n'), ((24808, 24829), 'requests.get', 'requests.get', (['api_url'], {}), '(api_url)\n', (24820, 24829), False, 'import requests\n'), ((25018, 25039), 'requests.get', 'requests.get', (['api_url'], {}), '(api_url)\n', (25030, 25039), False, 'import requests\n'), ((25257, 25274), 'userbot.cmdhelp.CmdHelp', 'CmdHelp', (['"""quotes"""'], {}), "('quotes')\n", (25264, 25274), False, 'from userbot.cmdhelp import CmdHelp\n')]
|
############
# Standard #
############
import math
###############
# Third Party #
###############
import ophyd
import pytest
##########
# Module #
##########
from detrot import ConeJoint, AngledJoint, StandPoint, Point
from conftest import PseudoMotor
@pytest.fixture(scope='function')
def pseudo_cone():
    """ConeJoint built on pseudo motors with a fixed (1, 2, 3) offset."""
    return ConeJoint(slide=PseudoMotor(5),
                     lift=PseudoMotor(10),
                     offset=Point(1, 2, 3))
@pytest.fixture(scope='function')
def pseudo_angle():
    """AngledJoint built on pseudo motors with a fixed (1, 2, 3) offset."""
    return AngledJoint(slide=PseudoMotor(5),
                       lift=PseudoMotor(10),
                       offset=Point(1, 2, 3))
def test_cone_joint(pseudo_cone):
    """Joint position at the vertical and horizontal angle extremes."""
    # Vertical: slide drives x, lift drives y
    pseudo_cone.alpha = math.pi/2.
    assert pseudo_cone.joint.x == pytest.approx(5)
    assert pseudo_cone.joint.y == pytest.approx(10)
    # Horizontal: both motors contribute along x only
    pseudo_cone.alpha = 0
    assert pseudo_cone.joint.x == 15
    assert pseudo_cone.joint.y == 0
def test_cone_invert(pseudo_cone):
    """Inverting a joint position at 45 degrees recovers the motor values."""
    pseudo_cone.alpha = math.pi/4.
    slide, lift = pseudo_cone.invert((13.07, 9.07))
    assert slide == pytest.approx(5, 0.1)
    assert lift == pytest.approx(10, 0.1)
def test_angle_joint(pseudo_angle):
    """Joint position at both extremes, with and without a slide motor."""
    # Vertical
    pseudo_angle.alpha = math.pi/2.
    assert pseudo_angle.joint.x == pytest.approx(5)
    assert pseudo_angle.joint.y == pytest.approx(10)
    assert pseudo_angle.joint.z == pytest.approx(0)
    # Horizontal
    pseudo_angle.alpha = 0
    assert pseudo_angle.joint.x == pytest.approx(5)
    assert pseudo_angle.joint.y == pytest.approx(0)
    assert pseudo_angle.joint.z == pytest.approx(10)
    # Removing the slide removes its contribution entirely
    pseudo_angle.slide = None
    assert pseudo_angle.joint.x == pytest.approx(0)
    assert pseudo_angle.joint.y == pytest.approx(0)
    assert pseudo_angle.joint.z == pytest.approx(10)
def test_angle_invert(pseudo_angle):
    """Invert recovers motor displacements, with and without a slide."""
    # Vertical stand
    pseudo_angle.alpha = math.pi/2.
    slide, lift = pseudo_angle.invert((6, 12))
    assert slide == pytest.approx(5, 0.1)
    assert lift == pytest.approx(10, 0.1)
    # With no slide motor, invert yields a single displacement
    pseudo_angle.slide = None
    assert pseudo_angle.invert((6, 12)) == pytest.approx(10, 0.1)
def test_position(pseudo_cone):
    """Stand position equals the joint position shifted by the offset."""
    pseudo_cone.alpha = 0
    assert pseudo_cone.position == (16, 2, 3)
    pseudo_cone.alpha = math.pi/2.
    assert pseudo_cone.position.x == pytest.approx(6, 0.1)
    # y picks up the lift, z stays at the offset
    assert pseudo_cone.position.y == 12
    assert pseudo_cone.position.z == 3
def test_displacement(pseudo_angle):
    """Displacement is (slide, lift), or just lift when there is no slide."""
    assert pseudo_angle.displacement == (5, 10)
    pseudo_angle.slide = None
    assert pseudo_angle.displacement == 10
def test_set_joint(pseudo_angle):
    """set_joint drives the motors to reach a requested joint position."""
    # Vertical stand
    pseudo_angle.alpha = math.pi/2.
    pseudo_angle.set_joint((6, 12))
    assert pseudo_angle.displacement[0] == pytest.approx(5, 0.1)
    assert pseudo_angle.displacement[1] == pytest.approx(10, 0.1)
    # Without a slide only a single displacement is produced
    pseudo_angle.slide = None
    pseudo_angle.set_joint((6, 12))
    assert pseudo_angle.displacement == pytest.approx(10, 0.1)
def test_model(pseudo_angle, pseudo_cone):
    """model() mirrors a joint using SoftPositioner stand-ins."""
    mirrored = AngledJoint.model(pseudo_angle)
    assert isinstance(mirrored.slide, ophyd.SoftPositioner)
    assert isinstance(mirrored.lift, ophyd.SoftPositioner)
    assert mirrored.displacement == pseudo_angle.displacement
    # A missing slide stays missing in the model
    pseudo_angle.slide = None
    mirrored = AngledJoint.model(pseudo_angle)
    assert mirrored.slide == None
    assert isinstance(mirrored.lift, ophyd.SoftPositioner)
    assert mirrored.displacement == pseudo_angle.displacement
    # ConeJoint models the same way
    mirrored = ConeJoint.model(pseudo_cone)
    assert isinstance(mirrored.slide, ophyd.SoftPositioner)
    assert isinstance(mirrored.lift, ophyd.SoftPositioner)
    assert mirrored.displacement == pseudo_cone.displacement
def test_stop(pseudo_cone):
    """stop() is propagated to both underlying motors."""
    pseudo_cone.stop()
    for motor in (pseudo_cone.slide, pseudo_cone.lift):
        motor.stop_call.method.assert_called_with()
def test_cmp():
    """Joints built from the same motor pair compare equal."""
    slide, lift = PseudoMotor(5), PseudoMotor(10)
    assert AngledJoint(slide, lift) == AngledJoint(slide, lift)
|
[
"pytest.approx",
"conftest.PseudoMotor",
"detrot.Point",
"detrot.ConeJoint.model",
"pytest.fixture",
"detrot.AngledJoint",
"detrot.AngledJoint.model"
] |
[((258, 290), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (272, 290), False, 'import pytest\n'), ((473, 505), 'pytest.fixture', 'pytest.fixture', ([], {'scope': '"""function"""'}), "(scope='function')\n", (487, 505), False, 'import pytest\n'), ((3125, 3156), 'detrot.AngledJoint.model', 'AngledJoint.model', (['pseudo_angle'], {}), '(pseudo_angle)\n', (3142, 3156), False, 'from detrot import ConeJoint, AngledJoint, StandPoint, Point\n'), ((3391, 3422), 'detrot.AngledJoint.model', 'AngledJoint.model', (['pseudo_angle'], {}), '(pseudo_angle)\n', (3408, 3422), False, 'from detrot import ConeJoint, AngledJoint, StandPoint, Point\n'), ((3597, 3625), 'detrot.ConeJoint.model', 'ConeJoint.model', (['pseudo_cone'], {}), '(pseudo_cone)\n', (3612, 3625), False, 'from detrot import ConeJoint, AngledJoint, StandPoint, Point\n'), ((3994, 4008), 'conftest.PseudoMotor', 'PseudoMotor', (['(5)'], {}), '(5)\n', (4005, 4008), False, 'from conftest import PseudoMotor\n'), ((4018, 4033), 'conftest.PseudoMotor', 'PseudoMotor', (['(10)'], {}), '(10)\n', (4029, 4033), False, 'from conftest import PseudoMotor\n'), ((794, 828), 'pytest.approx', 'pytest.approx', (['pseudo_cone.joint.x'], {}), '(pseudo_cone.joint.x)\n', (807, 828), False, 'import pytest\n'), ((845, 879), 'pytest.approx', 'pytest.approx', (['pseudo_cone.joint.y'], {}), '(pseudo_cone.joint.y)\n', (858, 879), False, 'import pytest\n'), ((1140, 1161), 'pytest.approx', 'pytest.approx', (['(5)', '(0.1)'], {}), '(5, 0.1)\n', (1153, 1161), False, 'import pytest\n'), ((1211, 1233), 'pytest.approx', 'pytest.approx', (['(10)', '(0.1)'], {}), '(10, 0.1)\n', (1224, 1233), False, 'import pytest\n'), ((1336, 1371), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.x'], {}), '(pseudo_angle.joint.x)\n', (1349, 1371), False, 'import pytest\n'), ((1388, 1423), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.y'], {}), '(pseudo_angle.joint.y)\n', (1401, 1423), False, 'import 
pytest\n'), ((1441, 1476), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.z'], {}), '(pseudo_angle.joint.z)\n', (1454, 1476), False, 'import pytest\n'), ((1542, 1577), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.x'], {}), '(pseudo_angle.joint.x)\n', (1555, 1577), False, 'import pytest\n'), ((1594, 1629), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.y'], {}), '(pseudo_angle.joint.y)\n', (1607, 1629), False, 'import pytest\n'), ((1646, 1681), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.z'], {}), '(pseudo_angle.joint.z)\n', (1659, 1681), False, 'import pytest\n'), ((1749, 1784), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.x'], {}), '(pseudo_angle.joint.x)\n', (1762, 1784), False, 'import pytest\n'), ((1801, 1836), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.y'], {}), '(pseudo_angle.joint.y)\n', (1814, 1836), False, 'import pytest\n'), ((1853, 1888), 'pytest.approx', 'pytest.approx', (['pseudo_angle.joint.z'], {}), '(pseudo_angle.joint.z)\n', (1866, 1888), False, 'import pytest\n'), ((2033, 2054), 'pytest.approx', 'pytest.approx', (['(5)', '(0.1)'], {}), '(5, 0.1)\n', (2046, 2054), False, 'import pytest\n'), ((2099, 2121), 'pytest.approx', 'pytest.approx', (['(10)', '(0.1)'], {}), '(10, 0.1)\n', (2112, 2121), False, 'import pytest\n'), ((2213, 2235), 'pytest.approx', 'pytest.approx', (['(10)', '(0.1)'], {}), '(10, 0.1)\n', (2226, 2235), False, 'import pytest\n'), ((2412, 2433), 'pytest.approx', 'pytest.approx', (['(6)', '(0.1)'], {}), '(6, 0.1)\n', (2425, 2433), False, 'import pytest\n'), ((2834, 2855), 'pytest.approx', 'pytest.approx', (['(5)', '(0.1)'], {}), '(5, 0.1)\n', (2847, 2855), False, 'import pytest\n'), ((2898, 2920), 'pytest.approx', 'pytest.approx', (['(10)', '(0.1)'], {}), '(10, 0.1)\n', (2911, 2920), False, 'import pytest\n'), ((3046, 3068), 'pytest.approx', 'pytest.approx', (['(10)', '(0.1)'], {}), '(10, 0.1)\n', (3059, 3068), False, 'import pytest\n'), ((4045, 4064), 
'detrot.AngledJoint', 'AngledJoint', (['p1', 'p2'], {}), '(p1, p2)\n', (4056, 4064), False, 'from detrot import ConeJoint, AngledJoint, StandPoint, Point\n'), ((4067, 4086), 'detrot.AngledJoint', 'AngledJoint', (['p1', 'p2'], {}), '(p1, p2)\n', (4078, 4086), False, 'from detrot import ConeJoint, AngledJoint, StandPoint, Point\n'), ((342, 356), 'conftest.PseudoMotor', 'PseudoMotor', (['(5)'], {}), '(5)\n', (353, 356), False, 'from conftest import PseudoMotor\n'), ((390, 405), 'conftest.PseudoMotor', 'PseudoMotor', (['(10)'], {}), '(10)\n', (401, 405), False, 'from conftest import PseudoMotor\n'), ((439, 453), 'detrot.Point', 'Point', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (444, 453), False, 'from detrot import ConeJoint, AngledJoint, StandPoint, Point\n'), ((560, 574), 'conftest.PseudoMotor', 'PseudoMotor', (['(5)'], {}), '(5)\n', (571, 574), False, 'from conftest import PseudoMotor\n'), ((610, 625), 'conftest.PseudoMotor', 'PseudoMotor', (['(10)'], {}), '(10)\n', (621, 625), False, 'from conftest import PseudoMotor\n'), ((661, 675), 'detrot.Point', 'Point', (['(1)', '(2)', '(3)'], {}), '(1, 2, 3)\n', (666, 675), False, 'from detrot import ConeJoint, AngledJoint, StandPoint, Point\n')]
|
import numpy as np
from urban_AD_env.utils import rotated_rectangles_intersect
def test_rotated_rectangles_intersect():
    """Intersection checks for rotated and axis-aligned rectangle pairs."""
    # Two slightly rotated, overlapping rectangles
    assert rotated_rectangles_intersect(
        ([12.86076812, 28.60182391], 5.0, 2.0, -0.4675779906495494),
        ([9.67753944, 28.90585412], 5.0, 2.0, -0.3417019364473201))
    # Axis-aligned overlap
    assert rotated_rectangles_intersect(([0, 0], 2, 1, 0), ([0, 1], 2, 1, 0))
    # Clearly separated rectangles do not intersect
    assert not rotated_rectangles_intersect(([0, 0], 2, 1, 0), ([0, 2.1], 2, 1, 0))
    assert not rotated_rectangles_intersect(([0, 0], 2, 1, 0), ([1, 1.1], 2, 1, 0))
    # Rotating the first rectangle by 45 degrees makes them meet
    assert rotated_rectangles_intersect(([0, 0], 2, 1, np.pi/4), ([1, 1.1], 2, 1, 0))
|
[
"urban_AD_env.utils.rotated_rectangles_intersect"
] |
[((134, 293), 'urban_AD_env.utils.rotated_rectangles_intersect', 'rotated_rectangles_intersect', (['([12.86076812, 28.60182391], 5.0, 2.0, -0.4675779906495494)', '([9.67753944, 28.90585412], 5.0, 2.0, -0.3417019364473201)'], {}), '(([12.86076812, 28.60182391], 5.0, 2.0, -\n 0.4675779906495494), ([9.67753944, 28.90585412], 5.0, 2.0, -\n 0.3417019364473201))\n', (162, 293), False, 'from urban_AD_env.utils import rotated_rectangles_intersect\n'), ((335, 401), 'urban_AD_env.utils.rotated_rectangles_intersect', 'rotated_rectangles_intersect', (['([0, 0], 2, 1, 0)', '([0, 1], 2, 1, 0)'], {}), '(([0, 0], 2, 1, 0), ([0, 1], 2, 1, 0))\n', (363, 401), False, 'from urban_AD_env.utils import rotated_rectangles_intersect\n'), ((581, 657), 'urban_AD_env.utils.rotated_rectangles_intersect', 'rotated_rectangles_intersect', (['([0, 0], 2, 1, np.pi / 4)', '([1, 1.1], 2, 1, 0)'], {}), '(([0, 0], 2, 1, np.pi / 4), ([1, 1.1], 2, 1, 0))\n', (609, 657), False, 'from urban_AD_env.utils import rotated_rectangles_intersect\n'), ((417, 485), 'urban_AD_env.utils.rotated_rectangles_intersect', 'rotated_rectangles_intersect', (['([0, 0], 2, 1, 0)', '([0, 2.1], 2, 1, 0)'], {}), '(([0, 0], 2, 1, 0), ([0, 2.1], 2, 1, 0))\n', (445, 485), False, 'from urban_AD_env.utils import rotated_rectangles_intersect\n'), ((501, 569), 'urban_AD_env.utils.rotated_rectangles_intersect', 'rotated_rectangles_intersect', (['([0, 0], 2, 1, 0)', '([1, 1.1], 2, 1, 0)'], {}), '(([0, 0], 2, 1, 0), ([1, 1.1], 2, 1, 0))\n', (529, 569), False, 'from urban_AD_env.utils import rotated_rectangles_intersect\n')]
|
"""Functions that test server functions."""
import pytest
from pyramid.httpexceptions import HTTPBadRequest, HTTPNotFound
from datetime import datetime
from learning_journal.models import Entry
def test_list_view_returns_list_of_entries_in_dict(dummy_request):
    """list_view exposes its entries as a list under the 'journals' key."""
    from learning_journal.views.default import list_view
    result = list_view(dummy_request)
    assert 'journals' in result
    assert isinstance(result['journals'], list)
def test_adding_to_dummy_db_works(dummy_request):
    """An Entry added through the dummy session becomes queryable."""
    db = dummy_request.dbsession
    assert len(db.query(Entry).all()) == 0
    db.add(Entry(
        title="Fake Title",
        creation_date=datetime.now(),
        body="The body lul"
    ))
    assert len(db.query(Entry).all()) == 1
def test_list_view_returns_a_dict(dummy_request):
    """list_view returns a plain dict."""
    from learning_journal.views.default import list_view
    assert isinstance(list_view(dummy_request), dict)
def test_list_view_returns_proper_amount_of_content(dummy_request):
    """list_view returns exactly one item per Entry row in the database."""
    from learning_journal.views.default import list_view
    result = list_view(dummy_request)
    rows = dummy_request.dbsession.query(Entry).all()
    assert len(result["journals"]) == len(rows)
def test_about_view_returns_a_dict(dummy_request):
    """about_view returns a plain dict."""
    from learning_journal.views.default import about_view
    assert isinstance(about_view(dummy_request), dict)
def test_create_view_returns_a_dict(dummy_request):
    """create_view returns a plain dict."""
    from learning_journal.views.default import create_view
    assert isinstance(create_view(dummy_request), dict)
def test_detail_view_returns_post_detail(dummy_request):
    """detail_view resolves the matchdict id to the stored post."""
    from learning_journal.views.default import detail_view
    entry = Entry(
        title="Fake Title",
        creation_date=datetime.now(),
        body="The body lul"
    )
    dummy_request.dbsession.add(entry)
    dummy_request.matchdict['id'] = 1
    result = detail_view(dummy_request)
    assert result['post'].title == "Fake Title"
def test_create_view_get_empty_is_empty_dict(dummy_request):
    """A GET of the create view yields an empty form context."""
    from learning_journal.views.default import create_view
    dummy_request.method = "GET"
    assert create_view(dummy_request) == {}
def test_create_view_post_works(dummy_request):
    """A complete POST to the create view answers with a 302 redirect."""
    from learning_journal.views.default import create_view
    dummy_request.method = "POST"
    dummy_request.POST = {"title": "Test", "body": "This is a body."}
    result = create_view(dummy_request)
    assert result.status_code == 302
def test_create_view_raises_bad_request(dummy_request):
    """A POST missing the body field raises HTTPBadRequest."""
    from learning_journal.views.default import create_view
    dummy_request.method = "POST"
    dummy_request.POST = {"title": "Test"}
    with pytest.raises(HTTPBadRequest):
        create_view(dummy_request)
def test_new_entry_redirects_to_home_page(testapp, empty_db):
    """Posting a new entry sends the user back to the home page."""
    payload = {
        "title": "Fake Title",
        "body": "The body lul"
    }
    result = testapp.post("/journal/new-entry", payload)
    assert result.location == "http://localhost/"
|
[
"learning_journal.views.default.create_view",
"learning_journal.views.default.list_view",
"datetime.datetime.now",
"pytest.raises",
"learning_journal.views.default.about_view",
"learning_journal.views.default.detail_view"
] |
[((378, 402), 'learning_journal.views.default.list_view', 'list_view', (['dummy_request'], {}), '(dummy_request)\n', (387, 402), False, 'from learning_journal.views.default import list_view\n'), ((1061, 1085), 'learning_journal.views.default.list_view', 'list_view', (['dummy_request'], {}), '(dummy_request)\n', (1070, 1085), False, 'from learning_journal.views.default import list_view\n'), ((1308, 1332), 'learning_journal.views.default.list_view', 'list_view', (['dummy_request'], {}), '(dummy_request)\n', (1317, 1332), False, 'from learning_journal.views.default import list_view\n'), ((1610, 1635), 'learning_journal.views.default.about_view', 'about_view', (['dummy_request'], {}), '(dummy_request)\n', (1620, 1635), False, 'from learning_journal.views.default import about_view\n'), ((1848, 1874), 'learning_journal.views.default.create_view', 'create_view', (['dummy_request'], {}), '(dummy_request)\n', (1859, 1874), False, 'from learning_journal.views.default import create_view\n'), ((2306, 2332), 'learning_journal.views.default.detail_view', 'detail_view', (['dummy_request'], {}), '(dummy_request)\n', (2317, 2332), False, 'from learning_journal.views.default import detail_view\n'), ((2620, 2646), 'learning_journal.views.default.create_view', 'create_view', (['dummy_request'], {}), '(dummy_request)\n', (2631, 2646), False, 'from learning_journal.views.default import create_view\n'), ((2983, 3009), 'learning_journal.views.default.create_view', 'create_view', (['dummy_request'], {}), '(dummy_request)\n', (2994, 3009), False, 'from learning_journal.views.default import create_view\n'), ((3349, 3378), 'pytest.raises', 'pytest.raises', (['HTTPBadRequest'], {}), '(HTTPBadRequest)\n', (3362, 3378), False, 'import pytest\n'), ((3388, 3414), 'learning_journal.views.default.create_view', 'create_view', (['dummy_request'], {}), '(dummy_request)\n', (3399, 3414), False, 'from learning_journal.views.default import create_view\n'), ((723, 737), 'datetime.datetime.now', 
'datetime.now', ([], {}), '()\n', (735, 737), False, 'from datetime import datetime\n'), ((2159, 2173), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2171, 2173), False, 'from datetime import datetime\n')]
|
from aiohttp import ClientSession, FormData
from Findclone import __version__
from .models import Account, Profiles, Histories, get_builder
from .utils import random_string, paint_boxes
from .exceptions import a_error_handler, FindcloneError
from io import BufferedReader, BytesIO
class FindcloneAsync:
    """async findclone api class

    Attributes:
        headers : dict - set requests headers
    """
    def __init__(self):
        self._session = ClientSession()
        self.headers = {"User-Agent": f"findclone-api/{__version__}"}
        self.__builder = get_builder().build_aio_response
        self._session_key = None
        self._userid = None
        self.__info = None

    async def login(self,
                    login: [str, None] = None,
                    password: [str, None] = None,
                    session_key: [str, None] = None,
                    userid: [str, int, None] = None) -> bool:
        """
        *coro
        Findclone authorisation
        :param login: account login
        :param password: account password
        :param session_key: account session_key
        :param userid: account userid
        :return: True is auth success
        """
        if login and password:
            async with self._session.post("https://findclone.ru/login", data={"phone": login,
                                                                              "password": password}) as response:
                await a_error_handler(response)
                resp = await response.json()
                self.__info = await self.__builder(response)
            self._session_key = resp["session_key"]
            self._userid = resp["userid"]
            # Persist credentials so every later request is authenticated
            self.headers.update({'session-key': self._session_key, 'user-id': str(self._userid)})
            return True
        elif session_key and userid:
            self.headers.update({"session-key": session_key, "user-id": str(userid)})
            # Validate the supplied session by fetching the profile
            async with self._session.get("https://findclone.ru/profile", headers=self.headers) as response:
                await a_error_handler(response)
                self.__info = await self.__builder(response)
            self._session_key = session_key
            self._userid = userid
            return True
        else:
            raise FindcloneError("Need login and password or session-key and _userid")

    @property
    async def info(self) -> Account:
        """
        *coro
        return account information
        :return: Account object
        """
        async with self._session.get("https://findclone.ru/profile", headers=self.headers) as response:
            info = await self.__builder(response)
        self.__info = info
        return info

    async def upload(self,
                     file: [str, BufferedReader],
                     face_box_id: int = None,
                     timeout: float = 180) -> [Profiles, BytesIO]:
        """
        *coro
        upload image url, path or opened stream and return Profiles object or BytesIO object
        :param file: image direct download link, path or opened binary stream
        :param face_box_id: OPTIONAL, send facebox id if 2 or more faces are detected
        :param timeout: OPTIONAL - max timeout delay
        :return: Profiles object or BytesIO if 2 or more faces are detected
        """
        data = FormData()
        # The declared interface accepts a BufferedReader too; the previous
        # implementation called str.startswith unconditionally and crashed
        # with AttributeError on a stream, so dispatch on type first.
        if isinstance(file, str) and file.startswith("http"):
            # Direct link: download the image bytes before uploading
            async with self._session.get(file, headers=self.headers) as response:
                file = await response.read()
            data.add_field("uploaded_photo", file, filename=f"{random_string()}.png", content_type="image/png")
        elif isinstance(file, str):
            # Local path: hand the opened stream to aiohttp
            data.add_field("uploaded_photo", open(file, "rb"), filename=f"{random_string()}.png",
                           content_type="image/png")
        else:
            # Already an open binary stream: upload as-is
            data.add_field("uploaded_photo", file, filename=f"{random_string()}.png",
                           content_type="image/png")
        async with self._session.post("https://findclone.ru/upload2", data=data, headers=self.headers,
                                      timeout=timeout) as response:
            resp = await response.json()
            if resp.get("faceBoxes"):
                if face_box_id is not None:
                    # Caller picked a face: re-run the search for that facebox
                    async with self._session.get("https://findclone.ru/upload3", params={"id": face_box_id},
                                                 headers=self.headers) as response2:
                        resp = await self.__builder(response2)
                    return resp
                else:
                    # Several faces found: return the annotated image instead
                    img_bytes = paint_boxes(file, resp)  # return bytesIO object
                    return img_bytes
            resp = await self.__builder(response)
            return resp

    async def history(self, offset: int = 0, count: int = 100) -> Histories:
        """
        *coro
        return object histories search for account
        :param offset: int
        :param count: int
        :return: Histories object
        """
        async with self._session.get("https://findclone.ru/hist", params={"offset": offset, "count": count},
                                     headers=self.headers) as response:
            history = await self.__builder(response)
        return history

    async def search(self, search_id: [int, str], count: int = 128) -> Profiles:
        """
        *coro
        :param search_id: [int, str] search id
        :param count: [int] max Profiles count get
        :return: Profiles object
        """
        async with self._session.get("https://findclone.ru/search", params={"id": search_id, "count": count},
                                     headers=self.headers) as response:
            search_result = await self.__builder(response)
        return search_result

    @property
    def get_session(self) -> dict:
        """
        property
        return session-key and _userid account
        :return: dict {"session-key": session_key, "user-id": userid}
        """
        _session = {"session-key": self._session_key, "user-id": self._userid}
        return _session

    def __str__(self):
        # Readable dump of the last fetched account info
        return self.__info.__str__()

    async def __aenter__(self) -> 'FindcloneAsync':
        return self

    async def __aexit__(self, exc_type, exc_val, exc_tb) -> None:
        await self._session.close()

    async def close(self) -> None:
        await self._session.close()
|
[
"aiohttp.ClientSession",
"aiohttp.FormData"
] |
[((458, 473), 'aiohttp.ClientSession', 'ClientSession', ([], {}), '()\n', (471, 473), False, 'from aiohttp import ClientSession, FormData\n'), ((3353, 3363), 'aiohttp.FormData', 'FormData', ([], {}), '()\n', (3361, 3363), False, 'from aiohttp import ClientSession, FormData\n')]
|
#!/usr/bin/env python3
# Standalone script which rebuilds the history of maintainership
#
# Copyright (C) 2015 Intel Corporation
# Author: <NAME> <<EMAIL>>
#
# Licensed under the MIT license, see COPYING.MIT for details
import sys
import os.path
import optparse
import logging
# Make the layerindex checkout importable regardless of the working directory
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__))))
from common import common_setup, get_logger, DryRunRollbackException
common_setup()
from layerindex import utils, recipeparse
# Configure Django settings before the model imports below
utils.setup_django()
from django.db import transaction
import settings
from layerindex.models import Recipe, LayerBranch, LayerItem
from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink
from django.core.exceptions import ObjectDoesNotExist
# FIXME we shouldn't be hardcoded to expect RECIPE_MAINTAINER to be set in this file,
# as it may be in the recipe in future
MAINTAINERS_INCLUDE_PATH = 'conf/distro/include/maintainers.inc'
"""
Try to get recipe maintainer from line, if not found return None
"""
def get_recipe_maintainer(line, logger):
    """Parse one RECIPE_MAINTAINER assignment line.

    Returns (pn, name, email) when the line matches, otherwise logs the
    mismatch at debug level and returns None.
    """
    import re
    # Raw string so the \s escapes reach the regex engine untouched
    # (avoids the invalid-escape DeprecationWarning on newer Pythons).
    regex = re.compile(r'^RECIPE_MAINTAINER_pn-(?P<pn>.*)\s=\s"(?P<name>.+) <(?P<email>.*)>"$')
    match = regex.search(line)
    if match:
        return (match.group('pn'), match.group('name'), match.group('email'))
    else:
        logger.debug("line (%s) don't match" % (line))
        return None
"""
Get commit information from text.
Returns author_name, author_email, date and title.
"""
def get_commit_info(info, logger):
    """Extract (author_name, author_email, date, title) from `git show` text."""
    import re
    from datetime import datetime
    from email.utils import parsedate_tz, mktime_tz

    rows = info.split('\n')
    # git show layout: rows[1] = Author, rows[2] = Date, rows[4] = subject
    author = re.search("^Author: (?P<name>.*) <(?P<email>.*)>$", rows[1])
    when = re.search("^Date: (?P<date>.*)$", rows[2])
    subject = re.search("^ (?P<title>.*)$", rows[4])

    # RFC-2822 date string -> UTC naive datetime
    timestamp = mktime_tz(parsedate_tz(when.group('date')))
    return (author.group('name'), author.group('email'),
            datetime.utcfromtimestamp(timestamp), subject.group('title'))
def maintainers_inc_history(options, logger, maintplan, layerbranch, repodir, layerdir):
    """Rebuild maintainership history for one layer branch from maintainers.inc.

    Replays every commit that touched MAINTAINERS_INCLUDE_PATH in
    chronological order, creating a RecipeMaintainerHistory snapshot per
    commit and a RecipeMaintainer row per recipe named in the file.  Recipes
    not mentioned fall back to a RecipeMaintenanceLink match or the special
    'No maintainer' entry.  All writes happen inside one transaction which is
    rolled back when options.dry_run is set.
    """
    maintainers_full_path = os.path.join(layerdir, MAINTAINERS_INCLUDE_PATH)
    if not os.path.exists(maintainers_full_path):
        logger.warning('Maintainer style is maintainers.inc for plan %s but no maintainers.inc exists in for %s' % (maintplan, layerbranch))
        return
    logger.debug('Checking maintainers.inc history for %s' % layerbranch)
    # Commit hashes touching maintainers.inc, oldest first so the history
    # snapshots are created in chronological order.
    commits = utils.runcmd("git log --format='%%H' --reverse --date=rfc origin/master %s"
                % os.path.join(layerbranch.vcs_subdir, MAINTAINERS_INCLUDE_PATH),
                repodir, logger=logger)
    no_maintainer, _ = Maintainer.objects.get_or_create(name='No maintainer')
    try:
        with transaction.atomic():
            for commit in commits.strip().split("\n"):
                # Skip commits already imported by an earlier run
                if RecipeMaintainerHistory.objects.filter(layerbranch=layerbranch, sha1=commit):
                    continue
                logger.debug("Analysing commit %s ..." % (commit))
                (author_name, author_email, date, title) = \
                    get_commit_info(utils.runcmd("git show " + commit, repodir,
                        logger=logger), logger)
                author = Maintainer.create_or_update(author_name, author_email)
                rms = RecipeMaintainerHistory(title=title, date=date, author=author,
                        sha1=commit, layerbranch=layerbranch)
                rms.save()
                # Check out the commit so maintainers.inc reflects that point in time
                utils.runcmd("git checkout %s -f" % commit,
                        repodir, logger=logger)
                # Use a context manager so the file handle is closed promptly
                # (the previous bare open() leaked one handle per commit).
                with open(maintainers_full_path) as mf:
                    lines = [line.strip() for line in mf]
                for line in lines:
                    res = get_recipe_maintainer(line, logger)
                    if res:
                        (pn, name, email) = res
                        qry = Recipe.objects.filter(pn = pn, layerbranch = layerbranch)
                        if qry:
                            m = Maintainer.create_or_update(name, email)
                            rm = RecipeMaintainer()
                            rm.recipe = qry[0]
                            rm.maintainer = m
                            rm.history = rms
                            rm.save()
                            logger.debug("%s: Change maintainer to %s in commit %s." % \
                                (pn, m.name, commit))
                        else:
                            logger.debug("%s: Not found in %s." % \
                                (pn, layerbranch))
                # set missing recipes to no maintainer
                for recipe in layerbranch.recipe_set.all():
                    if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms):
                        rm = RecipeMaintainer()
                        rm.recipe = recipe
                        link_maintainer = RecipeMaintenanceLink.link_maintainer(recipe.pn, rms)
                        if link_maintainer:
                            rm.maintainer = link_maintainer.maintainer
                        else:
                            rm.maintainer = no_maintainer
                        rm.history = rms
                        rm.save()
                        if link_maintainer:
                            logger.debug("%s: linked to maintainer for %s" % (recipe.pn, link_maintainer.recipe.pn))
                        else:
                            logger.debug("%s: Not found maintainer in commit %s set to 'No maintainer'." % \
                                (recipe.pn, rms.sha1))
            # set new recipes to no maintainer if don't have one
            rms = RecipeMaintainerHistory.get_last(layerbranch)
            for recipe in layerbranch.recipe_set.all():
                if not RecipeMaintainer.objects.filter(recipe = recipe, history = rms):
                    rm = RecipeMaintainer()
                    rm.recipe = recipe
                    link_maintainer = RecipeMaintenanceLink.link_maintainer(recipe.pn, rms)
                    if link_maintainer:
                        rm.maintainer = link_maintainer.maintainer
                    else:
                        rm.maintainer = no_maintainer
                    rm.history = rms
                    rm.save()
                    if link_maintainer:
                        logger.debug("%s: New recipe linked to maintainer for %s" % (recipe.pn, link_maintainer.recipe.pn))
                    else:
                        logger.debug("%s: New recipe not found maintainer set to 'No maintainer'." % \
                            (recipe.pn))
            # Dry run: abort the transaction so nothing is committed
            if options.dry_run:
                raise DryRunRollbackException
    except DryRunRollbackException:
        pass
"""
Recreate Maintainership history from the beginning
"""
def maintainer_history(options, logger):
    """Recreate maintainership history for the selected maintenance plans.

    Holds the layer index lock for the whole update.  With --fullreload the
    previously imported history is deleted first (unless this is a dry run).
    """
    fetchdir = settings.LAYER_FETCH_DIR
    if options.plan:
        plans = MaintenancePlan.objects.filter(id=int(options.plan))
        if not plans.exists():
            logger.error('No maintenance plan with ID %s found' % options.plan)
            sys.exit(1)
    else:
        plans = MaintenancePlan.objects.filter(updates_enabled=True)
        if not plans.exists():
            logger.error('No enabled maintenance plans found')
            sys.exit(1)

    lockfile = utils.lock_file(os.path.join(fetchdir, "layerindex.lock"))
    if not lockfile:
        logger.error("Layer index lock timeout expired")
        sys.exit(1)
    try:
        for plan in plans:
            for plan_layerbranch in plan.maintenanceplanlayerbranch_set.all():
                layerbranch = plan_layerbranch.layerbranch
                if options.fullreload and not options.dry_run:
                    # Wipe previously imported history before re-importing
                    RecipeMaintainerHistory.objects.filter(layerbranch=layerbranch).delete()
                repodir = os.path.join(fetchdir, str(layerbranch.layer.get_fetch_dir()))
                layerdir = os.path.join(repodir, layerbranch.vcs_subdir)
                if plan.maintainer_style == 'I':
                    # maintainers.inc
                    maintainers_inc_history(options, logger, plan, layerbranch, repodir, layerdir)
                elif plan.maintainer_style == 'L':
                    # Layer-wide, don't need to do anything
                    logger.debug('Skipping maintainer processing for %s - plan %s maintainer style is layer-wide' % (layerbranch, plan))
                else:
                    raise Exception('Unknown maintainer style %s for maintenance plan %s' % (plan.maintainer_style, plan))
    finally:
        utils.unlock_file(lockfile)
if __name__ == "__main__":
    # Command-line entry point: parse options, then rebuild the history.
    parser = optparse.OptionParser(usage="""%prog [options]""")
    parser.add_option("-p", "--plan", dest="plan", action="store", default=None,
            help="Specify maintenance plan to operate on (default is all plans that have updates enabled)")
    parser.add_option("--fullreload", dest="fullreload", action="store_true", default=False,
            help="Reload upgrade data from scratch")
    parser.add_option("-d", "--debug", dest="loglevel", action="store_const",
            const=logging.DEBUG, default=logging.INFO,
            help="Enable debug output")
    parser.add_option("--dry-run", dest="dry_run", action="store_true", default=False,
            help="Do not write any data back to the database")

    logger = get_logger("MaintainerUpdate", settings)
    options, args = parser.parse_args(sys.argv)
    logger.setLevel(options.loglevel)

    maintainer_history(options, logger)
|
[
"layerindex.utils.lock_file",
"re.compile",
"rrs.models.RecipeMaintainer",
"rrs.models.RecipeMaintenanceLink.link_maintainer",
"rrs.models.RecipeMaintainer.objects.filter",
"layerindex.utils.setup_django",
"sys.exit",
"layerindex.models.Recipe.objects.filter",
"rrs.models.Maintainer.objects.get_or_create",
"layerindex.utils.unlock_file",
"common.common_setup",
"email.utils.parsedate_tz",
"rrs.models.Maintainer.create_or_update",
"rrs.models.RecipeMaintainerHistory",
"rrs.models.MaintenancePlan.objects.filter",
"rrs.models.RecipeMaintainerHistory.objects.filter",
"common.get_logger",
"django.db.transaction.atomic",
"rrs.models.RecipeMaintainerHistory.get_last",
"optparse.OptionParser",
"layerindex.utils.runcmd"
] |
[((427, 441), 'common.common_setup', 'common_setup', ([], {}), '()\n', (439, 441), False, 'from common import common_setup, get_logger, DryRunRollbackException\n'), ((485, 505), 'layerindex.utils.setup_django', 'utils.setup_django', ([], {}), '()\n', (503, 505), False, 'from layerindex import utils, recipeparse\n'), ((1126, 1215), 're.compile', 're.compile', (['"""^RECIPE_MAINTAINER_pn-(?P<pn>.*)\\\\s=\\\\s"(?P<name>.+) <(?P<email>.*)>"$"""'], {}), '(\n \'^RECIPE_MAINTAINER_pn-(?P<pn>.*)\\\\s=\\\\s"(?P<name>.+) <(?P<email>.*)>"$\')\n', (1136, 1215), False, 'import re\n'), ((1676, 1728), 're.compile', 're.compile', (['"""^Author: (?P<name>.*) <(?P<email>.*)>$"""'], {}), "('^Author: (?P<name>.*) <(?P<email>.*)>$')\n", (1686, 1728), False, 'import re\n'), ((1746, 1782), 're.compile', 're.compile', (['"""^Date: (?P<date>.*)$"""'], {}), "('^Date: (?P<date>.*)$')\n", (1756, 1782), False, 'import re\n'), ((1801, 1834), 're.compile', 're.compile', (['"""^ (?P<title>.*)$"""'], {}), "('^ (?P<title>.*)$')\n", (1811, 1834), False, 'import re\n'), ((2932, 2986), 'rrs.models.Maintainer.objects.get_or_create', 'Maintainer.objects.get_or_create', ([], {'name': '"""No maintainer"""'}), "(name='No maintainer')\n", (2964, 2986), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((7674, 7697), 'layerindex.utils.lock_file', 'utils.lock_file', (['lockfn'], {}), '(lockfn)\n', (7689, 7697), False, 'from layerindex import utils, recipeparse\n'), ((9009, 9055), 'optparse.OptionParser', 'optparse.OptionParser', ([], {'usage': '"""%prog [options]"""'}), "(usage='%prog [options]')\n", (9030, 9055), False, 'import optparse\n'), ((9790, 9830), 'common.get_logger', 'get_logger', (['"""MaintainerUpdate"""', 'settings'], {}), "('MaintainerUpdate', settings)\n", (9800, 9830), False, 'from common import common_setup, get_logger, DryRunRollbackException\n'), ((7427, 7479), 'rrs.models.MaintenancePlan.objects.filter', 
'MaintenancePlan.objects.filter', ([], {'updates_enabled': '(True)'}), '(updates_enabled=True)\n', (7457, 7479), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((7784, 7795), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7792, 7795), False, 'import sys\n'), ((8942, 8969), 'layerindex.utils.unlock_file', 'utils.unlock_file', (['lockfile'], {}), '(lockfile)\n', (8959, 8969), False, 'from layerindex import utils, recipeparse\n'), ((2096, 2118), 'email.utils.parsedate_tz', 'parsedate_tz', (['date_str'], {}), '(date_str)\n', (2108, 2118), False, 'from email.utils import parsedate_tz, mktime_tz\n'), ((3010, 3030), 'django.db.transaction.atomic', 'transaction.atomic', ([], {}), '()\n', (3028, 3030), False, 'from django.db import transaction\n'), ((5936, 5981), 'rrs.models.RecipeMaintainerHistory.get_last', 'RecipeMaintainerHistory.get_last', (['layerbranch'], {}), '(layerbranch)\n', (5968, 5981), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((7384, 7395), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7392, 7395), False, 'import sys\n'), ((7591, 7602), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (7599, 7602), False, 'import sys\n'), ((3106, 3182), 'rrs.models.RecipeMaintainerHistory.objects.filter', 'RecipeMaintainerHistory.objects.filter', ([], {'layerbranch': 'layerbranch', 'sha1': 'commit'}), '(layerbranch=layerbranch, sha1=commit)\n', (3144, 3182), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((3497, 3551), 'rrs.models.Maintainer.create_or_update', 'Maintainer.create_or_update', (['author_name', 'author_email'], {}), '(author_name, author_email)\n', (3524, 3551), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((3574, 3678), 
'rrs.models.RecipeMaintainerHistory', 'RecipeMaintainerHistory', ([], {'title': 'title', 'date': 'date', 'author': 'author', 'sha1': 'commit', 'layerbranch': 'layerbranch'}), '(title=title, date=date, author=author, sha1=commit,\n layerbranch=layerbranch)\n', (3597, 3678), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((3743, 3810), 'layerindex.utils.runcmd', 'utils.runcmd', (["('git checkout %s -f' % commit)", 'repodir'], {'logger': 'logger'}), "('git checkout %s -f' % commit, repodir, logger=logger)\n", (3755, 3810), False, 'from layerindex import utils, recipeparse\n'), ((3379, 3437), 'layerindex.utils.runcmd', 'utils.runcmd', (["('git show ' + commit)", 'repodir'], {'logger': 'logger'}), "('git show ' + commit, repodir, logger=logger)\n", (3391, 3437), False, 'from layerindex import utils, recipeparse\n'), ((6061, 6120), 'rrs.models.RecipeMaintainer.objects.filter', 'RecipeMaintainer.objects.filter', ([], {'recipe': 'recipe', 'history': 'rms'}), '(recipe=recipe, history=rms)\n', (6092, 6120), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((6151, 6169), 'rrs.models.RecipeMaintainer', 'RecipeMaintainer', ([], {}), '()\n', (6167, 6169), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((6247, 6300), 'rrs.models.RecipeMaintenanceLink.link_maintainer', 'RecipeMaintenanceLink.link_maintainer', (['recipe.pn', 'rms'], {}), '(recipe.pn, rms)\n', (6284, 6300), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((4118, 4171), 'layerindex.models.Recipe.objects.filter', 'Recipe.objects.filter', ([], {'pn': 'pn', 'layerbranch': 'layerbranch'}), '(pn=pn, layerbranch=layerbranch)\n', (4139, 4171), False, 'from layerindex.models import Recipe, LayerBranch, 
LayerItem\n'), ((4955, 5014), 'rrs.models.RecipeMaintainer.objects.filter', 'RecipeMaintainer.objects.filter', ([], {'recipe': 'recipe', 'history': 'rms'}), '(recipe=recipe, history=rms)\n', (4986, 5014), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((5049, 5067), 'rrs.models.RecipeMaintainer', 'RecipeMaintainer', ([], {}), '()\n', (5065, 5067), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((5153, 5206), 'rrs.models.RecipeMaintenanceLink.link_maintainer', 'RecipeMaintenanceLink.link_maintainer', (['recipe.pn', 'rms'], {}), '(recipe.pn, rms)\n', (5190, 5206), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((4241, 4281), 'rrs.models.Maintainer.create_or_update', 'Maintainer.create_or_update', (['name', 'email'], {}), '(name, email)\n', (4268, 4281), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((4316, 4334), 'rrs.models.RecipeMaintainer', 'RecipeMaintainer', ([], {}), '()\n', (4332, 4334), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n'), ((8044, 8107), 'rrs.models.RecipeMaintainerHistory.objects.filter', 'RecipeMaintainerHistory.objects.filter', ([], {'layerbranch': 'layerbranch'}), '(layerbranch=layerbranch)\n', (8082, 8107), False, 'from rrs.models import MaintenancePlan, Maintainer, RecipeMaintainerHistory, RecipeMaintainer, RecipeMaintenanceLink\n')]
|
__author__ = "<NAME>"
__copyright__ = "Copyright 2015-2019, <NAME>"
__email__ = "<EMAIL>"
__license__ = "MIT"
import os
import shutil
import signal
import marshal
import pickle
import json
import time
from base64 import urlsafe_b64encode, b64encode
from functools import lru_cache, partial
from itertools import filterfalse, count
from pathlib import Path
from snakemake.logging import logger
from snakemake.jobs import jobfiles
from snakemake.utils import listfiles
class Persistence:
    """On-disk bookkeeping for a workflow run, stored under ``.snakemake``.

    Responsibilities:

    * directory locking (so two Snakemake instances do not collide),
    * per-output-file metadata records (rule, code, input, params provenance),
    * "incomplete" markers for jobs that started but did not finish,
    * housekeeping of conda env, container image and shadow directories.
    """

    def __init__(
        self,
        nolock=False,
        dag=None,
        conda_prefix=None,
        singularity_prefix=None,
        shadow_prefix=None,
        warn_only=False,
    ):
        """Create the ``.snakemake`` folder layout and configure locking.

        nolock      -- disable locking entirely (lock/unlock become no-ops).
        dag         -- the job DAG; used to enumerate input/output files.
        *_prefix    -- optional custom locations for conda envs, container
                       images and shadow directories.
        warn_only   -- log instead of raising when the directory is locked.
        """
        self.path = os.path.abspath(".snakemake")
        if not os.path.exists(self.path):
            os.mkdir(self.path)
        self._lockdir = os.path.join(self.path, "locks")
        if not os.path.exists(self._lockdir):
            os.mkdir(self._lockdir)

        self.dag = dag
        self._lockfile = dict()
        # BUG FIX: the ``files`` property reads self._files, but it was never
        # initialized here, so the first access raised AttributeError.
        self._files = None

        self._metadata_path = os.path.join(self.path, "metadata")
        self._incomplete_path = os.path.join(self.path, "incomplete")

        self.conda_env_archive_path = os.path.join(self.path, "conda-archive")
        self.benchmark_path = os.path.join(self.path, "benchmarks")

        if conda_prefix is None:
            self.conda_env_path = os.path.join(self.path, "conda")
        else:
            self.conda_env_path = os.path.abspath(os.path.expanduser(conda_prefix))
        if singularity_prefix is None:
            self.container_img_path = os.path.join(self.path, "singularity")
        else:
            self.container_img_path = os.path.abspath(
                os.path.expanduser(singularity_prefix)
            )
        if shadow_prefix is None:
            self.shadow_path = os.path.join(self.path, "shadow")
        else:
            self.shadow_path = os.path.join(shadow_prefix, "shadow")

        # place to store any auxiliary information needed during a run (e.g. source tarballs)
        self.aux_path = os.path.join(self.path, "auxiliary")

        # migration of .snakemake folder structure: older versions kept
        # incomplete markers inside the metadata records; move them out.
        # The indicator file lets an interrupted migration resume.
        migration_indicator = Path(
            os.path.join(self._incomplete_path, "migration_underway")
        )
        if (
            os.path.exists(self._metadata_path)
            and not os.path.exists(self._incomplete_path)
        ) or migration_indicator.exists():
            os.makedirs(self._incomplete_path, exist_ok=True)
            migration_indicator.touch()

            self.migrate_v1_to_v2()

            migration_indicator.unlink()

        self._incomplete_cache = None

        for d in (
            self._metadata_path,
            self._incomplete_path,
            self.shadow_path,
            self.conda_env_archive_path,
            self.conda_env_path,
            self.container_img_path,
            self.aux_path,
        ):
            os.makedirs(d, exist_ok=True)

        if nolock:
            self.lock = self.noop
            self.unlock = self.noop
        if warn_only:
            self.lock = self.lock_warn_only
            self.unlock = self.noop

        # default to the cached reader; deactivate_cache() swaps in the
        # uncached variant.
        self._read_record = self._read_record_cached

    def migrate_v1_to_v2(self):
        """Move "incomplete" flags out of metadata records into their own tree."""
        logger.info("Migrating .snakemake folder to new format...")
        i = 0
        for path, _, filenames in os.walk(self._metadata_path):
            path = Path(path)
            for filename in filenames:
                with open(path / filename, "r") as f:
                    try:
                        record = json.load(f)
                    except json.JSONDecodeError:
                        continue  # not a properly formatted JSON file
                    if record.get("incomplete", False):
                        target_path = Path(self._incomplete_path) / path.relative_to(
                            self._metadata_path
                        )
                        os.makedirs(target_path, exist_ok=True)
                        shutil.copyfile(
                            path / filename,
                            target_path / filename,
                        )
                        i += 1
                        # this can take a while for large folders...
                        if (i % 10000) == 0 and i > 0:
                            logger.info("{} files migrated".format(i))

        logger.info("Migration complete")

    @property
    def files(self):
        """Set of all output files of the DAG (computed lazily, then cached)."""
        if self._files is None:
            self._files = set(self.dag.output_files)
        return self._files

    @property
    def locked(self):
        """True if another process holds locks that conflict with this run."""
        inputfiles = set(self.all_inputfiles())
        outputfiles = set(self.all_outputfiles())
        if os.path.exists(self._lockdir):
            # another run's *input* lock conflicts if it overlaps our outputs;
            # another run's *output* lock conflicts with our inputs or outputs.
            for lockfile in self._locks("input"):
                with open(lockfile) as lock:
                    for f in lock:
                        f = f.strip()
                        if f in outputfiles:
                            return True
            for lockfile in self._locks("output"):
                with open(lockfile) as lock:
                    for f in lock:
                        f = f.strip()
                        if f in outputfiles or f in inputfiles:
                            return True
        return False

    def lock_warn_only(self):
        """``lock`` replacement used with warn_only=True: log instead of raising."""
        if self.locked:
            logger.info(
                "Error: Directory cannot be locked. This usually "
                "means that another Snakemake instance is running on this directory. "
                "Another possibility is that a previous run exited unexpectedly."
            )

    def lock(self):
        """Acquire input and output locks, or raise IOError if already locked."""
        if self.locked:
            raise IOError("Another snakemake process " "has locked this directory.")
        self._lock(self.all_inputfiles(), "input")
        self._lock(self.all_outputfiles(), "output")

    def unlock(self, *args):
        """Remove all lock files created by this instance."""
        logger.debug("unlocking")
        for lockfile in self._lockfile.values():
            try:
                logger.debug("removing lock")
                os.remove(lockfile)
            except OSError as e:
                if e.errno != 2:  # missing file
                    raise e
        logger.debug("removed all locks")

    def cleanup_locks(self):
        """Delete the whole lock directory (used by --unlock)."""
        shutil.rmtree(self._lockdir)

    def cleanup_metadata(self, path):
        """Drop the stored metadata record for a single output file."""
        self._delete_record(self._metadata_path, path)

    def cleanup_shadow(self):
        """Remove and recreate the shadow directory."""
        if os.path.exists(self.shadow_path):
            shutil.rmtree(self.shadow_path)
            os.mkdir(self.shadow_path)

    def conda_cleanup_envs(self):
        """Delete conda envs and env archives no longer referenced by the DAG."""
        # cleanup envs: env dirs are named by the first 8 chars of their hash
        in_use = set(env.hash[:8] for env in self.dag.conda_envs.values())
        for d in os.listdir(self.conda_env_path):
            if len(d) >= 8 and d[:8] not in in_use:
                if os.path.isdir(os.path.join(self.conda_env_path, d)):
                    shutil.rmtree(os.path.join(self.conda_env_path, d))
                else:
                    os.remove(os.path.join(self.conda_env_path, d))

        # cleanup env archives: archive dirs are named by full content hash
        in_use = set(env.content_hash for env in self.dag.conda_envs.values())
        for d in os.listdir(self.conda_env_archive_path):
            if d not in in_use:
                shutil.rmtree(os.path.join(self.conda_env_archive_path, d))

    def started(self, job, external_jobid=None):
        """Mark all outputs of a job as incomplete (the job just started)."""
        for f in job.output:
            self._record(
                self._incomplete_path,
                {"external_jobid": external_jobid},
                f,
            )

    def finished(self, job, keep_metadata=True):
        """Record provenance metadata for a finished job and clear its markers.

        With keep_metadata=False only the incomplete markers are removed.
        """
        if not keep_metadata:
            for f in job.expanded_output:
                self._delete_record(self._incomplete_path, f)
            return

        version = str(job.rule.version) if job.rule.version is not None else None
        code = self._code(job.rule)
        input = self._input(job)
        log = self._log(job)
        params = self._params(job)
        shellcmd = job.shellcmd
        conda_env = self._conda_env(job)
        fallback_time = time.time()
        for f in job.expanded_output:
            rec_path = self._record_path(self._incomplete_path, f)
            starttime = os.path.getmtime(rec_path) if os.path.exists(rec_path) else None
            # Sometimes finished is called twice, if so, lookup the previous starttime
            if not os.path.exists(rec_path):
                starttime = self._read_record(self._metadata_path, f).get(
                    "starttime", None
                )

            endtime = f.mtime.local_or_remote() if f.exists else fallback_time

            self._record(
                self._metadata_path,
                {
                    "version": version,
                    "code": code,
                    "rule": job.rule.name,
                    "input": input,
                    "log": log,
                    "params": params,
                    "shellcmd": shellcmd,
                    "incomplete": False,
                    "starttime": starttime,
                    "endtime": endtime,
                    "job_hash": hash(job),
                    "conda_env": conda_env,
                    "container_img_url": job.container_img_url,
                },
                f,
            )
            self._delete_record(self._incomplete_path, f)

    def cleanup(self, job):
        """Drop incomplete markers and metadata for all outputs of a job."""
        for f in job.expanded_output:
            self._delete_record(self._incomplete_path, f)
            self._delete_record(self._metadata_path, f)

    def incomplete(self, job):
        """Return True if any existing output of the job is marked incomplete."""
        if self._incomplete_cache is None:
            self._cache_incomplete_folder()

        if self._incomplete_cache is False:  # cache deactivated

            def marked_incomplete(f):
                return self._exists_record(self._incomplete_path, f)

        else:

            def marked_incomplete(f):
                rec_path = self._record_path(self._incomplete_path, f)
                return rec_path in self._incomplete_cache

        return any(map(lambda f: f.exists and marked_incomplete(f), job.output))

    def _cache_incomplete_folder(self):
        """Pre-scan the incomplete-marker tree into a set for fast lookup."""
        self._incomplete_cache = {
            os.path.join(path, f)
            for path, dirnames, filenames in os.walk(self._incomplete_path)
            for f in filenames
        }

    def external_jobids(self, job):
        """Return the distinct external (cluster) job ids recorded for a job."""
        return list(
            set(
                self._read_record(self._incomplete_path, f).get("external_jobid", None)
                for f in job.output
            )
        )

    def metadata(self, path):
        """Return the full stored metadata record for an output file."""
        return self._read_record(self._metadata_path, path)

    def version(self, path):
        return self.metadata(path).get("version")

    def rule(self, path):
        return self.metadata(path).get("rule")

    def input(self, path):
        return self.metadata(path).get("input")

    def log(self, path):
        return self.metadata(path).get("log")

    def shellcmd(self, path):
        return self.metadata(path).get("shellcmd")

    def params(self, path):
        return self.metadata(path).get("params")

    def code(self, path):
        return self.metadata(path).get("code")

    def version_changed(self, job, file=None):
        """Yields output files with changed versions of bool if file given."""
        return _bool_or_gen(self._version_changed, job, file=file)

    def code_changed(self, job, file=None):
        """Yields output files with changed code of bool if file given."""
        return _bool_or_gen(self._code_changed, job, file=file)

    def input_changed(self, job, file=None):
        """Yields output files with changed input of bool if file given."""
        return _bool_or_gen(self._input_changed, job, file=file)

    def params_changed(self, job, file=None):
        """Yields output files with changed params of bool if file given."""
        return _bool_or_gen(self._params_changed, job, file=file)

    def _version_changed(self, job, file=None):
        assert file is not None
        return self.version(file) != job.rule.version

    def _code_changed(self, job, file=None):
        assert file is not None
        return self.code(file) != self._code(job.rule)

    def _input_changed(self, job, file=None):
        assert file is not None
        return self.input(file) != self._input(job)

    def _params_changed(self, job, file=None):
        assert file is not None
        return self.params(file) != self._params(job)

    def noop(self, *args):
        """Do nothing (used to disable lock/unlock with nolock=True)."""
        pass

    def _b64id(self, s):
        """URL-safe base64 encoding of str(s), used to build record filenames."""
        return urlsafe_b64encode(str(s).encode()).decode()

    # NOTE: lru_cache on instance methods keeps the instances alive for the
    # lifetime of the cache; acceptable here since Persistence is a singleton
    # per run.
    @lru_cache()
    def _code(self, rule):
        code = rule.run_func.__code__
        return b64encode(pickle_code(code)).decode()

    @lru_cache()
    def _conda_env(self, job):
        if job.conda_env:
            return b64encode(job.conda_env.content).decode()

    @lru_cache()
    def _input(self, job):
        return sorted(job.input)

    @lru_cache()
    def _log(self, job):
        return sorted(job.log)

    @lru_cache()
    def _params(self, job):
        return sorted(map(repr, job.params))

    @lru_cache()
    def _output(self, job):
        return sorted(job.output)

    def _record(self, subject, json_value, id):
        """Write a JSON record for ``id`` below the ``subject`` directory."""
        recpath = self._record_path(subject, id)
        os.makedirs(os.path.dirname(recpath), exist_ok=True)
        with open(recpath, "w") as f:
            json.dump(json_value, f)

    def _delete_record(self, subject, id):
        """Remove a record (and any now-empty parent dirs); ignore if missing."""
        try:
            recpath = self._record_path(subject, id)
            os.remove(recpath)
            recdirs = os.path.relpath(os.path.dirname(recpath), start=subject)
            if recdirs != ".":
                os.removedirs(recdirs)
        except OSError as e:
            if e.errno != 2:  # not missing
                raise e

    @lru_cache()
    def _read_record_cached(self, subject, id):
        return self._read_record_uncached(subject, id)

    def _read_record_uncached(self, subject, id):
        """Load a JSON record; return an empty dict if it does not exist."""
        if not self._exists_record(subject, id):
            return dict()
        with open(self._record_path(subject, id), "r") as f:
            return json.load(f)

    def _exists_record(self, subject, id):
        return os.path.exists(self._record_path(subject, id))

    def _locks(self, type):
        """Iterate over existing lock files of the given type ("input"/"output")."""
        return (
            f
            for f, _ in listfiles(
                os.path.join(self._lockdir, "{{n,[0-9]+}}.{}.lock".format(type))
            )
            if not os.path.isdir(f)
        )

    def _lock(self, files, type):
        """Write ``files`` into the first free numbered lock file of ``type``."""
        for i in count(0):
            lockfile = os.path.join(self._lockdir, "{}.{}.lock".format(i, type))
            if not os.path.exists(lockfile):
                self._lockfile[type] = lockfile
                with open(lockfile, "w") as lock:
                    print(*files, sep="\n", file=lock)
                return

    def _record_path(self, subject, id):
        """Map a file id to its record path, base64-encoded and chunked so each
        path component stays within the filesystem's filename-length limit."""
        max_len = (
            os.pathconf(subject, "PC_NAME_MAX") if os.name == "posix" else 255
        )  # maximum NTFS and FAT32 filename length
        if max_len == 0:
            max_len = 255

        b64id = self._b64id(id)
        # split into chunks of proper length
        b64id = [b64id[i : i + max_len - 1] for i in range(0, len(b64id), max_len - 1)]
        # prepend dirs with @ (does not occur in b64) to avoid conflict with b64-named files in the same dir
        b64id = ["@" + s for s in b64id[:-1]] + [b64id[-1]]
        path = os.path.join(subject, *b64id)
        return path

    def all_outputfiles(self):
        # we only look at output files that will be updated
        return jobfiles(self.dag.needrun_jobs, "output")

    def all_inputfiles(self):
        # we consider all input files, also of not running jobs
        return jobfiles(self.dag.jobs, "input")

    def deactivate_cache(self):
        """Disable record/incomplete caching (e.g. after files may have changed)."""
        self._read_record_cached.cache_clear()
        self._read_record = self._read_record_uncached
        self._incomplete_cache = False
def _bool_or_gen(func, job, file=None):
if file is None:
return (f for f in job.expanded_output if func(job, file=f))
else:
return func(job, file=file)
def pickle_code(code):
    """Pickle the essential, stable fields of a code object.

    Nested code objects found in ``co_consts`` (e.g. inner functions) are
    serialized recursively with the same scheme.
    """
    code_type = type(code)
    consts = []
    for const in code.co_consts:
        if type(const) == code_type:
            consts.append(pickle_code(const))
        else:
            consts.append(const)
    return pickle.dumps((code.co_code, code.co_varnames, consts, code.co_names))
|
[
"os.pathconf",
"pickle.dumps",
"base64.b64encode",
"snakemake.logging.logger.info",
"os.walk",
"os.remove",
"os.path.exists",
"os.listdir",
"pathlib.Path",
"os.path.isdir",
"os.mkdir",
"os.path.expanduser",
"snakemake.logging.logger.debug",
"os.path.dirname",
"shutil.copyfile",
"os.path.getmtime",
"time.time",
"snakemake.jobs.jobfiles",
"os.makedirs",
"os.path.join",
"json.load",
"itertools.count",
"os.removedirs",
"shutil.rmtree",
"os.path.abspath",
"functools.lru_cache",
"json.dump"
] |
[((12485, 12496), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (12494, 12496), False, 'from functools import lru_cache, partial\n'), ((12621, 12632), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (12630, 12632), False, 'from functools import lru_cache, partial\n'), ((12757, 12768), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (12766, 12768), False, 'from functools import lru_cache, partial\n'), ((12835, 12846), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (12844, 12846), False, 'from functools import lru_cache, partial\n'), ((12909, 12920), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (12918, 12920), False, 'from functools import lru_cache, partial\n'), ((13000, 13011), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (13009, 13011), False, 'from functools import lru_cache, partial\n'), ((13701, 13712), 'functools.lru_cache', 'lru_cache', ([], {}), '()\n', (13710, 13712), False, 'from functools import lru_cache, partial\n'), ((16191, 16260), 'pickle.dumps', 'pickle.dumps', (['(code.co_code, code.co_varnames, consts, code.co_names)'], {}), '((code.co_code, code.co_varnames, consts, code.co_names))\n', (16203, 16260), False, 'import pickle\n'), ((702, 731), 'os.path.abspath', 'os.path.abspath', (['""".snakemake"""'], {}), "('.snakemake')\n", (717, 731), False, 'import os\n'), ((830, 862), 'os.path.join', 'os.path.join', (['self.path', '"""locks"""'], {}), "(self.path, 'locks')\n", (842, 862), False, 'import os\n'), ((1032, 1067), 'os.path.join', 'os.path.join', (['self.path', '"""metadata"""'], {}), "(self.path, 'metadata')\n", (1044, 1067), False, 'import os\n'), ((1100, 1137), 'os.path.join', 'os.path.join', (['self.path', '"""incomplete"""'], {}), "(self.path, 'incomplete')\n", (1112, 1137), False, 'import os\n'), ((1177, 1217), 'os.path.join', 'os.path.join', (['self.path', '"""conda-archive"""'], {}), "(self.path, 'conda-archive')\n", (1189, 1217), False, 'import os\n'), ((1248, 1285), 'os.path.join', 
'os.path.join', (['self.path', '"""benchmarks"""'], {}), "(self.path, 'benchmarks')\n", (1260, 1285), False, 'import os\n'), ((2040, 2076), 'os.path.join', 'os.path.join', (['self.path', '"""auxiliary"""'], {}), "(self.path, 'auxiliary')\n", (2052, 2076), False, 'import os\n'), ((3224, 3283), 'snakemake.logging.logger.info', 'logger.info', (['"""Migrating .snakemake folder to new format..."""'], {}), "('Migrating .snakemake folder to new format...')\n", (3235, 3283), False, 'from snakemake.logging import logger\n'), ((3332, 3360), 'os.walk', 'os.walk', (['self._metadata_path'], {}), '(self._metadata_path)\n', (3339, 3360), False, 'import os\n'), ((4324, 4357), 'snakemake.logging.logger.info', 'logger.info', (['"""Migration complete"""'], {}), "('Migration complete')\n", (4335, 4357), False, 'from snakemake.logging import logger\n'), ((4652, 4681), 'os.path.exists', 'os.path.exists', (['self._lockdir'], {}), '(self._lockdir)\n', (4666, 4681), False, 'import os\n'), ((5832, 5857), 'snakemake.logging.logger.debug', 'logger.debug', (['"""unlocking"""'], {}), "('unlocking')\n", (5844, 5857), False, 'from snakemake.logging import logger\n'), ((6124, 6157), 'snakemake.logging.logger.debug', 'logger.debug', (['"""removed all locks"""'], {}), "('removed all locks')\n", (6136, 6157), False, 'from snakemake.logging import logger\n'), ((6196, 6224), 'shutil.rmtree', 'shutil.rmtree', (['self._lockdir'], {}), '(self._lockdir)\n', (6209, 6224), False, 'import shutil\n'), ((6361, 6393), 'os.path.exists', 'os.path.exists', (['self.shadow_path'], {}), '(self.shadow_path)\n', (6375, 6393), False, 'import os\n'), ((6628, 6659), 'os.listdir', 'os.listdir', (['self.conda_env_path'], {}), '(self.conda_env_path)\n', (6638, 6659), False, 'import os\n'), ((7075, 7114), 'os.listdir', 'os.listdir', (['self.conda_env_archive_path'], {}), '(self.conda_env_archive_path)\n', (7085, 7114), False, 'import os\n'), ((7969, 7980), 'time.time', 'time.time', ([], {}), '()\n', (7978, 7980), False, 
'import time\n'), ((14429, 14437), 'itertools.count', 'count', (['(0)'], {}), '(0)\n', (14434, 14437), False, 'from itertools import filterfalse, count\n'), ((15335, 15364), 'os.path.join', 'os.path.join', (['subject', '*b64id'], {}), '(subject, *b64id)\n', (15347, 15364), False, 'import os\n'), ((15492, 15533), 'snakemake.jobs.jobfiles', 'jobfiles', (['self.dag.needrun_jobs', '"""output"""'], {}), "(self.dag.needrun_jobs, 'output')\n", (15500, 15533), False, 'from snakemake.jobs import jobfiles\n'), ((15644, 15676), 'snakemake.jobs.jobfiles', 'jobfiles', (['self.dag.jobs', '"""input"""'], {}), "(self.dag.jobs, 'input')\n", (15652, 15676), False, 'from snakemake.jobs import jobfiles\n'), ((747, 772), 'os.path.exists', 'os.path.exists', (['self.path'], {}), '(self.path)\n', (761, 772), False, 'import os\n'), ((786, 805), 'os.mkdir', 'os.mkdir', (['self.path'], {}), '(self.path)\n', (794, 805), False, 'import os\n'), ((878, 907), 'os.path.exists', 'os.path.exists', (['self._lockdir'], {}), '(self._lockdir)\n', (892, 907), False, 'import os\n'), ((921, 944), 'os.mkdir', 'os.mkdir', (['self._lockdir'], {}), '(self._lockdir)\n', (929, 944), False, 'import os\n'), ((1354, 1386), 'os.path.join', 'os.path.join', (['self.path', '"""conda"""'], {}), "(self.path, 'conda')\n", (1366, 1386), False, 'import os\n'), ((1562, 1600), 'os.path.join', 'os.path.join', (['self.path', '"""singularity"""'], {}), "(self.path, 'singularity')\n", (1574, 1600), False, 'import os\n'), ((1804, 1837), 'os.path.join', 'os.path.join', (['self.path', '"""shadow"""'], {}), "(self.path, 'shadow')\n", (1816, 1837), False, 'import os\n'), ((1883, 1920), 'os.path.join', 'os.path.join', (['shadow_prefix', '"""shadow"""'], {}), "(shadow_prefix, 'shadow')\n", (1895, 1920), False, 'import os\n'), ((2177, 2234), 'os.path.join', 'os.path.join', (['self._incomplete_path', '"""migration_underway"""'], {}), "(self._incomplete_path, 'migration_underway')\n", (2189, 2234), False, 'import os\n'), ((2419, 2468), 
'os.makedirs', 'os.makedirs', (['self._incomplete_path'], {'exist_ok': '(True)'}), '(self._incomplete_path, exist_ok=True)\n', (2430, 2468), False, 'import os\n'), ((2907, 2936), 'os.makedirs', 'os.makedirs', (['d'], {'exist_ok': '(True)'}), '(d, exist_ok=True)\n', (2918, 2936), False, 'import os\n'), ((3381, 3391), 'pathlib.Path', 'Path', (['path'], {}), '(path)\n', (3385, 3391), False, 'from pathlib import Path\n'), ((5297, 5501), 'snakemake.logging.logger.info', 'logger.info', (['"""Error: Directory cannot be locked. This usually means that another Snakemake instance is running on this directory. Another possibility is that a previous run exited unexpectedly."""'], {}), "(\n 'Error: Directory cannot be locked. This usually means that another Snakemake instance is running on this directory. Another possibility is that a previous run exited unexpectedly.'\n )\n", (5308, 5501), False, 'from snakemake.logging import logger\n'), ((6407, 6438), 'shutil.rmtree', 'shutil.rmtree', (['self.shadow_path'], {}), '(self.shadow_path)\n', (6420, 6438), False, 'import shutil\n'), ((6451, 6477), 'os.mkdir', 'os.mkdir', (['self.shadow_path'], {}), '(self.shadow_path)\n', (6459, 6477), False, 'import os\n'), ((10076, 10097), 'os.path.join', 'os.path.join', (['path', 'f'], {}), '(path, f)\n', (10088, 10097), False, 'import os\n'), ((13192, 13216), 'os.path.dirname', 'os.path.dirname', (['recpath'], {}), '(recpath)\n', (13207, 13216), False, 'import os\n'), ((13283, 13307), 'json.dump', 'json.dump', (['json_value', 'f'], {}), '(json_value, f)\n', (13292, 13307), False, 'import json\n'), ((13430, 13448), 'os.remove', 'os.remove', (['recpath'], {}), '(recpath)\n', (13439, 13448), False, 'import os\n'), ((14022, 14034), 'json.load', 'json.load', (['f'], {}), '(f)\n', (14031, 14034), False, 'import json\n'), ((14815, 14850), 'os.pathconf', 'os.pathconf', (['subject', '"""PC_NAME_MAX"""'], {}), "(subject, 'PC_NAME_MAX')\n", (14826, 14850), False, 'import os\n'), ((1451, 1483), 
'os.path.expanduser', 'os.path.expanduser', (['conda_prefix'], {}), '(conda_prefix)\n', (1469, 1483), False, 'import os\n'), ((1686, 1724), 'os.path.expanduser', 'os.path.expanduser', (['singularity_prefix'], {}), '(singularity_prefix)\n', (1704, 1724), False, 'import os\n'), ((2270, 2305), 'os.path.exists', 'os.path.exists', (['self._metadata_path'], {}), '(self._metadata_path)\n', (2284, 2305), False, 'import os\n'), ((5940, 5969), 'snakemake.logging.logger.debug', 'logger.debug', (['"""removing lock"""'], {}), "('removing lock')\n", (5952, 5969), False, 'from snakemake.logging import logger\n'), ((5986, 6005), 'os.remove', 'os.remove', (['lockfile'], {}), '(lockfile)\n', (5995, 6005), False, 'import os\n'), ((8140, 8164), 'os.path.exists', 'os.path.exists', (['rec_path'], {}), '(rec_path)\n', (8154, 8164), False, 'import os\n'), ((8110, 8136), 'os.path.getmtime', 'os.path.getmtime', (['rec_path'], {}), '(rec_path)\n', (8126, 8136), False, 'import os\n'), ((8281, 8305), 'os.path.exists', 'os.path.exists', (['rec_path'], {}), '(rec_path)\n', (8295, 8305), False, 'import os\n'), ((10143, 10173), 'os.walk', 'os.walk', (['self._incomplete_path'], {}), '(self._incomplete_path)\n', (10150, 10173), False, 'import os\n'), ((13487, 13511), 'os.path.dirname', 'os.path.dirname', (['recpath'], {}), '(recpath)\n', (13502, 13511), False, 'import os\n'), ((13575, 13597), 'os.removedirs', 'os.removedirs', (['recdirs'], {}), '(recdirs)\n', (13588, 13597), False, 'import os\n'), ((14539, 14563), 'os.path.exists', 'os.path.exists', (['lockfile'], {}), '(lockfile)\n', (14553, 14563), False, 'import os\n'), ((2326, 2363), 'os.path.exists', 'os.path.exists', (['self._incomplete_path'], {}), '(self._incomplete_path)\n', (2340, 2363), False, 'import os\n'), ((6746, 6782), 'os.path.join', 'os.path.join', (['self.conda_env_path', 'd'], {}), '(self.conda_env_path, d)\n', (6758, 6782), False, 'import os\n'), ((7178, 7222), 'os.path.join', 'os.path.join', (['self.conda_env_archive_path', 
'd'], {}), '(self.conda_env_archive_path, d)\n', (7190, 7222), False, 'import os\n'), ((12709, 12741), 'base64.b64encode', 'b64encode', (['job.conda_env.content'], {}), '(job.conda_env.content)\n', (12718, 12741), False, 'from base64 import urlsafe_b64encode, b64encode\n'), ((14350, 14366), 'os.path.isdir', 'os.path.isdir', (['f'], {}), '(f)\n', (14363, 14366), False, 'import os\n'), ((3543, 3555), 'json.load', 'json.load', (['f'], {}), '(f)\n', (3552, 3555), False, 'import json\n'), ((3917, 3956), 'os.makedirs', 'os.makedirs', (['target_path'], {'exist_ok': '(True)'}), '(target_path, exist_ok=True)\n', (3928, 3956), False, 'import os\n'), ((3981, 4037), 'shutil.copyfile', 'shutil.copyfile', (['(path / filename)', '(target_path / filename)'], {}), '(path / filename, target_path / filename)\n', (3996, 4037), False, 'import shutil\n'), ((6819, 6855), 'os.path.join', 'os.path.join', (['self.conda_env_path', 'd'], {}), '(self.conda_env_path, d)\n', (6831, 6855), False, 'import os\n'), ((6909, 6945), 'os.path.join', 'os.path.join', (['self.conda_env_path', 'd'], {}), '(self.conda_env_path, d)\n', (6921, 6945), False, 'import os\n'), ((3771, 3798), 'pathlib.Path', 'Path', (['self._incomplete_path'], {}), '(self._incomplete_path)\n', (3775, 3798), False, 'from pathlib import Path\n')]
|
"""
Author: <NAME>
Created: 23/11/2020 11:06 AM
"""
import ksl_env
# add basgra nz functions
ksl_env.add_basgra_nz_path()
from supporting_functions.plotting import plot_multiple_results
from check_basgra_python.support_for_tests import establish_org_input, get_lincoln_broadfield, get_woodward_weather, _clean_harvest
from input_output_keys import matrix_weather_keys_pet
from basgra_python import run_basgra_nz
def run_nonirr_lincoln_low_basil(IBASAL):
    """Run a non-irrigated BASGRA simulation for Lincoln at a given basal area.

    IBASAL -- initial basal cover fraction (params['BASALI']).
    Returns the BASGRA output DataFrame with extra 'per_fc' and 'per_paw'
    columns (soil water as fractions of field capacity / max plant-available
    water).
    """
    params, matrix_weather, days_harvest, doy_irr = establish_org_input('lincoln')
    matrix_weather = get_lincoln_broadfield()

    # irrigation settings are present but IRRIGF=0 below disables irrigation
    for column, value in (('max_irr', 10), ('irr_trig', 0), ('irr_targ', 1)):
        matrix_weather.loc[:, column] = value
    matrix_weather = matrix_weather.loc[:, matrix_weather_keys_pet]

    params['IRRIGF'] = 0  # no irrigation
    params['BASALI'] = IBASAL  # initial basal cover

    days_harvest = _clean_harvest(days_harvest, matrix_weather)
    out = run_basgra_nz(params, matrix_weather, days_harvest, doy_irr, verbose=False)

    # derive fractional soil-water metrics
    out.loc[:, 'per_fc'] = out.loc[:, 'WAL'] / out.loc[:, 'WAFC']
    out.loc[:, 'per_paw'] = out.loc[:, 'PAW'] / out.loc[:, 'MXPAW']

    return out
if __name__ == '__main__':
    # Sweep a range of initial basal covers and plot the key outputs.
    basal_levels = [0, 0.1, 0.15, 0.2, 0.3]
    data = {}
    for basal in basal_levels:
        data['IBASAL:{}'.format(basal)] = run_nonirr_lincoln_low_basil(basal)
    plot_multiple_results(data, out_vars=['BASAL', 'DM', 'YIELD', 'per_paw'])
|
[
"check_basgra_python.support_for_tests.get_lincoln_broadfield",
"basgra_python.run_basgra_nz",
"check_basgra_python.support_for_tests._clean_harvest",
"check_basgra_python.support_for_tests.establish_org_input",
"ksl_env.add_basgra_nz_path",
"supporting_functions.plotting.plot_multiple_results"
] |
[((97, 125), 'ksl_env.add_basgra_nz_path', 'ksl_env.add_basgra_nz_path', ([], {}), '()\n', (123, 125), False, 'import ksl_env\n'), ((511, 541), 'check_basgra_python.support_for_tests.establish_org_input', 'establish_org_input', (['"""lincoln"""'], {}), "('lincoln')\n", (530, 541), False, 'from check_basgra_python.support_for_tests import establish_org_input, get_lincoln_broadfield, get_woodward_weather, _clean_harvest\n'), ((564, 588), 'check_basgra_python.support_for_tests.get_lincoln_broadfield', 'get_lincoln_broadfield', ([], {}), '()\n', (586, 588), False, 'from check_basgra_python.support_for_tests import establish_org_input, get_lincoln_broadfield, get_woodward_weather, _clean_harvest\n'), ((899, 943), 'check_basgra_python.support_for_tests._clean_harvest', '_clean_harvest', (['days_harvest', 'matrix_weather'], {}), '(days_harvest, matrix_weather)\n', (913, 943), False, 'from check_basgra_python.support_for_tests import establish_org_input, get_lincoln_broadfield, get_woodward_weather, _clean_harvest\n'), ((954, 1029), 'basgra_python.run_basgra_nz', 'run_basgra_nz', (['params', 'matrix_weather', 'days_harvest', 'doy_irr'], {'verbose': '(False)'}), '(params, matrix_weather, days_harvest, doy_irr, verbose=False)\n', (967, 1029), False, 'from basgra_python import run_basgra_nz\n'), ((1340, 1413), 'supporting_functions.plotting.plot_multiple_results', 'plot_multiple_results', (['data'], {'out_vars': "['BASAL', 'DM', 'YIELD', 'per_paw']"}), "(data, out_vars=['BASAL', 'DM', 'YIELD', 'per_paw'])\n", (1361, 1413), False, 'from supporting_functions.plotting import plot_multiple_results\n')]
|
import telnetlib
import time
def send_command_telnetlib(ipaddress, username, password, enable_pass, command):
    """Open a telnet session to *ipaddress*, log in, enter enable mode,
    run *command* (bytes) and return the decoded output.

    Bug fix: the original ignored the ``ipaddress`` argument and always
    connected to the hard-coded host 192.168.100.1.

    :param ipaddress: host to connect to
    :param username: login user name (str)
    :param password: login password (str)
    :param enable_pass: enable-mode password (str)
    :param command: command to execute, as bytes (it is concatenated with b"\\n")
    :return: everything read up to the next '#' prompt, decoded as UTF-8
    """
    t = telnetlib.Telnet(ipaddress)
    t.read_until(b"Username:")
    t.write(username.encode("ascii") + b"\n")
    t.read_until(b"Password:")
    t.write(password.encode("ascii") + b"\n")
    t.write(b"enable\n")
    t.read_until(b"Password:")
    t.write(enable_pass.encode("ascii") + b"\n")
    t.read_until(b"#")
    t.write(b"terminal length 0\n")  # disable paging so output is not chunked
    t.write(command + b"\n")
    time.sleep(1)  # give the device time to produce output
    result = t.read_until(b"#").decode("utf-8")
    return result
|
[
"time.sleep",
"telnetlib.Telnet"
] |
[((120, 153), 'telnetlib.Telnet', 'telnetlib.Telnet', (['"""192.168.100.1"""'], {}), "('192.168.100.1')\n", (136, 153), False, 'import telnetlib\n'), ((510, 523), 'time.sleep', 'time.sleep', (['(1)'], {}), '(1)\n', (520, 523), False, 'import time\n')]
|
from social_tornado.models import TornadoStorage
from skyportal.models import DBSession, ACL, Role, User, Token, Group
from skyportal.enum_types import LISTENER_CLASSES, sqla_enum_types
from baselayer.app.env import load_env
# ACL ids granted to the 'Super admin' role (plus one id per listener class).
all_acl_ids = [
    'Become user',
    'Comment',
    'Annotate',
    'Manage users',
    'Manage sources',
    'Manage groups',
    'Manage shifts',
    'Manage allocations',
    'Manage observing runs',
    'Upload data',
    'System admin',
    'Post taxonomy',
    'Delete taxonomy',
    'Classify',
] + [c.get_acl_id() for c in LISTENER_CLASSES]
# Mapping of role name -> list of ACL ids that role grants.
role_acls = {
    'Super admin': all_acl_ids,
    'Group admin': [
        'Annotate',
        'Comment',
        'Manage shifts',
        'Manage sources',
        'Upload data',
        'Post taxonomy',
        'Manage users',
        'Classify',
        'Manage observing runs',
    ],
    'Full user': [
        'Annotate',
        'Comment',
        'Upload data',
        'Classify',
        'Manage observing runs',
    ],
    'View only': [],
}
# Load the application configuration once at import time.
env, cfg = load_env()
def add_user(username, roles=None, auth=False, first_name=None, last_name=None):
    """Create (or fetch) the user *username*, attach the given roles,
    ensure membership in the sitewide public group, and return the
    freshly-queried User.

    Bug fix: the original used a mutable default argument (``roles=[]``);
    ``None`` avoids sharing one list object across calls.

    :param username: unique user name
    :param roles: iterable of role names to attach (default: none)
    :param auth: when True and the user is newly created, also create a
        google-oauth2 social-auth record for them
    :param first_name: optional first name for new users
    :param last_name: optional last name for new users
    :return: the User instance, re-queried from the session
    """
    if roles is None:
        roles = []
    user = User.query.filter(User.username == username).first()
    if user is None:
        user = User(username=username, first_name=first_name, last_name=last_name)
        if auth:
            TornadoStorage.user.create_social_auth(user, user.username, 'google-oauth2')
    for rolename in roles:
        role = Role.query.get(rolename)
        if role not in user.roles:
            user.roles.append(role)
    DBSession().add(user)
    DBSession().flush()
    # Make sure the sitewide public group exists and the user belongs to it.
    public_group = Group.query.filter(
        Group.name == cfg["misc"]["public_group_name"]
    ).first()
    if public_group is None:
        public_group = Group(name=cfg["misc"]["public_group_name"])
        DBSession().add(public_group)
        DBSession().flush()
    user.groups.append(public_group)
    DBSession().commit()
    # Re-query so the caller gets an instance bound to the current session.
    return User.query.filter(User.username == username).first()
def refresh_enums():
    """Add any enum values declared in ``sqla_enum_types`` to the matching
    database ENUM types; values already present are left untouched
    (``ADD VALUE IF NOT EXISTS``)."""
    # NOTE: the loop variable 'type' shadows the builtin; kept as-is.
    for type in sqla_enum_types:
        for key in type.enums:
            DBSession().execute(
                f"ALTER TYPE {type.name} ADD VALUE IF NOT EXISTS '{key}'"
            )
    DBSession().commit()
def make_super_user(username):
    """Initializes a super user with full permissions.

    Creates the default ACLs/roles if they do not exist yet, then creates
    (or updates) *username* with the 'Super admin' role and oauth login.
    """
    setup_permissions()  # make sure permissions already exist
    add_user(username, roles=['Super admin'], auth=True)
def provision_token():
    """Provision an initial administrative token.

    Idempotent: if the provisioned admin user and token already exist
    they are reused rather than recreated.

    :return: the Token instance for the provisioned admin
    """
    admin = add_user(
        'provisioned_admin',
        roles=['Super admin'],
        first_name="provisioned",
        last_name="admin",
    )
    # NOTE(review): the token name below looks like a redaction artifact
    # ('<PASSWORD>'); confirm the intended name before relying on it.
    token_name = 'Initial <PASSWORD> token'
    token = (
        Token.query.filter(Token.created_by == admin).filter(Token.name == token_name)
    ).first()
    if token is None:
        token_id = create_token(all_acl_ids, user_id=admin.id, name=token_name)
        token = Token.query.get(token_id)
    return token
def provision_public_group():
    """If public group name is set in the config file, create it."""
    # Re-load the config here rather than relying on the module-level cfg.
    env, cfg = load_env()
    public_group_name = cfg['misc.public_group_name']
    if public_group_name:
        pg = Group.query.filter(Group.name == public_group_name).first()
        if pg is None:
            DBSession().add(Group(name=public_group_name))
            DBSession().commit()
def setup_permissions():
    """Create default ACLs/Roles needed by application.
    If a given ACL or Role already exists, it will be skipped."""
    # Ensure every ACL listed in all_acl_ids exists.
    all_acls = [ACL.create_or_get(a) for a in all_acl_ids]
    DBSession().add_all(all_acls)
    DBSession().commit()
    # (Re)create each role and overwrite its ACL list from role_acls.
    for r, acl_ids in role_acls.items():
        role = Role.create_or_get(r)
        role.acls = [ACL.query.get(a) for a in acl_ids]
        DBSession().add(role)
    DBSession().commit()
def create_token(ACLs, user_id, name):
    """Create a token named *name* carrying the permissions *ACLs* for
    the user with id *user_id*, persist it, and return the token's id."""
    owner = User.query.get(user_id)
    token = Token(permissions=ACLs, name=name)
    token.created_by = owner
    owner.tokens.append(token)
    DBSession().add(owner)
    DBSession().add(token)
    DBSession().commit()
    return token.id
def delete_token(token_id):
    """Delete the token with the given id; silently a no-op when no such
    token exists (the existence re-check guards the delete)."""
    t = Token.query.get(token_id)
    if DBSession().query(Token).filter(Token.id == token_id).first():
        DBSession().delete(t)
        DBSession().commit()
|
[
"skyportal.models.User",
"skyportal.models.Token",
"skyportal.models.ACL.query.get",
"skyportal.models.Token.query.filter",
"skyportal.models.Group",
"skyportal.models.ACL.create_or_get",
"baselayer.app.env.load_env",
"social_tornado.models.TornadoStorage.user.create_social_auth",
"skyportal.models.User.query.get",
"skyportal.models.Group.query.filter",
"skyportal.models.User.query.filter",
"skyportal.models.Role.create_or_get",
"skyportal.models.DBSession",
"skyportal.models.Token.query.get",
"skyportal.models.Role.query.get"
] |
[((1044, 1054), 'baselayer.app.env.load_env', 'load_env', ([], {}), '()\n', (1052, 1054), False, 'from baselayer.app.env import load_env\n'), ((3150, 3160), 'baselayer.app.env.load_env', 'load_env', ([], {}), '()\n', (3158, 3160), False, 'from baselayer.app.env import load_env\n'), ((3937, 3971), 'skyportal.models.Token', 'Token', ([], {'permissions': 'ACLs', 'name': 'name'}), '(permissions=ACLs, name=name)\n', (3942, 3971), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3980, 4003), 'skyportal.models.User.query.get', 'User.query.get', (['user_id'], {}), '(user_id)\n', (3994, 4003), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((4173, 4198), 'skyportal.models.Token.query.get', 'Token.query.get', (['token_id'], {}), '(token_id)\n', (4188, 4198), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1236, 1303), 'skyportal.models.User', 'User', ([], {'username': 'username', 'first_name': 'first_name', 'last_name': 'last_name'}), '(username=username, first_name=first_name, last_name=last_name)\n', (1240, 1303), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1453, 1477), 'skyportal.models.Role.query.get', 'Role.query.get', (['rolename'], {}), '(rolename)\n', (1467, 1477), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1801, 1845), 'skyportal.models.Group', 'Group', ([], {'name': "cfg['misc']['public_group_name']"}), "(name=cfg['misc']['public_group_name'])\n", (1806, 1845), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((2990, 3015), 'skyportal.models.Token.query.get', 'Token.query.get', (['token_id'], {}), '(token_id)\n', (3005, 3015), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3596, 3616), 'skyportal.models.ACL.create_or_get', 'ACL.create_or_get', (['a'], {}), '(a)\n', (3613, 3616), False, 'from 
skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3755, 3776), 'skyportal.models.Role.create_or_get', 'Role.create_or_get', (['r'], {}), '(r)\n', (3773, 3776), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1147, 1191), 'skyportal.models.User.query.filter', 'User.query.filter', (['(User.username == username)'], {}), '(User.username == username)\n', (1164, 1191), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1333, 1409), 'social_tornado.models.TornadoStorage.user.create_social_auth', 'TornadoStorage.user.create_social_auth', (['user', 'user.username', '"""google-oauth2"""'], {}), "(user, user.username, 'google-oauth2')\n", (1371, 1409), False, 'from social_tornado.models import TornadoStorage\n'), ((1554, 1565), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (1563, 1565), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1580, 1591), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (1589, 1591), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1660, 1726), 'skyportal.models.Group.query.filter', 'Group.query.filter', (["(Group.name == cfg['misc']['public_group_name'])"], {}), "(Group.name == cfg['misc']['public_group_name'])\n", (1678, 1726), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1954, 1965), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (1963, 1965), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1987, 2031), 'skyportal.models.User.query.filter', 'User.query.filter', (['(User.username == username)'], {}), '(User.username == username)\n', (2004, 2031), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((2252, 2263), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (2261, 2263), False, 'from skyportal.models import DBSession, ACL, 
Role, User, Token, Group\n'), ((3643, 3654), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (3652, 3654), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3677, 3688), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (3686, 3688), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3798, 3814), 'skyportal.models.ACL.query.get', 'ACL.query.get', (['a'], {}), '(a)\n', (3811, 3814), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3867, 3878), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (3876, 3878), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((4052, 4063), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (4061, 4063), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((4075, 4086), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (4084, 4086), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((4098, 4109), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (4107, 4109), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1854, 1865), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (1863, 1865), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((1892, 1903), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (1901, 1903), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3254, 3305), 'skyportal.models.Group.query.filter', 'Group.query.filter', (['(Group.name == public_group_name)'], {}), '(Group.name == public_group_name)\n', (3272, 3305), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3366, 3395), 'skyportal.models.Group', 'Group', ([], {'name': 'public_group_name'}), '(name=public_group_name)\n', (3371, 3395), False, 'from 
skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3841, 3852), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (3850, 3852), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((4277, 4288), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (4286, 4288), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((4307, 4318), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (4316, 4318), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((2139, 2150), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (2148, 2150), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((2778, 2823), 'skyportal.models.Token.query.filter', 'Token.query.filter', (['(Token.created_by == admin)'], {}), '(Token.created_by == admin)\n', (2796, 2823), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3350, 3361), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (3359, 3361), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((3409, 3420), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (3418, 3420), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n'), ((4206, 4217), 'skyportal.models.DBSession', 'DBSession', ([], {}), '()\n', (4215, 4217), False, 'from skyportal.models import DBSession, ACL, Role, User, Token, Group\n')]
|
"""
Use this script to evaluate your model. It stores metrics in the file
`scores.txt`.
Input:
predictions (str): filepath. Should be a file that matches the submission
format;
groundtruths (str): filepath. Should be an annotation file.
Usage:
evaluate_classification.py <groundtruths> <predictions> <output_dir>
"""
import numpy as np
import pandas as pd
import os
import sys
OUTPUT_FILE = 'scores.txt'


def evaluate_from_files(groundtruths_filepath, predictions_filepath, output_dir):
    """Compute top-1 accuracy of the predictions against the ground truth
    and write the metric to ``<output_dir>/scores.txt``.

    Both files are CSVs whose first column is a sample id and whose second
    column is a class label.  Predictions missing from the submission are
    skipped (as in the original implementation).

    Fixes over the original: drops the no-op ``output_dir = output_dir``
    assignment and guards against a ZeroDivisionError when no ids overlap
    (accuracy is then reported as 0.0).

    :param groundtruths_filepath: path to the ground-truth CSV
    :param predictions_filepath: path to the submission CSV
    :param output_dir: directory the scores file is written into
    """
    ground_truth = pd.read_csv(groundtruths_filepath).to_numpy()
    submission = pd.read_csv(predictions_filepath).to_numpy()

    # Index both tables by the id in the first column.
    indexed_gt = {row[0]: row for row in ground_truth}
    indexed_sbm = {row[0]: row for row in submission}

    tp = 0.0
    fp = 0.0
    for im_idx, gt_row in indexed_gt.items():
        if im_idx not in indexed_sbm:
            continue  # unpredicted samples are simply skipped
        if gt_row[1] == indexed_sbm[im_idx][1]:
            tp += 1.
        else:
            fp += 1.

    total = tp + fp
    acc = tp / total if total > 0 else 0.0
    print('accuracy', acc)
    metrics = [("Top1 accuracy", acc)]
    with open(os.path.join(output_dir, OUTPUT_FILE), 'w') as f:
        for name, val in metrics:
            f.write(f"{name}: {val:.8f}\n")
    print("Metrics written to scores.txt.")
if __name__ == '__main__':
    # Usage: evaluate_classification.py <groundtruths> <predictions> <output_dir>
    args = sys.argv[1:]
    evaluate_from_files(args[0], args[1], args[2])
|
[
"os.path.join",
"pandas.read_csv"
] |
[((549, 583), 'pandas.read_csv', 'pd.read_csv', (['groundtruths_filepath'], {}), '(groundtruths_filepath)\n', (560, 583), True, 'import pandas as pd\n'), ((599, 632), 'pandas.read_csv', 'pd.read_csv', (['predictions_filepath'], {}), '(predictions_filepath)\n', (610, 632), True, 'import pandas as pd\n'), ((1285, 1322), 'os.path.join', 'os.path.join', (['output_dir', 'OUTPUT_FILE'], {}), '(output_dir, OUTPUT_FILE)\n', (1297, 1322), False, 'import os\n')]
|
import expressions
import abc
import copy
class Instruction(abc.ABC):
    """Abstract base class for interpreter instructions.

    NOTE(review): ``__init__`` is declared without ``self``; since it is
    abstract and always overridden it never runs, but it is unconventional
    -- confirm before reuse.
    """
    @abc.abstractmethod
    def __init__(): ...
    @abc.abstractmethod
    def wykonaj(self, zmienne) -> dict[str, int]:
        '''Evaluate the instruction against the variable mapping *zmienne*
        and return the (possibly updated) mapping.'''
        ...
    @abc.abstractmethod
    def __str__(self): ...
class If(Instruction):
    """Conditional instruction with scoped variable semantics.

    NOTE(review): the *true* branch runs when the condition evaluates to
    0, i.e. zero is treated as "true" here -- confirm this matches the
    semantics of the interpreted language.
    """
    def __init__(self, cond: expressions.Wyrazenie, branch_true: Instruction, branch_false: Instruction):
        self._cond = cond
        self._branch_true = branch_true
        self._branch_false = branch_false
    def wykonaj(self, zmienne):
        """Run the selected branch on a copy of *zmienne*; afterwards only
        variables that already existed in the outer scope are written back."""
        if self._cond.oblicz(zmienne) == 0:
            lokalne_zmienne = self._branch_true.wykonaj(copy.copy(zmienne))
        else:
            lokalne_zmienne = self._branch_false.wykonaj(copy.copy(zmienne))
        # Propagate only pre-existing variables; branch-local ones are dropped.
        for key in lokalne_zmienne:
            if key in zmienne:
                zmienne[key] = lokalne_zmienne[key]
        return zmienne
    def __str__(self):
        tab, nl = '\n\t\t', '\n'
        return f'if {str(self._cond)}\n\n\tthen\t{tab.join(str(self._branch_true).split(nl))}\n\n\telse\t{tab.join(str(self._branch_false).split(nl))}\n'
class While(Instruction):
    """Loop instruction with scoped variable semantics.

    The body runs on a copy of the variables; after each iteration only
    variables that already exist in the outer scope are written back.
    """
    def __init__(self, cond: expressions.Wyrazenie, branch: Instruction):
        self._cond = cond
        self._branch = branch
    def wykonaj(self, zmienne):
        """Repeat the body while the condition evaluates truthy (non-zero)."""
        while self._cond.oblicz(zmienne):
            lokalne_zmienne = self._branch.wykonaj(copy.copy(zmienne))
            # Propagate only pre-existing variables back to the outer scope.
            for key in lokalne_zmienne:
                if key in zmienne:
                    zmienne[key] = lokalne_zmienne[key]
        return zmienne
    def __str__(self):
        tab, nl = '\n\t\t', '\n'
        return f'while {str(self._cond)}\n\n\tdo\t{tab.join(str(self._branch).split(nl))}\n'
class Chain(Instruction):
    """Sequential composition: run each instruction in order, threading
    the variable mapping through the whole sequence."""
    def __init__(self, instructions: list[Instruction]):
        self._chain = instructions
    def wykonaj(self, zmienne):
        """Execute every instruction in turn on *zmienne* and return it."""
        for instruction in self._chain:
            zmienne = instruction.wykonaj(zmienne)
        return zmienne
    def __str__(self):
        return '\n'.join(str(instruction) for instruction in self._chain)
class Assign(Instruction):
    """Assignment: evaluate an expression and bind its value to a variable."""
    def __init__(self, var: expressions.Zmienna, val: expressions.Wyrazenie):
        self._var = var
        self._val = val
    def wykonaj(self, zmienne):
        """Store the evaluated expression under the variable's string name."""
        name = str(self._var)
        zmienne[name] = self._val.oblicz(zmienne)
        return zmienne
    def __str__(self):
        return f'{self._var} = {self._val}'
|
[
"copy.copy"
] |
[((670, 688), 'copy.copy', 'copy.copy', (['zmienne'], {}), '(zmienne)\n', (679, 688), False, 'import copy\n'), ((761, 779), 'copy.copy', 'copy.copy', (['zmienne'], {}), '(zmienne)\n', (770, 779), False, 'import copy\n'), ((1420, 1438), 'copy.copy', 'copy.copy', (['zmienne'], {}), '(zmienne)\n', (1429, 1438), False, 'import copy\n')]
|
"""
TODO
"""
from collections import Counter
import simplejson
import yaml
import flask
from sheepdog.errors import (
UserError,
)
def oph_raise_for_duplicates(object_pairs):
    """
    Given a list of ordered pairs, construct a dict as with the normal JSON
    ``object_pairs_hook``, but raise a ValueError if there are duplicate keys,
    with a message describing all violations.

    Bug fix: ``Counter.iteritems()`` is Python 2 only and raises
    AttributeError on Python 3; use ``items()``.
    """
    counter = Counter(p[0] for p in object_pairs)
    duplicates = [p for p in counter.items() if p[1] > 1]
    if duplicates:
        raise ValueError(
            'The document contains duplicate keys: {}'
            .format(','.join(d[0] for d in duplicates))
        )
    return {pair[0]: pair[1] for pair in object_pairs}
def parse_json(raw):
    """
    Return a python representation of a JSON document.
    Args:
        raw (str): string of raw JSON content
    Raises:
        UserError: if any exception is raised parsing the JSON body,
            including duplicate-key violations
    .. note:: Uses :func:`oph_raise_for_duplicates` in parser.
    """
    try:
        return simplejson.loads(
            raw, object_pairs_hook=oph_raise_for_duplicates
        )
    except Exception as e:
        # Any parse failure (syntax error, duplicate keys) surfaces as a
        # client-facing UserError.
        raise UserError('Unable to parse json: {}'.format(e))
def parse_request_json(expected_types=(dict, list)):
    """
    Return a python representation of the JSON body of the current Flask
    request (pulled from the global request object).
    Args:
        expected_types (tuple): acceptable top-level types of the document
    Return:
        dict or list: the parsed request body
    Raises:
        UserError: if any exception is raised parsing the JSON body
        UserError: if the result is not of the expected type
    """
    parsed = parse_json(flask.request.get_data())
    if not isinstance(parsed, expected_types):
        raise UserError('JSON parsed from request is an invalid type: {}'
                        .format(parsed.__class__.__name__))
    return parsed
def parse_request_yaml():
    """
    Return a python representation of a YAML POST body. Raise UserError if any
    exception is raised parsing the YAML body.
    """
    try:
        # safe_load avoids arbitrary object construction from untrusted YAML.
        return yaml.safe_load(flask.request.get_data())
    except Exception as e:
        raise UserError('Unable to parse yaml: {}'.format(e))
|
[
"collections.Counter",
"simplejson.loads",
"flask.request.get_data"
] |
[((416, 451), 'collections.Counter', 'Counter', (['(p[0] for p in object_pairs)'], {}), '(p[0] for p in object_pairs)\n', (423, 451), False, 'from collections import Counter\n'), ((1055, 1120), 'simplejson.loads', 'simplejson.loads', (['raw'], {'object_pairs_hook': 'oph_raise_for_duplicates'}), '(raw, object_pairs_hook=oph_raise_for_duplicates)\n', (1071, 1120), False, 'import simplejson\n'), ((1679, 1703), 'flask.request.get_data', 'flask.request.get_data', ([], {}), '()\n', (1701, 1703), False, 'import flask\n'), ((2113, 2137), 'flask.request.get_data', 'flask.request.get_data', ([], {}), '()\n', (2135, 2137), False, 'import flask\n')]
|
# -*- coding: utf-8 -*-
#
# This file is part of Sequana software
#
# Copyright (c) 2016 - Sequana Development Team
#
# File author(s):
# <NAME> <<EMAIL>>
# <NAME> <<EMAIL>>,
# <<EMAIL>>
#
# Distributed under the terms of the 3-clause BSD license.
# The full license is in the LICENSE file, distributed with this software.
#
# website: https://github.com/sequana/sequana
# documentation: http://sequana.readthedocs.io
#
##############################################################################
"""Retrieve data from sequana library"""
import os
import easydev
import glob
import collections
def sequana_data(filename=None, where=None):
    """Return full path of a sequana resource data file.
    :param str filename: a valid filename to be found
    :param str where: one of the registered data directory (see below)
    :return: the path of file. See also here below in the case where
        filename is set to "*".
    .. code-block:: python
        from sequana import sequana_data
        filename = sequana_data("test.bam")
    Type the function name with "*" parameter to get a list of
    available files. With the where argument set, the function returns a
    list of files. Without the where argument, a dictionary is returned where
    keys correspond to the registered directories::
        filenames = sequana_data("*", where="images")
    Registered directories are:
        - data
        - testing
        - data/adapters
        - images
        - scripts
    .. note:: this does not handle wildcards. The * means retrieve all files.
    """
    sequana_path = easydev.get_package_location('sequana')
    sharedir = os.sep.join([sequana_path , "sequana", 'resources'])
    # Registered resource sub-directories searched below.
    directories = ['data', 'testing', 'data/adapters', 'images', 'scripts']
    if filename == "*":
        # Listing mode: return every resource file, grouped by directory
        # (or just the list for a single directory when `where` is given).
        found = collections.defaultdict(list)
        if where is not None:
            directories = [where]
        for thisdir in directories:
            for filename in glob.glob(sharedir + "/%s/*" % thisdir):
                filename = os.path.split(filename)[1]
                to_ignore = ["__init__.py", "__pycache__"]
                if filename.endswith('.pyc') or filename in to_ignore:
                    pass
                else:
                    found[thisdir].append(os.path.split(filename)[1])
        if where is not None:
            return found[where]
        return found
    if filename is None:
        # Help mode: print every valid call and abort.
        for thisdir in directories:
            print('From %s directory:' % thisdir)
            for filename in glob.glob(sharedir + "/%s/*" % thisdir):
                filename = os.path.split(filename)[1]
                to_ignore = ["__init__.py", "__pycache__"]
                if filename.endswith('.pyc') or filename in to_ignore:
                    pass
                else:
                    print(' - sequana("%s", "%s")' % (os.path.split(filename)[1], thisdir))
        raise ValueError("Choose a valid file from the list above")
    # in the code one may use / or \
    if where:
        # Explicit directory: build the path directly (existence not checked).
        filename = os.sep.join([sharedir, where, filename])
    else:
        def _get_valid_file(filename, directory):
            # Return the full path if the file exists in `directory`, else False.
            filename = os.sep.join([sharedir, directory, filename])
            if os.path.exists(filename) is False:
                return False
            else:
                return filename
        # try to introspect the different directories
        # return filename if found otherwise raise error
        for thisdir in directories:
            if _get_valid_file(filename, thisdir):
                return _get_valid_file(filename, thisdir)
        raise Exception("unknown file %s. Type sequana_data() to get a list of valid names" % filename)
    return filename
|
[
"os.path.exists",
"easydev.get_package_location",
"os.path.split",
"os.sep.join",
"collections.defaultdict",
"glob.glob"
] |
[((1603, 1642), 'easydev.get_package_location', 'easydev.get_package_location', (['"""sequana"""'], {}), "('sequana')\n", (1631, 1642), False, 'import easydev\n'), ((1658, 1709), 'os.sep.join', 'os.sep.join', (["[sequana_path, 'sequana', 'resources']"], {}), "([sequana_path, 'sequana', 'resources'])\n", (1669, 1709), False, 'import os\n'), ((1828, 1857), 'collections.defaultdict', 'collections.defaultdict', (['list'], {}), '(list)\n', (1851, 1857), False, 'import collections\n'), ((3056, 3096), 'os.sep.join', 'os.sep.join', (['[sharedir, where, filename]'], {}), '([sharedir, where, filename])\n', (3067, 3096), False, 'import os\n'), ((1986, 2025), 'glob.glob', 'glob.glob', (["(sharedir + '/%s/*' % thisdir)"], {}), "(sharedir + '/%s/*' % thisdir)\n", (1995, 2025), False, 'import glob\n'), ((2552, 2591), 'glob.glob', 'glob.glob', (["(sharedir + '/%s/*' % thisdir)"], {}), "(sharedir + '/%s/*' % thisdir)\n", (2561, 2591), False, 'import glob\n'), ((3180, 3224), 'os.sep.join', 'os.sep.join', (['[sharedir, directory, filename]'], {}), '([sharedir, directory, filename])\n', (3191, 3224), False, 'import os\n'), ((3240, 3264), 'os.path.exists', 'os.path.exists', (['filename'], {}), '(filename)\n', (3254, 3264), False, 'import os\n'), ((2054, 2077), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (2067, 2077), False, 'import os\n'), ((2620, 2643), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (2633, 2643), False, 'import os\n'), ((2300, 2323), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (2313, 2323), False, 'import os\n'), ((2878, 2901), 'os.path.split', 'os.path.split', (['filename'], {}), '(filename)\n', (2891, 2901), False, 'import os\n')]
|
#!/usr/bin/python3
import cv2
if __name__ == '__main__':
    # Smoke test: succeeds only if this OpenCV build provides SIFT_create.
    cv2.SIFT_create()
|
[
"cv2.SIFT_create"
] |
[((61, 78), 'cv2.SIFT_create', 'cv2.SIFT_create', ([], {}), '()\n', (76, 78), False, 'import cv2\n')]
|
import io
import pytest
import pytorch_pfn_extras as ppe
from pytorch_pfn_extras.training.extensions import _ipython_module_available
from pytorch_pfn_extras.training.extensions.log_report import _pandas_available
@pytest.mark.skipif(
    not _ipython_module_available or not _pandas_available,
    reason="print report notebook import failed, "
    "maybe ipython is not installed"
)
def test_run_print_report_notebook():
    """Smoke test: PrintReportNotebook runs through a full training loop
    without raising; output values themselves are not checked."""
    max_epochs = 5
    iters_per_epoch = 5
    manager = ppe.training.ExtensionsManager(
        {}, {}, max_epochs, iters_per_epoch=iters_per_epoch)
    out = io.StringIO()  # capture the notebook report instead of stdout
    log_report = ppe.training.extensions.LogReport()
    manager.extend(log_report)
    extension = ppe.training.extensions.PrintReportNotebook(out=out)
    manager.extend(extension)
    for _ in range(max_epochs):
        for _ in range(iters_per_epoch):
            with manager.run_iteration():
                # Only test it runs without fail
                # The value is not tested now...
                pass
if __name__ == '__main__':
    # Allow running this test module directly without invoking pytest.
    pytest.main([__file__, '-v', '-s'])
|
[
"pytorch_pfn_extras.training.extensions.LogReport",
"pytest.main",
"pytorch_pfn_extras.training.ExtensionsManager",
"pytest.mark.skipif",
"io.StringIO",
"pytorch_pfn_extras.training.extensions.PrintReportNotebook"
] |
[((219, 380), 'pytest.mark.skipif', 'pytest.mark.skipif', (['(not _ipython_module_available or not _pandas_available)'], {'reason': '"""print report notebook import failed, maybe ipython is not installed"""'}), "(not _ipython_module_available or not _pandas_available,\n reason=\n 'print report notebook import failed, maybe ipython is not installed')\n", (237, 380), False, 'import pytest\n'), ((491, 579), 'pytorch_pfn_extras.training.ExtensionsManager', 'ppe.training.ExtensionsManager', (['{}', '{}', 'max_epochs'], {'iters_per_epoch': 'iters_per_epoch'}), '({}, {}, max_epochs, iters_per_epoch=\n iters_per_epoch)\n', (521, 579), True, 'import pytorch_pfn_extras as ppe\n'), ((595, 608), 'io.StringIO', 'io.StringIO', ([], {}), '()\n', (606, 608), False, 'import io\n'), ((626, 661), 'pytorch_pfn_extras.training.extensions.LogReport', 'ppe.training.extensions.LogReport', ([], {}), '()\n', (659, 661), True, 'import pytorch_pfn_extras as ppe\n'), ((709, 761), 'pytorch_pfn_extras.training.extensions.PrintReportNotebook', 'ppe.training.extensions.PrintReportNotebook', ([], {'out': 'out'}), '(out=out)\n', (752, 761), True, 'import pytorch_pfn_extras as ppe\n'), ((1060, 1095), 'pytest.main', 'pytest.main', (["[__file__, '-v', '-s']"], {}), "([__file__, '-v', '-s'])\n", (1071, 1095), False, 'import pytest\n')]
|
#!/usr/bin/env python3
# Copyright 2017 The Imaging Source Europe GmbH
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This example will show you how to list information about the available devices
#
import sys
import gi
# Pin the GObject-introspection API versions before importing Tcam/Gst below.
gi.require_version("Tcam", "0.1")
gi.require_version("Gst", "1.0")
from gi.repository import Tcam, Gst
def list_devices():
    """
    Print information about all available devices
    (model name, serial and the backend/connection type).
    """
    source = Gst.ElementFactory.make("tcambin")
    serials = source.get_device_serials()
    for single_serial in serials:
        # This returns someting like:
        # (True,
        #  name='DFK Z12GP031',
        #  identifier='The Imaging Source Europe GmbH-11410533',
        #  connection_type='aravis')
        # The identifier is the name given by the backend
        # The connection_type identifies the backend that is used.
        # Currently 'aravis', 'v4l2', 'libusb' and 'unknown' exist
        (return_value, model,
         identifier, connection_type) = source.get_device_info(single_serial)
        # return value would be False when a non-existant serial is used
        # since we are iterating get_device_serials this should not happen
        if return_value:
            print("Model: {} Serial: {} Type: {}".format(model,
                                                         single_serial,
                                                         connection_type))
if __name__ == "__main__":
    Gst.init(sys.argv)  # init gstreamer
    list_devices()
|
[
"gi.repository.Gst.ElementFactory.make",
"gi.repository.Gst.init",
"gi.require_version"
] |
[((723, 756), 'gi.require_version', 'gi.require_version', (['"""Tcam"""', '"""0.1"""'], {}), "('Tcam', '0.1')\n", (741, 756), False, 'import gi\n'), ((757, 789), 'gi.require_version', 'gi.require_version', (['"""Gst"""', '"""1.0"""'], {}), "('Gst', '1.0')\n", (775, 789), False, 'import gi\n'), ((929, 963), 'gi.repository.Gst.ElementFactory.make', 'Gst.ElementFactory.make', (['"""tcambin"""'], {}), "('tcambin')\n", (952, 963), False, 'from gi.repository import Tcam, Gst\n'), ((1956, 1974), 'gi.repository.Gst.init', 'Gst.init', (['sys.argv'], {}), '(sys.argv)\n', (1964, 1974), False, 'from gi.repository import Tcam, Gst\n')]
|
from .linear_torch import TorchGradientDescentAutogradRegression
import torch, math, random
class stochasticGradientDescent(TorchGradientDescentAutogradRegression):
    """Mini-batch stochastic gradient descent on top of the autograd
    regression base class.

    Fixes over the original:
      * ``xrange`` (Python 2 only) replaced with ``range``.
      * batch bookkeeping uses integer division, so slice indices are ints.
      * ``assign_batchs`` shuffles a concrete list -- ``random.shuffle``
        cannot shuffle a ``range`` object and its two-argument form was
        removed in Python 3.11 -- and batches are non-overlapping
        (stride ``batch_size`` instead of the original overlapping windows).
      * ``train`` no longer returns inside the first epoch, so all epochs
        actually run and the early-stopping check is reachable.
    """

    def __init__(self, X, Y, alpha, **kwargs):
        """Accepts optional ``batch_size`` and ``epochs_no`` keyword args."""
        super(stochasticGradientDescent, self).__init__(X, Y, alpha, **kwargs)
        n_samples = int(self.Y.shape[0])
        try:
            batch_size = int(kwargs['batch_size'])
            self.iterations = n_samples // batch_size
            self.batch_size = batch_size
        except KeyError:
            # Default: one sample per batch (pure SGD).
            self.iterations = n_samples
            self.batch_size = 1
        self.epochs_no = int(kwargs.get('epochs_no', 1))
        self.batches = None

    def assign_batchs(self):
        """Shuffle the sample indices and split them into
        ``self.iterations`` non-overlapping batches of ``self.batch_size``."""
        indices = list(range(int(self.Y.shape[0])))
        random.shuffle(indices)
        self.batches = [
            indices[i * self.batch_size:(i + 1) * self.batch_size]
            for i in range(self.iterations)
        ]
        return self.batches

    def ForwardFunction(self, i):
        """Mean-squared-error loss on batch *i* (forward pass)."""
        X = self.X[self.batches[i]]
        Y = self.Y[self.batches[i]]
        loss = torch.mean((Y - X.mm(self.theta.double())) ** 2)
        self.objective = loss
        return loss

    def get_grads(self, i):
        """Run the forward pass on batch *i*, back-propagate, and return
        the gradients w.r.t. theta."""
        # initialise_theta is provided by the base class; presumably it
        # (re)sets up theta's autograd state -- TODO confirm.
        self.initialise_theta()
        self.ForwardFunction(i)
        self.objective.backward()
        self.gradients = self.theta.grad
        return self.gradients

    def epoch(self):
        """One pass over all batches, updating theta after each."""
        for i in range(self.iterations):
            self.update_theta(i)
        return self.theta

    def update_theta(self, i):
        """One SGD step on batch *i*."""
        self.get_grads(i)
        # Clone so the update does not modify theta in place.
        current_theta = self.theta.clone()
        current_theta -= self.gradients * self.alpha
        self.theta = current_theta
        return current_theta

    def train(self):
        """Run ``epochs_no`` epochs of SGD, stopping early once the MSE
        drops to the error threshold; return the final parameters."""
        self.initialise_theta()
        error = 0.0001
        theta = self.theta
        for i in range(self.epochs_no):
            self.assign_batchs()
            print('')
            theta = self.epoch().double()
            print('Epoch - ' + str(i + 1))
            print(self.MSE(theta))
            if self.MSE(theta) <= error:
                break
        print('### Training complete')
        return theta
|
[
"random.shuffle"
] |
[((777, 809), 'random.shuffle', 'random.shuffle', (['r', 'random.random'], {}), '(r, random.random)\n', (791, 809), False, 'import torch, math, random\n')]
|
import pytest
import ast
from pytest_mock import MockerFixture
from pystratis.api.node import Node
from pystratis.api.node.responsemodels import *
from pystratis.api import FullNodeState, FeatureInitializationState, LogRule
from pystratis.core.networks import StraxMain, CirrusMain
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_status_no_publish(mocker: MockerFixture, network):
data = {
'agent': 'nodeagent',
'version': 'nodeversion',
'externalAddress': '[::0.0.0.0]',
'network': network.name,
'coin_ticker': 'STRAX' if 'Strax' in network.name else 'CRS',
'processId': '0',
'consensusHeight': 10,
'blockStoreHeight': 10,
'bestPeerHeight': 10,
'inboundPeers': [
{
'version': 1,
'remoteSocketEndpoint': '[::0.0.0.0]',
'tipHeight': 10
}
],
'outboundPeers': [
{
'version': 1,
'remoteSocketEndpoint': '[::0.0.0.0]',
'tipHeight': 10
}
],
'featuresData': [
{
'namespace': 'node.feature',
'state': FeatureInitializationState.Initialized
}
],
'dataDirectoryPath': '/my/data/dir',
'runningTime': 'a long time',
'difficulty': 100000.0000,
'protocolVersion': 123,
'testnet': False,
'relayFee': 0,
'state': FullNodeState.Initialized,
'inIbd': False,
'headerHeight': 1
}
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.status(publish=False)
assert response == StatusModel(**data)
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_status_publish(mocker: MockerFixture, network):
data = {
'agent': 'nodeagent',
'version': 'nodeversion',
'externalAddress': '[::0.0.0.0]',
'network': network.name,
'coin_ticker': 'STRAX' if 'Strax' in network.name else 'CRS',
'processId': '0',
'consensusHeight': 10,
'blockStoreHeight': 10,
'bestPeerHeight': 10,
'inboundPeers': [
{
'version': 1,
'remoteSocketEndpoint': '[::0.0.0.0]',
'tipHeight': 10
}
],
'outboundPeers': [
{
'version': 1,
'remoteSocketEndpoint': '[::0.0.0.0]',
'tipHeight': 10
}
],
'featuresData': [
{
'namespace': 'node.feature',
'state': FeatureInitializationState.Initialized
}
],
'dataDirectoryPath': '/my/data/dir',
'runningTime': 'a long time',
'difficulty': 100000.0000,
'protocolVersion': 123,
'testnet': False,
'relayFee': 0,
'state': FullNodeState.Initialized,
'inIbd': False,
'headerHeight': 1
}
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.status(publish=True)
assert response == StatusModel(**data)
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_get_blockheader(mocker: MockerFixture, network, generate_uint256):
data = {
'version': 1,
'merkleroot': generate_uint256,
'nonce': 0,
'bits': 'bits',
'previousblockhash': generate_uint256,
'time': 1,
}
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.get_blockheader(
block_hash=generate_uint256,
is_json_format=True
)
assert response == BlockHeaderModel(**data)
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_get_raw_transaction_verbose(mocker: MockerFixture, network, generate_coinbase_transaction, generate_uint256):
trxid = generate_uint256
data = generate_coinbase_transaction(trxid)
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.get_raw_transaction(trxid=trxid, verbose=True)
assert response == TransactionModel(**data)
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_get_raw_transaction_nonverbose(mocker: MockerFixture, network, generate_coinbase_transaction, generate_uint256):
trxid = generate_uint256
data = generate_coinbase_transaction(trxid)
hexified_data = bytes(str(data), 'ascii').hex()
mocker.patch.object(Node, 'get', return_value=hexified_data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.get_raw_transaction(trxid=trxid, verbose=False)
assert response == hexified_data
unserialized_response = ast.literal_eval(bytes.fromhex(hexified_data).decode('ascii'))
assert data == unserialized_response
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_decode_raw_transaction(mocker: MockerFixture, network, generate_uint256, generate_coinbase_transaction):
trxid = generate_uint256
data = generate_coinbase_transaction(trxid)
hexified_data = bytes(str(data), 'ascii').hex()
mocker.patch.object(Node, 'post', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.decode_raw_transaction(raw_hex=hexified_data)
assert response == TransactionModel(**data)
# noinspection PyUnresolvedReferences
node.post.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_validate_address(mocker: MockerFixture, network, generate_p2pkh_address):
address = generate_p2pkh_address(network=network)
data = {
'isvalid': True,
'address': address,
'scriptPubKey': 'a scriptPubKey',
'isscript': False,
'iswitness': False
}
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.validate_address(address=address)
assert response == ValidateAddressModel(**data)
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_get_txout(mocker: MockerFixture, network, generate_uint256, generate_hexstring, generate_p2pkh_address):
data = {
'bestblock': generate_uint256,
'confirmations': 1,
'value': 5,
'scriptPubKey': {
'asm': generate_hexstring(128),
'hex': generate_hexstring(128),
'type': 'pubkey',
'reqSigs': 1,
"addresses": [
generate_p2pkh_address(network=network)
]
},
'coinbase': False
}
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.get_txout(trxid=generate_uint256, vout=0, include_mempool=False)
assert response == GetTxOutModel(**data)
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_get_txout_proof(mocker: MockerFixture, network, generate_uint256, generate_hexstring):
data = generate_hexstring(128)
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.get_txout_proof(
txids=[
generate_uint256,
generate_uint256
],
block_hash=generate_uint256
)
assert response == data
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_shutdown(mocker: MockerFixture, network):
data = None
mocker.patch.object(Node, 'post', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
node.shutdown()
# noinspection PyUnresolvedReferences
node.post.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_stop(mocker: MockerFixture, network):
data = None
mocker.patch.object(Node, 'post', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
node.stop()
# noinspection PyUnresolvedReferences
node.post.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_log_levels(mocker: MockerFixture, network):
data = None
mocker.patch.object(Node, 'put', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
node.log_levels(log_rules=[LogRule(rule_name='TestRule', log_level='Debug', filename='filename')])
# noinspection PyUnresolvedReferences
node.put.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_log_rules(mocker: MockerFixture, network):
data = [
{
'ruleName': 'TestRule',
'logLevel': 'Debug',
'filename': 'filename'
}
]
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.log_rules()
assert response == [LogRule(**x) for x in data]
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_async_loops(mocker: MockerFixture, network):
data = [
{
'loopName': 'Loop1',
'status': 'Running'
}
]
mocker.patch.object(Node, 'get', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.async_loops()
assert response == [AsyncLoopsModel(**x) for x in data]
# noinspection PyUnresolvedReferences
node.get.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_rewind(mocker: MockerFixture, network):
data = "Rewind flag set, please restart the node."
mocker.patch.object(Node, 'put', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
response = node.rewind(height=2)
assert isinstance(response, str)
# noinspection PyUnresolvedReferences
node.put.assert_called_once()
@pytest.mark.parametrize('network', [StraxMain(), CirrusMain()], ids=['StraxMain', 'CirrusMain'])
def test_delete_datafolder_chain(mocker: MockerFixture, network):
data = None
mocker.patch.object(Node, 'delete', return_value=data)
node = Node(network=network, baseuri=mocker.MagicMock())
node.delete_datafolder_chain()
# noinspection PyUnresolvedReferences
node.delete.assert_called_once()
|
[
"pystratis.core.networks.StraxMain",
"pystratis.core.networks.CirrusMain",
"pystratis.api.LogRule"
] |
[((321, 332), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (330, 332), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((334, 346), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (344, 346), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((1940, 1951), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (1949, 1951), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((1953, 1965), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (1963, 1965), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((3555, 3566), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (3564, 3566), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((3568, 3580), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (3578, 3580), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((4272, 4283), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (4281, 4283), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((4285, 4297), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (4295, 4297), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((4878, 4889), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (4887, 4889), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((4891, 4903), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (4901, 4903), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((5670, 5681), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (5679, 5681), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((5683, 5695), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (5693, 5695), False, 'from pystratis.core.networks import 
StraxMain, CirrusMain\n'), ((6324, 6335), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (6333, 6335), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((6337, 6349), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (6347, 6349), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((7030, 7041), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (7039, 7041), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((7043, 7055), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (7053, 7055), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((7979, 7990), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (7988, 7990), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((7992, 8004), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (8002, 8004), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((8597, 8608), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (8606, 8608), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((8610, 8622), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (8620, 8622), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((8981, 8992), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (8990, 8992), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((8994, 9006), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (9004, 9006), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((9357, 9368), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (9366, 9368), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((9370, 9382), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (9380, 9382), False, 'from 
pystratis.core.networks import StraxMain, CirrusMain\n'), ((9824, 9835), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (9833, 9835), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((9837, 9849), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (9847, 9849), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((10399, 10410), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (10408, 10410), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((10412, 10424), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (10422, 10424), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((10946, 10957), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (10955, 10957), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((10959, 10971), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (10969, 10971), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((11419, 11430), 'pystratis.core.networks.StraxMain', 'StraxMain', ([], {}), '()\n', (11428, 11430), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((11432, 11444), 'pystratis.core.networks.CirrusMain', 'CirrusMain', ([], {}), '()\n', (11442, 11444), False, 'from pystratis.core.networks import StraxMain, CirrusMain\n'), ((10256, 10268), 'pystratis.api.LogRule', 'LogRule', ([], {}), '(**x)\n', (10263, 10268), False, 'from pystratis.api import FullNodeState, FeatureInitializationState, LogRule\n'), ((9636, 9705), 'pystratis.api.LogRule', 'LogRule', ([], {'rule_name': '"""TestRule"""', 'log_level': '"""Debug"""', 'filename': '"""filename"""'}), "(rule_name='TestRule', log_level='Debug', filename='filename')\n", (9643, 9705), False, 'from pystratis.api import FullNodeState, FeatureInitializationState, LogRule\n')]
|
import numpy as np
def get_conf_thresholded(conf, thresh_log_conf, dtype_np):
"""Normalizes a confidence score to (0..1).
Args:
conf (float):
Unnormalized confidence.
dtype_np (type):
Desired return type.
Returns:
confidence (np.float32):
Normalized joint confidence.
"""
# 1. / (1. + np.exp(-5000. * conf + 5))
# https://www.desmos.com/calculator/olqbvoffua
# + 9.5: 0.0019 => 0.5
# + 5 : 0.0010 => 0.5
# + 6.5: 0.0013 => 0.5
return np.where(
conf < dtype_np(0.),
dtype_np(0.),
dtype_np(1.) /
(dtype_np(1.) + np.exp(dtype_np(-5000.) * conf + dtype_np(9.5)))
).astype(dtype_np)
def get_confs(query_2d_full, frame_id, thresh_log_conf, mx_conf, dtype_np):
"""
Args:
query_2d_full (stealth.logic.skeleton.Skeleton):
Skeleton with confidences.
frame_id (int):
Frame id.
Returns:
confs (List[float]):
Confidences at frame_id.
"""
confs = np.zeros(query_2d_full.poses.shape[-1],
dtype=dtype_np)
is_normalized = query_2d_full.is_confidence_normalized()
if query_2d_full.has_confidence(frame_id):
for joint, conf in query_2d_full.confidence[frame_id].items():
cnf = dtype_np(conf) \
if is_normalized \
else get_conf_thresholded(conf, thresh_log_conf, dtype_np)
if mx_conf is not None and mx_conf < cnf:
mx_conf = dtype_np(cnf)
confs[joint] = dtype_np(cnf)
if mx_conf is None:
return confs
else:
assert isinstance(mx_conf, dtype_np)
return confs, mx_conf
|
[
"numpy.zeros"
] |
[((1052, 1107), 'numpy.zeros', 'np.zeros', (['query_2d_full.poses.shape[-1]'], {'dtype': 'dtype_np'}), '(query_2d_full.poses.shape[-1], dtype=dtype_np)\n', (1060, 1107), True, 'import numpy as np\n')]
|
"""
Copyright (c) 2017 <NAME>
https://github.com/jeffmer/micropython-ili9341
Jan 6, 2018
MIT License
https://github.com/jeffmer/micropython-ili9341/blob/master/LICENSE
"""
# This is an adapted version of the ILI934X driver as below.
# It works with multiple fonts and also works with the esp32 H/W SPI implementation
# Also includes a word wrap print function
# Proportional fonts are generated by Peter Hinch's Font-to-py
# MIT License; Copyright (c) 2017 <NAME>
# This file is part of MicroPython ILI934X driver
# Copyright (c) 2016 - 2017 <NAME>, <NAME>
#
# Licensed under the MIT license:
# http://www.opensource.org/licenses/mit-license.php
#
# Project home:
# https://github.com/tuupola/micropython-ili934x
import time
import ustruct
import tt32
import framebuf
from micropython import const
_RDDSDR = const(0x0f) # Read Display Self-Diagnostic Result
_SLPOUT = const(0x11) # Sleep Out
_GAMSET = const(0x26) # Gamma Set
_DISPOFF = const(0x28) # Display Off
_DISPON = const(0x29) # Display On
_CASET = const(0x2a) # Column Address Set
_PASET = const(0x2b) # Page Address Set
_RAMWR = const(0x2c) # Memory Write
_RAMRD = const(0x2e) # Memory Read
_MADCTL = const(0x36) # Memory Access Control
_VSCRSADD = const(0x37) # Vertical Scrolling Start Address
_PIXSET = const(0x3a) # Pixel Format Set
_PWCTRLA = const(0xcb) # Power Control A
_PWCRTLB = const(0xcf) # Power Control B
_DTCTRLA = const(0xe8) # Driver Timing Control A
_DTCTRLB = const(0xea) # Driver Timing Control B
_PWRONCTRL = const(0xed) # Power on Sequence Control
_PRCTRL = const(0xf7) # Pump Ratio Control
_PWCTRL1 = const(0xc0) # Power Control 1
_PWCTRL2 = const(0xc1) # Power Control 2
_VMCTRL1 = const(0xc5) # VCOM Control 1
_VMCTRL2 = const(0xc7) # VCOM Control 2
_FRMCTR1 = const(0xb1) # Frame Rate Control 1
_DISCTRL = const(0xb6) # Display Function Control
_ENA3G = const(0xf2) # Enable 3G
_PGAMCTRL = const(0xe0) # Positive Gamma Control
_NGAMCTRL = const(0xe1) # Negative Gamma Control
_CHUNK = const(1024) #maximum number of pixels per spi write
def color565(r, g, b):
return (r & 0xf8) << 8 | (g & 0xfc) << 3 | b >> 3
class ILI9341:
width = 320
height = 240
def __init__(self, spi, cs, dc, rst):
self.spi = spi
self.cs = cs
self.dc = dc
self.rst = rst
self.cs.init(self.cs.OUT, value=1)
self.dc.init(self.dc.OUT, value=0)
self.rst.init(self.rst.OUT, value=0)
self.reset()
self.init()
self._scroll = 0
self._buf = bytearray(_CHUNK * 2)
self._colormap = bytearray(b'\x00\x00\xFF\xFF') #default white foregraound, black background
self._x = 0
self._y = 0
self._font = tt32
self.scrolling = False
def set_color(self,fg,bg):
self._colormap[0] = bg>>8
self._colormap[1] = bg & 255
self._colormap[2] = fg>>8
self._colormap[3] = fg & 255
def set_pos(self,x,y):
self._x = x
self._y = y
def reset_scroll(self):
self.scrolling = False
self._scroll = 0
self.scroll(0)
def set_font(self, font):
self._font = font
def init(self):
for command, data in (
(_RDDSDR, b"\x03\x80\x02"),
(_PWCRTLB, b"\x00\xc1\x30"),
(_PWRONCTRL, b"\x64\x03\x12\x81"),
(_DTCTRLA, b"\x85\x00\x78"),
(_PWCTRLA, b"\x39\x2c\x00\x34\x02"),
(_PRCTRL, b"\x20"),
(_DTCTRLB, b"\x00\x00"),
(_PWCTRL1, b"\x23"),
(_PWCTRL2, b"\x10"),
(_VMCTRL1, b"\x3e\x28"),
(_VMCTRL2, b"\x86"),
#(_MADCTL, b"\x48"),
(_MADCTL, b"\x08"),
(_PIXSET, b"\x55"),
(_FRMCTR1, b"\x00\x18"),
(_DISCTRL, b"\x08\x82\x27"),
(_ENA3G, b"\x00"),
(_GAMSET, b"\x01"),
(_PGAMCTRL, b"\x0f\x31\x2b\x0c\x0e\x08\x4e\xf1\x37\x07\x10\x03\x0e\x09\x00"),
(_NGAMCTRL, b"\x00\x0e\x14\x03\x11\x07\x31\xc1\x48\x08\x0f\x0c\x31\x36\x0f")):
self._write(command, data)
self._write(_SLPOUT)
time.sleep_ms(120)
self._write(_DISPON)
def reset(self):
self.rst(0)
time.sleep_ms(50)
self.rst(1)
time.sleep_ms(50)
def _write(self, command, data=None):
self.dc(0)
self.cs(0)
self.spi.write(bytearray([command]))
self.cs(1)
if data is not None:
self._data(data)
def _data(self, data):
self.dc(1)
self.cs(0)
self.spi.write(data)
self.cs(1)
def _writeblock(self, x0, y0, x1, y1, data=None):
self._write(_CASET, ustruct.pack(">HH", x0, x1))
self._write(_PASET, ustruct.pack(">HH", y0, y1))
self._write(_RAMWR, data)
def _readblock(self, x0, y0, x1, y1):
self._write(_CASET, ustruct.pack(">HH", x0, x1))
self._write(_PASET, ustruct.pack(">HH", y0, y1))
if data is None:
return self._read(_RAMRD, (x1 - x0 + 1) * (y1 - y0 + 1) * 3)
def _read(self, command, count):
self.dc(0)
self.cs(0)
self.spi.write(bytearray([command]))
data = self.spi.read(count)
self.cs(1)
return data
def pixel(self, x, y, color=None):
if color is None:
r, b, g = self._readblock(x, y, x, y)
return color565(r, g, b)
if not 0 <= x < self.width or not 0 <= y < self.height:
return
self._writeblock(x, y, x, y, ustruct.pack(">H", color))
def fill_rectangle(self, x, y, w, h, color=None):
x = min(self.width - 1, max(0, x))
y = min(self.height - 1, max(0, y))
w = min(self.width - x, max(1, w))
h = min(self.height - y, max(1, h))
if color:
color = ustruct.pack(">H", color)
else:
color = self._colormap[0:2] #background
for i in range(_CHUNK):
self._buf[2*i]=color[0]; self._buf[2*i+1]=color[1]
chunks, rest = divmod(w * h, _CHUNK)
self._writeblock(x, y, x + w - 1, y + h - 1, None)
if chunks:
for count in range(chunks):
self._data(self._buf)
if rest != 0:
mv = memoryview(self._buf)
self._data(mv[:rest*2])
def erase(self):
self.fill_rectangle(0, 0, self.width, self.height)
def blit(self, bitbuff, x, y, w, h):
x = min(self.width - 1, max(0, x))
y = min(self.height - 1, max(0, y))
w = min(self.width - x, max(1, w))
h = min(self.height - y, max(1, h))
chunks, rest = divmod(w * h, _CHUNK)
self._writeblock(x, y, x + w - 1, y + h - 1, None)
written = 0
for iy in range(h):
for ix in range(w):
index = ix+iy*w - written
if index >=_CHUNK:
self._data(self._buf)
written += _CHUNK
index -= _CHUNK
c = bitbuff.pixel(ix,iy)
self._buf[index*2] = self._colormap[c*2]
self._buf[index*2+1] = self._colormap[c*2+1]
rest = w*h - written
if rest != 0:
mv = memoryview(self._buf)
self._data(mv[:rest*2])
def chars(self, str, x, y):
str_w = self._font.get_width(str)
div, rem = divmod(self._font.height(),8)
nbytes = div+1 if rem else div
buf = bytearray(str_w * nbytes)
pos = 0
for ch in str:
glyph, char_w = self._font.get_ch(ch)
for row in range(nbytes):
index = row*str_w + pos
for i in range(char_w):
buf[index+i] = glyph[nbytes*i+row]
pos += char_w
fb = framebuf.FrameBuffer(buf,str_w, self._font.height(), framebuf.MONO_VLSB)
self.blit(fb,x,y,str_w,self._font.height())
return x+str_w
def scroll(self, dy):
self._scroll = (self._scroll + dy) % self.height
self._write(_VSCRSADD, ustruct.pack(">H", self._scroll))
def next_line(self, cury, char_h):
global scrolling
if not self.scrolling:
res = cury + char_h
self.scrolling = (res >= self.height)
if self.scrolling:
self.scroll(char_h)
res = (self.height - char_h + self._scroll)%self.height
self.fill_rectangle(0, res, self.width, self._font.height())
return res
def write(self, text): #does character wrap, compatible with stream output
curx = self._x; cury = self._y
char_h = self._font.height()
width = 0
written = 0
for pos, ch in enumerate(text):
if ch == '\n':
if pos>0:
self.chars(text[written:pos],curx,cury)
curx = 0; written = pos+1; width = 0
cury = self.next_line(cury,char_h)
else:
char_w = self._font.get_width(ch)
if curx + width + char_w >= self.width:
self.chars(text[written:pos], curx,cury)
curx = 0 ; written = pos; width = char_h
cury = self.next_line(cury,char_h)
else:
width += char_w
if written<len(text):
curx = self.chars(text[written:], curx,cury)
self._x = curx; self._y = cury
def print(self, text): #does word wrap, leaves self._x unchanged
cury = self._y; curx = self._x
char_h = self._font.height()
char_w = self._font.max_width()
lines = text.split('\n')
for line in lines:
words = line.split(' ')
for word in words:
if curx + self._font.get_width(word) >= self.width:
curx = self._x; cury = self.next_line(cury,char_h)
while self._font.get_width(word) > self.width:
self.chars(word[:self.width//char_w],curx,cury)
word = word[self.width//char_w:]
cury = self.next_line(cury,char_h)
if len(word)>0:
curx = self.chars(word+' ', curx,cury)
curx = self._x; cury = self.next_line(cury,char_h)
self._y = cury
|
[
"time.sleep_ms",
"ustruct.pack",
"micropython.const"
] |
[((818, 827), 'micropython.const', 'const', (['(15)'], {}), '(15)\n', (823, 827), False, 'from micropython import const\n'), ((878, 887), 'micropython.const', 'const', (['(17)'], {}), '(17)\n', (883, 887), False, 'from micropython import const\n'), ((912, 921), 'micropython.const', 'const', (['(38)'], {}), '(38)\n', (917, 921), False, 'from micropython import const\n'), ((947, 956), 'micropython.const', 'const', (['(40)'], {}), '(40)\n', (952, 956), False, 'from micropython import const\n'), ((983, 992), 'micropython.const', 'const', (['(41)'], {}), '(41)\n', (988, 992), False, 'from micropython import const\n'), ((1017, 1026), 'micropython.const', 'const', (['(42)'], {}), '(42)\n', (1022, 1026), False, 'from micropython import const\n'), ((1059, 1068), 'micropython.const', 'const', (['(43)'], {}), '(43)\n', (1064, 1068), False, 'from micropython import const\n'), ((1099, 1108), 'micropython.const', 'const', (['(44)'], {}), '(44)\n', (1104, 1108), False, 'from micropython import const\n'), ((1135, 1144), 'micropython.const', 'const', (['(46)'], {}), '(46)\n', (1140, 1144), False, 'from micropython import const\n'), ((1171, 1180), 'micropython.const', 'const', (['(54)'], {}), '(54)\n', (1176, 1180), False, 'from micropython import const\n'), ((1219, 1228), 'micropython.const', 'const', (['(55)'], {}), '(55)\n', (1224, 1228), False, 'from micropython import const\n'), ((1276, 1285), 'micropython.const', 'const', (['(58)'], {}), '(58)\n', (1281, 1285), False, 'from micropython import const\n'), ((1318, 1328), 'micropython.const', 'const', (['(203)'], {}), '(203)\n', (1323, 1328), False, 'from micropython import const\n'), ((1359, 1369), 'micropython.const', 'const', (['(207)'], {}), '(207)\n', (1364, 1369), False, 'from micropython import const\n'), ((1400, 1410), 'micropython.const', 'const', (['(232)'], {}), '(232)\n', (1405, 1410), False, 'from micropython import const\n'), ((1449, 1459), 'micropython.const', 'const', (['(234)'], {}), '(234)\n', (1454, 1459), 
False, 'from micropython import const\n'), ((1500, 1510), 'micropython.const', 'const', (['(237)'], {}), '(237)\n', (1505, 1510), False, 'from micropython import const\n'), ((1550, 1560), 'micropython.const', 'const', (['(247)'], {}), '(247)\n', (1555, 1560), False, 'from micropython import const\n'), ((1594, 1604), 'micropython.const', 'const', (['(192)'], {}), '(192)\n', (1599, 1604), False, 'from micropython import const\n'), ((1635, 1645), 'micropython.const', 'const', (['(193)'], {}), '(193)\n', (1640, 1645), False, 'from micropython import const\n'), ((1676, 1686), 'micropython.const', 'const', (['(197)'], {}), '(197)\n', (1681, 1686), False, 'from micropython import const\n'), ((1716, 1726), 'micropython.const', 'const', (['(199)'], {}), '(199)\n', (1721, 1726), False, 'from micropython import const\n'), ((1756, 1766), 'micropython.const', 'const', (['(177)'], {}), '(177)\n', (1761, 1766), False, 'from micropython import const\n'), ((1802, 1812), 'micropython.const', 'const', (['(182)'], {}), '(182)\n', (1807, 1812), False, 'from micropython import const\n'), ((1850, 1860), 'micropython.const', 'const', (['(242)'], {}), '(242)\n', (1855, 1860), False, 'from micropython import const\n'), ((1886, 1896), 'micropython.const', 'const', (['(224)'], {}), '(224)\n', (1891, 1896), False, 'from micropython import const\n'), ((1935, 1945), 'micropython.const', 'const', (['(225)'], {}), '(225)\n', (1940, 1945), False, 'from micropython import const\n'), ((1982, 1993), 'micropython.const', 'const', (['(1024)'], {}), '(1024)\n', (1987, 1993), False, 'from micropython import const\n'), ((4127, 4145), 'time.sleep_ms', 'time.sleep_ms', (['(120)'], {}), '(120)\n', (4140, 4145), False, 'import time\n'), ((4225, 4242), 'time.sleep_ms', 'time.sleep_ms', (['(50)'], {}), '(50)\n', (4238, 4242), False, 'import time\n'), ((4271, 4288), 'time.sleep_ms', 'time.sleep_ms', (['(50)'], {}), '(50)\n', (4284, 4288), False, 'import time\n'), ((4689, 4716), 'ustruct.pack', 'ustruct.pack', 
(['""">HH"""', 'x0', 'x1'], {}), "('>HH', x0, x1)\n", (4701, 4716), False, 'import ustruct\n'), ((4746, 4773), 'ustruct.pack', 'ustruct.pack', (['""">HH"""', 'y0', 'y1'], {}), "('>HH', y0, y1)\n", (4758, 4773), False, 'import ustruct\n'), ((4880, 4907), 'ustruct.pack', 'ustruct.pack', (['""">HH"""', 'x0', 'x1'], {}), "('>HH', x0, x1)\n", (4892, 4907), False, 'import ustruct\n'), ((4937, 4964), 'ustruct.pack', 'ustruct.pack', (['""">HH"""', 'y0', 'y1'], {}), "('>HH', y0, y1)\n", (4949, 4964), False, 'import ustruct\n'), ((5533, 5558), 'ustruct.pack', 'ustruct.pack', (['""">H"""', 'color'], {}), "('>H', color)\n", (5545, 5558), False, 'import ustruct\n'), ((5827, 5852), 'ustruct.pack', 'ustruct.pack', (['""">H"""', 'color'], {}), "('>H', color)\n", (5839, 5852), False, 'import ustruct\n'), ((8053, 8085), 'ustruct.pack', 'ustruct.pack', (['""">H"""', 'self._scroll'], {}), "('>H', self._scroll)\n", (8065, 8085), False, 'import ustruct\n')]
|
import subprocess
import os
import sys
import datetime
import random
from configparser import ConfigParser
from datetime import datetime
import s03_heteroplasmy_likelihood, s04_sort_candidates, s05_select_sites, s06_location_conservation
import multiprocessing
def check_exist(cmd, thing):
try:
subprocess.check_output('%s %s' % (cmd, thing), shell=True)
except subprocess.CalledProcessError:
print("Error: did not find %s in path." % thing)
sys.exit(0)
def log_error(cmd, exec_output, exec_error, LOG_FILE):
with open(LOG_FILE, 'a') as f:
f.write('time: %s\ncmd: %s\noutput: %s\nexec error:%s\n' % (str(datetime.now()), cmd, exec_output, exec_error))
def log_final(no_error, argv):
log_output = os.path.join(SCRIPT_DIR, 'log_align_analyze_sort.txt')
with open(log_output, 'a') as f:
f.write('%s %s %s %s\n' % (no_error, argv[0], argv[1], str(datetime.now())))
def process(params):
    """Run the full heteroplasmy pipeline for one organellar genome.

    ``params`` must contain the keys: 'ref', 'annotation', 'dist',
    'read_file', 'out_html_name', 'random_id', 'read_dir', 'output_dir',
    'log_file', 'alignment_quality', 'score_threshold' and
    'percentage_threshold'.

    Stages: score heteroplasmy per sample (s03), sort candidates (s04),
    select sites (s05), compute site conservation (s06), then shell out to
    s07_plot_heteroplasmy.py to produce the HTML visualization.
    """
    ref = params['ref']
    annotation = params['annotation']
    dist = params['dist']
    read_file = params['read_file']
    out_html_name = params['out_html_name']
    random_id = params['random_id']
    READS_DIR = params['read_dir']
    OUTPUT_DIR = params['output_dir']
    LOG_FILE = params['log_file']
    alignment_quality = params['alignment_quality']
    score_threshold = params['score_threshold']
    percentage_threshold = params['percentage_threshold']
    # print(ref)
    # print(annotation)
    # print(dist)
    # print(read_file)
    # print(READS_DIR)
    # print(OUTPUT_DIR)
    # print(LOG_FILE)
    # print(alignment_quality)
    # print(score_threshold)
    # print(percentage_threshold)
    # read version
    # NOTE(review): `version` is parsed but only referenced by the
    # commented-out print below.
    with open('VERSION','r') as f:
        line = f.readline()
        version = float(line.strip())
    # #--------------------------------------------------------------
    SCRIPT_DIR = os.getcwd()
    print("\nComputing scores")
    # print("Version: "+str(version))
    output = 'None'
    if not os.path.exists(OUTPUT_DIR):
        os.makedirs(OUTPUT_DIR)
    ###########################################################
    # 03_compute_heteroplasmy likelihood
    # 04_sort_sites
    ###########################################################
    check_exist('ls', annotation)
    csv_dir = os.path.join(OUTPUT_DIR, "csv")
    if not os.path.exists(csv_dir):
        os.makedirs(csv_dir)
    print("Compute heteroplasmy likelihood")
    # Fan out one s03 scoring job per sample listed in read_file.
    P = multiprocessing.Pool()
    jobs = []
    with open(read_file, 'r') as f:
        for line in f:
            read1 = os.path.join(READS_DIR, line.strip() + '_1.fastq')
            read2 = os.path.join(READS_DIR, line.strip() + '_2.fastq')
            name = read1.split('/')[-1].split('_R1')[0]
            # name = line.strip()
            out_csv = os.path.join(csv_dir, name+'_f2_F0x900_q'+alignment_quality+'.csv')
            out_filtered_sam = os.path.join(OUTPUT_DIR, name+'_f2_F0x900_q'+alignment_quality+'.sam')
            no_error = True
            output = 'None'
            kw = {
                'ref': ref,
                'out_filtered_sam': out_filtered_sam,
                'annotation': annotation,
                'out_csv': out_csv,
            }
            # NOTE(review): the async results in `jobs` are never inspected,
            # so failures inside the s03 workers are silently ignored.
            jobs.append(P.apply_async(s03_heteroplasmy_likelihood.process, (), kw))
    P.close()
    P.join()
    # Sort score
    P = multiprocessing.Pool()
    jobs = []
    with open(read_file, 'r') as f:
        for line in f:
            read1 = os.path.join(READS_DIR, line.strip() + '_1.fastq')
            read2 = os.path.join(READS_DIR, line.strip() + '_2.fastq')
            name = read1.split('/')[-1].split('_R1')[0]
            # name = line.strip()
            out_csv = os.path.join(csv_dir, name+'_f2_F0x900_q'+alignment_quality+'.csv')
            kw2 = {
                'out_csv': out_csv
            }
            jobs.append(P.apply_async(s04_sort_candidates.process, (), kw2))
    P.close()
    P.join()
    print ('Finished computing heteroplasmy scores.\n')
    ###########################################################
    # 05_select_sites
    ###########################################################
    print('Select heteroplasmy sites.')
    # run select_sites.py
    result_dir = os.path.join(OUTPUT_DIR,"Result")
    if not os.path.exists(result_dir):
        os.makedirs(result_dir)
    # The organelle type is inferred from the output file name.
    # NOTE(review): if the name mentions neither organelle,
    # `organellar_type` stays None and `genome_name`/`cp_conserved`
    # below are never set.
    organellar_type = None
    if 'chloroplast' in out_html_name:
        organellar_type = 'chloroplast'
    if 'mitochondria' in out_html_name:
        organellar_type = 'mitochondria'
    select_sites_inputs = {
        'csv_dir' : csv_dir,
        'score_threshold': score_threshold,
        'percentage_threshold': percentage_threshold,
        'name_list' : None,
        'organellar_type': organellar_type,
        'result_dir': result_dir
    }
    het_file = s05_select_sites.process(select_sites_inputs)
    ###########################################################
    # 06_compute_site_conservation
    ###########################################################
    # run location_conservation.py
    print('\nCompute site conservation.')
    cp_conserved = None
    if organellar_type == 'chloroplast':
        cp_conserved = os.path.join(result_dir, "chloroplast_conserved_"+dist+".csv")
    if organellar_type == 'mitochondria':
        cp_conserved = os.path.join(result_dir, "mitochondria_conserved_"+dist+".csv")
    location_conservation_inputs = {
        'het_file': het_file,
        'func': dist,
        'output': cp_conserved
    }
    s06_location_conservation.main(location_conservation_inputs)
    ###########################################################
    # 07_plot
    ###########################################################
    # run plot_heteroplasmy.py
    print('\nPlot heteroplasmies.')
    plot_heteroplasmy = os.path.join(SCRIPT_DIR, 's07_plot_heteroplasmy.py')
    check_exist('ls',plot_heteroplasmy)
    # genome_name = '"Daucus carota chloroplast genome"'
    if organellar_type == 'chloroplast':
        genome_name = '"Daucus carota chloroplast genome"'
    if organellar_type == 'mitochondria':
        genome_name = '"Daucus carota mitochondrial genome"'
    out_html = os.path.join(OUTPUT_DIR, out_html_name)
    cmd = 'python %s %s %s %s %s %s' %(plot_heteroplasmy, genome_name, annotation, het_file, cp_conserved, out_html)
    print(cmd)
    print()
    try:
        output = subprocess.check_call(cmd, shell=True)
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit.
        no_error = False
        log_error(cmd, output, sys.exc_info(), LOG_FILE)
    print("\nSuccess!\n")
    print("Vizualization file : ", out_html)
if __name__ == '__main__':
    if len(sys.argv) != 13:
        print('Usage: python', sys.argv[0], 'ref', 'annotation', 'dist', 'read_file', 'output.html', 'random_id', 'READS_DIR', 'output_dir', 'log_file', 'alignment_quality', 'score_threshold', 'percentage_threshold')
        sys.exit(0)
    # BUG FIX: process() reads params['read_dir'], params['output_dir'] and
    # params['log_file'], but this dict previously used the keys
    # 'READS_DIR'/'OUTPUT_DIR'/'LOG_FILE', which raised KeyError before any
    # work started.
    params = {
        'ref': sys.argv[1],
        'annotation': sys.argv[2],
        'dist': sys.argv[3],
        'read_file': sys.argv[4],
        'out_html_name': sys.argv[5],
        'random_id': sys.argv[6],
        'read_dir': sys.argv[7],
        'output_dir': sys.argv[8],
        'log_file': sys.argv[9],
        'alignment_quality': sys.argv[10],
        'score_threshold': sys.argv[11],
        'percentage_threshold': sys.argv[12],
    }
    process(params)
|
[
"subprocess.check_output",
"os.path.exists",
"s06_location_conservation.main",
"os.makedirs",
"subprocess.check_call",
"os.path.join",
"os.getcwd",
"sys.exc_info",
"datetime.datetime.now",
"s05_select_sites.process",
"multiprocessing.Pool",
"sys.exit"
] |
[((775, 829), 'os.path.join', 'os.path.join', (['SCRIPT_DIR', '"""log_align_analyze_sort.txt"""'], {}), "(SCRIPT_DIR, 'log_align_analyze_sort.txt')\n", (787, 829), False, 'import os\n'), ((1898, 1909), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (1907, 1909), False, 'import os\n'), ((2311, 2342), 'os.path.join', 'os.path.join', (['OUTPUT_DIR', '"""csv"""'], {}), "(OUTPUT_DIR, 'csv')\n", (2323, 2342), False, 'import os\n'), ((2463, 2485), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {}), '()\n', (2483, 2485), False, 'import multiprocessing\n'), ((3372, 3394), 'multiprocessing.Pool', 'multiprocessing.Pool', ([], {}), '()\n', (3392, 3394), False, 'import multiprocessing\n'), ((4268, 4302), 'os.path.join', 'os.path.join', (['OUTPUT_DIR', '"""Result"""'], {}), "(OUTPUT_DIR, 'Result')\n", (4280, 4302), False, 'import os\n'), ((4844, 4889), 's05_select_sites.process', 's05_select_sites.process', (['select_sites_inputs'], {}), '(select_sites_inputs)\n', (4868, 4889), False, 'import s03_heteroplasmy_likelihood, s04_sort_candidates, s05_select_sites, s06_location_conservation\n'), ((5544, 5604), 's06_location_conservation.main', 's06_location_conservation.main', (['location_conservation_inputs'], {}), '(location_conservation_inputs)\n', (5574, 5604), False, 'import s03_heteroplasmy_likelihood, s04_sort_candidates, s05_select_sites, s06_location_conservation\n'), ((5839, 5891), 'os.path.join', 'os.path.join', (['SCRIPT_DIR', '"""s07_plot_heteroplasmy.py"""'], {}), "(SCRIPT_DIR, 's07_plot_heteroplasmy.py')\n", (5851, 5891), False, 'import os\n'), ((6209, 6248), 'os.path.join', 'os.path.join', (['OUTPUT_DIR', 'out_html_name'], {}), '(OUTPUT_DIR, out_html_name)\n', (6221, 6248), False, 'import os\n'), ((308, 367), 'subprocess.check_output', 'subprocess.check_output', (["('%s %s' % (cmd, thing))"], {'shell': '(True)'}), "('%s %s' % (cmd, thing), shell=True)\n", (331, 367), False, 'import subprocess\n'), ((2012, 2038), 'os.path.exists', 'os.path.exists', (['OUTPUT_DIR'], 
{}), '(OUTPUT_DIR)\n', (2026, 2038), False, 'import os\n'), ((2048, 2071), 'os.makedirs', 'os.makedirs', (['OUTPUT_DIR'], {}), '(OUTPUT_DIR)\n', (2059, 2071), False, 'import os\n'), ((2354, 2377), 'os.path.exists', 'os.path.exists', (['csv_dir'], {}), '(csv_dir)\n', (2368, 2377), False, 'import os\n'), ((2387, 2407), 'os.makedirs', 'os.makedirs', (['csv_dir'], {}), '(csv_dir)\n', (2398, 2407), False, 'import os\n'), ((4313, 4339), 'os.path.exists', 'os.path.exists', (['result_dir'], {}), '(result_dir)\n', (4327, 4339), False, 'import os\n'), ((4349, 4372), 'os.makedirs', 'os.makedirs', (['result_dir'], {}), '(result_dir)\n', (4360, 4372), False, 'import os\n'), ((5220, 5286), 'os.path.join', 'os.path.join', (['result_dir', "('chloroplast_conserved_' + dist + '.csv')"], {}), "(result_dir, 'chloroplast_conserved_' + dist + '.csv')\n", (5232, 5286), False, 'import os\n'), ((5348, 5415), 'os.path.join', 'os.path.join', (['result_dir', "('mitochondria_conserved_' + dist + '.csv')"], {}), "(result_dir, 'mitochondria_conserved_' + dist + '.csv')\n", (5360, 5415), False, 'import os\n'), ((6420, 6458), 'subprocess.check_call', 'subprocess.check_call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (6441, 6458), False, 'import subprocess\n'), ((6906, 6917), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (6914, 6917), False, 'import sys\n'), ((475, 486), 'sys.exit', 'sys.exit', (['(0)'], {}), '(0)\n', (483, 486), False, 'import sys\n'), ((2813, 2886), 'os.path.join', 'os.path.join', (['csv_dir', "(name + '_f2_F0x900_q' + alignment_quality + '.csv')"], {}), "(csv_dir, name + '_f2_F0x900_q' + alignment_quality + '.csv')\n", (2825, 2886), False, 'import os\n'), ((2912, 2988), 'os.path.join', 'os.path.join', (['OUTPUT_DIR', "(name + '_f2_F0x900_q' + alignment_quality + '.sam')"], {}), "(OUTPUT_DIR, name + '_f2_F0x900_q' + alignment_quality + '.sam')\n", (2924, 2988), False, 'import os\n'), ((3722, 3795), 'os.path.join', 'os.path.join', (['csv_dir', "(name + 
'_f2_F0x900_q' + alignment_quality + '.csv')"], {}), "(csv_dir, name + '_f2_F0x900_q' + alignment_quality + '.csv')\n", (3734, 3795), False, 'import os\n'), ((6527, 6541), 'sys.exc_info', 'sys.exc_info', ([], {}), '()\n', (6539, 6541), False, 'import sys\n'), ((662, 676), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (674, 676), False, 'from datetime import datetime\n'), ((934, 948), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (946, 948), False, 'from datetime import datetime\n')]
|
from datetime import datetime
from typing import Any, List
import json
import tempfile
from airflow.models.baseoperator import BaseOperator
from airflow.providers.mongo.hooks.mongo import MongoHook
import pandas
from airflow.providers.siasg.dw.hooks.dw import DWSIASGHook
class DWSIASGRelatorioParaMongoOperator(BaseOperator):
    '''Downloads a DW-SIASG report into a Mongo collection.

    :param id_conexao: id of a connection of type "dw_siasg"
    :type id_conexao: str
    :param id_relatorio: id of the report in DW-SIASG
    :type id_relatorio: str
    :param id_conexao_mongo: id of a connection of type "mongo"
    :type id_conexao_mongo: str
    :param banco: database name
    :type banco: str
    :param colecao: collection name
    :type colecao: str
    :param respostas_prompts: list of answers for the report's prompts
    :type respostas_prompts: List[str]
    :param timeout_segundos: maximum wait time in seconds
    :type timeout_segundos: int, optional
    :param truncar_colecao: `True` if the collection must be truncated
        before insertion and `False` otherwise
    :type truncar_colecao: bool
    '''
    # Attributes rendered through Jinja templating by Airflow.
    template_fields = [
        'id_relatorio', 'respostas_prompts', 'banco', 'colecao'
    ]
    id_conexao: str
    id_relatorio: str
    respostas_prompts: List[str]
    timeout_segundos: int
    id_conexao_mongo: str
    banco: str
    colecao: str
    truncar_colecao: bool
    def __init__(
        self,
        id_conexao: str,
        id_relatorio: str,
        id_conexao_mongo: str,
        banco: str = None,
        colecao: str = 'test',
        respostas_prompts: List[str] = None,
        timeout_segundos: int = 60,
        truncar_colecao: bool = False,
        **kwargs
    ) -> None:
        super().__init__(**kwargs)
        self.id_conexao = id_conexao
        self.id_relatorio = id_relatorio
        self.respostas_prompts = respostas_prompts
        self.timeout_segundos = timeout_segundos
        self.id_conexao_mongo = id_conexao_mongo
        self.banco = banco
        self.colecao = colecao
        self.truncar_colecao = truncar_colecao
    def execute(self, context: Any) -> None:
        """Download the report to a temp Excel file and insert its rows
        into the configured Mongo collection; pushes the number of
        inserted records to XCom under 'registros_inseridos'."""
        self.log.info(
            'Baixando relatório "%s" para coleção do mongo "%s" com as '
            'seguintes respostas para prompts: "%s"%s',
            self.id_relatorio, self.colecao, self.respostas_prompts,
            '. Truncando coleção' if self.truncar_colecao else ''
        )
        # Templated values may arrive as a JSON string; decode in that case.
        respostas_prompts = json.loads(self.respostas_prompts) \
            if isinstance(self.respostas_prompts, str) \
            else self.respostas_prompts
        with tempfile.NamedTemporaryFile(mode='wb') as arquivo:
            # Timestamp taken before the download so every inserted row
            # shares the same value.
            instante = datetime.now()
            with DWSIASGHook(self.id_conexao) as hook:
                local, _ = hook.baixa_para_excel(
                    self.id_relatorio, arquivo.name, respostas_prompts,
                    self.timeout_segundos
                )
            df = pandas.read_excel(local)
            # Strip dots from column names (presumably because Mongo
            # restricts dots in field names — TODO confirm).
            df.columns = df.columns.str.replace('.', '', regex=False)
            df['Timestamp'] = instante
            with MongoHook(self.id_conexao_mongo) as hook:
                if self.truncar_colecao:
                    hook.delete_many(self.colecao, {}, self.banco)
                if len(df) > 0:
                    inseridos = hook.insert_many(
                        self.colecao, df.to_dict('records'), self.banco
                    ).inserted_ids
                else:
                    inseridos = []
        self.log.info(
            'Relatório transferido com sucesso, tendo produzido %s registros',
            len(inseridos)
        )
        self.xcom_push(context, 'registros_inseridos', len(inseridos))
|
[
"json.loads",
"datetime.datetime.now",
"tempfile.NamedTemporaryFile",
"airflow.providers.mongo.hooks.mongo.MongoHook",
"pandas.read_excel",
"airflow.providers.siasg.dw.hooks.dw.DWSIASGHook"
] |
[((2489, 2523), 'json.loads', 'json.loads', (['self.respostas_prompts'], {}), '(self.respostas_prompts)\n', (2499, 2523), False, 'import json\n'), ((2637, 2675), 'tempfile.NamedTemporaryFile', 'tempfile.NamedTemporaryFile', ([], {'mode': '"""wb"""'}), "(mode='wb')\n", (2664, 2675), False, 'import tempfile\n'), ((2711, 2725), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (2723, 2725), False, 'from datetime import datetime\n'), ((2982, 3006), 'pandas.read_excel', 'pandas.read_excel', (['local'], {}), '(local)\n', (2999, 3006), False, 'import pandas\n'), ((3123, 3155), 'airflow.providers.mongo.hooks.mongo.MongoHook', 'MongoHook', (['self.id_conexao_mongo'], {}), '(self.id_conexao_mongo)\n', (3132, 3155), False, 'from airflow.providers.mongo.hooks.mongo import MongoHook\n'), ((2744, 2772), 'airflow.providers.siasg.dw.hooks.dw.DWSIASGHook', 'DWSIASGHook', (['self.id_conexao'], {}), '(self.id_conexao)\n', (2755, 2772), False, 'from airflow.providers.siasg.dw.hooks.dw import DWSIASGHook\n')]
|
#!/usr/bin/python3
import sys
import glob
import os
import re
def main():
    """Generate Makefile dependency rules for the .bsv files in a directory.

    argv[1] is the source directory, argv[2] the build directory, and the
    optional argv[3] is the module substituted for the `RUN_TEST macro.
    Prints one '<build>/<mod>.bo: ...' rule per module plus an OBJS= line
    listing the objects in dependency order.
    """
    directory = sys.argv[1]
    builddir = sys.argv[2]
    extra_module = ""
    if(len(sys.argv) > 3):
        extra_module = sys.argv[3]
    # Map: module name -> list of modules it imports (same directory only).
    projectModules = {}
    for filename in glob.glob(os.path.join(directory, '*.bsv')):
        m = re.match(".*/(.*).bsv", filename)
        modName = m.group(1).strip()
        projectModules[modName] = []
        with open(filename, "r") as f:
            for line in f:
                if line.strip().startswith("import"):
                    m = re.match("import(.*)::", line.strip())
                    if m:
                        mod = m.group(1).strip()
                        if mod == "`RUN_TEST":
                            mod = extra_module
                        projectModules[modName].append(mod)
    # Remove duplicates
    for module, deps in projectModules.items():
        projectModules[module] = list(set(deps))
    # Remove non project Dependencies
    for module, deps in projectModules.items():
        old = list(deps)
        for dep in old:
            if not dep in projectModules:
                deps.remove(dep)
    # Create List of modules for dependency resolution
    for m, d in projectModules.items():
        print("{}/{}.bo: {}/{}.bsv {}".format(builddir, m, directory, m, " ".join(map(lambda x : "{}/{}.bo".format(builddir, x), d))))
    depList = []
    # Produce dependency list
    # Repeatedly peel off a module with no remaining dependencies
    # (a simple topological sort).
    while len(projectModules.keys()) > 0:
        # Look for Module without dependency
        found = False
        for m, d in projectModules.items():
            if not d:
                found = True
                depList.append(m)
                # Deleting during iteration is safe only because of the
                # `break` below, which stops the iterator before its next step.
                del projectModules[m]
                for _, d in projectModules.items():
                    if m in d:
                        d.remove(m)
                break
        if not found:
            # No dependency-free module left => circular imports.
            print("Loop detected")
            break
    depListFull = []
    for d in depList:
        d = builddir + "/" + d + ".bo"
        depListFull.append(d)
    t = "OBJS=" + " ".join(depListFull)
    print(t)
# Run only when executed as a script (not on import).
if __name__ == '__main__':
    main()
|
[
"os.path.join",
"re.match"
] |
[((269, 301), 'os.path.join', 'os.path.join', (['directory', '"""*.bsv"""'], {}), "(directory, '*.bsv')\n", (281, 301), False, 'import os\n'), ((316, 349), 're.match', 're.match', (['""".*/(.*).bsv"""', 'filename'], {}), "('.*/(.*).bsv', filename)\n", (324, 349), False, 'import re\n')]
|
"""Ingest USGS Bird Banding Laboratory data."""
from pathlib import Path
import pandas as pd
from . import db, util
# Identifier stored with every record ingested from this dataset.
DATASET_ID = 'bbl'
# Raw input locations under data/raw/bbl.
RAW_DIR = Path('data') / 'raw' / DATASET_ID
BANDING = RAW_DIR / 'Banding'
ENCOUNTERS = RAW_DIR / 'Encounters'
RECAPTURES = RAW_DIR / 'Recaptures'
SPECIES = RAW_DIR / 'species.html'
# Place radii in meters; 111.32 km appears to approximate one degree of
# longitude, so these correspond to 1-minute and 10-minute arcs.
ONE_MIN = 111.32 * 1000
TEN_MIN = 111.32 * 1000 * 10
# Radius used when coordinates are exact.
EXACT = 0
def ingest():
    """Ingest USGS Bird Banding Laboratory data."""
    db.delete_dataset_records(DATASET_ID)
    taxon_map = get_taxa()
    dataset = {
        'dataset_id': DATASET_ID,
        'title': 'Bird Banding Laboratory (BBL)',
        'version': '2020.0',
        'url': ('https://www.usgs.gov/centers/pwrc/science/'
                'bird-banding-laboratory'),
    }
    db.insert_dataset(dataset)
    # Thread the growing place cache through the three ingestion passes.
    place_map = insert_banding_data({}, taxon_map)
    place_map = insert_encounter_data(ENCOUNTERS, place_map, taxon_map, 'encounter')
    insert_encounter_data(RECAPTURES, place_map, taxon_map, 'recapture')
def get_taxa():
    """Build a taxa table to link to our taxa."""
    species = pd.read_html(str(SPECIES))[0]
    renames = {'Scientific Name': 'sci_name', 'Species Number': 'species_id'}
    species = species.rename(columns=renames)
    species = species[species['sci_name'].notna()]
    code_by_name = species.set_index('sci_name')['species_id'].to_dict()
    sql = """SELECT taxon_id, sci_name FROM taxa WHERE "class"='aves';"""
    taxa_df = pd.read_sql(sql, db.connect())
    taxon_by_name = taxa_df.set_index('sci_name')['taxon_id'].to_dict()
    # Map the 4-digit BBL species code to our taxon_id, keeping only
    # species that exist in our taxa table.
    mapping = {}
    for sci_name, species_code in code_by_name.items():
        taxon_id = taxon_by_name.get(sci_name)
        if taxon_id:
            mapping[str(species_code).zfill(4)] = taxon_id
    return mapping
def insert_banding_data(to_place_id, to_taxon_id):
    """Insert raw banding data."""
    util.log(f'Inserting {DATASET_ID} banding data')
    # Field lists are loop-invariant, so build them once up front.
    event_fields = ['BAND_NUM', 'BANDING_DATE', 'TYPE']
    count_fields = ['AGE_CODE', 'SEX_CODE', 'SPECIES_ID', 'SPECIES_NAME', 'TYPE']
    for path in sorted(BANDING.glob('*.csv')):
        util.log(f'File {path}')
        frame = read_csv(path, 'LON_DECIMAL_DEGREES', 'LAT_DECIMAL_DEGREES', 'banding')
        frame = filter_data(
            frame, to_taxon_id, 'BANDING_DATE', 'SPECIES_ID', 'COORD_PRECISION')
        to_place_id = insert_places(frame, to_place_id, 'COORD_PRECISION')
        insert_events(frame, event_fields)
        insert_counts(frame, count_fields)
    return to_place_id
def insert_encounter_data(dir_, to_place_id, to_taxon_id, type_):
    """Insert raw encounter and recapture data."""
    util.log(f'Inserting {DATASET_ID} {type_} data')
    # Field lists are loop-invariant, so build them once up front.
    event_fields = ['BAND_NUM', 'ENCOUNTER_DATE', 'TYPE']
    count_fields = [
        'B_AGE_CODE', 'B_SEX_CODE', 'B_SPECIES_ID', 'B_SPECIES_NAME',
        'MIN_AGE_AT_ENC', 'ORIGINAL_BAND', 'TYPE']
    for path in sorted(dir_.glob('*.csv')):
        util.log(f'File {path}')
        frame = read_csv(path, 'E_LON_DECIMAL_DEGREES', 'E_LAT_DECIMAL_DEGREES', type_)
        frame = filter_data(
            frame, to_taxon_id, 'ENCOUNTER_DATE', 'B_SPECIES_ID', 'E_COORD_PRECISION')
        to_place_id = insert_places(frame, to_place_id, 'E_COORD_PRECISION')
        insert_events(frame, event_fields)
        insert_counts(frame, count_fields)
    return to_place_id
def read_csv(path, lng, lat, type_):
    """Read in a CSV file."""
    frame = pd.read_csv(path, dtype='unicode')
    frame = frame.fillna('')
    util.normalize_columns_names(frame)
    renames = {lng: 'lng', lat: 'lat'}
    frame = frame.rename(columns=renames)
    frame['TYPE'] = type_
    frame['dataset_id'] = DATASET_ID
    return frame
def filter_data(df, to_taxon_id, event_date, species_id, coord_precision):
    """Remove records that will not work for our analysis."""
    df['date'] = pd.to_datetime(df[event_date], errors='coerce')
    df['taxon_id'] = df[species_id].map(to_taxon_id)
    # Keep rows with a parseable date and a taxon present in our database,
    # excluding country/state-level coordinate precision (codes 12 and 72).
    usable = df['date'].notna() & df['taxon_id'].notna()
    coarse = df[coord_precision].isin(['12', '72'])
    return df.loc[usable & ~coarse]
def insert_places(df, to_place_id, coord_precision):
    """Insert place records.

    New (lng, lat, radius) combinations are inserted into the places table;
    previously seen ones are reused via the to_place_id cache, which is
    returned updated.  Also assigns df['place_id'] for every row.
    """
    util.filter_lng_lat(df, 'lng', 'lat')
    # Default to the coarsest radius, then tighten for exact ('0') and
    # minute-level ('1', '60') precision codes.
    df['radius'] = TEN_MIN
    df.loc[df[coord_precision] == '0', 'radius'] = EXACT
    df.loc[df[coord_precision].isin(['1', '60']), 'radius'] = ONE_MIN
    df['place_key'] = tuple(zip(df.lng, df.lat, df.radius))
    # Only brand-new place keys get inserted.
    places = df.drop_duplicates('place_key')
    old_places = places['place_key'].isin(to_place_id)
    places = places[~old_places]
    places['place_id'] = db.create_ids(places, 'places')
    places['place_json'] = util.json_object(places, [coord_precision])
    places.loc[:, db.PLACE_FIELDS].to_sql(
        'places', db.connect(), if_exists='append', index=False)
    # Merge newly created ids into the cache and stamp them onto df.
    new_place_ids = places.set_index('place_key')['place_id'].to_dict()
    to_place_id = {**to_place_id, **new_place_ids}
    df['place_id'] = df['place_key'].map(to_place_id)
    return to_place_id
def insert_events(df, event_json):
    """Insert event records."""
    df['event_id'] = db.create_ids(df, 'events')
    # Derive year and day-of-year from the parsed event date.
    df['year'] = df['date'].dt.strftime('%Y')
    df['day'] = df['date'].dt.strftime('%j')
    for column in ('started', 'ended'):
        df[column] = None
    df['event_json'] = util.json_object(df, event_json)
    events = df.loc[:, db.EVENT_FIELDS]
    events.to_sql('events', db.connect(), if_exists='append', index=False)
def insert_counts(df, count_json):
    """Insert count records."""
    df['count_id'] = db.create_ids(df, 'counts')
    # Every banding/encounter row represents exactly one bird.
    df['count'] = 1
    df['count_json'] = util.json_object(df, count_json)
    counts = df.loc[:, db.COUNT_FIELDS]
    counts.to_sql('counts', db.connect(), if_exists='append', index=False)
# Run only when executed as a script (not on import).
if __name__ == '__main__':
    ingest()
|
[
"pandas.read_csv",
"pandas.to_datetime",
"pathlib.Path"
] |
[((3737, 3784), 'pandas.to_datetime', 'pd.to_datetime', (['df[event_date]'], {'errors': '"""coerce"""'}), "(df[event_date], errors='coerce')\n", (3751, 3784), True, 'import pandas as pd\n'), ((149, 161), 'pathlib.Path', 'Path', (['"""data"""'], {}), "('data')\n", (153, 161), False, 'from pathlib import Path\n'), ((3374, 3408), 'pandas.read_csv', 'pd.read_csv', (['path'], {'dtype': '"""unicode"""'}), "(path, dtype='unicode')\n", (3385, 3408), True, 'import pandas as pd\n')]
|
# Generated by Django 2.1.10 on 2019-07-19 12:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Set ordering/verbose names on Element and add the two halves of a
    generic foreign key (content_type + object_id) to Container."""
    dependencies = [
        ('contenttypes', '0002_remove_content_type_name'),
        ('cms_content', '0003_auto_20190719_1232'),
    ]
    operations = [
        migrations.AlterModelOptions(
            name='element',
            options={'ordering': ['position'], 'verbose_name': 'Element', 'verbose_name_plural': 'Element'},
        ),
        migrations.AddField(
            model_name='container',
            name='content_type',
            field=models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to='contenttypes.ContentType', verbose_name='Content type'),
        ),
        migrations.AddField(
            model_name='container',
            name='object_id',
            field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Object ID'),
        ),
    ]
|
[
"django.db.migrations.AlterModelOptions",
"django.db.models.PositiveIntegerField",
"django.db.models.ForeignKey"
] |
[((332, 478), 'django.db.migrations.AlterModelOptions', 'migrations.AlterModelOptions', ([], {'name': '"""element"""', 'options': "{'ordering': ['position'], 'verbose_name': 'Element', 'verbose_name_plural':\n 'Element'}"}), "(name='element', options={'ordering': [\n 'position'], 'verbose_name': 'Element', 'verbose_name_plural': 'Element'})\n", (360, 478), False, 'from django.db import migrations, models\n'), ((626, 781), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'blank': '(True)', 'null': '(True)', 'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""contenttypes.ContentType"""', 'verbose_name': '"""Content type"""'}), "(blank=True, null=True, on_delete=django.db.models.\n deletion.CASCADE, to='contenttypes.ContentType', verbose_name=\n 'Content type')\n", (643, 781), False, 'from django.db import migrations, models\n'), ((897, 973), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'null': '(True)', 'verbose_name': '"""Object ID"""'}), "(blank=True, null=True, verbose_name='Object ID')\n", (924, 973), False, 'from django.db import migrations, models\n')]
|
from direct.directnotify import DirectNotifyGlobal
from direct.fsm import ClassicFSM, State
from direct.fsm import State
from pandac.PandaModules import *
from toontown.battle import BattlePlace
from toontown.building import Elevator
from toontown.coghq import CogHQExterior
from toontown.dna.DNAParser import loadDNAFileAI
from libpandadna import DNAStorage
from toontown.hood import ZoneUtil
from toontown.toonbase import ToontownGlobals
class LawbotHQExterior(CogHQExterior.CogHQExterior):
    """Exterior zone for Lawbot HQ: on enter, it loads the zone's DNA file
    and builds a visibility-group map so interest can be set for the HQ."""
    notify = DirectNotifyGlobal.directNotify.newCategory('LawbotHQExterior')
    def enter(self, requestStatus):
        CogHQExterior.CogHQExterior.enter(self, requestStatus)
        # Load the CogHQ DNA file:
        dnaStore = DNAStorage()
        dnaFileName = self.genDNAFileName(self.zoneId)
        loadDNAFileAI(dnaStore, dnaFileName)
        # Collect all of the vis group zone IDs:
        # zoneVisDict maps each vis-group's true zone id to the list of zone
        # ids visible from it (plus the branch zone).
        self.zoneVisDict = {}
        for i in range(dnaStore.getNumDNAVisGroupsAI()):
            groupFullName = dnaStore.getDNAVisGroupName(i)
            visGroup = dnaStore.getDNAVisGroupAI(i)
            visZoneId = int(base.cr.hoodMgr.extractGroupName(groupFullName))
            visZoneId = ZoneUtil.getTrueZoneId(visZoneId, self.zoneId)
            visibles = []
            # NOTE(review): this inner loop reuses `i`, shadowing the outer
            # loop variable; harmless in Python but easy to misread.
            for i in range(visGroup.getNumVisibles()):
                visibles.append(int(visGroup.getVisible(i)))
            visibles.append(ZoneUtil.getBranchZone(visZoneId))
            self.zoneVisDict[visZoneId] = visibles
        # Next, we want interest in all vis groups due to this being a Cog HQ:
        # `base` is presumably the global ShowBase instance — TODO confirm.
        base.cr.sendSetZoneMsg(self.zoneId, list(self.zoneVisDict.values())[0])
|
[
"libpandadna.DNAStorage",
"direct.directnotify.DirectNotifyGlobal.directNotify.newCategory",
"toontown.hood.ZoneUtil.getBranchZone",
"toontown.hood.ZoneUtil.getTrueZoneId",
"toontown.coghq.CogHQExterior.CogHQExterior.enter",
"toontown.dna.DNAParser.loadDNAFileAI"
] |
[((508, 571), 'direct.directnotify.DirectNotifyGlobal.directNotify.newCategory', 'DirectNotifyGlobal.directNotify.newCategory', (['"""LawbotHQExterior"""'], {}), "('LawbotHQExterior')\n", (551, 571), False, 'from direct.directnotify import DirectNotifyGlobal\n'), ((617, 671), 'toontown.coghq.CogHQExterior.CogHQExterior.enter', 'CogHQExterior.CogHQExterior.enter', (['self', 'requestStatus'], {}), '(self, requestStatus)\n', (650, 671), False, 'from toontown.coghq import CogHQExterior\n'), ((727, 739), 'libpandadna.DNAStorage', 'DNAStorage', ([], {}), '()\n', (737, 739), False, 'from libpandadna import DNAStorage\n'), ((803, 839), 'toontown.dna.DNAParser.loadDNAFileAI', 'loadDNAFileAI', (['dnaStore', 'dnaFileName'], {}), '(dnaStore, dnaFileName)\n', (816, 839), False, 'from toontown.dna.DNAParser import loadDNAFileAI\n'), ((1189, 1235), 'toontown.hood.ZoneUtil.getTrueZoneId', 'ZoneUtil.getTrueZoneId', (['visZoneId', 'self.zoneId'], {}), '(visZoneId, self.zoneId)\n', (1211, 1235), False, 'from toontown.hood import ZoneUtil\n'), ((1406, 1439), 'toontown.hood.ZoneUtil.getBranchZone', 'ZoneUtil.getBranchZone', (['visZoneId'], {}), '(visZoneId)\n', (1428, 1439), False, 'from toontown.hood import ZoneUtil\n')]
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Add the nullable last_message_at timestamp to ConvoThread."""
    dependencies = [
        ('convos', '0004_auto_20150511_0945'),
    ]
    operations = [
        migrations.AddField(
            model_name='convothread',
            name='last_message_at',
            field=models.DateTimeField(null=True, verbose_name='Last message at', blank=True),
        ),
    ]
|
[
"django.db.models.DateTimeField"
] |
[((363, 438), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'null': '(True)', 'verbose_name': '"""Last message at"""', 'blank': '(True)'}), "(null=True, verbose_name='Last message at', blank=True)\n", (383, 438), False, 'from django.db import models, migrations\n')]
|
#!/usr/bin/env python
import sys
from itertools import chain
from common import open_example_serial_interface
from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask
def get_features(interface):
    """Probe the terminal over *interface* and return its parsed feature map."""
    probe_commands = read_feature_ids()
    responses = interface.execute(probe_commands)
    return parse_features(responses, probe_commands)
def eab_alternate_zip(regen_buffer, eab_buffer):
    """Interleave the two buffers byte-by-byte: r0, e0, r1, e1, ...

    Truncates to the shorter buffer, like zip().
    """
    interleaved = bytearray()
    for regen_byte, eab_byte in zip(regen_buffer, eab_buffer):
        interleaved.append(regen_byte)
        interleaved.append(eab_byte)
    return bytes(interleaved)
# Demo script: writes several text rows showing the base field attributes
# (protected normal/intense) and EAB extended field attributes (normal,
# blink, reverse, underline) to the terminal's regen/EAB buffers.
with open_example_serial_interface() as interface:
    features = get_features(interface)
    if Feature.EAB not in features:
        sys.exit('No EAB feature found.')
    eab_address = features[Feature.EAB]
    print(f'EAB feature found at address {eab_address}')
    # Protected Normal
    interface.execute([LoadAddressCounterHi(0), LoadAddressCounterLo(80)])
    regen_buffer = bytes.fromhex('e0 08 00 af 91 8e 93 84 82 93 84 83 00 ad 8e 91 8c 80 8b 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 09')
    interface.execute(WriteData(regen_buffer))
    # Protected Intense
    interface.execute([LoadAddressCounterHi(0), LoadAddressCounterLo(160)])
    regen_buffer = bytes.fromhex('e8 08 00 af 91 8e 93 84 82 93 84 83 00 a8 8d 93 84 8d 92 84 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 09')
    interface.execute(WriteData(regen_buffer))
    # Normal EFA
    # The remaining rows write regen and EAB bytes interleaved via
    # EABWriteAlternate so each character cell gets its extended attribute.
    interface.execute([LoadAddressCounterHi(1), LoadAddressCounterLo(64)])
    regen_buffer = bytes.fromhex('e0 08 00 ad 8e 91 8c 80 8b 00 a4 a5 a0 00 00 00 00 00 00 00 00 00 00 b7 bf 00 a1 bf 00 b1 bf 00 ac bf 00 a6 bf 00 a2 bf 00 b8 bf 00 b6 bf 00 00 09 e0')
    eab_buffer = bytes.fromhex('00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 08 00 00 10 00 00 18 00 00 20 00 00 28 00 00 30 00 00 38 00 00 00 00 00')
    interface.execute(EABWriteAlternate(eab_address, eab_alternate_zip(regen_buffer, eab_buffer)))
    # Blink EFA
    interface.execute([LoadAddressCounterHi(1), LoadAddressCounterLo(144)])
    regen_buffer = bytes.fromhex('e0 08 00 a1 8b 88 8d 8a 00 a4 a5 a0 00 00 00 00 00 00 00 00 00 00 00 b7 bf 00 a1 bf 00 b1 bf 00 ac bf 00 a6 bf 00 a2 bf 00 b8 bf 00 b6 bf 00 00 09 e0')
    eab_buffer = bytes.fromhex('40 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 08 00 00 10 00 00 18 00 00 20 00 00 28 00 00 30 00 00 38 00 00 00 00 00')
    interface.execute(EABWriteAlternate(eab_address, eab_alternate_zip(regen_buffer, eab_buffer)))
    # Reverse EFA
    interface.execute([LoadAddressCounterHi(1), LoadAddressCounterLo(224)])
    regen_buffer = bytes.fromhex('e0 08 00 b1 84 95 84 91 92 84 00 a4 a5 a0 00 00 00 00 00 00 00 00 00 b7 bf 00 a1 bf 00 b1 bf 00 ac bf 00 a6 bf 00 a2 bf 00 b8 bf 00 b6 bf 00 00 09 e0')
    eab_buffer = bytes.fromhex('80 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 08 00 00 10 00 00 18 00 00 20 00 00 28 00 00 30 00 00 38 00 00 00 00 00')
    interface.execute(EABWriteAlternate(eab_address, eab_alternate_zip(regen_buffer, eab_buffer)))
    # Underline EFA
    interface.execute([LoadAddressCounterHi(2), LoadAddressCounterLo(48)])
    regen_buffer = bytes.fromhex('e0 08 00 b4 8d 83 84 91 8b 88 8d 84 00 a4 a5 a0 00 00 00 00 00 00 00 b7 bf 00 a1 bf 00 b1 bf 00 ac bf 00 a6 bf 00 a2 bf 00 b8 bf 00 b6 bf 00 00 09 e0')
    eab_buffer = bytes.fromhex('c0 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 00 08 00 00 10 00 00 18 00 00 20 00 00 28 00 00 30 00 00 38 00 00 00 00 00')
    interface.execute(EABWriteAlternate(eab_address, eab_alternate_zip(regen_buffer, eab_buffer)))
|
[
"coax.LoadAddressCounterHi",
"common.open_example_serial_interface",
"coax.read_feature_ids",
"sys.exit",
"coax.parse_features",
"coax.WriteData",
"coax.LoadAddressCounterLo"
] |
[((304, 322), 'coax.read_feature_ids', 'read_feature_ids', ([], {}), '()\n', (320, 322), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((374, 403), 'coax.parse_features', 'parse_features', (['ids', 'commands'], {}), '(ids, commands)\n', (388, 403), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((516, 547), 'common.open_example_serial_interface', 'open_example_serial_interface', ([], {}), '()\n', (545, 547), False, 'from common import open_example_serial_interface\n'), ((646, 679), 'sys.exit', 'sys.exit', (['"""No EAB feature found."""'], {}), "('No EAB feature found.')\n", (654, 679), False, 'import sys\n'), ((1085, 1108), 'coax.WriteData', 'WriteData', (['regen_buffer'], {}), '(regen_buffer)\n', (1094, 1108), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((1418, 1441), 'coax.WriteData', 'WriteData', (['regen_buffer'], {}), '(regen_buffer)\n', (1427, 1441), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((826, 849), 'coax.LoadAddressCounterHi', 'LoadAddressCounterHi', (['(0)'], {}), '(0)\n', (846, 849), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((851, 875), 'coax.LoadAddressCounterLo', 'LoadAddressCounterLo', (['(80)'], {}), '(80)\n', (871, 875), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((1158, 1181), 'coax.LoadAddressCounterHi', 'LoadAddressCounterHi', (['(0)'], {}), '(0)\n', (1178, 1181), 
False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((1183, 1208), 'coax.LoadAddressCounterLo', 'LoadAddressCounterLo', (['(160)'], {}), '(160)\n', (1203, 1208), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((1484, 1507), 'coax.LoadAddressCounterHi', 'LoadAddressCounterHi', (['(1)'], {}), '(1)\n', (1504, 1507), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((1509, 1533), 'coax.LoadAddressCounterLo', 'LoadAddressCounterLo', (['(64)'], {}), '(64)\n', (1529, 1533), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((2047, 2070), 'coax.LoadAddressCounterHi', 'LoadAddressCounterHi', (['(1)'], {}), '(1)\n', (2067, 2070), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((2072, 2097), 'coax.LoadAddressCounterLo', 'LoadAddressCounterLo', (['(144)'], {}), '(144)\n', (2092, 2097), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((2613, 2636), 'coax.LoadAddressCounterHi', 'LoadAddressCounterHi', (['(1)'], {}), '(1)\n', (2633, 2636), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((2638, 2663), 'coax.LoadAddressCounterLo', 'LoadAddressCounterLo', (['(224)'], {}), '(224)\n', (2658, 2663), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, 
EABWriteAlternate, EABLoadMask\n'), ((3181, 3204), 'coax.LoadAddressCounterHi', 'LoadAddressCounterHi', (['(2)'], {}), '(2)\n', (3201, 3204), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n'), ((3206, 3230), 'coax.LoadAddressCounterLo', 'LoadAddressCounterLo', (['(48)'], {}), '(48)\n', (3226, 3230), False, 'from coax import read_feature_ids, parse_features, Feature, LoadAddressCounterHi, LoadAddressCounterLo, WriteData, EABWriteAlternate, EABLoadMask\n')]
|
# coding=utf-8
from Base.DevicesList import devicesList as dl
from Base.Common import Common
class DevicesConnect:
    """Connect to every TV listed in the device inventory over ADB."""

    def deviceConnect(self):
        """Build one ``adb connect <ip>`` command per known TV IP and run
        them all concurrently via Common.loop_threads."""
        ip_list = dl().get_Tv_IP()
        commands = ["adb connect %s" % ip for ip in ip_list]
        Common().loop_threads(commands)
# Script entry point: connect to all configured TVs when run directly.
if __name__ == '__main__':
    DevicesConnect().deviceConnect()
|
[
"Base.DevicesList.devicesList",
"Base.Common.Common"
] |
[((184, 188), 'Base.DevicesList.devicesList', 'dl', ([], {}), '()\n', (186, 188), True, 'from Base.DevicesList import devicesList as dl\n'), ((307, 315), 'Base.Common.Common', 'Common', ([], {}), '()\n', (313, 315), False, 'from Base.Common import Common\n')]
|
# Copyright 2011 The scales Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Formatting methods for stats."""
from greplin import scales
import cgi
import six
import json
import operator
import re
# Comparison operators supported in filter queries, keyed by textual form.
# Both '=' and '==' map to equality for convenience.
OPERATORS = {
  '>=': operator.ge,
  '>': operator.gt,
  '<': operator.lt,
  '<=': operator.le,
  '=': operator.eq,
  '==': operator.eq,
  '!=': operator.ne
}
# Regex that splits a query string around its comparison operator while
# keeping the operator itself as a capture group (see runQuery).
OPERATOR = re.compile('(%s)' % '|'.join(list(OPERATORS.keys())))
def runQuery(statDict, query):
  """Return the subset of statDict that matches the given filter query.

  A query is either a bare key name or "key <op> value", where <op> is one
  of the operators in OPERATORS.  Nested containers are searched
  recursively and non-matching subtrees are dropped from the result.
  """
  pieces = [part.strip() for part in OPERATOR.split(query)]
  assert len(pieces) in (1, 3)
  wantedKey = pieces[0]
  matches = {}
  for key, value in six.iteritems(statDict):
    if key == wantedKey:
      if len(pieces) == 3:
        compare = OPERATORS[pieces[1]]
        try:
          # Coerce the query operand to the value's own type when possible.
          operand = type(value)(pieces[2]) if value else pieces[2]
        except (TypeError, ValueError):
          continue
        if not compare(value, operand):
          continue
      matches[key] = value
    elif isinstance(value, (scales.StatContainer, dict)):
      subtree = runQuery(value, query)
      if subtree:
        matches[key] = subtree
  return matches
def htmlHeader(output, path, serverName, query = None):
  """Writes an HTML header.

  Security fix: path and query typically come straight from the request,
  and serverName may contain markup characters; all three are now
  HTML-escaped before interpolation, so a crafted query string can no
  longer inject script into the page (previously it was written raw into
  the filter input's value attribute and the title).
  """
  def esc(text):
    """Minimal HTML escape, safe for element content and quoted attributes."""
    return (str(text)
            .replace('&', '&amp;')
            .replace('<', '&lt;')
            .replace('>', '&gt;')
            .replace('"', '&quot;'))
  safeServer = esc(serverName)
  if path and path != '/':
    output.write('<title>%s - Status: %s</title>' % (safeServer, esc(path)))
  else:
    output.write('<title>%s - Status</title>' % safeServer)
  output.write('''
<style>
    body,td { font-family: monospace }
    .level div {
      padding-bottom: 4px;
    }
    .level .level {
      margin-left: 2em;
      padding: 1px 0;
    }
    span { color: #090; vertical-align: top }
    .key { color: black; font-weight: bold }
    .int, .float { color: #00c }
  </style>
  ''')
  output.write('<h1 style="margin: 0">Stats</h1>')
  output.write('<h3 style="margin: 3px 0 18px">%s</h3>' % safeServer)
  output.write(
      '<p><form action="#" method="GET">Filter: <input type="text" name="query" size="20" value="%s"></form></p>' %
      esc(query or ''))
def htmlFormat(output, pathParts = (), statDict = None, query = None):
  """Render the stat tree (optionally filtered by query) as HTML.

  Falls back to the global scales stats when no statDict is supplied.
  """
  stats = statDict or scales.getStats()
  if query:
    stats = runQuery(stats, query)
  _htmlRenderDict(pathParts, stats, output)
def _htmlRenderDict(pathParts, statDict, output):
  """Render a dictionary as a table - recursing as necessary.

  pathParts is the tuple of ancestor keys, used to build links for
  collapsed stat containers.
  """
  keys = list(statDict.keys())
  keys.sort()
  links = []
  output.write('<div class="level">')
  for key in keys:
    # Keys may be non-string; coerce to UTF-8 before escaping.
    keyStr = cgi.escape(_utf8str(key))
    value = statDict[key]
    if hasattr(value, '__call__'):
      # Callable stats are evaluated lazily at render time.
      value = value()
    if hasattr(value, 'keys'):
      valuePath = pathParts + (keyStr,)
      if isinstance(value, scales.StatContainer) and value.isCollapsed():
        # Collapsed containers render as links; collect them so they are
        # emitted after all the expanded entries at this level.
        link = '/status/' + '/'.join(valuePath)
        links.append('<div class="key"><a href="%s">%s</a></div>' % (link, keyStr))
      else:
        output.write('<div class="key">%s</div>' % keyStr)
        _htmlRenderDict(valuePath, value, output)
    else:
      # Leaf value: class name doubles as a CSS hook (.int, .float, ...).
      output.write('<div><span class="key">%s</span> <span class="%s">%s</span></div>' %
                   (keyStr, type(value).__name__, cgi.escape(_utf8str(value)).replace('\n', '<br/>')))
  if links:
    for link in links:
      output.write(link)
  output.write('</div>')
def _utf8str(x):
  """Coerce x to a string; on Python 2 the result is UTF-8 encoded bytes."""
  if six.PY3:
    return str(x)
  # Python 2: bytes pass through unchanged, text is encoded, and anything
  # else is stringified via the bytes constructor.
  if isinstance(x, six.binary_type):
    return x
  if isinstance(x, six.text_type):
    return x.encode('utf-8')
  return six.binary_type(x)
def jsonFormat(output, statDict = None, query = None, pretty = False):
  """Formats as JSON, writing to the given object.

  Args:
    output: file-like object the serialized JSON is written to.
    statDict: stat tree to serialize; defaults to the global scales stats.
    query: optional filter expression, applied via runQuery.
    pretty: when True, indent the output by two spaces.
  """
  statDict = statDict or scales.getStats()
  if query:
    statDict = runQuery(statDict, query)
  indent = 2 if pretty else None
  # At first, assume that strings are in UTF-8. If this fails -- if, for example, we have
  # crazy binary data -- then in order to get *something* out, we assume ISO-8859-1,
  # which maps each byte to a unicode code point.
  try:
    serialized = json.dumps(statDict, cls=scales.StatContainerEncoder, indent=indent)
  except UnicodeDecodeError:
    # NOTE(review): json.dumps accepts an 'encoding' keyword on Python 2
    # only; on Python 3 this fallback raises TypeError -- confirm targets.
    serialized = json.dumps(statDict, cls=scales.StatContainerEncoder, indent=indent, encoding='iso-8859-1')
  output.write(serialized)
  output.write('\n')
|
[
"greplin.scales.getStats",
"json.dumps",
"six.iteritems",
"six.binary_type"
] |
[((1145, 1168), 'six.iteritems', 'six.iteritems', (['statDict'], {}), '(statDict)\n', (1158, 1168), False, 'import six\n'), ((2616, 2633), 'greplin.scales.getStats', 'scales.getStats', ([], {}), '()\n', (2631, 2633), False, 'from greplin import scales\n'), ((4173, 4190), 'greplin.scales.getStats', 'scales.getStats', ([], {}), '()\n', (4188, 4190), False, 'from greplin import scales\n'), ((4526, 4594), 'json.dumps', 'json.dumps', (['statDict'], {'cls': 'scales.StatContainerEncoder', 'indent': 'indent'}), '(statDict, cls=scales.StatContainerEncoder, indent=indent)\n', (4536, 4594), False, 'import json\n'), ((4002, 4020), 'six.binary_type', 'six.binary_type', (['x'], {}), '(x)\n', (4017, 4020), False, 'import six\n'), ((4641, 4736), 'json.dumps', 'json.dumps', (['statDict'], {'cls': 'scales.StatContainerEncoder', 'indent': 'indent', 'encoding': '"""iso-8859-1"""'}), "(statDict, cls=scales.StatContainerEncoder, indent=indent,\n encoding='iso-8859-1')\n", (4651, 4736), False, 'import json\n')]
|
import click
from ..cli import with_context
@click.command('clean', short_help="Cleans a book's output directories")
@with_context
def clean_command(ctx=None):
    """Remove a book's generated output directories.

    Currently a stub: the command is registered on the CLI but performs
    no cleanup work yet.
    """
    # Fix: the short_help string previously read "a book' output" (typo).
    pass
|
[
"click.command"
] |
[((47, 117), 'click.command', 'click.command', (['"""clean"""'], {'short_help': '"""Cleans a book\' output directories"""'}), '(\'clean\', short_help="Cleans a book\' output directories")\n', (60, 117), False, 'import click\n')]
|
#MenuTitle: Steal Kerning Groups from Font
"""Copy kerning groups from one font to another."""
from __future__ import print_function
import vanilla
class GroupsCopy(object):
	"""GUI for copying kerning groups from one font to another.

	Runs inside Glyphs.app: the `Glyphs` global used below is injected by
	the application's macro environment, not imported in this file.
	"""
	def __init__(self):
		# Build the floating dialog: source/target font pickers plus a Copy
		# button; both pickers re-validate via buttonCheck on change.
		self.w = vanilla.FloatingWindow((400, 70), "Steal kerning groups")
		self.w.text_anchor = vanilla.TextBox((15, 12+2, 130, 14), "Copy groups from:", sizeStyle='small')
		self.w.from_font = vanilla.PopUpButton((150, 12, 150, 17), self.GetFonts(isSourceFont=True), sizeStyle='small', callback=self.buttonCheck)
		self.w.text_value = vanilla.TextBox((15, 12+2+25, 130, 14), "To selected glyphs in:", sizeStyle='small')
		self.w.to_font = vanilla.PopUpButton((150, 12+25, 150, 17), self.GetFonts(isSourceFont=False), sizeStyle='small', callback=self.buttonCheck)
		self.w.copybutton = vanilla.Button((-80, 12+25, -15, 17), "Copy", sizeStyle='small', callback=self.copyGroups)
		self.w.setDefaultButton( self.w.copybutton )
		self.w.open()
		# Disable the Copy button immediately if source == target.
		self.buttonCheck(None)
	def GetFonts(self, isSourceFont):
		# One "<family> - <master>" label per open document; the source list
		# is reversed so the two popups default to different fonts.
		myFontList = [ "%s - %s" % ( x.font.familyName, x.selectedFontMaster().name ) for x in Glyphs.orderedDocuments() ]
		if isSourceFont:
			myFontList.reverse()
		return myFontList
	def buttonCheck(self, sender):
		# Copying a font onto itself is a no-op; grey out the button then.
		fromFont = self.w.from_font.getItems()[ self.w.from_font.get() ]
		toFont = self.w.to_font.getItems()[ self.w.to_font.get() ]
		if fromFont == toFont:
			self.w.copybutton.enable( onOff=False )
		else:
			self.w.copybutton.enable( onOff=True )
	def copyGroups(self, sender):
		# Resolve the popup labels back to the open documents/fonts.
		fromFont = self.w.from_font.getItems()[ self.w.from_font.get() ]
		toFont = self.w.to_font.getItems()[ self.w.to_font.get() ]
		Doc_source = [ x for x in Glyphs.orderedDocuments() if ("%s - %s" % ( x.font.familyName, x.selectedFontMaster().name )) == fromFont ][0]
		Master_source = Doc_source.selectedFontMaster().id
		Font_source = Doc_source.font
		Font_target = [ x.font for x in Glyphs.orderedDocuments() if ("%s - %s" % ( x.font.familyName, x.selectedFontMaster().name )) == toFont ][0]
		Glyphs_selected = [ x.parent for x in Font_target.parent.selectedLayers() ]
		print("Syncing kerning groups for", len(Glyphs_selected), "glyphs from", Font_source.familyName, "to", Font_target.familyName, ":")
		try:
			for thisGlyph in Glyphs_selected:
				glyphName = thisGlyph.name
				try:
					# Match glyphs by name; copy L/R groups only when they differ.
					sourceGlyph = Font_source.glyphs[ glyphName ]
					oldL = thisGlyph.leftKerningGroup
					oldR = thisGlyph.rightKerningGroup
					newL = sourceGlyph.leftKerningGroup
					newR = sourceGlyph.rightKerningGroup
					if oldL != newL or oldR != newR:
						thisGlyph.leftKerningGroup = newL
						thisGlyph.rightKerningGroup = newR
						print(" ", glyphName, ":", newL, "<--->", newR)
					# start: temporary fix for 3.0.3 unwrapped vertical kerning
					def kerningGetter(kerning):
						if kerning is not None and not isinstance(kerning, str):
							kerning = kerning()
						return kerning
					# end: temporary fix for 3.0.3 unwrapped vertical kerning
					oldT = kerningGetter(thisGlyph.topKerningGroup)
					oldB = kerningGetter(thisGlyph.bottomKerningGroup)
					newT = kerningGetter(sourceGlyph.topKerningGroup)
					newB = kerningGetter(sourceGlyph.bottomKerningGroup)
					if oldT != newT or oldB != newB:
						# NOTE(review): re-assigning leftKerningGroup inside the
						# top/bottom branch looks like a copy-paste slip -- confirm
						# whether it was meant to be removed or set a vertical group.
						thisGlyph.leftKerningGroup = newL
						thisGlyph.setTopKerningGroup_(newT)
						thisGlyph.setBottomKerningGroup_(newB)
						print(" ", glyphName, ":", newT, "\n  ^\n  |\n  V\n", newB)
					pass
				except Exception as e:
					# Per-glyph failures (e.g. glyph missing in source) are reported
					# but do not abort the whole copy.
					print(" ", glyphName,": Error")
					# print e
		except Exception as e:
			import traceback
			print(traceback.format_exc())
		finally:
			print("Done.")
			self.w.close()
# Instantiate to launch the dialog when the macro is run inside Glyphs.
GroupsCopy()
|
[
"traceback.format_exc",
"vanilla.FloatingWindow",
"vanilla.TextBox",
"vanilla.Button"
] |
[((271, 328), 'vanilla.FloatingWindow', 'vanilla.FloatingWindow', (['(400, 70)', '"""Steal kerning groups"""'], {}), "((400, 70), 'Steal kerning groups')\n", (293, 328), False, 'import vanilla\n'), ((355, 433), 'vanilla.TextBox', 'vanilla.TextBox', (['(15, 12 + 2, 130, 14)', '"""Copy groups from:"""'], {'sizeStyle': '"""small"""'}), "((15, 12 + 2, 130, 14), 'Copy groups from:', sizeStyle='small')\n", (370, 433), False, 'import vanilla\n'), ((598, 690), 'vanilla.TextBox', 'vanilla.TextBox', (['(15, 12 + 2 + 25, 130, 14)', '"""To selected glyphs in:"""'], {'sizeStyle': '"""small"""'}), "((15, 12 + 2 + 25, 130, 14), 'To selected glyphs in:',\n sizeStyle='small')\n", (613, 690), False, 'import vanilla\n'), ((849, 946), 'vanilla.Button', 'vanilla.Button', (['(-80, 12 + 25, -15, 17)', '"""Copy"""'], {'sizeStyle': '"""small"""', 'callback': 'self.copyGroups'}), "((-80, 12 + 25, -15, 17), 'Copy', sizeStyle='small', callback\n =self.copyGroups)\n", (863, 946), False, 'import vanilla\n'), ((3727, 3749), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (3747, 3749), False, 'import traceback\n')]
|
# extdiff.py - external diff program support for mercurial
#
# Copyright 2006 <NAME> <<EMAIL>>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''command to allow external programs to compare revisions
The extdiff Mercurial extension allows you to use external programs
to compare revisions, or revision with working directory. The external
diff programs are called with a configurable set of options and two
non-option arguments: paths to directories containing snapshots of
files to compare.
The extdiff extension also allows you to configure new diff commands, so
you do not need to type :hg:`extdiff -p kdiff3` always. ::
[extdiff]
# add new command that runs GNU diff(1) in 'context diff' mode
cdiff = gdiff -Nprc5
## or the old way:
#cmd.cdiff = gdiff
#opts.cdiff = -Nprc5
# add new command called vdiff, runs kdiff3
vdiff = kdiff3
# add new command called meld, runs meld (no need to name twice)
meld =
# add new command called vimdiff, runs gvimdiff with DirDiff plugin
# (see http://www.vim.org/scripts/script.php?script_id=102) Non
# English user, be sure to put "let g:DirDiffDynamicDiffText = 1" in
# your .vimrc
vimdiff = gvim -f "+next" \\
"+execute 'DirDiff' fnameescape(argv(0)) fnameescape(argv(1))"
Tool arguments can include variables that are expanded at runtime::
$parent1, $plabel1 - filename, descriptive label of first parent
$child, $clabel - filename, descriptive label of child revision
$parent2, $plabel2 - filename, descriptive label of second parent
$root - repository root
$parent is an alias for $parent1.
The extdiff extension will look in your [diff-tools] and [merge-tools]
sections for diff tool arguments, when none are specified in [extdiff].
::
[extdiff]
kdiff3 =
[diff-tools]
kdiff3.diffargs=--L1 '$plabel1' --L2 '$clabel' $parent $child
You can use -I/-X and list of file or directory names like normal
:hg:`diff` command. The extdiff extension makes snapshots of only
needed files, so running the external diff program will actually be
pretty fast (at least faster than having to compare the entire tree).
'''
from mercurial.i18n import _
from mercurial.node import short, nullid
from mercurial import scmutil, scmutil, util, commands, encoding
import os, shlex, shutil, tempfile, re
def snapshot(ui, repo, files, node, tmproot):
    '''snapshot files as of some revision
    if not using snapshot, -I/-X does not work and recursive diff
    in tools like kdiff3 and meld displays too many files.
    Returns (dirname, fns_and_mtime): the snapshot directory name under
    tmproot, and -- for working-directory snapshots only -- a list of
    (copy, original, mtime) triples used later to detect edits made
    through the diff tool.'''
    # Name the snapshot dir after the repo, suffixed with the short node.
    dirname = os.path.basename(repo.root)
    if dirname == "":
        dirname = "root"
    if node is not None:
        dirname = '%s.%s' % (dirname, short(node))
    base = os.path.join(tmproot, dirname)
    os.mkdir(base)
    if node is not None:
        ui.note(_('making snapshot of %d files from rev %s\n') %
                (len(files), short(node)))
    else:
        ui.note(_('making snapshot of %d files from working directory\n') %
                (len(files)))
    wopener = scmutil.opener(base)
    fns_and_mtime = []
    ctx = repo[node]
    for fn in files:
        wfn = util.pconvert(fn)
        if not wfn in ctx:
            # File doesn't exist; could be a bogus modify
            continue
        ui.note('  %s\n' % wfn)
        dest = os.path.join(base, wfn)
        fctx = ctx[wfn]
        data = repo.wwritedata(wfn, fctx.data())
        # Preserve symlinks and the executable bit in the snapshot copy.
        if 'l' in fctx.flags():
            wopener.symlink(data, wfn)
        else:
            wopener.write(wfn, data)
            if 'x' in fctx.flags():
                util.setflags(dest, False, True)
        if node is None:
            # Remember the mtime so dodiff can copy back files the user
            # edited inside the external diff tool.
            fns_and_mtime.append((dest, repo.wjoin(fn),
                                  os.lstat(dest).st_mtime))
    return dirname, fns_and_mtime
def dodiff(ui, repo, diffcmd, diffopts, pats, opts):
    '''Do the actual diff:
    - copy to a temp structure if diffing 2 internal revisions
    - copy to a temp structure if diffing working revision with
      another one and more than 1 file is changed
    - just invoke the diff for a single file in the working dir
    Returns 1 if a diff was shown, 0 if there was nothing to compare.
    '''
    revs = opts.get('rev')
    change = opts.get('change')
    args = ' '.join(diffopts)
    # A tool template mentioning $parent2 requests a 3-way comparison.
    do3way = '$parent2' in args
    # Resolve the revision(s) being compared from --rev/--change.
    if revs and change:
        msg = _('cannot specify --rev and --change at the same time')
        raise util.Abort(msg)
    elif change:
        node2 = scmutil.revsingle(repo, change, None).node()
        node1a, node1b = repo.changelog.parents(node2)
    else:
        node1a, node2 = scmutil.revpair(repo, revs)
        if not revs:
            node1b = repo.dirstate.p2()
        else:
            node1b = nullid
    # Disable 3-way merge if there is only one parent
    if do3way:
        if node1b == nullid:
            do3way = False
    matcher = scmutil.match(repo[node2], pats, opts)
    # Modified/added/removed sets against each parent.
    mod_a, add_a, rem_a = map(set, repo.status(node1a, node2, matcher)[:3])
    if do3way:
        mod_b, add_b, rem_b = map(set, repo.status(node1b, node2, matcher)[:3])
    else:
        mod_b, add_b, rem_b = set(), set(), set()
    modadd = mod_a | add_a | mod_b | add_b
    common = modadd | rem_a | rem_b
    if not common:
        return 0
    tmproot = tempfile.mkdtemp(prefix='extdiff.')
    try:
        # Always make a copy of node1a (and node1b, if applicable)
        dir1a_files = mod_a | rem_a | ((mod_b | add_b) - add_a)
        dir1a = snapshot(ui, repo, dir1a_files, node1a, tmproot)[0]
        rev1a = '@%d' % repo[node1a].rev()
        if do3way:
            dir1b_files = mod_b | rem_b | ((mod_a | add_a) - add_b)
            dir1b = snapshot(ui, repo, dir1b_files, node1b, tmproot)[0]
            rev1b = '@%d' % repo[node1b].rev()
        else:
            dir1b = None
            rev1b = ''
        fns_and_mtime = []
        # If node2 in not the wc or there is >1 change, copy it
        dir2root = ''
        rev2 = ''
        if node2:
            dir2 = snapshot(ui, repo, modadd, node2, tmproot)[0]
            rev2 = '@%d' % repo[node2].rev()
        elif len(common) > 1:
            #we only actually need to get the files to copy back to
            #the working dir in this case (because the other cases
            #are: diffing 2 revisions or single file -- in which case
            #the file is already directly passed to the diff tool).
            dir2, fns_and_mtime = snapshot(ui, repo, modadd, None, tmproot)
        else:
            # This lets the diff tool open the changed file directly
            dir2 = ''
            dir2root = repo.root
        label1a = rev1a
        label1b = rev1b
        label2 = rev2
        # If only one change, diff the files instead of the directories
        # Handle bogus modifies correctly by checking if the files exist
        if len(common) == 1:
            common_file = util.localpath(common.pop())
            dir1a = os.path.join(tmproot, dir1a, common_file)
            label1a = common_file + rev1a
            if not os.path.isfile(dir1a):
                dir1a = os.devnull
            if do3way:
                dir1b = os.path.join(tmproot, dir1b, common_file)
                label1b = common_file + rev1b
                if not os.path.isfile(dir1b):
                    dir1b = os.devnull
            dir2 = os.path.join(dir2root, dir2, common_file)
            label2 = common_file + rev2
        # Function to quote file/dir names in the argument string.
        # When not operating in 3-way mode, an empty string is
        # returned for parent2
        replace = dict(parent=dir1a, parent1=dir1a, parent2=dir1b,
                       plabel1=label1a, plabel2=label1b,
                       clabel=label2, child=dir2,
                       root=repo.root)
        def quote(match):
            key = match.group()[1:]
            if not do3way and key == 'parent2':
                return ''
            return util.shellquote(replace[key])
        # Match parent2 first, so 'parent1?' will match both parent1 and parent
        regex = '\$(parent2|parent1?|child|plabel1|plabel2|clabel|root)'
        if not do3way and not re.search(regex, args):
            args += ' $parent1 $child'
        args = re.sub(regex, quote, args)
        cmdline = util.shellquote(diffcmd) + ' ' + args
        ui.debug('running %r in %s\n' % (cmdline, tmproot))
        util.system(cmdline, cwd=tmproot, out=ui.fout)
        for copy_fn, working_fn, mtime in fns_and_mtime:
            # Copy back files the user edited inside the diff tool.
            if os.lstat(copy_fn).st_mtime != mtime:
                ui.debug('file changed while diffing. '
                         'Overwriting: %s (src: %s)\n' % (working_fn, copy_fn))
                util.copyfile(copy_fn, working_fn)
        return 1
    finally:
        ui.note(_('cleaning up temp directory\n'))
        shutil.rmtree(tmproot)
def extdiff(ui, repo, *pats, **opts):
    '''use external program to diff repository (or selected files)
    Show differences between revisions for the specified files, using
    an external program. The default program used is diff, with
    default options "-Npru".
    To select a different program, use the -p/--program option. The
    program will be passed the names of two directories to compare. To
    pass additional options to the program, use -o/--option. These
    will be passed before the names of the directories to compare.
    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.'''
    # Resolve the tool to run: an explicitly requested program keeps the
    # caller's options untouched, while the GNU diff fallback also gets
    # the stock "-Npru" options unless the user supplied some.
    program = opts.get('program')
    option = opts.get('option')
    if not program:
        program, option = 'diff', option or ['-Npru']
    return dodiff(ui, repo, program, option, pats, opts)
# Command table consumed by Mercurial's extension loader: maps the command
# name to a (function, option list, synopsis) tuple.  uisetup() appends one
# extra entry per configured [extdiff] tool at load time.
cmdtable = {
    "extdiff":
    (extdiff,
     [('p', 'program', '',
       _('comparison program to run'), _('CMD')),
      ('o', 'option', [],
       _('pass option to comparison program'), _('OPT')),
      ('r', 'rev', [],
       _('revision'), _('REV')),
      ('c', 'change', '',
       _('change made by revision'), _('REV')),
     ] + commands.walkopts,
     _('hg extdiff [OPT]... [FILE]...')),
    }
def uisetup(ui):
    """Register one synthesized hg command per [extdiff] config entry."""
    for cmd, path in ui.configitems('extdiff'):
        if cmd.startswith('cmd.'):
            # Old-style config: cmd.NAME = program, opts.NAME = options.
            cmd = cmd[4:]
            if not path:
                path = cmd
            diffopts = ui.config('extdiff', 'opts.' + cmd, '')
            diffopts = diffopts and [diffopts] or []
        elif cmd.startswith('opts.'):
            # Handled together with the matching cmd. entry above.
            continue
        else:
            # command = path opts
            if path:
                diffopts = shlex.split(path)
                path = diffopts.pop(0)
            else:
                path, diffopts = cmd, []
        # look for diff arguments in [diff-tools] then [merge-tools]
        if diffopts == []:
            args = ui.config('diff-tools', cmd+'.diffargs') or \
                   ui.config('merge-tools', cmd+'.diffargs')
            if args:
                diffopts = shlex.split(args)
        def save(cmd, path, diffopts):
            '''use closure to save diff command to use'''
            # The closure also avoids the late-binding pitfall of reusing
            # the loop variables directly in mydiff.
            def mydiff(ui, repo, *pats, **opts):
                return dodiff(ui, repo, path, diffopts + opts['option'],
                              pats, opts)
            doc = _('''\
use %(path)s to diff repository (or selected files)
    Show differences between revisions for the specified files, using
    the %(path)s program.
    When two revision arguments are given, then changes are shown
    between those revisions. If only one revision is specified then
    that revision is compared to the working directory, and, when no
    revisions are specified, the working directory files are compared
    to its parent.\
''') % dict(path=util.uirepr(path))
            # We must translate the docstring right away since it is
            # used as a format string. The string will unfortunately
            # be translated again in commands.helpcmd and this will
            # fail when the docstring contains non-ASCII characters.
            # Decoding the string to a Unicode string here (using the
            # right encoding) prevents that.
            mydiff.__doc__ = doc.decode(encoding.encoding)
            return mydiff
        cmdtable[cmd] = (save(cmd, path, diffopts),
                         cmdtable['extdiff'][1][1:],
                         _('hg %s [OPTION]... [FILE]...') % cmd)
|
[
"mercurial.node.short",
"shlex.split",
"mercurial.scmutil.revsingle",
"mercurial.scmutil.match",
"mercurial.scmutil.revpair",
"mercurial.util.uirepr",
"re.search",
"mercurial.util.setflags",
"os.mkdir",
"mercurial.util.Abort",
"mercurial.i18n._",
"os.path.isfile",
"tempfile.mkdtemp",
"mercurial.util.copyfile",
"os.lstat",
"re.sub",
"mercurial.util.system",
"mercurial.scmutil.opener",
"mercurial.util.shellquote",
"os.path.join",
"os.path.basename",
"shutil.rmtree",
"mercurial.util.pconvert"
] |
[((2637, 2664), 'os.path.basename', 'os.path.basename', (['repo.root'], {}), '(repo.root)\n', (2653, 2664), False, 'import os, shlex, shutil, tempfile, re\n'), ((2799, 2829), 'os.path.join', 'os.path.join', (['tmproot', 'dirname'], {}), '(tmproot, dirname)\n', (2811, 2829), False, 'import os, shlex, shutil, tempfile, re\n'), ((2834, 2848), 'os.mkdir', 'os.mkdir', (['base'], {}), '(base)\n', (2842, 2848), False, 'import os, shlex, shutil, tempfile, re\n'), ((3108, 3128), 'mercurial.scmutil.opener', 'scmutil.opener', (['base'], {}), '(base)\n', (3122, 3128), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((4876, 4914), 'mercurial.scmutil.match', 'scmutil.match', (['repo[node2]', 'pats', 'opts'], {}), '(repo[node2], pats, opts)\n', (4889, 4914), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((5276, 5311), 'tempfile.mkdtemp', 'tempfile.mkdtemp', ([], {'prefix': '"""extdiff."""'}), "(prefix='extdiff.')\n", (5292, 5311), False, 'import os, shlex, shutil, tempfile, re\n'), ((3208, 3225), 'mercurial.util.pconvert', 'util.pconvert', (['fn'], {}), '(fn)\n', (3221, 3225), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((3379, 3402), 'os.path.join', 'os.path.join', (['base', 'wfn'], {}), '(base, wfn)\n', (3391, 3402), False, 'import os, shlex, shutil, tempfile, re\n'), ((4351, 4406), 'mercurial.i18n._', '_', (['"""cannot specify --rev and --change at the same time"""'], {}), "('cannot specify --rev and --change at the same time')\n", (4352, 4406), False, 'from mercurial.i18n import _\n'), ((4421, 4436), 'mercurial.util.Abort', 'util.Abort', (['msg'], {}), '(msg)\n', (4431, 4436), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((8234, 8260), 're.sub', 're.sub', (['regex', 'quote', 'args'], {}), '(regex, quote, args)\n', (8240, 8260), False, 'import os, shlex, shutil, tempfile, re\n'), ((8386, 8432), 'mercurial.util.system', 'util.system', (['cmdline'], 
{'cwd': 'tmproot', 'out': 'ui.fout'}), '(cmdline, cwd=tmproot, out=ui.fout)\n', (8397, 8432), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((8820, 8842), 'shutil.rmtree', 'shutil.rmtree', (['tmproot'], {}), '(tmproot)\n', (8833, 8842), False, 'import os, shlex, shutil, tempfile, re\n'), ((10255, 10289), 'mercurial.i18n._', '_', (['"""hg extdiff [OPT]... [FILE]..."""'], {}), "('hg extdiff [OPT]... [FILE]...')\n", (10256, 10289), False, 'from mercurial.i18n import _\n'), ((4604, 4631), 'mercurial.scmutil.revpair', 'scmutil.revpair', (['repo', 'revs'], {}), '(repo, revs)\n', (4619, 4631), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((6930, 6971), 'os.path.join', 'os.path.join', (['tmproot', 'dir1a', 'common_file'], {}), '(tmproot, dir1a, common_file)\n', (6942, 6971), False, 'import os, shlex, shutil, tempfile, re\n'), ((7330, 7371), 'os.path.join', 'os.path.join', (['dir2root', 'dir2', 'common_file'], {}), '(dir2root, dir2, common_file)\n', (7342, 7371), False, 'import os, shlex, shutil, tempfile, re\n'), ((7942, 7971), 'mercurial.util.shellquote', 'util.shellquote', (['replace[key]'], {}), '(replace[key])\n', (7957, 7971), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((8777, 8810), 'mercurial.i18n._', '_', (['"""cleaning up temp directory\n"""'], {}), "('cleaning up temp directory\\n')\n", (8778, 8810), False, 'from mercurial.i18n import _\n'), ((2775, 2786), 'mercurial.node.short', 'short', (['node'], {}), '(node)\n', (2780, 2786), False, 'from mercurial.node import short, nullid\n'), ((2890, 2936), 'mercurial.i18n._', '_', (['"""making snapshot of %d files from rev %s\n"""'], {}), "('making snapshot of %d files from rev %s\\n')\n", (2891, 2936), False, 'from mercurial.i18n import _\n'), ((3008, 3065), 'mercurial.i18n._', '_', (['"""making snapshot of %d files from working directory\n"""'], {}), "('making snapshot of %d files from working directory\\n')\n", (3009, 
3065), False, 'from mercurial.i18n import _\n'), ((3650, 3682), 'mercurial.util.setflags', 'util.setflags', (['dest', '(False)', '(True)'], {}), '(dest, False, True)\n', (3663, 3682), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((7033, 7054), 'os.path.isfile', 'os.path.isfile', (['dir1a'], {}), '(dir1a)\n', (7047, 7054), False, 'import os, shlex, shutil, tempfile, re\n'), ((7138, 7179), 'os.path.join', 'os.path.join', (['tmproot', 'dir1b', 'common_file'], {}), '(tmproot, dir1b, common_file)\n', (7150, 7179), False, 'import os, shlex, shutil, tempfile, re\n'), ((8156, 8178), 're.search', 're.search', (['regex', 'args'], {}), '(regex, args)\n', (8165, 8178), False, 'import os, shlex, shutil, tempfile, re\n'), ((8279, 8303), 'mercurial.util.shellquote', 'util.shellquote', (['diffcmd'], {}), '(diffcmd)\n', (8294, 8303), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((8695, 8729), 'mercurial.util.copyfile', 'util.copyfile', (['copy_fn', 'working_fn'], {}), '(copy_fn, working_fn)\n', (8708, 8729), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((11134, 11151), 'shlex.split', 'shlex.split', (['args'], {}), '(args)\n', (11145, 11151), False, 'import os, shlex, shutil, tempfile, re\n'), ((11431, 11886), 'mercurial.i18n._', '_', (['"""use %(path)s to diff repository (or selected files)\n\n Show differences between revisions for the specified files, using\n the %(path)s program.\n\n When two revision arguments are given, then changes are shown\n between those revisions. 
If only one revision is specified then\n that revision is compared to the working directory, and, when no\n revisions are specified, the working directory files are compared\n to its parent."""'], {}), '("""use %(path)s to diff repository (or selected files)\n\n Show differences between revisions for the specified files, using\n the %(path)s program.\n\n When two revision arguments are given, then changes are shown\n between those revisions. If only one revision is specified then\n that revision is compared to the working directory, and, when no\n revisions are specified, the working directory files are compared\n to its parent."""\n )\n', (11432, 11886), False, 'from mercurial.i18n import _\n'), ((12523, 12555), 'mercurial.i18n._', '_', (['"""hg %s [OPTION]... [FILE]..."""'], {}), "('hg %s [OPTION]... [FILE]...')\n", (12524, 12555), False, 'from mercurial.i18n import _\n'), ((2968, 2979), 'mercurial.node.short', 'short', (['node'], {}), '(node)\n', (2973, 2979), False, 'from mercurial.node import short, nullid\n'), ((4470, 4507), 'mercurial.scmutil.revsingle', 'scmutil.revsingle', (['repo', 'change', 'None'], {}), '(repo, change, None)\n', (4487, 4507), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n'), ((7249, 7270), 'os.path.isfile', 'os.path.isfile', (['dir1b'], {}), '(dir1b)\n', (7263, 7270), False, 'import os, shlex, shutil, tempfile, re\n'), ((8506, 8523), 'os.lstat', 'os.lstat', (['copy_fn'], {}), '(copy_fn)\n', (8514, 8523), False, 'import os, shlex, shutil, tempfile, re\n'), ((9965, 9995), 'mercurial.i18n._', '_', (['"""comparison program to run"""'], {}), "('comparison program to run')\n", (9966, 9995), False, 'from mercurial.i18n import _\n'), ((9997, 10005), 'mercurial.i18n._', '_', (['"""CMD"""'], {}), "('CMD')\n", (9998, 10005), False, 'from mercurial.i18n import _\n'), ((10041, 10079), 'mercurial.i18n._', '_', (['"""pass option to comparison program"""'], {}), "('pass option to comparison program')\n", (10042, 10079), 
False, 'from mercurial.i18n import _\n'), ((10081, 10089), 'mercurial.i18n._', '_', (['"""OPT"""'], {}), "('OPT')\n", (10082, 10089), False, 'from mercurial.i18n import _\n'), ((10122, 10135), 'mercurial.i18n._', '_', (['"""revision"""'], {}), "('revision')\n", (10123, 10135), False, 'from mercurial.i18n import _\n'), ((10137, 10145), 'mercurial.i18n._', '_', (['"""REV"""'], {}), "('REV')\n", (10138, 10145), False, 'from mercurial.i18n import _\n'), ((10181, 10209), 'mercurial.i18n._', '_', (['"""change made by revision"""'], {}), "('change made by revision')\n", (10182, 10209), False, 'from mercurial.i18n import _\n'), ((10211, 10219), 'mercurial.i18n._', '_', (['"""REV"""'], {}), "('REV')\n", (10212, 10219), False, 'from mercurial.i18n import _\n'), ((10748, 10765), 'shlex.split', 'shlex.split', (['path'], {}), '(path)\n', (10759, 10765), False, 'import os, shlex, shutil, tempfile, re\n'), ((3798, 3812), 'os.lstat', 'os.lstat', (['dest'], {}), '(dest)\n', (3806, 3812), False, 'import os, shlex, shutil, tempfile, re\n'), ((11898, 11915), 'mercurial.util.uirepr', 'util.uirepr', (['path'], {}), '(path)\n', (11909, 11915), False, 'from mercurial import scmutil, scmutil, util, commands, encoding\n')]
|
# encoding: utf-8
import datetime
import numpy as np
import pandas as pd
def get_next_period_day(current, period, n=1, extra_offset=0):
    """Return the integer date (%Y%m%d) lying n periods after `current`.

    Parameters
    ----------
    current : int
        Starting date in %Y%m%d form.
    period : str
        One of {'day', 'week', 'month'}: next business day, next Monday,
        or first business day of the next month, respectively.
    n : int
        Number of periods to advance.
    extra_offset : int
        Additional business days appended after the period jump.

    Returns
    -------
    int
    """
    offset_factories = {
        'day': pd.tseries.offsets.BDay,                        # next business day
        'week': lambda: pd.tseries.offsets.Week(weekday=0),   # next Monday
        'month': pd.tseries.offsets.BMonthBegin,              # first b-day of next month
    }
    if period not in offset_factories:
        raise NotImplementedError("Frequency as {} not support".format(period))
    step = offset_factories[period]() * n
    target = convert_int_to_datetime(current) + step
    if extra_offset:
        target = target + extra_offset * pd.tseries.offsets.BDay()
    return convert_datetime_to_int(target)
def convert_int_to_datetime(dt):
    """Convert int date (%Y%m%d) to datetime.datetime object.

    Accepts a scalar int, a pandas Series of ints, or anything that
    ``pd.to_datetime`` already parses with the "%Y%m%d" format (e.g. strings).
    """
    if isinstance(dt, pd.Series):
        as_text = dt.astype(str)
    elif isinstance(dt, int):
        as_text = str(dt)
    else:
        as_text = dt
    return pd.to_datetime(as_text, format="%Y%m%d")
def convert_datetime_to_int(dt):
    """Convert date-like scalar(s) to int date(s) in %Y%m%d form.

    Scalars (datetime, date, numpy datetime64) yield a single int; anything
    else is wrapped in a pandas Series and converted element-wise.
    """
    def pack(ts):
        # YYYY * 10000 + MM * 100 + DD
        return ts.year * 10000 + ts.month * 100 + ts.day

    if isinstance(dt, (datetime.datetime, datetime.date, np.datetime64)):
        return pack(pd.Timestamp(dt))
    return pd.Series(dt).apply(pack)
def shift(date, n_weeks=0):
    """Shift date backward or forward for n weeks.
    Parameters
    ----------
    date : int or datetime
        The date to be shifted.
    n_weeks : int, optional
        Positive for increasing date, negative for decreasing date.
        Default 0 (no shift).
    Returns
    -------
    res : int or datetime
    """
    # Remember the input representation so we can return the same kind.
    came_as_int = isinstance(date, (int, np.integer))
    as_datetime = convert_int_to_datetime(date) if came_as_int else date
    shifted = as_datetime + pd.Timedelta(weeks=n_weeks)
    return convert_datetime_to_int(shifted) if came_as_int else shifted
def combine_date_time(date, time):
    """Pack an int date (%Y%m%d) and an int time (%H%M%S) into one int64.

    The date occupies the upper digits; the time the lower six digits.
    """
    date64 = np.int64(date)
    time64 = np.int64(time)
    return date64 * 1000000 + time64
def split_date_time(dt):
    """Split a combined int datetime (as built by combine_date_time)
    back into its (date, time) components."""
    date, time = divmod(dt, 1000000)
    return date, time
def date_to_month(ser):
    """Extract the month number (1-12) from int date(s) in %Y%m%d format.

    Pure integer arithmetic, so it works on plain ints as well as on
    pandas Series / numpy arrays of ints.

    Parameters
    ----------
    ser : int, pd.Series or np.ndarray

    Returns
    -------
    int, pd.Series or np.ndarray
        Month component of the input date(s).
    """
    # Dead code removed: an unused MONTH_MAP dict (month number -> English
    # abbreviation) and its commented-out replace() call served no purpose.
    return ser % 10000 // 100
def date_to_year(ser):
    """Extract the year from int date(s) in %Y%m%d format.

    Works on scalars and on pandas/numpy integer containers alike.
    """
    year = ser // 10000
    return year
|
[
"pandas.Series",
"numpy.int64",
"pandas.Timedelta",
"pandas.tseries.offsets.BMonthBegin",
"pandas.tseries.offsets.Week",
"pandas.tseries.offsets.BDay",
"pandas.Timestamp",
"pandas.to_datetime"
] |
[((1491, 1526), 'pandas.to_datetime', 'pd.to_datetime', (['dt'], {'format': '"""%Y%m%d"""'}), "(dt, format='%Y%m%d')\n", (1505, 1526), True, 'import pandas as pd\n'), ((2277, 2304), 'pandas.Timedelta', 'pd.Timedelta', ([], {'weeks': 'n_weeks'}), '(weeks=n_weeks)\n', (2289, 2304), True, 'import pandas as pd\n'), ((618, 643), 'pandas.tseries.offsets.BDay', 'pd.tseries.offsets.BDay', ([], {}), '()\n', (641, 643), True, 'import pandas as pd\n'), ((1691, 1707), 'pandas.Timestamp', 'pd.Timestamp', (['dt'], {}), '(dt)\n', (1703, 1707), True, 'import pandas as pd\n'), ((2614, 2628), 'numpy.int64', 'np.int64', (['time'], {}), '(time)\n', (2622, 2628), True, 'import numpy as np\n'), ((748, 782), 'pandas.tseries.offsets.Week', 'pd.tseries.offsets.Week', ([], {'weekday': '(0)'}), '(weekday=0)\n', (771, 782), True, 'import pandas as pd\n'), ((1781, 1797), 'pandas.Timestamp', 'pd.Timestamp', (['dt'], {}), '(dt)\n', (1793, 1797), True, 'import pandas as pd\n'), ((1841, 1854), 'pandas.Series', 'pd.Series', (['dt'], {}), '(dt)\n', (1850, 1854), True, 'import pandas as pd\n'), ((2587, 2601), 'numpy.int64', 'np.int64', (['date'], {}), '(date)\n', (2595, 2601), True, 'import numpy as np\n'), ((851, 883), 'pandas.tseries.offsets.BMonthBegin', 'pd.tseries.offsets.BMonthBegin', ([], {}), '()\n', (881, 883), True, 'import pandas as pd\n'), ((1183, 1208), 'pandas.tseries.offsets.BDay', 'pd.tseries.offsets.BDay', ([], {}), '()\n', (1206, 1208), True, 'import pandas as pd\n')]
|
import flask
from flask import request
import flask_restful as restful
from marshmallow import Schema, fields, validate
from api.helpers import success, created
from api.exceptions import NotFound
from sensors.ds18b20 import lookup
class DS18B20Query (restful.Resource):
    """Flask-RESTful resource exposing the DS18B20 sensors found by ``lookup``."""

    def __init__(self, *args, **kwargs):
        # The sensor service is injected by the API wiring via resource kwargs.
        self.sensor_service = kwargs['sensor_service']

    def get(self):
        """Return a success response listing the sensors available
        under the current sensor-service configuration."""
        config = self.sensor_service.get_config()
        return success(lookup(config))
|
[
"api.helpers.success"
] |
[((465, 483), 'api.helpers.success', 'success', (['available'], {}), '(available)\n', (472, 483), False, 'from api.helpers import success, created\n')]
|
import deepchem as dc
import numpy as np
import tensorflow as tf
import deepchem.models.tensorgraph.layers as layers
from tensorflow.python.eager import context
from tensorflow.python.framework import test_util
class TestLayersEager(test_util.TensorFlowTestCase):
  """
  Test that layers function in eager mode.

  Every test follows the same recipe: enter ``context.eager_mode()``, build
  random input arrays, invoke the layer under test, and check the output
  shape (and, where the layer has weights, the number of trainable
  variables).  Layers with random weights are additionally checked for
  determinism (calling the same layer twice gives the same result) and for
  independent initialization (a freshly created layer gives a different
  result on the same input).
  """
  def test_conv_1d(self):
    """Test invoking Conv1D in eager mode."""
    with context.eager_mode():
      width = 5
      in_channels = 2
      filters = 3
      kernel_size = 2
      batch_size = 10
      input = np.random.rand(batch_size, width, in_channels).astype(np.float32)
      layer = layers.Conv1D(filters, kernel_size)
      result = layer(input)
      self.assertEqual(result.shape[0], batch_size)
      self.assertEqual(result.shape[2], filters)
      assert len(layer.trainable_variables) == 2
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.Conv1D(filters, kernel_size)
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_dense(self):
    """Test invoking Dense in eager mode."""
    with context.eager_mode():
      in_dim = 2
      out_dim = 3
      batch_size = 10
      input = np.random.rand(batch_size, in_dim).astype(np.float32)
      layer = layers.Dense(out_dim)
      result = layer(input)
      assert result.shape == (batch_size, out_dim)
      assert len(layer.trainable_variables) == 2
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.Dense(out_dim)
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_highway(self):
    """Test invoking Highway in eager mode."""
    with context.eager_mode():
      width = 5
      batch_size = 10
      input = np.random.rand(batch_size, width).astype(np.float32)
      layer = layers.Highway()
      result = layer(input)
      assert result.shape == (batch_size, width)
      assert len(layer.trainable_variables) == 4
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.Highway()
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_flatten(self):
    """Test invoking Flatten in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10, 4).astype(np.float32)
      result = layers.Flatten()(input)
      assert result.shape == (5, 40)
  def test_reshape(self):
    """Test invoking Reshape in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10, 4).astype(np.float32)
      result = layers.Reshape((100, 2))(input)
      assert result.shape == (100, 2)
  def test_cast(self):
    """Test invoking Cast in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 3)
      result = layers.Cast(dtype=tf.float32)(input)
      assert result.dtype == tf.float32
  def test_squeeze(self):
    """Test invoking Squeeze in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 1, 4).astype(np.float32)
      result = layers.Squeeze()(input)
      assert result.shape == (5, 4)
  def test_transpose(self):
    """Test invoking Transpose in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10, 4).astype(np.float32)
      result = layers.Transpose((1, 2, 0))(input)
      assert result.shape == (10, 4, 5)
  def test_combine_mean_std(self):
    """Test invoking CombineMeanStd in eager mode."""
    with context.eager_mode():
      mean = np.random.rand(5, 3).astype(np.float32)
      std = np.random.rand(5, 3).astype(np.float32)
      layer = layers.CombineMeanStd(training_only=True, noise_epsilon=0.01)
      result1 = layer(mean, std, training=False)
      assert np.array_equal(result1, mean)  # No noise in test mode
      result2 = layer(mean, std, training=True)
      assert not np.array_equal(result2, mean)
      assert np.allclose(result2, mean, atol=0.1)
  def test_repeat(self):
    """Test invoking Repeat in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 4).astype(np.float32)
      result = layers.Repeat(3)(input)
      assert result.shape == (5, 3, 4)
      assert np.array_equal(result[:, 0, :], result[:, 1, :])
  def test_gather(self):
    """Test invoking Gather in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5).astype(np.float32)
      indices = [[1], [3]]
      result = layers.Gather()(input, indices)
      assert np.array_equal(result, [input[1], input[3]])
  def test_gru(self):
    """Test invoking GRU in eager mode."""
    with context.eager_mode():
      batch_size = 10
      n_hidden = 7
      in_channels = 4
      n_steps = 6
      input = np.random.rand(batch_size, n_steps,
                             in_channels).astype(np.float32)
      layer = layers.GRU(n_hidden, batch_size)
      result, state = layer(input)
      assert result.shape == (batch_size, n_steps, n_hidden)
      assert len(layer.trainable_variables) == 3
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.GRU(n_hidden, batch_size)
      result2, state2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3, state3 = layer(input)
      assert np.allclose(result, result3)
      # But if we specify a different starting state, that should produce a
      # different result.
      result4, state4 = layer(input, initial_state=state3)
      assert not np.allclose(result, result4)
  def test_lstm(self):
    """Test invoking LSTM in eager mode."""
    with context.eager_mode():
      batch_size = 10
      n_hidden = 7
      in_channels = 4
      n_steps = 6
      input = np.random.rand(batch_size, n_steps,
                             in_channels).astype(np.float32)
      layer = layers.LSTM(n_hidden, batch_size)
      result, state = layer(input)
      assert result.shape == (batch_size, n_steps, n_hidden)
      assert len(layer.trainable_variables) == 3
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.LSTM(n_hidden, batch_size)
      result2, state2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3, state3 = layer(input)
      assert np.allclose(result, result3)
      # But if we specify a different starting state, that should produce a
      # different result.
      result4, state4 = layer(input, initial_state=state3)
      assert not np.allclose(result, result4)
  def test_time_series_dense(self):
    """Test invoking TimeSeriesDense in eager mode."""
    with context.eager_mode():
      in_dim = 2
      out_dim = 3
      n_steps = 6
      batch_size = 10
      input = np.random.rand(batch_size, n_steps, in_dim).astype(np.float32)
      layer = layers.TimeSeriesDense(out_dim)
      result = layer(input)
      assert result.shape == (batch_size, n_steps, out_dim)
      assert len(layer.trainable_variables) == 2
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.TimeSeriesDense(out_dim)
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_l1_loss(self):
    """Test invoking L1Loss in eager mode."""
    with context.eager_mode():
      input1 = np.random.rand(5, 10).astype(np.float32)
      input2 = np.random.rand(5, 10).astype(np.float32)
      result = layers.L1Loss()(input1, input2)
      expected = np.mean(np.abs(input1 - input2), axis=1)
      assert np.allclose(result, expected)
  def test_l2_loss(self):
    """Test invoking L2Loss in eager mode."""
    with context.eager_mode():
      input1 = np.random.rand(5, 10).astype(np.float32)
      input2 = np.random.rand(5, 10).astype(np.float32)
      result = layers.L2Loss()(input1, input2)
      expected = np.mean((input1 - input2)**2, axis=1)
      assert np.allclose(result, expected)
  def test_softmax(self):
    """Test invoking SoftMax in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10).astype(np.float32)
      result = layers.SoftMax()(input)
      expected = tf.nn.softmax(input)
      assert np.allclose(result, expected)
  def test_sigmoid(self):
    """Test invoking Sigmoid in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10).astype(np.float32)
      result = layers.Sigmoid()(input)
      expected = tf.nn.sigmoid(input)
      assert np.allclose(result, expected)
  def test_relu(self):
    """Test invoking ReLU in eager mode."""
    with context.eager_mode():
      input = np.random.normal(size=(5, 10)).astype(np.float32)
      result = layers.ReLU()(input)
      expected = tf.nn.relu(input)
      assert np.allclose(result, expected)
  def test_concat(self):
    """Test invoking Concat in eager mode."""
    with context.eager_mode():
      input1 = np.random.rand(5, 10).astype(np.float32)
      input2 = np.random.rand(5, 4).astype(np.float32)
      result = layers.Concat()(input1, input2)
      assert result.shape == (5, 14)
      assert np.array_equal(input1, result[:, :10])
      assert np.array_equal(input2, result[:, 10:])
  def test_stack(self):
    """Test invoking Stack in eager mode."""
    with context.eager_mode():
      input1 = np.random.rand(5, 4).astype(np.float32)
      input2 = np.random.rand(5, 4).astype(np.float32)
      result = layers.Stack()(input1, input2)
      assert result.shape == (5, 2, 4)
      assert np.array_equal(input1, result[:, 0, :])
      assert np.array_equal(input2, result[:, 1, :])
  def test_constant(self):
    """Test invoking Constant in eager mode."""
    with context.eager_mode():
      value = np.random.rand(5, 4).astype(np.float32)
      result = layers.Constant(value)()
      assert np.array_equal(result, value)
  def test_variable(self):
    """Test invoking Variable in eager mode."""
    with context.eager_mode():
      value = np.random.rand(5, 4).astype(np.float32)
      layer = layers.Variable(value)
      result = layer()
      assert np.array_equal(result.numpy(), value)
      assert len(layer.trainable_variables) == 1
  def test_add(self):
    """Test invoking Add in eager mode."""
    with context.eager_mode():
      result = layers.Add()([1, 2], [3, 4])
      assert np.array_equal(result, [4, 6])
  def test_multiply(self):
    """Test invoking Multiply in eager mode."""
    with context.eager_mode():
      result = layers.Multiply()([1, 2], [3, 4])
      assert np.array_equal(result, [3, 8])
  def test_divide(self):
    """Test invoking Divide in eager mode."""
    with context.eager_mode():
      result = layers.Divide()([1, 2], [2, 5])
      assert np.allclose(result, [0.5, 0.4])
  def test_log(self):
    """Test invoking Log in eager mode."""
    with context.eager_mode():
      result = layers.Log()(2.5)
      assert np.allclose(result, np.log(2.5))
  def test_exp(self):
    """Test invoking Exp in eager mode."""
    with context.eager_mode():
      result = layers.Exp()(2.5)
      assert np.allclose(result, np.exp(2.5))
  def test_interatomic_l2_distances(self):
    """Test invoking InteratomicL2Distances in eager mode."""
    with context.eager_mode():
      atoms = 5
      neighbors = 2
      coords = np.random.rand(atoms, 3)
      neighbor_list = np.random.randint(0, atoms, size=(atoms, neighbors))
      layer = layers.InteratomicL2Distances(atoms, neighbors, 3)
      result = layer(coords, neighbor_list)
      assert result.shape == (atoms, neighbors)
      for atom in range(atoms):
        for neighbor in range(neighbors):
          delta = coords[atom] - coords[neighbor_list[atom, neighbor]]
          dist2 = np.dot(delta, delta)
          assert np.allclose(dist2, result[atom, neighbor])
  def test_sparse_softmax_cross_entropy(self):
    """Test invoking SparseSoftMaxCrossEntropy in eager mode."""
    with context.eager_mode():
      batch_size = 10
      n_features = 5
      logits = np.random.rand(batch_size, n_features).astype(np.float32)
      labels = np.random.rand(batch_size).astype(np.int32)
      result = layers.SparseSoftMaxCrossEntropy()(labels, logits)
      expected = tf.nn.sparse_softmax_cross_entropy_with_logits(
          labels=labels, logits=logits)
      assert np.allclose(result, expected)
  def test_softmax_cross_entropy(self):
    """Test invoking SoftMaxCrossEntropy in eager mode."""
    with context.eager_mode():
      batch_size = 10
      n_features = 5
      logits = np.random.rand(batch_size, n_features).astype(np.float32)
      labels = np.random.rand(batch_size, n_features).astype(np.float32)
      result = layers.SoftMaxCrossEntropy()(labels, logits)
      expected = tf.nn.softmax_cross_entropy_with_logits_v2(
          labels=labels, logits=logits)
      assert np.allclose(result, expected)
  def test_sigmoid_cross_entropy(self):
    """Test invoking SigmoidCrossEntropy in eager mode."""
    with context.eager_mode():
      batch_size = 10
      n_features = 5
      logits = np.random.rand(batch_size, n_features).astype(np.float32)
      labels = np.random.randint(0, 2,
                                 (batch_size, n_features)).astype(np.float32)
      result = layers.SigmoidCrossEntropy()(labels, logits)
      expected = tf.nn.sigmoid_cross_entropy_with_logits(
          labels=labels, logits=logits)
      assert np.allclose(result, expected)
  def test_reduce_mean(self):
    """Test invoking ReduceMean in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10).astype(np.float32)
      result = layers.ReduceMean(axis=1)(input)
      assert result.shape == (5,)
      assert np.allclose(result, np.mean(input, axis=1))
  def test_reduce_max(self):
    """Test invoking ReduceMax in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10).astype(np.float32)
      result = layers.ReduceMax(axis=1)(input)
      assert result.shape == (5,)
      assert np.allclose(result, np.max(input, axis=1))
  def test_reduce_sum(self):
    """Test invoking ReduceSum in eager mode."""
    with context.eager_mode():
      input = np.random.rand(5, 10).astype(np.float32)
      result = layers.ReduceSum(axis=1)(input)
      assert result.shape == (5,)
      assert np.allclose(result, np.sum(input, axis=1))
  def test_reduce_square_difference(self):
    """Test invoking ReduceSquareDifference in eager mode."""
    with context.eager_mode():
      input1 = np.random.rand(5, 10).astype(np.float32)
      input2 = np.random.rand(5, 10).astype(np.float32)
      result = layers.ReduceSquareDifference(axis=1)(input1, input2)
      assert result.shape == (5,)
      assert np.allclose(result, np.mean((input1 - input2)**2, axis=1))
  def test_conv_2d(self):
    """Test invoking Conv2D in eager mode."""
    with context.eager_mode():
      length = 4
      width = 5
      in_channels = 2
      filters = 3
      kernel_size = 2
      batch_size = 10
      input = np.random.rand(batch_size, length, width,
                             in_channels).astype(np.float32)
      layer = layers.Conv2D(filters, kernel_size=kernel_size)
      result = layer(input)
      assert result.shape == (batch_size, length, width, filters)
      assert len(layer.trainable_variables) == 2
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.Conv2D(filters, kernel_size=kernel_size)
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_conv_3d(self):
    """Test invoking Conv3D in eager mode."""
    with context.eager_mode():
      length = 4
      width = 5
      depth = 6
      in_channels = 2
      filters = 3
      kernel_size = 2
      batch_size = 10
      input = np.random.rand(batch_size, length, width, depth,
                             in_channels).astype(np.float32)
      layer = layers.Conv3D(filters, kernel_size=kernel_size)
      result = layer(input)
      assert result.shape == (batch_size, length, width, depth, filters)
      assert len(layer.trainable_variables) == 2
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.Conv3D(filters, kernel_size=kernel_size)
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_conv_2d_transpose(self):
    """Test invoking Conv2DTranspose in eager mode."""
    with context.eager_mode():
      length = 4
      width = 5
      in_channels = 2
      filters = 3
      kernel_size = 2
      stride = 2
      batch_size = 10
      input = np.random.rand(batch_size, length, width,
                             in_channels).astype(np.float32)
      layer = layers.Conv2DTranspose(
          filters, kernel_size=kernel_size, stride=stride)
      result = layer(input)
      assert result.shape == (batch_size, length * stride, width * stride,
                              filters)
      assert len(layer.trainable_variables) == 2
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.Conv2DTranspose(
          filters, kernel_size=kernel_size, stride=stride)
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_conv_3d_transpose(self):
    """Test invoking Conv3DTranspose in eager mode."""
    with context.eager_mode():
      length = 4
      width = 5
      depth = 6
      in_channels = 2
      filters = 3
      kernel_size = 2
      stride = 2
      batch_size = 10
      input = np.random.rand(batch_size, length, width, depth,
                             in_channels).astype(np.float32)
      layer = layers.Conv3DTranspose(
          filters, kernel_size=kernel_size, stride=stride)
      result = layer(input)
      assert result.shape == (batch_size, length * stride, width * stride,
                              depth * stride, filters)
      assert len(layer.trainable_variables) == 2
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.Conv3DTranspose(
          filters, kernel_size=kernel_size, stride=stride)
      result2 = layer2(input)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input)
      assert np.allclose(result, result3)
  def test_max_pool_1d(self):
    """Test invoking MaxPool1D in eager mode."""
    with context.eager_mode():
      input = np.random.rand(4, 6, 8).astype(np.float32)
      result = layers.MaxPool1D(strides=2)(input)
      assert result.shape == (4, 3, 8)
  def test_max_pool_2d(self):
    """Test invoking MaxPool2D in eager mode."""
    with context.eager_mode():
      input = np.random.rand(2, 4, 6, 8).astype(np.float32)
      result = layers.MaxPool2D()(input)
      assert result.shape == (2, 2, 3, 8)
  def test_max_pool_3d(self):
    """Test invoking MaxPool3D in eager mode."""
    with context.eager_mode():
      input = np.random.rand(2, 4, 6, 8, 2).astype(np.float32)
      result = layers.MaxPool3D()(input)
      assert result.shape == (2, 2, 3, 4, 2)
  def test_graph_conv(self):
    """Test invoking GraphConv in eager mode."""
    with context.eager_mode():
      out_channels = 2
      n_atoms = 4  # In CCC and C, there are 4 atoms
      raw_smiles = ['CCC', 'C']
      import rdkit
      mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles]
      featurizer = dc.feat.graph_features.ConvMolFeaturizer()
      mols = featurizer.featurize(mols)
      multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols)
      atom_features = multi_mol.get_atom_features().astype(np.float32)
      degree_slice = multi_mol.deg_slice
      membership = multi_mol.membership
      deg_adjs = multi_mol.get_deg_adjacency_lists()[1:]
      args = [atom_features, degree_slice, membership] + deg_adjs
      layer = layers.GraphConv(out_channels)
      result = layer(*args)
      assert result.shape == (n_atoms, out_channels)
      assert len(layer.trainable_variables) == 2 * layer.num_deg
  def test_graph_pool(self):
    """Test invoking GraphPool in eager mode."""
    with context.eager_mode():
      n_atoms = 4  # In CCC and C, there are 4 atoms
      raw_smiles = ['CCC', 'C']
      import rdkit
      mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles]
      featurizer = dc.feat.graph_features.ConvMolFeaturizer()
      mols = featurizer.featurize(mols)
      multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols)
      atom_features = multi_mol.get_atom_features().astype(np.float32)
      degree_slice = multi_mol.deg_slice
      membership = multi_mol.membership
      deg_adjs = multi_mol.get_deg_adjacency_lists()[1:]
      args = [atom_features, degree_slice, membership] + deg_adjs
      result = layers.GraphPool()(*args)
      assert result.shape[0] == n_atoms
      # TODO What should shape[1] be?  It's not documented.
  def test_graph_gather(self):
    """Test invoking GraphGather in eager mode."""
    with context.eager_mode():
      batch_size = 2
      n_features = 75
      n_atoms = 4  # In CCC and C, there are 4 atoms
      raw_smiles = ['CCC', 'C']
      import rdkit
      mols = [rdkit.Chem.MolFromSmiles(s) for s in raw_smiles]
      featurizer = dc.feat.graph_features.ConvMolFeaturizer()
      mols = featurizer.featurize(mols)
      multi_mol = dc.feat.mol_graphs.ConvMol.agglomerate_mols(mols)
      atom_features = multi_mol.get_atom_features().astype(np.float32)
      degree_slice = multi_mol.deg_slice
      membership = multi_mol.membership
      deg_adjs = multi_mol.get_deg_adjacency_lists()[1:]
      args = [atom_features, degree_slice, membership] + deg_adjs
      result = layers.GraphGather(batch_size)(*args)
      # TODO(rbharath): Why is it 2*n_features instead of n_features?
      assert result.shape == (batch_size, 2 * n_features)
  def test_lstm_step(self):
    """Test invoking LSTMStep in eager mode."""
    with context.eager_mode():
      max_depth = 5
      n_test = 5
      n_feat = 10
      y = np.random.rand(n_test, 2 * n_feat).astype(np.float32)
      state_zero = np.random.rand(n_test, n_feat).astype(np.float32)
      state_one = np.random.rand(n_test, n_feat).astype(np.float32)
      layer = layers.LSTMStep(n_feat, 2 * n_feat)
      result = layer(y, state_zero, state_one)
      h_out, h_copy_out, c_out = (result[0], result[1][0], result[1][1])
      assert h_out.shape == (n_test, n_feat)
      assert h_copy_out.shape == (n_test, n_feat)
      assert c_out.shape == (n_test, n_feat)
      assert len(layer.trainable_variables) == 3
  def test_attn_lstm_embedding(self):
    """Test invoking AttnLSTMEmbedding in eager mode."""
    with context.eager_mode():
      max_depth = 5
      n_test = 5
      n_support = 11
      n_feat = 10
      test = np.random.rand(n_test, n_feat).astype(np.float32)
      support = np.random.rand(n_support, n_feat).astype(np.float32)
      layer = layers.AttnLSTMEmbedding(n_test, n_support, n_feat, max_depth)
      test_out, support_out = layer(test, support)
      assert test_out.shape == (n_test, n_feat)
      assert support_out.shape == (n_support, n_feat)
      assert len(layer.trainable_variables) == 7
  def test_iter_ref_lstm_embedding(self):
    """Test invoking AttnLSTMEmbedding in eager mode."""
    with context.eager_mode():
      max_depth = 5
      n_test = 5
      n_support = 11
      n_feat = 10
      test = np.random.rand(n_test, n_feat).astype(np.float32)
      support = np.random.rand(n_support, n_feat).astype(np.float32)
      layer = layers.IterRefLSTMEmbedding(n_test, n_support, n_feat, max_depth)
      test_out, support_out = layer(test, support)
      assert test_out.shape == (n_test, n_feat)
      assert support_out.shape == (n_support, n_feat)
      assert len(layer.trainable_variables) == 12
  def test_batch_norm(self):
    """Test invoking BatchNorm in eager mode."""
    with context.eager_mode():
      batch_size = 10
      n_features = 5
      input = np.random.rand(batch_size, n_features).astype(np.float32)
      layer = layers.BatchNorm()
      result = layer(input)
      assert result.shape == (batch_size, n_features)
      assert len(layer.trainable_variables) == 2
  def test_weighted_error(self):
    """Test invoking WeightedError in eager mode."""
    with context.eager_mode():
      input1 = np.random.rand(5, 10).astype(np.float32)
      input2 = np.random.rand(5, 10).astype(np.float32)
      result = layers.WeightedError()(input1, input2)
      expected = np.sum(input1 * input2)
      assert np.allclose(result, expected)
  def test_vina_free_energy(self):
    """Test invoking VinaFreeEnergy in eager mode."""
    with context.eager_mode():
      n_atoms = 5
      m_nbrs = 1
      ndim = 3
      nbr_cutoff = 1
      start = 0
      stop = 4
      X = np.random.rand(n_atoms, ndim).astype(np.float32)
      Z = np.random.randint(0, 2, (n_atoms)).astype(np.float32)
      layer = layers.VinaFreeEnergy(n_atoms, m_nbrs, ndim, nbr_cutoff, start,
                                    stop)
      result = layer(X, Z)
      assert len(layer.trainable_variables) == 6
      assert result.shape == tuple()
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.VinaFreeEnergy(n_atoms, m_nbrs, ndim, nbr_cutoff, start,
                                     stop)
      result2 = layer2(X, Z)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(X, Z)
      assert np.allclose(result, result3)
  def test_weighted_linear_combo(self):
    """Test invoking WeightedLinearCombo in eager mode."""
    with context.eager_mode():
      input1 = np.random.rand(5, 10).astype(np.float32)
      input2 = np.random.rand(5, 10).astype(np.float32)
      layer = layers.WeightedLinearCombo()
      result = layer(input1, input2)
      assert len(layer.trainable_variables) == 2
      expected = input1 * layer.trainable_variables[0] + input2 * layer.trainable_variables[1]
      assert np.allclose(result, expected)
  def test_neighbor_list(self):
    """Test invoking NeighborList in eager mode."""
    with context.eager_mode():
      N_atoms = 5
      start = 0
      stop = 12
      nbr_cutoff = 3
      ndim = 3
      M_nbrs = 2
      coords = start + np.random.rand(N_atoms, ndim) * (stop - start)
      coords = tf.cast(tf.stack(coords), tf.float32)
      layer = layers.NeighborList(N_atoms, M_nbrs, ndim, nbr_cutoff, start,
                                  stop)
      result = layer(coords)
      assert result.shape == (N_atoms, M_nbrs)
  def test_dropout(self):
    """Test invoking Dropout in eager mode."""
    with context.eager_mode():
      rate = 0.5
      input = np.random.rand(5, 10).astype(np.float32)
      layer = layers.Dropout(rate)
      result1 = layer(input, training=False)
      assert np.allclose(result1, input)
      result2 = layer(input, training=True)
      assert not np.allclose(result2, input)
      nonzero = result2.numpy() != 0
      assert np.allclose(result2.numpy()[nonzero], input[nonzero] / rate)
  def test_atomic_convolution(self):
    """Test invoking AtomicConvolution in eager mode."""
    with context.eager_mode():
      batch_size = 4
      max_atoms = 5
      max_neighbors = 2
      dimensions = 3
      params = [[5.0, 2.0, 0.5], [10.0, 2.0, 0.5]]
      input1 = np.random.rand(batch_size, max_atoms,
                              dimensions).astype(np.float32)
      input2 = np.random.randint(
          max_atoms, size=(batch_size, max_atoms, max_neighbors))
      input3 = np.random.randint(
          1, 10, size=(batch_size, max_atoms, max_neighbors))
      layer = layers.AtomicConvolution(radial_params=params)
      result = layer(input1, input2, input3)
      assert result.shape == (batch_size, max_atoms, len(params))
      assert len(layer.trainable_variables) == 3
  def test_alpha_share_layer(self):
    """Test invoking AlphaShareLayer in eager mode."""
    with context.eager_mode():
      batch_size = 10
      length = 6
      input1 = np.random.rand(batch_size, length).astype(np.float32)
      input2 = np.random.rand(batch_size, length).astype(np.float32)
      layer = layers.AlphaShareLayer()
      result = layer(input1, input2)
      assert input1.shape == result[0].shape
      assert input2.shape == result[1].shape
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.AlphaShareLayer()
      result2 = layer2(input1, input2)
      assert not np.allclose(result[0], result2[0])
      assert not np.allclose(result[1], result2[1])
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input1, input2)
      assert np.allclose(result[0], result3[0])
      assert np.allclose(result[1], result3[1])
  def test_sluice_loss(self):
    """Test invoking SluiceLoss in eager mode."""
    with context.eager_mode():
      input1 = np.ones((3, 4)).astype(np.float32)
      input2 = np.ones((2, 2)).astype(np.float32)
      result = layers.SluiceLoss()(input1, input2)
      assert np.allclose(result, 40.0)
  def test_beta_share(self):
    """Test invoking BetaShare in eager mode."""
    with context.eager_mode():
      batch_size = 10
      length = 6
      input1 = np.random.rand(batch_size, length).astype(np.float32)
      input2 = np.random.rand(batch_size, length).astype(np.float32)
      layer = layers.BetaShare()
      result = layer(input1, input2)
      assert input1.shape == result.shape
      assert input2.shape == result.shape
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.BetaShare()
      result2 = layer2(input1, input2)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(input1, input2)
      assert np.allclose(result, result3)
  def test_ani_feat(self):
    """Test invoking ANIFeat in eager mode."""
    with context.eager_mode():
      batch_size = 10
      max_atoms = 5
      input = np.random.rand(batch_size, max_atoms, 4).astype(np.float32)
      layer = layers.ANIFeat(max_atoms=max_atoms)
      result = layer(input)
      # TODO What should the output shape be?  It's not documented, and there
      # are no other test cases for it.
  def test_graph_embed_pool_layer(self):
    """Test invoking GraphEmbedPoolLayer in eager mode."""
    with context.eager_mode():
      V = np.random.uniform(size=(10, 100, 50)).astype(np.float32)
      adjs = np.random.uniform(size=(10, 100, 5, 100)).astype(np.float32)
      layer = layers.GraphEmbedPoolLayer(num_vertices=6)
      result = layer(V, adjs)
      assert result[0].shape == (10, 6, 50)
      assert result[1].shape == (10, 6, 5, 6)
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.GraphEmbedPoolLayer(num_vertices=6)
      result2 = layer2(V, adjs)
      assert not np.allclose(result[0], result2[0])
      assert not np.allclose(result[1], result2[1])
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(V, adjs)
      assert np.allclose(result[0], result3[0])
      assert np.allclose(result[1], result3[1])
  def test_graph_cnn(self):
    """Test invoking GraphCNN in eager mode."""
    with context.eager_mode():
      V = np.random.uniform(size=(10, 100, 50)).astype(np.float32)
      adjs = np.random.uniform(size=(10, 100, 5, 100)).astype(np.float32)
      layer = layers.GraphCNN(num_filters=6)
      result = layer(V, adjs)
      assert result.shape == (10, 100, 6)
      # Creating a second layer should produce different results, since it has
      # different random weights.
      layer2 = layers.GraphCNN(num_filters=6)
      result2 = layer2(V, adjs)
      assert not np.allclose(result, result2)
      # But evaluating the first layer again should produce the same result as before.
      result3 = layer(V, adjs)
      assert np.allclose(result, result3)
  def test_hinge_loss(self):
    """Test invoking HingeLoss in eager mode."""
    with context.eager_mode():
      n_labels = 1
      n_logits = 1
      logits = np.random.rand(n_logits).astype(np.float32)
      labels = np.random.rand(n_labels).astype(np.float32)
      result = layers.HingeLoss()(labels, logits)
      assert result.shape == (n_labels,)
|
[
"numpy.log",
"deepchem.models.tensorgraph.layers.MaxPool1D",
"deepchem.models.tensorgraph.layers.BatchNorm",
"deepchem.models.tensorgraph.layers.Conv3D",
"deepchem.models.tensorgraph.layers.ReduceMax",
"deepchem.models.tensorgraph.layers.SoftMax",
"deepchem.models.tensorgraph.layers.Gather",
"deepchem.models.tensorgraph.layers.TimeSeriesDense",
"numpy.exp",
"deepchem.models.tensorgraph.layers.AtomicConvolution",
"deepchem.models.tensorgraph.layers.BetaShare",
"deepchem.models.tensorgraph.layers.Conv2D",
"deepchem.models.tensorgraph.layers.GraphPool",
"deepchem.models.tensorgraph.layers.Conv3DTranspose",
"deepchem.feat.graph_features.ConvMolFeaturizer",
"deepchem.models.tensorgraph.layers.Conv2DTranspose",
"deepchem.models.tensorgraph.layers.WeightedLinearCombo",
"deepchem.models.tensorgraph.layers.AlphaShareLayer",
"numpy.sum",
"numpy.random.randint",
"deepchem.models.tensorgraph.layers.Exp",
"deepchem.models.tensorgraph.layers.GraphConv",
"deepchem.models.tensorgraph.layers.Transpose",
"numpy.mean",
"deepchem.models.tensorgraph.layers.ReLU",
"deepchem.models.tensorgraph.layers.IterRefLSTMEmbedding",
"deepchem.models.tensorgraph.layers.ReduceMean",
"deepchem.models.tensorgraph.layers.SigmoidCrossEntropy",
"numpy.max",
"deepchem.models.tensorgraph.layers.MaxPool2D",
"tensorflow.nn.sigmoid",
"numpy.dot",
"deepchem.models.tensorgraph.layers.Concat",
"deepchem.models.tensorgraph.layers.GraphGather",
"numpy.random.normal",
"numpy.ones",
"deepchem.models.tensorgraph.layers.Conv1D",
"deepchem.models.tensorgraph.layers.VinaFreeEnergy",
"deepchem.models.tensorgraph.layers.Flatten",
"deepchem.models.tensorgraph.layers.Cast",
"rdkit.Chem.MolFromSmiles",
"tensorflow.nn.softmax_cross_entropy_with_logits_v2",
"deepchem.models.tensorgraph.layers.Sigmoid",
"deepchem.models.tensorgraph.layers.AttnLSTMEmbedding",
"numpy.random.rand",
"deepchem.models.tensorgraph.layers.Add",
"deepchem.models.tensorgraph.layers.Divide",
"tensorflow.nn.sparse_softmax_cross_entropy_with_logits",
"deepchem.models.tensorgraph.layers.Squeeze",
"deepchem.models.tensorgraph.layers.WeightedError",
"deepchem.models.tensorgraph.layers.GRU",
"tensorflow.nn.softmax",
"deepchem.models.tensorgraph.layers.HingeLoss",
"deepchem.models.tensorgraph.layers.LSTMStep",
"deepchem.feat.mol_graphs.ConvMol.agglomerate_mols",
"tensorflow.stack",
"deepchem.models.tensorgraph.layers.ReduceSquareDifference",
"deepchem.models.tensorgraph.layers.Reshape",
"deepchem.models.tensorgraph.layers.Variable",
"deepchem.models.tensorgraph.layers.Highway",
"deepchem.models.tensorgraph.layers.Dense",
"deepchem.models.tensorgraph.layers.L2Loss",
"deepchem.models.tensorgraph.layers.L1Loss",
"deepchem.models.tensorgraph.layers.Dropout",
"numpy.array_equal",
"deepchem.models.tensorgraph.layers.LSTM",
"deepchem.models.tensorgraph.layers.Stack",
"deepchem.models.tensorgraph.layers.SparseSoftMaxCrossEntropy",
"deepchem.models.tensorgraph.layers.GraphEmbedPoolLayer",
"deepchem.models.tensorgraph.layers.ANIFeat",
"deepchem.models.tensorgraph.layers.MaxPool3D",
"deepchem.models.tensorgraph.layers.ReduceSum",
"deepchem.models.tensorgraph.layers.NeighborList",
"deepchem.models.tensorgraph.layers.CombineMeanStd",
"deepchem.models.tensorgraph.layers.Repeat",
"deepchem.models.tensorgraph.layers.InteratomicL2Distances",
"deepchem.models.tensorgraph.layers.SoftMaxCrossEntropy",
"numpy.abs",
"tensorflow.python.eager.context.eager_mode",
"numpy.allclose",
"deepchem.models.tensorgraph.layers.Constant",
"tensorflow.nn.sigmoid_cross_entropy_with_logits",
"deepchem.models.tensorgraph.layers.SluiceLoss",
"deepchem.models.tensorgraph.layers.Multiply",
"tensorflow.nn.relu",
"numpy.random.uniform",
"deepchem.models.tensorgraph.layers.GraphCNN",
"deepchem.models.tensorgraph.layers.Log"
] |
[((403, 423), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (421, 423), False, 'from tensorflow.python.eager import context\n'), ((619, 654), 'deepchem.models.tensorgraph.layers.Conv1D', 'layers.Conv1D', (['filters', 'kernel_size'], {}), '(filters, kernel_size)\n', (632, 654), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((963, 998), 'deepchem.models.tensorgraph.layers.Conv1D', 'layers.Conv1D', (['filters', 'kernel_size'], {}), '(filters, kernel_size)\n', (976, 998), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((1206, 1234), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (1217, 1234), True, 'import numpy as np\n'), ((1314, 1334), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (1332, 1334), False, 'from tensorflow.python.eager import context\n'), ((1475, 1496), 'deepchem.models.tensorgraph.layers.Dense', 'layers.Dense', (['out_dim'], {}), '(out_dim)\n', (1487, 1496), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((1755, 1776), 'deepchem.models.tensorgraph.layers.Dense', 'layers.Dense', (['out_dim'], {}), '(out_dim)\n', (1767, 1776), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((1984, 2012), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (1995, 2012), True, 'import numpy as np\n'), ((2096, 2116), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (2114, 2116), False, 'from tensorflow.python.eager import context\n'), ((2237, 2253), 'deepchem.models.tensorgraph.layers.Highway', 'layers.Highway', ([], {}), '()\n', (2251, 2253), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((2510, 2526), 'deepchem.models.tensorgraph.layers.Highway', 'layers.Highway', ([], {}), '()\n', (2524, 2526), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((2734, 2762), 'numpy.allclose', 
'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (2745, 2762), True, 'import numpy as np\n'), ((2846, 2866), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (2864, 2866), False, 'from tensorflow.python.eager import context\n'), ((3085, 3105), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (3103, 3105), False, 'from tensorflow.python.eager import context\n'), ((3327, 3347), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (3345, 3347), False, 'from tensorflow.python.eager import context\n'), ((3363, 3383), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)'], {}), '(5, 3)\n', (3377, 3383), True, 'import numpy as np\n'), ((3559, 3579), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (3577, 3579), False, 'from tensorflow.python.eager import context\n'), ((3800, 3820), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (3818, 3820), False, 'from tensorflow.python.eager import context\n'), ((4069, 4089), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (4087, 4089), False, 'from tensorflow.python.eager import context\n'), ((4210, 4271), 'deepchem.models.tensorgraph.layers.CombineMeanStd', 'layers.CombineMeanStd', ([], {'training_only': '(True)', 'noise_epsilon': '(0.01)'}), '(training_only=True, noise_epsilon=0.01)\n', (4231, 4271), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((4334, 4363), 'numpy.array_equal', 'np.array_equal', (['result1', 'mean'], {}), '(result1, mean)\n', (4348, 4363), True, 'import numpy as np\n'), ((4497, 4533), 'numpy.allclose', 'np.allclose', (['result2', 'mean'], {'atol': '(0.1)'}), '(result2, mean, atol=0.1)\n', (4508, 4533), True, 'import numpy as np\n'), ((4615, 4635), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (4633, 4635), False, 'from 
tensorflow.python.eager import context\n'), ((4782, 4830), 'numpy.array_equal', 'np.array_equal', (['result[:, 0, :]', 'result[:, 1, :]'], {}), '(result[:, 0, :], result[:, 1, :])\n', (4796, 4830), True, 'import numpy as np\n'), ((4912, 4932), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (4930, 4932), False, 'from tensorflow.python.eager import context\n'), ((5072, 5116), 'numpy.array_equal', 'np.array_equal', (['result', '[input[1], input[3]]'], {}), '(result, [input[1], input[3]])\n', (5086, 5116), True, 'import numpy as np\n'), ((5192, 5212), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (5210, 5212), False, 'from tensorflow.python.eager import context\n'), ((5420, 5452), 'deepchem.models.tensorgraph.layers.GRU', 'layers.GRU', (['n_hidden', 'batch_size'], {}), '(n_hidden, batch_size)\n', (5430, 5452), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((5728, 5760), 'deepchem.models.tensorgraph.layers.GRU', 'layers.GRU', (['n_hidden', 'batch_size'], {}), '(n_hidden, batch_size)\n', (5738, 5760), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((5984, 6012), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (5995, 6012), True, 'import numpy as np\n'), ((6299, 6319), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (6317, 6319), False, 'from tensorflow.python.eager import context\n'), ((6527, 6560), 'deepchem.models.tensorgraph.layers.LSTM', 'layers.LSTM', (['n_hidden', 'batch_size'], {}), '(n_hidden, batch_size)\n', (6538, 6560), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((6836, 6869), 'deepchem.models.tensorgraph.layers.LSTM', 'layers.LSTM', (['n_hidden', 'batch_size'], {}), '(n_hidden, batch_size)\n', (6847, 6869), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((7093, 7121), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), 
'(result, result3)\n', (7104, 7121), True, 'import numpy as np\n'), ((7432, 7452), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (7450, 7452), False, 'from tensorflow.python.eager import context\n'), ((7620, 7651), 'deepchem.models.tensorgraph.layers.TimeSeriesDense', 'layers.TimeSeriesDense', (['out_dim'], {}), '(out_dim)\n', (7642, 7651), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((7919, 7950), 'deepchem.models.tensorgraph.layers.TimeSeriesDense', 'layers.TimeSeriesDense', (['out_dim'], {}), '(out_dim)\n', (7941, 7950), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((8158, 8186), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (8169, 8186), True, 'import numpy as np\n'), ((8269, 8289), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (8287, 8289), False, 'from tensorflow.python.eager import context\n'), ((8521, 8550), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (8532, 8550), True, 'import numpy as np\n'), ((8633, 8653), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (8651, 8653), False, 'from tensorflow.python.eager import context\n'), ((8831, 8870), 'numpy.mean', 'np.mean', (['((input1 - input2) ** 2)'], {'axis': '(1)'}), '((input1 - input2) ** 2, axis=1)\n', (8838, 8870), True, 'import numpy as np\n'), ((8882, 8911), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (8893, 8911), True, 'import numpy as np\n'), ((8995, 9015), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (9013, 9015), False, 'from tensorflow.python.eager import context\n'), ((9128, 9148), 'tensorflow.nn.softmax', 'tf.nn.softmax', (['input'], {}), '(input)\n', (9141, 9148), True, 'import tensorflow as tf\n'), ((9162, 9191), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), 
'(result, expected)\n', (9173, 9191), True, 'import numpy as np\n'), ((9275, 9295), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (9293, 9295), False, 'from tensorflow.python.eager import context\n'), ((9408, 9428), 'tensorflow.nn.sigmoid', 'tf.nn.sigmoid', (['input'], {}), '(input)\n', (9421, 9428), True, 'import tensorflow as tf\n'), ((9442, 9471), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (9453, 9471), True, 'import numpy as np\n'), ((9549, 9569), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (9567, 9569), False, 'from tensorflow.python.eager import context\n'), ((9688, 9705), 'tensorflow.nn.relu', 'tf.nn.relu', (['input'], {}), '(input)\n', (9698, 9705), True, 'import tensorflow as tf\n'), ((9719, 9748), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (9730, 9748), True, 'import numpy as np\n'), ((9830, 9850), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (9848, 9850), False, 'from tensorflow.python.eager import context\n'), ((10060, 10098), 'numpy.array_equal', 'np.array_equal', (['input1', 'result[:, :10]'], {}), '(input1, result[:, :10])\n', (10074, 10098), True, 'import numpy as np\n'), ((10112, 10150), 'numpy.array_equal', 'np.array_equal', (['input2', 'result[:, 10:]'], {}), '(input2, result[:, 10:])\n', (10126, 10150), True, 'import numpy as np\n'), ((10230, 10250), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (10248, 10250), False, 'from tensorflow.python.eager import context\n'), ((10460, 10499), 'numpy.array_equal', 'np.array_equal', (['input1', 'result[:, 0, :]'], {}), '(input1, result[:, 0, :])\n', (10474, 10499), True, 'import numpy as np\n'), ((10513, 10552), 'numpy.array_equal', 'np.array_equal', (['input2', 'result[:, 1, :]'], {}), '(input2, result[:, 1, :])\n', (10527, 10552), True, 'import numpy as 
np\n'), ((10638, 10658), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (10656, 10658), False, 'from tensorflow.python.eager import context\n'), ((10767, 10796), 'numpy.array_equal', 'np.array_equal', (['result', 'value'], {}), '(result, value)\n', (10781, 10796), True, 'import numpy as np\n'), ((10882, 10902), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (10900, 10902), False, 'from tensorflow.python.eager import context\n'), ((10972, 10994), 'deepchem.models.tensorgraph.layers.Variable', 'layers.Variable', (['value'], {}), '(value)\n', (10987, 10994), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((11193, 11213), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (11211, 11213), False, 'from tensorflow.python.eager import context\n'), ((11272, 11302), 'numpy.array_equal', 'np.array_equal', (['result', '[4, 6]'], {}), '(result, [4, 6])\n', (11286, 11302), True, 'import numpy as np\n'), ((11388, 11408), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (11406, 11408), False, 'from tensorflow.python.eager import context\n'), ((11472, 11502), 'numpy.array_equal', 'np.array_equal', (['result', '[3, 8]'], {}), '(result, [3, 8])\n', (11486, 11502), True, 'import numpy as np\n'), ((11584, 11604), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (11602, 11604), False, 'from tensorflow.python.eager import context\n'), ((11666, 11697), 'numpy.allclose', 'np.allclose', (['result', '[0.5, 0.4]'], {}), '(result, [0.5, 0.4])\n', (11677, 11697), True, 'import numpy as np\n'), ((11773, 11793), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (11791, 11793), False, 'from tensorflow.python.eager import context\n'), ((11949, 11969), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (11967, 11969), False, 
'from tensorflow.python.eager import context\n'), ((12165, 12185), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (12183, 12185), False, 'from tensorflow.python.eager import context\n'), ((12238, 12262), 'numpy.random.rand', 'np.random.rand', (['atoms', '(3)'], {}), '(atoms, 3)\n', (12252, 12262), True, 'import numpy as np\n'), ((12285, 12337), 'numpy.random.randint', 'np.random.randint', (['(0)', 'atoms'], {'size': '(atoms, neighbors)'}), '(0, atoms, size=(atoms, neighbors))\n', (12302, 12337), True, 'import numpy as np\n'), ((12352, 12402), 'deepchem.models.tensorgraph.layers.InteratomicL2Distances', 'layers.InteratomicL2Distances', (['atoms', 'neighbors', '(3)'], {}), '(atoms, neighbors, 3)\n', (12381, 12402), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((12861, 12881), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (12879, 12881), False, 'from tensorflow.python.eager import context\n'), ((13141, 13217), 'tensorflow.nn.sparse_softmax_cross_entropy_with_logits', 'tf.nn.sparse_softmax_cross_entropy_with_logits', ([], {'labels': 'labels', 'logits': 'logits'}), '(labels=labels, logits=logits)\n', (13187, 13217), True, 'import tensorflow as tf\n'), ((13242, 13271), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (13253, 13271), True, 'import numpy as np\n'), ((13381, 13401), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (13399, 13401), False, 'from tensorflow.python.eager import context\n'), ((13669, 13741), 'tensorflow.nn.softmax_cross_entropy_with_logits_v2', 'tf.nn.softmax_cross_entropy_with_logits_v2', ([], {'labels': 'labels', 'logits': 'logits'}), '(labels=labels, logits=logits)\n', (13711, 13741), True, 'import tensorflow as tf\n'), ((13766, 13795), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (13777, 13795), True, 'import numpy as np\n'), 
((13905, 13925), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (13923, 13925), False, 'from tensorflow.python.eager import context\n'), ((14237, 14306), 'tensorflow.nn.sigmoid_cross_entropy_with_logits', 'tf.nn.sigmoid_cross_entropy_with_logits', ([], {'labels': 'labels', 'logits': 'logits'}), '(labels=labels, logits=logits)\n', (14276, 14306), True, 'import tensorflow as tf\n'), ((14331, 14360), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (14342, 14360), True, 'import numpy as np\n'), ((14451, 14471), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (14469, 14471), False, 'from tensorflow.python.eager import context\n'), ((14755, 14775), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (14773, 14775), False, 'from tensorflow.python.eager import context\n'), ((15057, 15077), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (15075, 15077), False, 'from tensorflow.python.eager import context\n'), ((15386, 15406), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (15404, 15406), False, 'from tensorflow.python.eager import context\n'), ((15777, 15797), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (15795, 15797), False, 'from tensorflow.python.eager import context\n'), ((16047, 16094), 'deepchem.models.tensorgraph.layers.Conv2D', 'layers.Conv2D', (['filters'], {'kernel_size': 'kernel_size'}), '(filters, kernel_size=kernel_size)\n', (16060, 16094), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((16368, 16415), 'deepchem.models.tensorgraph.layers.Conv2D', 'layers.Conv2D', (['filters'], {'kernel_size': 'kernel_size'}), '(filters, kernel_size=kernel_size)\n', (16381, 16415), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((16623, 16651), 'numpy.allclose', 'np.allclose', 
(['result', 'result3'], {}), '(result, result3)\n', (16634, 16651), True, 'import numpy as np\n'), ((16734, 16754), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (16752, 16754), False, 'from tensorflow.python.eager import context\n'), ((17027, 17074), 'deepchem.models.tensorgraph.layers.Conv3D', 'layers.Conv3D', (['filters'], {'kernel_size': 'kernel_size'}), '(filters, kernel_size=kernel_size)\n', (17040, 17074), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((17355, 17402), 'deepchem.models.tensorgraph.layers.Conv3D', 'layers.Conv3D', (['filters'], {'kernel_size': 'kernel_size'}), '(filters, kernel_size=kernel_size)\n', (17368, 17402), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((17610, 17638), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (17621, 17638), True, 'import numpy as np\n'), ((17740, 17760), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (17758, 17760), False, 'from tensorflow.python.eager import context\n'), ((18027, 18098), 'deepchem.models.tensorgraph.layers.Conv2DTranspose', 'layers.Conv2DTranspose', (['filters'], {'kernel_size': 'kernel_size', 'stride': 'stride'}), '(filters, kernel_size=kernel_size, stride=stride)\n', (18049, 18098), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((18431, 18502), 'deepchem.models.tensorgraph.layers.Conv2DTranspose', 'layers.Conv2DTranspose', (['filters'], {'kernel_size': 'kernel_size', 'stride': 'stride'}), '(filters, kernel_size=kernel_size, stride=stride)\n', (18453, 18502), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((18721, 18749), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (18732, 18749), True, 'import numpy as np\n'), ((18851, 18871), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (18869, 18871), False, 'from tensorflow.python.eager 
import context\n'), ((19161, 19232), 'deepchem.models.tensorgraph.layers.Conv3DTranspose', 'layers.Conv3DTranspose', (['filters'], {'kernel_size': 'kernel_size', 'stride': 'stride'}), '(filters, kernel_size=kernel_size, stride=stride)\n', (19183, 19232), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((19581, 19652), 'deepchem.models.tensorgraph.layers.Conv3DTranspose', 'layers.Conv3DTranspose', (['filters'], {'kernel_size': 'kernel_size', 'stride': 'stride'}), '(filters, kernel_size=kernel_size, stride=stride)\n', (19603, 19652), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((19871, 19899), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (19882, 19899), True, 'import numpy as np\n'), ((19989, 20009), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (20007, 20009), False, 'from tensorflow.python.eager import context\n'), ((20246, 20266), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (20264, 20266), False, 'from tensorflow.python.eager import context\n'), ((20500, 20520), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (20518, 20520), False, 'from tensorflow.python.eager import context\n'), ((20759, 20779), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (20777, 20779), False, 'from tensorflow.python.eager import context\n'), ((20990, 21032), 'deepchem.feat.graph_features.ConvMolFeaturizer', 'dc.feat.graph_features.ConvMolFeaturizer', ([], {}), '()\n', (21030, 21032), True, 'import deepchem as dc\n'), ((21091, 21140), 'deepchem.feat.mol_graphs.ConvMol.agglomerate_mols', 'dc.feat.mol_graphs.ConvMol.agglomerate_mols', (['mols'], {}), '(mols)\n', (21134, 21140), True, 'import deepchem as dc\n'), ((21430, 21460), 'deepchem.models.tensorgraph.layers.GraphConv', 'layers.GraphConv', (['out_channels'], {}), '(out_channels)\n', (21446, 21460), True, 
'import deepchem.models.tensorgraph.layers as layers\n'), ((21695, 21715), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (21713, 21715), False, 'from tensorflow.python.eager import context\n'), ((21903, 21945), 'deepchem.feat.graph_features.ConvMolFeaturizer', 'dc.feat.graph_features.ConvMolFeaturizer', ([], {}), '()\n', (21943, 21945), True, 'import deepchem as dc\n'), ((22004, 22053), 'deepchem.feat.mol_graphs.ConvMol.agglomerate_mols', 'dc.feat.mol_graphs.ConvMol.agglomerate_mols', (['mols'], {}), '(mols)\n', (22047, 22053), True, 'import deepchem as dc\n'), ((22562, 22582), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (22580, 22582), False, 'from tensorflow.python.eager import context\n'), ((22813, 22855), 'deepchem.feat.graph_features.ConvMolFeaturizer', 'dc.feat.graph_features.ConvMolFeaturizer', ([], {}), '()\n', (22853, 22855), True, 'import deepchem as dc\n'), ((22914, 22963), 'deepchem.feat.mol_graphs.ConvMol.agglomerate_mols', 'dc.feat.mol_graphs.ConvMol.agglomerate_mols', (['mols'], {}), '(mols)\n', (22957, 22963), True, 'import deepchem as dc\n'), ((23506, 23526), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (23524, 23526), False, 'from tensorflow.python.eager import context\n'), ((23798, 23833), 'deepchem.models.tensorgraph.layers.LSTMStep', 'layers.LSTMStep', (['n_feat', '(2 * n_feat)'], {}), '(n_feat, 2 * n_feat)\n', (23813, 23833), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((24248, 24268), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (24266, 24268), False, 'from tensorflow.python.eager import context\n'), ((24492, 24554), 'deepchem.models.tensorgraph.layers.AttnLSTMEmbedding', 'layers.AttnLSTMEmbedding', (['n_test', 'n_support', 'n_feat', 'max_depth'], {}), '(n_test, n_support, n_feat, max_depth)\n', (24516, 24554), True, 'import deepchem.models.tensorgraph.layers as 
layers\n'), ((24866, 24886), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (24884, 24886), False, 'from tensorflow.python.eager import context\n'), ((25110, 25175), 'deepchem.models.tensorgraph.layers.IterRefLSTMEmbedding', 'layers.IterRefLSTMEmbedding', (['n_test', 'n_support', 'n_feat', 'max_depth'], {}), '(n_test, n_support, n_feat, max_depth)\n', (25137, 25175), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((25467, 25487), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (25485, 25487), False, 'from tensorflow.python.eager import context\n'), ((25618, 25636), 'deepchem.models.tensorgraph.layers.BatchNorm', 'layers.BatchNorm', ([], {}), '()\n', (25634, 25636), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((25864, 25884), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (25882, 25884), False, 'from tensorflow.python.eager import context\n'), ((26069, 26092), 'numpy.sum', 'np.sum', (['(input1 * input2)'], {}), '(input1 * input2)\n', (26075, 26092), True, 'import numpy as np\n'), ((26106, 26135), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (26117, 26135), True, 'import numpy as np\n'), ((26235, 26255), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (26253, 26255), False, 'from tensorflow.python.eager import context\n'), ((26496, 26565), 'deepchem.models.tensorgraph.layers.VinaFreeEnergy', 'layers.VinaFreeEnergy', (['n_atoms', 'm_nbrs', 'ndim', 'nbr_cutoff', 'start', 'stop'], {}), '(n_atoms, m_nbrs, ndim, nbr_cutoff, start, stop)\n', (26517, 26565), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((26845, 26914), 'deepchem.models.tensorgraph.layers.VinaFreeEnergy', 'layers.VinaFreeEnergy', (['n_atoms', 'm_nbrs', 'ndim', 'nbr_cutoff', 'start', 'stop'], {}), '(n_atoms, m_nbrs, ndim, nbr_cutoff, start, stop)\n', (26866, 
26914), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((27157, 27185), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (27168, 27185), True, 'import numpy as np\n'), ((27295, 27315), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (27313, 27315), False, 'from tensorflow.python.eager import context\n'), ((27443, 27471), 'deepchem.models.tensorgraph.layers.WeightedLinearCombo', 'layers.WeightedLinearCombo', ([], {}), '()\n', (27469, 27471), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((27666, 27695), 'numpy.allclose', 'np.allclose', (['result', 'expected'], {}), '(result, expected)\n', (27677, 27695), True, 'import numpy as np\n'), ((27790, 27810), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (27808, 27810), False, 'from tensorflow.python.eager import context\n'), ((28052, 28119), 'deepchem.models.tensorgraph.layers.NeighborList', 'layers.NeighborList', (['N_atoms', 'M_nbrs', 'ndim', 'nbr_cutoff', 'start', 'stop'], {}), '(N_atoms, M_nbrs, ndim, nbr_cutoff, start, stop)\n', (28071, 28119), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((28313, 28333), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (28331, 28333), False, 'from tensorflow.python.eager import context\n'), ((28421, 28441), 'deepchem.models.tensorgraph.layers.Dropout', 'layers.Dropout', (['rate'], {}), '(rate)\n', (28435, 28441), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((28500, 28527), 'numpy.allclose', 'np.allclose', (['result1', 'input'], {}), '(result1, input)\n', (28511, 28527), True, 'import numpy as np\n'), ((28832, 28852), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (28850, 28852), False, 'from tensorflow.python.eager import context\n'), ((29120, 29193), 'numpy.random.randint', 'np.random.randint', (['max_atoms'], {'size': 
'(batch_size, max_atoms, max_neighbors)'}), '(max_atoms, size=(batch_size, max_atoms, max_neighbors))\n', (29137, 29193), True, 'import numpy as np\n'), ((29220, 29289), 'numpy.random.randint', 'np.random.randint', (['(1)', '(10)'], {'size': '(batch_size, max_atoms, max_neighbors)'}), '(1, 10, size=(batch_size, max_atoms, max_neighbors))\n', (29237, 29289), True, 'import numpy as np\n'), ((29315, 29361), 'deepchem.models.tensorgraph.layers.AtomicConvolution', 'layers.AtomicConvolution', ([], {'radial_params': 'params'}), '(radial_params=params)\n', (29339, 29361), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((29623, 29643), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (29641, 29643), False, 'from tensorflow.python.eager import context\n'), ((29836, 29860), 'deepchem.models.tensorgraph.layers.AlphaShareLayer', 'layers.AlphaShareLayer', ([], {}), '()\n', (29858, 29860), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((30118, 30142), 'deepchem.models.tensorgraph.layers.AlphaShareLayer', 'layers.AlphaShareLayer', ([], {}), '()\n', (30140, 30142), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((30426, 30460), 'numpy.allclose', 'np.allclose', (['result[0]', 'result3[0]'], {}), '(result[0], result3[0])\n', (30437, 30460), True, 'import numpy as np\n'), ((30474, 30508), 'numpy.allclose', 'np.allclose', (['result[1]', 'result3[1]'], {}), '(result[1], result3[1])\n', (30485, 30508), True, 'import numpy as np\n'), ((30599, 30619), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (30617, 30619), False, 'from tensorflow.python.eager import context\n'), ((30785, 30810), 'numpy.allclose', 'np.allclose', (['result', '(40.0)'], {}), '(result, 40.0)\n', (30796, 30810), True, 'import numpy as np\n'), ((30899, 30919), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (30917, 30919), False, 'from tensorflow.python.eager 
import context\n'), ((31112, 31130), 'deepchem.models.tensorgraph.layers.BetaShare', 'layers.BetaShare', ([], {}), '()\n', (31128, 31130), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((31382, 31400), 'deepchem.models.tensorgraph.layers.BetaShare', 'layers.BetaShare', ([], {}), '()\n', (31398, 31400), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((31626, 31654), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (31637, 31654), True, 'import numpy as np\n'), ((31739, 31759), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (31757, 31759), False, 'from tensorflow.python.eager import context\n'), ((31891, 31926), 'deepchem.models.tensorgraph.layers.ANIFeat', 'layers.ANIFeat', ([], {'max_atoms': 'max_atoms'}), '(max_atoms=max_atoms)\n', (31905, 31926), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((32183, 32203), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (32201, 32203), False, 'from tensorflow.python.eager import context\n'), ((32360, 32402), 'deepchem.models.tensorgraph.layers.GraphEmbedPoolLayer', 'layers.GraphEmbedPoolLayer', ([], {'num_vertices': '(6)'}), '(num_vertices=6)\n', (32386, 32402), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((32653, 32695), 'deepchem.models.tensorgraph.layers.GraphEmbedPoolLayer', 'layers.GraphEmbedPoolLayer', ([], {'num_vertices': '(6)'}), '(num_vertices=6)\n', (32679, 32695), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((32965, 32999), 'numpy.allclose', 'np.allclose', (['result[0]', 'result3[0]'], {}), '(result[0], result3[0])\n', (32976, 32999), True, 'import numpy as np\n'), ((33013, 33047), 'numpy.allclose', 'np.allclose', (['result[1]', 'result3[1]'], {}), '(result[1], result3[1])\n', (33024, 33047), True, 'import numpy as np\n'), ((33134, 33154), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], 
{}), '()\n', (33152, 33154), False, 'from tensorflow.python.eager import context\n'), ((33311, 33341), 'deepchem.models.tensorgraph.layers.GraphCNN', 'layers.GraphCNN', ([], {'num_filters': '(6)'}), '(num_filters=6)\n', (33326, 33341), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((33544, 33574), 'deepchem.models.tensorgraph.layers.GraphCNN', 'layers.GraphCNN', ([], {'num_filters': '(6)'}), '(num_filters=6)\n', (33559, 33574), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((33786, 33814), 'numpy.allclose', 'np.allclose', (['result', 'result3'], {}), '(result, result3)\n', (33797, 33814), True, 'import numpy as np\n'), ((33903, 33923), 'tensorflow.python.eager.context.eager_mode', 'context.eager_mode', ([], {}), '()\n', (33921, 33923), False, 'from tensorflow.python.eager import context\n'), ((1046, 1074), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (1057, 1074), True, 'import numpy as np\n'), ((1824, 1852), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (1835, 1852), True, 'import numpy as np\n'), ((2574, 2602), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (2585, 2602), True, 'import numpy as np\n'), ((2941, 2957), 'deepchem.models.tensorgraph.layers.Flatten', 'layers.Flatten', ([], {}), '()\n', (2955, 2957), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((3180, 3204), 'deepchem.models.tensorgraph.layers.Reshape', 'layers.Reshape', (['(100, 2)'], {}), '((100, 2))\n', (3194, 3204), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((3399, 3428), 'deepchem.models.tensorgraph.layers.Cast', 'layers.Cast', ([], {'dtype': 'tf.float32'}), '(dtype=tf.float32)\n', (3410, 3428), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((3653, 3669), 'deepchem.models.tensorgraph.layers.Squeeze', 'layers.Squeeze', ([], {}), '()\n', (3667, 3669), True, 'import 
deepchem.models.tensorgraph.layers as layers\n'), ((3895, 3922), 'deepchem.models.tensorgraph.layers.Transpose', 'layers.Transpose', (['(1, 2, 0)'], {}), '((1, 2, 0))\n', (3911, 3922), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((4454, 4483), 'numpy.array_equal', 'np.array_equal', (['result2', 'mean'], {}), '(result2, mean)\n', (4468, 4483), True, 'import numpy as np\n'), ((4706, 4722), 'deepchem.models.tensorgraph.layers.Repeat', 'layers.Repeat', (['(3)'], {}), '(3)\n', (4719, 4722), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((5027, 5042), 'deepchem.models.tensorgraph.layers.Gather', 'layers.Gather', ([], {}), '()\n', (5040, 5042), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((5816, 5844), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (5827, 5844), True, 'import numpy as np\n'), ((6193, 6221), 'numpy.allclose', 'np.allclose', (['result', 'result4'], {}), '(result, result4)\n', (6204, 6221), True, 'import numpy as np\n'), ((6925, 6953), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (6936, 6953), True, 'import numpy as np\n'), ((7302, 7330), 'numpy.allclose', 'np.allclose', (['result', 'result4'], {}), '(result, result4)\n', (7313, 7330), True, 'import numpy as np\n'), ((7998, 8026), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (8009, 8026), True, 'import numpy as np\n'), ((8418, 8433), 'deepchem.models.tensorgraph.layers.L1Loss', 'layers.L1Loss', ([], {}), '()\n', (8431, 8433), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((8475, 8498), 'numpy.abs', 'np.abs', (['(input1 - input2)'], {}), '(input1 - input2)\n', (8481, 8498), True, 'import numpy as np\n'), ((8782, 8797), 'deepchem.models.tensorgraph.layers.L2Loss', 'layers.L2Loss', ([], {}), '()\n', (8795, 8797), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((9087, 9103), 
'deepchem.models.tensorgraph.layers.SoftMax', 'layers.SoftMax', ([], {}), '()\n', (9101, 9103), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((9367, 9383), 'deepchem.models.tensorgraph.layers.Sigmoid', 'layers.Sigmoid', ([], {}), '()\n', (9381, 9383), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((9650, 9663), 'deepchem.models.tensorgraph.layers.ReLU', 'layers.ReLU', ([], {}), '()\n', (9661, 9663), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((9978, 9993), 'deepchem.models.tensorgraph.layers.Concat', 'layers.Concat', ([], {}), '()\n', (9991, 9993), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((10377, 10391), 'deepchem.models.tensorgraph.layers.Stack', 'layers.Stack', ([], {}), '()\n', (10389, 10391), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((10729, 10751), 'deepchem.models.tensorgraph.layers.Constant', 'layers.Constant', (['value'], {}), '(value)\n', (10744, 10751), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((11230, 11242), 'deepchem.models.tensorgraph.layers.Add', 'layers.Add', ([], {}), '()\n', (11240, 11242), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((11425, 11442), 'deepchem.models.tensorgraph.layers.Multiply', 'layers.Multiply', ([], {}), '()\n', (11440, 11442), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((11621, 11636), 'deepchem.models.tensorgraph.layers.Divide', 'layers.Divide', ([], {}), '()\n', (11634, 11636), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((11810, 11822), 'deepchem.models.tensorgraph.layers.Log', 'layers.Log', ([], {}), '()\n', (11820, 11822), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((11861, 11872), 'numpy.log', 'np.log', (['(2.5)'], {}), '(2.5)\n', (11867, 11872), True, 'import numpy as np\n'), ((11986, 11998), 'deepchem.models.tensorgraph.layers.Exp', 'layers.Exp', ([], {}), '()\n', (11996, 11998), True, 'import 
deepchem.models.tensorgraph.layers as layers\n'), ((12037, 12048), 'numpy.exp', 'np.exp', (['(2.5)'], {}), '(2.5)\n', (12043, 12048), True, 'import numpy as np\n'), ((13073, 13107), 'deepchem.models.tensorgraph.layers.SparseSoftMaxCrossEntropy', 'layers.SparseSoftMaxCrossEntropy', ([], {}), '()\n', (13105, 13107), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((13607, 13635), 'deepchem.models.tensorgraph.layers.SoftMaxCrossEntropy', 'layers.SoftMaxCrossEntropy', ([], {}), '()\n', (13633, 13635), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((14175, 14203), 'deepchem.models.tensorgraph.layers.SigmoidCrossEntropy', 'layers.SigmoidCrossEntropy', ([], {}), '()\n', (14201, 14203), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((14543, 14568), 'deepchem.models.tensorgraph.layers.ReduceMean', 'layers.ReduceMean', ([], {'axis': '(1)'}), '(axis=1)\n', (14560, 14568), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((14643, 14665), 'numpy.mean', 'np.mean', (['input'], {'axis': '(1)'}), '(input, axis=1)\n', (14650, 14665), True, 'import numpy as np\n'), ((14847, 14871), 'deepchem.models.tensorgraph.layers.ReduceMax', 'layers.ReduceMax', ([], {'axis': '(1)'}), '(axis=1)\n', (14863, 14871), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((14946, 14967), 'numpy.max', 'np.max', (['input'], {'axis': '(1)'}), '(input, axis=1)\n', (14952, 14967), True, 'import numpy as np\n'), ((15149, 15173), 'deepchem.models.tensorgraph.layers.ReduceSum', 'layers.ReduceSum', ([], {'axis': '(1)'}), '(axis=1)\n', (15165, 15173), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((15248, 15269), 'numpy.sum', 'np.sum', (['input'], {'axis': '(1)'}), '(input, axis=1)\n', (15254, 15269), True, 'import numpy as np\n'), ((15535, 15572), 'deepchem.models.tensorgraph.layers.ReduceSquareDifference', 'layers.ReduceSquareDifference', ([], {'axis': '(1)'}), '(axis=1)\n', (15564, 15572), True, 'import 
deepchem.models.tensorgraph.layers as layers\n'), ((15656, 15695), 'numpy.mean', 'np.mean', (['((input1 - input2) ** 2)'], {'axis': '(1)'}), '((input1 - input2) ** 2, axis=1)\n', (15663, 15695), True, 'import numpy as np\n'), ((16463, 16491), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (16474, 16491), True, 'import numpy as np\n'), ((17450, 17478), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (17461, 17478), True, 'import numpy as np\n'), ((18561, 18589), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (18572, 18589), True, 'import numpy as np\n'), ((19711, 19739), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (19722, 19739), True, 'import numpy as np\n'), ((20083, 20110), 'deepchem.models.tensorgraph.layers.MaxPool1D', 'layers.MaxPool1D', ([], {'strides': '(2)'}), '(strides=2)\n', (20099, 20110), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((20343, 20361), 'deepchem.models.tensorgraph.layers.MaxPool2D', 'layers.MaxPool2D', ([], {}), '()\n', (20359, 20361), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((20600, 20618), 'deepchem.models.tensorgraph.layers.MaxPool3D', 'layers.MaxPool3D', ([], {}), '()\n', (20616, 20618), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((20922, 20949), 'rdkit.Chem.MolFromSmiles', 'rdkit.Chem.MolFromSmiles', (['s'], {}), '(s)\n', (20946, 20949), False, 'import rdkit\n'), ((21835, 21862), 'rdkit.Chem.MolFromSmiles', 'rdkit.Chem.MolFromSmiles', (['s'], {}), '(s)\n', (21859, 21862), False, 'import rdkit\n'), ((22344, 22362), 'deepchem.models.tensorgraph.layers.GraphPool', 'layers.GraphPool', ([], {}), '()\n', (22360, 22362), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((22745, 22772), 'rdkit.Chem.MolFromSmiles', 'rdkit.Chem.MolFromSmiles', (['s'], {}), '(s)\n', (22769, 22772), False, 'import rdkit\n'), ((23254, 
23284), 'deepchem.models.tensorgraph.layers.GraphGather', 'layers.GraphGather', (['batch_size'], {}), '(batch_size)\n', (23272, 23284), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((26013, 26035), 'deepchem.models.tensorgraph.layers.WeightedError', 'layers.WeightedError', ([], {}), '()\n', (26033, 26035), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((26998, 27026), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (27009, 27026), True, 'import numpy as np\n'), ((28008, 28024), 'tensorflow.stack', 'tf.stack', (['coords'], {}), '(coords)\n', (28016, 28024), True, 'import tensorflow as tf\n'), ((28589, 28616), 'numpy.allclose', 'np.allclose', (['result2', 'input'], {}), '(result2, input)\n', (28600, 28616), True, 'import numpy as np\n'), ((30199, 30233), 'numpy.allclose', 'np.allclose', (['result[0]', 'result2[0]'], {}), '(result[0], result2[0])\n', (30210, 30233), True, 'import numpy as np\n'), ((30251, 30285), 'numpy.allclose', 'np.allclose', (['result[1]', 'result2[1]'], {}), '(result[1], result2[1])\n', (30262, 30285), True, 'import numpy as np\n'), ((30736, 30755), 'deepchem.models.tensorgraph.layers.SluiceLoss', 'layers.SluiceLoss', ([], {}), '()\n', (30753, 30755), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((31457, 31485), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (31468, 31485), True, 'import numpy as np\n'), ((32745, 32779), 'numpy.allclose', 'np.allclose', (['result[0]', 'result2[0]'], {}), '(result[0], result2[0])\n', (32756, 32779), True, 'import numpy as np\n'), ((32797, 32831), 'numpy.allclose', 'np.allclose', (['result[1]', 'result2[1]'], {}), '(result[1], result2[1])\n', (32808, 32831), True, 'import numpy as np\n'), ((33624, 33652), 'numpy.allclose', 'np.allclose', (['result', 'result2'], {}), '(result, result2)\n', (33635, 33652), True, 'import numpy as np\n'), ((34096, 34114), 
'deepchem.models.tensorgraph.layers.HingeLoss', 'layers.HingeLoss', ([], {}), '()\n', (34112, 34114), True, 'import deepchem.models.tensorgraph.layers as layers\n'), ((539, 585), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'width', 'in_channels'], {}), '(batch_size, width, in_channels)\n', (553, 585), True, 'import numpy as np\n'), ((1407, 1441), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'in_dim'], {}), '(batch_size, in_dim)\n', (1421, 1441), True, 'import numpy as np\n'), ((2170, 2203), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'width'], {}), '(batch_size, width)\n', (2184, 2203), True, 'import numpy as np\n'), ((2882, 2906), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)', '(4)'], {}), '(5, 10, 4)\n', (2896, 2906), True, 'import numpy as np\n'), ((3121, 3145), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)', '(4)'], {}), '(5, 10, 4)\n', (3135, 3145), True, 'import numpy as np\n'), ((3595, 3618), 'numpy.random.rand', 'np.random.rand', (['(5)', '(1)', '(4)'], {}), '(5, 1, 4)\n', (3609, 3618), True, 'import numpy as np\n'), ((3836, 3860), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)', '(4)'], {}), '(5, 10, 4)\n', (3850, 3860), True, 'import numpy as np\n'), ((4104, 4124), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)'], {}), '(5, 3)\n', (4118, 4124), True, 'import numpy as np\n'), ((4156, 4176), 'numpy.random.rand', 'np.random.rand', (['(5)', '(3)'], {}), '(5, 3)\n', (4170, 4176), True, 'import numpy as np\n'), ((4651, 4671), 'numpy.random.rand', 'np.random.rand', (['(5)', '(4)'], {}), '(5, 4)\n', (4665, 4671), True, 'import numpy as np\n'), ((4948, 4965), 'numpy.random.rand', 'np.random.rand', (['(5)'], {}), '(5)\n', (4962, 4965), True, 'import numpy as np\n'), ((5309, 5357), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'n_steps', 'in_channels'], {}), '(batch_size, n_steps, in_channels)\n', (5323, 5357), True, 'import numpy as np\n'), ((6416, 6464), 'numpy.random.rand', 'np.random.rand', 
(['batch_size', 'n_steps', 'in_channels'], {}), '(batch_size, n_steps, in_channels)\n', (6430, 6464), True, 'import numpy as np\n'), ((7543, 7586), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'n_steps', 'in_dim'], {}), '(batch_size, n_steps, in_dim)\n', (7557, 7586), True, 'import numpy as np\n'), ((8306, 8327), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (8320, 8327), True, 'import numpy as np\n'), ((8362, 8383), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (8376, 8383), True, 'import numpy as np\n'), ((8670, 8691), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (8684, 8691), True, 'import numpy as np\n'), ((8726, 8747), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (8740, 8747), True, 'import numpy as np\n'), ((9031, 9052), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (9045, 9052), True, 'import numpy as np\n'), ((9311, 9332), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (9325, 9332), True, 'import numpy as np\n'), ((9585, 9615), 'numpy.random.normal', 'np.random.normal', ([], {'size': '(5, 10)'}), '(size=(5, 10))\n', (9601, 9615), True, 'import numpy as np\n'), ((9867, 9888), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (9881, 9888), True, 'import numpy as np\n'), ((9923, 9943), 'numpy.random.rand', 'np.random.rand', (['(5)', '(4)'], {}), '(5, 4)\n', (9937, 9943), True, 'import numpy as np\n'), ((10267, 10287), 'numpy.random.rand', 'np.random.rand', (['(5)', '(4)'], {}), '(5, 4)\n', (10281, 10287), True, 'import numpy as np\n'), ((10322, 10342), 'numpy.random.rand', 'np.random.rand', (['(5)', '(4)'], {}), '(5, 4)\n', (10336, 10342), True, 'import numpy as np\n'), ((10674, 10694), 'numpy.random.rand', 'np.random.rand', (['(5)', '(4)'], {}), '(5, 4)\n', (10688, 10694), True, 'import numpy as np\n'), ((10918, 10938), 'numpy.random.rand', 
'np.random.rand', (['(5)', '(4)'], {}), '(5, 4)\n', (10932, 10938), True, 'import numpy as np\n'), ((12658, 12678), 'numpy.dot', 'np.dot', (['delta', 'delta'], {}), '(delta, delta)\n', (12664, 12678), True, 'import numpy as np\n'), ((12696, 12738), 'numpy.allclose', 'np.allclose', (['dist2', 'result[atom, neighbor]'], {}), '(dist2, result[atom, neighbor])\n', (12707, 12738), True, 'import numpy as np\n'), ((12941, 12979), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'n_features'], {}), '(batch_size, n_features)\n', (12955, 12979), True, 'import numpy as np\n'), ((13014, 13040), 'numpy.random.rand', 'np.random.rand', (['batch_size'], {}), '(batch_size)\n', (13028, 13040), True, 'import numpy as np\n'), ((13461, 13499), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'n_features'], {}), '(batch_size, n_features)\n', (13475, 13499), True, 'import numpy as np\n'), ((13534, 13572), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'n_features'], {}), '(batch_size, n_features)\n', (13548, 13572), True, 'import numpy as np\n'), ((13985, 14023), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'n_features'], {}), '(batch_size, n_features)\n', (13999, 14023), True, 'import numpy as np\n'), ((14058, 14107), 'numpy.random.randint', 'np.random.randint', (['(0)', '(2)', '(batch_size, n_features)'], {}), '(0, 2, (batch_size, n_features))\n', (14075, 14107), True, 'import numpy as np\n'), ((14487, 14508), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (14501, 14508), True, 'import numpy as np\n'), ((14791, 14812), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (14805, 14812), True, 'import numpy as np\n'), ((15093, 15114), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (15107, 15114), True, 'import numpy as np\n'), ((15423, 15444), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (15437, 15444), True, 'import numpy as np\n'), ((15479, 15500), 
'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (15493, 15500), True, 'import numpy as np\n'), ((15930, 15984), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length', 'width', 'in_channels'], {}), '(batch_size, length, width, in_channels)\n', (15944, 15984), True, 'import numpy as np\n'), ((16903, 16964), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length', 'width', 'depth', 'in_channels'], {}), '(batch_size, length, width, depth, in_channels)\n', (16917, 16964), True, 'import numpy as np\n'), ((17910, 17964), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length', 'width', 'in_channels'], {}), '(batch_size, length, width, in_channels)\n', (17924, 17964), True, 'import numpy as np\n'), ((19037, 19098), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length', 'width', 'depth', 'in_channels'], {}), '(batch_size, length, width, depth, in_channels)\n', (19051, 19098), True, 'import numpy as np\n'), ((20025, 20048), 'numpy.random.rand', 'np.random.rand', (['(4)', '(6)', '(8)'], {}), '(4, 6, 8)\n', (20039, 20048), True, 'import numpy as np\n'), ((20282, 20308), 'numpy.random.rand', 'np.random.rand', (['(2)', '(4)', '(6)', '(8)'], {}), '(2, 4, 6, 8)\n', (20296, 20308), True, 'import numpy as np\n'), ((20536, 20565), 'numpy.random.rand', 'np.random.rand', (['(2)', '(4)', '(6)', '(8)', '(2)'], {}), '(2, 4, 6, 8, 2)\n', (20550, 20565), True, 'import numpy as np\n'), ((23593, 23627), 'numpy.random.rand', 'np.random.rand', (['n_test', '(2 * n_feat)'], {}), '(n_test, 2 * n_feat)\n', (23607, 23627), True, 'import numpy as np\n'), ((23666, 23696), 'numpy.random.rand', 'np.random.rand', (['n_test', 'n_feat'], {}), '(n_test, n_feat)\n', (23680, 23696), True, 'import numpy as np\n'), ((23734, 23764), 'numpy.random.rand', 'np.random.rand', (['n_test', 'n_feat'], {}), '(n_test, n_feat)\n', (23748, 23764), True, 'import numpy as np\n'), ((24359, 24389), 'numpy.random.rand', 'np.random.rand', (['n_test', 'n_feat'], {}), 
'(n_test, n_feat)\n', (24373, 24389), True, 'import numpy as np\n'), ((24425, 24458), 'numpy.random.rand', 'np.random.rand', (['n_support', 'n_feat'], {}), '(n_support, n_feat)\n', (24439, 24458), True, 'import numpy as np\n'), ((24977, 25007), 'numpy.random.rand', 'np.random.rand', (['n_test', 'n_feat'], {}), '(n_test, n_feat)\n', (24991, 25007), True, 'import numpy as np\n'), ((25043, 25076), 'numpy.random.rand', 'np.random.rand', (['n_support', 'n_feat'], {}), '(n_support, n_feat)\n', (25057, 25076), True, 'import numpy as np\n'), ((25546, 25584), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'n_features'], {}), '(batch_size, n_features)\n', (25560, 25584), True, 'import numpy as np\n'), ((25901, 25922), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (25915, 25922), True, 'import numpy as np\n'), ((25957, 25978), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (25971, 25978), True, 'import numpy as np\n'), ((26369, 26398), 'numpy.random.rand', 'np.random.rand', (['n_atoms', 'ndim'], {}), '(n_atoms, ndim)\n', (26383, 26398), True, 'import numpy as np\n'), ((26428, 26460), 'numpy.random.randint', 'np.random.randint', (['(0)', '(2)', 'n_atoms'], {}), '(0, 2, n_atoms)\n', (26445, 26460), True, 'import numpy as np\n'), ((27332, 27353), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (27346, 27353), True, 'import numpy as np\n'), ((27388, 27409), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (27402, 27409), True, 'import numpy as np\n'), ((27938, 27967), 'numpy.random.rand', 'np.random.rand', (['N_atoms', 'ndim'], {}), '(N_atoms, ndim)\n', (27952, 27967), True, 'import numpy as np\n'), ((28366, 28387), 'numpy.random.rand', 'np.random.rand', (['(5)', '(10)'], {}), '(5, 10)\n', (28380, 28387), True, 'import numpy as np\n'), ((29006, 29055), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'max_atoms', 'dimensions'], {}), '(batch_size, max_atoms, 
dimensions)\n', (29020, 29055), True, 'import numpy as np\n'), ((29699, 29733), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length'], {}), '(batch_size, length)\n', (29713, 29733), True, 'import numpy as np\n'), ((29768, 29802), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length'], {}), '(batch_size, length)\n', (29782, 29802), True, 'import numpy as np\n'), ((30636, 30651), 'numpy.ones', 'np.ones', (['(3, 4)'], {}), '((3, 4))\n', (30643, 30651), True, 'import numpy as np\n'), ((30686, 30701), 'numpy.ones', 'np.ones', (['(2, 2)'], {}), '((2, 2))\n', (30693, 30701), True, 'import numpy as np\n'), ((30975, 31009), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length'], {}), '(batch_size, length)\n', (30989, 31009), True, 'import numpy as np\n'), ((31044, 31078), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'length'], {}), '(batch_size, length)\n', (31058, 31078), True, 'import numpy as np\n'), ((31817, 31857), 'numpy.random.rand', 'np.random.rand', (['batch_size', 'max_atoms', '(4)'], {}), '(batch_size, max_atoms, 4)\n', (31831, 31857), True, 'import numpy as np\n'), ((32215, 32252), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '(10, 100, 50)'}), '(size=(10, 100, 50))\n', (32232, 32252), True, 'import numpy as np\n'), ((32285, 32326), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '(10, 100, 5, 100)'}), '(size=(10, 100, 5, 100))\n', (32302, 32326), True, 'import numpy as np\n'), ((33166, 33203), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '(10, 100, 50)'}), '(size=(10, 100, 50))\n', (33183, 33203), True, 'import numpy as np\n'), ((33236, 33277), 'numpy.random.uniform', 'np.random.uniform', ([], {'size': '(10, 100, 5, 100)'}), '(size=(10, 100, 5, 100))\n', (33253, 33277), True, 'import numpy as np\n'), ((33978, 34002), 'numpy.random.rand', 'np.random.rand', (['n_logits'], {}), '(n_logits)\n', (33992, 34002), True, 'import numpy as np\n'), ((34037, 34061), 'numpy.random.rand', 
'np.random.rand', (['n_labels'], {}), '(n_labels)\n', (34051, 34061), True, 'import numpy as np\n')]
|
# Solution of;
# Project Euler Problem 49: Prime permutations
# https://projecteuler.net/problem=49
#
# The arithmetic sequence, 1487, 4817, 8147, in which each of the terms
# increases by 3330, is unusual in two ways: (i) each of the three terms are
# prime, and, (ii) each of the 4-digit numbers are permutations of one
# another. There are no arithmetic sequences made up of three 1-, 2-, or
# 3-digit primes, exhibiting this property, but there is one other 4-digit
# increasing sequence. What 12-digit number do you form by concatenating the
# three terms in this sequence?
#
# by lcsm29 http://github.com/lcsm29/project-euler
import timed
def dummy(n):
    """Placeholder solver stub: accepts the input size *n* and does nothing.

    Exists only so the timing harness (`timed.caller`) has a callable to
    benchmark until a real solution is implemented.
    """
    return None
if __name__ == '__main__':
    # Benchmark the (placeholder) solver: run `iterations` repetitions at
    # input size `input_size`, reporting under Project Euler problem 49.
    input_size = 1000
    iterations = 10000
    problem_id = 49
    timed.caller(dummy, input_size, iterations, problem_id)
|
[
"timed.caller"
] |
[((755, 789), 'timed.caller', 'timed.caller', (['dummy', 'n', 'i', 'prob_id'], {}), '(dummy, n, i, prob_id)\n', (767, 789), False, 'import timed\n')]
|
#!/usr/bin/env python3
# -*- coding: UTF-8 -*-
from pathlib import Path
import pandas as pd
from numpy import around
if __name__ == "__main__":
    # Compare the scoring seasons of three players:
    #   Harden's PPG is from the 2018-19 season
    #   Bryant's PPG is from the 2005-06 season
    #   Jordan's PPG is from the 1986-87 season
    per_game_df = pd.read_csv(Path('../data/compare_players_per_game.csv'))
    per_48_df = pd.read_csv(Path('../data/compare_players_per_48.csv'))
    per_100_df = pd.read_csv(Path('../data/compare_players_per_100_poss.csv'))

    # (player name, season label, league-average TS% for that season)
    # TS% sources:
    #   2018-19: https://www.basketball-reference.com/leagues/NBA_2019.html#all_misc_stats
    #   2005-06: https://www.basketball-reference.com/leagues/NBA_2006.html#all_misc_stats
    #   1986-87: https://www.basketball-reference.com/leagues/NBA_1987.html#all_misc_stats
    players = (
        ('<NAME>', '2018-19', 0.560),
        ('<NAME>', '2005-06', 0.536),
        ('<NAME>', '1986-87', 0.538),
    )

    def print_player_summary(name, season, league_avg_ts):
        """Print the per-game, per-48 and per-100 scoring lines for one player.

        Looks the player up by the 'Player' column in the three dataframes
        loaded above and takes the first matching row of each.
        """
        per_game = per_game_df[per_game_df['Player'] == name]
        per_48 = per_48_df[per_48_df['Player'] == name]
        per_100 = per_100_df[per_100_df['Player'] == name]
        games = per_game['G'].values[0]
        ppg = per_game['PTS'].values[0]
        efg = per_game['eFG%'].values[0]
        ts = per_game['TS%'].values[0]
        mpg = per_game['MP'].values[0]
        pp48 = per_48['PTS'].values[0]
        pp100 = per_100['PTS'].values[0]
        print('{} in {}: {} games, {} PPG, {}eFG%, {}TS% in {} minutes per game'
              .format(name, season, games, ppg, efg, ts, mpg))
        # Word order fixed ("in was that season" -> "was in that season") so
        # all three players get the same, grammatical sentence.
        print('He was {} more efficient than the average player was in that season'
              .format(around(ts - league_avg_ts, 3)))
        print('In the same season, he had {} Points per 48 minutes, and {} Points per 100 possessions'
              .format(pp48, pp100))

    for index, (name, season, league_avg_ts) in enumerate(players):
        if index:  # separator between players, not after the last one
            print('\n------------------------------------------------------------------------------------------\n')
        print_player_summary(name, season, league_avg_ts)
|
[
"numpy.around",
"pathlib.Path"
] |
[((303, 347), 'pathlib.Path', 'Path', (['"""../data/compare_players_per_game.csv"""'], {}), "('../data/compare_players_per_game.csv')\n", (307, 347), False, 'from pathlib import Path\n'), ((377, 419), 'pathlib.Path', 'Path', (['"""../data/compare_players_per_48.csv"""'], {}), "('../data/compare_players_per_48.csv')\n", (381, 419), False, 'from pathlib import Path\n'), ((450, 498), 'pathlib.Path', 'Path', (['"""../data/compare_players_per_100_poss.csv"""'], {}), "('../data/compare_players_per_100_poss.csv')\n", (454, 498), False, 'from pathlib import Path\n'), ((2895, 2943), 'numpy.around', 'around', (['(harden_ts - avg_TS_for_2018_19_season)', '(3)'], {}), '(harden_ts - avg_TS_for_2018_19_season, 3)\n', (2901, 2943), False, 'from numpy import around\n'), ((3492, 3540), 'numpy.around', 'around', (['(bryant_ts - avg_TS_for_2005_06_season)', '(3)'], {}), '(bryant_ts - avg_TS_for_2005_06_season, 3)\n', (3498, 3540), False, 'from numpy import around\n'), ((4089, 4137), 'numpy.around', 'around', (['(jordan_ts - avg_TS_for_1986_87_season)', '(3)'], {}), '(jordan_ts - avg_TS_for_1986_87_season, 3)\n', (4095, 4137), False, 'from numpy import around\n')]
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
# Packaging metadata for the EasyVista plugin for Rapid7 InsightConnect.
# This file is produced by the Komand/InsightConnect SDK; regenerate it with
# the SDK rather than editing it by hand.
from setuptools import setup, find_packages
setup(name="easyvista-rapid7-plugin",
      version="1.0.0",
      description="EasyVista Service Manager platform supports even the most complex requirements, while bringing a new level of simplicity, agility, and mobility required to make cloud based IT Service Management (ITSM) software easy to use and easy to deliver. Using the EasyVista plugin for Rapid7 InsightConnect, users can manage the creation, update, search and closure of incident, service request, problem or event tickets",
      author="rapid7",
      author_email="",
      url="",
      packages=find_packages(),  # auto-discover every package directory in the plugin
      install_requires=['insightconnect-plugin-runtime'],  # Add third-party dependencies to requirements.txt, not here!
      scripts=['bin/icon_easyvista']  # console entry point installed with the plugin
      )
|
[
"setuptools.find_packages"
] |
[((654, 669), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (667, 669), False, 'from setuptools import setup, find_packages\n')]
|
# stdlib
from copy import deepcopy
from functools import wraps
import os
import tempfile
from time import time
# 3p
import mock
# datadog
from datadog import initialize, api, util
from datadog.api import (
Distribution,
Metric,
ServiceCheck
)
from datadog.api.exceptions import ApiError, ApiNotInitialized
from datadog.util.compat import is_p3k
from tests.unit.api.helper import (
DatadogAPIWithInitialization,
DatadogAPINoInitialization,
MyCreatable,
MyUpdatable,
MyDeletable,
MyGetable,
MyListable,
MyListableSubResource,
MyAddableSubResource,
MyUpdatableSubResource,
MyDeletableSubResource,
MyActionable,
API_KEY,
APP_KEY,
API_HOST,
HOST_NAME,
FAKE_PROXY
)
from tests.util.contextmanagers import EnvVars
class TestInitialization(DatadogAPINoInitialization):
def test_no_initialization_fails(self):
"""
Raise ApiNotInitialized exception when `initialize` has not ran or no API key was set.
"""
self.assertRaises(ApiNotInitialized, MyCreatable.create)
# No API key => only stats in statsd mode should work
initialize()
api._api_key = None
self.assertRaises(ApiNotInitialized, MyCreatable.create)
# Finally, initialize with an API key
initialize(api_key=API_KEY, api_host=API_HOST)
MyCreatable.create()
self.assertEqual(self.request_mock.call_count(), 1)
@mock.patch('datadog.util.config.get_config_path')
def test_get_hostname(self, mock_config_path):
"""
API hostname parameter fallback with Datadog Agent hostname when available.
"""
# Generate a fake agent config
tmpfilepath = os.path.join(tempfile.gettempdir(), "tmp-agentconfig")
with open(tmpfilepath, "wb") as f:
if is_p3k():
f.write(bytes("[Main]\n", 'UTF-8'))
f.write(bytes("hostname: {0}\n".format(HOST_NAME), 'UTF-8'))
else:
f.write("[Main]\n")
f.write("hostname: {0}\n".format(HOST_NAME))
# Mock get_config_path to return this fake agent config
mock_config_path.return_value = tmpfilepath
initialize()
self.assertEqual(api._host_name, HOST_NAME, api._host_name)
def test_request_parameters(self):
"""
API parameters are set with `initialize` method.
"""
# Test API, application keys, API host, and some HTTP client options
initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST)
# Make a simple API call
MyCreatable.create()
_, options = self.request_mock.call_args()
# Assert `requests` parameters
self.assertIn('params', options)
self.assertIn('api_key', options['params'])
self.assertEqual(options['params']['api_key'], API_KEY)
self.assertIn('application_key', options['params'])
self.assertEqual(options['params']['application_key'], APP_KEY)
self.assertIn('headers', options)
self.assertEqual(options['headers'], {'Content-Type': 'application/json'})
def test_initialize_options(self):
"""
HTTP client and API options are set with `initialize` method.
"""
initialize(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST,
proxies=FAKE_PROXY, cacert=False)
# Make a simple API call
MyCreatable.create()
_, options = self.request_mock.call_args()
# Assert `requests` parameters
self.assertIn('proxies', options)
self.assertEqual(options['proxies'], FAKE_PROXY)
self.assertIn('verify', options)
self.assertEqual(options['verify'], False)
# Arm the `requests` to raise
self.arm_requests_to_raise()
# No exception should be raised (mute=True by default)
MyCreatable.create()
# Repeat with mute to False
initialize(api_key=API_KEY, mute=False)
self.assertRaises(ApiError, MyCreatable.create)
def test_return_raw_response(self):
# Test default initialization sets return_raw_response to False
initialize()
assert not api._return_raw_response
# Assert that we can set this to True
initialize(return_raw_response=True)
assert api._return_raw_response
# Assert we get multiple fields back when set to True
initialize(api_key="<KEY>", app_key="123456", return_raw_response=True)
data, raw = api.Monitor.get_all()
def test_default_values(self):
with EnvVars(ignore=[
"DATADOG_API_KEY",
"DATADOG_APP_KEY",
"DD_API_KEY",
"DD_APP_KEY"
]):
initialize()
self.assertIsNone(api._api_key)
self.assertIsNone(api._application_key)
self.assertEqual(api._api_host, "https://api.datadoghq.com")
self.assertEqual(api._host_name, util.hostname.get_hostname())
def test_env_var_values(self):
with EnvVars(
env_vars={
"DATADOG_API_KEY": "API_KEY_ENV",
"DATADOG_APP_KEY": "APP_KEY_ENV",
"DATADOG_HOST": "HOST_ENV",
}
):
initialize()
self.assertEqual(api._api_key, "API_KEY_ENV")
self.assertEqual(api._application_key, "APP_KEY_ENV")
self.assertEqual(api._api_host, "HOST_ENV")
self.assertEqual(api._host_name, util.hostname.get_hostname())
del os.environ["DATADOG_API_KEY"]
del os.environ["DATADOG_APP_KEY"]
del os.environ["DATADOG_HOST"]
with EnvVars(env_vars={
"DD_API_KEY": "API_KEY_ENV_DD",
"DD_APP_KEY": "APP_KEY_ENV_DD",
}):
api._api_key = None
api._application_key = None
initialize()
self.assertEqual(api._api_key, "API_KEY_ENV_DD")
self.assertEqual(api._application_key, "APP_KEY_ENV_DD")
def test_function_param_value(self):
initialize(api_key="API_KEY", app_key="APP_KEY", api_host="HOST", host_name="HOSTNAME")
self.assertEqual(api._api_key, "API_KEY")
self.assertEqual(api._application_key, "APP_KEY")
self.assertEqual(api._api_host, "HOST")
self.assertEqual(api._host_name, "HOSTNAME")
def test_precedence(self):
# Initialize first with env vars
with EnvVars(env_vars={
"DD_API_KEY": "API_KEY_ENV_DD",
"DD_APP_KEY": "APP_KEY_ENV_DD",
}):
os.environ["DATADOG_API_KEY"] = "API_KEY_ENV"
os.environ["DATADOG_APP_KEY"] = "APP_KEY_ENV"
os.environ["DATADOG_HOST"] = "HOST_ENV"
initialize()
self.assertEqual(api._api_key, "API_KEY_ENV")
self.assertEqual(api._application_key, "APP_KEY_ENV")
self.assertEqual(api._api_host, "HOST_ENV")
self.assertEqual(api._host_name, util.hostname.get_hostname())
# Initialize again to check given parameters take precedence over already set value and env vars
initialize(api_key="API_KEY", app_key="APP_KEY", api_host="HOST", host_name="HOSTNAME")
self.assertEqual(api._api_key, "API_KEY")
self.assertEqual(api._application_key, "APP_KEY")
self.assertEqual(api._api_host, "HOST")
self.assertEqual(api._host_name, "HOSTNAME")
# Initialize again without specifying attributes to check that already initialized value takes precedence
initialize()
self.assertEqual(api._api_key, "API_KEY")
self.assertEqual(api._application_key, "APP_KEY")
self.assertEqual(api._api_host, "HOST")
self.assertEqual(api._host_name, "HOSTNAME")
del os.environ["DATADOG_API_KEY"]
del os.environ["DATADOG_APP_KEY"]
del os.environ["DATADOG_HOST"]
class TestResources(DatadogAPIWithInitialization):
def test_creatable(self):
"""
Creatable resource logic.
"""
MyCreatable.create(mydata="val")
self.request_called_with('POST', API_HOST + "/api/v1/creatables", data={'mydata': "val"})
MyCreatable.create(mydata="val", attach_host_name=True)
self.request_called_with('POST', API_HOST + "/api/v1/creatables",
data={'mydata': "val", 'host': api._host_name})
def test_getable(self):
"""
Getable resource logic.
"""
getable_object_id = 123
MyGetable.get(getable_object_id, otherparam="val")
self.request_called_with('GET', API_HOST + "/api/v1/getables/" + str(getable_object_id),
params={'otherparam': "val"})
_, kwargs = self.request_mock.call_args()
self.assertIsNone(kwargs["data"])
def test_listable(self):
"""
Listable resource logic.
"""
MyListable.get_all(otherparam="val")
self.request_called_with('GET', API_HOST + "/api/v1/listables", params={'otherparam': "val"})
_, kwargs = self.request_mock.call_args()
self.assertIsNone(kwargs["data"])
def test_updatable(self):
"""
Updatable resource logic.
"""
updatable_object_id = 123
MyUpdatable.update(updatable_object_id, params={'myparam': "val1"}, mydata="val2")
self.request_called_with('PUT', API_HOST + "/api/v1/updatables/" + str(updatable_object_id),
params={'myparam': "val1"}, data={'mydata': "val2"})
def test_detalable(self):
"""
Deletable resource logic.
"""
deletable_object_id = 123
MyDeletable.delete(deletable_object_id, otherparam="val")
self.request_called_with('DELETE', API_HOST + "/api/v1/deletables/" + str(deletable_object_id),
params={'otherparam': "val"})
def test_listable_sub_resources(self):
"""
Listable sub-resources logic.
"""
resource_id = 123
MyListableSubResource.get_items(resource_id, otherparam="val")
self.request_called_with(
'GET',
API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id),
params={'otherparam': "val"}
)
_, kwargs = self.request_mock.call_args()
self.assertIsNone(kwargs["data"])
def test_addable_sub_resources(self):
"""
Addable sub-resources logic.
"""
resource_id = 123
MyAddableSubResource.add_items(resource_id, params={'myparam': 'val1'}, mydata='val2')
self.request_called_with(
'POST',
API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id),
params={'myparam': 'val1'},
data={'mydata': 'val2'}
)
def test_updatable_sub_resources(self):
"""
Updatable sub-resources logic.
"""
resource_id = 123
MyUpdatableSubResource.update_items(resource_id, params={'myparam': 'val1'}, mydata='val2')
self.request_called_with(
'PUT',
API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id),
params={'myparam': 'val1'},
data={'mydata': 'val2'}
)
def test_deletable_sub_resources(self):
"""
Deletable sub-resources logic.
"""
resource_id = 123
MyDeletableSubResource.delete_items(resource_id, params={'myparam': 'val1'}, mydata='val2')
self.request_called_with(
'DELETE',
API_HOST + '/api/v1/resource_name/{0}/sub_resource_name'.format(resource_id),
params={'myparam': 'val1'},
data={'mydata': 'val2'}
)
def test_actionable(self):
"""
Actionable resource logic.
"""
actionable_object_id = 123
MyActionable.trigger_class_action(
'POST',
'actionname',
id=actionable_object_id,
params={'myparam': 'val1'},
mydata='val',
mydata2='val2'
)
self.request_called_with(
'POST',
API_HOST + '/api/v1/actionables/{0}/actionname'.format(str(actionable_object_id)),
params={'myparam': 'val1'},
data={'mydata': 'val', 'mydata2': 'val2'}
)
MyActionable.trigger_class_action(
'POST',
'actionname',
id=actionable_object_id,
mydata='val',
mydata2='val2'
)
self.request_called_with(
'POST',
API_HOST +'/api/v1/actionables/{0}/actionname'.format(str(actionable_object_id)),
params={},
data={'mydata': 'val', 'mydata2': 'val2'}
)
MyActionable.trigger_class_action(
'GET',
'actionname',
id=actionable_object_id,
params={'param1': 'val1', 'param2': 'val2'}
)
self.request_called_with(
'GET',
API_HOST + '/api/v1/actionables/{0}/actionname'.format(str(actionable_object_id)),
params={'param1': 'val1', 'param2': 'val2'}
)
_, kwargs = self.request_mock.call_args()
self.assertIsNone(kwargs["data"])
MyActionable.trigger_action(
'POST',
'actionname',
id=actionable_object_id,
mydata="val"
)
self.request_called_with(
'POST',
API_HOST + '/api/v1/actionname/{0}'.format(actionable_object_id),
data={'mydata': "val"}
)
MyActionable.trigger_action(
'GET',
'actionname',
id=actionable_object_id,
)
self.request_called_with(
'GET',
API_HOST + '/api/v1/actionname/{0}'.format(actionable_object_id)
)
_, kwargs = self.request_mock.call_args()
self.assertIsNone(kwargs["data"])
class TestMetricResource(DatadogAPIWithInitialization):
def submit_and_assess_metric_payload(self, serie, attach_host_name=True):
"""
Helper to assess the metric payload format.
"""
now = time()
if isinstance(serie, dict):
Metric.send(attach_host_name=attach_host_name, **deepcopy(serie))
serie = [serie]
else:
Metric.send(deepcopy(serie), attach_host_name=attach_host_name)
payload = self.get_request_data()
for i, metric in enumerate(payload['series']):
if attach_host_name:
self.assertEqual(set(metric.keys()), set(['metric', 'points', 'host']))
self.assertEqual(metric['host'], api._host_name)
else:
self.assertEqual(set(metric.keys()), set(['metric', 'points']))
self.assertEqual(metric['metric'], serie[i]['metric'])
# points is a list of 1 point
self.assertTrue(isinstance(metric['points'], list))
self.assertEqual(len(metric['points']), 1)
# it consists of a [time, value] pair
self.assertEqual(len(metric['points'][0]), 2)
# its value == value we sent
self.assertEqual(metric['points'][0][1], float(serie[i]['points']))
# it's time not so far from current time
assert now - 1 < metric['points'][0][0] < now + 1
def submit_and_assess_dist_payload(self, serie, attach_host_name=True):
"""
Helper to assess the metric payload format.
"""
now = time()
if isinstance(serie, dict):
Distribution.send(attach_host_name=attach_host_name, **deepcopy(serie))
serie = [serie]
else:
Distribution.send(deepcopy(serie), attach_host_name=attach_host_name)
payload = self.get_request_data()
for i, metric in enumerate(payload['series']):
if attach_host_name:
self.assertEqual(set(metric.keys()), set(['metric', 'points', 'host']))
self.assertEqual(metric['host'], api._host_name)
else:
self.assertEqual(set(metric.keys()), set(['metric', 'points']))
self.assertEqual(metric['metric'], serie[i]['metric'])
# points is a list of 1 point
self.assertTrue(isinstance(metric['points'], list))
self.assertEqual(len(metric['points']), 1)
# it consists of a [time, value] pair
self.assertEqual(len(metric['points'][0]), 2)
# its value == value we sent
self.assertEqual(metric['points'][0][1], serie[i]['points'][0][1])
# it's time not so far from current time
assert now - 1 < metric['points'][0][0] < now + 1
def test_metric_submit_query_switch(self):
"""
Endpoints are different for submission and queries.
"""
Metric.send(points=(123, 456))
self.request_called_with('POST', API_HOST + "/api/v1/series",
data={'series': [{'points': [[123, 456.0]], 'host': api._host_name}]})
Metric.query(start="val1", end="val2")
self.request_called_with('GET', API_HOST + "/api/v1/query",
params={'from': "val1", 'to': "val2"})
def test_points_submission(self):
"""
Assess the data payload format, when submitting a single or multiple points.
"""
# Single point
serie = dict(metric='metric.1', points=13)
self.submit_and_assess_metric_payload(serie)
# Multiple point
serie = [dict(metric='metric.1', points=13),
dict(metric='metric.2', points=19)]
self.submit_and_assess_metric_payload(serie)
# Single point no hostname
serie = dict(metric='metric.1', points=13)
self.submit_and_assess_metric_payload(serie, attach_host_name=False)
# Multiple point no hostname
serie = [dict(metric='metric.1', points=13),
dict(metric='metric.2', points=19)]
self.submit_and_assess_metric_payload(serie, attach_host_name=False)
def test_dist_points_submission(self):
"""
Assess the distribution data payload format, when submitting a single or multiple points.
"""
# Single point
serie = dict(metric='metric.1', points=[[time(), [13]]])
self.submit_and_assess_dist_payload(serie)
# Multiple point
serie = [dict(metric='metric.1', points=[[time(), [13]]]),
dict(metric='metric.2', points=[[time(), [19]]])]
self.submit_and_assess_dist_payload(serie)
# Single point no hostname
serie = dict(metric='metric.1', points=[[time(), [13]]])
self.submit_and_assess_dist_payload(serie, attach_host_name=False)
# Multiple point no hostname
serie = [dict(metric='metric.1', points=[[time(), [13]]]),
dict(metric='metric.2', points=[[time(), [19]]])]
self.submit_and_assess_dist_payload(serie, attach_host_name=False)
def test_data_type_support(self):
"""
`Metric` API supports `real` numerical data types.
"""
from decimal import Decimal
from fractions import Fraction
m_long = int(1) # long in Python 3.x
if not is_p3k():
m_long = long(1)
supported_data_types = [1, 1.0, m_long, Decimal(1), Fraction(1, 2)]
for point in supported_data_types:
serie = dict(metric='metric.numerical', points=point)
self.submit_and_assess_metric_payload(serie)
class TestServiceCheckResource(DatadogAPIWithInitialization):
def test_service_check_supports_none_parameters(self):
"""
ServiceCheck should support none parameters
```
$ dog service_check check check_pg host0 1
```
resulted in `RuntimeError: dictionary changed size during iteration`
"""
ServiceCheck.check(
check='check_pg', host_name='host0', status=1, message=None,
timestamp=None, tags=None)
|
[
"tests.unit.api.helper.MyListableSubResource.get_items",
"datadog.initialize",
"tests.unit.api.helper.MyAddableSubResource.add_items",
"datadog.api.Monitor.get_all",
"tests.unit.api.helper.MyDeletableSubResource.delete_items",
"copy.deepcopy",
"datadog.util.hostname.get_hostname",
"mock.patch",
"tests.unit.api.helper.MyCreatable.create",
"datadog.api.ServiceCheck.check",
"fractions.Fraction",
"tests.unit.api.helper.MyUpdatableSubResource.update_items",
"tests.util.contextmanagers.EnvVars",
"datadog.util.compat.is_p3k",
"tests.unit.api.helper.MyGetable.get",
"tests.unit.api.helper.MyActionable.trigger_action",
"datadog.api.Metric.query",
"datadog.api.Metric.send",
"tests.unit.api.helper.MyUpdatable.update",
"time.time",
"tests.unit.api.helper.MyListable.get_all",
"tests.unit.api.helper.MyActionable.trigger_class_action",
"tempfile.gettempdir",
"tests.unit.api.helper.MyDeletable.delete",
"decimal.Decimal"
] |
[((1448, 1497), 'mock.patch', 'mock.patch', (['"""datadog.util.config.get_config_path"""'], {}), "('datadog.util.config.get_config_path')\n", (1458, 1497), False, 'import mock\n'), ((1145, 1157), 'datadog.initialize', 'initialize', ([], {}), '()\n', (1155, 1157), False, 'from datadog import initialize, api, util\n'), ((1306, 1352), 'datadog.initialize', 'initialize', ([], {'api_key': 'API_KEY', 'api_host': 'API_HOST'}), '(api_key=API_KEY, api_host=API_HOST)\n', (1316, 1352), False, 'from datadog import initialize, api, util\n'), ((1361, 1381), 'tests.unit.api.helper.MyCreatable.create', 'MyCreatable.create', ([], {}), '()\n', (1379, 1381), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((2210, 2222), 'datadog.initialize', 'initialize', ([], {}), '()\n', (2220, 2222), False, 'from datadog import initialize, api, util\n'), ((2497, 2560), 'datadog.initialize', 'initialize', ([], {'api_key': 'API_KEY', 'app_key': 'APP_KEY', 'api_host': 'API_HOST'}), '(api_key=API_KEY, app_key=APP_KEY, api_host=API_HOST)\n', (2507, 2560), False, 'from datadog import initialize, api, util\n'), ((2603, 2623), 'tests.unit.api.helper.MyCreatable.create', 'MyCreatable.create', ([], {}), '()\n', (2621, 2623), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((3274, 3376), 'datadog.initialize', 'initialize', ([], {'api_key': 'API_KEY', 'app_key': 'APP_KEY', 'api_host': 'API_HOST', 'proxies': 'FAKE_PROXY', 'cacert': '(False)'}), '(api_key=API_KEY, 
app_key=APP_KEY, api_host=API_HOST, proxies=\n FAKE_PROXY, cacert=False)\n', (3284, 3376), False, 'from datadog import initialize, api, util\n'), ((3433, 3453), 'tests.unit.api.helper.MyCreatable.create', 'MyCreatable.create', ([], {}), '()\n', (3451, 3453), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((3886, 3906), 'tests.unit.api.helper.MyCreatable.create', 'MyCreatable.create', ([], {}), '()\n', (3904, 3906), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((3952, 3991), 'datadog.initialize', 'initialize', ([], {'api_key': 'API_KEY', 'mute': '(False)'}), '(api_key=API_KEY, mute=False)\n', (3962, 3991), False, 'from datadog import initialize, api, util\n'), ((4170, 4182), 'datadog.initialize', 'initialize', ([], {}), '()\n', (4180, 4182), False, 'from datadog import initialize, api, util\n'), ((4281, 4317), 'datadog.initialize', 'initialize', ([], {'return_raw_response': '(True)'}), '(return_raw_response=True)\n', (4291, 4317), False, 'from datadog import initialize, api, util\n'), ((4428, 4499), 'datadog.initialize', 'initialize', ([], {'api_key': '"""<KEY>"""', 'app_key': '"""123456"""', 'return_raw_response': '(True)'}), "(api_key='<KEY>', app_key='123456', return_raw_response=True)\n", (4438, 4499), False, 'from datadog import initialize, api, util\n'), ((4520, 4541), 'datadog.api.Monitor.get_all', 'api.Monitor.get_all', ([], {}), '()\n', (4539, 4541), False, 'from datadog import initialize, api, util\n'), ((6119, 
6211), 'datadog.initialize', 'initialize', ([], {'api_key': '"""API_KEY"""', 'app_key': '"""APP_KEY"""', 'api_host': '"""HOST"""', 'host_name': '"""HOSTNAME"""'}), "(api_key='API_KEY', app_key='APP_KEY', api_host='HOST', host_name\n ='HOSTNAME')\n", (6129, 6211), False, 'from datadog import initialize, api, util\n'), ((8164, 8196), 'tests.unit.api.helper.MyCreatable.create', 'MyCreatable.create', ([], {'mydata': '"""val"""'}), "(mydata='val')\n", (8182, 8196), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((8304, 8359), 'tests.unit.api.helper.MyCreatable.create', 'MyCreatable.create', ([], {'mydata': '"""val"""', 'attach_host_name': '(True)'}), "(mydata='val', attach_host_name=True)\n", (8322, 8359), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((8640, 8690), 'tests.unit.api.helper.MyGetable.get', 'MyGetable.get', (['getable_object_id'], {'otherparam': '"""val"""'}), "(getable_object_id, otherparam='val')\n", (8653, 8690), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((9038, 9074), 'tests.unit.api.helper.MyListable.get_all', 'MyListable.get_all', ([], {'otherparam': '"""val"""'}), "(otherparam='val')\n", (9056, 9074), False, 'from 
tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((9400, 9487), 'tests.unit.api.helper.MyUpdatable.update', 'MyUpdatable.update', (['updatable_object_id'], {'params': "{'myparam': 'val1'}", 'mydata': '"""val2"""'}), "(updatable_object_id, params={'myparam': 'val1'}, mydata=\n 'val2')\n", (9418, 9487), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((9801, 9858), 'tests.unit.api.helper.MyDeletable.delete', 'MyDeletable.delete', (['deletable_object_id'], {'otherparam': '"""val"""'}), "(deletable_object_id, otherparam='val')\n", (9819, 9858), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((10166, 10228), 'tests.unit.api.helper.MyListableSubResource.get_items', 'MyListableSubResource.get_items', (['resource_id'], {'otherparam': '"""val"""'}), "(resource_id, otherparam='val')\n", (10197, 10228), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((10653, 10743), 
'tests.unit.api.helper.MyAddableSubResource.add_items', 'MyAddableSubResource.add_items', (['resource_id'], {'params': "{'myparam': 'val1'}", 'mydata': '"""val2"""'}), "(resource_id, params={'myparam': 'val1'},\n mydata='val2')\n", (10683, 10743), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((11112, 11207), 'tests.unit.api.helper.MyUpdatableSubResource.update_items', 'MyUpdatableSubResource.update_items', (['resource_id'], {'params': "{'myparam': 'val1'}", 'mydata': '"""val2"""'}), "(resource_id, params={'myparam': 'val1'},\n mydata='val2')\n", (11147, 11207), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((11575, 11670), 'tests.unit.api.helper.MyDeletableSubResource.delete_items', 'MyDeletableSubResource.delete_items', (['resource_id'], {'params': "{'myparam': 'val1'}", 'mydata': '"""val2"""'}), "(resource_id, params={'myparam': 'val1'},\n mydata='val2')\n", (11610, 11670), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((12033, 12181), 'tests.unit.api.helper.MyActionable.trigger_class_action', 'MyActionable.trigger_class_action', (['"""POST"""', '"""actionname"""'], {'id': 'actionable_object_id', 'params': "{'myparam': 'val1'}", 'mydata': 
'"""val"""', 'mydata2': '"""val2"""'}), "('POST', 'actionname', id=\n actionable_object_id, params={'myparam': 'val1'}, mydata='val', mydata2\n ='val2')\n", (12066, 12181), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((12516, 12631), 'tests.unit.api.helper.MyActionable.trigger_class_action', 'MyActionable.trigger_class_action', (['"""POST"""', '"""actionname"""'], {'id': 'actionable_object_id', 'mydata': '"""val"""', 'mydata2': '"""val2"""'}), "('POST', 'actionname', id=\n actionable_object_id, mydata='val', mydata2='val2')\n", (12549, 12631), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((12941, 13070), 'tests.unit.api.helper.MyActionable.trigger_class_action', 'MyActionable.trigger_class_action', (['"""GET"""', '"""actionname"""'], {'id': 'actionable_object_id', 'params': "{'param1': 'val1', 'param2': 'val2'}"}), "('GET', 'actionname', id=\n actionable_object_id, params={'param1': 'val1', 'param2': 'val2'})\n", (12974, 13070), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((13439, 13531), 'tests.unit.api.helper.MyActionable.trigger_action', 'MyActionable.trigger_action', (['"""POST"""', '"""actionname"""'], {'id': 'actionable_object_id', 
'mydata': '"""val"""'}), "('POST', 'actionname', id=actionable_object_id,\n mydata='val')\n", (13466, 13531), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((13772, 13845), 'tests.unit.api.helper.MyActionable.trigger_action', 'MyActionable.trigger_action', (['"""GET"""', '"""actionname"""'], {'id': 'actionable_object_id'}), "('GET', 'actionname', id=actionable_object_id)\n", (13799, 13845), False, 'from tests.unit.api.helper import DatadogAPIWithInitialization, DatadogAPINoInitialization, MyCreatable, MyUpdatable, MyDeletable, MyGetable, MyListable, MyListableSubResource, MyAddableSubResource, MyUpdatableSubResource, MyDeletableSubResource, MyActionable, API_KEY, APP_KEY, API_HOST, HOST_NAME, FAKE_PROXY\n'), ((14352, 14358), 'time.time', 'time', ([], {}), '()\n', (14356, 14358), False, 'from time import time\n'), ((15716, 15722), 'time.time', 'time', ([], {}), '()\n', (15720, 15722), False, 'from time import time\n'), ((17064, 17094), 'datadog.api.Metric.send', 'Metric.send', ([], {'points': '(123, 456)'}), '(points=(123, 456))\n', (17075, 17094), False, 'from datadog.api import Distribution, Metric, ServiceCheck\n'), ((17278, 17316), 'datadog.api.Metric.query', 'Metric.query', ([], {'start': '"""val1"""', 'end': '"""val2"""'}), "(start='val1', end='val2')\n", (17290, 17316), False, 'from datadog.api import Distribution, Metric, ServiceCheck\n'), ((20146, 20257), 'datadog.api.ServiceCheck.check', 'ServiceCheck.check', ([], {'check': '"""check_pg"""', 'host_name': '"""host0"""', 'status': '(1)', 'message': 'None', 'timestamp': 'None', 'tags': 'None'}), "(check='check_pg', host_name='host0', status=1, message=\n None, timestamp=None, tags=None)\n", (20164, 20257), False, 'from datadog.api 
import Distribution, Metric, ServiceCheck\n'), ((1731, 1752), 'tempfile.gettempdir', 'tempfile.gettempdir', ([], {}), '()\n', (1750, 1752), False, 'import tempfile\n'), ((1831, 1839), 'datadog.util.compat.is_p3k', 'is_p3k', ([], {}), '()\n', (1837, 1839), False, 'from datadog.util.compat import is_p3k\n'), ((4592, 4678), 'tests.util.contextmanagers.EnvVars', 'EnvVars', ([], {'ignore': "['DATADOG_API_KEY', 'DATADOG_APP_KEY', 'DD_API_KEY', 'DD_APP_KEY']"}), "(ignore=['DATADOG_API_KEY', 'DATADOG_APP_KEY', 'DD_API_KEY',\n 'DD_APP_KEY'])\n", (4599, 4678), False, 'from tests.util.contextmanagers import EnvVars\n'), ((4746, 4758), 'datadog.initialize', 'initialize', ([], {}), '()\n', (4756, 4758), False, 'from datadog import initialize, api, util\n'), ((5053, 5171), 'tests.util.contextmanagers.EnvVars', 'EnvVars', ([], {'env_vars': "{'DATADOG_API_KEY': 'API_KEY_ENV', 'DATADOG_APP_KEY': 'APP_KEY_ENV',\n 'DATADOG_HOST': 'HOST_ENV'}"}), "(env_vars={'DATADOG_API_KEY': 'API_KEY_ENV', 'DATADOG_APP_KEY':\n 'APP_KEY_ENV', 'DATADOG_HOST': 'HOST_ENV'})\n", (5060, 5171), False, 'from tests.util.contextmanagers import EnvVars\n'), ((5266, 5278), 'datadog.initialize', 'initialize', ([], {}), '()\n', (5276, 5278), False, 'from datadog import initialize, api, util\n'), ((6503, 6589), 'tests.util.contextmanagers.EnvVars', 'EnvVars', ([], {'env_vars': "{'DD_API_KEY': 'API_KEY_ENV_DD', 'DD_APP_KEY': 'APP_KEY_ENV_DD'}"}), "(env_vars={'DD_API_KEY': 'API_KEY_ENV_DD', 'DD_APP_KEY':\n 'APP_KEY_ENV_DD'})\n", (6510, 6589), False, 'from tests.util.contextmanagers import EnvVars\n'), ((6803, 6815), 'datadog.initialize', 'initialize', ([], {}), '()\n', (6813, 6815), False, 'from datadog import initialize, api, util\n'), ((7194, 7286), 'datadog.initialize', 'initialize', ([], {'api_key': '"""API_KEY"""', 'app_key': '"""APP_KEY"""', 'api_host': '"""HOST"""', 'host_name': '"""HOSTNAME"""'}), "(api_key='API_KEY', app_key='APP_KEY', api_host='HOST', host_name\n ='HOSTNAME')\n", (7204, 7286), False, 'from 
datadog import initialize, api, util\n'), ((7639, 7651), 'datadog.initialize', 'initialize', ([], {}), '()\n', (7649, 7651), False, 'from datadog import initialize, api, util\n'), ((19501, 19509), 'datadog.util.compat.is_p3k', 'is_p3k', ([], {}), '()\n', (19507, 19509), False, 'from datadog.util.compat import is_p3k\n'), ((19589, 19599), 'decimal.Decimal', 'Decimal', (['(1)'], {}), '(1)\n', (19596, 19599), False, 'from decimal import Decimal\n'), ((19601, 19615), 'fractions.Fraction', 'Fraction', (['(1)', '(2)'], {}), '(1, 2)\n', (19609, 19615), False, 'from fractions import Fraction\n'), ((4974, 5002), 'datadog.util.hostname.get_hostname', 'util.hostname.get_hostname', ([], {}), '()\n', (5000, 5002), False, 'from datadog import initialize, api, util\n'), ((5505, 5533), 'datadog.util.hostname.get_hostname', 'util.hostname.get_hostname', ([], {}), '()\n', (5531, 5533), False, 'from datadog import initialize, api, util\n'), ((5689, 5775), 'tests.util.contextmanagers.EnvVars', 'EnvVars', ([], {'env_vars': "{'DD_API_KEY': 'API_KEY_ENV_DD', 'DD_APP_KEY': 'APP_KEY_ENV_DD'}"}), "(env_vars={'DD_API_KEY': 'API_KEY_ENV_DD', 'DD_APP_KEY':\n 'APP_KEY_ENV_DD'})\n", (5696, 5775), False, 'from tests.util.contextmanagers import EnvVars\n'), ((5917, 5929), 'datadog.initialize', 'initialize', ([], {}), '()\n', (5927, 5929), False, 'from datadog import initialize, api, util\n'), ((7042, 7070), 'datadog.util.hostname.get_hostname', 'util.hostname.get_hostname', ([], {}), '()\n', (7068, 7070), False, 'from datadog import initialize, api, util\n'), ((14540, 14555), 'copy.deepcopy', 'deepcopy', (['serie'], {}), '(serie)\n', (14548, 14555), False, 'from copy import deepcopy\n'), ((15916, 15931), 'copy.deepcopy', 'deepcopy', (['serie'], {}), '(serie)\n', (15924, 15931), False, 'from copy import deepcopy\n'), ((14457, 14472), 'copy.deepcopy', 'deepcopy', (['serie'], {}), '(serie)\n', (14465, 14472), False, 'from copy import deepcopy\n'), ((15827, 15842), 'copy.deepcopy', 'deepcopy', 
(['serie'], {}), '(serie)\n', (15835, 15842), False, 'from copy import deepcopy\n'), ((18540, 18546), 'time.time', 'time', ([], {}), '()\n', (18544, 18546), False, 'from time import time\n'), ((18903, 18909), 'time.time', 'time', ([], {}), '()\n', (18907, 18909), False, 'from time import time\n'), ((18683, 18689), 'time.time', 'time', ([], {}), '()\n', (18687, 18689), False, 'from time import time\n'), ((18750, 18756), 'time.time', 'time', ([], {}), '()\n', (18754, 18756), False, 'from time import time\n'), ((19082, 19088), 'time.time', 'time', ([], {}), '()\n', (19086, 19088), False, 'from time import time\n'), ((19149, 19155), 'time.time', 'time', ([], {}), '()\n', (19153, 19155), False, 'from time import time\n')]
|
# -*- coding: utf-8 -*-
"""
Core views to provide custom operations
"""
import uuid
from datetime import datetime
from django.http import HttpResponseRedirect
from threepio import logger
from atmosphere import settings
from django_cyverse_auth.decorators import atmo_login_required
from django_cyverse_auth.models import Token as AuthToken
from core.models import AtmosphereUser as DjangoUser
@atmo_login_required
def emulate_request(request, username=None):
try:
logger.info("Emulate attempt: %s wants to be %s"
% (request.user, username))
logger.info(request.session.__dict__)
if not username and 'emulator' in request.session:
logger.info("Clearing emulation attributes from user")
username = request.session['emulator']
orig_token = request.session['emulator_token']
request.session['username'] = username
request.session['token'] = orig_token
del request.session['emulator']
del request.session['emulator_token']
# Allow user to fall through on line below
return HttpResponseRedirect(settings.REDIRECT_URL + "/api/v1/profile")
try:
user = DjangoUser.objects.get(username=username)
except DjangoUser.DoesNotExist:
logger.info("Emulate attempt failed. User <%s> does not exist"
% username)
return HttpResponseRedirect(
settings.REDIRECT_URL +
"/api/v1/profile")
logger.info("Emulate success, creating tokens for %s" % username)
token = AuthToken(
user=user,
key=str(uuid.uuid4()),
issuedTime=datetime.now(),
remote_ip=request.META['REMOTE_ADDR'],
api_server_url=settings.API_SERVER_URL
)
token.save()
# Keep original emulator+token if it exists, or use the last known username+token
if 'emulator' not in request.session:
original_emulator = request.session['username']
request.session['emulator'] = original_emulator
logger.info("Returning user %s - Emulated as user %s - to api profile "
% (original_emulator, username))
if 'emulator_token' not in request.session:
original_token = request.session['token']
request.session['emulator_token'] = original_token
# # Set the username to the user to be emulated
# # to whom the token also belongs
request.session['username'] = username
request.session['token'] = token.key
request.session.save()
logger.info(request.session.__dict__)
logger.info(request.user)
return HttpResponseRedirect(settings.REDIRECT_URL + "/api/v1/profile")
except Exception as e:
logger.warn("Emulate request failed")
logger.exception(e)
return HttpResponseRedirect(settings.REDIRECT_URL + "/api/v1/profile")
|
[
"django.http.HttpResponseRedirect",
"uuid.uuid4",
"datetime.datetime.now",
"threepio.logger.info",
"core.models.AtmosphereUser.objects.get",
"threepio.logger.exception",
"threepio.logger.warn"
] |
[((481, 557), 'threepio.logger.info', 'logger.info', (["('Emulate attempt: %s wants to be %s' % (request.user, username))"], {}), "('Emulate attempt: %s wants to be %s' % (request.user, username))\n", (492, 557), False, 'from threepio import logger\n'), ((586, 623), 'threepio.logger.info', 'logger.info', (['request.session.__dict__'], {}), '(request.session.__dict__)\n', (597, 623), False, 'from threepio import logger\n'), ((1544, 1609), 'threepio.logger.info', 'logger.info', (["('Emulate success, creating tokens for %s' % username)"], {}), "('Emulate success, creating tokens for %s' % username)\n", (1555, 1609), False, 'from threepio import logger\n'), ((2664, 2701), 'threepio.logger.info', 'logger.info', (['request.session.__dict__'], {}), '(request.session.__dict__)\n', (2675, 2701), False, 'from threepio import logger\n'), ((2710, 2735), 'threepio.logger.info', 'logger.info', (['request.user'], {}), '(request.user)\n', (2721, 2735), False, 'from threepio import logger\n'), ((2751, 2814), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["(settings.REDIRECT_URL + '/api/v1/profile')"], {}), "(settings.REDIRECT_URL + '/api/v1/profile')\n", (2771, 2814), False, 'from django.http import HttpResponseRedirect\n'), ((695, 749), 'threepio.logger.info', 'logger.info', (['"""Clearing emulation attributes from user"""'], {}), "('Clearing emulation attributes from user')\n", (706, 749), False, 'from threepio import logger\n'), ((1129, 1192), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["(settings.REDIRECT_URL + '/api/v1/profile')"], {}), "(settings.REDIRECT_URL + '/api/v1/profile')\n", (1149, 1192), False, 'from django.http import HttpResponseRedirect\n'), ((1226, 1267), 'core.models.AtmosphereUser.objects.get', 'DjangoUser.objects.get', ([], {'username': 'username'}), '(username=username)\n', (1248, 1267), True, 'from core.models import AtmosphereUser as DjangoUser\n'), ((2135, 2244), 'threepio.logger.info', 'logger.info', (["('Returning user 
%s - Emulated as user %s - to api profile ' % (\n original_emulator, username))"], {}), "('Returning user %s - Emulated as user %s - to api profile ' % (\n original_emulator, username))\n", (2146, 2244), False, 'from threepio import logger\n'), ((2850, 2887), 'threepio.logger.warn', 'logger.warn', (['"""Emulate request failed"""'], {}), "('Emulate request failed')\n", (2861, 2887), False, 'from threepio import logger\n'), ((2896, 2915), 'threepio.logger.exception', 'logger.exception', (['e'], {}), '(e)\n', (2912, 2915), False, 'from threepio import logger\n'), ((2931, 2994), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["(settings.REDIRECT_URL + '/api/v1/profile')"], {}), "(settings.REDIRECT_URL + '/api/v1/profile')\n", (2951, 2994), False, 'from django.http import HttpResponseRedirect\n'), ((1320, 1394), 'threepio.logger.info', 'logger.info', (["('Emulate attempt failed. User <%s> does not exist' % username)"], {}), "('Emulate attempt failed. User <%s> does not exist' % username)\n", (1331, 1394), False, 'from threepio import logger\n'), ((1438, 1501), 'django.http.HttpResponseRedirect', 'HttpResponseRedirect', (["(settings.REDIRECT_URL + '/api/v1/profile')"], {}), "(settings.REDIRECT_URL + '/api/v1/profile')\n", (1458, 1501), False, 'from django.http import HttpResponseRedirect\n'), ((1718, 1732), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1730, 1732), False, 'from datetime import datetime\n'), ((1680, 1692), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (1690, 1692), False, 'import uuid\n')]
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Check the state of an AWS AMI."""
from __future__ import annotations
import json
from typing import Any, Dict
import boto3
# Emitted once at import time (Lambda cold start) to confirm the module loaded.
print("Loading function get_image_status")
# Module-level client: created once per cold start so warm invocations reuse it.
ec2_client = boto3.client("ec2")
# {
# "instance_id": "i-identifier",
# "kms_id": "KMS ID",
# "account": "account_number",
# "instance_status": "should be there if in loop"
# "migrated_ami_id": "ami-identifier"
# }
def lambda_handler(event: Dict[str, Any], context: Any) -> str:
    """Return the current state of the migrated AMI named in *event*.

    Reads ``event["migrated_ami_id"]`` and returns the image's ``State``
    value as reported by EC2 ``describe_images``.
    """
    print(f"Received event: {json.dumps(event, indent=2)}")
    ami_id: str = event["migrated_ami_id"]
    response: Dict[str, Any] = ec2_client.describe_images(ImageIds=[ami_id])
    return response["Images"][0]["State"]
|
[
"json.dumps",
"boto3.client"
] |
[((232, 251), 'boto3.client', 'boto3.client', (['"""ec2"""'], {}), "('ec2')\n", (244, 251), False, 'import boto3\n'), ((600, 627), 'json.dumps', 'json.dumps', (['event'], {'indent': '(2)'}), '(event, indent=2)\n', (610, 627), False, 'import json\n')]
|
'''
Created on June 24, 2019
@author: <NAME>
'''
import copy
import json
import sys
import math
import numbers
import intervals as I
from abc import ABC, abstractmethod
from greenery.lego import parse
from intervals import inf as infinity
import config
import _constants
from canoncalization import canoncalize_object
from _normalizer import lazy_normalize
from _utils import (
validate_schema,
print_db,
is_sub_interval_from_optional_ranges,
is_num,
is_list,
is_dict,
is_empty_dict_or_none,
is_dict_or_true,
one
)
class JSONschema(dict):
    """Base class for canonicalized JSON-schema fragments.

    Subclasses ``dict`` so a schema is both its own JSON object and an
    attribute-accessible record (``self.minLength`` reads ``self["minLength"]``).
    Subclasses override ``kw_defaults``, ``_isUninhabited`` and ``_isSubtype``.
    """
    # Per-type keyword defaults, merged into the dict by updateKeys().
    kw_defaults = {}
    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        # self.validate()
        self.updateKeys()
        # self.canoncalize()
        # An uninhabited schema (no value can ever satisfy it) aborts the run.
        if self.isUninhabited():
            sys.exit("Found an uninhabited type at: " + str(self))
    def __getattr__(self, name):
        # Expose dict keys as attributes; only called when normal lookup fails.
        if name in self:
            return self[name]
        else:
            raise AttributeError("No such attribute: ", name)
    def __setattr__(self, name, value):
        # Attribute writes go straight into the underlying dict.
        self[name] = value
    def __delattr__(self, name):
        if name in self:
            del self[name]
        else:
            raise AttributeError("No such attribute: ", name)
    def validate(self):
        """Check this fragment against the JSON-schema meta-schema."""
        validate_schema(self)
    def updateKeys(self):
        """Fill in every missing keyword default; 'items' is stored as 'items_'."""
        for k, v in self.kw_defaults.items():
            if k == "items":
                k = "items_"
            if k not in self.keys():
                self[k] = v
    def isBoolean(self):
        # Truthy iff the schema uses a boolean connector (anyOf/allOf/oneOf/not).
        return self.keys() & _constants.Jconnectors
    def isUninhabited(self):
        return self._isUninhabited()
    def _isUninhabited(self):
        # Overridden by subclasses that can detect empty types.
        pass
    def meet(self, s2):
        pass
    def join(self, s2):
        pass
    def isSubtype(self, s2):
        """Return True if every instance of self also satisfies schema s2."""
        # {} and True are universal schemas: everything is a subtype of them.
        if s2 == {} or s2 == True or self == s2:
            return True
        return self._isSubtype(s2)
    def isSubtype_handle_rhs(self, s2, isSubtype_cb):
        """Dispatch on the rhs: boolean connectors here, plain types via callback."""
        if s2.isBoolean():
            # TODO revisit all of this. They are wrong.
            if "anyOf" in s2:
                return any(self.isSubtype(s) for s in s2["anyOf"])
            elif "allOf" in s2:
                return all(self.isSubtype(s) for s in s2["allOf"])
            elif "oneOf" in s2:
                return one(self.isSubtype(s) for s in s2["oneOf"])
            elif "not" in s2:
                # TODO
                print("No handling of not yet.")
                return None
        else:
            print_db("cb on rhs")
            return isSubtype_cb(self, s2)
class JSONTypeString(JSONschema):
    """Canonical representation of a ``{"type": "string"}`` schema."""
    kw_defaults = {"minLength": 0, "maxLength": infinity, "pattern": ".*"}
    def __init__(self, s):
        super().__init__(s)
    def _isUninhabited(self):
        # A string type with an empty length range admits no value.
        return self.minLength > self.maxLength
    def meet(self, s):
        pass
    def _isSubtype(self, s2):
        def _isStringSubtype(self, s2):
            if s2.type != "string":
                return False
            # Length ranges must nest for self to be a subtype.
            is_sub_interval = is_sub_interval_from_optional_ranges(
                self.minLength, self.maxLength, s2.minLength, s2.maxLength)
            if not is_sub_interval:
                return False
            #
            # at this point, length is compatible,
            # so we should now worry about pattern only.
            # (fix: None comparisons now use ``is``, per PEP 8)
            if s2.pattern is None or s2.pattern == "":
                # rhs accepts every string
                return True
            elif self.pattern is None or self.pattern == "":
                # lhs is unconstrained but rhs is not
                return False
            elif self.pattern == s2.pattern:
                return True
            else:
                # L(self.pattern) must be contained in L(s2.pattern):
                # the intersection with the complement must be empty.
                regex = parse(self.pattern)
                regex2 = parse(s2.pattern)
                result = regex & regex2.everythingbut()
                return result.empty()
        return super().isSubtype_handle_rhs(s2, _isStringSubtype)
def JSONNumericFactory(s):
    """Build the numeric schema object for the raw schema dict *s*.

    A "number" whose multipleOf is integer-valued only admits integers, so it
    is rewritten as an integer schema with its bounds rounded accordingly;
    any other "number" stays a number, and everything else was declared
    "integer" already.
    """
    if s.get("type") != "number":
        return JSONTypeInteger(s)
    mult = s.get("multipleOf")
    if not (mult and float(mult).is_integer()):
        return JSONTypeNumber(s)
    # multipleOf is a whole number: every admissible value is an integer.
    s["type"] = "integer"
    lo = s.get("minimum")
    if lo is not None:  # -I.inf:
        # Exclusive bounds keep their flag, so floor/ceil keeps the same
        # admitted integers as the original fractional bound.
        s["minimum"] = math.floor(lo) if s.get("exclusiveMinimum") else math.ceil(lo)
    hi = s.get("maximum")
    if hi is not None:  # I.inf:
        s["maximum"] = math.ceil(hi) if s.get("exclusiveMaximum") else math.floor(hi)
    return JSONTypeInteger(s)
class JSONTypeInteger(JSONschema):
    """Canonical representation of a ``{"type": "integer"}`` schema."""
    kw_defaults = {"minimum": -infinity, "maximum": infinity,
                   "exclusiveMinimum": False, "exclusiveMaximum": False, "multipleOf": None}
    def __init__(self, s):
        super().__init__(s)
    def build_interval_draft4(self):
        """Materialize the draft-4 bounds as a closed interval on ``self``."""
        # For integers an exclusive bound folds into a closed one (+/- 1).
        if self.exclusiveMinimum and self.exclusiveMaximum:
            self.interval = I.closed(self.minimum+1, self.maximum-1)
        elif self.exclusiveMinimum:
            self.interval = I.closed(self.minimum+1, self.maximum)
        elif self.exclusiveMaximum:
            self.interval = I.closed(self.minimum, self.maximum-1)
        else:
            self.interval = I.closed(self.minimum, self.maximum)
    def _isUninhabited(self):
        self.build_interval_draft4()
        # NOTE(review): the second clause tests whether multipleOf itself lies
        # inside the bounds -- presumably a proxy for "some multiple exists";
        # confirm that is the intended semantics.
        return self.interval.is_empty() or \
            (self.multipleOf is not None and self.multipleOf not in self.interval)
    def meet(self, s):
        pass
    def _isSubtype(self, s2):
        def _isIntegerSubtype(self, s2):
            # An integer schema may be a subtype of a wider "number" schema.
            if s2.type not in ["integer", "number"]:
                return False
            #
            is_sub_interval = self.interval in s2.interval
            if not is_sub_interval:
                print_db("num__00")
                return False
            #
            if (self.multipleOf == s2.multipleOf) \
                    or (self.multipleOf is not None and s2.multipleOf is None) \
                    or (self.multipleOf is not None and s2.multipleOf is not None and self.multipleOf % s2.multipleOf == 0) \
                    or (self.multipleOf is None and s2.multipleOf == 1):
                print_db("num__02")
                return True
            # Incompatible multipleOf constraints.
            # (fix: previously fell off the end and returned None implicitly
            # when both sides had multiples that do not divide.)
            return False
        return super().isSubtype_handle_rhs(s2, _isIntegerSubtype)
class JSONTypeNumber(JSONschema):
    """Canonical representation of a ``{"type": "number"}`` schema."""
    kw_defaults = {"minimum": -infinity, "maximum": infinity,
                   "exclusiveMinimum": False, "exclusiveMaximum": False, "multipleOf": None}
    def __init__(self, s):
        super().__init__(s)
    def build_interval_draft4(self):
        """Materialize the draft-4 bounds as an interval with open/closed ends."""
        if self.exclusiveMinimum and self.exclusiveMaximum:
            self.interval = I.open(self.minimum, self.maximum)
        elif self.exclusiveMinimum:
            self.interval = I.openclosed(self.minimum, self.maximum)
        elif self.exclusiveMaximum:
            self.interval = I.closedopen(self.minimum, self.maximum)
        else:
            self.interval = I.closed(self.minimum, self.maximum)
    def _isUninhabited(self):
        self.build_interval_draft4()
        # NOTE(review): the second clause tests whether multipleOf itself lies
        # inside the bounds -- presumably a proxy for "some multiple exists";
        # confirm that is the intended semantics.
        return self.interval.is_empty() or \
            (self.multipleOf is not None and self.multipleOf not in self.interval)
    def meet(self, s):
        pass
    def _isSubtype(self, s2):
        def _isNumberSubtype(self, s2):
            # A general number is never a subtype of an integer schema, and
            # any other rhs type is incompatible outright.
            if s2.type != "number":
                return False
            #
            is_sub_interval = self.interval in s2.interval
            if not is_sub_interval:
                print_db("num__00")
                return False
            #
            # (fix: removed an unreachable "number vs integer" branch --
            # s2.type is known to be "number" after the guard above.)
            if (self.multipleOf == s2.multipleOf) \
                    or (self.multipleOf is not None and s2.multipleOf is None) \
                    or (self.multipleOf is not None and s2.multipleOf is not None and self.multipleOf % s2.multipleOf == 0) \
                    or (self.multipleOf is None and s2.multipleOf == 1):
                print_db("num__02")
                return True
            # Incompatible multipleOf constraints.
            # (fix: previously fell off the end and returned None implicitly.)
            return False
        return super().isSubtype_handle_rhs(s2, _isNumberSubtype)
class JSONTypeBoolean(JSONschema):
    """Canonical representation of a ``{"type": "boolean"}`` schema."""
    kw_defaults = {}
    def __init__(self, s):
        super().__init__(s)
    def _isSubtype(self, s2):
        def _isBooleanSubtype(self, s2):
            # Booleans carry no extra constraints; only the rhs type matters.
            return s2.type == "boolean"
        return super().isSubtype_handle_rhs(s2, _isBooleanSubtype)
class JSONTypeNull(JSONschema):
    """Canonical representation of a ``{"type": "null"}`` schema."""
    kw_defaults = {}
    def __init__(self, s):
        super().__init__(s)
    def _isSubtype(self, s2):
        def _isNullSubtype(self, s2):
            # null has a single inhabitant; subtype iff the rhs is also null.
            return s2.type == "null"
        return super().isSubtype_handle_rhs(s2, _isNullSubtype)
class JSONTypeObject(JSONschema):
    """Canonical representation of a ``{"type": "object"}`` schema.

    Object-vs-object containment is not implemented yet; ``_isObjectSubtype``
    is a stub, so the callback path yields None.
    """
    # Defaults for every object keyword the checker tracks.
    kw_defaults = {"properties": {}, "additionalProperties": {}, "required": [
    ], "minProperties": 0, "maxProperties": infinity, "dependencies": {}, "patternProperties": {}}
    def __init__(self, s):
        super().__init__(s)
    def meet(self, s2):
        pass
    def _isSubtype(self, s2):
        def _isObjectSubtype(self, s2):
            # TODO: object containment not implemented.
            pass
        return super().isSubtype_handle_rhs(s2, _isObjectSubtype)
class JSONTypeArray(JSONschema):
    """Canonical representation of a ``{"type": "array"}`` schema.

    The "items" keyword is stored under the key ``items_`` (see
    JSONschema.updateKeys) and may be either a single schema (dict) or a
    positional list of schemas; ``additionalItems`` covers the tail.
    """
    kw_defaults = {"minItems": 0, "maxItems": infinity,
                   "items": JSONTypeObject({}), "additionalItems": JSONTypeObject({}), "uniqueItems": False}
    def __init__(self, s):
        super().__init__(s)
    def _isUninhabited(self):
        # NOTE(review): self.items resolves to dict.items (a bound method),
        # never the schema's "items" keyword (stored as items_), so
        # is_list(self.items) is always False -- confirm self.items_ was meant.
        return (self.minItems > self.maxItems) or \
            (is_list(self.items) and self.additionalItems ==
             False and self.minItems > len(self.items))
    def meet(self, s2):
        pass
    def _isSubtype(self, s2):
        """Array containment over bounds, uniqueness and item schemas.

        Branches on the dict-vs-list shape of items_ on each side; several
        branch combinations fall through and return None implicitly.
        """
        def _isArraySubtype(self, s2):
            print_db("in array subtype")
            if s2.type != "array":
                return False
            #
            #
            # self = JsonArray(self)
            # s2 = JsonArray(s2)
            #
            # uninhabited = handle_uninhabited_types(self, s2)
            # if uninhabited != None:
            #     return uninhabited
            #
            # -- minItems and maxItems
            is_sub_interval = is_sub_interval_from_optional_ranges(
                self.minItems, self.maxItems, s2.minItems, s2.maxItems)
            # also takes care of {'items' = [..], 'additionalItems' = False}
            if not is_sub_interval:
                print_db("__01__")
                return False
            #
            # -- uniqueItems
            # TODO Double-check. Could be more subtle?
            if not self.uniqueItems and s2.uniqueItems:
                print_db("__02__")
                return False
            #
            # -- items = {not empty}
            # no need to check additionalItems
            if is_dict(self.items_):
                if is_dict(s2.items_):
                    print_db(self.items_)
                    print_db(s2.items_)
                    # if subschemachecker.Checker.is_subtype(self.items_, s2.items_):
                    if self.items_.isSubtype(s2.items_):
                        print_db("__05__")
                        return True
                    else:
                        print_db("__06__")
                        return False
                elif is_list(s2.items_):
                    if s2.additionalItems == False:
                        print_db("__07__")
                        return False
                    elif s2.additionalItems == True:
                        for i in s2.items_:
                            # if not subschemachecker.Checker.is_subtype(self.items_, i):
                            if not self.items_.isSubtype(i):
                                print_db("__08__")
                                return False
                        print_db("__09__")
                        return True
                    elif is_dict(s2.additionalItems):
                        for i in s2.items_:
                            # if not subschemachecker.Checker.is_subtype(self.items_, i):
                            if not self.items_.isSubtype(i):
                                print_db("__10__")
                                return False
                        # if subschemachecker.Checker.is_subtype(self.items_, s2.additionalItems):
                        if self.items_.isSubtype(s2.additionalItems):
                            print_db("__11__")
                            return True
                        else:
                            print_db("__12__")
                            return False
            #
            elif is_list(self.items_):
                print_db("lhs is list")
                if is_dict(s2.items_):
                    if self.additionalItems == False:
                        for i in self.items_:
                            # if not subschemachecker.Checker.is_subtype(i, s2.items_):
                            if not i.isSubtype(s2.items_):
                                print_db("__13__")
                                return False
                        print_db("__14__")
                        return True
                    elif self.additionalItems == True:
                        for i in self.items_:
                            # if not subschemachecker.Checker.is_subtype(i, s2.items_):
                            if not i.isSubtype(s2.items_):
                                return False
                        return True
                    elif is_dict(self.additionalItems):
                        for i in self.items_:
                            # if not subschemachecker.Checker.is_subtype(i, s2.items_):
                            if not i.isSubtype(s2.items_):
                                return False
                        # if subschemachecker.Checker.is_subtype(self.additionalItems, s2.items_):
                        if self.additionalItems.isSubtype(s2.items_):
                            return True
                        else:
                            return False
                # now lhs and rhs are lists
                elif is_list(s2.items_):
                    print_db("lhs & rhs are lists")
                    len1 = len(self.items_)
                    len2 = len(s2.items_)
                    # positional prefixes must be pairwise subtypes
                    for i, j in zip(self.items_, s2.items_):
                        # if not subschemachecker.Checker.is_subtype(i, j):
                        if not i.isSubtype(j):
                            return False
                    if len1 == len2:
                        print_db("len1 == len2")
                        if self.additionalItems == s2.additionalItems:
                            return True
                        elif self.additionalItems == True and s2.additionalItems == False:
                            return False
                        elif self.additionalItems == False and s2.additionalItems == True:
                            return True
                        else:
                            # return subschemachecker.Checker.is_subtype(self.additionalItems, s2.additionalItems)
                            return self.additionalItems.isSubtype(s2.additionalItems)
                    elif len1 > len2:
                        # lhs tail positions must fit the rhs additionalItems
                        diff = len1 - len2
                        for i in range(len1-diff, len1):
                            # if not subschemachecker.Checker.is_subtype(self.items_[i], s2.additionalItems):
                            if not self.items_[i].isSubtype(s2.additionalItems):
                                print_db("9999")
                                return False
                        print_db("8888")
                        return True
                    else:  # len2 > len 1
                        # if self.additionalItems:
                        # lhs additionalItems must fit every extra rhs position
                        diff = len2 - len1
                        for i in range(len2 - diff, len2):
                            print_db("self.additionalItems",
                                     self.additionalItems)
                            print_db(i, s2.items_[i])
                            # if not subschemachecker.Checker.is_subtype(self.additionalItems, s2.items_[i]):
                            if not self.additionalItems.isSubtype(s2.items_[i]):
                                print_db("!!!")
                                return False
                        # return subschemachecker.Checker.is_subtype(self.additionalItems, s2.additionalItems)
                        return self.additionalItems.isSubtype(s2.additionalItems)
        return super().isSubtype_handle_rhs(s2, _isArraySubtype)
class JSONanyOf(JSONschema):
    """Schema built from an ``anyOf`` connector."""
    def meet(self, s):
        pass
    def _isSubtype(self, s2):
        def _isAnyofSubtype(self, s2):
            # anyOf <= s2 iff every disjunct is itself a subtype of s2.
            return all(branch.isSubtype(s2) for branch in self.anyOf)
        return super().isSubtype_handle_rhs(s2, _isAnyofSubtype)
class JSONallOf(JSONschema):
    """Schema built from an ``allOf`` connector."""
    def meet(self, s):
        pass
    def _isSubtype(self, s2):
        # fix: the parameter was named ``Self`` -- non-PEP 8 and inconsistent
        # with every sibling class; zero-arg super() works either way.
        def _isAllOfSubtype(self, s2):
            # NOTE(review): one conjunct being a subtype already implies the
            # intersection is; requiring all of them is stricter than needed
            # and may reject valid subtypes -- confirm intended semantics.
            for s in self.allOf:
                if not s.isSubtype(s2):
                    return False
            return True
        return super().isSubtype_handle_rhs(s2, _isAllOfSubtype)
class JSONoneOf(JSONschema):
    """Schema built from a ``oneOf`` connector.

    Subtype checking with ``oneOf`` on the left-hand side is not implemented;
    reaching ``_isSubtype`` aborts the process.
    """
    def meet(self, s):
        pass
    def _isSubtype(self, s2):
        # fix: the exit message previously read "onOf".
        sys.exit("oneOf on the lhs is not supported yet.")
class JSONnot(JSONschema):
    """Schema built from a ``not`` connector.

    Subtype checking for negation is not implemented; ``_isSubtype`` is a
    stub that returns None.
    """
    def meet(self, s):
        pass
    def _isSubtype(self, s):
        # TODO: negation is not handled.
        pass
# Maps a JSON-schema "type" value to the class (or factory function) that
# builds its canonical representation. Both numeric types share the factory,
# which decides between integer and number.
typeToConstructor = {
    "string": JSONTypeString,
    "integer": JSONNumericFactory,
    "number": JSONNumericFactory,
    "boolean": JSONTypeBoolean,
    "null": JSONTypeNull,
    "array": JSONTypeArray,
    "object": JSONTypeObject
}
# Maps a boolean connector keyword to its wrapper class.
boolToConstructor = {
    "anyOf": JSONanyOf,
    "allOf": JSONallOf,
    "oneOf": JSONoneOf,
    "not": JSONnot
}
class JSONSchemaSubtypeFactory(json.JSONDecoder):
    """JSON decoder that canonicalizes every decoded object into a JSONschema.

    Each JSON object encountered while parsing is routed through
    ``canoncalize_object`` from the ``canoncalization`` module.
    (The superseded, commented-out in-class canonicalization helpers were
    removed; ``canoncalization`` is their replacement.)
    """
    def __init__(self, *args, **kwargs):
        # fix: call the base class via super() instead of an explicit
        # unbound json.JSONDecoder.__init__.
        super().__init__(*args, object_hook=self.object_hook, **kwargs)
    def object_hook(self, d):
        """Canonicalize one decoded JSON object *d*."""
        print_db("object before canon.", d)
        return canoncalize_object(d)
class JSONSubtypeChecker:
    """Canonicalize two raw schemas and decide whether s1 is a subtype of s2."""
    # Scalars and lists are returned unchanged by canoncalize_json;
    # only dicts need rewriting into JSONschema objects.
    _PASSTHROUGH = (str, numbers.Number, bool, type(None), list)
    def __init__(self, s1, s2):
        # validate_schema(s1)
        # validate_schema(s2)
        self.s1 = self.canoncalize_json(s1)
        self.s2 = self.canoncalize_json(s2)
    def canoncalize_json(self, obj):
        """Return the canonical form of *obj* (dicts become JSONschema objects).

        fix: collapsed the chain of five isinstance() calls into a single
        tuple test. NOTE(review): anything that is neither a pass-through
        type nor a dict still falls through to an implicit None -- confirm.
        """
        if isinstance(obj, self._PASSTHROUGH):
            return obj
        elif isinstance(obj, dict):
            # return JSONSchemaSubtypeFactory.canoncalize_object(obj)
            return canoncalize_object(obj)
    def isSubtype(self):
        return self.s1.isSubtype(self.s2)
if __name__ == "__main__":
    # CLI driver: compare the two schema files named on the command line.
    s1_file = sys.argv[1]
    s2_file = sys.argv[2]
    print(f"Loading json schemas from:\n{s1_file}\n{s2_file}\n")
    # Scenario 1: canonicalize during decoding via the custom JSONDecoder.
    with open(s1_file, 'r') as fp:
        s1 = json.load(fp, cls=JSONSchemaSubtypeFactory)
    with open(s2_file, 'r') as fp:
        s2 = json.load(fp, cls=JSONSchemaSubtypeFactory)
    print(s1)
    print(s2)
    print("Usage scenario 1:", s1.isSubtype(s2))
    # Scenario 2: decode plainly, then canonicalize inside JSONSubtypeChecker.
    with open(s1_file, 'r') as fp:
        s1 = json.load(fp)
    with open(s2_file, 'r') as fp:
        s2 = json.load(fp)
    print(s1)
    print(s2)
    print("Usage scenario 2:", JSONSubtypeChecker(s1, s2).isSubtype())
|
[
"canoncalization.canoncalize_object",
"intervals.openclosed",
"intervals.closedopen",
"_utils.validate_schema",
"json.JSONDecoder.__init__",
"intervals.open",
"_utils.print_db",
"intervals.closed",
"greenery.lego.parse",
"sys.exit",
"json.load",
"_utils.is_list",
"_utils.is_dict",
"_utils.is_sub_interval_from_optional_ranges"
] |
[((1296, 1317), '_utils.validate_schema', 'validate_schema', (['self'], {}), '(self)\n', (1311, 1317), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((17661, 17710), 'sys.exit', 'sys.exit', (['"""onOf on the lhs is not supported yet."""'], {}), "('onOf on the lhs is not supported yet.')\n", (17669, 17710), False, 'import sys\n'), ((18279, 18357), 'json.JSONDecoder.__init__', 'json.JSONDecoder.__init__', (['self', '*args'], {'object_hook': 'self.object_hook'}), '(self, *args, object_hook=self.object_hook, **kwargs)\n', (18304, 18357), False, 'import json\n'), ((18410, 18445), '_utils.print_db', 'print_db', (['"""object before canon."""', 'd'], {}), "('object before canon.', d)\n", (18418, 18445), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((18525, 18546), 'canoncalization.canoncalize_object', 'canoncalize_object', (['d'], {}), '(d)\n', (18543, 18546), False, 'from canoncalization import canoncalize_object\n'), ((21895, 21938), 'json.load', 'json.load', (['f1'], {'cls': 'JSONSchemaSubtypeFactory'}), '(f1, cls=JSONSchemaSubtypeFactory)\n', (21904, 21938), False, 'import json\n'), ((21987, 22030), 'json.load', 'json.load', (['f2'], {'cls': 'JSONSchemaSubtypeFactory'}), '(f2, cls=JSONSchemaSubtypeFactory)\n', (21996, 22030), False, 'import json\n'), ((22202, 22215), 'json.load', 'json.load', (['f1'], {}), '(f1)\n', (22211, 22215), False, 'import json\n'), ((22264, 22277), 'json.load', 'json.load', (['f2'], {}), '(f2)\n', (22273, 22277), False, 'import json\n'), ((2507, 2528), '_utils.print_db', 'print_db', (['"""cb on rhs"""'], {}), "('cb on rhs')\n", (2515, 2528), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), 
((3022, 3123), '_utils.is_sub_interval_from_optional_ranges', 'is_sub_interval_from_optional_ranges', (['self.minLength', 'self.maxLength', 's2.minLength', 's2.maxLength'], {}), '(self.minLength, self.maxLength, s2.\n minLength, s2.maxLength)\n', (3058, 3123), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((4989, 5033), 'intervals.closed', 'I.closed', (['(self.minimum + 1)', '(self.maximum - 1)'], {}), '(self.minimum + 1, self.maximum - 1)\n', (4997, 5033), True, 'import intervals as I\n'), ((6808, 6842), 'intervals.open', 'I.open', (['self.minimum', 'self.maximum'], {}), '(self.minimum, self.maximum)\n', (6814, 6842), True, 'import intervals as I\n'), ((10044, 10072), '_utils.print_db', 'print_db', (['"""in array subtype"""'], {}), "('in array subtype')\n", (10052, 10072), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((10470, 10567), '_utils.is_sub_interval_from_optional_ranges', 'is_sub_interval_from_optional_ranges', (['self.minItems', 'self.maxItems', 's2.minItems', 's2.maxItems'], {}), '(self.minItems, self.maxItems, s2.\n minItems, s2.maxItems)\n', (10506, 10567), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((11090, 11110), '_utils.is_dict', 'is_dict', (['self.items_'], {}), '(self.items_)\n', (11097, 11110), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((5094, 5134), 'intervals.closed', 'I.closed', (['(self.minimum + 1)', 'self.maximum'], {}), '(self.minimum + 1, self.maximum)\n', (5102, 5134), True, 'import intervals as I\n'), ((5824, 5843), '_utils.print_db', 'print_db', 
(['"""num__00"""'], {}), "('num__00')\n", (5832, 5843), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((6223, 6242), '_utils.print_db', 'print_db', (['"""num__02"""'], {}), "('num__02')\n", (6231, 6242), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((6907, 6947), 'intervals.openclosed', 'I.openclosed', (['self.minimum', 'self.maximum'], {}), '(self.minimum, self.maximum)\n', (6919, 6947), True, 'import intervals as I\n'), ((7623, 7642), '_utils.print_db', 'print_db', (['"""num__00"""'], {}), "('num__00')\n", (7631, 7642), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((7765, 7784), '_utils.print_db', 'print_db', (['"""num__01"""'], {}), "('num__01')\n", (7773, 7784), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((8164, 8183), '_utils.print_db', 'print_db', (['"""num__02"""'], {}), "('num__02')\n", (8172, 8183), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((9819, 9838), '_utils.is_list', 'is_list', (['self.items'], {}), '(self.items)\n', (9826, 9838), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((10709, 10727), '_utils.print_db', 'print_db', (['"""__01__"""'], {}), "('__01__')\n", (10717, 10727), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, 
is_dict_or_true, one\n'), ((10929, 10947), '_utils.print_db', 'print_db', (['"""__02__"""'], {}), "('__02__')\n", (10937, 10947), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((11131, 11149), '_utils.is_dict', 'is_dict', (['s2.items_'], {}), '(s2.items_)\n', (11138, 11149), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12907, 12927), '_utils.is_list', 'is_list', (['self.items_'], {}), '(self.items_)\n', (12914, 12927), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((21548, 21571), 'canoncalization.canoncalize_object', 'canoncalize_object', (['obj'], {}), '(obj)\n', (21566, 21571), False, 'from canoncalization import canoncalize_object\n'), ((5197, 5237), 'intervals.closed', 'I.closed', (['self.minimum', '(self.maximum - 1)'], {}), '(self.minimum, self.maximum - 1)\n', (5205, 5237), True, 'import intervals as I\n'), ((5278, 5314), 'intervals.closed', 'I.closed', (['self.minimum', 'self.maximum'], {}), '(self.minimum, self.maximum)\n', (5286, 5314), True, 'import intervals as I\n'), ((7012, 7052), 'intervals.closedopen', 'I.closedopen', (['self.minimum', 'self.maximum'], {}), '(self.minimum, self.maximum)\n', (7024, 7052), True, 'import intervals as I\n'), ((7095, 7131), 'intervals.closed', 'I.closed', (['self.minimum', 'self.maximum'], {}), '(self.minimum, self.maximum)\n', (7103, 7131), True, 'import intervals as I\n'), ((11171, 11192), '_utils.print_db', 'print_db', (['self.items_'], {}), '(self.items_)\n', (11179, 11192), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((11213, 11232), 
'_utils.print_db', 'print_db', (['s2.items_'], {}), '(s2.items_)\n', (11221, 11232), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((11582, 11600), '_utils.is_list', 'is_list', (['s2.items_'], {}), '(s2.items_)\n', (11589, 11600), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12945, 12968), '_utils.print_db', 'print_db', (['"""lhs is list"""'], {}), "('lhs is list')\n", (12953, 12968), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12988, 13006), '_utils.is_dict', 'is_dict', (['s2.items_'], {}), '(s2.items_)\n', (12995, 13006), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((3611, 3630), 'greenery.lego.parse', 'parse', (['self.pattern'], {}), '(self.pattern)\n', (3616, 3630), False, 'from greenery.lego import parse\n'), ((3656, 3673), 'greenery.lego.parse', 'parse', (['s2.pattern'], {}), '(s2.pattern)\n', (3661, 3673), False, 'from greenery.lego import parse\n'), ((11400, 11418), '_utils.print_db', 'print_db', (['"""__05__"""'], {}), "('__05__')\n", (11408, 11418), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((11505, 11523), '_utils.print_db', 'print_db', (['"""__06__"""'], {}), "('__06__')\n", (11513, 11523), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((14398, 14416), '_utils.is_list', 'is_list', (['s2.items_'], {}), '(s2.items_)\n', (14405, 
14416), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((11678, 11696), '_utils.print_db', 'print_db', (['"""__07__"""'], {}), "('__07__')\n", (11686, 11696), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((13375, 13393), '_utils.print_db', 'print_db', (['"""__14__"""'], {}), "('__14__')\n", (13383, 13393), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((14438, 14469), '_utils.print_db', 'print_db', (['"""lhs & rhs are lists"""'], {}), "('lhs & rhs are lists')\n", (14446, 14469), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12102, 12120), '_utils.print_db', 'print_db', (['"""__09__"""'], {}), "('__09__')\n", (12110, 12120), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12182, 12209), '_utils.is_dict', 'is_dict', (['s2.additionalItems'], {}), '(s2.additionalItems)\n', (12189, 12209), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((13784, 13813), '_utils.is_dict', 'is_dict', (['self.additionalItems'], {}), '(self.additionalItems)\n', (13791, 13813), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((14842, 14866), '_utils.print_db', 'print_db', (['"""len1 == len2"""'], {}), "('len1 == len2')\n", (14850, 14866), False, 'from _utils 
import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((13287, 13305), '_utils.print_db', 'print_db', (['"""__13__"""'], {}), "('__13__')\n", (13295, 13305), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((15919, 15935), '_utils.print_db', 'print_db', (['"""8888"""'], {}), "('8888')\n", (15927, 15935), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12014, 12032), '_utils.print_db', 'print_db', (['"""__08__"""'], {}), "('__08__')\n", (12022, 12032), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12699, 12717), '_utils.print_db', 'print_db', (['"""__11__"""'], {}), "('__11__')\n", (12707, 12717), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12816, 12834), '_utils.print_db', 'print_db', (['"""__12__"""'], {}), "('__12__')\n", (12824, 12834), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((16195, 16249), '_utils.print_db', 'print_db', (['"""self.additionalItems"""', 'self.additionalItems'], {}), "('self.additionalItems', self.additionalItems)\n", (16203, 16249), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((16315, 16340), '_utils.print_db', 'print_db', (['i', 's2.items_[i]'], {}), '(i, s2.items_[i])\n', (16323, 16340), False, 'from _utils import 
validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((12438, 12456), '_utils.print_db', 'print_db', (['"""__10__"""'], {}), "('__10__')\n", (12446, 12456), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((15833, 15849), '_utils.print_db', 'print_db', (['"""9999"""'], {}), "('9999')\n", (15841, 15849), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n'), ((16564, 16579), '_utils.print_db', 'print_db', (['"""!!!"""'], {}), "('!!!')\n", (16572, 16579), False, 'from _utils import validate_schema, print_db, is_sub_interval_from_optional_ranges, is_num, is_list, is_dict, is_empty_dict_or_none, is_dict_or_true, one\n')]
|
"""
Various utilities functions used by django_community and
other apps to perform authentication related tasks.
"""
import hashlib, re
import django.forms as forms
from django.core.exceptions import ObjectDoesNotExist
from django.forms import ValidationError
import django.http as http
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.contrib.contenttypes import generic
from django.contrib.auth import logout as auth_logout
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth import authenticate, login
from django_community.models import UserOpenID, UserProfile
def openid_logout(request):
"""
Clears session which effectively logs out the current
OpenId user.
"""
request.session.flush()
def handle_logout(request):
"""
Log out.
"""
auth_logout(request)
def get_logged_user(request):
"""
Returns the current user who is logged in, checks for openid user first,
then for regular user, return None if no user is currently logged in
"""
if settings.OPENID_ENABLED and hasattr(request, 'openid'):
user = UserOpenID.objects.get_for_openid(request, request.openid)
if not user:
user = request.user
return user
def handle_login(request, data):
"""
Logs the user in based on form data from django_community.LoginForm.
"""
user = authenticate(username = data.get('username', None),
password = data.get('password', None))
user_object = User.objects.get(username = data.get('username', None))
if user is not None:
login(request, user)
return user
def handle_signup(request, data):
"""
Signs a user up based on form data from django_community.SignupForm.
"""
from django.contrib.auth.models import get_hexdigest
username = data.get('username', None)
email = data.get('email', None)
password = data.get('password', None)
try:
user = User.objects.get(username = username, email = email)
except ObjectDoesNotExist:
user = User(username = username, email = email)
user.save()
user.set_password(password)
user_profile = UserProfile.objects.get_user_profile(user)
user = authenticate(username = username, password = password)
login(request, user)
return user
def get_or_create_from_openid(openid):
"""
Returns an User with the given openid or
creates a new user and associates openid with that user.
"""
try:
user = User.objects.get(username = openid)
except ObjectDoesNotExist:
password = hashlib.sha256(openid).hexdigest()
user = User(username = openid, email = '', password = password)
user.save()
user.display_name = "%s_%s" % ('user', str(user.id))
user.save()
return user
def generate_random_user_name():
"""
Generates a random user name user_{user_id}_{salt}
to be used for creating new users.
"""
import random
current_users = User.objects.all().order_by('-id')
if current_users:
next_id = current_users[0].id + 1
else:
next_id = 1
random_salt = random.randint(1, 5000)
return 'user_%s_%s' % (str(next_id), str(random_salt))
def create_user_from_openid(request, openid):
"""
Creates a new User object associated with the given
openid.
"""
from django_community.config import OPENID_FIELD_MAPPING
from django_utils.request_helpers import get_ip
username = generate_random_user_name()
profile_attributes = {}
for attribute in OPENID_FIELD_MAPPING.keys():
mapped_attribute = OPENID_FIELD_MAPPING[attribute]
if openid.sreg and openid.sreg.get(attribute, ''):
profile_attributes[mapped_attribute] = openid.sreg.get(attribute, '')
new_user = User(username = username)
new_user.save()
new_openid = UserOpenID(openid = openid.openid, user = new_user)
new_openid.save()
new_user_profile = UserProfile.objects.get_user_profile(new_user)
for filled_attribute in profile_attributes.keys():
setattr(new_user, filled_attribute, profile_attributes[filled_attribute])
new_user_profile.save()
return new_user
def get_anon_user(request):
"""
Returns an anonmymous user corresponding to this IP address if one exists.
Else create an anonymous user and return it.
"""
try:
anon_user = User.objects.get(username = generate_anon_user_name(request))
except ObjectDoesNotExist:
anon_user = create_anon_user(request)
return anon_user
def create_anon_user(request):
"""
Creates a new anonymous user based on the ip provided by the request
object.
"""
anon_user_name = generate_anon_user_name(request)
anon_user = User(username = anon_user_name)
anon_user.save()
user_profile = UserProfile(user = anon_user, display_name = 'anonymous')
user_profile.save()
return anon_user
def generate_anon_user_name(request):
"""
Generate an anonymous user name based on and ip address.
"""
from django_utils.request_helpers import get_ip
ip = get_ip(request)
return "anon_user_%s" % (str(ip))
def is_anon_user(user):
"""
Determine if an user is anonymous or not.
"""
return user.username[0:10] == 'anon_user_'
def is_random(name):
"""
Determine if a user has a randomly generated display name.
"""
if len(name.split('_')) and name.startswith('user'):
return True
else:
return False
def process_ax_data(user, ax_data):
"""
Process OpenID AX data.
"""
import django_openidconsumer.config
emails = ax_data.get(django_openidconsumer.config.URI_GROUPS.get('email').get('type_uri', ''), '')
display_names = ax_data.get(django_openidconsumer.config.URI_GROUPS.get('alias').get('type_uri', ''), '')
if emails and not user.email.strip():
user.email = emails[0]
user.save()
if not user.profile.display_name.strip() or is_random(user.profile.display_name):
if display_names:
user.profile.display_name = display_names[0]
elif emails:
user.profile.display_name = emails[0].split('@')[0]
user.profile.save()
|
[
"django.contrib.auth.authenticate",
"hashlib.sha256",
"random.randint",
"django.contrib.auth.models.User",
"django_community.models.UserProfile",
"django_community.models.UserOpenID",
"django.contrib.auth.login",
"django.contrib.auth.models.User.objects.all",
"django_community.models.UserOpenID.objects.get_for_openid",
"django_utils.request_helpers.get_ip",
"django_community.models.UserProfile.objects.get_user_profile",
"django.contrib.auth.models.User.objects.get",
"django_community.config.OPENID_FIELD_MAPPING.keys",
"django.contrib.auth.logout"
] |
[((903, 923), 'django.contrib.auth.logout', 'auth_logout', (['request'], {}), '(request)\n', (914, 923), True, 'from django.contrib.auth import logout as auth_logout\n'), ((2341, 2391), 'django.contrib.auth.authenticate', 'authenticate', ([], {'username': 'username', 'password': 'password'}), '(username=username, password=password)\n', (2353, 2391), False, 'from django.contrib.auth import authenticate, login\n'), ((2400, 2420), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (2405, 2420), False, 'from django.contrib.auth import authenticate, login\n'), ((3275, 3298), 'random.randint', 'random.randint', (['(1)', '(5000)'], {}), '(1, 5000)\n', (3289, 3298), False, 'import random\n'), ((3699, 3726), 'django_community.config.OPENID_FIELD_MAPPING.keys', 'OPENID_FIELD_MAPPING.keys', ([], {}), '()\n', (3724, 3726), False, 'from django_community.config import OPENID_FIELD_MAPPING\n'), ((3956, 3979), 'django.contrib.auth.models.User', 'User', ([], {'username': 'username'}), '(username=username)\n', (3960, 3979), False, 'from django.contrib.auth.models import User\n'), ((4019, 4066), 'django_community.models.UserOpenID', 'UserOpenID', ([], {'openid': 'openid.openid', 'user': 'new_user'}), '(openid=openid.openid, user=new_user)\n', (4029, 4066), False, 'from django_community.models import UserOpenID, UserProfile\n'), ((4116, 4162), 'django_community.models.UserProfile.objects.get_user_profile', 'UserProfile.objects.get_user_profile', (['new_user'], {}), '(new_user)\n', (4152, 4162), False, 'from django_community.models import UserOpenID, UserProfile\n'), ((4917, 4946), 'django.contrib.auth.models.User', 'User', ([], {'username': 'anon_user_name'}), '(username=anon_user_name)\n', (4921, 4946), False, 'from django.contrib.auth.models import User\n'), ((4989, 5042), 'django_community.models.UserProfile', 'UserProfile', ([], {'user': 'anon_user', 'display_name': '"""anonymous"""'}), "(user=anon_user, display_name='anonymous')\n", (5000, 
5042), False, 'from django_community.models import UserOpenID, UserProfile\n'), ((5274, 5289), 'django_utils.request_helpers.get_ip', 'get_ip', (['request'], {}), '(request)\n', (5280, 5289), False, 'from django_utils.request_helpers import get_ip\n'), ((1204, 1262), 'django_community.models.UserOpenID.objects.get_for_openid', 'UserOpenID.objects.get_for_openid', (['request', 'request.openid'], {}), '(request, request.openid)\n', (1237, 1262), False, 'from django_community.models import UserOpenID, UserProfile\n'), ((1680, 1700), 'django.contrib.auth.login', 'login', (['request', 'user'], {}), '(request, user)\n', (1685, 1700), False, 'from django.contrib.auth import authenticate, login\n'), ((2061, 2109), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'username', 'email': 'email'}), '(username=username, email=email)\n', (2077, 2109), False, 'from django.contrib.auth.models import User\n'), ((2623, 2656), 'django.contrib.auth.models.User.objects.get', 'User.objects.get', ([], {'username': 'openid'}), '(username=openid)\n', (2639, 2656), False, 'from django.contrib.auth.models import User\n'), ((2161, 2197), 'django.contrib.auth.models.User', 'User', ([], {'username': 'username', 'email': 'email'}), '(username=username, email=email)\n', (2165, 2197), False, 'from django.contrib.auth.models import User\n'), ((2282, 2324), 'django_community.models.UserProfile.objects.get_user_profile', 'UserProfile.objects.get_user_profile', (['user'], {}), '(user)\n', (2318, 2324), False, 'from django_community.models import UserOpenID, UserProfile\n'), ((2759, 2809), 'django.contrib.auth.models.User', 'User', ([], {'username': 'openid', 'email': '""""""', 'password': 'password'}), "(username=openid, email='', password=password)\n", (2763, 2809), False, 'from django.contrib.auth.models import User\n'), ((3128, 3146), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (3144, 3146), False, 'from 
django.contrib.auth.models import User\n'), ((2709, 2731), 'hashlib.sha256', 'hashlib.sha256', (['openid'], {}), '(openid)\n', (2723, 2731), False, 'import hashlib, re\n')]
|
import os
from pathlib import Path
from ament_index_python.packages import get_package_share_directory
from launch import LaunchDescription
from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown
from launch.launch_description_sources import PythonLaunchDescriptionSource
from launch_ros.actions import Node
def generate_launch_description():
bringup_dir = Path(get_package_share_directory('rj_robocup'))
launch_dir = bringup_dir / 'launch'
stdout_linebuf_envvar = SetEnvironmentVariable(
'RCUTILS_CONSOLE_STDOUT_LINE_BUFFERED', '1')
grsim = Node(package='rj_robocup', executable='grSim', arguments=[])
radio = Node(package='rj_robocup',
executable='sim_radio_node',
output='screen',
on_exit=Shutdown())
control = Node(package='rj_robocup',
executable='control_node',
output='screen',
on_exit=Shutdown())
config_server = Node(package='rj_robocup',
executable='config_server',
output='screen',
on_exit=Shutdown())
vision_receiver_launch_path = str(launch_dir / "vision_receiver.launch.py")
vision_receiver = IncludeLaunchDescription(
PythonLaunchDescriptionSource(vision_receiver_launch_path))
ref_receiver = Node(package='rj_robocup',
executable='internal_referee_node',
output='screen',
on_exit=Shutdown())
vision_filter_launch_path = str(launch_dir / "vision_filter.launch.py")
vision_filter = IncludeLaunchDescription(
PythonLaunchDescriptionSource(vision_filter_launch_path))
return LaunchDescription([
grsim, stdout_linebuf_envvar, config_server, radio, control,
vision_receiver, vision_filter, ref_receiver
])
|
[
"launch.actions.SetEnvironmentVariable",
"launch.actions.Shutdown",
"ament_index_python.packages.get_package_share_directory",
"launch.LaunchDescription",
"launch.launch_description_sources.PythonLaunchDescriptionSource",
"launch_ros.actions.Node"
] |
[((511, 578), 'launch.actions.SetEnvironmentVariable', 'SetEnvironmentVariable', (['"""RCUTILS_CONSOLE_STDOUT_LINE_BUFFERED"""', '"""1"""'], {}), "('RCUTILS_CONSOLE_STDOUT_LINE_BUFFERED', '1')\n", (533, 578), False, 'from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown\n'), ((601, 661), 'launch_ros.actions.Node', 'Node', ([], {'package': '"""rj_robocup"""', 'executable': '"""grSim"""', 'arguments': '[]'}), "(package='rj_robocup', executable='grSim', arguments=[])\n", (605, 661), False, 'from launch_ros.actions import Node\n'), ((1760, 1890), 'launch.LaunchDescription', 'LaunchDescription', (['[grsim, stdout_linebuf_envvar, config_server, radio, control,\n vision_receiver, vision_filter, ref_receiver]'], {}), '([grsim, stdout_linebuf_envvar, config_server, radio,\n control, vision_receiver, vision_filter, ref_receiver])\n', (1777, 1890), False, 'from launch import LaunchDescription\n'), ((399, 440), 'ament_index_python.packages.get_package_share_directory', 'get_package_share_directory', (['"""rj_robocup"""'], {}), "('rj_robocup')\n", (426, 440), False, 'from ament_index_python.packages import get_package_share_directory\n'), ((1307, 1365), 'launch.launch_description_sources.PythonLaunchDescriptionSource', 'PythonLaunchDescriptionSource', (['vision_receiver_launch_path'], {}), '(vision_receiver_launch_path)\n', (1336, 1365), False, 'from launch.launch_description_sources import PythonLaunchDescriptionSource\n'), ((1690, 1746), 'launch.launch_description_sources.PythonLaunchDescriptionSource', 'PythonLaunchDescriptionSource', (['vision_filter_launch_path'], {}), '(vision_filter_launch_path)\n', (1719, 1746), False, 'from launch.launch_description_sources import PythonLaunchDescriptionSource\n'), ((807, 817), 'launch.actions.Shutdown', 'Shutdown', ([], {}), '()\n', (815, 817), False, 'from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown\n'), ((970, 980), 'launch.actions.Shutdown', 'Shutdown', ([], {}), 
'()\n', (978, 980), False, 'from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown\n'), ((1158, 1168), 'launch.actions.Shutdown', 'Shutdown', ([], {}), '()\n', (1166, 1168), False, 'from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown\n'), ((1547, 1557), 'launch.actions.Shutdown', 'Shutdown', ([], {}), '()\n', (1555, 1557), False, 'from launch.actions import IncludeLaunchDescription, SetEnvironmentVariable, Shutdown\n')]
|
# A Rapid Proof of Concept for the eDensiometer
# Copyright 2018, <NAME>. All Rights Reserved. Created with contributions from <NAME>.
# Imports
from PIL import Image
from pprint import pprint
import numpy as np
import time as time_
def millis(): # from https://stackoverflow.com/questions/5998245/get-current-time-in-milliseconds-in-python/6000198#6000198
return int(round(time_.time() * 1000))
start = millis()
# Constants
# BRIGHT_CUTOFF = 175
RED_CUTOFF = 200
GREEN_CUTOFF = 150
BLUE_CUTOFF = 200
# Pull from test.jpg image in local directory
temp = np.asarray(Image.open('test.jpg'))
print(temp.shape)
# Variable Initialization
result = np.zeros((temp.shape[0], temp.shape[1], temp.shape[2]))
temp_bright = np.zeros((temp.shape[0], temp.shape[1]))
count_total = 0
count_open = 0
# Cycle through image
for row in range(0, temp.shape[0]):
for element in range(0, temp.shape[1]):
count_total += 1
temp_bright[row, element] = (int(temp[row][element][0]) + int(temp[row][element][1]) + int(temp[row][element][2]))/3
# bright = temp_bright[row][element] > BRIGHT_CUTOFF
red_enough = temp[row][element][0] > RED_CUTOFF
green_enough = temp[row][element][1] > GREEN_CUTOFF
blue_enough = temp[row][element][2] > BLUE_CUTOFF
if red_enough and green_enough and blue_enough:
# print(temp[row, element])
count_open += 1
result[row, element] = [255, 255, 255]
# Save filtered image as final.jpg
final = Image.fromarray(result.astype('uint8'), 'RGB')
final.save('final.jpg')
# Return/Print Percent Coverage
percent_open = count_open/count_total
percent_cover = 1 - percent_open
end = millis()
print("Percent Open: " + str(percent_open))
print("Percent Cover: " + str(percent_cover))
runtime = end-start
print("Runtime in MS: " + str(runtime))
|
[
"numpy.zeros",
"PIL.Image.open",
"time.time"
] |
[((653, 708), 'numpy.zeros', 'np.zeros', (['(temp.shape[0], temp.shape[1], temp.shape[2])'], {}), '((temp.shape[0], temp.shape[1], temp.shape[2]))\n', (661, 708), True, 'import numpy as np\n'), ((723, 763), 'numpy.zeros', 'np.zeros', (['(temp.shape[0], temp.shape[1])'], {}), '((temp.shape[0], temp.shape[1]))\n', (731, 763), True, 'import numpy as np\n'), ((575, 597), 'PIL.Image.open', 'Image.open', (['"""test.jpg"""'], {}), "('test.jpg')\n", (585, 597), False, 'from PIL import Image\n'), ((380, 392), 'time.time', 'time_.time', ([], {}), '()\n', (390, 392), True, 'import time as time_\n')]
|
from core.utilities.functions import delete_message
from core.utilities.message import message
from core.database.repository.group import GroupRepository
"""
This function allows you to terminate the type
of file that contains a message on telegram and filter it
"""
def init(update, context):
apk = 'application/vnd.android.package-archive'
doc = 'application/msword'
docx = 'application/vnd.openxmlformats-officedocument.wordprocessingml.document'
exe = 'application/x-ms-dos-executable'
gif = 'video/mp4'
jpg = 'image/jpeg'
mp3 = 'audio/mpeg'
pdf = 'application/pdf'
py = 'text/x-python'
svg = 'image/svg+xml'
txt = 'text/plain'
targz = 'application/x-compressed-tar'
wav = 'audio/x-wav'
xml = 'application/xml'
filezip = 'application/zip'
msg = update.effective_message
chat = update.effective_message.chat_id
group = GroupRepository().getById(chat)
if msg.document is not None:
#No APK Allowed
if msg.document.mime_type == apk and group['apk_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No APK Allowed!</b>")
#No DOC/DOCX Allowed
if msg.document.mime_type == doc or msg.document.mime_type == docx and group['docx_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No DOC/DOCX Allowed!</b>")
#No EXE Allowed
if msg.document.mime_type == exe and group['exe_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No EXE Allowed!</b>")
#No GIF Allowed
if msg.document.mime_type == gif and group['gif_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No GIF Allowed!</b>")
#No JPG Allowed
if msg.document.mime_type == jpg and group['jpg_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No JPG Allowed!</b>")
#No TARGZ Allowed
if msg.document.mime_type == targz and group['targz_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No TARGZ Allowed!</b>")
#No ZIP Allowed
if msg.document.mime_type == filezip and group['zip_filter'] == 1:
delete_message(update,context)
message(update, context, "#Automatic Filter Handler: <b>No ZIP Allowed!</b>")
if msg.document.mime_type == wav:
print("NO WAV ALLOWED")
if msg.document.mime_type == xml:
print("NO XML ALLOWED")
if msg.document.mime_type == mp3:
print("NO MP3 ALLOWED")
if msg.document.mime_type == pdf:
print("NO PDF ALLOWED")
if msg.document.mime_type == py:
print("NO PY ALLOWED")
if msg.document.mime_type == svg:
print("NO SVG ALLOWED")
if msg.document.mime_type == txt:
print("NO TXT ALLOWED")
|
[
"core.database.repository.group.GroupRepository",
"core.utilities.functions.delete_message",
"core.utilities.message.message"
] |
[((895, 912), 'core.database.repository.group.GroupRepository', 'GroupRepository', ([], {}), '()\n', (910, 912), False, 'from core.database.repository.group import GroupRepository\n'), ((1068, 1099), 'core.utilities.functions.delete_message', 'delete_message', (['update', 'context'], {}), '(update, context)\n', (1082, 1099), False, 'from core.utilities.functions import delete_message\n'), ((1111, 1188), 'core.utilities.message.message', 'message', (['update', 'context', '"""#Automatic Filter Handler: <b>No APK Allowed!</b>"""'], {}), "(update, context, '#Automatic Filter Handler: <b>No APK Allowed!</b>')\n", (1118, 1188), False, 'from core.utilities.message import message\n'), ((1336, 1367), 'core.utilities.functions.delete_message', 'delete_message', (['update', 'context'], {}), '(update, context)\n', (1350, 1367), False, 'from core.utilities.functions import delete_message\n'), ((1379, 1465), 'core.utilities.message.message', 'message', (['update', 'context', '"""#Automatic Filter Handler: <b>No DOC/DOCX Allowed!</b>"""'], {}), "(update, context,\n '#Automatic Filter Handler: <b>No DOC/DOCX Allowed!</b>')\n", (1386, 1465), False, 'from core.utilities.message import message\n'), ((1569, 1600), 'core.utilities.functions.delete_message', 'delete_message', (['update', 'context'], {}), '(update, context)\n', (1583, 1600), False, 'from core.utilities.functions import delete_message\n'), ((1612, 1689), 'core.utilities.message.message', 'message', (['update', 'context', '"""#Automatic Filter Handler: <b>No EXE Allowed!</b>"""'], {}), "(update, context, '#Automatic Filter Handler: <b>No EXE Allowed!</b>')\n", (1619, 1689), False, 'from core.utilities.message import message\n'), ((1797, 1828), 'core.utilities.functions.delete_message', 'delete_message', (['update', 'context'], {}), '(update, context)\n', (1811, 1828), False, 'from core.utilities.functions import delete_message\n'), ((1840, 1917), 'core.utilities.message.message', 'message', (['update', 'context', 
'"""#Automatic Filter Handler: <b>No GIF Allowed!</b>"""'], {}), "(update, context, '#Automatic Filter Handler: <b>No GIF Allowed!</b>')\n", (1847, 1917), False, 'from core.utilities.message import message\n'), ((2025, 2056), 'core.utilities.functions.delete_message', 'delete_message', (['update', 'context'], {}), '(update, context)\n', (2039, 2056), False, 'from core.utilities.functions import delete_message\n'), ((2068, 2145), 'core.utilities.message.message', 'message', (['update', 'context', '"""#Automatic Filter Handler: <b>No JPG Allowed!</b>"""'], {}), "(update, context, '#Automatic Filter Handler: <b>No JPG Allowed!</b>')\n", (2075, 2145), False, 'from core.utilities.message import message\n'), ((2259, 2290), 'core.utilities.functions.delete_message', 'delete_message', (['update', 'context'], {}), '(update, context)\n', (2273, 2290), False, 'from core.utilities.functions import delete_message\n'), ((2302, 2381), 'core.utilities.message.message', 'message', (['update', 'context', '"""#Automatic Filter Handler: <b>No TARGZ Allowed!</b>"""'], {}), "(update, context, '#Automatic Filter Handler: <b>No TARGZ Allowed!</b>')\n", (2309, 2381), False, 'from core.utilities.message import message\n'), ((2493, 2524), 'core.utilities.functions.delete_message', 'delete_message', (['update', 'context'], {}), '(update, context)\n', (2507, 2524), False, 'from core.utilities.functions import delete_message\n'), ((2536, 2613), 'core.utilities.message.message', 'message', (['update', 'context', '"""#Automatic Filter Handler: <b>No ZIP Allowed!</b>"""'], {}), "(update, context, '#Automatic Filter Handler: <b>No ZIP Allowed!</b>')\n", (2543, 2613), False, 'from core.utilities.message import message\n')]
|
from behave.matchers import RegexMatcher
from ahk import AHK
from behave_classy import step_impl_base
Base = step_impl_base()


class AHKSteps(AHK, Base):
    """Behave step implementations backed by AutoHotkey mouse control."""

    @Base.given(u'the mouse position is ({xpos:d}, {ypos:d})')
    def given_mouse_move(self, xpos, ypos):
        """Move the mouse to the absolute screen position (xpos, ypos)."""
        self.mouse_move(x=xpos, y=ypos)

    # Raw string so that '\d' is a literal regex token instead of an
    # invalid (deprecated) string escape sequence.
    @Base.when(r'I move the mouse (UP|DOWN|LEFT|RIGHT) (\d+)px', matcher=RegexMatcher)
    def move_direction(self, direction, px):
        """Move the mouse ``px`` pixels relative to its current position."""
        px = int(px)
        # UP/DOWN move along the y axis, LEFT/RIGHT along the x axis.
        if direction in ('UP', 'DOWN'):
            axis = 'y'
        else:
            axis = 'x'
        # Screen coordinates grow to the right and downwards, so LEFT and
        # UP are negative offsets.
        if direction in ('LEFT', 'UP'):
            px = px * -1
        kwargs = {axis: px, 'relative': True}
        self.mouse_move(**kwargs)

    @Base.then(u'I expect the mouse position to be ({xpos:d}, {ypos:d})')
    def check_position(self, xpos, ypos):
        """Assert that the current mouse position matches the expectation."""
        x, y = self.mouse_position
        assert x == xpos
        assert y == ypos


AHKSteps().register()
|
[
"behave_classy.step_impl_base"
] |
[((110, 126), 'behave_classy.step_impl_base', 'step_impl_base', ([], {}), '()\n', (124, 126), False, 'from behave_classy import step_impl_base\n')]
|
import numpy as nm
from sfepy.linalg import dot_sequences
from sfepy.terms.terms import Term, terms
class DivGradTerm(Term):
    r"""
    Diffusion term.

    :Definition:

    .. math::
        \int_{\Omega} \nu\ \nabla \ul{v} : \nabla \ul{u} \mbox{ , }
        \int_{\Omega} \nu\ \nabla \ul{u} : \nabla \ul{w} \\
        \int_{\Omega} \nabla \ul{v} : \nabla \ul{u} \mbox{ , }
        \int_{\Omega} \nabla \ul{u} : \nabla \ul{w}

    :Arguments 1:
        - material : :math:`\nu` (viscosity, optional)
        - virtual : :math:`\ul{v}`
        - state : :math:`\ul{u}`

    :Arguments 2:
        - material : :math:`\nu` (viscosity, optional)
        - parameter_1 : :math:`\ul{u}`
        - parameter_2 : :math:`\ul{w}`
    """
    name = 'dw_div_grad'
    arg_types = (('opt_material', 'virtual', 'state'),
                 ('opt_material', 'parameter_1', 'parameter_2'))
    arg_shapes = {'opt_material' : '1, 1', 'virtual' : ('D', 'state'),
                  'state' : 'D', 'parameter_1' : 'D', 'parameter_2' : 'D'}
    modes = ('weak', 'eval')
    function = staticmethod(terms.term_ns_asm_div_grad)
    def d_div_grad(self, out, grad1, grad2, mat, vg, fmode):
        """
        Evaluation-mode kernel: mat * (grad1 : grad2) in quadrature points.

        ``fmode == 2`` stores the per-quadrature-point values directly in
        ``out``; otherwise the values are integrated by the mapping ``vg``.
        """
        sh = grad1.shape
        # Flatten the (dim x dim) gradients so that the double contraction
        # ':' becomes a plain dot product of row vectors.
        g1 = grad1.reshape((sh[0], sh[1], sh[2] * sh[3]))
        g2 = grad2.reshape((sh[0], sh[1], sh[2] * sh[3]))
        aux = mat * dot_sequences(g1[..., None], g2, 'ATB')[..., None]
        if fmode == 2:
            out[:] = aux
            status = 0
        else:
            status = vg.integrate(out, aux, fmode)
        return status
    def get_fargs(self, mat, virtual, state,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Prepare the argument tuple for the weak/eval functions."""
        vg, _ = self.get_mapping(state)
        if mat is None:
            # The viscosity is optional; default to 1 in every point.
            n_el, n_qp, dim, n_en, n_c = self.get_data_shape(state)
            mat = nm.ones((1, n_qp, 1, 1), dtype=nm.float64)
        if mode == 'weak':
            if diff_var is None:
                # Residual mode: pass the flattened state gradient.
                grad = self.get(state, 'grad').transpose((0, 1, 3, 2))
                sh = grad.shape
                grad = grad.reshape((sh[0], sh[1], sh[2] * sh[3], 1))
                fmode = 0
            else:
                # Matrix mode: the kernel ignores the data array.
                grad = nm.array([0], ndmin=4, dtype=nm.float64)
                fmode = 1
            return grad, mat, vg, fmode
        elif mode == 'eval':
            grad1 = self.get(virtual, 'grad')
            grad2 = self.get(state, 'grad')
            fmode = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
            return grad1, grad2, mat, vg, fmode
        else:
            raise ValueError('unsupported evaluation mode in %s! (%s)'
                             % (self.name, mode))
    def get_eval_shape(self, mat, virtual, state,
                       mode=None, term_mode=None, diff_var=None, **kwargs):
        """Shape of the 'eval' result: one scalar per element."""
        n_el, n_qp, dim, n_en, n_c = self.get_data_shape(state)
        return (n_el, 1, 1, 1), state.dtype
    def set_arg_types(self):
        # Weak mode assembles via the C function; evaluation uses the
        # pure-Python implementation above.
        if self.mode == 'weak':
            self.function = terms.term_ns_asm_div_grad
        else:
            self.function = self.d_div_grad
class ConvectTerm(Term):
    r"""
    Nonlinear convective term.

    :Definition:

    .. math::
        \int_{\Omega} ((\ul{u} \cdot \nabla) \ul{u}) \cdot \ul{v}

    :Arguments:
        - virtual : :math:`\ul{v}`
        - state : :math:`\ul{u}`
    """
    name = 'dw_convect'
    arg_types = ('virtual', 'state')
    arg_shapes = {'virtual' : ('D', 'state'), 'state' : 'D'}
    function = staticmethod(terms.term_ns_asm_convect)
    def get_fargs(self, virtual, state,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Collect the arguments of the term_ns_asm_convect kernel."""
        geo, _ = self.get_mapping(state)
        # Gradient transposed to the layout the kernel expects; copy,
        # since transpose() only returns a view.
        grad_qp = self.get(state, 'grad').transpose((0, 1, 3, 2)).copy()
        vals_qp = self.get(state, 'val')
        # True when a tangent matrix (w.r.t. diff_var) is requested.
        diff_mode = diff_var is not None
        return grad_qp, vals_qp, geo, diff_mode
class LinearConvectTerm(Term):
    r"""
    Linearized convective term.

    :Definition:

    .. math::
        \int_{\Omega} ((\ul{b} \cdot \nabla) \ul{u}) \cdot \ul{v}

    .. math::
        ((\ul{b} \cdot \nabla) \ul{u})|_{qp}

    :Arguments:
        - virtual : :math:`\ul{v}`
        - parameter : :math:`\ul{b}` (advection velocity)
        - state : :math:`\ul{u}`
    """
    name = 'dw_lin_convect'
    arg_types = ('virtual', 'parameter', 'state')
    arg_shapes = {'virtual' : ('D', 'state'), 'parameter' : 'D', 'state' : 'D'}
    function = staticmethod(terms.dw_lin_convect)
    def get_fargs(self, virtual, parameter, state,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Prepare the argument tuple for the dw_lin_convect kernel."""
        vg, _ = self.get_mapping(state)
        val_qp = self.get(parameter, 'val')
        if mode == 'weak':
            if diff_var is None:
                # Residual mode: the actual state gradient is needed.
                grad = self.get(state, 'grad').transpose((0, 1, 3, 2)).copy()
                fmode = 0
            else:
                # Matrix mode: the kernel ignores the data array.
                grad = nm.array([0], ndmin=4, dtype=nm.float64)
                fmode = 1
            return grad, val_qp, vg, fmode
        elif mode == 'qp':
            # Per-quadrature-point evaluation of the linearized term.
            grad = self.get(state, 'grad').transpose((0, 1, 3, 2)).copy()
            fmode = 2
            return grad, val_qp, vg, fmode
        else:
            raise ValueError('unsupported evaluation mode in %s! (%s)'
                             % (self.name, mode))
class StokesTerm(Term):
    r"""
    Stokes problem coupling term. Corresponds to weak forms of gradient and
    divergence terms. Can be evaluated.

    :Definition:

    .. math::
        \int_{\Omega} p\ \nabla \cdot \ul{v} \mbox{ , }
        \int_{\Omega} q\ \nabla \cdot \ul{u}
        \mbox{ or }
        \int_{\Omega} c\ p\ \nabla \cdot \ul{v} \mbox{ , }
        \int_{\Omega} c\ q\ \nabla \cdot \ul{u}

    :Arguments 1:
        - material : :math:`c` (optional)
        - virtual : :math:`\ul{v}`
        - state : :math:`p`

    :Arguments 2:
        - material : :math:`c` (optional)
        - state : :math:`\ul{u}`
        - virtual : :math:`q`

    :Arguments 3:
        - material : :math:`c` (optional)
        - parameter_v : :math:`\ul{u}`
        - parameter_s : :math:`p`
    """
    name = 'dw_stokes'
    arg_types = (('opt_material', 'virtual', 'state'),
                 ('opt_material', 'state', 'virtual'),
                 ('opt_material', 'parameter_v', 'parameter_s'))
    arg_shapes = [{'opt_material' : '1, 1',
                   'virtual/grad' : ('D', None), 'state/grad' : 1,
                   'virtual/div' : (1, None), 'state/div' : 'D',
                   'parameter_v' : 'D', 'parameter_s' : 1},
                  {'opt_material' : None}]
    modes = ('grad', 'div', 'eval')
    @staticmethod
    def d_eval(out, coef, vec_qp, div, vvg):
        """Evaluation-mode kernel: integrate coef * p * div(u)."""
        out_qp = coef * vec_qp * div
        status = vvg.integrate(out, out_qp)
        return status
    def get_fargs(self, coef, vvar, svar,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Prepare the argument tuple for the grad/div/eval functions."""
        if self.mode == 'grad':
            # Gradient form: the scalar (pressure) value enters the kernel.
            qp_var, qp_name = svar, 'val'
        else:
            # Divergence form: the divergence of the vector variable.
            qp_var, qp_name = vvar, 'div'
        n_el, n_qp, dim, n_en, n_c = self.get_data_shape(vvar)
        if coef is None:
            # The coefficient is optional; default to 1 in every point.
            coef = nm.ones((1, n_qp, 1, 1), dtype=nm.float64)
        if mode == 'weak':
            vvg, _ = self.get_mapping(vvar)
            svg, _ = self.get_mapping(svar)
            if diff_var is None:
                val_qp = self.get(qp_var, qp_name)
                fmode = 0
            else:
                # Matrix mode: the kernel ignores the data array.
                val_qp = nm.array([0], ndmin=4, dtype=nm.float64)
                fmode = 1
            return coef, val_qp, svg, vvg, fmode
        elif mode == 'eval':
            vvg, _ = self.get_mapping(vvar)
            div = self.get(vvar, 'div')
            vec_qp = self.get(svar, 'val')
            return coef, vec_qp, div, vvg
        else:
            raise ValueError('unsupported evaluation mode in %s! (%s)'
                             % (self.name, mode))
    def get_eval_shape(self, coef, vvar, svar,
                       mode=None, term_mode=None, diff_var=None, **kwargs):
        """Shape of the 'eval' result: one scalar per element."""
        n_el, n_qp, dim, n_en, n_c = self.get_data_shape(vvar)
        return (n_el, 1, 1, 1), vvar.dtype
    def set_arg_types(self):
        # Dispatch the low-level function according to the term mode.
        self.function = {
            'grad' : terms.dw_grad,
            'div' : terms.dw_div,
            'eval' : self.d_eval,
        }[self.mode]
class GradTerm(Term):
    r"""
    Evaluate gradient of a scalar or vector field.

    Supports 'eval', 'el_avg' and 'qp' evaluation modes.

    :Definition:

    .. math::
        \int_{\Omega} \nabla p \mbox{ or } \int_{\Omega} \nabla \ul{w}

    .. math::
        \mbox{vector for } K \from \Ical_h: \int_{T_K} \nabla p /
        \int_{T_K} 1 \mbox{ or } \int_{T_K} \nabla \ul{w} /
        \int_{T_K} 1

    .. math::
        (\nabla p)|_{qp} \mbox{ or } \nabla \ul{w}|_{qp}

    :Arguments:
        - parameter : :math:`p` or :math:`\ul{w}`
    """
    name = 'ev_grad'
    arg_types = ('parameter',)
    arg_shapes = [{'parameter' : 1}, {'parameter' : 'D'}]
    @staticmethod
    def function(out, grad, vg, fmode):
        """Copy per-QP gradients ('qp' mode) or integrate them over cells."""
        if fmode != 2:
            return vg.integrate(out, grad, fmode)
        out[:] = grad
        return 0
    def get_fargs(self, parameter,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Fetch the gradient data and mapping for the requested mode."""
        mapping, _ = self.get_mapping(parameter)
        grad_qp = self.get(parameter, 'grad')
        # Unrecognized modes fall back to element averaging (flag 1).
        int_flag = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
        return grad_qp, mapping, int_flag
    def get_eval_shape(self, parameter,
                       mode=None, term_mode=None, diff_var=None, **kwargs):
        """Result shape: one value per cell, or per QP in 'qp' mode."""
        n_el, n_qp, dim, n_en, n_c = self.get_data_shape(parameter)
        n_qp = n_qp if mode == 'qp' else 1
        return (n_el, n_qp, dim, n_c), parameter.dtype
class DivTerm(Term):
    r"""
    Evaluate divergence of a vector field.

    Supports 'eval', 'el_avg' and 'qp' evaluation modes.

    :Definition:

    .. math::
        \int_{\Omega} \nabla \cdot \ul{u}

    .. math::
        \mbox{vector for } K \from \Ical_h:
        \int_{T_K} \nabla \cdot \ul{u} / \int_{T_K} 1

    .. math::
        (\nabla \cdot \ul{u})|_{qp}

    :Arguments:
        - parameter : :math:`\ul{u}`
    """
    name = 'ev_div'
    arg_types = ('parameter',)
    arg_shapes = {'parameter' : 'D'}
    @staticmethod
    def function(out, div, vg, fmode):
        """Copy per-QP divergence ('qp' mode) or integrate it over cells."""
        if fmode != 2:
            return vg.integrate(out, div, fmode)
        out[:] = div
        return 0
    def get_fargs(self, parameter,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Fetch the divergence data and mapping for the requested mode."""
        mapping, _ = self.get_mapping(parameter)
        div_qp = self.get(parameter, 'div')
        # Unrecognized modes fall back to element averaging (flag 1).
        int_flag = {'eval' : 0, 'el_avg' : 1, 'qp' : 2}.get(mode, 1)
        return div_qp, mapping, int_flag
    def get_eval_shape(self, parameter,
                       mode=None, term_mode=None, diff_var=None, **kwargs):
        """Result shape: one scalar per cell, or per QP in 'qp' mode."""
        n_el, n_qp, dim, n_en, n_c = self.get_data_shape(parameter)
        n_qp = n_qp if mode == 'qp' else 1
        return (n_el, n_qp, 1, 1), parameter.dtype
class DivOperatorTerm(Term):
    r"""
    Weighted divergence term of a test function.

    :Definition:

    .. math::
        \int_{\Omega} \nabla \cdot \ul{v} \mbox { or } \int_{\Omega} c \nabla
        \cdot \ul{v}

    :Arguments:
        - material : :math:`c` (optional)
        - virtual : :math:`\ul{v}`
    """
    name = 'dw_div'
    arg_types = ('opt_material', 'virtual')
    arg_shapes = [{'opt_material' : '1, 1', 'virtual' : ('D', None)},
                  {'opt_material' : None}]
    @staticmethod
    def function(out, mat, vg):
        """Integrate the (optionally weighted) divergence of the test base."""
        n_cell, n_qp, dim, n_ep = vg.bfg.shape
        # Stack the basis-function gradients into a single column per QP.
        bf_div = nm.ascontiguousarray(
            vg.bfg.reshape((n_cell, n_qp, dim * n_ep, 1)))
        integrand = bf_div if mat is None else mat * bf_div
        return vg.integrate(out, integrand)
    def get_fargs(self, mat, virtual,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Pass the weight and the mapping of the test variable."""
        mapping, _ = self.get_mapping(virtual)
        return mat, mapping
class GradDivStabilizationTerm(Term):
    r"""
    Grad-div stabilization term ( :math:`\gamma` is a global stabilization
    parameter).

    :Definition:

    .. math::
        \gamma \int_{\Omega} (\nabla\cdot\ul{u}) \cdot (\nabla\cdot\ul{v})

    :Arguments:
        - material : :math:`\gamma`
        - virtual : :math:`\ul{v}`
        - state : :math:`\ul{u}`
    """
    name = 'dw_st_grad_div'
    arg_types = ('material', 'virtual', 'state')
    arg_shapes = {'material' : '1, 1', 'virtual' : ('D', 'state'),
                  'state' : 'D'}
    function = staticmethod(terms.dw_st_grad_div)
    def get_fargs(self, gamma, virtual, state,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Collect the arguments of the dw_st_grad_div kernel."""
        mapping, _ = self.get_mapping(state)
        if diff_var is not None:
            # Matrix mode: the kernel does not read the divergence data.
            div_qp = nm.array([0], ndmin=4, dtype=nm.float64)
            diff_flag = 1
        else:
            div_qp = self.get(state, 'div')
            diff_flag = 0
        return div_qp, gamma, mapping, diff_flag
from sfepy.terms.terms_diffusion import LaplaceTerm
class PSPGPStabilizationTerm(LaplaceTerm):
    r"""
    PSPG stabilization term, pressure part ( :math:`\tau` is a local
    stabilization parameter); this is the Laplace term dw_laplace under
    another name.

    :Definition:

    .. math::
        \sum_{K \in \Ical_h}\int_{T_K} \tau_K\ \nabla p \cdot \nabla q

    :Arguments:
        - material : :math:`\tau_K`
        - virtual : :math:`q`
        - state : :math:`p`
    """
    name = 'dw_st_pspg_p'
class PSPGCStabilizationTerm(Term):
    r"""
    PSPG stabilization term, convective part ( :math:`\tau` is a local
    stabilization parameter).

    :Definition:

    .. math::
        \sum_{K \in \Ical_h}\int_{T_K} \tau_K\ ((\ul{b} \cdot \nabla) \ul{u})
        \cdot \nabla q

    :Arguments:
        - material : :math:`\tau_K`
        - virtual : :math:`q`
        - parameter : :math:`\ul{b}`
        - state : :math:`\ul{u}`
    """
    name = 'dw_st_pspg_c'
    arg_types = ('material', 'virtual', 'parameter', 'state')
    arg_shapes = {'material' : '1, 1', 'virtual' : (1, None),
                  'parameter' : 'D', 'state' : 'D'}
    function = staticmethod(terms.dw_st_pspg_c)
    def get_fargs(self, tau, virtual, parameter, state,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Prepare the argument tuple for the dw_st_pspg_c kernel."""
        sap, svg = self.get_approximation(virtual)
        vap, vvg = self.get_approximation(state)
        val_qp = self.get(parameter, 'val')
        # Cell connectivity of the state field lets the kernel gather DOFs.
        conn = vap.get_connectivity(self.region, self.integration)
        # fmode: 0 = residual, 1 = tangent-matrix assembling.
        if diff_var is None:
            fmode = 0
        else:
            fmode = 1
        return val_qp, state(), tau, svg, vvg, conn, fmode
class SUPGPStabilizationTerm(Term):
    r"""
    SUPG stabilization term, pressure part ( :math:`\delta` is a local
    stabilization parameter).

    :Definition:

    .. math::
        \sum_{K \in \Ical_h}\int_{T_K} \delta_K\ \nabla p\cdot ((\ul{b} \cdot
        \nabla) \ul{v})

    :Arguments:
        - material : :math:`\delta_K`
        - virtual : :math:`\ul{v}`
        - parameter : :math:`\ul{b}`
        - state : :math:`p`
    """
    name = 'dw_st_supg_p'
    arg_types = ('material', 'virtual', 'parameter', 'state')
    arg_shapes = {'material' : '1, 1', 'virtual' : ('D', None),
                  'parameter' : 'D', 'state' : 1}
    function = staticmethod(terms.dw_st_supg_p)
    def get_fargs(self, delta, virtual, parameter, state,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Collect the arguments of the dw_st_supg_p kernel."""
        v_mapping, _ = self.get_mapping(virtual)
        s_mapping, _ = self.get_mapping(state)
        b_qp = self.get(parameter, 'val')
        if diff_var is not None:
            # Matrix mode: the kernel does not read the gradient data.
            p_grad = nm.array([0], ndmin=4, dtype=nm.float64)
            diff_flag = 1
        else:
            p_grad = self.get(state, 'grad')
            diff_flag = 0
        return b_qp, p_grad, delta, v_mapping, s_mapping, diff_flag
class SUPGCStabilizationTerm(Term):
    r"""
    SUPG stabilization term, convective part ( :math:`\delta` is a local
    stabilization parameter).

    :Definition:

    .. math::
        \sum_{K \in \Ical_h}\int_{T_K} \delta_K\ ((\ul{b} \cdot \nabla)
        \ul{u})\cdot ((\ul{b} \cdot \nabla) \ul{v})

    :Arguments:
        - material : :math:`\delta_K`
        - virtual : :math:`\ul{v}`
        - parameter : :math:`\ul{b}`
        - state : :math:`\ul{u}`
    """
    name = 'dw_st_supg_c'
    arg_types = ('material', 'virtual', 'parameter', 'state')
    arg_shapes = {'material' : '1, 1', 'virtual' : ('D', 'state'),
                  'parameter' : 'D', 'state' : 'D'}
    function = staticmethod(terms.dw_st_supg_c)
    def get_fargs(self, delta, virtual, parameter, state,
                  mode=None, term_mode=None, diff_var=None, **kwargs):
        """Prepare the argument tuple for the dw_st_supg_c kernel."""
        ap, vg = self.get_approximation(virtual)
        val_qp = self.get(parameter, 'val')
        # Cell connectivity is needed to gather the state DOFs in the kernel.
        conn = ap.get_connectivity(self.region, self.integration)
        # fmode: 0 = residual, 1 = tangent-matrix assembling.
        if diff_var is None:
            fmode = 0
        else:
            fmode = 1
        return val_qp, state(), delta, vg, conn, fmode
|
[
"numpy.array",
"sfepy.linalg.dot_sequences",
"numpy.ones",
"numpy.ascontiguousarray"
] |
[((11757, 11785), 'numpy.ascontiguousarray', 'nm.ascontiguousarray', (['div_bf'], {}), '(div_bf)\n', (11777, 11785), True, 'import numpy as nm\n'), ((1821, 1863), 'numpy.ones', 'nm.ones', (['(1, n_qp, 1, 1)'], {'dtype': 'nm.float64'}), '((1, n_qp, 1, 1), dtype=nm.float64)\n', (1828, 1863), True, 'import numpy as nm\n'), ((7094, 7136), 'numpy.ones', 'nm.ones', (['(1, n_qp, 1, 1)'], {'dtype': 'nm.float64'}), '((1, n_qp, 1, 1), dtype=nm.float64)\n', (7101, 7136), True, 'import numpy as nm\n'), ((13021, 13061), 'numpy.array', 'nm.array', (['[0]'], {'ndmin': '(4)', 'dtype': 'nm.float64'}), '([0], ndmin=4, dtype=nm.float64)\n', (13029, 13061), True, 'import numpy as nm\n'), ((15893, 15933), 'numpy.array', 'nm.array', (['[0]'], {'ndmin': '(4)', 'dtype': 'nm.float64'}), '([0], ndmin=4, dtype=nm.float64)\n', (15901, 15933), True, 'import numpy as nm\n'), ((1341, 1380), 'sfepy.linalg.dot_sequences', 'dot_sequences', (['g1[..., None]', 'g2', '"""ATB"""'], {}), "(g1[..., None], g2, 'ATB')\n", (1354, 1380), False, 'from sfepy.linalg import dot_sequences\n'), ((2166, 2206), 'numpy.array', 'nm.array', (['[0]'], {'ndmin': '(4)', 'dtype': 'nm.float64'}), '([0], ndmin=4, dtype=nm.float64)\n', (2174, 2206), True, 'import numpy as nm\n'), ((4832, 4872), 'numpy.array', 'nm.array', (['[0]'], {'ndmin': '(4)', 'dtype': 'nm.float64'}), '([0], ndmin=4, dtype=nm.float64)\n', (4840, 4872), True, 'import numpy as nm\n'), ((7408, 7448), 'numpy.array', 'nm.array', (['[0]'], {'ndmin': '(4)', 'dtype': 'nm.float64'}), '([0], ndmin=4, dtype=nm.float64)\n', (7416, 7448), True, 'import numpy as nm\n')]
|
# Generated by Django 3.1.1 on 2021-09-21 04:52
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: make CrawlerLine.ustatus an optional
    # positive integer with default 1.
    dependencies = [
        ('crawler', '0012_auto_20210921_0451'),
    ]
    operations = [
        migrations.AlterField(
            model_name='crawlerline',
            name='ustatus',
            field=models.PositiveIntegerField(blank=True, default=1, null=True),
        ),
    ]
|
[
"django.db.models.PositiveIntegerField"
] |
[((342, 403), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {'blank': '(True)', 'default': '(1)', 'null': '(True)'}), '(blank=True, default=1, null=True)\n', (369, 403), False, 'from django.db import migrations, models\n')]
|
#!/usr/bin/env python
#
# Copyright (c) Microsoft Corporation.
# Licensed under the MIT License.
import logging
import os
import sys
import tempfile
from subprocess import PIPE, CalledProcessError, check_call # nosec
from typing import List, Optional
from onefuzztypes.models import NotificationConfig
from onefuzztypes.primitives import PoolName
from onefuzz.api import Command, Onefuzz
from onefuzz.cli import execute_api
SANITIZERS = ["address", "dataflow", "memory", "undefined"]
class Ossfuzz(Command):
    def build(self, project: str, sanitizer: str) -> None:
        """Build the latest oss-fuzz target.

        Runs the project's gcr.io/oss-fuzz container with the current
        working directory bind-mounted as ``/out``, so the compiled
        targets land next to the caller.

        Raises ``CalledProcessError`` when the container build fails.
        """
        self.logger.info("building %s:%s", project, sanitizer)
        cmd = [
            "docker",
            "run",
            "--rm",
            "-ti",
            "-e",
            "SANITIZER=%s" % sanitizer,
            "--mount",
            "src=%s,target=/out,type=bind" % os.getcwd(),
            "gcr.io/oss-fuzz/%s" % project,
            "compile",
        ]
        check_call(cmd, stderr=PIPE, stdout=PIPE)

    def fuzz(
        self,
        project: str,
        build: str,
        pool: PoolName,
        sanitizers: Optional[List[str]] = None,
        notification_config: Optional[NotificationConfig] = None,
    ) -> None:
        """ Build & Launch all of the libFuzzer targets for a given project """
        if sanitizers is None:
            sanitizers = SANITIZERS

        # Each build runs inside a throw-away temporary directory.  Remember
        # the caller's working directory and restore it afterwards —
        # otherwise the process is left chdir'ed into a *deleted* directory
        # once the TemporaryDirectory context exits, and any later
        # os.getcwd() (e.g. in build()) fails.
        original_cwd = os.getcwd()
        try:
            for sanitizer in sanitizers:
                with tempfile.TemporaryDirectory() as tmpdir:
                    os.chdir(tmpdir)
                    try:
                        self.build(project, sanitizer)
                    except CalledProcessError:
                        self.logger.warning("building %s:%s failed", project, sanitizer)
                        continue
                    self.logger.info("launching %s:%s build:%s", project, sanitizer, build)
                    self.onefuzz.template.ossfuzz.libfuzzer(
                        project,
                        "%s:%s" % (sanitizer, build),
                        pool,
                        max_target_count=0,
                        sync_inputs=True,
                        notification_config=notification_config,
                    )
        finally:
            os.chdir(original_cwd)

    def stop(self, project: str) -> None:
        """Stop every job of ``project`` that was created from the 'base' build."""
        for job in self.onefuzz.jobs.list():
            if job.config.project != project:
                continue
            if job.config.build != "base":
                continue
            self.logger.info("stopping %s: %s", job.job_id, job.state)
            self.onefuzz.jobs.delete(job.job_id)
def main() -> int:
    """CLI entry point: expose the Ossfuzz command set via the onefuzz API."""
    logger = logging.getLogger("ossfuzz")
    return execute_api(Ossfuzz(Onefuzz(), logger), [Command], "0.0.1")


if __name__ == "__main__":
    sys.exit(main())
|
[
"onefuzz.api.Onefuzz",
"tempfile.TemporaryDirectory",
"logging.getLogger",
"subprocess.check_call",
"os.getcwd",
"os.chdir"
] |
[((1008, 1049), 'subprocess.check_call', 'check_call', (['cmd'], {'stderr': 'PIPE', 'stdout': 'PIPE'}), '(cmd, stderr=PIPE, stdout=PIPE)\n', (1018, 1049), False, 'from subprocess import PIPE, CalledProcessError, check_call\n'), ((2596, 2605), 'onefuzz.api.Onefuzz', 'Onefuzz', ([], {}), '()\n', (2603, 2605), False, 'from onefuzz.api import Command, Onefuzz\n'), ((2607, 2635), 'logging.getLogger', 'logging.getLogger', (['"""ossfuzz"""'], {}), "('ossfuzz')\n", (2624, 2635), False, 'import logging\n'), ((909, 920), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (918, 920), False, 'import os\n'), ((1477, 1506), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {}), '()\n', (1504, 1506), False, 'import tempfile\n'), ((1534, 1550), 'os.chdir', 'os.chdir', (['tmpdir'], {}), '(tmpdir)\n', (1542, 1550), False, 'import os\n')]
|
# -*- coding: utf-8 -*-
from __future__ import absolute_import, print_function, unicode_literals
import itertools
import operator
from collections import OrderedDict, defaultdict
from functools import reduce
import six
from .formatters import DEFAULT_FORMATTER, DEFAULT_LENGTH
from .utils import is_site_package, is_std_lib
@six.python_2_unicode_compatible
class BaseImportGroup(object):
    """Base class for one category of import statements.

    Subclasses decide membership by overriding ``should_add_statement``.
    """
    def __init__(self, config=None, **kwargs):
        # config: per-group configuration (e.g. the package list for
        # PackagesGroup); file_artifacts: file-wide artifacts such as the
        # detected line separator.
        self.config = config or {}
        self.statements = kwargs.get("statements", [])
        self.file_artifacts = kwargs.get("file_artifacts", {})
    @property
    def unique_statements(self):
        """Return merged statements, de-duplicated and sorted."""
        return sorted(list(set(self.merged_statements)))
    @property
    def merged_statements(self):
        """
        Merge statements with the same import stems
        """
        # Leafless statements (plain "import x") cannot be merged by stem,
        # so they are kept as-is; the rest are grouped by their stem.
        leafless_counter = defaultdict(list)
        counter = defaultdict(list)
        for statement in self.statements:
            if statement.leafs:
                counter[statement.stem].append(statement)
            else:
                leafless_counter[statement.stem].append(statement)
        merged_statements = list(itertools.chain(*leafless_counter.values()))
        def merge(statements):
            # Star imports ("from x import *") must stay separate; the
            # remaining statements with the same stem are folded together
            # with the statements' "+" operator.
            _special = []
            _statements = []
            for i in statements:
                if i.leafs and i.leafs[0].name == "*":
                    _special.append(i)
                else:
                    _statements.append(i)
            _reduced = []
            if _statements:
                _reduced = [reduce(lambda a, b: a + b, _statements)]
            return _special + _reduced
        for statements in counter.values():
            merged_statements.extend(merge(statements))
        return merged_statements
    def all_line_numbers(self):
        """Return sorted, unique source line numbers of all statements."""
        return sorted(
            list(
                set(
                    list(
                        itertools.chain(
                            *map(
                                operator.attrgetter("line_numbers"),
                                self.statements,
                            )
                        )
                    )
                )
            )
        )
    def should_add_statement(self, statement):
        """Return True when ``statement`` belongs to this group."""
        raise NotImplementedError
    def add_statement(self, statement):
        """Add ``statement`` if it belongs here; return whether it was added."""
        if self.should_add_statement(statement):
            self.statements.append(statement)
            return True
        return False
    def as_string(self):
        """Render the group as source text, one statement per line."""
        sep = self.file_artifacts.get("sep", "\n")
        return sep.join(
            map(operator.methodcaller("as_string"), self.unique_statements)
        )
    def formatted(self, formatter=DEFAULT_FORMATTER, length=DEFAULT_LENGTH):
        """Render the group using ``formatter`` with max line ``length``."""
        sep = self.file_artifacts.get("sep", "\n")
        return sep.join(
            map(
                operator.methodcaller(
                    "formatted", formatter=formatter, length=length
                ),
                self.unique_statements,
            )
        )
    def __str__(self):
        return self.as_string()
class StdLibGroup(BaseImportGroup):
    """Group for imports that come from the Python standard library."""
    def should_add_statement(self, statement):
        """Accept the statement when its root module is a stdlib module."""
        root = statement.root_module
        return is_std_lib(root)
class SitePackagesGroup(BaseImportGroup):
    """Group for imports that resolve to installed site-packages."""
    def should_add_statement(self, statement):
        """Accept the statement when its root module is a site package."""
        root = statement.root_module
        return is_site_package(root)
class PackagesGroup(BaseImportGroup):
    """Group for imports whose root module appears in a configured list."""
    def __init__(self, *args, **kwargs):
        super(PackagesGroup, self).__init__(*args, **kwargs)
        # This group is meaningless without an explicit package list.
        if "packages" not in self.config:
            raise ValueError(
                '"package" config must be supplied for packages import group'
            )
    def should_add_statement(self, statement):
        """Accept the statement when its root module is configured."""
        packages = self.config.get("packages", [])
        return statement.root_module in packages
class LocalGroup(BaseImportGroup):
    """Group for relative (local) imports such as ``from . import x``."""
    def should_add_statement(self, statement):
        """Accept relative imports, i.e. stems starting with a dot."""
        stem = statement.stem
        return stem.startswith(".")
class RemainderGroup(BaseImportGroup):
    """Fallback group that accepts any statement; configure it last."""
    def should_add_statement(self, statement):
        """Accept every statement unconditionally."""
        return True
# -- RemainderGroup goes last and catches everything left over
# The insertion order doubles as the statement-routing priority used by
# sort_groups() below, hence the OrderedDict (also needed on Python 2).
GROUP_MAPPING = OrderedDict(
    (
        ("stdlib", StdLibGroup),
        ("sitepackages", SitePackagesGroup),
        ("packages", PackagesGroup),
        ("local", LocalGroup),
        ("remainder", RemainderGroup),
    )
)
def sort_groups(groups):
    """Sort group instances by their type's position in GROUP_MAPPING."""
    return sorted(
        groups, key=lambda i: list(GROUP_MAPPING.values()).index(type(i))
    )
@six.python_2_unicode_compatible
class ImportGroups(list):
    """An ordered collection of import groups for a single source file."""
    def __init__(self, *args, **kwargs):
        super(ImportGroups, self).__init__(*args)
        # file_artifacts carries file-wide artifacts (e.g. line separator).
        self.file_artifacts = kwargs.get("file_artifacts", {})
    def all_line_numbers(self):
        """Return sorted, unique line numbers across all groups."""
        return sorted(
            list(
                set(
                    list(
                        itertools.chain(
                            *map(
                                operator.methodcaller("all_line_numbers"), self
                            )
                        )
                    )
                )
            )
        )
    def add_group(self, config):
        """Instantiate and append the group described by ``config``.

        ``config`` must contain a "type" key naming a GROUP_MAPPING entry;
        raises ValueError otherwise.
        """
        if "type" not in config:
            msg = '"type" must be specified in ' "import group config"
            raise ValueError(msg)
        if config["type"] not in GROUP_MAPPING:
            msg = '"{}" is not supported import group'.format(config["type"])
            raise ValueError(msg)
        self.append(GROUP_MAPPING[config["type"]](config))
    def add_statement_to_group(self, statement):
        """Route ``statement`` to the highest-priority accepting group.

        Raises ValueError when no configured group accepts it.
        """
        groups_by_priority = sort_groups(self)
        added = False
        # Groups are tried in GROUP_MAPPING priority order; the first one
        # that accepts the statement wins.
        for group in groups_by_priority:
            if group.add_statement(statement):
                added = True
                break
        if not added:
            msg = (
                "Import statement was not added into "
                "any of the import groups. "
                "Perhaps you can consider adding "
                '"remaining" import group which will '
                "catch all remaining import statements."
            )
            raise ValueError(msg)
    def as_string(self):
        """Render all non-empty groups separated by a blank line."""
        sep = self.file_artifacts.get("sep", "\n") * 2
        return sep.join(
            filter(None, map(operator.methodcaller("as_string"), self))
        )
    def formatted(self, formatter=DEFAULT_FORMATTER, length=DEFAULT_LENGTH):
        """Render all non-empty groups using ``formatter``."""
        sep = self.file_artifacts.get("sep", "\n") * 2
        return sep.join(
            filter(
                None,
                map(
                    operator.methodcaller(
                        "formatted", formatter=formatter, length=length
                    ),
                    self,
                ),
            )
        )
    def __str__(self):
        return self.as_string()
|
[
"operator.attrgetter",
"collections.OrderedDict",
"functools.reduce",
"operator.methodcaller",
"collections.defaultdict"
] |
[((4131, 4297), 'collections.OrderedDict', 'OrderedDict', (["(('stdlib', StdLibGroup), ('sitepackages', SitePackagesGroup), ('packages',\n PackagesGroup), ('local', LocalGroup), ('remainder', RemainderGroup))"], {}), "((('stdlib', StdLibGroup), ('sitepackages', SitePackagesGroup),\n ('packages', PackagesGroup), ('local', LocalGroup), ('remainder',\n RemainderGroup)))\n", (4142, 4297), False, 'from collections import OrderedDict, defaultdict\n'), ((849, 866), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (860, 866), False, 'from collections import OrderedDict, defaultdict\n'), ((885, 902), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (896, 902), False, 'from collections import OrderedDict, defaultdict\n'), ((2592, 2626), 'operator.methodcaller', 'operator.methodcaller', (['"""as_string"""'], {}), "('as_string')\n", (2613, 2626), False, 'import operator\n'), ((2849, 2919), 'operator.methodcaller', 'operator.methodcaller', (['"""formatted"""'], {'formatter': 'formatter', 'length': 'length'}), "('formatted', formatter=formatter, length=length)\n", (2870, 2919), False, 'import operator\n'), ((1561, 1600), 'functools.reduce', 'reduce', (['(lambda a, b: a + b)', '_statements'], {}), '(lambda a, b: a + b, _statements)\n', (1567, 1600), False, 'from functools import reduce\n'), ((6222, 6256), 'operator.methodcaller', 'operator.methodcaller', (['"""as_string"""'], {}), "('as_string')\n", (6243, 6256), False, 'import operator\n'), ((6516, 6586), 'operator.methodcaller', 'operator.methodcaller', (['"""formatted"""'], {'formatter': 'formatter', 'length': 'length'}), "('formatted', formatter=formatter, length=length)\n", (6537, 6586), False, 'import operator\n'), ((2005, 2040), 'operator.attrgetter', 'operator.attrgetter', (['"""line_numbers"""'], {}), "('line_numbers')\n", (2024, 2040), False, 'import operator\n'), ((4912, 4953), 'operator.methodcaller', 'operator.methodcaller', (['"""all_line_numbers"""'], {}), 
"('all_line_numbers')\n", (4933, 4953), False, 'import operator\n')]
|
"""
Function convert lists of 10 elements
into in the format of phone number
Example,
(123) 456-789
"""
def create_phone_number(n: list) -> str:
    """Format a list of ten digits as a US-style phone number.

    >>> create_phone_number([1,2,3,4,5,6,7,8,9,0])
    '(123) 456-7890'
    """
    digits = [str(d) for d in n]
    area, prefix, line = digits[:3], digits[3:6], digits[6:]
    return "({}) {}-{}".format("".join(area), "".join(prefix), "".join(line))
if __name__ == "__main__":
    # Run the embedded doctests when executed as a script.
    import doctest

    doctest.testmod()
|
[
"doctest.testmod"
] |
[((347, 364), 'doctest.testmod', 'doctest.testmod', ([], {}), '()\n', (362, 364), False, 'import doctest\n')]
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import cv2
import imageio
import numpy as np
from tar.miscellaneous import convert_flow_to_color
# Load two consecutive frames and convert them to single-channel
# grayscale, as required by the Farneback optical-flow estimator.
prev = imageio.imread("ressources/1_1.png")
prev = cv2.cvtColor(prev, cv2.COLOR_RGB2GRAY)
curr = imageio.imread("ressources/1_2.png")
curr = cv2.cvtColor(curr, cv2.COLOR_RGB2GRAY)
# Dense Farneback optical flow; positional args are pyr_scale=0.9,
# levels=15, winsize=20, iterations=100, poly_n=10, poly_sigma=1.5,
# with Gaussian windowing instead of the default box filter.
flow = cv2.calcOpticalFlowFarneback(prev, curr, None, 0.9, 15, 20, 100, 10, 1.5, cv2.OPTFLOW_FARNEBACK_GAUSSIAN)
# Visualize the flow field as an RGB image and save it.
rgb = convert_flow_to_color(flow)
imageio.imsave("/Users/sele/Desktop/test.png", rgb)
|
[
"tar.miscellaneous.convert_flow_to_color",
"imageio.imsave",
"cv2.cvtColor",
"imageio.imread",
"cv2.calcOpticalFlowFarneback"
] |
[((154, 190), 'imageio.imread', 'imageio.imread', (['"""ressources/1_1.png"""'], {}), "('ressources/1_1.png')\n", (168, 190), False, 'import imageio\n'), ((198, 236), 'cv2.cvtColor', 'cv2.cvtColor', (['prev', 'cv2.COLOR_RGB2GRAY'], {}), '(prev, cv2.COLOR_RGB2GRAY)\n', (210, 236), False, 'import cv2\n'), ((244, 280), 'imageio.imread', 'imageio.imread', (['"""ressources/1_2.png"""'], {}), "('ressources/1_2.png')\n", (258, 280), False, 'import imageio\n'), ((288, 326), 'cv2.cvtColor', 'cv2.cvtColor', (['curr', 'cv2.COLOR_RGB2GRAY'], {}), '(curr, cv2.COLOR_RGB2GRAY)\n', (300, 326), False, 'import cv2\n'), ((334, 443), 'cv2.calcOpticalFlowFarneback', 'cv2.calcOpticalFlowFarneback', (['prev', 'curr', 'None', '(0.9)', '(15)', '(20)', '(100)', '(10)', '(1.5)', 'cv2.OPTFLOW_FARNEBACK_GAUSSIAN'], {}), '(prev, curr, None, 0.9, 15, 20, 100, 10, 1.5,\n cv2.OPTFLOW_FARNEBACK_GAUSSIAN)\n', (362, 443), False, 'import cv2\n'), ((447, 474), 'tar.miscellaneous.convert_flow_to_color', 'convert_flow_to_color', (['flow'], {}), '(flow)\n', (468, 474), False, 'from tar.miscellaneous import convert_flow_to_color\n'), ((475, 526), 'imageio.imsave', 'imageio.imsave', (['"""/Users/sele/Desktop/test.png"""', 'rgb'], {}), "('/Users/sele/Desktop/test.png', rgb)\n", (489, 526), False, 'import imageio\n')]
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
def plot_loss(model, n_iter):
    """Draw the learning curve: training (blue) vs. validation (red) loss."""
    plt.figure()
    plt.plot(model.trainloss, 'b-', model.validloss, 'r-')
    plt.xlim(0, n_iter)
    for setter, text in (
        (plt.xlabel, 'iteration'),
        (plt.ylabel, 'loss'),
        (plt.title, 'learning curve'),
    ):
        setter(text)
    plt.legend(['training loss', 'validation loss'])
    plt.show()
def plot_F1(model, n_iter):
    """Draw the F1-score curve: training (blue) vs. validation (red)."""
    plt.figure()
    plt.plot(model.trainF1, 'b-', model.validF1, 'r-')
    plt.xlim(0, n_iter)
    for setter, text in (
        (plt.xlabel, 'iteration'),
        (plt.ylabel, 'F1 score'),
        (plt.title, 'F1 metric curve'),
    ):
        setter(text)
    plt.legend(['training F1', 'validation F1'], loc='lower right')
    plt.show()
def confusion_matrix(threshold, y_hat, y_target):
    """Compute binary-classification confusion-matrix counts.

    Predictions are obtained by thresholding the scores
    (``y_hat > threshold`` -> class 1, otherwise class 0) and comparing
    them against the 0/1 ground-truth labels in ``y_target``.

    Args:
        threshold: decision threshold applied to the prediction scores.
        y_hat: array-like of prediction scores.
        y_target: array-like of 0/1 ground-truth labels, same shape.

    Returns:
        Tuple ``(TP, FP, FN, TN)`` of integer counts.
    """
    pred = (np.asarray(y_hat) > threshold).astype(np.int32)
    target = np.asarray(y_target).astype(np.int32)
    TP = int(np.sum((pred == 1) & (target == 1)))
    FP = int(np.sum((pred == 1) & (target == 0)))
    FN = int(np.sum((pred == 0) & (target == 1)))
    TN = int(np.sum((pred == 0) & (target == 0)))
    return TP, FP, FN, TN
|
[
"matplotlib.pyplot.ylabel",
"numpy.hstack",
"matplotlib.pyplot.xlabel",
"matplotlib.pyplot.plot",
"matplotlib.pyplot.figure",
"matplotlib.pyplot.title",
"matplotlib.pyplot.xlim",
"matplotlib.pyplot.legend",
"matplotlib.pyplot.show"
] |
[((111, 123), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (121, 123), True, 'import matplotlib.pyplot as plt\n'), ((128, 182), 'matplotlib.pyplot.plot', 'plt.plot', (['model.trainloss', '"""b-"""', 'model.validloss', '"""r-"""'], {}), "(model.trainloss, 'b-', model.validloss, 'r-')\n", (136, 182), True, 'import matplotlib.pyplot as plt\n'), ((187, 206), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', 'n_iter'], {}), '(0, n_iter)\n', (195, 206), True, 'import matplotlib.pyplot as plt\n'), ((211, 234), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""iteration"""'], {}), "('iteration')\n", (221, 234), True, 'import matplotlib.pyplot as plt\n'), ((239, 257), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""loss"""'], {}), "('loss')\n", (249, 257), True, 'import matplotlib.pyplot as plt\n'), ((262, 289), 'matplotlib.pyplot.title', 'plt.title', (['"""learning curve"""'], {}), "('learning curve')\n", (271, 289), True, 'import matplotlib.pyplot as plt\n'), ((294, 342), 'matplotlib.pyplot.legend', 'plt.legend', (["['training loss', 'validation loss']"], {}), "(['training loss', 'validation loss'])\n", (304, 342), True, 'import matplotlib.pyplot as plt\n'), ((347, 357), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (355, 357), True, 'import matplotlib.pyplot as plt\n'), ((400, 412), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (410, 412), True, 'import matplotlib.pyplot as plt\n'), ((417, 467), 'matplotlib.pyplot.plot', 'plt.plot', (['model.trainF1', '"""b-"""', 'model.validF1', '"""r-"""'], {}), "(model.trainF1, 'b-', model.validF1, 'r-')\n", (425, 467), True, 'import matplotlib.pyplot as plt\n'), ((472, 491), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0)', 'n_iter'], {}), '(0, n_iter)\n', (480, 491), True, 'import matplotlib.pyplot as plt\n'), ((496, 519), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""iteration"""'], {}), "('iteration')\n", (506, 519), True, 'import matplotlib.pyplot as plt\n'), ((524, 546), 
'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""F1 score"""'], {}), "('F1 score')\n", (534, 546), True, 'import matplotlib.pyplot as plt\n'), ((551, 579), 'matplotlib.pyplot.title', 'plt.title', (['"""F1 metric curve"""'], {}), "('F1 metric curve')\n", (560, 579), True, 'import matplotlib.pyplot as plt\n'), ((584, 647), 'matplotlib.pyplot.legend', 'plt.legend', (["['training F1', 'validation F1']"], {'loc': '"""lower right"""'}), "(['training F1', 'validation F1'], loc='lower right')\n", (594, 647), True, 'import matplotlib.pyplot as plt\n'), ((652, 662), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (660, 662), True, 'import matplotlib.pyplot as plt\n'), ((911, 951), 'numpy.hstack', 'np.hstack', (['(y_target, y_hat > threshold)'], {}), '((y_target, y_hat > threshold))\n', (920, 951), True, 'import numpy as np\n')]
|
#!/usr/bin/python
import logging

# Root logger captures everything; the handlers below do the per-sink filtering.
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)

# Console handler: show every record from DEBUG upward.
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)

# File handler: persist only WARNING and above to pythonLogging.log.
fh = logging.FileHandler('pythonLogging.log')
fh.setLevel(logging.WARNING)

# Shared record layout: timestamp, left-padded level name, message.
formatter = logging.Formatter("%(asctime)s %(levelname)-8s %(message)s")
ch.setFormatter(formatter)
fh.setFormatter(formatter)

logger.addHandler(ch)
logger.addHandler(fh)

# "application" code
logger.debug("debug message")
logger.info("info message")
# BUG FIX: Logger.warn() is a long-deprecated alias (removed in Python 3.13);
# warning() is the supported spelling and emits the same record.
logger.warning("warn message")
logger.error("error message")
logger.critical("critical message")
print('\nDone')
|
[
"logging.getLogger",
"logging.Formatter",
"logging.StreamHandler",
"logging.FileHandler"
] |
[((59, 78), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (76, 78), False, 'import logging\n'), ((163, 186), 'logging.StreamHandler', 'logging.StreamHandler', ([], {}), '()\n', (184, 186), False, 'import logging\n'), ((270, 310), 'logging.FileHandler', 'logging.FileHandler', (['"""pythonLogging.log"""'], {}), "('pythonLogging.log')\n", (289, 310), False, 'import logging\n'), ((371, 431), 'logging.Formatter', 'logging.Formatter', (['"""%(asctime)s %(levelname)-8s %(message)s"""'], {}), "('%(asctime)s %(levelname)-8s %(message)s')\n", (388, 431), False, 'import logging\n')]
|
from sys import argv
from server.AServer import AServer

# ``--old`` on the command line selects the legacy synchronous server;
# otherwise start the asynchronous server, optionally in websocket mode.
legacy_requested = '--old' in argv
if legacy_requested:
    from server.server import Server
    Server()
else:
    websocket_enabled = '--websocket' in argv
    AServer(websocket=websocket_enabled).Start()
|
[
"server.AServer.AServer",
"server.server.Server"
] |
[((114, 122), 'server.server.Server', 'Server', ([], {}), '()\n', (120, 122), False, 'from server.server import Server\n'), ((130, 170), 'server.AServer.AServer', 'AServer', ([], {'websocket': "('--websocket' in argv)"}), "(websocket='--websocket' in argv)\n", (137, 170), False, 'from server.AServer import AServer\n')]
|
#!/usr/bin/env python2.7
from common import *
from random import randint, choice
# Register-file address map.  The encoding order matters: the 8-bit
# registers occupy codes 0-7 and the 16-bit registers (pairs plus SP/PC)
# occupy codes 8-13, i.e. the MSB of the 4-bit code distinguishes widths.
_REGISTER_ORDER = (
    "a", "f", "b", "c", "d", "e", "h", "l",   # 8-bit registers: codes 0-7
    "af", "bc", "de", "hl", "sp", "pc",       # 16-bit registers: codes 8-13
)
registers = {name: code for code, name in enumerate(_REGISTER_ORDER)}
def output_line(fp, reg_write, reg_read, we,
        write_data, read_data, reg_w_name, reg_r_name):
    """Write one test vector line to *fp*.

    Format: write-addr read-addr we write-data read-data, followed by a
    ``#`` comment naming the two registers for human readers.
    """
    fields = (
        to_bin(reg_write, 4),
        to_bin(reg_read, 4),
        "1" if we else "0",
        to_bin(write_data, 16),
        to_bin(read_data, 16),
        reg_w_name,
        reg_r_name,
    )
    fp.write("%s %s %s %s %s #%s %s\n" % fields)
class Registers(object):
    """Software model of the register file used as the testbench reference.

    Eight 8-bit registers plus 16-bit SP and PC; the 16-bit pairs
    (af/bc/de/hl) are composed from their high/low 8-bit halves, so
    writing a pair updates both halves and reading a pair recombines them.
    """

    # Pair name -> (high half, low half), shared by read() and write().
    _PAIRS = {"af": ("a", "f"), "bc": ("b", "c"),
              "de": ("d", "e"), "hl": ("h", "l")}

    def __init__(self):
        self.regs = [0] * 8
        self.sp = 0
        self.pc = 0

    def write(self, reg, value):
        """Write ``value`` into register ``reg`` (8-bit values are masked)."""
        if reg in self._PAIRS:
            hi, lo = self._PAIRS[reg]
            self.regs[registers[hi]] = (value >> 8) & 0xff
            self.regs[registers[lo]] = (value >> 0) & 0xff
        elif reg == "sp":
            self.sp = value
        elif reg == "pc":
            self.pc = value
        else:
            self.regs[registers[reg]] = value & 0xff

    def read(self, reg):
        """Return the current value of register ``reg``."""
        if reg in self._PAIRS:
            hi, lo = self._PAIRS[reg]
            return self.regs[registers[hi]] << 8 | self.regs[registers[lo]]
        elif reg == "sp":
            return self.sp
        elif reg == "pc":
            return self.pc
        else:
            return self.regs[registers[reg]]

    def random_op(self):
        """Generate and apply one random operation; return the stimulus tuple."""
        we = randint(0, 1)
        # BUG FIX: list() keeps this working on Python 3, where dict.keys()
        # returns a non-indexable view that random.choice() rejects (the
        # original relied on the Python 2 list return value).
        reg_write = choice(list(registers))
        reg_read = choice(list(registers))
        write_data = randint(0, 0xffff)
        # Sample the read port BEFORE applying the write, matching the DUT.
        read_data = self.read(reg_read)
        if we:
            self.write(reg_write, write_data)
        return (registers[reg_write], registers[reg_read],
                we, write_data, read_data, reg_write, reg_read)
def main():
    """Generate 1,000,000 random register-file operations into registers.txt.

    Prints a percentage progress indicator every 10,000 operations.
    """
    reg = Registers()
    m = 1000000
    # BUG FIX: use a context manager so the output file is flushed and closed
    # even if a write fails (the original never called fp.close()).
    with open("registers.txt", "w") as fp:
        for i in xrange(m):
            if i % 10000 == 0:
                f = 100 * float(i) / float(m)
                print("%s" % f)
            output_line(fp, *reg.random_op())

if __name__ == "__main__":
    main()
|
[
"random.randint"
] |
[((2423, 2436), 'random.randint', 'randint', (['(0)', '(1)'], {}), '(0, 1)\n', (2430, 2436), False, 'from random import randint, choice\n'), ((2548, 2565), 'random.randint', 'randint', (['(0)', '(65535)'], {}), '(0, 65535)\n', (2555, 2565), False, 'from random import randint, choice\n')]
|
import os
import sys
def find_docs_root(start: str = None) -> str:
    """Return the absolute path of the nearest enclosing ``docs`` directory.

    Walks up the path components of *start* (defaulting to this file, which
    preserves the original behavior) until a component named ``docs`` is the
    last one, then rejoins the prefix.

    :param start: optional path to search from instead of ``__file__``.
    :raises ValueError: if no ``docs`` directory is found on the path
        (the original raised a bare ``IndexError`` from ``pop()``).
    """
    filepath = os.path.abspath(start if start is not None else __file__)
    path_chunks = filepath.split(os.path.sep)
    while path_chunks and path_chunks[-1] != "docs":
        path_chunks.pop()
    if not path_chunks:
        raise ValueError("no 'docs' directory found above %s" % filepath)
    return os.path.sep.join(path_chunks)
# Put the docs root on sys.path so the shared configuration modules below
# resolve regardless of where the build is invoked from.
sys.path.append(find_docs_root())
# Star-import the actual configuration values (Read the Docs, then Sphinx;
# later imports may override earlier names).
from _rtd_conf import *
from _sphinx_conf import *
|
[
"os.path.abspath",
"os.path.sep.join"
] |
[((67, 92), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (82, 92), False, 'import os\n'), ((213, 242), 'os.path.sep.join', 'os.path.sep.join', (['path_chunks'], {}), '(path_chunks)\n', (229, 242), False, 'import os\n')]
|
import socket
import click
import uvicorn # type: ignore
def get_address(default: str = "127.0.0.1") -> str:
    """Best-effort discovery of this machine's IP address.

    First tries resolving the local hostname; if that fails, opens a UDP
    socket "towards" 8.8.8.8 (no packet is actually sent) and reads the
    locally bound address.  Falls back to *default* if that also fails.
    """
    try:
        return socket.gethostbyname(socket.gethostname())
    except socket.gaierror:
        pass
    probe = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        probe.connect(("8.8.8.8", 1))
        return probe.getsockname()[0]
    except socket.gaierror:
        return default
    finally:
        probe.close()
@click.group()
@click.pass_context
def server(ctx):
    """Root CLI group; subcommands (e.g. ``run``) register themselves on it."""
    # ``ctx`` is the click context forwarded to subcommands by @click.pass_context.
    pass
@server.command()
@click.option("--host", default=None, help="Specify application host")
@click.option("--port", default=5000, help="Specify application port")
@click.pass_context
def run(ctx, host, port):
try:
port = int(port)
if port < 1024 and port > 65535:
raise RuntimeError("Port should be from 1024 to 65535")
except ValueError:
raise RuntimeError("Port should be numeric")
if not host:
host = "127.0.0.1"
address = "127.0.0.1"
else:
address = get_address()
uvicorn.run(
"graph:init",
host=address,
port=port,
access_log=False,
log_level="info",
log_config=None,
loop="uvloop",
factory=True,
)
|
[
"socket.socket",
"uvicorn.run",
"click.group",
"click.option",
"socket.gethostname"
] |
[((500, 513), 'click.group', 'click.group', ([], {}), '()\n', (511, 513), False, 'import click\n'), ((581, 650), 'click.option', 'click.option', (['"""--host"""'], {'default': 'None', 'help': '"""Specify application host"""'}), "('--host', default=None, help='Specify application host')\n", (593, 650), False, 'import click\n'), ((652, 721), 'click.option', 'click.option', (['"""--port"""'], {'default': '(5000)', 'help': '"""Specify application port"""'}), "('--port', default=5000, help='Specify application port')\n", (664, 721), False, 'import click\n'), ((1110, 1246), 'uvicorn.run', 'uvicorn.run', (['"""graph:init"""'], {'host': 'address', 'port': 'port', 'access_log': '(False)', 'log_level': '"""info"""', 'log_config': 'None', 'loop': '"""uvloop"""', 'factory': '(True)'}), "('graph:init', host=address, port=port, access_log=False,\n log_level='info', log_config=None, loop='uvloop', factory=True)\n", (1121, 1246), False, 'import uvicorn\n'), ((164, 184), 'socket.gethostname', 'socket.gethostname', ([], {}), '()\n', (182, 184), False, 'import socket\n'), ((226, 274), 'socket.socket', 'socket.socket', (['socket.AF_INET', 'socket.SOCK_DGRAM'], {}), '(socket.AF_INET, socket.SOCK_DGRAM)\n', (239, 274), False, 'import socket\n')]
|
import pytheia as pt
import os
import numpy as np
def test_track_set_descriptor_read_write():
    """Round-trip a track's reference descriptor through reconstruction I/O."""
    reconstruction = pt.sfm.Reconstruction()

    # Two estimated views observing the same track.
    first_view = reconstruction.AddView("0", 0.0)
    reconstruction.MutableView(first_view).IsEstimated = True
    second_view = reconstruction.AddView("1", 1.0)
    reconstruction.MutableView(second_view).IsEstimated = True

    track_id = reconstruction.AddTrack()
    track = reconstruction.MutableTrack(track_id)
    track.AddView(first_view)
    track.AddView(second_view)
    track.IsEstimated = True

    descriptor = np.asarray([100, 200, 300, 400])
    track.SetReferenceDescriptor(descriptor)
    assert (track.ReferenceDescriptor() == descriptor).all()

    # Serialize to disk, read it back, and verify the descriptor survived.
    pt.io.WriteReconstruction(reconstruction, "test")
    reloaded = pt.io.ReadReconstruction("test")[1]
    assert (reloaded.Track(track_id).ReferenceDescriptor() == descriptor).all()
    os.remove("test")

if __name__ == "__main__":
    test_track_set_descriptor_read_write()
|
[
"pytheia.io.ReadReconstruction",
"numpy.asarray",
"pytheia.sfm.Reconstruction",
"pytheia.io.WriteReconstruction",
"os.remove"
] |
[((107, 130), 'pytheia.sfm.Reconstruction', 'pt.sfm.Reconstruction', ([], {}), '()\n', (128, 130), True, 'import pytheia as pt\n'), ((523, 555), 'numpy.asarray', 'np.asarray', (['[100, 200, 300, 400]'], {}), '([100, 200, 300, 400])\n', (533, 555), True, 'import numpy as np\n'), ((678, 718), 'pytheia.io.WriteReconstruction', 'pt.io.WriteReconstruction', (['recon', '"""test"""'], {}), "(recon, 'test')\n", (703, 718), True, 'import pytheia as pt\n'), ((875, 892), 'os.remove', 'os.remove', (['"""test"""'], {}), "('test')\n", (884, 892), False, 'import os\n'), ((737, 769), 'pytheia.io.ReadReconstruction', 'pt.io.ReadReconstruction', (['"""test"""'], {}), "('test')\n", (761, 769), True, 'import pytheia as pt\n')]
|
# -*- coding: utf-8 -*-
"""
@author: <NAME>.
Department of Aerodynamics
Faculty of Aerospace Engineering
TU Delft, Delft, Netherlands
"""
from numpy import sin, cos, pi
from objects.CSCG._3d.exact_solutions.status.incompressible_Navier_Stokes.base import incompressible_NavierStokes_Base
from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField
# noinspection PyAbstractClass
class SinCosRebholz_Conservation(incompressible_NavierStokes_Base):
    """Inviscid sin/cos manufactured solution for conservation checks.

    See Section 5.2 of the paper:
    [An Energy- and helicity-conserving finite element scheme for the
    Navier-Stokes equations, <NAME>, 2007]

    The velocity field is steady and divergence-free and the body force is
    zero; with nu = 0 energy and helicity should be conserved.
    """

    def __init__(self, es):
        # Viscosity fixed to 0: this case tests conservation, not dissipation.
        super(SinCosRebholz_Conservation, self).__init__(es, 0)

    @property
    def valid_time(self):
        # The exact solution only serves to initialize the simulation.
        return 'valid_only_at_its_first_instant'

    # --- u = cos(2 pi z): depends on z only ---
    def u(self, t, x, y, z):
        return cos(2 * pi * z)

    def u_x(self, t, x, y, z):
        return 0 * x

    def u_y(self, t, x, y, z):
        return 0 * x

    def u_z(self, t, x, y, z):
        return -2 * pi * sin(2 * pi * z)

    # --- v = sin(2 pi z): depends on z only ---
    def v(self, t, x, y, z):
        return sin(2 * pi * z)

    def v_x(self, t, x, y, z):
        return 0 * x

    def v_y(self, t, x, y, z):
        return 0 * x

    def v_z(self, t, x, y, z):
        return 2 * pi * cos(2 * pi * z)

    # --- w = sin(2 pi x): depends on x only ---
    def w(self, t, x, y, z):
        return sin(2 * pi * x)

    def w_x(self, t, x, y, z):
        return 2 * pi * cos(2 * pi * x)

    def w_y(self, t, x, y, z):
        return 0 * x

    def w_z(self, t, x, y, z):
        return 0 * x

    # --- zero body-force components (cannot be named _fx_ / _fy_ / _fz_) ---
    def fx(self, t, x, y, z):
        return 0 * x

    def fy(self, t, x, y, z):
        return 0 * x

    def fz(self, t, x, y, z):
        return 0 * x

    @property
    def body_force(self):
        """Zero body force, cached so it is valid at all time instants."""
        if self._bodyForce_ is None:
            self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
        return self._bodyForce_
class SinCosRebholz_Dissipation(incompressible_NavierStokes_Base):
    """
    The sin cos test case for the conservation, see Section 5.3 of paper:
    [An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
    equations, <NAME>, 2007]

    Time-dependent manufactured solution with viscosity ``nu`` (default 1),
    used to test dissipation.  ``*_x``/``*_y``/``*_z`` are first spatial
    derivatives, ``*_t`` time derivatives and ``*_xx``/``*_yy``/``*_zz``
    second spatial derivatives of the corresponding velocity component;
    ``p`` is the pressure field.
    """
    def __init__(self, es, nu=1):
        super(SinCosRebholz_Dissipation, self).__init__(es, nu)
    # u = (2 - t) * cos(2 pi z): depends on z and t only.
    def u(self, t, x, y, z): return (2 - t) * cos(2 * pi * z)
    def u_x(self, t, x, y, z): return 0 * x
    def u_y(self, t, x, y, z): return 0 * x
    def u_z(self, t, x, y, z): return - 2 * pi * (2 - t) * sin(2 * pi * z)
    def u_t(self, t, x, y, z): return - cos(2 * pi * z)
    def u_xx(self, t, x, y, z): return 0 * x
    def u_yy(self, t, x, y, z): return 0 * y
    def u_zz(self, t, x, y, z): return -4 * pi ** 2 * (2 - t) * cos(2 * pi * z)
    # v = (1 + t) * sin(2 pi z): depends on z and t only.
    def v(self, t, x, y, z): return (1 + t) * sin(2 * pi * z)
    def v_x(self, t, x, y, z): return 0 * x
    def v_y(self, t, x, y, z): return 0 * x
    def v_z(self, t, x, y, z): return 2 * pi * (1 + t) * cos(2 * pi * z)
    def v_t(self, t, x, y, z): return sin(2 * pi * z)
    def v_xx(self, t, x, y, z): return 0 * x
    def v_yy(self, t, x, y, z): return 0 * x
    def v_zz(self, t, x, y, z): return - 4 * pi ** 2 * (1 + t) * sin(2 * pi * z)
    # w = (1 - t) * sin(2 pi x): depends on x and t only.
    def w(self, t, x, y, z): return (1 - t) * sin(2 * pi * x)
    def w_x(self, t, x, y, z): return 2 * pi * (1 - t) * cos(2 * pi * x)
    def w_y(self, t, x, y, z): return 0 * x
    def w_z(self, t, x, y, z): return 0 * x
    def w_t(self, t, x, y, z): return - sin(2 * pi * x)
    def w_xx(self, t, x, y, z): return - 4 * pi ** 2 * (1 - t) * sin(2 * pi * x)
    def w_yy(self, t, x, y, z): return 0 * x
    def w_zz(self, t, x, y, z): return 0 * x
    # Pressure p = sin(2 pi (x + y + z + t)) and its spatial gradient.
    def p(self, t, x, y, z): return sin(2 * pi * (x + y + z + t))
    def p_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
    def p_y(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
    def p_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
class SinCos_Modified_Dissipation(incompressible_NavierStokes_Base):
    """A modified case that the solution along t is not linear.

    Same spatial structure as ``SinCosRebholz_Dissipation`` but the time
    factors are sinusoidal (``1 -/+ sin/cos(2 pi t)``) rather than linear.
    ``*_t`` are time derivatives and ``*_xx``/``*_yy``/``*_zz`` second
    spatial derivatives; ``p`` is the pressure field.
    """
    def __init__(self, es, nu=1):
        super(SinCos_Modified_Dissipation, self).__init__(es, nu)
    # u = (1 - sin(2 pi t)) * cos(2 pi z)
    def u(self, t, x, y, z): return (1 - sin(2*pi*t)) * cos(2 * pi * z)
    def u_x(self, t, x, y, z): return 0 * x
    def u_y(self, t, x, y, z): return 0 * x
    def u_z(self, t, x, y, z): return - 2 * pi * (1 - sin(2*pi*t)) * sin(2 * pi * z)
    def u_t(self, t, x, y, z): return - 2*pi*cos(2*pi*t) * cos(2 * pi * z)
    def u_xx(self, t, x, y, z): return 0 * x
    def u_yy(self, t, x, y, z): return 0 * y
    def u_zz(self, t, x, y, z): return -4 * pi ** 2 * (1 - sin(2*pi*t)) * cos(2 * pi * z)
    # v = (1 + cos(2 pi t)) * sin(2 pi z)
    def v(self, t, x, y, z): return (1 + cos(2*pi*t)) * sin(2 * pi * z)
    def v_x(self, t, x, y, z): return 0 * x
    def v_y(self, t, x, y, z): return 0 * x
    def v_z(self, t, x, y, z): return 2 * pi * (1 + cos(2*pi*t)) * cos(2 * pi * z)
    def v_t(self, t, x, y, z): return -2*pi*sin(2*pi*t) * sin(2 * pi * z)
    def v_xx(self, t, x, y, z): return 0 * x
    def v_yy(self, t, x, y, z): return 0 * x
    def v_zz(self, t, x, y, z): return - 4 * pi ** 2 * (1 + cos(2*pi*t)) * sin(2 * pi * z)
    # w = (1 - sin(2 pi t)) * sin(2 pi x)
    def w(self, t, x, y, z): return (1 - sin(2*pi*t)) * sin(2 * pi * x)
    def w_x(self, t, x, y, z): return 2 * pi * (1 - sin(2*pi*t)) * cos(2 * pi * x)
    def w_y(self, t, x, y, z): return 0 * x
    def w_z(self, t, x, y, z): return 0 * x
    def w_t(self, t, x, y, z): return - 2*pi*cos(2*pi*t) * sin(2 * pi * x)
    def w_xx(self, t, x, y, z): return - 4 * pi ** 2 * (1 - sin(2*pi*t)) * sin(2 * pi * x)
    def w_yy(self, t, x, y, z): return 0 * x
    def w_zz(self, t, x, y, z): return 0 * x
    # Pressure p = sin(2 pi (x + y + z + t)) and its spatial gradient.
    def p(self, t, x, y, z): return sin(2 * pi * (x + y + z + t))
    def p_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
    def p_y(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
    def p_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * (x + y + z + t))
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force(incompressible_NavierStokes_Base):
    """
    The sin cos test case for the conservation, see Section 5.2 of paper:
    [An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
    equations, <NAME>, 2007]

    Same steady, divergence-free velocity field as
    ``SinCosRebholz_Conservation`` (nu = 0), but driven by a conservative
    body force: (fx, fy, fz) is the spatial gradient of the time-scaled
    potential ``varphi`` given below, so it should not disturb conservation.
    """
    def __init__(self, es):
        super(SinCos_Conservation_Conservative_Body_Force, self).__init__(es, 0)
    @property
    def valid_time(self):
        # The exact solution only serves to initialize the simulation.
        return 'valid_only_at_its_first_instant'
    def u(self, t, x, y, z): return cos(2 * pi * z)
    def u_x(self, t, x, y, z): return 0 * x
    def u_y(self, t, x, y, z): return 0 * x
    def u_z(self, t, x, y, z): return -2 * pi * sin(2 * pi * z)
    def v(self, t, x, y, z): return sin(2 * pi * z)
    def v_x(self, t, x, y, z): return 0 * x
    def v_y(self, t, x, y, z): return 0 * x
    def v_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * z)
    def w(self, t, x, y, z): return sin(2 * pi * x)
    def w_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * x)
    def w_y(self, t, x, y, z): return 0 * x
    def w_z(self, t, x, y, z): return 0 * x
    # varphi(t,x,y,z) = t * sin(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
    # (fx, fy, fz) = grad(varphi):
    def fx(self, t, x, y, z): return 2 * pi * t * cos(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
    def fy(self, t, x, y, z): return 2 * pi * t * sin(2 * pi * x) * cos(2 * pi * y) * sin(2 * pi * z)
    def fz(self, t, x, y, z): return 2 * pi * t * sin(2 * pi * x) * sin(2 * pi * y) * cos(2 * pi * z)
    @property
    def body_force(self):
        """This makes body force valid at all time instants."""
        if self._bodyForce_ is None:
            self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
        return self._bodyForce_
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force1(incompressible_NavierStokes_Base):
    """
    The sin cos test case for the conservation, see Section 5.2 of paper:
    [An Energy- and helicity-conserving finite element scheme for the Navier-Stokes
    equations, <NAME>, 2007]

    Variant of ``SinCos_Conservation_Conservative_Body_Force`` whose
    conservative body force is time-independent: (fx, fy, fz) is the
    gradient of the potential ``varphi`` below (no ``t`` factor).
    """
    def __init__(self, es):
        super(SinCos_Conservation_Conservative_Body_Force1, self).__init__(es, 0)
    @property
    def valid_time(self):
        # The exact solution only serves to initialize the simulation.
        return 'valid_only_at_its_first_instant'
    def u(self, t, x, y, z): return cos(2 * pi * z)
    def u_x(self, t, x, y, z): return 0 * x
    def u_y(self, t, x, y, z): return 0 * x
    def u_z(self, t, x, y, z): return -2 * pi * sin(2 * pi * z)
    def v(self, t, x, y, z): return sin(2 * pi * z)
    def v_x(self, t, x, y, z): return 0 * x
    def v_y(self, t, x, y, z): return 0 * x
    def v_z(self, t, x, y, z): return 2 * pi * cos(2 * pi * z)
    def w(self, t, x, y, z): return sin(2 * pi * x)
    def w_x(self, t, x, y, z): return 2 * pi * cos(2 * pi * x)
    def w_y(self, t, x, y, z): return 0 * x
    def w_z(self, t, x, y, z): return 0 * x
    # varphi(t,x,y,z) = sin(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
    # (fx, fy, fz) = grad(varphi):
    def fx(self, t, x, y, z): return 2 * pi * cos(2 * pi * x) * sin(2 * pi * y) * sin(2 * pi * z)
    def fy(self, t, x, y, z): return 2 * pi * sin(2 * pi * x) * cos(2 * pi * y) * sin(2 * pi * z)
    def fz(self, t, x, y, z): return 2 * pi * sin(2 * pi * x) * sin(2 * pi * y) * cos(2 * pi * z)
    @property
    def body_force(self):
        """This makes body force valid at all time instants."""
        if self._bodyForce_ is None:
            self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
        return self._bodyForce_
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force_POLYNOMIALS(incompressible_NavierStokes_Base):
    """Conservation sin/cos case with a conservative polynomial body force.

    See Section 5.2 of the paper:
    [An Energy- and helicity-conserving finite element scheme for the
    Navier-Stokes equations, <NAME>, 2007]

    Same steady, divergence-free velocity field as the other conservation
    cases (nu = 0); the body force is the gradient of the potential
    phi(t,x,y,z) = t * (x**3/3 - x**2/2 + y**3/3 - y**2/2 + z**3/3 - z**2/2).
    """

    def __init__(self, es):
        # Viscosity fixed to 0: conservation test.
        super(SinCos_Conservation_Conservative_Body_Force_POLYNOMIALS, self).__init__(es, 0)

    @property
    def valid_time(self):
        # The exact solution only serves to initialize the simulation.
        return 'valid_only_at_its_first_instant'

    # --- u = cos(2 pi z) ---
    def u(self, t, x, y, z):
        return cos(2 * pi * z)

    def u_x(self, t, x, y, z):
        return 0 * x

    def u_y(self, t, x, y, z):
        return 0 * x

    def u_z(self, t, x, y, z):
        return -2 * pi * sin(2 * pi * z)

    # --- v = sin(2 pi z) ---
    def v(self, t, x, y, z):
        return sin(2 * pi * z)

    def v_x(self, t, x, y, z):
        return 0 * x

    def v_y(self, t, x, y, z):
        return 0 * x

    def v_z(self, t, x, y, z):
        return 2 * pi * cos(2 * pi * z)

    # --- w = sin(2 pi x) ---
    def w(self, t, x, y, z):
        return sin(2 * pi * x)

    def w_x(self, t, x, y, z):
        return 2 * pi * cos(2 * pi * x)

    def w_y(self, t, x, y, z):
        return 0 * x

    def w_z(self, t, x, y, z):
        return 0 * x

    # --- body force = grad(phi), each component t * s * (s - 1) ---
    def fx(self, t, x, y, z):
        return t * x * (x - 1)

    def fy(self, t, x, y, z):
        return t * y * (y - 1)

    def fz(self, t, x, y, z):
        return t * z * (z - 1)

    @property
    def body_force(self):
        """Conservative body force, cached so it is valid at all time instants."""
        if self._bodyForce_ is None:
            self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
        return self._bodyForce_
# noinspection PyAbstractClass
class SinCos_Conservation_Conservative_Body_Force_CONSTANT(incompressible_NavierStokes_Base):
    """Conservation sin/cos case with a constant conservative body force.

    See Section 5.2 of the paper:
    [An Energy- and helicity-conserving finite element scheme for the
    Navier-Stokes equations, <NAME>, 2007]

    Same steady, divergence-free velocity field as the other conservation
    cases (nu = 0); the body force is the gradient of phi(t,x,y,z) = x,
    i.e. the constant vector (1, 0, 0).
    """

    def __init__(self, es):
        # Viscosity fixed to 0: conservation test.
        super(SinCos_Conservation_Conservative_Body_Force_CONSTANT, self).__init__(es, 0)

    @property
    def valid_time(self):
        # The exact solution only serves to initialize the simulation.
        return 'valid_only_at_its_first_instant'

    # --- u = cos(2 pi z) ---
    def u(self, t, x, y, z):
        return cos(2 * pi * z)

    def u_x(self, t, x, y, z):
        return 0 * x

    def u_y(self, t, x, y, z):
        return 0 * x

    def u_z(self, t, x, y, z):
        return -2 * pi * sin(2 * pi * z)

    # --- v = sin(2 pi z) ---
    def v(self, t, x, y, z):
        return sin(2 * pi * z)

    def v_x(self, t, x, y, z):
        return 0 * x

    def v_y(self, t, x, y, z):
        return 0 * x

    def v_z(self, t, x, y, z):
        return 2 * pi * cos(2 * pi * z)

    # --- w = sin(2 pi x) ---
    def w(self, t, x, y, z):
        return sin(2 * pi * x)

    def w_x(self, t, x, y, z):
        return 2 * pi * cos(2 * pi * x)

    def w_y(self, t, x, y, z):
        return 0 * x

    def w_z(self, t, x, y, z):
        return 0 * x

    # --- body force grad(x) = (1, 0, 0); the 0*x*y*z term keeps the
    #     broadcast shape of the coordinate arrays ---
    def fx(self, t, x, y, z):
        return 1 + 0 * x * y * z

    def fy(self, t, x, y, z):
        return 0 + 0 * x * y * z

    def fz(self, t, x, y, z):
        return 0 + 0 * x * y * z

    @property
    def body_force(self):
        """Constant body force, cached so it is valid at all time instants."""
        if self._bodyForce_ is None:
            self._bodyForce_ = _3dCSCG_VectorField(self.mesh, (self.fx, self.fy, self.fz))
        return self._bodyForce_
|
[
"numpy.sin",
"objects.CSCG._3d.fields.vector.main._3dCSCG_VectorField",
"numpy.cos"
] |
[((918, 933), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (921, 933), False, 'from numpy import sin, cos, pi\n'), ((1126, 1141), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (1129, 1141), False, 'from numpy import sin, cos, pi\n'), ((1333, 1348), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (1336, 1348), False, 'from numpy import sin, cos, pi\n'), ((3086, 3101), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (3089, 3101), False, 'from numpy import sin, cos, pi\n'), ((3771, 3800), 'numpy.sin', 'sin', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (3774, 3800), False, 'from numpy import sin, cos, pi\n'), ((5833, 5862), 'numpy.sin', 'sin', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (5836, 5862), False, 'from numpy import sin, cos, pi\n'), ((6664, 6679), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (6667, 6679), False, 'from numpy import sin, cos, pi\n'), ((6872, 6887), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (6875, 6887), False, 'from numpy import sin, cos, pi\n'), ((7079, 7094), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (7082, 7094), False, 'from numpy import sin, cos, pi\n'), ((8473, 8488), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (8476, 8488), False, 'from numpy import sin, cos, pi\n'), ((8681, 8696), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (8684, 8696), False, 'from numpy import sin, cos, pi\n'), ((8888, 8903), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (8891, 8903), False, 'from numpy import sin, cos, pi\n'), ((10288, 10303), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (10291, 10303), False, 'from numpy import sin, cos, pi\n'), ((10496, 10511), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (10499, 10511), False, 'from numpy import sin, cos, pi\n'), ((10703, 10718), 'numpy.sin', 'sin', (['(2 * pi 
* x)'], {}), '(2 * pi * x)\n', (10706, 10718), False, 'from numpy import sin, cos, pi\n'), ((11962, 11977), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (11965, 11977), False, 'from numpy import sin, cos, pi\n'), ((12170, 12185), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (12173, 12185), False, 'from numpy import sin, cos, pi\n'), ((12377, 12392), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (12380, 12392), False, 'from numpy import sin, cos, pi\n'), ((1073, 1088), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (1076, 1088), False, 'from numpy import sin, cos, pi\n'), ((1280, 1295), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (1283, 1295), False, 'from numpy import sin, cos, pi\n'), ((1397, 1412), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (1400, 1412), False, 'from numpy import sin, cos, pi\n'), ((1888, 1947), 'objects.CSCG._3d.fields.vector.main._3dCSCG_VectorField', '_3dCSCG_VectorField', (['self.mesh', '(self.fx, self.fy, self.fz)'], {}), '(self.mesh, (self.fx, self.fy, self.fz))\n', (1907, 1947), False, 'from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField\n'), ((2408, 2423), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (2411, 2423), False, 'from numpy import sin, cos, pi\n'), ((2574, 2589), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (2577, 2589), False, 'from numpy import sin, cos, pi\n'), ((2631, 2646), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (2634, 2646), False, 'from numpy import sin, cos, pi\n'), ((2804, 2819), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (2807, 2819), False, 'from numpy import sin, cos, pi\n'), ((2867, 2882), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (2870, 2882), False, 'from numpy import sin, cos, pi\n'), ((3031, 3046), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (3034, 3046), False, 'from numpy import sin, 
cos, pi\n'), ((3260, 3275), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (3263, 3275), False, 'from numpy import sin, cos, pi\n'), ((3323, 3338), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (3326, 3338), False, 'from numpy import sin, cos, pi\n'), ((3397, 3412), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (3400, 3412), False, 'from numpy import sin, cos, pi\n'), ((3544, 3559), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (3547, 3559), False, 'from numpy import sin, cos, pi\n'), ((3626, 3641), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (3629, 3641), False, 'from numpy import sin, cos, pi\n'), ((3849, 3878), 'numpy.cos', 'cos', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (3852, 3878), False, 'from numpy import sin, cos, pi\n'), ((3927, 3956), 'numpy.cos', 'cos', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (3930, 3956), False, 'from numpy import sin, cos, pi\n'), ((4005, 4034), 'numpy.cos', 'cos', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (4008, 4034), False, 'from numpy import sin, cos, pi\n'), ((4332, 4347), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (4335, 4347), False, 'from numpy import sin, cos, pi\n'), ((4508, 4523), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (4511, 4523), False, 'from numpy import sin, cos, pi\n'), ((4584, 4599), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (4587, 4599), False, 'from numpy import sin, cos, pi\n'), ((4767, 4782), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (4770, 4782), False, 'from numpy import sin, cos, pi\n'), ((4840, 4855), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (4843, 4855), False, 'from numpy import sin, cos, pi\n'), ((5014, 5029), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (5017, 5029), False, 'from numpy import sin, cos, pi\n'), ((5089, 
5104), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (5092, 5104), False, 'from numpy import sin, cos, pi\n'), ((5273, 5288), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (5276, 5288), False, 'from numpy import sin, cos, pi\n'), ((5346, 5361), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (5349, 5361), False, 'from numpy import sin, cos, pi\n'), ((5430, 5445), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (5433, 5445), False, 'from numpy import sin, cos, pi\n'), ((5596, 5611), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (5599, 5611), False, 'from numpy import sin, cos, pi\n'), ((5688, 5703), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (5691, 5703), False, 'from numpy import sin, cos, pi\n'), ((5911, 5940), 'numpy.cos', 'cos', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (5914, 5940), False, 'from numpy import sin, cos, pi\n'), ((5989, 6018), 'numpy.cos', 'cos', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (5992, 6018), False, 'from numpy import sin, cos, pi\n'), ((6067, 6096), 'numpy.cos', 'cos', (['(2 * pi * (x + y + z + t))'], {}), '(2 * pi * (x + y + z + t))\n', (6070, 6096), False, 'from numpy import sin, cos, pi\n'), ((6819, 6834), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (6822, 6834), False, 'from numpy import sin, cos, pi\n'), ((7026, 7041), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (7029, 7041), False, 'from numpy import sin, cos, pi\n'), ((7143, 7158), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (7146, 7158), False, 'from numpy import sin, cos, pi\n'), ((7418, 7433), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (7421, 7433), False, 'from numpy import sin, cos, pi\n'), ((7521, 7536), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (7524, 7536), False, 'from numpy import sin, cos, pi\n'), ((7624, 7639), 'numpy.cos', 
'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (7627, 7639), False, 'from numpy import sin, cos, pi\n'), ((7814, 7873), 'objects.CSCG._3d.fields.vector.main._3dCSCG_VectorField', '_3dCSCG_VectorField', (['self.mesh', '(self.fx, self.fy, self.fz)'], {}), '(self.mesh, (self.fx, self.fy, self.fz))\n', (7833, 7873), False, 'from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField\n'), ((8628, 8643), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (8631, 8643), False, 'from numpy import sin, cos, pi\n'), ((8835, 8850), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (8838, 8850), False, 'from numpy import sin, cos, pi\n'), ((8952, 8967), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (8955, 8967), False, 'from numpy import sin, cos, pi\n'), ((9219, 9234), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (9222, 9234), False, 'from numpy import sin, cos, pi\n'), ((9318, 9333), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (9321, 9333), False, 'from numpy import sin, cos, pi\n'), ((9417, 9432), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (9420, 9432), False, 'from numpy import sin, cos, pi\n'), ((9607, 9666), 'objects.CSCG._3d.fields.vector.main._3dCSCG_VectorField', '_3dCSCG_VectorField', (['self.mesh', '(self.fx, self.fy, self.fz)'], {}), '(self.mesh, (self.fx, self.fy, self.fz))\n', (9626, 9666), False, 'from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField\n'), ((10443, 10458), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (10446, 10458), False, 'from numpy import sin, cos, pi\n'), ((10650, 10665), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (10653, 10665), False, 'from numpy import sin, cos, pi\n'), ((10767, 10782), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (10770, 10782), False, 'from numpy import sin, cos, pi\n'), ((11284, 11343), 'objects.CSCG._3d.fields.vector.main._3dCSCG_VectorField', 
'_3dCSCG_VectorField', (['self.mesh', '(self.fx, self.fy, self.fz)'], {}), '(self.mesh, (self.fx, self.fy, self.fz))\n', (11303, 11343), False, 'from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField\n'), ((12117, 12132), 'numpy.sin', 'sin', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (12120, 12132), False, 'from numpy import sin, cos, pi\n'), ((12324, 12339), 'numpy.cos', 'cos', (['(2 * pi * z)'], {}), '(2 * pi * z)\n', (12327, 12339), False, 'from numpy import sin, cos, pi\n'), ((12441, 12456), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (12444, 12456), False, 'from numpy import sin, cos, pi\n'), ((12914, 12973), 'objects.CSCG._3d.fields.vector.main._3dCSCG_VectorField', '_3dCSCG_VectorField', (['self.mesh', '(self.fx, self.fy, self.fz)'], {}), '(self.mesh, (self.fx, self.fy, self.fz))\n', (12933, 12973), False, 'from objects.CSCG._3d.fields.vector.main import _3dCSCG_VectorField\n'), ((4317, 4332), 'numpy.sin', 'sin', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (4320, 4332), False, 'from numpy import sin, cos, pi\n'), ((4570, 4585), 'numpy.cos', 'cos', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (4573, 4585), False, 'from numpy import sin, cos, pi\n'), ((4825, 4840), 'numpy.cos', 'cos', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (4828, 4840), False, 'from numpy import sin, cos, pi\n'), ((5075, 5090), 'numpy.sin', 'sin', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (5078, 5090), False, 'from numpy import sin, cos, pi\n'), ((5331, 5346), 'numpy.sin', 'sin', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (5334, 5346), False, 'from numpy import sin, cos, pi\n'), ((5582, 5597), 'numpy.cos', 'cos', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (5585, 5597), False, 'from numpy import sin, cos, pi\n'), ((7400, 7415), 'numpy.sin', 'sin', (['(2 * pi * y)'], {}), '(2 * pi * y)\n', (7403, 7415), False, 'from numpy import sin, cos, pi\n'), ((7503, 7518), 'numpy.cos', 'cos', (['(2 * pi * y)'], {}), '(2 * pi * y)\n', (7506, 7518), False, 'from numpy import sin, 
cos, pi\n'), ((7606, 7621), 'numpy.sin', 'sin', (['(2 * pi * y)'], {}), '(2 * pi * y)\n', (7609, 7621), False, 'from numpy import sin, cos, pi\n'), ((9201, 9216), 'numpy.sin', 'sin', (['(2 * pi * y)'], {}), '(2 * pi * y)\n', (9204, 9216), False, 'from numpy import sin, cos, pi\n'), ((9300, 9315), 'numpy.cos', 'cos', (['(2 * pi * y)'], {}), '(2 * pi * y)\n', (9303, 9315), False, 'from numpy import sin, cos, pi\n'), ((9399, 9414), 'numpy.sin', 'sin', (['(2 * pi * y)'], {}), '(2 * pi * y)\n', (9402, 9414), False, 'from numpy import sin, cos, pi\n'), ((4493, 4508), 'numpy.sin', 'sin', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (4496, 4508), False, 'from numpy import sin, cos, pi\n'), ((4752, 4767), 'numpy.sin', 'sin', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (4755, 4767), False, 'from numpy import sin, cos, pi\n'), ((4999, 5014), 'numpy.cos', 'cos', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (5002, 5014), False, 'from numpy import sin, cos, pi\n'), ((5258, 5273), 'numpy.cos', 'cos', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (5261, 5273), False, 'from numpy import sin, cos, pi\n'), ((5415, 5430), 'numpy.sin', 'sin', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (5418, 5430), False, 'from numpy import sin, cos, pi\n'), ((5673, 5688), 'numpy.sin', 'sin', (['(2 * pi * t)'], {}), '(2 * pi * t)\n', (5676, 5688), False, 'from numpy import sin, cos, pi\n'), ((7382, 7397), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (7385, 7397), False, 'from numpy import sin, cos, pi\n'), ((7485, 7500), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (7488, 7500), False, 'from numpy import sin, cos, pi\n'), ((7588, 7603), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (7591, 7603), False, 'from numpy import sin, cos, pi\n'), ((9183, 9198), 'numpy.cos', 'cos', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (9186, 9198), False, 'from numpy import sin, cos, pi\n'), ((9282, 9297), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (9285, 9297), 
False, 'from numpy import sin, cos, pi\n'), ((9381, 9396), 'numpy.sin', 'sin', (['(2 * pi * x)'], {}), '(2 * pi * x)\n', (9384, 9396), False, 'from numpy import sin, cos, pi\n')]
|
import os
BEHIND_REVERSE_PROXY = bool(os.environ.get('BBBS_BEHIND_REVERSE_PROXY', False))
POSTS_PER_PAGE = 25
TEMPLATES_AUTO_RELOAD = True
RECAPTCHA_ENABLED = os.environ.get('BBBS_RECAPTCHA_ENABLED', False)
RECAPTCHA_SITE_KEY = os.environ.get('BBBS_RECAPTCHA_SITE_KEY', 'CHANGEGME')
RECAPTCHA_SECRET_KEY = os.environ.get('BBS_RECAPTCHA_SECRET_KEY', 'CHANGEME')
SECRET_KEY = os.environ.get('BBBS_SECRET_KEY', 'PLEASE CHANGE ME')
SECRET_SALT = os.environ.get('BBBS_SECRET_SALT', 'CHANGEME')
SQLALCHEMY_DATABASE_URI = os.environ.get('BBBS_DB_STRING', 'sqlite:///test.db')
SITE_TAGLINE = os.environ.get('BBBS_SITE_TAGLINE', 'some tagline')
SITE_TITLE = os.environ.get('BBBS_SITE_TAGLINE', 'super title')
SITE_FOOTER = os.environ.get(
'BBBS_SITE_FOOTER',
'<a href="https://github.com/kawa-kokosowa/bubblebbs">Powered by BubbleBBS</a>',
)
RATELIMIT_STORAGE_URL = os.environ.get('BBBS_RATELIMIT_STORAGE_URL', 'redis://localhost:6379/1')
RATELIMIT_DEFAULT = "400 per day, 100 per hour"
RATELIMIT_ENABLED = True
RATELIMIT_LIST_THREADS = "20 per minute, 1 per second"
RATELIMIT_VIEW_SPECIFIC_POST = "20 per minute, 1 per second"
RATELIMIT_NEW_REPLY = "20 per hour, 1 per second, 2 per minute"
RATELIMIT_VIEW_TRIP_META = "50 per hour, 15 per minute"
RATELIMIT_EDIT_TRIP_META = "60 per hour, 1 per second, 4 per minute"
RATELIMIT_MANAGE_COOKIE = '60 per hour, 1 per second, 7 per minute'
RATELIMIT_CREATE_THREAD = '700 per hour, 100 per minute'
RATELIMIT_NEW_THREAD_FORM = '60 per hour, 1 per second'
|
[
"os.environ.get"
] |
[((163, 210), 'os.environ.get', 'os.environ.get', (['"""BBBS_RECAPTCHA_ENABLED"""', '(False)'], {}), "('BBBS_RECAPTCHA_ENABLED', False)\n", (177, 210), False, 'import os\n'), ((232, 286), 'os.environ.get', 'os.environ.get', (['"""BBBS_RECAPTCHA_SITE_KEY"""', '"""CHANGEGME"""'], {}), "('BBBS_RECAPTCHA_SITE_KEY', 'CHANGEGME')\n", (246, 286), False, 'import os\n'), ((310, 364), 'os.environ.get', 'os.environ.get', (['"""BBS_RECAPTCHA_SECRET_KEY"""', '"""CHANGEME"""'], {}), "('BBS_RECAPTCHA_SECRET_KEY', 'CHANGEME')\n", (324, 364), False, 'import os\n'), ((379, 432), 'os.environ.get', 'os.environ.get', (['"""BBBS_SECRET_KEY"""', '"""PLEASE CHANGE ME"""'], {}), "('BBBS_SECRET_KEY', 'PLEASE CHANGE ME')\n", (393, 432), False, 'import os\n'), ((447, 493), 'os.environ.get', 'os.environ.get', (['"""BBBS_SECRET_SALT"""', '"""CHANGEME"""'], {}), "('BBBS_SECRET_SALT', 'CHANGEME')\n", (461, 493), False, 'import os\n'), ((520, 573), 'os.environ.get', 'os.environ.get', (['"""BBBS_DB_STRING"""', '"""sqlite:///test.db"""'], {}), "('BBBS_DB_STRING', 'sqlite:///test.db')\n", (534, 573), False, 'import os\n'), ((590, 641), 'os.environ.get', 'os.environ.get', (['"""BBBS_SITE_TAGLINE"""', '"""some tagline"""'], {}), "('BBBS_SITE_TAGLINE', 'some tagline')\n", (604, 641), False, 'import os\n'), ((655, 705), 'os.environ.get', 'os.environ.get', (['"""BBBS_SITE_TAGLINE"""', '"""super title"""'], {}), "('BBBS_SITE_TAGLINE', 'super title')\n", (669, 705), False, 'import os\n'), ((720, 844), 'os.environ.get', 'os.environ.get', (['"""BBBS_SITE_FOOTER"""', '"""<a href="https://github.com/kawa-kokosowa/bubblebbs">Powered by BubbleBBS</a>"""'], {}), '(\'BBBS_SITE_FOOTER\',\n \'<a href="https://github.com/kawa-kokosowa/bubblebbs">Powered by BubbleBBS</a>\'\n )\n', (734, 844), False, 'import os\n'), ((872, 944), 'os.environ.get', 'os.environ.get', (['"""BBBS_RATELIMIT_STORAGE_URL"""', '"""redis://localhost:6379/1"""'], {}), "('BBBS_RATELIMIT_STORAGE_URL', 'redis://localhost:6379/1')\n", (886, 944), 
False, 'import os\n'), ((40, 90), 'os.environ.get', 'os.environ.get', (['"""BBBS_BEHIND_REVERSE_PROXY"""', '(False)'], {}), "('BBBS_BEHIND_REVERSE_PROXY', False)\n", (54, 90), False, 'import os\n')]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.