text | repo_name | path | language | license | size | score
---|---|---|---|---|---|---|
##############################################################################
#
# OSIS stands for Open Student Information System. It's an application
# designed to manage the core business of higher education institutions,
# such as universities, faculties, institutes and professional schools.
# The core business involves the administration of students, teachers,
# courses, programs and so on.
#
# Copyright (C) 2015-2019 Université catholique de Louvain (http://www.uclouvain.be)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# A copy of this license - GNU General Public License - is available
# at the root of the source code of this program. If not,
# see http://www.gnu.org/licenses/.
#
##############################################################################
import datetime
import operator
import string
import random
import factory.fuzzy
from base.models.enums.quadrimesters import DerogationQuadrimester
from base.tests.factories.utils.fuzzy import FuzzyBoolean
from program_management.tests.factories.element import ElementGroupYearFactory, ElementLearningUnitYearFactory
def _generate_block_value():
"""Generate a random string composed of digit between 1 and 6 included.
Each digit can be represented at most once in the string and they are sorted from smallest to greatest.
Ex: "", "156", "2", "456" and so on
"""
population = list(range(1, 7))
k = random.randint(0, len(population))
sample = random.sample(population, k)
sample.sort()
return int("".join([str(element) for element in sample])) if sample else None
class GroupElementYearFactory(factory.django.DjangoModelFactory):
class Meta:
model = "base.GroupElementYear"
django_get_or_create = ('parent_element', 'child_element')
external_id = factory.fuzzy.FuzzyText(length=10, chars=string.digits)
changed = factory.fuzzy.FuzzyNaiveDateTime(datetime.datetime(2016, 1, 1), datetime.datetime(2017, 3, 1))
parent_element = factory.SubFactory(ElementGroupYearFactory)
child_element = factory.SubFactory(ElementGroupYearFactory)
relative_credits = factory.fuzzy.FuzzyInteger(0, 10)
is_mandatory = FuzzyBoolean()
link_type = None
order = None
block = factory.LazyFunction(_generate_block_value)
class GroupElementYearChildLeafFactory(GroupElementYearFactory):
child_element = factory.SubFactory(ElementLearningUnitYearFactory)
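# Usage sketch (a minimal example, assuming Django test settings with a
# database are configured; only names defined above are used):
#
#     link = GroupElementYearFactory()            # group -> group link
#     leaf = GroupElementYearChildLeafFactory()   # group -> learning unit link
#     print(link.block)                           # e.g. None, 2, 156, 456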
| uclouvain/OSIS-Louvain | base/tests/factories/group_element_year.py | Python | agpl-3.0 | 2,912 | 0.002061 |
# Adapted from ASE https://wiki.fysik.dtu.dk/ase/
#
#
# Copyright (C) 2010, Jesper Friis
# (see accompanying license files for details).
"""
A module for chemlab for simple creation of crystalline structures from
knowledge of the space group.
"""
import numpy as np
from collections import Counter
from .spacegroup import Spacegroup
from ..system import System
from .cell import cellpar_to_cell
__all__ = ['crystal']
def crystal(positions, molecules, group,
cellpar=[1.0, 1.0, 1.0, 90, 90, 90], repetitions=[1, 1, 1]):
'''Build a crystal from atomic positions, space group and cell
parameters.
**Parameters**
positions: list of coordinates
A list of the atomic positions
molecules: list of Molecule
        The molecules corresponding to the positions; each molecule will be
        translated to all of its symmetry-equivalent positions.
group: int | str
Space group given either as its number in International Tables
or as its Hermann-Mauguin symbol.
repetitions:
Repetition of the unit cell in each direction
cellpar:
Unit cell parameters
This function was taken and adapted from the *spacegroup* module
found in `ASE <https://wiki.fysik.dtu.dk/ase/>`_.
    The *spacegroup* module was originally developed by Jesper
    Friis.
'''
sp = Spacegroup(group)
sites, kind = sp.equivalent_sites(positions)
nx, ny, nz = repetitions
reptot = nx*ny*nz
# Unit cell parameters
    a, b, c = cellpar_to_cell(cellpar)
cry = System()
i = 0
with cry.batch() as batch:
for x in range(nx):
for y in range(ny):
for z in range(nz):
for s, ki in zip(sites, kind):
tpl = molecules[ki]
                        tpl.move_to(s[0]*a + s[1]*b + s[2]*c + a*x + b*y + c*z)
batch.append(tpl.copy())
# Computing the box_vectors
cry.box_vectors = np.array([a*nx, b*ny, c*nz])
return cry
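# Usage sketch (illustrative only -- it assumes single-atom Molecule objects
# can be built with Molecule.from_arrays, and uses textbook NaCl cell
# parameters; neither comes from this module):
#
#     from chemlab.core import Molecule
#     na = Molecule.from_arrays(type_array=['Na'])
#     cl = Molecule.from_arrays(type_array=['Cl'])
#     # Rock salt, space group Fm-3m (no. 225), a = 5.64
#     s = crystal([[0.0, 0.0, 0.0], [0.5, 0.5, 0.5]], [na, cl], 225,
#                 cellpar=[5.64, 5.64, 5.64, 90, 90, 90],
#                 repetitions=[2, 2, 2])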
# def crystal(symbols=None, basis=None, spacegroup=1, setting=1,
# cell=None, cellpar=None,
# ab_normal=(0,0,1), a_direction=None, size=(1,1,1),
# ondublicates='warn', symprec=0.001,
# pbc=True, primitive_cell=False, **kwargs):
# """Create a System instance for a conventional unit cell of a
# space group.
# Parameters:
# symbols : str | sequence of str | sequence of Atom | Atoms
# Element symbols of the unique sites. Can either be a string
# formula or a sequence of element symbols. E.g. ('Na', 'Cl')
# and 'NaCl' are equivalent. Can also be given as a sequence of
# Atom objects or an Atoms object.
# basis : list of scaled coordinates
# Positions of the unique sites corresponding to symbols given
# either as scaled positions or through an atoms instance. Not
# needed if *symbols* is a sequence of Atom objects or an Atoms
# object.
# spacegroup : int | string | Spacegroup instance
# Space group given either as its number in International Tables
# or as its Hermann-Mauguin symbol.
# setting : 1 | 2
# Space group setting.
# cell : 3x3 matrix
# Unit cell vectors.
# cellpar : [a, b, c, alpha, beta, gamma]
# Cell parameters with angles in degree. Is not used when `cell`
# is given.
# ab_normal : vector
# Is used to define the orientation of the unit cell relative
# to the Cartesian system when `cell` is not given. It is the
# normal vector of the plane spanned by a and b.
# a_direction : vector
# Defines the orientation of the unit cell a vector. a will be
# parallel to the projection of `a_direction` onto the a-b plane.
# size : 3 positive integers
# How many times the conventional unit cell should be repeated
# in each direction.
# ondublicates : 'keep' | 'replace' | 'warn' | 'error'
# Action if `basis` contain symmetry-equivalent positions:
# 'keep' - ignore additional symmetry-equivalent positions
# 'replace' - replace
# 'warn' - like 'keep', but issue an UserWarning
# 'error' - raises a SpacegroupValueError
# symprec : float
# Minimum "distance" betweed two sites in scaled coordinates
# before they are counted as the same site.
# pbc : one or three bools
# Periodic boundary conditions flags. Examples: True,
# False, 0, 1, (1, 1, 0), (True, False, False). Default
# is True.
# primitive_cell : bool
#     Whether to return the primitive instead of the conventional
# unit cell.
# Keyword arguments:
# All additional keyword arguments are passed on to the Atoms
# constructor. Currently, probably the most useful additional
# keyword arguments are `info`, `constraint` and `calculator`.
# Examples:
# Two diamond unit cells (space group number 227)
# >>> diamond = crystal('C', [(0,0,0)], spacegroup=227,
# ... cellpar=[3.57, 3.57, 3.57, 90, 90, 90], size=(2,1,1))
# >>> ase.view(diamond) # doctest: +SKIP
# A CoSb3 skutterudite unit cell containing 32 atoms
# >>> skutterudite = crystal(('Co', 'Sb'),
# ... basis=[(0.25,0.25,0.25), (0.0, 0.335, 0.158)],
# ... spacegroup=204, cellpar=[9.04, 9.04, 9.04, 90, 90, 90])
# >>> len(skutterudite)
# 32
# """
# sg = Spacegroup(spacegroup, setting)
# if (not isinstance(symbols, str) and
# hasattr(symbols, '__getitem__') and
# len(symbols) > 0 and
# isinstance(symbols[0], ase.Atom)):
# symbols = ase.Atoms(symbols)
# if isinstance(symbols, ase.Atoms):
# basis = symbols
# symbols = basis.get_chemical_symbols()
# if isinstance(basis, ase.Atoms):
# basis_coords = basis.get_scaled_positions()
# if cell is None and cellpar is None:
# cell = basis.cell
# if symbols is None:
# symbols = basis.get_chemical_symbols()
# else:
# basis_coords = np.array(basis, dtype=float, copy=False, ndmin=2)
# sites, kinds = sg.equivalent_sites(basis_coords,
# ondublicates=ondublicates,
# symprec=symprec)
# symbols = parse_symbols(symbols)
# symbols = [symbols[i] for i in kinds]
# if cell is None:
# cell = cellpar_to_cell(cellpar, ab_normal, a_direction)
# info = dict(spacegroup=sg)
# if primitive_cell:
# info['unit_cell'] = 'primitive'
# else:
# info['unit_cell'] = 'conventional'
# if 'info' in kwargs:
# info.update(kwargs['info'])
# kwargs['info'] = info
# atoms = ase.Atoms(symbols,
# scaled_positions=sites,
# cell=cell,
# pbc=pbc,
# **kwargs)
# if isinstance(basis, ase.Atoms):
# for name in basis.arrays:
# if not atoms.has(name):
# array = basis.get_array(name)
# atoms.new_array(name, [array[i] for i in kinds],
# dtype=array.dtype, shape=array.shape[1:])
# if primitive_cell:
# from ase.utils.geometry import cut
# prim_cell = sg.scaled_primitive_cell
# atoms = cut(atoms, a=prim_cell[0], b=prim_cell[1], c=prim_cell[2])
# if size != (1, 1, 1):
# atoms = atoms.repeat(size)
# return atoms
# def parse_symbols(symbols):
# """Return `sumbols` as a sequence of element symbols."""
# if isinstance(symbols, basestring):
# symbols = string2symbols(symbols)
# return symbols
#-----------------------------------------------------------------
# Self test
if __name__ == '__main__':
import doctest
#print 'doctest: ', doctest.testmod()
| chemlab/chemlab | chemlab/core/spacegroup/crystal.py | Python | gpl-3.0 | 7,982 | 0.003884 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Urwid LCD display module
# Copyright (C) 2010 Ian Ward
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# Urwid web site: http://excess.org/urwid/
from .display_common import BaseScreen
import time
class LCDScreen(BaseScreen):
def set_terminal_properties(self, colors=None, bright_is_bold=None,
has_underline=None):
pass
def set_mouse_tracking(self, enable=True):
pass
def start(self):
pass
def stop(self):
pass
def set_input_timeouts(self, *args):
pass
def reset_default_terminal_palette(self, *args):
pass
def run_wrapper(self,fn):
return fn()
    def draw_screen(self, size, r):
        (cols, rows) = size
        pass
def clear(self):
pass
def get_cols_rows(self):
return self.DISPLAY_SIZE
class CFLCDScreen(LCDScreen):
"""
Common methods for Crystal Fontz LCD displays
"""
KEYS = [None, # no key with code 0
'up_press', 'down_press', 'left_press',
'right_press', 'enter_press', 'exit_press',
'up_release', 'down_release', 'left_release',
'right_release', 'enter_release', 'exit_release',
'ul_press', 'ur_press', 'll_press', 'lr_press',
'ul_release', 'ur_release', 'll_release', 'lr_release']
CMD_PING = 0
CMD_VERSION = 1
CMD_CLEAR = 6
CMD_CGRAM = 9
CMD_CURSOR_POSITION = 11 # data = [col, row]
CMD_CURSOR_STYLE = 12 # data = [style (0-4)]
CMD_LCD_CONTRAST = 13 # data = [contrast (0-255)]
CMD_BACKLIGHT = 14 # data = [power (0-100)]
CMD_LCD_DATA = 31 # data = [col, row] + text
CMD_GPO = 34 # data = [pin(0-12), value(0-100)]
# sent from device
CMD_KEY_ACTIVITY = 0x80
    CMD_ACK = 0x40 # in high two bits, i.e. command & 0xc0
CURSOR_NONE = 0
CURSOR_BLINKING_BLOCK = 1
CURSOR_UNDERSCORE = 2
CURSOR_BLINKING_BLOCK_UNDERSCORE = 3
CURSOR_INVERTING_BLINKING_BLOCK = 4
MAX_PACKET_DATA_LENGTH = 22
colors = 1
has_underline = False
def __init__(self, device_path, baud):
"""
device_path -- eg. '/dev/ttyUSB0'
baud -- baud rate
"""
super(CFLCDScreen, self).__init__()
self.device_path = device_path
from serial import Serial
self._device = Serial(device_path, baud, timeout=0)
self._unprocessed = ""
@classmethod
def get_crc(cls, buf):
# This seed makes the output of this shift based algorithm match
# the table based algorithm. The center 16 bits of the 32-bit
# "newCRC" are used for the CRC. The MSB of the lower byte is used
# to see what bit was shifted out of the center 16 bit CRC
# accumulator ("carry flag analog");
newCRC = 0x00F32100
for byte in buf:
# Push this byte’s bits through a software
# implementation of a hardware shift & xor.
for bit_count in range(8):
# Shift the CRC accumulator
newCRC >>= 1
# The new MSB of the CRC accumulator comes
# from the LSB of the current data byte.
if ord(byte) & (0x01 << bit_count):
newCRC |= 0x00800000
# If the low bit of the current CRC accumulator was set
# before the shift, then we need to XOR the accumulator
# with the polynomial (center 16 bits of 0x00840800)
if newCRC & 0x00000080:
newCRC ^= 0x00840800
# All the data has been done. Do 16 more bits of 0 data.
for bit_count in range(16):
# Shift the CRC accumulator
newCRC >>= 1
# If the low bit of the current CRC accumulator was set
# before the shift we need to XOR the accumulator with
# 0x00840800.
if newCRC & 0x00000080:
newCRC ^= 0x00840800
# Return the center 16 bits, making this CRC match the one’s
# complement that is sent in the packet.
return ((~newCRC)>>8) & 0xffff
def _send_packet(self, command, data):
"""
low-level packet sending.
Following the protocol requires waiting for ack packet between
sending each packet to the device.
"""
buf = chr(command) + chr(len(data)) + data
crc = self.get_crc(buf)
buf = buf + chr(crc & 0xff) + chr(crc >> 8)
self._device.write(buf)
def _read_packet(self):
"""
low-level packet reading.
returns (command/report code, data) or None
        This method stores the data read so far and tries to resync when bad
        data is received.
"""
# pull in any new data available
self._unprocessed = self._unprocessed + self._device.read()
while True:
try:
command, data, unprocessed = self._parse_data(self._unprocessed)
self._unprocessed = unprocessed
return command, data
except self.MoreDataRequired:
return
except self.InvalidPacket:
# throw out a byte and try to parse again
self._unprocessed = self._unprocessed[1:]
class InvalidPacket(Exception):
pass
class MoreDataRequired(Exception):
pass
@classmethod
def _parse_data(cls, data):
"""
Try to read a packet from the start of data, returning
(command/report code, packet_data, remaining_data)
or raising InvalidPacket or MoreDataRequired
"""
if len(data) < 2:
raise cls.MoreDataRequired
command = ord(data[0])
plen = ord(data[1])
if plen > cls.MAX_PACKET_DATA_LENGTH:
raise cls.InvalidPacket("length value too large")
if len(data) < plen + 4:
raise cls.MoreDataRequired
crc = cls.get_crc(data[:2 + plen])
pcrc = ord(data[2 + plen]) + (ord(data[3 + plen]) << 8 )
if crc != pcrc:
raise cls.InvalidPacket("CRC doesn't match")
return (command, data[2:2 + plen], data[4 + plen:])
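    # Packet layout implied by _send_packet/_parse_data above:
    #   byte 0             command/report code
    #   byte 1             data length (plen, at most MAX_PACKET_DATA_LENGTH)
    #   bytes 2..plen+1    payload
    #   bytes plen+2, +3   CRC over all preceding bytes, low byte first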
class KeyRepeatSimulator(object):
"""
Provide simulated repeat key events when given press and
release events.
If two or more keys are pressed disable repeating until all
keys are released.
"""
def __init__(self, repeat_delay, repeat_next):
"""
repeat_delay -- seconds to wait before starting to repeat keys
repeat_next -- time between each repeated key
"""
self.repeat_delay = repeat_delay
self.repeat_next = repeat_next
self.pressed = {}
self.multiple_pressed = False
def press(self, key):
if self.pressed:
self.multiple_pressed = True
self.pressed[key] = time.time()
def release(self, key):
if key not in self.pressed:
return # ignore extra release events
del self.pressed[key]
if not self.pressed:
self.multiple_pressed = False
def next_event(self):
"""
Return (remaining, key) where remaining is the number of seconds
(float) until the key repeat event should be sent, or None if no
events are pending.
"""
if len(self.pressed) != 1 or self.multiple_pressed:
return
for key in self.pressed:
return max(0, self.pressed[key] + self.repeat_delay
- time.time()), key
def sent_event(self):
"""
        Call this method when you have sent a key repeat event so the
        timer will be reset for the next event.
"""
if len(self.pressed) != 1:
return # ignore event that shouldn't have been sent
for key in self.pressed:
self.pressed[key] = (
time.time() - self.repeat_delay + self.repeat_next)
return
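# Usage sketch (delay values are illustrative):
#     sim = KeyRepeatSimulator(repeat_delay=0.5, repeat_next=0.125)
#     sim.press('up')
#     remaining, key = sim.next_event()  # ~0.5s until the first 'up' repeat
#     sim.sent_event()                   # next repeat scheduled ~0.125s out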
class CF635Screen(CFLCDScreen):
"""
Crystal Fontz 635 display
20x4 character display + cursor
no foreground/background colors or settings supported
see CGROM for list of close unicode matches to characters available
6 button input
up, down, left, right, enter (check mark), exit (cross)
"""
DISPLAY_SIZE = (20, 4)
# ① through ⑧ are programmable CGRAM (chars 0-7, repeated at 8-15)
# double arrows (⇑⇓) appear as double arrowheads (chars 18, 19)
# ⑴ resembles a bell
# ⑵ resembles a filled-in "Y"
# ⑶ is the letters "Pt" together
# partial blocks (▇▆▄▃▁) are actually shorter versions of (▉▋▌▍▏)
# both groups are intended to draw horizontal bars with pixel
# precision, use ▇*[▆▄▃▁]? for a thin bar or ▉*[▋▌▍▏]? for a thick bar
CGROM = (
"①②③④⑤⑥⑦⑧①②③④⑤⑥⑦⑧"
"►◄⇑⇓«»↖↗↙↘▲▼↲^ˇ█"
" !\"#¤%&'()*+,-./"
"0123456789:;<=>?"
"¡ABCDEFGHIJKLMNO"
"PQRSTUVWXYZÄÖÑܧ"
"¿abcdefghijklmno"
"pqrstuvwxyzäöñüà"
"⁰¹²³⁴⁵⁶⁷⁸⁹½¼±≥≤μ"
"♪♫⑴♥♦⑵⌜⌟“”()αɛδ∞"
"@£$¥èéùìòÇᴾØøʳÅå"
"⌂¢ΦτλΩπΨΣθΞ♈ÆæßÉ"
"ΓΛΠϒ_ÈÊêçğŞşİι~◊"
"▇▆▄▃▁ƒ▉▋▌▍▏⑶◽▪↑→"
"↓←ÁÍÓÚÝáíóúýÔôŮů"
"ČĔŘŠŽčĕřšž[\]{|}")
cursor_style = CFLCDScreen.CURSOR_INVERTING_BLINKING_BLOCK
def __init__(self, device_path, baud=115200,
repeat_delay=0.5, repeat_next=0.125,
key_map=['up', 'down', 'left', 'right', 'enter', 'esc']):
"""
device_path -- eg. '/dev/ttyUSB0'
baud -- baud rate
repeat_delay -- seconds to wait before starting to repeat keys
repeat_next -- time between each repeated key
key_map -- the keys to send for this device's buttons
"""
super(CF635Screen, self).__init__(device_path, baud)
self.repeat_delay = repeat_delay
self.repeat_next = repeat_next
self.key_repeat = KeyRepeatSimulator(repeat_delay, repeat_next)
self.key_map = key_map
self._last_command = None
self._last_command_time = 0
self._command_queue = []
self._screen_buf = None
self._previous_canvas = None
self._update_cursor = False
def get_input_descriptors(self):
"""
return the fd from our serial device so we get called
on input and responses
"""
return [self._device.fd]
def get_input_nonblocking(self):
"""
Return a (next_input_timeout, keys_pressed, raw_keycodes)
tuple.
The protocol for our device requires waiting for acks between
each command, so this method responds to those as well as key
press and release events.
Key repeat events are simulated here as the device doesn't send
any for us.
raw_keycodes are the bytes of messages we received, which might
not seem to have any correspondence to keys_pressed.
"""
input = []
raw_input = []
timeout = None
while True:
packet = self._read_packet()
if not packet:
break
command, data = packet
if command == self.CMD_KEY_ACTIVITY and data:
d0 = ord(data[0])
if 1 <= d0 <= 12:
release = d0 > 6
keycode = d0 - (release * 6) - 1
key = self.key_map[keycode]
if release:
self.key_repeat.release(key)
else:
input.append(key)
self.key_repeat.press(key)
raw_input.append(d0)
elif command & 0xc0 == 0x40: # "ACK"
if command & 0x3f == self._last_command:
self._send_next_command()
next_repeat = self.key_repeat.next_event()
if next_repeat:
timeout, key = next_repeat
if not timeout:
input.append(key)
self.key_repeat.sent_event()
timeout = None
return timeout, input, []
def _send_next_command(self):
"""
send out the next command in the queue
"""
if not self._command_queue:
self._last_command = None
return
command, data = self._command_queue.pop(0)
self._send_packet(command, data)
self._last_command = command # record command for ACK
self._last_command_time = time.time()
def queue_command(self, command, data):
self._command_queue.append((command, data))
# not waiting? send away!
if self._last_command is None:
self._send_next_command()
def draw_screen(self, size, canvas):
assert size == self.DISPLAY_SIZE
if self._screen_buf:
osb = self._screen_buf
else:
osb = []
sb = []
y = 0
for row in canvas.content():
text = []
for a, cs, run in row:
text.append(run)
if not osb or osb[y] != text:
self.queue_command(self.CMD_LCD_DATA, chr(0) + chr(y) +
"".join(text))
sb.append(text)
y += 1
if (self._previous_canvas and
self._previous_canvas.cursor == canvas.cursor and
(not self._update_cursor or not canvas.cursor)):
pass
elif canvas.cursor is None:
self.queue_command(self.CMD_CURSOR_STYLE, chr(self.CURSOR_NONE))
else:
x, y = canvas.cursor
self.queue_command(self.CMD_CURSOR_POSITION, chr(x) + chr(y))
self.queue_command(self.CMD_CURSOR_STYLE, chr(self.cursor_style))
self._update_cursor = False
self._screen_buf = sb
self._previous_canvas = canvas
def program_cgram(self, index, data):
"""
Program character data. Characters available as chr(0) through
chr(7), and repeated as chr(8) through chr(15).
index -- 0 to 7 index of character to program
data -- list of 8, 6-bit integer values top to bottom with MSB
on the left side of the character.
"""
assert 0 <= index <= 7
assert len(data) == 8
self.queue_command(self.CMD_CGRAM, chr(index) +
"".join([chr(x) for x in data]))
def set_cursor_style(self, style):
"""
style -- CURSOR_BLINKING_BLOCK, CURSOR_UNDERSCORE,
CURSOR_BLINKING_BLOCK_UNDERSCORE or
CURSOR_INVERTING_BLINKING_BLOCK
"""
assert 1 <= style <= 4
self.cursor_style = style
self._update_cursor = True
def set_backlight(self, value):
"""
Set backlight brightness
value -- 0 to 100
"""
assert 0 <= value <= 100
self.queue_command(self.CMD_BACKLIGHT, chr(value))
def set_lcd_contrast(self, value):
"""
value -- 0 to 255
"""
assert 0 <= value <= 255
self.queue_command(self.CMD_LCD_CONTRAST, chr(value))
def set_led_pin(self, led, rg, value):
"""
led -- 0 to 3
rg -- 0 for red, 1 for green
value -- 0 to 100
"""
assert 0 <= led <= 3
assert rg in (0, 1)
assert 0 <= value <= 100
self.queue_command(self.CMD_GPO, chr(12 - 2 * led - rg) +
chr(value))
| DarkPurpleShadow/ConnectFour | urwid/lcd_display.py | Python | bsd-3-clause | 16,440 | 0.003343 |
# -*- coding: utf-8 -*-
from django.db import migrations
def create_switch(apps, schema_editor):
"""Create the async_order_fulfillment switch if it does not already exist."""
Switch = apps.get_model('waffle', 'Switch')
Switch.objects.get_or_create(name='async_order_fulfillment', defaults={'active': False})
def delete_switch(apps, schema_editor):
"""Delete the async_order_fulfillment switch."""
Switch = apps.get_model('waffle', 'Switch')
Switch.objects.filter(name='async_order_fulfillment').delete()
class Migration(migrations.Migration):
dependencies = [
('core', '0006_add_service_user'),
('waffle', '0001_initial'),
]
operations = [
migrations.RunPython(create_switch, reverse_code=delete_switch),
]
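# Applying or reversing this migration toggles the switch row, e.g.:
#     python manage.py migrate core 0007   # runs create_switch
#     python manage.py migrate core 0006   # runs delete_switch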
| edx/ecommerce | ecommerce/core/migrations/0007_auto_20151005_1333.py | Python | agpl-3.0 | 780 | 0.002564 |
# -*- coding: utf-8 -*-
#!/usr/bin/env python
"""
Created on Sun Sep 18 20:24:29 2016
"""
list1 = [3, 44, 38, 5, 47, 15, 36, 26, 27, 2, 46, 4, 19, 50, 48]
list2 = [1,1,1,1,1,1,1,1]
list3 = [1,2,3,4,5,6,7,8]
list4 = [2,3,6,7,5,2,2,2]
list5 = [8,7,6,5,4,3,2,1]
# check function: compare each sort against bubble sort on fresh copies,
# since these sorts mutate their argument in place
def check(func):
    print sort_bubble(list1[:]) == func(list1[:])
    print sort_bubble(list2[:]) == func(list2[:])
    print sort_bubble(list3[:]) == func(list3[:])
    print sort_bubble(list4[:]) == func(list4[:])
# bubble sort
def sort_bubble(l):
while True:
swapped = False
for i in range(len(l)-1):
if l[i]>l[i+1]:
l[i],l[i+1] = l[i+1],l[i]
swapped = True
        if not swapped:
            break
return l
# selection sort
def sort_select(l):
for i in range(len(l)-1):
min_num = l[i]
index_min = i
for j in range(i,len(l)):
if l[j]<min_num:
min_num = l[j]
index_min = j
l[i],l[index_min] = l[index_min],l[i]
return l
# insertion sort
def sort_insert(l):
for i in range(1,len(l)):
temp = l[i]
del l[i]
for j in range(i-1,-1,-1):
if j==0 and l[j] > temp:
l.insert(0,temp)
elif l[j] > temp:
pass
else:
l.insert(j+1,temp)
break
return l
# merge sort
def sort_merge(l):
if len(l) <= 1:
return l
num = int( len(l)/2 )
left = sort_merge(l[:num])
right = sort_merge(l[num:])
return Merge(left, right)
def Merge(left,right):
    r, l = 0, 0
result=[]
while l<len(left) and r<len(right):
if left[l] < right[r]:
result.append(left[l])
l += 1
else:
result.append(right[r])
r += 1
result += right[r:]
    result += left[l:]
return result
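# Usage sketch (Python 2, matching the module above):
#     check(sort_select)   # expect four lines of True
#     check(sort_insert)
#     check(sort_merge)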
#
| zfrxiaxia/Code-zfr | visualgo数据结构/01_sort.py | Python | gpl-3.0 | 1,847 | 0.045529 |
"""Features modules"""
| timevortexproject/timevortex | features/__init__.py | Python | mit | 23 | 0 |
#!/usr/bin/env python
"""AFF4 objects for managing Chipsec responses."""
from grr.client.components.chipsec_support.actions import chipsec_types
from grr.lib.aff4_objects import collects
class ACPITableDataCollection(collects.RDFValueCollection):
"""A collection of ACPI table data."""
_rdf_type = chipsec_types.ACPITableData
| destijl/grr | grr/lib/aff4_objects/hardware.py | Python | apache-2.0 | 334 | 0.005988 |
#!/usr/bin/env python
"""
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import time
import urllib2
import ambari_simplejson as json # simplejson is much faster compared to the Python 2.6 json module and has the same function set.
import logging
import traceback
from resource_management.libraries.functions.namenode_ha_utils import get_all_namenode_addresses
from resource_management.libraries.functions.curl_krb_request import curl_krb_request
from resource_management.libraries.functions.curl_krb_request import DEFAULT_KERBEROS_KINIT_TIMER_MS
from resource_management.libraries.functions.curl_krb_request import KERBEROS_KINIT_TIMER_PARAMETER
from resource_management.core.environment import Environment
LABEL = 'Last Checkpoint: [{h} hours, {m} minutes, {tx} transactions]'
HDFS_SITE_KEY = '{{hdfs-site}}'
RESULT_STATE_UNKNOWN = 'UNKNOWN'
RESULT_STATE_SKIPPED = 'SKIPPED'
NN_HTTP_ADDRESS_KEY = '{{hdfs-site/dfs.namenode.http-address}}'
NN_HTTPS_ADDRESS_KEY = '{{hdfs-site/dfs.namenode.https-address}}'
NN_HTTP_POLICY_KEY = '{{hdfs-site/dfs.http.policy}}'
NN_CHECKPOINT_TX_KEY = '{{hdfs-site/dfs.namenode.checkpoint.txns}}'
NN_CHECKPOINT_PERIOD_KEY = '{{hdfs-site/dfs.namenode.checkpoint.period}}'
PERCENT_WARNING_KEY = 'checkpoint.time.warning.threshold'
PERCENT_WARNING_DEFAULT = 200
PERCENT_CRITICAL_KEY = 'checkpoint.time.critical.threshold'
PERCENT_CRITICAL_DEFAULT = 200
CHECKPOINT_TX_MULTIPLIER_WARNING_KEY = 'checkpoint.txns.multiplier.warning.threshold'
CHECKPOINT_TX_MULTIPLIER_WARNING_DEFAULT = 2
CHECKPOINT_TX_MULTIPLIER_CRITICAL_KEY = 'checkpoint.txns.multiplier.critical.threshold'
CHECKPOINT_TX_MULTIPLIER_CRITICAL_DEFAULT = 4
CHECKPOINT_TX_DEFAULT = 1000000
CHECKPOINT_PERIOD_DEFAULT = 21600
CONNECTION_TIMEOUT_KEY = 'connection.timeout'
CONNECTION_TIMEOUT_DEFAULT = 5.0
KERBEROS_KEYTAB = '{{hdfs-site/dfs.web.authentication.kerberos.keytab}}'
KERBEROS_PRINCIPAL = '{{hdfs-site/dfs.web.authentication.kerberos.principal}}'
SECURITY_ENABLED_KEY = '{{cluster-env/security_enabled}}'
SMOKEUSER_KEY = "{{cluster-env/smokeuser}}"
EXECUTABLE_SEARCH_PATHS = '{{kerberos-env/executable_search_paths}}'
logger = logging.getLogger('ambari_alerts')
def get_tokens():
"""
Returns a tuple of tokens in the format {{site/property}} that will be used
to build the dictionary passed into execute
"""
return (HDFS_SITE_KEY, NN_HTTP_ADDRESS_KEY, NN_HTTPS_ADDRESS_KEY, NN_HTTP_POLICY_KEY, EXECUTABLE_SEARCH_PATHS,
NN_CHECKPOINT_TX_KEY, NN_CHECKPOINT_PERIOD_KEY, KERBEROS_KEYTAB, KERBEROS_PRINCIPAL, SECURITY_ENABLED_KEY, SMOKEUSER_KEY)
def execute(configurations={}, parameters={}, host_name=None):
"""
Returns a tuple containing the result code and a pre-formatted result label
Keyword arguments:
configurations (dictionary): a mapping of configuration key to value
parameters (dictionary): a mapping of script parameter key to value
host_name (string): the name of this host where the alert is running
"""
if configurations is None:
return (('UNKNOWN', ['There were no configurations supplied to the script.']))
uri = None
scheme = 'http'
http_uri = None
https_uri = None
http_policy = 'HTTP_ONLY'
checkpoint_tx = CHECKPOINT_TX_DEFAULT
checkpoint_period = CHECKPOINT_PERIOD_DEFAULT
# hdfs-site is required
if not HDFS_SITE_KEY in configurations:
return (RESULT_STATE_UNKNOWN, ['{0} is a required parameter for the script'.format(HDFS_SITE_KEY)])
if NN_HTTP_POLICY_KEY in configurations:
http_policy = configurations[NN_HTTP_POLICY_KEY]
if NN_CHECKPOINT_TX_KEY in configurations:
checkpoint_tx = configurations[NN_CHECKPOINT_TX_KEY]
if NN_CHECKPOINT_PERIOD_KEY in configurations:
checkpoint_period = configurations[NN_CHECKPOINT_PERIOD_KEY]
if SMOKEUSER_KEY in configurations:
smokeuser = configurations[SMOKEUSER_KEY]
executable_paths = None
if EXECUTABLE_SEARCH_PATHS in configurations:
executable_paths = configurations[EXECUTABLE_SEARCH_PATHS]
security_enabled = False
if SECURITY_ENABLED_KEY in configurations:
security_enabled = str(configurations[SECURITY_ENABLED_KEY]).upper() == 'TRUE'
kerberos_keytab = None
if KERBEROS_KEYTAB in configurations:
kerberos_keytab = configurations[KERBEROS_KEYTAB]
kerberos_principal = None
if KERBEROS_PRINCIPAL in configurations:
kerberos_principal = configurations[KERBEROS_PRINCIPAL]
kerberos_principal = kerberos_principal.replace('_HOST', host_name)
# parse script arguments
connection_timeout = CONNECTION_TIMEOUT_DEFAULT
if CONNECTION_TIMEOUT_KEY in parameters:
connection_timeout = float(parameters[CONNECTION_TIMEOUT_KEY])
percent_warning = PERCENT_WARNING_DEFAULT
if PERCENT_WARNING_KEY in parameters:
percent_warning = float(parameters[PERCENT_WARNING_KEY])
percent_critical = PERCENT_CRITICAL_DEFAULT
if PERCENT_CRITICAL_KEY in parameters:
percent_critical = float(parameters[PERCENT_CRITICAL_KEY])
checkpoint_txn_multiplier_warning = CHECKPOINT_TX_MULTIPLIER_WARNING_DEFAULT
if CHECKPOINT_TX_MULTIPLIER_WARNING_KEY in parameters:
checkpoint_txn_multiplier_warning = float(parameters[CHECKPOINT_TX_MULTIPLIER_WARNING_KEY])
checkpoint_txn_multiplier_critical = CHECKPOINT_TX_MULTIPLIER_CRITICAL_DEFAULT
if CHECKPOINT_TX_MULTIPLIER_CRITICAL_KEY in parameters:
checkpoint_txn_multiplier_critical = float(parameters[CHECKPOINT_TX_MULTIPLIER_CRITICAL_KEY])
kinit_timer_ms = parameters.get(KERBEROS_KINIT_TIMER_PARAMETER, DEFAULT_KERBEROS_KINIT_TIMER_MS)
# determine the right URI and whether to use SSL
hdfs_site = configurations[HDFS_SITE_KEY]
scheme = "https" if http_policy == "HTTPS_ONLY" else "http"
nn_addresses = get_all_namenode_addresses(hdfs_site)
for nn_address in nn_addresses:
if nn_address.startswith(host_name + ":"):
uri = nn_address
break
if not uri:
    return (RESULT_STATE_SKIPPED, ['NameNode on host {0} not found (namenode addresses = {1})'.format(host_name, ', '.join(nn_addresses))])
current_time = int(round(time.time() * 1000))
last_checkpoint_time_qry = "{0}://{1}/jmx?qry=Hadoop:service=NameNode,name=FSNamesystem".format(scheme,uri)
journal_transaction_info_qry = "{0}://{1}/jmx?qry=Hadoop:service=NameNode,name=NameNodeInfo".format(scheme,uri)
# start out assuming an OK status
label = None
result_code = "OK"
try:
if kerberos_principal is not None and kerberos_keytab is not None and security_enabled:
env = Environment.get_instance()
# curl requires an integer timeout
curl_connection_timeout = int(connection_timeout)
last_checkpoint_time_response, error_msg, time_millis = curl_krb_request(env.tmp_dir, kerberos_keytab,
kerberos_principal, last_checkpoint_time_qry,"checkpoint_time_alert", executable_paths, False,
"NameNode Last Checkpoint", smokeuser, connection_timeout=curl_connection_timeout,
kinit_timer_ms = kinit_timer_ms)
last_checkpoint_time_response_json = json.loads(last_checkpoint_time_response)
last_checkpoint_time = int(last_checkpoint_time_response_json["beans"][0]["LastCheckpointTime"])
journal_transaction_info_response, error_msg, time_millis = curl_krb_request(env.tmp_dir, kerberos_keytab,
kerberos_principal, journal_transaction_info_qry,"checkpoint_time_alert", executable_paths,
False, "NameNode Last Checkpoint", smokeuser, connection_timeout=curl_connection_timeout,
kinit_timer_ms = kinit_timer_ms)
journal_transaction_info_response_json = json.loads(journal_transaction_info_response)
journal_transaction_info = journal_transaction_info_response_json["beans"][0]["JournalTransactionInfo"]
else:
last_checkpoint_time = int(get_value_from_jmx(last_checkpoint_time_qry,
"LastCheckpointTime", connection_timeout))
journal_transaction_info = get_value_from_jmx(journal_transaction_info_qry,
"JournalTransactionInfo", connection_timeout)
journal_transaction_info_dict = json.loads(journal_transaction_info)
last_tx = int(journal_transaction_info_dict['LastAppliedOrWrittenTxId'])
most_recent_tx = int(journal_transaction_info_dict['MostRecentCheckpointTxId'])
transaction_difference = last_tx - most_recent_tx
delta = (current_time - last_checkpoint_time)/1000
label = LABEL.format(h=get_time(delta)['h'], m=get_time(delta)['m'], tx=transaction_difference)
is_checkpoint_txn_warning = transaction_difference > checkpoint_txn_multiplier_warning * int(checkpoint_tx)
is_checkpoint_txn_critical = transaction_difference > checkpoint_txn_multiplier_critical * int(checkpoint_tx)
    # Raise when either there are too many uncommitted transactions or
    # check-pointing has been missed for too long, as decided by the thresholds
if is_checkpoint_txn_critical or (float(delta) / int(checkpoint_period)*100 >= int(percent_critical)):
logger.debug('Raising critical alert: transaction_difference = {0}, checkpoint_tx = {1}'.format(transaction_difference, checkpoint_tx))
result_code = 'CRITICAL'
elif is_checkpoint_txn_warning or (float(delta) / int(checkpoint_period)*100 >= int(percent_warning)):
logger.debug('Raising warning alert: transaction_difference = {0}, checkpoint_tx = {1}'.format(transaction_difference, checkpoint_tx))
result_code = 'WARNING'
except:
label = traceback.format_exc()
result_code = 'UNKNOWN'
return ((result_code, [label]))
def get_time(delta):
h = int(delta/3600)
m = int((delta % 3600)/60)
return {'h':h, 'm':m}
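# e.g. get_time(7500) -> {'h': 2, 'm': 5}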
def get_value_from_jmx(query, jmx_property, connection_timeout):
response = None
try:
response = urllib2.urlopen(query, timeout=connection_timeout)
data = response.read()
data_dict = json.loads(data)
return data_dict["beans"][0][jmx_property]
finally:
if response is not None:
try:
response.close()
except:
pass
| arenadata/ambari | ambari-server/src/main/resources/stacks/BigInsights/4.2.5/services/HDFS/package/alerts/alert_checkpoint_time.py | Python | apache-2.0 | 10,596 | 0.014062 |
from sys import exit
import argparse
import sobol, morris, extended_fast
parser = argparse.ArgumentParser(description='Perform sensitivity analysis on model output')
parser.add_argument('-m', '--method', type=str, choices=['sobol', 'morris', 'fast'], required=True)
parser.add_argument('-p', '--paramfile', type=str, required=True, help='Parameter range file')
parser.add_argument('-Y', '--model-output-file', type=str, required=True, help='Model output file')
parser.add_argument('-c', '--column', type=int, required=False, default=0, help='Column of output to analyze')
parser.add_argument('--delimiter', type=str, required=False, default=' ', help='Column delimiter in model output file')
parser.add_argument('--sobol-max-order', type=int, required=False, default=2, choices=[1, 2], help='Maximum order of sensitivity indices to calculate (Sobol only)')
parser.add_argument('-X', '--morris-model-input', type=str, required=False, default=None, help='Model inputs (required for Method of Morris only)')
parser.add_argument('-r', '--sobol-bootstrap-resamples', type=int, required=False, default=1000, help='Number of bootstrap resamples for Sobol confidence intervals')
args = parser.parse_args()
if args.method == 'sobol':
calc_second_order = (args.sobol_max_order == 2)
sobol.analyze(args.paramfile, args.model_output_file, args.column, calc_second_order, num_resamples = args.sobol_bootstrap_resamples, delim = args.delimiter)
elif args.method == 'morris':
if args.morris_model_input is not None:
morris.analyze(args.paramfile, args.morris_model_input, args.model_output_file, args.column, delim = args.delimiter)
else:
print "Error: model input file is required for Method of Morris. Run with -h flag to see usage."
exit()
elif args.method == 'fast':
extended_fast.analyze(args.paramfile, args.model_output_file, args.column, delim = args.delimiter)
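# Example invocation (filenames are illustrative):
#     python -m SALib.analyze -m sobol -p params.txt -Y model_output.txt -c 0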
| dhyams/SALib | SALib/analyze/__main__.py | Python | lgpl-3.0 | 1,927 | 0.011936 |
import pybullet as p
import time
p.connect(p.GUI)
fileIO = p.loadPlugin("fileIOPlugin")
if (fileIO >= 0):
p.executePluginCommand(fileIO, "pickup.zip", [p.AddFileIOAction, p.ZipFileIO])
objs = p.loadSDF("pickup/model.sdf")
dobot = objs[0]
p.changeVisualShape(dobot, -1, rgbaColor=[1, 1, 1, 1])
else:
print("fileIOPlugin is disabled.")
p.setPhysicsEngineParameter(enableFileCaching=False)
while (1):
p.stepSimulation()
time.sleep(1. / 240.)
| MadManRises/Madgine | shared/bullet3-2.89/examples/pybullet/examples/fileIOPlugin.py | Python | mit | 457 | 0.017505 |
from streamable_archive_tests import *
from delivery_collection_tests import *
| vegarang/devilry-django | devilry/utils/tests/__init__.py | Python | bsd-3-clause | 81 | 0.024691 |
# Eve W-Space
# Copyright (C) 2013 Andrew Austin and other contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version. An additional term under section
# 7 of the GPL is included in the LICENSE file.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from django.db import models
from django import forms
from django.contrib.auth.models import User, Group
from django.contrib.auth.forms import UserCreationForm
from django.core.cache import cache
from Map.models import Map, System
from django.db.models.signals import post_save
import pytz
import datetime
import time
# Create your models here.
class PlayTime(models.Model):
"""PlayTime represents a choice of play times for use in several forms."""
fromtime = models.TimeField()
totime = models.TimeField()
class UserProfile(models.Model):
"""UserProfile defines custom fields tied to each User record in the Django auth DB."""
user = models.ForeignKey(User, unique=True)
jabberid = models.EmailField(blank=True, null=True)
defaultmap = models.ForeignKey(Map, related_name = "defaultusers", blank=True, null=True)
playtimes = models.ManyToManyField(PlayTime)
currentsystem = models.ForeignKey(System, related_name="activepilots", blank=True, null=True)
lastactive = models.DateTimeField()
class Meta:
permissions = (('account_admin', 'Administer users and groups'),)
def update_location(self, sys_id, charid, charname, shipname, shiptype):
"""
Updates the cached locations dict for this user.
"""
current_time = time.time()
user_cache_key = 'user_%s_locations' % self.user.pk
user_locations_dict = cache.get(user_cache_key)
time_threshold = current_time - (60 * 15)
location_tuple = (sys_id, charname, shipname, shiptype, current_time)
if user_locations_dict:
user_locations_dict.pop(charid, None)
user_locations_dict[charid] = location_tuple
else:
user_locations_dict = {charid: location_tuple}
# Prune dict to ensure we're not carrying over stale entries
for charid, location in user_locations_dict.items():
if location[4] < time_threshold:
user_locations_dict.pop(charid, None)
cache.set(user_cache_key, user_locations_dict, 60 * 15)
return user_locations_dict
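    # Usage sketch (values illustrative):
    #     profile.update_location(30002187, 12345, 'Some Pilot',
    #                             'Retriever', 'Mining Barge')
    # caches a (system id, char name, ship name, ship type, timestamp) tuple
    # under the 'user_<pk>_locations' cache key for 15 minutes.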
class GroupProfile(models.Model):
"""GroupProfile defines custom fields tied to each Group record."""
group = models.OneToOneField(Group, related_name='profile')
description = models.CharField(max_length=200, blank=True, null=True)
regcode = models.CharField(max_length=64, blank=True, null=True)
visible = models.BooleanField(default=True)
def create_user_profile(sender, instance, created, **kwargs):
"""Handle user creation event and create a new profile to match the new user"""
if created:
UserProfile.objects.create(user=instance, lastactive=datetime.datetime.utcnow().replace(tzinfo=pytz.UTC))
post_save.connect(create_user_profile, sender=User)
def create_group_profile(sender, instance, created, **kwargs):
"""Handle group creation event and create a new group profile."""
if created:
GroupProfile.objects.create(group=instance)
post_save.connect(create_group_profile, sender=Group)
class RegistrationForm(UserCreationForm):
"""Extends the django registration form to add fields."""
username = forms.CharField(max_length=30, label="Username")
email = forms.EmailField(required=False, label="E-Mail Address (Optional)")
password2 = forms.CharField(widget=forms.PasswordInput, label="Confirm Password:")
regcode = forms.CharField(max_length=64, label="Registration Code")
| rosudrag/eve-wspace | evewspace/account/models.py | Python | gpl-3.0 | 4,390 | 0.003189 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_django-nupages
------------
Tests for `django-nupages` models module.
"""
import os
import shutil
import unittest
from django.utils import timezone
from django.core.urlresolvers import reverse
from django.contrib.sites.models import Site
from nupages import models
from nupages import views
class TestNupages(unittest.TestCase):
    def create_page(
            self,
            title="Test Page",
            description="yes, this is only a test",
            content="yes, this is only a test",
            custom_template="",
            site=None):
        # Create the Site lazily: a default argument of Site.objects.create()
        # would be evaluated once, at import time, before the test DB is ready.
        if site is None:
            site = Site.objects.create(domain="127.0.0.1:8000",
                                       name="127.0.0.1:8000")
        return models.Page.objects.create(
            title=title,
            description=description,
            content=content,
            custom_template=custom_template,
            created=timezone.now(),
            site=site)
def test_page_creation(self):
p = self.create_page()
self.assertTrue(isinstance(p, models.Page))
self.assertEqual(p.__unicode__(), p.title)
self.assertEqual(p.get_absolute_url(), reverse("nupages:detail", kwargs={'slug': p.slug}))
| goldhand/django-nupages | tests/test_models.py | Python | bsd-3-clause | 1,041 | 0.024976 |
#!/usr/bin/env python3
#
# Copyright 2013 Simone Campagna
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
__author__ = 'Simone Campagna'
import os
import fnmatch
import collections
from .stats import FileStats
from .filetype_classifier import FileTypeClassifier
class ProjectFile(object):
def __init__(self, filepath, project_dir, filetype=None):
self.project_dir = project_dir
self.filetype_classifier = project_dir.project.filetype_classifier
self.filepath = filepath
self._filetypes = None
self.qualifiers = None
self.filetype = filetype
self.file_stats = None
def pre_classify(self):
qualifiers, self._filetypes = self.filetype_classifier.classify(self.filepath)
if qualifiers:
self.qualifiers = ";".join(qualifiers) + '-'
if self._filetypes is not None:
if len(self._filetypes) == 0:
self.filetype = FileTypeClassifier.FILETYPE_UNCLASSIFIED
elif len(self._filetypes) == 1:
self.filetype = next(iter(self._filetypes))
#print("PRE", self.filepath, self._filetypes, self.filetype)
def post_classify(self):
# if self.filepath.endswith(".h"):
# print("***", self.filepath, self.filetype, self._filetypes)
if self.filetype is None:
if not self._filetypes:
self.filetype = FileTypeClassifier.FILETYPE_UNCLASSIFIED
else:
self._filetypes = self.filetype_classifier.classify_by_content(self._filetypes, self.filepath)
if len(self._filetypes) == 0:
self.filetype = FileTypeClassifier.FILETYPE_UNCLASSIFIED
elif len(self._filetypes) == 1:
self.filetype = next(iter(self._filetypes))
else:
project_dir = self.project_dir
while project_dir:
for filetype in project_dir.most_common_filetypes():
assert not filetype in FileTypeClassifier.NO_FILETYPE_FILES
if filetype in self._filetypes:
self.filetype = filetype
#print("HERE A: ", self.filepath, self._filetypes, self.filetype, project_dir.dirpath)
break
else:
project_dir = project_dir.parent
continue
break
else:
#self.filetype = next(iter(self._filetypes))
self.filetype = next(iter(self._filetypes))
#print("HERE Z: ", self.filepath, self._filetypes, self.filetype)
# stats
if self.filetype in FileTypeClassifier.NON_EXISTENT_FILES:
self.file_stats = FileStats()
else:
block_size = self.project_dir.project.block_size
num_lines = 0
num_bytes = 0
newline = b'\n'
try:
with open(self.filepath, 'rb') as filehandle:
last_block = None
while True:
block = filehandle.read(block_size)
if not block:
break
last_block = block
num_bytes += len(block)
num_lines += block.count(newline)
                    # slice rather than index: indexing bytes yields an int on Python 3
                    if last_block and last_block[-1:] != newline:
num_lines += 1
self.file_stats = FileStats(lines=num_lines, bytes=num_bytes)
except (OSError, IOError) as e:
self.filetype = FileTypeClassifier.FILETYPE_UNREADABLE
self.file_stats = FileStats()
try:
self.file_stats.bytes += os.stat(self.filepath).st_size
except:
pass
#if self.filetype_classifier.filetype_is_binary(self.filetype):
# self.file_stats = FileStats(bytes=os.stat(self.filepath).st_size)
#else:
# try:
# with open(self.filepath, 'r') as filehandle:
# num_lines = 0
# num_bytes = 0
# for line in filehandle:
# num_bytes += len(line)
# num_lines += 1
# self.file_stats = FileStats(lines=num_lines, bytes=num_bytes)
# except UnicodeDecodeError as e:
# self.filetype = FileTypeClassifier.FILETYPE_DATA
# self.file_stats = FileStats(bytes=os.stat(self.filepath).st_size)
#print("POST", self.filepath, self._filetypes, self.filetype)
| simone-campagna/statcode | lib/python/statcode/project_file.py | Python | apache-2.0 | 5,340 | 0.004307 |
import pylogging
import os
# Logs Dir Absolute Path
logs_path = os.path.dirname(os.path.abspath(__file__)) + '/logs/'
# Create Logger Instance
logger = pylogging.PyLogging(LOG_FILE_PATH = logs_path)
def customAction1(type, msg):
# Custom Action Goes Here
pass
# Add Action
actionIden1 = logger.addAction(customAction1)
def customAction2(type, msg):
# Custom Action Goes Here
pass
# Add Action
actionIden2 = logger.addAction(customAction2)
# To Remove Action1
logger.removeAction(actionIden1)
# Log Info Message
logger.info("Info Message")
# Log Warning Message
logger.warning("Warning Message.")
# Log Error Message
logger.error("Error Message.")
# Log Critical Message
logger.critical("Critical Message.")
# Log Normal Message
logger.log("Normal Log Message.")
| Clivern/PyLogging | examples/custom_actions.py | Python | mit | 773 | 0.01423 |
from django import forms
from django.core.urlresolvers import reverse, NoReverseMatch
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ObjectDoesNotExist
from django.core.validators import URLValidator
from treenav.models import MenuItem
from mptt.forms import TreeNodeChoiceField, MPTTAdminForm
class MenuItemFormMixin(object):
def clean_link(self):
link = self.cleaned_data['link'] or ''
# It could be a fully-qualified URL -- try that first b/c reverse()
# chokes on "http://"
if any([link.startswith(s) for s in ('http://', 'https://')]):
URLValidator()(link)
elif link and not any([link.startswith(s) for s in ('^', '/')]):
# Not a regex or site-root-relative absolute path -- see if it's a
# named URL or view
try:
reverse(link)
except NoReverseMatch:
raise forms.ValidationError('Please supply a valid URL, URL '
'name, or regular expression.')
return self.cleaned_data['link']
def clean(self):
super(MenuItemFormMixin, self).clean()
content_type = self.cleaned_data['content_type']
object_id = self.cleaned_data['object_id']
if (content_type and not object_id) or (not content_type and object_id):
raise forms.ValidationError(
"Both 'Content type' and 'Object id' must be specified to use generic relationship"
)
if content_type and object_id:
try:
obj = content_type.get_object_for_this_type(pk=object_id)
except ObjectDoesNotExist, e:
raise forms.ValidationError(str(e))
try:
obj.get_absolute_url()
except AttributeError, e:
raise forms.ValidationError(str(e))
if 'is_enabled' in self.cleaned_data and \
self.cleaned_data['is_enabled'] and \
'link' in self.cleaned_data and \
self.cleaned_data['link'].startswith('^'):
raise forms.ValidationError('Menu items with regular expression '
'URLs must be disabled.')
return self.cleaned_data
class MenuItemForm(MenuItemFormMixin, MPTTAdminForm):
class Meta:
model = MenuItem
exclude = ()
class MenuItemInlineForm(MenuItemFormMixin, forms.ModelForm):
class Meta:
model = MenuItem
exclude = ()
class GenericInlineMenuItemForm(forms.ModelForm):
parent = TreeNodeChoiceField(
queryset=MenuItem.tree.all(),
required=False
)
class Meta:
model = MenuItem
fields = ('parent', 'label', 'slug', 'order', 'is_enabled')
| NetstationMurator/django-treenav | treenav/forms.py | Python | bsd-3-clause | 2,787 | 0.001794 |
from tools.load import LoadMatrix
from numpy import double
lm=LoadMatrix()
traindat = double(lm.load_numbers('../data/fm_train_real.dat'))
testdat = double(lm.load_numbers('../data/fm_test_real.dat'))
traindna = lm.load_dna('../data/fm_train_dna.dat')
testdna = lm.load_dna('../data/fm_test_dna.dat')
parameter_list = [[traindat,testdat,traindna,testdna],[traindat,testdat,traindna,testdna]]
def kernel_combined_modular(fm_train_real=traindat,fm_test_real=testdat,fm_train_dna=traindna,fm_test_dna=testdna ):
from shogun.Kernel import CombinedKernel, GaussianKernel, FixedDegreeStringKernel, LocalAlignmentStringKernel
from shogun.Features import RealFeatures, StringCharFeatures, CombinedFeatures, DNA
kernel=CombinedKernel()
feats_train=CombinedFeatures()
feats_test=CombinedFeatures()
subkfeats_train=RealFeatures(fm_train_real)
subkfeats_test=RealFeatures(fm_test_real)
subkernel=GaussianKernel(10, 1.1)
feats_train.append_feature_obj(subkfeats_train)
feats_test.append_feature_obj(subkfeats_test)
kernel.append_kernel(subkernel)
subkfeats_train=StringCharFeatures(fm_train_dna, DNA)
subkfeats_test=StringCharFeatures(fm_test_dna, DNA)
degree=3
subkernel=FixedDegreeStringKernel(10, degree)
feats_train.append_feature_obj(subkfeats_train)
feats_test.append_feature_obj(subkfeats_test)
kernel.append_kernel(subkernel)
subkfeats_train=StringCharFeatures(fm_train_dna, DNA)
subkfeats_test=StringCharFeatures(fm_test_dna, DNA)
subkernel=LocalAlignmentStringKernel(10)
feats_train.append_feature_obj(subkfeats_train)
feats_test.append_feature_obj(subkfeats_test)
kernel.append_kernel(subkernel)
kernel.init(feats_train, feats_train)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train,km_test,kernel
if __name__=='__main__':
print('Combined')
kernel_combined_modular(*parameter_list[0])
| ratschlab/ASP | examples/undocumented/python_modular/kernel_combined_modular.py | Python | gpl-2.0 | 1,900 | 0.036842 |
""" Tools for handling trade data, candle data, and simulating trades. """
from __future__ import division
import os
import csv
import time
from datetime import datetime, timedelta
import itertools
import traceback
from decimal import Decimal, getcontext
getcontext().prec = 8
from utilities import (TRADES, CANDLES, ut_to_dt, dt_to_ut,
build_data_directories, parse_period, pdelta,
get_candle, trades_to_candles, save_candlefile)
from api import BitfinexAPI
class TradeStream(object):
"""
A TradeStream collects live data from exchange API
(currently only Bitfinex API supported). If record_trades is True,
trades will be recorded to a local file.
Note that multiple TradeStreams should not be run simultaneously for the
same market, or duplicate trades will be written to the file.
"""
API_ACCESS = {'bitfinex': BitfinexAPI, 'bitstamp': BitfinexAPI}
def __init__(self, market='bitfinex_BTC_USD',
record_trades=True, quiet=False):
self.exchange, self.base, self.alt = (item.lower()
for item in market.split('_'))
self.symbol = self.base + self.alt
self.api = self.API_ACCESS[self.exchange]()
self.record_trades = record_trades
self.quiet = quiet
try:
with open(TRADES+'{}_{}_{}'.format(self.exchange,
self.base, self.alt), 'rb') as readfile:
reader = csv.reader(readfile, delimiter=',')
self.trades = [
{'timestamp': int(row[0]),
'price': Decimal(row[1]),
'amount': Decimal(row[2])}
for row in reader]
except:
self.trades = []
self.update()
def update(self):
self.new_trades = []
response = self.api.trades({}, self.symbol)
if response:
trades = sorted(response, key=lambda x: int(x['timestamp']))
new_trades = [{'timestamp': int(t['timestamp']),
'price': Decimal(t['price']),
'amount': Decimal(t['amount'])}
for t in trades
if t['timestamp'] > self.last_trade()['timestamp']
and t['exchange'] == self.exchange]
if new_trades:
self.new_trades = new_trades
# print each new trade, and add it to the
# trade file if record_trades==True
for trade in new_trades:
if not self.quiet:
print "{} {} {} {} {} {}".format(
ut_to_dt(trade['timestamp']), self.exchange,
trade['price'], self.alt, trade['amount'],
self.base)
self.trades.append({'timestamp': int(trade['timestamp']),
'price': Decimal(trade['price']),
'amount': Decimal(trade['amount'])})
self.price = self.trades[-1]['price']
# write new trades to tradefile
if self.record_trades:
tradefile = TRADES+'{}_{}_{}'.format(
self.exchange, self.base, self.alt)
if not os.path.exists(TRADES):
build_data_directories()
with open(tradefile, 'a') as writefile:
writer = csv.writer(writefile, delimiter=',')
writer.writerow([trade['timestamp'],
trade['price'], trade['amount']])
return self.new_trades
def run(self, update_every=15):
while True:
time.sleep(update_every)
try:
self.update()
except:
traceback.print_exc()
def last_trade(self):
if self.trades:
return self.trades[-1]
else:
return {'timestamp': 0}
class CandleStream(object):
""" A CandleStream converts trade data from a TradeSource to candle data
for a given period. Multiple candle streams can be run from the same
TradeSource. In that case, all the CandleStreams should be updated before
each new update of the TradeSource.
"""
def __init__(self, tradesource, period, record_candles=True,
start=None, quiet=False):
self.tradesource = tradesource
self.p_value, self.p_unit = parse_period(period)
self.period = period
self.step = pdelta(self.p_value, self.p_unit)
self.exchange, self.base, self.alt = (tradesource.exchange,
tradesource.base,
tradesource.alt)
self.candlefile = CANDLES + '{}_{}_{}_{}{}'.format(
self.exchange, self.base, self.alt, self.p_value, self.p_unit)
self.record_candles = record_candles
self.quiet = quiet
# check for candle directory
if not os.path.exists(CANDLES):
build_data_directories()
# check for candle file
if os.path.exists(self.candlefile):
with open(self.candlefile, 'rb') as readfile:
reader = csv.reader(readfile, delimiter=',')
if start:
self.closed_candles = [[int(candle[0])] + [Decimal(x)
for x in candle[1:]]
for candle in reader
if ut_to_dt(candle[0]) < start]
else:
self.closed_candles = [[int(candle[0])] + [Decimal(x)
for x in candle[1:]]
for candle in reader]
self.active_candle = self.closed_candles.pop()
# if no candle file, check for trades in tradesource
elif self.tradesource.trades:
if not self.quiet:
print 'No candlefile found; generating from tradesource...'
if start:
self.closed_candles = [[int(candle[0])] + [Decimal(x)
for x in candle[1:]]
for candle in trades_to_candles(
self.tradesource.trades, period)
if ut_to_dt(candle[0]) < start]
else:
self.closed_candles = [[int(candle[0])] + [Decimal(x)
for x in candle[1:]]
for candle in trades_to_candles(
self.tradesource.trades, period)]
# assume the last candle is still active
self.active_candle = self.closed_candles.pop()
# if no candles or trades
else:
if not self.quiet:
print ('No candlefile found; no tradefile found; '
'waiting for new trades...')
self.closed_candles = []
self.active_candle = []
self.active_trades = []
self.next_start = None
if self.active_candle: # at least one candle was found
self.next_start = ut_to_dt(self.active_candle[0]) + self.step
# assume last candle is not closed yet (check in update)
self.last_closed_known = False
# get trade data from most recent candle
self.active_trades = [
trade for trade in self.tradesource.trades
if trade['timestamp'] >= self.active_candle[0]]
def update(self):
""" Checks for new trades and updates the candle data. """
new_trades = self.tradesource.new_trades
if new_trades:
self.active_trades += [{'timestamp': int(trade['timestamp']),
'price': Decimal(trade['price']),
'amount': Decimal(trade['amount'])}
for trade in new_trades]
if not self.next_start:
first = ut_to_dt(self.active_trades[0]['timestamp'])
start = datetime(
year=first.year, month=first.month, day=first.day)
while start + self.step < first:
start += self.step
self.next_start = start + self.step
self.active_candle = get_candle(
dt_to_ut(start), self.active_trades)
self.last_closed_known = False
# dump older trades if active candle has closed,
# accounting for possible gaps
while ut_to_dt(
self.active_trades[-1]['timestamp']) > self.next_start:
self.dump()
# update active candle
new_candle = get_candle(self.active_candle[0], new_trades)
self.active_candle = self.update_candle(
self.active_candle, new_candle)
def dump(self):
"""
Run once the candle is completed, to close the candle and record
it to the candle data file.
"""
to_dump = [t for t in self.active_trades
if ut_to_dt(t['timestamp']) < self.next_start]
to_keep = [t for t in self.active_trades
if ut_to_dt(t['timestamp']) >= self.next_start]
if len(to_dump):
if not self.quiet:
print '{} {}{} candle closed at {} with {} trades'.format(
self.exchange, self.p_value, self.p_unit,
to_dump[-1]['price'], len(to_dump))
dump_candle = get_candle(self.active_candle[0], to_dump)
self.closed_candles.append(dump_candle)
if self.record_candles:
# if last entry not closed, pop out the last entry
# before rewriting with update
if not self.last_closed_known:
save_candlefile(self.closed_candles, self.period,
self.candlefile, replace=True)
else:
save_candlefile([dump_candle], self.period,
self.candlefile, replace=False)
self.active_trades = to_keep
self.active_candle = get_candle(
dt_to_ut(self.next_start), [to_keep[0]])
self.next_start += self.step
# only closed candles are saved, so last will
# always be closed on further updates
self.last_closed_known = True
def update_candle(self, active_candle, new_candle):
""" Merges new trade data with an open candle. """
# candle order: [start, open, close, high, low, volume]
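        # e.g. merging [t0, 100, 101, 102, 99, 5] with [t0, 101, 103, 103, 100, 2]
        # yields [t0, 100, 103, 103, 99, 7]: open stays, close updates,
        # high/low widen, volume accumulates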
        if int(active_candle[0]) != int(new_candle[0]):
            print '(update_candle) Warning: Candle start times do not align!'
            return active_candle  # keep the current candle rather than returning None
elif len(active_candle) == 1:
return new_candle
else:
start = active_candle[0]
opening = active_candle[1]
closing = new_candle[2]
high = max([active_candle[3], new_candle[3]])
low = min([active_candle[4], new_candle[4]])
volume = sum([active_candle[5], new_candle[5]])
return [start, opening, closing, high, low, volume]
def run(self, update_every=10, update_src=True):
while True:
time.sleep(update_every)
try:
if update_src:
self.tradesource.update()
self.update()
            except Exception:  # same pattern as TradeSource.run: log and continue
traceback.print_exc()
def price(self):
return self.tradesource.trades[-1]['price']
def get_candles(self, ncandles=None):
if ncandles:
return self.closed_candles[-(ncandles-1):] + [self.active_candle]
else:
return self.closed_candles + [self.active_candle]
def get_closed_candles(self, ncandles=None):
if ncandles:
return self.closed_candles[-(ncandles-1):]
else:
return self.closed_candles
# candles beginning after timestamp
def get_candles_since(self, timestamp):
g = itertools.dropwhile(
lambda c: c[0] < timestamp, self.closed_candles)
return list(g)
# candles containing timestamp and beginning after timestamp
def get_candles_from(self, timestamp):
timestamp = dt_to_ut(ut_to_dt(timestamp) - self.step)
g = itertools.dropwhile(
lambda c: c[0] < timestamp, self.closed_candles)
return list(g)
class ExchangeTrader(object):
"""
The backend for the GUI-based live trading implementation.
Because API calls can potentially fail (e.g. due to internet connection
lapses), they are placed in a queue that is attempted with each update.
If an API call fails, it will be retained to try again with the
next update, and further execution of the queue will stop until the
call succeeds and any alert messages are cleared. If it succeeds,
it will be removed from the queue and another update will be forced.
Strategy trade conditions will not be checked until the queue has
cleared. If triggered, the trade orders will be added to the queue
and a new update will be forced. If a buy is called for, a stop sell
order will also be queued if a stoploss is specified in the Strategy.
"""
def __init__(self, candlestream, api, strategy):
self.candlestream = candlestream
self.exchange, self.base, self.alt = (candlestream.exchange,
candlestream.base,
candlestream.alt)
self.symbol = self.base.lower() + self.alt.lower()
self.strategy = strategy
self.strategy.trader = self
self.api = api
self.messages = []
self.openMarketOrder = False
self.openOrders = []
self.queue = []
self.queueRequery()
# attempt first pass of queue immediately
while self.queue:
action = self.queue[0]
response = action()
if response: # action accepted; remove from queue
self.queue = self.queue[1:]
else: # action not accepted; try again next update
break
def getOpenOrders(self):
""" example response:
[{ u'avg_execution_price': u'0.0',
u'remaining_amount': Decimal('0.27958'),
u'timestamp': Decimal('1389409705.0'),
u'price': Decimal('850.0'),
u'exchange': None,
u'executed_amount': Decimal('0.0'),
u'symbol': u'btcusd',
u'is_live': True,
u'was_forced': False,
u'id': 5475379,
u'is_cancelled': False,
u'original_amount': Decimal('0.27958'),
u'type': u'exchange stop',
u'side': u'sell' }]
"""
response = self.api.orders()
# check response
if response is None:
return
elif 'message' in response:
self.messages.append(response['message'])
return
else:
self.openOrders = response
"""
Keep track of whether an open market order is still on the books,
such that each update need not re-check unless the trader
has recently placed a new trade
"""
self.openMarketOrder = False
for order in self.openOrders:
if 'market' in order['type']:
self.openMarketOrder = True
return True
def getFinances(self):
"""
Example response:
[{u'available': Decimal('0.0'), u'currency': u'btc',
u'amount': Decimal('0.0'), u'type': u'trading'},
{u'available': Decimal('0.0'), u'currency': u'usd',
u'amount': Decimal('0.0'), u'type': u'trading'},
{u'available': Decimal('0.0'), u'currency': u'btc',
u'amount': Decimal('0.0'), u'type': u'deposit'},
{u'available': Decimal('0.0'), u'currency': u'usd',
u'amount': Decimal('0.0'), u'type': u'deposit'},
{u'available': Decimal('0.0'), u'currency': u'btc',
u'amount': Decimal('0.0'), u'type': u'exchange'},
{u'available': Decimal('481.24270344'), u'currency': u'usd',
u'amount': Decimal('481.24270344'), u'type': u'exchange'}]
"""
response = self.api.balances()
# check response
if response is None:
return
elif 'message' in response:
self.messages.append(response['message'])
return
else:
self.finances = {}
for entry in response:
orderType = entry['type']
if orderType not in self.finances:
self.finances[orderType] = {
entry['currency']: {'available': entry['available'],
'amount': entry['amount']}}
else:
self.finances[orderType][entry['currency']] = {
'available': entry['available'],
'amount': entry['amount']}
return True
def getCompletedTrades(self, weeks=4):
""" Example response:
{u'timestamp': Decimal('1386924359.0'),
u'price': Decimal('906.19'),
u'type': u'Buy',
u'amount': Decimal('0.6605'),
u'exchange': u'bitstamp'}
"""
now = datetime.utcnow()
start = dt_to_ut(now - timedelta(weeks=weeks))
payload = {'symbol': self.base+self.alt, 'timestamp': start}
response = self.api.past_trades(payload)
# check response
if response is None:
return
elif 'message' in response:
self.messages.append(response['message'])
return
else:
self.my_trades = response[::-1]
return True
def getMarketPrice(self, action=None):
response = self.api.book({'limit_bids': 5, 'limit_asks': 5},
symbol=self.symbol)
# check response
if response is None:
return
elif 'message' in response:
self.messages.append(response['message'])
return
else:
bids, asks = response['bids'], response['asks']
prices = {'Buy': asks[0]['price'], 'Sell': bids[0]['price']}
if not action:
return prices
else:
return prices[action]
def getEquity(self):
alt = self.finances['exchange'][self.alt]['amount']
base = self.finances['exchange'][self.base]['amount']
return alt + base * self.getMarketPrice('Sell')
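    # equity() below mirrors getEquity() but prices the base balance at the
    # last traded price from the candlestream instead of querying the order
    # book, so it needs no API call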
def equity(self):
alt = self.finances['exchange'][self.alt]['amount']
base = self.finances['exchange'][self.base]['amount']
return alt + base * self.lastPrice()
def lastPrice(self):
return self.candlestream.price()
    def position(self):
        position = 'out'  # default: any last trade other than a buy leaves us out
        if self.my_trades[-1]['type'] == 'Buy':
            position = 'long'
        return position
def marketTradeEstimate(self, amount, action):
""" For 'Buy' action, returns cost of buying amount. For 'Sell' action,
returns revenue of selling amount (accounting for open orders)
"""
response = self.api.book({'limit_bids': 5, 'limit_asks': 5},
symbol=self.symbol)
# check response
if response is None:
return
elif 'message' in response:
self.messages.append(response['message'])
return
else:
bids, asks = response['bids'], response['asks']
if action == 'Buy':
orders = asks
if action == 'Sell':
orders = bids
remaining = Decimal(amount)
result = Decimal(0)
for order in orders:
if order['amount'] > remaining:
result += remaining*order['price']
break
else:
result += order['amount']*order['price']
remaining -= order['amount']
return result
def placeOrder(self, action, amount='all',
orderType='exchange market', price=None):
"""
Example response:
{u'avg_execution_price': u'0.0',
u'remaining_amount': Decimal('0.1'),
u'order_id': 5480291,
u'timestamp': Decimal('1389414906.0'),
u'price': Decimal('864.01'),
u'exchange': u'bitfinex',
u'executed_amount': Decimal('0.0'),
u'symbol': u'btcusd',
u'is_live': True,
u'was_forced': False,
u'id': 5480291,
u'is_cancelled': False,
u'original_amount': Decimal('0.1'),
u'type': u'exchange market',
u'side': u'sell'}
"""
print ('placeOrder triggered with action {}, amount {}, type {} '
'and price {}').format(action, amount, orderType, price)
if not price:
price = self.getMarketPrice(action)
if price:
print ('price not provided; '
'market price of {} used').format(price)
else:
print 'price not provided; market price could not be obtained'
return
if amount in ['all', 'All']:
if action == 'Sell':
amount = self.finances['exchange'][self.base]['available']
print '{} sell all attempted with order amount {}'.format(
orderType, amount)
payload = {'symbol': self.symbol,
'amount': amount,
'price': price,
'exchange': self.exchange,
'side': action.lower(),
'type': orderType}
response = self.api.order_new(payload)
# check response
if response is None:
return
elif 'message' in response:
self.messages.append(response['message'])
return
else:
if 'market' in orderType:
self.openMarketOrder = True
print 'placeOrder successful'
return response
def cancelOrder(self, order_id):
payload = {'order_id': order_id}
response = self.api.order_cancel(payload)
# check response
if response is None:
return
elif 'message' in response:
self.messages.append(response['message'])
return
else:
return response
def queueRequery(self):
self.queue.append(lambda: self.getOpenOrders())
self.queue.append(lambda: self.getFinances())
self.queue.append(lambda: self.getCompletedTrades())
def exitPoint(self, reference='peak'):
""" Calculates exit price points based on stoploss value specified by
Strategy.set_stoploss().
"""
if reference == 'entry':
# calculate stoploss relative to entry price
if self.my_trades:
if self.position() == 'long':
entry_price = self.my_trades[-1]['price']
return entry_price - (entry_price*self.strategy.stoploss)
if reference == 'peak':
# calculate stoploss relative to peak closing price since entry
if self.my_trades:
if self.position() == 'long':
entry_price = self.my_trades[-1]['price']
timestamp = self.my_trades[-1]['timestamp']
candles_from = self.candlestream.get_candles_from(
timestamp)
max_from = Decimal(max([entry_price] + [c[2]
for c in candles_from]))
return max_from - (max_from*self.strategy.stoploss)
def lastTradeType(self):
if self.my_trades:
return self.my_trades[-1]['type']
def update(self):
actionTaken = False # default case
# cease updating if alert messages haven't been dismissed
if self.messages:
return {'messages': self.messages}
# if a market order was placed recently, requery
if self.openMarketOrder:
if not self.getOpenOrders():
# query failed; try again next update
return actionTaken
self.openMarketOrder = False # default case
for order in self.openOrders:
if 'market' in order['type']:
# order still open; wait until next update and check again
self.openMarketOrder = True
print 'open market order detected; waiting for next update'
return actionTaken
# ATTEMPT TO PROCESS QUEUE
if self.queue:
action = self.queue[0]
response = action()
if response: # action accepted; remove from queue and force update
self.queue = self.queue[1:]
actionTaken = True
self.update()
return actionTaken
else: # action failed; try again next update
return actionTaken
if self.position() == 'long':
# check if expected stoploss is missing
# or stop order should be increased
missing = True # default case
for order in self.openOrders:
if order['side'] == 'sell' and 'stop' in order['type']:
# stop order is present
missing = False
# check if price increase is called for
current_exit = order['price']
new_exit = self.exitPoint()
if new_exit > current_exit:
print ('exit point has increased from {} to {} '
'since stop order placed; resubmitting stop '
'order').format(current_exit, new_exit)
# cancel current stop order
                        # bind the id now; a plain closure over `order` would
                        # resolve to whichever order the loop saw last
                        self.queue.append(
                            lambda oid=order['id']: self.cancelOrder(oid))
self.queueRequery()
# post new stop order at higher price
self.queue.append(lambda: self.placeOrder(
'Sell', orderType='exchange stop', price=new_exit))
self.queueRequery()
return actionTaken
if missing and 'stop' not in self.lastTradeType():
print 'stoploss is missing; queuing stop order'
self.queue.append(
lambda: self.placeOrder('Sell', orderType='exchange stop',
price=self.exitPoint()))
self.queueRequery()
return actionTaken
# check if price has passed stoploss
for order in self.openOrders:
if order['side'] == 'sell' and 'stop' in order['type']:
# stop sell order is present; check new trades
new_trades = self.candlestream.tradesource.new_trades
for trade in new_trades:
if trade['price'] < order['price']:
# assume stop order triggered, requery API
print 'price below stoploss detected; requerying'
self.queueRequery()
return actionTaken
# CHECK CONDITIONS FOR NEW TRADES
action = self.strategy.check(
position=self.position(), my_trades=self.my_trades,
candlestream=self.candlestream) # **kwargs
if action:
# queue trade and force update
print 'action {} received'.format(action)
self.queue.append(lambda: self.trade(action))
return actionTaken or self.update()
def trade(self, action, amount=None):
# buy action
if action == 'Buy':
if not amount:
price = self.getMarketPrice(action)
dollars_at_risk = self.strategy.risk * self.equity()
price_move = price * self.strategy.stoploss
# buy amount based on risk management
amount = (dollars_at_risk / price_move *
(1 - self.strategy.commission))
print 'buy amount ', amount
cost = self.marketTradeEstimate(amount, action)
print 'buy cost ', cost
# make sure query executed
if cost is None:
return
# make sure sufficient finances are available
elif cost > self.finances['exchange'][self.alt]['available']:
self.messages.append(
'WARNING: cost of buy exceeds available finances.')
return
# queue market buy order and requery
print 'queuing market buy order'
self.queue.append(lambda: self.placeOrder('Buy', amount=amount))
self.queueRequery()
# queue stop sell order and requery
# using same amount here causes 'not enough balance'
# error due to commission loss
print 'queuing stop sell order'
self.queue.append(
lambda: self.placeOrder('Sell', orderType='exchange stop',
price=self.exitPoint()))
self.queueRequery()
return True
# sell action
elif action == 'Sell':
# close open stop sell orders
for order in self.openOrders:
if 'stop' in order['type'] and 'sell' in order['side']:
print 'queuing cancel of open stop order'
                    # default argument binds each order's id at definition time
                    self.queue.append(lambda oid=order['id']: self.cancelOrder(oid))
self.queueRequery()
if not amount:
amount = 'all'
# queue market order and requery
print 'queuing market sell order'
self.queue.append(lambda: self.placeOrder(action, amount=amount))
self.queueRequery()
return True
|
AdamStone/cryptrade
|
cryptrade/trading.py
|
Python
|
unlicense
| 30,686 | 0.000293 |
from django.conf.urls import url
from annotate import views
urlpatterns = [
url(r'^$', views.IndexView.as_view(), name='index'),
url(r'^create$', views.create_annotation, name='create'),
url(r'^edit$', views.edit_annotation, name='edit'),
url(r'^json$', views.export_json, name='export_json'),
url(r'^rdf$', views.export_rdf, name='export_rdf'),
url(r'^(?P<pk>\d+)/update$', views.update_annotation, name='update'),
url(r'^(?P<pk>\d+)/$', views.DetailView.as_view(), name='detail'),
]
|
opensemanticsearch/open-semantic-search-apps
|
src/annotate/urls.py
|
Python
|
gpl-3.0
| 494 | 0.01417 |
# flake8: noqa
"""
PILKit image processors.
A processor accepts an image, does some stuff, and returns the result.
Processors can do anything with the image you want, but their responsibilities
should be limited to image manipulations--they should be completely decoupled
from the filesystem.
"""
from .base import *
from .crop import *
from .overlay import *
from .resize import *
|
bzennn/blog_flask
|
python/lib/python3.5/site-packages/pilkit/processors/__init__.py
|
Python
|
gpl-3.0
| 386 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
read-bookmark.py
~~~~~~~~~~~~~~~~
This module is an example of how to harness the Readability API w/ oAuth.
This module expects the following environment variables to be set:
- READABILITY_CONSUMER_KEY
- READABILITY_CONSUMER_SECRET
- READABILITY_ACCESS_TOKEN
- READABILITY_ACCESS_SECRET
Once you have your consumer keys setup, run the following to get your
access tokens::
$ ./login-xauth.py <username> <password>
"""
import sys
from HTMLParser import HTMLParser
from ext import setup_rdd
class MLStripper(HTMLParser):
"""HTMLParser w/ overrides for stripping text out."""
def __init__(self):
self.reset()
self.fed = []
def handle_data(self, d):
self.fed.append(d)
def get_data(self):
return ' '.join(self.fed)
def strip_tags(html):
"""A super low-tech and debatably irresponsible attempt to turn HTML
into plain text."""
s = MLStripper()
s.feed(html)
data = s.get_data()
for s in ('\n\n\n\n\n', '\n\n\n\n', '\n\n\n', '\n', '\t'):
data = data.replace(s, '')
data = data.replace(' ', '')
return data
def main():
rdd = setup_rdd()
bookmarks = rdd.get_me().bookmarks(limit=10)
print 'Recent Bookmarks'
print '----------------\n'
for i, mark in enumerate(bookmarks):
print '%01d: %s (%s)' % (i, mark.article.title, mark.article.domain)
try:
selection = raw_input('\nRead Article (0-9)? ')
selection = int(selection)
assert (selection < 10) and (selection >= 0)
except (ValueError, AssertionError):
print >> sys.stderr, '\nEnter a number within 0-9, if you don\'t mind.'
except KeyboardInterrupt:
print >> sys.stderr, '\nWell, fine.'
sys.exit()
article = bookmarks[selection].article
article = rdd.get_article(article.id)
print article.title
print '-' * len(article.title) + '\n'
print strip_tags(article.content)
if __name__ == '__main__':
main()
|
alexwaters/python-readability-api
|
examples/read-bookmarks.py
|
Python
|
mit
| 2,034 | 0.002458 |
def get_hero_winrate(hero):
"""returns hero winrate from list of meta heroes"""
    if hero.get('pro_pick', 0) == 0: return 0
    else: return hero.get('pro_win', 0) / hero['pro_pick']
def get_hero_pick_percent(hero, heroes):
return hero.get('pro_pick', 0) / get_total_pro_games(heroes)
def get_hero_ban_percent(hero, heroes):
return hero.get('pro_ban', 0) / get_total_pro_games(heroes)
def get_total_pro_games(heroes):
total = 0
for hero in heroes:
total += hero.get('pro_pick', 0) # sums total games in the list
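    # each pro game fields 10 heroes, so summed picks count every game 10 times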
total = total/10
return total
def get_hero_pickban_percent(hero, heroes):
return (
hero.get('pro_pick', 0) + hero.get('pro_ban', 0)
) / get_total_pro_games(heroes)
|
mdiller/MangoByte
|
cogs/utils/metastats.py
|
Python
|
mit
| 738 | 0.004065 |
# Natural Language Toolkit: Product Reviews Corpus Reader
#
# Copyright (C) 2001-2017 NLTK Project
# Author: Pierpaolo Pantone <24alsecondo@gmail.com>
# URL: <http://nltk.org/>
# For license information, see LICENSE.TXT
"""
CorpusReader for reviews corpora (syntax based on Customer Review Corpus).
- Customer Review Corpus information -
Annotated by: Minqing Hu and Bing Liu, 2004.
Department of Computer Science
University of Illinois at Chicago
Contact: Bing Liu, liub@cs.uic.edu
http://www.cs.uic.edu/~liub
Distributed with permission.
The "product_reviews_1" and "product_reviews_2" datasets respectively contain
annotated customer reviews of 5 and 9 products from amazon.com.
Related papers:
- Minqing Hu and Bing Liu. "Mining and summarizing customer reviews".
Proceedings of the ACM SIGKDD International Conference on Knowledge
Discovery & Data Mining (KDD-04), 2004.
- Minqing Hu and Bing Liu. "Mining Opinion Features in Customer Reviews".
Proceedings of Nineteenth National Conference on Artificial Intelligence
(AAAI-2004), 2004.
- Xiaowen Ding, Bing Liu and Philip S. Yu. "A Holistic Lexicon-Based Approach to
Opinion Mining." Proceedings of First ACM International Conference on Web
Search and Data Mining (WSDM-2008), Feb 11-12, 2008, Stanford University,
Stanford, California, USA.
Symbols used in the annotated reviews:
[t] : the title of the review: Each [t] tag starts a review.
xxxx[+|-n]: xxxx is a product feature.
[+n]: Positive opinion, n is the opinion strength: 3 strongest, and 1 weakest.
Note that the strength is quite subjective.
You may want to ignore it and only consider + and -
[-n]: Negative opinion
## : start of each sentence. Each line is a sentence.
[u] : feature not appeared in the sentence.
[p] : feature not appeared in the sentence. Pronoun resolution is needed.
[s] : suggestion or recommendation.
[cc]: comparison with a competing product from a different brand.
[cs]: comparison with a competing product from the same brand.
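For example, an annotated line might look like (a made-up illustration):
    picture quality[+3]##the picture quality is outstanding .
which marks "picture quality" as a feature with a strong positive opinion.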
Note: Some of the files (e.g. "ipod.txt", "Canon PowerShot SD500.txt") do not
provide separation between different reviews. This is due to the fact that
the dataset was specifically designed for aspect/feature-based sentiment
analysis, for which sentence-level annotation is sufficient. For document-
level classification and analysis, this peculiarity should be taken into
consideration.
"""
from __future__ import division
from six import string_types
import re
from nltk.corpus.reader.api import *
from nltk.tokenize import *
TITLE = re.compile(r'^\[t\](.*)$') # [t] Title
FEATURES = re.compile(r'((?:(?:\w+\s)+)?\w+)\[((?:\+|\-)\d)\]') # find 'feature' in feature[+3]
NOTES = re.compile(r'\[(?!t)(p|u|s|cc|cs)\]') # find 'p' in camera[+2][p]
SENT = re.compile(r'##(.*)$') # find tokenized sentence
@compat.python_2_unicode_compatible
class Review(object):
"""
A Review is the main block of a ReviewsCorpusReader.
"""
def __init__(self, title=None, review_lines=None):
"""
:param title: the title of the review.
:param review_lines: the list of the ReviewLines that belong to the Review.
"""
self.title = title
if review_lines is None:
self.review_lines = []
else:
self.review_lines = review_lines
def add_line(self, review_line):
"""
Add a line (ReviewLine) to the review.
:param review_line: a ReviewLine instance that belongs to the Review.
"""
assert isinstance(review_line, ReviewLine)
self.review_lines.append(review_line)
def features(self):
"""
Return a list of features in the review. Each feature is a tuple made of
the specific item feature and the opinion strength about that feature.
:return: all features of the review as a list of tuples (feat, score).
:rtype: list(tuple)
"""
features = []
for review_line in self.review_lines:
features.extend(review_line.features)
return features
def sents(self):
"""
Return all tokenized sentences in the review.
:return: all sentences of the review as lists of tokens.
:rtype: list(list(str))
"""
return [review_line.sent for review_line in self.review_lines]
def __repr__(self):
return 'Review(title=\"{}\", review_lines={})'.format(self.title, self.review_lines)
@compat.python_2_unicode_compatible
class ReviewLine(object):
"""
A ReviewLine represents a sentence of the review, together with (optional)
annotations of its features and notes about the reviewed item.
"""
def __init__(self, sent, features=None, notes=None):
self.sent = sent
if features is None:
self.features = []
else:
self.features = features
if notes is None:
self.notes = []
else:
self.notes = notes
def __repr__(self):
return ('ReviewLine(features={}, notes={}, sent={})'.format(
self.features, self.notes, self.sent))
class ReviewsCorpusReader(CorpusReader):
"""
Reader for the Customer Review Data dataset by Hu, Liu (2004).
Note: we are not applying any sentence tokenization at the moment, just word
tokenization.
>>> from nltk.corpus import product_reviews_1
>>> camera_reviews = product_reviews_1.reviews('Canon_G3.txt')
>>> review = camera_reviews[0]
>>> review.sents()[0]
['i', 'recently', 'purchased', 'the', 'canon', 'powershot', 'g3', 'and', 'am',
'extremely', 'satisfied', 'with', 'the', 'purchase', '.']
>>> review.features()
[('canon powershot g3', '+3'), ('use', '+2'), ('picture', '+2'),
('picture quality', '+1'), ('picture quality', '+1'), ('camera', '+2'),
('use', '+2'), ('feature', '+1'), ('picture quality', '+3'), ('use', '+1'),
('option', '+1')]
We can also reach the same information directly from the stream:
>>> product_reviews_1.features('Canon_G3.txt')
[('canon powershot g3', '+3'), ('use', '+2'), ...]
We can compute stats for specific product features:
>>> from __future__ import division
>>> n_reviews = len([(feat,score) for (feat,score) in product_reviews_1.features('Canon_G3.txt') if feat=='picture'])
>>> tot = sum([int(score) for (feat,score) in product_reviews_1.features('Canon_G3.txt') if feat=='picture'])
>>> # We use float for backward compatibility with division in Python2.7
>>> mean = tot / n_reviews
>>> print(n_reviews, tot, mean)
15 24 1.6
"""
CorpusView = StreamBackedCorpusView
def __init__(self, root, fileids, word_tokenizer=WordPunctTokenizer(),
encoding='utf8'):
"""
:param root: The root directory for the corpus.
:param fileids: a list or regexp specifying the fileids in the corpus.
:param word_tokenizer: a tokenizer for breaking sentences or paragraphs
into words. Default: `WordPunctTokenizer`
:param encoding: the encoding that should be used to read the corpus.
"""
CorpusReader.__init__(self, root, fileids, encoding)
self._word_tokenizer = word_tokenizer
def features(self, fileids=None):
"""
Return a list of features. Each feature is a tuple made of the specific
item feature and the opinion strength about that feature.
:param fileids: a list or regexp specifying the ids of the files whose
features have to be returned.
:return: all features for the item(s) in the given file(s).
:rtype: list(tuple)
"""
if fileids is None:
fileids = self._fileids
elif isinstance(fileids, string_types):
fileids = [fileids]
return concat([self.CorpusView(fileid, self._read_features, encoding=enc)
for (fileid, enc) in self.abspaths(fileids, True)])
def raw(self, fileids=None):
"""
:param fileids: a list or regexp specifying the fileids of the files that
have to be returned as a raw string.
:return: the given file(s) as a single string.
:rtype: str
"""
if fileids is None:
fileids = self._fileids
elif isinstance(fileids, string_types):
fileids = [fileids]
return concat([self.open(f).read() for f in fileids])
def readme(self):
"""
Return the contents of the corpus README.txt file.
"""
return self.open("README.txt").read()
def reviews(self, fileids=None):
"""
Return all the reviews as a list of Review objects. If `fileids` is
specified, return all the reviews from each of the specified files.
:param fileids: a list or regexp specifying the ids of the files whose
reviews have to be returned.
:return: the given file(s) as a list of reviews.
"""
if fileids is None:
fileids = self._fileids
return concat([self.CorpusView(fileid, self._read_review_block, encoding=enc)
for (fileid, enc) in self.abspaths(fileids, True)])
def sents(self, fileids=None):
"""
Return all sentences in the corpus or in the specified files.
:param fileids: a list or regexp specifying the ids of the files whose
sentences have to be returned.
:return: the given file(s) as a list of sentences, each encoded as a
list of word strings.
:rtype: list(list(str))
"""
return concat([self.CorpusView(path, self._read_sent_block, encoding=enc)
for (path, enc, fileid)
in self.abspaths(fileids, True, True)])
def words(self, fileids=None):
"""
Return all words and punctuation symbols in the corpus or in the specified
files.
:param fileids: a list or regexp specifying the ids of the files whose
words have to be returned.
:return: the given file(s) as a list of words and punctuation symbols.
:rtype: list(str)
"""
return concat([self.CorpusView(path, self._read_word_block, encoding=enc)
for (path, enc, fileid)
in self.abspaths(fileids, True, True)])
def _read_features(self, stream):
features = []
for i in range(20):
line = stream.readline()
if not line:
return features
features.extend(re.findall(FEATURES, line))
return features
def _read_review_block(self, stream):
while True:
line = stream.readline()
if not line:
return [] # end of file.
title_match = re.match(TITLE, line)
if title_match:
review = Review(title=title_match.group(1).strip()) # We create a new review
break
# Scan until we find another line matching the regexp, or EOF.
while True:
oldpos = stream.tell()
line = stream.readline()
# End of file:
if not line:
return [review]
# Start of a new review: backup to just before it starts, and
# return the review we've already collected.
if re.match(TITLE, line):
stream.seek(oldpos)
return [review]
# Anything else is part of the review line.
feats = re.findall(FEATURES, line)
notes = re.findall(NOTES, line)
sent = re.findall(SENT, line)
if sent:
sent = self._word_tokenizer.tokenize(sent[0])
review_line = ReviewLine(sent=sent, features=feats, notes=notes)
review.add_line(review_line)
def _read_sent_block(self, stream):
sents = []
for review in self._read_review_block(stream):
sents.extend([sent for sent in review.sents()])
return sents
def _read_word_block(self, stream):
words = []
for i in range(20): # Read 20 lines at a time.
line = stream.readline()
sent = re.findall(SENT, line)
if sent:
words.extend(self._word_tokenizer.tokenize(sent[0]))
return words
|
arju88nair/projectCulminate
|
venv/lib/python3.5/site-packages/nltk/corpus/reader/reviews.py
|
Python
|
apache-2.0
| 12,534 | 0.002074 |
# Time: O(n)
# Space: O(1)
#
# The API: int read4(char *buf) reads 4 characters at a time from a file.
#
# The return value is the actual number of characters read. For example, it returns 3 if there is only 3 characters left in the file.
#
# By using the read4 API, implement the function int read(char *buf, int n) that reads n characters from the file.
#
# Note:
# The read function may be called multiple times.
#
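# For example (illustrative): with file contents "abcde", read(buf, 4)
# returns 4 and fills "abcd"; a second read(buf, 4) returns 1 with "e";
# any further read returns 0.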
# The read4 API is already defined for you.
# @param buf, a list of characters
# @return an integer
def read4(buf):
global file_content
i = 0
while i < len(file_content) and i < 4:
buf[i] = file_content[i]
i += 1
if len(file_content) > 4:
file_content = file_content[4:]
else:
file_content = ""
return i
# The read4 API is already defined for you.
# @param buf, a list of characters
# @return an integer
# def read4(buf):
class Solution(object):
def __init__(self):
self.__buf4 = [''] * 4
self.__i4 = 0
self.__n4 = 0
def read(self, buf, n):
"""
:type buf: Destination buffer (List[str])
:type n: Maximum number of characters to read (int)
:rtype: The number of characters read (int)
"""
i = 0
while i < n:
if self.__i4 < self.__n4: # Any characters in buf4.
buf[i] = self.__buf4[self.__i4]
i += 1
self.__i4 += 1
else:
self.__n4 = read4(self.__buf4) # Read more characters.
if self.__n4:
self.__i4 = 0
else: # Buffer has been empty.
break
return i
if __name__ == "__main__":
global file_content
sol = Solution()
buf = ['' for _ in xrange(100)]
file_content = "ab"
print buf[:sol.read(buf, 1)]
print buf[:sol.read(buf, 2)]
|
yiwen-luo/LeetCode
|
Python/read-n-characters-given-read4-ii-call-multiple-times.py
|
Python
|
mit
| 1,921 | 0.006247 |
#!/usr/bin/python
import os,sys,glob,re
import numpy as np
import scipy
from scipy import stats
import datetime
import time
from datetime import timedelta
#import matplotlib
#matplotlib.use('Agg')
#import matplotlib.pyplot as plt
#from matplotlib import colors as c
#from matplotlib import cm
from scipy.stats.kde import gaussian_kde
from numpy import linspace
from scipy.stats import kruskal
#from scipy.stats import nanmean
#from scipy.stats import nanmedian
import pandas as pd
import statsmodels.api as sm
from scipy.stats import mstats
#freqlist = ["numberofbouts_min", "numberofbouts_10min", "dpixnumberofbouts_min", "dpixnumberofbouts_10min", "aveinterboutinterval_min", "aveinterboutinterval_10min", "avedpixinterboutinterval_min", "avedpixinterboutinterval_10min", "dpixsecpermin", "dpixminper10min", "distsecpermin", "distminper10min"]
#loclist = ["interboutcenterfrac", "interboutaverhofrac", "centerfrac", "averhofrac"]
#featlist = ["dpixavebouttime_min", "dpixavebouttime_10min", "aveboutvel_min", "aveboutvel_10min", "avebouttime_min", "avebouttime_10min", "aveboutspeed_min", "aveboutspeed_10min", "aveboutdist_min", "aveboutdist_10min", "aveboutdisp_min", "aveboutdisp_10min", "aveboutcumdpix_min", "aveboutcumdpix_10min"]
nonstimcombos = {"Frequency of movement": ["numberofbouts_min", "numberofbouts_10min", "dpixnumberofbouts_min", "dpixnumberofbouts_10min", "aveinterboutinterval_min", "aveinterboutinterval_10min", "avedpixinterboutinterval_min", "avedpixinterboutinterval_10min", "dpixsecper_min", "dpixminper_10min", "distsecper_min", "distminper_10min"], "Location in well": ["interboutcenterfrac_min", "interboutaverhofrac_min", "centerfrac_min", "averhofrac_min","interboutcenterfrac_10min", "interboutaverhofrac_10min", "centerfrac_10min", "averhofrac_10min"], "Features of movement": ["dpixavebouttime_min", "dpixavebouttime_10min", "aveboutvel_min", "aveboutvel_10min", "avebouttime_min", "avebouttime_10min", "aveboutspeed_min", "aveboutspeed_10min", "aveboutdist_min", "aveboutdist_10min", "aveboutdisp_min", "aveboutdisp_10min", "aveboutcumdpix_min", "aveboutcumdpix_10min"]}
typecombos = [["Night tap habituation", "Day tap habituation 1", "Day tap habituation 2", "Day tap habituation 3"], ["Day light flash", "Night light flash"],["Night early prepulse tap", "Day early prepulse tap"], ["Night all prepulse tap", "Day all prepulse tap"], ["Day all strong tap", "Night all strong tap"], ["Day early strong tap","Night early strong tap"],["Night early weak tap", "Day early weak tap"], ["Day all weak tap", "Night all weak tap"], ["Dark flash block 3 start","Dark flash block 3 end","Dark flash block 4 start","Dark flash block 4 end","Dark flash block 1 start","Dark flash block 1 end","Dark flash block 2 start","Dark flash block 2 end"]]
stimcombos = {
#"Day light flash and weak tap": ["106106"],
#"Night light flash and weak tap": ["night106106"],
"Night tap habituation": ["nighttaphab102", "nighttaphab1"],
"Day tap habituation 1": ["adaytaphab102", "adaytaphab1"],
"Day tap habituation 3": ["cdaytaphab102", "cdaytaphab1"],
"Day tap habituation 2": ["bdaytaphab102", "bdaytaphab1"],
"Day light flash": ["lightflash104"],
#"Day light flash": ["lightflash104", "lightflash0"],
"Night light flash": ["nightlightflash104"],
#"Night light flash": ["nightlightflash104", "nightlightflash0"],
"Night early prepulse tap": ["shortnightprepulseinhibition100b"],
#"Night early prepulse tap": ["shortnightprepulseinhibition100b", "shortnightprepulseinhibition100c"],
"Night all prepulse tap": ["nightprepulseinhibition100b"],
#"Night all prepulse tap": ["nightprepulseinhibition100b", "nightprepulseinhibition100c"],
"Day early prepulse tap": ["shortdayprepulseinhibition100b"],
#"Day early prepulse tap": ["shortdayprepulseinhibition100b", "shortdayprepulseinhibition100c"],
"Day all prepulse tap": ["dayprepulseinhibition100b"],
#"Day all prepulse tap": ["dayprepulseinhibition100b", "dayprepulseinhibition100c"],
"Day all weak tap": ["dayprepulseinhibition100a", "dayprepulseinhibition101"],
"Day early weak tap": ["shortdayprepulseinhibition100a", "shortdayprepulseinhibition101"],
"Night all weak tap": ["nightprepulseinhibition100a", "nightprepulseinhibition101"],
"Night early weak tap": ["shortnightprepulseinhibition100a", "shortnightprepulseinhibition101"],
"Day early strong tap": ["adaytappre102", "shortdayprepulseinhibition102"],
#"Day early strong tap": ["adaytappre102", "adaytappre1", "shortdayprepulseinhibition102"],
"Day all strong tap": ["dayprepulseinhibition102", "adaytappostbdaytappre102","bdaytappostcdaytappre102", "cdaytappost102"],
#"Day all strong tap": ["dayprepulseinhibition102", "adaytappostbdaytappre102","bdaytappostcdaytappre102", "bdaytappostcdaytappre1", "cdaytappost1", "cdaytappost102","adaytappostbdaytappre1"],
"Night early strong tap": ["nighttappre102"],
#"Night early strong tap": ["nighttappre1", "nighttappre102"],
"Night all strong tap": ["nightprepulseinhibition102","nighttappost102"],
#"Night all strong tap": ["nightprepulseinhibition102","nighttappost102", "nighttappost1"],
#"Dark flash all blocks": ["darkflash103", "darkflash0"],
"Dark flash block 3 start": ["cdarkflash103"],
"Dark flash block 3 end": ["c2darkflash103"],
"Dark flash block 1 start": ["adarkflash103"],
"Dark flash block 1 end": ["a2darkflash103"],
"Dark flash block 2 start": ["bdarkflash103"],
"Dark flash block 2 end": ["b2darkflash103"],
"Dark flash block 4 start": ["ddarkflash103"],
"Dark flash block 4 end": ["d2darkflash103"]}
# "Dark flash block 3 start": ["cdarkflash103", "cdarkflash0"],
# "Dark flash block 3 end": ["c2darkflash103", "c2darkflash0"],
# "Dark flash block 1 start": ["adarkflash103", "adarkflash0"],
# "Dark flash block 1 end": ["a2darkflash103", "a2darkflash0"],
# "Dark flash block 2 start": ["bdarkflash103", "bdarkflash0"],
# "Dark flash block 2 end": ["b2darkflash103", "b2darkflash0"],
# "Dark flash block 4 start": ["ddarkflash103", "ddarkflash0"],
# "Dark flash block 4 end": ["d2darkflash103", "d2darkflash0"]}
#direction = {
# "aveboutspeed": 1
# "aveboutspeed": 1
# ones that are opposite of expected
# fullboutdatamaxloc (max peak location (larger is less strong of response))
# latency (longer is less good), similar to max peak
# aveinterboutinterval
# rho or centerfrac, not sure which orientation would want
# make wall-hugging positive
# lower centerfrac means more positive, which is how it is right now I think, yes, so if I default everything to switching signs, then averhofrac is the odd one out and should be skipped
# for most, larger should mean - and should mean mutant is stronger response or more movement
# need to make most into opposite
# standard
# cumdpix, displacement, distance, speed, velocity, secpermin, numberofbouts, frequency of response, polygonarea
# unsure - fullboutdata as done with linear model, and also the dark flash ones done with linear model
#}
direction_swaps = ["rhofrac", "latency", "interboutinterval", "fullboutdatamaxloc"]
for file in glob.glob("*linearmodel*"): # THIS IS WHAT THE PRINT OUTPUT MUST POINT TO, CAN HAVE SOMETHING AT END, BUT MUST START THIS WAY
if "finalsorted" in file:
continue
dir = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
ffile = open('finalsortedupdatedCP4or2_' + file + "_" + dir, 'w')
ofile = open(file, 'r')
lines = ofile.readlines()
pdict = {}
for line in lines:
# anova data
if line.startswith("anova:"):
pval = line.split(":")[3].strip().split()[3].strip()
#anova: ribgraph_mean_ribbon_latencyresponse_dpix_nighttappost102.png : Mean of array wt, mut, H-stat, P-value: 25.8557471264 21.4177419355 2.63243902441 0.104700765405
meanwtminmut = float(line.split(":")[3].strip().split()[0]) - float(line.split(":")[3].strip().split()[1])
name = line.split(":")[1].strip()
pdict[name] = [pval, meanwtminmut]
# ffile.write(str(pval))
# ffile.write(', ')
# ffile.write(str(meanwtminmut))
# ffile.write(', ')
# ffile.write(name.strip())
# ffile.write('\n')
# linear mixed model data - this formatting could change if I change the linear model I'm using
else:
list = []
for line in range(0, len(lines)):
#print lines[line]
if lines[line].startswith("mutornot[T.wt] "):
#print lines[line]
if len(lines[line].split()) > 3:
pvalue = lines[line].split()[4]
coef = lines[line].split()[1]
if float(pvalue) == 0:
pvalue = 0.001
list.append((float(pvalue), float(coef), lines[line-13].strip()))
#list.append((float(pvalue), lines[line-13].strip(), lines[line].split()[1:6]))
# list2 = sorted(list, key=lambda x: x[0])
for fline in list:
#pdict[str(fline[2])] = (str(fline[0])[:8], str(fline[1])[:8])
pdict[str(fline[2])] = [str(fline[0])[:8], str(fline[1])[:8]]
#ffile.write(str(fline[0])[:8])
#ffile.write(', ')
#ffile.write(str(fline[1])[:8])
#ffile.write(', ')
#ffile.write(str(fline[2]))
#ffile.write('\n')
splitdict = {}
for k in pdict:
# k = ribgraph_mean_ribbonbout_dpixavebouttime_min_day1taps.png
# section = day1taps
# or section = adaytappostbdaytappre102
if k.startswith("ratio"):
continue
section = k.split('.')[0].split('_')[-1]
for k2 in nonstimcombos.keys():
# k2 = "Frequency of movement"
for v2 in nonstimcombos[k2]:
# v2 = numberofbouts_min
if v2 in k:
test = False
for k3 in splitdict.keys():
if (k2 + " " + section) == k3:
test = True
if test == False:
splitdict[k2 + " " + section] = []
splitdict[k2 + " " + section].append([k,pdict[k]])
else:
splitdict[k2 + " " + section].append([k,pdict[k]])
break
for sk2 in stimcombos.keys():
# sk2 = "Night light flash"
for sv2 in stimcombos[sk2]:
# sv2 = nightlightflash104
if sv2 == k.split('.')[0].split('_')[-1]:
# combining everything for these stimuli responses
test = False
for sk3 in splitdict.keys():
if sk2 == sk3:
test = True
if test == False:
splitdict[sk2] = []
splitdict[sk2].append([k,pdict[k]])
else:
splitdict[sk2].append([k,pdict[k]])
break
for skey in splitdict.keys():
lowest = 10
listints = []
cutpoint = 0.05
cutpointnumber = 3
if skey in stimcombos.keys():
cutpointnumber = 4
else:
cutpointnumber = 3
cutlist = []
for t in typecombos:
for tt in t:
if skey == tt:
#cutpointnumber = 4
#print "TEST", skey, t
import copy
shortt = copy.copy(t)
shortt.remove(tt)
#print shortt
for svey0 in splitdict[skey]:
if abs(float(svey0[1][0])) < cutpoint:
if "bigmovesribgraph_mean_ribbon_freqresponse_dpix_" in svey0[0] and "100b.png" in svey0[0]:
cutpointnumber = 0
#print "testing1 ", skey, svey0
for ttt in shortt:
for tsvey in splitdict[ttt]:
#print "testing3", ttt, tsvey
if '_'.join(svey0[0].split('.')[0].split('_')[:-1]) == '_'.join(tsvey[0].split('.')[0].split('_')[:-1]):
#print "testing4", ttt, tsvey, '_'.join(svey0[0].split('.')[0].split('_')[:-1]), '_'.join(tsvey[0].split('.')[0].split('_')[:-1])
if abs(float(tsvey[1][0])) < cutpoint:
#print "testing5", tsvey
cutpointnumber = 2
break
for svey in splitdict[skey]:
switch = False
for x in direction_swaps:
if x in svey[0]:
switch = True
if switch == False:
if float(svey[1][1]) > 0:
# change the sign of the original data
# if wt is moving more than mutant (>0), want signs swapped so mutant is over wt (ie, mutant moving less than wt has - number)
svey[1][0] = float(svey[1][0]) * -1
# else, data is fine as is
else: # switch == True
# in the cases where a switch is needed for the sign (such as interboutinterval because it's opposite when considering frequency)
if float(svey[1][1]) < 0: # if wt has greater interboutinterval and then the number is positive (ie, mutant moves more), don't swap, do swap if <
# change the sign of the original data
svey[1][0] = float(svey[1][0]) * -1
#lowest = 10
#listints = []
#cutpoint = 0.05
#cutpointnumber = 3
#cutlist = []
for svey in splitdict[skey]:
#print skey, svey
listints.append(float(svey[1][0]))
if abs(float(svey[1][0])) < abs(lowest):
lowest = float(svey[1][0])
if abs(float(svey[1][0])) < cutpoint:
cutlist.append(float(svey[1][0]))
ave = np.mean(np.absolute(np.asarray(listints)))
if lowest < 0:
ave = ave * -1
if len(cutlist) > cutpointnumber:
cutave = np.mean(np.absolute(np.asarray(cutlist)))
if lowest < 0:
cutave = cutave * -1
else:
cutave = ave
ffile.write("Lowest ")
ffile.write(skey)
ffile.write(": ")
ffile.write(str(lowest))
ffile.write('\n')
ffile.write("Average ")
ffile.write(skey)
ffile.write(": ")
ffile.write(str(ave))
ffile.write('\n')
ffile.write("Lowaverage (reg if not >")#3, <0.05) ")
ffile.write(str(cutpointnumber))
ffile.write(", <0.05) ")
ffile.write(skey)
ffile.write(": ")
ffile.write(str(cutave))
ffile.write('\n')
for svey in splitdict[skey]:
ffile.write(str(svey[0]))
ffile.write(', ')
ffile.write(str(svey[1][0]))
ffile.write(', ')
ffile.write(str(svey[1][1]))
ffile.write('\n')
#print splitdict
#ffile.write(k)
#ffile.write(', ')
#ffile.write(str(pdict[k][0]))
#ffile.write(', ')
#ffile.write(str(pdict[k][1]))
#ffile.write('\n')
|
sthyme/ZFSchizophrenia
|
BehaviorAnalysis/mergingbehaviordata/lmmanalysisave_septcut4and2ifsame.py
|
Python
|
mit
| 13,435 | 0.026424 |
# Copyright (c) 2008 Princeton University
# Copyright (c) 2009 Advanced Micro Devices, Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Steve Reinhardt
# Brad Beckmann
from m5.params import *
from m5.proxy import *
from BasicRouter import BasicRouter
class GarnetRouter_d(BasicRouter):
type = 'GarnetRouter_d'
cxx_class = 'Router_d'
vcs_per_vnet = Param.Int(Parent.vcs_per_vnet,
"virtual channels per virtual network")
virt_nets = Param.Int(Parent.number_of_virtual_networks,
"number of virtual networks")
|
aferr/LatticeMemCtl
|
src/mem/ruby/network/garnet/fixed-pipeline/GarnetRouter_d.py
|
Python
|
bsd-3-clause
| 2,044 | 0.001468 |
# Generated by Django 2.0.8 on 2018-10-05 19:45
from django.db import migrations
from django.core.exceptions import ObjectDoesNotExist
def cria_sistema_cultura(apps, schema_editor):
erros = []
SistemaCultura = apps.get_model('adesao', 'SistemaCultura')
Municipio = apps.get_model('adesao', 'Municipio')
Cidade = apps.get_model('adesao', 'Cidade')
EnteFederado = apps.get_model('adesao', 'EnteFederado')
Secretario = apps.get_model('adesao', 'Secretario')
Funcionario = apps.get_model('adesao', 'Funcionario')
Gestor = apps.get_model('adesao', 'Gestor')
Sede = apps.get_model('adesao', 'Sede')
Diligencia = apps.get_model('gestao', 'Diligencia')
DiligenciaSimples = apps.get_model('gestao', 'DiligenciaSimples')
Componente = apps.get_model('planotrabalho', 'Componente')
for municipio in Municipio.objects.all():
sistema_cultura = SistemaCultura()
sistema_cultura.gestor = Gestor.objects.create(
cpf=municipio.cpf_prefeito,
rg=municipio.rg_prefeito,
orgao_expeditor_rg=municipio.orgao_expeditor_rg,
estado_expeditor=municipio.estado_expeditor,
nome=municipio.nome_prefeito,
telefone_um=municipio.telefone_um,
telefone_dois=municipio.telefone_dois,
telefone_tres=municipio.telefone_tres,
email_institucional=municipio.email_institucional_prefeito,
tipo_funcionario=3,
termo_posse=municipio.termo_posse_prefeito,
rg_copia=municipio.rg_copia_prefeito,
cpf_copia=municipio.cpf_copia_prefeito
)
sistema_cultura.sede = Sede.objects.create(
localizacao=municipio.localizacao,
cnpj=municipio.cnpj_prefeitura,
endereco=municipio.endereco,
complemento=municipio.complemento,
cep=municipio.cep,
bairro=municipio.bairro,
telefone_um=municipio.telefone_um,
telefone_dois=municipio.telefone_dois,
telefone_tres=municipio.telefone_tres,
endereco_eletronico=municipio.endereco_eletronico
)
if municipio.cidade is None:
try:
sistema_cultura.ente_federado = EnteFederado.objects.get(cod_ibge=municipio.estado.codigo_ibge)
except EnteFederado.DoesNotExist:
ente = EnteFederado.objects.filter(nome__icontains=municipio.estado.nome_uf)
if not ente or len(ente) > 1:
print(f"Erro ao procurar UF {municipio.estado.nome_uf} - {municipio.estado.codigo_ibge}\n")
erros.append(municipio.estado.codigo_ibge)
pass
sistema_cultura.ente_federado = ente[0]
else:
try:
cidade = Cidade.objects.get(nome_municipio=municipio.cidade.nome_municipio, uf=municipio.estado)
sistema_cultura.ente_federado = EnteFederado.objects.get(cod_ibge=cidade.codigo_ibge)
except EnteFederado.DoesNotExist:
ente = EnteFederado.objects.filter(cod_ibge__startswith=cidade.codigo_ibge)
if not ente or len(ente) > 1:
print(f"Erro ao procurar Municipio {municipio.cidade.nome_municipio} - {municipio.cidade.codigo_ibge}\n")
erros.append(municipio.estado.codigo_ibge)
pass
sistema_cultura.ente_federado = ente[0]
componentes_antigos = ('criacao_sistema', 'orgao_gestor', 'conselho_cultural', 'plano_cultura')
componente_type = ('36', '37', '38', '40')
componentes_novos = ('legislacao', 'orgao_gestor', 'conselho', 'plano')
sistema_cultura.numero_processo = municipio.numero_processo
try:
sistema_cultura.cadastrador = municipio.usuario
sistema_cultura.estado_processo = municipio.usuario.estado_processo
sistema_cultura.data_publicacao_acordo = municipio.usuario.data_publicacao_acordo
sistema_cultura.link_publicacao_acordo = municipio.usuario.link_publicacao_acordo
sistema_cultura.processo_sei = municipio.usuario.processo_sei
if municipio.usuario.plano_trabalho:
diligencia = Diligencia.objects.filter(
componente_id=municipio.usuario.plano_trabalho.id,
componente_type_id=35).order_by('-data_criacao').first()
if diligencia:
sistema_cultura.diligencia = DiligenciaSimples.objects.create(
texto_diligencia=diligencia.texto_diligencia,
classificacao_arquivo=diligencia.classificacao_arquivo,
usuario=diligencia.usuario)
sistema_cultura.diligencia.save()
for nome_componente_antigo, nome_componente_novo, tipo_componente in zip(componentes_antigos, componentes_novos, componente_type):
if municipio.usuario.plano_trabalho:
componente_antigo = getattr(municipio.usuario.plano_trabalho, nome_componente_antigo)
if componente_antigo:
setattr(sistema_cultura, nome_componente_novo, Componente.objects.create())
componente_novo = getattr(sistema_cultura, nome_componente_novo)
componente_novo.tipo = componentes_novos.index(nome_componente_novo)
componente_novo.arquivo = componente_antigo.arquivo
componente_novo.situacao = componente_antigo.situacao.id
componente_novo.data_envio = componente_antigo.data_envio
componente_novo.data_publicacao = componente_antigo.data_publicacao
diligencia = Diligencia.objects.filter(
componente_id=componente_antigo.id,
componente_type_id=tipo_componente).order_by('-data_criacao').first()
if diligencia:
componente_novo.diligencia = DiligenciaSimples.objects.create(
texto_diligencia=diligencia.texto_diligencia,
classificacao_arquivo=diligencia.classificacao_arquivo,
usuario=diligencia.usuario)
componente_novo.save()
secretario = municipio.usuario.secretario
if secretario:
sistema_cultura.secretario = Funcionario.objects.create(cpf=secretario.cpf_secretario,
rg=secretario.rg_secretario, orgao_expeditor_rg=secretario.orgao_expeditor_rg,
estado_expeditor=secretario.estado_expeditor, nome=secretario.nome_secretario,
cargo=secretario.cargo_secretario, instituicao=secretario.instituicao_secretario,
telefone_um=secretario.telefone_um, telefone_dois=secretario.telefone_dois,
telefone_tres=secretario.telefone_tres,
email_institucional=secretario.email_institucional_secretario,
tipo_funcionario=0)
responsavel = municipio.usuario.responsavel
if responsavel:
sistema_cultura.responsavel = Funcionario.objects.create(cpf=responsavel.cpf_responsavel,
rg=responsavel.rg_responsavel, orgao_expeditor_rg=responsavel.orgao_expeditor_rg,
estado_expeditor=responsavel.estado_expeditor, nome=responsavel.nome_responsavel,
cargo=responsavel.cargo_responsavel, instituicao=responsavel.instituicao_responsavel,
telefone_um=responsavel.telefone_um, telefone_dois=responsavel.telefone_dois,
telefone_tres=responsavel.telefone_tres,
email_institucional=responsavel.email_institucional_responsavel,
tipo_funcionario=1)
except ObjectDoesNotExist:
sistema_cultura.estado_processo = 6
sistema_cultura.save()
class Migration(migrations.Migration):
dependencies = [
('planotrabalho', '0008_componente_data_publicacao'),
('gestao', '0006_remove_diligenciasimples_tipo_diligencia'),
('adesao', '0020_auto_20181008_1610'),
]
operations = [
migrations.RunPython(cria_sistema_cultura),
]
|
culturagovbr/sistema-nacional-cultura
|
adesao/migrations/0019_auto_20181005_1645.py
|
Python
|
agpl-3.0
| 8,604 | 0.005811 |
# -*- coding: utf-8 -*-
#
# hh_psc_alpha.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
"""Example using hh_psc_alpha
-------------------------------
This example produces a rate-response (FI) curve of the Hodgkin-Huxley
neuron in response to a range of different current (DC) stimulations.
The result is plotted using matplotlib.
Since a DC input affects only the neuron's channel dynamics, this routine
does not yet check correctness of synaptic response.
References
~~~~~~~~~~~
See Also
~~~~~~~~~~
:Authors:
KEYWORDS:
"""
import nest
import numpy as np
import matplotlib.pyplot as plt
nest.hl_api.set_verbosity('M_WARNING')
nest.ResetKernel()
simtime = 1000
# Amplitude range, in pA
dcfrom = 0
dcstep = 20
dcto = 2000
h = 0.1  # simulation step size in ms
neuron = nest.Create('hh_psc_alpha')
sd = nest.Create('spike_detector')
nest.SetStatus(sd, {'to_memory': False})
nest.Connect(neuron, sd, syn_spec={'weight': 1.0, 'delay': h})
# Simulation loop
n_data = int(dcto / float(dcstep))
amplitudes = np.zeros(n_data)
event_freqs = np.zeros(n_data)
for i, amp in enumerate(range(dcfrom, dcto, dcstep)):
nest.SetStatus(neuron, {'I_e': float(amp)})
print("Simulating with current I={} pA".format(amp))
nest.Simulate(1000) # one second warm-up time for equilibrium state
nest.SetStatus(sd, {'n_events': 0}) # then reset spike counts
nest.Simulate(simtime) # another simulation call to record firing rate
n_events = nest.GetStatus(sd, keys={'n_events'})[0][0]
amplitudes[i] = amp
event_freqs[i] = n_events / (simtime / 1000.)
plt.plot(amplitudes, event_freqs)
plt.show()
|
terhorstd/nest-simulator
|
pynest/examples/hh_psc_alpha.py
|
Python
|
gpl-2.0
| 2,263 | 0 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'OrganizationOption'
db.create_table('sentry_organizationoptions', (
('id', self.gf('sentry.db.models.fields.bounded.BoundedBigAutoField')(primary_key=True)),
('organization', self.gf('sentry.db.models.fields.foreignkey.FlexibleForeignKey')(to=orm['sentry.Organization'])),
('key', self.gf('django.db.models.fields.CharField')(max_length=64)),
('value', self.gf('sentry.db.models.fields.pickle.UnicodePickledObjectField')()),
))
db.send_create_signal('sentry', ['OrganizationOption'])
# Adding unique constraint on 'OrganizationOption', fields ['organization', 'key']
db.create_unique('sentry_organizationoptions', ['organization_id', 'key'])
def backwards(self, orm):
# Removing unique constraint on 'OrganizationOption', fields ['organization', 'key']
db.delete_unique('sentry_organizationoptions', ['organization_id', 'key'])
# Deleting model 'OrganizationOption'
db.delete_table('sentry_organizationoptions')
models = {
'sentry.activity': {
'Meta': {'object_name': 'Activity'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Event']", 'null': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True'})
},
'sentry.apikey': {
'Meta': {'object_name': 'ApiKey'},
'allowed_origins': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'default': "'Default'", 'max_length': '64', 'blank': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Organization']"}),
'scopes': ('django.db.models.fields.BigIntegerField', [], {'default': 'None'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.auditlogentry': {
'Meta': {'object_name': 'AuditLogEntry'},
'actor': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_actors'", 'null': 'True', 'to': "orm['sentry.User']"}),
'actor_key': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.ApiKey']", 'null': 'True', 'blank': 'True'}),
'actor_label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'target_object': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'target_user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'audit_targets'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.authidentity': {
'Meta': {'unique_together': "(('auth_provider', 'ident'), ('auth_provider', 'user'))", 'object_name': 'AuthIdentity'},
'auth_provider': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.AuthProvider']"}),
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'last_synced': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_verified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.authprovider': {
'Meta': {'object_name': 'AuthProvider'},
'config': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'default_role': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'default_teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_sync': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']", 'unique': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'sync_time': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'})
},
'sentry.broadcast': {
'Meta': {'object_name': 'Broadcast'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'link': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'sentry.broadcastseen': {
'Meta': {'unique_together': "(('broadcast', 'user'),)", 'object_name': 'BroadcastSeen'},
'broadcast': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Broadcast']"}),
'date_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"})
},
'sentry.event': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'", 'index_together': "(('group', 'datetime'),)"},
'data': ('sentry.db.models.fields.node.NodeField', [], {'null': 'True', 'blank': 'True'}),
'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'time_spent': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'null': 'True'})
},
'sentry.eventmapping': {
'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.eventuser': {
'Meta': {'unique_together': "(('project', 'ident'), ('project', 'hash'))", 'object_name': 'EventUser', 'index_together': "(('project', 'email'), ('project', 'username'), ('project', 'ip_address'))"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'ip_address': ('django.db.models.fields.GenericIPAddressField', [], {'max_length': '39', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'username': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'})
},
'sentry.file': {
'Meta': {'object_name': 'File'},
'checksum': ('django.db.models.fields.CharField', [], {'max_length': '40', 'null': 'True'}),
'headers': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'path': ('django.db.models.fields.TextField', [], {'null': 'True'}),
'size': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True'}),
'storage': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True'}),
'storage_options': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'timestamp': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.group': {
'Meta': {'object_name': 'Group', 'db_table': "'sentry_groupedmessage'", 'index_together': "(('project', 'first_release'),)"},
'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}),
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']", 'null': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'level': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}),
'logger': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}),
'message': ('django.db.models.fields.TextField', [], {}),
'num_comments': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'null': 'True'}),
'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'score': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'time_spent_count': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'time_spent_total': ('sentry.db.models.fields.bounded.BoundedIntegerField', [], {'default': '0'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '1', 'db_index': 'True'})
},
'sentry.groupassignee': {
'Meta': {'object_name': 'GroupAssignee', 'db_table': "'sentry_groupasignee'"},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'unique': 'True', 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'assignee_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_assignee_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupbookmark': {
'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': "orm['sentry.User']"})
},
'sentry.groupemailthread': {
'Meta': {'unique_together': "(('email', 'group'), ('email', 'msgid'))", 'object_name': 'GroupEmailThread'},
'date': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'msgid': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'groupemail_set'", 'to': "orm['sentry.Project']"})
},
'sentry.grouphash': {
'Meta': {'unique_together': "(('project', 'hash'),)", 'object_name': 'GroupHash'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'})
},
'sentry.groupmeta': {
'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'value': ('django.db.models.fields.TextField', [], {})
},
'sentry.grouprulestatus': {
'Meta': {'unique_together': "(('rule', 'group'),)", 'object_name': 'GroupRuleStatus'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_active': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'rule': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Rule']"}),
'status': ('django.db.models.fields.PositiveSmallIntegerField', [], {'default': '0'})
},
'sentry.groupseen': {
'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'db_index': 'False'})
},
'sentry.grouptagkey': {
'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'},
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.grouptagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTagValue', 'db_table': "'sentry_messagefiltervalue'"},
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'to': "orm['sentry.Group']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'grouptag'", 'null': 'True', 'to': "orm['sentry.Project']"}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.helppage': {
'Meta': {'object_name': 'HelpPage'},
'content': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64', 'unique': 'True', 'null': 'True'}),
'priority': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.lostpasswordhash': {
'Meta': {'object_name': 'LostPasswordHash'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'unique': 'True'})
},
'sentry.option': {
'Meta': {'object_name': 'Option'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}),
'last_updated': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.organization': {
'Meta': {'object_name': 'Organization'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'default_role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'org_memberships'", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMember']", 'to': "orm['sentry.User']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.organizationaccessrequest': {
'Meta': {'unique_together': "(('team', 'member'),)", 'object_name': 'OrganizationAccessRequest'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'member': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationmember': {
'Meta': {'unique_together': "(('organization', 'user'), ('organization', 'email'))", 'object_name': 'OrganizationMember'},
'counter': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'null': 'True', 'blank': 'True'}),
'flags': ('django.db.models.fields.BigIntegerField', [], {'default': '0'}),
'has_global_access': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': "orm['sentry.Organization']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'member'", 'max_length': '32'}),
'teams': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['sentry.Team']", 'symmetrical': 'False', 'through': "orm['sentry.OrganizationMemberTeam']", 'blank': 'True'}),
'type': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '50', 'blank': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'sentry_orgmember_set'", 'null': 'True', 'to': "orm['sentry.User']"})
},
'sentry.organizationmemberteam': {
'Meta': {'unique_together': "(('team', 'organizationmember'),)", 'object_name': 'OrganizationMemberTeam', 'db_table': "'sentry_organizationmember_teams'"},
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'organizationmember': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.OrganizationMember']"}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.organizationoption': {
'Meta': {'unique_together': "(('organization', 'key'),)", 'object_name': 'OrganizationOption', 'db_table': "'sentry_organizationoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.project': {
'Meta': {'unique_together': "(('team', 'slug'), ('organization', 'slug'))", 'object_name': 'Project'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'first_event': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'team': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Team']"})
},
'sentry.projectkey': {
'Meta': {'object_name': 'ProjectKey'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': "orm['sentry.Project']"}),
'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'roles': ('django.db.models.fields.BigIntegerField', [], {'default': '1'}),
'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0', 'db_index': 'True'})
},
'sentry.projectoption': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.release': {
'Meta': {'unique_together': "(('project', 'version'),)", 'object_name': 'Release'},
'data': ('jsonfield.fields.JSONField', [], {'default': '{}'}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'date_released': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'date_started': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'new_groups': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'owner': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']", 'null': 'True', 'blank': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'ref': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
'sentry.releasefile': {
'Meta': {'unique_together': "(('release', 'ident'),)", 'object_name': 'ReleaseFile'},
'file': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.File']"}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'ident': ('django.db.models.fields.CharField', [], {'max_length': '40'}),
'name': ('django.db.models.fields.TextField', [], {}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'release': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Release']"})
},
'sentry.rule': {
'Meta': {'object_name': 'Rule'},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
},
'sentry.savedsearch': {
'Meta': {'unique_together': "(('project', 'name'),)", 'object_name': 'SavedSearch'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'query': ('django.db.models.fields.TextField', [], {})
},
'sentry.tagkey': {
'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'values_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.tagvalue': {
'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"},
'data': ('sentry.db.models.fields.gzippeddict.GzippedDictField', [], {'null': 'True', 'blank': 'True'}),
'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'times_seen': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '200'})
},
'sentry.team': {
'Meta': {'unique_together': "(('organization', 'slug'),)", 'object_name': 'Team'},
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'organization': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Organization']"}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}),
'status': ('sentry.db.models.fields.bounded.BoundedPositiveIntegerField', [], {'default': '0'})
},
'sentry.user': {
'Meta': {'object_name': 'User', 'db_table': "'auth_user'"},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedAutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '128'})
},
'sentry.useroption': {
'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'},
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']", 'null': 'True'}),
'user': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.User']"}),
'value': ('sentry.db.models.fields.pickle.UnicodePickledObjectField', [], {})
},
'sentry.userreport': {
'Meta': {'object_name': 'UserReport', 'index_together': "(('project', 'event_id'),)"},
'comments': ('django.db.models.fields.TextField', [], {}),
'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}),
'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'group': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Group']", 'null': 'True'}),
'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'project': ('sentry.db.models.fields.foreignkey.FlexibleForeignKey', [], {'to': "orm['sentry.Project']"})
}
}
complete_apps = ['sentry']
|
JackDanger/sentry
|
src/sentry/south_migrations/0212_auto__add_organizationoption__add_unique_organizationoption_organizati.py
|
Python
|
bsd-3-clause
| 40,363 | 0.008052 |
#------------------------------------------------------------------------------------------
#
# Copyright 2017 Robert Pengelly.
#
# This file is part of ppa-helper.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
#------------------------------------------------------------------------------------------
# coding: utf-8
from __future__ import unicode_literals
import collections
import os
import shutil
import subprocess  # needed by the stty fallback in compat_get_terminal_size
import sys
if sys.version_info >= (3, 0):
compat_getenv = os.getenv
compat_expanduser = os.path.expanduser
def compat_setenv(key, value, env=os.environ):
env[key] = value
else:
# Environment variables should be decoded with filesystem encoding.
# Otherwise it will fail if any non-ASCII characters present (see #3854 #3217 #2918)
def compat_getenv(key, default=None):
from .utils import get_filesystem_encoding
env = os.getenv(key, default)
if env:
env = env.decode(get_filesystem_encoding())
return env
# Python < 2.6.5 require kwargs to be bytes
try:
def _testfunc(x):
pass
_testfunc(**{'x': 0})
except TypeError:
def compat_kwargs(kwargs):
return dict((bytes(k), v) for k, v in kwargs.items())
else:
compat_kwargs = lambda kwargs: kwargs
if hasattr(shutil, 'get_terminal_size'): # Python >= 3.3
compat_get_terminal_size = shutil.get_terminal_size
else:
_terminal_size = collections.namedtuple('terminal_size', ['columns', 'lines'])
def compat_get_terminal_size(fallback=(80, 24)):
columns = compat_getenv('COLUMNS')
if columns:
columns = int(columns)
else:
columns = None
lines = compat_getenv('LINES')
if lines:
lines = int(lines)
else:
lines = None
if columns is None or lines is None or columns <= 0 or lines <= 0:
try:
sp = subprocess.Popen(
['stty', 'size'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
out, err = sp.communicate()
_lines, _columns = map(int, out.split())
except Exception:
_columns, _lines = _terminal_size(*fallback)
if columns is None or columns <= 0:
columns = _columns
if lines is None or lines <= 0:
lines = _lines
return _terminal_size(columns, lines)
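# --- Usage sketch (illustrative, not part of the original module) ---
# Queries the terminal size through whichever code path the running
# Python version selected above; both paths return an object with
# ``columns`` and ``lines`` attributes.
if __name__ == '__main__':
    size = compat_get_terminal_size()
    print('terminal: %d columns x %d lines' % (size.columns, size.lines))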
|
robertapengelly/ppa-helper
|
ppa_helper/compat.py
|
Python
|
gpl-3.0
| 3,031 | 0.002639 |
#!/usr/bin/python
import os
import shutil
from trac.util.compat import close_fds
# On Windows, shutil.rmtree doesn't remove files with the read-only
# attribute set, so this function explicitly removes it on every error
# before retrying. Even on Linux, shutil.rmtree chokes on read-only
# directories, so we use this version in all cases.
# Fix from http://bitten.edgewall.org/changeset/521
def rmtree(root):
"""Catch shutil.rmtree failures on Windows when files are read-only."""
def _handle_error(fn, path, excinfo):
os.chmod(path, 0666)
fn(path)
return shutil.rmtree(root, onerror=_handle_error)
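# --- Usage sketch (illustrative, not part of the original module) ---
# Creates a throwaway tree containing a read-only file; on Windows the
# read-only bit would make plain shutil.rmtree fail, but _handle_error
# clears it and retries.
if __name__ == '__main__':
    import tempfile
    root = tempfile.mkdtemp()
    readonly = os.path.join(root, 'readonly.txt')
    open(readonly, 'w').close()
    os.chmod(readonly, 0444)   # drop the write bit
    rmtree(root)               # succeeds thanks to the onerror handler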
|
i-rabot/tractogithub
|
tracformatter/trac/tests/functional/compat.py
|
Python
|
bsd-3-clause
| 650 | 0.003077 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('academics', '0016_student_auth_key'),
('courseevaluations', '0004_auto_20151208_1004'),
]
operations = [
migrations.RemoveField(
model_name='evaluable',
name='students',
),
migrations.RemoveField(
model_name='freeformquestionanswer',
name='question',
),
migrations.RemoveField(
model_name='multiplechoicequestionanswer',
name='student',
),
migrations.AddField(
model_name='evaluable',
name='student',
field=models.ForeignKey(to='academics.Student', default=None),
preserve_default=False,
),
]
|
rectory-school/rectory-apps
|
courseevaluations/migrations/0005_auto_20151208_1014.py
|
Python
|
mit
| 877 | 0 |
# test concurrent interning of strings
#
# MIT license; Copyright (c) 2016 Damien P. George on behalf of Pycom Ltd
import _thread
# function to check the interned string
def check(s, val):
assert type(s) == str
assert int(s) == val
# main thread function
def th(base, n):
for i in range(n):
# this will intern the string and check it
exec("check('%u', %u)" % (base + i, base + i))
with lock:
global n_finished
n_finished += 1
lock = _thread.allocate_lock()
n_thread = 4
n_finished = 0
n_qstr_per_thread = 100 # make 1000 for a more stressful test (uses more heap)
# spawn threads
for i in range(n_thread):
_thread.start_new_thread(th, (i * n_qstr_per_thread, n_qstr_per_thread))
# busy wait for threads to finish
while n_finished < n_thread:
pass
print('pass')
|
mhoffma/micropython
|
tests/thread/thread_qstr1.py
|
Python
|
mit
| 825 | 0.004848 |
# This file is part of MAUS: http://micewww.pp.rl.ac.uk/projects/maus
#
# MAUS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# MAUS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with MAUS. If not, see <http://www.gnu.org/licenses/>.
#
"""
Get KL calibrations from DB
"""
import cdb
import json
from Configuration import Configuration
from cdb._exceptions import CdbPermanentError
class GetCalib:
"""
Evaluator class to evaluate mathematical expressions
Able to see many simple math expressions and some common units; the
standard geant4 system of units is enabled.
"""
def __init__(self):
"""
Initialise the evaluator with math functions and units
"""
self._current_cali = {}
self.reset()
cfg = Configuration()
cfgdoc = cfg.getConfigJSON()
cfgdoc_json = json.loads(cfgdoc)
cdb_url = cfgdoc_json['cdb_download_url'] + 'calibration?wsdl'
self.cdb_server = cdb.Calibration()
self.cdb_server.set_url(cdb_url)
#print 'Server: ', self.cdb_server.get_name(), \
# self.cdb_server.get_version()
try:
cdb.Calibration().get_status()
except CdbPermanentError:
raise CdbPermanentError("CDB error")
def get_calib(self, devname, ctype, fromdate):
"""
Evaluate a string expression given by formula
"""
if devname != "" and ctype != "":
if devname != "KL" or ctype != "gain":
raise Exception('get_kl_calib failed. \
Invalid detector/calib type.')
# check whether we are asked for the current calibration
# or calibration for an older date
if fromdate == "" or fromdate == "current":
#print 'getting current calib', devname, ctype
try:
self._current_cali = \
self.cdb_server.get_current_calibration(devname, ctype)
except CdbPermanentError:
self._current_cali = "cdb_permanent_error"
else:
#print 'getting calib for date', fromdate
try:
self._current_cali = \
self.cdb_server.get_calibration_for_date(devname,
fromdate,
ctype)
except CdbPermanentError:
self._current_cali = "cdb_permanent_error"
#print self._current_cali
else:
raise Exception('get_kl_calib failed. No device/calibration type.')
return self._current_cali
def reset(self):
"""
Reinitialize calibration
"""
self._current_cali = {}
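# --- Usage sketch (illustrative, not part of the original module; it
# assumes a reachable CDB server and a valid MAUS configuration) ---
if __name__ == '__main__':
    getter = GetCalib()
    # Current gain calibration for the KL detector:
    calib = getter.get_calib("KL", "gain", "current")
    print calib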
|
mice-software/maus
|
src/common_py/calibration/get_kl_calib.py
|
Python
|
gpl-3.0
| 3,304 | 0.003329 |
"""
Client for the library API.
"""
class LibraryClient(object):
"""
Library API client.
"""
def __init__(self,axilent_connection):
self.content_resource = axilent_connection.resource_client('axilent.library','content')
self.api = axilent_connection.http_client('axilent.library')
def create_content(self,content_type,project,search_index=True,**field_data):
"""
Creates the content. Returns the new content item key in the format:
<content-type>:<content-key>
"""
response = self.content_resource.post(data={'content_type':content_type,
'project':project,
'search_index':search_index,
'content':field_data})
return response['created_content']
def update_content(self,content_type,project,content_key,search_index=True,reset_workflow=True,**field_data):
"""
Updates existing content.
"""
response = self.content_resource.put(data={'content_type':content_type,
'project':project,
'key':content_key,
'search_index':search_index,
'reset_workflow':reset_workflow,
'content':field_data})
return response['updated_content']
def ping(self,project,content_type):
"""
Tests connection with Axilent.
"""
return self.api.ping(project=project,content_type=content_type)
def index_content(self,project,content_type,content_key):
"""
Forces re-indexing of the specified content item.
"""
response = self.api.indexcontent(content_key=content_key,
project=project,
content_type=content_type)
return response['indexed']
def tag_content(self,project,content_type,content_key,tag,search_index=True):
"""
Tags the specified content item.
"""
response = self.api.tagcontent(project=project,
content_type=content_type,
content_key=content_key,
tag=tag,
search_index=search_index)
return response['tagged_content']
def detag_content(self,project,content_type,content_key,tag,search_index=True):
"""
De-tags the specified content item.
"""
response = self.api.detagcontent(project=project,
content_type=content_type,
content_key=content_key,
tag=tag,
search_index=search_index)
return response['removed_tag']
def archive_content(self,project,content_type,content_key):
"""
Archives the content on Axilent.
"""
response = self.content_resource.delete(params={'content_type':content_type,
'project':project,
'key':content_key})
return response['archived']
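# --- Usage sketch (illustrative, not part of the original module;
# ``connection`` stands for an already-configured axilent connection
# object, and the project/content names below are made up) ---
#
#   client = LibraryClient(connection)
#   key = client.create_content('Article', 'demo-project',
#                               title='Hello', body='First post')
#   # ``key`` comes back as '<content-type>:<content-key>'
#   content_type, content_key = key.split(':', 1)
#   client.tag_content('demo-project', content_type, content_key, 'news')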
|
aericson/Djax
|
pax/library.py
|
Python
|
bsd-3-clause
| 3,541 | 0.017509 |
import pyhwtherm
mytest = pyhwtherm.PyHWTherm(
username="someuser@example.com",
password="mysecretpassword",
deviceid=123456
)
print "login successful: ",mytest.login()
print "Get thermostat data:", mytest.updateStatus()
beforeChange = mytest.status
print "Status: ", beforeChange
mytest.tempHold("11:00",cool=78,heat=68)
mytest.submit()
print "Get thermostat data:", mytest.updateStatus()
afterChange = mytest.status
print "heat >>",beforeChange['latestData']['uiData']['HeatSetpoint'],"->",afterChange['latestData']['uiData']['HeatSetpoint']
print "cool >>",beforeChange['latestData']['uiData']['CoolSetpoint'],"->",afterChange['latestData']['uiData']['CoolSetpoint']
print "Logout", mytest.logout()
|
texnofobix/pyhwtherm
|
example.py
|
Python
|
mit
| 739 | 0.014885 |
#!/usr/bin/env python
# encoding: utf-8
import sqlite3
from sys import version_info
if version_info >= (3, 0, 0):
def listkey(dicts):
return list(dicts.keys())[0]
else:
def listkey(dicts):
return dicts.keys()[0]
class sqlitei:
    '''Thin encapsulation of common SQLite operations.'''
def __init__(self, path):
self.db = sqlite3.connect(path)
# self.db.text_factory = str
self.cs = self.db.cursor()
def commit(self):
self.db.commit()
def select(self, table, column, dump=None):
'''Select
table str, column list, dump dict.'''
columns = ','.join(column)
sql = 'select ' + columns + ' from ' + table
dumps = []
if dump:
dumpname = listkey(dump)
sql += ' where ' + dumpname + '=?'
dumps.append(dump[dumpname])
return self.cs.execute(sql, dumps)
def update(self, table, column, dump):
'''Update
table str, column dict, dump dict.'''
columns = []
columnx = ''
for c in column:
columnx += c + '=?,'
columns.append(column[c])
dumpname = listkey(dump)
sql = 'update ' + table + ' set '+ columnx[:-1] + ' where ' + dumpname + '=?'
columns.append(dump[dumpname])
return self.cs.execute(sql, columns)
def insert(self, table, column, dump):
'''Insert
table str, column list, dump list'''
dumps = ('?,'*len(dump))[:-1]
columns = ','.join(column)
sql = 'insert into ' + table + ' (' + columns + ') values (' +dumps + ')'
return self.cs.execute(sql, dump)
def delete(self, table, dump):
'''Delete
table str, dump dict'''
dumpname = listkey(dump)
sql = 'delete from ' + table + ' where ' + dumpname + '=?'
return self.cs.execute(sql, [dump[dumpname]])
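# --- Usage sketch (illustrative, not part of the original module) ---
# Exercises the dict/list calling conventions above against an
# in-memory database.  Values are passed as bound parameters, but table
# and column names are interpolated directly, so they must be trusted.
if __name__ == '__main__':
    db = sqlitei(':memory:')
    db.cs.execute('create table users (id integer, name text)')
    db.insert('users', ['id', 'name'], [1, 'alice'])
    db.update('users', {'name': 'bob'}, {'id': 1})
    for row in db.select('users', ['id', 'name'], {'id': 1}):
        print(row)
    db.commit()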
|
54Pany/gum
|
data/libdbs.py
|
Python
|
gpl-3.0
| 1,870 | 0.002674 |
"""Support for INSTEON dimmers via PowerLinc Modem."""
import logging
from homeassistant.helpers.entity import Entity
from .insteon_entity import InsteonEntity
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities, discovery_info=None):
"""Set up the INSTEON device class for the hass platform."""
insteon_modem = hass.data["insteon"].get("modem")
address = discovery_info["address"]
device = insteon_modem.devices[address]
state_key = discovery_info["state_key"]
_LOGGER.debug(
"Adding device %s entity %s to Sensor platform",
device.address.hex,
device.states[state_key].name,
)
new_entity = InsteonSensorDevice(device, state_key)
async_add_entities([new_entity])
class InsteonSensorDevice(InsteonEntity, Entity):
"""A Class for an Insteon device."""
|
postlund/home-assistant
|
homeassistant/components/insteon/sensor.py
|
Python
|
apache-2.0
| 876 | 0.001142 |
import unittest
import wire
class TestSQLString(unittest.TestCase):
def setUp(self):
'''Sets up the test case'''
self.sql = wire.SQLString
def test_pragma(self):
'''Tests the PRAGMA SQL generation'''
self.assertEqual(self.sql.pragma("INTEGRITY_CHECK(10)"), "PRAGMA INTEGRITY_CHECK(10)")
self.assertEqual(self.sql.checkIntegrity(5), "PRAGMA INTEGRITY_CHECK(5)")
def test_createTable(self):
'''Tests the CREATE TABLE SQL generation'''
table_outputs = ["CREATE TABLE test (id INT NOT NULL,username VARCHAR(255) DEFAULT 'default_user')",
"CREATE TABLE test (username VARCHAR(255) DEFAULT 'default_user',id INT NOT NULL)"]
temp_table_outputs = ["CREATE TEMPORARY TABLE test_temp (value REAL DEFAULT 0.0,time TIMESTAMP DEFAULT CURRENT_TIMESTAMP)",
"CREATE TEMPORARY TABLE test_temp (time TIMESTAMP DEFAULT CURRENT_TIMESTAMP,value REAL DEFAULT 0.0)"]
self.assertIn(self.sql.createTable("test", False, id = "INT", username = ["VARCHAR(255)", "'default_user'"]), table_outputs)
self.assertIn(self.sql.createTable("test_temp", True, value = ["REAL", 0.0], time = ["TIMESTAMP", "CURRENT_TIMESTAMP"]), temp_table_outputs)
# include a Temp table test (False --> True)
def test_dropTable(self):
'''Tests the DROP TABLE SQL generation'''
self.assertEqual(self.sql.dropTable("table_drop"), "DROP TABLE table_drop")
self.assertEqual(self.sql.dropTable("some_other_table"), "DROP TABLE some_other_table")
def test_renameTable(self):
'''Tests the ALTER TABLE RENAME SQL generation'''
self.assertEqual(self.sql.rename("orig_table", "new_table"), "ALTER TABLE orig_table RENAME TO new_table")
if __name__ == '__main__':
unittest.main()
|
panchr/wire
|
wire/tests/sqlstring_test.py
|
Python
|
gpl-3.0
| 1,665 | 0.027628 |
from glob import glob
import pylab as pyl
import h5py as hdf
files = glob('ML_predicted_masses*')
# get the power law masses
with hdf.File('../results_cluster.hdf5', 'r') as f:
dset = f[f.keys()[0]]
results = dset.value
# make a figure
f = pyl.figure(figsize=(6, 6 * (pyl.sqrt(5.) - 1.0) / 2.0))
ax = f.add_subplot(111)
i = 0
for fname, c, l in zip(files, ['#7a68a6', '#e24a33'],
                       ['$ML_{\sigma, N_{gals}}$, Flat HMF',
                        '$ML_{\sigma, N_{gals}}$']):
if i == 0:
i += 1
continue
    with hdf.File(fname, 'r') as f1:
dset = f1[f1.keys()[0]]
ML = dset.value
ax.errorbar(results['MASS'],
ML['ML_pred'],
xerr=results['MASS_err'],
yerr=ML['ML_pred_err'],
fmt='o',
color=c,
markersize=10,
                label=l)  # fname.rstrip('.hdf5')
ax.set_xlabel('Log M$_{pred, PL}$')
ax.set_ylabel('Log M$_{pred, ML}$')
ax.plot([12.5, 15.5], [12.5, 15.5], c='k', zorder=0)
handles, labels = ax.get_legend_handles_labels()
ax.legend(handles[::-1], labels[::-1], loc='upper left')
pyl.show()
|
boada/vpCluster
|
data/boada/analysis_all/MLmethods/plot_massComparison_scatter.py
|
Python
|
mit
| 1,151 | 0.002606 |
import unittest
from .context import json_stable_stringify_python as stringify
class TestStringify(unittest.TestCase):
def test_simple_object(self):
node = {'c':6, 'b': [4,5], 'a': 3, 'z': None}
actual = stringify.stringify(node)
expected = '{"a":3,"b":[4,5],"c":6,"z":null}'
self.assertEqual(actual, expected)
def test_object_with_empty_string(self):
node = {'a': 3, 'z': ''}
actual = stringify.stringify(node)
expected = '{"a":3,"z":""}'
self.assertEqual(actual, expected)
def test_nested_object(self):
node = {
'a': {
'b': {
'c': [1,2,3,None]
}
}
}
actual = stringify.stringify(node)
expected = '{"a":{"b":{"c":[1,2,3,null]}}}'
self.assertEqual(actual, expected)
def test_array_with_objects(self):
node = [{'z': 1, 'a': 2}]
actual = stringify.stringify(node)
expected = '[{"a":2,"z":1}]'
self.assertEqual(actual, expected)
def test_nested_array_objects(self):
node = [{'z': [[{'y': 1, 'b': 2}]], 'a': 2}]
actual = stringify.stringify(node)
expected = '[{"a":2,"z":[[{"b":2,"y":1}]]}]'
self.assertEqual(actual, expected)
def test_array_with_none(self):
node = [1, None]
actual = stringify.stringify(node)
expected = '[1,null]'
self.assertEqual(actual, expected)
def test_array_with_empty_string(self):
node = [1, '']
actual = stringify.stringify(node)
expected = '[1,""]'
self.assertEqual(actual, expected)
if __name__ == '__main__':
unittest.main()
|
haochi/json-stable-stringify-python
|
tests/test_stringify.py
|
Python
|
mit
| 1,700 | 0.004118 |
"""
Django settings for accountant project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
import os
import sys
from decimal import Decimal
import accounting
VERSION = accounting.VERSION
DISPLAY_VERSION = accounting.get_version()
DISPLAY_SHORT_VERSION = accounting.get_short_version()
BASE_DIR = os.path.dirname(os.path.dirname(__file__))
PROJECT_DIR = os.path.basename(BASE_DIR)
# Add the BASE_DIR to the path in order to reuse the apps easily
sys.path.append(BASE_DIR)
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'o7k)j*lewj6va4yqz=#1^z@6wtf!$#dx(u=z!3(351rc27c9fm'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
LOCAL_SERVER = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
SITE_ID = 1
SITE_MAIN_DOMAIN = 'example.com'
SITE_MAIN_NAME = 'example.com'
# Application definition
DJANGO_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
)
THIRD_PARTY_APPS = (
'djrill',
'crispy_forms',
'stronghold', # enforce login on the whole app
'avatar', # for user avatars
'allauth',
'allauth.account',
'allauth.socialaccount',
# social providers
# 'allauth.socialaccount.providers.github',
# 'allauth.socialaccount.providers.facebook',
# 'allauth.socialaccount.providers.twitter',
)
# Accounting apps
from accounting import get_apps
LOCAL_APPS = get_apps()
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
# Migrations
MIGRATION_MODULES = {
'sites': 'migrations.sites',
'socialaccount': 'migrations.socialaccount',
}
from accounting import ACCOUNTING_MIDDLEWARE_CLASSES
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'stronghold.middleware.LoginRequiredMiddleware',
) + ACCOUNTING_MIDDLEWARE_CLASSES
ROOT_URLCONF = 'accountant.urls'
WSGI_APPLICATION = 'wsgi.application'
# Emailing
DEFAULT_FROM_EMAIL = 'noreply@accountant.fr'
# Templates
# https://docs.djangoproject.com/en/1.7/ref/settings/#template-context-processors
from accounting import ACCOUNTING_TEMPLATE_CONTEXT_PROCESSORS
TEMPLATE_CONTEXT_PROCESSORS = (
'django.contrib.auth.context_processors.auth',
'allauth.account.context_processors.account',
'allauth.socialaccount.context_processors.socialaccount',
'django.core.context_processors.debug',
'django.core.context_processors.i18n',
'django.core.context_processors.media',
'django.core.context_processors.static',
'django.core.context_processors.tz',
'django.contrib.messages.context_processors.messages',
'django.core.context_processors.request',
) + ACCOUNTING_TEMPLATE_CONTEXT_PROCESSORS
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#template-loaders
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)
# See: https://docs.djangoproject.com/en/1.7/ref/settings/#template-dirs
from accounting import ACCOUNTING_MAIN_TEMPLATE_DIR
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
ACCOUNTING_MAIN_TEMPLATE_DIR,
)
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2',
'NAME': '',
'USER': '',
'PASSWORD': '',
'HOST': ''
}
}
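# NOTE: the connection credentials above are intentionally left blank;
# presumably an environment-specific settings module fills them in.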
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
# See: https://docs.djangoproject.com/en/1.7/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'assets'),
)
STATICFILES_FINDERS = (
"django.contrib.staticfiles.finders.FileSystemFinder",
"django.contrib.staticfiles.finders.AppDirectoriesFinder",
"djangobower.finders.BowerFinder",
)
# Media files
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
MEDIA_URL = '/media/'
# Bower config
BOWER_COMPONENTS_ROOT = os.path.abspath(os.path.join(BASE_DIR, 'components'))
BOWER_INSTALLED_APPS = (
'modernizr',
'jquery',
'bootstrap',
)
# Select 2
AUTO_RENDER_SELECT2_STATICS = False
SELECT2_BOOTSTRAP = True
# Custom User
LOGIN_REDIRECT_URL = 'connect:getting-started'
LOGIN_URL = 'account_login'
# Authentication
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend',
)
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_EMAIL_VERIFICATION = 'mandatory'
ACCOUNT_SIGNUP_PASSWORD_VERIFICATION = False
ACCOUNT_USERNAME_REQUIRED = False
# Stronghold
STRONGHOLD_PUBLIC_URLS = (
r'^%s.+$' % STATIC_URL,
r'^%s.+$' % MEDIA_URL,
r'^/accounts/.*$',
)
STRONGHOLD_PUBLIC_NAMED_URLS = (
)
# Forms
CRISPY_TEMPLATE_PACK = 'bootstrap3'
# Accounting
from accounting.defaults import *
|
dulaccc/Accountant
|
accountant/settings/common.py
|
Python
|
mit
| 5,873 | 0.000851 |
##############################################################################
#
# Copyright Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
import time
class Wait(object):
class TimeOutWaitingFor(Exception):
"A test condition timed out"
timeout = 9
wait = .01
def __init__(self, timeout=None, wait=None, exception=None,
getnow=(lambda: time.time), getsleep=(lambda: time.sleep)):
if timeout is not None:
self.timeout = timeout
if wait is not None:
self.wait = wait
if exception is not None:
self.TimeOutWaitingFor = exception
self.getnow = getnow
self.getsleep = getsleep
def __call__(self, func=None, timeout=None, wait=None, message=None):
if func is None:
return lambda func: self(func, timeout, wait, message)
if func():
return
now = self.getnow()
sleep = self.getsleep()
if timeout is None:
timeout = self.timeout
if wait is None:
wait = self.wait
wait = float(wait)
deadline = now() + timeout
while 1:
sleep(wait)
if func():
return
if now() > deadline:
raise self.TimeOutWaitingFor(
message or
getattr(func, '__doc__') or
getattr(func, '__name__')
)
wait = Wait()
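# Illustrative usage (not part of the original module; `container` is a
# hypothetical object):
#
#   wait(lambda: container.is_up(), timeout=5, wait=0.1,
#        message="container never came up")
#
# Because __call__ returns a decorator when func is None, the same wait can
# also be written as below; note the decorator runs the wait immediately at
# definition time:
#
#   @wait(timeout=5)
#   def container_is_up():
#       "container came up"
#       return container.is_up()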
|
dcm-oss/blockade
|
blockade/tests/util.py
|
Python
|
apache-2.0
| 1,956 | 0.001022 |
"""
Shogun demo
Fernando J. Iglesias Garcia
"""
import numpy as np
import matplotlib as mpl
import pylab
import util
from scipy import linalg
from shogun.Classifier import QDA
from shogun.Features import RealFeatures, MulticlassLabels
# colormap
cmap = mpl.colors.LinearSegmentedColormap('color_classes',
{'red': [(0, 1, 1),
(1, .7, .7)],
'green': [(0, 1, 1),
(1, .7, .7)],
'blue': [(0, 1, 1),
(1, .7, .7)]})
pylab.cm.register_cmap(cmap = cmap)
# Generate data from Gaussian distributions
def gen_data():
np.random.seed(0)
covs = np.array([[[0., -1. ], [2.5, .7]],
[[3., -1.5], [1.2, .3]],
[[ 2, 0 ], [ .0, 1.5 ]]])
X = np.r_[np.dot(np.random.randn(N, dim), covs[0]) + np.array([-4, 3]),
np.dot(np.random.randn(N, dim), covs[1]) + np.array([-1, -5]),
np.dot(np.random.randn(N, dim), covs[2]) + np.array([3, 4])];
Y = np.hstack((np.zeros(N), np.ones(N), 2*np.ones(N)))
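    # X stacks N samples from each of the three Gaussians -> shape (3*N, dim);
    # Y holds the matching class labels 0, 1 and 2.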
return X, Y
def plot_data(qda, X, y, y_pred, ax):
X0, X1, X2 = X[y == 0], X[y == 1], X[y == 2]
# Correctly classified
tp = (y == y_pred)
tp0, tp1, tp2 = tp[y == 0], tp[y == 1], tp[y == 2]
X0_tp, X1_tp, X2_tp = X0[tp0], X1[tp1], X2[tp2]
# Misclassified
X0_fp, X1_fp, X2_fp = X0[tp0 != True], X1[tp1 != True], X2[tp2 != True]
# Class 0 data
pylab.plot(X0_tp[:, 0], X0_tp[:, 1], 'o', color = cols[0])
pylab.plot(X0_fp[:, 0], X0_fp[:, 1], 's', color = cols[0])
m0 = qda.get_mean(0)
pylab.plot(m0[0], m0[1], 'o', color = 'black', markersize = 8)
# Class 1 data
pylab.plot(X1_tp[:, 0], X1_tp[:, 1], 'o', color = cols[1])
pylab.plot(X1_fp[:, 0], X1_fp[:, 1], 's', color = cols[1])
m1 = qda.get_mean(1)
pylab.plot(m1[0], m1[1], 'o', color = 'black', markersize = 8)
# Class 2 data
pylab.plot(X2_tp[:, 0], X2_tp[:, 1], 'o', color = cols[2])
pylab.plot(X2_fp[:, 0], X2_fp[:, 1], 's', color = cols[2])
m2 = qda.get_mean(2)
pylab.plot(m2[0], m2[1], 'o', color = 'black', markersize = 8)
def plot_cov(plot, mean, cov, color):
v, w = linalg.eigh(cov)
u = w[0] / linalg.norm(w[0])
angle = np.arctan(u[1] / u[0]) # rad
angle = 180 * angle / np.pi # degrees
    # Filled Gaussian ellipse at 2 standard deviations
ell = mpl.patches.Ellipse(mean, 2*v[0]**0.5, 2*v[1]**0.5, 180 + angle, color = color)
ell.set_clip_box(plot.bbox)
ell.set_alpha(0.5)
plot.add_artist(ell)
def plot_regions(qda):
nx, ny = 500, 500
x_min, x_max = pylab.xlim()
y_min, y_max = pylab.ylim()
xx, yy = np.meshgrid(np.linspace(x_min, x_max, nx),
np.linspace(y_min, y_max, ny))
dense = RealFeatures(np.array((np.ravel(xx), np.ravel(yy))))
dense_labels = qda.apply(dense).get_labels()
Z = dense_labels.reshape(xx.shape)
pylab.pcolormesh(xx, yy, Z)
pylab.contour(xx, yy, Z, linewidths = 3, colors = 'k')
# Number of classes
M = 3
# Number of samples of each class
N = 300
# Dimension of the data
dim = 2
cols = ['blue', 'green', 'red']
fig = pylab.figure()
ax = fig.add_subplot(111)
pylab.title('Quadratic Discriminant Analysis')
X, y = gen_data()
labels = MulticlassLabels(y)
features = RealFeatures(X.T)
qda = QDA(features, labels, 1e-4, True)
qda.train()
ypred = qda.apply().get_labels()
plot_data(qda, X, y, ypred, ax)
for i in range(M):
plot_cov(ax, qda.get_mean(i), qda.get_cov(i), cols[i])
plot_regions(qda)
pylab.connect('key_press_event', util.quit)
pylab.show()
|
ratschlab/ASP
|
examples/undocumented/python_modular/graphical/multiclass_qda.py
|
Python
|
gpl-2.0
| 3,312 | 0.038647 |
from floyd.client.base import FloydHttpClient
from floyd.model.version import CliVersion
from floyd.log import logger as floyd_logger
class VersionClient(FloydHttpClient):
"""
Client to get API version from the server
"""
def __init__(self):
self.url = "/cli_version"
super(VersionClient, self).__init__(skip_auth=True)
def get_cli_version(self):
response = self.request("GET", self.url)
data_dict = response.json()
floyd_logger.debug("CLI Version info: %s", data_dict)
return CliVersion.from_dict(data_dict)
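# Illustrative usage (sketch): VersionClient passes skip_auth=True to the
# base class, so no credentials are needed to query the endpoint.
#
#   cli_version = VersionClient().get_cli_version()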
|
houqp/floyd-cli
|
floyd/client/version.py
|
Python
|
apache-2.0
| 580 | 0 |
from ymir import app
app.run(debug=True, host='0.0.0.0', port=2841)
|
Ymir-RPG/ymir-api
|
run.py
|
Python
|
apache-2.0
| 70 | 0 |
"""
Scrapy - a web crawling and web scraping framework written for Python
"""
__all__ = ['__version__', 'version_info', 'twisted_version',
'Spider', 'Request', 'FormRequest', 'Selector', 'Item', 'Field']
# Scrapy version
import pkgutil
__version__ = pkgutil.get_data(__package__, 'VERSION').decode('ascii').strip()
version_info = tuple(int(v) if v.isdigit() else v
for v in __version__.split('.'))
del pkgutil
# Check minimum required Python version
import sys
if sys.version_info < (3, 5):
print("Scrapy %s requires Python 3.5" % __version__)
sys.exit(1)
# Ignore noisy twisted deprecation warnings
import warnings
warnings.filterwarnings('ignore', category=DeprecationWarning, module='twisted')
del warnings
# Apply monkey patches to fix issues in external libraries
from scrapy import _monkeypatches
del _monkeypatches
from twisted import version as _txv
twisted_version = (_txv.major, _txv.minor, _txv.micro)
# Declare top-level shortcuts
from scrapy.spiders import Spider
from scrapy.http import Request, FormRequest
from scrapy.selector import Selector
from scrapy.item import Item, Field
del sys
|
eLRuLL/scrapy
|
scrapy/__init__.py
|
Python
|
bsd-3-clause
| 1,151 | 0.007819 |
from django.db.models import Transform
from django.db.models import DateTimeField, TimeField
from django.utils.functional import cached_property
class TimeValue(Transform):
lookup_name = 'time'
function = 'time'
def as_sql(self, compiler, connection):
lhs, params = compiler.compile(self.lhs)
return 'TIME({})'.format(lhs), params
@cached_property
def output_field(self):
return TimeField()
DateTimeField.register_lookup(TimeValue)
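# Illustrative usage (assumed model/field names): once the lookup is
# registered, DateTimeField columns support time-of-day filters, e.g.
#
#   Event.objects.filter(created__time__gte=datetime.time(9, 0))
#
# as_sql() wraps the column in SQL TIME(...), and output_field tells the ORM
# to compare the result as a TimeField.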
|
mivanov-utwente/t4proj
|
t4proj/apps/stats/models.py
|
Python
|
bsd-2-clause
| 481 | 0.002079 |
from openerp.osv import fields,osv
import time
import openerp.addons.decimal_precision as dp
from openerp.tools.translate import _
from openerp import pooler
from openerp import netsvc
import base64
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
from openerp.addons.Edumedia_India import config
class sale_order(osv.osv):
def history(self, cr, uid, cases, keyword, history=False, subject=None, email=False, details=None, email_from=False, message_id=False, attach=[], context=None):
mailgate_pool = self.pool.get('mailgate.thread')
return mailgate_pool.history(cr, uid, cases, keyword, history=history,\
subject=subject, email=email, \
details=details, email_from=email_from,\
message_id=message_id, attach=attach, \
context=context)
def _get_partner_default_addr(self, cr, uid, ids, name, arg, context=None):
res = {}
for case in self.browse(cr, uid, ids, context=context):
addr = self.pool.get('res.partner').address_get(cr, uid, [case.partner_id.id], ['default'])
res[case.id] = addr['default']
return res
# def _amount_all(self, cr, uid, ids, field_name, arg, context=None):
# return super(sale_order,self)._amount_all(cr, uid, ids,field_name,arg,context=context)
def _get_class_details(self, cr, uid, ids, field_name, arg, context=None):
res = {}
for case in self.browse(cr, uid, ids, context=context):
res[case.id] = {
'tot_class': 0, 'low_std': 0, 'high_std': 0, 'tot_student' : 0, 'tot_sectn':0 }
cnt_class = l_std = h_std = studnts = sectns = 0
class_std = []
if case.class_ids:
for line in case.class_ids:
cnt_class += 1
class_std.append(line.ed_class)
studnts += line.ed_students
sectns += line.ed_sec
if class_std:
l_std = min(class_std)
h_std = max(class_std)
res[case.id]['tot_class'] = cnt_class
res[case.id]['low_std'] = l_std
res[case.id]['high_std'] = h_std
res[case.id]['tot_student'] = studnts
res[case.id]['tot_sectn'] = sectns
return res
# def _get_order(self, cr, uid, ids, context=None):
# result = {}
# for line in self.pool.get('sale.order.line').browse(cr, uid, ids, context=context):
# result[line.order_id.id] = True
# return result.keys()
def _get_delivry_ids(self, cr, uid, ids, field_name, arg, context=None):
delivry_obj = self.pool.get("stock.picking")
res = {}
for case in self.browse(cr,uid,ids,context):
res[case.id] = delivry_obj.search(cr, uid, [('sale_id', '=', case.id),('state','=','done')])
return res
_inherit='sale.order'
_columns={
# Overridden
'product_id': fields.many2one('product.product', 'Product', change_default=True,states={'draft': [('readonly', False)]}),
# 'amount_untaxed': fields.function(_amount_all, method=True, digits_compute= dp.get_precision('Sale Price'), string='Untaxed Amount',
# store = {
# 'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
# 'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
# },
# multi='sums', help="The amount without tax."),
# 'amount_tax': fields.function(_amount_all, method=True, digits_compute= dp.get_precision('Sale Price'), string='Taxes',
# store = {
# 'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
# 'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
# },
# multi='sums', help="The tax amount."),
# 'amount_total': fields.function(_amount_all, method=True, digits_compute= dp.get_precision('Sale Price'), string='Total',
# store = {
# 'sale.order': (lambda self, cr, uid, ids, c={}: ids, ['order_line'], 10),
# 'sale.order.line': (_get_order, ['price_unit', 'tax_id', 'discount', 'product_uom_qty'], 10),
# },
# multi='sums', help="The total amount."),
'state': fields.selection([
('draft', 'Quotation'),
# ('waiting_date', 'Waiting Schedule'),
# ('proposal_sent', 'Proposal Sent'),
# ('proposal_accept','Proposal Accepted'),
('manual', 'Manual In Progress'),
('progress', 'In Progress'),
('shipping_except', 'Shipping Exception'),
('invoice_except', 'Invoice Exception'),
('done', 'Done'),
('cancel', 'Cancelled'),
],'State',readonly=True),
# Extra Fields
'films_only':fields.boolean('Film License Only',readonly=True ,states={'draft': [('readonly', False)]}),
'address_ids':fields.many2many('res.partner','address_sale_rel','sale_id','address_id','Coordinator Details'),
'class_ids':fields.one2many('ed.class.details','sale_id','Class Details'),
'cap1_terms' : fields.char('Caption 1',size=100),
'cap1_text':fields.text('Caption Text',size=500),
'cap2_terms' : fields.char('Caption 2',size=100),
'cap2_text':fields.text('Caption Text',size=500),
'cap3_terms' : fields.char('Caption 3',size=100),
'cap3_text':fields.text('Caption Text',size=500),
'cap4_terms' : fields.char('Caption 4',size=100),
'cap4_text':fields.text('Caption Text',size=500),
'ed_type':fields.selection([('so','Sale Order'),('crm','CRM')],'Type'),
'ed_license':fields.selection(config.CLASS_STD,'License',readonly=True ,states={'draft': [('readonly', False)]}),
'rsn_reject' : fields.text('Relationship Manager Remarks',readonly=True ,states={'draft': [('readonly', False)]}),
'ed_proj':fields.char('Project',size=100),
'ed_cdd':fields.integer('No.Of.CDD',readonly=True ,states={'draft': [('readonly', False)]}),
'ed_rate':fields.integer('Rate',readonly=True ,states={'draft': [('readonly', False)]}),
'license_rate':fields.integer('Rate',readonly=True ,states={'draft': [('readonly', False)]}),
'nxt_payment_date' : fields.date('Next Payment Date'),
'licen_stdate' : fields.date('Start Date',readonly=True ,states={'draft': [('readonly', False)]}),
'licen_eddate' : fields.date('End Date',readonly=True ,states={'draft': [('readonly', False)]}),
'invoice_id' : fields.many2one('account.invoice','Invoice No',readonly=True),
'training_ids':fields.one2many('ed.training.grid','sale_id','Training'),
'message_ids': fields.one2many('mail.message', 'res_id', 'Messages', domain=[('model','=',_inherit)]),
'vw_address_ids':fields.one2many('vw.res.partner','sale_id','View Coordinator Details'),
        'vw_class_ids':fields.one2many('vw.ed.class.details','sale_id','View Class Details'),
'payment_ids' : fields.one2many('ed.payment','sale_id','ed.payment'),
'feedback_ids':fields.one2many('ed.feedback','sale_id','Feedback'),
'ed_pod_ids':fields.one2many('ed.product','sale_id','Product',states={'draft': [('readonly', False)]}),
'ed_serv_ids':fields.one2many('ed.service','sale_id','service',states={'draft': [('readonly', False)]}),
'hub_id' : fields.many2one('ed.hub','HUB',readonly=True,states={'draft': [('readonly', False)]}),
'partner_default_id': fields.function(_get_partner_default_addr, method=True, relation='res.partner', type="many2one", string='Default Contact', strore=True),
'tot_class' : fields.function(_get_class_details, string="Total Classes", method=True, store=True, type="integer", multi="class_details"),
'low_std' : fields.function(_get_class_details, string="Lowest Standard", method=True, store=True, type="integer", multi="class_details"),
'high_std' : fields.function(_get_class_details, string="Highest Standard", method=True, store=True, type="integer", multi="class_details"),
'tot_student' : fields.function(_get_class_details, string="Total Students", method=True, store=True, type="integer", multi="class_details"),
'tot_sectn' : fields.function(_get_class_details, string="Total Sections", method=True, store=True, type="integer", multi="class_details"),
'delivery_ids': fields.function(_get_delivry_ids, method=True, type='one2many', obj='stock.picking', string='Delivery Orders' ,readonly=True),
}
def _create_session(self, cr, uid, ids, context=None):
ses_obj = self.pool.get('ed.sessions')
for case in self.browse(cr, uid, ids):
ses_vals={
'sale_id':case.id,
'ed_so': case.name,
'ed_school':case.partner_id.name,
}
ses_obj.create(cr, uid,ses_vals)
return True
def _open_crm_form(self, cr, uid, ids, context=None):
models_data = self.pool.get('ir.model.data')
sale_order_form = models_data._get_id(
cr, uid, 'Edumedia_India', 'view_ed_sale_crm_form')
sale_order_tree = models_data._get_id(
cr, uid, 'Edumedia_India', 'view_ed_sale_tree')
if sale_order_form:
sale_order_form = models_data.browse(
cr, uid, sale_order_form, context=context).res_id
if sale_order_tree:
sale_order_tree = models_data.browse(
cr, uid, sale_order_tree, context=context).res_id
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'sale.order',
'view_id' : False,
'views' : [(sale_order_form, 'form'),
(sale_order_tree, 'tree'), ],
'type': 'ir.actions.act_window',
'res_id': ids[0]
}
# *************** Overwritten standard function *****************
def action_wait(self, cr, uid, ids, *args):
self.button_dummy(cr, uid, ids)
for o in self.browse(cr, uid, ids):
if (o.order_policy == 'manual'):
self.write(cr, uid, [o.id], {'state': 'manual', 'date_confirm': time.strftime('%Y-%m-%d')})
else:
self.write(cr, uid, [o.id], {'state': 'progress', 'date_confirm': time.strftime('%Y-%m-%d')})
self.pool.get('sale.order.line').button_confirm(cr, uid, [x.id for x in o.order_line])
message = _("The quotation '%s' has been converted to a sales order.") % (o.name,)
self.log(cr, uid, o.id, message)
self._create_session(cr, uid, ids)
partner_obj = self.pool.get('res.partner')
partner_obj.write(cr,uid,[o.partner_id.id],{'ed_sh_cinema':True})
# self._open_crm_form(cr, uid, ids)
return True
def button_2populateLines(self, cr, uid, ids, context=None):
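        # Descriptive note: this button rebuilds the quotation's detail
        # tables. It deletes the existing order lines, sub-products,
        # services and coordinator links, then (1) creates order lines for
        # the selected product plus the 'Films' and 'License' products,
        # (2) copies the product's sub-products/services into ed.product and
        # ed.service lines, (3) relinks the customer's 'contact' addresses,
        # and (4) seeds class rows for standards 1..8 when none exist.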
ordln_obj = self.pool.get('sale.order.line')
edProd_obj = self.pool.get('ed.product')
edServ_obj = self.pool.get('ed.service')
class_obj = self.pool.get('ed.class.details')
prod_obj = self.pool.get('product.product')
prod_id = []
for case in self.browse(cr, uid, ids):
cr.execute("delete from sale_order_line where order_id = %d"%(case.id))
cr.execute("delete from ed_product where sale_id = %d"%(case.id))
cr.execute("delete from ed_service where sale_id = %d"%(case.id))
cr.execute("delete from address_sale_rel where sale_id=%d"%(case.id))
prod_films = prod_obj.search(cr,uid,[('name_template','=','Films')],limit=1)
prod_license = prod_obj.search(cr,uid,[('name_template','=','License')],limit=1)
prod_id.append(case.product_id.id)
if prod_films:
prod_id.append(prod_films[0])
if prod_license:
prod_id.append(prod_license[0])
# to create sale order lines on select of product
for prod in self.pool.get('product.product').browse(cr,uid,prod_id):
result = ordln_obj.product_id_change(cr, uid, ids, case.pricelist_id.id, prod.id, qty=0,
uom=False, qty_uos=0, uos=False, name='', partner_id=case.partner_id.id,
lang='lang' in context and context.get('lang')or False, update_tax=True, date_order=case.date_order, packaging=False, fiscal_position=False, flag=False)
prod_name = ''
if prod.name == 'Films':
prod_name = str(prod.name) + ' - License Fee'
lnvals = {
'product_id':prod.id,
'product_uom':prod.uom_id.id,
'name':prod_name or prod.name,
'price_unit':prod.list_price,
'order_id':case.id,
'tax_id' :[(6, 0, result['value']['tax_id'])],
}
ordln_id = ordln_obj.create(cr, uid, lnvals)
#to create lines of subproducts and service of main product
if prod.prod_ids:
for subprod in prod.prod_ids:
edProd_obj.create(cr, uid, {
'product_id':subprod.product_id.id,
'ed_qty': subprod.ed_qty,
'ed_per_id':subprod.ed_per_id.id,
'ed_class':subprod.ed_class,
'display_unit':subprod.product_id.display_unit,
'sale_id':case.id
})
for serv in prod.sevice_ids:
edServ_obj.create(cr, uid, {
'name':serv.name,
'sale_id':case.id
})
ordln_obj.write(cr,uid,[ordln_id],{})
#to create lines of address for selected customer
for add in case.partner_id.address:
if add.type == 'contact':
cr.execute("insert into address_sale_rel(sale_id, address_id) values(%d,%d)"%(case.id, add.id))
#to create class lines
if not case.class_ids:
for i in range(1,9):
class_obj.create(cr,uid,{'sale_id' : case.id,
'ed_class' : i,
'ed_boys':0,
'ed_girls':0,
'ed_sec':0,
'ed_students':0
},context)
return True
# ************************************ button to generate sale Dispatch order report***************************
def print_order_report(self, cr, uid, ids, context=None):
for case in self.browse(cr, uid, ids):
cr.execute(""" CREATE OR REPLACE VIEW vw_res_partner_address AS
select pa.id
, pa.name
, pa.mobile
, pa.email
, pa.ed_desig_id
, """ + str(case.id) + """ as sale_id
from res_partner_address pa
where pa.id in (select address_id from address_sale_rel
where sale_id = """ + str(case.id) + """);
CREATE OR REPLACE VIEW vw_ed_class_details AS
select cl.id
, cl.ed_class
, cl.ed_sec
, cl.ed_students
, cl.ed_boys
, case when cl.ed_class = 6 then sum(cl.ed_girls)
else 0 end as girls
, case when cl.ed_class = 7 then sum(cl.ed_boys)
else 0 end as boys
, cl.ed_girls
, """ + str(case.id) + """ as sale_id
from ed_class_details cl
where cl.id in (select cls_id from class_sale_rel
where sale_id = """ + str(case.id) + """)
group by cl.id,cl.ed_class ,cl.ed_sec, cl.ed_students, cl.ed_boys, cl.ed_girls """);
data = {}
data['ids'] = ids
data['model'] = 'sale.order'
return {
'report_name': 'sale.order.dispatch.order',
'type': 'ir.actions.report.xml',
'target': 'new',
'datas': data,
}
def print_proposal_report(self, cr, uid, ids, context=None):
""" button to generate proposal report """
# self._get_proposal_report(cr, uid, ids, context=None)
data = {}
data['ids'] = ids
data['model'] = 'sale.order'
return {
'report_name': 'sale.order.proposal',
'type': 'ir.actions.report.xml',
'target': 'new',
'datas': data,
}
# Modifying Standard Shipping create from Sale Order
# to create deliveries for school cinema process
def action_ship_create(self, cr, uid, ids, *args):
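        # Descriptive note: this override skips the standard picking
        # creation; pickings for the school-cinema flow are built later by
        # _create_delivery_order(), which manual_invoice() calls once the
        # invoice has been generated.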
wf_service = netsvc.LocalService("workflow")
prod_obj = self.pool.get('product.product')
company = self.pool.get('res.users').browse(cr, uid, uid).company_id
for order in self.browse(cr, uid, ids, context={}):
proc_ids = []
output_id = order.shop_id.warehouse_id.lot_output_id.id
val = {}
val['ed_type']='crm'
self.write(cr, uid, [order.id], val)
return True
def _create_delivery_order(self, cr, uid, ids, context=None):
picking_id = False
move_obj = self.pool.get('stock.move')
proc_obj = self.pool.get('procurement.order')
prod_obj = self.pool.get('product.product')
company = self.pool.get('res.users').browse(cr, uid, uid).company_id
for order in self.browse(cr, uid, ids, context={}):
proc_ids = []
output_id = order.shop_id.warehouse_id.lot_output_id.id
picking_id = move_id = False
count = 0
for line in order.order_line:
count += 1
proc_id = False
date_planned = datetime.now() + relativedelta(days=line.delay or 0.0)
date_planned = (date_planned - timedelta(days=company.security_lead)).strftime('%Y-%m-%d %H:%M:%S')
if line.product_id and line.product_id.product_tmpl_id.type in ('product', 'consu'):
location_id = order.shop_id.warehouse_id.lot_stock_id.id
if not picking_id and line.product_id == order.product_id and line.price_unit > 0:
pick_name = self.pool.get('ir.sequence').get(cr, uid, 'stock.picking.out')
picking_id = self.pool.get('stock.picking').create(cr, uid, {
'name': pick_name,
'origin': order.name,
'type': 'out',
'state': 'draft',
'move_type': order.picking_policy,
'sale_id': order.id,
'address_id': order.partner_shipping_id.id,
'note': order.note,
'invoice_state': (order.order_policy=='picking' and '2binvoiced') or 'none',
'company_id': order.company_id.id,
'service_type':'shl_cinema'
})
#Selecting the no of classes, sections and students
nof_class = nof_sec = nof_stud = 0
cr.execute('SELECT count(c.ed_class) as cls_count, sum(ed_sec) as sec, sum(ed_students) as stud FROM ed_class_details c WHERE c.sale_id = %d'%(order.id))
cls = cr.fetchone()
if cls:
if cls[0] == 0:
                                raise osv.except_osv(_('Warning'),_("Class details are missing. Please add them under Other Details."))
nof_class = cls[0]
nof_sec = cls[1]
nof_stud = cls[2]
#Looping through sub products against the option selected
cls_txt = ''
if not move_id:
for cl in order.class_ids:
if cl.ed_students > 0 and cl.wrk_bk_rate > 0 :
cls_txt += str(cl.ed_class) + ','
for subprod in order.ed_pod_ids:
qty = 0
if subprod.ed_class == cl.ed_class:
if subprod.ed_per_id.name == 'Student':
qty = cl.ed_students * subprod.ed_qty
if subprod.ed_per_id.name == 'Class':
qty = nof_class * subprod.ed_qty
if subprod.ed_per_id.name == 'Section':
qty = cl.ed_sec * subprod.ed_qty
if subprod.ed_per_id.name == 'Boys' and cl.ed_boys > 0 :
qty = cl.ed_boys * subprod.ed_qty
if subprod.ed_per_id.name == 'Girls' and cl.ed_girls > 0 :
qty = cl.ed_girls * subprod.ed_qty
#if subprod.ed_per_id.name:
if qty > 0:
move_id = self.pool.get('stock.move').create(cr, uid, {
'name': line.name[:64],
'picking_id': picking_id,
'product_id': subprod.product_id.id,
'date': date_planned,
'date_expected': date_planned,
'product_qty': qty,
'product_uom': subprod.product_id.uom_id.id,
'product_uos_qty': qty,
'product_uos': subprod.product_id.uom_id.id,
#'product_packaging': line.product_packaging.id,
#'address_id': line.address_allotment_id.id or order.partner_shipping_id.id,
'location_id': location_id,
'location_dest_id': output_id,
'sale_line_id': line.id,
'tracking_id': False,
'state': 'draft',
#'state': 'waiting',
'note': line.notes,
'company_id': order.company_id.id,
})
#updating license details to stock picking
cls_txt = cls_txt[0:(len(cls_txt) - 1)]
if picking_id:
self.pool.get('stock.picking').write(cr, uid, [picking_id],{'license_detls':"License Start Date :" + str(order.licen_stdate) +
", License End Date :" + str(order.licen_eddate) +
", Class :" + cls_txt})
if count == 3:
cls_txt = ''
for cl in order.class_ids:
if cl.films_rate > 0 :
cls_txt += str(cl.ed_class) + ','
cls_txt = cls_txt[0:(len(cls_txt) - 1)]
# creating additional deliver order for HDD media
pick_name = self.pool.get('ir.sequence').get(cr, uid, 'stock.picking.out')
picking_id = self.pool.get('stock.picking').create(cr, uid, {
'name': pick_name,
'origin': order.name,
'type': 'out',
'state': 'draft',
'move_type': order.picking_policy,
'sale_id': order.id,
'address_id': order.partner_shipping_id.id,
'note': order.note,
'invoice_state': (order.order_policy=='picking' and '2binvoiced') or 'none',
'company_id': order.company_id.id,
'license_detls' :"License Start Date :" + str(order.licen_stdate) +
", License End Date :" + str(order.licen_eddate) +
", Class :" + cls_txt,
'service_type':'shl_cinema'
})
products = prod_obj.search(cr,uid,[('categ_id', '=', 'HDD')],limit=1)
for prod in prod_obj.browse(cr,uid,products):
move_id = self.pool.get('stock.move').create(cr, uid, {
'name': line.name[:64],
'picking_id': picking_id,
'product_id': prod.id,
'date': date_planned,
'date_expected': date_planned,
'product_qty': order.ed_cdd,
'product_uom': prod.uom_id.id,
'product_uos_qty': order.ed_cdd,
'product_uos': prod.uom_id.id,
#'product_packaging': line.product_packaging.id,
#'address_id': line.address_allotment_id.id or order.partner_shipping_id.id,
'location_id': location_id,
'location_dest_id': output_id,
'sale_line_id': line.id,
'tracking_id': False,
'state': 'draft',
#'state': 'waiting',
'note': line.notes,
'company_id': order.company_id.id,
})
if line.product_id:
proc_id = self.pool.get('procurement.order').create(cr, uid, {
'name': line.name,
'origin': order.name,
'date_planned': date_planned,
'product_id': line.product_id.id,
'product_qty': line.product_uom_qty,
'product_uom': line.product_uom.id,
'product_uos_qty': (line.product_uos and line.product_uos_qty)\
or line.product_uom_qty,
'product_uos': (line.product_uos and line.product_uos.id)\
or line.product_uom.id,
'location_id': order.shop_id.warehouse_id.lot_stock_id.id,
'procure_method': line.type,
'move_id': move_id,
'property_ids': [(6, 0, [x.id for x in line.property_ids])],
'company_id': order.company_id.id,
})
proc_ids.append(proc_id)
self.pool.get('sale.order.line').write(cr, uid, [line.id], {'procurement_id': proc_id})
if order.state == 'shipping_except':
for pick in order.picking_ids:
for move in pick.move_lines:
if move.state == 'cancel':
mov_ids = move_obj.search(cr, uid, [('state', '=', 'cancel'),('sale_line_id', '=', line.id),('picking_id', '=', pick.id)])
if mov_ids:
for mov in move_obj.browse(cr, uid, mov_ids):
move_obj.write(cr, uid, [move_id], {'product_qty': mov.product_qty, 'product_uos_qty': mov.product_uos_qty})
proc_obj.write(cr, uid, [proc_id], {'product_qty': mov.product_qty, 'product_uos_qty': mov.product_uos_qty})
if order.state == 'shipping_except':
val['state'] = 'progress'
val['shipped'] = False
if (order.order_policy == 'manual'):
for line in order.order_line:
if (not line.invoiced) and (line.state not in ('cancel', 'draft')):
val['state'] = 'manual'
break
return True
#inherited
def _make_invoice(self, cr, uid, order, lines, context=None):
accinv_obj = self.pool.get('account.invoice')
invln_obj = self.pool.get('account.invoice.line')
res = super(sale_order, self)._make_invoice(cr, uid, order,lines, context=context)
accinv_obj.write(cr,uid,[res],{'sale_id':order.id})
invln_ids = invln_obj.search(cr,uid,[('invoice_id','=',res)])
invln_obj.write(cr,uid,invln_ids,{})
return res
# Overridden
def manual_invoice(self, cr, uid, ids, context=None):
mod_obj = self.pool.get('ir.model.data')
wf_service = netsvc.LocalService("workflow")
inv_ids = set()
inv_ids1 = set()
for id in ids:
for record in self.pool.get('sale.order').browse(cr, uid, id).invoice_ids:
inv_ids.add(record.id)
# inv_ids would have old invoices if any
for id in ids:
wf_service.trg_validate(uid, 'sale.order', id, 'manual_invoice', cr)
for record in self.pool.get('sale.order').browse(cr, uid, id).invoice_ids:
inv_ids1.add(record.id)
inv_ids = list(inv_ids1.difference(inv_ids))
        res = mod_obj.get_object_reference(cr, uid, 'account', 'invoice_form')
        res_id = res and res[1] or False
        self.write(cr, uid, [id], {'invoice_id': inv_ids and inv_ids[0] or False})
self._create_delivery_order(cr, uid, ids, context)
return True
#overriden
def action_invoice_create(self, cr, uid, ids, grouped=False, states=['confirmed', 'done', 'exception'], date_inv = False, context=None):
res = False
invoices = {}
invoice_ids = []
picking_obj = self.pool.get('stock.picking')
invoice = self.pool.get('account.invoice')
obj_sale_order_line = self.pool.get('sale.order.line')
if context is None:
context = {}
# If date was specified, use it as date invoiced, usefull when invoices are generated this month and put the
# last day of the last month as invoice date
if date_inv:
context['date_inv'] = date_inv
for o in self.browse(cr, uid, ids, context=context):
lines = []
for line in o.order_line:
if line.price_unit > 0:
if line.invoiced:
continue
elif (line.state in states):
lines.append(line.id)
created_lines = obj_sale_order_line.invoice_line_create(cr, uid, lines)
if created_lines:
invoices.setdefault(o.partner_id.id, []).append((o, created_lines))
if not invoices:
for o in self.browse(cr, uid, ids, context=context):
for i in o.invoice_ids:
if i.state == 'draft':
return i.id
for val in invoices.values():
if grouped:
res = self._make_invoice(cr, uid, val[0][0], reduce(lambda x, y: x + y, [l for o, l in val], []), context=context)
invoice_ref = ''
for o, l in val:
invoice_ref += o.name + '|'
self.write(cr, uid, [o.id], {'state': 'progress'})
if o.order_policy == 'picking':
picking_obj.write(cr, uid, map(lambda x: x.id, o.picking_ids), {'invoice_state': 'invoiced'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (o.id, res))
invoice.write(cr, uid, [res], {'origin': invoice_ref, 'name': invoice_ref})
else:
for order, il in val:
for lin in il:#to split sale order lines and create seprate invoices
res = self._make_invoice(cr, uid, order, [lin], context=context)
invoice_ids.append(res)
self.write(cr, uid, [order.id], {'state': 'progress'})
if order.order_policy == 'picking':
picking_obj.write(cr, uid, map(lambda x: x.id, order.picking_ids), {'invoice_state': 'invoiced'})
cr.execute('insert into sale_order_invoice_rel (order_id,invoice_id) values (%s,%s)', (order.id, res))
return res
# Overridden
def button_dummy(self, cr, uid, ids, context=None):
ordln_obj=self.pool.get('sale.order.line')
edprod_obj=self.pool.get('ed.product')
for case in self.browse(cr,uid,ids):
if case.order_line:
for line in case.order_line:
ordln_obj.write(cr,uid,[line.id],{})
if case.ed_pod_ids:
for ep in case.ed_pod_ids:
edprod_obj.write(cr, uid, [ep.id], {})
self.write(cr, uid, [case.id], {})
return True
def button_proposal_sent(self,cr,uid,ids,context=None):
for case in self.browse(cr,uid,ids):
# self._get_proposal_report(cr, uid, ids, context=None)
data = {}
data['ids'] = ids
data['model'] = 'sale.order'
obj = netsvc.LocalService('report.' + 'sale.order.dispatch.order')
(result, format) = obj.create(cr, uid, ids, data, context)
doc_parent_id = self.pool.get('document.directory')._get_root_directory(cr,uid)
attachment_obj = self.pool.get('ir.attachment')
attval = {}
cr.execute("select id from ir_attachment where res_id = " + str(case.id) + " and res_model = 'sale.order' and name = '"+ str(case.name) +".pdf'")
file_att = cr.fetchall()
if not file_att:
attval = {
'res_model' : 'sale.order',
'res_name' : str(case.name),
'res_id' : str(case.id),
'db_datas' : str(result),
'type' : 'binary',
'file_type' : 'application/pdf',
'datas_fname': str(case.name) + ".pdf",
'name' : str(case.name) + ".pdf",
'file_size' : len(result),
'parent_id' : doc_parent_id,
'partner_id' : case.partner_id.id,
}
attachment_obj.create(cr,uid,attval)
else:
for f in file_att:
attval = {
'db_datas' : str(result),
'file_size' : len(result),
}
attachment_obj.write(cr,uid, [f[0]],attval)
return True
def button_proposal_accepted(self,cr,uid,ids,context=None):
self.write(cr, uid, ids, {'state':'proposal_accept'})
return True
def _open_sale_form(self, cr, uid, ids, context=None):
models_data = self.pool.get('ir.model.data')
sale_order_form = models_data._get_id(
cr, uid, 'Edumedia_India', 'view_ed_sale_form')
sale_order_tree = models_data._get_id(
cr, uid, 'Edumedia_India', 'view_ed_sale_tree')
if sale_order_form:
sale_order_form = models_data.browse(
cr, uid, sale_order_form, context=context).res_id
if sale_order_tree:
sale_order_tree = models_data.browse(
cr, uid, sale_order_tree, context=context).res_id
return {
'view_type': 'form',
'view_mode': 'form',
'res_model': 'sale.order',
'view_id' : False,
'views' : [(sale_order_form, 'form'),
(sale_order_tree, 'tree'), ],
'type': 'ir.actions.act_window',
'res_id': ids[0]
}
# Overridden:
def action_cancel_draft(self, cr, uid, ids, *args):
if not len(ids):
return False
for sale in self.browse(cr, uid, ids):
if sale.state == 'cancel':
l = len(sale.name)
if l > 5:
nxt_no = int(sale.name[8:(l-1)]) + 1
sale_name = sale.name[0:8] + str(nxt_no) + sale.name[(l-1):l]
else:
sale_name = str(sale.name) + ' (R1)'
self.write(cr, uid, ids, {'state': 'draft','ed_type':'so','name':sale_name, 'invoice_ids': [], 'shipped': 0})
else:
self.write(cr, uid, ids, {'state': 'draft','ed_type':'so', 'invoice_ids': [], 'shipped': 0})
cr.execute('select id from sale_order_line where order_id IN %s and state=%s', (tuple(ids), 'cancel'))
line_ids = map(lambda x: x[0], cr.fetchall())
self.pool.get('sale.order.line').write(cr, uid, line_ids, {'invoiced': False, 'state': 'draft', 'invoice_lines': [(6, 0, [])]})
wf_service = netsvc.LocalService("workflow")
for inv_id in ids:
# Deleting the existing instance of workflow for SO
wf_service.trg_delete(uid, 'sale.order', inv_id, cr)
wf_service.trg_create(uid, 'sale.order', inv_id, cr)
for (id,name) in self.name_get(cr, uid, ids):
message = _("The sales order '%s' has been set in draft state.") %(name,)
self.log(cr, uid, id, message)
# self._open_sale_form(cr, uid, ids)
return True
# Overridden:
def action_cancel(self, cr, uid, ids, context=None):
wf_service = netsvc.LocalService("workflow")
if context is None:
context = {}
sale_order_line_obj = self.pool.get('sale.order.line')
proc_obj = self.pool.get('procurement.order')
for sale in self.browse(cr, uid, ids, context=context):
for pick in sale.picking_ids:
if pick.state not in ('draft', 'cancel'):
raise osv.except_osv(
_('Could not cancel sales order !'),
                        _('You must first cancel all pickings attached to this sales order.'))
if pick.state == 'cancel':
for mov in pick.move_lines:
proc_ids = proc_obj.search(cr, uid, [('move_id', '=', mov.id)])
if proc_ids:
for proc in proc_ids:
wf_service.trg_validate(uid, 'procurement.order', proc, 'button_check', cr)
for r in self.read(cr, uid, ids, ['picking_ids']):
for pick in r['picking_ids']:
wf_service.trg_validate(uid, 'stock.picking', pick, 'button_cancel', cr)
for inv in sale.invoice_ids:
if inv.state not in ('draft', 'cancel'):
raise osv.except_osv(
_('Could not cancel this sales order !'),
_('You must first cancel all invoices attached to this sales order.'))
for r in self.read(cr, uid, ids, ['invoice_ids']):
for inv in r['invoice_ids']:
wf_service.trg_validate(uid, 'account.invoice', inv, 'invoice_cancel', cr)
sale_order_line_obj.write(cr, uid, [l.id for l in sale.order_line],
{'state': 'cancel'})
message = _("The sales order '%s' has been cancelled.") % (sale.name,)
self.log(cr, uid, sale.id, message)
if sale.ed_type == 'crm':
cr.execute("delete from ed_sessions where sale_id = %d"%(sale.id))
self.write(cr, uid, ids, {'state': 'cancel','ed_type':'so'})
return True
def write(self, cr, uid, ids,vals,context=None):
addr_obj = self.pool.get('res.partner')
partner_obj = self.pool.get('res.partner')
class_obj = self.pool.get('ed.class.details')
line_obj = self.pool.get('sale.order.line')
result = super(sale_order, self).write(cr, uid, ids, vals, context=context)
for case in self.browse(cr, uid, ids):
if case.address_ids:
for a in case.address_ids:
if not a.partner_id:
addr_obj.write(cr, uid, [a.id], {'partner_id': case.partner_id.id})
# sale_cls_ids = set()
# if case.class_ids:
# for c in case.class_ids:
# sale_cls_ids.add(c.id)
#
# part_cls_ids = new_cls_ids = set()
#
# partner = partner_obj.browse(cr,uid, case.partner_id.id)
# for pc in partner.ed_cls_ids:
# part_cls_ids.add(pc.id)
# new_cls_ids = sale_cls_ids - part_cls_ids
# class_ids = class_obj.search(cr,uid,[('sale_id','=',case.id)],order='ed_class')
tot_wb_price = tot_fl_price = avg_wb_price = avg_fl_price = 0.00
tot_stu = 0
for cl in case.class_ids:
if not case.films_only:
tot_wb_price += cl.wrk_bk_rate * cl.ed_students
tot_fl_price += cl.films_rate * cl.ed_students
tot_stu += cl.ed_students
if case.films_only:
tot_fl_price += cl.films_rate
avg_wb_price = tot_wb_price / (tot_stu or 1)
avg_fl_price = tot_fl_price / (tot_stu or 1)
lvals = {}
line_ids = line_obj.search(cr,uid,[('order_id','=',case.id)])
for ln in line_obj.browse(cr,uid,line_ids):
if ln.product_id.name_template == case.product_id.name:
lvals = {'price_unit':avg_wb_price,'ed_units':case.ed_cdd,'ed_per_depo':0}
if ln.product_id.name_template == 'Films':
lvals = {'price_unit':avg_fl_price}
if ln.product_id.name_template == 'License':
lvals = {'price_unit':case.license_rate}
line_obj.write(cr,uid,[ln.id],lvals)
#partner_obj.write(cr, uid, [partner.id], {'ed_cls_ids': [(6, 0, new_cls_ids)]})
# for n in new_cls_ids:
# cr.execute("insert into ed_partner_cls_rel(partner_id, class_id) values(%d,%d)"%(case.partner_id.id, n))
return result
sale_order()
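# Instantiating the osv class registers the model with the OpenERP ORM
# (pre-v7 declaration convention); the same applies to sale_order_line()
# below.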
class sale_order_line(osv.osv):
_inherit='sale.order.line'
# def _amount_line(self, cr, uid, ids, field_name, arg, context=None):
# tax_obj = self.pool.get('account.tax')
# cur_obj = self.pool.get('res.currency')
# res = {}
# if context is None:
# context = {}
# for line in self.browse(cr, uid, ids, context=context):
# price = line.price_unit * (1 - (line.discount or 0.0) / 100.0)
# taxes = tax_obj.compute_all(cr, uid, line.tax_id, price,line.product_uom_qty ,line.order_id.partner_invoice_id.id, line.product_id, line.order_id.partner_id)
#
# cur = line.order_id.pricelist_id.currency_id
# res[line.id] = cur_obj.round(cr, uid, cur, taxes['total']+line.ed_total)
# return res
def get_deposit_Total(self,cr,uid,ids,field_name,arg,context=None):
res = {}
sale_obj = self.pool.get('sale.order')
for case in self.browse(cr,uid,ids):
deposit = 0.00
if case.order_id:
sale = sale_obj.browse(cr,uid,case.order_id.id)
deposit = sale.ed_cdd * sale.ed_rate
res[case.id] = deposit
return res
def _get_students(self,cr,uid,ids,field_name,arg,context=None):
res={}
for case in self.browse(cr, uid, ids):
res[case.id]= 1
if case.product_id.name != 'License' and not case.order_id.films_only:
cr.execute('SELECT sum(ed_students) as stud FROM ed_class_details c \
WHERE c.sale_id =%d'%(case.order_id.id))
cls = cr.fetchone()
res[case.id]= cls and cls[0] or 1
return res
def _default_qty(self, cr, uid, context=None):
sale_id = context.get('sale_id', False)
if sale_id:
cr.execute('SELECT sum(ed_students) as stud FROM ed_class_details c \
WHERE c.sale_id =%d'%(sale_id))
cls = cr.fetchone()
return cls and cls[0] or 1
else:
return 1
_columns={
# Inherited
# 'price_subtotal': fields.function(_amount_line, method=True, string='Subtotal', digits_compute= dp.get_precision('Sale Price')),
# 'ed_total':fields.function(get_deposit_Total, method=True, string='Total Deposit', type='float', store=True, readonly=True),
'product_uom_qty':fields.function(_get_students, method=True, string='NO.Of Students', type='float', store=True),
'ed_units':fields.integer('No.Of.Units'),
'ed_per_depo':fields.integer('Deposit Per Unit'),
'notes': fields.text('Notes'),
}
_defaults={
'product_uom_qty':_default_qty
}
_order = 'id'
def invoice_line_create(self, cr, uid, ids, context=None):
if context is None:
context = {}
def _get_line_qty(line):
if (line.order_id.invoice_quantity=='order') or not line.procurement_id:
if line.product_uos:
return line.product_uos_qty or 0.0
return line.product_uom_qty
else:
return self.pool.get('procurement.order').quantity_get(cr, uid,
line.procurement_id.id, context=context)
def _get_line_uom(line):
if (line.order_id.invoice_quantity=='order') or not line.procurement_id:
if line.product_uos:
return line.product_uos.id
return line.product_uom.id
else:
return self.pool.get('procurement.order').uom_get(cr, uid,
line.procurement_id.id, context=context)
create_ids = []
sales = {}
for line in self.browse(cr, uid, ids, context=context):
if not line.invoiced:
if line.product_id:
a = line.product_id.product_tmpl_id.property_account_income.id
if not a:
a = line.product_id.categ_id.property_account_income_categ.id
if not a:
raise osv.except_osv(_('Error !'),
_('There is no income account defined ' \
'for this product: "%s" (id:%d)') % \
(line.product_id.name, line.product_id.id,))
else:
prop = self.pool.get('ir.property').get(cr, uid,
'property_account_income_categ', 'product.category',
context=context)
a = prop and prop.id or False
uosqty = _get_line_qty(line)
uos_id = _get_line_uom(line)
pu = 0.0
if uosqty:
pu = round(line.price_unit * line.product_uom_qty / uosqty,
self.pool.get('decimal.precision').precision_get(cr, uid, 'Sale Price'))
fpos = line.order_id.fiscal_position or False
a = self.pool.get('account.fiscal.position').map_account(cr, uid, fpos, a)
if not a:
raise osv.except_osv(_('Error !'),
_('There is no income category account defined in default Properties for Product Category or Fiscal Position is not defined !'))
inv_id = self.pool.get('account.invoice.line').create(cr, uid, {
'name': line.name,
'origin': line.order_id.name,
'account_id': a,
'price_unit': pu,
'quantity': uosqty,
'discount': line.discount,
'uos_id': uos_id,
'product_id': line.product_id.id or False,
'invoice_line_tax_id': [(6, 0, [x.id for x in line.tax_id])],
'note': line.notes,
'account_analytic_id': line.order_id.project_id and line.order_id.project_id.id or False,
})
cr.execute('insert into sale_order_line_invoice_rel (order_line_id,invoice_id) values (%s,%s)', (line.id, inv_id))
self.write(cr, uid, [line.id], {'invoiced': True})
sales[line.order_id.id] = True
create_ids.append(inv_id)
# Trigger workflow events
wf_service = netsvc.LocalService("workflow")
for sid in sales.keys():
wf_service.trg_write(uid, 'sale.order', sid, cr)
return create_ids
def onchange_Total(self, cr, uid, ids, ed_per_depo,ed_units):
res = {}
total_dep = 0
if ed_per_depo and ed_units:
total_dep = ed_per_depo * ed_units
res['ed_total'] = total_dep
return {'value':res}
# def onchange_subtotal(self, cr, uid, ids, price_unit,ed_total):
# res = {}
# subtotal_dep = 0
#
# if price_unit and ed_total:
# subtotal_dep = price_unit
#
# res['price_subtotal'] = subtotal_dep
#
# return {'value':res}
def create(self, cr, uid, vals, context=None):
return super(sale_order_line, self).create(cr, uid, vals, context=context)
def write(self, cr, uid, ids,vals,context=None):
return super(sale_order_line, self).write(cr, uid, ids, vals, context=context)
sale_order_line()
|
trabacus-softapps/docker-edumedia
|
additional_addons/Edumedia_India/ed_sale.py
|
Python
|
agpl-3.0
| 56,005 | 0.017034 |
# coding=utf-8
import sys
def read(client, vlans, cnames, print_warnings):
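    """Build hostdb-style text lines from the client's DNS data.

    (Docstring added for clarity; the format is inferred from the code
    below.) Output is a list of tab-separated "key<TAB>value" lines:
    cname/target pairs first (when requested), then one block per selected
    network with host, name, comment, alias and mac entries.
    """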
lines = []
if cnames:
for cname in client.list_cnames():
lines.append('cname\t' + cname['name'])
lines.append('target\t' + cname['canonical'])
lines.append('')
for vlan in client.list_vlans():
net = vlan['network']
if net not in vlans:
continue
lines.append('')
lines.append('network\t' + net)
for ip in client.list_vlan_ips(net):
types = ip['types']
addr = ip['ip_address']
lines.append('')
if types and 'HOST' not in types:
lines.append('host\t' + addr + '\t# in use as: '+', '.join(types))
continue
lines.append('host\t' + addr)
names = ip['names']
name = ''
extra = ''
if len(names) > 0:
name = names[0]
if len(names) > 1:
extra = '\t# additional names: ' + ', '.join(names[1:])
if print_warnings:
print('Warning! '+ addr + ' has several names. '
+ 'Adding extra names as file comment.',
file=sys.stderr)
if name:
append_line = 'name\t' + name
if extra:
append_line += extra
lines.append(append_line)
(comment, aliases) = client.get_host_info(name)
if comment:
lines.append('comment\t' + comment)
for alias in aliases:
lines.append('alias\t' + alias)
mac = ip['mac_address']
if mac and not name.startswith('dhcp'):
lines.append('mac\t' + mac)
return lines
|
booski/hostdb9
|
dns_reader.py
|
Python
|
gpl-2.0
| 1,853 | 0.001619 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._managed_clusters_operations import build_create_or_update_request_initial, build_delete_request_initial, build_get_access_profile_request, build_get_request, build_get_upgrade_profile_request, build_list_by_resource_group_request, build_list_cluster_admin_credentials_request, build_list_cluster_monitoring_user_credentials_request, build_list_cluster_user_credentials_request, build_list_request, build_reset_aad_profile_request_initial, build_reset_service_principal_profile_request_initial, build_rotate_cluster_certificates_request_initial, build_update_tags_request_initial
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ManagedClustersOperations:
"""ManagedClustersOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.containerservice.v2020_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
**kwargs: Any
) -> AsyncIterable["_models.ManagedClusterListResult"]:
"""Gets a list of managed clusters in the specified subscription.
Gets a list of managed clusters in the specified subscription. The operation returns properties
of each managed cluster.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagedClusterListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2020_04_01.models.ManagedClusterListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedClusterListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ManagedClusterListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.ContainerService/managedClusters'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ManagedClusterListResult"]:
"""Lists managed clusters in the specified subscription and resource group.
Lists managed clusters in the specified subscription and resource group. The operation returns
properties of each managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ManagedClusterListResult or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2020_04_01.models.ManagedClusterListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedClusterListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ManagedClusterListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters'} # type: ignore
@distributed_trace_async
async def get_upgrade_profile(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.ManagedClusterUpgradeProfile":
"""Gets upgrade profile for a managed cluster.
Gets the details of the upgrade profile for a managed cluster with a specified resource group
and name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedClusterUpgradeProfile, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2020_04_01.models.ManagedClusterUpgradeProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedClusterUpgradeProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_upgrade_profile_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.get_upgrade_profile.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagedClusterUpgradeProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_upgrade_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/upgradeProfiles/default'} # type: ignore
@distributed_trace_async
async def get_access_profile(
self,
resource_group_name: str,
resource_name: str,
role_name: str,
**kwargs: Any
) -> "_models.ManagedClusterAccessProfile":
"""Gets an access profile of a managed cluster.
Gets the accessProfile for the specified role name of the managed cluster with a specified
resource group and name. **WARNING**\ : This API will be deprecated. Instead use
`ListClusterUserCredentials
<https://docs.microsoft.com/en-us/rest/api/aks/managedclusters/listclusterusercredentials>`_ or
`ListClusterAdminCredentials
<https://docs.microsoft.com/en-us/rest/api/aks/managedclusters/listclusteradmincredentials>`_ .
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param role_name: The name of the role for managed cluster accessProfile resource.
:type role_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedClusterAccessProfile, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2020_04_01.models.ManagedClusterAccessProfile
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedClusterAccessProfile"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_access_profile_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
role_name=role_name,
template_url=self.get_access_profile.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagedClusterAccessProfile', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_access_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/accessProfiles/{roleName}/listCredential'} # type: ignore
@distributed_trace_async
async def list_cluster_admin_credentials(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.CredentialResults":
"""Gets cluster admin credential of a managed cluster.
Gets cluster admin credential of the managed cluster with a specified resource group and name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CredentialResults, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2020_04_01.models.CredentialResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CredentialResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_cluster_admin_credentials_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.list_cluster_admin_credentials.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('CredentialResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_cluster_admin_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterAdminCredential'} # type: ignore
@distributed_trace_async
async def list_cluster_user_credentials(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.CredentialResults":
"""Gets cluster user credential of a managed cluster.
Gets cluster user credential of the managed cluster with a specified resource group and name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CredentialResults, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2020_04_01.models.CredentialResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CredentialResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_cluster_user_credentials_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.list_cluster_user_credentials.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('CredentialResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_cluster_user_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterUserCredential'} # type: ignore
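    # Illustrative only (hypothetical caller code): CredentialResults carries a
    # `kubeconfigs` list of CredentialResult objects whose `value` holds the raw
    # kubeconfig bytes. A sketch of persisting the first user kubeconfig:
    #
    #     creds = await client.managed_clusters.list_cluster_user_credentials("my-rg", "my-aks")
    #     with open("kubeconfig", "wb") as f:
    #         f.write(creds.kubeconfigs[0].value)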
@distributed_trace_async
async def list_cluster_monitoring_user_credentials(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.CredentialResults":
"""Gets cluster monitoring user credential of a managed cluster.
Gets cluster monitoring user credential of the managed cluster with a specified resource group
and name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: CredentialResults, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2020_04_01.models.CredentialResults
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CredentialResults"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_cluster_monitoring_user_credentials_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.list_cluster_monitoring_user_credentials.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('CredentialResults', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_cluster_monitoring_user_credentials.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/listClusterMonitoringUserCredential'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> "_models.ManagedCluster":
"""Gets a managed cluster.
Gets the details of the managed cluster with a specified resource group and name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ManagedCluster, or the result of cls(response)
:rtype: ~azure.mgmt.containerservice.v2020_04_01.models.ManagedCluster
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedCluster"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagedCluster', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.ManagedCluster",
**kwargs: Any
) -> "_models.ManagedCluster":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedCluster"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagedCluster')
request = build_create_or_update_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('ManagedCluster', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('ManagedCluster', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}'} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.ManagedCluster",
**kwargs: Any
) -> AsyncLROPoller["_models.ManagedCluster"]:
"""Creates or updates a managed cluster.
Creates or updates a managed cluster with the specified configuration for agents and Kubernetes
version.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param parameters: Parameters supplied to the Create or Update a Managed Cluster operation.
:type parameters: ~azure.mgmt.containerservice.v2020_04_01.models.ManagedCluster
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerservice.v2020_04_01.models.ManagedCluster]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedCluster"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ManagedCluster', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}'} # type: ignore
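    # Illustrative only: the long-running-operation pattern documented above,
    # sketched from the caller's side (all argument values are hypothetical).
    # The coroutine returns an AsyncLROPoller; awaiting result() blocks until
    # the cluster reaches a terminal provisioning state:
    #
    #     poller = await client.managed_clusters.begin_create_or_update(
    #         "my-rg", "my-aks", managed_cluster_model)
    #     cluster = await poller.result()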
async def _update_tags_initial(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> "_models.ManagedCluster":
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedCluster"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'TagsObject')
request = build_update_tags_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
content_type=content_type,
json=_json,
template_url=self._update_tags_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ManagedCluster', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_update_tags_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}'} # type: ignore
@distributed_trace_async
async def begin_update_tags(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.TagsObject",
**kwargs: Any
) -> AsyncLROPoller["_models.ManagedCluster"]:
"""Updates tags on a managed cluster.
Updates a managed cluster with the specified tags.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param parameters: Parameters supplied to the Update Managed Cluster Tags operation.
:type parameters: ~azure.mgmt.containerservice.v2020_04_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either ManagedCluster or the result of
cls(response)
:rtype:
~azure.core.polling.AsyncLROPoller[~azure.mgmt.containerservice.v2020_04_01.models.ManagedCluster]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ManagedCluster"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._update_tags_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('ManagedCluster', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}'} # type: ignore
async def _delete_initial(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self._delete_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}'} # type: ignore
@distributed_trace_async
async def begin_delete(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes a managed cluster.
Deletes the managed cluster with a specified resource group and name.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}'} # type: ignore
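    # Illustrative only: passing `polling=False` (see the docstring above) skips
    # waiting for completion, and the operation can be resumed later from a saved
    # continuation token. A hypothetical sketch:
    #
    #     poller = await client.managed_clusters.begin_delete("my-rg", "my-aks", polling=False)
    #     token = poller.continuation_token()  # persist and resume later if needed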
async def _reset_service_principal_profile_initial(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.ManagedClusterServicePrincipalProfile",
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagedClusterServicePrincipalProfile')
request = build_reset_service_principal_profile_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
content_type=content_type,
json=_json,
template_url=self._reset_service_principal_profile_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_reset_service_principal_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetServicePrincipalProfile'} # type: ignore
@distributed_trace_async
async def begin_reset_service_principal_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.ManagedClusterServicePrincipalProfile",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Reset Service Principal Profile of a managed cluster.
Update the service principal Profile for a managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param parameters: Parameters supplied to the Reset Service Principal Profile operation for a
Managed Cluster.
:type parameters:
~azure.mgmt.containerservice.v2020_04_01.models.ManagedClusterServicePrincipalProfile
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_service_principal_profile_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_service_principal_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetServicePrincipalProfile'} # type: ignore
async def _reset_aad_profile_initial(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.ManagedClusterAADProfile",
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(parameters, 'ManagedClusterAADProfile')
request = build_reset_aad_profile_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
content_type=content_type,
json=_json,
template_url=self._reset_aad_profile_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_reset_aad_profile_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetAADProfile'} # type: ignore
@distributed_trace_async
async def begin_reset_aad_profile(
self,
resource_group_name: str,
resource_name: str,
parameters: "_models.ManagedClusterAADProfile",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Reset AAD Profile of a managed cluster.
Update the AAD Profile for a managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:param parameters: Parameters supplied to the Reset AAD Profile operation for a Managed
Cluster.
:type parameters: ~azure.mgmt.containerservice.v2020_04_01.models.ManagedClusterAADProfile
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._reset_aad_profile_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
parameters=parameters,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_reset_aad_profile.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/resetAADProfile'} # type: ignore
async def _rotate_cluster_certificates_initial(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_rotate_cluster_certificates_request_initial(
subscription_id=self._config.subscription_id,
resource_group_name=resource_group_name,
resource_name=resource_name,
template_url=self._rotate_cluster_certificates_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_rotate_cluster_certificates_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/rotateClusterCertificates'} # type: ignore
@distributed_trace_async
async def begin_rotate_cluster_certificates(
self,
resource_group_name: str,
resource_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Rotate certificates of a managed cluster.
Rotate certificates of a managed cluster.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param resource_name: The name of the managed cluster resource.
:type resource_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises: ~azure.core.exceptions.HttpResponseError
"""
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._rotate_cluster_certificates_initial(
resource_group_name=resource_group_name,
resource_name=resource_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_rotate_cluster_certificates.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/rotateClusterCertificates'} # type: ignore
|
Azure/azure-sdk-for-python
|
sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2020_04_01/aio/operations/_managed_clusters_operations.py
|
Python
|
mit
| 54,887 | 0.005265 |
"""
Provides XML parsing support.
"""
from django.conf import settings
from rest_framework.exceptions import ParseError
from rest_framework.parsers import BaseParser
import defusedxml.ElementTree as etree
class XMLCompanyParser(BaseParser):
"""
XML company parser.
"""
media_type = 'application/xml'
def parse(self, stream, media_type=None, parser_context=None):
"""
Parses the incoming bytestream as XML and returns the resulting data.
"""
parser_context = parser_context or {}
encoding = parser_context.get('encoding', settings.DEFAULT_CHARSET)
parser = etree.DefusedXMLParser(encoding=encoding)
try:
tree = etree.parse(stream, parser=parser, forbid_dtd=True)
except (etree.ParseError, ValueError) as exc:
raise ParseError(detail=str(exc))
data = self._xml_convert(tree.getroot())
return data
    def _xml_convert(self, element):
        """
        convert the xml `element` into the corresponding python object
        """
        # Map incoming XML tag names onto the dictionary keys expected by
        # the API; unknown tags are ignored.
        tag_to_key = {
            'id': 'id',
            'name': 'name',
            'web-address': 'website',
            'phone-number-office': 'phone',
            'phone-number-fax': 'fax',
            'address-one': 'address',
            'address-two': 'address1',
            'city': 'city',
            'state': 'state',
            'country': 'country',
            'time-zone-id': 'timezone',
            'locale': 'locale',
        }
        data = {}
        for field in element:
            key = tag_to_key.get(field.tag)
            if key is not None:
                data[key] = str(field.text)
        return data
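# Illustrative only: a minimal sketch of exercising the parser directly with a
# hypothetical payload (kept as comments so module behavior is unchanged; a
# configured Django settings module is assumed for DEFAULT_CHARSET):
#
#     import io
#     parser = XMLCompanyParser()
#     xml = b'<company><id>1</id><name>Acme</name><web-address>https://acme.example</web-address></company>'
#     data = parser.parse(io.BytesIO(xml))
#     # data == {'id': '1', 'name': 'Acme', 'website': 'https://acme.example'}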
|
bgroff/kala-app
|
django_kala/api/basecamp_classic/companies/parsers.py
|
Python
|
mit
| 2,236 | 0 |
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Provides the setup for the experiments."""
from pytorch_pretrained_bert import modeling
from pytorch_pretrained_bert import tokenization
import torch
import embeddings_helper
def setup_uncased(model_config):
"""Setup the uncased bert model.
Args:
model_config: The model configuration to be loaded.
Returns:
tokenizer: The tokenizer to be used to convert between tokens and ids.
model: The model that has been initialized.
device: The device to be used in this run.
embedding_map: Holding all token embeddings.
"""
# Load pre-trained model tokenizer (vocabulary)
tokenizer = tokenization.BertTokenizer.from_pretrained(model_config)
# Load pre-trained model (weights)
model = modeling.BertModel.from_pretrained(model_config)
_ = model.eval()
# Set up the device in use
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print('device : ', device)
model = model.to(device)
# Initialize the embedding map
embedding_map = embeddings_helper.EmbeddingMap(device, model)
return tokenizer, model, device, embedding_map
def setup_bert_vanilla(model_config):
"""Setup the uncased bert model without embedding maps.
Args:
model_config: The model configuration to be loaded.
Returns:
tokenizer: The tokenizer to be used to convert between tokens and ids.
model: The model that has been initialized.
device: The device to be used in this run.
"""
# Load pre-trained model tokenizer (vocabulary)
tokenizer = tokenization.BertTokenizer.from_pretrained(model_config)
# Load pre-trained model (weights)
model = modeling.BertModel.from_pretrained(model_config)
_ = model.eval()
# Set up the device in use
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print('device : ', device)
model = model.to(device)
return tokenizer, model, device
def setup_bert_mlm(model_config):
"""Setup the uncased bert model with classification head.
Args:
model_config: The model configuration to be loaded.
Returns:
tokenizer: The tokenizer to be used to convert between tokens and ids.
model: The model that has been initialized.
      device: The device to be used in this run.
      embedding_map: Holding all token embeddings.
    """
# Load pre-trained model tokenizer (vocabulary)
tokenizer = tokenization.BertTokenizer.from_pretrained(model_config)
# Load pre-trained model (weights)
    model = modeling.BertForMaskedLM.from_pretrained(model_config)
_ = model.eval()
# Set up the device in use
device = torch.device('cuda:0' if torch.cuda.is_available() else 'cpu')
print('device : ', device)
model = model.to(device)
# Initialize the embedding map
embedding_map = embeddings_helper.EmbeddingMap(device, model.bert)
return tokenizer, model, device, embedding_map
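# Illustrative only: how these helpers are typically combined (the checkpoint
# id is an assumption; any pytorch-pretrained-bert model name works):
#
#     tokenizer, model, device, embedding_map = setup_uncased('bert-base-uncased')
#     tokens = tokenizer.tokenize('hello world')
#     ids = tokenizer.convert_tokens_to_ids(tokens)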
|
PAIR-code/interpretability
|
text-dream/python/helpers/setup_helper.py
|
Python
|
apache-2.0
| 3,457 | 0.010703 |
from uber.tests import *
class TestCosts:
@pytest.fixture(autouse=True)
def mocked_prices(self, monkeypatch):
monkeypatch.setattr(c, 'get_oneday_price', Mock(return_value=10))
monkeypatch.setattr(c, 'get_attendee_price', Mock(return_value=20))
def test_badge_cost(self):
assert 10 == Attendee(badge_type=c.ONE_DAY_BADGE).badge_cost
assert 20 == Attendee().badge_cost
assert 30 == Attendee(overridden_price=30).badge_cost
assert 0 == Attendee(paid=c.NEED_NOT_PAY).badge_cost
assert 20 == Attendee(paid=c.PAID_BY_GROUP).badge_cost
def test_total_cost(self):
assert 20 == Attendee().total_cost
assert 25 == Attendee(amount_extra=5).total_cost
def test_amount_unpaid(self, monkeypatch):
monkeypatch.setattr(Attendee, 'total_cost', 50)
assert 50 == Attendee().amount_unpaid
assert 10 == Attendee(amount_paid=40).amount_unpaid
assert 0 == Attendee(amount_paid=50).amount_unpaid
assert 0 == Attendee(amount_paid=51).amount_unpaid
def test_age_discount(self, monkeypatch):
monkeypatch.setattr(Attendee, 'age_group_conf', {'discount': 5})
assert 15 == Attendee().total_cost
assert 20 == Attendee(amount_extra=5).total_cost
assert 10 == Attendee(overridden_price=10).total_cost
assert 15 == Attendee(overridden_price=10, amount_extra=5).total_cost
def test_age_free(self, monkeypatch):
monkeypatch.setattr(Attendee, 'age_group_conf', {'discount': 999}) # makes badge_cost free unless overridden_price is set
assert 0 == Attendee().total_cost
assert 5 == Attendee(amount_extra=5).total_cost
assert 10 == Attendee(overridden_price=10).total_cost
assert 15 == Attendee(overridden_price=10, amount_extra=5).total_cost
def test_is_unpaid():
assert Attendee().is_unpaid
assert Attendee(paid=c.NOT_PAID).is_unpaid
for status in [c.NEED_NOT_PAY, c.PAID_BY_GROUP, c.REFUNDED]:
assert not Attendee(paid=status).is_unpaid
# we may eventually want to make this a little more explicit;
# at the moment I'm basically just testing an implementation detail
def test_is_unassigned():
assert Attendee().is_unassigned
assert not Attendee(first_name='x').is_unassigned
def test_is_dealer():
assert not Attendee().is_dealer
assert Attendee(ribbon=c.DEALER_RIBBON).is_dealer
assert Attendee(badge_type=c.PSEUDO_DEALER_BADGE).is_dealer
# not all attendees in a dealer group are necessarily dealers
dealer_group = Group(tables=1)
assert not Attendee(group=dealer_group).is_dealer
def test_is_dept_head():
assert not Attendee().is_dept_head
assert Attendee(ribbon=c.DEPT_HEAD_RIBBON).is_dept_head
def test_unassigned_name(monkeypatch):
monkeypatch.setattr(Attendee, 'badge', 'BadgeType')
assert not Attendee().unassigned_name
assert not Attendee(group_id=1, first_name='x').unassigned_name
assert '[Unassigned BadgeType]' == Attendee(group_id=1).unassigned_name
def test_full_name(monkeypatch):
assert 'x y' == Attendee(first_name='x', last_name='y').full_name
monkeypatch.setattr(Attendee, 'unassigned_name', 'xxx')
assert 'xxx' == Attendee(first_name='x', last_name='y').full_name
def test_last_first(monkeypatch):
assert 'y, x' == Attendee(first_name='x', last_name='y').last_first
monkeypatch.setattr(Attendee, 'unassigned_name', 'xxx')
assert 'xxx' == Attendee(first_name='x', last_name='y').last_first
def test_badge():
assert Attendee().badge == 'Unpaid Attendee'
assert Attendee(paid=c.HAS_PAID).badge == 'Attendee'
assert Attendee(badge_num=123).badge == 'Unpaid Attendee'
assert Attendee(badge_num=123, paid=c.HAS_PAID).badge == 'Attendee #123'
assert Attendee(ribbon=c.VOLUNTEER_RIBBON).badge == 'Unpaid Attendee (Volunteer)'
def test_is_transferable(monkeypatch):
assert not Attendee(paid=c.HAS_PAID).is_transferable
monkeypatch.setattr(Attendee, 'is_new', False)
assert Attendee(paid=c.HAS_PAID).is_transferable
assert Attendee(paid=c.PAID_BY_GROUP).is_transferable
assert not Attendee(paid=c.NOT_PAID).is_transferable
assert not Attendee(paid=c.HAS_PAID, checked_in=datetime.now(UTC)).is_transferable
assert not Attendee(paid=c.HAS_PAID, badge_type=c.STAFF_BADGE).is_transferable
assert not Attendee(paid=c.HAS_PAID, badge_type=c.GUEST_BADGE).is_transferable
def test_is_not_transferable_trusted(monkeypatch):
monkeypatch.setattr(Attendee, 'is_new', False)
assert not Attendee(paid=c.HAS_PAID, trusted_depts=c.CONSOLE).is_transferable
def test_trusted_somewhere():
assert Attendee(trusted_depts='{},{}'.format(c.ARCADE, c.CONSOLE)).trusted_somewhere
assert Attendee(trusted_depts=str(c.CONSOLE)).trusted_somewhere
assert not Attendee(trusted_depts='').trusted_somewhere
def test_has_personalized_badge():
assert not Attendee().has_personalized_badge
assert Attendee(badge_type=c.STAFF_BADGE).has_personalized_badge
assert Attendee(badge_type=c.SUPPORTER_BADGE).has_personalized_badge
for badge_type in [c.ATTENDEE_BADGE, c.ONE_DAY_BADGE, c.GUEST_BADGE]:
assert not Attendee(badge_type=badge_type).has_personalized_badge
def test_takes_shifts():
assert not Attendee().takes_shifts
assert not Attendee(staffing=True).takes_shifts
assert Attendee(staffing=True, assigned_depts=c.CONSOLE).takes_shifts
assert not Attendee(staffing=True, assigned_depts=c.CON_OPS).takes_shifts
assert Attendee(staffing=True, assigned_depts=','.join(map(str, [c.CONSOLE, c.CON_OPS]))).takes_shifts
class TestUnsetVolunteer:
def test_basic(self):
a = Attendee(staffing=True, trusted_depts=c.CONSOLE, requested_depts=c.CONSOLE, assigned_depts=c.CONSOLE, ribbon=c.VOLUNTEER_RIBBON, shifts=[Shift()])
a.unset_volunteering()
assert not a.staffing and not a.trusted_somewhere and not a.requested_depts and not a.assigned_depts and not a.shifts and a.ribbon == c.NO_RIBBON
def test_different_ribbon(self):
a = Attendee(ribbon=c.DEALER_RIBBON)
a.unset_volunteering()
assert a.ribbon == c.DEALER_RIBBON
def test_staff_badge(self, monkeypatch):
with Session() as session:
monkeypatch.setattr(Attendee, 'session', Mock())
a = Attendee(badge_type=c.STAFF_BADGE, badge_num=123)
a.unset_volunteering()
assert a.badge_type == c.ATTENDEE_BADGE and a.badge_num is None
def test_affiliate_with_extra(self):
a = Attendee(affiliate='xxx', amount_extra=1)
a._misc_adjustments()
assert a.affiliate == 'xxx'
def test_affiliate_without_extra(self):
a = Attendee(affiliate='xxx')
a._misc_adjustments()
assert a.affiliate == ''
def test_amount_refunded_when_refunded(self):
a = Attendee(amount_refunded=123, paid=c.REFUNDED)
a._misc_adjustments()
assert a.amount_refunded == 123
def test_amount_refunded_when_not_refunded(self):
a = Attendee(amount_refunded=123)
a._misc_adjustments()
assert not a.amount_refunded
def test_badge_precon(self):
a = Attendee(badge_num=1)
a._misc_adjustments()
assert not a.checked_in
def test_badge_at_con(self, monkeypatch, at_con):
a = Attendee()
a._misc_adjustments()
assert not a.checked_in
a = Attendee(badge_num=1)
a._misc_adjustments()
assert a.checked_in
monkeypatch.setattr(Attendee, 'is_new', False)
a = Attendee(badge_num=1)
a._misc_adjustments()
assert a.checked_in
def test_names(self):
a = Attendee(first_name='nac', last_name='mac Feegle')
a._misc_adjustments()
assert a.full_name == 'Nac mac Feegle'
a = Attendee(first_name='NAC', last_name='mac feegle')
a._misc_adjustments()
assert a.full_name == 'Nac Mac Feegle'
class TestStaffingAdjustments:
@pytest.fixture(autouse=True)
def unset_volunteering(self, monkeypatch):
monkeypatch.setattr(Attendee, 'unset_volunteering', Mock())
return Attendee.unset_volunteering
@pytest.fixture(autouse=True)
def prevent_presave_adjustments(self, monkeypatch):
""" Prevent some tests from crashing on exit by not invoking presave_adjustements() """
monkeypatch.setattr(Attendee, 'presave_adjustments', Mock())
return Attendee.presave_adjustments
def test_dept_head_invariants(self):
a = Attendee(ribbon=c.DEPT_HEAD_RIBBON, assigned_depts=c.CONSOLE)
a._staffing_adjustments()
assert a.staffing
assert a.trusted_in(c.CONSOLE)
assert a.trusted_somewhere
assert a.badge_type == c.STAFF_BADGE
def test_staffing_still_trusted_assigned(self):
"""
        After applying staffing adjustments:
Any depts you are both trusted and assigned to should remain unchanged
"""
a = Attendee(staffing=True,
assigned_depts='{},{}'.format(c.CONSOLE, c.CON_OPS),
trusted_depts='{},{}'.format(c.CONSOLE, c.CON_OPS))
a._staffing_adjustments()
assert a.assigned_to(c.CONSOLE) and a.trusted_in(c.CONSOLE)
assert a.assigned_to(c.CON_OPS) and a.trusted_in(c.CON_OPS)
def test_staffing_no_longer_trusted_unassigned(self):
"""
        After applying staffing adjustments:
        1) Any depts you are trusted in but not assigned to, you should no longer be trusted in
2) Any depts you are assigned to but not trusted in, you should remain untrusted in
"""
a = Attendee(staffing=True,
assigned_depts='{},{}'.format(c.CONSOLE, c.CON_OPS),
trusted_depts='{},{}'.format(c.ARCADE, c.CON_OPS))
a._staffing_adjustments()
assert a.assigned_to(c.CONSOLE) and not a.trusted_in(c.CONSOLE)
assert not a.assigned_to(c.ARCADE) and not a.trusted_in(c.ARCADE)
assert a.assigned_to(c.CON_OPS) and a.trusted_in(c.CON_OPS)
def test_unpaid_dept_head(self):
a = Attendee(ribbon=c.DEPT_HEAD_RIBBON)
a._staffing_adjustments()
assert a.paid == c.NEED_NOT_PAY
def test_under_18_at_con(self, at_con, unset_volunteering):
a = Attendee(age_group=c.UNDER_18)
a._staffing_adjustments()
assert not unset_volunteering.called
def test_staffers_need_no_volunteer_ribbon(self):
a = Attendee(badge_type=c.STAFF_BADGE, ribbon=c.VOLUNTEER_RIBBON)
a._staffing_adjustments()
assert a.ribbon == c.NO_RIBBON
def test_staffers_can_have_other_ribbons(self):
a = Attendee(badge_type=c.STAFF_BADGE, ribbon=c.DEALER_RIBBON)
a._staffing_adjustments()
assert a.ribbon == c.DEALER_RIBBON
def test_no_to_yes_ribbon(self, unset_volunteering, prevent_presave_adjustments):
with Session() as session:
a = session.attendee(first_name='Regular', last_name='Attendee')
a.ribbon = c.VOLUNTEER_RIBBON
a._staffing_adjustments()
assert a.staffing
assert not unset_volunteering.called
def test_no_to_yes_volunteering(self, unset_volunteering, prevent_presave_adjustments):
with Session() as session:
a = session.attendee(first_name='Regular', last_name='Attendee')
a.staffing = True
a._staffing_adjustments()
assert a.ribbon == c.VOLUNTEER_RIBBON
assert not unset_volunteering.called
def test_yes_to_no_ribbon(self, unset_volunteering, prevent_presave_adjustments):
with Session() as session:
a = session.attendee(first_name='Regular', last_name='Volunteer')
a.ribbon = c.NO_RIBBON
a._staffing_adjustments()
assert unset_volunteering.called
def test_yes_to_no_volunteering(self, unset_volunteering, prevent_presave_adjustments):
with Session() as session:
a = session.attendee(first_name='Regular', last_name='Volunteer')
a.staffing = False
a._staffing_adjustments()
assert unset_volunteering.called
class TestBadgeAdjustments:
@pytest.fixture(autouse=True)
def mock_attendee_session(self, monkeypatch):
monkeypatch.setattr(Attendee, 'session', Mock())
Attendee.session.get_next_badge_num = Mock(return_value=123)
@pytest.fixture
def fully_paid(self, monkeypatch):
monkeypatch.setattr(Attendee, 'paid', c.HAS_PAID)
monkeypatch.setattr(Attendee, 'amount_unpaid', 0)
def test_group_to_attendee(self):
a = Attendee(badge_type=c.PSEUDO_GROUP_BADGE)
a._badge_adjustments()
assert a.badge_type == c.ATTENDEE_BADGE and a.ribbon == c.NO_RIBBON
def test_dealer_to_attendee(self):
a = Attendee(badge_type=c.PSEUDO_DEALER_BADGE)
a._badge_adjustments()
assert a.badge_type == c.ATTENDEE_BADGE and a.ribbon == c.DEALER_RIBBON
class TestStatusAdjustments:
def test_set_paid_to_complete(self):
a = Attendee(paid=c.HAS_PAID, badge_status=c.NEW_STATUS, first_name='Paid', placeholder=False)
a._status_adjustments()
assert a.badge_status == c.COMPLETED_STATUS
def test_set_comped_to_complete(self):
a = Attendee(paid=c.NEED_NOT_PAY, badge_status=c.NEW_STATUS, first_name='Paid', placeholder=False)
a._status_adjustments()
assert a.badge_status == c.COMPLETED_STATUS
def test_set_group_paid_to_complete(self, monkeypatch):
monkeypatch.setattr(Group, 'amount_unpaid', 0)
g = Group()
a = Attendee(paid=c.PAID_BY_GROUP, badge_status=c.NEW_STATUS, first_name='Paid', placeholder=False, group=g, group_id=g.id)
a._status_adjustments()
assert a.badge_status == c.COMPLETED_STATUS
def test_unpaid_group_not_completed(self, monkeypatch):
monkeypatch.setattr(Group, 'amount_unpaid', 100)
g = Group()
a = Attendee(paid=c.PAID_BY_GROUP, badge_status=c.NEW_STATUS, first_name='Paid', placeholder=False, group=g)
a._status_adjustments()
assert a.badge_status == c.NEW_STATUS
def test_placeholder_not_completed(self):
a = Attendee(paid=c.NEED_NOT_PAY, badge_status=c.NEW_STATUS, first_name='Paid', placeholder=True)
a._status_adjustments()
assert a.badge_status == c.NEW_STATUS
def test_unassigned_not_completed(self):
a = Attendee(paid=c.NEED_NOT_PAY, badge_status=c.NEW_STATUS, first_name='')
a._status_adjustments()
assert a.badge_status == c.NEW_STATUS
def test_banned_to_deferred(self, monkeypatch):
a = Attendee(paid=c.HAS_PAID, badge_status=c.NEW_STATUS, first_name='Paid', placeholder=False)
monkeypatch.setattr(Attendee, 'banned', True)
a._status_adjustments()
assert a.badge_status == c.WATCHED_STATUS
class TestLookupAttendee:
@pytest.fixture(autouse=True)
def searchable(self):
with Session() as session:
attendee = Attendee(
placeholder=True,
first_name='Searchable',
last_name='Attendee',
email='searchable@example.com',
zip_code='12345'
)
session.add(attendee)
session.add(Attendee(
placeholder=True,
first_name='Two First',
last_name='Names',
email='searchable@example.com',
zip_code='12345'
))
session.add(Attendee(
placeholder=True,
first_name='Two',
last_name='Last Names',
email='searchable@example.com',
zip_code='12345'
))
return attendee.id
def test_search_not_found(self):
with Session() as session:
pytest.raises(ValueError, session.lookup_attendee, 'Searchable Attendee', 'searchable@example.com', 'xxxxx')
pytest.raises(ValueError, session.lookup_attendee, 'XXX XXX', 'searchable@example.com', '12345')
pytest.raises(ValueError, session.lookup_attendee, 'Searchable Attendee', 'xxx', '12345')
def test_search_basic(self, searchable):
with Session() as session:
assert str(searchable) == session.lookup_attendee('Searchable Attendee', 'searchable@example.com', '12345').id
def test_search_case_insensitive(self, searchable):
with Session() as session:
assert str(searchable) == session.lookup_attendee('searchablE attendeE', 'seArchAble@exAmple.com', '12345').id
def test_search_multi_word_names(self):
with Session() as session:
assert session.lookup_attendee('Two First Names', 'searchable@example.com', '12345')
assert session.lookup_attendee('Two Last Names', 'searchable@example.com', '12345')
|
md1024/rams
|
uber/tests/models/test_attendee.py
|
Python
|
agpl-3.0
| 16,952 | 0.001829 |
from abc import ABCMeta, abstractmethod
class AbstractConsumer(metaclass=ABCMeta):
"""
This class provides facilities to create and manage queue consumers. To
create a consumer, subclass this class and override the :meth:`run`
method. Then, instantiate the class with the desired parameters and call
:meth:`declare` to declare the consumer to the server.
Example::
class Consumer(AbstractConsumer):
def run(self, msg: Message):
print('Received message: {}'.format(msg.body))
msg.ack()
c1 = Consumer(ch, 'test.q')
c1.declare()
conn.drain_events()
"""
def __init__(self, channel, queue, consumer_tag='', no_local=False,
no_ack=False, exclusive=False):
"""
:param channel: channel
:type channel: amqpy.channel.Channel
:param str queue: queue
:param str consumer_tag: consumer tag, local to the connection; leave
blank to let server auto-assign a tag
:param bool no_local: if True: do not deliver own messages
:param bool no_ack: server will not expect an ack for each message
:param bool exclusive: request exclusive access
"""
self.channel = channel
self.queue = queue
self.consumer_tag = consumer_tag
self.no_local = no_local
self.no_ack = no_ack
self.exclusive = exclusive
#: Number of messages consumed (incremented automatically)
self.consume_count = 0
def declare(self):
"""Declare the consumer
This method calls :meth:`~amqpy.channel.Channel.basic_consume()`
internally.
After the queue consumer is created, :attr:`self.consumer_tag` is
set to the server-assigned consumer tag if a tag was not specified
initially.
"""
self.consumer_tag = self.channel.basic_consume(
self.queue, self.consumer_tag, self.no_local, self.no_ack, self.exclusive,
callback=self.start, on_cancel=self.cancel_cb)
def cancel(self):
"""Cancel the consumer
"""
self.channel.basic_cancel(self.consumer_tag)
@abstractmethod
def run(self, msg):
"""Consumer callback
This method is called when the consumer is delivered a message. This
method **must** be overridden in the subclass.
:param msg: received message
:type msg: amqpy.message.Message
"""
pass
def cancel_cb(self, consumer_tag):
"""Consumer cancel callback
This method is called when the consumer is cancelled. This method may
be overridden in the subclass.
:param str consumer_tag: consumer tag
"""
pass
def start(self, msg):
self.run(msg)
self.consume_count += 1
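# --- Illustrative sketch (not part of the original module) ---
# A minimal concrete consumer; `conn`, `channel` and the 'test.q' queue are
# assumed to already exist and are hypothetical names.
#
# class EchoConsumer(AbstractConsumer):
#     def run(self, msg):
#         print('got: {}'.format(msg.body))
#         msg.ack()  # acknowledge so the server can discard the message
#
#     def cancel_cb(self, consumer_tag):
#         # invoked if the consumer is cancelled server-side
#         print('consumer {} cancelled'.format(consumer_tag))
#
# consumer = EchoConsumer(channel, 'test.q')
# consumer.declare()   # registers the consumer via basic_consume()
# conn.drain_events()  # dispatches received messages to run()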
|
gst/amqpy
|
amqpy/consumer.py
|
Python
|
mit
| 2,846 | 0.000351 |
import os
def NormalizedJoin( *args ):
"Normalizes and joins directory names"
return os.path.normpath(os.path.join(*args))
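# Illustrative usage (a sketch, not part of the original file):
# NormalizedJoin('src', '..', 'include', 'foo.h') is equivalent to
# os.path.normpath('src/../include/foo.h'), i.e. 'include/foo.h'
# (with the platform separator, so backslashes on Windows).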
|
DavidYen/YEngine
|
ypy/path_help.py
|
Python
|
mit
| 129 | 0.03876 |
"""Tests for certbot_dns_route53._internal.dns_route53.Authenticator"""
import unittest
from botocore.exceptions import ClientError
from botocore.exceptions import NoCredentialsError
try:
import mock
except ImportError: # pragma: no cover
from unittest import mock # type: ignore
from certbot import errors
from certbot.compat import os
from certbot.plugins import dns_test_common
from certbot.plugins.dns_test_common import DOMAIN
class AuthenticatorTest(unittest.TestCase, dns_test_common.BaseAuthenticatorTest):
# pylint: disable=protected-access
def setUp(self):
from certbot_dns_route53._internal.dns_route53 import Authenticator
super().setUp()
self.config = mock.MagicMock()
# Set up dummy credentials for testing
os.environ["AWS_ACCESS_KEY_ID"] = "dummy_access_key"
os.environ["AWS_SECRET_ACCESS_KEY"] = "dummy_secret_access_key"
self.auth = Authenticator(self.config, "route53")
def tearDown(self):
# Remove the dummy credentials from env vars
del os.environ["AWS_ACCESS_KEY_ID"]
del os.environ["AWS_SECRET_ACCESS_KEY"]
def test_perform(self):
self.auth._change_txt_record = mock.MagicMock()
self.auth._wait_for_change = mock.MagicMock()
self.auth.perform([self.achall])
self.auth._change_txt_record.assert_called_once_with("UPSERT",
'_acme-challenge.' + DOMAIN,
mock.ANY)
self.assertEqual(self.auth._wait_for_change.call_count, 1)
def test_perform_no_credentials_error(self):
self.auth._change_txt_record = mock.MagicMock(side_effect=NoCredentialsError)
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
def test_perform_client_error(self):
self.auth._change_txt_record = mock.MagicMock(
side_effect=ClientError({"Error": {"Code": "foo"}}, "bar"))
self.assertRaises(errors.PluginError,
self.auth.perform,
[self.achall])
def test_cleanup(self):
self.auth._attempt_cleanup = True
self.auth._change_txt_record = mock.MagicMock()
self.auth.cleanup([self.achall])
self.auth._change_txt_record.assert_called_once_with("DELETE",
'_acme-challenge.'+DOMAIN,
mock.ANY)
def test_cleanup_no_credentials_error(self):
self.auth._attempt_cleanup = True
self.auth._change_txt_record = mock.MagicMock(side_effect=NoCredentialsError)
self.auth.cleanup([self.achall])
def test_cleanup_client_error(self):
self.auth._attempt_cleanup = True
self.auth._change_txt_record = mock.MagicMock(
side_effect=ClientError({"Error": {"Code": "foo"}}, "bar"))
self.auth.cleanup([self.achall])
class ClientTest(unittest.TestCase):
# pylint: disable=protected-access
PRIVATE_ZONE = {
"Id": "BAD-PRIVATE",
"Name": "example.com",
"Config": {
"PrivateZone": True
}
}
EXAMPLE_NET_ZONE = {
"Id": "BAD-WRONG-TLD",
"Name": "example.net",
"Config": {
"PrivateZone": False
}
}
EXAMPLE_COM_ZONE = {
"Id": "EXAMPLE",
"Name": "example.com",
"Config": {
"PrivateZone": False
}
}
FOO_EXAMPLE_COM_ZONE = {
"Id": "FOO",
"Name": "foo.example.com",
"Config": {
"PrivateZone": False
}
}
def setUp(self):
from certbot_dns_route53._internal.dns_route53 import Authenticator
self.config = mock.MagicMock()
# Set up dummy credentials for testing
os.environ["AWS_ACCESS_KEY_ID"] = "dummy_access_key"
os.environ["AWS_SECRET_ACCESS_KEY"] = "dummy_secret_access_key"
self.client = Authenticator(self.config, "route53")
def tearDown(self):
# Remove the dummy credentials from env vars
del os.environ["AWS_ACCESS_KEY_ID"]
del os.environ["AWS_SECRET_ACCESS_KEY"]
def test_find_zone_id_for_domain(self):
self.client.r53.get_paginator = mock.MagicMock()
self.client.r53.get_paginator().paginate.return_value = [
{
"HostedZones": [
self.EXAMPLE_NET_ZONE,
self.EXAMPLE_COM_ZONE,
]
}
]
result = self.client._find_zone_id_for_domain("foo.example.com")
self.assertEqual(result, "EXAMPLE")
def test_find_zone_id_for_domain_pagination(self):
self.client.r53.get_paginator = mock.MagicMock()
self.client.r53.get_paginator().paginate.return_value = [
{
"HostedZones": [
self.PRIVATE_ZONE,
self.EXAMPLE_COM_ZONE,
]
},
{
"HostedZones": [
self.PRIVATE_ZONE,
self.FOO_EXAMPLE_COM_ZONE,
]
}
]
result = self.client._find_zone_id_for_domain("foo.example.com")
self.assertEqual(result, "FOO")
def test_find_zone_id_for_domain_no_results(self):
self.client.r53.get_paginator = mock.MagicMock()
self.client.r53.get_paginator().paginate.return_value = []
self.assertRaises(errors.PluginError,
self.client._find_zone_id_for_domain,
"foo.example.com")
def test_find_zone_id_for_domain_no_correct_results(self):
self.client.r53.get_paginator = mock.MagicMock()
self.client.r53.get_paginator().paginate.return_value = [
{
"HostedZones": [
self.PRIVATE_ZONE,
self.EXAMPLE_NET_ZONE,
]
},
]
self.assertRaises(errors.PluginError,
self.client._find_zone_id_for_domain,
"foo.example.com")
def test_change_txt_record(self):
self.client._find_zone_id_for_domain = mock.MagicMock()
self.client.r53.change_resource_record_sets = mock.MagicMock(
return_value={"ChangeInfo": {"Id": 1}})
self.client._change_txt_record("FOO", DOMAIN, "foo")
call_count = self.client.r53.change_resource_record_sets.call_count
self.assertEqual(call_count, 1)
def test_change_txt_record_delete(self):
self.client._find_zone_id_for_domain = mock.MagicMock()
self.client.r53.change_resource_record_sets = mock.MagicMock(
return_value={"ChangeInfo": {"Id": 1}})
validation = "some-value"
validation_record = {"Value": '"{0}"'.format(validation)}
self.client._resource_records[DOMAIN] = [validation_record]
self.client._change_txt_record("DELETE", DOMAIN, validation)
call_count = self.client.r53.change_resource_record_sets.call_count
self.assertEqual(call_count, 1)
call_args = self.client.r53.change_resource_record_sets.call_args_list[0][1]
call_args_batch = call_args["ChangeBatch"]["Changes"][0]
self.assertEqual(call_args_batch["Action"], "DELETE")
self.assertEqual(
call_args_batch["ResourceRecordSet"]["ResourceRecords"],
[validation_record])
def test_change_txt_record_multirecord(self):
self.client._find_zone_id_for_domain = mock.MagicMock()
self.client._get_validation_rrset = mock.MagicMock()
self.client._resource_records[DOMAIN] = [
{"Value": "\"pre-existing-value\""},
{"Value": "\"pre-existing-value-two\""},
]
self.client.r53.change_resource_record_sets = mock.MagicMock(
return_value={"ChangeInfo": {"Id": 1}})
self.client._change_txt_record("DELETE", DOMAIN, "pre-existing-value")
call_count = self.client.r53.change_resource_record_sets.call_count
call_args = self.client.r53.change_resource_record_sets.call_args_list[0][1]
call_args_batch = call_args["ChangeBatch"]["Changes"][0]
self.assertEqual(call_args_batch["Action"], "UPSERT")
self.assertEqual(
call_args_batch["ResourceRecordSet"]["ResourceRecords"],
[{"Value": "\"pre-existing-value-two\""}])
self.assertEqual(call_count, 1)
def test_wait_for_change(self):
self.client.r53.get_change = mock.MagicMock(
side_effect=[{"ChangeInfo": {"Status": "PENDING"}},
{"ChangeInfo": {"Status": "INSYNC"}}])
self.client._wait_for_change(1)
self.assertTrue(self.client.r53.get_change.called)
if __name__ == "__main__":
unittest.main() # pragma: no cover
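# --- Illustrative sketch (not part of the test module) ---
# The zone-lookup tests above exercise roughly this behavior: among all
# *public* hosted zones whose name is a suffix of the domain, pick the most
# specific (longest) one. A simplified stand-in, assuming `zones` is a list
# of dicts shaped like the fixtures above; it is not certbot's actual code:
#
# def best_zone_id(domain, zones):
#     candidates = [
#         z for z in zones
#         if not z["Config"]["PrivateZone"]
#         and (domain == z["Name"] or domain.endswith("." + z["Name"]))
#     ]
#     if not candidates:
#         raise ValueError("no public zone found for {}".format(domain))
#     return max(candidates, key=lambda z: len(z["Name"]))["Id"]
#
# best_zone_id("foo.example.com", [EXAMPLE_COM_ZONE, FOO_EXAMPLE_COM_ZONE])
# returns "FOO", matching test_find_zone_id_for_domain_pagination.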
|
stweil/letsencrypt
|
certbot-dns-route53/tests/dns_route53_test.py
|
Python
|
apache-2.0
| 9,471 | 0.00095 |
# -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Deleting field 'PiezaConjunto.fragmentos'
db.delete_column(u'cachi_piezaconjunto', 'fragmentos')
def backwards(self, orm):
# Adding field 'PiezaConjunto.fragmentos'
db.add_column(u'cachi_piezaconjunto', 'fragmentos',
self.gf('django.db.models.fields.BooleanField')(default=False),
keep_default=False)
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'cachi.adjunto': {
'Meta': {'object_name': 'Adjunto'},
'adjunto': ('django.db.models.fields.files.FileField', [], {'max_length': '768'}),
'content_type': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'ficha_relevamiento_sitio': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.FichaRelevamientoSitio']", 'null': 'True', 'blank': 'True'}),
'ficha_tecnica': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'adjunto_ficha_tecnica'", 'null': 'True', 'to': u"orm['cachi.FichaTecnica']"}),
'fragmento': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'adjunto_fragmento'", 'null': 'True', 'to': u"orm['cachi.Fragmento']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'informe_campo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.InformeCampo']", 'null': 'True', 'blank': 'True'}),
'nombre_archivo': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'pieza_conjunto': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'adjunto_pieza_conjunto'", 'null': 'True', 'to': u"orm['cachi.PiezaConjunto']"}),
'size': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'tipo': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'cachi.ficharelevamientositio': {
'Meta': {'object_name': 'FichaRelevamientoSitio'},
'adjuntos': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Adjunto']", 'null': 'True', 'blank': 'True'}),
'autor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Persona']"}),
'fecha': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'cachi.fichatecnica': {
'Meta': {'object_name': 'FichaTecnica'},
'alto': ('django.db.models.fields.PositiveIntegerField', [], {}),
'autor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Persona']", 'null': 'True', 'blank': 'True'}),
'color': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'decoracion': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'desperfectos': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'desperfectos_fabricacion': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'diagnostico_estado': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'diametro_max': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'diametro_min': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'espesor': ('django.db.models.fields.PositiveIntegerField', [], {}),
'fecha': ('django.db.models.fields.DateField', [], {}),
'fragmento': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'fichas_tecnicas'", 'to': u"orm['cachi.Fragmento']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'inscripciones_marcas': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'observacion': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'otras_caracteristicas_distintivas': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'peso': ('django.db.models.fields.PositiveIntegerField', [], {}),
'razon_actualizacion': ('django.db.models.fields.PositiveIntegerField', [], {}),
'reparaciones': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'tratamiento': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'usuario': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
},
u'cachi.fragmento': {
'Meta': {'object_name': 'Fragmento'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'numero_inventario': ('django.db.models.fields.PositiveIntegerField', [], {}),
'pieza_conjunto': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'fragmentos_pieza_conjunto'", 'to': u"orm['cachi.PiezaConjunto']"}),
'ultima_version': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "u'ultima_version'", 'null': 'True', 'to': u"orm['cachi.FichaTecnica']"})
},
u'cachi.informecampo': {
'Meta': {'object_name': 'InformeCampo'},
'adjuntos': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Adjunto']", 'null': 'True', 'blank': 'True'}),
'autor': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Persona']"}),
'fecha': ('django.db.models.fields.DateField', [], {}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'sitio_aqueologico': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.SitioArqueologico']", 'null': 'True', 'blank': 'True'})
},
u'cachi.modificacion': {
'Meta': {'object_name': 'Modificacion'},
'atributo': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'fecha': ('django.db.models.fields.DateField', [], {}),
'ficha_tecnica': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.FichaTecnica']", 'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'pieza_conjunto': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.PiezaConjunto']", 'null': 'True', 'blank': 'True'}),
'sitio_aqueologico': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.SitioArqueologico']", 'null': 'True', 'blank': 'True'}),
'valor_nuevo': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'valor_viejo': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
u'cachi.naturaleza': {
'Meta': {'object_name': 'Naturaleza'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'cachi.persona': {
'Meta': {'object_name': 'Persona'},
'apellido': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'user': ('django.db.models.fields.related.OneToOneField', [], {'to': u"orm['auth.User']", 'unique': 'True'})
},
u'cachi.piezaconjunto': {
'Meta': {'object_name': 'PiezaConjunto'},
'condicion_hallazgo': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'fecha_hallazgo': ('django.db.models.fields.DateField', [], {}),
'forma': ('django.db.models.fields.TextField', [], {'max_length': '128'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'naturaleza': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Naturaleza']"}),
'nombre_descriptivo': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'persona_colectora': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Persona']", 'null': 'True', 'blank': 'True'}),
'tecnica_manufactura': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'tipo_adquisicion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.TipoAdquisicion']", 'null': 'True', 'blank': 'True'}),
'tipo_condicion_hallazgo': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.TipoCondicionHallazgo']", 'null': 'True', 'blank': 'True'}),
'ubicacion': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.Ubicacion']", 'null': 'True', 'blank': 'True'})
},
u'cachi.procedencia': {
'Meta': {'object_name': 'Procedencia'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'otra': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True', 'blank': 'True'}),
'pieza_conjunto': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "u'procedencia'", 'unique': 'True', 'to': u"orm['cachi.PiezaConjunto']"}),
'sitio_arqueologico': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.SitioArqueologico']", 'null': 'True', 'blank': 'True'}),
'ubicacion_geografica': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.UbicacionGeografica']", 'null': 'True', 'blank': 'True'})
},
u'cachi.sitioarqueologico': {
'Meta': {'object_name': 'SitioArqueologico'},
'coordenada_x': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'coordenada_y': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'ubicacion_geografica': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.UbicacionGeografica']"})
},
u'cachi.tipoadquisicion': {
'Meta': {'object_name': 'TipoAdquisicion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'cachi.tipocondicionhallazgo': {
'Meta': {'object_name': 'TipoCondicionHallazgo'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'cachi.ubicacion': {
'Meta': {'object_name': 'Ubicacion'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '64'})
},
u'cachi.ubicaciongeografica': {
'Meta': {'object_name': 'UbicacionGeografica'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'nombre': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'padre': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['cachi.UbicacionGeografica']", 'null': 'True', 'blank': 'True'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
}
}
complete_apps = ['cachi']
|
data-tsunami/museo-cachi
|
cachi/migrations/0008_auto__del_field_piezaconjunto_fragmentos.py
|
Python
|
gpl-3.0
| 15,336 | 0.007303 |
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2021 Florian Bruhin (The Compiler) <mail@qutebrowser.org>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Custom useful data types."""
import operator
import enum
import dataclasses
from typing import Any, Optional, Sequence, TypeVar, Union
from PyQt5.QtCore import pyqtSignal, pyqtSlot, QObject, QTimer
from PyQt5.QtCore import QUrl
from qutebrowser.utils import log, qtutils, utils
_T = TypeVar('_T', bound=utils.SupportsLessThan)
class Unset:
"""Class for an unset object."""
__slots__ = ()
def __repr__(self) -> str:
return '<UNSET>'
UNSET = Unset()
class NeighborList(Sequence[_T]):
"""A list of items which saves its current position.
Class attributes:
Modes: Different modes, see constructor documentation.
Attributes:
fuzzyval: The value which is currently set but not in the list.
_idx: The current position in the list.
_items: A list of all items, accessed through item property.
_mode: The current mode.
"""
class Modes(enum.Enum):
"""Behavior for the 'mode' argument."""
edge = enum.auto()
exception = enum.auto()
def __init__(self, items: Sequence[_T] = None,
default: Union[_T, Unset] = UNSET,
mode: Modes = Modes.exception) -> None:
"""Constructor.
Args:
items: The list of items to iterate in.
            default: The initially selected value.
            mode: Behavior when the first/last item is reached.
Modes.edge: Go to the first/last item
Modes.exception: Raise an IndexError.
"""
if not isinstance(mode, self.Modes):
raise TypeError("Mode {} is not a Modes member!".format(mode))
if items is None:
self._items: Sequence[_T] = []
else:
self._items = list(items)
self._default = default
if not isinstance(default, Unset):
idx = self._items.index(default)
self._idx: Optional[int] = idx
else:
self._idx = None
self._mode = mode
self.fuzzyval: Optional[int] = None
def __getitem__(self, key: int) -> _T: # type: ignore[override]
return self._items[key]
def __len__(self) -> int:
return len(self._items)
def __repr__(self) -> str:
return utils.get_repr(self, items=self._items, mode=self._mode,
idx=self._idx, fuzzyval=self.fuzzyval)
def _snap_in(self, offset: int) -> bool:
"""Set the current item to the closest item to self.fuzzyval.
Args:
offset: negative to get the next smaller item, positive for the
next bigger one.
Return:
True if the value snapped in (changed),
False when the value already was in the list.
"""
assert isinstance(self.fuzzyval, (int, float)), self.fuzzyval
op = operator.le if offset < 0 else operator.ge
items = [(idx, e) for (idx, e) in enumerate(self._items)
if op(e, self.fuzzyval)]
if items:
item = min(
items,
key=lambda tpl:
abs(self.fuzzyval - tpl[1])) # type: ignore[operator]
else:
sorted_items = sorted(enumerate(self.items), key=lambda e: e[1])
idx = 0 if offset < 0 else -1
item = sorted_items[idx]
self._idx = item[0]
return self.fuzzyval not in self._items
def _get_new_item(self, offset: int) -> _T:
"""Logic for getitem to get the item at offset.
Args:
offset: The offset of the current item, relative to the last one.
Return:
The new item.
"""
assert self._idx is not None
try:
if self._idx + offset >= 0:
new = self._items[self._idx + offset]
else:
raise IndexError
except IndexError:
if self._mode == self.Modes.edge:
assert offset != 0
if offset > 0:
new = self.lastitem()
else:
new = self.firstitem()
elif self._mode == self.Modes.exception: # pragma: no branch
raise
else:
self._idx += offset
return new
@property
def items(self) -> Sequence[_T]:
"""Getter for items, which should not be set."""
return self._items
def getitem(self, offset: int) -> _T:
"""Get the item with a relative position.
Args:
offset: The offset of the current item, relative to the last one.
Return:
The new item.
"""
log.misc.debug("{} items, idx {}, offset {}".format(
len(self._items), self._idx, offset))
if not self._items:
raise IndexError("No items found!")
if self.fuzzyval is not None:
# Value has been set to something not in the list, so we snap in to
# the closest value in the right direction and count this as one
# step towards offset.
snapped = self._snap_in(offset)
if snapped and offset > 0:
offset -= 1
elif snapped:
offset += 1
self.fuzzyval = None
return self._get_new_item(offset)
def curitem(self) -> _T:
"""Get the current item in the list."""
if self._idx is not None:
return self._items[self._idx]
else:
raise IndexError("No current item!")
def nextitem(self) -> _T:
"""Get the next item in the list."""
return self.getitem(1)
def previtem(self) -> _T:
"""Get the previous item in the list."""
return self.getitem(-1)
def firstitem(self) -> _T:
"""Get the first item in the list."""
if not self._items:
raise IndexError("No items found!")
self._idx = 0
return self.curitem()
def lastitem(self) -> _T:
"""Get the last item in the list."""
if not self._items:
raise IndexError("No items found!")
self._idx = len(self._items) - 1
return self.curitem()
def reset(self) -> _T:
"""Reset the position to the default."""
if self._default is UNSET:
raise ValueError("No default set!")
self._idx = self._items.index(self._default)
return self.curitem()
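# --- Illustrative sketch (not part of the original module) ---
# How fuzzyval interacts with getitem(); values are hypothetical.
#
# nl = NeighborList([20, 50, 80], default=50)
# nl.nextitem()      # -> 80
# nl.fuzzyval = 70   # current value set to something not in the list
# nl.previtem()      # snaps in to 50 (closest item <= 70); the snap itself
#                    # counts as the one step, so 50 is returned
# nl.nextitem()      # -> 80 again (normal stepping resumes)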
class PromptMode(enum.Enum):
"""The mode of a Question."""
yesno = enum.auto()
text = enum.auto()
user_pwd = enum.auto()
alert = enum.auto()
download = enum.auto()
class ClickTarget(enum.Enum):
"""How to open a clicked link."""
normal = enum.auto() #: Open the link in the current tab
tab = enum.auto() #: Open the link in a new foreground tab
tab_bg = enum.auto() #: Open the link in a new background tab
window = enum.auto() #: Open the link in a new window
hover = enum.auto() #: Only hover over the link
class KeyMode(enum.Enum):
"""Key input modes."""
normal = enum.auto() #: Normal mode (no mode was entered)
hint = enum.auto() #: Hint mode (showing labels for links)
command = enum.auto() #: Command mode (after pressing the colon key)
yesno = enum.auto() #: Yes/No prompts
prompt = enum.auto() #: Text prompts
insert = enum.auto() #: Insert mode (passing through most keys)
passthrough = enum.auto() #: Passthrough mode (passing through all keys)
caret = enum.auto() #: Caret mode (moving cursor with keys)
set_mark = enum.auto()
jump_mark = enum.auto()
record_macro = enum.auto()
run_macro = enum.auto()
# 'register' is a bit of an oddball here: It's not really a "real" mode,
# but it's used in the config for common bindings for
# set_mark/jump_mark/record_macro/run_macro.
register = enum.auto()
class Exit(enum.IntEnum):
"""Exit statuses for errors. Needs to be an int for sys.exit."""
ok = 0
reserved = 1
exception = 2
err_ipc = 3
err_init = 4
class LoadStatus(enum.Enum):
"""Load status of a tab."""
none = enum.auto()
success = enum.auto()
success_https = enum.auto()
error = enum.auto()
warn = enum.auto()
loading = enum.auto()
class Backend(enum.Enum):
"""The backend being used (usertypes.backend)."""
QtWebKit = enum.auto()
QtWebEngine = enum.auto()
class JsWorld(enum.Enum):
"""World/context to run JavaScript code in."""
main = enum.auto() #: Same world as the web page's JavaScript.
application = enum.auto() #: Application world, used by qutebrowser internally.
user = enum.auto() #: User world, currently not used.
jseval = enum.auto() #: World used for the jseval-command.
class JsLogLevel(enum.Enum):
"""Log level of a JS message.
This needs to match up with the keys allowed for the
content.javascript.log setting.
"""
unknown = enum.auto()
info = enum.auto()
warning = enum.auto()
error = enum.auto()
class MessageLevel(enum.Enum):
"""The level of a message being shown."""
error = enum.auto()
warning = enum.auto()
info = enum.auto()
class IgnoreCase(enum.Enum):
"""Possible values for the 'search.ignore_case' setting."""
smart = enum.auto()
never = enum.auto()
always = enum.auto()
class CommandValue(enum.Enum):
"""Special values which are injected when running a command handler."""
count = enum.auto()
win_id = enum.auto()
cur_tab = enum.auto()
count_tab = enum.auto()
class Question(QObject):
"""A question asked to the user, e.g. via the status bar.
Note the creator is responsible for cleaning up the question after it
doesn't need it anymore, e.g. via connecting Question.completed to
Question.deleteLater.
Attributes:
mode: A PromptMode enum member.
yesno: A question which can be answered with yes/no.
text: A question which requires a free text answer.
user_pwd: A question for a username and password.
default: The default value.
For yesno, None (no default), True or False.
For text, a default text as string.
For user_pwd, a default username as string.
title: The question title to show.
text: The prompt text to display to the user.
url: Any URL referenced in prompts.
option: Boolean option to be set when answering always/never.
answer: The value the user entered (as password for user_pwd).
is_aborted: Whether the question was aborted.
interrupted: Whether the question was interrupted by another one.
Signals:
answered: Emitted when the question has been answered by the user.
arg: The answer to the question.
cancelled: Emitted when the question has been cancelled by the user.
aborted: Emitted when the question was aborted programmatically.
In this case, cancelled is not emitted.
answered_yes: Convenience signal emitted when a yesno question was
answered with yes.
answered_no: Convenience signal emitted when a yesno question was
answered with no.
completed: Emitted when the question was completed in any way.
"""
answered = pyqtSignal(object)
cancelled = pyqtSignal()
aborted = pyqtSignal()
answered_yes = pyqtSignal()
answered_no = pyqtSignal()
completed = pyqtSignal()
def __init__(self, parent: QObject = None) -> None:
super().__init__(parent)
self.mode: Optional[PromptMode] = None
self.default: Union[bool, str, None] = None
self.title: Optional[str] = None
self.text: Optional[str] = None
self.url: Optional[str] = None
self.option: Optional[bool] = None
self.answer: Union[str, bool, None] = None
self.is_aborted = False
self.interrupted = False
def __repr__(self) -> str:
return utils.get_repr(self, title=self.title, text=self.text,
mode=self.mode, default=self.default,
option=self.option)
@pyqtSlot()
def done(self) -> None:
"""Must be called when the question was answered completely."""
self.answered.emit(self.answer)
if self.mode == PromptMode.yesno:
if self.answer:
self.answered_yes.emit()
else:
self.answered_no.emit()
self.completed.emit()
@pyqtSlot()
def cancel(self) -> None:
"""Cancel the question (resulting from user-input)."""
self.cancelled.emit()
self.completed.emit()
@pyqtSlot()
def abort(self) -> None:
"""Abort the question."""
if self.is_aborted:
log.misc.debug("Question was already aborted")
return
self.is_aborted = True
self.aborted.emit()
self.completed.emit()
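# --- Illustrative sketch (not part of the original module) ---
# Typical lifecycle for a yes/no question; the asking code and the do_quit
# slot are hypothetical.
#
# q = Question()
# q.mode = PromptMode.yesno
# q.title = "Quit?"
# q.text = "Really quit qutebrowser?"
# q.answered_yes.connect(do_quit)      # do_quit defined elsewhere
# q.completed.connect(q.deleteLater)   # creator cleans up, per the docstring
# ...prompt UI sets q.answer and then calls q.done() or q.cancel()...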
class Timer(QTimer):
"""A timer which has a name to show in __repr__ and checks for overflows.
Attributes:
_name: The name of the timer.
"""
def __init__(self, parent: QObject = None, name: str = None) -> None:
super().__init__(parent)
if name is None:
self._name = "unnamed"
else:
self.setObjectName(name)
self._name = name
def __repr__(self) -> str:
return utils.get_repr(self, name=self._name)
def setInterval(self, msec: int) -> None:
"""Extend setInterval to check for overflows."""
qtutils.check_overflow(msec, 'int')
super().setInterval(msec)
def start(self, msec: int = None) -> None:
"""Extend start to check for overflows."""
if msec is not None:
qtutils.check_overflow(msec, 'int')
super().start(msec)
else:
super().start()
class AbstractCertificateErrorWrapper:
"""A wrapper over an SSL/certificate error."""
def __init__(self, error: Any) -> None:
self._error = error
def __str__(self) -> str:
raise NotImplementedError
def __repr__(self) -> str:
raise NotImplementedError
def is_overridable(self) -> bool:
raise NotImplementedError
@dataclasses.dataclass
class NavigationRequest:
"""A request to navigate to the given URL."""
class Type(enum.Enum):
"""The type of a request.
Based on QWebEngineUrlRequestInfo::NavigationType and QWebPage::NavigationType.
"""
#: Navigation initiated by clicking a link.
link_clicked = 1
#: Navigation explicitly initiated by typing a URL (QtWebEngine only).
typed = 2
#: Navigation submits a form.
form_submitted = 3
#: An HTML form was submitted a second time (QtWebKit only).
form_resubmitted = 4
#: Navigation initiated by a history action.
back_forward = 5
#: Navigation initiated by refreshing the page.
reloaded = 6
#: Navigation triggered automatically by page content or remote server
#: (QtWebEngine >= 5.14 only)
redirect = 7
#: None of the above.
other = 8
url: QUrl
navigation_type: Type
is_main_frame: bool
accepted: bool = True
|
forkbong/qutebrowser
|
qutebrowser/utils/usertypes.py
|
Python
|
gpl-3.0
| 16,178 | 0.000124 |
# Copyright: Damien Elmes <anki@ichi2.net>
# -*- coding: utf-8 -*-
# License: GNU AGPL, version 3 or later; http://www.gnu.org/licenses/agpl.html
import re
import signal
import zipfile
from send2trash import send2trash
from aqt.qt import *
from anki import Collection
from anki.utils import isWin, isMac, intTime, splitFields, ids2str
from anki.hooks import runHook, addHook
import aqt
import aqt.progress
import aqt.webview
import aqt.toolbar
import aqt.stats
from aqt.utils import restoreGeom, showInfo, showWarning,\
restoreState, getOnlyText, askUser, applyStyles, showText, tooltip, \
openHelp, openLink, checkInvalidFilename
import anki.db
class AnkiQt(QMainWindow):
def __init__(self, app, profileManager, args):
QMainWindow.__init__(self)
self.state = "startup"
aqt.mw = self
self.app = app
if isWin:
self._xpstyle = QStyleFactory.create("WindowsXP")
self.app.setStyle(self._xpstyle)
self.pm = profileManager
# running 2.0 for the first time?
if self.pm.meta['firstRun']:
# load the new deck user profile
self.pm.load(self.pm.profiles()[0])
# upgrade if necessary
from aqt.upgrade import Upgrader
u = Upgrader(self)
u.maybeUpgrade()
self.pm.meta['firstRun'] = False
self.pm.save()
# init rest of app
if qtmajor == 4 and qtminor < 8:
# can't get modifiers immediately on qt4.7, so no safe mode there
self.safeMode = False
else:
self.safeMode = self.app.queryKeyboardModifiers() & Qt.ShiftModifier
try:
self.setupUI()
self.setupAddons()
except:
showInfo(_("Error during startup:\n%s") % traceback.format_exc())
sys.exit(1)
# must call this after ui set up
if self.safeMode:
tooltip(_("Shift key was held down. Skipping automatic "
"syncing and add-on loading."))
# were we given a file to import?
if args and args[0]:
self.onAppMsg(unicode(args[0], sys.getfilesystemencoding(), "ignore"))
# Load profile in a timer so we can let the window finish init and not
# close on profile load error.
if isMac and qtmajor >= 5:
self.show()
self.progress.timer(10, self.setupProfile, False)
def setupUI(self):
self.col = None
self.hideSchemaMsg = False
self.setupAppMsg()
self.setupKeys()
self.setupThreads()
self.setupFonts()
self.setupMainWindow()
self.setupSystemSpecific()
self.setupStyle()
self.setupMenus()
self.setupProgress()
self.setupErrorHandler()
self.setupSignals()
self.setupAutoUpdate()
self.setupHooks()
self.setupRefreshTimer()
self.updateTitleBar()
# screens
self.setupDeckBrowser()
self.setupOverview()
self.setupReviewer()
# Profiles
##########################################################################
def setupProfile(self):
self.pendingImport = None
# profile not provided on command line?
if not self.pm.name:
# if there's a single profile, load it automatically
profs = self.pm.profiles()
if len(profs) == 1:
try:
self.pm.load(profs[0])
except:
# password protected
pass
if not self.pm.name:
self.showProfileManager()
else:
self.loadProfile()
def showProfileManager(self):
self.state = "profileManager"
d = self.profileDiag = QDialog()
f = self.profileForm = aqt.forms.profiles.Ui_Dialog()
f.setupUi(d)
d.connect(f.login, SIGNAL("clicked()"), self.onOpenProfile)
d.connect(f.profiles, SIGNAL("itemDoubleClicked(QListWidgetItem*)"),
self.onOpenProfile)
d.connect(f.quit, SIGNAL("clicked()"), lambda: sys.exit(0))
d.connect(f.add, SIGNAL("clicked()"), self.onAddProfile)
d.connect(f.rename, SIGNAL("clicked()"), self.onRenameProfile)
d.connect(f.delete_2, SIGNAL("clicked()"), self.onRemProfile)
d.connect(d, SIGNAL("rejected()"), lambda: d.close())
d.connect(f.profiles, SIGNAL("currentRowChanged(int)"),
self.onProfileRowChange)
self.refreshProfilesList()
# raise first, for osx testing
d.show()
d.activateWindow()
d.raise_()
d.exec_()
def refreshProfilesList(self):
f = self.profileForm
f.profiles.clear()
profs = self.pm.profiles()
f.profiles.addItems(profs)
try:
idx = profs.index(self.pm.name)
except:
idx = 0
f.profiles.setCurrentRow(idx)
def onProfileRowChange(self, n):
if n < 0:
# called on .clear()
return
name = self.pm.profiles()[n]
f = self.profileForm
passwd = not self.pm.load(name)
f.passEdit.setVisible(passwd)
f.passLabel.setVisible(passwd)
def openProfile(self):
name = self.pm.profiles()[self.profileForm.profiles.currentRow()]
passwd = self.profileForm.passEdit.text()
return self.pm.load(name, passwd)
def onOpenProfile(self):
if not self.openProfile():
showWarning(_("Invalid password."))
return
self.profileDiag.close()
self.loadProfile()
return True
def profileNameOk(self, str):
return not checkInvalidFilename(str)
def onAddProfile(self):
name = getOnlyText(_("Name:"))
if name:
name = name.strip()
if name in self.pm.profiles():
return showWarning(_("Name exists."))
if not self.profileNameOk(name):
return
self.pm.create(name)
self.pm.name = name
self.refreshProfilesList()
def onRenameProfile(self):
name = getOnlyText(_("New name:"), default=self.pm.name)
if not self.openProfile():
return showWarning(_("Invalid password."))
if not name:
return
if name == self.pm.name:
return
if name in self.pm.profiles():
return showWarning(_("Name exists."))
if not self.profileNameOk(name):
return
self.pm.rename(name)
self.refreshProfilesList()
def onRemProfile(self):
profs = self.pm.profiles()
if len(profs) < 2:
return showWarning(_("There must be at least one profile."))
# password correct?
if not self.openProfile():
return
# sure?
if not askUser(_("""\
All cards, notes, and media for this profile will be deleted. \
Are you sure?""")):
return
self.pm.remove(self.pm.name)
self.refreshProfilesList()
def loadProfile(self):
# show main window
if self.pm.profile['mainWindowState']:
restoreGeom(self, "mainWindow")
restoreState(self, "mainWindow")
else:
self.resize(500, 400)
# toolbar needs to be retranslated
self.toolbar.draw()
# titlebar
self.setWindowTitle("Anki - " + self.pm.name)
# show and raise window for osx
self.show()
self.activateWindow()
self.raise_()
# maybe sync (will load DB)
if self.pendingImport and os.path.basename(
self.pendingImport).startswith("backup-"):
# skip sync when importing a backup
self.loadCollection()
else:
self.onSync(auto=True)
# import pending?
if self.pendingImport:
if self.pm.profile['key']:
showInfo(_("""\
To import into a password protected profile, please open the profile before attempting to import."""))
else:
self.handleImport(self.pendingImport)
self.pendingImport = None
runHook("profileLoaded")
def unloadProfile(self, browser=True):
if not self.pm.profile:
# already unloaded
return
runHook("unloadProfile")
if not self.unloadCollection():
return
self.state = "profileManager"
self.onSync(auto=True, reload=False)
self.pm.profile['mainWindowGeom'] = self.saveGeometry()
self.pm.profile['mainWindowState'] = self.saveState()
self.pm.save()
self.pm.profile = None
self.hide()
if browser:
self.showProfileManager()
# Collection load/unload
##########################################################################
def loadCollection(self):
self.hideSchemaMsg = True
cpath = self.pm.collectionPath()
try:
self.col = Collection(cpath, log=True)
except anki.db.Error:
# warn user
showWarning("""\
Your collection is corrupt. Please see the manual for \
how to restore from a backup.""")
# move it out of the way so the profile can be used again
newpath = cpath+str(intTime())
os.rename(cpath, newpath)
# then close
sys.exit(1)
except Exception, e:
# the custom exception handler won't catch this if we immediately
# unload, so we have to manually handle it
if "invalidTempFolder" in repr(str(e)):
showWarning(self.errorHandler.tempFolderMsg())
self.unloadProfile()
return
self.unloadProfile()
raise
self.hideSchemaMsg = False
self.progress.setupDB(self.col.db)
self.maybeEnableUndo()
self.moveToState("deckBrowser")
def unloadCollection(self):
"""
Unload the collection.
This unloads a collection if there is one and returns True if
there is no collection after the call. (Because the unload
worked or because there was no collection to start with.)
"""
if self.col:
if not self.closeAllCollectionWindows():
return
self.maybeOptimize()
self.progress.start(immediate=True)
if os.getenv("ANKIDEV", 0):
corrupt = False
else:
corrupt = self.col.db.scalar("pragma integrity_check") != "ok"
if corrupt:
showWarning(_("Your collection file appears to be corrupt. \
This can happen when the file is copied or moved while Anki is open, or \
when the collection is stored on a network or cloud drive. Please see \
the manual for information on how to restore from an automatic backup."))
self.col.close()
self.col = None
if not corrupt:
self.backup()
self.progress.finish()
return True
# Backup and auto-optimize
##########################################################################
def backup(self):
nbacks = self.pm.profile['numBackups']
if self.pm.profile.get('compressBackups', True):
zipStorage = zipfile.ZIP_DEFLATED
else:
zipStorage = zipfile.ZIP_STORED
if not nbacks or os.getenv("ANKIDEV", 0):
return
dir = self.pm.backupFolder()
path = self.pm.collectionPath()
# find existing backups
backups = []
for file in os.listdir(dir):
m = re.search("backup-(\d+).apkg", file)
if not m:
# unknown file
continue
backups.append((int(m.group(1)), file))
backups.sort()
# get next num
if not backups:
n = 1
else:
n = backups[-1][0] + 1
# do backup
newpath = os.path.join(dir, "backup-%d.apkg" % n)
z = zipfile.ZipFile(newpath, "w", zipStorage)
z.write(path, "collection.anki2")
z.writestr("media", "{}")
z.close()
# remove if over
if len(backups) + 1 > nbacks:
delete = len(backups) + 1 - nbacks
delete = backups[:delete]
for file in delete:
os.unlink(os.path.join(dir, file[1]))
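    # Illustrative sketch of the rotation above (values hypothetical): with
    # numBackups=2 and backup-3.apkg / backup-4.apkg already on disk, this
    # writes backup-5.apkg and unlinks backup-3.apkg, keeping the two newest.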
def maybeOptimize(self):
# have two weeks passed?
if (intTime() - self.pm.profile['lastOptimize']) < 86400*14:
return
self.progress.start(label=_("Optimizing..."), immediate=True)
self.col.optimize()
self.pm.profile['lastOptimize'] = intTime()
self.pm.save()
self.progress.finish()
# State machine
##########################################################################
def moveToState(self, state, *args):
#print "-> move from", self.state, "to", state
oldState = self.state or "dummy"
cleanup = getattr(self, "_"+oldState+"Cleanup", None)
if cleanup:
cleanup(state)
self.state = state
getattr(self, "_"+state+"State")(oldState, *args)
def _deckBrowserState(self, oldState):
self.deckBrowser.show()
def _colLoadingState(self, oldState):
"Run once, when col is loaded."
self.enableColMenuItems()
# ensure cwd is set if media dir exists
self.col.media.dir()
runHook("colLoading", self.col)
self.moveToState("overview")
def _selectedDeck(self):
did = self.col.decks.selected()
if not self.col.decks.nameOrNone(did):
showInfo(_("Please select a deck."))
return
return self.col.decks.get(did)
def _overviewState(self, oldState):
if not self._selectedDeck():
return self.moveToState("deckBrowser")
self.col.reset()
self.overview.show()
def _reviewState(self, oldState):
self.reviewer.show()
self.web.setCanFocus(True)
def _reviewCleanup(self, newState):
if newState != "resetRequired" and newState != "review":
self.reviewer.cleanup()
self.web.setCanFocus(False)
def noteChanged(self, nid):
"Called when a card or note is edited (but not deleted)."
runHook("noteChanged", nid)
# Resetting state
##########################################################################
def reset(self, guiOnly=False):
"Called for non-trivial edits. Rebuilds queue and updates UI."
if self.col:
if not guiOnly:
self.col.reset()
runHook("reset")
self.maybeEnableUndo()
self.moveToState(self.state)
def requireReset(self, modal=False):
"Signal queue needs to be rebuilt when edits are finished or by user."
self.autosave()
self.resetModal = modal
if self.interactiveState():
self.moveToState("resetRequired")
def interactiveState(self):
"True if not in profile manager, syncing, etc."
return self.state in ("overview", "review", "deckBrowser")
def maybeReset(self):
self.autosave()
if self.state == "resetRequired":
self.state = self.returnState
self.reset()
def delayedMaybeReset(self):
# if we redraw the page in a button click event it will often crash on
# windows
self.progress.timer(100, self.maybeReset, False)
def _resetRequiredState(self, oldState):
if oldState != "resetRequired":
self.returnState = oldState
if self.resetModal:
# we don't have to change the webview, as we have a covering window
return
self.web.setLinkHandler(lambda url: self.delayedMaybeReset())
i = _("Waiting for editing to finish.")
b = self.button("refresh", _("Resume Now"), id="resume")
self.web.stdHtml("""
<center><div style="height: 100%%">
<div style="position:relative; vertical-align: middle;">
%s<br>
%s</div></div></center>
""" % (i, b), css=self.sharedCSS)
self.bottomWeb.hide()
self.web.setFocus()
self.web.eval("$('#resume').focus()")
# HTML helpers
##########################################################################
sharedCSS = """
body {
background: #f3f3f3;
margin: 2em;
}
h1 { margin-bottom: 0.2em; }
"""
def button(self, link, name, key=None, class_="", id=""):
class_ = "but "+ class_
if key:
key = _("Shortcut key: %s") % key
else:
key = ""
return '''
<button id="%s" class="%s" onclick="py.link('%s');return false;"
title="%s">%s</button>''' % (
id, class_, link, key, name)
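    # Illustrative output (a sketch): self.button("refresh", _("Resume Now"),
    # id="resume") renders roughly
    #   <button id="resume" class="but " onclick="py.link('refresh');return false;"
    #           title="">Resume Now</button>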
# Main window setup
##########################################################################
def setupMainWindow(self):
# main window
self.form = aqt.forms.main.Ui_MainWindow()
self.form.setupUi(self)
# toolbar
tweb = aqt.webview.AnkiWebView()
tweb.setObjectName("toolbarWeb")
tweb.setFocusPolicy(Qt.WheelFocus)
tweb.setFixedHeight(32+self.fontHeightDelta)
self.toolbar = aqt.toolbar.Toolbar(self, tweb)
self.toolbar.draw()
# main area
self.web = aqt.webview.AnkiWebView()
self.web.setObjectName("mainText")
self.web.setFocusPolicy(Qt.WheelFocus)
self.web.setMinimumWidth(400)
# bottom area
sweb = self.bottomWeb = aqt.webview.AnkiWebView()
#sweb.hide()
sweb.setFixedHeight(100)
sweb.setObjectName("bottomWeb")
sweb.setFocusPolicy(Qt.WheelFocus)
# add in a layout
self.mainLayout = QVBoxLayout()
self.mainLayout.setContentsMargins(0,0,0,0)
self.mainLayout.setSpacing(0)
self.mainLayout.addWidget(tweb)
self.mainLayout.addWidget(self.web)
self.mainLayout.addWidget(sweb)
self.form.centralwidget.setLayout(self.mainLayout)
def closeAllCollectionWindows(self):
return aqt.dialogs.closeAll()
# Components
##########################################################################
def setupSignals(self):
signal.signal(signal.SIGINT, self.onSigInt)
def onSigInt(self, signum, frame):
# interrupt any current transaction and schedule a rollback & quit
self.col.db.interrupt()
def quit():
self.col.db.rollback()
self.close()
self.progress.timer(100, quit, False)
def setupProgress(self):
self.progress = aqt.progress.ProgressManager(self)
def setupErrorHandler(self):
import aqt.errors
self.errorHandler = aqt.errors.ErrorHandler(self)
def setupAddons(self):
import aqt.addons
self.addonManager = aqt.addons.AddonManager(self)
def setupThreads(self):
self._mainThread = QThread.currentThread()
def inMainThread(self):
return self._mainThread == QThread.currentThread()
def setupDeckBrowser(self):
from aqt.deckbrowser import DeckBrowser
self.deckBrowser = DeckBrowser(self)
def setupOverview(self):
from aqt.overview import Overview
self.overview = Overview(self)
def setupReviewer(self):
from aqt.reviewer import Reviewer
self.reviewer = Reviewer(self)
# Syncing
##########################################################################
def onSync(self, auto=False, reload=True):
if not auto or (self.pm.profile['syncKey'] and
self.pm.profile['autoSync'] and
not self.safeMode):
from aqt.sync import SyncManager
if not self.unloadCollection():
return
# set a sync state so the refresh timer doesn't fire while deck
# unloaded
self.state = "sync"
self.syncer = SyncManager(self, self.pm)
self.syncer.sync()
if reload:
if not self.col:
self.loadCollection()
# Tools
##########################################################################
def raiseMain(self):
if not self.app.activeWindow():
# make sure window is shown
self.setWindowState(self.windowState() & ~Qt.WindowMinimized)
return True
def setStatus(self, text, timeout=3000):
self.form.statusbar.showMessage(text, timeout)
def setupStyle(self):
applyStyles(self)
# Key handling
##########################################################################
def setupKeys(self):
self.keyHandler = None
# debug shortcut
self.debugShortcut = QShortcut(QKeySequence("Ctrl+:"), self)
self.connect(
self.debugShortcut, SIGNAL("activated()"), self.onDebug)
def keyPressEvent(self, evt):
# do we have a delegate?
if self.keyHandler:
# did it eat the key?
if self.keyHandler(evt):
return
# run standard handler
QMainWindow.keyPressEvent(self, evt)
# check global keys
key = unicode(evt.text())
if key == "d":
self.moveToState("deckBrowser")
elif key == "s":
if self.state == "overview":
self.col.startTimebox()
self.moveToState("review")
else:
self.moveToState("overview")
elif key == "a":
self.onAddCard()
elif key == "b":
self.onBrowse()
elif key == "S":
self.onStats()
elif key == "y":
self.onSync()
# App exit
##########################################################################
def closeEvent(self, event):
"User hit the X button, etc."
event.accept()
self.onClose(force=True)
def onClose(self, force=False):
"Called from a shortcut key. Close current active window."
aw = self.app.activeWindow()
if not aw or aw == self or force:
self.unloadProfile(browser=False)
self.app.closeAllWindows()
else:
aw.close()
# Undo & autosave
##########################################################################
def onUndo(self):
n = self.col.undoName()
cid = self.col.undo()
if cid and self.state == "review":
card = self.col.getCard(cid)
self.reviewer.cardQueue.append(card)
else:
tooltip(_("Reverted to state prior to '%s'.") % n.lower())
self.reset()
self.maybeEnableUndo()
def maybeEnableUndo(self):
if self.col and self.col.undoName():
self.form.actionUndo.setText(_("Undo %s") %
self.col.undoName())
self.form.actionUndo.setEnabled(True)
runHook("undoState", True)
else:
self.form.actionUndo.setText(_("Undo"))
self.form.actionUndo.setEnabled(False)
runHook("undoState", False)
def checkpoint(self, name):
self.col.save(name)
self.maybeEnableUndo()
def autosave(self):
self.col.autosave()
self.maybeEnableUndo()
# Other menu operations
##########################################################################
def onAddCard(self):
aqt.dialogs.open("AddCards", self)
def onBrowse(self):
aqt.dialogs.open("Browser", self)
def onEditCurrent(self):
aqt.dialogs.open("EditCurrent", self)
def onDeckConf(self, deck=None):
if not deck:
deck = self.col.decks.current()
if deck['dyn']:
import aqt.dyndeckconf
aqt.dyndeckconf.DeckConf(self, deck=deck)
else:
import aqt.deckconf
aqt.deckconf.DeckConf(self, deck)
def onOverview(self):
self.col.reset()
self.moveToState("overview")
def onStats(self):
deck = self._selectedDeck()
if not deck:
return
aqt.stats.DeckStats(self)
def onPrefs(self):
import aqt.preferences
aqt.preferences.Preferences(self)
def onNoteTypes(self):
import aqt.models
aqt.models.Models(self, self, fromMain=True)
def onAbout(self):
import aqt.about
aqt.about.show(self)
def onDonate(self):
openLink(aqt.appDonate)
def onDocumentation(self):
openHelp("")
# Importing & exporting
##########################################################################
def handleImport(self, path):
import aqt.importing
if not os.path.exists(path):
return showInfo(_("Please use File>Import to import this file."))
aqt.importing.importFile(self, path)
def onImport(self):
import aqt.importing
aqt.importing.onImport(self)
def onExport(self):
import aqt.exporting
aqt.exporting.ExportDialog(self)
# Cramming
##########################################################################
def onCram(self, search=""):
import aqt.dyndeckconf
n = 1
deck = self.col.decks.current()
if not search:
if not deck['dyn']:
search = 'deck:"%s" ' % deck['name']
decks = self.col.decks.allNames()
while _("Filtered Deck %d") % n in decks:
n += 1
name = _("Filtered Deck %d") % n
did = self.col.decks.newDyn(name)
diag = aqt.dyndeckconf.DeckConf(self, first=True, search=search)
if not diag.ok:
# user cancelled first config
self.col.decks.rem(did)
self.col.decks.select(deck['id'])
else:
self.moveToState("overview")
# Menu, title bar & status
##########################################################################
def setupMenus(self):
m = self.form
s = SIGNAL("triggered()")
#self.connect(m.actionDownloadSharedPlugin, s, self.onGetSharedPlugin)
self.connect(m.actionSwitchProfile, s, self.unloadProfile)
self.connect(m.actionImport, s, self.onImport)
self.connect(m.actionExport, s, self.onExport)
self.connect(m.actionExit, s, self, SLOT("close()"))
self.connect(m.actionPreferences, s, self.onPrefs)
self.connect(m.actionAbout, s, self.onAbout)
self.connect(m.actionUndo, s, self.onUndo)
self.connect(m.actionFullDatabaseCheck, s, self.onCheckDB)
self.connect(m.actionCheckMediaDatabase, s, self.onCheckMediaDB)
self.connect(m.actionDocumentation, s, self.onDocumentation)
self.connect(m.actionDonate, s, self.onDonate)
self.connect(m.actionStudyDeck, s, self.onStudyDeck)
self.connect(m.actionCreateFiltered, s, self.onCram)
self.connect(m.actionEmptyCards, s, self.onEmptyCards)
self.connect(m.actionNoteTypes, s, self.onNoteTypes)
def updateTitleBar(self):
self.setWindowTitle("Anki")
# Auto update
##########################################################################
def setupAutoUpdate(self):
import aqt.update
self.autoUpdate = aqt.update.LatestVersionFinder(self)
self.connect(self.autoUpdate, SIGNAL("newVerAvail"), self.newVerAvail)
self.connect(self.autoUpdate, SIGNAL("newMsg"), self.newMsg)
self.connect(self.autoUpdate, SIGNAL("clockIsOff"), self.clockIsOff)
self.autoUpdate.start()
def newVerAvail(self, ver):
if self.pm.meta.get('suppressUpdate', None) != ver:
aqt.update.askAndUpdate(self, ver)
def newMsg(self, data):
aqt.update.showMessages(self, data)
def clockIsOff(self, diff):
diffText = ngettext("%s second", "%s seconds", diff) % diff
warn = _("""\
In order to ensure your collection works correctly when moved between \
devices, Anki requires your computer's internal clock to be set correctly. \
The internal clock can be wrong even if your system is showing the correct \
local time.
Please go to the time settings on your computer and check the following:
- AM/PM
- Clock drift
- Day, month and year
- Timezone
- Daylight savings
Difference to correct time: %s.""") % diffText
showWarning(warn)
self.app.closeAllWindows()
# Count refreshing
##########################################################################
def setupRefreshTimer(self):
# every 10 minutes
self.progress.timer(10*60*1000, self.onRefreshTimer, True)
def onRefreshTimer(self):
if self.state == "deckBrowser":
self.deckBrowser.refresh()
elif self.state == "overview":
self.overview.refresh()
# Permanent libanki hooks
##########################################################################
def setupHooks(self):
addHook("modSchema", self.onSchemaMod)
addHook("remNotes", self.onRemNotes)
addHook("odueInvalid", self.onOdueInvalid)
def onOdueInvalid(self):
showWarning(_("""\
Invalid property found on card. Please use Tools>Check Database, \
and if the problem comes up again, please ask on the support site."""))
# Log note deletion
##########################################################################
def onRemNotes(self, col, nids):
path = os.path.join(self.pm.profileFolder(), "deleted.txt")
existed = os.path.exists(path)
with open(path, "a") as f:
if not existed:
f.write("nid\tmid\tfields\n")
for id, mid, flds in col.db.execute(
"select id, mid, flds from notes where id in %s" %
ids2str(nids)):
fields = splitFields(flds)
f.write(("\t".join([str(id), str(mid)] + fields)).encode("utf8"))
f.write("\n")
# Schema modifications
##########################################################################
def onSchemaMod(self, arg):
# if triggered in sync, make sure we don't use the gui
if not self.inMainThread():
return True
# if from the full sync menu, ignore
if self.hideSchemaMsg:
return True
return askUser(_("""\
The requested change will require a full upload of the database when \
you next synchronize your collection. If you have reviews or other changes \
waiting on another device that haven't been synchronized here yet, they \
will be lost. Continue?"""))
# Advanced features
##########################################################################
def onCheckDB(self):
"True if no problems"
self.progress.start(immediate=True)
ret, ok = self.col.fixIntegrity()
self.progress.finish()
if not ok:
showText(ret)
else:
tooltip(ret)
self.reset()
return ret
def onCheckMediaDB(self):
self.progress.start(immediate=True)
(nohave, unused, invalid) = self.col.media.check()
self.progress.finish()
# generate report
report = ""
if invalid:
report += _("Invalid encoding; please rename:")
report += "\n" + "\n".join(invalid)
if unused:
if report:
report += "\n\n\n"
report += _(
"In media folder but not used by any cards:")
report += "\n" + "\n".join(unused)
if nohave:
if report:
report += "\n\n\n"
report += _(
"Used on cards but missing from media folder:")
report += "\n" + "\n".join(nohave)
if not report:
tooltip(_("No unused or missing files found."))
return
# show report and offer to delete
diag = QDialog(self)
diag.setWindowTitle("Anki")
layout = QVBoxLayout(diag)
diag.setLayout(layout)
text = QTextEdit()
text.setReadOnly(True)
text.setPlainText(report)
layout.addWidget(text)
box = QDialogButtonBox(QDialogButtonBox.Close)
layout.addWidget(box)
b = QPushButton(_("Delete Unused"))
b.setAutoDefault(False)
box.addButton(b, QDialogButtonBox.ActionRole)
b.connect(
b, SIGNAL("clicked()"), lambda u=unused, d=diag: self.deleteUnused(u, d))
diag.connect(box, SIGNAL("rejected()"), diag, SLOT("reject()"))
diag.setMinimumHeight(400)
diag.setMinimumWidth(500)
diag.exec_()
def deleteUnused(self, unused, diag):
if not askUser(
_("Delete unused media?")):
return
mdir = self.col.media.dir()
for f in unused:
path = os.path.join(mdir, f)
send2trash(path)
tooltip(_("Deleted."))
diag.close()
def onStudyDeck(self):
from aqt.studydeck import StudyDeck
ret = StudyDeck(
self, dyn=True, current=self.col.decks.current()['name'])
if ret.name:
self.col.decks.select(self.col.decks.id(ret.name))
self.moveToState("overview")
def onEmptyCards(self):
self.progress.start(immediate=True)
cids = self.col.emptyCids()
if not cids:
self.progress.finish()
tooltip(_("No empty cards."))
return
report = self.col.emptyCardReport(cids)
self.progress.finish()
part1 = ngettext("%d card", "%d cards", len(cids)) % len(cids)
part1 = _("%s to delete:") % part1
diag, box = showText(part1 + "\n\n" + report, run=False)
box.addButton(_("Delete Cards"), QDialogButtonBox.AcceptRole)
box.button(QDialogButtonBox.Close).setDefault(True)
def onDelete():
QDialog.accept(diag)
self.checkpoint(_("Delete Empty"))
self.col.remCards(cids)
tooltip(ngettext("%d card deleted.", "%d cards deleted.", len(cids)) % len(cids))
self.reset()
diag.connect(box, SIGNAL("accepted()"), onDelete)
diag.show()
# Debugging
######################################################################
def onDebug(self):
d = self.debugDiag = QDialog()
frm = aqt.forms.debug.Ui_Dialog()
frm.setupUi(d)
s = self.debugDiagShort = QShortcut(QKeySequence("ctrl+return"), d)
self.connect(s, SIGNAL("activated()"),
lambda: self.onDebugRet(frm))
s = self.debugDiagShort = QShortcut(
QKeySequence("ctrl+shift+return"), d)
self.connect(s, SIGNAL("activated()"),
lambda: self.onDebugPrint(frm))
d.show()
def _captureOutput(self, on):
mw = self
class Stream(object):
def write(self, data):
mw._output += data
if on:
self._output = ""
self._oldStderr = sys.stderr
self._oldStdout = sys.stdout
s = Stream()
sys.stderr = s
sys.stdout = s
else:
sys.stderr = self._oldStderr
sys.stdout = self._oldStdout
def _debugCard(self):
return self.reviewer.card.__dict__
def _debugBrowserCard(self):
return aqt.dialogs._dialogs['Browser'][1].card.__dict__
def onDebugPrint(self, frm):
frm.text.setPlainText("pp(%s)" % frm.text.toPlainText())
self.onDebugRet(frm)
def onDebugRet(self, frm):
import pprint, traceback
text = frm.text.toPlainText()
card = self._debugCard
bcard = self._debugBrowserCard
mw = self
pp = pprint.pprint
self._captureOutput(True)
try:
exec text
except:
self._output += traceback.format_exc()
self._captureOutput(False)
buf = ""
for c, line in enumerate(text.strip().split("\n")):
if c == 0:
buf += ">>> %s\n" % line
else:
buf += "... %s\n" % line
try:
frm.log.appendPlainText(buf + (self._output or "<no output>"))
except UnicodeDecodeError:
frm.log.appendPlainText(_("<non-unicode text>"))
frm.log.ensureCursorVisible()
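    # Usage sketch for the debug console (hypothetical input): `mw` is this
    # window, `card()`/`bcard()` dump the current reviewer/browser card, and
    # `pp` is pprint.pprint. Ctrl+shift+return wraps the buffer in pp(...)
    # before evaluating, so entering `card()` that way pretty-prints the card.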
# System specific code
##########################################################################
def setupFonts(self):
f = QFontInfo(self.font())
ws = QWebSettings.globalSettings()
self.fontHeight = max(14, f.pixelSize())
self.fontFamily = f.family()
self.fontHeightDelta = max(0, self.fontHeight - 13)
ws.setFontFamily(QWebSettings.StandardFont, self.fontFamily)
ws.setFontSize(QWebSettings.DefaultFontSize, self.fontHeight)
def setupSystemSpecific(self):
self.hideMenuAccels = False
if isMac:
# mac users expect a minimize option
self.minimizeShortcut = QShortcut("Ctrl+M", self)
self.connect(self.minimizeShortcut, SIGNAL("activated()"),
self.onMacMinimize)
self.hideMenuAccels = True
self.maybeHideAccelerators()
self.hideStatusTips()
elif isWin:
# make sure ctypes is bundled
from ctypes import windll, wintypes
_dummy = windll
_dummy = wintypes
def maybeHideAccelerators(self, tgt=None):
if not self.hideMenuAccels:
return
tgt = tgt or self
for action in tgt.findChildren(QAction):
txt = unicode(action.text())
m = re.match("^(.+)\(&.+\)(.+)?", txt)
if m:
action.setText(m.group(1) + (m.group(2) or ""))
def hideStatusTips(self):
for action in self.findChildren(QAction):
action.setStatusTip("")
def onMacMinimize(self):
self.setWindowState(self.windowState() | Qt.WindowMinimized)
# Single instance support
##########################################################################
def setupAppMsg(self):
self.connect(self.app, SIGNAL("appMsg"), self.onAppMsg)
def onAppMsg(self, buf):
if self.state == "startup":
# try again in a second
return self.progress.timer(1000, lambda: self.onAppMsg(buf), False)
elif self.state == "profileManager":
# can't raise window while in profile manager
if buf == "raise":
return
self.pendingImport = buf
return tooltip(_("Deck will be imported when a profile is opened."))
if not self.interactiveState() or self.progress.busy():
# we can't raise the main window while in profile dialog, syncing, etc
if buf != "raise":
showInfo(_("""\
Please ensure a profile is open and Anki is not busy, then try again."""),
parent=None)
return
# raise window
if isWin:
# on windows we can raise the window by minimizing and restoring
self.showMinimized()
self.setWindowState(Qt.WindowActive)
self.showNormal()
else:
# on osx we can raise the window. on unity the icon in the tray will just flash.
self.activateWindow()
self.raise_()
if buf == "raise":
return
# import
if not isinstance(buf, unicode):
buf = unicode(buf, "utf8", "ignore")
self.handleImport(buf)
|
timrae/anki
|
aqt/main.py
|
Python
|
agpl-3.0
| 39,738 | 0.000906 |
#! /usr/bin/env python
import os
import time
import traceback
import re
import locale
import subprocess
import zlib
import zipfile
import private.metadata as metadata
import private.messages as messages
from glob import glob
from private.exceptionclasses import CustomError, CritError, SyntaxError, LogicError
from private.preliminaries import print_error
#== Directory modification functions =================
def delete_files(pathname):
"""Delete files specified by a path
This function deletes a possibly-empty list of files whose names match
`pathname`, which must be a string containing a path specification.
`pathname` can be either absolute (like /usr/src/Python-1.5/Makefile)
or relative (like ../../Tools/*/*.gif). It can contain shell-style wildcards.
"""
print "\nDelete files", pathname
for f in glob(pathname):
os.remove(f)
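# Usage sketch (hypothetical path): remove every .log file under ../temp,
# using the relative wildcard form described in the docstring above.
#
#     delete_files('../temp/*.log')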
def remove_dir(pathname, options = '@DEFAULTVALUE@'):
"""Remove a directory
This function completely removes the directory specified by `pathname`
using the 'rmdir' command on Windows platforms and the 'rm' command
on Unix platforms. This is useful for removing symlinks without
deleting the source files or directory.
"""
if os.name == 'posix':
os_command = 'rmdirunix'
if pathname[-1] == '/':
pathname = pathname[0:-1]
else:
os_command = 'rmdirwin'
command = metadata.commands[os_command]
if options == '@DEFAULTVALUE@':
options = metadata.default_options[os_command]
subprocess.check_call(command % (options, pathname), shell=True)
def check_manifest(manifestlog = '@DEFAULTVALUE@',
output_dir = '@DEFAULTVALUE@',
makelog = '@DEFAULTVALUE@'):
"""
Produce an error if there are any .dta files in "output_dir" and all
non-hidden sub-directories that are not in the manifest file "manifestlog",
and produce a warning if there are .txt or .csv files not in the manifest
along with a list of these files. All log is printed to "makelog" log file.
"""
# metadata.settings should not be part of argument defaults so that they can be
# overwritten by make_log.set_option
if manifestlog == '@DEFAULTVALUE@':
manifestlog = metadata.settings['manifest_file']
if output_dir == '@DEFAULTVALUE@':
output_dir = metadata.settings['output_dir']
if makelog == '@DEFAULTVALUE@':
makelog = metadata.settings['makelog_file']
print "\nCheck manifest log file", manifestlog
# Open main log file
try:
LOGFILE = open(makelog, 'ab')
except Exception as errmsg:
print errmsg
raise CritError(messages.crit_error_log % makelog)
try:
# Open manifest log file
try:
MANIFESTFILE = open(manifestlog, 'rU')
except Exception as errmsg:
print errmsg
raise CritError(messages.crit_error_log % manifestlog)
manifest_lines = MANIFESTFILE.readlines()
MANIFESTFILE.close()
# Get file list
try:
            file_list = []
for i in range(len(manifest_lines)):
if manifest_lines[i].startswith('File: '):
filepath = os.path.abspath(manifest_lines[i][6:].rstrip())
ext = os.path.splitext(filepath)[1]
if ext == '':
filepath = filepath + '.dta'
file_list.append( filepath )
except Exception as errmsg:
print errmsg
raise SyntaxError(messages.syn_error_manifest % manifestlog)
if not os.path.isdir(output_dir):
raise CritError(messages.crit_error_no_directory % (output_dir))
# Loop over all levels of sub-directories of output_dir
for root, dirs, files in os.walk(output_dir, topdown = True):
# Ignore non-hidden sub-directories
dirs_to_keep = []
for dirname in dirs:
if not dirname.startswith('.'):
dirs_to_keep.append(dirname)
dirs[:] = dirs_to_keep
# Check each file
for filename in files:
ext = os.path.splitext(filename)[1]
fullpath = os.path.abspath( os.path.join(root, filename) )
# non-hidden .dta file: error
if (not filename.startswith('.')) and (ext == '.dta'):
print 'Checking: ', fullpath
if not (fullpath in file_list):
raise CritError(messages.crit_error_no_dta_file % (filename, manifestlog))
# non-hidden .csv file: warning
if (not filename.startswith('.')) and (ext == '.csv'):
print 'Checking: ', fullpath
if not (fullpath in file_list):
print messages.note_no_csv_file % (filename, manifestlog)
print >> LOGFILE, messages.note_no_csv_file % (filename, manifestlog)
# non-hidden .txt file: warning
if (not filename.startswith('.')) and (ext == '.txt'):
print 'Checking: ', fullpath
if not (fullpath in file_list):
print messages.note_no_txt_file % (filename, manifestlog)
print >> LOGFILE, messages.note_no_txt_file % (filename, manifestlog)
except:
print_error(LOGFILE)
LOGFILE.close()
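# check_manifest only inspects manifest lines of the form "File: <path>",
# e.g. (hypothetical entries):
#
#     File: ../output/results.dta
#     File: ../output/table1.txt
#
# An extension-less entry is treated as a .dta file.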
def list_directory(top, makelog = '@DEFAULTVALUE@'):
"""List directories
This function lists all non-hidden sub-directories of the directory
specified by `top`, a path, and their content from the top down.
It writes their names, modified times, and sizes in bytes to the
log file specified by the path `makelog`.
"""
# metadata.settings should not be part of argument defaults so that they can be
# overwritten by make_log.set_option
if makelog == '@DEFAULTVALUE@':
makelog = metadata.settings['makelog_file']
print "\nList all files in directory", top
# To print numbers (file sizes) with thousand separator
locale.setlocale(locale.LC_ALL, '')
makelog = re.sub('\\\\', '/', makelog)
try:
LOGFILE = open(makelog, 'ab')
except Exception as errmsg:
print errmsg
raise CritError(messages.crit_error_log % makelog)
print >> LOGFILE, '\n'
print >> LOGFILE, 'List of all files in sub-directories in', top
try:
if os.path.isdir(top):
for root, dirs, files in os.walk(top, topdown = True):
# Ignore non-hidden sub-directories
dirs_to_keep = []
for dirname in dirs:
if not dirname.startswith('.'):
dirs_to_keep.append(dirname)
dirs[:] = dirs_to_keep
# Print out the sub-directory and its time stamp
created = os.stat(root).st_mtime
asciiTime = time.asctime(time.localtime(created))
print >> LOGFILE, root
print >> LOGFILE, 'created/modified', asciiTime
# Print out all the files in the sub-directories
for name in files:
full_name = os.path.join(root, name)
created = os.path.getmtime(full_name)
size = os.path.getsize(full_name)
asciiTime = time.asctime(time.localtime(created))
print >> LOGFILE, '%50s' % name, '--- created/modified', asciiTime, \
'(', locale.format('%d', size, 1), 'bytes )'
except:
print_error(LOGFILE)
print >> LOGFILE, '\n'
LOGFILE.close()
def clear_dirs(*args):
"""Create fresh directories
This function creates a directory for each path specified in
*args if such a directory does not already exist. It deletes
all files in each directory that already exists while keeping
directory structure (i.e. sub-directories). This function is
safe for symlinks.
"""
for dir in args:
if os.path.isdir(dir):
remove_dir(dir)
os.makedirs(dir)
print 'Cleared:', dir
def unzip(file_name, output_dir):
zip = zipfile.ZipFile(file_name, allowZip64=True)
zip.extractall(output_dir)
zip.close()
def unzip_externals(external_dir='@DEFAULTVALUE@'):
if external_dir == '@DEFAULTVALUE@':
external_dir = metadata.settings['external_dir']
for dirname, subdirs, files in os.walk(external_dir):
for filename in files:
absname = os.path.abspath(os.path.join(dirname, filename))
if zipfile.is_zipfile(absname):
unzip(absname, dirname)
def zip_dir(source_dir, dest):
zf = zipfile.ZipFile('%s.zip' % (dest), 'w', zipfile.ZIP_DEFLATED, allowZip64=True)
abs_src = os.path.abspath(source_dir)
for dirname, subdirs, files in os.walk(source_dir):
for filename in files:
absname = os.path.abspath(os.path.join(dirname, filename))
arcname = absname[len(abs_src) + 1:]
print 'zipping %s as %s' % (os.path.join(dirname, filename), arcname)
zf.write(absname, arcname)
zf.close()
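# A minimal demo of the zip round-trip (directory names are hypothetical);
# run this module directly to exercise zip_dir and unzip.
if __name__ == '__main__':
    zip_dir('../output', '../output_backup')
    unzip('../output_backup.zip', '../output_restored')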
|
gslab-econ/gslab_python
|
gslab_make/dir_mod.py
|
Python
|
mit
| 9,540 | 0.005346 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011, Nicolas Clairon
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of the University of California, Berkeley nor the
# names of its contributors may be used to endorse or promote products
# derived from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE REGENTS AND CONTRIBUTORS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE REGENTS AND CONTRIBUTORS BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import datetime
import logging
from copy import deepcopy
log = logging.getLogger(__name__)
from mongo_exceptions import StructureError, BadKeyError, AuthorizedTypeError
from helpers import DotCollapsedDict
# fields which do not need to be declared in the skeleton
STRUCTURE_KEYWORDS = []
class SchemaProperties(type):
def __new__(cls, name, bases, attrs):
attrs['_protected_field_names'] = set(
['_protected_field_names', '_namespaces', '_required_namespace'])
for base in bases:
parent = base.__mro__[0]
if hasattr(parent, 'skeleton'):
if parent.skeleton is not None:
if parent.skeleton:
if 'skeleton' not in attrs and parent.skeleton:
attrs['skeleton'] = parent.skeleton
else:
obj_skeleton = attrs.get('skeleton', {}).copy()
attrs['skeleton'] = parent.skeleton.copy()
attrs['skeleton'].update(obj_skeleton)
if hasattr(parent, 'optional'):
if parent.optional is not None:
if parent.optional:
if 'optional' not in attrs and parent.optional:
attrs['optional'] = parent.optional
else:
obj_optional = attrs.get('optional', {}).copy()
attrs['optional'] = parent.optional.copy()
attrs['optional'].update(obj_optional)
if hasattr(parent, 'default_values'):
if parent.default_values:
obj_default_values = attrs.get('default_values', {}).copy()
attrs['default_values'] = parent.default_values.copy()
attrs['default_values'].update(obj_default_values)
if hasattr(parent, 'skeleton') or hasattr(parent, 'optional'):
if attrs.get('authorized_types'):
attrs['authorized_types'] = list(set(parent.authorized_types).union(set(attrs['authorized_types'])))
for mro in bases[0].__mro__:
attrs['_protected_field_names'] = attrs['_protected_field_names'].union(list(mro.__dict__))
attrs['_protected_field_names'] = list(attrs['_protected_field_names'])
attrs['_namespaces'] = []
attrs['_collapsed_struct'] = {}
if (attrs.get('skeleton') or attrs.get('optional')) and name not in ["SchemaDocument", "Document"]:
base = bases[0]
if not attrs.get('authorized_types'):
attrs['authorized_types'] = base.authorized_types
if attrs.get('skeleton'):
base._validate_skeleton(attrs['skeleton'], name, attrs.get('authorized_types'))
attrs['_namespaces'].extend(list(base._SchemaDocument__walk_dict(attrs['skeleton'])))
attrs['_collapsed_struct'].update(DotCollapsedDict(attrs['skeleton'], remove_under_type=True))
if attrs.get('optional'):
base._validate_skeleton(attrs['optional'], name, attrs.get('authorized_types'))
attrs['_namespaces'].extend(list(base._SchemaDocument__walk_dict(attrs['optional'])))
attrs['_collapsed_struct'].update(DotCollapsedDict(attrs['optional'], remove_under_type=True))
cls._validate_descriptors(attrs)
if (attrs.get('skeleton') or attrs.get('optional')):
skel_doc = ""
for k, v in attrs.get('skeleton', {}).iteritems():
skel_doc += " "*8+k+" : "+str(v)+"\n"
opt_doc = ""
for k, v in attrs.get('optional', {}).iteritems():
opt_doc += " "*8+k+" : "+str(v)+"\n"
attrs['__doc__'] = attrs.get('__doc__', '')+"""
required fields: {
%s }
optional fields: {
%s }
""" % (skel_doc, opt_doc)
return type.__new__(cls, name, bases, attrs)
@classmethod
def _validate_descriptors(cls, attrs):
for dv in attrs.get('default_values', {}):
if not dv in attrs['_namespaces']:
raise ValueError("Error in default_values: can't find %s in skeleton" % dv )
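# Illustration of the merge performed above (hypothetical classes): a subclass
# inherits and extends its parent's declarations, so given
#
#     class A(SchemaDocument): skeleton = {'a': int}
#     class B(A):              skeleton = {'b': int}
#
# B.skeleton ends up as {'a': int, 'b': int}.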
class SchemaDocument(dict):
__metaclass__ = SchemaProperties
skeleton = None
optional = None
default_values = {}
authorized_types = [
type(None),
bool,
int,
long,
float,
unicode,
str,
basestring,
list,
dict,
datetime.datetime,
]
def __init__(self, doc=None, gen_skel=True, gen_auth_types=True):
"""
        doc : a dictionary
        gen_skel : if True, automatically generate the skeleton of the doc,
          filled with NoneType, each time validate() is called. Note that
          if doc is not {}, gen_skel is always False. If gen_skel is False,
          default_values cannot be filled.
        gen_auth_types: if True, automatically generate the self.authorized_types
          attribute from self.authorized_types
"""
if self.skeleton is None:
self.skeleton = {}
# init
if doc:
for k, v in doc.iteritems():
self[k] = v
gen_skel = False
if gen_skel:
self.generate_skeleton()
if self.default_values:
if self.skeleton:
self._set_default_fields(self, self.skeleton)
if self.optional:
self._set_default_fields(self, self.optional)
def generate_skeleton(self):
"""
        Validate and generate the skeleton of the document
        from the class skeleton (unknown values are set to None)
"""
if self.skeleton:
self.__generate_skeleton(self, self.skeleton)
if self.optional:
self.__generate_skeleton(self, self.optional)
#
# Public API end
#
@classmethod
def __walk_dict(cls, dic):
# thanks jean_b for the patch
for key, value in dic.items():
if isinstance(value, dict) and len(value):
if type(key) is type:
yield '$%s' % key.__name__
else:
yield key
for child_key in cls.__walk_dict(value):
if type(key) is type:
new_key = "$%s" % key.__name__
else:
new_key = key
#if type(child_key) is type:
# new_child_key = "$%s" % child_key.__name__
#else:
if type(child_key) is not type:
new_child_key = child_key
yield '%s.%s' % (new_key, new_child_key)
elif type(key) is type:
yield '$%s' % key.__name__
# elif isinstance(value, list) and len(value):
# if isinstance(value[0], dict):
# for child_key in cls.__walk_dict(value[0]):
# #if type(key) is type:
# # new_key = "$%s" % key.__name__
# #else:
# if type(key) is not type:
# new_key = key
# #if type(child_key) is type:
# # new_child_key = "$%s" % child_key.__name__
# #else:
# if type(child_key) is not type:
# new_child_key = child_key
# yield '%s.%s' % (new_key, new_child_key)
# else:
# if type(key) is not type:
# yield key
# #else:
# # yield ""
else:
if type(key) is not type:
yield key
#else:
# yield ""
@classmethod
def _validate_skeleton(cls, skeleton, name, authorized_types):
"""
validate if all fields in self.skeleton are in authorized types.
"""
##############
def __validate_skeleton(struct, name, authorized):
if type(struct) is type:
if struct not in authorized_types:
if struct not in authorized_types:
raise StructureError("%s: %s is not an authorized type" % (name, struct))
elif isinstance(struct, dict):
for key in struct:
if isinstance(key, basestring):
if "." in key:
raise BadKeyError(
"%s: %s must not contain '.'" % (name, key))
if key.startswith('$'):
raise BadKeyError(
"%s: %s must not start with '$'" % (name, key))
elif type(key) is type:
if not key in authorized_types:
raise AuthorizedTypeError(
"%s: %s is not an authorized type" % (name, key))
else:
raise StructureError(
"%s: %s must be a basestring or a type" % (name, key))
if struct[key] is None:
pass
elif isinstance(struct[key], dict):
__validate_skeleton(struct[key], name, authorized_types)
elif isinstance(struct[key], list):
__validate_skeleton(struct[key], name, authorized_types)
elif isinstance(struct[key], tuple):
__validate_skeleton(struct[key], name, authorized_types)
elif isinstance(struct[key], SchemaProperties):
pass
elif hasattr(struct[key], 'skeleton'):
__validate_skeleton(struct[key], name, authorized_types)
elif (struct[key] not in authorized_types):
ok = False
for auth_type in authorized_types:
if struct[key] is None:
ok = True
else:
try:
if isinstance(struct[key], auth_type) or issubclass(struct[key], auth_type):
ok = True
except TypeError:
raise TypeError("%s: %s is not a type" % (name, struct[key]))
if not ok:
raise StructureError(
"%s: %s is not an authorized type" % (name, struct[key]))
elif isinstance(struct, list) or isinstance(struct, tuple):
for item in struct:
__validate_skeleton(item, name, authorized_types)
elif isinstance(struct, SchemaProperties):
pass
else:
ok = False
for auth_type in authorized_types:
if isinstance(struct, auth_type):
ok = True
if not ok:
raise StructureError(
"%s: %s is not an authorized_types" % (name, struct))
#################
if skeleton is None:
raise StructureError(
"%s.skeleton must not be None" % name)
if not isinstance(skeleton, dict):
raise StructureError(
"%s.skeleton must be a dict instance" % name)
__validate_skeleton(skeleton, name, authorized_types)
def _set_default_fields(self, doc, struct, path = ""):
# TODO check this out, this method must be restructured
for key in struct:
new_key = key
new_path = ".".join([path, new_key]).strip('.')
#
# default_values :
            # if the value is None, check if a default value exists.
            # if one exists and it is a function, call it; otherwise,
            # just use the value itself (dicts and lists are copied)
#
if type(key) is not type:
if doc[key] is None and new_path in self.default_values:
new_value = self.default_values[new_path]
if callable(new_value):
new_value = new_value()
elif isinstance(new_value, dict):
new_value = deepcopy(new_value)
elif isinstance(new_value, list):
new_value = new_value[:]
doc[key] = new_value
#
            # if the value is a dict, we have another skeleton to validate
#
if isinstance(struct[key], dict):
#
                # if the dict is still empty in the document, we build
                # it with None values
#
if len(struct[key]) and\
not [i for i in struct[key].keys() if type(i) is type]:
self._set_default_fields(doc[key], struct[key], new_path)
else:
if new_path in self.default_values:
new_value = self.default_values[new_path]
if callable(new_value):
new_value = new_value()
elif isinstance(new_value, dict):
new_value = deepcopy(new_value)
elif isinstance(new_value, list):
new_value = new_value[:]
doc[key] = new_value
elif isinstance(struct[key], list):
if new_path in self.default_values:
for new_value in self.default_values[new_path]:
if callable(new_value):
new_value = new_value()
elif isinstance(new_value, dict):
new_value = deepcopy(new_value)
elif isinstance(new_value, list):
new_value = new_value[:]
doc[key].append(new_value)
else: # what else
if new_path in self.default_values:
new_value = self.default_values[new_path]
if callable(new_value):
new_value = new_value()
elif isinstance(new_value, dict):
new_value = deepcopy(new_value)
elif isinstance(new_value, list):
new_value = new_value[:]
doc[key] = new_value
def __generate_skeleton(self, doc, struct, path = ""):
for key in struct:
if type(key) is type:
new_key = "$%s" % key.__name__
else:
new_key = key
new_path = ".".join([path, new_key]).strip('.')
#
            # Automatically generate the skeleton with NoneType
#
if type(key) is not type and key not in doc:
if isinstance(struct[key], dict):
if callable(struct[key]):
doc[key] = struct[key]()
else:
doc[key] = type(struct[key])()
elif struct[key] is dict:
doc[key] = {}
elif isinstance(struct[key], list):
doc[key] = type(struct[key])()
elif struct[key] is list:
doc[key] = []
elif isinstance(struct[key], tuple):
doc[key] = [None for i in range(len(struct[key]))]
else:
doc[key] = None
#
            # if the value is a dict, we have another skeleton to validate
#
if isinstance(struct[key], dict) and type(key) is not type:
self.__generate_skeleton(doc[key], struct[key], new_path)
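if __name__ == '__main__':
    # A minimal sketch, assuming this hypothetical schema, of how skeleton,
    # optional and default_values interact when a document is instantiated.
    class BlogPost(SchemaDocument):
        skeleton = {'title': unicode, 'body': unicode}
        optional = {'tags': list}
        default_values = {'tags': []}
    post = BlogPost()
    print post  # e.g. {'body': None, 'tags': [], 'title': None}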
|
namlook/MongoLite
|
mongolite/schema_document.py
|
Python
|
bsd-3-clause
| 17,726 | 0.002369 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import ConversionValueRuleSetServiceTransport
from .grpc import ConversionValueRuleSetServiceGrpcTransport
# Compile a registry of transports.
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[ConversionValueRuleSetServiceTransport]]
_transport_registry["grpc"] = ConversionValueRuleSetServiceGrpcTransport
__all__ = (
"ConversionValueRuleSetServiceTransport",
"ConversionValueRuleSetServiceGrpcTransport",
)
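# A small usage sketch: the registry maps a transport name to its class, so a
# caller can resolve the gRPC transport generically, e.g.
#
#     transport_cls = _transport_registry["grpc"]
#     assert transport_cls is ConversionValueRuleSetServiceGrpcTransport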
|
googleads/google-ads-python
|
google/ads/googleads/v9/services/services/conversion_value_rule_set_service/transports/__init__.py
|
Python
|
apache-2.0
| 1,117 | 0 |
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class AccountCreateInfo(AbstractModel):
"""账号创建信息
"""
def __init__(self):
r"""
:param UserName: 实例用户名
:type UserName: str
:param Password: 实例密码
:type Password: str
:param DBPrivileges: DB权限列表
:type DBPrivileges: list of DBPrivilege
:param Remark: 账号备注信息
:type Remark: str
:param IsAdmin: 是否为管理员账户,默认为否
:type IsAdmin: bool
"""
self.UserName = None
self.Password = None
self.DBPrivileges = None
self.Remark = None
self.IsAdmin = None
def _deserialize(self, params):
self.UserName = params.get("UserName")
self.Password = params.get("Password")
if params.get("DBPrivileges") is not None:
self.DBPrivileges = []
for item in params.get("DBPrivileges"):
obj = DBPrivilege()
obj._deserialize(item)
self.DBPrivileges.append(obj)
self.Remark = params.get("Remark")
self.IsAdmin = params.get("IsAdmin")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
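# Usage sketch (hypothetical values): _deserialize populates the declared
# attributes from a plain dict, and any undeclared key triggers the
# "fields are useless" warning emitted above.
#
#     info = AccountCreateInfo()
#     info._deserialize({"UserName": "root", "Password": "******", "Foo": 1})
#     # -> UserWarning: Foo fields are useless.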
class AccountDetail(AbstractModel):
"""账户信息详情
"""
def __init__(self):
r"""
:param Name: 账户名
:type Name: str
:param Remark: 账户备注
:type Remark: str
:param CreateTime: 账户创建时间
:type CreateTime: str
:param Status: 账户状态,1-创建中,2-正常,3-修改中,4-密码重置中,-1-删除中
:type Status: int
:param UpdateTime: 账户更新时间
:type UpdateTime: str
:param PassTime: 密码更新时间
:type PassTime: str
:param InternalStatus: 账户内部状态,正常为enable
:type InternalStatus: str
:param Dbs: 该账户对相关db的读写权限信息
:type Dbs: list of DBPrivilege
:param IsAdmin: 是否为管理员账户
:type IsAdmin: bool
"""
self.Name = None
self.Remark = None
self.CreateTime = None
self.Status = None
self.UpdateTime = None
self.PassTime = None
self.InternalStatus = None
self.Dbs = None
self.IsAdmin = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Remark = params.get("Remark")
self.CreateTime = params.get("CreateTime")
self.Status = params.get("Status")
self.UpdateTime = params.get("UpdateTime")
self.PassTime = params.get("PassTime")
self.InternalStatus = params.get("InternalStatus")
if params.get("Dbs") is not None:
self.Dbs = []
for item in params.get("Dbs"):
obj = DBPrivilege()
obj._deserialize(item)
self.Dbs.append(obj)
self.IsAdmin = params.get("IsAdmin")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class AccountPassword(AbstractModel):
"""实例账号密码信息
"""
def __init__(self):
r"""
:param UserName: 用户名
:type UserName: str
:param Password: 密码
:type Password: str
"""
self.UserName = None
self.Password = None
def _deserialize(self, params):
self.UserName = params.get("UserName")
self.Password = params.get("Password")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class AccountPrivilege(AbstractModel):
"""数据库账号权限信息。创建数据库时设置
"""
def __init__(self):
r"""
:param UserName: 数据库用户名
:type UserName: str
:param Privilege: 数据库权限。ReadWrite表示可读写,ReadOnly表示只读
:type Privilege: str
"""
self.UserName = None
self.Privilege = None
def _deserialize(self, params):
self.UserName = params.get("UserName")
self.Privilege = params.get("Privilege")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class AccountPrivilegeModifyInfo(AbstractModel):
"""数据库账号权限变更信息
"""
def __init__(self):
r"""
:param UserName: 数据库用户名
:type UserName: str
:param DBPrivileges: 账号权限变更信息
:type DBPrivileges: list of DBPrivilegeModifyInfo
"""
self.UserName = None
self.DBPrivileges = None
def _deserialize(self, params):
self.UserName = params.get("UserName")
if params.get("DBPrivileges") is not None:
self.DBPrivileges = []
for item in params.get("DBPrivileges"):
obj = DBPrivilegeModifyInfo()
obj._deserialize(item)
self.DBPrivileges.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class AccountRemark(AbstractModel):
"""账户备注信息
"""
def __init__(self):
r"""
:param UserName: 账户名
:type UserName: str
:param Remark: 对应账户新的备注信息
:type Remark: str
"""
self.UserName = None
self.Remark = None
def _deserialize(self, params):
self.UserName = params.get("UserName")
self.Remark = params.get("Remark")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class AssociateSecurityGroupsRequest(AbstractModel):
"""AssociateSecurityGroups请求参数结构体
"""
def __init__(self):
r"""
:param SecurityGroupId: 安全组ID。
:type SecurityGroupId: str
:param InstanceIdSet: 实例ID 列表,一个或者多个实例ID组成的数组。多个实例必须是同一个地域,同一个可用区,同一个项目下的。
:type InstanceIdSet: list of str
"""
self.SecurityGroupId = None
self.InstanceIdSet = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
self.InstanceIdSet = params.get("InstanceIdSet")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class AssociateSecurityGroupsResponse(AbstractModel):
"""AssociateSecurityGroups返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class Backup(AbstractModel):
"""备份文件详细信息
"""
def __init__(self):
r"""
:param FileName: 文件名,对于单库备份文件不返回此值;单库备份文件通过DescribeBackupFiles接口获取文件名
:type FileName: str
:param Size: 文件大小,单位 KB,对于单库备份文件不返回此值;单库备份文件通过DescribeBackupFiles接口获取文件大小
:type Size: int
:param StartTime: 备份开始时间
:type StartTime: str
:param EndTime: 备份结束时间
:type EndTime: str
:param InternalAddr: 内网下载地址,对于单库备份文件不返回此值;单库备份文件通过DescribeBackupFiles接口获取下载地址
:type InternalAddr: str
:param ExternalAddr: 外网下载地址,对于单库备份文件不返回此值;单库备份文件通过DescribeBackupFiles接口获取下载地址
:type ExternalAddr: str
:param Id: 备份文件唯一标识,RestoreInstance接口会用到该字段,对于单库备份文件不返回此值;单库备份文件通过DescribeBackupFiles接口获取可回档的ID
:type Id: int
:param Status: 备份文件状态(0-创建中;1-成功;2-失败)
:type Status: int
:param DBs: 多库备份时的DB列表
:type DBs: list of str
:param Strategy: 备份策略(0-实例备份;1-多库备份)
:type Strategy: int
:param BackupWay: 备份方式,0-定时备份;1-手动临时备份
:type BackupWay: int
:param BackupName: 备份任务名称,可自定义
:type BackupName: str
:param GroupId: 聚合Id,对于打包备份文件不返回此值。通过此值调用DescribeBackupFiles接口,获取单库备份文件的详细信息
:type GroupId: str
"""
self.FileName = None
self.Size = None
self.StartTime = None
self.EndTime = None
self.InternalAddr = None
self.ExternalAddr = None
self.Id = None
self.Status = None
self.DBs = None
self.Strategy = None
self.BackupWay = None
self.BackupName = None
self.GroupId = None
def _deserialize(self, params):
self.FileName = params.get("FileName")
self.Size = params.get("Size")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.InternalAddr = params.get("InternalAddr")
self.ExternalAddr = params.get("ExternalAddr")
self.Id = params.get("Id")
self.Status = params.get("Status")
self.DBs = params.get("DBs")
self.Strategy = params.get("Strategy")
self.BackupWay = params.get("BackupWay")
self.BackupName = params.get("BackupName")
self.GroupId = params.get("GroupId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class BackupFile(AbstractModel):
"""在非打包上传备份模式下,每个库对应一个备份文件
"""
def __init__(self):
r"""
:param Id: 备份文件唯一标识
:type Id: int
:param FileName: 备份文件名称
:type FileName: str
:param Size: 文件大小(K)
:type Size: int
:param DBs: 备份文件的库的名称
:type DBs: list of str
:param DownloadLink: 下载地址
:type DownloadLink: str
"""
self.Id = None
self.FileName = None
self.Size = None
self.DBs = None
self.DownloadLink = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.FileName = params.get("FileName")
self.Size = params.get("Size")
self.DBs = params.get("DBs")
self.DownloadLink = params.get("DownloadLink")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CloneDBRequest(AbstractModel):
"""CloneDB请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param RenameRestore: 按照ReNameRestoreDatabase中的库进行克隆,并重命名,新库名称必须指定
:type RenameRestore: list of RenameRestoreDatabase
"""
self.InstanceId = None
self.RenameRestore = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("RenameRestore") is not None:
self.RenameRestore = []
for item in params.get("RenameRestore"):
obj = RenameRestoreDatabase()
obj._deserialize(item)
self.RenameRestore.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CloneDBResponse(AbstractModel):
"""CloneDB返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 异步流程任务ID,使用FlowId调用DescribeFlowStatus接口获取任务执行状态
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class CompleteExpansionRequest(AbstractModel):
"""CompleteExpansion请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CompleteExpansionResponse(AbstractModel):
"""CompleteExpansion返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID,可通过接口DescribeFlowStatus查询立即切换升级任务的状态。
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class CompleteMigrationRequest(AbstractModel):
"""CompleteMigration请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
"""
self.MigrateId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CompleteMigrationResponse(AbstractModel):
"""CompleteMigration返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 完成迁移流程发起后,返回的流程id
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class CosUploadBackupFile(AbstractModel):
"""查询已经上传的备份文件大小。
"""
def __init__(self):
r"""
:param FileName: 备份名称
:type FileName: str
:param Size: 备份大小
:type Size: int
"""
self.FileName = None
self.Size = None
def _deserialize(self, params):
self.FileName = params.get("FileName")
self.Size = params.get("Size")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateAccountRequest(AbstractModel):
"""CreateAccount请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 数据库实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param Accounts: 数据库实例账户信息
:type Accounts: list of AccountCreateInfo
"""
self.InstanceId = None
self.Accounts = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("Accounts") is not None:
self.Accounts = []
for item in params.get("Accounts"):
obj = AccountCreateInfo()
obj._deserialize(item)
self.Accounts.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateAccountResponse(AbstractModel):
"""CreateAccount返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 任务流ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class CreateBackupMigrationRequest(AbstractModel):
"""CreateBackupMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param RecoveryType: 迁移任务恢复类型,FULL-全量备份恢复,FULL_LOG-全量备份+事务日志恢复,FULL_DIFF-全量备份+差异备份恢复
:type RecoveryType: str
:param UploadType: 备份上传类型,COS_URL-备份放在用户的对象存储上,提供URL。COS_UPLOAD-备份放在业务的对象存储上,需要用户上传。
:type UploadType: str
:param MigrationName: 任务名称
:type MigrationName: str
:param BackupFiles: UploadType是COS_URL时这里填URL,COS_UPLOAD这里填备份文件的名字。只支持1个备份文件,但1个备份文件内可包含多个库
:type BackupFiles: list of str
"""
self.InstanceId = None
self.RecoveryType = None
self.UploadType = None
self.MigrationName = None
self.BackupFiles = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.RecoveryType = params.get("RecoveryType")
self.UploadType = params.get("UploadType")
self.MigrationName = params.get("MigrationName")
self.BackupFiles = params.get("BackupFiles")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateBackupMigrationResponse(AbstractModel):
"""CreateBackupMigration返回参数结构体
"""
def __init__(self):
r"""
:param BackupMigrationId: 备份导入任务ID
:type BackupMigrationId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.BackupMigrationId = None
self.RequestId = None
def _deserialize(self, params):
self.BackupMigrationId = params.get("BackupMigrationId")
self.RequestId = params.get("RequestId")
class CreateBackupRequest(AbstractModel):
"""CreateBackup请求参数结构体
"""
def __init__(self):
r"""
:param Strategy: 备份策略(0-实例备份 1-多库备份)
:type Strategy: int
:param DBNames: 需要备份库名的列表(多库备份才填写)
:type DBNames: list of str
:param InstanceId: 实例ID,形如mssql-i1z41iwd
:type InstanceId: str
:param BackupName: 备份名称,若不填则自动生成“实例ID_备份开始时间戳”
:type BackupName: str
"""
self.Strategy = None
self.DBNames = None
self.InstanceId = None
self.BackupName = None
def _deserialize(self, params):
self.Strategy = params.get("Strategy")
self.DBNames = params.get("DBNames")
self.InstanceId = params.get("InstanceId")
self.BackupName = params.get("BackupName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateBackupResponse(AbstractModel):
"""CreateBackup返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 异步任务ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class CreateBasicDBInstancesRequest(AbstractModel):
"""CreateBasicDBInstances请求参数结构体
"""
def __init__(self):
r"""
:param Zone: 实例可用区,类似ap-guangzhou-1(广州一区);实例可售卖区域可以通过接口DescribeZones获取
:type Zone: str
:param Cpu: 实例的CPU核心数
:type Cpu: int
:param Memory: 实例内存大小,单位GB
:type Memory: int
:param Storage: 实例磁盘大小,单位GB
:type Storage: int
:param SubnetId: VPC子网ID,形如subnet-bdoe83fa
:type SubnetId: str
:param VpcId: VPC网络ID,形如vpc-dsp338hz
:type VpcId: str
:param MachineType: 购买实例的宿主机类型, CLOUD_PREMIUM-虚拟机高性能云盘,CLOUD_SSD-虚拟机SSD云盘
:type MachineType: str
:param InstanceChargeType: 付费模式,取值支持 PREPAID(预付费),POSTPAID(后付费)。
:type InstanceChargeType: str
:param ProjectId: 项目ID
:type ProjectId: int
:param GoodsNum: 本次购买几个实例,默认值为1。取值不超过10
:type GoodsNum: int
:param DBVersion: sqlserver版本,目前只支持:2008R2(SQL Server 2008 Enterprise),2012SP3(SQL Server 2012 Enterprise),2016SP1(SQL Server 2016 Enterprise),201602(SQL Server 2016 Standard),2017(SQL Server 2017 Enterprise),201202(SQL Server 2012 Standard),201402(SQL Server 2014 Standard),2014SP2(SQL Server 2014 Enterprise),201702(SQL Server 2017 Standard)版本。每个地域支持售卖的版本不同,可通过DescribeProductConfig接口来拉取每个地域可售卖的版本信息。不填,默认为版本2008R2。
:type DBVersion: str
:param Period: 购买实例周期,默认取值为1,表示一个月。取值不超过48
:type Period: int
:param SecurityGroupList: 安全组列表,填写形如sg-xxx的安全组ID
:type SecurityGroupList: list of str
:param AutoRenewFlag: 自动续费标志:0-正常续费 1-自动续费,默认为1自动续费。只在购买预付费实例时有效。
:type AutoRenewFlag: int
:param AutoVoucher: 是否自动使用代金券;1 - 是,0 - 否,默认不使用
:type AutoVoucher: int
:param VoucherIds: 代金券ID数组,目前单个订单只能使用一张
:type VoucherIds: list of str
:param Weekly: 可维护时间窗配置,以周为单位,表示周几允许维护,1-7分别代表周一到周末
:type Weekly: list of int
:param StartTime: 可维护时间窗配置,每天可维护的开始时间
:type StartTime: str
:param Span: 可维护时间窗配置,持续时间,单位:小时
:type Span: int
:param ResourceTags: 新建实例绑定的标签集合
:type ResourceTags: list of ResourceTag
"""
self.Zone = None
self.Cpu = None
self.Memory = None
self.Storage = None
self.SubnetId = None
self.VpcId = None
self.MachineType = None
self.InstanceChargeType = None
self.ProjectId = None
self.GoodsNum = None
self.DBVersion = None
self.Period = None
self.SecurityGroupList = None
self.AutoRenewFlag = None
self.AutoVoucher = None
self.VoucherIds = None
self.Weekly = None
self.StartTime = None
self.Span = None
self.ResourceTags = None
def _deserialize(self, params):
self.Zone = params.get("Zone")
self.Cpu = params.get("Cpu")
self.Memory = params.get("Memory")
self.Storage = params.get("Storage")
self.SubnetId = params.get("SubnetId")
self.VpcId = params.get("VpcId")
self.MachineType = params.get("MachineType")
self.InstanceChargeType = params.get("InstanceChargeType")
self.ProjectId = params.get("ProjectId")
self.GoodsNum = params.get("GoodsNum")
self.DBVersion = params.get("DBVersion")
self.Period = params.get("Period")
self.SecurityGroupList = params.get("SecurityGroupList")
self.AutoRenewFlag = params.get("AutoRenewFlag")
self.AutoVoucher = params.get("AutoVoucher")
self.VoucherIds = params.get("VoucherIds")
self.Weekly = params.get("Weekly")
self.StartTime = params.get("StartTime")
self.Span = params.get("Span")
if params.get("ResourceTags") is not None:
self.ResourceTags = []
for item in params.get("ResourceTags"):
obj = ResourceTag()
obj._deserialize(item)
self.ResourceTags.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
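# Hedged sketch of building a purchase request from a plain dict (all values
# hypothetical):
#
#     req = CreateBasicDBInstancesRequest()
#     req._deserialize({
#         "Zone": "ap-guangzhou-1", "Cpu": 2, "Memory": 4, "Storage": 100,
#         "SubnetId": "subnet-bdoe83fa", "VpcId": "vpc-dsp338hz",
#         "MachineType": "CLOUD_PREMIUM", "InstanceChargeType": "POSTPAID",
#     })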
class CreateBasicDBInstancesResponse(AbstractModel):
"""CreateBasicDBInstances返回参数结构体
"""
def __init__(self):
r"""
:param DealName: 订单名称
:type DealName: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DealName = None
self.RequestId = None
def _deserialize(self, params):
self.DealName = params.get("DealName")
self.RequestId = params.get("RequestId")
class CreateDBInstancesRequest(AbstractModel):
"""CreateDBInstances请求参数结构体
"""
def __init__(self):
r"""
:param Zone: 实例可用区,类似ap-guangzhou-1(广州一区);实例可售卖区域可以通过接口DescribeZones获取
:type Zone: str
:param Memory: 实例内存大小,单位GB
:type Memory: int
:param Storage: 实例磁盘大小,单位GB
:type Storage: int
:param InstanceChargeType: 付费模式,取值支持 PREPAID(预付费),POSTPAID(后付费)。
:type InstanceChargeType: str
:param ProjectId: 项目ID
:type ProjectId: int
:param GoodsNum: 本次购买几个实例,默认值为1。取值不超过10
:type GoodsNum: int
:param SubnetId: VPC子网ID,形如subnet-bdoe83fa;SubnetId和VpcId需同时设置或者同时不设置
:type SubnetId: str
:param VpcId: VPC网络ID,形如vpc-dsp338hz;SubnetId和VpcId需同时设置或者同时不设置
:type VpcId: str
:param Period: 购买实例周期,默认取值为1,表示一个月。取值不超过48
:type Period: int
:param AutoVoucher: 是否自动使用代金券;1 - 是,0 - 否,默认不使用
:type AutoVoucher: int
:param VoucherIds: 代金券ID数组,目前单个订单只能使用一张
:type VoucherIds: list of str
:param DBVersion: sqlserver版本,目前只支持:2008R2(SQL Server 2008 Enterprise),2012SP3(SQL Server 2012 Enterprise),2016SP1(SQL Server 2016 Enterprise),201602(SQL Server 2016 Standard)2017(SQL Server 2017 Enterprise)版本。每个地域支持售卖的版本不同,可通过DescribeProductConfig接口来拉取每个地域可售卖的版本信息。不填,默认为版本2008R2。
:type DBVersion: str
:param AutoRenewFlag: 自动续费标志:0-正常续费 1-自动续费,默认为1自动续费。只在购买预付费实例时有效。
:type AutoRenewFlag: int
:param SecurityGroupList: 安全组列表,填写形如sg-xxx的安全组ID
:type SecurityGroupList: list of str
:param Weekly: 可维护时间窗配置,以周为单位,表示周几允许维护,1-7分别代表周一到周末
:type Weekly: list of int
:param StartTime: 可维护时间窗配置,每天可维护的开始时间
:type StartTime: str
:param Span: 可维护时间窗配置,持续时间,单位:小时
:type Span: int
:param HAType: 购买高可用实例的类型:DUAL-双机高可用 CLUSTER-集群,默认值为DUAL
:type HAType: str
:param MultiZones: 是否跨可用区部署,默认值为false
:type MultiZones: bool
:param ResourceTags: 新建实例绑定的标签集合
:type ResourceTags: list of ResourceTag
"""
self.Zone = None
self.Memory = None
self.Storage = None
self.InstanceChargeType = None
self.ProjectId = None
self.GoodsNum = None
self.SubnetId = None
self.VpcId = None
self.Period = None
self.AutoVoucher = None
self.VoucherIds = None
self.DBVersion = None
self.AutoRenewFlag = None
self.SecurityGroupList = None
self.Weekly = None
self.StartTime = None
self.Span = None
self.HAType = None
self.MultiZones = None
self.ResourceTags = None
def _deserialize(self, params):
self.Zone = params.get("Zone")
self.Memory = params.get("Memory")
self.Storage = params.get("Storage")
self.InstanceChargeType = params.get("InstanceChargeType")
self.ProjectId = params.get("ProjectId")
self.GoodsNum = params.get("GoodsNum")
self.SubnetId = params.get("SubnetId")
self.VpcId = params.get("VpcId")
self.Period = params.get("Period")
self.AutoVoucher = params.get("AutoVoucher")
self.VoucherIds = params.get("VoucherIds")
self.DBVersion = params.get("DBVersion")
self.AutoRenewFlag = params.get("AutoRenewFlag")
self.SecurityGroupList = params.get("SecurityGroupList")
self.Weekly = params.get("Weekly")
self.StartTime = params.get("StartTime")
self.Span = params.get("Span")
self.HAType = params.get("HAType")
self.MultiZones = params.get("MultiZones")
if params.get("ResourceTags") is not None:
self.ResourceTags = []
for item in params.get("ResourceTags"):
obj = ResourceTag()
obj._deserialize(item)
self.ResourceTags.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateDBInstancesResponse(AbstractModel):
"""CreateDBInstances返回参数结构体
"""
def __init__(self):
r"""
:param DealName: 订单名称
:type DealName: str
:param DealNames: 订单名称数组
:type DealNames: list of str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DealName = None
self.DealNames = None
self.RequestId = None
def _deserialize(self, params):
self.DealName = params.get("DealName")
self.DealNames = params.get("DealNames")
self.RequestId = params.get("RequestId")
class CreateDBRequest(AbstractModel):
"""CreateDB请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param DBs: 数据库创建信息
:type DBs: list of DBCreateInfo
"""
self.InstanceId = None
self.DBs = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("DBs") is not None:
self.DBs = []
for item in params.get("DBs"):
obj = DBCreateInfo()
obj._deserialize(item)
self.DBs.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateDBResponse(AbstractModel):
"""CreateDB返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 任务流ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class CreateIncrementalMigrationRequest(AbstractModel):
"""CreateIncrementalMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param BackupFiles: 增量备份文件,全量备份任务UploadType是COS_URL时这里填URL,是COS_UPLOAD这里填备份文件的名字;只支持1个备份文件,但1个备份文件内可包含多个库
:type BackupFiles: list of str
:param IsRecovery: 是否需要恢复,NO-不需要,YES-需要,默认不需要
:type IsRecovery: str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.BackupFiles = None
self.IsRecovery = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.BackupFiles = params.get("BackupFiles")
self.IsRecovery = params.get("IsRecovery")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateIncrementalMigrationResponse(AbstractModel):
"""CreateIncrementalMigration返回参数结构体
"""
def __init__(self):
r"""
:param IncrementalMigrationId: 增量备份导入任务ID
:type IncrementalMigrationId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.IncrementalMigrationId = None
self.RequestId = None
def _deserialize(self, params):
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
self.RequestId = params.get("RequestId")
class CreateMigrationRequest(AbstractModel):
"""CreateMigration请求参数结构体
"""
def __init__(self):
r"""
:param MigrateName: 迁移任务的名称
:type MigrateName: str
:param MigrateType: 迁移类型(1:结构迁移 2:数据迁移 3:增量同步)
:type MigrateType: int
:param SourceType: 迁移源的类型 1:TencentDB for SQLServer 2:云服务器自建SQLServer数据库 4:SQLServer备份还原 5:SQLServer备份还原(COS方式)
:type SourceType: int
:param Source: 迁移源
:type Source: :class:`tencentcloud.sqlserver.v20180328.models.MigrateSource`
:param Target: 迁移目标
:type Target: :class:`tencentcloud.sqlserver.v20180328.models.MigrateTarget`
:param MigrateDBSet: 迁移DB对象 ,离线迁移不使用(SourceType=4或SourceType=5)。
:type MigrateDBSet: list of MigrateDB
:param RenameRestore: 按照ReNameRestoreDatabase中的库进行恢复,并重命名,不填则按照默认方式命名恢复的库,且恢复所有的库。SourceType=5的情况下有效。
:type RenameRestore: list of RenameRestoreDatabase
"""
self.MigrateName = None
self.MigrateType = None
self.SourceType = None
self.Source = None
self.Target = None
self.MigrateDBSet = None
self.RenameRestore = None
def _deserialize(self, params):
self.MigrateName = params.get("MigrateName")
self.MigrateType = params.get("MigrateType")
self.SourceType = params.get("SourceType")
if params.get("Source") is not None:
self.Source = MigrateSource()
self.Source._deserialize(params.get("Source"))
if params.get("Target") is not None:
self.Target = MigrateTarget()
self.Target._deserialize(params.get("Target"))
if params.get("MigrateDBSet") is not None:
self.MigrateDBSet = []
for item in params.get("MigrateDBSet"):
obj = MigrateDB()
obj._deserialize(item)
self.MigrateDBSet.append(obj)
if params.get("RenameRestore") is not None:
self.RenameRestore = []
for item in params.get("RenameRestore"):
obj = RenameRestoreDatabase()
obj._deserialize(item)
self.RenameRestore.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateMigrationResponse(AbstractModel):
"""CreateMigration返回参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.MigrateId = None
self.RequestId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
self.RequestId = params.get("RequestId")
class CreatePublishSubscribeRequest(AbstractModel):
"""CreatePublishSubscribe请求参数结构体
"""
def __init__(self):
r"""
:param PublishInstanceId: 发布实例ID,形如mssql-j8kv137v
:type PublishInstanceId: str
:param SubscribeInstanceId: 订阅实例ID,形如mssql-j8kv137v
:type SubscribeInstanceId: str
:param DatabaseTupleSet: 数据库的订阅发布关系集合
:type DatabaseTupleSet: list of DatabaseTuple
:param PublishSubscribeName: 发布订阅的名称,默认值为:default_name
:type PublishSubscribeName: str
"""
self.PublishInstanceId = None
self.SubscribeInstanceId = None
self.DatabaseTupleSet = None
self.PublishSubscribeName = None
def _deserialize(self, params):
self.PublishInstanceId = params.get("PublishInstanceId")
self.SubscribeInstanceId = params.get("SubscribeInstanceId")
if params.get("DatabaseTupleSet") is not None:
self.DatabaseTupleSet = []
for item in params.get("DatabaseTupleSet"):
obj = DatabaseTuple()
obj._deserialize(item)
self.DatabaseTupleSet.append(obj)
self.PublishSubscribeName = params.get("PublishSubscribeName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreatePublishSubscribeResponse(AbstractModel):
"""CreatePublishSubscribe返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID,可通过接口DescribeFlowStatus查询立即切换升级任务的状态。
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class CreateReadOnlyDBInstancesRequest(AbstractModel):
"""CreateReadOnlyDBInstances请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 主实例ID,格式如:mssql-3l3fgqn7
:type InstanceId: str
:param Zone: 实例可用区,类似ap-guangzhou-1(广州一区);实例可售卖区域可以通过接口DescribeZones获取
:type Zone: str
:param ReadOnlyGroupType: 只读组类型选项,1-按照一个实例一个只读组的方式发货,2-新建只读组后发货,所有实例都在这个只读组下面, 3-发货的所有实例都在已有的只读组下面
:type ReadOnlyGroupType: int
:param Memory: 实例内存大小,单位GB
:type Memory: int
:param Storage: 实例磁盘大小,单位GB
:type Storage: int
:param ReadOnlyGroupForcedUpgrade: 0-默认不升级主实例,1-强制升级主实例完成ro部署;主实例为非集群版时需要填1,强制升级为集群版。填1 说明您已同意将主实例升级到集群版实例。
:type ReadOnlyGroupForcedUpgrade: int
:param ReadOnlyGroupId: ReadOnlyGroupType=3时必填,已存在的只读组ID
:type ReadOnlyGroupId: str
:param ReadOnlyGroupName: ReadOnlyGroupType=2时必填,新建的只读组名称
:type ReadOnlyGroupName: str
:param ReadOnlyGroupIsOfflineDelay: ReadOnlyGroupType=2时必填,新建的只读组是否开启延迟剔除功能,1-开启,0-关闭。当只读副本与主实例延迟大于阈值后,自动剔除。
:type ReadOnlyGroupIsOfflineDelay: int
:param ReadOnlyGroupMaxDelayTime: ReadOnlyGroupType=2 且 ReadOnlyGroupIsOfflineDelay=1时必填,新建的只读组延迟剔除的阈值。
:type ReadOnlyGroupMaxDelayTime: int
:param ReadOnlyGroupMinInGroup: ReadOnlyGroupType=2 且 ReadOnlyGroupIsOfflineDelay=1时必填,新建的只读组延迟剔除后至少保留只读副本的个数。
:type ReadOnlyGroupMinInGroup: int
:param InstanceChargeType: 付费模式,取值支持 PREPAID(预付费),POSTPAID(后付费)。
:type InstanceChargeType: str
:param GoodsNum: 本次购买几个只读实例,默认值为1。
:type GoodsNum: int
:param SubnetId: VPC子网ID,形如subnet-bdoe83fa;SubnetId和VpcId需同时设置或者同时不设置
:type SubnetId: str
:param VpcId: VPC网络ID,形如vpc-dsp338hz;SubnetId和VpcId需同时设置或者同时不设置
:type VpcId: str
:param Period: 购买实例周期,默认取值为1,表示一个月。取值不超过48
:type Period: int
:param SecurityGroupList: 安全组列表,填写形如sg-xxx的安全组ID
:type SecurityGroupList: list of str
:param AutoVoucher: 是否自动使用代金券;1 - 是,0 - 否,默认不使用
:type AutoVoucher: int
:param VoucherIds: 代金券ID数组,目前单个订单只能使用一张
:type VoucherIds: list of str
:param ResourceTags: 新建实例绑定的标签集合
:type ResourceTags: list of ResourceTag
"""
self.InstanceId = None
self.Zone = None
self.ReadOnlyGroupType = None
self.Memory = None
self.Storage = None
self.ReadOnlyGroupForcedUpgrade = None
self.ReadOnlyGroupId = None
self.ReadOnlyGroupName = None
self.ReadOnlyGroupIsOfflineDelay = None
self.ReadOnlyGroupMaxDelayTime = None
self.ReadOnlyGroupMinInGroup = None
self.InstanceChargeType = None
self.GoodsNum = None
self.SubnetId = None
self.VpcId = None
self.Period = None
self.SecurityGroupList = None
self.AutoVoucher = None
self.VoucherIds = None
self.ResourceTags = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Zone = params.get("Zone")
self.ReadOnlyGroupType = params.get("ReadOnlyGroupType")
self.Memory = params.get("Memory")
self.Storage = params.get("Storage")
self.ReadOnlyGroupForcedUpgrade = params.get("ReadOnlyGroupForcedUpgrade")
self.ReadOnlyGroupId = params.get("ReadOnlyGroupId")
self.ReadOnlyGroupName = params.get("ReadOnlyGroupName")
self.ReadOnlyGroupIsOfflineDelay = params.get("ReadOnlyGroupIsOfflineDelay")
self.ReadOnlyGroupMaxDelayTime = params.get("ReadOnlyGroupMaxDelayTime")
self.ReadOnlyGroupMinInGroup = params.get("ReadOnlyGroupMinInGroup")
self.InstanceChargeType = params.get("InstanceChargeType")
self.GoodsNum = params.get("GoodsNum")
self.SubnetId = params.get("SubnetId")
self.VpcId = params.get("VpcId")
self.Period = params.get("Period")
self.SecurityGroupList = params.get("SecurityGroupList")
self.AutoVoucher = params.get("AutoVoucher")
self.VoucherIds = params.get("VoucherIds")
if params.get("ResourceTags") is not None:
self.ResourceTags = []
for item in params.get("ResourceTags"):
obj = ResourceTag()
obj._deserialize(item)
self.ResourceTags.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class CreateReadOnlyDBInstancesResponse(AbstractModel):
"""CreateReadOnlyDBInstances返回参数结构体
"""
def __init__(self):
r"""
:param DealNames: 订单名称数组
:type DealNames: list of str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DealNames = None
self.RequestId = None
def _deserialize(self, params):
self.DealNames = params.get("DealNames")
self.RequestId = params.get("RequestId")
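# Construction sketch (illustrative only): CreateReadOnlyDBInstancesRequest has
# conditional fields tied to ReadOnlyGroupType. With ReadOnlyGroupType=2 a new
# read-only group is created, so the group name and the delayed-removal settings
# must be supplied, as the parameter docs above require. All values are placeholders.
def _example_read_only_request():
    req = CreateReadOnlyDBInstancesRequest()
    req.InstanceId = "mssql-3l3fgqn7"        # primary instance ID (placeholder)
    req.Zone = "ap-guangzhou-1"
    req.ReadOnlyGroupType = 2                # create a new read-only group
    req.ReadOnlyGroupName = "ro-group-1"     # required when ReadOnlyGroupType=2
    req.ReadOnlyGroupIsOfflineDelay = 1      # enable delayed removal
    req.ReadOnlyGroupMaxDelayTime = 10       # lag threshold, required when delay is on
    req.ReadOnlyGroupMinInGroup = 1          # minimum replicas kept, required when delay is on
    req.Memory = 4                           # GB
    req.Storage = 100                        # GB
    req.InstanceChargeType = "POSTPAID"
    return req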
class DBCreateInfo(AbstractModel):
"""数据库创建信息
"""
def __init__(self):
r"""
:param DBName: 数据库名
:type DBName: str
:param Charset: 字符集。可通过接口DescribeDBCharsets查到支持的字符集,不填默认为Chinese_PRC_CI_AS。
:type Charset: str
:param Accounts: 数据库账号权限信息
:type Accounts: list of AccountPrivilege
:param Remark: 备注
:type Remark: str
"""
self.DBName = None
self.Charset = None
self.Accounts = None
self.Remark = None
def _deserialize(self, params):
self.DBName = params.get("DBName")
self.Charset = params.get("Charset")
if params.get("Accounts") is not None:
self.Accounts = []
for item in params.get("Accounts"):
obj = AccountPrivilege()
obj._deserialize(item)
self.Accounts.append(obj)
self.Remark = params.get("Remark")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DBDetail(AbstractModel):
"""数据库信息
"""
def __init__(self):
r"""
:param Name: 数据库名称
:type Name: str
:param Charset: 字符集
:type Charset: str
:param Remark: 备注
:type Remark: str
:param CreateTime: 数据库创建时间
:type CreateTime: str
:param Status: 数据库状态。1--创建中, 2--运行中, 3--修改中,-1--删除中
:type Status: int
:param Accounts: 数据库账号权限信息
:type Accounts: list of AccountPrivilege
:param InternalStatus: 内部状态。ONLINE表示运行中
:type InternalStatus: str
"""
self.Name = None
self.Charset = None
self.Remark = None
self.CreateTime = None
self.Status = None
self.Accounts = None
self.InternalStatus = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Charset = params.get("Charset")
self.Remark = params.get("Remark")
self.CreateTime = params.get("CreateTime")
self.Status = params.get("Status")
if params.get("Accounts") is not None:
self.Accounts = []
for item in params.get("Accounts"):
obj = AccountPrivilege()
obj._deserialize(item)
self.Accounts.append(obj)
self.InternalStatus = params.get("InternalStatus")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DBInstance(AbstractModel):
"""实例详细信息
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param Name: 实例名称
:type Name: str
:param ProjectId: 实例所在项目ID
:type ProjectId: int
:param RegionId: 实例所在地域ID
:type RegionId: int
:param ZoneId: 实例所在可用区ID
:type ZoneId: int
:param VpcId: 实例所在私有网络ID,基础网络时为 0
:type VpcId: int
:param SubnetId: 实例所在私有网络子网ID,基础网络时为 0
:type SubnetId: int
:param Status: 实例状态。取值范围: <li>1:申请中</li> <li>2:运行中</li> <li>3:受限运行中 (主备切换中)</li> <li>4:已隔离</li> <li>5:回收中</li> <li>6:已回收</li> <li>7:任务执行中 (实例做备份、回档等操作)</li> <li>8:已下线</li> <li>9:实例扩容中</li> <li>10:实例迁移中</li> <li>11:只读</li> <li>12:重启中</li> <li>13:实例修改中且待切换</li> <li>14:订阅发布创建中</li> <li>15:订阅发布修改中</li> <li>16:实例修改中且切换中</li> <li>17:创建RO副本中</li>
:type Status: int
:param Vip: 实例访问IP
:type Vip: str
:param Vport: 实例访问端口
:type Vport: int
:param CreateTime: 实例创建时间
:type CreateTime: str
:param UpdateTime: 实例更新时间
:type UpdateTime: str
:param StartTime: 实例计费开始时间
:type StartTime: str
:param EndTime: 实例计费结束时间
:type EndTime: str
:param IsolateTime: 实例隔离时间
:type IsolateTime: str
:param Memory: 实例内存大小,单位G
:type Memory: int
:param UsedStorage: 实例已经使用存储空间大小,单位G
:type UsedStorage: int
:param Storage: 实例存储空间大小,单位G
:type Storage: int
:param VersionName: 实例版本
:type VersionName: str
:param RenewFlag: 实例续费标记,0-正常续费,1-自动续费,2-到期不续费
:type RenewFlag: int
:param Model: 实例高可用, 1-双机高可用,2-单机,3-跨可用区,4-集群跨可用区,5-集群,9-自研机房
:type Model: int
:param Region: 实例所在地域名称,如 ap-guangzhou
:type Region: str
:param Zone: 实例所在可用区名称,如 ap-guangzhou-1
:type Zone: str
:param BackupTime: 备份时间点
:type BackupTime: str
:param PayMode: 实例付费模式, 0-按量计费,1-包年包月
:type PayMode: int
:param Uid: 实例唯一UID
:type Uid: str
:param Cpu: 实例cpu核心数
:type Cpu: int
:param Version: 实例版本代号
:type Version: str
:param Type: 物理机代号
:type Type: str
:param Pid: 计费ID
:type Pid: int
:param UniqVpcId: 实例所属VPC的唯一字符串ID,格式如:vpc-xxx,基础网络时为空字符串
:type UniqVpcId: str
:param UniqSubnetId: 实例所属子网的唯一字符串ID,格式如: subnet-xxx,基础网络时为空字符串
:type UniqSubnetId: str
:param IsolateOperator: 实例隔离操作
注意:此字段可能返回 null,表示取不到有效值。
:type IsolateOperator: str
:param SubFlag: 发布订阅标识,SUB-订阅实例,PUB-发布实例,空值-没有发布订阅的普通实例
注意:此字段可能返回 null,表示取不到有效值。
:type SubFlag: str
:param ROFlag: 只读标识,RO-只读实例,MASTER-有RO实例的主实例,空值-没有只读组的非RO实例
注意:此字段可能返回 null,表示取不到有效值。
:type ROFlag: str
:param HAFlag: 容灾类型,MIRROR-镜像,ALWAYSON-AlwaysOn, SINGLE-单例
注意:此字段可能返回 null,表示取不到有效值。
:type HAFlag: str
:param ResourceTags: 实例绑定的标签列表
注意:此字段可能返回 null,表示取不到有效值。
:type ResourceTags: list of ResourceTag
:param BackupModel: 备份模式,master_pkg-主节点打包备份(默认) ;master_no_pkg-主节点不打包备份;slave_pkg-从节点打包备份(always on集群有效);slave_no_pkg-从节点不打包备份(always on集群有效);只读副本对该值无效。
注意:此字段可能返回 null,表示取不到有效值。
:type BackupModel: str
"""
self.InstanceId = None
self.Name = None
self.ProjectId = None
self.RegionId = None
self.ZoneId = None
self.VpcId = None
self.SubnetId = None
self.Status = None
self.Vip = None
self.Vport = None
self.CreateTime = None
self.UpdateTime = None
self.StartTime = None
self.EndTime = None
self.IsolateTime = None
self.Memory = None
self.UsedStorage = None
self.Storage = None
self.VersionName = None
self.RenewFlag = None
self.Model = None
self.Region = None
self.Zone = None
self.BackupTime = None
self.PayMode = None
self.Uid = None
self.Cpu = None
self.Version = None
self.Type = None
self.Pid = None
self.UniqVpcId = None
self.UniqSubnetId = None
self.IsolateOperator = None
self.SubFlag = None
self.ROFlag = None
self.HAFlag = None
self.ResourceTags = None
self.BackupModel = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Name = params.get("Name")
self.ProjectId = params.get("ProjectId")
self.RegionId = params.get("RegionId")
self.ZoneId = params.get("ZoneId")
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
self.Status = params.get("Status")
self.Vip = params.get("Vip")
self.Vport = params.get("Vport")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.IsolateTime = params.get("IsolateTime")
self.Memory = params.get("Memory")
self.UsedStorage = params.get("UsedStorage")
self.Storage = params.get("Storage")
self.VersionName = params.get("VersionName")
self.RenewFlag = params.get("RenewFlag")
self.Model = params.get("Model")
self.Region = params.get("Region")
self.Zone = params.get("Zone")
self.BackupTime = params.get("BackupTime")
self.PayMode = params.get("PayMode")
self.Uid = params.get("Uid")
self.Cpu = params.get("Cpu")
self.Version = params.get("Version")
self.Type = params.get("Type")
self.Pid = params.get("Pid")
self.UniqVpcId = params.get("UniqVpcId")
self.UniqSubnetId = params.get("UniqSubnetId")
self.IsolateOperator = params.get("IsolateOperator")
self.SubFlag = params.get("SubFlag")
self.ROFlag = params.get("ROFlag")
self.HAFlag = params.get("HAFlag")
if params.get("ResourceTags") is not None:
self.ResourceTags = []
for item in params.get("ResourceTags"):
obj = ResourceTag()
obj._deserialize(item)
self.ResourceTags.append(obj)
self.BackupModel = params.get("BackupModel")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DBPrivilege(AbstractModel):
"""账号的数据库权限信息
"""
def __init__(self):
r"""
:param DBName: 数据库名
:type DBName: str
:param Privilege: 数据库权限,ReadWrite表示可读写,ReadOnly表示只读
:type Privilege: str
"""
self.DBName = None
self.Privilege = None
def _deserialize(self, params):
self.DBName = params.get("DBName")
self.Privilege = params.get("Privilege")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DBPrivilegeModifyInfo(AbstractModel):
"""数据库权限变更信息
"""
def __init__(self):
r"""
:param DBName: 数据库名
:type DBName: str
:param Privilege: 权限变更信息。ReadWrite表示可读写,ReadOnly表示只读,Delete表示删除账号对该DB的权限
:type Privilege: str
"""
self.DBName = None
self.Privilege = None
def _deserialize(self, params):
self.DBName = params.get("DBName")
self.Privilege = params.get("Privilege")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DBRemark(AbstractModel):
"""数据库备注信息
"""
def __init__(self):
r"""
:param Name: 数据库名称
:type Name: str
:param Remark: 备注信息
:type Remark: str
"""
self.Name = None
self.Remark = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.Remark = params.get("Remark")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DatabaseTuple(AbstractModel):
"""该数据结构表示具有发布订阅关系的两个数据库。
"""
def __init__(self):
r"""
:param PublishDatabase: 发布数据库名称
:type PublishDatabase: str
:param SubscribeDatabase: 订阅数据库名称
:type SubscribeDatabase: str
"""
self.PublishDatabase = None
self.SubscribeDatabase = None
def _deserialize(self, params):
self.PublishDatabase = params.get("PublishDatabase")
self.SubscribeDatabase = params.get("SubscribeDatabase")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DatabaseTupleStatus(AbstractModel):
"""该数据结构表示具有发布订阅关系的两个数据库,以及其之间发布订阅的状态信息。
"""
def __init__(self):
r"""
:param PublishDatabase: 发布数据库名称
:type PublishDatabase: str
:param SubscribeDatabase: 订阅数据库名称
:type SubscribeDatabase: str
:param LastSyncTime: 最近一次同步时间
:type LastSyncTime: str
:param Status: 数据库之间的发布订阅状态 running,success,fail,unknow
:type Status: str
"""
self.PublishDatabase = None
self.SubscribeDatabase = None
self.LastSyncTime = None
self.Status = None
def _deserialize(self, params):
self.PublishDatabase = params.get("PublishDatabase")
self.SubscribeDatabase = params.get("SubscribeDatabase")
self.LastSyncTime = params.get("LastSyncTime")
self.Status = params.get("Status")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DbNormalDetail(AbstractModel):
"""数据库配置信息
"""
def __init__(self):
r"""
:param IsSubscribed: 是否已订阅 0:否 1:是
:type IsSubscribed: str
:param CollationName: 数据库排序规则
:type CollationName: str
:param IsAutoCleanupOn: 开启CT之后是否自动清理 0:否 1:是
:type IsAutoCleanupOn: str
:param IsBrokerEnabled: 是否已启用代理 0:否 1:是
:type IsBrokerEnabled: str
:param IsCdcEnabled: 是否已开启/关闭CDC 0:关闭 1:开启
:type IsCdcEnabled: str
:param IsDbChainingOn: 是否已启用/ 禁用CT 0:禁用 1:启用
:type IsDbChainingOn: str
:param IsEncrypted: 是否加密 0:否 1:是
:type IsEncrypted: str
:param IsFulltextEnabled: 是否全文启用 0:否 1:是
:type IsFulltextEnabled: str
:param IsMirroring: 是否是镜像 0:否 1:是
:type IsMirroring: str
:param IsPublished: 是否已发布 0:否 1:是
:type IsPublished: str
:param IsReadCommittedSnapshotOn: 是否开启快照 0:否 1:是
:type IsReadCommittedSnapshotOn: str
:param IsTrustworthyOn: 是否可信任 0:否 1:是
:type IsTrustworthyOn: str
:param MirroringState: 镜像状态
:type MirroringState: str
:param Name: 数据库名称
:type Name: str
:param RecoveryModelDesc: 恢复模式
:type RecoveryModelDesc: str
:param RetentionPeriod: 保留天数
:type RetentionPeriod: str
:param StateDesc: 数据库状态
:type StateDesc: str
:param UserAccessDesc: 用户类型
:type UserAccessDesc: str
"""
self.IsSubscribed = None
self.CollationName = None
self.IsAutoCleanupOn = None
self.IsBrokerEnabled = None
self.IsCdcEnabled = None
self.IsDbChainingOn = None
self.IsEncrypted = None
self.IsFulltextEnabled = None
self.IsMirroring = None
self.IsPublished = None
self.IsReadCommittedSnapshotOn = None
self.IsTrustworthyOn = None
self.MirroringState = None
self.Name = None
self.RecoveryModelDesc = None
self.RetentionPeriod = None
self.StateDesc = None
self.UserAccessDesc = None
def _deserialize(self, params):
self.IsSubscribed = params.get("IsSubscribed")
self.CollationName = params.get("CollationName")
self.IsAutoCleanupOn = params.get("IsAutoCleanupOn")
self.IsBrokerEnabled = params.get("IsBrokerEnabled")
self.IsCdcEnabled = params.get("IsCdcEnabled")
self.IsDbChainingOn = params.get("IsDbChainingOn")
self.IsEncrypted = params.get("IsEncrypted")
self.IsFulltextEnabled = params.get("IsFulltextEnabled")
self.IsMirroring = params.get("IsMirroring")
self.IsPublished = params.get("IsPublished")
self.IsReadCommittedSnapshotOn = params.get("IsReadCommittedSnapshotOn")
self.IsTrustworthyOn = params.get("IsTrustworthyOn")
self.MirroringState = params.get("MirroringState")
self.Name = params.get("Name")
self.RecoveryModelDesc = params.get("RecoveryModelDesc")
self.RetentionPeriod = params.get("RetentionPeriod")
self.StateDesc = params.get("StateDesc")
self.UserAccessDesc = params.get("UserAccessDesc")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DbRollbackTimeInfo(AbstractModel):
"""数据库可回档时间范围信息
"""
def __init__(self):
r"""
:param DBName: 数据库名称
:type DBName: str
:param StartTime: 可回档开始时间
:type StartTime: str
:param EndTime: 可回档结束时间
:type EndTime: str
"""
self.DBName = None
self.StartTime = None
self.EndTime = None
def _deserialize(self, params):
self.DBName = params.get("DBName")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DealInfo(AbstractModel):
"""订单信息
"""
def __init__(self):
r"""
:param DealName: 订单名
:type DealName: str
:param Count: 商品数量
:type Count: int
:param FlowId: 关联的流程 ID,可用于查询流程执行状态
:type FlowId: int
:param InstanceIdSet: 只有创建实例的订单会填充该字段,表示该订单创建的实例的 ID。
:type InstanceIdSet: list of str
:param OwnerUin: 所属账号
:type OwnerUin: str
:param InstanceChargeType: 实例付费类型
:type InstanceChargeType: str
"""
self.DealName = None
self.Count = None
self.FlowId = None
self.InstanceIdSet = None
self.OwnerUin = None
self.InstanceChargeType = None
def _deserialize(self, params):
self.DealName = params.get("DealName")
self.Count = params.get("Count")
self.FlowId = params.get("FlowId")
self.InstanceIdSet = params.get("InstanceIdSet")
self.OwnerUin = params.get("OwnerUin")
self.InstanceChargeType = params.get("InstanceChargeType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteAccountRequest(AbstractModel):
"""DeleteAccount请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 数据库实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param UserNames: 实例用户名数组
:type UserNames: list of str
"""
self.InstanceId = None
self.UserNames = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.UserNames = params.get("UserNames")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteAccountResponse(AbstractModel):
"""DeleteAccount返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 任务流ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class DeleteBackupMigrationRequest(AbstractModel):
"""DeleteBackupMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 目标实例ID,由DescribeBackupMigration接口返回
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由DescribeBackupMigration接口返回
:type BackupMigrationId: str
"""
self.InstanceId = None
self.BackupMigrationId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteBackupMigrationResponse(AbstractModel):
"""DeleteBackupMigration返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteDBInstanceRequest(AbstractModel):
"""DeleteDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,格式如:mssql-3l3fgqn7 或 mssqlro-3l3fgqn7
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteDBInstanceResponse(AbstractModel):
"""DeleteDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteDBRequest(AbstractModel):
"""DeleteDB请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-rljoi3bf
:type InstanceId: str
:param Names: 数据库名数组
:type Names: list of str
"""
self.InstanceId = None
self.Names = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Names = params.get("Names")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteDBResponse(AbstractModel):
"""DeleteDB返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 任务流ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class DeleteIncrementalMigrationRequest(AbstractModel):
"""DeleteIncrementalMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param IncrementalMigrationId: 增量备份导入任务ID,由CreateIncrementalMigration接口返回
:type IncrementalMigrationId: str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.IncrementalMigrationId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteIncrementalMigrationResponse(AbstractModel):
"""DeleteIncrementalMigration返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeleteMigrationRequest(AbstractModel):
"""DeleteMigration请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
"""
self.MigrateId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeleteMigrationResponse(AbstractModel):
"""DeleteMigration返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DeletePublishSubscribeRequest(AbstractModel):
"""DeletePublishSubscribe请求参数结构体
"""
def __init__(self):
r"""
:param PublishSubscribeId: 发布订阅ID,可通过DescribePublishSubscribe接口获得
:type PublishSubscribeId: int
:param DatabaseTupleSet: 待删除的数据库的订阅发布关系集合
:type DatabaseTupleSet: list of DatabaseTuple
"""
self.PublishSubscribeId = None
self.DatabaseTupleSet = None
def _deserialize(self, params):
self.PublishSubscribeId = params.get("PublishSubscribeId")
if params.get("DatabaseTupleSet") is not None:
self.DatabaseTupleSet = []
for item in params.get("DatabaseTupleSet"):
obj = DatabaseTuple()
obj._deserialize(item)
self.DatabaseTupleSet.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DeletePublishSubscribeResponse(AbstractModel):
"""DeletePublishSubscribe返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DescribeAccountsRequest(AbstractModel):
"""DescribeAccounts请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param Limit: 分页返回,每页返回的数目,取值为1-100,默认值为20
:type Limit: int
:param Offset: 分页返回,页编号,默认值为第0页
:type Offset: int
"""
self.InstanceId = None
self.Limit = None
self.Offset = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeAccountsResponse(AbstractModel):
"""DescribeAccounts返回参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param Accounts: 账户信息列表
:type Accounts: list of AccountDetail
:param TotalCount: 总数
:type TotalCount: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.InstanceId = None
self.Accounts = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("Accounts") is not None:
self.Accounts = []
for item in params.get("Accounts"):
obj = AccountDetail()
obj._deserialize(item)
self.Accounts.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeBackupByFlowIdRequest(AbstractModel):
"""DescribeBackupByFlowId请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,格式如:mssql-3l3fgqn7
:type InstanceId: str
:param FlowId: 创建备份流程ID,可通过 [CreateBackup](https://cloud.tencent.com/document/product/238/19946) 接口获取
:type FlowId: str
"""
self.InstanceId = None
self.FlowId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.FlowId = params.get("FlowId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeBackupByFlowIdResponse(AbstractModel):
"""DescribeBackupByFlowId返回参数结构体
"""
def __init__(self):
r"""
:param Id: 备份文件唯一标识,RestoreInstance接口会用到该字段,对于单库备份文件只返回第一条记录的备份文件唯一标识;单库备份文件需要通过DescribeBackupFiles接口获取全部记录的可回档的ID
:type Id: int
:param FileName: 文件名,对于单库备份文件只返回第一条记录的文件名;单库备份文件需要通过DescribeBackupFiles接口获取全部记录的文件名
:type FileName: str
:param BackupName: 备份任务名称,可自定义
:type BackupName: str
:param StartTime: 备份开始时间
:type StartTime: str
:param EndTime: 备份结束时间
:type EndTime: str
:param Size: 文件大小,单位 KB,对于单库备份文件只返回第一条记录的文件大小;单库备份文件需要通过DescribeBackupFiles接口获取全部记录的文件大小
:type Size: int
:param Strategy: 备份策略,0-实例备份;1-多库备份;实例状态是0-创建中时,该字段为默认值0,无实际意义
:type Strategy: int
:param Status: 备份文件状态,0-创建中;1-成功;2-失败
:type Status: int
:param BackupWay: 备份方式,0-定时备份;1-手动临时备份;实例状态是0-创建中时,该字段为默认值0,无实际意义
:type BackupWay: int
:param DBs: DB列表,对于单库备份文件只返回第一条记录包含的库名;单库备份文件需要通过DescribeBackupFiles接口获取全部记录的库名。
:type DBs: list of str
:param InternalAddr: 内网下载地址,对于单库备份文件只返回第一条记录的内网下载地址;单库备份文件需要通过DescribeBackupFiles接口获取全部记录的下载地址
:type InternalAddr: str
:param ExternalAddr: 外网下载地址,对于单库备份文件只返回第一条记录的外网下载地址;单库备份文件需要通过DescribeBackupFiles接口获取全部记录的下载地址
:type ExternalAddr: str
:param GroupId: 聚合Id,对于打包备份文件不返回此值。通过此值调用DescribeBackupFiles接口,获取单库备份文件的详细信息
:type GroupId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Id = None
self.FileName = None
self.BackupName = None
self.StartTime = None
self.EndTime = None
self.Size = None
self.Strategy = None
self.Status = None
self.BackupWay = None
self.DBs = None
self.InternalAddr = None
self.ExternalAddr = None
self.GroupId = None
self.RequestId = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.FileName = params.get("FileName")
self.BackupName = params.get("BackupName")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Size = params.get("Size")
self.Strategy = params.get("Strategy")
self.Status = params.get("Status")
self.BackupWay = params.get("BackupWay")
self.DBs = params.get("DBs")
self.InternalAddr = params.get("InternalAddr")
self.ExternalAddr = params.get("ExternalAddr")
self.GroupId = params.get("GroupId")
self.RequestId = params.get("RequestId")
class DescribeBackupCommandRequest(AbstractModel):
"""DescribeBackupCommand请求参数结构体
"""
def __init__(self):
r"""
:param BackupFileType: 备份文件类型,FULL-全量备份,FULL_LOG-全量备份需要日志增量,FULL_DIFF-全量备份需要差异增量,LOG-日志备份,DIFF-差异备份
:type BackupFileType: str
:param DataBaseName: 数据库名称
:type DataBaseName: str
:param IsRecovery: 是否需要恢复,NO-不需要,YES-需要
:type IsRecovery: str
:param LocalPath: 备份文件保存的路径;如果不填则默认在D:\\
:type LocalPath: str
"""
self.BackupFileType = None
self.DataBaseName = None
self.IsRecovery = None
self.LocalPath = None
def _deserialize(self, params):
self.BackupFileType = params.get("BackupFileType")
self.DataBaseName = params.get("DataBaseName")
self.IsRecovery = params.get("IsRecovery")
self.LocalPath = params.get("LocalPath")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeBackupCommandResponse(AbstractModel):
"""DescribeBackupCommand返回参数结构体
"""
def __init__(self):
r"""
:param Command: 创建备份命令
:type Command: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Command = None
self.RequestId = None
def _deserialize(self, params):
self.Command = params.get("Command")
self.RequestId = params.get("RequestId")
class DescribeBackupFilesRequest(AbstractModel):
"""DescribeBackupFiles请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param GroupId: 聚合ID, 可通过接口DescribeBackups获取
:type GroupId: str
:param Limit: 分页返回,每页返回的数目,取值为1-100,默认值为20
:type Limit: int
:param Offset: 分页返回,页编号,默认值为第0页
:type Offset: int
:param DatabaseName: 按照备份的库名称筛选,不填则不筛选此项
:type DatabaseName: str
"""
self.InstanceId = None
self.GroupId = None
self.Limit = None
self.Offset = None
self.DatabaseName = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.GroupId = params.get("GroupId")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
self.DatabaseName = params.get("DatabaseName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeBackupFilesResponse(AbstractModel):
"""DescribeBackupFiles返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 备份总数量
:type TotalCount: int
:param BackupFiles: 备份文件列表详情
:type BackupFiles: list of BackupFile
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.BackupFiles = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("BackupFiles") is not None:
self.BackupFiles = []
for item in params.get("BackupFiles"):
obj = BackupFile()
obj._deserialize(item)
self.BackupFiles.append(obj)
self.RequestId = params.get("RequestId")
class DescribeBackupMigrationRequest(AbstractModel):
"""DescribeBackupMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param MigrationName: 导入任务名称
:type MigrationName: str
:param BackupFileName: 备份文件名称
:type BackupFileName: str
:param StatusSet: 导入任务状态集合
:type StatusSet: list of int
:param RecoveryType: 导入任务恢复类型,FULL,FULL_LOG,FULL_DIFF
:type RecoveryType: str
:param UploadType: COS_URL-备份放在用户的对象存储上,提供URL。COS_UPLOAD-备份放在业务的对象存储上,用户上传
:type UploadType: str
:param Limit: 分页,页大小,默认值:100
:type Limit: int
:param Offset: 分页,页数,默认值:0
:type Offset: int
:param OrderBy: 排序字段,name;createTime;startTime;endTime,默认按照createTime递增排序。
:type OrderBy: str
:param OrderByType: 排序方式,desc-递减排序,asc-递增排序。默认按照asc排序,且在OrderBy为有效值时,本参数有效
:type OrderByType: str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.MigrationName = None
self.BackupFileName = None
self.StatusSet = None
self.RecoveryType = None
self.UploadType = None
self.Limit = None
self.Offset = None
self.OrderBy = None
self.OrderByType = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.MigrationName = params.get("MigrationName")
self.BackupFileName = params.get("BackupFileName")
self.StatusSet = params.get("StatusSet")
self.RecoveryType = params.get("RecoveryType")
self.UploadType = params.get("UploadType")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
self.OrderBy = params.get("OrderBy")
self.OrderByType = params.get("OrderByType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeBackupMigrationResponse(AbstractModel):
"""DescribeBackupMigration返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 迁移任务总数
:type TotalCount: int
:param BackupMigrationSet: 迁移任务集合
:type BackupMigrationSet: list of Migration
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.BackupMigrationSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("BackupMigrationSet") is not None:
self.BackupMigrationSet = []
for item in params.get("BackupMigrationSet"):
obj = Migration()
obj._deserialize(item)
self.BackupMigrationSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeBackupUploadSizeRequest(AbstractModel):
"""DescribeBackupUploadSize请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param IncrementalMigrationId: 增量导入任务ID
:type IncrementalMigrationId: str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.IncrementalMigrationId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeBackupUploadSizeResponse(AbstractModel):
"""DescribeBackupUploadSize返回参数结构体
"""
def __init__(self):
r"""
:param CosUploadBackupFileSet: 已上传的备份的信息
:type CosUploadBackupFileSet: list of CosUploadBackupFile
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.CosUploadBackupFileSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("CosUploadBackupFileSet") is not None:
self.CosUploadBackupFileSet = []
for item in params.get("CosUploadBackupFileSet"):
obj = CosUploadBackupFile()
obj._deserialize(item)
self.CosUploadBackupFileSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeBackupsRequest(AbstractModel):
"""DescribeBackups请求参数结构体
"""
def __init__(self):
r"""
:param StartTime: 开始时间(yyyy-MM-dd HH:mm:ss)
:type StartTime: str
:param EndTime: 结束时间(yyyy-MM-dd HH:mm:ss)
:type EndTime: str
:param InstanceId: 实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param Limit: 分页返回,每页返回的数目,取值为1-100,默认值为20
:type Limit: int
:param Offset: 分页返回,页编号,默认值为第0页
:type Offset: int
:param BackupName: 按照备份名称筛选,不填则不筛选此项
:type BackupName: str
:param Strategy: 按照备份策略筛选,0-实例备份,1-多库备份,不填则不筛选此项
:type Strategy: int
:param BackupWay: 按照备份方式筛选,0-后台自动定时备份,1-用户手动临时备份,不填则不筛选此项
:type BackupWay: int
:param BackupId: 按照备份ID筛选,不填则不筛选此项
:type BackupId: int
:param DatabaseName: 按照备份的库名称筛选,不填则不筛选此项
:type DatabaseName: str
:param Group: 是否分组查询,默认是0,单库备份情况下 0-兼容老方式不分组,1-单库备份分组后展示
:type Group: int
"""
self.StartTime = None
self.EndTime = None
self.InstanceId = None
self.Limit = None
self.Offset = None
self.BackupName = None
self.Strategy = None
self.BackupWay = None
self.BackupId = None
self.DatabaseName = None
self.Group = None
def _deserialize(self, params):
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.InstanceId = params.get("InstanceId")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
self.BackupName = params.get("BackupName")
self.Strategy = params.get("Strategy")
self.BackupWay = params.get("BackupWay")
self.BackupId = params.get("BackupId")
self.DatabaseName = params.get("DatabaseName")
self.Group = params.get("Group")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeBackupsResponse(AbstractModel):
"""DescribeBackups返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 备份总数量
:type TotalCount: int
:param Backups: 备份列表详情
:type Backups: list of Backup
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Backups = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Backups") is not None:
self.Backups = []
for item in params.get("Backups"):
obj = Backup()
obj._deserialize(item)
self.Backups.append(obj)
self.RequestId = params.get("RequestId")
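# Query sketch (illustrative only): lists the backups of one instance inside a
# time window. StartTime/EndTime use the yyyy-MM-dd HH:mm:ss format documented
# above; `client` is a SqlserverClient built as in the earlier CreateDB sketch.
def _example_list_backups(client):
    req = DescribeBackupsRequest()
    req.InstanceId = "mssql-njj2mtpl"             # placeholder instance ID
    req.StartTime = "2020-01-01 00:00:00"
    req.EndTime = "2020-01-31 23:59:59"
    req.Limit = 20                                # 1-100, default 20
    resp = client.DescribeBackups(req)            # DescribeBackupsResponse
    return resp.TotalCount, resp.Backups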
class DescribeCrossRegionZoneRequest(AbstractModel):
"""DescribeCrossRegionZone请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,格式如:mssql-3l3fgqn7
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeCrossRegionZoneResponse(AbstractModel):
"""DescribeCrossRegionZone返回参数结构体
"""
def __init__(self):
r"""
:param Region: 备机所在地域的字符串ID,形如:ap-guangzhou
:type Region: str
:param Zone: 备机所在可用区的字符串ID,形如:ap-guangzhou-1
:type Zone: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Region = None
self.Zone = None
self.RequestId = None
def _deserialize(self, params):
self.Region = params.get("Region")
self.Zone = params.get("Zone")
self.RequestId = params.get("RequestId")
class DescribeDBCharsetsRequest(AbstractModel):
"""DescribeDBCharsets请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBCharsetsResponse(AbstractModel):
"""DescribeDBCharsets返回参数结构体
"""
def __init__(self):
r"""
:param DatabaseCharsets: 数据库字符集列表
:type DatabaseCharsets: list of str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DatabaseCharsets = None
self.RequestId = None
def _deserialize(self, params):
self.DatabaseCharsets = params.get("DatabaseCharsets")
self.RequestId = params.get("RequestId")
class DescribeDBInstancesRequest(AbstractModel):
"""DescribeDBInstances请求参数结构体
"""
def __init__(self):
r"""
:param ProjectId: 项目ID
:type ProjectId: int
:param Status: 实例状态。取值范围:
<li>1:申请中</li>
<li>2:运行中</li>
<li>3:受限运行中 (主备切换中)</li>
<li>4:已隔离</li>
<li>5:回收中</li>
<li>6:已回收</li>
<li>7:任务执行中 (实例做备份、回档等操作)</li>
<li>8:已下线</li>
<li>9:实例扩容中</li>
<li>10:实例迁移中</li>
<li>11:只读</li>
<li>12:重启中</li>
:type Status: int
:param Offset: 分页返回,页编号,默认值为第0页
:type Offset: int
:param Limit: 分页返回,每页返回的数目,取值为1-100,默认值为100
:type Limit: int
:param InstanceIdSet: 一个或者多个实例ID。实例ID,格式如:mssql-si2823jyl
:type InstanceIdSet: list of str
:param PayMode: 付费类型检索 1-包年包月,0-按量计费
:type PayMode: int
:param VpcId: 实例所属VPC的唯一字符串ID,格式如:vpc-xxx,传空字符串(“”)则按照基础网络筛选。
:type VpcId: str
:param SubnetId: 实例所属子网的唯一字符串ID,格式如: subnet-xxx,传空字符串(“”)则按照基础网络筛选。
:type SubnetId: str
:param VipSet: 实例内网地址列表,格式如:172.1.0.12
:type VipSet: list of str
:param InstanceNameSet: 实例名称列表,模糊查询
:type InstanceNameSet: list of str
:param VersionSet: 实例版本代号列表,格式如:2008R2,2012SP3等
:type VersionSet: list of str
:param Zone: 实例可用区,格式如:ap-guangzhou-2
:type Zone: str
:param TagKeys: 实例标签列表
:type TagKeys: list of str
:param SearchKey: 模糊查询关键字,支持实例id、实例名、内网ip
:type SearchKey: str
:param UidSet: 实例唯一Uid列表
:type UidSet: list of str
"""
self.ProjectId = None
self.Status = None
self.Offset = None
self.Limit = None
self.InstanceIdSet = None
self.PayMode = None
self.VpcId = None
self.SubnetId = None
self.VipSet = None
self.InstanceNameSet = None
self.VersionSet = None
self.Zone = None
self.TagKeys = None
self.SearchKey = None
self.UidSet = None
def _deserialize(self, params):
self.ProjectId = params.get("ProjectId")
self.Status = params.get("Status")
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
self.InstanceIdSet = params.get("InstanceIdSet")
self.PayMode = params.get("PayMode")
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
self.VipSet = params.get("VipSet")
self.InstanceNameSet = params.get("InstanceNameSet")
self.VersionSet = params.get("VersionSet")
self.Zone = params.get("Zone")
self.TagKeys = params.get("TagKeys")
self.SearchKey = params.get("SearchKey")
self.UidSet = params.get("UidSet")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBInstancesResponse(AbstractModel):
"""DescribeDBInstances返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 符合条件的实例总数。分页返回的话,这个值指的是所有符合条件的实例的个数,而非当前根据Limit和Offset值返回的实例个数
:type TotalCount: int
:param DBInstances: 实例列表
:type DBInstances: list of DBInstance
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.DBInstances = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("DBInstances") is not None:
self.DBInstances = []
for item in params.get("DBInstances"):
obj = DBInstance()
obj._deserialize(item)
self.DBInstances.append(obj)
self.RequestId = params.get("RequestId")
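# Pagination sketch (illustrative only): TotalCount is the number of all
# eligible instances, not the page size, so paging walks the page number
# forward until everything is fetched. Per the request docs, Offset is a page
# number (starting at 0) and Limit may not exceed 100; `client` is a
# SqlserverClient built as in the earlier CreateDB sketch.
def _example_list_all_instances(client):
    instances = []
    page = 0
    while True:
        req = DescribeDBInstancesRequest()
        req.Limit = 100            # records per page, max 100
        req.Offset = page          # page number, starting from page 0
        resp = client.DescribeDBInstances(req)
        instances.extend(resp.DBInstances or [])
        page += 1
        if page * 100 >= resp.TotalCount:
            return instances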
class DescribeDBSecurityGroupsRequest(AbstractModel):
"""DescribeDBSecurityGroups请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,格式如:mssql-c1nl9rpv或者mssqlro-c1nl9rpv,与云数据库控制台页面中显示的实例ID相同。
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBSecurityGroupsResponse(AbstractModel):
"""DescribeDBSecurityGroups返回参数结构体
"""
def __init__(self):
r"""
:param SecurityGroupSet: 安全组详情。
:type SecurityGroupSet: list of SecurityGroup
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.SecurityGroupSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("SecurityGroupSet") is not None:
self.SecurityGroupSet = []
for item in params.get("SecurityGroupSet"):
obj = SecurityGroup()
obj._deserialize(item)
self.SecurityGroupSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeDBsNormalRequest(AbstractModel):
"""DescribeDBsNormal请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-7vfv3rk3
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBsNormalResponse(AbstractModel):
"""DescribeDBsNormal返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 表示当前实例下的数据库总个数
:type TotalCount: int
:param DBList: 返回数据库的详细配置信息,例如:数据库是否开启CDC、CT等
:type DBList: list of DbNormalDetail
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.DBList = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("DBList") is not None:
self.DBList = []
for item in params.get("DBList"):
obj = DbNormalDetail()
obj._deserialize(item)
self.DBList.append(obj)
self.RequestId = params.get("RequestId")
class DescribeDBsRequest(AbstractModel):
"""DescribeDBs请求参数结构体
"""
def __init__(self):
r"""
:param InstanceIdSet: 实例ID
:type InstanceIdSet: list of str
:param Limit: 分页返回,每页返回的数目,取值为1-100,默认值为20
:type Limit: int
:param Offset: 分页返回,页编号,默认值为第0页
:type Offset: int
"""
self.InstanceIdSet = None
self.Limit = None
self.Offset = None
def _deserialize(self, params):
self.InstanceIdSet = params.get("InstanceIdSet")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBsResponse(AbstractModel):
"""DescribeDBs返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 数据库数量
:type TotalCount: int
:param DBInstances: 实例数据库列表
:type DBInstances: list of InstanceDBDetail
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.DBInstances = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("DBInstances") is not None:
self.DBInstances = []
for item in params.get("DBInstances"):
obj = InstanceDBDetail()
obj._deserialize(item)
self.DBInstances.append(obj)
self.RequestId = params.get("RequestId")
class DescribeFlowStatusRequest(AbstractModel):
"""DescribeFlowStatus请求参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID
:type FlowId: int
"""
self.FlowId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeFlowStatusResponse(AbstractModel):
"""DescribeFlowStatus返回参数结构体
"""
def __init__(self):
r"""
:param Status: 流程状态,0:成功,1:失败,2:运行中
:type Status: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Status = None
self.RequestId = None
def _deserialize(self, params):
self.Status = params.get("Status")
self.RequestId = params.get("RequestId")
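# Polling sketch (illustrative only): many calls above (CreateDB, DeleteDB,
# DeleteAccount, ...) return a FlowId; DescribeFlowStatus reports 0 for
# success, 1 for failure, and 2 while the flow is still running, so a caller
# polls until the status leaves 2. The sleep interval is an arbitrary choice;
# `client` is a SqlserverClient built as in the earlier CreateDB sketch.
def _example_wait_for_flow(client, flow_id):
    import time
    while True:
        req = DescribeFlowStatusRequest()
        req.FlowId = flow_id
        resp = client.DescribeFlowStatus(req)
        if resp.Status != 2:          # 0: succeeded, 1: failed
            return resp.Status
        time.sleep(5)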
class DescribeIncrementalMigrationRequest(AbstractModel):
"""DescribeIncrementalMigration请求参数结构体
"""
def __init__(self):
r"""
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupFileName: 备份文件名称
:type BackupFileName: str
:param StatusSet: 导入任务状态集合
:type StatusSet: list of int
:param Limit: 分页,页大小,默认值:100
:type Limit: int
:param Offset: 分页,页数,默认值:0
:type Offset: int
:param OrderBy: 排序字段,name;createTime;startTime;endTime,默认按照createTime递增排序。
:type OrderBy: str
:param OrderByType: 排序方式,desc-递减排序,asc-递增排序。默认按照asc排序,且在OrderBy为有效值时,本参数有效
:type OrderByType: str
:param IncrementalMigrationId: 增量备份导入任务ID,由CreateIncrementalMigration接口返回
:type IncrementalMigrationId: str
"""
self.BackupMigrationId = None
self.InstanceId = None
self.BackupFileName = None
self.StatusSet = None
self.Limit = None
self.Offset = None
self.OrderBy = None
self.OrderByType = None
self.IncrementalMigrationId = None
def _deserialize(self, params):
self.BackupMigrationId = params.get("BackupMigrationId")
self.InstanceId = params.get("InstanceId")
self.BackupFileName = params.get("BackupFileName")
self.StatusSet = params.get("StatusSet")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
self.OrderBy = params.get("OrderBy")
self.OrderByType = params.get("OrderByType")
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeIncrementalMigrationResponse(AbstractModel):
"""DescribeIncrementalMigration返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 增量导入任务总数
:type TotalCount: int
:param IncrementalMigrationSet: 增量导入任务集合
:type IncrementalMigrationSet: list of Migration
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.IncrementalMigrationSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("IncrementalMigrationSet") is not None:
self.IncrementalMigrationSet = []
for item in params.get("IncrementalMigrationSet"):
obj = Migration()
obj._deserialize(item)
self.IncrementalMigrationSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeInstanceParamRecordsRequest(AbstractModel):
"""DescribeInstanceParamRecords请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID,格式如:mssql-dj5i29c5n,与云数据库控制台页面中显示的实例 ID 相同,可使用 DescribeDBInstances 接口获取,其值为输出参数中字段 InstanceId 的值。
:type InstanceId: str
:param Offset: 分页,页数,默认0
:type Offset: int
:param Limit: 分页,页大小,默认20,最大不超过100
:type Limit: int
"""
self.InstanceId = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeInstanceParamRecordsResponse(AbstractModel):
"""DescribeInstanceParamRecords返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 符合条件的记录数
:type TotalCount: int
:param Items: 参数修改记录
:type Items: list of ParamRecord
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Items = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Items") is not None:
self.Items = []
for item in params.get("Items"):
obj = ParamRecord()
obj._deserialize(item)
self.Items.append(obj)
self.RequestId = params.get("RequestId")
class DescribeInstanceParamsRequest(AbstractModel):
"""DescribeInstanceParams请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID,格式如:mssql-dj5i29c5n,与云数据库控制台页面中显示的实例 ID 相同,可使用 DescribeDBInstances 接口获取,其值为输出参数中字段 InstanceId 的值。
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeInstanceParamsResponse(AbstractModel):
"""DescribeInstanceParams返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 实例的参数总数
:type TotalCount: int
:param Items: 参数详情
:type Items: list of ParameterDetail
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Items = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Items") is not None:
self.Items = []
for item in params.get("Items"):
obj = ParameterDetail()
obj._deserialize(item)
self.Items.append(obj)
self.RequestId = params.get("RequestId")
class DescribeMaintenanceSpanRequest(AbstractModel):
"""DescribeMaintenanceSpan请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-k8voqdlz
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeMaintenanceSpanResponse(AbstractModel):
"""DescribeMaintenanceSpan返回参数结构体
"""
def __init__(self):
r"""
:param Weekly: 以周为单位,表示周几允许维护,例如:[1,2,3,4,5,6,7]表示周一到周日均为可维护日。
:type Weekly: list of int
:param StartTime: 每天可维护的开始时间,例如:10:24标识可维护时间窗10点24分开始。
:type StartTime: str
:param Span: 每天可维护的持续时间,单位是h,例如:1 表示从可维护的开始时间起持续1小时。
:type Span: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Weekly = None
self.StartTime = None
self.Span = None
self.RequestId = None
def _deserialize(self, params):
self.Weekly = params.get("Weekly")
self.StartTime = params.get("StartTime")
self.Span = params.get("Span")
self.RequestId = params.get("RequestId")
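# Worked example (a sketch, not generated code): with Weekly=[1, 3, 5],
# StartTime="10:24" and Span=2, maintenance may run on Monday, Wednesday and
# Friday from 10:24 to 12:24 each day:
#
#     from datetime import datetime, timedelta
#     start = datetime.strptime("10:24", "%H:%M")
#     end = (start + timedelta(hours=2)).strftime("%H:%M")   # "12:24"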
class DescribeMigrationDatabasesRequest(AbstractModel):
"""DescribeMigrationDatabases请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 迁移源实例的ID,格式如:mssql-si2823jyl
:type InstanceId: str
:param UserName: 迁移源实例用户名
:type UserName: str
:param Password: 迁移源实例密码
:type Password: str
"""
self.InstanceId = None
self.UserName = None
self.Password = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.UserName = params.get("UserName")
self.Password = params.get("Password")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeMigrationDatabasesResponse(AbstractModel):
"""DescribeMigrationDatabases返回参数结构体
"""
def __init__(self):
r"""
:param Amount: 数据库数量
:type Amount: int
:param MigrateDBSet: 数据库名称数组
:type MigrateDBSet: list of str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Amount = None
self.MigrateDBSet = None
self.RequestId = None
def _deserialize(self, params):
self.Amount = params.get("Amount")
self.MigrateDBSet = params.get("MigrateDBSet")
self.RequestId = params.get("RequestId")
class DescribeMigrationDetailRequest(AbstractModel):
"""DescribeMigrationDetail请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
"""
self.MigrateId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeMigrationDetailResponse(AbstractModel):
"""DescribeMigrationDetail返回参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
:param MigrateName: 迁移任务名称
:type MigrateName: str
:param AppId: 迁移任务所属的用户ID
:type AppId: int
:param Region: 迁移任务所属的地域
:type Region: str
:param SourceType: 迁移源的类型 1:TencentDB for SQLServer 2:云服务器自建SQLServer数据库 4:SQLServer备份还原 5:SQLServer备份还原(COS方式)
:type SourceType: int
:param CreateTime: 迁移任务的创建时间
:type CreateTime: str
:param StartTime: 迁移任务的开始时间
:type StartTime: str
:param EndTime: 迁移任务的结束时间
:type EndTime: str
:param Status: 迁移任务的状态(1:初始化,4:迁移中,5.迁移失败,6.迁移成功)
:type Status: int
:param Progress: 迁移任务当前进度
:type Progress: int
:param MigrateType: 迁移类型(1:结构迁移 2:数据迁移 3:增量同步)
:type MigrateType: int
:param Source: 迁移源
:type Source: :class:`tencentcloud.sqlserver.v20180328.models.MigrateSource`
:param Target: 迁移目标
:type Target: :class:`tencentcloud.sqlserver.v20180328.models.MigrateTarget`
:param MigrateDBSet: 迁移DB对象 ,离线迁移(SourceType=4或SourceType=5)不使用。
:type MigrateDBSet: list of MigrateDB
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.MigrateId = None
self.MigrateName = None
self.AppId = None
self.Region = None
self.SourceType = None
self.CreateTime = None
self.StartTime = None
self.EndTime = None
self.Status = None
self.Progress = None
self.MigrateType = None
self.Source = None
self.Target = None
self.MigrateDBSet = None
self.RequestId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
self.MigrateName = params.get("MigrateName")
self.AppId = params.get("AppId")
self.Region = params.get("Region")
self.SourceType = params.get("SourceType")
self.CreateTime = params.get("CreateTime")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Status = params.get("Status")
self.Progress = params.get("Progress")
self.MigrateType = params.get("MigrateType")
if params.get("Source") is not None:
self.Source = MigrateSource()
self.Source._deserialize(params.get("Source"))
if params.get("Target") is not None:
self.Target = MigrateTarget()
self.Target._deserialize(params.get("Target"))
if params.get("MigrateDBSet") is not None:
self.MigrateDBSet = []
for item in params.get("MigrateDBSet"):
obj = MigrateDB()
obj._deserialize(item)
self.MigrateDBSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeMigrationsRequest(AbstractModel):
"""DescribeMigrations请求参数结构体
"""
def __init__(self):
r"""
:param StatusSet: 状态集合。只要符合集合中某一状态的迁移任务,就会查出来
:type StatusSet: list of int
:param MigrateName: 迁移任务的名称,模糊匹配
:type MigrateName: str
:param Limit: 分页返回,每页返回的数目,取值为1-100,默认值为100
:type Limit: int
:param Offset: 分页返回,页编号,默认值为第0页
:type Offset: int
:param OrderBy: 查询结果按照关键字排序,可选值为name、createTime、startTime,endTime,status
:type OrderBy: str
:param OrderByType: 排序方式,可选值为desc、asc
:type OrderByType: str
"""
self.StatusSet = None
self.MigrateName = None
self.Limit = None
self.Offset = None
self.OrderBy = None
self.OrderByType = None
def _deserialize(self, params):
self.StatusSet = params.get("StatusSet")
self.MigrateName = params.get("MigrateName")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
self.OrderBy = params.get("OrderBy")
self.OrderByType = params.get("OrderByType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeMigrationsResponse(AbstractModel):
"""DescribeMigrations返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 查询结果的总数
:type TotalCount: int
:param MigrateTaskSet: 查询结果的列表
:type MigrateTaskSet: list of MigrateTask
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.MigrateTaskSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("MigrateTaskSet") is not None:
self.MigrateTaskSet = []
for item in params.get("MigrateTaskSet"):
obj = MigrateTask()
obj._deserialize(item)
self.MigrateTaskSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeOrdersRequest(AbstractModel):
"""DescribeOrders请求参数结构体
"""
def __init__(self):
r"""
:param DealNames: 订单数组。发货时会返回订单名字,利用该订单名字调用DescribeOrders接口查询发货情况
:type DealNames: list of str
"""
self.DealNames = None
def _deserialize(self, params):
self.DealNames = params.get("DealNames")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeOrdersResponse(AbstractModel):
"""DescribeOrders返回参数结构体
"""
def __init__(self):
r"""
:param Deals: 订单信息数组
:type Deals: list of DealInfo
:param TotalCount: 返回多少个订单的信息
:type TotalCount: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Deals = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Deals") is not None:
self.Deals = []
for item in params.get("Deals"):
obj = DealInfo()
obj._deserialize(item)
self.Deals.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeProductConfigRequest(AbstractModel):
"""DescribeProductConfig请求参数结构体
"""
def __init__(self):
r"""
:param Zone: 可用区英文ID,形如ap-guangzhou-1
:type Zone: str
:param InstanceType: 购买实例的类型 HA-高可用型(包括双机高可用,alwaysOn集群),RO-只读副本型,SI-基础版本型
:type InstanceType: str
"""
self.Zone = None
self.InstanceType = None
def _deserialize(self, params):
self.Zone = params.get("Zone")
self.InstanceType = params.get("InstanceType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeProductConfigResponse(AbstractModel):
"""DescribeProductConfig返回参数结构体
"""
def __init__(self):
r"""
:param SpecInfoList: 规格信息数组
:type SpecInfoList: list of SpecInfo
:param TotalCount: 返回总共多少条数据
:type TotalCount: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.SpecInfoList = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
if params.get("SpecInfoList") is not None:
self.SpecInfoList = []
for item in params.get("SpecInfoList"):
obj = SpecInfo()
obj._deserialize(item)
self.SpecInfoList.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeProjectSecurityGroupsRequest(AbstractModel):
"""DescribeProjectSecurityGroups请求参数结构体
"""
def __init__(self):
r"""
:param ProjectId: 项目ID,可通过控制台项目管理中查看
:type ProjectId: int
"""
self.ProjectId = None
def _deserialize(self, params):
self.ProjectId = params.get("ProjectId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeProjectSecurityGroupsResponse(AbstractModel):
"""DescribeProjectSecurityGroups返回参数结构体
"""
def __init__(self):
r"""
:param SecurityGroupSet: 安全组详情。
:type SecurityGroupSet: list of SecurityGroup
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.SecurityGroupSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("SecurityGroupSet") is not None:
self.SecurityGroupSet = []
for item in params.get("SecurityGroupSet"):
obj = SecurityGroup()
obj._deserialize(item)
self.SecurityGroupSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribePublishSubscribeRequest(AbstractModel):
"""DescribePublishSubscribe请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param PubOrSubInstanceId: 订阅/发布实例ID,与InstanceId是发布实例还是订阅实例有关;当InstanceId为发布实例时,本字段按照订阅实例ID做筛选;当InstanceId为订阅实例时,本字段按照发布实例ID做筛选;
:type PubOrSubInstanceId: str
:param PubOrSubInstanceIp: 订阅/发布实例内网IP,与InstanceId是发布实例还是订阅实例有关;当InstanceId为发布实例时,本字段按照订阅实例内网IP做筛选;当InstanceId为订阅实例时,本字段按照发布实例内网IP做筛选;
:type PubOrSubInstanceIp: str
:param PublishSubscribeId: 订阅发布ID,用于筛选
:type PublishSubscribeId: int
:param PublishSubscribeName: 订阅发布名字,用于筛选
:type PublishSubscribeName: str
:param PublishDBName: 发布库名字,用于筛选
:type PublishDBName: str
:param SubscribeDBName: 订阅库名字,用于筛选
:type SubscribeDBName: str
:param Offset: 分页,页数
:type Offset: int
:param Limit: 分页,页大小
:type Limit: int
"""
self.InstanceId = None
self.PubOrSubInstanceId = None
self.PubOrSubInstanceIp = None
self.PublishSubscribeId = None
self.PublishSubscribeName = None
self.PublishDBName = None
self.SubscribeDBName = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.PubOrSubInstanceId = params.get("PubOrSubInstanceId")
self.PubOrSubInstanceIp = params.get("PubOrSubInstanceIp")
self.PublishSubscribeId = params.get("PublishSubscribeId")
self.PublishSubscribeName = params.get("PublishSubscribeName")
self.PublishDBName = params.get("PublishDBName")
self.SubscribeDBName = params.get("SubscribeDBName")
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribePublishSubscribeResponse(AbstractModel):
"""DescribePublishSubscribe返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 总数
:type TotalCount: int
:param PublishSubscribeSet: 发布订阅列表
:type PublishSubscribeSet: list of PublishSubscribe
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.PublishSubscribeSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("PublishSubscribeSet") is not None:
self.PublishSubscribeSet = []
for item in params.get("PublishSubscribeSet"):
obj = PublishSubscribe()
obj._deserialize(item)
self.PublishSubscribeSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeReadOnlyGroupByReadOnlyInstanceRequest(AbstractModel):
"""DescribeReadOnlyGroupByReadOnlyInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,格式如:mssqlro-3l3fgqn7
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeReadOnlyGroupByReadOnlyInstanceResponse(AbstractModel):
"""DescribeReadOnlyGroupByReadOnlyInstance返回参数结构体
"""
def __init__(self):
r"""
:param ReadOnlyGroupId: 只读组ID
:type ReadOnlyGroupId: str
:param ReadOnlyGroupName: 只读组名称
:type ReadOnlyGroupName: str
:param RegionId: 只读组的地域ID
:type RegionId: str
:param ZoneId: 只读组的可用区ID
:type ZoneId: str
:param IsOfflineDelay: 是否启动超时剔除功能 ,0-不开启剔除功能,1-开启剔除功能
:type IsOfflineDelay: int
:param ReadOnlyMaxDelayTime: 启动超时剔除功能后,使用的超时阈值,单位是秒
:type ReadOnlyMaxDelayTime: int
:param MinReadOnlyInGroup: 启动超时剔除功能后,只读组至少保留的只读副本数
:type MinReadOnlyInGroup: int
:param Vip: 只读组vip
:type Vip: str
:param Vport: 只读组vport
:type Vport: int
:param VpcId: 只读组在私有网络ID
:type VpcId: str
:param SubnetId: 只读组在私有网络子网ID
:type SubnetId: str
:param MasterInstanceId: 主实例ID,形如mssql-sgeshe3th
:type MasterInstanceId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.ReadOnlyGroupId = None
self.ReadOnlyGroupName = None
self.RegionId = None
self.ZoneId = None
self.IsOfflineDelay = None
self.ReadOnlyMaxDelayTime = None
self.MinReadOnlyInGroup = None
self.Vip = None
self.Vport = None
self.VpcId = None
self.SubnetId = None
self.MasterInstanceId = None
self.RequestId = None
def _deserialize(self, params):
self.ReadOnlyGroupId = params.get("ReadOnlyGroupId")
self.ReadOnlyGroupName = params.get("ReadOnlyGroupName")
self.RegionId = params.get("RegionId")
self.ZoneId = params.get("ZoneId")
self.IsOfflineDelay = params.get("IsOfflineDelay")
self.ReadOnlyMaxDelayTime = params.get("ReadOnlyMaxDelayTime")
self.MinReadOnlyInGroup = params.get("MinReadOnlyInGroup")
self.Vip = params.get("Vip")
self.Vport = params.get("Vport")
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
self.MasterInstanceId = params.get("MasterInstanceId")
self.RequestId = params.get("RequestId")
class DescribeReadOnlyGroupDetailsRequest(AbstractModel):
"""DescribeReadOnlyGroupDetails请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 主实例ID,格式如:mssql-3l3fgqn7
:type InstanceId: str
:param ReadOnlyGroupId: 只读组ID,格式如:mssqlrg-3l3fgqn7
:type ReadOnlyGroupId: str
"""
self.InstanceId = None
self.ReadOnlyGroupId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.ReadOnlyGroupId = params.get("ReadOnlyGroupId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeReadOnlyGroupDetailsResponse(AbstractModel):
"""DescribeReadOnlyGroupDetails返回参数结构体
"""
def __init__(self):
r"""
:param ReadOnlyGroupId: 只读组ID
:type ReadOnlyGroupId: str
:param ReadOnlyGroupName: 只读组名称
:type ReadOnlyGroupName: str
:param RegionId: 只读组的地域ID,与主实例相同
:type RegionId: str
:param ZoneId: 只读组的可用区ID,与主实例相同
:type ZoneId: str
:param IsOfflineDelay: 是否启动超时剔除功能,0-不开启剔除功能,1-开启剔除功能
:type IsOfflineDelay: int
:param ReadOnlyMaxDelayTime: 启动超时剔除功能后,使用的超时阈值
:type ReadOnlyMaxDelayTime: int
:param MinReadOnlyInGroup: 启动超时剔除功能后,至少只读组保留的只读副本数
:type MinReadOnlyInGroup: int
:param Vip: 只读组vip
:type Vip: str
:param Vport: 只读组vport
:type Vport: int
:param VpcId: 只读组私有网络ID
:type VpcId: str
:param SubnetId: 只读组私有网络子网ID
:type SubnetId: str
:param ReadOnlyInstanceSet: 只读实例副本集合
:type ReadOnlyInstanceSet: list of ReadOnlyInstance
:param Status: 只读组状态: 1-申请成功运行中,5-申请中
:type Status: int
:param MasterInstanceId: 主实例ID,形如mssql-sgeshe3th
:type MasterInstanceId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.ReadOnlyGroupId = None
self.ReadOnlyGroupName = None
self.RegionId = None
self.ZoneId = None
self.IsOfflineDelay = None
self.ReadOnlyMaxDelayTime = None
self.MinReadOnlyInGroup = None
self.Vip = None
self.Vport = None
self.VpcId = None
self.SubnetId = None
self.ReadOnlyInstanceSet = None
self.Status = None
self.MasterInstanceId = None
self.RequestId = None
def _deserialize(self, params):
self.ReadOnlyGroupId = params.get("ReadOnlyGroupId")
self.ReadOnlyGroupName = params.get("ReadOnlyGroupName")
self.RegionId = params.get("RegionId")
self.ZoneId = params.get("ZoneId")
self.IsOfflineDelay = params.get("IsOfflineDelay")
self.ReadOnlyMaxDelayTime = params.get("ReadOnlyMaxDelayTime")
self.MinReadOnlyInGroup = params.get("MinReadOnlyInGroup")
self.Vip = params.get("Vip")
self.Vport = params.get("Vport")
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
if params.get("ReadOnlyInstanceSet") is not None:
self.ReadOnlyInstanceSet = []
for item in params.get("ReadOnlyInstanceSet"):
obj = ReadOnlyInstance()
obj._deserialize(item)
self.ReadOnlyInstanceSet.append(obj)
self.Status = params.get("Status")
self.MasterInstanceId = params.get("MasterInstanceId")
self.RequestId = params.get("RequestId")
class DescribeReadOnlyGroupListRequest(AbstractModel):
"""DescribeReadOnlyGroupList请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 主实例ID,格式如:mssql-3l3fgqn7
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeReadOnlyGroupListResponse(AbstractModel):
"""DescribeReadOnlyGroupList返回参数结构体
"""
def __init__(self):
r"""
:param ReadOnlyGroupSet: 只读组列表
:type ReadOnlyGroupSet: list of ReadOnlyGroup
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.ReadOnlyGroupSet = None
self.RequestId = None
def _deserialize(self, params):
if params.get("ReadOnlyGroupSet") is not None:
self.ReadOnlyGroupSet = []
for item in params.get("ReadOnlyGroupSet"):
obj = ReadOnlyGroup()
obj._deserialize(item)
self.ReadOnlyGroupSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeRegionsRequest(AbstractModel):
"""DescribeRegions请求参数结构体
"""
class DescribeRegionsResponse(AbstractModel):
"""DescribeRegions返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 返回地域信息总的条目
:type TotalCount: int
:param RegionSet: 地域信息数组
:type RegionSet: list of RegionInfo
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.RegionSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("RegionSet") is not None:
self.RegionSet = []
for item in params.get("RegionSet"):
obj = RegionInfo()
obj._deserialize(item)
self.RegionSet.append(obj)
self.RequestId = params.get("RequestId")
class DescribeRollbackTimeRequest(AbstractModel):
"""DescribeRollbackTime请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param DBs: 需要查询的数据库列表
:type DBs: list of str
"""
self.InstanceId = None
self.DBs = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.DBs = params.get("DBs")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeRollbackTimeResponse(AbstractModel):
"""DescribeRollbackTime返回参数结构体
"""
def __init__(self):
r"""
:param Details: 数据库可回档实例信息
:type Details: list of DbRollbackTimeInfo
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Details = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Details") is not None:
self.Details = []
for item in params.get("Details"):
obj = DbRollbackTimeInfo()
obj._deserialize(item)
self.Details.append(obj)
self.RequestId = params.get("RequestId")
class DescribeSlowlogsRequest(AbstractModel):
"""DescribeSlowlogs请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-k8voqdlz
:type InstanceId: str
:param StartTime: 查询开始时间
:type StartTime: str
:param EndTime: 查询结束时间
:type EndTime: str
:param Limit: 分页返回,每页返回的数目,取值为1-100,默认值为20
:type Limit: int
:param Offset: 分页返回,页编号,默认值为第0页
:type Offset: int
"""
self.InstanceId = None
self.StartTime = None
self.EndTime = None
self.Limit = None
self.Offset = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeSlowlogsResponse(AbstractModel):
"""DescribeSlowlogs返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 查询总数
:type TotalCount: int
:param Slowlogs: 慢查询日志信息列表
:type Slowlogs: list of SlowlogInfo
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Slowlogs = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Slowlogs") is not None:
self.Slowlogs = []
for item in params.get("Slowlogs"):
obj = SlowlogInfo()
obj._deserialize(item)
self.Slowlogs.append(obj)
self.RequestId = params.get("RequestId")
class DescribeUploadBackupInfoRequest(AbstractModel):
"""DescribeUploadBackupInfo请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
"""
self.InstanceId = None
self.BackupMigrationId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeUploadBackupInfoResponse(AbstractModel):
"""DescribeUploadBackupInfo返回参数结构体
"""
def __init__(self):
r"""
:param BucketName: 存储桶名称
:type BucketName: str
:param Region: 存储桶地域信息
:type Region: str
:param Path: 存储路径
:type Path: str
:param TmpSecretId: 临时密钥ID
:type TmpSecretId: str
:param TmpSecretKey: 临时密钥Key
:type TmpSecretKey: str
:param XCosSecurityToken: 临时密钥Token
:type XCosSecurityToken: str
:param StartTime: 临时密钥开始时间
:type StartTime: str
:param ExpiredTime: 临时密钥到期时间
:type ExpiredTime: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.BucketName = None
self.Region = None
self.Path = None
self.TmpSecretId = None
self.TmpSecretKey = None
self.XCosSecurityToken = None
self.StartTime = None
self.ExpiredTime = None
self.RequestId = None
def _deserialize(self, params):
self.BucketName = params.get("BucketName")
self.Region = params.get("Region")
self.Path = params.get("Path")
self.TmpSecretId = params.get("TmpSecretId")
self.TmpSecretKey = params.get("TmpSecretKey")
self.XCosSecurityToken = params.get("XCosSecurityToken")
self.StartTime = params.get("StartTime")
self.ExpiredTime = params.get("ExpiredTime")
self.RequestId = params.get("RequestId")
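# Illustrative sketch: uploading a backup file with the temporary COS
# credentials returned above. Assumes the cos-python-sdk-v5 package
# (qcloud_cos) is available; the file name and local path are hypothetical:
#
#     from qcloud_cos import CosConfig, CosS3Client
#
#     config = CosConfig(Region=resp.Region,
#                        SecretId=resp.TmpSecretId,
#                        SecretKey=resp.TmpSecretKey,
#                        Token=resp.XCosSecurityToken)
#     cos_client = CosS3Client(config)
#     cos_client.upload_file(Bucket=resp.BucketName,
#                            Key=resp.Path + "backup.bak",
#                            LocalFilePath="/tmp/backup.bak")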
class DescribeUploadIncrementalInfoRequest(AbstractModel):
"""DescribeUploadIncrementalInfo请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param IncrementalMigrationId: 增量导入任务ID
:type IncrementalMigrationId: str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.IncrementalMigrationId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeUploadIncrementalInfoResponse(AbstractModel):
"""DescribeUploadIncrementalInfo返回参数结构体
"""
def __init__(self):
r"""
:param BucketName: 存储桶名称
:type BucketName: str
:param Region: 存储桶地域信息
:type Region: str
:param Path: 存储路径
:type Path: str
:param TmpSecretId: 临时密钥ID
:type TmpSecretId: str
:param TmpSecretKey: 临时密钥Key
:type TmpSecretKey: str
:param XCosSecurityToken: 临时密钥Token
:type XCosSecurityToken: str
:param StartTime: 临时密钥开始时间
:type StartTime: str
:param ExpiredTime: 临时密钥到期时间
:type ExpiredTime: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.BucketName = None
self.Region = None
self.Path = None
self.TmpSecretId = None
self.TmpSecretKey = None
self.XCosSecurityToken = None
self.StartTime = None
self.ExpiredTime = None
self.RequestId = None
def _deserialize(self, params):
self.BucketName = params.get("BucketName")
self.Region = params.get("Region")
self.Path = params.get("Path")
self.TmpSecretId = params.get("TmpSecretId")
self.TmpSecretKey = params.get("TmpSecretKey")
self.XCosSecurityToken = params.get("XCosSecurityToken")
self.StartTime = params.get("StartTime")
self.ExpiredTime = params.get("ExpiredTime")
self.RequestId = params.get("RequestId")
class DescribeZonesRequest(AbstractModel):
"""DescribeZones请求参数结构体
"""
class DescribeZonesResponse(AbstractModel):
"""DescribeZones返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 返回多少个可用区信息
:type TotalCount: int
:param ZoneSet: 可用区数组
:type ZoneSet: list of ZoneInfo
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.ZoneSet = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("ZoneSet") is not None:
self.ZoneSet = []
for item in params.get("ZoneSet"):
obj = ZoneInfo()
obj._deserialize(item)
self.ZoneSet.append(obj)
self.RequestId = params.get("RequestId")
class DisassociateSecurityGroupsRequest(AbstractModel):
"""DisassociateSecurityGroups请求参数结构体
"""
def __init__(self):
r"""
:param SecurityGroupId: 安全组ID。
:type SecurityGroupId: str
:param InstanceIdSet: 实例ID 列表,一个或者多个实例ID组成的数组。多个实例必须是同一个地域,同一个可用区,同一个项目下的。
:type InstanceIdSet: list of str
"""
self.SecurityGroupId = None
self.InstanceIdSet = None
def _deserialize(self, params):
self.SecurityGroupId = params.get("SecurityGroupId")
self.InstanceIdSet = params.get("InstanceIdSet")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DisassociateSecurityGroupsResponse(AbstractModel):
"""DisassociateSecurityGroups返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class InquiryPriceCreateDBInstancesRequest(AbstractModel):
"""InquiryPriceCreateDBInstances请求参数结构体
"""
def __init__(self):
r"""
:param Zone: 可用区ID。该参数可以通过调用 DescribeZones 接口的返回值中的Zone字段来获取。
:type Zone: str
:param Memory: 内存大小,单位:GB
:type Memory: int
:param Storage: 实例容量大小,单位:GB。
:type Storage: int
:param InstanceChargeType: 计费类型,取值支持 PREPAID,POSTPAID。
:type InstanceChargeType: str
:param Period: 购买时长,单位:月。取值为1到48,默认为1
:type Period: int
:param GoodsNum: 一次性购买的实例数量。取值1-100,默认取值为1
:type GoodsNum: int
:param DBVersion: sqlserver版本,目前只支持:2008R2(SQL Server 2008 Enterprise),2012SP3(SQL Server 2012 Enterprise),2016SP1(SQL Server 2016 Enterprise),201602(SQL Server 2016 Standard)2017(SQL Server 2017 Enterprise)版本。默认为2008R2版本
:type DBVersion: str
:param Cpu: 预购买实例的CPU核心数
:type Cpu: int
:param InstanceType: 购买实例的类型 HA-高可用型(包括双机高可用,alwaysOn集群),RO-只读副本,SI-基础版,默认取值HA
:type InstanceType: str
:param MachineType: 购买实例的宿主机类型,PM-物理机, CLOUD_PREMIUM-虚拟机高性能云盘,CLOUD_SSD-虚拟机SSD云盘,默认取值PM
:type MachineType: str
"""
self.Zone = None
self.Memory = None
self.Storage = None
self.InstanceChargeType = None
self.Period = None
self.GoodsNum = None
self.DBVersion = None
self.Cpu = None
self.InstanceType = None
self.MachineType = None
def _deserialize(self, params):
self.Zone = params.get("Zone")
self.Memory = params.get("Memory")
self.Storage = params.get("Storage")
self.InstanceChargeType = params.get("InstanceChargeType")
self.Period = params.get("Period")
self.GoodsNum = params.get("GoodsNum")
self.DBVersion = params.get("DBVersion")
self.Cpu = params.get("Cpu")
self.InstanceType = params.get("InstanceType")
self.MachineType = params.get("MachineType")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class InquiryPriceCreateDBInstancesResponse(AbstractModel):
"""InquiryPriceCreateDBInstances返回参数结构体
"""
def __init__(self):
r"""
:param OriginalPrice: 未打折前价格,其值除以100表示多少钱。例如10010表示100.10元
:type OriginalPrice: int
:param Price: 实际需要支付的价格,其值除以100表示多少钱。例如10010表示100.10元
:type Price: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.OriginalPrice = None
self.Price = None
self.RequestId = None
def _deserialize(self, params):
self.OriginalPrice = params.get("OriginalPrice")
self.Price = params.get("Price")
self.RequestId = params.get("RequestId")
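# Worked example: prices are returned in hundredths of a yuan, so divide by
# 100 to display the amount (OriginalPrice=10010 means 100.10 CNY):
#
#     original_yuan = resp.OriginalPrice / 100   # 100.10
#     payable_yuan = resp.Price / 100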
class InquiryPriceRenewDBInstanceRequest(AbstractModel):
"""InquiryPriceRenewDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param Period: 续费周期。按月续费最多48个月。默认查询续费一个月的价格
:type Period: int
:param TimeUnit: 续费周期单位。month表示按月续费,当前只支持按月付费查询价格
:type TimeUnit: str
"""
self.InstanceId = None
self.Period = None
self.TimeUnit = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Period = params.get("Period")
self.TimeUnit = params.get("TimeUnit")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class InquiryPriceRenewDBInstanceResponse(AbstractModel):
"""InquiryPriceRenewDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param OriginalPrice: 未打折的原价,其值除以100表示最终的价格。例如10094表示100.94元
:type OriginalPrice: int
:param Price: 实际需要支付价格,其值除以100表示最终的价格。例如10094表示100.94元
:type Price: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.OriginalPrice = None
self.Price = None
self.RequestId = None
def _deserialize(self, params):
self.OriginalPrice = params.get("OriginalPrice")
self.Price = params.get("Price")
self.RequestId = params.get("RequestId")
class InquiryPriceUpgradeDBInstanceRequest(AbstractModel):
"""InquiryPriceUpgradeDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param Memory: 实例升级后的内存大小,单位GB,其值不能比当前实例内存小
:type Memory: int
:param Storage: 实例升级后的磁盘大小,单位GB,其值不能比当前实例磁盘小
:type Storage: int
:param Cpu: 实例升级后的CPU核心数,其值不能比当前实例CPU小
:type Cpu: int
"""
self.InstanceId = None
self.Memory = None
self.Storage = None
self.Cpu = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Memory = params.get("Memory")
self.Storage = params.get("Storage")
self.Cpu = params.get("Cpu")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class InquiryPriceUpgradeDBInstanceResponse(AbstractModel):
"""InquiryPriceUpgradeDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param OriginalPrice: 未打折的原价,其值除以100表示最终的价格。例如10094表示100.94元
:type OriginalPrice: int
:param Price: 实际需要支付价格,其值除以100表示最终的价格。例如10094表示100.94元
:type Price: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.OriginalPrice = None
self.Price = None
self.RequestId = None
def _deserialize(self, params):
self.OriginalPrice = params.get("OriginalPrice")
self.Price = params.get("Price")
self.RequestId = params.get("RequestId")
class InstanceDBDetail(AbstractModel):
"""实例的数据库信息
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param DBDetails: 数据库信息列表
:type DBDetails: list of DBDetail
"""
self.InstanceId = None
self.DBDetails = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("DBDetails") is not None:
self.DBDetails = []
for item in params.get("DBDetails"):
obj = DBDetail()
obj._deserialize(item)
self.DBDetails.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class InstanceRenewInfo(AbstractModel):
"""实例续费状态信息
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param RenewFlag: 实例续费标记。0:正常续费,1:自动续费,2:到期不续
:type RenewFlag: int
"""
self.InstanceId = None
self.RenewFlag = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.RenewFlag = params.get("RenewFlag")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrateDB(AbstractModel):
"""需要迁移的DB列表
"""
def __init__(self):
r"""
:param DBName: 迁移数据库的名称
:type DBName: str
"""
self.DBName = None
def _deserialize(self, params):
self.DBName = params.get("DBName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrateDetail(AbstractModel):
"""迁移的进度详情类型
"""
def __init__(self):
r"""
:param StepName: 当前环节的名称
:type StepName: str
:param Progress: 当前环节的进度(单位是%)
:type Progress: int
"""
self.StepName = None
self.Progress = None
def _deserialize(self, params):
self.StepName = params.get("StepName")
self.Progress = params.get("Progress")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrateSource(AbstractModel):
"""迁移任务的源类型
"""
def __init__(self):
r"""
:param InstanceId: 迁移源实例的ID,MigrateType=1(TencentDB for SQLServers)时使用,格式如:mssql-si2823jyl
:type InstanceId: str
:param CvmId: 迁移源Cvm的ID,MigrateType=2(云服务器自建SQLServer数据库)时使用
:type CvmId: str
:param VpcId: 迁移源Cvm的Vpc网络标识,MigrateType=2(云服务器自建SQLServer数据库)时使用,格式如:vpc-6ys9ont9
:type VpcId: str
:param SubnetId: 迁移源Cvm的Vpc下的子网标识,MigrateType=2(云服务器自建SQLServer数据库)时使用,格式如:subnet-h9extioi
:type SubnetId: str
:param UserName: 用户名,MigrateType=1或MigrateType=2使用
:type UserName: str
:param Password: 密码,MigrateType=1或MigrateType=2使用
:type Password: str
:param Ip: 迁移源Cvm自建库的内网IP,MigrateType=2(云服务器自建SQLServer数据库)时使用
:type Ip: str
:param Port: 迁移源Cvm自建库的端口号,MigrateType=2(云服务器自建SQLServer数据库)时使用
:type Port: int
:param Url: 离线迁移的源备份地址,MigrateType=4或MigrateType=5使用
:type Url: list of str
:param UrlPassword: 离线迁移的源备份密码,MigrateType=4或MigrateType=5使用
:type UrlPassword: str
"""
self.InstanceId = None
self.CvmId = None
self.VpcId = None
self.SubnetId = None
self.UserName = None
self.Password = None
self.Ip = None
self.Port = None
self.Url = None
self.UrlPassword = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.CvmId = params.get("CvmId")
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
self.UserName = params.get("UserName")
self.Password = params.get("Password")
self.Ip = params.get("Ip")
self.Port = params.get("Port")
self.Url = params.get("Url")
self.UrlPassword = params.get("UrlPassword")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrateTarget(AbstractModel):
"""迁移任务的目标类型
"""
def __init__(self):
r"""
:param InstanceId: 迁移目标实例的ID,格式如:mssql-si2823jyl
:type InstanceId: str
:param UserName: 迁移目标实例的用户名
:type UserName: str
:param Password: 迁移目标实例的密码
:type Password: str
"""
self.InstanceId = None
self.UserName = None
self.Password = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.UserName = params.get("UserName")
self.Password = params.get("Password")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrateTask(AbstractModel):
"""查询迁移任务列表类型
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
:param MigrateName: 迁移任务名称
:type MigrateName: str
:param AppId: 迁移任务所属的用户ID
:type AppId: int
:param Region: 迁移任务所属的地域
:type Region: str
:param SourceType: 迁移源的类型 1:TencentDB for SQLServer 2:云服务器自建SQLServer数据库 4:SQLServer备份还原 5:SQLServer备份还原(COS方式)
:type SourceType: int
:param CreateTime: 迁移任务的创建时间
:type CreateTime: str
:param StartTime: 迁移任务的开始时间
:type StartTime: str
:param EndTime: 迁移任务的结束时间
:type EndTime: str
:param Status: 迁移任务的状态(1:初始化,4:迁移中,5.迁移失败,6.迁移成功,7已中止,8已删除,9中止中,10完成中,11中止失败,12完成失败)
:type Status: int
:param Message: 信息
:type Message: str
:param CheckFlag: 是否迁移任务经过检查(0:未校验,1:校验成功,2:校验失败,3:校验中)
:type CheckFlag: int
:param Progress: 迁移任务当前进度(单位%)
:type Progress: int
:param MigrateDetail: 迁移任务进度细节
:type MigrateDetail: :class:`tencentcloud.sqlserver.v20180328.models.MigrateDetail`
"""
self.MigrateId = None
self.MigrateName = None
self.AppId = None
self.Region = None
self.SourceType = None
self.CreateTime = None
self.StartTime = None
self.EndTime = None
self.Status = None
self.Message = None
self.CheckFlag = None
self.Progress = None
self.MigrateDetail = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
self.MigrateName = params.get("MigrateName")
self.AppId = params.get("AppId")
self.Region = params.get("Region")
self.SourceType = params.get("SourceType")
self.CreateTime = params.get("CreateTime")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Status = params.get("Status")
self.Message = params.get("Message")
self.CheckFlag = params.get("CheckFlag")
self.Progress = params.get("Progress")
if params.get("MigrateDetail") is not None:
self.MigrateDetail = MigrateDetail()
self.MigrateDetail._deserialize(params.get("MigrateDetail"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
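# A hypothetical helper (not part of the SDK) mapping MigrateTask.Status codes
# to readable names, following the docstring above:
#
#     MIGRATE_TASK_STATUS = {
#         1: "initializing", 4: "migrating", 5: "failed", 6: "succeeded",
#         7: "aborted", 8: "deleted", 9: "aborting", 10: "finishing",
#         11: "abort failed", 12: "finish failed",
#     }
#     print(MIGRATE_TASK_STATUS.get(task.Status, "unknown"))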
class Migration(AbstractModel):
"""冷备迁移导入
"""
def __init__(self):
r"""
:param MigrationId: 备份导入任务ID 或 增量导入任务ID
:type MigrationId: str
:param MigrationName: 备份导入名称,增量导入任务该字段为空
注意:此字段可能返回 null,表示取不到有效值。
:type MigrationName: str
:param AppId: 应用ID
:type AppId: int
:param Region: 地域
:type Region: str
:param InstanceId: 迁移目标实例ID
:type InstanceId: str
:param RecoveryType: 迁移任务恢复类型
:type RecoveryType: str
:param UploadType: 备份用户上传类型,COS_URL-备份放在用户的对象存储上,提供URL。COS_UPLOAD-备份放在业务的对象存储上,用户上传
:type UploadType: str
:param BackupFiles: 备份文件列表,UploadType确定,COS_URL则保存URL,COS_UPLOAD则保存备份名称
:type BackupFiles: list of str
:param Status: 迁移任务状态,2-创建完成,7-全量导入中,8-等待增量,9-导入成功,10-导入失败,12-增量导入中
:type Status: int
:param CreateTime: 迁移任务创建时间
:type CreateTime: str
:param StartTime: 迁移任务开始时间
:type StartTime: str
:param EndTime: 迁移任务结束时间
:type EndTime: str
:param Message: 说明信息
:type Message: str
:param Detail: 迁移细节
:type Detail: :class:`tencentcloud.sqlserver.v20180328.models.MigrationDetail`
:param Action: 当前状态允许的操作
:type Action: :class:`tencentcloud.sqlserver.v20180328.models.MigrationAction`
:param IsRecovery: 是否是最终恢复,全量导入任务该字段为空
注意:此字段可能返回 null,表示取不到有效值。
:type IsRecovery: str
"""
self.MigrationId = None
self.MigrationName = None
self.AppId = None
self.Region = None
self.InstanceId = None
self.RecoveryType = None
self.UploadType = None
self.BackupFiles = None
self.Status = None
self.CreateTime = None
self.StartTime = None
self.EndTime = None
self.Message = None
self.Detail = None
self.Action = None
self.IsRecovery = None
def _deserialize(self, params):
self.MigrationId = params.get("MigrationId")
self.MigrationName = params.get("MigrationName")
self.AppId = params.get("AppId")
self.Region = params.get("Region")
self.InstanceId = params.get("InstanceId")
self.RecoveryType = params.get("RecoveryType")
self.UploadType = params.get("UploadType")
self.BackupFiles = params.get("BackupFiles")
self.Status = params.get("Status")
self.CreateTime = params.get("CreateTime")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Message = params.get("Message")
if params.get("Detail") is not None:
self.Detail = MigrationDetail()
self.Detail._deserialize(params.get("Detail"))
if params.get("Action") is not None:
self.Action = MigrationAction()
self.Action._deserialize(params.get("Action"))
self.IsRecovery = params.get("IsRecovery")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrationAction(AbstractModel):
"""冷备导入任务允许的操作
"""
def __init__(self):
r"""
:param AllAction: 支持的所有操作,值包括:view(查看任务) ,modify(修改任务), start(启动任务),incremental(创建增量任务),delete(删除任务),upload(获取上传权限)。
:type AllAction: list of str
:param AllowedAction: 当前状态允许的操作,AllAction的子集,为空表示禁止所有操作
:type AllowedAction: list of str
"""
self.AllAction = None
self.AllowedAction = None
def _deserialize(self, params):
self.AllAction = params.get("AllAction")
self.AllowedAction = params.get("AllowedAction")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrationDetail(AbstractModel):
"""冷备导入任务迁移细节
"""
def __init__(self):
r"""
:param StepAll: 总步骤数
:type StepAll: int
:param StepNow: 当前步骤
:type StepNow: int
:param Progress: 总进度,如:"30"表示30%
:type Progress: int
:param StepInfo: 步骤信息,null表示还未开始迁移
注意:此字段可能返回 null,表示取不到有效值。
:type StepInfo: list of MigrationStep
"""
self.StepAll = None
self.StepNow = None
self.Progress = None
self.StepInfo = None
def _deserialize(self, params):
self.StepAll = params.get("StepAll")
self.StepNow = params.get("StepNow")
self.Progress = params.get("Progress")
if params.get("StepInfo") is not None:
self.StepInfo = []
for item in params.get("StepInfo"):
obj = MigrationStep()
obj._deserialize(item)
self.StepInfo.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MigrationStep(AbstractModel):
"""冷备导入任务迁移步骤细节
"""
def __init__(self):
r"""
:param StepNo: 步骤序列
:type StepNo: int
:param StepName: 步骤展现名称
:type StepName: str
:param StepId: 英文ID标识
:type StepId: str
:param Status: 步骤状态:0-默认值,1-成功,2-失败,3-执行中,4-未执行
:type Status: int
"""
self.StepNo = None
self.StepName = None
self.StepId = None
self.Status = None
def _deserialize(self, params):
self.StepNo = params.get("StepNo")
self.StepName = params.get("StepName")
self.StepId = params.get("StepId")
self.Status = params.get("Status")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyAccountPrivilegeRequest(AbstractModel):
"""ModifyAccountPrivilege请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 数据库实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param Accounts: 账号权限变更信息
:type Accounts: list of AccountPrivilegeModifyInfo
"""
self.InstanceId = None
self.Accounts = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("Accounts") is not None:
self.Accounts = []
for item in params.get("Accounts"):
obj = AccountPrivilegeModifyInfo()
obj._deserialize(item)
self.Accounts.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyAccountPrivilegeResponse(AbstractModel):
"""ModifyAccountPrivilege返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 异步任务流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class ModifyAccountRemarkRequest(AbstractModel):
"""ModifyAccountRemark请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param Accounts: 修改备注的账户信息
:type Accounts: list of AccountRemark
"""
self.InstanceId = None
self.Accounts = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("Accounts") is not None:
self.Accounts = []
for item in params.get("Accounts"):
obj = AccountRemark()
obj._deserialize(item)
self.Accounts.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyAccountRemarkResponse(AbstractModel):
"""ModifyAccountRemark返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyBackupMigrationRequest(AbstractModel):
"""ModifyBackupMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param MigrationName: 任务名称
:type MigrationName: str
:param RecoveryType: 迁移任务恢复类型,FULL,FULL_LOG,FULL_DIFF
:type RecoveryType: str
:param UploadType: COS_URL-备份放在用户的对象存储上,提供URL。COS_UPLOAD-备份放在业务的对象存储上,用户上传
:type UploadType: str
:param BackupFiles: UploadType是COS_URL时这里时URL,COS_UPLOAD这里填备份文件的名字;只支持1个备份文件,但1个备份文件内可包含多个库
:type BackupFiles: list of str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.MigrationName = None
self.RecoveryType = None
self.UploadType = None
self.BackupFiles = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.MigrationName = params.get("MigrationName")
self.RecoveryType = params.get("RecoveryType")
self.UploadType = params.get("UploadType")
self.BackupFiles = params.get("BackupFiles")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyBackupMigrationResponse(AbstractModel):
"""ModifyBackupMigration返回参数结构体
"""
def __init__(self):
r"""
:param BackupMigrationId: 备份导入任务ID
:type BackupMigrationId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.BackupMigrationId = None
self.RequestId = None
def _deserialize(self, params):
self.BackupMigrationId = params.get("BackupMigrationId")
self.RequestId = params.get("RequestId")
class ModifyBackupNameRequest(AbstractModel):
"""ModifyBackupName请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,格式如:mssql-3l3fgqn7
:type InstanceId: str
:param BackupName: 修改的备份名称
:type BackupName: str
:param BackupId: 要修改名称的备份ID,可通过 [DescribeBackups](https://cloud.tencent.com/document/product/238/19943) 接口获取。
:type BackupId: int
:param GroupId: 备份任务组ID,在单库备份文件模式下,可通过[DescribeBackups](https://cloud.tencent.com/document/product/238/19943) 接口获得。
BackupId 和 GroupId 同时存在,按照BackupId进行修改。
:type GroupId: str
"""
self.InstanceId = None
self.BackupName = None
self.BackupId = None
self.GroupId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupName = params.get("BackupName")
self.BackupId = params.get("BackupId")
self.GroupId = params.get("GroupId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyBackupNameResponse(AbstractModel):
"""ModifyBackupName返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyBackupStrategyRequest(AbstractModel):
"""ModifyBackupStrategy请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param BackupType: 备份类型,当前只支持按天备份,取值为daily
:type BackupType: str
:param BackupTime: 备份时间点,取值为0-23的整数
:type BackupTime: int
:param BackupDay: BackupType取值为daily时,表示备份间隔天数。当前取值只能为1
:type BackupDay: int
:param BackupModel: 备份模式,master_pkg-主节点上打包备份文件;master_no_pkg-主节点单库备份文件;slave_pkg-从节点上打包备份文件;slave_no_pkg-从节点上单库备份文件,从节点上备份只有在always on容灾模式下支持。
:type BackupModel: str
"""
self.InstanceId = None
self.BackupType = None
self.BackupTime = None
self.BackupDay = None
self.BackupModel = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupType = params.get("BackupType")
self.BackupTime = params.get("BackupTime")
self.BackupDay = params.get("BackupDay")
self.BackupModel = params.get("BackupModel")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyBackupStrategyResponse(AbstractModel):
"""ModifyBackupStrategy返回参数结构体
"""
def __init__(self):
r"""
:param Errno: 返回错误码
:type Errno: int
:param Msg: 返回错误信息
:type Msg: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Errno = None
self.Msg = None
self.RequestId = None
def _deserialize(self, params):
self.Errno = params.get("Errno")
self.Msg = params.get("Msg")
self.RequestId = params.get("RequestId")
class ModifyDBInstanceNameRequest(AbstractModel):
"""ModifyDBInstanceName请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 数据库实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param InstanceName: 新的数据库实例名字
:type InstanceName: str
"""
self.InstanceId = None
self.InstanceName = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.InstanceName = params.get("InstanceName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDBInstanceNameResponse(AbstractModel):
"""ModifyDBInstanceName返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyDBInstanceNetworkRequest(AbstractModel):
"""ModifyDBInstanceNetwork请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例id
:type InstanceId: str
:param NewVpcId: 新VPC网络Id
:type NewVpcId: str
:param NewSubnetId: 新子网Id
:type NewSubnetId: str
:param OldIpRetainTime: 原vip保留时长,单位小时,默认为0,代表立即回收,最大为168小时
:type OldIpRetainTime: int
:param Vip: 指定VIP地址
:type Vip: str
"""
self.InstanceId = None
self.NewVpcId = None
self.NewSubnetId = None
self.OldIpRetainTime = None
self.Vip = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.NewVpcId = params.get("NewVpcId")
self.NewSubnetId = params.get("NewSubnetId")
self.OldIpRetainTime = params.get("OldIpRetainTime")
self.Vip = params.get("Vip")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDBInstanceNetworkResponse(AbstractModel):
"""ModifyDBInstanceNetwork返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 实例转网流程id,可通过[DescribeFlowStatus](https://cloud.tencent.com/document/product/238/19967)接口查询流程状态
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
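# Illustrative sketch: the returned FlowId can be polled with the
# DescribeFlowStatus API referenced in the docstring above (request/response
# model names assumed to follow the same naming convention):
#
#     flow_req = models.DescribeFlowStatusRequest()
#     flow_req.FlowId = resp.FlowId
#     flow_resp = client.DescribeFlowStatus(flow_req)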
class ModifyDBInstanceProjectRequest(AbstractModel):
"""ModifyDBInstanceProject请求参数结构体
"""
def __init__(self):
r"""
:param InstanceIdSet: 实例ID数组,形如mssql-j8kv137v
:type InstanceIdSet: list of str
:param ProjectId: 项目ID,为0的话表示默认项目
:type ProjectId: int
"""
self.InstanceIdSet = None
self.ProjectId = None
def _deserialize(self, params):
self.InstanceIdSet = params.get("InstanceIdSet")
self.ProjectId = params.get("ProjectId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDBInstanceProjectResponse(AbstractModel):
"""ModifyDBInstanceProject返回参数结构体
"""
def __init__(self):
r"""
:param Count: 修改成功的实例个数
:type Count: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Count = None
self.RequestId = None
def _deserialize(self, params):
self.Count = params.get("Count")
self.RequestId = params.get("RequestId")
class ModifyDBInstanceRenewFlagRequest(AbstractModel):
"""ModifyDBInstanceRenewFlag请求参数结构体
"""
def __init__(self):
r"""
:param RenewFlags: 实例续费状态标记信息
:type RenewFlags: list of InstanceRenewInfo
"""
self.RenewFlags = None
def _deserialize(self, params):
if params.get("RenewFlags") is not None:
self.RenewFlags = []
for item in params.get("RenewFlags"):
obj = InstanceRenewInfo()
obj._deserialize(item)
self.RenewFlags.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDBInstanceRenewFlagResponse(AbstractModel):
"""ModifyDBInstanceRenewFlag返回参数结构体
"""
def __init__(self):
r"""
:param Count: 修改成功的个数
:type Count: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Count = None
self.RequestId = None
def _deserialize(self, params):
self.Count = params.get("Count")
self.RequestId = params.get("RequestId")
class ModifyDBInstanceSecurityGroupsRequest(AbstractModel):
"""ModifyDBInstanceSecurityGroups请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID,格式如:mssql-c1nl9rpv 或者 mssqlro-c1nl9rpv,与云数据库控制台页面中显示的实例 ID 相同。
:type InstanceId: str
:param SecurityGroupIdSet: 要修改的安全组 ID 列表,一个或者多个安全组 ID 组成的数组。
:type SecurityGroupIdSet: list of str
"""
self.InstanceId = None
self.SecurityGroupIdSet = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.SecurityGroupIdSet = params.get("SecurityGroupIdSet")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDBInstanceSecurityGroupsResponse(AbstractModel):
"""ModifyDBInstanceSecurityGroups返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyDBNameRequest(AbstractModel):
"""ModifyDBName请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param OldDBName: 旧数据库名
:type OldDBName: str
:param NewDBName: 新数据库名
:type NewDBName: str
"""
self.InstanceId = None
self.OldDBName = None
self.NewDBName = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.OldDBName = params.get("OldDBName")
self.NewDBName = params.get("NewDBName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDBNameResponse(AbstractModel):
"""ModifyDBName返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 任务流ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
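# Editor's note: ModifyDBName above, like many mutating calls in this module,
# returns a FlowId instead of a final result.  The docstrings point at
# DescribeFlowStatus for polling; this sketch assumes its request model is
# defined earlier in this file and that Status uses 0=success, 1=failure,
# 2=running -- verify against the DescribeFlowStatus documentation.
def _example_wait_for_flow(client, flow_id, interval=5, attempts=60):
    import time
    for _ in range(attempts):
        req = DescribeFlowStatusRequest()
        req.FlowId = flow_id
        status = client.DescribeFlowStatus(req).Status
        if status != 2:  # anything but "running" is treated as terminal here
            return status
        time.sleep(interval)
    raise TimeoutError("flow %d did not finish in time" % flow_id)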
class ModifyDBRemarkRequest(AbstractModel):
"""ModifyDBRemark请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-rljoi3bf
:type InstanceId: str
:param DBRemarks: 数据库名称及备注数组,每个元素包含数据库名和对应的备注
:type DBRemarks: list of DBRemark
"""
self.InstanceId = None
self.DBRemarks = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("DBRemarks") is not None:
self.DBRemarks = []
for item in params.get("DBRemarks"):
obj = DBRemark()
obj._deserialize(item)
self.DBRemarks.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDBRemarkResponse(AbstractModel):
"""ModifyDBRemark返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyDatabaseCDCRequest(AbstractModel):
"""ModifyDatabaseCDC请求参数结构体
"""
def __init__(self):
r"""
:param DBNames: 数据库名数组
:type DBNames: list of str
        :param ModifyType: 开启、关闭数据库CDC功能 enable:开启,disable:关闭
:type ModifyType: str
:param InstanceId: 实例ID
:type InstanceId: str
"""
self.DBNames = None
self.ModifyType = None
self.InstanceId = None
def _deserialize(self, params):
self.DBNames = params.get("DBNames")
self.ModifyType = params.get("ModifyType")
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDatabaseCDCResponse(AbstractModel):
"""ModifyDatabaseCDC返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class ModifyDatabaseCTRequest(AbstractModel):
"""ModifyDatabaseCT请求参数结构体
"""
def __init__(self):
r"""
:param DBNames: 数据库名数组
:type DBNames: list of str
        :param ModifyType: 启用、禁用数据库CT功能 enable:启用,disable:禁用
:type ModifyType: str
:param InstanceId: 实例ID
:type InstanceId: str
:param ChangeRetentionDay: 启用CT时额外保留天数,默认保留3天,最小3天,最大30天
:type ChangeRetentionDay: int
"""
self.DBNames = None
self.ModifyType = None
self.InstanceId = None
self.ChangeRetentionDay = None
def _deserialize(self, params):
self.DBNames = params.get("DBNames")
self.ModifyType = params.get("ModifyType")
self.InstanceId = params.get("InstanceId")
self.ChangeRetentionDay = params.get("ChangeRetentionDay")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDatabaseCTResponse(AbstractModel):
"""ModifyDatabaseCT返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
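# Editor's note: a sketch of enabling Change Tracking (CT) with the request
# model above.  DBNames, ModifyType and ChangeRetentionDay come straight from
# the docstring (retention is bounded to 3..30 days); the client method name
# is assumed to follow the SDK's Action convention.
def _example_enable_ct(client):
    req = ModifyDatabaseCTRequest()
    req.InstanceId = "mssql-njj2mtpl"  # hypothetical instance
    req.DBNames = ["orders", "users"]
    req.ModifyType = "enable"
    req.ChangeRetentionDay = 7
    return client.ModifyDatabaseCT(req).FlowId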
class ModifyDatabaseMdfRequest(AbstractModel):
"""ModifyDatabaseMdf请求参数结构体
"""
def __init__(self):
r"""
:param DBNames: 数据库名数组
:type DBNames: list of str
:param InstanceId: 实例ID
:type InstanceId: str
"""
self.DBNames = None
self.InstanceId = None
def _deserialize(self, params):
self.DBNames = params.get("DBNames")
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDatabaseMdfResponse(AbstractModel):
"""ModifyDatabaseMdf返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class ModifyIncrementalMigrationRequest(AbstractModel):
"""ModifyIncrementalMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param IncrementalMigrationId: 增量导入任务ID,由CreateIncrementalMigration接口返回
:type IncrementalMigrationId: str
:param IsRecovery: 是否需要恢复,NO-不需要,YES-需要,默认不修改增量备份导入任务是否需要恢复的属性。
:type IsRecovery: str
        :param BackupFiles: UploadType是COS_URL时这里是URL,COS_UPLOAD这里填备份文件的名字;只支持1个备份文件,但1个备份文件内可包含多个库
:type BackupFiles: list of str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.IncrementalMigrationId = None
self.IsRecovery = None
self.BackupFiles = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
self.IsRecovery = params.get("IsRecovery")
self.BackupFiles = params.get("BackupFiles")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyIncrementalMigrationResponse(AbstractModel):
"""ModifyIncrementalMigration返回参数结构体
"""
def __init__(self):
r"""
:param IncrementalMigrationId: 增量备份导入任务ID
:type IncrementalMigrationId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.IncrementalMigrationId = None
self.RequestId = None
def _deserialize(self, params):
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
self.RequestId = params.get("RequestId")
class ModifyInstanceParamRequest(AbstractModel):
"""ModifyInstanceParam请求参数结构体
"""
def __init__(self):
r"""
:param InstanceIds: 实例短 ID 列表
:type InstanceIds: list of str
        :param ParamList: 要修改的参数列表。每一个元素是 Name 和 CurrentValue 的组合。Name 是参数名,CurrentValue 是要修改的值。注意:如果修改的参数需要重启实例,那么您的实例将会在执行修改时重启。您可以通过DescribeInstanceParams接口查询修改参数时是否会重启实例,以免导致您的实例不符合预期重启。
:type ParamList: list of Parameter
:param WaitSwitch: 执行参数调整任务的方式,默认为 0。支持值包括:0 - 立刻执行,1 - 时间窗执行。
:type WaitSwitch: int
"""
self.InstanceIds = None
self.ParamList = None
self.WaitSwitch = None
def _deserialize(self, params):
self.InstanceIds = params.get("InstanceIds")
if params.get("ParamList") is not None:
self.ParamList = []
for item in params.get("ParamList"):
obj = Parameter()
obj._deserialize(item)
self.ParamList.append(obj)
self.WaitSwitch = params.get("WaitSwitch")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyInstanceParamResponse(AbstractModel):
"""ModifyInstanceParam返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
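# Editor's note: a sketch of a batched parameter change.  Each entry pairs
# Name with CurrentValue via the Parameter model defined further down in
# this module; WaitSwitch=1 defers restart-requiring changes to the
# maintenance window, per the request docstring.  The parameter name used
# here is hypothetical.
def _example_set_params(client):
    req = ModifyInstanceParamRequest()
    req.InstanceIds = ["mssql-njj2mtpl"]
    param = Parameter()
    param.Name = "max degree of parallelism"
    param.CurrentValue = "4"
    req.ParamList = [param]
    req.WaitSwitch = 1  # 0 = apply immediately, 1 = apply in the window
    return client.ModifyInstanceParam(req)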
class ModifyMaintenanceSpanRequest(AbstractModel):
"""ModifyMaintenanceSpan请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-k8voqdlz
:type InstanceId: str
:param Weekly: 以周为单位,表示允许周几维护,例如:[1,2,3,4,5,6,7]表示周一到周日均为可维护日,本参数不填,则不修改此值。
:type Weekly: list of int
:param StartTime: 每天可维护的开始时间,例如:10:24标识可维护时间窗10点24分开始,本参数不填,则不修改此值。
:type StartTime: str
:param Span: 每天可维护的持续时间,单位是h,例如:1 表示从可维护的开始时间起持续1小时,本参数不填,则不修改此值。
:type Span: int
"""
self.InstanceId = None
self.Weekly = None
self.StartTime = None
self.Span = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Weekly = params.get("Weekly")
self.StartTime = params.get("StartTime")
self.Span = params.get("Span")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyMaintenanceSpanResponse(AbstractModel):
"""ModifyMaintenanceSpan返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
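# Editor's note: a sketch of narrowing the maintenance window.  Weekly uses
# 1..7 for Monday..Sunday and Span is a duration in hours, per the docstring
# above; the client call is assumed as elsewhere.
def _example_set_maintenance(client):
    req = ModifyMaintenanceSpanRequest()
    req.InstanceId = "mssql-k8voqdlz"
    req.Weekly = [6, 7]      # Saturday and Sunday only
    req.StartTime = "02:00"  # window opens at 02:00 ...
    req.Span = 3             # ... and stays open for three hours
    return client.ModifyMaintenanceSpan(req)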
class ModifyMigrationRequest(AbstractModel):
"""ModifyMigration请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
:param MigrateName: 新的迁移任务的名称,若不填则不修改
:type MigrateName: str
:param MigrateType: 新的迁移类型(1:结构迁移 2:数据迁移 3:增量同步),若不填则不修改
:type MigrateType: int
:param SourceType: 迁移源的类型 1:TencentDB for SQLServer 2:云服务器自建SQLServer数据库 4:SQLServer备份还原 5:SQLServer备份还原(COS方式),若不填则不修改
:type SourceType: int
:param Source: 迁移源,若不填则不修改
:type Source: :class:`tencentcloud.sqlserver.v20180328.models.MigrateSource`
:param Target: 迁移目标,若不填则不修改
:type Target: :class:`tencentcloud.sqlserver.v20180328.models.MigrateTarget`
:param MigrateDBSet: 迁移DB对象 ,离线迁移(SourceType=4或SourceType=5)不使用,若不填则不修改
:type MigrateDBSet: list of MigrateDB
"""
self.MigrateId = None
self.MigrateName = None
self.MigrateType = None
self.SourceType = None
self.Source = None
self.Target = None
self.MigrateDBSet = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
self.MigrateName = params.get("MigrateName")
self.MigrateType = params.get("MigrateType")
self.SourceType = params.get("SourceType")
if params.get("Source") is not None:
self.Source = MigrateSource()
self.Source._deserialize(params.get("Source"))
if params.get("Target") is not None:
self.Target = MigrateTarget()
self.Target._deserialize(params.get("Target"))
if params.get("MigrateDBSet") is not None:
self.MigrateDBSet = []
for item in params.get("MigrateDBSet"):
obj = MigrateDB()
obj._deserialize(item)
self.MigrateDBSet.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyMigrationResponse(AbstractModel):
"""ModifyMigration返回参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.MigrateId = None
self.RequestId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
self.RequestId = params.get("RequestId")
class ModifyPublishSubscribeNameRequest(AbstractModel):
"""ModifyPublishSubscribeName请求参数结构体
"""
def __init__(self):
r"""
:param PublishSubscribeId: 发布订阅ID
:type PublishSubscribeId: int
:param PublishSubscribeName: 待修改的发布订阅名称
:type PublishSubscribeName: str
"""
self.PublishSubscribeId = None
self.PublishSubscribeName = None
def _deserialize(self, params):
self.PublishSubscribeId = params.get("PublishSubscribeId")
self.PublishSubscribeName = params.get("PublishSubscribeName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyPublishSubscribeNameResponse(AbstractModel):
"""ModifyPublishSubscribeName返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ModifyReadOnlyGroupDetailsRequest(AbstractModel):
"""ModifyReadOnlyGroupDetails请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 主实例ID,格式如:mssql-3l3fgqn7
:type InstanceId: str
:param ReadOnlyGroupId: 只读组ID
:type ReadOnlyGroupId: str
:param ReadOnlyGroupName: 只读组名称,不填此参数,则不修改
:type ReadOnlyGroupName: str
:param IsOfflineDelay: 是否启动超时剔除功能,0-不开启剔除功能,1-开启剔除功能,不填此参数,则不修改
:type IsOfflineDelay: int
:param ReadOnlyMaxDelayTime: 启动超时剔除功能后,使用的超时阈值,不填此参数,则不修改
:type ReadOnlyMaxDelayTime: int
:param MinReadOnlyInGroup: 启动超时剔除功能后,只读组至少保留的只读副本数,不填此参数,则不修改
:type MinReadOnlyInGroup: int
:param WeightPairs: 只读组实例权重修改集合,不填此参数,则不修改
:type WeightPairs: list of ReadOnlyInstanceWeightPair
:param AutoWeight: 0-用户自定义权重(根据WeightPairs调整),1-系统自动分配权重(WeightPairs无效), 默认为0
:type AutoWeight: int
:param BalanceWeight: 0-不重新均衡负载,1-重新均衡负载,默认为0
:type BalanceWeight: int
"""
self.InstanceId = None
self.ReadOnlyGroupId = None
self.ReadOnlyGroupName = None
self.IsOfflineDelay = None
self.ReadOnlyMaxDelayTime = None
self.MinReadOnlyInGroup = None
self.WeightPairs = None
self.AutoWeight = None
self.BalanceWeight = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.ReadOnlyGroupId = params.get("ReadOnlyGroupId")
self.ReadOnlyGroupName = params.get("ReadOnlyGroupName")
self.IsOfflineDelay = params.get("IsOfflineDelay")
self.ReadOnlyMaxDelayTime = params.get("ReadOnlyMaxDelayTime")
self.MinReadOnlyInGroup = params.get("MinReadOnlyInGroup")
if params.get("WeightPairs") is not None:
self.WeightPairs = []
for item in params.get("WeightPairs"):
obj = ReadOnlyInstanceWeightPair()
obj._deserialize(item)
self.WeightPairs.append(obj)
self.AutoWeight = params.get("AutoWeight")
self.BalanceWeight = params.get("BalanceWeight")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyReadOnlyGroupDetailsResponse(AbstractModel):
"""ModifyReadOnlyGroupDetails返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ParamRecord(AbstractModel):
"""实例参数修改记录
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param ParamName: 参数名称
:type ParamName: str
:param OldValue: 参数修改前的值
:type OldValue: str
:param NewValue: 参数修改后的值
:type NewValue: str
:param Status: 参数修改状态,1-初始化等待被执行,2-执行成功,3-执行失败,4-参数修改中
:type Status: int
:param ModifyTime: 修改时间
:type ModifyTime: str
"""
self.InstanceId = None
self.ParamName = None
self.OldValue = None
self.NewValue = None
self.Status = None
self.ModifyTime = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.ParamName = params.get("ParamName")
self.OldValue = params.get("OldValue")
self.NewValue = params.get("NewValue")
self.Status = params.get("Status")
self.ModifyTime = params.get("ModifyTime")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class Parameter(AbstractModel):
"""数据库实例参数
"""
def __init__(self):
r"""
:param Name: 参数名称
:type Name: str
:param CurrentValue: 参数值
:type CurrentValue: str
"""
self.Name = None
self.CurrentValue = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.CurrentValue = params.get("CurrentValue")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ParameterDetail(AbstractModel):
"""实例参数的详细描述
"""
def __init__(self):
r"""
:param Name: 参数名称
:type Name: str
:param ParamType: 参数类型,integer-整型,enum-枚举型
:type ParamType: str
:param Default: 参数默认值
:type Default: str
:param Description: 参数描述
:type Description: str
:param CurrentValue: 参数当前值
:type CurrentValue: str
:param NeedReboot: 修改参数后,是否需要重启数据库以使参数生效,0-不需要重启,1-需要重启
:type NeedReboot: int
:param Max: 参数允许的最大值
:type Max: int
:param Min: 参数允许的最小值
:type Min: int
:param EnumValue: 参数允许的枚举类型
:type EnumValue: list of str
:param Status: 参数状态 0-状态正常 1-在修改中
:type Status: int
"""
self.Name = None
self.ParamType = None
self.Default = None
self.Description = None
self.CurrentValue = None
self.NeedReboot = None
self.Max = None
self.Min = None
self.EnumValue = None
self.Status = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.ParamType = params.get("ParamType")
self.Default = params.get("Default")
self.Description = params.get("Description")
self.CurrentValue = params.get("CurrentValue")
self.NeedReboot = params.get("NeedReboot")
self.Max = params.get("Max")
self.Min = params.get("Min")
self.EnumValue = params.get("EnumValue")
self.Status = params.get("Status")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class PublishSubscribe(AbstractModel):
"""发布订阅对象
"""
def __init__(self):
r"""
:param Id: 发布订阅ID
:type Id: int
:param Name: 发布订阅名称
:type Name: str
:param PublishInstanceId: 发布实例ID
:type PublishInstanceId: str
:param PublishInstanceName: 发布实例名称
:type PublishInstanceName: str
:param PublishInstanceIp: 发布实例IP
:type PublishInstanceIp: str
:param SubscribeInstanceId: 订阅实例ID
:type SubscribeInstanceId: str
:param SubscribeInstanceName: 订阅实例名称
:type SubscribeInstanceName: str
:param SubscribeInstanceIp: 订阅实例IP
:type SubscribeInstanceIp: str
:param DatabaseTupleSet: 数据库的订阅发布关系集合
:type DatabaseTupleSet: list of DatabaseTupleStatus
"""
self.Id = None
self.Name = None
self.PublishInstanceId = None
self.PublishInstanceName = None
self.PublishInstanceIp = None
self.SubscribeInstanceId = None
self.SubscribeInstanceName = None
self.SubscribeInstanceIp = None
self.DatabaseTupleSet = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.Name = params.get("Name")
self.PublishInstanceId = params.get("PublishInstanceId")
self.PublishInstanceName = params.get("PublishInstanceName")
self.PublishInstanceIp = params.get("PublishInstanceIp")
self.SubscribeInstanceId = params.get("SubscribeInstanceId")
self.SubscribeInstanceName = params.get("SubscribeInstanceName")
self.SubscribeInstanceIp = params.get("SubscribeInstanceIp")
if params.get("DatabaseTupleSet") is not None:
self.DatabaseTupleSet = []
for item in params.get("DatabaseTupleSet"):
obj = DatabaseTupleStatus()
obj._deserialize(item)
self.DatabaseTupleSet.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class QueryMigrationCheckProcessRequest(AbstractModel):
"""QueryMigrationCheckProcess请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
"""
self.MigrateId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class QueryMigrationCheckProcessResponse(AbstractModel):
"""QueryMigrationCheckProcess返回参数结构体
"""
def __init__(self):
r"""
:param TotalStep: 总步骤数量
:type TotalStep: int
:param CurrentStep: 当前步骤编号,从1开始
:type CurrentStep: int
:param StepDetails: 所有步骤详情
:type StepDetails: list of StepDetail
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalStep = None
self.CurrentStep = None
self.StepDetails = None
self.RequestId = None
def _deserialize(self, params):
self.TotalStep = params.get("TotalStep")
self.CurrentStep = params.get("CurrentStep")
if params.get("StepDetails") is not None:
self.StepDetails = []
for item in params.get("StepDetails"):
obj = StepDetail()
obj._deserialize(item)
self.StepDetails.append(obj)
self.RequestId = params.get("RequestId")
class ReadOnlyGroup(AbstractModel):
"""只读组对象
"""
def __init__(self):
r"""
:param ReadOnlyGroupId: 只读组ID
:type ReadOnlyGroupId: str
:param ReadOnlyGroupName: 只读组名称
:type ReadOnlyGroupName: str
:param RegionId: 只读组的地域ID,与主实例相同
:type RegionId: str
:param ZoneId: 只读组的可用区ID,与主实例相同
:type ZoneId: str
:param IsOfflineDelay: 是否启动超时剔除功能,0-不开启剔除功能,1-开启剔除功能
:type IsOfflineDelay: int
:param ReadOnlyMaxDelayTime: 启动超时剔除功能后,使用的超时阈值
:type ReadOnlyMaxDelayTime: int
:param MinReadOnlyInGroup: 启动超时剔除功能后,只读组至少保留的只读副本数
:type MinReadOnlyInGroup: int
:param Vip: 只读组vip
:type Vip: str
:param Vport: 只读组vport
:type Vport: int
:param VpcId: 只读组私有网络ID
:type VpcId: str
:param SubnetId: 只读组私有网络子网ID
:type SubnetId: str
:param Status: 只读组状态: 1-申请成功运行中,5-申请中
:type Status: int
:param MasterInstanceId: 主实例ID,形如mssql-sgeshe3th
:type MasterInstanceId: str
:param ReadOnlyInstanceSet: 只读实例副本集合
:type ReadOnlyInstanceSet: list of ReadOnlyInstance
"""
self.ReadOnlyGroupId = None
self.ReadOnlyGroupName = None
self.RegionId = None
self.ZoneId = None
self.IsOfflineDelay = None
self.ReadOnlyMaxDelayTime = None
self.MinReadOnlyInGroup = None
self.Vip = None
self.Vport = None
self.VpcId = None
self.SubnetId = None
self.Status = None
self.MasterInstanceId = None
self.ReadOnlyInstanceSet = None
def _deserialize(self, params):
self.ReadOnlyGroupId = params.get("ReadOnlyGroupId")
self.ReadOnlyGroupName = params.get("ReadOnlyGroupName")
self.RegionId = params.get("RegionId")
self.ZoneId = params.get("ZoneId")
self.IsOfflineDelay = params.get("IsOfflineDelay")
self.ReadOnlyMaxDelayTime = params.get("ReadOnlyMaxDelayTime")
self.MinReadOnlyInGroup = params.get("MinReadOnlyInGroup")
self.Vip = params.get("Vip")
self.Vport = params.get("Vport")
self.VpcId = params.get("VpcId")
self.SubnetId = params.get("SubnetId")
self.Status = params.get("Status")
self.MasterInstanceId = params.get("MasterInstanceId")
if params.get("ReadOnlyInstanceSet") is not None:
self.ReadOnlyInstanceSet = []
for item in params.get("ReadOnlyInstanceSet"):
obj = ReadOnlyInstance()
obj._deserialize(item)
self.ReadOnlyInstanceSet.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ReadOnlyInstance(AbstractModel):
"""只读副本实例
"""
def __init__(self):
r"""
:param InstanceId: 只读副本ID,格式如:mssqlro-3l3fgqn7
:type InstanceId: str
:param Name: 只读副本名称
:type Name: str
:param Uid: 只读副本唯一UID
:type Uid: str
:param ProjectId: 只读副本所在项目ID
:type ProjectId: int
:param Status: 只读副本状态。1:申请中 2:运行中 3:被延迟剔除 4:已隔离 5:回收中 6:已回收 7:任务执行中 8:已下线 9:实例扩容中 10:实例迁移中 12:重启中
:type Status: int
:param CreateTime: 只读副本创建时间
:type CreateTime: str
:param UpdateTime: 只读副本更新时间
:type UpdateTime: str
:param Memory: 只读副本内存大小,单位G
:type Memory: int
:param Storage: 只读副本存储空间大小,单位G
:type Storage: int
:param Cpu: 只读副本cpu核心数
:type Cpu: int
:param Version: 只读副本版本代号
:type Version: str
:param Type: 宿主机代号
:type Type: str
:param Model: 只读副本模式,2-单机
:type Model: int
:param PayMode: 只读副本计费模式,1-包年包月,0-按量计费
:type PayMode: int
:param Weight: 只读副本权重
:type Weight: int
:param DelayTime: 只读副本延迟时间,单位秒
:type DelayTime: str
:param SynStatus: 只读副本与主实例的同步状态。
Init:初始化
DeployReadOnlyInPorgress:部署副本进行中
DeployReadOnlySuccess:部署副本成功
DeployReadOnlyFail:部署副本失败
DeployMasterDBInPorgress:主节点上加入副本数据库进行中
DeployMasterDBSuccess:主节点上加入副本数据库成功
DeployMasterDBFail:主节点上加入副本数据库失败
DeployReadOnlyDBInPorgress:副本还原加入数据库开始
DeployReadOnlyDBSuccess:副本还原加入数据库成功
DeployReadOnlyDBFail:副本还原加入数据库失败
SyncDelay:同步延迟
SyncFail:同步故障
SyncExcluded:已剔除只读组
SyncNormal:正常
:type SynStatus: str
:param DatabaseDifference: 只读副本与主实例没有同步的库
:type DatabaseDifference: str
:param AccountDifference: 只读副本与主实例没有同步的账户
:type AccountDifference: str
:param StartTime: 只读副本计费开始时间
:type StartTime: str
:param EndTime: 只读副本计费结束时间
:type EndTime: str
:param IsolateTime: 只读副本隔离时间
:type IsolateTime: str
"""
self.InstanceId = None
self.Name = None
self.Uid = None
self.ProjectId = None
self.Status = None
self.CreateTime = None
self.UpdateTime = None
self.Memory = None
self.Storage = None
self.Cpu = None
self.Version = None
self.Type = None
self.Model = None
self.PayMode = None
self.Weight = None
self.DelayTime = None
self.SynStatus = None
self.DatabaseDifference = None
self.AccountDifference = None
self.StartTime = None
self.EndTime = None
self.IsolateTime = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Name = params.get("Name")
self.Uid = params.get("Uid")
self.ProjectId = params.get("ProjectId")
self.Status = params.get("Status")
self.CreateTime = params.get("CreateTime")
self.UpdateTime = params.get("UpdateTime")
self.Memory = params.get("Memory")
self.Storage = params.get("Storage")
self.Cpu = params.get("Cpu")
self.Version = params.get("Version")
self.Type = params.get("Type")
self.Model = params.get("Model")
self.PayMode = params.get("PayMode")
self.Weight = params.get("Weight")
self.DelayTime = params.get("DelayTime")
self.SynStatus = params.get("SynStatus")
self.DatabaseDifference = params.get("DatabaseDifference")
self.AccountDifference = params.get("AccountDifference")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.IsolateTime = params.get("IsolateTime")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ReadOnlyInstanceWeightPair(AbstractModel):
"""只读实例与权重对应关系
"""
def __init__(self):
r"""
:param ReadOnlyInstanceId: 只读实例ID,格式如:mssqlro-3l3fgqn7
:type ReadOnlyInstanceId: str
:param ReadOnlyWeight: 只读实例权重 ,范围是0-100
:type ReadOnlyWeight: int
"""
self.ReadOnlyInstanceId = None
self.ReadOnlyWeight = None
def _deserialize(self, params):
self.ReadOnlyInstanceId = params.get("ReadOnlyInstanceId")
self.ReadOnlyWeight = params.get("ReadOnlyWeight")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
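# Editor's note: a sketch of assigning custom read weights with the pair
# model above.  Per the ModifyReadOnlyGroupDetails docstring, AutoWeight must
# be 0 for WeightPairs to apply and BalanceWeight=1 rebalances load
# immediately; the group and replica IDs below are hypothetical.
def _example_reweight_group(client):
    req = ModifyReadOnlyGroupDetailsRequest()
    req.InstanceId = "mssql-3l3fgqn7"
    req.ReadOnlyGroupId = "mssqlrg-0abc1234"
    pair = ReadOnlyInstanceWeightPair()
    pair.ReadOnlyInstanceId = "mssqlro-3l3fgqn7"
    pair.ReadOnlyWeight = 80  # valid range is 0-100
    req.WeightPairs = [pair]
    req.AutoWeight = 0
    req.BalanceWeight = 1
    return client.ModifyReadOnlyGroupDetails(req)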
class RecycleDBInstanceRequest(AbstractModel):
"""RecycleDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RecycleDBInstanceResponse(AbstractModel):
"""RecycleDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程id
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class RecycleReadOnlyGroupRequest(AbstractModel):
"""RecycleReadOnlyGroup请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 主实例的ID
:type InstanceId: str
:param ReadOnlyGroupId: 只读组的ID
:type ReadOnlyGroupId: str
"""
self.InstanceId = None
self.ReadOnlyGroupId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.ReadOnlyGroupId = params.get("ReadOnlyGroupId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RecycleReadOnlyGroupResponse(AbstractModel):
"""RecycleReadOnlyGroup返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 任务流ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class RegionInfo(AbstractModel):
"""地域信息
"""
def __init__(self):
r"""
        :param Region: 地域英文ID,类似ap-guangzhou
:type Region: str
:param RegionName: 地域中文名称
:type RegionName: str
:param RegionId: 地域数字ID
:type RegionId: int
:param RegionState: 该地域目前是否可以售卖,UNAVAILABLE-不可售卖;AVAILABLE-可售卖
:type RegionState: str
"""
self.Region = None
self.RegionName = None
self.RegionId = None
self.RegionState = None
def _deserialize(self, params):
self.Region = params.get("Region")
self.RegionName = params.get("RegionName")
self.RegionId = params.get("RegionId")
self.RegionState = params.get("RegionState")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RemoveBackupsRequest(AbstractModel):
"""RemoveBackups请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param BackupNames: 待删除的备份名称,备份名称可通过DescribeBackups接口的FileName字段获得。单次请求批量删除备份数不能超过10个。
:type BackupNames: list of str
"""
self.InstanceId = None
self.BackupNames = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupNames = params.get("BackupNames")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RemoveBackupsResponse(AbstractModel):
"""RemoveBackups返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class RenameRestoreDatabase(AbstractModel):
"""用于RestoreInstance,RollbackInstance,CreateMigration、CloneDB 等接口;对恢复的库进行重命名,且支持选择要恢复的库。
"""
def __init__(self):
r"""
        :param OldName: 库的名字,如果OldName不存在则返回失败。
在用于离线迁移任务时可不填。
:type OldName: str
:param NewName: 库的新名字,在用于离线迁移时,不填则按照OldName命名,OldName和NewName不能同时不填。在用于克隆数据库时,OldName和NewName都必须填写,且不能重复
:type NewName: str
"""
self.OldName = None
self.NewName = None
def _deserialize(self, params):
self.OldName = params.get("OldName")
self.NewName = params.get("NewName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
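# Editor's note: a sketch of restoring one database under a new name.  When
# RenameRestore is passed to RestoreInstance (defined below), only the listed
# databases are restored, per the docstrings; the backup ID is a hypothetical
# value as would be returned by DescribeBackups.
def _example_restore_renamed(client):
    rename = RenameRestoreDatabase()
    rename.OldName = "orders"
    rename.NewName = "orders_restored"
    req = RestoreInstanceRequest()
    req.InstanceId = "mssql-j8kv137v"
    req.BackupId = 10001
    req.RenameRestore = [rename]
    return client.RestoreInstance(req).FlowId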
class RenewDBInstanceRequest(AbstractModel):
"""RenewDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param Period: 续费多少个月,取值范围为1-48,默认为1
:type Period: int
        :param AutoVoucher: 是否自动使用代金券,0-不使用;1-使用;默认不使用
:type AutoVoucher: int
:param VoucherIds: 代金券ID数组,目前只支持使用1张代金券
:type VoucherIds: list of str
:param AutoRenewFlag: 续费标记 0:正常续费 1:自动续费:只用于按量计费转包年包月时有效。
:type AutoRenewFlag: int
"""
self.InstanceId = None
self.Period = None
self.AutoVoucher = None
self.VoucherIds = None
self.AutoRenewFlag = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Period = params.get("Period")
self.AutoVoucher = params.get("AutoVoucher")
self.VoucherIds = params.get("VoucherIds")
self.AutoRenewFlag = params.get("AutoRenewFlag")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RenewDBInstanceResponse(AbstractModel):
"""RenewDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param DealName: 订单名称
:type DealName: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DealName = None
self.RequestId = None
def _deserialize(self, params):
self.DealName = params.get("DealName")
self.RequestId = params.get("RequestId")
class RenewPostpaidDBInstanceRequest(AbstractModel):
"""RenewPostpaidDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,格式如:mssql-3l3fgqn7 或 mssqlro-3l3fgqn7
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RenewPostpaidDBInstanceResponse(AbstractModel):
"""RenewPostpaidDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class ResetAccountPasswordRequest(AbstractModel):
"""ResetAccountPassword请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 数据库实例ID,形如mssql-njj2mtpl
:type InstanceId: str
:param Accounts: 更新后的账户密码信息数组
:type Accounts: list of AccountPassword
"""
self.InstanceId = None
self.Accounts = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
if params.get("Accounts") is not None:
self.Accounts = []
for item in params.get("Accounts"):
obj = AccountPassword()
obj._deserialize(item)
self.Accounts.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ResetAccountPasswordResponse(AbstractModel):
"""ResetAccountPassword返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 修改帐号密码的异步任务流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class ResourceTag(AbstractModel):
"""实例绑定的标签信息
"""
def __init__(self):
r"""
:param TagKey: 标签key
:type TagKey: str
:param TagValue: 标签value
:type TagValue: str
"""
self.TagKey = None
self.TagValue = None
def _deserialize(self, params):
self.TagKey = params.get("TagKey")
self.TagValue = params.get("TagValue")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RestartDBInstanceRequest(AbstractModel):
"""RestartDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 数据库实例ID,形如mssql-njj2mtpl
:type InstanceId: str
"""
self.InstanceId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RestartDBInstanceResponse(AbstractModel):
"""RestartDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 异步任务流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class RestoreInstanceRequest(AbstractModel):
"""RestoreInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param BackupId: 备份文件ID,该ID可以通过DescribeBackups接口返回数据中的Id字段获得
:type BackupId: int
:param TargetInstanceId: 备份恢复到的同一个APPID下的实例ID,不填则恢复到原实例ID
:type TargetInstanceId: str
:param RenameRestore: 按照ReNameRestoreDatabase中的库进行恢复,并重命名,不填则按照默认方式命名恢复的库,且恢复所有的库。
:type RenameRestore: list of RenameRestoreDatabase
:param GroupId: 备份任务组ID,在单库备份文件模式下,可通过[DescribeBackups](https://cloud.tencent.com/document/product/238/19943) 接口获得。
:type GroupId: str
"""
self.InstanceId = None
self.BackupId = None
self.TargetInstanceId = None
self.RenameRestore = None
self.GroupId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupId = params.get("BackupId")
self.TargetInstanceId = params.get("TargetInstanceId")
if params.get("RenameRestore") is not None:
self.RenameRestore = []
for item in params.get("RenameRestore"):
obj = RenameRestoreDatabase()
obj._deserialize(item)
self.RenameRestore.append(obj)
self.GroupId = params.get("GroupId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RestoreInstanceResponse(AbstractModel):
"""RestoreInstance返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 异步流程任务ID,使用FlowId调用DescribeFlowStatus接口获取任务执行状态
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class RollbackInstanceRequest(AbstractModel):
"""RollbackInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID
:type InstanceId: str
:param Type: 回档类型,0-回档的数据库覆盖原库;1-回档的数据库以重命名的形式生成,不覆盖原库
:type Type: int
:param DBs: 需要回档的数据库
:type DBs: list of str
:param Time: 回档目标时间点
:type Time: str
:param TargetInstanceId: 备份恢复到的同一个APPID下的实例ID,不填则恢复到原实例ID
:type TargetInstanceId: str
:param RenameRestore: 按照ReNameRestoreDatabase中的库进行重命名,仅在Type = 1重命名回档方式有效;不填则按照默认方式命名库,DBs参数确定要恢复的库
:type RenameRestore: list of RenameRestoreDatabase
"""
self.InstanceId = None
self.Type = None
self.DBs = None
self.Time = None
self.TargetInstanceId = None
self.RenameRestore = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Type = params.get("Type")
self.DBs = params.get("DBs")
self.Time = params.get("Time")
self.TargetInstanceId = params.get("TargetInstanceId")
if params.get("RenameRestore") is not None:
self.RenameRestore = []
for item in params.get("RenameRestore"):
obj = RenameRestoreDatabase()
obj._deserialize(item)
self.RenameRestore.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RollbackInstanceResponse(AbstractModel):
"""RollbackInstance返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 异步任务ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
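# Editor's note: a sketch of a point-in-time rollback that writes renamed
# copies (Type=1) so the original databases stay untouched, per the
# RollbackInstance docstring; the target time is hypothetical.
def _example_rollback(client):
    req = RollbackInstanceRequest()
    req.InstanceId = "mssql-j8kv137v"
    req.Type = 1
    req.DBs = ["orders"]
    req.Time = "2020-05-01 12:00:00"
    return client.RollbackInstance(req).FlowId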
class RunMigrationRequest(AbstractModel):
"""RunMigration请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
"""
self.MigrateId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class RunMigrationResponse(AbstractModel):
"""RunMigration返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 迁移流程启动后,返回流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class SecurityGroup(AbstractModel):
"""安全组
"""
def __init__(self):
r"""
:param ProjectId: 项目ID
:type ProjectId: int
:param CreateTime: 创建时间,时间格式:yyyy-mm-dd hh:mm:ss
:type CreateTime: str
:param InboundSet: 入站规则
:type InboundSet: list of SecurityGroupPolicy
:param OutboundSet: 出站规则
:type OutboundSet: list of SecurityGroupPolicy
:param SecurityGroupId: 安全组ID
:type SecurityGroupId: str
:param SecurityGroupName: 安全组名称
:type SecurityGroupName: str
:param SecurityGroupRemark: 安全组备注
:type SecurityGroupRemark: str
"""
self.ProjectId = None
self.CreateTime = None
self.InboundSet = None
self.OutboundSet = None
self.SecurityGroupId = None
self.SecurityGroupName = None
self.SecurityGroupRemark = None
def _deserialize(self, params):
self.ProjectId = params.get("ProjectId")
self.CreateTime = params.get("CreateTime")
if params.get("InboundSet") is not None:
self.InboundSet = []
for item in params.get("InboundSet"):
obj = SecurityGroupPolicy()
obj._deserialize(item)
self.InboundSet.append(obj)
if params.get("OutboundSet") is not None:
self.OutboundSet = []
for item in params.get("OutboundSet"):
obj = SecurityGroupPolicy()
obj._deserialize(item)
self.OutboundSet.append(obj)
self.SecurityGroupId = params.get("SecurityGroupId")
self.SecurityGroupName = params.get("SecurityGroupName")
self.SecurityGroupRemark = params.get("SecurityGroupRemark")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SecurityGroupPolicy(AbstractModel):
"""安全组策略
"""
def __init__(self):
r"""
:param Action: 策略,ACCEPT 或者 DROP
:type Action: str
:param CidrIp: 目的 IP 或 IP 段,例如172.16.0.0/12
:type CidrIp: str
:param PortRange: 端口或者端口范围
:type PortRange: str
:param IpProtocol: 网络协议,支持 UDP、TCP等
:type IpProtocol: str
        :param Dir: 规则限定的方向,OUTPUT-出站规则 INPUT-入站规则
:type Dir: str
"""
self.Action = None
self.CidrIp = None
self.PortRange = None
self.IpProtocol = None
self.Dir = None
def _deserialize(self, params):
self.Action = params.get("Action")
self.CidrIp = params.get("CidrIp")
self.PortRange = params.get("PortRange")
self.IpProtocol = params.get("IpProtocol")
self.Dir = params.get("Dir")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SlowlogInfo(AbstractModel):
"""慢查询日志文件信息
"""
def __init__(self):
r"""
:param Id: 慢查询日志文件唯一标识
:type Id: int
:param StartTime: 文件生成的开始时间
:type StartTime: str
:param EndTime: 文件生成的结束时间
:type EndTime: str
:param Size: 文件大小(KB)
:type Size: int
:param Count: 文件中log条数
:type Count: int
:param InternalAddr: 内网下载地址
:type InternalAddr: str
:param ExternalAddr: 外网下载地址
:type ExternalAddr: str
:param Status: 状态(1成功 2失败)
注意:此字段可能返回 null,表示取不到有效值。
:type Status: int
"""
self.Id = None
self.StartTime = None
self.EndTime = None
self.Size = None
self.Count = None
self.InternalAddr = None
self.ExternalAddr = None
self.Status = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Size = params.get("Size")
self.Count = params.get("Count")
self.InternalAddr = params.get("InternalAddr")
self.ExternalAddr = params.get("ExternalAddr")
self.Status = params.get("Status")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SpecInfo(AbstractModel):
"""实例可售卖的规格信息
"""
def __init__(self):
r"""
:param SpecId: 实例规格ID,利用DescribeZones返回的SpecId,结合DescribeProductConfig返回的可售卖规格信息,可获悉某个可用区下可购买什么规格的实例
:type SpecId: int
:param MachineType: 机型ID
:type MachineType: str
:param MachineTypeName: 机型中文名称
:type MachineTypeName: str
:param Version: 数据库版本信息。取值为2008R2(表示SQL Server 2008 R2),2012SP3(表示SQL Server 2012),2016SP1(表示SQL Server 2016 SP1)
:type Version: str
:param VersionName: Version字段对应的版本名称
:type VersionName: str
:param Memory: 内存大小,单位GB
:type Memory: int
:param CPU: CPU核数
:type CPU: int
:param MinStorage: 此规格下最小的磁盘大小,单位GB
:type MinStorage: int
:param MaxStorage: 此规格下最大的磁盘大小,单位GB
:type MaxStorage: int
:param QPS: 此规格对应的QPS大小
:type QPS: int
:param SuitInfo: 此规格的中文描述信息
:type SuitInfo: str
:param Pid: 此规格对应的包年包月Pid
:type Pid: int
:param PostPid: 此规格对应的按量计费Pid列表
注意:此字段可能返回 null,表示取不到有效值。
:type PostPid: list of int
:param PayModeStatus: 此规格下支持的付费模式,POST-仅支持按量计费 PRE-仅支持包年包月 ALL-支持所有
:type PayModeStatus: str
:param InstanceType: 产品类型,HA-高可用型(包括双机高可用,alwaysOn集群),RO-只读副本型,SI-基础版本型
:type InstanceType: str
:param MultiZonesStatus: 跨可用区类型,MultiZones-只支持跨可用区,SameZones-只支持同可用区,ALL-支持所有
:type MultiZonesStatus: str
"""
self.SpecId = None
self.MachineType = None
self.MachineTypeName = None
self.Version = None
self.VersionName = None
self.Memory = None
self.CPU = None
self.MinStorage = None
self.MaxStorage = None
self.QPS = None
self.SuitInfo = None
self.Pid = None
self.PostPid = None
self.PayModeStatus = None
self.InstanceType = None
self.MultiZonesStatus = None
def _deserialize(self, params):
self.SpecId = params.get("SpecId")
self.MachineType = params.get("MachineType")
self.MachineTypeName = params.get("MachineTypeName")
self.Version = params.get("Version")
self.VersionName = params.get("VersionName")
self.Memory = params.get("Memory")
self.CPU = params.get("CPU")
self.MinStorage = params.get("MinStorage")
self.MaxStorage = params.get("MaxStorage")
self.QPS = params.get("QPS")
self.SuitInfo = params.get("SuitInfo")
self.Pid = params.get("Pid")
self.PostPid = params.get("PostPid")
self.PayModeStatus = params.get("PayModeStatus")
self.InstanceType = params.get("InstanceType")
self.MultiZonesStatus = params.get("MultiZonesStatus")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
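# Editor's note: SpecInfo is a read-model, so the sketch below simply filters
# a list of them (for example, one obtained via DescribeProductConfig) for a
# purchasable high-availability spec.
def _example_pick_spec(specs, min_memory=8, min_cpu=4):
    for spec in specs:
        if (spec.InstanceType == "HA"
                and spec.PayModeStatus in ("PRE", "ALL")
                and spec.Memory >= min_memory
                and spec.CPU >= min_cpu):
            return spec.SpecId
    return None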
class StartBackupMigrationRequest(AbstractModel):
"""StartBackupMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
"""
self.InstanceId = None
self.BackupMigrationId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class StartBackupMigrationResponse(AbstractModel):
"""StartBackupMigration返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class StartIncrementalMigrationRequest(AbstractModel):
"""StartIncrementalMigration请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 导入目标实例ID
:type InstanceId: str
:param BackupMigrationId: 备份导入任务ID,由CreateBackupMigration接口返回
:type BackupMigrationId: str
:param IncrementalMigrationId: 增量备份导入任务ID
:type IncrementalMigrationId: str
"""
self.InstanceId = None
self.BackupMigrationId = None
self.IncrementalMigrationId = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.BackupMigrationId = params.get("BackupMigrationId")
self.IncrementalMigrationId = params.get("IncrementalMigrationId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class StartIncrementalMigrationResponse(AbstractModel):
"""StartIncrementalMigration返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 流程ID
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class StartMigrationCheckRequest(AbstractModel):
"""StartMigrationCheck请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务id
:type MigrateId: int
"""
self.MigrateId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class StartMigrationCheckResponse(AbstractModel):
"""StartMigrationCheck返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 迁移检查流程发起后,返回的流程id
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class StepDetail(AbstractModel):
"""进度步骤详情
"""
def __init__(self):
r"""
:param Msg: 具体步骤返回信息
:type Msg: str
:param Status: 当前步骤状态,0成功,-2未开始
:type Status: int
:param Name: 步骤名称
:type Name: str
"""
self.Msg = None
self.Status = None
self.Name = None
def _deserialize(self, params):
self.Msg = params.get("Msg")
self.Status = params.get("Status")
self.Name = params.get("Name")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class StopMigrationRequest(AbstractModel):
"""StopMigration请求参数结构体
"""
def __init__(self):
r"""
:param MigrateId: 迁移任务ID
:type MigrateId: int
"""
self.MigrateId = None
def _deserialize(self, params):
self.MigrateId = params.get("MigrateId")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class StopMigrationResponse(AbstractModel):
"""StopMigration返回参数结构体
"""
def __init__(self):
r"""
:param FlowId: 中止迁移流程发起后,返回的流程id
:type FlowId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.FlowId = None
self.RequestId = None
def _deserialize(self, params):
self.FlowId = params.get("FlowId")
self.RequestId = params.get("RequestId")
class TerminateDBInstanceRequest(AbstractModel):
"""TerminateDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceIdSet: 主动销毁的实例ID列表,格式如:[mssql-3l3fgqn7]。与云数据库控制台页面中显示的实例ID相同
:type InstanceIdSet: list of str
"""
self.InstanceIdSet = None
def _deserialize(self, params):
self.InstanceIdSet = params.get("InstanceIdSet")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class TerminateDBInstanceResponse(AbstractModel):
"""TerminateDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class UpgradeDBInstanceRequest(AbstractModel):
"""UpgradeDBInstance请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID,形如mssql-j8kv137v
:type InstanceId: str
:param Memory: 实例升级后内存大小,单位GB,其值不能小于当前实例内存大小
:type Memory: int
:param Storage: 实例升级后磁盘大小,单位GB,其值不能小于当前实例磁盘大小
:type Storage: int
:param AutoVoucher: 是否自动使用代金券,0 - 不使用;1 - 默认使用。取值默认为0
:type AutoVoucher: int
:param VoucherIds: 代金券ID,目前单个订单只能使用一张代金券
:type VoucherIds: list of str
:param Cpu: 实例升级后的CPU核心数
:type Cpu: int
:param DBVersion: 升级sqlserver的版本,目前支持:2008R2(SQL Server 2008 Enterprise),2012SP3(SQL Server 2012 Enterprise)版本等。每个地域支持售卖的版本不同,可通过DescribeProductConfig接口来拉取每个地域可售卖的版本信息,版本不支持降级,不填则不修改版本
:type DBVersion: str
:param HAType: 升级sqlserver的高可用架构,从镜像容灾升级到always on集群容灾,仅支持2017及以上版本且支持always on高可用的实例,不支持降级到镜像方式容灾,CLUSTER-升级为always on容灾,不填则不修改高可用架构
:type HAType: str
        :param MultiZones: 修改实例是否为跨可用区容灾,SameZones-修改为同可用区 MultiZones-修改为跨可用区
:type MultiZones: str
"""
self.InstanceId = None
self.Memory = None
self.Storage = None
self.AutoVoucher = None
self.VoucherIds = None
self.Cpu = None
self.DBVersion = None
self.HAType = None
self.MultiZones = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Memory = params.get("Memory")
self.Storage = params.get("Storage")
self.AutoVoucher = params.get("AutoVoucher")
self.VoucherIds = params.get("VoucherIds")
self.Cpu = params.get("Cpu")
self.DBVersion = params.get("DBVersion")
self.HAType = params.get("HAType")
self.MultiZones = params.get("MultiZones")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class UpgradeDBInstanceResponse(AbstractModel):
"""UpgradeDBInstance返回参数结构体
"""
def __init__(self):
r"""
:param DealName: 订单名称
:type DealName: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DealName = None
self.RequestId = None
def _deserialize(self, params):
self.DealName = params.get("DealName")
self.RequestId = params.get("RequestId")
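# Editor's note: a sketch of scaling an instance up.  Memory and Storage may
# only grow (the docstring forbids values below the current size) and the
# response carries a deal name rather than a flow ID; the sizes below are
# hypothetical.
def _example_upgrade(client):
    req = UpgradeDBInstanceRequest()
    req.InstanceId = "mssql-j8kv137v"
    req.Memory = 16    # GB, must be >= the current memory
    req.Storage = 500  # GB, must be >= the current storage
    return client.UpgradeDBInstance(req).DealName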
class ZoneInfo(AbstractModel):
"""可用区信息
"""
def __init__(self):
r"""
:param Zone: 可用区英文ID,形如ap-guangzhou-1,表示广州一区
:type Zone: str
:param ZoneName: 可用区中文名称
:type ZoneName: str
:param ZoneId: 可用区数字ID
:type ZoneId: int
:param SpecId: 该可用区目前可售卖的规格ID,利用SpecId,结合接口DescribeProductConfig返回的数据,可获悉该可用区目前可售卖的规格大小
:type SpecId: int
:param Version: 当前可用区与规格下,可售卖的数据库版本,形如2008R2(表示SQL Server 2008 R2)。其可选值有2008R2(表示SQL Server 2008 R2),2012SP3(表示SQL Server 2012),2016SP1(表示SQL Server 2016 SP1)
:type Version: str
"""
self.Zone = None
self.ZoneName = None
self.ZoneId = None
self.SpecId = None
self.Version = None
def _deserialize(self, params):
self.Zone = params.get("Zone")
self.ZoneName = params.get("ZoneName")
self.ZoneId = params.get("ZoneId")
self.SpecId = params.get("SpecId")
self.Version = params.get("Version")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
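# --- Usage sketch (not part of the original SDK) ---
# A minimal illustration of how these AbstractModel subclasses hydrate from a
# response dict; _deserialize is normally invoked by the SDK itself, and the
# field values below are made up for demonstration.
if __name__ == '__main__':
    zone = ZoneInfo()
    zone._deserialize({
        "Zone": "ap-guangzhou-1",
        "ZoneName": "Guangzhou Zone 1",
        "ZoneId": 100001,
        "SpecId": 1,
        "Version": "2008R2",
    })
    print(zone.Zone, zone.Version)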
|
tzpBingo/github-trending
|
codespace/python/tencentcloud/sqlserver/v20180328/models.py
|
Python
|
mit
| 284,748 | 0.002484 |
from pygame.mixer import music as _music
from .loaders import ResourceLoader
from . import constants
__all__ = [
'rewind', 'stop', 'fadeout', 'set_volume', 'get_volume', 'get_pos',
'set_pos', 'play', 'queue', 'pause', 'unpause',
]
_music.set_endevent(constants.MUSIC_END)
class _MusicLoader(ResourceLoader):
"""Pygame's music API acts as a singleton with one 'current' track.
No objects are returned that represent different tracks, so this loader
can't return anything useful. But it can perform all the path name
validations and return the validated path, so that's what we do.
This loader should not be exposed to the user.
"""
EXTNS = ['mp3', 'ogg', 'oga']
TYPE = 'music'
def _load(self, path):
return path
_loader = _MusicLoader('music')
# State of whether we are paused or not
_paused = False
def _play(name, loop):
global _paused
path = _loader.load(name)
_music.load(path)
_music.play(loop)
_paused = False
def play(name):
"""Play a music file from the music/ directory.
The music will loop when it finishes playing.
"""
_play(name, -1)
def play_once(name):
"""Play a music file from the music/ directory."""
_play(name, 0)
def queue(name):
"""Queue a music file to follow the current track.
This will load a music file and queue it. A queued music file will begin as
soon as the current music naturally ends. If the current music is ever
stopped or changed, the queued song will be lost.
"""
path = _loader.load(name)
_music.queue(path)
def is_playing(name):
"""Return True if the music is playing and not paused."""
return _music.get_busy() and not _paused
def pause():
"""Temporarily stop playback of the music stream.
Call `unpause()` to resume.
"""
global _paused
_music.pause()
_paused = True
def unpause():
"""Resume playback of the music stream after it has been paused."""
global _paused
_music.unpause()
_paused = False
def fadeout(seconds):
"""Fade out and eventually stop the music playback.
:param seconds: The duration in seconds over which the sound will be faded
out. For example, to fade out over half a second, call
``music.fadeout(0.5)``.
"""
_music.fadeout(int(seconds * 1000))
rewind = _music.rewind
stop = _music.stop
get_volume = _music.get_volume
set_volume = _music.set_volume
get_pos = _music.get_pos
set_pos = _music.set_pos
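# --- Usage sketch (not part of the original module) ---
# Assumes a music/ directory containing tracks named "theme" and "victory"
# with one of the supported extensions (mp3/ogg/oga).
# play("theme")      # loop the track indefinitely
# queue("victory")   # follow it with another track once it ends naturally
# set_volume(0.5)    # volume is a float in the range 0.0-1.0
# fadeout(1.5)       # fade to silence over 1.5 seconds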
|
yrobla/pyjuegos
|
pgzero/music.py
|
Python
|
lgpl-3.0
| 2,526 | 0.000396 |
from .requests_test import RequestsTest
import copy
import sys
class XSSTest(RequestsTest):
def test(self):
if self.DEBUG: print("Run the XSS Tests")
passed = 0
failed = 0
messages = []
url = self.domain['protocol'] + self.domain['host'] + self.config['path']
print("XSS Test for " + url)
for k, v in self.config['params'].items():
result_text = []
result = 'PASS'
for p in self.payloads:
if self.DEBUG: print(url + "?" + k + "=" + v + " (" + p + ")")
if self.config['method'] == 'GET':
if self.DEBUG: print("Using GET " + self.config['path'])
data = copy.deepcopy(self.config['params'])
data[k] = data[k] + p
res = self.get(url, params=data)
if res.status_code != 200:
result_text.append('Payload ' + p + ' caused an unknown error for parameter ' + k)
failed = failed + 1
result = 'ERROR'
else:
if 'testpath' in self.config:
res = self.get(self.domain['protocol'] + self.domain['host'] + self.config['testpath'])
if self.DEBUG: print("Status " + str(res.status_code))
# if self.DEBUG: print("Content " + str(res.text))
if p in res.text:
failed = failed + 1
result_text.append('=> Payload ' + p + ' not filtered for parameter ' + k)
sys.stderr.write('=> Payload ' + p + ' not filtered for parameter ' + k + '\n')
result = 'FAIL'
else:
passed = passed + 1
elif self.config['method'] == 'POST':
data = copy.deepcopy(self.config['params'])
data[k] = data[k] + p
if self.DEBUG: print("Using POST " + self.config['path'] + " data: " + str(data))
res1 = self.get(url) # Get in case we need CSRF tokens and/or other items from the form
res = self.post(url, data=data)
                    if 300 <= res.status_code <= 400:
                        # Check redirects first; this branch was unreachable
                        # behind the blanket != 200 check in the old ordering.
                        print("Status Code: " + str(res.status_code))
                    elif res.status_code != 200:
                        result_text.append('Payload ' + p + ' caused an unknown error for parameter ' + k)
                        result = 'ERROR'
                        failed = failed + 1
else:
if 'testpath' in self.config:
res = self.get(self.domain['protocol'] + self.domain['host'] + self.config['testpath'])
if self.DEBUG: print("Status " + str(res.status_code))
# if self.DEBUG: print("Content " + str(res.text))
if p in res.text:
failed = failed + 1
result = 'FAIL'
result_text.append('=> Payload ' + p + ' not filtered for parameter ' + k)
sys.stderr.write('=> Payload ' + p + ' not filtered for parameter ' + k + '\n')
else:
passed = passed + 1
else:
if self.DEBUG: print("Endpoint method is not GET or POST")
self.report.add_test_result(url, self.config['method'], 'xss', k, result, result_text)
print("=> " + str(passed) + "/" + str(passed + failed) + " passed/total")
# print("Messages: " + str(messages))
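# --- Configuration sketch (not part of the original test) ---
# The shape of self.domain and self.config as inferred from the accesses
# above; the concrete values are hypothetical.
EXAMPLE_DOMAIN = {'protocol': 'https://', 'host': 'target.example.com'}
EXAMPLE_CONFIG = {
    'path': '/search',
    'method': 'GET',
    'params': {'q': 'test'},
    'testpath': '/search/results',  # optional page where output is reflected
}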
|
sethlaw/sputr
|
tests/xss_test.py
|
Python
|
gpl-3.0
| 3,768 | 0.005308 |
import contextlib
from time import time
from .meter import Meter
from .stats import Stat
from .histogram import Histogram
class Timer(Stat):
def __init__(self):
self.count = 0
self.meter = Meter()
self.histogram = Histogram()
super(Timer, self).__init__()
@contextlib.contextmanager
def time(self):
start_time = time()
try:
yield
finally:
self.update(time() - start_time)
def update(self, value):
self.meter.mark()
self.histogram.update(value)
def get_values(self):
values = self.meter.get_values()
values.update(self.histogram.get_values())
return values
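# --- Usage sketch (not part of the original module) ---
# Time a block of work and read back the combined meter/histogram values;
# assumes Stat() requires no constructor arguments.
if __name__ == '__main__':
    from time import sleep

    timer = Timer()
    with timer.time():
        sleep(0.01)  # the timed section
    print(timer.get_values())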
|
emilssolmanis/tapes
|
tapes/local/timer.py
|
Python
|
apache-2.0
| 702 | 0 |
#!/usr/bin/env python
import re
letters = 'abcdefghijklmnopqrstuvwxyz'
with open("../input/11.txt") as fileobj:
password = fileobj.readline().strip()
print password
def rules(password):
    checks = [rule1, rule2, rule3]
    # all() already yields the boolean we need; no explicit branch required
    return all(check(password) for check in checks)
def rule1(password):
# Rule 1: in the range of A-Z, must have 3 consecutive letters
# Check A-X for [abc, bcd, ..., wzy, xyz]
for i in range(24):
if letters[i:i+3] in password:
return True
# else rule 1 failed
return False
def rule2(password):
# Rule 2: No i, o, l
if 'i' in password or 'o' in password or 'l' in password:
return False
return True
def rule3(password):
# Rule 3: Password must contain at least 2 different, non-overlapping pairs of letters
# (aa, bb) or even (aa, aa) "aaaa"
pair = 0
skipnext = False
for i in range(len(password) - 1):
if skipnext:
skipnext = False
continue
if password[i] == password[i + 1]:
pair += 1
skipnext = True
    return pair > 1
def increment(password):
if password[-1] == 'z':
return increment(password[0:-1]) + 'a'
return password[0:-1] + letters[letters.index(password[-1]) + 1]
while True:
if rules(password):
print "Success! -- 1st password:", password
break
else:
password = increment(password)
# Next run
password = increment(password)
while True:
if rules(password):
print "Success! -- 2nd password:", password
break
else:
password = increment(password)
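# --- Behaviour sketch (not part of the original script) ---
# increment() works like base-26 addition with carry; illustrative values:
#   increment('xx') -> 'xy'
#   increment('xz') -> 'ya'   (the trailing 'z' wraps to 'a' and carries left)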
|
dsumike/adventofcode
|
python/11p2.py
|
Python
|
mit
| 1,491 | 0.032193 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pretend
from warehouse import i18n
def test_sets_locale(monkeypatch):
locale_obj = pretend.stub()
locale_cls = pretend.stub(
parse=pretend.call_recorder(lambda l: locale_obj),
)
monkeypatch.setattr(i18n, "Locale", locale_cls)
request = pretend.stub(locale_name=pretend.stub())
assert i18n._locale(request) is locale_obj
assert locale_cls.parse.calls == [pretend.call(request.locale_name)]
def test_includeme():
config_settings = {}
config = pretend.stub(
add_request_method=pretend.call_recorder(lambda f, name, reify: None),
get_settings=lambda: config_settings,
)
i18n.includeme(config)
assert config.add_request_method.calls == [
pretend.call(i18n._locale, name="locale", reify=True),
]
assert config_settings == {
"jinja2.filters": {
"format_date": "warehouse.i18n.filters:format_date",
"format_datetime": "warehouse.i18n.filters:format_datetime",
},
"jinja2.globals": {
"l20n": "warehouse.i18n.l20n:l20n",
},
}
|
ismail-s/warehouse
|
tests/unit/i18n/test_init.py
|
Python
|
apache-2.0
| 1,632 | 0 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
# Copyright 2012 Nebula Inc
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
import commands
import unittest
def parse_mailmap(mailmap='.mailmap'):
mapping = {}
if os.path.exists(mailmap):
fp = open(mailmap, 'r')
for l in fp:
l = l.strip()
if not l.startswith('#') and ' ' in l:
canonical_email, alias = l.split(' ')
mapping[alias] = canonical_email
return mapping
def str_dict_replace(s, mapping):
for s1, s2 in mapping.iteritems():
s = s.replace(s1, s2)
return s
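# --- Usage sketch (not part of the original tests) ---
# Given a .mailmap line "canonical@example.org alias@example.org", the parser
# maps the alias to the canonical address; the addresses are hypothetical.
#   mapping = parse_mailmap('.mailmap')
#   str_dict_replace('<alias@example.org>', mapping)
#   # -> '<canonical@example.org>'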
class AuthorsTestCase(unittest.TestCase):
def test_authors_up_to_date(self):
path_bits = (os.path.dirname(__file__), '..', '..')
root = os.path.normpath(os.path.join(*path_bits))
contributors = set()
missing = set()
authors_file = open(os.path.join(root, 'AUTHORS'), 'r').read()
if os.path.exists(os.path.join(root, '.git')):
mailmap = parse_mailmap(os.path.join(root, '.mailmap'))
for email in commands.getoutput('git log --format=%ae').split():
if not email:
continue
if "jenkins" in email and "openstack.org" in email:
continue
email = '<' + email + '>'
contributors.add(str_dict_replace(email, mailmap))
for contributor in contributors:
if not contributor in authors_file:
missing.add(contributor)
self.assertTrue(len(missing) == 0,
'%r not listed in AUTHORS file.' % missing)
|
developerworks/horizon
|
horizon/tests/authors_tests.py
|
Python
|
apache-2.0
| 2,188 | 0.001371 |
# Patchwork - automated patch tracking system
# Copyright (C) 2017 Stephen Finucane <stephen@that.guru>
#
# SPDX-License-Identifier: GPL-2.0-or-later
from collections import OrderedDict
from rest_framework.generics import ListAPIView
from rest_framework.serializers import ModelSerializer
from patchwork.api.embedded import CheckSerializer
from patchwork.api.embedded import CoverLetterSerializer
from patchwork.api.embedded import PatchSerializer
from patchwork.api.embedded import ProjectSerializer
from patchwork.api.embedded import SeriesSerializer
from patchwork.api.embedded import UserSerializer
from patchwork.api.filters import EventFilterSet
from patchwork.api.patch import StateField
from patchwork.models import Event
class EventSerializer(ModelSerializer):
project = ProjectSerializer(read_only=True)
patch = PatchSerializer(read_only=True)
series = SeriesSerializer(read_only=True)
cover = CoverLetterSerializer(read_only=True)
previous_state = StateField()
current_state = StateField()
previous_delegate = UserSerializer()
current_delegate = UserSerializer()
    created_check = CheckSerializer()
_category_map = {
Event.CATEGORY_COVER_CREATED: ['cover'],
Event.CATEGORY_PATCH_CREATED: ['patch'],
Event.CATEGORY_PATCH_COMPLETED: ['patch', 'series'],
Event.CATEGORY_PATCH_STATE_CHANGED: ['patch', 'previous_state',
'current_state'],
Event.CATEGORY_PATCH_DELEGATED: ['patch', 'previous_delegate',
'current_delegate'],
Event.CATEGORY_CHECK_CREATED: ['patch', 'created_check'],
Event.CATEGORY_SERIES_CREATED: ['series'],
Event.CATEGORY_SERIES_COMPLETED: ['series'],
}
def to_representation(self, instance):
data = super(EventSerializer, self).to_representation(instance)
payload = OrderedDict()
kept_fields = self._category_map[instance.category] + [
'id', 'category', 'project', 'date']
for field in [x for x in data]:
if field not in kept_fields:
del data[field]
elif field in self._category_map[instance.category]:
field_name = 'check' if field == 'created_check' else field
payload[field_name] = data.pop(field)
data['payload'] = payload
return data
class Meta:
model = Event
fields = ('id', 'category', 'project', 'date', 'patch', 'series',
'cover', 'previous_state', 'current_state',
'previous_delegate', 'current_delegate', 'created_check')
read_only_fields = fields
class EventList(ListAPIView):
"""List events."""
serializer_class = EventSerializer
    # both spellings kept for compatibility across django-filter versions
    filter_class = filterset_class = EventFilterSet
page_size_query_param = None # fixed page size
ordering_fields = ()
ordering = '-date'
def get_queryset(self):
return Event.objects.all()\
.prefetch_related('project', 'patch', 'series', 'cover',
'previous_state', 'current_state',
'previous_delegate', 'current_delegate',
'created_check')
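# --- Serialized shape sketch (not part of the original module) ---
# For a patch-state-changed event, to_representation() keeps only the fields
# named in _category_map and nests them under "payload", roughly:
#   {
#     "id": 1, "category": "patch-state-changed", "project": {...},
#     "date": "...",
#     "payload": {"patch": {...}, "previous_state": "new",
#                 "current_state": "accepted"}
#   }
# ('created_check' would appear as "check"; all field values are illustrative.)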
|
stephenfin/patchwork
|
patchwork/api/event.py
|
Python
|
gpl-2.0
| 3,356 | 0 |
"""
GWR is tested against results from GWR4
"""
import unittest
import pickle as pk
from pysal.contrib.gwr.gwr import GWR
from pysal.contrib.gwr.sel_bw import Sel_BW
from pysal.contrib.gwr.diagnostics import get_AICc, get_AIC, get_BIC, get_CV
from pysal.contrib.glm.family import Gaussian, Poisson, Binomial
import numpy as np
import pysal
class TestGWRGaussian(unittest.TestCase):
def setUp(self):
data = pysal.open(pysal.examples.get_path('GData_utm.csv'))
self.coords = zip(data.by_col('X'), data.by_col('Y'))
self.y = np.array(data.by_col('PctBach')).reshape((-1,1))
rural = np.array(data.by_col('PctRural')).reshape((-1,1))
pov = np.array(data.by_col('PctPov')).reshape((-1,1))
black = np.array(data.by_col('PctBlack')).reshape((-1,1))
self.X = np.hstack([rural, pov, black])
self.BS_F = pysal.open(pysal.examples.get_path('georgia_BS_F_listwise.csv'))
self.BS_NN = pysal.open(pysal.examples.get_path('georgia_BS_NN_listwise.csv'))
self.GS_F = pysal.open(pysal.examples.get_path('georgia_GS_F_listwise.csv'))
self.GS_NN = pysal.open(pysal.examples.get_path('georgia_GS_NN_listwise.csv'))
self.MGWR = pk.load(open(pysal.examples.get_path('FB.p'), 'r'))
self.XB = pk.load(open(pysal.examples.get_path('XB.p'), 'r'))
self.err = pk.load(open(pysal.examples.get_path('err.p'), 'r'))
def test_BS_F(self):
est_Int = self.BS_F.by_col(' est_Intercept')
se_Int = self.BS_F.by_col(' se_Intercept')
t_Int = self.BS_F.by_col(' t_Intercept')
est_rural = self.BS_F.by_col(' est_PctRural')
se_rural = self.BS_F.by_col(' se_PctRural')
t_rural = self.BS_F.by_col(' t_PctRural')
est_pov = self.BS_F.by_col(' est_PctPov')
se_pov = self.BS_F.by_col(' se_PctPov')
t_pov = self.BS_F.by_col(' t_PctPov')
est_black = self.BS_F.by_col(' est_PctBlack')
se_black = self.BS_F.by_col(' se_PctBlack')
t_black = self.BS_F.by_col(' t_PctBlack')
yhat = self.BS_F.by_col(' yhat')
res = np.array(self.BS_F.by_col(' residual'))
std_res = np.array(self.BS_F.by_col(' std_residual')).reshape((-1,1))
localR2 = np.array(self.BS_F.by_col(' localR2')).reshape((-1,1))
inf = np.array(self.BS_F.by_col(' influence')).reshape((-1,1))
cooksD = np.array(self.BS_F.by_col(' CooksD')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=209267.689, fixed=True)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
CV = get_CV(rslt)
self.assertAlmostEquals(np.floor(AICc), 894.0)
self.assertAlmostEquals(np.floor(AIC), 890.0)
self.assertAlmostEquals(np.floor(BIC), 944.0)
self.assertAlmostEquals(np.round(CV,2), 18.25)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)
np.testing.assert_allclose(est_rural, rslt.params[:,1], rtol=1e-04)
np.testing.assert_allclose(se_rural, rslt.bse[:,1], rtol=1e-04)
np.testing.assert_allclose(t_rural, rslt.tvalues[:,1], rtol=1e-04)
np.testing.assert_allclose(est_pov, rslt.params[:,2], rtol=1e-04)
np.testing.assert_allclose(se_pov, rslt.bse[:,2], rtol=1e-04)
np.testing.assert_allclose(t_pov, rslt.tvalues[:,2], rtol=1e-04)
np.testing.assert_allclose(est_black, rslt.params[:,3], rtol=1e-02)
np.testing.assert_allclose(se_black, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_black, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-05)
np.testing.assert_allclose(res, rslt.resid_response, rtol=1e-04)
np.testing.assert_allclose(std_res, rslt.std_res, rtol=1e-04)
np.testing.assert_allclose(localR2, rslt.localR2, rtol=1e-05)
np.testing.assert_allclose(inf, rslt.influ, rtol=1e-04)
np.testing.assert_allclose(cooksD, rslt.cooksD, rtol=1e-00)
def test_BS_NN(self):
est_Int = self.BS_NN.by_col(' est_Intercept')
se_Int = self.BS_NN.by_col(' se_Intercept')
t_Int = self.BS_NN.by_col(' t_Intercept')
est_rural = self.BS_NN.by_col(' est_PctRural')
se_rural = self.BS_NN.by_col(' se_PctRural')
t_rural = self.BS_NN.by_col(' t_PctRural')
est_pov = self.BS_NN.by_col(' est_PctPov')
se_pov = self.BS_NN.by_col(' se_PctPov')
t_pov = self.BS_NN.by_col(' t_PctPov')
est_black = self.BS_NN.by_col(' est_PctBlack')
se_black = self.BS_NN.by_col(' se_PctBlack')
t_black = self.BS_NN.by_col(' t_PctBlack')
yhat = self.BS_NN.by_col(' yhat')
res = np.array(self.BS_NN.by_col(' residual'))
std_res = np.array(self.BS_NN.by_col(' std_residual')).reshape((-1,1))
localR2 = np.array(self.BS_NN.by_col(' localR2')).reshape((-1,1))
inf = np.array(self.BS_NN.by_col(' influence')).reshape((-1,1))
cooksD = np.array(self.BS_NN.by_col(' CooksD')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=90.000, fixed=False)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
CV = get_CV(rslt)
self.assertAlmostEquals(np.floor(AICc), 896.0)
self.assertAlmostEquals(np.floor(AIC), 892.0)
self.assertAlmostEquals(np.floor(BIC), 941.0)
self.assertAlmostEquals(np.around(CV, 2), 19.19)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)
np.testing.assert_allclose(est_rural, rslt.params[:,1], rtol=1e-04)
np.testing.assert_allclose(se_rural, rslt.bse[:,1], rtol=1e-04)
np.testing.assert_allclose(t_rural, rslt.tvalues[:,1], rtol=1e-04)
np.testing.assert_allclose(est_pov, rslt.params[:,2], rtol=1e-04)
np.testing.assert_allclose(se_pov, rslt.bse[:,2], rtol=1e-04)
np.testing.assert_allclose(t_pov, rslt.tvalues[:,2], rtol=1e-04)
np.testing.assert_allclose(est_black, rslt.params[:,3], rtol=1e-02)
np.testing.assert_allclose(se_black, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_black, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-05)
np.testing.assert_allclose(res, rslt.resid_response, rtol=1e-04)
np.testing.assert_allclose(std_res, rslt.std_res, rtol=1e-04)
np.testing.assert_allclose(localR2, rslt.localR2, rtol=1e-05)
np.testing.assert_allclose(inf, rslt.influ, rtol=1e-04)
np.testing.assert_allclose(cooksD, rslt.cooksD, rtol=1e-00)
def test_GS_F(self):
est_Int = self.GS_F.by_col(' est_Intercept')
se_Int = self.GS_F.by_col(' se_Intercept')
t_Int = self.GS_F.by_col(' t_Intercept')
est_rural = self.GS_F.by_col(' est_PctRural')
se_rural = self.GS_F.by_col(' se_PctRural')
t_rural = self.GS_F.by_col(' t_PctRural')
est_pov = self.GS_F.by_col(' est_PctPov')
se_pov = self.GS_F.by_col(' se_PctPov')
t_pov = self.GS_F.by_col(' t_PctPov')
est_black = self.GS_F.by_col(' est_PctBlack')
se_black = self.GS_F.by_col(' se_PctBlack')
t_black = self.GS_F.by_col(' t_PctBlack')
yhat = self.GS_F.by_col(' yhat')
res = np.array(self.GS_F.by_col(' residual'))
std_res = np.array(self.GS_F.by_col(' std_residual')).reshape((-1,1))
localR2 = np.array(self.GS_F.by_col(' localR2')).reshape((-1,1))
inf = np.array(self.GS_F.by_col(' influence')).reshape((-1,1))
cooksD = np.array(self.GS_F.by_col(' CooksD')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=87308.298,
kernel='gaussian', fixed=True)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
CV = get_CV(rslt)
self.assertAlmostEquals(np.floor(AICc), 895.0)
self.assertAlmostEquals(np.floor(AIC), 890.0)
self.assertAlmostEquals(np.floor(BIC), 943.0)
self.assertAlmostEquals(np.around(CV, 2), 18.21)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)
np.testing.assert_allclose(est_rural, rslt.params[:,1], rtol=1e-04)
np.testing.assert_allclose(se_rural, rslt.bse[:,1], rtol=1e-04)
np.testing.assert_allclose(t_rural, rslt.tvalues[:,1], rtol=1e-04)
np.testing.assert_allclose(est_pov, rslt.params[:,2], rtol=1e-04)
np.testing.assert_allclose(se_pov, rslt.bse[:,2], rtol=1e-04)
np.testing.assert_allclose(t_pov, rslt.tvalues[:,2], rtol=1e-04)
np.testing.assert_allclose(est_black, rslt.params[:,3], rtol=1e-02)
np.testing.assert_allclose(se_black, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_black, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-05)
np.testing.assert_allclose(res, rslt.resid_response, rtol=1e-04)
np.testing.assert_allclose(std_res, rslt.std_res, rtol=1e-04)
np.testing.assert_allclose(localR2, rslt.localR2, rtol=1e-05)
np.testing.assert_allclose(inf, rslt.influ, rtol=1e-04)
np.testing.assert_allclose(cooksD, rslt.cooksD, rtol=1e-00)
def test_GS_NN(self):
est_Int = self.GS_NN.by_col(' est_Intercept')
se_Int = self.GS_NN.by_col(' se_Intercept')
t_Int = self.GS_NN.by_col(' t_Intercept')
est_rural = self.GS_NN.by_col(' est_PctRural')
se_rural = self.GS_NN.by_col(' se_PctRural')
t_rural = self.GS_NN.by_col(' t_PctRural')
est_pov = self.GS_NN.by_col(' est_PctPov')
se_pov = self.GS_NN.by_col(' se_PctPov')
t_pov = self.GS_NN.by_col(' t_PctPov')
est_black = self.GS_NN.by_col(' est_PctBlack')
se_black = self.GS_NN.by_col(' se_PctBlack')
t_black = self.GS_NN.by_col(' t_PctBlack')
yhat = self.GS_NN.by_col(' yhat')
res = np.array(self.GS_NN.by_col(' residual'))
std_res = np.array(self.GS_NN.by_col(' std_residual')).reshape((-1,1))
localR2 = np.array(self.GS_NN.by_col(' localR2')).reshape((-1,1))
inf = np.array(self.GS_NN.by_col(' influence')).reshape((-1,1))
cooksD = np.array(self.GS_NN.by_col(' CooksD')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=49.000,
kernel='gaussian', fixed=False)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
CV = get_CV(rslt)
self.assertAlmostEquals(np.floor(AICc), 896)
self.assertAlmostEquals(np.floor(AIC), 894.0)
self.assertAlmostEquals(np.floor(BIC), 922.0)
self.assertAlmostEquals(np.around(CV, 2), 17.91)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-04)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-04)
np.testing.assert_allclose(est_rural, rslt.params[:,1], rtol=1e-04)
np.testing.assert_allclose(se_rural, rslt.bse[:,1], rtol=1e-04)
np.testing.assert_allclose(t_rural, rslt.tvalues[:,1], rtol=1e-04)
np.testing.assert_allclose(est_pov, rslt.params[:,2], rtol=1e-04)
np.testing.assert_allclose(se_pov, rslt.bse[:,2], rtol=1e-04)
np.testing.assert_allclose(t_pov, rslt.tvalues[:,2], rtol=1e-04)
np.testing.assert_allclose(est_black, rslt.params[:,3], rtol=1e-02)
np.testing.assert_allclose(se_black, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_black, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-05)
np.testing.assert_allclose(res, rslt.resid_response, rtol=1e-04)
np.testing.assert_allclose(std_res, rslt.std_res, rtol=1e-04)
np.testing.assert_allclose(localR2, rslt.localR2, rtol=1e-05)
np.testing.assert_allclose(inf, rslt.influ, rtol=1e-04)
np.testing.assert_allclose(cooksD, rslt.cooksD, rtol=1e-00)
def test_Prediction(self):
        coords = np.array(self.coords)
index = np.arange(len(self.y))
test = index[-10:]
X_test = self.X[test]
coords_test = list(coords[test])
model = GWR(self.coords, self.y, self.X, 93, family=Gaussian(),
fixed=False, kernel='bisquare')
results = model.predict(coords_test, X_test)
params = np.array([22.77198, -0.10254, -0.215093, -0.01405,
19.10531, -0.094177, -0.232529, 0.071913,
19.743421, -0.080447, -0.30893, 0.083206,
17.505759, -0.078919, -0.187955, 0.051719,
27.747402, -0.165335, -0.208553, 0.004067,
26.210627, -0.138398, -0.360514, 0.072199,
18.034833, -0.077047, -0.260556, 0.084319,
28.452802, -0.163408, -0.14097, -0.063076,
22.353095, -0.103046, -0.226654, 0.002992,
18.220508, -0.074034, -0.309812, 0.108636]).reshape((10,4))
np.testing.assert_allclose(params, results.params, rtol=1e-03)
bse = np.array([2.080166, 0.021462, 0.102954, 0.049627,
2.536355, 0.022111, 0.123857, 0.051917,
1.967813, 0.019716, 0.102562, 0.054918,
2.463219, 0.021745, 0.110297, 0.044189,
1.556056, 0.019513, 0.12764, 0.040315,
1.664108, 0.020114, 0.131208, 0.041613,
2.5835, 0.021481, 0.113158, 0.047243,
1.709483, 0.019752, 0.116944, 0.043636,
1.958233, 0.020947, 0.09974, 0.049821,
2.276849, 0.020122, 0.107867, 0.047842]).reshape((10,4))
np.testing.assert_allclose(bse, results.bse, rtol=1e-03)
tvalues = np.array([10.947193, -4.777659, -2.089223, -0.283103,
7.532584, -4.259179, -1.877395, 1.385161,
10.033179, -4.080362, -3.012133, 1.515096,
7.106862, -3.629311, -1.704079, 1.17042,
17.831878, -8.473156, -1.633924, 0.100891,
15.750552, -6.880725, -2.74765, 1.734978,
6.980774, -3.586757, -2.302575, 1.784818,
16.644095, -8.273001, -1.205451, -1.445501,
11.414933, -4.919384, -2.272458, 0.060064,
8.00251, -3.679274, -2.872176, 2.270738]).reshape((10,4))
np.testing.assert_allclose(tvalues, results.tvalues, rtol=1e-03)
localR2 = np.array([[ 0.53068693],
[ 0.59582647],
[ 0.59700925],
[ 0.45769954],
[ 0.54634509],
[ 0.5494828 ],
[ 0.55159604],
[ 0.55634237],
[ 0.53903842],
[ 0.55884954]])
np.testing.assert_allclose(localR2, results.localR2, rtol=1e-05)
predictions = np.array([[ 10.51695514],
[ 9.93321992],
[ 8.92473026],
[ 5.47350219],
[ 8.61756585],
[ 12.8141851 ],
[ 5.55619405],
[ 12.63004172],
[ 8.70638418],
[ 8.17582599]])
np.testing.assert_allclose(predictions, results.predictions, rtol=1e-05)
class TestGWRPoisson(unittest.TestCase):
def setUp(self):
data = pysal.open(pysal.examples.get_path('Tokyomortality.csv'), mode='Ur')
self.coords = zip(data.by_col('X_CENTROID'), data.by_col('Y_CENTROID'))
self.y = np.array(data.by_col('db2564')).reshape((-1,1))
self.off = np.array(data.by_col('eb2564')).reshape((-1,1))
OCC = np.array(data.by_col('OCC_TEC')).reshape((-1,1))
OWN = np.array(data.by_col('OWNH')).reshape((-1,1))
POP = np.array(data.by_col('POP65')).reshape((-1,1))
UNEMP = np.array(data.by_col('UNEMP')).reshape((-1,1))
self.X = np.hstack([OCC,OWN,POP,UNEMP])
self.BS_F = pysal.open(pysal.examples.get_path('tokyo_BS_F_listwise.csv'))
self.BS_NN = pysal.open(pysal.examples.get_path('tokyo_BS_NN_listwise.csv'))
self.GS_F = pysal.open(pysal.examples.get_path('tokyo_GS_F_listwise.csv'))
self.GS_NN = pysal.open(pysal.examples.get_path('tokyo_GS_NN_listwise.csv'))
self.BS_NN_OFF = pysal.open(pysal.examples.get_path('tokyo_BS_NN_OFF_listwise.csv'))
def test_BS_F(self):
est_Int = self.BS_F.by_col(' est_Intercept')
se_Int = self.BS_F.by_col(' se_Intercept')
t_Int = self.BS_F.by_col(' t_Intercept')
est_OCC = self.BS_F.by_col(' est_OCC_TEC')
se_OCC = self.BS_F.by_col(' se_OCC_TEC')
t_OCC = self.BS_F.by_col(' t_OCC_TEC')
est_OWN = self.BS_F.by_col(' est_OWNH')
se_OWN = self.BS_F.by_col(' se_OWNH')
t_OWN = self.BS_F.by_col(' t_OWNH')
est_POP = self.BS_F.by_col(' est_POP65')
se_POP = self.BS_F.by_col(' se_POP65')
t_POP = self.BS_F.by_col(' t_POP65')
est_UNEMP = self.BS_F.by_col(' est_UNEMP')
se_UNEMP = self.BS_F.by_col(' se_UNEMP')
t_UNEMP = self.BS_F.by_col(' t_UNEMP')
yhat = self.BS_F.by_col(' yhat')
pdev = np.array(self.BS_F.by_col(' localpdev')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=26029.625, family=Poisson(),
kernel='bisquare', fixed=True)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 13294.0)
self.assertAlmostEquals(np.floor(AIC), 13247.0)
self.assertAlmostEquals(np.floor(BIC), 13485.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-05)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-03)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-03)
np.testing.assert_allclose(est_OCC, rslt.params[:,1], rtol=1e-04)
np.testing.assert_allclose(se_OCC, rslt.bse[:,1], rtol=1e-02)
np.testing.assert_allclose(t_OCC, rslt.tvalues[:,1], rtol=1e-02)
np.testing.assert_allclose(est_OWN, rslt.params[:,2], rtol=1e-04)
np.testing.assert_allclose(se_OWN, rslt.bse[:,2], rtol=1e-03)
np.testing.assert_allclose(t_OWN, rslt.tvalues[:,2], rtol=1e-03)
np.testing.assert_allclose(est_POP, rslt.params[:,3], rtol=1e-04)
np.testing.assert_allclose(se_POP, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_POP, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(est_UNEMP, rslt.params[:,4], rtol=1e-04)
np.testing.assert_allclose(se_UNEMP, rslt.bse[:,4], rtol=1e-02)
np.testing.assert_allclose(t_UNEMP, rslt.tvalues[:,4], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-05)
np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
def test_BS_NN(self):
est_Int = self.BS_NN.by_col(' est_Intercept')
se_Int = self.BS_NN.by_col(' se_Intercept')
t_Int = self.BS_NN.by_col(' t_Intercept')
est_OCC = self.BS_NN.by_col(' est_OCC_TEC')
se_OCC = self.BS_NN.by_col(' se_OCC_TEC')
t_OCC = self.BS_NN.by_col(' t_OCC_TEC')
est_OWN = self.BS_NN.by_col(' est_OWNH')
se_OWN = self.BS_NN.by_col(' se_OWNH')
t_OWN = self.BS_NN.by_col(' t_OWNH')
est_POP = self.BS_NN.by_col(' est_POP65')
se_POP = self.BS_NN.by_col(' se_POP65')
t_POP = self.BS_NN.by_col(' t_POP65')
est_UNEMP = self.BS_NN.by_col(' est_UNEMP')
se_UNEMP = self.BS_NN.by_col(' se_UNEMP')
t_UNEMP = self.BS_NN.by_col(' t_UNEMP')
yhat = self.BS_NN.by_col(' yhat')
pdev = np.array(self.BS_NN.by_col(' localpdev')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=50, family=Poisson(),
kernel='bisquare', fixed=False)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 13285)
self.assertAlmostEquals(np.floor(AIC), 13259.0)
self.assertAlmostEquals(np.floor(BIC), 13442.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-02)
np.testing.assert_allclose(est_OCC, rslt.params[:,1], rtol=1e-03)
np.testing.assert_allclose(se_OCC, rslt.bse[:,1], rtol=1e-02)
np.testing.assert_allclose(t_OCC, rslt.tvalues[:,1], rtol=1e-02)
np.testing.assert_allclose(est_OWN, rslt.params[:,2], rtol=1e-04)
np.testing.assert_allclose(se_OWN, rslt.bse[:,2], rtol=1e-02)
np.testing.assert_allclose(t_OWN, rslt.tvalues[:,2], rtol=1e-02)
np.testing.assert_allclose(est_POP, rslt.params[:,3], rtol=1e-03)
np.testing.assert_allclose(se_POP, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_POP, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(est_UNEMP, rslt.params[:,4], rtol=1e-04)
np.testing.assert_allclose(se_UNEMP, rslt.bse[:,4], rtol=1e-02)
np.testing.assert_allclose(t_UNEMP, rslt.tvalues[:,4], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-04)
np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
def test_BS_NN_Offset(self):
est_Int = self.BS_NN_OFF.by_col(' est_Intercept')
se_Int = self.BS_NN_OFF.by_col(' se_Intercept')
t_Int = self.BS_NN_OFF.by_col(' t_Intercept')
est_OCC = self.BS_NN_OFF.by_col(' est_OCC_TEC')
se_OCC = self.BS_NN_OFF.by_col(' se_OCC_TEC')
t_OCC = self.BS_NN_OFF.by_col(' t_OCC_TEC')
est_OWN = self.BS_NN_OFF.by_col(' est_OWNH')
se_OWN = self.BS_NN_OFF.by_col(' se_OWNH')
t_OWN = self.BS_NN_OFF.by_col(' t_OWNH')
est_POP = self.BS_NN_OFF.by_col(' est_POP65')
se_POP = self.BS_NN_OFF.by_col(' se_POP65')
t_POP = self.BS_NN_OFF.by_col(' t_POP65')
est_UNEMP = self.BS_NN_OFF.by_col(' est_UNEMP')
se_UNEMP = self.BS_NN_OFF.by_col(' se_UNEMP')
t_UNEMP = self.BS_NN_OFF.by_col(' t_UNEMP')
yhat = self.BS_NN_OFF.by_col(' yhat')
pdev = np.array(self.BS_NN_OFF.by_col(' localpdev')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=100, offset=self.off, family=Poisson(),
kernel='bisquare', fixed=False)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 367.0)
self.assertAlmostEquals(np.floor(AIC), 361.0)
self.assertAlmostEquals(np.floor(BIC), 451.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-02,
atol=1e-02)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02, atol=1e-02)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-01,
atol=1e-02)
np.testing.assert_allclose(est_OCC, rslt.params[:,1], rtol=1e-03,
atol=1e-02)
np.testing.assert_allclose(se_OCC, rslt.bse[:,1], rtol=1e-02, atol=1e-02)
np.testing.assert_allclose(t_OCC, rslt.tvalues[:,1], rtol=1e-01,
atol=1e-02)
np.testing.assert_allclose(est_OWN, rslt.params[:,2], rtol=1e-04,
atol=1e-02)
np.testing.assert_allclose(se_OWN, rslt.bse[:,2], rtol=1e-02, atol=1e-02)
np.testing.assert_allclose(t_OWN, rslt.tvalues[:,2], rtol=1e-01,
atol=1e-02)
np.testing.assert_allclose(est_POP, rslt.params[:,3], rtol=1e-03,
atol=1e-02)
np.testing.assert_allclose(se_POP, rslt.bse[:,3], rtol=1e-02, atol=1e-02)
np.testing.assert_allclose(t_POP, rslt.tvalues[:,3], rtol=1e-01,
atol=1e-02)
np.testing.assert_allclose(est_UNEMP, rslt.params[:,4], rtol=1e-04,
atol=1e-02)
np.testing.assert_allclose(se_UNEMP, rslt.bse[:,4], rtol=1e-02,
atol=1e-02)
np.testing.assert_allclose(t_UNEMP, rslt.tvalues[:,4], rtol=1e-01,
atol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-03, atol=1e-02)
np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-04, atol=1e-02)
def test_GS_F(self):
est_Int = self.GS_F.by_col(' est_Intercept')
se_Int = self.GS_F.by_col(' se_Intercept')
t_Int = self.GS_F.by_col(' t_Intercept')
est_OCC = self.GS_F.by_col(' est_OCC_TEC')
se_OCC = self.GS_F.by_col(' se_OCC_TEC')
t_OCC = self.GS_F.by_col(' t_OCC_TEC')
est_OWN = self.GS_F.by_col(' est_OWNH')
se_OWN = self.GS_F.by_col(' se_OWNH')
t_OWN = self.GS_F.by_col(' t_OWNH')
est_POP = self.GS_F.by_col(' est_POP65')
se_POP = self.GS_F.by_col(' se_POP65')
t_POP = self.GS_F.by_col(' t_POP65')
est_UNEMP = self.GS_F.by_col(' est_UNEMP')
se_UNEMP = self.GS_F.by_col(' se_UNEMP')
t_UNEMP = self.GS_F.by_col(' t_UNEMP')
yhat = self.GS_F.by_col(' yhat')
pdev = np.array(self.GS_F.by_col(' localpdev')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=8764.474, family=Poisson(),
kernel='gaussian', fixed=True)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 11283.0)
self.assertAlmostEquals(np.floor(AIC), 11211.0)
self.assertAlmostEquals(np.floor(BIC), 11497.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-03)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-02)
np.testing.assert_allclose(est_OCC, rslt.params[:,1], rtol=1e-03)
np.testing.assert_allclose(se_OCC, rslt.bse[:,1], rtol=1e-02)
np.testing.assert_allclose(t_OCC, rslt.tvalues[:,1], rtol=1e-02)
np.testing.assert_allclose(est_OWN, rslt.params[:,2], rtol=1e-03)
np.testing.assert_allclose(se_OWN, rslt.bse[:,2], rtol=1e-02)
np.testing.assert_allclose(t_OWN, rslt.tvalues[:,2], rtol=1e-02)
np.testing.assert_allclose(est_POP, rslt.params[:,3], rtol=1e-02)
np.testing.assert_allclose(se_POP, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_POP, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(est_UNEMP, rslt.params[:,4], rtol=1e-02)
np.testing.assert_allclose(se_UNEMP, rslt.bse[:,4], rtol=1e-02)
np.testing.assert_allclose(t_UNEMP, rslt.tvalues[:,4], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-04)
np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
def test_GS_NN(self):
est_Int = self.GS_NN.by_col(' est_Intercept')
se_Int = self.GS_NN.by_col(' se_Intercept')
t_Int = self.GS_NN.by_col(' t_Intercept')
est_OCC = self.GS_NN.by_col(' est_OCC_TEC')
se_OCC = self.GS_NN.by_col(' se_OCC_TEC')
t_OCC = self.GS_NN.by_col(' t_OCC_TEC')
est_OWN = self.GS_NN.by_col(' est_OWNH')
se_OWN = self.GS_NN.by_col(' se_OWNH')
t_OWN = self.GS_NN.by_col(' t_OWNH')
est_POP = self.GS_NN.by_col(' est_POP65')
se_POP = self.GS_NN.by_col(' se_POP65')
t_POP = self.GS_NN.by_col(' t_POP65')
est_UNEMP = self.GS_NN.by_col(' est_UNEMP')
se_UNEMP = self.GS_NN.by_col(' se_UNEMP')
t_UNEMP = self.GS_NN.by_col(' t_UNEMP')
yhat = self.GS_NN.by_col(' yhat')
pdev = np.array(self.GS_NN.by_col(' localpdev')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=50, family=Poisson(),
kernel='gaussian', fixed=False)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 21070.0)
self.assertAlmostEquals(np.floor(AIC), 21069.0)
self.assertAlmostEquals(np.floor(BIC), 21111.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-04)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-02)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-02)
np.testing.assert_allclose(est_OCC, rslt.params[:,1], rtol=1e-03)
np.testing.assert_allclose(se_OCC, rslt.bse[:,1], rtol=1e-02)
np.testing.assert_allclose(t_OCC, rslt.tvalues[:,1], rtol=1e-02)
np.testing.assert_allclose(est_OWN, rslt.params[:,2], rtol=1e-04)
np.testing.assert_allclose(se_OWN, rslt.bse[:,2], rtol=1e-02)
np.testing.assert_allclose(t_OWN, rslt.tvalues[:,2], rtol=1e-02)
np.testing.assert_allclose(est_POP, rslt.params[:,3], rtol=1e-02)
np.testing.assert_allclose(se_POP, rslt.bse[:,3], rtol=1e-02)
np.testing.assert_allclose(t_POP, rslt.tvalues[:,3], rtol=1e-02)
np.testing.assert_allclose(est_UNEMP, rslt.params[:,4], rtol=1e-02)
np.testing.assert_allclose(se_UNEMP, rslt.bse[:,4], rtol=1e-02)
np.testing.assert_allclose(t_UNEMP, rslt.tvalues[:,4], rtol=1e-02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-04)
np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
class TestGWRBinomial(unittest.TestCase):
def setUp(self):
data = pysal.open(pysal.examples.get_path('landslides.csv'))
self.coords = zip(data.by_col('X'), data.by_col('Y'))
self.y = np.array(data.by_col('Landslid')).reshape((-1,1))
ELEV = np.array(data.by_col('Elev')).reshape((-1,1))
SLOPE = np.array(data.by_col('Slope')).reshape((-1,1))
SIN = np.array(data.by_col('SinAspct')).reshape((-1,1))
COS = np.array(data.by_col('CosAspct')).reshape((-1,1))
SOUTH = np.array(data.by_col('AbsSouth')).reshape((-1,1))
DIST = np.array(data.by_col('DistStrm')).reshape((-1,1))
self.X = np.hstack([ELEV, SLOPE, SIN, COS, SOUTH, DIST])
self.BS_F = pysal.open(pysal.examples.get_path('clearwater_BS_F_listwise.csv'))
self.BS_NN = pysal.open(pysal.examples.get_path('clearwater_BS_NN_listwise.csv'))
self.GS_F = pysal.open(pysal.examples.get_path('clearwater_GS_F_listwise.csv'))
self.GS_NN = pysal.open(pysal.examples.get_path('clearwater_GS_NN_listwise.csv'))
def test_BS_F(self):
est_Int = self.BS_F.by_col(' est_Intercept')
se_Int = self.BS_F.by_col(' se_Intercept')
t_Int = self.BS_F.by_col(' t_Intercept')
est_elev = self.BS_F.by_col(' est_Elev')
se_elev = self.BS_F.by_col(' se_Elev')
t_elev = self.BS_F.by_col(' t_Elev')
est_slope = self.BS_F.by_col(' est_Slope')
se_slope = self.BS_F.by_col(' se_Slope')
t_slope = self.BS_F.by_col(' t_Slope')
est_sin = self.BS_F.by_col(' est_SinAspct')
se_sin = self.BS_F.by_col(' se_SinAspct')
t_sin = self.BS_F.by_col(' t_SinAspct')
est_cos = self.BS_F.by_col(' est_CosAspct')
se_cos = self.BS_F.by_col(' se_CosAspct')
t_cos = self.BS_F.by_col(' t_CosAspct')
est_south = self.BS_F.by_col(' est_AbsSouth')
se_south = self.BS_F.by_col(' se_AbsSouth')
t_south = self.BS_F.by_col(' t_AbsSouth')
est_strm = self.BS_F.by_col(' est_DistStrm')
se_strm = self.BS_F.by_col(' se_DistStrm')
t_strm = self.BS_F.by_col(' t_DistStrm')
yhat = self.BS_F.by_col(' yhat')
pdev = np.array(self.BS_F.by_col(' localpdev')).reshape((-1,1))
model = GWR(self.coords, self.y, self.X, bw=19642.170, family=Binomial(),
kernel='bisquare', fixed=True)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 275.0)
self.assertAlmostEquals(np.floor(AIC), 271.0)
self.assertAlmostEquals(np.floor(BIC), 349.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)
np.testing.assert_allclose(est_elev, rslt.params[:,1], rtol=1e-00)
np.testing.assert_allclose(se_elev, rslt.bse[:,1], rtol=1e-00)
np.testing.assert_allclose(t_elev, rslt.tvalues[:,1], rtol=1e-00)
np.testing.assert_allclose(est_slope, rslt.params[:,2], rtol=1e-00)
np.testing.assert_allclose(se_slope, rslt.bse[:,2], rtol=1e-00)
np.testing.assert_allclose(t_slope, rslt.tvalues[:,2], rtol=1e-00)
np.testing.assert_allclose(est_sin, rslt.params[:,3], rtol=1e01)
np.testing.assert_allclose(se_sin, rslt.bse[:,3], rtol=1e01)
np.testing.assert_allclose(t_sin, rslt.tvalues[:,3], rtol=1e01)
np.testing.assert_allclose(est_cos, rslt.params[:,4], rtol=1e01)
np.testing.assert_allclose(se_cos, rslt.bse[:,4], rtol=1e01)
np.testing.assert_allclose(t_cos, rslt.tvalues[:,4], rtol=1e01)
np.testing.assert_allclose(est_south, rslt.params[:,5], rtol=1e01)
np.testing.assert_allclose(se_south, rslt.bse[:,5], rtol=1e01)
np.testing.assert_allclose(t_south, rslt.tvalues[:,5], rtol=1e01)
np.testing.assert_allclose(est_strm, rslt.params[:,6], rtol=1e02)
np.testing.assert_allclose(se_strm, rslt.bse[:,6], rtol=1e01)
np.testing.assert_allclose(t_strm, rslt.tvalues[:,6], rtol=1e02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-01)
#This test fails - likely due to compound rounding errors
#Has been tested using statsmodels.family calculations and
#code from Jing's python version, which both yield the same
#np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
def test_BS_NN(self):
est_Int = self.BS_NN.by_col(' est_Intercept')
se_Int = self.BS_NN.by_col(' se_Intercept')
t_Int = self.BS_NN.by_col(' t_Intercept')
est_elev = self.BS_NN.by_col(' est_Elev')
se_elev = self.BS_NN.by_col(' se_Elev')
t_elev = self.BS_NN.by_col(' t_Elev')
est_slope = self.BS_NN.by_col(' est_Slope')
se_slope = self.BS_NN.by_col(' se_Slope')
t_slope = self.BS_NN.by_col(' t_Slope')
est_sin = self.BS_NN.by_col(' est_SinAspct')
se_sin = self.BS_NN.by_col(' se_SinAspct')
t_sin = self.BS_NN.by_col(' t_SinAspct')
est_cos = self.BS_NN.by_col(' est_CosAspct')
se_cos = self.BS_NN.by_col(' se_CosAspct')
t_cos = self.BS_NN.by_col(' t_CosAspct')
est_south = self.BS_NN.by_col(' est_AbsSouth')
se_south = self.BS_NN.by_col(' se_AbsSouth')
t_south = self.BS_NN.by_col(' t_AbsSouth')
est_strm = self.BS_NN.by_col(' est_DistStrm')
se_strm = self.BS_NN.by_col(' se_DistStrm')
t_strm = self.BS_NN.by_col(' t_DistStrm')
yhat = self.BS_NN.by_col(' yhat')
pdev = self.BS_NN.by_col(' localpdev')
model = GWR(self.coords, self.y, self.X, bw=158, family=Binomial(),
kernel='bisquare', fixed=False)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 277.0)
self.assertAlmostEquals(np.floor(AIC), 271.0)
self.assertAlmostEquals(np.floor(BIC), 358.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)
np.testing.assert_allclose(est_elev, rslt.params[:,1], rtol=1e-00)
np.testing.assert_allclose(se_elev, rslt.bse[:,1], rtol=1e-00)
np.testing.assert_allclose(t_elev, rslt.tvalues[:,1], rtol=1e-00)
np.testing.assert_allclose(est_slope, rslt.params[:,2], rtol=1e-00)
np.testing.assert_allclose(se_slope, rslt.bse[:,2], rtol=1e-00)
np.testing.assert_allclose(t_slope, rslt.tvalues[:,2], rtol=1e-00)
np.testing.assert_allclose(est_sin, rslt.params[:,3], rtol=1e01)
np.testing.assert_allclose(se_sin, rslt.bse[:,3], rtol=1e01)
np.testing.assert_allclose(t_sin, rslt.tvalues[:,3], rtol=1e01)
np.testing.assert_allclose(est_cos, rslt.params[:,4], rtol=1e01)
np.testing.assert_allclose(se_cos, rslt.bse[:,4], rtol=1e01)
np.testing.assert_allclose(t_cos, rslt.tvalues[:,4], rtol=1e01)
np.testing.assert_allclose(est_south, rslt.params[:,5], rtol=1e01)
np.testing.assert_allclose(se_south, rslt.bse[:,5], rtol=1e01)
np.testing.assert_allclose(t_south, rslt.tvalues[:,5], rtol=1e01)
np.testing.assert_allclose(est_strm, rslt.params[:,6], rtol=1e03)
np.testing.assert_allclose(se_strm, rslt.bse[:,6], rtol=1e01)
np.testing.assert_allclose(t_strm, rslt.tvalues[:,6], rtol=1e03)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-01)
#This test fails - likely due to compound rounding errors
#Has been tested using statsmodels.family calculations and
#code from Jing's python version, which both yield the same
#np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
def test_GS_F(self):
est_Int = self.GS_F.by_col(' est_Intercept')
se_Int = self.GS_F.by_col(' se_Intercept')
t_Int = self.GS_F.by_col(' t_Intercept')
est_elev = self.GS_F.by_col(' est_Elev')
se_elev = self.GS_F.by_col(' se_Elev')
t_elev = self.GS_F.by_col(' t_Elev')
est_slope = self.GS_F.by_col(' est_Slope')
se_slope = self.GS_F.by_col(' se_Slope')
t_slope = self.GS_F.by_col(' t_Slope')
est_sin = self.GS_F.by_col(' est_SinAspct')
se_sin = self.GS_F.by_col(' se_SinAspct')
t_sin = self.GS_F.by_col(' t_SinAspct')
est_cos = self.GS_F.by_col(' est_CosAspct')
se_cos = self.GS_F.by_col(' se_CosAspct')
t_cos = self.GS_F.by_col(' t_CosAspct')
est_south = self.GS_F.by_col(' est_AbsSouth')
se_south = self.GS_F.by_col(' se_AbsSouth')
t_south = self.GS_F.by_col(' t_AbsSouth')
est_strm = self.GS_F.by_col(' est_DistStrm')
se_strm = self.GS_F.by_col(' se_DistStrm')
t_strm = self.GS_F.by_col(' t_DistStrm')
yhat = self.GS_F.by_col(' yhat')
pdev = self.GS_F.by_col(' localpdev')
model = GWR(self.coords, self.y, self.X, bw=8929.061, family=Binomial(),
kernel='gaussian', fixed=True)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 276.0)
self.assertAlmostEquals(np.floor(AIC), 272.0)
self.assertAlmostEquals(np.floor(BIC), 341.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)
np.testing.assert_allclose(est_elev, rslt.params[:,1], rtol=1e-00)
np.testing.assert_allclose(se_elev, rslt.bse[:,1], rtol=1e-00)
np.testing.assert_allclose(t_elev, rslt.tvalues[:,1], rtol=1e-00)
np.testing.assert_allclose(est_slope, rslt.params[:,2], rtol=1e-00)
np.testing.assert_allclose(se_slope, rslt.bse[:,2], rtol=1e-00)
np.testing.assert_allclose(t_slope, rslt.tvalues[:,2], rtol=1e-00)
np.testing.assert_allclose(est_sin, rslt.params[:,3], rtol=1e01)
np.testing.assert_allclose(se_sin, rslt.bse[:,3], rtol=1e01)
np.testing.assert_allclose(t_sin, rslt.tvalues[:,3], rtol=1e01)
np.testing.assert_allclose(est_cos, rslt.params[:,4], rtol=1e01)
np.testing.assert_allclose(se_cos, rslt.bse[:,4], rtol=1e01)
np.testing.assert_allclose(t_cos, rslt.tvalues[:,4], rtol=1e01)
np.testing.assert_allclose(est_south, rslt.params[:,5], rtol=1e01)
np.testing.assert_allclose(se_south, rslt.bse[:,5], rtol=1e01)
np.testing.assert_allclose(t_south, rslt.tvalues[:,5], rtol=1e01)
np.testing.assert_allclose(est_strm, rslt.params[:,6], rtol=1e02)
np.testing.assert_allclose(se_strm, rslt.bse[:,6], rtol=1e01)
np.testing.assert_allclose(t_strm, rslt.tvalues[:,6], rtol=1e02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-01)
#This test fails - likely due to compound rounding errors
#Has been tested using statsmodels.family calculations and
#code from Jing's python version, which both yield the same
#np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
def test_GS_NN(self):
est_Int = self.GS_NN.by_col(' est_Intercept')
se_Int = self.GS_NN.by_col(' se_Intercept')
t_Int = self.GS_NN.by_col(' t_Intercept')
est_elev = self.GS_NN.by_col(' est_Elev')
se_elev = self.GS_NN.by_col(' se_Elev')
t_elev = self.GS_NN.by_col(' t_Elev')
est_slope = self.GS_NN.by_col(' est_Slope')
se_slope = self.GS_NN.by_col(' se_Slope')
t_slope = self.GS_NN.by_col(' t_Slope')
est_sin = self.GS_NN.by_col(' est_SinAspct')
se_sin = self.GS_NN.by_col(' se_SinAspct')
t_sin = self.GS_NN.by_col(' t_SinAspct')
est_cos = self.GS_NN.by_col(' est_CosAspct')
se_cos = self.GS_NN.by_col(' se_CosAspct')
t_cos = self.GS_NN.by_col(' t_CosAspct')
est_south = self.GS_NN.by_col(' est_AbsSouth')
se_south = self.GS_NN.by_col(' se_AbsSouth')
t_south = self.GS_NN.by_col(' t_AbsSouth')
est_strm = self.GS_NN.by_col(' est_DistStrm')
se_strm = self.GS_NN.by_col(' se_DistStrm')
t_strm = self.GS_NN.by_col(' t_DistStrm')
yhat = self.GS_NN.by_col(' yhat')
pdev = self.GS_NN.by_col(' localpdev')
model = GWR(self.coords, self.y, self.X, bw=64, family=Binomial(),
kernel='gaussian', fixed=False)
rslt = model.fit()
AICc = get_AICc(rslt)
AIC = get_AIC(rslt)
BIC = get_BIC(rslt)
self.assertAlmostEquals(np.floor(AICc), 276.0)
self.assertAlmostEquals(np.floor(AIC), 273.0)
self.assertAlmostEquals(np.floor(BIC), 331.0)
np.testing.assert_allclose(est_Int, rslt.params[:,0], rtol=1e-00)
np.testing.assert_allclose(se_Int, rslt.bse[:,0], rtol=1e-00)
np.testing.assert_allclose(t_Int, rslt.tvalues[:,0], rtol=1e-00)
np.testing.assert_allclose(est_elev, rslt.params[:,1], rtol=1e-00)
np.testing.assert_allclose(se_elev, rslt.bse[:,1], rtol=1e-00)
np.testing.assert_allclose(t_elev, rslt.tvalues[:,1], rtol=1e-00)
np.testing.assert_allclose(est_slope, rslt.params[:,2], rtol=1e-00)
np.testing.assert_allclose(se_slope, rslt.bse[:,2], rtol=1e-00)
np.testing.assert_allclose(t_slope, rslt.tvalues[:,2], rtol=1e-00)
np.testing.assert_allclose(est_sin, rslt.params[:,3], rtol=1e01)
np.testing.assert_allclose(se_sin, rslt.bse[:,3], rtol=1e01)
np.testing.assert_allclose(t_sin, rslt.tvalues[:,3], rtol=1e01)
np.testing.assert_allclose(est_cos, rslt.params[:,4], rtol=1e01)
np.testing.assert_allclose(se_cos, rslt.bse[:,4], rtol=1e01)
np.testing.assert_allclose(t_cos, rslt.tvalues[:,4], rtol=1e01)
np.testing.assert_allclose(est_south, rslt.params[:,5], rtol=1e01)
np.testing.assert_allclose(se_south, rslt.bse[:,5], rtol=1e01)
np.testing.assert_allclose(t_south, rslt.tvalues[:,5], rtol=1e01)
np.testing.assert_allclose(est_strm, rslt.params[:,6], rtol=1e02)
np.testing.assert_allclose(se_strm, rslt.bse[:,6], rtol=1e01)
np.testing.assert_allclose(t_strm, rslt.tvalues[:,6], rtol=1e02)
np.testing.assert_allclose(yhat, rslt.mu, rtol=1e-00)
#This test fails - likely due to compound rounding errors
#Has been tested using statsmodels.family calculations and
#code from Jing's python version, which both yield the same
#np.testing.assert_allclose(pdev, rslt.pDev, rtol=1e-05)
if __name__ == '__main__':
unittest.main()
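# --- Fit pattern sketch (not part of the original tests) ---
# Every test above follows the same recipe; a minimal standalone version,
# with placeholder data and bandwidth:
#   model = GWR(coords, y, X, bw=90.0, family=Gaussian(),
#               kernel='bisquare', fixed=False)
#   rslt = model.fit()
#   print(get_AICc(rslt), rslt.params.shape)  # diagnostics, local estimates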
|
ljwolf/pysal
|
pysal/contrib/gwr/tests/test_gwr.py
|
Python
|
bsd-3-clause
| 45,280 | 0.009408 |
from __future__ import division, absolute_import, unicode_literals
from qtpy import QtWidgets
from qtpy.QtCore import Qt
from . import cmds
from . import gitcmds
from . import hotkeys
from . import icons
from . import qtutils
from . import utils
from .i18n import N_
from .widgets import completion
from .widgets import defs
from .widgets import filetree
from .widgets import standard
def diff_commits(parent, a, b, context=None):
"""Show a dialog for diffing two commits"""
dlg = Difftool(parent, a=a, b=b, context=context)
dlg.show()
dlg.raise_()
return dlg.exec_() == QtWidgets.QDialog.Accepted
def diff_expression(parent, expr,
create_widget=False, hide_expr=False,
focus_tree=False, context=None):
"""Show a diff dialog for diff expressions"""
dlg = Difftool(parent,
expr=expr, hide_expr=hide_expr,
focus_tree=focus_tree, context=context)
if create_widget:
return dlg
dlg.show()
dlg.raise_()
return dlg.exec_() == QtWidgets.QDialog.Accepted
class Difftool(standard.Dialog):
def __init__(self, parent, a=None, b=None, expr=None, title=None,
hide_expr=False, focus_tree=False, context=None):
"""Show files with differences and launch difftool"""
standard.Dialog.__init__(self, parent=parent)
self.a = a
self.b = b
self.diff_expr = expr
self.context = context
if title is None:
title = N_('git-cola diff')
self.setWindowTitle(title)
self.setWindowModality(Qt.WindowModal)
self.expr = completion.GitRefLineEdit(parent=self)
if expr is not None:
self.expr.setText(expr)
if expr is None or hide_expr:
self.expr.hide()
self.tree = filetree.FileTree(parent=self)
self.diff_button = qtutils.create_button(text=N_('Compare'),
icon=icons.diff(),
enabled=False,
default=True)
self.diff_button.setShortcut(hotkeys.DIFF)
self.diff_all_button = qtutils.create_button(text=N_('Compare All'),
icon=icons.diff())
self.edit_button = qtutils.edit_button()
self.edit_button.setShortcut(hotkeys.EDIT)
self.close_button = qtutils.close_button()
self.button_layout = qtutils.hbox(defs.no_margin, defs.spacing,
self.close_button,
qtutils.STRETCH,
self.edit_button,
self.diff_all_button,
self.diff_button)
self.main_layout = qtutils.vbox(defs.margin, defs.spacing,
self.expr, self.tree,
self.button_layout)
self.setLayout(self.main_layout)
self.tree.itemSelectionChanged.connect(self.tree_selection_changed)
self.tree.itemDoubleClicked.connect(self.tree_double_clicked)
self.tree.up.connect(self.focus_input)
self.expr.textChanged.connect(self.text_changed)
self.expr.activated.connect(self.focus_tree)
self.expr.down.connect(self.focus_tree)
self.expr.enter.connect(self.focus_tree)
qtutils.connect_button(self.diff_button, self.diff)
qtutils.connect_button(self.diff_all_button,
lambda: self.diff(dir_diff=True))
qtutils.connect_button(self.edit_button, self.edit)
qtutils.connect_button(self.close_button, self.close)
qtutils.add_action(self, 'Focus Input', self.focus_input, hotkeys.FOCUS)
qtutils.add_action(self, 'Diff All', lambda: self.diff(dir_diff=True),
hotkeys.CTRL_ENTER, hotkeys.CTRL_RETURN)
qtutils.add_close_action(self)
self.init_state(None, self.resize_widget, parent)
self.refresh()
if focus_tree:
self.focus_tree()
def resize_widget(self, parent):
"""Set the initial size of the widget"""
width, height = qtutils.default_size(parent, 720, 420)
self.resize(width, height)
def focus_tree(self):
"""Focus the files tree"""
self.tree.setFocus()
def focus_input(self):
"""Focus the expression input"""
self.expr.setFocus()
def text_changed(self, txt):
self.diff_expr = txt
self.refresh()
def refresh(self):
"""Redo the diff when the expression changes"""
if self.diff_expr is not None:
self.diff_arg = utils.shell_split(self.diff_expr)
elif self.b is None:
self.diff_arg = [self.a]
else:
self.diff_arg = [self.a, self.b]
self.refresh_filenames()
def refresh_filenames(self):
if self.a and self.b is None:
filenames = gitcmds.diff_index_filenames(self.a)
else:
filenames = gitcmds.diff(self.diff_arg)
self.tree.set_filenames(filenames, select=True)
def tree_selection_changed(self):
has_selection = self.tree.has_selection()
self.diff_button.setEnabled(has_selection)
self.diff_all_button.setEnabled(has_selection)
def tree_double_clicked(self, item, column):
path = self.tree.filename_from_item(item)
left, right = self._left_right_args()
cmds.difftool_launch(left=left, right=right, paths=[path],
context=self.context)
def diff(self, dir_diff=False):
paths = self.tree.selected_filenames()
left, right = self._left_right_args()
cmds.difftool_launch(left=left, right=right, paths=paths,
dir_diff=dir_diff, context=self.context)
def _left_right_args(self):
if self.diff_arg:
left = self.diff_arg[0]
else:
left = None
if len(self.diff_arg) > 1:
right = self.diff_arg[1]
else:
right = None
return (left, right)
def edit(self):
paths = self.tree.selected_filenames()
cmds.do(cmds.Edit, paths)
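# --- Usage sketch (not part of the original module) ---
# Launching the dialog from an existing Qt parent widget; the revisions are
# placeholders.
#   diff_commits(parent_widget, 'HEAD~1', 'HEAD')
#   diff_expression(parent_widget, 'HEAD~3..HEAD', focus_tree=True)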
|
Vdragon/git-cola
|
cola/difftool.py
|
Python
|
gpl-2.0
| 6,355 | 0.000157 |
import tensorflow as tf
'''
Model for sequence classification and localization with weighted loss
'''
class DeepLocalizationWeightedLossVariableLengthDeeper:
def get_name(self):
return "deep_localization_weighted_loss_variable_length_6"
def input_placeholders(self):
inputs_placeholder = tf.placeholder(tf.float32, shape=[None, 128, 256], name="inputs")
labels_placeholder = tf.placeholder(tf.float32, shape=[None, 5, 11], name="labels")
positions_placeholder = tf.placeholder(tf.float32, shape=[None, 4], name="positions")
keep_prob_placeholder = tf.placeholder(tf.float32)
is_training_placeholder = tf.placeholder(tf.bool)
return inputs_placeholder, labels_placeholder, positions_placeholder, keep_prob_placeholder, is_training_placeholder
def inference(self, input, keep_prob, is_training):
with tf.name_scope("inference"):
input = tf.reshape(input, [-1, 128, 256, 1])
conv1 = self._convolutional(input, [10, 10, 1, 8])
relu1 = self._relu(conv1)
max_pool1 = self._max_pooling(relu1, [1, 2, 2, 1], [1, 2, 2, 1])
conv2 = self._convolutional(max_pool1, [8, 8, 8, 14])
relu2 = self._relu(conv2)
max_pool2 = self._max_pooling(relu2, [1, 2, 2, 1], [1, 2, 2, 1])
conv3 = self._convolutional(max_pool2, [6, 6, 14, 20])
relu3 = self._relu(conv3)
max_pool3 = self._max_pooling(relu3, [1, 2, 2, 1], [1, 2, 2, 1])
conv4 = self._convolutional(max_pool3, [4, 4, 20, 24])
relu4 = self._relu(conv4)
max_pool4 = self._max_pooling(relu4, [1, 2, 2, 1], [1, 2, 2, 1])
conv5 = self._convolutional(max_pool4, [2, 2, 24, 32])
relu5 = self._relu(conv5)
max_pool5 = self._max_pooling(relu5, [1, 2, 2, 1], [1, 2, 2, 1])
conv6 = self._convolutional(max_pool5, [2, 2, 32, 128])
relu6 = self._relu(conv6)
max_pool6 = self._max_pooling(relu6, [1, 2, 2, 1], [1, 2, 2, 1])
reshaped = tf.reshape(max_pool6, [-1, 1024])
logits = []
gru = tf.contrib.rnn.GRUCell(576)
state = gru.zero_state(tf.shape(reshaped)[0], tf.float32)
with tf.variable_scope("RNN"):
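                # One GRU step per predicted digit; weights are shared across the 5 steps via reuse_variables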
for i in range(5):
if i > 0: tf.get_variable_scope().reuse_variables()
output, state = gru(reshaped, state)
number_logits = self._fully_connected(output, 576, 11)
logits.append(number_logits)
fc_position1 = self._fully_connected(reshaped, 1024, 768)
dropout_position_1 = tf.nn.dropout(fc_position1, keep_prob)
relu_position1 = self._relu(dropout_position_1)
fc_position2 = self._fully_connected(relu_position1, 768, 512)
dropout_position_2 = tf.nn.dropout(fc_position2, keep_prob)
relu_position2 = self._relu(dropout_position_2)
fc_position3 = self._fully_connected(relu_position2, 512, 256)
dropout_position_3 = tf.nn.dropout(fc_position3, keep_prob)
relu_position3 = self._relu(dropout_position_3)
fc_position4 = self._fully_connected(relu_position3, 256, 64)
dropout_position_4 = tf.nn.dropout(fc_position4, keep_prob)
relu_position4 = self._relu(dropout_position_4)
fc_position5 = self._fully_connected(relu_position4, 64, 32)
dropout_position_5 = tf.nn.dropout(fc_position5, keep_prob)
relu_position5 = self._relu(dropout_position_5)
predicted_positions = self._fully_connected(relu_position5, 32, 4)
return tf.stack(logits, axis=1), predicted_positions
def loss(self, logits, labels, predicted_positions, positions):
with tf.name_scope("loss"):
labels = tf.to_int64(labels)
cross_entropy = tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits, name="cross_entropy")
logits_loss = tf.reduce_mean(cross_entropy, name="cross_entropy_mean")
square_error = tf.square(positions - predicted_positions, name="square_error")
position_loss = tf.reduce_mean(square_error, name="square_error_mean")
total_loss = 1000 * logits_loss + position_loss
tf.summary.scalar("logits_loss", logits_loss)
tf.summary.scalar("positions_loss", position_loss)
tf.summary.scalar("total_loss", logits_loss + position_loss)
return {"logits_loss": logits_loss, "positions_loss": position_loss,
"total_loss": total_loss}
def training(self, loss, learning_rate):
with tf.name_scope("training"):
optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
train_operation = optimizer.minimize(loss)
return train_operation
def evaluation(self, logits, labels, predicted_positions, positions):
with tf.name_scope("evaluation"):
labels = tf.to_int64(labels)
labels = tf.argmax(labels, 2)
logits = tf.argmax(logits, 2)
difference = tf.subtract(labels, logits, name="sub")
character_errors = tf.count_nonzero(difference, axis=1, name="count_nonzero")
total_wrong_characters = tf.reduce_sum(character_errors)
total_characters = tf.to_int64(tf.size(labels))
total_correct_characters = total_characters - total_wrong_characters
corrects = tf.less_equal(character_errors, 0, name="is_zero")
position_error = tf.losses.mean_squared_error(positions, predicted_positions)
return self.tf_count(corrects,
True), corrects, logits, position_error, predicted_positions, total_correct_characters, total_characters
def tf_count(self, t, val):
elements_equal_to_value = tf.equal(t, val)
as_ints = tf.cast(elements_equal_to_value, tf.int32)
count = tf.reduce_sum(as_ints)
return count
def _fully_connected(self, input, size_in, size_out, name="fc"):
with tf.name_scope(name):
w = tf.Variable(tf.truncated_normal([size_in, size_out], stddev=0.1), name="W")
b = tf.Variable(tf.constant(0.1, shape=[size_out]), name="b")
act = tf.matmul(input, w) + b
return act
def _convolutional(self, input, dimensions, name="conv"):
with tf.name_scope(name):
w = tf.Variable(tf.truncated_normal(dimensions, stddev=0.1), name="W")
b = tf.Variable(tf.constant(0.1, shape=[dimensions[3]]), name="b")
return tf.nn.conv2d(input, w, strides=[1, 1, 1, 1], padding='SAME') + b
def _max_pooling(self, input, ksize, strides, name="max_pooling"):
with tf.name_scope(name):
return tf.nn.max_pool(input, ksize, strides, padding="SAME")
def _relu(self, input, name="relu"):
with tf.name_scope(name):
return tf.nn.relu(input)
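# Illustrative wiring sketch (session setup and the training loop are omitted;
# the learning rate below is an assumption):
#   model = DeepLocalizationWeightedLossVariableLengthDeeper()
#   inputs, labels, positions, keep_prob, is_training = model.input_placeholders()
#   logits, pred_positions = model.inference(inputs, keep_prob, is_training)
#   losses = model.loss(logits, labels, pred_positions, positions)
#   train_op = model.training(losses["total_loss"], learning_rate=1e-4)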
|
thePetrMarek/SequenceOfDigitsRecognition
|
sequences_of_variable_length/deep_localization_weighted_loss_variable_length_deeper.py
|
Python
|
mit
| 7,054 | 0.00241 |
# -------------------------------------------------------------------#
# Released under the MIT license (https://opensource.org/licenses/MIT)
# Contact: mrinal.haloi11@gmail.com
# Enhancement Copyright 2016, Mrinal Haloi
# -------------------------------------------------------------------#
import random
import os
import tensorflow as tf
from core.solver import Solver
from env.environment import GymEnvironment, SimpleGymEnvironment
from config.config import cfg
# Set random seed
tf.set_random_seed(123)
random.seed(12345)
def main(_):
gpu_options = tf.GPUOptions(per_process_gpu_memory_fraction=0.4)
with tf.Session(config=tf.ConfigProto(gpu_options=gpu_options)) as sess:
if cfg.env_type == 'simple':
env = SimpleGymEnvironment(cfg)
else:
env = GymEnvironment(cfg)
if not os.path.exists('/tmp/model_dir'):
os.mkdir('/tmp/model_dir')
solver = Solver(cfg, env, sess, '/tmp/model_dir')
solver.train()
if __name__ == '__main__':
tf.app.run()
|
n3011/deeprl
|
train_dqn.py
|
Python
|
mit
| 1,042 | 0.00096 |
# framework/modules/get_modem_info.py
#
# Copyright 2011 Spencer J. McIntyre <SMcIntyre [at] SecureState [dot] net>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
from c1218.errors import C1218ReadTableError
from c1219.access.telephone import C1219TelephoneAccess
from c1219.data import C1219_CALL_STATUS_FLAGS
from framework.templates import TermineterModuleOptical
class Module(TermineterModuleOptical):
def __init__(self, *args, **kwargs):
TermineterModuleOptical.__init__(self, *args, **kwargs)
self.version = 1
self.author = ['Spencer McIntyre']
self.description = 'Get Information About The Integrated Modem'
self.detailed_description = 'This module reads various C1219 tables from decade 90 to gather information about the integrated modem. If successfully parsed, useful information will be displayed.'
def run(self):
conn = self.frmwk.serial_connection
logger = self.logger
if not self.frmwk.serial_login(): # don't alert on failed logins
logger.warning('meter login failed')
try:
telephone_ctl = C1219TelephoneAccess(conn)
except C1218ReadTableError:
self.frmwk.print_error('Could not read necessary tables, a modem is not likely present')
return
conn.stop()
info = {}
info['Can Answer'] = telephone_ctl.can_answer
info['Extended Status Available'] = telephone_ctl.use_extended_status
info['Number of Originating Phone Numbers'] = telephone_ctl.nbr_originate_numbers
info['PSEM Identity'] = telephone_ctl.psem_identity
if telephone_ctl.global_bit_rate:
info['Global Bit Rate'] = telephone_ctl.global_bit_rate
else:
info['Originate Bit Rate'] = telephone_ctl.originate_bit_rate
info['Answer Bit Rate'] = telephone_ctl.answer_bit_rate
info['Dial Delay'] = telephone_ctl.dial_delay
if len(telephone_ctl.prefix_number):
info['Prefix Number'] = telephone_ctl.prefix_number
keys = info.keys()
keys.sort()
self.frmwk.print_status('General Information:')
fmt_string = " {0:.<38}.{1}"
for key in keys:
self.frmwk.print_status(fmt_string.format(key, info[key]))
self.frmwk.print_status('Stored Telephone Numbers:')
fmt_string = " {0:<6} {1:<16} {2:<32}"
self.frmwk.print_status(fmt_string.format('Index', 'Number', 'Last Status'))
self.frmwk.print_status(fmt_string.format('-----', '------', '-----------'))
for idx, entry in telephone_ctl.originating_numbers.items():
self.frmwk.print_status(fmt_string.format(entry['idx'], entry['number'].strip(), C1219_CALL_STATUS_FLAGS[entry['status']]))
|
firebitsbr/termineter
|
framework/modules/get_modem_info.py
|
Python
|
gpl-3.0
| 3,174 | 0.015123 |
from typing import Union
from ray.rllib.models.action_dist import ActionDistribution
from ray.rllib.utils.annotations import override
from ray.rllib.utils.exploration.exploration import TensorType
from ray.rllib.utils.exploration.soft_q import SoftQ
from ray.rllib.utils.framework import try_import_tf, try_import_torch
tf1, tf, tfv = try_import_tf()
torch, _ = try_import_torch()
class SlateSoftQ(SoftQ):
@override(SoftQ)
def get_exploration_action(
self,
action_distribution: ActionDistribution,
timestep: Union[int, TensorType],
explore: bool = True,
):
assert (
self.framework == "torch"
), "ERROR: SlateSoftQ only supports torch so far!"
cls = type(action_distribution)
# Re-create the action distribution with the correct temperature
# applied.
action_distribution = cls(
action_distribution.inputs, self.model, temperature=self.temperature
)
batch_size = action_distribution.inputs.size()[0]
action_logp = torch.zeros(batch_size, dtype=torch.float)
self.last_timestep = timestep
# Explore.
if explore:
# Return stochastic sample over (q-value) logits.
action = action_distribution.sample()
# Return the deterministic "sample" (argmax) over (q-value) logits.
else:
action = action_distribution.deterministic_sample()
return action, action_logp
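# Illustrative configuration sketch (assumes RLlib's standard exploration_config
# mechanism; the temperature value is an assumption):
#   config["exploration_config"] = {"type": "SlateSoftQ", "temperature": 1.0}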
|
ray-project/ray
|
rllib/utils/exploration/slate_soft_q.py
|
Python
|
apache-2.0
| 1,483 | 0.000674 |
import numpy as np
class PriceHistoryPack(object):
def __init__(self, input_seq_len, num_features, target_seq_len):
super(PriceHistoryPack, self).__init__()
self.sku_ids = []
self.XX = np.empty((0, input_seq_len, num_features))
self.YY = np.empty((0, target_seq_len))
self.sequence_lens = []
self.seq_mask = np.empty((0, input_seq_len))
def update(self, sku_id, inputs, targets, input_seq_len):
self.sku_ids.append(sku_id)
inputs_len = len(inputs)
self.sequence_lens.append(inputs_len)
# build current mask with zeros and ones
cur_mask = np.zeros(input_seq_len)
        cur_mask[:inputs_len] = 1  # only the first inputs_len entries are valid (set to one)
xx_padded = np.pad(inputs, ((0, input_seq_len - inputs_len), (0, 0)), mode='constant', constant_values=0.)
# here targets do NOT need to be padded because we do not have a sequence to sequence model
# yy_padded = np.pad(targets, (0, series_max_len - len(targets)), mode='constant', constant_values=0.)
assert len(xx_padded) == input_seq_len
self.XX = np.vstack((self.XX, xx_padded[np.newaxis]))
self.YY = np.vstack((self.YY, targets[np.newaxis]))
self.seq_mask = np.vstack((self.seq_mask, cur_mask[np.newaxis]))
def get_data(self, fraction=None, random_state=None):
# from sklearn.model_selection import train_test_split
skuIds, xx, yy, seqLens, seqMask = np.array(self.sku_ids), self.XX, self.YY, np.array(
self.sequence_lens), self.seq_mask
if fraction is None:
return skuIds, xx, yy, seqLens, seqMask
else:
random_state = np.random if random_state is None else random_state
cur_len = len(skuIds)
assert cur_len == len(xx) and cur_len == len(yy) and cur_len == len(seqLens) and cur_len == len(seqMask)
random_inds = random_state.choice(cur_len, int(cur_len * fraction))
return skuIds[random_inds], xx[random_inds], yy[random_inds], seqLens[random_inds], seqMask[random_inds]
def save(self, filepath, fraction=None, random_state=None):
if fraction is None:
np.savez(filepath, sku_ids=self.sku_ids, inputs=self.XX, targets=self.YY,
sequence_lengths=self.sequence_lens,
sequence_masks=self.seq_mask)
else:
skuIds, xx, yy, seqLens, seqMask = self.get_data(fraction=fraction, random_state=random_state)
np.savez(filepath, sku_ids=skuIds, inputs=xx, targets=yy, sequence_lengths=seqLens, sequence_masks=seqMask)
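# Illustrative usage sketch (all shapes and values below are assumptions):
#   pack = PriceHistoryPack(input_seq_len=60, num_features=7, target_seq_len=30)
#   pack.update(sku_id=123, inputs=np.random.rand(42, 7),
#               targets=np.random.rand(30), input_seq_len=60)
#   sku_ids, xx, yy, seq_lens, seq_mask = pack.get_data()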
|
pligor/predicting-future-product-prices
|
04_time_series_prediction/data_providers/price_history_pack.py
|
Python
|
agpl-3.0
| 2,640 | 0.003788 |
import windows.generated_def as gdef
def test_format_charactere_values():
assert gdef.FC_ZERO == 0
assert gdef.FC_PAD == 0x5c
assert gdef.FC_PAD == 0x5c
assert gdef.FC_SPLIT_DEREFERENCE == 0x74
    assert gdef.FC_SPLIT_DIV_2 == 0x75
assert gdef.FC_HARD_STRUCT == 0xb1
assert gdef.FC_TRANSMIT_AS_PTR == 0xb2
assert gdef.FC_END_OF_UNIVERSE == 0xba
|
hakril/PythonForWindows
|
tests/test_midl.py
|
Python
|
bsd-3-clause
| 380 | 0.013158 |
# -*- coding: utf-8 -*-
import re
from module.plugins.internal.MultiHoster import MultiHoster, create_getInfo
from module.plugins.internal.SimpleHoster import seconds_to_midnight
class HighWayMe(MultiHoster):
__name__ = "HighWayMe"
__type__ = "hoster"
__version__ = "0.15"
__status__ = "testing"
__pattern__ = r'https?://.+high-way\.my'
__config__ = [("use_premium" , "bool", "Use premium account if available" , True),
("revertfailed", "bool", "Revert to standard download if fails", True)]
__description__ = """High-Way.me multi-hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("EvolutionClip", "evolutionclip@live.de")]
def setup(self):
self.chunk_limit = 4
def check_errors(self):
        if self.html.get('code') == 302:  #@NOTE: This is not working. It should be: if 302 Moved Temporarily then... But I don't know how to implement it.
self.account.relogin()
self.retry()
elif "<code>9</code>" in self.html:
self.offline()
elif "downloadlimit" in self.html:
self.log_warning(_("Reached maximum connctions"))
self.retry(5, 60, _("Reached maximum connctions"))
elif "trafficlimit" in self.html:
self.log_warning(_("Reached daily limit"))
self.retry(wait=seconds_to_midnight(), msg="Daily limit for this host reached")
elif "<code>8</code>" in self.html:
self.log_warning(_("Hoster temporarily unavailable, waiting 1 minute and retry"))
self.retry(5, 60, _("Hoster is temporarily unavailable"))
def handle_premium(self, pyfile):
for _i in xrange(5):
self.html = self.load("https://high-way.me/load.php",
get={'link': self.pyfile.url})
if self.html:
self.log_debug("JSON data: " + self.html)
break
else:
self.log_info(_("Unable to get API data, waiting 1 minute and retry"))
self.retry(5, 60, _("Unable to get API data"))
self.check_errors()
try:
self.pyfile.name = re.search(r'<name>([^<]+)</name>', self.html).group(1)
except AttributeError:
self.pyfile.name = ""
try:
self.pyfile.size = re.search(r'<size>(\d+)</size>', self.html).group(1)
except AttributeError:
self.pyfile.size = 0
self.link = re.search(r'<download>([^<]+)</download>', self.html).group(1)
getInfo = create_getInfo(HighWayMe)
|
jansohn/pyload
|
module/plugins/hoster/HighWayMe.py
|
Python
|
gpl-3.0
| 2,588 | 0.008114 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'ActivityMigrationProgress'
db.create_table('activity_activitymigrationprogress', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('last_migrated_id', self.gf('django.db.models.fields.IntegerField')()),
))
db.send_create_signal('activity', ['ActivityMigrationProgress'])
def backwards(self, orm):
# Deleting model 'ActivityMigrationProgress'
db.delete_table('activity_activitymigrationprogress')
models = {
'activity.activitymigrationprogress': {
'Meta': {'object_name': 'ActivityMigrationProgress'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'last_migrated_id': ('django.db.models.fields.IntegerField', [], {})
},
'activity.activityrecord': {
'Meta': {'object_name': 'ActivityRecord'},
'copied_from': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['activity.ActivityRecord']", 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime(2016, 5, 19, 0, 0)', 'db_index': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '16', 'blank': 'True'}),
'related_obj_id': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'activity'", 'null': 'True', 'to': "orm['teams.Team']"}),
'type': ('codefield.CodeField', [], {}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'activity'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'activity'", 'null': 'True', 'to': "orm['videos.Video']"}),
'video_language_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '16', 'blank': 'True'})
},
'activity.urledit': {
'Meta': {'object_name': 'URLEdit'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'new_url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'blank': 'True'}),
'old_url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'blank': 'True'})
},
'activity.videodeletion': {
'Meta': {'object_name': 'VideoDeletion'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'url': ('django.db.models.fields.URLField', [], {'max_length': '512', 'blank': 'True'})
},
'auth.customuser': {
'Meta': {'object_name': 'CustomUser', '_ormbases': ['auth.User']},
'autoplay_preferences': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'award_points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'biography': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'can_send_messages': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'created_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'created_users'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'full_name': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '63', 'blank': 'True'}),
'homepage': ('django.db.models.fields.URLField', [], {'max_length': '200', 'blank': 'True'}),
'is_partner': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_ip': ('django.db.models.fields.IPAddressField', [], {'max_length': '15', 'null': 'True', 'blank': 'True'}),
'notify_by_email': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'notify_by_message': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Partner']", 'null': 'True', 'blank': 'True'}),
'pay_rate_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '3', 'blank': 'True'}),
'picture': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'blank': 'True'}),
'preferred_language': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'show_tutorial': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'user_ptr': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['auth.User']", 'unique': 'True', 'primary_key': 'True'}),
'valid_email': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'teams.application': {
'Meta': {'unique_together': "(('team', 'user', 'status'),)", 'object_name': 'Application'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'history': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'note': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'applications'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_applications'", 'to': "orm['auth.CustomUser']"})
},
'teams.partner': {
'Meta': {'object_name': 'Partner'},
'admins': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'managed_partners'", 'null': 'True', 'symmetrical': 'False', 'to': "orm['auth.CustomUser']"}),
'can_request_paid_captions': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'})
},
'teams.project': {
'Meta': {'unique_together': "(('team', 'name'), ('team', 'slug'))", 'object_name': 'Project'},
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'guidelines': ('django.db.models.fields.TextField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'order': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'blank': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'})
},
'teams.team': {
'Meta': {'ordering': "['name']", 'object_name': 'Team'},
'applicants': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'applicated_teams'", 'symmetrical': 'False', 'through': "orm['teams.Application']", 'to': "orm['auth.CustomUser']"}),
'application_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'auth_provider_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '24', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'deleted': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'header_html_text': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'highlight': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_moderated': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'last_notification_time': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'logo': ('utils.amazon.fields.S3EnabledImageField', [], {'default': "''", 'max_length': '100', 'thumb_sizes': '[(280, 100), (100, 100)]', 'blank': 'True'}),
'max_tasks_per_member': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'membership_policy': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '250'}),
'notify_interval': ('django.db.models.fields.CharField', [], {'default': "'D'", 'max_length': '1'}),
'page_content': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'partner': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'teams'", 'null': 'True', 'to': "orm['teams.Partner']"}),
'points': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'projects_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}),
'square_logo': ('utils.amazon.fields.S3EnabledImageField', [], {'default': "''", 'max_length': '100', 'thumb_sizes': '[(100, 100), (48, 48)]', 'blank': 'True'}),
'subtitle_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_assign_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'task_expiration': ('django.db.models.fields.PositiveIntegerField', [], {'default': 'None', 'null': 'True', 'blank': 'True'}),
'translate_policy': ('django.db.models.fields.IntegerField', [], {'default': '10'}),
'users': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'teams'", 'symmetrical': 'False', 'through': "orm['teams.TeamMember']", 'to': "orm['auth.CustomUser']"}),
'video': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'intro_for_teams'", 'null': 'True', 'to': "orm['videos.Video']"}),
'video_policy': ('django.db.models.fields.IntegerField', [], {'default': '1'}),
'videos': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['videos.Video']", 'through': "orm['teams.TeamVideo']", 'symmetrical': 'False'}),
'workflow_enabled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'workflow_type': ('django.db.models.fields.CharField', [], {'default': "'O'", 'max_length': '2'})
},
'teams.teammember': {
'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'projects_managed': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'managers'", 'symmetrical': 'False', 'to': "orm['teams.Project']"}),
'role': ('django.db.models.fields.CharField', [], {'default': "'contributor'", 'max_length': '16', 'db_index': 'True'}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'members'", 'to': "orm['teams.Team']"}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'team_members'", 'to': "orm['auth.CustomUser']"})
},
'teams.teamvideo': {
'Meta': {'unique_together': "(('team', 'video'),)", 'object_name': 'TeamVideo'},
'added_by': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True'}),
'all_languages': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'created': ('django.db.models.fields.DateTimeField', [], {'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'partner_id': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '100', 'blank': 'True'}),
'project': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Project']"}),
'team': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['teams.Team']"}),
'thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'null': 'True', 'thumb_sizes': '((288, 162), (120, 90))', 'blank': 'True'}),
'video': ('django.db.models.fields.related.OneToOneField', [], {'to': "orm['videos.Video']", 'unique': 'True'})
},
'videos.video': {
'Meta': {'object_name': 'Video'},
'allow_community_edits': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'allow_video_urls_edit': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'complete_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'duration': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'edited': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'featured': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'followers': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "'followed_videos'", 'blank': 'True', 'to': "orm['auth.CustomUser']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_public': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'languages_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'meta_1_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'meta_1_type': ('videos.metadata.MetadataTypeField', [], {'null': 'True', 'blank': 'True'}),
'meta_2_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'meta_2_type': ('videos.metadata.MetadataTypeField', [], {'null': 'True', 'blank': 'True'}),
'meta_3_content': ('videos.metadata.MetadataContentField', [], {'default': "''", 'max_length': '255', 'blank': 'True'}),
'meta_3_type': ('videos.metadata.MetadataTypeField', [], {'null': 'True', 'blank': 'True'}),
'moderated_by': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'moderating'", 'null': 'True', 'to': "orm['teams.Team']"}),
'primary_audio_language_code': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '16', 'blank': 'True'}),
's3_thumbnail': ('utils.amazon.fields.S3EnabledImageField', [], {'max_length': '100', 'thumb_sizes': '((480, 270), (288, 162), (120, 90))', 'blank': 'True'}),
'small_thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'thumbnail': ('django.db.models.fields.CharField', [], {'max_length': '500', 'blank': 'True'}),
'title': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.CustomUser']", 'null': 'True', 'blank': 'True'}),
'video_id': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
'view_count': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'was_subtitled': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'writelock_owner': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'writelock_owners'", 'null': 'True', 'to': "orm['auth.CustomUser']"}),
'writelock_session_key': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'writelock_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
}
}
complete_apps = ['activity']
|
wevoice/wesub
|
apps/activity/migrations/0002_auto__add_activitymigrationprogress.py
|
Python
|
agpl-3.0
| 21,651 | 0.00799 |
from nct.utils.alch import Session, LSession
from nct.domain.instrument import Instrument
import random
import functools
import time
from nct.deploy.deploy import Deployer
import cProfile
INSTRUMENTS = ['GOOGL.O', 'TWTR.N', 'GS.N', 'BAC.N', 'IBM.N']
def profile_method(file_name = None):
def gen_wrapper(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
f = func
cProfile.runctx('f(*args,**kwargs)', globals(), locals(), file_name)
print("Done writing")
return wrapper
return gen_wrapper
def time_it(func):
@functools.wraps(func)
def wrapper(*args, **kwargs):
start = time.time()
func(*args,**kwargs)
print("It took {}".format(time.time() - start))
return wrapper
HASH_CACHE = {}
@profile_method(r"c:\temp\instrument_123.out")
def do_a_bunch():
s = LSession()
name = INSTRUMENTS[int(random.random()*100)%len(INSTRUMENTS)]
instr_id = s.query(Instrument).filter_by(name=name).one().id
for _ in range(10000):
s.query(Instrument).get(instr_id)
s.close()
import sys
print (sys.version)
Deployer(LSession).deploy()
print ("Deployed")
for _ in range(1):
do_a_bunch()
|
kozyarchuk/NCT-workers
|
tests/perf/pscrap.py
|
Python
|
gpl-2.0
| 1,216 | 0.011513 |
# flake8: noqa
from __future__ import absolute_import, unicode_literals
import warnings
from feincms.extensions.ct_tracker import *
warnings.warn(
'Import %s from feincms.extensions.%s' % (__name__, __name__),
DeprecationWarning, stacklevel=2)
|
mcmaxwell/idea_digital_agency
|
idea/feincms/module/extensions/ct_tracker.py
|
Python
|
mit
| 255 | 0 |
# pyui2
# Copyright (C) 2001-2002 Sean C. Riley
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of version 2.1 of the GNU Lesser General Public
# License as published by the Free Software Foundation.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import pyui2
from pyui2.desktop import getDesktop, getTheme
from pyui2.panel import Panel
from pyui2.layouts import Much
class FormPanel(Panel):
"""A Panel that shows data about an object and allows it to be updated.
The "fields" argument is a list of data fields to populate the panel with. It
is in the format:
[ (type, name, label, vspan, data),
(type, name, label, vspan, data)
]
where type is one the fieldTypes below, vspan is the vertical height of the widget,
and data is speficic data for the type of form widget to be used.
"""
fieldTypes = [
"string",
"int",
"float",
"text",
"password",
"slider",
"checkbox",
"list",
"dropdownlist",
"label"
]
def __init__(self, fieldList):
self.fieldList = fieldList
Panel.__init__(self)
self.object = None
# setup layout
num = 0
span = 0
for t, n, l, vspan, d in fieldList:
span = span + vspan
self.setLayout(pyui2.layouts.TableLayoutManager( 3, span))
for fieldType, fieldName, fieldLabel, fieldSpan, fieldData in fieldList:
newLabel = Label(fieldLabel)
newWidget = self.createFormWidget(fieldType, fieldData)
self.addChild( newLabel, (0,num,1,fieldSpan) )
self.addChild( newWidget, (1,num,2,fieldSpan) )
self.__dict__["label_%s" % fieldName] = newLabel
self.__dict__["widget_%s" % fieldName] = newWidget
num = num + fieldSpan
self.pack()
def populate(self, object):
"""populate the data fields from the supplied object
"""
self.object = object
for fieldType, fieldName, fieldLabel, fieldSpan, fieldDefault in self.fieldList:
formWidget = self.__dict__["widget_%s" % fieldName]
value = object.__dict__.get(fieldName, None)
self.populateFormWidget(fieldType, formWidget, value)
self.setDirty(1)
def process(self):
"""This takes the data in the form and updates it into the source object.
This assumes that the form has already been populated...
"""
for fieldType, fieldName, fieldLabel, fieldSpan, fieldData in self.fieldList:
formWidget = self.__dict__["widget_%s" % fieldName]
self.processFormWidget(fieldType, fieldName, formWidget)
def createFormWidget(self, fieldType, fieldData):
"""Create the right kind of widget based on the fieldType.
"""
tmp = "create_%s" % fieldType
createMethod = getattr(self, tmp)
if not createMethod:
raise "No widget of type: %s" % tmp
return createMethod(fieldData)
def populateFormWidget(self, fieldType, formWidget, value):
tmp = "populate_%s" % fieldType
populateMethod = getattr(self, tmp)
if not populateMethod:
raise "No widget of type: %s" % fieldType
return populateMethod(formWidget, value)
def processFormWidget(self, fieldType, fieldName, formWidget):
if not self.object:
raise "No object to process to!"
tmp = "process_%s" % fieldType
processMethod = getattr(self, tmp)
if not processMethod:
raise "No process method for %s" % fieldType
return processMethod(formWidget, fieldName)
##### Widget Creation Methods. #####
def create_string(self, size):
return Edit("", size, self._pyui2Edit)
def create_password(self, size):
return Password("", size, self._pyui2Edit)
def create_int(self, dummy):
return NumberEdit("", 12, self._pyui2Edit, 0)
def create_float(self, dummy):
return NumberEdit("", 12, self._pyui2Edit, 0)
def create_text(self, size):
#NOTE: make this a LineDisplay that can be edited...
return Edit("", size, self._pyui2Edit)
def create_slider(self, range):
return SliderBar(self._pyui2Slide, range)
def create_checkbox(self, title):
return CheckBox(title, self._pyui2Check)
def create_list(self, dummy):
return ListBox()
def create_dropdownlist(self, numVisible):
return DropDownBox(numVisible)
def create_label(self, dummy):
return Label("")
###### Widget Populate Methods. #######
def populate_string(self, formWidget, value):
if not value:
formWidget.setText("None")
else:
formWidget.setText("%s" % value)
def populate_float(self, formWidget, value):
if not value:
formWidget.setText("None")
else:
formWidget.setText("%.2f" % value)
populate_password = populate_string
populate_int = populate_string
populate_text = populate_string
populate_label = populate_string
def populate_slider(self, formWidget, value):
formWidget.position = value
def populate_checkbox(self, formWidget, value):
formWidget.setCheck(value)
def populate_list(self, formWidget, items):
#TODO: make a way to get a text value for an item
formWidget.clear()
for item in items:
formWidget.addItem(repr(item), item)
populate_dropdownlist = populate_list
##### Widget Processing Methods #####
def process_string(self, formWidget, fieldName):
setattr(self.object, fieldName, formWidget.text)
process_text = process_string
process_password = process_string
def process_label(self, formWidget, fieldName):
pass
def process_list(self, formWidget, fieldName):
pass
process_dropdownlist = process_list
def process_slider(self, formWidget, fieldName):
setattr(self.object, fieldName, formWidget.position)
def process_checkbox(self, formWidget, fieldName):
setattr(self.object, fieldName, formWidget.checkState)
def process_int(self, formWidget, fieldName):
setattr(self.object, fieldName, int(formWidget.text) )
def process_float(self, formWidget, fieldName):
setattr(self.object, fieldName, float(formWidget.text) )
##### Widget handler methods ######
def _pyui2Slide(self, value):
#print "slid to ", value
pass
def _pyui2Edit(self, edit):
#print "changing value for ", edit
return 1
def _pyui2Check(self, value):
#print "checkbox hit"
pass
|
Ripsnorta/pyui2
|
widgets/formpanel.py
|
Python
|
lgpl-2.1
| 7,289 | 0.006311 |
#!/bin/python
import sys,os,sqlite3,time,ntpath,psycopg2,grp,pwd
from random import randint
class color:
PURPLE = '\033[95m'
CYAN = '\033[96m'
DARKCYAN = '\033[36m'
BLUE = '\033[94m'
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
END = '\033[0m'
def is_folder_belongs_to_postgres(folderPath):
stat_info = os.stat(folderPath)
uid = stat_info.st_uid
gid = stat_info.st_gid
user = pwd.getpwuid(uid)[0]
group = grp.getgrgid(gid)[0]
r = 0
if ((str(user).lower() == "postgres") and (str(group).lower() == "postgres")):
r = 1
return r
print ""
print " PG_CHAIN v2017.10 (MIT License)"
print " Created by Doron Yaary (pglivebackup@gmail.com)"
if ((len(sys.argv) == 2) and (str(sys.argv[1]).lower() == "--help")):
print color.BOLD + " ----------------------------------------------------------------------------------------" + color.END
print color.BOLD + " PGCHAIN Help" + color.END
print color.BOLD + " ----------------------------------------------------------------------------------------" + color.END
print ""
print " " + color.UNDERLINE + "General PGCHAIN Usage Syntax:" + color.END
print " ./pgchain.py [COMMAND] [ARGUMENTS]"
print ""
print " " + color.UNDERLINE + "Available Commands:" + color.END
print " " + color.BOLD + "base-backup " + color.END + " - Creates a base backup of the local PostgreSQL cluster."
print " " + color.BOLD + "get-wal " + color.END + " - Used in the 'archive_command' for WAL files automation."
print " " + color.BOLD + "list-chains " + color.END + " - Lists the available backup chains (base backup & WAL files)."
print " " + color.BOLD + "clear-history" + color.END + " - Releases old backup chains (and deletes them from disk)."
print " " + color.BOLD + "restore-chain" + color.END + " - Restores the requested chain to the local PostgreSQL cluster."
print " " + color.BOLD + "chain-info " + color.END + " - Displays information abou the requested chain."
print " " + color.BOLD + "show-config " + color.END + " - Displays the configuration information summary."
print " " + color.BOLD + "clear-log " + color.END + " - Clears (truncates) the log file."
print " " + color.BOLD + "create-repo " + color.END + " - Creates the PGCHAIN repository."
print " " + color.BOLD + "keep-recent " + color.END + " - Keeps the most recent backups (according to the given argument)."
print ""
sys.exit(0)
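# Illustrative setup sketch (paths are assumptions based on the default
# internal_home_folder above; adjust them for your installation):
#   In postgresql.conf, ship each completed WAL file to the current chain with:
#     archive_command = '/pg_chain/pgchain.py get-wal %p'
#   Typical workflow from the shell:
#     ./pgchain.py create-repo      # one-time repository creation
#     ./pgchain.py base-backup      # starts a new backup chain
#     ./pgchain.py list-chains      # inspect the available chains
#     ./pgchain.py clear-history 3  # delete the 3 oldest chains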
con = None
internal_db_path = ""
internal_pgctl_path = ""
internal_log_enabled = ""
# The following line needs to be changed by you (see installation notes on GitHub)
internal_home_folder = "/pg_chain/"
print ""
if ((len(sys.argv) == 2) and (str(sys.argv[1]).lower() == "create-repo")):
if (os.path.isfile(internal_home_folder + "pgchain.db") == True):
print " " + color.BOLD + "ERROR:" + color.END + " The repository file (pgchain.db) already exists."
print " INFO: If you plan on this name after all, please backup the current one and move it elsewhere first."
print ""
sys.exit(0)
print " " + color.BOLD + "Please Confirm:" + color.END
print " --------------------------------------------------------------"
print " This will create the repository database by using the 'sqlite3' command."
print " The repostiroty database will be created here: " + color.UNDERLINE + str(internal_home_folder) + "pgchain.db" + color.END
ap = raw_input(" Please approve (Y/N): ")
ap = ap.lower()
if (ap != "y"):
print ""
print " You did not approve - nothing changed/created. Quiting."
print ""
sys.exit(0)
print ""
sql = " "
sql = sql + "CREATE TABLE chain_sequence (seq_next_id int not null); "
sql = sql + "CREATE TABLE chains (chain_id int not null, base_backup_full_path varchar(512) not null, chain_start_timestamp datetime not null); "
sql = sql + "CREATE TABLE file_sequence (file_next_id int not null); "
sql = sql + "CREATE TABLE wal_files (file_id int not null, file_full_path varchar(512) not null, file_timestamp datetime not null, file_size_mb int not null); "
sql = sql + "CREATE TABLE chain_files (file_id int not null, parent_chain_id int not null, file_type char(1) not null, file_timestamp datetime not null, file_full_path varchar(512), file_size_mb int); "
sql = sql + "INSERT INTO file_sequence VALUES (1001); "
sql = sql + "INSERT INTO chain_sequence VALUES (1001); "
print ""
print " Creating repository..."
os.system("echo '" + str(sql) + "' | sqlite3 " + str(internal_home_folder) + "pgchain.db")
print " Done."
print ""
sys.exit(0)
if (os.path.isfile(internal_home_folder + "pgchain.conf") == False):
print " " + color.BOLD + "ERROR:" + color.END + " The configuration files could not be found (pgchain.conf)"
print " HINT: Read the documentation regarding the configuration file."
print ""
sys.exit(0)
with open(internal_home_folder + "pgchain.conf") as f:
for line in f:
if (line != ""):
if not line.startswith("#"):
v = line.rstrip()
if (v.lower().startswith("db_path=")):
internal_db_path = v.replace("db_path=","")
if (os.path.isfile(internal_db_path) == False):
print " " + color.BOLD + "ERROR:" + color.END + " The repository file (db file) could not be found."
print " HINT: The configuration file directs to: " + internal_db_path
print " READ: If you never created the repository please use the 'create-repo' argument first."
print ""
sys.exit(0)
try:
con = sqlite3.connect(internal_db_path)
except:
print " " + color.BOLD + "ERROR:" + color.END + " Could not open the database file (unknown reason)"
print " HINT: The configuration file directs to: " + internal_db_path
print ""
sys.exit(0)
if (v.lower().startswith("pgctl_path=")):
internal_pgctl_path = v.replace("pgctl_path=","")
if (os.path.isfile(internal_pgctl_path) == False):
print " " + color.BOLD + "ERROR:" + color.END + " The path for PG_CTL is wrong (in the configuration file)."
print ""
sys.exit(0)
if (v.lower().startswith("log_enabled=")):
internal_log_enabled = v.replace("log_enabled=","")
if ((internal_log_enabled != "1") and (internal_log_enabled != "0")):
print " " + color.BOLD + "ERROR:" + color.END + " the log enabled/disabled parameter value is invalid."
print " HINT: Should be 0 or 1 - the given value is: " + internal_log_enabled
print ""
sys.exit(0)
'''
if (v.lower().startswith("home_folder=")):
internal_home_folder = v.replace("home_folder=","")
if (os.path.isdir(internal_home_folder) == False):
print " " + color.BOLD + "ERROR:" + color.END + " the home folder parameter value is invalid."
print " HINT: The given folder (" + internal_home_folder + ") is not a folder..."
print ""
sys.exit(0)
if (is_folder_belongs_to_postgres(internal_home_folder) == 0):
print " " + color.BOLD + "ERROR:" + color.END + " The home folder does not belong to the user postgres."
print " HINT: This can be fixed by running 'sudo chown -R postgres:postgres " + internal_home_folder + "'."
print ""
sys.exit(0)
if (internal_home_folder.endswith("/") == True):
internal_home_folder = internal_home_folder[:-1]
'''
# The following two lines are for backward compatibility and will be removed in future versions
is_nolog = int(internal_log_enabled)
conf_pg_ctl = internal_pgctl_path
# ---------------------------------------------------------------------------------------------
def adjust_string_size(mystring,maxlength):
a = ""
if (mystring == None):
a = ""
if (mystring != None):
a = mystring
while (len(a) < maxlength):
a = a + str(" ")
return a
def report_log_line(logline):
ts = ""
ts = str(time.strftime("%x")) + " " + str(time.strftime("%X"))
os.system("echo '" + str(ts) + ": " + str(logline) + "' >> " + internal_home_folder + "pgchain.log")
return 0
if (len(sys.argv) < 2):
print " ERROR: Bad arguments or missing arguments."
print ""
con.close()
sys.exit(0)
if (str(sys.argv[1]).lower() == "clear-log"):
os.system("echo > " + internal_home_folder + "pgchain.log")
print " INFO: The log was cleared."
print ""
sys.exit(0)
if (str(sys.argv[1]).lower() == "base-backup"):
report_log_line("==================================================================")
report_log_line("STARTING BASE BACKUP")
report_log_line("==================================================================")
newchainid = 0
cur = con.execute("select max(seq_next_id) from chain_sequence;")
for row in cur:
newchainid = int(row[0])
newchainid = newchainid + 1
con.execute("update chain_sequence set seq_next_id = " + str(newchainid) + ";")
con.commit()
report_log_line("Creating folders for new chain (ID = " + str(newchainid) + ")")
os.system("mkdir -p " + internal_home_folder + "/c" + str(newchainid))
os.system("mkdir -p " + internal_home_folder + "/c" + str(newchainid) + "/base_tmp")
report_log_line("Taking base backup...")
os.system("pg_basebackup --xlog-method=stream --format=p -D " + internal_home_folder + "/c" + str(newchainid) + "/base_tmp")
report_log_line("Compressing the base backup...")
os.system("cd " + internal_home_folder + "/c" + str(newchainid) + "/base_tmp && tar -zcf base" + str(newchainid) + ".tar .")
os.system("mv " + internal_home_folder + "/c" + str(newchainid) + "/base_tmp/base" + str(newchainid) + ".tar " + internal_home_folder + "/c" + str(newchainid) + "/base" + str(newchainid) + ".tar")
report_log_line("Removing un-needed files...")
os.system("rm -rf " + internal_home_folder + "/c" + str(newchainid) + "/base_tmp")
report_log_line("Registering new chain...")
basesize = 0
basesize = os.path.getsize(internal_home_folder + "/c" + str(newchainid) + "/base" + str(newchainid) + ".tar")
basesize = ((basesize / 1024)/1024)
con.execute("insert into chains values (" + str(newchainid) + ",'" + internal_home_folder + "/c" + str(newchainid) + "/',datetime('now'));")
con.commit()
newfileid = 0
cur = con.execute("select max(file_next_id) from file_sequence;")
for row in cur:
newfileid = int(row[0])
newfileid = newfileid + 1
con.execute("update file_sequence set file_next_id = " + str(newfileid) + ";")
con.commit()
con.execute("insert into chain_files values (" + str(newfileid) + "," + str(newchainid) + ",'B',datetime('now'),'" + internal_home_folder + "/c" + str(newchainid) + "/base" + str(newchainid) + ".tar'," + str(basesize) + ");")
con.commit()
report_log_line("Done with base backup.")
print ""
con.close()
sys.exit(0)
if (str(sys.argv[1]).lower() == "get-wal"):
report_log_line("==================================================================")
report_log_line("GET WAL ACTION")
report_log_line("==================================================================")
if (len(sys.argv) < 3):
report_log_line("ERROR: Could not register WAL file as thesecond argument is missing.")
con.close()
sys.exit(0)
if (str(sys.argv[2]) == ""):
report_log_line("ERROR: Could not register WAL file as thesecond argument is missing.")
con.close()
sys.exit(0)
if (os.path.isfile(str(sys.argv[2])) == False):
report_log_line("ERROR: The WAL file argument is no a file. check the archive_command in postgresql.conf")
con.close()
sys.exit(0)
	curchain = None
	cur = con.execute("select max(chain_id) from chains;")
	for row in cur:
		if (row[0] != None):
			curchain = int(row[0])
if (curchain == None):
con.close()
report_log_line("ERROR: Could not find a valid chain (perhaps no base-backup was done?)")
sys.exit(0)
walsize = 0
walsize = os.path.getsize(str(sys.argv[2]))
walsize = ((walsize / 1024) / 1024)
walshortname = ""
walshortname = ntpath.basename(str(sys.argv[2]))
newfileid = 0
cur = con.execute("select max(file_next_id) from file_sequence;")
for row in cur:
newfileid = int(row[0])
newfileid = newfileid + 1
cur.execute("update file_sequence set file_next_id = " + str(newfileid) + ";")
con.commit()
report_log_line("Accepting WAL file [" + str(sys.argv[2]) + "] with size = " + str(walsize) + "MB to chain ID = " + str(curchain) + ".")
report_log_line("Copying the WAL file [" + str(sys.argv[2]) + "] to chain folder [" + internal_home_folder + "/c" + str(curchain) + "/].")
os.system("cp " + str(sys.argv[2]) + " " + internal_home_folder + "/c" + str(curchain) + "/")
cur.execute("insert into chain_files values (" + str(newfileid) + "," + str(curchain) + ",'W',datetime('now'),'" + internal_home_folder + "/c" + str(curchain) + "/" + str(walshortname) + "'," + str(walsize) + ");")
con.commit()
con.close()
report_log_line("Done.")
print ""
if (str(sys.argv[1]).lower() == "list-chains"):
print " Chain Report (Last 10 Chains)"
print ""
print color.BOLD + " ---------------------------------------------------------------------------------------" + color.END
print color.BOLD + " ID DATE/TIME TOTAL SIZE (MB) #WAL FILES LAST RESTORE POINT" + color.END
print color.BOLD + " ---------------------------------------------------------------------------------------" + color.END
cur = con.execute("select a.chain_id,a.chain_start_timestamp,(select sum(b.file_size_mb) from chain_files b where b.parent_chain_id = a.chain_id),a.base_backup_full_path,(select count(*) from chain_files c where c.parent_chain_id = a.chain_id and c.file_type = 'W'),(select max(file_timestamp) from chain_files c where c.parent_chain_id = a.chain_id and c.file_type = 'W') from chains a order by chain_id desc limit 10;")
for row in cur:
ln = ""
ln = " " + adjust_string_size(str(row[0]),7) + adjust_string_size(str(row[1]),24)
if (str(row[2]) == ""):
ln = ln + adjust_string_size(str("MB"),20)
if (str(row[2]) != ""):
ln = ln + adjust_string_size(str(str(row[2]) + " MB"),20)
ln = ln + adjust_string_size(str(row[4]) + " Files",14)
if (str(row[5]) == "None"):
ln = ln + str(row[1])
if (str(row[5]) != "None"):
ln = ln + str(row[5])
print ln
print ""
con.close()
sys.exit(0)
if (str(sys.argv[1]).lower() == "clear-history"):
report_log_line("==================================================================")
report_log_line("CLEAR HISTORY TASK")
report_log_line("==================================================================")
if (len(sys.argv) < 3):
report_log_line("ERROR: Could not clear history - missing argument.")
print " ERROR: Could not clear history - missing argument."
print ""
con.close()
sys.exit(0)
if (str(sys.argv[2]).isdigit() == False):
report_log_line("ERROR: Could not clear history - invalid argument (perhaps string instead of number?).")
print " ERROR: Could not clear history - invalid argument."
print ""
con.close()
sys.exit(0)
chainsback = int(sys.argv[2])
report_log_line("Starting to remove/delete " + str(chainsback) + " oldest chains.")
report_log_line("INFO: This is due to a command line request that executed the clear-history command.")
chainsarray = ""
cur = con.execute("select chain_id from chains order by chain_id asc limit " + str(chainsback) + ";")
for row in cur:
report_log_line("INFO: Deleting chain #" + str(row[0]) + " from disk.")
os.system("rm -rf " + internal_home_folder + "/c" + str(row[0]))
chainsarray = chainsarray + str(row[0]) + ";"
tmp = chainsarray.split(';')
for ch in tmp:
if ((str(ch) != "") and (ch != None)):
report_log_line("Removing repository data for chain #" + str(ch) + " that was cleared.")
cur.execute("delete from chains where chain_id = " + str(ch) + ";")
con.commit()
cur.execute("delete from chain_files where parent_chain_id = " + str(ch) + ";")
con.commit()
report_log_line("Done.")
print ""
print " The repository history was removed."
print ""
con.close()
sys.exit(0)
if (str(sys.argv[1]).lower() == "restore-chain"):
if (is_nolog == 1):
print " INFO: Canceling the '--nolog' - restore must be written to the log."
if (len(sys.argv) < 3):
print " ERROR: Bad/missing arguments (missing the chain number). Quiting."
print " HINT: Use the --help switch for more information."
print ""
con.close()
sys.exit(0)
if (str(sys.argv[2]).isdigit() == False):
print " ERROR: Bad/missing arguments (missing the chain number). Quiting."
print " HINT: Use the --help switch for more information."
print ""
con.close()
sys.exit(0)
restore_check = 0
datadir = ""
pg = psycopg2.connect(host="127.0.0.1", port="5432")
pgcur = pg.cursor()
pgcur.execute("select pg_is_in_recovery()::int;")
pgrow = pgcur.fetchone()
restore_check = int(pgrow[0])
pgcur.execute("select setting from pg_settings where name = 'data_directory';")
pgrow = pgcur.fetchone()
datadir = str(pgrow[0])
pgrow = None
pgcur = None
pg.close()
pg = None
if (restore_check == 1):
print " ERROR: PostgreSQL is currently in restore mode - wait until it finishes first."
print " HINT: Please check why PostgreSQL is in restore mode if you are now trying to restore..."
print ""
con.close()
sys.exit(0)
chain_check = 0
cur = con.execute("select count(*) from chains where chain_id = " + str(sys.argv[2]) + ";")
for row in cur:
if ((row[0] != None) and (str(row[0]) != "")):
chain_check = int(row[0])
if (chain_check == 0):
print " ERROR: Could not find chain #" + str(sys.argv[2]) + "."
print " HINT: Check your chains with the --list-chains argument for more information."
print ""
con.close()
sys.exit(0)
print color.PURPLE + " ============================================================================================================" + color.END
print color.PURPLE + " IMPORTANT WARNING:" + color.END
print color.PURPLE + " ============================================================================================================" + color.END
print " The following action will do the following actions - you must approve them first:"
print " 1) Shut down PostgreSQL (by using pg_ctl)"
print " 2) Take a full backup for the current layout (BEFORE the restore process) by moving the data directory (quicker)"
print " 3) Restore the base backup (you have chosen to restore chain #" + str(sys.argv[2]) + ")"
print " 4) Create the recovery configuration file (to keep restoring the WAL files)"
print " 5) Start PostgreSQL server (which will go into recovery mode and then will resume normal activity)"
print ""
print " Please read this as well:"
print " ============================================================================================================"
print " Should any error occur in the process the instance will not be able to start and there is a chance"
print " of data-loss. In that case you will have to revert to the base copy made on step 2 (see above) which may"
print " get you back to the prior state."
print ""
apr = raw_input(" PLEASE APPROVE " + color.BOLD + "(Y/N)" + color.END + ": ")
if (str(apr).lower() != "y"):
print " INFO: Quiting as you didn't approve the above changes."
print ""
con.close()
sys.exit(0)
report_log_line("==================================================================")
report_log_line("RESTORE CHAIN ACTION")
report_log_line("==================================================================")
report_log_line("Stopping PostgreSQL now!")
os.system(conf_pg_ctl + " stop -s -D " + str(datadir))
temp_folder_name = str(randint(10000,99999))
temp_folder_name = str(datadir).replace(ntpath.basename(datadir),temp_folder_name)
report_log_line("Changing the 'data' folder name to a temporary name (" + str(temp_folder_name) +")")
os.system("mv " + str(datadir) + " " + str(temp_folder_name))
report_log_line("Creating new directory with the old name (" + str(datadir) + ")")
os.system("mkdir " + str(datadir))
os.system("chmod -R 700 " + str(datadir))
report_log_line("Getting base backup TAR file from PGCHAIN repostitory folder")
os.system("cp " + internal_home_folder + "/c" + str(sys.argv[2]) + "/base" + str(sys.argv[2]) + ".tar " + str(datadir))
report_log_line("Extracting files from TAR archive")
os.system("tar -xf " + str(datadir) + "/base" + str(sys.argv[2]) + ".tar -C " + str(datadir))
report_log_line("Removing the TAR file (as it is not needed anymore)")
os.system("rm -rf " + str(datadir) + "/base" + str(sys.argv[2]) + ".tar")
report_log_line("Creating recovery.conf file before starting the service again")
os.system("touch " + str(datadir) + "/recovery.conf")
os.system("echo \"restore_command = 'cp " + internal_home_folder + "/c" + str(sys.argv[2]) + "/%f \"%p\"'\" >> " + str(datadir) + "/recovery.conf")
report_log_line("Assuring chmod 700 to the data folder")
os.system("chmod -R 700 " + str(datadir))
report_log_line("Removing temporary folder")
os.system("rm -rf " + str(temp_folder_name))
report_log_line("Starting PostgreSQL again")
os.system(conf_pg_ctl + " -D " + str(datadir) + " start & ")
print ""
con.close()
con = None
sys.exit(0)
if (str(sys.argv[1]).lower() == "chain-info"):
if (len(sys.argv) < 3):
print " ERROR: Bad/missing arguments (missing the chain number). Quiting."
print " HINT: Use the --help switch for more information."
print ""
con.close()
sys.exit(0)
chain_check = 0
ch = str(sys.argv[2])
cur = con.execute("select count(*) from chains where chain_id = " + str(ch) + ";")
for row in cur:
if ((row[0] != None) and (str(row[0]) != "")):
chain_check = int(row[0])
if (chain_check == 0):
print " ERROR: The chain number you provided doesn't exist."
print ""
con.close()
sys.exit(0)
print color.BOLD + " ------------------------------------------------------------------" + color.END
print color.BOLD + " Chain Information" + color.END
print color.BOLD + " ------------------------------------------------------------------" + color.END
cur = con.execute("select chain_id,base_backup_full_path,chain_start_timestamp,(select file_size_mb from chain_files where parent_chain_id = " + str(ch) + " and file_type = 'B'),(select sum(file_size_mb) from chain_files where parent_chain_id = " + str(ch) + " and file_type = 'W'),(select count(*) from chain_files where parent_chain_id = " + str(ch) + " and file_type = 'W') from chains where chain_id = " + str(ch) + ";")
row = cur.fetchone()
print ""
print " Chain ID: " + color.BOLD + str(ch) + color.END
print " Chain Started: " + color.BOLD + str(row[2]) + color.END
print " Chain File Path: " + color.BOLD + str(row[1]) + color.END
print " Base Backup Size: " + color.BOLD + str(row[3]) + color.END + " MB"
print " Total WAL Size: " + color.BOLD + str(row[4]) + color.END + " MB"
print " Total WAL Count: " + color.BOLD + str(row[5]) + color.END + " File(s)"
print " " + color.UNDERLINE + "Restore Command:" + color.END
print " ./pgchain.py restore-chain " + str(ch)
print " (Always use extreme caution when deciding to restore)"
print ""
print ""
con.close()
con = None
sys.exit(0)
if (str(sys.argv[1]).lower() == "show-config"):
print color.BOLD + " ------------------------------------------------------------------" + color.END
print color.BOLD + " PGCHAIN Configuration Information" + color.END
print color.BOLD + " ------------------------------------------------------------------" + color.END
pg = psycopg2.connect(host="127.0.0.1", port="5432")
pgcur = pg.cursor()
pgcur.execute("select setting from pg_settings where name = 'server_version';")
pgrow = pgcur.fetchone()
print ""
print " PostgreSQL Version: " + color.BOLD + str(pgrow[0]) + color.END
pgcur = pg.cursor()
pgcur.execute("select setting from pg_settings where name = 'data_directory';")
pgrow = pgcur.fetchone()
print " PostgreSQL Data Folder: " + color.BOLD + str(pgrow[0]) + color.END
pg.close()
print " PG_CTL Executable: " + color.BOLD + internal_pgctl_path + color.END
print " PGCHAIN Version: " + color.BOLD + "2017.10 Beta2" + color.END
print " PGCHAIN Repository DB: " + color.BOLD + internal_db_path + color.END
if (internal_log_enabled == "0"):
print " PGCHAIN Log Status: " + color.BOLD + "Disabled" + color.END
if (internal_log_enabled == "1"):
print " PGCHAIN Log Status: " + color.BOLD + "Enabled" + color.END
print ""
con.close()
con = None
sys.exit(0)
if (str(sys.argv[1]).lower() == "keep-recent"):
if (len(sys.argv) < 3):
print " ERROR: Bad/missing arguments (missing the chain number). Quiting."
print " HINT: Use the --help switch for more information."
print ""
con.close()
sys.exit(0)
if (str(sys.argv[2]).isdigit() == False):
print " ERROR: Bad/missing arguments (missing the chain number). Quiting."
print " HINT: Use the --help switch for more information."
print ""
con.close()
sys.exit(0)
if (int(sys.argv[2]) == 0):
print " ERROR: Could not keep zero chains.. that would mean delete the entire backup repository."
print " HINT: Use the --help switch for more information."
print ""
con.close()
sys.exit(0)
requested_chain_count = int(sys.argv[2])
actual_chain_count = 0
cur = con.cursor()
cur.execute("select count(*) from chains;")
row = cur.fetchone()
actual_chain_count = int(row[0])
if (requested_chain_count > actual_chain_count):
print " " + color.BOLD + "ERROR:" + color.END + " The requested chains to keep if larger than the actual chains."
print ""
con.close()
sys.exit(0)
kepts = []
removs = []
cur.execute("select chain_id from chains order by chain_id desc limit " + str(requested_chain_count) + ";")
for row in cur:
kepts.append(str(row[0]))
cur.execute("select chain_id from chains order by chain_id desc;")
for row in cur:
if (str(row[0]) not in kepts):
removs.append(str(row[0]))
for chain in removs:
print " Removing chain #" + str(chain) + "..."
os.system("rm -rf " + str(internal_home_folder) + "c" + str(chain))
os.system("echo 'delete from chains where chain_id = " + str(chain) + ";' | sqlite3 " + str(internal_home_folder) + "pgchain.db")
os.system("echo 'delete from chain_files where parent_chain_id = " + str(chain) + ";' | sqlite3 " + str(internal_home_folder) + "pgchain.db")
print " Done."
print ""
con.close()
sys.exit(0)
con.close()
con = None
|
pglivebackup/pgchain
|
pgchain.py
|
Python
|
mit
| 26,324 | 0.02454 |
def f(s):
s = s[::-1]
return s.swapcase()
result = f(f(f(f(f('abcdef'))))) # breakpoint
|
allotria/intellij-community
|
python/testData/debug/stepping/test_smart_step_into_native_function_in_return.py
|
Python
|
apache-2.0
| 99 | 0 |
# Natural Language Toolkit: Chatbots
#
# Copyright (C) 2001-2013 NLTK Project
# Authors: Steven Bird <stevenbird1@gmail.com>
# URL: <http://www.nltk.org/>
# For license information, see LICENSE.TXT
# Based on an Eliza implementation by Joe Strout <joe@strout.net>,
# Jeff Epler <jepler@inetnebr.com> and Jez Higgins <jez@jezuk.co.uk>.
"""
A class for simple chatbots. These perform simple pattern matching on sentences
typed by users, and respond with automatically generated sentences.
These chatbots may not work using the Windows command line or the
Windows IDLE GUI.
"""
from __future__ import print_function
from .util import Chat
from .eliza import eliza_chat
from .iesha import iesha_chat
from .rude import rude_chat
from .suntsu import suntsu_chat
from .zen import zen_chat
bots = [
(eliza_chat, 'Eliza (psycho-babble)'),
(iesha_chat, 'Iesha (teen anime junky)'),
(rude_chat, 'Rude (abusive bot)'),
(suntsu_chat, 'Suntsu (Chinese sayings)'),
(zen_chat, 'Zen (gems of wisdom)')]
def chatbots():
import sys
print('Which chatbot would you like to talk to?')
botcount = len(bots)
for i in range(botcount):
print(' %d: %s' % (i+1, bots[i][1]))
while True:
print('\nEnter a number in the range 1-%d: ' % botcount, end=' ')
choice = sys.stdin.readline().strip()
if choice.isdigit() and (int(choice) - 1) in range(botcount):
break
else:
print(' Error: bad chatbot number')
chatbot = bots[int(choice)-1][0]
chatbot()
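
def _demo_chat():
    """A minimal usage sketch (not part of the original module).

    Drives the Chat engine directly with one hand-rolled pattern;
    the regex and responses below are illustrative only.
    """
    from .util import reflections
    pairs = [(r'hi|hello', ['Hello there!', 'Hi!'])]
    chat = Chat(pairs, reflections)
    print(chat.respond('hello'))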
|
bbengfort/TextBlob
|
textblob/nltk/chat/__init__.py
|
Python
|
mit
| 1,546 | 0.001294 |
# flake8: noqa
from .store import Store
from .errors import StoreError, CredentialsNotFound
from .constants import *
|
jarv/cmdchallenge-site
|
lambda_src/runcmd/dockerpycreds/__init__.py
|
Python
|
mit
| 116 | 0.008621 |
# Copyright (c) 2006-2009 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
class DBSnapshot(object):
"""
Represents a RDS DB Snapshot
"""
def __init__(self, connection=None, id=None):
self.connection = connection
self.id = id
self.engine = None
self.snapshot_create_time = None
self.instance_create_time = None
self.port = None
self.status = None
self.availability_zone = None
self.main_username = None
self.allocated_storage = None
        self.instance_id = None
def __repr__(self):
return 'DBSnapshot:%s' % self.id
def startElement(self, name, attrs, connection):
pass
def endElement(self, name, value, connection):
if name == 'Engine':
self.engine = value
elif name == 'InstanceCreateTime':
self.instance_create_time = value
elif name == 'SnapshotCreateTime':
self.snapshot_create_time = value
elif name == 'DBInstanceIdentifier':
self.instance_id = value
elif name == 'DBSnapshotIdentifier':
self.id = value
elif name == 'Port':
self.port = int(value)
elif name == 'Status':
self.status = value
elif name == 'AvailabilityZone':
self.availability_zone = value
elif name == 'MainUsername':
self.main_username = value
elif name == 'AllocatedStorage':
self.allocated_storage = int(value)
elif name == 'SnapshotTime':
self.time = value
else:
setattr(self, name, value)
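
# Hedged illustration (not part of boto): DBSnapshot objects are normally
# populated by boto's SAX response parser, which invokes endElement once per
# XML tag. Driving the callbacks by hand shows the effect; values are made up.
def _demo_dbsnapshot():
    snap = DBSnapshot(id='my-snap')
    snap.endElement('Engine', 'mysql', None)
    snap.endElement('Port', '3306', None)
    snap.endElement('Status', 'available', None)
    assert snap.port == 3306 and snap.status == 'available'
    return snap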
|
kumar303/rockit
|
vendor-local/boto/rds/dbsnapshot.py
|
Python
|
bsd-3-clause
| 2,724 | 0.001468 |
'''
Created on Mar 1, 2017
@author: PJ
'''
from Scouting2017.model.reusable_models import Team
from BaseScouting.views.submissions.submit_bookmark import BaseUpdateBookmarks
class UpdateBookmarks2017(BaseUpdateBookmarks):
def __init__(self):
BaseUpdateBookmarks.__init__(self, Team)
|
ArcticWarriors/scouting-app
|
ScoutingWebsite/Scouting2017/view/submissions/submit_bookmark.py
|
Python
|
mit
| 299 | 0 |
"""
This component provides HA sensor support for Ring Door Bell/Chimes.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.ring/
"""
from datetime import timedelta
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION, CONF_ENTITY_NAMESPACE, CONF_MONITORED_CONDITIONS)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
from . import ATTRIBUTION, DATA_RING, DEFAULT_ENTITY_NAMESPACE
DEPENDENCIES = ['ring']
_LOGGER = logging.getLogger(__name__)
SCAN_INTERVAL = timedelta(seconds=30)
# Sensor types: Name, category, units, icon, kind
SENSOR_TYPES = {
'battery': [
'Battery', ['doorbell', 'stickup_cams'], '%', 'battery-50', None],
'last_activity': [
'Last Activity', ['doorbell', 'stickup_cams'], None, 'history', None],
'last_ding': [
'Last Ding', ['doorbell'], None, 'history', 'ding'],
'last_motion': [
'Last Motion', ['doorbell', 'stickup_cams'], None,
'history', 'motion'],
'volume': [
'Volume', ['chime', 'doorbell', 'stickup_cams'], None,
'bell-ring', None],
'wifi_signal_category': [
'WiFi Signal Category', ['chime', 'doorbell', 'stickup_cams'], None,
'wifi', None],
'wifi_signal_strength': [
'WiFi Signal Strength', ['chime', 'doorbell', 'stickup_cams'], 'dBm',
'wifi', None],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_ENTITY_NAMESPACE, default=DEFAULT_ENTITY_NAMESPACE):
cv.string,
vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})
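# Hedged illustration of a matching configuration.yaml entry (keys come from
# the schema above; the condition values chosen here are illustrative):
#   sensor:
#     - platform: ring
#       monitored_conditions:
#         - battery
#         - last_ding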
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up a sensor for a Ring device."""
ring = hass.data[DATA_RING]
sensors = []
for device in ring.chimes: # ring.chimes is doing I/O
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if 'chime' in SENSOR_TYPES[sensor_type][1]:
sensors.append(RingSensor(hass, device, sensor_type))
for device in ring.doorbells: # ring.doorbells is doing I/O
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if 'doorbell' in SENSOR_TYPES[sensor_type][1]:
sensors.append(RingSensor(hass, device, sensor_type))
for device in ring.stickup_cams: # ring.stickup_cams is doing I/O
for sensor_type in config[CONF_MONITORED_CONDITIONS]:
if 'stickup_cams' in SENSOR_TYPES[sensor_type][1]:
sensors.append(RingSensor(hass, device, sensor_type))
add_entities(sensors, True)
return True
class RingSensor(Entity):
"""A sensor implementation for Ring device."""
def __init__(self, hass, data, sensor_type):
"""Initialize a sensor for Ring device."""
super(RingSensor, self).__init__()
self._sensor_type = sensor_type
self._data = data
self._extra = None
self._icon = 'mdi:{}'.format(SENSOR_TYPES.get(self._sensor_type)[3])
self._kind = SENSOR_TYPES.get(self._sensor_type)[4]
self._name = "{0} {1}".format(
self._data.name, SENSOR_TYPES.get(self._sensor_type)[0])
self._state = None
self._tz = str(hass.config.time_zone)
self._unique_id = '{}-{}'.format(self._data.id, self._sensor_type)
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def state(self):
"""Return the state of the sensor."""
return self._state
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def device_state_attributes(self):
"""Return the state attributes."""
attrs = {}
attrs[ATTR_ATTRIBUTION] = ATTRIBUTION
attrs['device_id'] = self._data.id
attrs['firmware'] = self._data.firmware
attrs['kind'] = self._data.kind
attrs['timezone'] = self._data.timezone
attrs['type'] = self._data.family
attrs['wifi_name'] = self._data.wifi_name
if self._extra and self._sensor_type.startswith('last_'):
attrs['created_at'] = self._extra['created_at']
attrs['answered'] = self._extra['answered']
attrs['recording_status'] = self._extra['recording']['status']
attrs['category'] = self._extra['kind']
return attrs
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self._sensor_type == 'battery' and self._state is not None:
return icon_for_battery_level(battery_level=int(self._state),
charging=False)
return self._icon
@property
def unit_of_measurement(self):
"""Return the units of measurement."""
return SENSOR_TYPES.get(self._sensor_type)[2]
def update(self):
"""Get the latest data and updates the state."""
_LOGGER.debug("Pulling data from %s sensor", self._name)
self._data.update()
if self._sensor_type == 'volume':
self._state = self._data.volume
if self._sensor_type == 'battery':
self._state = self._data.battery_life
if self._sensor_type.startswith('last_'):
history = self._data.history(limit=5,
timezone=self._tz,
kind=self._kind,
enforce_limit=True)
if history:
self._extra = history[0]
created_at = self._extra['created_at']
self._state = '{0:0>2}:{1:0>2}'.format(
created_at.hour, created_at.minute)
if self._sensor_type == 'wifi_signal_category':
self._state = self._data.wifi_signal_category
if self._sensor_type == 'wifi_signal_strength':
self._state = self._data.wifi_signal_strength
|
jamespcole/home-assistant
|
homeassistant/components/ring/sensor.py
|
Python
|
apache-2.0
| 6,215 | 0 |
#!/usr/bin/env python3
###############################################################################
#
# VFR importer based on GDAL library
#
# Author: Martin Landa <landa.martin gmail.com>
#
# Licence: MIT/X
#
###############################################################################
"""
Imports VFR data to PostGIS database
Requires GDAL library version 1.11 or later.
"""
import sys
import atexit
import argparse
from vfr4ogr import VfrPg
from vfr4ogr.parse import parse_cmd
from vfr4ogr.logger import check_log, VfrLogger
from vfr4ogr.exception import VfrError, VfrErrorCmd
def parse_args():
parser = argparse.ArgumentParser(prog="vfr2pg",
description="Imports VFR data to PostGIS database. "
"Requires GDAL library version 1.11 or later.")
parser.add_argument("-e", "--extended",
action='store_true',
help="Extended layer list statistics")
parser.add_argument("-d", "--download",
action='store_true',
help="Download VFR data to the currect directory (--type required) and exit")
parser.add_argument("-s", "--fileschema",
action='store_true',
help="Create new schema for each VFR file")
parser.add_argument("-g", "--nogeomskip",
action='store_true',
help="Skip features without geometry")
parser.add_argument("-l", "--list",
action='store_true',
help="List existing layers in output database and exit")
parser.add_argument("--file",
help="Path to xml.gz|zip or URL list file")
parser.add_argument("--date",
help="Date in format 'YYYYMMDD'")
parser.add_argument("--type",
help="Type of request in format XY_ABCD, eg. 'ST_UKSH' or 'OB_000000_ABCD'")
parser.add_argument("--layer",
help="Import only selected layers separated by comma (if not given all layers are processed)")
parser.add_argument("--geom",
help="Preferred geometry 'OriginalniHranice' or 'GeneralizovaneHranice' (if not found or not given than first geometry is used)")
parser.add_argument("--dbname",
help="Output PostGIS database")
parser.add_argument("--schema",
help="Schema name (default: public)")
parser.add_argument("--user",
help="User name")
parser.add_argument("--passwd",
help="Password")
parser.add_argument("--host",
help="Host name")
parser.add_argument("--port",
help="Port")
parser.add_argument("-o", "--overwrite",
action='store_true',
help="Overwrite existing PostGIS tables")
parser.add_argument("-a", "--append",
action='store_true',
help="Append to existing PostGIS tables")
return parser.parse_args(), parser.print_help
def main():
# parse cmdline arguments
options, usage = parse_args()
options.format = 'PostgreSQL'
try:
file_list = parse_cmd(options)
except VfrErrorCmd as e:
usage()
sys.exit('ERROR: {}'.format(e))
# build datasource name
odsn = None
if options.dbname:
odsn = "PG:dbname=%s" % options.dbname
if options.user:
odsn += " user=%s" % options.user
if options.passwd:
odsn += " password=%s" % options.passwd
if options.host:
odsn += " host=%s" % options.host
if options.port:
odsn += " port=%s" % options.port
# create convertor
try:
pg = VfrPg(schema=options.schema, schema_per_file=options.fileschema,
dsn=odsn, geom_name=options.geom, layers=options.layer,
nogeomskip=options.nogeomskip, overwrite=options.overwrite)
except VfrError as e:
sys.exit('ERROR: {}'.format(e))
# write log process header
pg.cmd_log(sys.argv)
if options.list:
# list output database and exit
pg.print_summary()
return 0
# read file list and download VFR files if needed
try:
pg.download(file_list, options.date)
except VfrError as e:
VfrLogger.error(str(e))
if options.download:
# download only requested, exiting
return 0
# import input VFR files to PostGIS
ipass = pg.run(options.append, options.extended)
# create indices for output tables
pg.create_indices()
# print final summary
if (ipass > 1 and options.fileschema is False) \
or options.append:
pg.print_summary()
return 0
if __name__ == "__main__":
atexit.register(check_log)
sys.exit(main())
|
ctu-osgeorel/gdal-vfr
|
vfr2pg.py
|
Python
|
mit
| 5,000 | 0.0034 |
import numpy as np
import scipy as sp
import scipy.interpolate
import requests
from io import StringIO
def qvalues(pv, m = None, verbose = False, lowmem = False, pi0 = None):
"""
Copyright (c) 2012, Nicolo Fusi, University of Sheffield
All rights reserved.
Estimates q-values from p-values
Args
=====
m: number of tests. If not specified m = pv.size
verbose: print verbose messages? (default False)
lowmem: use memory-efficient in-place algorithm
pi0: if None, it's estimated as suggested in Storey and Tibshirani, 2003.
For most GWAS this is not necessary, since pi0 is extremely likely to be
1
:param pv:
:param m:
:param verbose:
:param lowmem:
:param pi0:
:return:
"""
assert(pv.min() >= 0 and pv.max() <= 1), "p-values should be between 0 and 1"
original_shape = pv.shape
pv = pv.ravel() # flattens the array in place, more efficient than flatten()
if m == None:
m = float(len(pv))
else:
# the user has supplied an m
m *= 1.0
# if the number of hypotheses is small, just set pi0 to 1
if len(pv) < 100 and pi0 == None:
pi0 = 1.0
elif pi0 != None:
pi0 = pi0
else:
# evaluate pi0 for different lambdas
pi0 = []
lam = sp.arange(0, 0.90, 0.01)
counts = sp.array([(pv > i).sum() for i in sp.arange(0, 0.9, 0.01)])
for l in range(len(lam)):
pi0.append(counts[l]/(m*(1-lam[l])))
pi0 = sp.array(pi0)
# fit natural cubic spline
tck = sp.interpolate.splrep(lam, pi0, k = 3)
pi0 = sp.interpolate.splev(lam[-1], tck)
if pi0 > 1:
if verbose:
print("got pi0 > 1 (%.3f) while estimating qvalues, setting it to 1" % pi0)
pi0 = 1.0
assert(pi0 >= 0 and pi0 <= 1), "pi0 is not between 0 and 1: %f" % pi0
if lowmem:
# low memory version, only uses 1 pv and 1 qv matrices
qv = sp.zeros((len(pv),))
last_pv = pv.argmax()
qv[last_pv] = (pi0*pv[last_pv]*m)/float(m)
pv[last_pv] = -sp.inf
prev_qv = last_pv
for i in range(int(len(pv))-2, -1, -1):
cur_max = pv.argmax()
qv_i = (pi0*m*pv[cur_max]/float(i+1))
pv[cur_max] = -sp.inf
qv_i1 = prev_qv
qv[cur_max] = min(qv_i, qv_i1)
prev_qv = qv[cur_max]
else:
p_ordered = sp.argsort(pv)
pv = pv[p_ordered]
qv = pi0 * m/len(pv) * pv
qv[-1] = min(qv[-1],1.0)
for i in range(len(pv)-2, -1, -1):
qv[i] = min(pi0*m*pv[i]/(i+1.0), qv[i+1])
# reorder qvalues
qv_temp = qv.copy()
qv = sp.zeros_like(qv)
qv[p_ordered] = qv_temp
# reshape qvalues
qv = qv.reshape(original_shape)
return qv
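
# Minimal usage sketch (not part of the original module). With fewer than
# 100 tests and pi0 unspecified, qvalues() pins pi0 to 1.0, reducing to a
# Benjamini-Hochberg-style adjustment. The lowmem path mutates its input
# in place, hence the defensive copy.
def _demo_qvalues():
    pv = np.array([0.001, 0.01, 0.02, 0.5, 0.8])
    return qvalues(pv.copy())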
def get_protein_id(s):
"""
    Return a shortened string, split on spaces, underscores and semicolons.
Extract the first, highest-ranked protein ID from a string containing
protein IDs in MaxQuant output format: e.g. P07830;P63267;Q54A44;P63268
Long names (containing species information) are eliminated (split on ' ') and
isoforms are removed (split on '_').
:param s: protein IDs in MaxQuant format
:type s: str or unicode
:return: string
"""
return str(s).split(';')[0].split(' ')[0].split('_')[0]
def get_protein_ids(s):
"""
Return a list of shortform protein IDs.
Extract all protein IDs from a string containing
protein IDs in MaxQuant output format: e.g. P07830;P63267;Q54A44;P63268
Long names (containing species information) are eliminated (split on ' ') and
isoforms are removed (split on '_').
:param s: protein IDs in MaxQuant format
:type s: str or unicode
:return: list of string ids
"""
return [p.split(' ')[0].split('_')[0] for p in s.split(';') ]
def get_protein_id_list(df, level=0):
"""
Return a complete list of shortform IDs from a DataFrame
Extract all protein IDs from a dataframe from multiple rows containing
protein IDs in MaxQuant output format: e.g. P07830;P63267;Q54A44;P63268
Long names (containing species information) are eliminated (split on ' ') and
isoforms are removed (split on '_').
:param df: DataFrame
:type df: pandas.DataFrame
:param level: Level of DataFrame index to extract IDs from
:type level: int or str
:return: list of string ids
"""
protein_list = []
for s in df.index.get_level_values(level):
protein_list.extend( get_protein_ids(s) )
return list(set(protein_list))
def get_shortstr(s):
"""
Return the first part of a string before a semicolon.
Extract the first, highest-ranked protein ID from a string containing
protein IDs in MaxQuant output format: e.g. P07830;P63267;Q54A44;P63268
:param s: protein IDs in MaxQuant format
:type s: str or unicode
:return: string
"""
return str(s).split(';')[0]
def get_index_list(l, ms):
"""
:param l:
:param ms:
:return:
"""
if type(ms) != list and type(ms) != tuple:
ms = [ms]
return [l.index(s) for s in ms if s in l]
def format_label(sl, fmt=None):
"""
Combine a list of strings to a single str, joined by sep.
Passes through single strings.
:param sl:
:return:
"""
if isinstance(sl, str):
# Already is a string.
return sl
if fmt:
return fmt.format(*sl)
return ' '.join(str(s) for s in sl)
def build_combined_label(sl, idxs, sep=' ', label_format=None):
"""
Generate a combined label from a list of indexes
into sl, by joining them with `sep` (str).
:param sl: Strings to combine
:type sl: dict of str
:param idxs: Indexes into sl
:type idxs: list of sl keys
:param sep:
:return: `str` of combined label
"""
if label_format:
return label_format % tuple([get_shortstr(str(sl[n])) for n in idxs])
else:
return sep.join([get_shortstr(str(sl[n])) for n in idxs])
def hierarchical_match(d, k, default=None):
"""
    Match a key against a dict, simplifying the key one element at a time.

    The key (a single value or a tuple/list of elements) is tried as-is,
    then progressively shortened from the right until a match is found.

    :param d: dict to match against
    :param k: key, or tuple/list of key elements
    :param default: value to return when no match is found
    :return: hierarchically matched value or default
    """
if d is None:
return default
if type(k) != list and type(k) != tuple:
k = [k]
for n, _ in enumerate(k):
key = tuple(k[0:len(k)-n])
if len(key) == 1:
key = key[0]
try:
d[key]
except:
pass
else:
return d[key]
return default
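# Example (illustrative): the key tuple is shortened from the right until it
# matches, so ('geno', 'rep1', 'extra') falls back to ('geno', 'rep1'):
#   d = {('geno', 'rep1'): 1, 'geno': 2}
#   hierarchical_match(d, ('geno', 'rep1', 'extra'))  # -> 1
#   hierarchical_match(d, ('geno', 'other'))          # -> 2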
def chunks(seq, num):
"""
Separate `seq` (`np.array`) into `num` series of as-near-as possible equal
length values.
:param seq: Sequence to split
:type seq: np.array
:param num: Number of parts to split sequence into
:type num: int
:return: np.array of split parts
"""
avg = len(seq) / float(num)
out = []
last = 0.0
while last < len(seq):
out.append(seq[int(last):int(last + avg)])
last += avg
return np.array(out)
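# Example (illustrative): chunks(np.arange(10), 3) splits into lengths
# 3, 3 and 4 -> [array([0, 1, 2]), array([3, 4, 5]), array([6, 7, 8, 9])]
# (np.array over ragged sublists yields an object array on older NumPy).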
def calculate_s0_curve(s0, minpval, maxpval, minratio, maxratio, curve_interval=0.1):
"""
Calculate s0 curve for volcano plot.
    Taking min and max p-values and min and max ratios, calculate a smooth
curve starting from parameter `s0` in each direction.
The `curve_interval` parameter defines the smoothness of the resulting curve.
:param s0: `float` offset of curve from interset
:param minpval: `float` minimum p value
:param maxpval: `float` maximum p value
:param minratio: `float` minimum ratio
:param maxratio: `float` maximum ratio
:param curve_interval: `float` stepsize (smoothness) of curve generator
:return: x, y, fn x,y points of curve, and fn generator
"""
mminpval = -np.log10(minpval)
mmaxpval = -np.log10(maxpval)
maxpval_adjust = mmaxpval - mminpval
ax0 = (s0 + maxpval_adjust * minratio) / maxpval_adjust
edge_offset = (maxratio-ax0) % curve_interval
max_x = maxratio-edge_offset
if (max_x > ax0):
x = np.arange(ax0, max_x, curve_interval)
else:
x = np.arange(max_x, ax0, curve_interval)
fn = lambda x: 10 ** (-s0/(x-minratio) - mminpval)
y = fn(x)
return x, y, fn
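# Minimal usage sketch (values illustrative): an s0 cutoff curve for a
# volcano plot spanning p-values 1..1e-6 and fold-change ratios 0..5.
def _demo_s0_curve():
    x, y, fn = calculate_s0_curve(s0=1.0, minpval=1.0, maxpval=1e-6,
                                  minratio=0.0, maxratio=5.0)
    return x, y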
def find_nearest_idx(array,value):
"""
:param array:
:param value:
:return:
"""
array = array.copy()
array[np.isnan(array)] = 1
idx = (np.abs(array-value)).argmin()
return idx
def get_uniprot_id_mapping_pairs(f, t, seqids):
r = requests.post(
'https://www.uniprot.org/uploadlists/',
files={'file': StringIO(' '.join(seqids))},
params={
            'from': f,
            'to': t,
'format': 'tab',
},
headers={'User-Agent': 'Python / padua - martin.fitzpatrick@gmail.com'}
)
result = {}
if r.text and 'html' not in r.text[:500].lower():
# Looks legitimate data.
lines = r.text.splitlines()
for line in lines[1:]:
key, value = line.split('\t')
if key in result:
result[key].add(value)
else:
result[key] = set([value])
return result
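# Example (illustrative; performs a live network request, and the exact
# mapping shown is hypothetical):
#   get_uniprot_id_mapping_pairs('ACC+ID', 'KEGG_ID', ['P35222'])
#   -> {'P35222': {'hsa:1499'}}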
|
mfitzp/padua
|
padua/utils.py
|
Python
|
bsd-2-clause
| 9,476 | 0.004538 |
from __future__ import absolute_import, division, print_function
import os
from time import ctime
from qtpy import QtWidgets
from glue import core
from glue.utils.qt import load_ui
class MessageWidget(QtWidgets.QWidget, core.hub.HubListener):
""" This simple class displays all messages broadcast
by a hub. It is mainly intended for debugging """
def __init__(self):
QtWidgets.QWidget.__init__(self)
self.ui = load_ui('message_widget.ui', self,
directory=os.path.dirname(__file__))
self.ui.messageTable.setColumnCount(3)
labels = ['Time', 'Message', 'Sender']
self.ui.messageTable.setHorizontalHeaderLabels(labels)
def register_to_hub(self, hub):
# catch all messages
hub.subscribe(self, core.message.Message,
handler=self.process_message,
filter=lambda x: True)
def process_message(self, message):
        row = 0  # new messages are always inserted at the top of the table
self.ui.messageTable.insertRow(0)
tm = QtWidgets.QTableWidgetItem(ctime().split()[3])
typ = str(type(message)).split("'")[-2].split('.')[-1]
mtyp = QtWidgets.QTableWidgetItem(typ)
typ = str(type(message.sender)).split("'")[-2].split('.')[-1]
sender = QtWidgets.QTableWidgetItem(typ)
self.ui.messageTable.setItem(row, 0, tm)
self.ui.messageTable.setItem(row, 1, mtyp)
self.ui.messageTable.setItem(row, 2, sender)
self.ui.messageTable.resizeColumnsToContents()
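
def _demo_message_widget():
    """Hedged usage sketch (not part of glue): requires a running
    QApplication; Hub lives in glue.core.hub."""
    hub = core.hub.Hub()
    widget = MessageWidget()
    widget.register_to_hub(hub)
    widget.show()
    return widget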
|
stscieisenhamer/glue
|
glue/core/qt/message_widget.py
|
Python
|
bsd-3-clause
| 1,541 | 0 |
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
def register_types(module):
root_module = module.get_root()
## ipv4-nix-vector-routing.h: ns3::Ipv4NixVectorRouting [class]
module.add_class('Ipv4NixVectorRouting', parent=root_module['ns3::Ipv4RoutingProtocol'])
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::NixVector > > > >', 'ns3::NixMap_t')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::NixVector > > > >*', 'ns3::NixMap_t*')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::NixVector >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::NixVector > > > >&', 'ns3::NixMap_t&')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::Ipv4Route > > > >', 'ns3::Ipv4RouteMap_t')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::Ipv4Route > > > >*', 'ns3::Ipv4RouteMap_t*')
typehandlers.add_type_alias('std::map< ns3::Ipv4Address, ns3::Ptr< ns3::Ipv4Route >, std::less< ns3::Ipv4Address >, std::allocator< std::pair< ns3::Ipv4Address const, ns3::Ptr< ns3::Ipv4Route > > > >&', 'ns3::Ipv4RouteMap_t&')
## Register a nested module for the namespace Config
nested_module = module.add_cpp_namespace('Config')
register_types_ns3_Config(nested_module)
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace addressUtils
nested_module = module.add_cpp_namespace('addressUtils')
register_types_ns3_addressUtils(nested_module)
## Register a nested module for the namespace aodv
nested_module = module.add_cpp_namespace('aodv')
register_types_ns3_aodv(nested_module)
## Register a nested module for the namespace dot11s
nested_module = module.add_cpp_namespace('dot11s')
register_types_ns3_dot11s(nested_module)
## Register a nested module for the namespace dsdv
nested_module = module.add_cpp_namespace('dsdv')
register_types_ns3_dsdv(nested_module)
## Register a nested module for the namespace flame
nested_module = module.add_cpp_namespace('flame')
register_types_ns3_flame(nested_module)
## Register a nested module for the namespace internal
nested_module = module.add_cpp_namespace('internal')
register_types_ns3_internal(nested_module)
## Register a nested module for the namespace olsr
nested_module = module.add_cpp_namespace('olsr')
register_types_ns3_olsr(nested_module)
def register_types_ns3_Config(module):
root_module = module.get_root()
def register_types_ns3_FatalImpl(module):
root_module = module.get_root()
def register_types_ns3_addressUtils(module):
root_module = module.get_root()
def register_types_ns3_aodv(module):
root_module = module.get_root()
def register_types_ns3_dot11s(module):
root_module = module.get_root()
def register_types_ns3_dsdv(module):
root_module = module.get_root()
def register_types_ns3_flame(module):
root_module = module.get_root()
def register_types_ns3_internal(module):
root_module = module.get_root()
def register_types_ns3_olsr(module):
root_module = module.get_root()
def register_methods(root_module):
register_Ns3Ipv4NixVectorRouting_methods(root_module, root_module['ns3::Ipv4NixVectorRouting'])
return
def register_Ns3Ipv4NixVectorRouting_methods(root_module, cls):
## ipv4-nix-vector-routing.h: ns3::Ipv4NixVectorRouting::Ipv4NixVectorRouting(ns3::Ipv4NixVectorRouting const & arg0) [copy constructor]
cls.add_constructor([param('ns3::Ipv4NixVectorRouting const &', 'arg0')])
## ipv4-nix-vector-routing.h: ns3::Ipv4NixVectorRouting::Ipv4NixVectorRouting() [constructor]
cls.add_constructor([])
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::FlushGlobalNixRoutingCache() [member function]
cls.add_method('FlushGlobalNixRoutingCache',
'void',
[])
## ipv4-nix-vector-routing.h: static ns3::TypeId ns3::Ipv4NixVectorRouting::GetTypeId() [member function]
cls.add_method('GetTypeId',
'ns3::TypeId',
[],
is_static=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::SetNode(ns3::Ptr<ns3::Node> node) [member function]
cls.add_method('SetNode',
'void',
[param('ns3::Ptr< ns3::Node >', 'node')])
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::DoDispose() [member function]
cls.add_method('DoDispose',
'void',
[],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyAddAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyInterfaceDown(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceDown',
'void',
[param('uint32_t', 'interface')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyInterfaceUp(uint32_t interface) [member function]
cls.add_method('NotifyInterfaceUp',
'void',
[param('uint32_t', 'interface')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
cls.add_method('NotifyRemoveAddress',
'void',
[param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
cls.add_method('PrintRoutingTable',
'void',
[param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
is_const=True, visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: bool ns3::Ipv4NixVectorRouting::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
cls.add_method('RouteInput',
'bool',
[param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: ns3::Ptr<ns3::Ipv4Route> ns3::Ipv4NixVectorRouting::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
cls.add_method('RouteOutput',
'ns3::Ptr< ns3::Ipv4Route >',
[param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
visibility='private', is_virtual=True)
## ipv4-nix-vector-routing.h: void ns3::Ipv4NixVectorRouting::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
cls.add_method('SetIpv4',
'void',
[param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
visibility='private', is_virtual=True)
return
def register_functions(root_module):
module = root_module
register_functions_ns3_Config(module.get_submodule('Config'), root_module)
register_functions_ns3_FatalImpl(module.get_submodule('FatalImpl'), root_module)
register_functions_ns3_addressUtils(module.get_submodule('addressUtils'), root_module)
register_functions_ns3_aodv(module.get_submodule('aodv'), root_module)
register_functions_ns3_dot11s(module.get_submodule('dot11s'), root_module)
register_functions_ns3_dsdv(module.get_submodule('dsdv'), root_module)
register_functions_ns3_flame(module.get_submodule('flame'), root_module)
register_functions_ns3_internal(module.get_submodule('internal'), root_module)
register_functions_ns3_olsr(module.get_submodule('olsr'), root_module)
return
def register_functions_ns3_Config(module, root_module):
return
def register_functions_ns3_FatalImpl(module, root_module):
return
def register_functions_ns3_addressUtils(module, root_module):
return
def register_functions_ns3_aodv(module, root_module):
return
def register_functions_ns3_dot11s(module, root_module):
return
def register_functions_ns3_dsdv(module, root_module):
return
def register_functions_ns3_flame(module, root_module):
return
def register_functions_ns3_internal(module, root_module):
return
def register_functions_ns3_olsr(module, root_module):
return
|
annegabrielle/secure_adhoc_network_ns-3
|
ns3_source_code/ns-3.10/bindings/python/apidefs/gcc-LP64/ns3_module_nix_vector_routing.py
|
Python
|
gpl-2.0
| 11,439 | 0.012763 |
#!/usr/bin/python
import imaplib
import sys
import random
import os
import threading
import time
import types
import subprocess
SERVER = "localhost"
USER = ["ftfstest1", "ftfstest2", "ftfstest3", "ftfstest4", "ftfstest5", "ftfstest6", "ftfstest7", "ftfstest8", "ftfstest9", "ftfstest10", "ftfstest11", "ftfstest12", "ftfstest13", "ftfstest14", "ftfstest15", "ftfstest16"]
PASS = ["oscarlab", "oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab","oscarlab"]
n_user = 2
n_box = 80
boxsize = 1000
max_msg_len = 32768
run_time = 1800
n_top = 8000
def worker_thread(i, n_op_thread, running) :
m = imaplib.IMAP4_SSL(SERVER)
m.login(USER[i], PASS[i])
while not running.isSet() :
pass
n_ops = [0] * 3
# while running.isSet() :
for i in range(n_top) :
boxnum = random.randint(1, n_box) - 1
box = "box%d" % boxnum
x = m.select(box)
rand_op = random.randint(1, 2) - 1
if rand_op == 0 :
msg_len = random.randint(1, max_msg_len)
msg = os.urandom(msg_len)
m.APPEND(box, None, None, msg)
else :
typ, msg_ids = m.search(None, 'ALL')
   msgs = msg_ids[0].split()
   if (len(msgs) == 0) :
    continue
   msg_num = random.randint(1, len(msgs)) - 1
   msg = msgs[msg_num]
# if rand_op == 1 :
m.store(msg, "+FLAGS", "(\\Deleted)")
m.expunge()
# else :
# typ, data = m.fetch(msg, "(RFC822 FLAGS)")
# flagged = 0
# if type(data[0]) is types.NoneType :
# continue
# flagged = 0
# for flag in imaplib.ParseFlags(data[0][0]) :
# if (flag == "\Flagged") :
# flagged = 1
# if flagged :
# m.store(msg, "-FLAGS", "(\\FLAGGED)")
# else :
# m.store(msg, "+FLAGS", "(\\FLAGGED)")
n_ops[rand_op] = n_ops[rand_op] + 1
subprocess.call('echo "flush" > /proc/toku_flusher', shell=True)
m.logout()
print "Thread %d: append %d delete %d flag change %d" % (i, n_ops[0], n_ops[1], n_ops[2])
n_op_thread.append(n_ops[0] + n_ops[1] + n_ops[2])
print "MAILSERVER AGEING"
f=open('mailservertime.out','a')
t = []
running = threading.Event()
n_op_thread = []
for i in range(n_user) :
tmp_t = threading.Thread(target = worker_thread, args = (i, n_op_thread, running,))
tmp_t.start()
t.append(tmp_t)
time.sleep(2)
running.set()
t1 = time.time()
#time.sleep(run_time)
#running.clear()
for i in range(n_user):
t[i].join()
t2 = time.time()
n_op_total = 0
for i in range(n_user) :
n_op_total = n_op_total + n_op_thread[i]
print "This experiment took %f seconds" % (t2 - t1)
print "%d ops are executed (%f op/s)" % (n_op_total, n_op_total / (t2 - t1))
f.write("Time\t")
f.write(str(t2 - t1) + '\t')
f.write("Nops\t")
f.write(str(n_op_total) + '\n')
sys.exit(0)
|
oscarlab/betrfs
|
benchmarks/aging/mailserver/mailserver-aging.py
|
Python
|
gpl-2.0
| 2,672 | 0.030689 |
from django.conf.urls import patterns, include, url
from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
# autodiscover() imports each installed app's admin module so that its
# ModelAdmin registrations take effect
admin.autodiscover()
urlpatterns = patterns('',
url(r'^admin/', include(admin.site.urls)),
    url(r'^dropzone-drag-drop/', include('dragdrop.urls', namespace="dragdrop", app_name="dragdrop")),
url(r'^index/$', 'dragdrop.views.GetUserImages'),
url(r'^$', 'signups.views.home', name='home'),
url(r'^register/$', 'drinker.views.DrinkerRegistration'),
url(r'^login/$', 'drinker.views.LoginRequest'),
url(r'^logout/$', 'drinker.views.LogOutRequest'),
url(r'^index/filter/$', 'filter.views.changeBright'),
# Uncomment the admin/doc line below to enable admin documentation:
# url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
# not sure if I need an actual url wrapper in this code.
# url(r'^admin/varnish/', include('varnishapp.urls')),
)
if settings.DEBUG:
    # serve files from STATIC_ROOT at STATIC_URL (DEBUG only)
urlpatterns += static(settings.STATIC_URL,
document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL,
document_root=settings.MEDIA_ROOT)
|
vdmann/cse-360-image-hosting-website
|
src/mvp_landing/urls.py
|
Python
|
mit
| 1,288 | 0.003882 |
from sqlalchemy import Column, MetaData, Table
from sqlalchemy import DateTime, Integer, String, Text
from sqlalchemy import ForeignKeyConstraint, UniqueConstraint
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm import backref, dynamic_loader, mapper, relation
from sqlalchemy.orm.collections import column_mapped_collection
class Term(object):
def __init__(self, name, slug, term_group=0):
self.name = name
self.slug = slug
self.term_group = term_group
def __repr__(self):
return '<Term(%r, %r, %r)>' % (self.name, self.slug, self.term_group)
class Taxonomy(object):
def __init__(self, term, description):
self.term = term
self.description = description
class PostTag(Taxonomy):
def __repr__(self):
return '<PostTag(%r, %r)>' % (self.term, self.description)
class Category(Taxonomy):
def __repr__(self):
return '<Category(%r, %r)>' % (self.term, self.description)
class LinkCategory(Taxonomy):
def __repr__(self):
return '<LinkCategory(%r, %r)>' % (self.term, self.description)
class PostMeta(object):
def __init__(self, meta_key, meta_value):
self.meta_key = meta_key
self.meta_value = meta_value
def __repr__(self):
return '<PostMeta(%r, %r)>' % (self.meta_key, self.meta_value)
class Post(object):
def __init__(self, post_title, post_type='post'):
self.post_title = post_title
self.post_type = post_type
meta = association_proxy('_metadict', 'meta_value', creator=PostMeta)
def __repr__(self):
return '<Post(%r, %r)>' % (self.post_title, self.post_type)
class Link(object):
def __init__(self, link_url, link_name):
self.link_url = link_url
self.link_name = link_name
def __repr__(self):
return '<Link(%r, %r)>' % (self.link_url, self.link_name)
class CommentMeta(object):
def __init__(self, meta_key, meta_value):
self.meta_key = meta_key
self.meta_value = meta_value
def __repr__(self):
return '<CommentMeta(%r, %r)>' % (self.meta_key, self.meta_value)
class Comment(object):
def __init__(self, comment_author, comment_content):
self.comment_author = comment_author
self.comment_content = comment_content
meta = association_proxy('_metadict', 'meta_value', creator=CommentMeta)
def __repr__(self):
return '<Comment(%r, %r)>' % (self.comment_author, self.comment_content)
class UserMeta(object):
def __init__(self, meta_key, meta_value):
self.meta_key = meta_key
self.meta_value = meta_value
def __repr__(self):
return '<UserMeta(%r, %r)>' % (self.meta_key, self.meta_value)
class User(object):
def __init__(self, user_login):
self.user_login = user_login
meta = association_proxy('_metadict', 'meta_value', creator=UserMeta)
def __repr__(self):
return '<User(%r)>' % self.user_login
class Option(object):
def __init__(self, option_name, option_value):
self.option_name = option_name
self.option_value = option_value
def __repr__(self):
return '<Option(%r, %r)>' % (self.option_name, self.option_value)
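
# Hedged illustration: once init() has wired up the mappers (including the
# `_metadict` collection backing the association proxies), per-object
# metadata reads like a plain dict. `session` and the key are illustrative:
#   post = session.query(Post).first()
#   post.meta['views'] = '10'   # creates a PostMeta row through the proxy
#   print(post.meta['views'])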
def init(prefix='wp'):
metadata = MetaData()
# tables
terms = Table('%s_terms' % prefix, metadata,
Column('term_id', Integer(), primary_key=True, nullable=False),
Column('name', String(length=55), primary_key=False, nullable=False),
Column('slug', String(length=200), primary_key=False, nullable=False),
Column('term_group', Integer(), primary_key=False, nullable=False),
UniqueConstraint('slug'),
)
term_taxonomy = Table('%s_term_taxonomy' % prefix, metadata,
Column('term_taxonomy_id', Integer(), primary_key=True, nullable=False),
Column('term_id', Integer(), primary_key=False, nullable=False),
Column('taxonomy', String(length=32), primary_key=False, nullable=False),
Column('description', Text(length=None), primary_key=False, nullable=False),
Column('parent', Integer(), primary_key=False, nullable=False),
Column('count', Integer(), primary_key=False, nullable=False),
UniqueConstraint('term_id', 'taxonomy'),
ForeignKeyConstraint(['term_id'], ['%s_terms.term_id' % prefix]),
ForeignKeyConstraint(['parent'], ['%s_term_taxonomy.term_taxonomy_id' % prefix]),
)
term_relationships = Table('%s_term_relationships' % prefix, metadata,
Column('object_id', Integer(), primary_key=True, nullable=False),
Column('term_taxonomy_id', Integer(), primary_key=True, nullable=False),
ForeignKeyConstraint(['term_taxonomy_id'], ['%s_term_taxonomy.term_taxonomy_id' % prefix]),
)
postmeta = Table('%s_postmeta' % prefix, metadata,
Column('meta_id', Integer(), primary_key=True, nullable=False),
Column('post_id', Integer(), primary_key=False, nullable=False),
Column('meta_key', String(length=255), primary_key=False),
Column('meta_value', Text(length=None), primary_key=False),
ForeignKeyConstraint(['post_id'], ['%s_posts.ID' % prefix]),
)
posts = Table('%s_posts' % prefix, metadata,
Column('ID', Integer(), primary_key=True, nullable=False),
Column('post_author', Integer(), primary_key=False, nullable=False),
Column('post_date', DateTime(timezone=False), primary_key=False, nullable=False),
Column('post_date_gmt', DateTime(timezone=False), primary_key=False, nullable=False),
Column('post_content', Text(length=None), primary_key=False, nullable=False),
Column('post_title', Text(length=None), primary_key=False, nullable=False),
Column('post_excerpt', Text(length=None), primary_key=False, nullable=False),
Column('post_status', String(length=10), primary_key=False, nullable=False),
Column('comment_status', String(length=15), primary_key=False, nullable=False),
Column('ping_status', String(length=6), primary_key=False, nullable=False),
Column('post_password', String(length=20), primary_key=False, nullable=False),
Column('post_name', String(length=200), primary_key=False, nullable=False),
Column('to_ping', Text(length=None), primary_key=False, nullable=False),
Column('pinged', Text(length=None), primary_key=False, nullable=False),
Column('post_modified', DateTime(timezone=False), primary_key=False, nullable=False),
Column('post_modified_gmt', DateTime(timezone=False), primary_key=False, nullable=False),
Column('post_content_filtered', Text(length=None), primary_key=False, nullable=False),
Column('post_parent', Integer(), primary_key=False, nullable=False),
Column('guid', String(length=255), primary_key=False, nullable=False),
Column('menu_order', Integer(), primary_key=False, nullable=False),
Column('post_type', String(length=20), primary_key=False, nullable=False),
Column('post_mime_type', String(length=100), primary_key=False, nullable=False),
Column('comment_count', Integer(), primary_key=False, nullable=False),
ForeignKeyConstraint(['post_author'], ['%s_users.ID' % prefix]),
ForeignKeyConstraint(['post_parent'], ['%s_posts.ID' % prefix]),
)
links = Table('%s_links' % prefix, metadata,
Column('link_id', Integer(), primary_key=True, nullable=False),
Column('link_url', String(length=255), primary_key=False, nullable=False),
Column('link_name', String(length=255), primary_key=False, nullable=False),
Column('link_image', String(length=255), primary_key=False, nullable=False),
Column('link_target', String(length=25), primary_key=False, nullable=False),
Column('link_category', Integer(), primary_key=False, nullable=False),
Column('link_description', String(length=255), primary_key=False, nullable=False),
Column('link_visible', String(length=1), primary_key=False, nullable=False),
Column('link_owner', Integer(), primary_key=False, nullable=False),
Column('link_rating', Integer(), primary_key=False, nullable=False),
Column('link_updated', DateTime(timezone=False), primary_key=False, nullable=False),
Column('link_rel', String(length=255), primary_key=False, nullable=False),
Column('link_notes', Text(length=None), primary_key=False, nullable=False),
Column('link_rss', String(length=255), primary_key=False, nullable=False),
ForeignKeyConstraint(['link_owner'], ['%s_users.ID' % prefix]),
)
commentmeta = Table('%s_commentmeta' % prefix, metadata,
Column('meta_id', Integer(), primary_key=True, nullable=False),
Column('comment_id', Integer(), primary_key=False, nullable=False),
Column('meta_key', String(length=255), primary_key=False),
Column('meta_value', Text(length=None), primary_key=False),
ForeignKeyConstraint(['comment_id'], ['%s_comments.comment_ID' % prefix]),
)
comments = Table('%s_comments' % prefix, metadata,
Column('comment_ID', Integer(), primary_key=True, nullable=False),
Column('comment_post_ID', Integer(), primary_key=False, nullable=False),
Column('comment_author', Text(length=None), primary_key=False, nullable=False),
Column('comment_author_email', String(length=100), primary_key=False, nullable=False),
Column('comment_author_url', String(length=200), primary_key=False, nullable=False),
Column('comment_author_IP', String(length=100), primary_key=False, nullable=False),
Column('comment_date', DateTime(timezone=False), primary_key=False, nullable=False),
Column('comment_date_gmt', DateTime(timezone=False), primary_key=False, nullable=False),
Column('comment_content', Text(length=None), primary_key=False, nullable=False),
Column('comment_karma', Integer(), primary_key=False, nullable=False),
Column('comment_approved', String(length=4), primary_key=False, nullable=False),
Column('comment_agent', String(length=255), primary_key=False, nullable=False),
Column('comment_type', String(length=20), primary_key=False, nullable=False),
Column('comment_parent', Integer(), primary_key=False, nullable=False),
Column('user_id', Integer(), primary_key=False, nullable=False),
ForeignKeyConstraint(['comment_post_ID'], ['%s_posts.ID' % prefix]),
ForeignKeyConstraint(['comment_parent'], ['%s_comments.comment_ID' % prefix]),
ForeignKeyConstraint(['user_id'], ['%s_users.ID' % prefix]),
)
usermeta = Table('%s_usermeta' % prefix, metadata,
Column('umeta_id', Integer(), primary_key=True, nullable=False),
Column('user_id', Integer(), primary_key=False, nullable=False),
Column('meta_key', String(length=255), primary_key=False),
Column('meta_value', Text(length=None), primary_key=False),
ForeignKeyConstraint(['user_id'], ['%s_users.ID' % prefix]),
)
users = Table('%s_users' % prefix, metadata,
Column('ID', Integer(), primary_key=True, nullable=False),
Column('user_login', String(length=60), primary_key=False, nullable=False),
Column('user_pass', String(length=64), primary_key=False, nullable=False),
Column('user_nicename', String(length=50), primary_key=False, nullable=False),
Column('user_email', String(length=100), primary_key=False, nullable=False),
Column('user_url', String(length=100), primary_key=False, nullable=False),
Column('user_registered', DateTime(timezone=False), primary_key=False, nullable=False),
Column('user_activation_key', String(length=60), primary_key=False, nullable=False),
Column('user_status', Integer(), primary_key=False, nullable=False),
Column('display_name', String(length=250), primary_key=False, nullable=False),
)
options = Table('%s_options' % prefix, metadata,
Column('option_id', Integer(), primary_key=True, nullable=False),
Column('option_name', String(length=64), primary_key=True, nullable=False),
Column('option_value', Text(length=None), primary_key=False, nullable=False),
Column('autoload', String(length=3), primary_key=False, nullable=False),
)
# mappings
mapper(Term, terms)
taxonomy_mapper = mapper(
Taxonomy,
term_taxonomy,
properties={'term': relation(Term)},
polymorphic_on=term_taxonomy.c.taxonomy,
)
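    # PostTag, Category and LinkCategory below share the term_taxonomy table
    # via single-table inheritance: polymorphic_on reads the 'taxonomy' column
    # and instantiates the subclass whose polymorphic_identity matches it.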
mapper(
PostTag,
properties={
'posts': dynamic_loader(
Post,
secondary=term_relationships,
primaryjoin=(term_taxonomy.c.term_taxonomy_id
== term_relationships.c.term_taxonomy_id),
secondaryjoin=(term_relationships.c.object_id
== posts.c.ID),
foreign_keys=[term_relationships.c.object_id,
term_relationships.c.term_taxonomy_id],
),
},
inherits=taxonomy_mapper,
polymorphic_identity='post_tag',
)
mapper(
Category,
properties={
'children': relation(
Category,
backref=backref('parent_category',
remote_side=[term_taxonomy.c.term_taxonomy_id]),
),
'posts': dynamic_loader(
Post,
secondary=term_relationships,
primaryjoin=(term_taxonomy.c.term_taxonomy_id
== term_relationships.c.term_taxonomy_id),
secondaryjoin=(term_relationships.c.object_id
== posts.c.ID),
foreign_keys=[term_relationships.c.object_id,
term_relationships.c.term_taxonomy_id],
),
},
inherits=taxonomy_mapper,
polymorphic_identity='category',
)
mapper(
LinkCategory,
properties={
'links': relation(
Link,
secondary=term_relationships,
primaryjoin=(term_taxonomy.c.term_taxonomy_id
== term_relationships.c.term_taxonomy_id),
secondaryjoin=(term_relationships.c.object_id
== links.c.link_id),
foreign_keys=[term_relationships.c.object_id,
term_relationships.c.term_taxonomy_id],
),
},
inherits=taxonomy_mapper,
polymorphic_identity='link_category',
)
mapper(PostMeta, postmeta)
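    # column_mapped_collection exposes the meta rows as a dict keyed by
    # meta_key; the same pattern is reused for comment and user metadata below.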
mapper(
Post,
posts,
properties={
'_metadict': relation(PostMeta,
collection_class=column_mapped_collection(postmeta.c.meta_key)),
'children': relation(
Post,
backref=backref('parent', remote_side=[posts.c.ID]),
),
'post_tags': relation(
PostTag,
secondary=term_relationships,
primaryjoin=(posts.c.ID
== term_relationships.c.object_id),
secondaryjoin=(term_relationships.c.term_taxonomy_id
== term_taxonomy.c.term_taxonomy_id),
foreign_keys=[term_relationships.c.object_id,
term_relationships.c.term_taxonomy_id],
),
'categories': relation(
Category,
secondary=term_relationships,
primaryjoin=(posts.c.ID
== term_relationships.c.object_id),
secondaryjoin=(term_relationships.c.term_taxonomy_id
== term_taxonomy.c.term_taxonomy_id),
foreign_keys=[term_relationships.c.object_id,
term_relationships.c.term_taxonomy_id],
),
'comments': dynamic_loader(Comment, backref='post'),
},
)
mapper(
Link,
links,
properties={
'categories': relation(
LinkCategory,
secondary=term_relationships,
primaryjoin=(links.c.link_id
== term_relationships.c.object_id),
secondaryjoin=(term_relationships.c.term_taxonomy_id
== term_taxonomy.c.term_taxonomy_id),
foreign_keys=[term_relationships.c.object_id,
term_relationships.c.term_taxonomy_id],
),
},
)
mapper(CommentMeta, commentmeta)
mapper(
Comment,
comments,
properties={
'_metadict': relation(CommentMeta,
collection_class=column_mapped_collection(commentmeta.c.meta_key)),
'children': relation(
Comment,
backref=backref('parent',
remote_side=[comments.c.comment_ID]),
),
},
)
mapper(UserMeta, usermeta)
mapper(
User,
users,
properties={
'metadata': relation(
UserMeta,
collection_class=column_mapped_collection(usermeta.c.meta_key),
),
'posts': dynamic_loader(Post, backref='author'),
'links': dynamic_loader(Link, backref='user'),
'comments': dynamic_loader(Comment, backref='user'),
},
)
mapper(Option, options)
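    # --- Usage sketch (not part of the original module; attribute names are
    # assumed from the mappings above, not confirmed by the source) ---
    #
    #   session = Session()   # a configured sessionmaker()
    #   post = session.query(Post).filter_by(post_name='hello-world').one()
    #   print post.author.display_name          # backref from User.posts
    #   for tag in post.post_tags:
    #       print tag.term.name
    #   approved = post.comments.filter(Comment.comment_approved == '1')
    #   print approved.count()                  # dynamic_loader returns a query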
|
dongjinleekr/wpdb
|
wpdb.py
|
Python
|
apache-2.0
| 16,291 | 0.008348 |
import re

import pymysql
from flask_restful import Resource
from flask import abort

ALLOWED_SHOW = ('processlist', 'databases', 'plugins', 'privileges')
# Database names are interpolated into SQL strings below, so restrict them
# to a conservative identifier pattern to prevent SQL injection.
SAFE_NAME = re.compile(r'^[0-9A-Za-z$_]+$')
class Mysql(Resource):
def __init__(self):
self.connection = pymysql.connect(user='root')
self.cursor = self.connection.cursor()
def _execute(self, sql):
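        # cursor.description is a sequence of 7-item tuples, one per result
        # column; item 0 is the column name, used to build per-row dicts.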
self.cursor.execute(sql)
desc_id = tuple(x[0] for x in self.cursor.description)
query_result = self.cursor.fetchall()
results = [dict(zip(desc_id, item)) for item in query_result]
return results
def get(self, cmd):
if cmd in ALLOWED_SHOW:
return self._execute('show ' + cmd)
else:
abort(404)
class MysqlDatabase(Mysql):
def get(self, dbname):
try:
self.connection.select_db(dbname)
except pymysql.InternalError as e:
abort(400, e.args)
return self._execute('show tables')
    def post(self, dbname):
        if not SAFE_NAME.match(dbname):
            abort(400, 'invalid database name')
        try:
            self.cursor.execute('create database ' + dbname)
        except pymysql.ProgrammingError as e:
            abort(400, e.args)

    def delete(self, dbname):
        if not SAFE_NAME.match(dbname):
            abort(400, 'invalid database name')
        try:
            self.cursor.execute('drop database if exists ' + dbname)
        except pymysql.ProgrammingError as e:
            abort(400, e.args)
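# --- Usage sketch (not part of the original module) ---
# How these resources might be wired into a Flask app; the URL layout
# below is an assumption, not taken from the original code.
#
#   from flask import Flask
#   from flask_restful import Api
#
#   app = Flask(__name__)
#   api = Api(app)
#   api.add_resource(Mysql, '/mysql/<string:cmd>')
#   api.add_resource(MysqlDatabase, '/mysql/db/<string:dbname>')
#   app.run()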
|
natict/roomservice
|
roomservice/mysql.py
|
Python
|
mit
| 1,306 | 0 |
# -*- coding: utf-8 -*-
#
# Copyright (c) 2014, Florian Wesch <fw@dividuum.de>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import hashlib
from uuid import UUID
from simplejson import dumps as json_dumps
print "If you experience a crash due to Crypto, please do the following!"
print "goto your lib/site-packages and rename \'crypto\' to \'Crypto\'"
from Crypto.PublicKey import RSA
from Crypto import Random
from Crypto.Cipher import AES
import requests
log = logging.getLogger(__name__)
# Encryption magic based on sadimusi/mc3p encryption implementation
# https://github.com/sadimusi/mc3p/blob/master/mc3p/encryption.py
def _pkcs1_unpad(bytes):
pos = bytes.find('\x00')
if pos > 0:
return bytes[pos+1:]
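# PKCS#1 v1.5 block layout for a 128-byte (1024-bit) modulus:
# 0x00 0x02 <nonzero random padding> 0x00 <payload>.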
def _pkcs1_pad(bytes):
assert len(bytes) < 117
padding = ""
while len(padding) < 125-len(bytes):
byte = Random.get_random_bytes(1)
if byte != '\x00':
padding += byte
return '\x00\x02%s\x00%s' % (padding, bytes)
def generate_key_pair():
"""Generates a 1024 bit RSA key pair"""
return RSA.generate(1024)
def encode_public_key(key):
"""Encodes a public RSA key in ASN.1 format as defined by x.509"""
return key.publickey().exportKey(format="DER")
def generate_random_bytes(length):
return Random.get_random_bytes(length)
def generate_challenge_token():
"""Generates 4 random bytes"""
return generate_random_bytes(4)
def generate_server_id():
"""Generates 20 random hex characters"""
return "".join("%02x" % ord(c) for c in generate_random_bytes(10))
def decrypt_with_private_key(data, private_key):
"""Decrypts the PKCS#1 padded shared secret using the private RSA key"""
return _pkcs1_unpad(private_key.decrypt(data))
def generated_cipher(shared_secret):
"""Creates a AES128 stream cipher using cfb8 mode"""
return AES.new(shared_secret, AES.MODE_CFB, shared_secret)
def decode_public_key(bytes):
"""Decodes a public RSA key in ASN.1 format as defined by x.509"""
return RSA.importKey(bytes)
def generate_shared_secret():
"""Generates a 128 bit secret key to be used in symmetric encryption"""
return generate_random_bytes(16)
def encrypt_with_public_key(data, public_key):
"""Encrypts the PKCS#1 padded shared secret using the public RSA key"""
return public_key.encrypt(_pkcs1_pad(data), 0)[0]
class SessionException(Exception):
pass
class Session(object):
YGGDRASIL_BASE = "https://authserver.mojang.com"
@classmethod
def make_client_token(cls):
return "".join("%02x" % ord(c) for c in generate_random_bytes(16))
@classmethod
def from_credentials(cls, username, password, client_token=None):
if client_token is None:
client_token = cls.make_client_token()
info = cls.do_request("/authenticate", {
'agent': {
'name': 'Minecraft',
'version': 1,
},
'username': username,
'password': password,
'clientToken': client_token,
})
return cls(
info['accessToken'],
info['selectedProfile']['name'],
info['selectedProfile']['id']
)
@classmethod
def from_access_token(cls, access_token):
info = cls.do_request("/refresh", {
'accessToken': access_token
})
return cls(
info['accessToken'],
info['selectedProfile']['name'],
info['selectedProfile']['id']
)
@classmethod
def from_authinfo(cls, access_token, player_ign, player_uuid):
return cls(
access_token,
player_ign,
player_uuid,
)
def __init__(self, access_token, player_ign, uuid):
self._access_token = access_token
self._player_ign = player_ign
self._uuid = UUID(uuid)
    def refresh(self):
        # Session's constructor also needs the player name and uuid, so
        # refresh by re-validating the access token against Yggdrasil.
        return Session.from_access_token(self._access_token)
@property
def player_ign(self):
return self._player_ign
@property
def uuid(self):
return str(self._uuid)
@property
def uuid_hex(self):
return self._uuid.hex
@property
def access_token(self):
return self._access_token
@property
def session_id(self):
return 'token:%s:%s' % (self._access_token, self.uuid_hex)
def __str__(self):
return "<Session: %s (%s) (accessToken: %s)>" % (
self._player_ign, self._uuid, self._access_token)
def validate(self):
r = requests.post(self.YGGDRASIL_BASE + "/validate", data=json_dumps({
'accessToken': self._access_token
}))
return r.status_code in (200, 204)
def invalidate(self):
r = requests.post(self.YGGDRASIL_BASE + "/invalidate", data=json_dumps({
'accessToken': self._access_token
}))
return r.status_code in (200, 204)
@classmethod
def do_request(cls, endpoint, data):
try:
log.debug("sending %s" % (data,))
r = requests.post(cls.YGGDRASIL_BASE + endpoint, data=json_dumps(data))
if not r.ok:
try:
error = r.json()['errorMessage']
                except (ValueError, KeyError):
error = "unknown error"
raise SessionException("%d: %s" % (r.status_code, error))
json = r.json()
log.debug("received %s" % (json,))
return json
        except requests.exceptions.RequestException as err:
            raise SessionException(str(err))
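# Minecraft formats the SHA1 digest like Java's signed BigInteger.toString(16):
# if the top bit is set, the two's-complement magnitude is printed with a '-'.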
def make_server_hash(server_id, shared_secret, key):
digest = hashlib.sha1()
digest.update(server_id)
digest.update(shared_secret)
digest.update(encode_public_key(key))
d = long(digest.hexdigest(), 16)
if d >> 39 * 4 & 0x8:
return "-%x" % ((-d) & (2 ** (40 * 4) - 1))
return "%x" % d
def join_server(session, server_hash):
r = requests.post('https://sessionserver.mojang.com/session/minecraft/join', data=json_dumps({
'accessToken': session.access_token,
'selectedProfile': session.uuid_hex,
'serverId': server_hash,
}), headers = {
        'Content-Type': 'application/json',  # note: no '; charset=utf-8' suffix
'User-Agent': None,
})
return r.status_code in (200, 204)
def check_player(player_ign, server_hash):
r = requests.get('https://sessionserver.mojang.com/session/minecraft/hasJoined?username=%s&serverId=%s' % (
player_ign, server_hash))
return None if r.status_code != 200 else r.json()
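# --- Server-side handshake sketch (not part of the original module) ---
# How the helpers above combine for the server half of the Minecraft
# encryption handshake; packet I/O and the variables encrypted_secret and
# player_ign are assumed, not taken from the original code.
#
#   key = generate_key_pair()
#   server_id = generate_server_id()
#   challenge = generate_challenge_token()
#   # ... send server_id, encode_public_key(key) and challenge to the client;
#   # the client replies with the shared secret and token, RSA-encrypted ...
#   shared_secret = decrypt_with_private_key(encrypted_secret, key)
#   cipher = generated_cipher(shared_secret)          # AES/CFB8 stream cipher
#   server_hash = make_server_hash(server_id, shared_secret, key)
#   profile = check_player(player_ign, server_hash)   # None unless authenticated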
|
phase/ApplePi
|
fastmc/auth.py
|
Python
|
mit
| 7,818 | 0.004861 |