| text (stringlengths 6–947k) | repo_name (stringlengths 5–100) | path (stringlengths 4–231) | language (stringclasses 1 value) | license (stringclasses 15 values) | size (int64 6–947k) | score (float64 0–0.34) |
|---|---|---|---|---|---|---|
# Written by Reid McIlroy-Young for Dr. John McLevey, University of Waterloo 2015
import unittest
import metaknowledge
import metaknowledge.WOS
import os
import filecmp
import networkx as nx
disableJournChecking = True
class TestRecordCollection(unittest.TestCase):
@classmethod
def setUpClass(cls):
metaknowledge.VERBOSE_MODE = False
cls.RCmain = metaknowledge.RecordCollection("metaknowledge/tests/testFile.isi")
cls.RCbadmain = metaknowledge.RecordCollection("metaknowledge/tests/badFile.isi")
def setUp(self):
self.RC = self.RCmain.copy()
self.RCbad = self.RCbadmain.copy()
def test_isCollection(self):
self.assertIsInstance(self.RC, metaknowledge.RecordCollection)
self.assertEqual(str(metaknowledge.RecordCollection()), "RecordCollection(Empty)")
self.assertTrue(self.RC == self.RC)
def test_fullRead(self):
RC = metaknowledge.RecordCollection("metaknowledge/tests/")
self.assertEqual(len(RC), 1032)
def test_caching(self):
RC = metaknowledge.RecordCollection("metaknowledge/tests/", cached = True, name = 'testingCache', extension = 'testFile.isi')
self.assertTrue(os.path.isfile("metaknowledge/tests/tests.[testFile.isi].mkRecordDirCache"))
accessTime = os.stat("metaknowledge/tests/testFile.isi").st_atime
RC2 = metaknowledge.RecordCollection("metaknowledge/tests/", cached = True, name = 'testingCache', extension = 'testFile.isi')
self.assertEqual(accessTime, os.stat("metaknowledge/tests/testFile.isi").st_atime)
RC.dropBadEntries()
RC2.dropBadEntries()
self.assertEqual(RC, RC2)
os.remove("metaknowledge/tests/tests.[testFile.isi].mkRecordDirCache")
def test_bad(self):
self.assertTrue(metaknowledge.RecordCollection('metaknowledge/tests/badFile.isi').bad)
with self.assertRaises(metaknowledge.mkExceptions.RCTypeError):
metaknowledge.RecordCollection('metaknowledge/tests/testFile.isi', extension = '.txt')
self.assertEqual(self.RCbad | self.RC, self.RCbad | self.RC )
self.assertEqual(len(self.RCbad | self.RCbad), 32)
self.assertFalse(self.RCbad == self.RC)
self.assertEqual('/Users/Reid/Documents/Work/NetworksLab/metaknowledge/metaknowledge/tests/badFile.isi', self.RCbad.errors.keys().__iter__().__next__())
def test_badEntries(self):
badRecs = self.RC.badEntries()
self.assertTrue(badRecs <= self.RC)
self.assertTrue(badRecs.pop().bad)
self.RC.dropBadEntries()
def test_dropJourn(self):
RCcopy = self.RC.copy()
self.RC.dropNonJournals()
self.assertEqual(len(self.RC), len(RCcopy) - 2)
self.RC.dropNonJournals(invert = True)
self.assertEqual(len(self.RC), 0)
RCcopy.dropNonJournals(ptVal = 'B')
self.assertEqual(len(RCcopy), 1)
def test_repr(self):
self.assertEqual(repr(self.RC), "<metaknowledge.RecordCollection object testFile>")
def test_hash(self):
self.assertNotEqual(hash(self.RC), hash(self.RCbad))
R = self.RC.pop()
RC = metaknowledge.RecordCollection([R])
self.assertEqual(hash(RC), hash(hash(R)))
def test_contains(self):
R = self.RC.peek()
self.assertTrue(R in self.RC)
R = self.RC.pop()
self.assertFalse(R in self.RC)
def test_conID(self):
R = self.RC.peek()
self.assertTrue(self.RC.containsID(R.id))
self.assertFalse(self.RC.containsID('234567654'))
def test_discard(self):
R = self.RC.peek()
l = len(self.RC)
self.RC.discard(R)
l2 = len(self.RC)
self.assertEqual(l, l2 + 1)
self.RC.discard(R)
self.assertEqual(l2, len(self.RC))
def test_pop(self):
R = self.RC.pop()
self.assertFalse(R in self.RC)
self.RC.clear()
with self.assertRaises(KeyError):
R = self.RC.pop()
def test_peek(self):
R = self.RC.peek()
self.assertTrue(R in self.RC)
self.RC.clear()
R = self.RC.peek()
self.assertTrue(R is None)
def test_clear(self):
R = self.RCbad.peek()
self.assertTrue(self.RCbad.bad)
self.RCbad.clear()
self.assertFalse(self.RCbad.bad)
self.assertFalse(R in self.RCbad)
def test_remove(self):
R = self.RC.peek()
l = len(self.RC)
self.RC.remove(R)
self.assertEqual(l, len(self.RC) + 1)
with self.assertRaises(KeyError):
self.RC.remove(R)
def test_equOps(self):
l = len(self.RC)
for i in range(10):
self.RCbad.pop()
lb = len(self.RCbad)
RC = metaknowledge.RecordCollection([])
RC.bad = True
RC |= self.RC
self.assertEqual(self.RC, RC)
RC -= self.RC
self.assertNotEqual(self.RC, RC)
RC ^= self.RC
self.assertEqual(self.RC, RC)
RC &= self.RCbad
self.assertNotEqual(self.RC, RC)
def test_newOps(self):
l = len(self.RC)
for i in range(10):
self.RCbad.pop()
lb = len(self.RCbad)
RC = metaknowledge.RecordCollection([])
RC.bad = True
RC3 = self.RC | RC
self.assertEqual(self.RC, RC3)
RC4 = RC3 - self.RC
self.assertNotEqual(self.RC, RC4)
RC5 = RC4 ^ self.RC
self.assertEqual(self.RC, RC5)
RC6 = RC5 & self.RCbad
self.assertNotEqual(self.RC, RC6)
def test_opErrors(self):
with self.assertRaises(TypeError):
self.RC <= 1
with self.assertRaises(TypeError):
self.RC >= 1
self.assertTrue(self.RC != 1)
with self.assertRaises(TypeError):
self.RC >= 1
with self.assertRaises(TypeError):
self.RC |= 1
with self.assertRaises(TypeError):
self.RC ^= 1
with self.assertRaises(TypeError):
self.RC &= 1
with self.assertRaises(TypeError):
self.RC -= 1
with self.assertRaises(TypeError):
self.RC | 1
with self.assertRaises(TypeError):
self.RC ^ 1
with self.assertRaises(TypeError):
self.RC & 1
with self.assertRaises(TypeError):
self.RC - 1
def test_addRec(self):
l = len(self.RC)
R = self.RC.pop()
self.assertEqual(len(self.RC), l - 1)
self.RC.add(R)
self.assertEqual(len(self.RC), l)
RC2 = metaknowledge.RecordCollection("metaknowledge/tests/TwoPaper.isi")
self.RC |= RC2
self.assertEqual(len(self.RC), l + 2)
with self.assertRaises(metaknowledge.CollectionTypeError):
self.RC.add(1)
def test_bytes(self):
with self.assertRaises(metaknowledge.BadRecord):
self.assertIsInstance(bytes(self.RC), bytes)
self.RC.dropBadEntries()
self.assertIsInstance(bytes(self.RC), bytes)
def test_WOS(self):
self.RC.dropBadEntries()
R = self.RC.peek()
l = len(self.RC)
        self.assertEqual(R, self.RC.getID(R.id))
self.assertEqual(len(self.RC), l)
self.RC.removeID(R.id)
self.assertEqual(len(self.RC), l - 1)
self.RC.getID(self.RC.peek().id)
self.assertEqual(len(self.RC), l - 1)
self.assertFalse(self.RC.getID(self.RC.pop().id))
self.RC.discardID('sdfghjkjhgfdfghj')
self.RC.discardID('WOS:A1979GV55600001')
with self.assertRaises(KeyError):
self.RC.removeID('ghjkljhgfdfghjmh')
def test_directoryRead(self):
self.assertEqual(len(metaknowledge.RecordCollection('.')), 0)
self.assertTrue(metaknowledge.RecordCollection('metaknowledge/tests/') >= self.RC)
self.assertTrue(metaknowledge.RecordCollection('metaknowledge/tests/', extension= '.txt') <= self.RC)
def test_contentType(self):
RC = metaknowledge.RecordCollection('metaknowledge/tests/')
self.assertEqual(RC._collectedTypes, {'MedlineRecord', 'WOSRecord', 'ProQuestRecord', 'ScopusRecord'})
self.assertEqual(self.RC._collectedTypes, {'WOSRecord'})
def test_write(self):
fileName = 'OnePaper2.isi'
RC = metaknowledge.RecordCollection('metaknowledge/tests/' + fileName)
RC.writeFile(fileName + '.tmp')
RC.writeFile()
self.assertTrue(filecmp.cmp('metaknowledge/tests/' + fileName, fileName + '.tmp'))
self.assertTrue(filecmp.cmp('metaknowledge/tests/' + fileName, RC.name + '.txt'))
os.remove(fileName + '.tmp')
os.remove(RC.name + '.txt')
def test_writeCSV(self):
filename = "test_writeCSV_temporaryFile.csv"
if os.path.isfile(filename):
os.remove(filename)
self.RC.writeCSV(filename, onlyTheseTags=['UT', 'PT', 'TI', 'AF','J9' ,'CR', 'pubMedID'], firstTags = ['CR', 'UT', 'J9', 'citations'], csvDelimiter = '∂', csvQuote='≠', listDelimiter= '«', longNames=True, numAuthors = False)
self.assertTrue(os.path.isfile(filename))
self.assertEqual(os.path.getsize(filename), 107396)
os.remove(filename)
self.RC.writeCSV(filename)
self.assertTrue(os.path.isfile(filename))
self.assertEqual(os.path.getsize(filename), 89272)
os.remove(filename)
self.RC.writeCSV(splitByTag = 'PY', onlyTheseTags = ['id', 'title', 'authorsFull', 'citations', 'keywords', 'DOI'])
yearsSt = set()
for R in self.RC:
yearsSt.add(str(R.get('PY', 2012)))
for year in yearsSt:
f = open("{}-testFile.csv".format(year))
self.assertEqual(f.readline(), '"id","TI","AF","CR","ID","DI","num-Authors","num-Male","num-Female","num-Unknown"\n')
self.assertGreater(len(f.readline()), 1)
f.close()
os.remove("{}-testFile.csv".format(year))
def test_writeBib(self):
filename = 'testFile.bib'
if os.path.isfile(filename):
os.remove(filename)
self.RC.dropBadEntries()
self.RC.writeBib(maxStringLength = 100)
self.assertEqual(os.path.getsize(filename), 100418)
os.remove(filename)
self.RC.writeBib(fname = filename, wosMode = True, reducedOutput = True, niceIDs = False)
self.assertEqual(os.path.getsize(filename), 78163)
os.remove(filename)
def test_rpys(self):
d = self.RC.rpys()
self.assertIn(17, d['count'])
d = self.RC.rpys(1990, 2000)
self.assertEqual(len(d['year']), 11)
for v in d.values():
for i in v:
self.assertIsInstance(i, int)
def test_CopyrightFinder(self):
l = self.RC.findProbableCopyright()
self.assertEqual(len(l), 7)
l = self.RC.findProbableCopyright()
self.assertTrue(' (C) 2002 Optical Society of America.' in l)
def test_NLP(self):
filename = 'NLP_test.csv'
full = self.RC.forNLP(filename, removeCopyright = True, extraColumns = ['ID'])
self.assertEqual(len(full), 7)
self.assertEqual(len(full['id']), 33)
self.assertEqual(full['keywords'][0], full['ID'][0])
self.assertTrue(' (C) 2002 Optical Society of America.' in full['copyright'])
self.assertEqual(os.path.getsize(filename), 14445)
os.remove(filename)
dropping = self.RC.forNLP(filename,removeNumbers = False, dropList = ['a', 'and', 'the', 'is'], stemmer = lambda x: x.title())
self.assertEqual(len(dropping), 5)
self.assertEqual(len(dropping['id']), 33)
self.assertEqual(os.path.getsize(filename), 12901)
os.remove(filename)
def test_forBurst(self):
filename = 'Burst_test.csv'
full = self.RC.forBurst('keywords', outputFile = filename)
self.assertEqual(len(full), 2)
self.assertEqual(len(full['year']), 75)
self.assertIn('guides', full['word'])
os.remove(filename)
def test_genderStats(self):
stats = self.RC.genderStats()
self.assertEqual(stats, {'Unknown': 65, 'Male': 6, 'Female': 1})
stats = self.RC.genderStats(asFractions = True)
self.assertEqual(stats['Male'], 0.08333333333333333)
def test_getCitations(self):
cites = self.RC.getCitations()
self.assertIn('LAUE MV, 1920, RELATIVITATSTHEORIE, V1, P227', cites['citeString'])
def test_makeDict(self):
d = self.RC.makeDict(onlyTheseTags = list(metaknowledge.WOS.tagsAndNameSet), longNames = True)
self.assertEqual(len(d), 65)
self.assertEqual(len(d['wosString']), len(self.RC))
if d['eISSN'][0] == '2155-3165':
self.assertEqual(d['eISSN'][1], None)
else:
self.assertEqual(d['eISSN'][0], None)
self.assertIsInstance(d['citations'], list)
d = self.RC.makeDict(longNames = False, raw = True, numAuthors = False)
self.assertEqual(len(d), 45)
self.assertEqual(len(d['UT']), len(self.RC))
self.assertIsInstance(d['CR'], list)
def test_coCite(self):
Gdefault = self.RC.networkCoCitation(fullInfo = True)
Gauths = self.RC.networkCoCitation(nodeType = "author", dropAnon = False, detailedCore = True)
GauthsNoExtra = self.RC.networkCoCitation(nodeType = "author", nodeInfo = False)
Gunwei = self.RC.networkCoCitation(nodeType = 'original', weighted = False)
if not disableJournChecking:
Gjour = self.RC.networkCoCitation(nodeType = "journal", dropNonJournals = True)
Gyear = self.RC.networkCoCitation(nodeType = "year", fullInfo = True, count = False)
Gcore = self.RC.networkCoCitation(detailedCore = ['AF','AU', 'DE', 'ID', 'PY'], coreOnly = True)
Gexplode = self.RC.networkCoCitation(expandedCore = True, keyWords = 'a')
Gcr = self.RC.networkCoCitation(addCR = True, coreOnly = True)
self.assertIsInstance(Gdefault, nx.classes.graph.Graph)
self.assertLessEqual(len(Gdefault.edges()), len(Gunwei.edges()))
self.assertLessEqual(len(Gdefault.nodes()), len(Gunwei.nodes()))
self.assertEqual(len(GauthsNoExtra.edges()), len(Gauths.edges()))
self.assertEqual(len(GauthsNoExtra.nodes()), len(Gauths.nodes()) - 1 )
self.assertTrue('weight' in list(Gdefault.edges(data = True))[0][2])
self.assertTrue('info' in list(Gdefault.nodes(data = True))[0][1])
self.assertTrue('fullCite' in list(Gdefault.nodes(data = True))[0][1])
self.assertFalse('weight' in list(Gunwei.edges(data = True))[0][2])
self.assertEqual(metaknowledge.graphStats(Gdefault, sentenceString = True), "The graph has 493 nodes, 13000 edges, 0 isolates, 22 self loops, a density of 0.107282 and a transitivity of 0.611431")
self.assertEqual(metaknowledge.graphStats(Gauths, sentenceString = True), "The graph has 321 nodes, 6699 edges, 1 isolates, 68 self loops, a density of 0.131094 and a transitivity of 0.598575")
self.assertEqual(metaknowledge.graphStats(Gyear, sentenceString = True), "The graph has 91 nodes, 1898 edges, 0 isolates, 55 self loops, a density of 0.47033 and a transitivity of 0.702332")
if not disableJournChecking:
self.assertEqual(len(Gjour.nodes()), 85)
self.assertEqual(len(Gjour.edges()), 1195)
self.assertTrue('info' in Gjour.nodes(data=True)[0][1])
self.assertTrue('info' in list(Gyear.nodes(data=True))[0][1])
self.assertTrue('fullCite' in list(Gyear.nodes(data = True))[0][1])
self.assertEqual(Gcore.node['Costadebeauregard O, 1975, CAN J PHYS']['info'], 'COSTADEBEAUREGARD O, COSTADEBEAUREGARD O')
self.assertEqual(metaknowledge.graphStats(Gexplode, sentenceString = True), "The graph has 73 nodes, 366 edges, 0 isolates, 5 self loops, a density of 0.140411 and a transitivity of 0.523179")
self.assertIn('AUDOIN C, 1976, J PHYS E SCI INSTRUM', Gcr.node['Huard S, 1979, CAN J PHYS']['citations'])
def test_coAuth(self):
Gdefault = self.RC.networkCoAuthor()
if not disableJournChecking:
Gdetailed = self.RC.networkCoAuthor(count = False, weighted = False, detailedInfo = True, dropNonJournals = True)
self.assertIsInstance(Gdefault, nx.classes.graph.Graph)
self.assertEqual(len(Gdefault.nodes()), 45)
self.assertEqual(len(Gdefault.edges()), 46)
if not disableJournChecking:
self.assertEqual(metaknowledge.graphStats(Gdetailed, sentenceString = True), 'The graph has 45 nodes, 46 edges, 9 isolates, 0 self loops, a density of 0.0464646 and a transitivity of 0.822581')
def test_cite(self):
Gdefault = self.RC.networkCitation(fullInfo = True, count = False, dropAnon = True)
Ganon = self.RC.networkCitation(dropAnon = False)
Gauths = self.RC.networkCitation(nodeType = "author", detailedCore = True, dropAnon = True)
GauthsNoExtra = self.RC.networkCitation(nodeType = "author", nodeInfo = False, dropAnon = True)
Gunwei = self.RC.networkCitation(nodeType = 'original', weighted = False)
if not disableJournChecking:
Gjour = self.RC.networkCitation(nodeType = "author", dropNonJournals = True, nodeInfo = True, count = False)
Gyear = self.RC.networkCitation(nodeType = "year", nodeInfo = True)
Gcore = self.RC.networkCitation(detailedCore = True, coreOnly = False)
Gexplode = self.RC.networkCitation(expandedCore = True, keyWords = ['b', 'c'])
self.assertIsInstance(Gdefault, nx.classes.digraph.DiGraph)
self.assertLessEqual(len(Gdefault.edges()), len(Gunwei.edges()))
self.assertLessEqual(len(Gdefault.nodes()), len(Gunwei.nodes()))
self.assertEqual(len(GauthsNoExtra.edges()), len(Gauths.edges()))
self.assertEqual(len(GauthsNoExtra.nodes()), len(Gauths.nodes()))
self.assertTrue('weight' in list(Gdefault.edges(data = True))[0][2])
self.assertTrue('info' in list(Gdefault.nodes(data = True))[0][1])
self.assertFalse('weight' in list(Gunwei.edges(data = True))[0][2])
self.assertEqual(metaknowledge.graphStats(Gdefault, sentenceString = True), "The graph has 510 nodes, 816 edges, 1 isolates, 0 self loops, a density of 0.00314342 and a transitivity of 0.00600437")
self.assertEqual(metaknowledge.graphStats(Ganon, sentenceString = True), "The graph has 511 nodes, 817 edges, 0 isolates, 0 self loops, a density of 0.00313495 and a transitivity of 0.00600437")
self.assertEqual(metaknowledge.graphStats(Gauths, sentenceString = True), "The graph has 324 nodes, 568 edges, 1 isolates, 15 self loops, a density of 0.00542751 and a transitivity of 0.0210315")
if not disableJournChecking:
self.assertEqual(len(Gjour.edges()), 432)
self.assertTrue('info' in list(Gjour.nodes(data=True))[0][1])
self.assertTrue('info' in list(Gyear.nodes(data=True))[0][1])
self.assertEqual(Gcore.node['Gilles H, 2002, OPT LETT']['info'], 'WOS:000177484300017, Gilles H, Simple technique for measuring the Goos-Hanchen effect with polarization modulation and a position-sensitive detector, OPTICS LETTERS, 27, 1421')
self.assertEqual(metaknowledge.graphStats(Gexplode, sentenceString = True), "The graph has 19 nodes, 29 edges, 0 isolates, 3 self loops, a density of 0.0847953 and a transitivity of 0.132075")
def test_networkBibCoupling(self):
G = self.RC.networkBibCoupling()
self.assertEqual(metaknowledge.graphStats(G, sentenceString = True), 'The graph has 32 nodes, 304 edges, 1 isolates, 0 self loops, a density of 0.612903 and a transitivity of 0.836511')
def test_coOccurnce(self):
self.assertEqual(sum(self.RC.cooccurrenceCounts('TI', *tuple(self.RC.tags()))['Longitudinal and transverse effects of nonspecular reflection'].values()), 104)
def test_nLevel(self):
G = self.RC.networkMultiLevel(*tuple(self.RC.tags()))
self.assertEqual(metaknowledge.graphStats(G, sentenceString = True), 'The graph has 1187 nodes, 58731 edges, 0 isolates, 59 self loops, a density of 0.0834803 and a transitivity of 0.493814')
def test_oneMode(self):
Gcr = self.RC.networkOneMode('CR')
Gcite = self.RC.networkOneMode('citations', nodeCount = False, edgeWeight = False)
GcoCit = self.RC.networkCoCitation()
Gtit = self.RC.networkOneMode('title')
stemFunc = lambda x: x[:-1]
Gstem = self.RC.networkOneMode('keywords', stemmer = stemFunc)
self.assertEqual(len(Gcite.edges()), len(Gcr.edges()))
self.assertEqual(len(Gcite.nodes()), len(Gcr.nodes()))
self.assertAlmostEqual(len(Gcite.nodes()), len(GcoCit.nodes()), delta = 50)
self.assertEqual(len(self.RC.networkOneMode('D2').nodes()), 0)
self.assertEqual(len(Gtit.nodes()), 31)
self.assertEqual(len(Gtit.edges()), 0)
self.assertEqual(len(self.RC.networkOneMode('email').edges()), 3)
self.assertEqual(len(self.RC.networkOneMode('UT').nodes()), len(self.RC) - 1)
self.assertEqual(metaknowledge.graphStats(Gstem, sentenceString = True), 'The graph has 41 nodes, 142 edges, 2 isolates, 0 self loops, a density of 0.173171 and a transitivity of 0.854015')
self.assertIsInstance(list(Gstem.nodes())[0], str)
with self.assertRaises(TypeError):
G = self.RC.networkOneMode(b'Not a Tag')
del G
def test_twoMode(self):
self.RC.dropBadEntries()
Gutti = self.RC.networkTwoMode('UT', 'title', directed = True, recordType = False)
Gafwc = self.RC.networkTwoMode('AF', 'WC', nodeCount = False, edgeWeight = False)
Gd2em = self.RC.networkTwoMode('D2', 'email')
Gemd2 = self.RC.networkTwoMode('email', 'D2')
Gstemm = self.RC.networkTwoMode('title', 'title', stemmerTag1 = lambda x: x[:-1], stemmerTag2 = lambda x: x + 's')
self.assertIsInstance(Gutti, nx.classes.digraph.DiGraph)
self.assertIsInstance(Gafwc, nx.classes.graph.Graph)
self.assertEqual(list(Gutti.edges('WOS:A1979GV55600001'))[0][1][:31], "EXPERIMENTS IN PHENOMENOLOGICAL")
self.assertEqual(len(Gutti.nodes()), 2 * len(self.RC) - 1)
with self.assertRaises(metaknowledge.TagError):
G = self.RC.networkTwoMode('TI', b'not a tag')
del G
with self.assertRaises(metaknowledge.TagError):
G = self.RC.networkTwoMode(b'Not a Tag', 'TI')
del G
self.assertTrue(nx.is_isomorphic(Gd2em, Gemd2))
self.assertEqual(metaknowledge.graphStats(Gstemm, sentenceString = True), 'The graph has 62 nodes, 31 edges, 0 isolates, 0 self loops, a density of 0.0163934 and a transitivity of 0')
self.assertTrue('Optical properties of nanostructured thin filmss' in Gstemm)
def test_nMode(self):
G = self.RC.networkMultiMode(metaknowledge.WOS.tagToFullDict.keys())
Gstem = self.RC.networkMultiMode(metaknowledge.WOS.tagToFullDict.keys(), stemmer = lambda x : x[0])
self.assertEqual(metaknowledge.graphStats(G, sentenceString = True), 'The graph has 1186 nodes, 38564 edges, 0 isolates, 56 self loops, a density of 0.0549192 and a transitivity of 0.295384')
self.assertEqual(metaknowledge.graphStats(Gstem, sentenceString = True), 'The graph has 50 nodes, 997 edges, 0 isolates, 35 self loops, a density of 0.828571 and a transitivity of 0.855834')
def test_localCiteStats(self):
d = self.RC.localCiteStats()
dPan = self.RC.localCiteStats(pandasFriendly = True)
dYear = self.RC.localCiteStats(keyType = 'year')
self.assertEqual(d[metaknowledge.Citation("Azzam R. M. A., 1977, ELLIPSOMETRY POLARIZ")], 1)
self.assertEqual(len(dPan['Citations']),len(d))
self.assertTrue(dPan['Citations'][0] in d)
self.assertEqual(dYear[2009], 2)
def test_localCitesOf(self):
C = metaknowledge.Citation("COSTADEB.O, 1974, LETT NUOVO CIMENTO, V10, P852")
self.assertEqual("WOS:A1976CW02200002", self.RC.localCitesOf(C).peek().id)
self.assertEqual(self.RC.localCitesOf(self.RC.peek().id),
self.RC.localCitesOf(self.RC.peek().createCitation()))
def test_citeFilter(self):
RCmin = self.RC.citeFilter('', reverse = True)
RCmax = self.RC.citeFilter('')
RCanon = self.RC.citeFilter('', 'anonymous')
RC1970 = self.RC.citeFilter(1970, 'year')
RCno1970 = self.RC.citeFilter(1970, 'year', reverse = True)
RCMELLER = self.RC.citeFilter('meller', 'author')
self.assertEqual(len(RCmin), 0)
self.assertEqual(len(RCmax), len(self.RC))
self.assertEqual(len(RCanon), 1)
self.assertEqual(len(RC1970), 15)
self.assertEqual(len(RC1970) + len(RCno1970), len(self.RC))
self.assertEqual(len(RCMELLER), 1)
RCnocite = metaknowledge.RecordCollection('metaknowledge/tests/OnePaperNoCites.isi')
self.assertEqual(len(RCnocite.citeFilter('')), 0)
def test_yearDiff(self):
Gdefault = self.RC.networkCitation()
Gfull = self.RC.networkCitation(nodeType="full")
Goriginal = self.RC.networkCitation(nodeType="original")
        # Is yearDiff included as an attribute?
self.assertTrue('yearDiff' in list(Gdefault.edges(data=True))[0][2])
self.assertTrue('yearDiff' in list(Gfull.edges(data=True))[0][2])
self.assertTrue('yearDiff' in list(Goriginal.edges(data=True))[0][2])
# Is yearDiff being calculated correctly?
self.assertEqual(Gdefault["Costadebo, 1974, CR ACAD SCI A MATH"]["Gordon Jp, 1973, PHYS REV A"]["yearDiff"], 1)
self.assertEqual(Gfull["Costadebo, 1974, CR ACAD SCI A MATH"]["Gordon Jp, 1973, PHYS REV A"]["yearDiff"], 1)
self.assertEqual(Goriginal["COWAN JJ, 1977, J OPT SOC AM, V67, P1307, DOI 10.1364/JOSA.67.001307"]["GOOS F, 1947, ANN PHYS-BERLIN, V1, P333"]['yearDiff'], 30)
def test_glimpse(self):
        # These tests depend on the terminal size
gBasic = self.RC.glimpse()
gCompact = self.RC.glimpse(compact = True)
gEmpty = self.RC.glimpse('AF', 'qwertyhujk')
self.assertIn('RecordCollection glimpse made at:', gBasic)
self.assertIn('Top Authors\n', gBasic)
self.assertIn('1 Gilles, H\n', gBasic)
self.assertIn('|1 JOURNAL OF THE OPTICA', gCompact)
self.assertIn('|Columns are ranked by num. of occurrences and are independent of one another++', gCompact)
self.assertIn('qwertyhujk', gEmpty)
| networks-lab/metaknowledge | metaknowledge/tests/test_recordcollection.py | Python | gpl-2.0 | 26,484 | 0.012538 |
# Copyright (c) 2009 Bea Lam. All rights reserved.
#
# This file is part of LightBlue.
#
# LightBlue is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# LightBlue is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with LightBlue. If not, see <http://www.gnu.org/licenses/>.
# Mac OS X bluetooth sockets implementation.
#
# To-do:
# - allow socket options
#
# if doing security AUTH, should set bool arg when calling
# openConnection_withPageTimeout_authenticationRequired_() in connect()
import time
import socket as _socket
import threading
import os
import errno
import types
import objc
import Foundation
from . import _IOBluetooth
from . import _lightbluecommon
from . import _macutil
from ._LightAquaBlue import BBServiceAdvertiser, BBBluetoothChannelDelegate
#import sets # python 2.3
try:
SHUT_RD, SHUT_WR, SHUT_RDWR = \
_socket.SHUT_RD, _socket.SHUT_WR, _socket.SHUT_RDWR
except AttributeError:
# python 2.3
SHUT_RD, SHUT_WR, SHUT_RDWR = (0, 1, 2)
def _getavailableport(proto):
# Just advertise a service and see what channel it was assigned, then
# stop advertising the service and return the channel.
# It's a hacky way of doing it, but IOBluetooth doesn't seem to provide
# functionality for just getting an available channel.
if proto == _lightbluecommon.RFCOMM:
try:
result, channelID, servicerecordhandle = BBServiceAdvertiser.addRFCOMMServiceDictionary_withName_UUID_channelID_serviceRecordHandle_(BBServiceAdvertiser.serialPortProfileDictionary(), "DummyService", None, None, None)
except:
result, channelID, servicerecordhandle = BBServiceAdvertiser.addRFCOMMServiceDictionary_withName_UUID_channelID_serviceRecordHandle_(BBServiceAdvertiser.serialPortProfileDictionary(), "DummyService", None)
if result != _macutil.kIOReturnSuccess:
raise _lightbluecommon.BluetoothError(result, \
"Could not retrieve an available service channel")
result = BBServiceAdvertiser.removeService_(servicerecordhandle)
if result != _macutil.kIOReturnSuccess:
raise _lightbluecommon.BluetoothError(result, \
"Could not retrieve an available service channel")
return channelID
else:
raise NotImplementedError("L2CAP server sockets not currently supported")
def _checkaddrpair(address, checkbtaddr=True):
# will want checkbtaddr=False if the address might be empty string
# (for binding to a server address)
if not isinstance(address, tuple):
raise TypeError("address must be (address, port) tuple, was %s" % \
type(address))
if len(address) != 2:
raise TypeError("address tuple must have 2 items (has %d)" % \
len(address))
if not isinstance(address[0], str):
raise TypeError("address host value must be string, was %s" % \
type(address[0]))
if checkbtaddr:
if not _lightbluecommon._isbtaddr(address[0]):
raise TypeError("address '%s' is not a bluetooth address" % \
address[0])
if not isinstance(address[1], int):
raise TypeError("address port value must be int, was %s" % \
type(address[1]))
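# For illustration (the address here is made up): a well-formed pair such as
# ("00:12:2c:45:8a:7b", 5) passes the checks above, while a non-tuple value,
# a tuple of the wrong length, or a non-string host raises TypeError.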
# from std lib socket module
class _closedsocket(object):
__slots__ = []
def _dummy(*args):
raise _socket.error(errno.EBADF, 'Bad file descriptor')
send = recv = sendto = recvfrom = __getattr__ = _dummy
# Thanks to Simon Wittber for string queue recipe
# http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/426060
# (this is a modified version)
class _StringQueue(object):
def __init__(self):
self.l_buffer = []
self.s_buffer = ""
self.lock = threading.RLock()
self.bufempty = True
def empty(self):
return self.bufempty
def write(self, data):
# no type check, and assumes data is not empty!
#append data to list, no need to "".join just yet.
self.lock.acquire()
try:
self.l_buffer.append(data)
self.bufempty = False
finally:
self.lock.release()
def _build_str(self):
#build a new string out of list
new_string = "".join([str(x.tobytes()) for x in self.l_buffer])
#join string buffer and new string
self.s_buffer = "".join((self.s_buffer, new_string))
#clear list
self.l_buffer = []
def __len__(self):
#calculate length without needing to _build_str
return sum([len(i) for i in self.l_buffer]) + len(self.s_buffer)
def read(self, count):
self.lock.acquire()
try:
#if string doesn't have enough chars to satisfy caller
if count > len(self.s_buffer):
self._build_str()
#get data requested by caller
result = self.s_buffer[:count]
#remove requested data from string buffer
self.s_buffer = self.s_buffer[len(result):]
self.bufempty = (len(self.s_buffer) == 0)
finally:
self.lock.release()
return result
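# Usage sketch (comments only, not part of the original module): the
# _BluetoothSocket class below feeds incoming channel data into a
# _StringQueue and drains it from recv():
#   q = _StringQueue()
#   q.write(chunk)    # cheap append to l_buffer; no joining on the write path
#   n = len(q)        # total buffered length, summed without joining
#   data = q.read(n)  # joins the buffered chunks only when a read needs them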
#class _SocketWrapper(_socket._socketobject):
class _SocketWrapper(object):
"""
A Bluetooth socket object has the same interface as a socket object from
the Python standard library <socket> module. It also uses the same
exceptions, raising socket.error for general errors and socket.timeout for
timeout errors.
Note that L2CAP sockets are not available on Python For Series 60, and
only L2CAP client sockets are supported on Mac OS X and Linux.
A simple client socket example:
>>> from lightblue import *
>>> s = socket() # or socket(L2CAP) to create an L2CAP socket
>>> s.connect(("00:12:2c:45:8a:7b", 5))
>>> s.send("hello")
5
>>> s.close()
A simple server socket example:
>>> from lightblue import *
>>> s = socket()
>>> s.bind(("", 0))
>>> s.listen(1)
>>> advertise("My RFCOMM Service", s, RFCOMM)
>>> conn, addr = s.accept()
>>> print "Connected by", addr
Connected by ('00:0D:93:19:C8:68', 5)
>>> conn.recv(1024)
"hello"
>>> conn.close()
>>> s.close()
"""
def __init__(self, sock):
self._sock = sock
def accept(self):
sock, addr = self._sock.accept()
return _SocketWrapper(sock), addr
accept.__doc__ = _lightbluecommon._socketdocs["accept"]
def dup(self):
return _SocketWrapper(self._sock)
dup.__doc__ = _lightbluecommon._socketdocs["dup"]
def close(self):
self._sock.close()
self._sock = _closedsocket()
self.send = self.recv = self.sendto = self.recvfrom = self._sock._dummy
try:
import lightblue
lightblue.stopadvertise(self)
except:
pass
close.__doc__ = _lightbluecommon._socketdocs["close"]
def makefile(self, mode='r', bufsize=-1):
# use std lib socket's _fileobject
return _socket._fileobject(self._sock, mode, bufsize)
makefile.__doc__ = _lightbluecommon._socketdocs["makefile"]
# delegate all other method calls to internal sock obj
def __getattr__(self, attr):
return getattr(self._sock, attr)
# internal _sock object for RFCOMM and L2CAP sockets
class _BluetoothSocket(object):
_boundports = { _lightbluecommon.L2CAP: set(),
_lightbluecommon.RFCOMM: set() }
# conn is the associated _RFCOMMConnection or _L2CAPConnection
def __init__(self, conn):
self.__conn = conn
if conn is not None and conn.channel is not None:
self.__remotedevice = conn.channel.getDevice()
else:
self.__remotedevice = None
# timeout=None cos sockets default to blocking mode
self.__timeout = None
#self.__isserverspawned = (conn.channel is not None)
self.__port = 0
self.__eventlistener = None
self.__closed = False
self.__maxqueuedconns = 0
self.__incomingdata = _StringQueue()
self.__queuedchannels = []
self.__queuedchannels_lock = threading.RLock()
# whether send or recv has been shut down
# set initial value to be other than SHUT_WR/SHUT_RD/SHUT_RDWR
self.__commstate = -1
def accept(self):
if not self.__isbound():
raise _socket.error('Socket not bound')
if not self.__islistening():
raise _socket.error('Socket must be listening first')
def clientconnected():
return len(self.__queuedchannels) > 0
if not clientconnected():
self.__waituntil(clientconnected, "accept timed out")
self.__queuedchannels_lock.acquire()
try:
newchannel = self.__queuedchannels.pop(0)
finally:
self.__queuedchannels_lock.release()
# return (new-socket, addr) pair using the new channel
newconn = _SOCKET_CLASSES[self.__conn.proto](newchannel)
sock = _SocketWrapper(_BluetoothSocket(newconn))
sock.__startevents()
return (sock, sock.getpeername())
def bind(self, address):
_checkaddrpair(address, False)
if self.__isbound():
raise _socket.error('Socket is already bound')
elif self.__isconnected():
raise _socket.error("Socket is already connected, cannot be bound")
if self.__conn.proto == _lightbluecommon.L2CAP:
raise NotImplementedError("L2CAP server sockets not currently supported")
if address[1] != 0:
raise _socket.error("must bind to port 0, other ports not supported on Mac OS X")
address = (address[0], _getavailableport(self.__conn.proto))
# address must be either empty string or local device address
if address[0] != "":
try:
import lightblue
localaddr = lightblue.gethostaddr()
except:
localaddr = None
if localaddr is None or address[0] != localaddr:
raise _socket.error(
errno.EADDRNOTAVAIL, os.strerror(errno.EADDRNOTAVAIL))
# is this port already in use?
if address[1] in self._boundports[self.__conn.proto]:
raise _socket.error(errno.EADDRINUSE, os.strerror(errno.EADDRINUSE))
self._boundports[self.__conn.proto].add(address[1])
self.__port = address[1]
def close(self):
wasconnected = self.__isconnected() or self.__isbound()
self.__stopevents()
if self.__conn is not None:
if self.__isbound():
self._boundports[self.__conn.proto].discard(self.__port)
else:
if self.__conn.channel is not None:
self.__conn.channel.setDelegate_(None)
self.__conn.channel.closeChannel()
# disconnect the baseband connection.
# This will fail if other RFCOMM channels to the remote device are
# still open (which is what we want, cos we don't know if another
# process is talking to the device)
if self.__remotedevice is not None:
self.__remotedevice.closeConnection() # returns err code
# if you don't run the event loop a little here, it's likely you won't
# be able to reconnect to the same remote device later
if wasconnected:
_macutil.waitfor(0.5)
def connect(self, address):
if self.__isbound():
raise _socket.error("Can't connect, socket has been bound")
elif self.__isconnected():
raise _socket.error("Socket is already connected")
_checkaddrpair(address)
# open a connection to device
self.__remotedevice = _IOBluetooth.IOBluetoothDevice.withAddressString_(address[0])
if not self.__remotedevice.isConnected():
if self.__timeout is None:
result = self.__remotedevice.openConnection()
else:
result = self.__remotedevice.openConnection_withPageTimeout_authenticationRequired_(
None, self.__timeout*1000, False)
if result != _macutil.kIOReturnSuccess:
if result == _macutil.kBluetoothHCIErrorPageTimeout:
if self.__timeout == 0:
raise _socket.error(errno.EAGAIN,
"Resource temporarily unavailable")
else:
raise _socket.timeout("connect timed out")
else:
raise _socket.error(result,
"Cannot connect to %s, can't open connection." \
% str(address[0]))
# open RFCOMM or L2CAP channel
self.__eventlistener = self.__createlistener()
result = self.__conn.connect(self.__remotedevice, address[1],
self.__eventlistener) # pass listener as cocoa delegate
if result != _macutil.kIOReturnSuccess:
self.__remotedevice.closeConnection()
self.__stopevents()
self.__eventlistener = None
raise _socket.error(result,
"Cannot connect to %d on %s" % (address[1], address[0]))
return
# if you don't run the event loop a little here, it's likely you won't
# be able to reconnect to the same remote device later
_macutil.waitfor(0.5)
def connect_ex(self, address):
try:
self.connect(address)
except _socket.error as err:
if len(err.args) > 1:
return err.args[0]
else:
# there's no error code, just a message, so this error wasn't
# from a system call -- so re-raise the exception
raise _socket.error(err)
return 0
def getpeername(self):
self.__checkconnected()
addr = _macutil.formatdevaddr(self.__remotedevice.getAddressString())
return (addr, self._getport())
def getsockname(self):
if self.__isbound() or self.__isconnected():
import lightblue
return (lightblue.gethostaddr(), self._getport())
else:
return ("00:00:00:00:00:00", 0)
def listen(self, backlog):
if self.__islistening():
return
if not self.__isbound():
raise _socket.error('Socket not bound')
if not isinstance(backlog, int):
raise TypeError("backlog must be int, was %s" % type(backlog))
if backlog < 0:
raise ValueError("backlog cannot be negative, was %d" % backlog)
self.__maxqueuedconns = backlog
# start listening for client connections
self.__startevents()
def _isclosed(self):
# isOpen() check doesn't work for incoming (server-spawned) channels
if (self.__conn.proto == _lightbluecommon.RFCOMM and
self.__conn.channel is not None and
not self.__conn.channel.isIncoming()):
return not self.__conn.channel.isOpen()
return self.__closed
def recv(self, bufsize, flags=0):
if self.__commstate in (SHUT_RD, SHUT_RDWR):
return ""
self.__checkconnected()
if not isinstance(bufsize, int):
raise TypeError("buffer size must be int, was %s" % type(bufsize))
if bufsize < 0:
raise ValueError("negative buffersize in recv") # as for tcp
if bufsize == 0:
return ""
# need this to ensure the _isclosed() check is up-to-date
_macutil.looponce()
if self._isclosed():
if len(self.__incomingdata) == 0:
raise _socket.error(errno.ECONNRESET,
os.strerror(errno.ECONNRESET))
return self.__incomingdata.read(bufsize)
# if incoming data buffer is empty, wait until data is available or
# channel is closed
def gotdata():
return not self.__incomingdata.empty() or self._isclosed()
if not gotdata():
self.__waituntil(gotdata, "recv timed out")
# other side closed connection while waiting?
if self._isclosed() and len(self.__incomingdata) == 0:
raise _socket.error(errno.ECONNRESET, os.strerror(errno.ECONNRESET))
return self.__incomingdata.read(bufsize)
# recvfrom() is really for datagram sockets not stream sockets but it
# can be implemented anyway.
def recvfrom(self, bufsize, flags=0):
# stream sockets return None, instead of address
return (self.recv(bufsize, flags), None)
def sendall(self, data, flags=0):
sentbytescount = self.send(data, flags)
while sentbytescount < len(data):
sentbytescount += self.send(data[sentbytescount:], flags)
return None
def send(self, data, flags=0):
if not isinstance(data, str):
raise TypeError("data must be string, was %s" % type(data))
if self.__commstate in (SHUT_WR, SHUT_RDWR):
raise _socket.error(errno.EPIPE, os.strerror(errno.EPIPE))
self.__checkconnected()
# do setup for if sock is in non-blocking mode
if self.__timeout is not None:
if self.__timeout == 0:
# in non-blocking mode
# isTransmissionPaused() is not available for L2CAP sockets,
# what to do for that?
if self.__conn.proto == _lightbluecommon.RFCOMM and \
self.__conn.channel.isTransmissionPaused():
# sending data now will block
raise _socket.error(errno.EAGAIN,
"Resource temporarily unavailable")
elif self.__timeout > 0:
# non-blocking with timeout
starttime = time.time()
# loop until all data is sent
writebuf = data
bytesleft = len(data)
mtu = self.__conn.getwritemtu()
while bytesleft > 0:
if self.__timeout is not None and self.__timeout > 0:
if time.time() - starttime > self.__timeout:
raise _socket.timeout("send timed out")
# write the data to the channel (only the allowed amount)
# the method/selector is the same for L2CAP and RFCOMM channels
if bytesleft > mtu:
sendbytecount = mtu
else:
sendbytecount = bytesleft
#result = self.__conn.channel.writeSync_length_(
# writebuf[:sendbytecount], sendbytecount)
result = self.__conn.write(writebuf[:sendbytecount])
# normal tcp sockets don't seem to actually error on the first
# send() after a connection has broken; if you try a second time,
# then you get the (32, 'Broken pipe') socket.error
if result != _macutil.kIOReturnSuccess:
raise _socket.error(result, "Error sending data")
bytesleft -= sendbytecount
writebuf = writebuf[sendbytecount:] # remove the data just sent
return len(data) - bytesleft
# sendto args may be one of:
# - data, address
# - data, flags, address
#
# The standard behaviour seems to be to ignore the given address if already
# connected.
# sendto() is really for datagram sockets not stream sockets but it
# can be implemented anyway.
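    # For example, once connected, both call shapes are accepted (the
    # address here is made up):
    #   s.sendto("hello", ("00:12:2c:45:8a:7b", 5))
    #   s.sendto("hello", 0, ("00:12:2c:45:8a:7b", 5))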
def sendto(self, data, *args):
if len(args) == 1:
address = args[0]
flags = 0
elif len(args) == 2:
flags, address = args
else:
raise TypeError("sendto takes at most 3 arguments (%d given)" % \
(len(args) + 1))
_checkaddrpair(address)
# must already be connected, cos this is stream socket
self.__checkconnected()
return self.send(data, flags)
def fileno(self):
raise NotImplementedError
def getsockopt(self, level, optname, buflen=0):
# see what options on Linux+s60
# possibly have socket security option.
raise _socket.error(
errno.ENOPROTOOPT, os.strerror(errno.ENOPROTOOPT))
def setsockopt(self, level, optname, value):
# see what options on Linux+s60
# possibly have socket security option.
raise _socket.error(
errno.ENOPROTOOPT, os.strerror(errno.ENOPROTOOPT))
def setblocking(self, flag):
if flag == 0:
self.__timeout = 0 # non-blocking
else:
self.__timeout = None # blocking
def gettimeout(self):
return self.__timeout
def settimeout(self, value):
if value is not None and not isinstance(value, (float, int)):
msg = "timeout value must be a number or None, was %s" % \
type(value)
raise TypeError(msg)
if value < 0:
msg = "timeout value cannot be negative, was %d" % value
raise ValueError(msg)
self.__timeout = value
def shutdown(self, how):
if how not in (SHUT_RD, SHUT_WR, SHUT_RDWR):
raise _socket.error(22, "Invalid argument")
self.__commstate = how
# This method is called from outside this file.
def _getport(self):
if self.__isconnected():
return self.__conn.getport()
if self.__isbound():
return self.__port
raise _lightbluecommon.BluetoothError("socket is neither connected nor bound")
# This method is called from outside this file.
def _getchannel(self):
if self.__conn is None:
return None
return self.__conn.channel
# Called by the event listener when data is available
# 'channel' is IOBluetoothRFCOMMChannel or IOBluetoothL2CAPChannel object
def _handle_channeldata(self, channel, data):
self.__incomingdata.write(data)
_macutil.interruptwait()
# Called by the event listener when a client connects to a server socket
def _handle_channelopened(self, channel):
# put new channels into a queue, which 'accept' can then pull out
self.__queuedchannels_lock.acquire()
try:
# need to implement max connections
#if len(self.__queuedchannels) < self.__maxqueuedconns:
self.__queuedchannels.append(channel)
_macutil.interruptwait()
finally:
self.__queuedchannels_lock.release()
# Called by the event listener when the channel is closed.
def _handle_channelclosed(self, channel):
# beware that this value won't actually be set until the event loop
# has been driven so that this method is actually called
self.__closed = True
_macutil.interruptwait()
def __waituntil(self, stopwaiting, timeoutmsg):
"""
Waits until stopwaiting() returns True, or until the wait times out
(according to the self.__timeout value).
This is to make a function wait until a buffer has been filled. i.e.
stopwaiting() should return True when the buffer is no longer empty.
"""
if not stopwaiting():
if self.__timeout == 0:
# in non-blocking mode (immediate timeout)
# push event loop to really be sure there is no data available
_macutil.looponce()
if not stopwaiting():
# trying to perform operation now would block
raise _socket.error(errno.EAGAIN, os.strerror(errno.EAGAIN))
else:
# block and wait until we get data, or time out
if not _macutil.waituntil(stopwaiting, self.__timeout):
raise _socket.timeout(timeoutmsg)
def __createlistener(self):
if self.__isbound():
return _ChannelServerEventListener.alloc().initWithDelegate_port_protocol_(self,
self._getport(), self.__conn.proto)
else:
listener = _ChannelEventListener.alloc().initWithDelegate_(self)
if self.__conn.channel is not None:
self.__conn.channel.setDelegate_(listener.delegate())
listener.registerclosenotif(self.__conn.channel)
return listener
# should not call this if connect() has been called to connect this socket
def __startevents(self):
if self.__eventlistener is not None:
raise _lightbluecommon.BluetoothError("socket already listening")
self.__eventlistener = self.__createlistener()
def __stopevents(self):
if self.__eventlistener is not None:
self.__eventlistener.close()
def __islistening(self):
return self.__eventlistener is not None
# returns whether socket is a bound server socket
def __isbound(self):
return self.__port != 0
def __isconnected(self):
return self.__conn.channel is not None
def __checkconnected(self):
if not self.__isconnected():
# not connected, raise "socket not connected"
raise _socket.error(errno.ENOTCONN, os.strerror(errno.ENOTCONN))
# set method docstrings
definedmethods = locals() # i.e. defined methods in _SocketWrapper
for name, doc in list(_lightbluecommon._socketdocs.items()):
try:
definedmethods[name].__doc__ = doc
except KeyError:
pass
class _RFCOMMConnection(object):
proto = _lightbluecommon.RFCOMM
def __init__(self, channel=None):
# self.channel is accessed by _BluetoothSocket parent
self.channel = channel
def connect(self, device, port, listener):
# open RFCOMM channel (should timeout actually apply to opening of
# channel as well? if so need to do timeout with async callbacks)
try:
# pyobjc 2.0
result, self.channel = device.openRFCOMMChannelSync_withChannelID_delegate_(None, port, listener.delegate())
except TypeError:
result, self.channel = device.openRFCOMMChannelSync_withChannelID_delegate_(port, listener.delegate())
if result == _macutil.kIOReturnSuccess:
self.channel.setDelegate_(listener.delegate())
listener.registerclosenotif(self.channel)
else:
self.channel = None
return result
def write(self, data):
if self.channel is None:
raise _socket.error("socket not connected")
return \
BBBluetoothChannelDelegate.synchronouslyWriteData_toRFCOMMChannel_(
Foundation.NSData.alloc().initWithBytes_length_(data, len(data)),
self.channel)
def getwritemtu(self):
return self.channel.getMTU()
def getport(self):
return self.channel.getChannelID()
class _L2CAPConnection(object):
proto = _lightbluecommon.L2CAP
def __init__(self, channel=None):
# self.channel is accessed by _BluetoothSocket parent
self.channel = channel
def connect(self, device, port, listener):
try:
# pyobjc 2.0
result, self.channel = device.openL2CAPChannelSync_withPSM_delegate_(None, port, listener.delegate())
except TypeError:
result, self.channel = device.openL2CAPChannelSync_withPSM_delegate_(port, listener.delegate())
if result == _macutil.kIOReturnSuccess:
self.channel.setDelegate_(listener.delegate())
listener.registerclosenotif(self.channel)
else:
self.channel = None
return result
def write(self, data):
if self.channel is None:
raise _socket.error("socket not connected")
return \
BBBluetoothChannelDelegate.synchronouslyWriteData_toL2CAPChannel_(
                Foundation.NSData.alloc().initWithBytes_length_(data, len(data)),
                self.channel)
def getwritemtu(self):
return self.channel.getOutgoingMTU()
def getport(self):
return self.channel.getPSM()
class _ChannelEventListener(Foundation.NSObject):
"""
Uses a BBBluetoothChannelDelegate to listen for events on an
IOBluetoothRFCOMMChannel or IOBluetoothL2CAPChannel, and makes callbacks to
a specified object when events occur.
"""
    # note this is an NSObject "init", not a Python object "__init__"
def initWithDelegate_(self, cb_obj):
"""
Arguments:
- cb_obj: An object that receives callbacks when events occur. This
object should have:
- a method '_handle_channeldata' which takes the related channel (a
IOBluetoothRFCOMMChannel or IOBluetoothL2CAPChannel) and the new
data (a string) as the arguments.
- a method '_handle_channelclosed' which takes the related channel
as the argument.
If this listener's delegate is passed to the openRFCOMMChannel... or
openL2CAPChannel... selectors as the delegate, the delegate (and
therefore this listener) will automatically start receiving events.
Otherwise, call setDelegate_() on the channel with this listener's
delegate as the argument to allow this listener to start receiving
channel events. (This is the only option for server-spawned sockets.)
"""
self = super(_ChannelEventListener, self).init()
if cb_obj is None:
raise TypeError("callback object is None")
self.__cb_obj = cb_obj
self.__closenotif = None
self.__channelDelegate = \
BBBluetoothChannelDelegate.alloc().initWithDelegate_(self)
return self
initWithDelegate_ = objc.selector(initWithDelegate_, signature=b"@@:@")
def delegate(self):
return self.__channelDelegate
@objc.python_method
def registerclosenotif(self, channel):
# oddly enough, sometimes the channelClosed: selector doesn't get called
# (maybe if there's a lot of data being passed?) but this seems to work
notif = channel.registerForChannelCloseNotification_selector_(self,
"channelClosedEvent:channel:")
if notif is not None:
self.__closenotif = notif
def close(self):
if self.__closenotif is not None:
self.__closenotif.unregister()
def channelClosedEvent_channel_(self, notif, channel):
if hasattr(self.__cb_obj, '_handle_channelclosed'):
self.__cb_obj._handle_channelclosed(channel)
channelClosedEvent_channel_ = objc.selector(
channelClosedEvent_channel_, signature=b"v@:@@")
# implement method from BBBluetoothChannelDelegateObserver protocol:
# - (void)channelData:(id)channel data:(NSData *)data;
def channelData_data_(self, channel, data):
if hasattr(self.__cb_obj, '_handle_channeldata'):
self.__cb_obj._handle_channeldata(channel, data[:])
channelData_data_ = objc.selector(channelData_data_, signature=b"v@:@@")
# implement method from BBBluetoothChannelDelegateObserver protocol:
# - (void)channelClosed:(id)channel;
def channelClosed_(self, channel):
if hasattr(self.__cb_obj, '_handle_channelclosed'):
self.__cb_obj._handle_channelclosed(channel)
channelClosed_ = objc.selector(channelClosed_, signature=b"v@:@")
class _ChannelServerEventListener(Foundation.NSObject):
"""
Listens for server-specific events on a RFCOMM or L2CAP channel (i.e. when a
client connects) and makes callbacks to a specified object when events
occur.
"""
    # note this is an NSObject "init", not a Python object "__init__"
def initWithDelegate_port_protocol_(self, cb_obj, port, proto):
"""
Arguments:
- cb_obj: to receive callbacks when a client connects to
to the channel, the callback object should have a method
'_handle_channelopened' which takes the newly opened
IOBluetoothRFCOMMChannel or IOBluetoothL2CAPChannel as its argument.
- port: the channel or PSM that the server is listening on
- proto: L2CAP or RFCOMM.
"""
self = super(_ChannelServerEventListener, self).init()
if cb_obj is None:
raise TypeError("callback object is None")
self.__cb_obj = cb_obj
self.__usernotif = None
if proto == _lightbluecommon.RFCOMM:
usernotif = _IOBluetooth.IOBluetoothRFCOMMChannel.registerForChannelOpenNotifications_selector_withChannelID_direction_(self, "newChannelOpened:channel:", port, _macutil.kIOBluetoothUserNotificationChannelDirectionIncoming)
elif proto == _lightbluecommon.L2CAP:
usernotif = _IOBluetooth.IOBluetoothL2CAPChannel.registerForChannelOpenNotifications_selector_withPSM_direction_(self, "newChannelOpened:channel:", port, _macutil.kIOBluetoothUserNotificationChannelDirectionIncoming)
if usernotif is None:
raise _socket.error("Unable to register for channel-" + \
"opened notifications on server socket on channel/PSM %d" % \
port)
self.__usernotif = usernotif
return self
initWithDelegate_port_protocol_ = objc.selector(
initWithDelegate_port_protocol_, signature=b"@@:@ii")
def close(self):
if self.__usernotif is not None:
self.__usernotif.unregister()
def newChannelOpened_channel_(self, notif, newChannel):
"""
Handle when a client connects to the server channel.
(This method is called for both RFCOMM and L2CAP channels.)
"""
if newChannel is not None and newChannel.isIncoming():
# not sure if delegate really needs to be set
newChannel.setDelegate_(self)
if hasattr(self.__cb_obj, '_handle_channelopened'):
self.__cb_obj._handle_channelopened(newChannel)
# makes this method receive notif and channel as objects
newChannelOpened_channel_ = objc.selector(
newChannelOpened_channel_, signature=b"v@:@@")
# -----------------------------------------------------------
# protocol-specific classes
_SOCKET_CLASSES = { _lightbluecommon.RFCOMM: _RFCOMMConnection,
_lightbluecommon.L2CAP: _L2CAPConnection }
def _getsocketobject(proto):
if proto not in list(_SOCKET_CLASSES.keys()):
raise ValueError("Unknown socket protocol, must be L2CAP or RFCOMM")
return _SocketWrapper(_BluetoothSocket(_SOCKET_CLASSES[proto]()))
| karulis/pybluez | osx/_bluetoothsockets.py | Python | gpl-2.0 | 35,262 | 0.002354 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo_serialization import jsonutils
from magnum.common import urlfetch
from magnum.conductor.monitors import MonitorBase
class MesosMonitor(MonitorBase):
def __init__(self, context, bay):
super(MesosMonitor, self).__init__(context, bay)
self.data = {}
@property
def metrics_spec(self):
return {
'memory_util': {
'unit': '%',
'func': 'compute_memory_util',
},
'cpu_util': {
'unit': '%',
'func': 'compute_cpu_util',
},
}
def _build_url(self, url, protocol='http', port='80', path='/'):
return protocol + '://' + url + ':' + port + path
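    # e.g. self._build_url('10.0.0.5', port='5050', path='/state')
    # returns 'http://10.0.0.5:5050/state'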
def _is_leader(self, state):
return state['leader'] == state['pid']
def pull_data(self):
self.data['mem_total'] = 0
self.data['mem_used'] = 0
self.data['cpu_total'] = 0
self.data['cpu_used'] = 0
for master_addr in self.bay.master_addresses:
mesos_master_url = self._build_url(master_addr, port='5050',
path='/state')
master = jsonutils.loads(urlfetch.get(mesos_master_url))
if self._is_leader(master):
for slave in master['slaves']:
self.data['mem_total'] += slave['resources']['mem']
self.data['mem_used'] += slave['used_resources']['mem']
self.data['cpu_total'] += slave['resources']['cpus']
self.data['cpu_used'] += slave['used_resources']['cpus']
break
def compute_memory_util(self):
if self.data['mem_total'] == 0 or self.data['mem_used'] == 0:
return 0
else:
return self.data['mem_used'] * 100 / self.data['mem_total']
def compute_cpu_util(self):
if self.data['cpu_used'] == 0:
return 0
else:
return self.data['cpu_used'] * 100 / self.data['cpu_total']
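# Illustrative use (a sketch, not part of the original module): the context
# and bay objects are assumed to come from Magnum's conductor, as elsewhere
# in this package.
#   monitor = MesosMonitor(context, bay)
#   monitor.pull_data()            # sums mem/cpu over the leader's slaves
#   monitor.compute_memory_util()  # e.g. 512 MB used of 2048 MB total -> 25.0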
| jay-lau/magnum | magnum/conductor/mesos_monitor.py | Python | apache-2.0 | 2,587 | 0 |
from flask import url_for
from openatlas import app
from openatlas.models.entity import Entity
from tests.base import TestBaseCase
class SearchTest(TestBaseCase):
def test_search(self) -> None:
with app.test_request_context():
app.preprocess_request() # type: ignore
person = Entity.insert('person', 'Waldo')
person.begin_to = '2018-01-01'
person.update()
person.link(
'P131',
Entity.insert('actor_appellation', 'Waldo alias'))
object_ = Entity.insert('place', 'Waldorf')
object_.link('P1', Entity.insert('appellation', 'Waldorf alias'))
Entity.insert('person', 'Waldo without date')
with app.app_context(): # type: ignore
self.app.post(url_for('search_index'), data={'global-term': ''})
rv = self.app.post(
url_for('search_index'),
data={
'global-term': 'wal',
'include_dateless': True,
'begin_year': -100, 'end_year': 3000})
assert b'Waldo' in rv.data
rv = self.app.post(
url_for('search_index'),
data={'term': 'wal', 'own': True})
assert b'Waldo' not in rv.data
data = {'term': 'do', 'classes': 'person'}
rv = self.app.post(url_for('search_index'), data=data)
assert b'Waldo' in rv.data
rv = self.app.post(
url_for('search_index'),
follow_redirects=True,
data={'term': 'x', 'begin_year': 2, 'end_year': -1})
assert b'cannot start after' in rv.data
| craws/OpenAtlas-Python | tests/test_search.py | Python | gpl-2.0 | 1,697 | 0 |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
from setuptools import setup, find_packages
from codecs import open
from os import path
import re
import ast
here = path.abspath(path.dirname(__file__))
_version_re = re.compile(r'__version__\s+=\s+(.*)')
with open('radmyarchive/__init__.py', 'rb') as vf:
version = str(ast.literal_eval(_version_re.search(
vf.read().decode('utf-8')).group(1)))
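# e.g. a line "__version__ = '0.1.0'" in radmyarchive/__init__.py matches with
# group(1) == "'0.1.0'", and ast.literal_eval turns that quoted literal into
# the plain string '0.1.0'.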
with open(path.join(here, 'README.rst'), encoding='utf-8') as f:
readme_file = f.read()
setup(
name="radmyarchive",
version=version,
author="Ömer Fadıl Usta",
author_email="omerusta@gmail.com",
packages=find_packages(),
scripts=["scripts/RADMYARCHIVE.py"],
url="https://github.com/usta/radmyarchive-py",
license="BSD",
keywords="exif image photo rename metadata arrange rearrange catalogue",
description="A simple photo rearranger with help of EXIF tags",
install_requires=['exifread', 'termcolor', 'colorama'],
long_description=readme_file,
classifiers=(
"Development Status :: 3 - Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"License :: OSI Approved :: BSD License",
"Operating System :: OS Independent",
"Programming Language :: Python :: 3.4",
"Topic :: Utilities",
),
)
| usta/radmyarchive-py | setup.py | Python | bsd-3-clause | 1,376 | 0.000728 |
from django.db import models
from django.utils import timezone
# Create your models here.
class Comment(models.Model):
title = models.CharField(max_length=200)
comment_text = models.TextField()
rating = models.IntegerField()
    created_date = models.DateTimeField(default=timezone.now)
    published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
def __str__(self):
return self.title
class Reservation(models.Model):
name = models.CharField(max_length=200)
    people_amount = models.IntegerField(default=1)
    time = models.TimeField(default=timezone.now)
    created_date = models.DateTimeField(default=timezone.now)
    published_date = models.DateTimeField(blank=True, null=True)
def publish(self):
self.published_date = timezone.now()
self.save()
    def __str__(self):
return self.name
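# Usage sketch (hypothetical values, not part of the original app): entries
# stay unpublished until publish() stamps them, e.g.
#     r = Reservation(name='Walk-in', people_amount=2)
#     r.publish()  # sets published_date to now and saves the row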
|
marioluigiman/geekchicken
|
cafe/models.py
|
Python
|
mit
| 960 | 0.015625 |
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
from configurator import Configurator
if __name__ == '__main__':
try:
configurator = Configurator()
configurator.update_deck_settings()
except (RuntimeError, IOError, ValueError) as e:
sys.stderr.write(str(e) + '\n')
sys.exit(-1)
|
tgracchus/spinnaker
|
pylib/spinnaker/reconfigure_spinnaker.py
|
Python
|
apache-2.0
| 891 | 0.003367 |
"""Benchmark for SQLAlchemy.
An adaptation of Robert Brewer's ZooMark speed tests. """
import datetime
import sys
import time
from sqlalchemy import *
from sqlalchemy.orm import *
from sqlalchemy.test import *
ITERATIONS = 1
dbapi_session = engines.ReplayableSession()
metadata = None
class ZooMarkTest(TestBase):
"""Runs the ZooMark and squawks if method counts vary from the norm.
Each test has an associated `call_range`, the total number of
accepted function calls made during the test. The count can vary
between Python 2.4 and 2.5.
    Unlike a unit test, this is an ordered collection of steps. Running
components individually will fail.
"""
__only_on__ = 'postgresql+psycopg2'
__skip_if__ = lambda : sys.version_info < (2, 5), # TODO: get 2.4
# support
def test_baseline_0_setup(self):
global metadata, session
creator = testing.db.pool._creator
recorder = lambda : dbapi_session.recorder(creator())
engine = engines.testing_engine(options={'creator': recorder})
metadata = MetaData(engine)
session = sessionmaker()()
engine.connect()
def test_baseline_1_create_tables(self):
zoo = Table(
'Zoo',
metadata,
Column('ID', Integer, Sequence('zoo_id_seq'),
primary_key=True, index=True),
Column('Name', Unicode(255)),
Column('Founded', Date),
Column('Opens', Time),
Column('LastEscape', DateTime),
Column('Admission', Float),
)
animal = Table(
'Animal',
metadata,
Column('ID', Integer, Sequence('animal_id_seq'),
primary_key=True),
Column('ZooID', Integer, ForeignKey('Zoo.ID'), index=True),
Column('Name', Unicode(100)),
Column('Species', Unicode(100)),
Column('Legs', Integer, default=4),
Column('LastEscape', DateTime),
Column('Lifespan', Float(4)),
Column('MotherID', Integer, ForeignKey('Animal.ID')),
Column('PreferredFoodID', Integer),
Column('AlternateFoodID', Integer),
)
metadata.create_all()
global Zoo, Animal
class Zoo(object):
def __init__(self, **kwargs):
for k, v in kwargs.iteritems():
setattr(self, k, v)
class Animal(object):
def __init__(self, **kwargs):
for k, v in kwargs.iteritems():
setattr(self, k, v)
mapper(Zoo, zoo)
mapper(Animal, animal)
def test_baseline_1a_populate(self):
wap = Zoo(Name=u'Wild Animal Park', Founded=datetime.date(2000,
1, 1), Opens=datetime.time(8, 15, 59),
LastEscape=datetime.datetime( 2004, 7, 29, 5, 6, 7, ),
Admission=4.95)
session.add(wap)
sdz = Zoo(Name=u'San Diego Zoo', Founded=datetime.date(1835, 9,
13), Opens=datetime.time(9, 0, 0), Admission=0)
session.add(sdz)
bio = Zoo(Name=u'Montr\xe9al Biod\xf4me',
Founded=datetime.date(1992, 6, 19),
Opens=datetime.time(9, 0, 0), Admission=11.75)
session.add(bio)
seaworld = Zoo(Name=u'Sea_World', Admission=60)
session.add(seaworld)
# Let's add a crazy futuristic Zoo to test large date values.
lp = Zoo(Name=u'Luna Park', Founded=datetime.date(2072, 7, 17),
Opens=datetime.time(0, 0, 0), Admission=134.95)
session.add(lp)
session.flush()
# Animals
leopard = Animal(Species=u'Leopard', Lifespan=73.5)
session.add(leopard)
leopard.ZooID = wap.ID
leopard.LastEscape = \
datetime.datetime(2004, 12, 21, 8, 15, 0, 999907, )
session.add(Animal(Species=u'Lion', ZooID=wap.ID))
session.add(Animal(Species=u'Slug', Legs=1, Lifespan=.75))
session.add(Animal(Species=u'Tiger', ZooID=sdz.ID))
# Override Legs.default with itself just to make sure it works.
session.add(Animal(Species=u'Bear', Legs=4))
session.add(Animal(Species=u'Ostrich', Legs=2, Lifespan=103.2))
session.add(Animal(Species=u'Centipede', Legs=100))
session.add(Animal(Species=u'Emperor Penguin', Legs=2,
ZooID=seaworld.ID))
session.add(Animal(Species=u'Adelie Penguin', Legs=2,
ZooID=seaworld.ID))
session.add(Animal(Species=u'Millipede', Legs=1000000,
ZooID=sdz.ID))
# Add a mother and child to test relationships
        bai_yun = Animal(Species=u'Ape', Name=u'Bai Yun', Legs=2)
session.add(bai_yun)
session.add(Animal(Species=u'Ape', Name=u'Hua Mei', Legs=2,
MotherID=bai_yun.ID))
session.flush()
session.commit()
def test_baseline_2_insert(self):
for x in xrange(ITERATIONS):
session.add(Animal(Species=u'Tick', Name=u'Tick %d' % x,
Legs=8))
session.flush()
def test_baseline_3_properties(self):
for x in xrange(ITERATIONS):
# Zoos
WAP = list(session.query(Zoo).filter(Zoo.Name
== u'Wild Animal Park'))
SDZ = list(session.query(Zoo).filter(Zoo.Founded
== datetime.date(1835, 9, 13)))
Biodome = list(session.query(Zoo).filter(Zoo.Name
== u'Montr\xe9al Biod\xf4me'))
seaworld = list(session.query(Zoo).filter(Zoo.Admission
== float(60)))
# Animals
leopard = list(session.query(Animal).filter(Animal.Species
== u'Leopard'))
ostrich = list(session.query(Animal).filter(Animal.Species
== u'Ostrich'))
millipede = list(session.query(Animal).filter(Animal.Legs
== 1000000))
ticks = list(session.query(Animal).filter(Animal.Species
== u'Tick'))
def test_baseline_4_expressions(self):
for x in xrange(ITERATIONS):
assert len(list(session.query(Zoo))) == 5
assert len(list(session.query(Animal))) == ITERATIONS + 12
assert len(list(session.query(Animal).filter(Animal.Legs
== 4))) == 4
assert len(list(session.query(Animal).filter(Animal.Legs
== 2))) == 5
assert len(list(session.query(Animal).filter(and_(Animal.Legs
>= 2, Animal.Legs < 20)))) == ITERATIONS + 9
assert len(list(session.query(Animal).filter(Animal.Legs
> 10))) == 2
assert len(list(session.query(Animal).filter(Animal.Lifespan
> 70))) == 2
assert len(list(session.query(Animal).
filter(Animal.Species.like(u'L%')))) == 2
assert len(list(session.query(Animal).
filter(Animal.Species.like(u'%pede')))) == 2
assert len(list(session.query(Animal).filter(Animal.LastEscape
!= None))) == 1
assert len(list(session.query(Animal).filter(Animal.LastEscape
== None))) == ITERATIONS + 11
# In operator (containedby)
assert len(list(session.query(Animal).filter(
Animal.Species.like(u'%pede%')))) == 2
assert len(list(session.query(Animal).
filter(Animal.Species.in_((u'Lion'
, u'Tiger', u'Bear'))))) == 3
# Try In with cell references
class thing(object):
pass
pet, pet2 = thing(), thing()
pet.Name, pet2.Name = u'Slug', u'Ostrich'
assert len(list(session.query(Animal).
filter(Animal.Species.in_((pet.Name,
pet2.Name))))) == 2
# logic and other functions
name = u'Lion'
assert len(list(session.query(Animal).
filter(func.length(Animal.Species)
== len(name)))) == ITERATIONS + 3
assert len(list(session.query(Animal).
filter(Animal.Species.like(u'%i%'
)))) == ITERATIONS + 7
# Test now(), today(), year(), month(), day()
assert len(list(session.query(Zoo).filter(and_(Zoo.Founded
!= None, Zoo.Founded < func.now())))) == 3
assert len(list(session.query(Animal).filter(Animal.LastEscape
== func.now()))) == 0
assert len(list(session.query(Animal).filter(func.date_part('year'
, Animal.LastEscape) == 2004))) == 1
assert len(list(session.query(Animal).
filter(func.date_part('month'
, Animal.LastEscape) == 12))) == 1
assert len(list(session.query(Animal).filter(func.date_part('day'
, Animal.LastEscape) == 21))) == 1
def test_baseline_5_aggregates(self):
Animal = metadata.tables['Animal']
Zoo = metadata.tables['Zoo']
# TODO: convert to ORM
for x in xrange(ITERATIONS):
# views
view = select([Animal.c.Legs]).execute().fetchall()
legs = [x[0] for x in view]
legs.sort()
expected = {
'Leopard': 73.5,
'Slug': .75,
'Tiger': None,
'Lion': None,
'Bear': None,
'Ostrich': 103.2,
'Centipede': None,
'Emperor Penguin': None,
'Adelie Penguin': None,
'Millipede': None,
'Ape': None,
'Tick': None,
}
for species, lifespan in select([Animal.c.Species,
Animal.c.Lifespan]).execute().fetchall():
assert lifespan == expected[species]
expected = [u'Montr\xe9al Biod\xf4me', 'Wild Animal Park']
e = select([Zoo.c.Name], and_(Zoo.c.Founded != None,
Zoo.c.Founded <= func.current_timestamp(),
Zoo.c.Founded >= datetime.date(1990, 1, 1)))
values = [val[0] for val in e.execute().fetchall()]
assert set(values) == set(expected)
# distinct
legs = [x[0] for x in select([Animal.c.Legs],
distinct=True).execute().fetchall()]
legs.sort()
def test_baseline_6_editing(self):
for x in xrange(ITERATIONS):
# Edit
SDZ = session.query(Zoo).filter(Zoo.Name == u'San Diego Zoo'
).one()
SDZ.Name = u'The San Diego Zoo'
SDZ.Founded = datetime.date(1900, 1, 1)
SDZ.Opens = datetime.time(7, 30, 0)
SDZ.Admission = 35.00
# Test edits
SDZ = session.query(Zoo).filter(Zoo.Name
== u'The San Diego Zoo').one()
assert SDZ.Founded == datetime.date(1900, 1, 1), SDZ.Founded
# Change it back
SDZ.Name = u'San Diego Zoo'
SDZ.Founded = datetime.date(1835, 9, 13)
SDZ.Opens = datetime.time(9, 0, 0)
SDZ.Admission = 0
# Test re-edits
SDZ = session.query(Zoo).filter(Zoo.Name == u'San Diego Zoo'
).one()
assert SDZ.Founded == datetime.date(1835, 9, 13), \
SDZ.Founded
def test_baseline_7_drop(self):
session.rollback()
metadata.drop_all()
# Now, run all of these tests again with the DB-API driver factored
# out: the ReplayableSession playback stands in for the database.
#
# How awkward is this in a unittest framework? Very.
def test_profile_0(self):
global metadata, session
player = lambda : dbapi_session.player()
engine = create_engine('postgresql:///', creator=player)
metadata = MetaData(engine)
session = sessionmaker()()
engine.connect()
@profiling.function_call_count(4898)
def test_profile_1_create_tables(self):
self.test_baseline_1_create_tables()
@profiling.function_call_count(9225)
def test_profile_1a_populate(self):
self.test_baseline_1a_populate()
@profiling.function_call_count(640)
def test_profile_2_insert(self):
self.test_baseline_2_insert()
# this number...
@profiling.function_call_count(6783, {
'2.6': 7194,
'2.7': 7298,
'2.7+cextension': 7288,
'2.6+cextension': 7184,
})
def test_profile_3_properties(self):
self.test_baseline_3_properties()
# and this number go down slightly when using the C extensions
@profiling.function_call_count(22510, {'2.6': 24055, '2.7': 24214})
def test_profile_4_expressions(self):
self.test_baseline_4_expressions()
@profiling.function_call_count(1313, {'2.6+cextension': 1236,
'2.7+cextension': 1207},
variance=0.1)
def test_profile_5_aggregates(self):
self.test_baseline_5_aggregates()
@profiling.function_call_count(3172)
def test_profile_6_editing(self):
self.test_baseline_6_editing()
def test_profile_7_drop(self):
self.test_baseline_7_drop()
|
simplegeo/sqlalchemy
|
test/aaa_profiling/test_zoomark_orm.py
|
Python
|
mit
| 13,694 | 0.002629 |
def bits_set(x):
bits = 0
for i in range(0,8):
if (x & (1<<i))>0:
bits += 1
return bits
def find_ber(sent, received):
assert(len(received)<=len(sent))
if len(received) < len(sent)/2:
print "frame detection error, more than half of the frames were lost!"
return 0.5
errors = 0
for i in range(0,len(received)):
errors += bits_set(sent[i] ^ received[i]) # ^ is xor
return float(errors)/float(8*len(received))
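# Minimal self-check sketch (hypothetical frames, not in the original module):
# one flipped bit across two 8-bit frames gives a BER of 1/(8*2) = 0.0625.
if __name__ == '__main__':
    sent = [0xAA, 0xF0]
    received = [0xAA, 0xF1]  # one bit error in the second frame
    print "BER: %f" % find_ber(sent, received)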
|
kit-cel/gr-dab
|
python/channel_tests/ber.py
|
Python
|
gpl-3.0
| 427 | 0.04918 |
"""
***************************************************************************
MDWF_Sensan_a.py
-------------------------------------
Copyright (C) 2014 TIGER-NET (www.tiger-net.org)
***************************************************************************
* This plugin is part of the Water Observation Information System (WOIS) *
* developed under the TIGER-NET project funded by the European Space *
* Agency as part of the long-term TIGER initiative aiming at promoting *
* the use of Earth Observation (EO) for improved Integrated Water *
* Resources Management (IWRM) in Africa. *
* *
* WOIS is a free software i.e. you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published *
* by the Free Software Foundation, either version 3 of the License, *
* or (at your option) any later version. *
* *
* WOIS is distributed in the hope that it will be useful, but WITHOUT ANY *
* WARRANTY; without even the implied warranty of MERCHANTABILITY or *
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License *
* for more details. *
* *
* You should have received a copy of the GNU General Public License along *
* with this program. If not, see <http://www.gnu.org/licenses/>. *
***************************************************************************
"""
import os
from PyQt4 import QtGui
from processing.core.GeoAlgorithmExecutionException import GeoAlgorithmExecutionException
from processing.core.parameters import *
from SWATAlgorithm import SWATAlgorithm
from SWAT_SENSAN_specs import SWAT_SENSAN_specs
SENSAN_specs = SWAT_SENSAN_specs()
class MDWF_Sensan_a(SWATAlgorithm):
SRC_FOLDER = "SRC_FOLDER"
PAR_SRC = "PAR_SRC"
PST_FILE = "PST_FILE"
PAR_FILE = "PAR_FILE"
PCT_DEV = "PCT_DEV"
def __init__(self):
super(MDWF_Sensan_a, self).__init__(__file__)
def defineCharacteristics(self):
self.name = "5.3 - Sensitivity analysis and calibration of SWAT model with PEST (MDWF) - generate parameter variation file"
self.group = "Model development workflow (MDWF)"
self.addParameter(ParameterFile(MDWF_Sensan_a.SRC_FOLDER, "Select model source folder", True))
self.addParameter(ParameterSelection(MDWF_Sensan_a.PAR_SRC, "Select source for parameter variation", ['Initial parameter values in PEST control file (.pst)','Initial parameter values defined when creating template files (.pbf)','Optimal parameter values output file from PEST (.par)'], False))
self.addParameter(ParameterFile(MDWF_Sensan_a.PST_FILE, "Select PEST control file", False))
self.addParameter(ParameterFile(MDWF_Sensan_a.PAR_FILE, "Select PEST output parameter file", False))
self.addParameter(ParameterNumber(MDWF_Sensan_a.PCT_DEV, "Percent deviation in parameter values"))
def processAlgorithm(self, progress):
SRC_FOLDER = self.getParameterValue(MDWF_Sensan_a.SRC_FOLDER)
PAR_SRC = self.getParameterValue(MDWF_Sensan_a.PAR_SRC)
PST_FILE = self.getParameterValue(MDWF_Sensan_a.PST_FILE)
PAR_FILE = self.getParameterValue(MDWF_Sensan_a.PAR_FILE)
PCT_DEV = self.getParameterValue(MDWF_Sensan_a.PCT_DEV)
pct_devfile = open(SRC_FOLDER + os.sep + 'pct_dev.dat','w')
pct_devfile.writelines(str(PCT_DEV)+'\r\n')
pct_devfile.close()
pvfilename = SRC_FOLDER + os.sep + SENSAN_specs.VARFLE
pvfile = open(pvfilename,'w')
PARNAME = []
PARVAL1 = []
PARLBND = []
PARUBND = []
# Find number of parameters and prepare the parameter variation block
if PAR_SRC == 0:
if os.path.isfile(PST_FILE):
pst_lines = open(PST_FILE,'r').readlines()
no_par = int(pst_lines[3].split()[0])
no_obsgr = int(pst_lines[3].split()[2])
for i in range(no_obsgr+12,no_obsgr+12+no_par):
PARNAME.append(pst_lines[i].split()[0])
PARVAL1.append(pst_lines[i].split()[3])
PARLBND.append(pst_lines[i].split()[4])
PARUBND.append(pst_lines[i].split()[5])
else:
                raise GeoAlgorithmExecutionException('File ' + PST_FILE + ' does not exist. Please choose another source for parameter variation.')
elif PAR_SRC == 1:
filelist = os.listdir(SRC_FOLDER)
for f in filelist:
if '.pbf' in f:
                    # read each template file once instead of four times
                    pbf_fields = open(SRC_FOLDER + os.sep + f, 'r').readlines()[0].split()
                    PARNAME.append(pbf_fields[0])
                    PARVAL1.append(pbf_fields[3])
                    PARLBND.append(pbf_fields[4])
                    PARUBND.append(pbf_fields[5])
elif PAR_SRC == 2:
if os.path.isfile(PAR_FILE):
par_lines = open(PAR_FILE,'r').readlines()
no_par = len(par_lines)-1
for i in range(1,no_par+1):
PARNAME.append(par_lines[i].split()[0])
PARVAL1.append(par_lines[i].split()[1])
pst_lines = open(PST_FILE,'r').readlines()
no_par = int(pst_lines[3].split()[0])
no_obsgr = int(pst_lines[3].split()[2])
for i in range(no_obsgr+12,no_obsgr+12+no_par):
PARLBND.append(pst_lines[i].split()[4])
PARUBND.append(pst_lines[i].split()[5])
else:
                raise GeoAlgorithmExecutionException('File ' + PAR_FILE + ' does not exist. Please choose another source for parameter variation.')
# Write header and baseline parameter set
for i in range(0,len(PARNAME)):
pvfile.writelines(PARNAME[i] + '\t')
pvfile.writelines('\r\n')
for i in range(0,len(PARVAL1)):
pvfile.writelines(PARVAL1[i] + '\t')
pvfile.writelines('\r\n')
# Write parameter sets having one parameter deviate from the baseline parameter set
for j in range(0,len(PARVAL1)):
for i in range(0,len(PARVAL1)):
if j == i:
if (float(PARVAL1[i]) * (1+PCT_DEV/100.) >= float(PARLBND[i])) & (float(PARVAL1[i]) * (1+PCT_DEV/100.) <= float(PARUBND[i])):
pvfile.writelines(str(float(PARVAL1[i]) * (1+PCT_DEV/100.)) + '\t')
elif (float(PARVAL1[i]) * (1+PCT_DEV/100.) > float(PARUBND[i])):
raise GeoAlgorithmExecutionException(PARNAME[i] + ' exceeds upper boundary with a deviation of '+ str(PCT_DEV)+' %.')
else:
raise GeoAlgorithmExecutionException(PARNAME[i] + ' is smaller than lower boundary with a deviation of '+ str(PCT_DEV)+' %.')
else:
pvfile.writelines(PARVAL1[i] + '\t')
pvfile.writelines('\r\n')
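        # One-at-a-time design, illustrated with hypothetical figures: with
        # PCT_DEV = 10 and baseline values [1.0, 2.0], the variation file holds
        # the baseline row plus [1.1, 2.0] and [1.0, 2.2] -- each extra row
        # perturbs exactly one parameter.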
pvfile.close()
def getIcon(self):
return QtGui.QIcon(os.path.dirname(__file__) + "/images/tigerNET.png")
|
DHI-GRAS/processing_SWAT
|
MDWF_Sensan_a.py
|
Python
|
gpl-3.0
| 7,579 | 0.006333 |
"""
Make sure the !N and !-N commands work properly.
"""
from __future__ import print_function
import lldb
import lldbsuite.test.lldbutil as lldbutil
from lldbsuite.test.lldbtest import *
class TestHistoryRecall(TestBase):
mydir = TestBase.compute_mydir(__file__)
    # If your test case doesn't stress debug info, then set this to True.
    # That way it won't be run once for each debug info format.
NO_DEBUG_INFO_TESTCASE = True
def test_history_recall(self):
"""Test the !N and !-N functionality of the command interpreter."""
self.sample_test()
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
def sample_test(self):
interp = self.dbg.GetCommandInterpreter()
result = lldb.SBCommandReturnObject()
interp.HandleCommand("command history", result, True)
interp.HandleCommand("platform list", result, True)
interp.HandleCommand("!0", result, False)
self.assertTrue(result.Succeeded(), "!0 command did not work: %s"%(result.GetError()))
self.assertTrue("command history" in result.GetOutput(), "!0 didn't rerun command history")
interp.HandleCommand("!-1", result, False)
self.assertTrue(result.Succeeded(), "!-1 command did not work: %s"%(result.GetError()))
self.assertTrue("host:" in result.GetOutput(), "!-1 didn't rerun platform list.")
|
apple/swift-lldb
|
packages/Python/lldbsuite/test/functionalities/history/TestHistoryRecall.py
|
Python
|
apache-2.0
| 1,405 | 0.00427 |
import scrapy
from locations.items import GeojsonPointItem
import itertools
def chunks(l, n):
for i in range(0, len(l), n):
yield l[i:i + n]
def partition(l, n):
return list(chunks(l, n))
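# Illustration (hypothetical values): partition([1, 2, 3, 4, 5], 2) returns
# [[1, 2], [3, 4], [5]] -- fixed-size slices with a shorter tail.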
def process_hours(opening_hours):
ret_hours = []
for hours_str in opening_hours:
split_hours = hours_str.replace(",", "").replace("AM AM","").replace("PM PM", "").split(" ")
if split_hours[1] == "-":
range_start = split_hours[0]
range_end = split_hours[2]
times = partition([x for x in split_hours[3:] if x != "-"], 2)
else:
range_start, range_end = split_hours[0], None
times = partition([x for x in split_hours[1:] if x != "-"], 2)
periods = partition(times, 2)
periods = [list(itertools.chain(*r)) for r in periods]
period_list = []
for start, start_period, end, end_period in periods:
start_hour, start_minutes = [int(x) for x in start.split(":")]
end_hour, end_minutes = [int(x) for x in end.split(":")]
if start_period == "PM":
start_hour += 12
end_hour += 12
hours = (start_hour, start_minutes, end_hour, end_minutes)
period_list.append("%02d:%02d-%02d:%02d" % hours)
periods_str = ", ".join(period_list)
if range_start and range_end:
ret_hours.append("{}-{} {}".format(range_start[:2], range_end[:2], periods_str))
elif range_start:
ret_hours.append("{} {}".format(range_start[:2], periods_str))
return "; ".join(ret_hours)
class HMSpider(scrapy.Spider):
name = "hm-worldwide"
all_stores_uri = 'https://hm.storelocator.hm.com/rest/storelocator/stores/1.0/locale/en_US/country/{}/'
start_urls = ["http://www.hm.com/entrance.ahtml"]
def parse(self, response):
country_urls = response.css(".column li a::attr('href')").extract()
country_codes = {x.split("=")[1].split("&")[0].upper() for x in country_urls}
for country_code in country_codes:
yield scrapy.Request(url=self.all_stores_uri.format(country_code), callback=self.parse_country)
def parse_country(self, response):
stores = response.css("storeComplete")
for store in stores:
point = {
"lat": store.xpath("latitude/text()").extract_first(),
"lon": store.xpath("longitude/text()").extract_first(),
"name": store.xpath("name/text()").extract_first(),
"addr_full": store.xpath("address/addressLine/text()").extract_first(),
"city": store.xpath("city/text()").extract_first(),
"country": store.xpath("country/text()").extract_first(),
"phone": store.xpath("phone/text()").extract_first(),
"opening_hours": process_hours(store.xpath("openingHours/openingHour/text()").extract()),
"ref": store.xpath("storeId/text()").extract_first()
}
if "/country/US" in response.url:
point["state"] = store.xpath("region/name/text()").extract_first()
point["postcode"] = store.xpath("address/addressLine/text()").extract()[-1].split(" ")[-1]
yield GeojsonPointItem(**point)
|
iandees/all-the-places
|
locations/spiders/hm.py
|
Python
|
mit
| 3,295 | 0.003642 |
import json
import re
import requests
from requests.adapters import HTTPAdapter
from requests.packages.urllib3 import Retry
URL_TEMPLATE = "https://itunes.apple.com/lookup?id=%s&entity=podcast"
def id_from_url(url):
"""
Extract ID from iTunes podcast URL
:param url (str)
:return: (str)
"""
matches = re.findall(r'\/id([0-9]+)',url)
if len(matches) == 0:
raise LookupError("No ID present in the given URL")
if len(matches) > 1:
raise LookupError("More than one ID present in the URL, cannot decide which one to take")
return matches[0]
def lookup_id(id):
"""
Looks up podcast ID in Itunes lookup service
https://itunes.apple.com/lookup?id=<id>&entity=podcast
:param id (str):
:return: itunes response for the lookup as dict
"""
    retries = Retry(total=3,
                    backoff_factor=0.1,
                    status_forcelist=[500, 502, 503, 504])
    s = requests.Session()
    s.mount('https://', HTTPAdapter(max_retries=retries))
    response = s.get(URL_TEMPLATE % id)
    content = json.loads(response.content.decode('utf-8'))
    return content
def feed_url(itunes_lookup_response):
"""
Returns feed URL from the itunes lookup response
:param itunes_lookup_response:
:return: str
"""
if len(itunes_lookup_response.get('results')) == 0:
raise LookupError("iTunes response has no results")
url = itunes_lookup_response.get('results')[0].get('feedUrl')
if url is None:
raise LookupError("feedUrl field is not present in response")
return url
def extract_feed_url(url):
id = id_from_url(url)
response = lookup_id(id)
url = feed_url(response)
return url
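# Usage sketch (hypothetical podcast URL, performs a live lookup): the helpers
# above compose as URL -> ID -> iTunes lookup -> feed URL.
if __name__ == '__main__':
    print(extract_feed_url(
        'https://itunes.apple.com/us/podcast/example/id123456789'))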
|
wotaen/itunes_podcast_rss
|
extract.py
|
Python
|
mit
| 1,768 | 0.005656 |
import gtk, gobject, gtk.glade
import gnome, gnome.ui, pango
import os, os.path
from gtk_goodies import gconf_wrapper, Undo, dialog_extras, image_extras
import gsudoku, sudoku, saver, sudoku_maker, printing, sudoku_generator_gui
import game_selector
import time, threading
from gettext import gettext as _
from gettext import ngettext
from defaults import *
from timer import ActiveTimer
from simple_debug import simple_debug,options
from dialog_swallower import SwappableArea
icon_factory = gtk.IconFactory()
STOCK_PIXBUFS = {}
for filename,stock_id in [('footprints.png','tracks'),]:
pb = gtk.gdk.pixbuf_new_from_file(os.path.join(IMAGE_DIR,filename))
STOCK_PIXBUFS[stock_id]=pb
iconset = gtk.IconSet(pb)
icon_factory.add(stock_id,iconset)
icon_factory.add_default()
gtk.stock_add([('tracks',
_('Track moves'),
0,0,""),])
try:
STOCK_FULLSCREEN = gtk.STOCK_FULLSCREEN
except:
STOCK_FULLSCREEN = _('Full Screen')
class UI (gconf_wrapper.GConfWrapper):
ui='''<ui>
<menubar name="MenuBar">
<menu name="File" action="File">
<menuitem action="New"/>
<menuitem action="Open"/>
<menuitem action="ByHand"/>
<separator/>
<menuitem action="Print"/>
<menuitem action="PrintMany"/>
<separator/>
<!--<menuitem action="Save"/>-->
<separator/>
<menuitem action="Generator"/>
<menuitem action="BackgroundGenerator"/>
<separator/>
<menuitem action="Close"/>
<!--<menuitem action="Quit"/>-->
</menu>
<menu action="Edit">
<menuitem action="Undo"/>
<menuitem action="Redo"/>
<separator/>
<menuitem action="Clear"/>
<menuitem action="ClearNotes"/>
</menu>
<menu action="View">
<menuitem action="FullScreen"/>
<separator/>
<menuitem action="ToggleToolbar"/>
<menuitem action="ToggleBackground"/>
<menuitem action="ToggleHighlight"/>
</menu>
<menu action="Game">
<menuitem action="ShowPossible"/>
<menuitem action="AutofillCurrentSquare"/>
<menuitem action="Autofill"/>
<separator/>
<menuitem action="AlwaysShowPossible"/>
<menuitem action="ShowImpossibleImplications"/>
<separator/>
<menuitem action="Tracker"/>
<separator/>
<menuitem action="PuzzleInfo"/>
<separator/>
<menuitem action="HighScores"/>
</menu>
<menu action="Help">
<menuitem action="About"/>
<menuitem action="ShowHelp"/>
</menu>
</menubar>
<toolbar name="Toolbar">
<!--<toolitem action="Quit"/>-->
<toolitem action="New"/>
<!--<toolitem action="Open"/>-->
<!--<toolitem action="Print"/>-->
<!--<toolitem action="Save"/>-->
<separator/>
<toolitem action="Clear"/>
<toolitem action="ClearNotes"/>
<!--<separator/>
<toolitem action="Undo"/>
<toolitem action="Redo"/>-->
<separator/>
<toolitem action="ShowPossible"/>
<!--<toolitem action="AlwaysShowPossible"/>-->
<toolitem action="AutofillCurrentSquare"/>
<separator/>
<toolitem action="ToggleHighlight"/>
<!--<toolitem action="AlwaysShowPossible"/>-->
<toolitem action="Tracker"/>
</toolbar>
</ui>'''
initial_prefs = {'group_size':9,
'font_zoom':0,
'zoom_on_resize':1,
'always_show_hints':0,
'player':os.environ.get('USERNAME',''),
'difficulty':0.0,
'minimum_font_size':pango.SCALE * 7, # minimum font-size
'minimum_number_of_new_puzzles':MIN_NEW_PUZZLES,
'bg_black':1,
'bg_custom_color':'',
#'show_notes':0
}
@simple_debug
def __init__ (self):
self.w = gtk.Window()
self.w.set_default_size(700,675)
self.timer = ActiveTimer(self.w)
self.won = False
gconf_wrapper.GConfWrapper.__init__(self,
gconf_wrapper.GConf('gnome-sudoku')
)
self.initialize_prefs()
self.player = self.gconf['player']
self.cleared = [] # used for Undo memory
self.cleared_notes = [] # used for Undo memory
gnome.program_init('gnome-sudoku',VERSION,
properties={gnome.PARAM_APP_DATADIR:APP_DATA_DIR}
)
self.w.connect('delete-event',self.quit_cb)
self.vb = gtk.VBox()
self.uimanager = gtk.UIManager()
if self.gconf['bg_custom_color']:
bgcol = self.gconf['bg_custom_color']
elif self.gconf['bg_black']:
bgcol = 'black'
else:
bgcol = None
self.gsd = gsudoku.SudokuGameDisplay()
if bgcol: self.gsd.set_bg_color(bgcol)
self.gsd.connect('puzzle-finished',self.you_win_callback)
self.main_actions = gtk.ActionGroup('MainActions')
self.main_actions.add_actions([
('File',None,'_File'),
('New',gtk.STOCK_NEW,None,
'<Control>n',_('New game'),self.new_cb),
('Print',gtk.STOCK_PRINT,None,
None,_('Print current game'),self.print_game),
('PrintMany',gtk.STOCK_PRINT,_('Print _Multiple Sudokus'),
None,_('Print more than one sudoku at a time.'),self.print_multiple_games),
#('Quit',gtk.STOCK_QUIT,None,'<Control>q',
# 'Quit Sudoku game',self.quit_cb),
('Close',gtk.STOCK_CLOSE,None,'<Control>w',
_('Close Sudoku (save game for later)'),self.quit_cb),
#('Save',gtk.STOCK_SAVE,_('_Save'),
# '<Control>s','Save game to play later.',
# self.save_game),
('ByHand',gtk.STOCK_EDIT,_('_Enter custom game'),
None,_('Enter new puzzle by hand (use this to copy a puzzle from another source).'),
self.enter_game_by_hand),
('Open',gtk.STOCK_OPEN,_('_Resume old game'),
'<Control>r',_('Resume a previous saved game.'),
self.open_game),
('Game',None,_('_Game')),
('View',None,_('_View')),
('ShowPossible',gtk.STOCK_HELP,_('_Hint'),
'<Control>i',
_('Show which numbers could go in the current square.'),
self.show_hint_cb),
('AutofillCurrentSquare',gtk.STOCK_APPLY,_('_Fill'),'<Control>f',
_('Automatically fill in the current square if possible.'),
self.auto_fill_current_square_cb),
('Autofill',gtk.STOCK_REFRESH,_('Fill _all squares'),'<Control>a',
_('Automatically fill in all squares for which there is only one valid value.'),
self.auto_fill_cb),
#('ZoomIn',gtk.STOCK_ZOOM_IN,'_Increase size',
# '<Control>plus','Increase the size of numbers and squares',
# self.zoom_in_cb),
#('ZoomOut',gtk.STOCK_ZOOM_OUT,'_Decrease size',
# '<Control>minus','Decrease the size of numbers and squares.',
# self.zoom_out_cb),
('FullScreen',STOCK_FULLSCREEN,None,
'F11',None,self.full_screen_cb),
('PuzzleInfo',gtk.STOCK_ABOUT,_('Puzzle _Statistics'),
None,_('Show statistics about current puzzle'),
self.show_info_cb),
('Help',None,_('_Help'),
None,None,None),
('About',gtk.STOCK_ABOUT,None,
None,None,self.show_about),
('ShowHelp',gtk.STOCK_HELP,None,
None,None,self.show_help),
('HighScores',None,_('High _Scores'),
None,_('Show high scores or replay old games.'),
self.show_high_scores_cb),
])
self.main_actions.add_toggle_actions([
('AlwaysShowPossible',
None,
_('_Always show hint'),
None,
_('Always show possible numbers in a square'),
self.auto_hint_cb),
('ShowImpossibleImplications',
None,
_('Warn about _unfillable squares'),
None,
_('Warn about squares made unfillable by a move'),
self.impossible_implication_cb),
('Tracker','tracks',_('_Track additions'),
'<Control>T',
_('Mark new additions in a separate color so you can keep track of them.'),
self.tracker_toggle_cb,False),
#('ZoomOnResize',None,_('_Adjust size of grid when resizing window'),
# None,_('Automatically change the size of numbers and squares to fit the window.'),
# ),
('ToggleToolbar',None,_('Show _Toolbar'),None,None,self.toggle_toolbar_cb,True),
#('ToggleNotes',None,_('Show _Notes'),'<Control>O',
# _('Include room for notes at the top and bottom of squares.'),self.toggle_notes_cb),
('ToggleBackground',None,_('_Black background'),
None,_("Background of game is black; otherwise, the background will follow your theme colors."),
self.toggle_background_cb,True),
('ToggleHighlight',gtk.STOCK_SELECT_COLOR,_('_Highlighter'),
None,_('Highlight the current row, column and box'),self.toggle_highlight_cb,False),
('BackgroundGenerator',None,_('Generate new puzzles _while you play'),
None,
_('Generate new puzzles in the background while you play. This will automatically pause when the game goes into the background.'),
self.toggle_generator_cb, True),
])
self.edit_actions = gtk.ActionGroup('EditActions')
self.edit_actions.add_actions(
[('Edit',None,'_Edit'),
('Undo',gtk.STOCK_UNDO,_('_Undo'),'<Control>z',_('Undo last action')),
('Redo',gtk.STOCK_REDO,_('_Redo'),'<Shift><Control>z',_('Redo last action')),
('Clear',gtk.STOCK_CLEAR,_('_Clear'),'<Control>b',_("Clear entries you've filled in"),self.clear_cb),
('ClearNotes',gtk.STOCK_CLEAR,_('Clear _Notes'),None,_("Clear notes and hints"),self.clear_notes_cb),
# Trackers...
('Tracker%s',None,_('No Tracker'),'<Control>0',None,lambda *args: self.set_tracker(-1)),
('Generator',None,_('_Generate new puzzles'),None,_('Generate new puzzles.'),
self.generate_puzzle_gui,),
])
self.edit_actions.add_actions(
[('Tracker%s'%n,None,'Tracker _%s'%n,'<Control>%s'%n,None,lambda *args: self.set_tracker(n-1)) for
n in range(1,9)])
self.uimanager.insert_action_group(self.main_actions,0)
self.uimanager.insert_action_group(self.edit_actions,0)
self.uimanager.add_ui_from_string(self.ui)
# Set up our UNDO stuff
undo_widg = self.edit_actions.get_action('Undo')
redo_widg = self.edit_actions.get_action('Redo')
self.history = Undo.UndoHistoryList(undo_widg,redo_widg)
for e in self.gsd.__entries__.values():
Undo.UndoableGenericWidget(e,self.history,
set_method='set_value_from_undo',
pre_change_signal='value-about-to-change'
)
Undo.UndoableGenericWidget(e,self.history,
set_method='set_notes',
get_method='get_note_text',
signal='notes-changed',
pre_change_signal='value-about-to-change',
)
# add the accelerator group to our toplevel window
self.w.add_accel_group(self.uimanager.get_accel_group())
mb = self.uimanager.get_widget('/MenuBar')
mb.show()
self.vb.pack_start(mb,fill=False,expand=False)
self.tb = self.uimanager.get_widget('/Toolbar')
#self.tb.show()
self.vb.pack_start(self.tb,fill=False,expand=False)
self.main_area = gtk.HBox()
self.swallower = SwappableArea(self.main_area)
self.swallower.show()
self.vb.pack_start(self.swallower,True,padding=12)
self.main_area.pack_start(self.gsd,padding=6)
self.main_actions.set_visible(True)
self.game_box = gtk.VBox()
self.main_area.show()
self.vb.show()
# Set up area for by-hand editing...
self.by_hand_label = gtk.Label()
self.by_hand_label.set_alignment(0,0)
self.by_hand_label.set_markup('<i>%s</i>'%_('Entering custom grid...'))
self.game_box.pack_start(self.by_hand_label,False,)#padding=12)
self.by_hand_buttonbox = gtk.HButtonBox()
self.by_hand_buttonbox.set_spacing(12)
self.by_hand_save_button = gtk.Button(_('_Play game'))
self.by_hand_save_button.connect('clicked',self.save_handmade_grid)
self.by_hand_cancel_button = gtk.Button(stock=gtk.STOCK_CANCEL)
self.by_hand_cancel_button.connect('clicked',self.cancel_handmade_grid)
self.by_hand_buttonbox.add(self.by_hand_cancel_button)
self.by_hand_buttonbox.add(self.by_hand_save_button)
self.game_box.pack_start(self.by_hand_buttonbox,False,padding=18)
self.game_box.show()
self.by_hand_widgets = [self.by_hand_label,self.by_hand_buttonbox]
self.main_area.pack_start(self.game_box,False,padding=12)
# Set up trackers
self.trackers = {}
self.setup_tracker_interface()
self.w.add(self.vb)
self.statusbar = gtk.Statusbar(); self.statusbar.show()
gobject.timeout_add(500,self.update_statusbar_cb)
self.vb.pack_end(self.statusbar,fill=False,expand=False)
self.worker_connections=[]
mb.show()
# sync up toggles with gconf values...
map(lambda tpl: self.gconf_wrap_toggle(*tpl),
[('always_show_hints',
self.main_actions.get_action('AlwaysShowPossible')),
('show_impossible_implications',
self.main_actions.get_action('ShowImpossibleImplications')),
('generate_puzzles_in_background',
self.main_actions.get_action('BackgroundGenerator')),
('show_toolbar',
self.main_actions.get_action('ToggleToolbar')),
('bg_black',
self.main_actions.get_action('ToggleBackground')),
('show_tracker',
self.main_actions.get_action('Tracker')),
])
self.timer.start_timing()
# setup sudoku maker...
self.sudoku_maker = sudoku_maker.SudokuMaker()
self.sudoku_tracker = sudoku_maker.SudokuTracker(self.sudoku_maker)
if not self.sudoku_tracker.playing:
self.main_actions.get_action('Open').set_sensitive(False)
else:
self.main_actions.get_action('Open').set_sensitive(True)
if not self.sudoku_tracker.finished:
self.main_actions.get_action('HighScores').set_sensitive(False)
# auto-load
try:
game = self.gconf['current_game']
except:
self.gconf['current_game']=""
game = ""
if game:
try:
self.sudoku_tracker.open_game(self, game)
except:
#print 'We appear to have lost ',game
try:
self.gsd.load_game(game)
except:
puz,d=self.sudoku_tracker.get_new_puzzle(self.gconf['difficulty'])
else:
# select an easy puzzle...
puz,d=self.sudoku_tracker.get_new_puzzle(self.gconf['difficulty'])
#print 'Default to ',puz
self.gsd.change_grid(puz,
9)
        # generate puzzles while our user is working...
if self.gconf['generate_puzzles_in_background']:
gobject.timeout_add(1000,lambda *args: self.start_worker_thread() and True)
self.gsd.show()
icon = gtk.gdk.pixbuf_new_from_file(os.path.join(ICON_DIR,'sudoku.png'))
self.w.set_icon(icon)
self.w.show()
@simple_debug
def start_worker_thread (self, *args):
n_new_puzzles = len(self.sudoku_tracker.list_new_puzzles())
if n_new_puzzles < self.gconf['minimum_number_of_new_puzzles']:
self.worker = threading.Thread(target=lambda *args: self.sudoku_maker.work(limit=5))
self.worker_connections = [
self.timer.connect('timing-started',self.sudoku_maker.resume),
self.timer.connect('timing-stopped',self.sudoku_maker.pause)
]
self.worker.start()
#else:
# print 'We already have ',n_new_puzzles,'!'
@simple_debug
def stop_worker_thread (self, *args):
if hasattr(self,'worker'):
self.sudoku_maker.stop()
for c in self.worker_connections:
self.timer.disconnect(c)
@simple_debug
def you_win_callback (self,grid):
self.won = True
# increase difficulty for next time.
self.gconf['difficulty']=self.gconf['difficulty']+0.1
self.timer.finish_timing()
self.sudoku_tracker.finish_game(self)
#time_string = "%s (%s active)"%(self.timer.total_time_string(),
# self.timer.active_time_string()
# )
#sublabel = _("You completed the puzzle in %s")%time_string
#sublabel += "\n"
#sublabel += ngettext("You got %s hint","You got %s hints",self.gsd.hints)%self.gsd.hints
#sublabel += "\n"
#if self.gsd.impossible_hints:
# sublabel += ngettext("You had %s impossibility pointed out.",
# "You had %s impossibilities pointed out.",
# self.gsd.impossible_hints)%self.gsd.impossible_hints
#if self.gsd.auto_fills:
# sublabel += ngettext("You used the auto-fill %s time",
# "You used the auto-fill %s times",
# self.gsd.auto_fills)%self.gsd.auto_fills
#dialog_extras.show_message("You win!",label="You win!",
# icon=os.path.join(IMAGE_DIR,'winner2.png'),
# sublabel=sublabel
# )
hs = game_selector.HighScores(self.sudoku_tracker)
hs.highlight_newest=True
#hs.run_swallowed_dialog(self.swallower)
hs.run_dialog()
self.main_actions.get_action('HighScores').set_sensitive(True)
#self.gsd.blank_grid()
self.stop_game()
self.new_cb()
@simple_debug
def initialize_prefs (self):
for k,v in self.initial_prefs.items():
try:
self.gconf[k]
except:
self.gconf[k]=v
@simple_debug
def new_cb (self,*args):
gs = game_selector.NewGameSelector(self.sudoku_tracker)
gs.difficulty = self.gconf['difficulty']
ret = gs.run_swallowed_dialog(self.swallower)
if ret:
puz,d = ret
self.gconf['difficulty']=d.value
self.stop_game()
self.gsd.change_grid(puz,9)
@simple_debug
def stop_game (self):
if self.gsd.grid and self.gsd.grid.is_changed():
self.sudoku_tracker.save_game(self)
self.main_actions.get_action('Open').set_sensitive(True)
self.tracker_ui.reset()
self.timer.reset_timer()
self.timer.start_timing()
self.won = False
@simple_debug
def quit_cb (self, *args):
if gtk.main_level() > 1:
# If we are in an embedded mainloop, that means that one
# of our "swallowed" dialogs is active, in which case we
# have to quit that mainloop before we can quit
# properly.
if self.swallower.running:
d = self.swallower.running
d.response(gtk.RESPONSE_DELETE_EVENT)
gtk.main_quit() # Quit the embedded mainloop
gobject.idle_add(self.quit_cb,100) # Call ourselves again
# to quit the main
# mainloop
return
#buttons = d.action_area.get_children()
#for b in buttons:
# if d.get_response_for_widget(b) in [gtk.RESPONSE_CLOSE,gtk.RESPONSE_CANCEL]:
# print 'clicking button',b
# b.emit('clicked')
# while gtk.events_pending():
# print 'Take care of iters...'
# gtk.main_iteration()
# break
        # Now we hide the window
self.w.hide()
# make sure we really go away before doing our saving --
# otherwise we appear sluggish.
while gtk.events_pending():
gtk.main_iteration()
if self.won:
self.gconf['current_game']=''
if not self.won:
if not self.gsd.grid:
self.gconf['current_game']=''
else: #always save the game
self.gconf['current_game']=self.sudoku_tracker.save_game(self)
self.stop_worker_thread()
self.sudoku_tracker.save()
gtk.main_quit()
@simple_debug
def enter_game_by_hand (self, *args):
self.stop_game()
self.gsd.change_grid(sudoku.InteractiveSudoku(),9)
for w in self.by_hand_widgets: w.show_all()
@simple_debug
def save_handmade_grid (self, *args):
for w in self.by_hand_widgets: w.hide()
# this should make our active grid into our virgin grid
self.won = False
self.gsd.change_grid(self.gsd.grid,9)
self.sudoku_maker.names[self.gsd.grid.to_string()]=self.sudoku_maker.get_puzzle_name('Custom Puzzle')
@simple_debug
def cancel_handmade_grid (self, *args):
for w in self.by_hand_widgets: w.hide()
@simple_debug
def open_game (self, *args):
#game_file = dialog_extras.getFileOpen(_("Load saved game"),
# default_file=os.path.join(DATA_DIR,'games/')
# )
#saver.unpickle_game(self, game_file)
#ifi = file(game_file,'r')
#self.gsd.load_game(ifi.read())
#ifi.close()
puzzl=game_selector.OldGameSelector(self.sudoku_tracker).run_swallowed_dialog(self.swallower)
if puzzl:
self.stop_game()
saver.open_game(self,puzzl)
@simple_debug
def save_game (self, *args):
save_to_dir=os.path.join(DATA_DIR,'games/')
if not os.path.exists(save_to_dir):
os.makedirs(save_to_dir)
game_number = 1
while os.path.exists(
os.path.join(save_to_dir,"game%s"%game_number)
):
game_number+=1
game_loc = os.path.join(save_to_dir,
"game%s"%game_number)
saver.pickle_game(self, game_loc)
return game_loc
@simple_debug
def zoom_in_cb (self,*args):
self.gh.change_font_size(multiplier=1.1)
self.zoom = self.zoom * 1.1
@simple_debug
def zoom_out_cb (self,*args):
self.gh.change_font_size(multiplier=0.9)
self.zoom = self.zoom * 0.9
def full_screen_cb (self, *args):
if not hasattr(self,'is_fullscreen'): self.is_fullscreen = False
if self.is_fullscreen:
self.w.unfullscreen()
self.is_fullscreen = False
else:
self.w.fullscreen()
self.is_fullscreen = True
@simple_debug
def clear_cb (self,*args):
clearer=Undo.UndoableObject(
lambda *args: self.cleared.append(self.gsd.reset_grid()), #action
lambda *args: [self.gsd.add_value(*entry) for entry in self.cleared.pop()], #inverse
self.history #history
)
clearer.perform()
def clear_notes_cb (self, *args):
clearer = Undo.UndoableObject(
lambda *args: self.cleared_notes.append(self.gsd.clear_notes()), #action
# clear_notes returns a list of tuples indicating the cleared notes...
# (x,y,(top,bottom)) -- this is what we need for undoing
lambda *args: [self.gsd.__entries__[t[0],t[1]].set_notes(t[2]) for t in self.cleared_notes.pop()], #inverse
self.history
)
clearer.perform()
@simple_debug
def show_hint_cb (self, *args):
self.gsd.show_hint()
@simple_debug
def auto_hint_cb (self, action):
if action.get_active():
self.gsd.always_show_hints = True
self.gsd.update_all_hints()
else:
self.gsd.clear_hints()
@simple_debug
def impossible_implication_cb (self, action):
if action.get_active():
self.gsd.show_impossible_implications = True
else:
self.gsd.show_impossible_implications = False
@simple_debug
def auto_fill_cb (self, *args):
if not hasattr(self,'autofilled'): self.autofilled=[]
if not hasattr(self,'autofiller'):
self.autofiller = Undo.UndoableObject(
lambda *args: self.autofilled.append(self.gsd.auto_fill()),
lambda *args: [self.gsd.remove(entry[0],entry[1],do_removal=True) for entry in self.autofilled.pop()],
self.history
)
self.autofiller.perform()
@simple_debug
def auto_fill_current_square_cb (self, *args):
self.gsd.auto_fill_current_entry()
@simple_debug
def setup_tracker_interface (self):
self.tracker_ui = TrackerBox(self)
self.tracker_ui.show_all()
self.tracker_ui.hide()
self.game_box.add(self.tracker_ui)
@simple_debug
def set_tracker (self, n):
if self.gsd.trackers.has_key(n):
self.tracker_ui.select_tracker(n)
e = self.gsd.get_focused_entry()
if e:
if n==-1:
for tid in self.gsd.trackers_for_point(e.x,e.y):
self.gsd.remove_tracker(e.x,e.y,tid)
else:
self.gsd.add_tracker(e.x,e.y,n)
else:
print 'No tracker ',n,'yet'
@simple_debug
def tracker_toggle_cb (self, widg):
if widg.get_active():
#if len(self.tracker_ui.tracker_model)<=1:
# self.tracker_ui.add_tracker()
self.tracker_ui.show_all()
else:
self.tracker_ui.hide()
@simple_debug
def toggle_toolbar_cb (self, widg):
if widg.get_active(): self.tb.show()
else: self.tb.hide()
def update_statusbar_cb (self, *args):
if not self.gsd.grid: return
puzz = self.gsd.grid.virgin.to_string()
if (not hasattr(self,'current_puzzle_string') or
self.current_puzzle_string != puzz):
if not self.sudoku_tracker.sudoku_maker.names.has_key(puzz):
self.sudoku_tracker.sudoku_maker.names[puzz] = self.sudoku_tracker.sudoku_maker.get_puzzle_name(puzz)
self.current_puzzle_string = puzz
self.current_puzzle_name = self.sudoku_tracker.sudoku_maker.names[puzz]
if len(self.current_puzzle_name)>18: self.current_puzzle_name = self.current_puzzle_name[:17]+u'\u2026'
self.current_puzzle_diff = self.sudoku_tracker.get_difficulty(puzz)
tot_string = _("Playing ") + self.current_puzzle_name
tot_string += " - " + "%s"%self.current_puzzle_diff.value_string()
tot_string += " " + "(%1.2f)"%self.current_puzzle_diff.value
#if self.timer.tot_time or self.timer.tot_time_complete:
# time_string = _("%s (%s active)")%(
# self.timer.total_time_string(),
# self.timer.active_time_string()
# )
# if not self.timer.__timing__:
# time_string += " %s"%_('paused')
# tot_string += " - " + time_string
#if self.gsd.hints and not self.gconf['always_show_hints']:
# tot_string += " - " +ngettext("%s hint","%s hints",
# self.gsd.hints)%self.gsd.hints
#if self.gsd.auto_fills:
# tot_string += " " +ngettext("%s auto-fill","%s auto-fills",
# self.gsd.auto_fills)%self.gsd.auto_fills
if not hasattr(self,'sbid'):
self.sbid = self.statusbar.get_context_id('game_info')
self.statusbar.pop(self.sbid)
self.statusbar.push(self.sbid,
tot_string)
return True
@simple_debug
def toggle_background_cb (self, widg):
if widg.get_active():
self.gsd.set_bg_color('black')
else:
self.gsd.set_bg_color(None)
def toggle_highlight_cb (self, widg):
if widg.get_active():
self.gsd.toggle_highlight(True)
else:
self.gsd.toggle_highlight(False)
@simple_debug
def show_info_cb (self, *args):
if not self.gsd.grid:
dialog_extras.show_message(parent=self.w,
title=_("Puzzle Information"),
label=_("There is no current puzzle.")
)
return
puzzle = self.gsd.grid.virgin.to_string()
diff = self.sudoku_tracker.get_difficulty(puzzle)
information = _("Calculated difficulty: ")
information += diff.value_string()
information += " (%1.2f)"%diff.value
information += "\n"
information += _("Number of moves instantly fillable by elimination: ")
information += str(int(diff.instant_elimination_fillable))
information += "\n"
information += _("Number of moves instantly fillable by filling: ")
information += str(int(diff.instant_fill_fillable))
information += "\n"
information += _("Amount of trial-and-error required to solve: ")
information += str(len(diff.guesses))
if not self.sudoku_tracker.sudoku_maker.names.has_key(puzzle):
self.sudoku_tracker.sudoku_maker.names[puzzle]=self.sudoku_tracker.sudoku_maker.get_puzzle_name(
_('Puzzle'))
name = self.sudoku_tracker.sudoku_maker.names[puzzle]
dialog_extras.show_message(parent=self.w,
title=_("Puzzle Information"),
label=_("Statistics for %s")%name,
sublabel=information)
@simple_debug
def toggle_generator_cb (self, toggle):
if toggle.get_active():
self.start_worker_thread()
else:
self.stop_worker_thread()
@simple_debug
def show_high_scores_cb (self, *args):
hs=game_selector.HighScores(self.sudoku_tracker)
replay_game = hs.run_dialog()
if replay_game:
self.stop_game()
self.gsd.change_grid(replay_game,9)
@simple_debug
def show_about (self, *args):
about = gnome.ui.About(
APPNAME,#appname
VERSION,#version
COPYRIGHT,#copyright
DESCRIPTION,#description
AUTHORS,#authors
None,#comments
None,#translator
gtk.gdk.pixbuf_new_from_file(os.path.join(ICON_DIR,'sudoku.png'))#logo
)
try:
about.set_website('http://gnome-sudoku.sourceforge.net')
except AttributeError: pass
about.show()
@simple_debug
def show_help (self, *args):
#dialog_extras.show_faq(faq_file=os.path.join(BASE_DIR,_('FAQ')))
gnome.help_display('gnome-sudoku')
@simple_debug
def print_game (self, *args):
printing.print_sudokus([self.gsd])
@simple_debug
def print_multiple_games (self, *args):
gp=game_selector.GamePrinter(self.sudoku_tracker, self.gconf)
gp.run_dialog()
@simple_debug
def generate_puzzle_gui (self, *args):
sudoku_generator_gui.GameGenerator(self,self.gconf)
class TrackerBox (gtk.VBox):
@simple_debug
def __init__ (self, main_ui):
gtk.VBox.__init__(self)
self.glade = gtk.glade.XML(os.path.join(GLADE_DIR,'tracker.glade'))
self.main_ui = main_ui
self.vb = self.glade.get_widget('vbox1')
self.vb.unparent()
self.pack_start(self.vb,expand=True,fill=True)
self.setup_actions()
self.setup_tree()
self.show_all()
@simple_debug
def reset (self):
for tree in self.tracker_model:
if tree[0]>-1:
self.tracker_model.remove(tree.iter)
@simple_debug
def setup_tree (self):
self.tracker_tree = self.glade.get_widget('treeview1')
self.tracker_model = gtk.ListStore(int,gtk.gdk.Pixbuf,str)
self.tracker_tree.set_model(self.tracker_model)
col1 = gtk.TreeViewColumn("",gtk.CellRendererPixbuf(),pixbuf=1)
col2 = gtk.TreeViewColumn("",gtk.CellRendererText(),text=2)
self.tracker_tree.append_column(col2)
self.tracker_tree.append_column(col1)
# Our initial row...
self.tracker_model.append([-1,None,_('No Tracker')])
self.tracker_tree.get_selection().connect('changed',self.selection_changed_cb)
@simple_debug
def setup_actions (self):
self.tracker_actions = gtk.ActionGroup('tracker_actions')
self.tracker_actions.add_actions(
[('Clear',
gtk.STOCK_CLEAR,
_('_Clear Tracker'),
None,_('Clear all moves tracked by selected tracker.'),
self.clear_cb
),
('Keep',None,
_('_Clear Others'),
None,
_('Clear all moves not tracked by selected tracker.'),
self.keep_cb),
]
)
for action,widget_name in [('Clear','ClearTrackerButton'),
('Keep','KeepTrackerButton'),
]:
a=self.tracker_actions.get_action(action)
a.connect_proxy(self.glade.get_widget(widget_name))
self.glade.get_widget('AddTrackerButton').connect('clicked',
self.add_tracker)
@simple_debug
def add_tracker (self,*args):
#print 'Adding tracker!'
tracker_id = self.main_ui.gsd.create_tracker()
#print 'tracker_id = ',tracker_id
pb=image_extras.pixbuf_transform_color(
STOCK_PIXBUFS['tracks'],
(0,0,0),#black
self.main_ui.gsd.get_tracker_color(tracker_id),
)
# select our new tracker
self.tracker_tree.get_selection().select_iter(
self.tracker_model.append([tracker_id,
pb,
_("Tracker %s")%(tracker_id+1)]
)
)
@simple_debug
def select_tracker (self, tracker_id):
for row in self.tracker_model:
if row[0]==tracker_id:
self.tracker_tree.get_selection().select_iter(row.iter)
@simple_debug
def selection_changed_cb (self, selection):
mod,itr = selection.get_selected()
if itr: selected_tracker_id = mod.get_value(itr,0)
else: selected_tracker_id=-1
# This should be cheap since we don't expect many trackers...
# We cycle through each row and toggle it off if it's not
# selected; on if it is selected
for row in self.tracker_model:
tid = row[0]
if tid != -1: # -1 == no tracker
self.main_ui.gsd.toggle_tracker(tid,tid==selected_tracker_id)
self.tracker_actions.set_sensitive(selected_tracker_id != -1)
@simple_debug
def clear_cb (self, action):
mod,itr=self.tracker_tree.get_selection().get_selected()
selected_tracker_id=mod.get_value(itr,0)
self.tracker_delete_tracks(selected_tracker_id)
@simple_debug
def keep_cb (self, action):
mod,itr=self.tracker_tree.get_selection().get_selected()
selected_tracker_id=mod.get_value(itr,0)
self.tracker_keep_tracks(selected_tracker_id)
@simple_debug
def tracker_delete_tracks (self, tracker_id):
clearer=Undo.UndoableObject(
lambda *args: self.main_ui.cleared.append(self.main_ui.gsd.delete_by_tracker(tracker_id)),
lambda *args: [self.main_ui.gsd.add(*entry) for entry in self.main_ui.cleared.pop()],
self.main_ui.history)
clearer.perform()
@simple_debug
def tracker_keep_tracks (self, tracker_id):
clearer=Undo.UndoableObject(
lambda *args: self.main_ui.cleared.append(self.main_ui.gsd.delete_except_for_tracker(tracker_id)),
lambda *args: [self.main_ui.gsd.add(*entry) for entry in self.main_ui.cleared.pop()],
self.main_ui.history)
clearer.perform()
class GamesTracker (sudoku_maker.SudokuTracker):
@simple_debug
def __init__ (self, sudoku_maker):
        sudoku_maker.SudokuTracker.__init__(self, sudoku_maker)
@simple_debug
def build_model (self):
# puzzle / difficulty / % completed / game started / game finished
        # gtk.TreeModel is abstract and cannot be instantiated; use the
        # concrete ListStore, as elsewhere in this file
        self.model = gtk.ListStore(str, str)
def start_game ():
if options.debug: print 'Starting GNOME Sudoku in debug mode'
if options.profile:
options.profile = False
profile_me()
return
u = UI()
gobject.threads_init()
try:
gtk.main()
except KeyboardInterrupt:
# properly quit on a keyboard interrupt...
u.quit_cb()
def profile_me ():
print 'Profiling GNOME Sudoku'
import tempfile,os.path
import hotshot, hotshot.stats
pname = os.path.join(tempfile.gettempdir(),'GNOME_SUDOKU_HOTSHOT_PROFILE')
prof = hotshot.Profile(pname)
prof.runcall(start_game)
stats = hotshot.stats.load(pname)
stats.strip_dirs()
stats.sort_stats('time','calls').print_stats()
if __name__ == '__main__':
import defaults
    defaults.DATA_DIR = '/tmp/'
    DATA_DIR = '/tmp/'
    start_game()
|
TheBestHuman/DesktopSudokuGenerator
|
lib/gnome_sudoku.py
|
Python
|
gpl-3.0
| 38,776 | 0.013642 |
#!/usr/bin/env python
#
# Translation of videogamena.me javascript to python
#
# http://videogamena.me/vgng.js
# http://videogamena.me/video_game_names.txt
#
# (C) 2014 Dustin Knie <dustin@nulldomain.com>
import argparse
import os
import random
from math import floor, trunc
_word_list_file = 'video_game_names.txt'
_word_list = []
def _build_list(word_list=_word_list_file):
try:
f = open(word_list, 'r')
words = []
for line in f:
line = line.strip('\n')
if line == "----":
_word_list.append(words)
words = []
else:
words.append(line)
_word_list.append(words)
except IOError as e:
print("Error opening {}: {}".format(word_list, e))
exit(1)
def _get_word(word_list, words=None, bad_match_list=None, allow_similar_matches=False):
    # avoid the shared-mutable-default-argument pitfall
    if words is None:
        words = []
    if bad_match_list is None:
        bad_match_list = []
    bad_word = True
while bad_word:
word = word_list[trunc(floor(random.random() * len(word_list)))]
if '^' in word:
if not allow_similar_matches:
bad_match_list += word.split('^')[1].split('|')
word = word.split('^')[0]
if word in words or word in bad_match_list:
continue
bad_word = False
words.append(word)
return (words, bad_match_list)
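# Example of the word-list syntax handled above (hypothetical entry): a line
# such as "Ninja^Samurai|Shinobi" yields the word "Ninja" while blacklisting
# "Samurai" and "Shinobi" for the remaining picks, unless similar matches are
# explicitly allowed.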
def generate_game_name(allow_similar_matches=False):
words = []
bad_match_list = []
for word_list in _word_list:
(words, bad_match_list) = _get_word(word_list,
words=words,
bad_match_list=bad_match_list,
allow_similar_matches=allow_similar_matches)
return ' '.join(words)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('count', type=int, nargs='?', help='Number of names to create')
parser.add_argument('-l', '--list', action='store', help='Word list to use for generating names.')
args = parser.parse_args()
_build_list(word_list=args.list if args.list else _word_list_file)
for i in range(args.count if args.count else 1):
print(generate_game_name())
|
nullpuppy/vgng
|
vgng.py
|
Python
|
mit
| 2,102 | 0.00333 |
"""
bamboo.globals
~~~~~~~~~~~~~~
"""
from peak.util.proxies import CallbackProxy
from bamboo.context import context
db = CallbackProxy(lambda: context["db"])
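# Illustrative note (assumed application setup, not part of this module): the
# CallbackProxy defers resolution to first use, so after
#     context["db"] = make_connection()
# any attribute access on `db` is forwarded to the connection stored in the
# context at that moment.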
|
hahnicity/bamboo
|
bamboo/globals.py
|
Python
|
mit
| 159 | 0 |
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""This program wraps an arbitrary command since gn currently can only execute
scripts."""
import os
import subprocess
import sys
from shutil import copy2
args = sys.argv[1:]
args[0] = os.path.abspath(args[0])
#if sys.platform == 'darwin':
# copy2(os.path.join(os.path.dirname(args[0]), 'libffmpeg.dylib'), os.path.dirname(os.path.dirname(args[0])))
sys.exit(subprocess.call(args))
|
nwjs/chromium.src
|
tools/v8_context_snapshot/run.py
|
Python
|
bsd-3-clause
| 552 | 0.003623 |
import yaml
from mongoengine import *
import datetime
import time
import hashlib
import uuid
from pprint import pprint
from user import User, Users
from cloudmesh_management.generate import random_user
from cloudmesh_management.user import read_user
FILENAME = "/tmp/user.yaml"
connect('user', port=27777)
users = Users()
# Reads user information from file
def main():
# user = random_user()
# with open(FILENAME, "w") as f:
# f.write(user.yaml())
print 70 * "="
    user = read_user(FILENAME)
print 70 * "="
pprint(user.json())
user.save()
user.update(**{"set__username": "Hallo"})
user.save()
print User.objects(username="Hallo")
if __name__ == "__main__":
main()
|
rajpushkar83/cloudmesh
|
cloudmesh/management/read.py
|
Python
|
apache-2.0
| 752 | 0.00133 |
"""
Provide tests for sysadmin dashboard feature in sysadmin.py
"""
from __future__ import absolute_import
import glob
import os
import re
import shutil
import unittest
from datetime import datetime
from uuid import uuid4
import mongoengine
from django.conf import settings
from django.test.client import Client
from django.test.utils import override_settings
from django.urls import reverse
from opaque_keys.edx.locator import CourseLocator
from pytz import UTC
from six import text_type
from six.moves import range
from dashboard.git_import import GitImportErrorNoDir
from dashboard.models import CourseImportLog
from openedx.core.djangolib.markup import Text
from student.roles import CourseStaffRole, GlobalStaff
from student.tests.factories import UserFactory
from util.date_utils import DEFAULT_DATE_TIME_FORMAT, get_time_display
from xmodule.modulestore.django import modulestore
from xmodule.modulestore.tests.django_utils import TEST_DATA_SPLIT_MODULESTORE, SharedModuleStoreTestCase
from xmodule.modulestore.tests.mongo_connection import MONGO_HOST, MONGO_PORT_NUM
TEST_MONGODB_LOG = {
'host': MONGO_HOST,
'port': MONGO_PORT_NUM,
'user': '',
'password': '',
'db': 'test_xlog',
}
class SysadminBaseTestCase(SharedModuleStoreTestCase):
"""
Base class with common methods used in XML and Mongo tests
"""
TEST_REPO = 'https://github.com/edx/edx4edx_lite.git'
TEST_BRANCH = 'testing_do_not_delete'
TEST_BRANCH_COURSE = CourseLocator.from_string('course-v1:MITx+edx4edx_branch+edx4edx')
MODULESTORE = TEST_DATA_SPLIT_MODULESTORE
def setUp(self):
"""Setup test case by adding primary user."""
super(SysadminBaseTestCase, self).setUp()
self.user = UserFactory.create(username='test_user',
email='test_user+sysadmin@edx.org',
password='foo')
self.client = Client()
def _setstaff_login(self):
"""Makes the test user staff and logs them in"""
GlobalStaff().add_users(self.user)
self.client.login(username=self.user.username, password='foo')
def _add_edx4edx(self, branch=None):
"""Adds the edx4edx sample course"""
post_dict = {'repo_location': self.TEST_REPO, 'action': 'add_course', }
if branch:
post_dict['repo_branch'] = branch
return self.client.post(reverse('sysadmin_courses'), post_dict)
def _rm_edx4edx(self):
"""Deletes the sample course from the XML store"""
def_ms = modulestore()
course_path = '{0}/edx4edx_lite'.format(
os.path.abspath(settings.DATA_DIR))
try:
# using XML store
course = def_ms.courses.get(course_path, None)
except AttributeError:
# Using mongo store
course = def_ms.get_course(CourseLocator('MITx', 'edx4edx', 'edx4edx'))
# Delete git loaded course
response = self.client.post(
reverse('sysadmin_courses'),
{
'course_id': text_type(course.id),
'action': 'del_course',
}
)
self.addCleanup(self._rm_glob, '{0}_deleted_*'.format(course_path))
return response
def _rm_glob(self, path):
"""
        Expand the passed-in parameter as a shell glob and iteratively
        remove the matches. Must only expand to directories.
"""
for path in glob.glob(path):
shutil.rmtree(path)
def _mkdir(self, path):
"""
Create directory and add the cleanup for it.
"""
os.mkdir(path)
self.addCleanup(shutil.rmtree, path)
@override_settings(
MONGODB_LOG=TEST_MONGODB_LOG,
GIT_REPO_DIR=settings.TEST_ROOT / "course_repos_{}".format(uuid4().hex)
)
@unittest.skipUnless(settings.FEATURES.get('ENABLE_SYSADMIN_DASHBOARD'),
"ENABLE_SYSADMIN_DASHBOARD not set")
class TestSysAdminMongoCourseImport(SysadminBaseTestCase):
"""
Check that importing into the mongo module store works
"""
@classmethod
def tearDownClass(cls):
"""Delete mongo log entries after test."""
super(TestSysAdminMongoCourseImport, cls).tearDownClass()
try:
mongoengine.connect(TEST_MONGODB_LOG['db'])
CourseImportLog.objects.all().delete()
except mongoengine.connection.ConnectionError:
pass
def _setstaff_login(self):
"""
Makes the test user staff and logs them in
"""
self.user.is_staff = True
self.user.save()
self.client.login(username=self.user.username, password='foo')
def test_missing_repo_dir(self):
"""
Ensure that we handle a missing repo dir
"""
self._setstaff_login()
if os.path.isdir(settings.GIT_REPO_DIR):
shutil.rmtree(settings.GIT_REPO_DIR)
# Create git loaded course
response = self._add_edx4edx()
self.assertIn(Text(text_type(GitImportErrorNoDir(settings.GIT_REPO_DIR))),
response.content.decode('UTF-8'))
def test_mongo_course_add_delete(self):
"""
This is the same as TestSysadmin.test_xml_course_add_delete,
but it uses a mongo store
"""
self._setstaff_login()
self._mkdir(settings.GIT_REPO_DIR)
def_ms = modulestore()
self.assertNotEqual('xml', def_ms.get_modulestore_type(None))
self._add_edx4edx()
course = def_ms.get_course(CourseLocator('MITx', 'edx4edx', 'edx4edx'))
self.assertIsNotNone(course)
self._rm_edx4edx()
course = def_ms.get_course(CourseLocator('MITx', 'edx4edx', 'edx4edx'))
self.assertIsNone(course)
def test_course_info(self):
"""
Check to make sure we are getting git info for courses
"""
# Regex of first 3 columns of course information table row for
# test course loaded from git. Would not have sha1 if
# git_info_for_course failed.
table_re = re.compile(u"""
<tr>\\s+
<td>edX\\sAuthor\\sCourse</td>\\s+ # expected test git course name
<td>course-v1:MITx\\+edx4edx\\+edx4edx</td>\\s+ # expected test git course_id
<td>[a-fA-F\\d]{40}</td> # git sha1 hash
""", re.VERBOSE)
self._setstaff_login()
self._mkdir(settings.GIT_REPO_DIR)
# Make sure we don't have any git hashes on the page
response = self.client.get(reverse('sysadmin_courses'))
self.assertNotRegexpMatches(response.content.decode('utf-8'), table_re)
# Now add the course and make sure it does match
response = self._add_edx4edx()
self.assertRegexpMatches(response.content.decode('utf-8'), table_re)
def test_gitlogs(self):
"""
Create a log entry and make sure it exists
"""
self._setstaff_login()
self._mkdir(settings.GIT_REPO_DIR)
self._add_edx4edx()
response = self.client.get(reverse('gitlogs'))
# Check that our earlier import has a log with a link to details
self.assertIn('/gitlogs/course-v1:MITx+edx4edx+edx4edx', response.content.decode('utf-8'))
response = self.client.get(
reverse('gitlogs_detail', kwargs={
'course_id': 'course-v1:MITx+edx4edx+edx4edx'}))
self.assertIn('======> IMPORTING course', response.content.decode('utf-8'))
self._rm_edx4edx()
def test_gitlog_date(self):
"""
Make sure the date is timezone-aware and being converted/formatted
properly.
"""
tz_names = [
'America/New_York', # UTC - 5
'Asia/Pyongyang', # UTC + 9
'Europe/London', # UTC
'Canada/Yukon', # UTC - 8
'Europe/Moscow', # UTC + 4
]
tz_format = DEFAULT_DATE_TIME_FORMAT
self._setstaff_login()
self._mkdir(settings.GIT_REPO_DIR)
self._add_edx4edx()
date = CourseImportLog.objects.first().created.replace(tzinfo=UTC)
for timezone in tz_names:
with (override_settings(TIME_ZONE=timezone)):
date_text = get_time_display(date, tz_format, settings.TIME_ZONE)
response = self.client.get(reverse('gitlogs'))
self.assertIn(date_text, response.content.decode('UTF-8'))
self._rm_edx4edx()
def test_gitlog_bad_course(self):
"""
Make sure we gracefully handle courses that don't exist.
"""
self._setstaff_login()
response = self.client.get(
reverse('gitlogs_detail', kwargs={
'course_id': 'Not/Real/Testing'}))
self.assertIn(
'No git import logs have been recorded for this course.',
response.content
)
def test_gitlog_no_logs(self):
"""
Make sure the template behaves well when rendered despite there not being any logs.
(This is for courses imported using methods other than the git_add_course command)
"""
self._setstaff_login()
self._mkdir(settings.GIT_REPO_DIR)
self._add_edx4edx()
# Simulate a lack of git import logs
import_logs = CourseImportLog.objects.all()
import_logs.delete()
response = self.client.get(
reverse('gitlogs_detail', kwargs={
'course_id': 'course-v1:MITx+edx4edx+edx4edx'
})
)
self.assertIn('No git import logs have been recorded for this course.', response.content.decode('utf-8'))
self._rm_edx4edx()
def test_gitlog_pagination_out_of_range_invalid(self):
"""
Make sure the pagination behaves properly when the requested page is out
of range.
"""
self._setstaff_login()
mongoengine.connect(TEST_MONGODB_LOG['db'])
for _ in range(15):
CourseImportLog(
course_id=CourseLocator.from_string("test/test/test"),
location="location",
import_log="import_log",
git_log="git_log",
repo_dir="repo_dir",
created=datetime.now()
).save()
for page, expected in [(-1, 1), (1, 1), (2, 2), (30, 2), ('abc', 1)]:
response = self.client.get(
'{}?page={}'.format(
reverse('gitlogs'),
page
)
)
self.assertIn(
u'Page {} of 2'.format(expected),
response.content.decode(response.charset)
)
CourseImportLog.objects.delete()
def test_gitlog_courseteam_access(self):
"""
Ensure course team users are allowed to access only their own course.
"""
self._mkdir(settings.GIT_REPO_DIR)
self._setstaff_login()
self._add_edx4edx()
self.user.is_staff = False
self.user.save()
logged_in = self.client.login(username=self.user.username,
password='foo')
response = self.client.get(reverse('gitlogs'))
# Make sure our non privileged user doesn't have access to all logs
self.assertEqual(response.status_code, 404)
# Or specific logs
response = self.client.get(reverse('gitlogs_detail', kwargs={
'course_id': 'course-v1:MITx+edx4edx+edx4edx'
}))
self.assertEqual(response.status_code, 404)
# Add user as staff in course team
def_ms = modulestore()
course = def_ms.get_course(CourseLocator('MITx', 'edx4edx', 'edx4edx'))
CourseStaffRole(course.id).add_users(self.user)
self.assertTrue(CourseStaffRole(course.id).has_user(self.user))
logged_in = self.client.login(username=self.user.username,
password='foo')
self.assertTrue(logged_in)
response = self.client.get(
reverse('gitlogs_detail', kwargs={
'course_id': 'course-v1:MITx+edx4edx+edx4edx'
}))
self.assertIn('======> IMPORTING course', response.content.decode('utf-8'))
self._rm_edx4edx()
|
ESOedX/edx-platform
|
lms/djangoapps/dashboard/tests/test_sysadmin.py
|
Python
|
agpl-3.0
| 12,287 | 0.001139 |
# A unit fraction contains 1 in the numerator. The decimal representation of the unit fractions with denominators 2 to 10 are given:
# 1/2 = 0.5
# 1/3 = 0.(3)
# 1/4 = 0.25
# 1/5 = 0.2
# 1/6 = 0.1(6)
# 1/7 = 0.(142857)
# 1/8 = 0.125
# 1/9 = 0.(1)
# 1/10 = 0.1
# Where 0.1(6) means 0.166666..., and has a 1-digit recurring cycle. It can be seen that 1/7 has a 6-digit recurring cycle.
# Find the value of d < 1000 for which 1/d contains the longest recurring cycle in its decimal fraction part.
from decimal import *
getcontext().prec = 3000
max_len = 6
repeating = 6
repeating_num = 7
for n in range(6,1000):
num = str(Decimal(1) / Decimal(n))[2:]
    if len(num) <= max_len * 2: continue
for j in range(10):
breaker = False
for i in range(max_len, int(len(num)/2)-j):
# print(n, num[j:j+i], num[j+i:i*2+j])
if num[j:j+i] == num[j+i:i*2+j]:
                # the repeated block num[j:j+i] has length i; the original
                # measured len(num[j:i]), which under-counts whenever j > 0
                if i >= repeating:
                    repeating = i
                    repeating_num = n
                    # print(n, num[j:j+i])
breaker = True
break
if breaker:break
print(repeating_num)
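# Alternative sketch (not part of the original solution): for d coprime to 10,
# the cycle length equals the multiplicative order of 10 modulo d, which avoids
# string matching entirely:
#
#   def cycle_length(d):
#       while d % 2 == 0: d //= 2   # strip factors of 2 and 5; they only
#       while d % 5 == 0: d //= 5   # contribute a non-repeating prefix
#       if d == 1: return 0
#       k, r = 1, 10 % d
#       while r != 1:
#           r = (r * 10) % d
#           k += 1
#       return k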
|
drcsturm/project-euler
|
p026.py
|
Python
|
mit
| 1,084 | 0.021218 |
import errno
import os
import unittest
import mock
from taskcat._common_utils import (
exit_with_code,
fetch_ssm_parameter_value,
get_s3_domain,
make_dir,
merge_dicts,
name_from_stack_id,
param_list_to_dict,
pascal_to_snake,
region_from_stack_id,
s3_bucket_name_from_url,
s3_key_from_url,
s3_url_maker,
)
from taskcat.exceptions import TaskCatException
class TestCommonUtils(unittest.TestCase):
def test_get_param_includes(self):
bad_testcases = [{}, [[]], [{}]]
for bad in bad_testcases:
with self.assertRaises(TaskCatException):
param_list_to_dict(bad)
def test_region_from_stack_id(self):
actual = region_from_stack_id("arn:::us-east-1")
self.assertEqual("us-east-1", actual)
def test_name_from_stack_id(self):
actual = name_from_stack_id("arn:::us-east-1::Stack/test-name")
self.assertEqual("test-name", actual)
@mock.patch("taskcat._common_utils.get_s3_domain", return_value="amazonaws.com")
def test_s3_url_maker(self, m_get_s3_domain):
m_s3 = mock.Mock()
m_s3.get_bucket_location.return_value = {"LocationConstraint": None}
actual = s3_url_maker("test-bucket", "test-key/1", m_s3)
self.assertEqual(
"https://test-bucket.s3.us-east-1.amazonaws.com/test-key/1", actual
)
m_s3.get_bucket_location.return_value = {"LocationConstraint": "us-west-2"}
actual = s3_url_maker("test-bucket", "test-key/1", m_s3)
self.assertEqual(
"https://test-bucket.s3.us-west-2.amazonaws.com/test-key/1", actual
)
m_get_s3_domain.assert_called_once()
def test_get_s3_domain(self):
actual = get_s3_domain("cn-north-1")
self.assertEqual("amazonaws.com.cn", actual)
with self.assertRaises(TaskCatException):
get_s3_domain("totally-invalid-region")
    def test_merge_dicts(self):
        dict_list = [{}, {}]  # renamed from "input" to avoid shadowing the builtin
        actual = merge_dicts(dict_list)
        self.assertEqual({}, actual)
        dict_list = [{"a": 1}, {"b": 2}]
        actual = merge_dicts(dict_list)
        self.assertEqual({"a": 1, "b": 2}, actual)
def test_pascal_to_snake(self):
actual = pascal_to_snake("MyParam")
self.assertEqual("my_param", actual)
actual = pascal_to_snake("VPCParam")
self.assertEqual("vpcparam", actual)
def test_make_dir(self):
path = "/tmp/test_make_dir_path"
try:
os.rmdir(path)
except FileNotFoundError:
pass
os.makedirs(path)
make_dir(path)
os.rmdir(path)
make_dir(path)
self.assertEqual(os.path.isdir(path), True)
with self.assertRaises(FileExistsError) as cm:
make_dir(path, False)
self.assertEqual(cm.exception.errno, errno.EEXIST)
os.rmdir(path)
@mock.patch("taskcat._common_utils.sys.exit", autospec=True)
@mock.patch("taskcat._common_utils.LOG", autospec=True)
def test_exit_with_code(self, mock_log, mock_exit):
exit_with_code(1)
mock_log.error.assert_not_called()
mock_exit.assert_called_once_with(1)
mock_exit.reset_mock()
exit_with_code(0, "msg")
mock_exit.assert_called_once_with(0)
mock_exit.assert_called_once()
def test_s3_key_from_url(self):
k = s3_key_from_url("https://testbuk.s3.amazonaws.com/testprefix/testobj.yaml")
self.assertEqual("testprefix/testobj.yaml", k)
def test_s3_bucket_name_from_url(self):
bucket = s3_bucket_name_from_url("https://buk.s3.amazonaws.com/obj.yaml")
self.assertEqual("buk", bucket)
def test_fetch_ssm_parameter_value(self):
# String, no explicit version.
m_boto_client = mock.Mock()
m_ssm = mock.Mock()
m_boto_client.return_value = m_ssm
m_ssm.get_parameter.return_value = {
"Parameter": {"Name": "foo", "Type": "String", "Value": "bar", "Version": 1}
}
expected = "bar"
actual = fetch_ssm_parameter_value(m_boto_client, "foo")
self.assertEqual(expected, actual)
m_ssm.get_parameter.return_value = {
"Parameter": {
"Name": "foo",
"Type": "StringList",
"Value": "bar,baz,11",
"Version": 1,
}
}
expected = "bar,baz,11"
actual = fetch_ssm_parameter_value(m_boto_client, "foo")
self.assertEqual(expected, actual)
|
aws-quickstart/taskcat
|
tests/test_common_utils.py
|
Python
|
apache-2.0
| 4,514 | 0.001108 |
"""
@date 2014-11-16
@author Hong-She Liang <starofrainnight@gmail.com>
"""
from selenium.common.exceptions import *
|
starofrainnight/rabird.selenium
|
rabird/selenium/exceptions.py
|
Python
|
apache-2.0
| 118 | 0 |
"""
Script which defines Fabric tasks.
"""
from fabric.api import local, task, lcd
from utils import create_bundle_server as create_bs, create_bundle_client as create_bc
@task
def create_bundle_server(bundle_name=None):
"""
Creates bundle for server-side of the application.
"""
create_bs(bundle_name)
@task
def create_bundle_client(bundle_name=None):
"""
Creates bundle for client-side of the application.
"""
create_bc(bundle_name)
@task
def create_bundle(bundle_name=None):
"""
Creates bundle for server and client.
"""
create_bundle_server(bundle_name)
create_bundle_client(bundle_name)
@task
def run_test_server(is_ci=False):
"""
Runs unittests for server-side of the application.
"""
bundle_list = local('find server/* -type d | grep tests', capture=True)
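    # e.g. a catalog such as "server/app/tests" becomes the xunit file name
    # "server_app_tests.xml" once '/' is replaced by '_' below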
for catalog in bundle_list.split("\n"):
if is_ci:
xunit_file = catalog.replace('/', '_')
local("nosetests --with-xunit \
--xunit-file=../build/logs/xunit/" +
xunit_file + '.xml ' + catalog)
else:
local("nosetests " + catalog)
@task
def run_stylechecker_server(is_ci=False):
"""
Runs style checkers for server-side of the application.
"""
if is_ci:
local("pep8 server | tee ../build/logs/stylechecker/pep8.out")
local("pylint server | tee ../build/logs/stylechecker/pylint.out")
else:
local("pep8 server")
local("pylint server")
@task
def run_test_client():
"""
Runs unittests for client-side of the application.
"""
with lcd("client"):
local('grunt clean', capture=True)
local('grunt build', capture=True)
local('grunt test', capture=True)
@task
def run_test():
"""
Runs unittests for server and client.
"""
run_test_server()
run_test_client()
|
PearsonIOKI/ioki-social
|
fabfile.py
|
Python
|
mit
| 1,897 | 0.001054 |
from django.db import models
from django.utils.translation import ugettext_lazy as _
from cms.models import CMSPlugin, Page
from cms import settings
class InheritPagePlaceholder(CMSPlugin):
"""
Provides the ability to inherit plugins for a certain placeholder from an associated "parent" page instance
"""
from_page = models.ForeignKey(Page, null=True, blank=True, help_text=_("Choose a page to include its plugins into this placeholder, empty will choose current page"))
from_language = models.CharField(_("language"), max_length=5, choices=settings.CMS_LANGUAGES, blank=True, null=True, help_text=_("Optional: the language of the plugins you want"))
|
team-xue/xue
|
xue/cms/plugins/inherit/models.py
|
Python
|
bsd-3-clause
| 673 | 0.005944 |
#
# highlight.py
# Part of SublimeLinter3, a code checking framework for Sublime Text 3
#
# Written by Ryan Hileman and Aparajita Fishman
#
# Project: https://github.com/SublimeLinter/SublimeLinter3
# License: MIT
#
"""
This module implements highlighting code with marks.
The following classes are exported:
HighlightSet
Highlight
The following constants are exported:
WARNING - name of warning type
ERROR - name of error type
MARK_KEY_FORMAT - format string for key used to mark code regions
GUTTER_MARK_KEY_FORMAT - format string for key used to mark gutter mark regions
MARK_SCOPE_FORMAT - format string used for color scheme scope names
"""
import re
import sublime
from . import persist
#
# Error types
#
WARNING = 'warning'
ERROR = 'error'
MARK_KEY_FORMAT = 'sublimelinter-{}-marks'
GUTTER_MARK_KEY_FORMAT = 'sublimelinter-{}-gutter-marks'
MARK_SCOPE_FORMAT = 'sublimelinter.mark.{}'
UNDERLINE_FLAGS = sublime.DRAW_NO_FILL | sublime.DRAW_NO_OUTLINE | sublime.DRAW_EMPTY_AS_OVERWRITE
MARK_STYLES = {
'outline': sublime.DRAW_NO_FILL,
'fill': sublime.DRAW_NO_OUTLINE,
'solid underline': sublime.DRAW_SOLID_UNDERLINE | UNDERLINE_FLAGS,
'squiggly underline': sublime.DRAW_SQUIGGLY_UNDERLINE | UNDERLINE_FLAGS,
'stippled underline': sublime.DRAW_STIPPLED_UNDERLINE | UNDERLINE_FLAGS,
'none': sublime.HIDDEN
}
WORD_RE = re.compile(r'^([-\w]+)')
NEAR_RE_TEMPLATE = r'(?<!"){}({}){}(?!")'
def mark_style_names():
"""Return the keys from MARK_STYLES, sorted and capitalized, with None at the end."""
names = list(MARK_STYLES)
names.remove('none')
names.sort()
names.append('none')
return [name.capitalize() for name in names]
class HighlightSet:
"""This class maintains a set of Highlight objects and performs bulk operations on them."""
def __init__(self):
"""Initialize a new instance."""
self.all = set()
def add(self, highlight):
"""Add a Highlight to the set."""
self.all.add(highlight)
def draw(self, view):
"""
Draw all of the Highlight objects in our set.
Rather than draw each Highlight object individually, the marks in each
object are aggregated into a new Highlight object, and that object
is then drawn for the given view.
"""
if not self.all:
return
all = Highlight()
for highlight in self.all:
all.update(highlight)
all.draw(view)
@staticmethod
def clear(view):
"""Clear all marks in the given view."""
for error_type in (WARNING, ERROR):
view.erase_regions(MARK_KEY_FORMAT.format(error_type))
view.erase_regions(GUTTER_MARK_KEY_FORMAT.format(error_type))
def redraw(self, view):
"""Redraw all marks in the given view."""
self.clear(view)
self.draw(view)
def reset(self, view):
"""Clear all marks in the given view and reset the list of marks in our Highlights."""
self.clear(view)
for highlight in self.all:
highlight.reset()
def line_type(self, line):
"""Return the primary error type for the given line number."""
if not self.all:
return None
line_type = None
for highlight in self.all:
if line_type == ERROR:
continue
_line_type = highlight.lines.get(line)
if _line_type != WARNING and line_type == WARNING:
continue
line_type = _line_type
return line_type
class Highlight:
"""This class maintains error marks and knows how to draw them."""
def __init__(self, code=''):
"""Initialize a new instance."""
self.code = code
self.marks = {WARNING: [], ERROR: []}
self.mark_style = 'outline'
self.mark_flags = MARK_STYLES[self.mark_style]
# Every line that has a mark is kept in this dict, so we know which
# lines to mark in the gutter.
self.lines = {}
# These are used when highlighting embedded code, for example JavaScript
# or CSS within an HTML file. The embedded code is linted as if it begins
# at (0, 0), but we need to keep track of where the actual start is within the source.
self.line_offset = 0
self.char_offset = 0
# Linting runs asynchronously on a snapshot of the code. Marks are added to the code
# during that asynchronous linting, and the markup code needs to calculate character
# positions given a line + column. By the time marks are added, the actual buffer
# may have changed, so we can't reliably use the plugin API to calculate character
# positions. The solution is to calculate and store the character positions for
# every line when this object is created, then reference that when needed.
self.newlines = newlines = [0]
last = -1
while True:
last = code.find('\n', last + 1)
if last == -1:
break
newlines.append(last + 1)
newlines.append(len(code))
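        # e.g. for code == "ab\ncd" this yields newlines == [0, 3, 5]: the
        # start offset of each line plus a final sentinel at len(code)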
@staticmethod
def strip_quotes(text):
"""Return text stripped of enclosing single/double quotes."""
first = text[0]
if first in ('\'', '"') and text[-1] == first:
text = text[1:-1]
return text
def full_line(self, line):
"""
Return the start/end character positions for the given line.
This returns *real* character positions (relative to the beginning of self.code)
        based on the *virtual* line number (adjusted by self.line_offset).
"""
# The first line of the code needs the character offset
if line == 0:
char_offset = self.char_offset
else:
char_offset = 0
line += self.line_offset
start = self.newlines[line] + char_offset
end = self.newlines[min(line + 1, len(self.newlines) - 1)]
return start, end
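    # Worked example (with the [0, 3, 5] newlines above and zero offsets):
    # full_line(0) returns (0, 3) -- the span of "ab\n" within self.code.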
def range(self, line, pos, length=-1, near=None, error_type=ERROR, word_re=None):
"""
Mark a range of text.
        line and pos should be zero-based. The pos and length arguments can be used to control marking:
- If pos < 0, the entire line is marked and length is ignored.
- If near is not None, it is stripped of quotes and length = len(near)
- If length < 0, the nearest word starting at pos is marked, and if
no word is matched, the character at pos is marked.
- If length == 0, no text is marked, but a gutter mark will appear on that line.
error_type determines what type of error mark will be drawn (ERROR or WARNING).
When length < 0, this method attempts to mark the closest word at pos on the given line.
If you want to customize the word matching regex, pass it in word_re.
If the error_type is WARNING and an identical ERROR region exists, it is not added.
If the error_type is ERROR and an identical WARNING region exists, the warning region
is removed and the error region is added.
"""
start, end = self.full_line(line)
if pos < 0:
pos = 0
length = (end - start) - 1
elif near is not None:
near = self.strip_quotes(near)
length = len(near)
elif length < 0:
code = self.code[start:end][pos:]
match = (word_re or WORD_RE).search(code)
if match:
length = len(match.group())
else:
length = 1
pos += start
region = sublime.Region(pos, pos + length)
other_type = ERROR if error_type == WARNING else WARNING
i_offset = 0
for i, mark in enumerate(self.marks[other_type].copy()):
if mark.a == region.a and mark.b == region.b:
if error_type == WARNING:
return
else:
self.marks[other_type].pop(i - i_offset)
i_offset += 1
self.marks[error_type].append(region)
def regex(self, line, regex, error_type=ERROR,
line_match=None, word_match=None, word_re=None):
"""
Mark a range of text that matches a regex.
line, error_type and word_re are the same as in range().
line_match may be a string pattern or a compiled regex.
If provided, it must have a named group called 'match' that
determines which part of the source line will be considered
for marking.
word_match may be a string pattern or a compiled regex.
If provided, it must have a named group called 'mark' that
determines which part of the source line will actually be marked.
Multiple portions of the source line may match.
"""
offset = 0
start, end = self.full_line(line)
line_text = self.code[start:end]
if line_match:
match = re.match(line_match, line_text)
if match:
line_text = match.group('match')
offset = match.start('match')
else:
return
it = re.finditer(regex, line_text)
results = [
result.span('mark')
for result in it
if word_match is None or result.group('mark') == word_match
]
for start, end in results:
self.range(line, start + offset, end - start, error_type=error_type)
def near(self, line, near, error_type=ERROR, word_re=None):
"""
Mark a range of text near a given word.
line, error_type and word_re are the same as in range().
If near is enclosed by quotes, they are stripped. The first occurrence
of near in the given line of code is matched. If the first and last
characters of near are word characters, a match occurs only if near
is a complete word.
The position at which near is found is returned, or zero if there
is no match.
"""
if not near:
return
start, end = self.full_line(line)
text = self.code[start:end]
near = self.strip_quotes(near)
# Add \b fences around the text if it begins/ends with a word character
fence = ['', '']
for i, pos in enumerate((0, -1)):
if near[pos].isalnum() or near[pos] == '_':
fence[i] = r'\b'
pattern = NEAR_RE_TEMPLATE.format(fence[0], re.escape(near), fence[1])
match = re.search(pattern, text)
if match:
start = match.start(1)
else:
start = -1
if start != -1:
self.range(line, start, len(near), error_type=error_type, word_re=word_re)
return start
else:
return 0
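    # For near == "foo" the pattern built above is (?<!")\b(foo)\b(?!") -- the
    # \b fences require a whole-word match and the lookarounds reject quoted text.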
def update(self, other):
"""
Update this object with another Highlight.
It is assumed that other.code == self.code.
other's marks and error positions are merged, and this
object takes the newlines array from other.
"""
for error_type in (WARNING, ERROR):
self.marks[error_type].extend(other.marks[error_type])
# Errors override warnings on the same line
for line, error_type in other.lines.items():
current_type = self.lines.get(line)
if current_type is None or current_type == WARNING:
self.lines[line] = error_type
self.newlines = other.newlines
def set_mark_style(self):
"""Setup the mark style and flags based on settings."""
self.mark_style = persist.settings.get('mark_style', 'outline')
self.mark_flags = MARK_STYLES[self.mark_style]
if not persist.settings.get('show_marks_in_minimap'):
self.mark_flags |= sublime.HIDE_ON_MINIMAP
def draw(self, view):
"""
Draw code and gutter marks in the given view.
Error, warning and gutter marks are drawn with separate regions,
since each one potentially needs a different color.
"""
self.set_mark_style()
gutter_regions = {WARNING: [], ERROR: []}
draw_gutter_marks = persist.settings.get('gutter_theme') != 'None'
if draw_gutter_marks:
# We use separate regions for the gutter marks so we can use
# a scope that will not colorize the gutter icon, and to ensure
# that errors will override warnings.
for line, error_type in self.lines.items():
region = sublime.Region(self.newlines[line], self.newlines[line])
gutter_regions[error_type].append(region)
for error_type in (WARNING, ERROR):
if self.marks[error_type]:
view.add_regions(
MARK_KEY_FORMAT.format(error_type),
self.marks[error_type],
MARK_SCOPE_FORMAT.format(error_type),
flags=self.mark_flags
)
if draw_gutter_marks and gutter_regions[error_type]:
if persist.gutter_marks['colorize']:
scope = MARK_SCOPE_FORMAT.format(error_type)
else:
scope = 'sublimelinter.gutter-mark'
view.add_regions(
GUTTER_MARK_KEY_FORMAT.format(error_type),
gutter_regions[error_type],
scope,
icon=persist.gutter_marks[error_type]
)
@staticmethod
def clear(view):
"""Clear all marks in the given view."""
for error_type in (WARNING, ERROR):
view.erase_regions(MARK_KEY_FORMAT.format(error_type))
view.erase_regions(GUTTER_MARK_KEY_FORMAT.format(error_type))
def reset(self):
"""
Clear the list of marks maintained by this object.
This method does not clear the marks, only the list.
The next time this object is used to draw, the marks will be cleared.
"""
for error_type in (WARNING, ERROR):
del self.marks[error_type][:]
self.lines.clear()
def line(self, line, error_type):
"""Record the given line as having the given error type."""
line += self.line_offset
# Errors override warnings, if it's already an error leave it
if self.lines.get(line) == ERROR:
return
self.lines[line] = error_type
def move_to(self, line, char_offset):
"""
Move the highlight to the given line and character offset.
The character offset is relative to the start of the line.
This method is used to create virtual line numbers
and character positions when linting embedded code.
"""
self.line_offset = line
self.char_offset = char_offset
|
micahwood/linux-dotfiles
|
sublime/Packages/SublimeLinter/lint/highlight.py
|
Python
|
mit
| 14,958 | 0.001738 |
#!/usr/bin/env python
#
# Generate pnSeed[] from Pieter's DNS seeder
#
NSEEDS=600
import re
import sys
def main():
lines = sys.stdin.readlines()
ips = []
pattern = re.compile(r"^(\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3}):17792")
for line in lines:
m = pattern.match(line)
if m is None:
continue
ip = 0
for i in range(0,4):
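            # pack octets little-endian: "1.2.3.4" becomes 0x04030201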
ip = ip + (int(m.group(i+1)) << (8*(i)))
if ip == 0:
continue
ips.append(ip)
for row in range(0, min(NSEEDS,len(ips)), 8):
print " " + ", ".join([ "0x%08x"%i for i in ips[row:row+8] ]) + ","
if __name__ == '__main__':
main()
|
prolifik/cakecoin
|
contrib/seeds/makeseeds.py
|
Python
|
mit
| 709 | 0.015515 |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: proto/lib/python/bool.proto
"""Generated protocol buffer code."""
# third party
from google.protobuf import descriptor as _descriptor
from google.protobuf import descriptor_pool as _descriptor_pool
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
# syft absolute
from syft.proto.core.common import (
common_object_pb2 as proto_dot_core_dot_common_dot_common__object__pb2,
)
DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(
b'\n\x1bproto/lib/python/bool.proto\x12\x0fsyft.lib.python\x1a%proto/core/common/common_object.proto"7\n\x04\x42ool\x12\x0c\n\x04\x64\x61ta\x18\x01 \x01(\x08\x12!\n\x02id\x18\x02 \x01(\x0b\x32\x15.syft.core.common.UIDb\x06proto3'
)
_BOOL = DESCRIPTOR.message_types_by_name["Bool"]
Bool = _reflection.GeneratedProtocolMessageType(
"Bool",
(_message.Message,),
{
"DESCRIPTOR": _BOOL,
"__module__": "proto.lib.python.bool_pb2"
# @@protoc_insertion_point(class_scope:syft.lib.python.Bool)
},
)
_sym_db.RegisterMessage(Bool)
if _descriptor._USE_C_DESCRIPTORS == False:
DESCRIPTOR._options = None
_BOOL._serialized_start = 87
_BOOL._serialized_end = 142
# @@protoc_insertion_point(module_scope)
|
OpenMined/PySyft
|
packages/syft/src/syft/proto/lib/python/bool_pb2.py
|
Python
|
apache-2.0
| 1,474 | 0.002035 |
# -*- coding: utf-8 -*-
# Copyright 2016 Camptocamp SA
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl)
import openerp.tests.common as test_common
# @test_common.post_install(True)
class TestSwedishRounding(test_common.TransactionCase):
def create_dummy_invoice(self):
invoice = self.env['account.invoice'].create({
'partner_id': self.partner.id,
'currency_id': self.env.ref('base.EUR').id,
'account_id': self.account.id,
'date_invoice': '2018-01-01',
'invoice_line': [(0, 0, {
'name': 'Dummy invoice line',
'product_id': self.product.id,
'invoice_line_tax_id': [(4, self.tax_10.id)],
'account_id': self.account.id,
'quantity': 1,
'price_unit': 99.99,
'journal_id': self.journal_sale.id
})]
})
return invoice
def create_dummy_invoice_2(self):
invoice = self.env['account.invoice'].create({
'partner_id': self.partner.id,
'currency_id': self.env.ref('base.EUR').id,
'account_id': self.account.id,
'date_invoice': '2018-01-01',
'invoice_line': [(0, 0, {
'name': 'Dummy invoice line',
'product_id': self.product.id,
'invoice_line_tax_id': [(4, self.tax_77.id)],
'account_id': self.account.id,
'quantity': 1,
'price_unit': 90,
'journal_id': self.journal_sale.id
})]
})
return invoice
def create_two_lines_dummy_invoice(self):
invoice = self.env['account.invoice'].create({
'partner_id': self.partner.id,
'currency_id': self.env.ref('base.EUR').id,
'account_id': self.account.id,
'date_invoice': '2018-01-01',
'invoice_line': [(0, 0, {
'name': 'Dummy invoice line',
'product_id': self.product.id,
'invoice_line_tax_id': [(4, self.tax_10.id)],
'account_id': self.account.id,
'quantity': 1,
'price_unit': 99.99,
'journal_id': self.journal_sale.id
}), (0, 0, {
'name': 'Dummy invoice line',
'product_id': self.product2.id,
'invoice_line_tax_id': [(4, self.tax_20.id)],
'account_id': self.account.id,
'quantity': 1,
'price_unit': 19.99,
'journal_id': self.journal_sale.id
})]
})
return invoice
def setUp(self):
super(TestSwedishRounding, self).setUp()
# self.sudo(self.ref('base.user_demo'))
expense_type = self.env.ref('account.data_account_type_expense')
self.journal_sale = self.env["account.journal"].create({
"name": "Test sale journal",
"type": "sale",
"code": "TEST_SJ",
})
self.account = self.env['account.account'].create({
'name': 'Rounding account',
'code': '6666',
'user_type': expense_type.id
})
tax_code_0 = self.env['account.tax.code'].create({
'name': 'Tax0',
'sign': 1,
})
tax_code_1 = self.env['account.tax.code'].create({
'name': 'Tax1',
'sign': 1,
})
tax_code_2 = self.env['account.tax.code'].create({
'name': 'Tax2',
'sign': 1,
})
self.tax_77 = self.env['account.tax'].create({
'name': 'Dummy tax 7.7%',
'type': 'percent',
'amount': .077,
'type_tax_use': 'sale',
'tax_code_id': tax_code_0.id,
})
self.tax_10 = self.env['account.tax'].create({
'name': 'Dummy tax 10%',
'type': 'percent',
'amount': .1,
'type_tax_use': 'sale',
'tax_code_id': tax_code_1.id,
})
self.tax_20 = self.env['account.tax'].create({
'name': 'Dummy tax 20%',
'type': 'percent',
'amount': .20,
'type_tax_use': 'sale',
'tax_code_id': tax_code_2.id,
})
self.partner = self.env['res.partner'].create({
'name': 'Test Partner',
})
self.product = self.env['product.product'].create({
'name': 'Product Test',
'list_price': 99.99,
'default_code': 'TEST0001',
})
self.product2 = self.env['product.product'].create({
'name': 'Product Test 2',
'list_price': 19.99,
'default_code': 'TEST0001',
})
def test_rounding_globally(self):
company = self.env.ref('base.main_company')
company.write({
'tax_calculation_rounding_method': 'swedish_round_globally',
'tax_calculation_rounding': 0.05,
})
invoice1 = self.create_dummy_invoice()
invoice1.button_reset_taxes()
invoice1.signal_workflow('invoice_open')
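        # arithmetic check: 99.99 * 1.10 = 109.989, which the 0.05 swedish
        # rounding lifts to 110.00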
self.assertEqual(invoice1.amount_total, 110)
invoice2 = self.create_two_lines_dummy_invoice()
invoice2.button_reset_taxes()
self.assertEqual(invoice2.amount_total, 134)
self.assertEqual(sum([t.amount for t in invoice2.tax_line]), 14.02)
bigger_tax = self.env['account.invoice.tax'].search([
('invoice_id', '=', invoice2.id)], limit=1, order='amount desc')
self.assertEqual(bigger_tax.amount, 10.02)
self.assertEqual(len(invoice2.invoice_line), 2)
self.assertFalse(invoice2.global_round_line_id)
def test_rounding_per_line(self):
company = self.env.ref('base.main_company')
company.write({
'tax_calculation_rounding_method': 'swedish_add_invoice_line',
'tax_calculation_rounding': 0.05,
'tax_calculation_rounding_account_id': self.account.id
})
invoice1 = self.create_dummy_invoice()
invoice1.signal_workflow('invoice_open')
invoice1.button_reset_taxes()
self.assertEqual(invoice1.amount_total, 110)
invoice2 = self.create_two_lines_dummy_invoice()
invoice2.button_reset_taxes()
invoice2.signal_workflow('invoice_open')
self.assertEqual(invoice2.amount_total, 134)
self.assertEqual(sum([t.amount for t in invoice2.tax_line]), 14)
self.assertEqual(len(invoice2.invoice_line), 3)
self.assertEqual(invoice2.global_round_line_id.price_subtotal, 0.02)
# test with pressing taxes reset button before validation
invoice3 = self.create_dummy_invoice_2()
invoice3.button_reset_taxes()
invoice3.signal_workflow('invoice_open')
self.assertEqual(invoice3.amount_total, 96.95)
self.assertEqual(invoice3.amount_untaxed, 90.02)
# test without pressing taxes reset button before validation
invoice3 = self.create_dummy_invoice_2()
invoice3.signal_workflow('invoice_open')
self.assertEqual(invoice3.amount_total, 96.95)
self.assertEqual(invoice3.amount_untaxed, 90.02)
|
acsone/account-invoicing
|
account_invoice_rounding/tests/test_invoice_rounding.py
|
Python
|
agpl-3.0
| 7,217 | 0 |
def joke():
return 'Knock Knock. Who is there?'
|
cloudfoundry/python-buildpack
|
fixtures/setup_py/funniest/__init__.py
|
Python
|
apache-2.0
| 52 | 0 |
from kivy.uix.floatlayout import FloatLayout
from kivy.properties import NumericProperty, ObjectProperty
from kivy.graphics import Color, Ellipse, Line
from kivy.graphics.transformation import Matrix
from kivy.core.window import Window
from simulationLine import SimulationLine
from simulationAngle import SimulationAngle
from simulationSled import SimulationSled
from chainLengthToXY import ChainLengthtoXY
from posToChainLength import PosToChainLength
import re
import math
class SimulationCanvas(FloatLayout):
scatterObject = ObjectProperty(None)
motorLift = 220
motorTranslate = 258.8
bedWidth = 2438.4 #8'
bedHeight = 1219.2 #4'
motorY = bedHeight + motorLift
motor2X = bedWidth + motorTranslate
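    # With these defaults: motorY = 1219.2 + 220 = 1439.2 mm and
    # motor2X = 2438.4 + 258.8 = 2697.2 mm (the motors sit above and
    # outside the bed)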
def initialize(self):
self.startChains()
self.drawFrame()
self.setSpindleLocation(self.bedWidth/2,self.bedHeight/2)
self.setInitialZoom()
self.xPosSlider.bind(value=self.xPosSliderValueChange)
self.yPosSlider.bind(value=self.yPosSliderValueChange)
self.setupAngles()
self.setupSled()
self.lengthToXY.initialize(self.chainA, self.chainB, self.bedWidth+2*self.motorTranslate, self.bedHeight+self.motorLift, self.motorTranslate, self.motorLift)
self.posToLength.initialize(self.sled, self.bedWidth+2*self.motorTranslate, self.bedHeight+self.motorLift, self.motorTranslate, self.motorLift)
def setSpindleLocation(self,x,y):
self.chainA.setEnd(x,y)
self.chainB.setEnd(x,y)
def xPosSliderValueChange(self,callback,value):
self.setSpindleLocation(value,self.chainA.toPos[1])
def yPosSliderValueChange(self,callback,value):
self.setSpindleLocation(self.chainA.toPos[0], value)
def drawFrame(self):
self.frameLeft.initialize()
self.frameTop.initialize()
self.frameRight.initialize()
self.frameBottom.initialize()
self.frameLeft.setStart(0,0)
self.frameLeft.setEnd(0,self.bedHeight)
self.frameLeft.color = (1,0,0)
self.frameTop.setStart(0,self.bedHeight)
self.frameTop.setEnd(self.bedWidth,self.bedHeight)
self.frameTop.color = (1,0,0)
self.frameRight.setStart(self.bedWidth,0)
self.frameRight.setEnd(self.bedWidth,self.bedHeight)
self.frameRight.color = (1,0,0)
self.frameBottom.setStart(0,0)
self.frameBottom.setEnd(self.bedWidth,0)
self.frameBottom.color = (1,0,0)
def setupAngles(self):
self.angleA.initialize(self.chainA, self.lineT, 0)
self.angleB.initialize(self.chainB, self.lineT, 0)
self.angleP.initialize(self.chainA, self.chainB, 1)
def setupSled(self):
self.sled.initialize(self.chainA, self.chainB, 1, self.angleP)
def setInitialZoom(self):
mat = Matrix().scale(.4, .4, 1)
self.scatterInstance.apply_transform(mat, (0,0))
mat = Matrix().translate(200, 100, 0)
self.scatterInstance.apply_transform(mat)
def startChains(self):
self.chainA.initialize()
self.chainB.initialize()
self.lineT.initialize()
self.lineT.color = (0,0,1)
self.chainA.setStart(-self.motorTranslate, self.motorY)
self.chainB.setStart(self.motor2X, self.motorY)
self.lineT.setStart(-self.motorTranslate,self.motorY)
self.lineT.setEnd(self.motor2X,self.motorY)
|
shy21grams/GroundControl
|
Simulation/simulationCanvas.py
|
Python
|
gpl-3.0
| 3,902 | 0.022553 |
# -*- coding: utf-8 -*-
"""Operating system independent (generic) preprocessor plugins."""
from dfvfs.helpers import file_system_searcher
from plaso.lib import definitions
from plaso.preprocessors import interface
from plaso.preprocessors import manager
class DetermineOperatingSystemPlugin(
interface.FileSystemArtifactPreprocessorPlugin):
"""Plugin to determine the operating system."""
# pylint: disable=abstract-method
# This plugin does not use an artifact definition and therefore does not
# use _ParsePathSpecification.
# We need to check for both forward and backward slashes since the path
# specification will be dfVFS back-end dependent.
_WINDOWS_LOCATIONS = set([
'/windows/system32', '\\windows\\system32', '/winnt/system32',
'\\winnt\\system32', '/winnt35/system32', '\\winnt35\\system32',
'\\wtsrv\\system32', '/wtsrv/system32'])
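  # For example, a file system exposing "/windows/system32" (or any of the
  # variants above) is classified as the Windows NT operating system family.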
def __init__(self):
"""Initializes a plugin to determine the operating system."""
super(DetermineOperatingSystemPlugin, self).__init__()
self._find_specs = [
file_system_searcher.FindSpec(
case_sensitive=False, location='/etc',
location_separator='/'),
file_system_searcher.FindSpec(
case_sensitive=False, location='/System/Library',
location_separator='/'),
file_system_searcher.FindSpec(
case_sensitive=False, location='\\Windows\\System32',
location_separator='\\'),
file_system_searcher.FindSpec(
case_sensitive=False, location='\\WINNT\\System32',
location_separator='\\'),
file_system_searcher.FindSpec(
case_sensitive=False, location='\\WINNT35\\System32',
location_separator='\\'),
file_system_searcher.FindSpec(
case_sensitive=False, location='\\WTSRV\\System32',
location_separator='\\')]
# pylint: disable=unused-argument
def Collect(self, mediator, artifact_definition, searcher, file_system):
"""Collects values using a file artifact definition.
Args:
mediator (PreprocessMediator): mediates interactions between preprocess
plugins and other components, such as storage and knowledge base.
artifact_definition (artifacts.ArtifactDefinition): artifact definition.
searcher (dfvfs.FileSystemSearcher): file system searcher to preprocess
the file system.
file_system (dfvfs.FileSystem): file system to be preprocessed.
Raises:
PreProcessFail: if the preprocessing fails.
"""
locations = []
for path_spec in searcher.Find(find_specs=self._find_specs):
relative_path = searcher.GetRelativePath(path_spec)
if relative_path:
locations.append(relative_path.lower())
operating_system = definitions.OPERATING_SYSTEM_FAMILY_UNKNOWN
if self._WINDOWS_LOCATIONS.intersection(set(locations)):
operating_system = definitions.OPERATING_SYSTEM_FAMILY_WINDOWS_NT
elif '/system/library' in locations:
operating_system = definitions.OPERATING_SYSTEM_FAMILY_MACOS
elif '/etc' in locations:
operating_system = definitions.OPERATING_SYSTEM_FAMILY_LINUX
if operating_system != definitions.OPERATING_SYSTEM_FAMILY_UNKNOWN:
mediator.SetValue('operating_system', operating_system)
manager.PreprocessPluginsManager.RegisterPlugins([
DetermineOperatingSystemPlugin])
|
log2timeline/plaso
|
plaso/preprocessors/generic.py
|
Python
|
apache-2.0
| 3,399 | 0.005001 |
import openvoronoi as ovd
import ovdvtk # helper library for visualization using vtk
import time
import vtk
import datetime
import math
import random
import os
import sys
import pickle
import gzip
if __name__ == "__main__":
# size of viewport in pixels
# w=2500
# h=1500
# w=1920
# h=1080
w = 1024
h = 800
myscreen = ovdvtk.VTKScreen(width=w, height=h)
ovdvtk.drawOCLtext(myscreen, rev_text=ovd.version())
w2if = vtk.vtkWindowToImageFilter() # for screenshots
w2if.SetInput(myscreen.renWin)
lwr = vtk.vtkPNGWriter()
lwr.SetInputConnection(w2if.GetOutputPort())
# w2if.Modified()
# lwr.SetFileName("tux1.png")
scale = 1
myscreen.render()
random.seed(42)
far = 1
camPos = far
zmult = 3
# camPos/float(1000)
myscreen.camera.SetPosition(0, -camPos / float(1000), zmult * camPos)
myscreen.camera.SetClippingRange(-(zmult + 1) * camPos, (zmult + 1) * camPos)
myscreen.camera.SetFocalPoint(0.0, 0, 0)
vd = ovd.VoronoiDiagram(far, 120)
print ovd.version(), ovd.build_type()
# for vtk visualization
vod = ovdvtk.VD(myscreen, vd, float(scale), textscale=0.01, vertexradius=0.003)
vod.drawFarCircle()
vod.textScale = 0.02
vod.vertexRadius = 0.0031
vod.drawVertices = 0
vod.drawVertexIndex = 1
vod.drawGenerators = 0
vod.offsetEdges = 1
vd.setEdgeOffset(0.05)
linesegs = 1 # switch to turn on/off line-segments
segs = []
# ovd.Point(1,1)
eps = 0.9
p1 = ovd.Point(-0.1, -0.2)
p2 = ovd.Point(0.2, 0.1)
p3 = ovd.Point(0.4, 0.2)
p4 = ovd.Point(0.6, 0.6)
p5 = ovd.Point(-0.6, 0.3)
pts = [p1, p2, p3, p4, p5]
# t_after = time.time()
# print ".done in {0:.3f} s.".format( t_after-t_before )
times = []
id_list = []
m = 0
t_before = time.time()
print "inserting %d VertexSites one by one: " % len(pts)
for p in pts:
id_list.append(vd.addVertexSite(p))
print " %02d added vertex %3d at ( %1.3f, %1.3f )" % (m, id_list[m], p.x, p.y)
m = m + 1
t_after = time.time()
times.append(t_after - t_before)
print "all VertexSites inserted."
vd.check()
t_before = time.time()
# vd.debug_on()
print "inserting %d LineSites one by one: " % (len(id_list))
for n in range(len(id_list)):
print " %02d source - target = %02d - %02d " % (n, id_list[n - 1], id_list[n])
vd.addLineSite(id_list[n - 1], id_list[n])
print "all LineSites inserted."
vd.check()
t_after = time.time()
line_time = t_after - t_before
if line_time < 1e-3:
line_time = 1
times.append(line_time)
vod.setVDText2(times)
err = vd.getStat()
print "getStat() got errorstats for ", len(err), " points"
if len(err) > 1:
minerr = min(err)
maxerr = max(err)
print " min error= ", minerr
print " max error= ", maxerr
print " num vertices: ", vd.numVertices()
print " num SPLIT vertices: ", vd.numSplitVertices()
calctime = t_after - t_before
vod.setAll()
print "PYTHON All DONE."
myscreen.render()
# w2if.Modified()
# lwr.SetFileName("{0}.png".format(Nmax))
# lwr.Write() # write screenshot to file
myscreen.iren.Start()
|
aewallin/openvoronoi
|
python_examples/chain_1.py
|
Python
|
lgpl-2.1
| 3,292 | 0.001215 |
# -*- coding: utf-8 -*-
##
##
## This file is part of Indico.
## Copyright (C) 2002 - 2014 European Organization for Nuclear Research (CERN).
##
## Indico is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 3 of the
## License, or (at your option) any later version.
##
## Indico is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Indico;if not, see <http://www.gnu.org/licenses/>.
from MaKaC.fossils.contribution import IContributionWithSpeakersFossil
from MaKaC.fossils.subcontribution import ISubContributionWithSpeakersFossil
from MaKaC.common.fossilize import addFossil
from MaKaC.conference import Contribution
class IContributionRMFossil(IContributionWithSpeakersFossil):
""" This fossil is ready for when we add subcontribution granularity to contributions
and to provide an example for a plugin-specific fossil
"""
def getSubContributionList(self):
pass
getSubContributionList.result = ISubContributionWithSpeakersFossil
# We cannot include this fossil in the Contribution class directly because it belongs to a plugin
addFossil(Contribution, IContributionRMFossil)
|
pferreir/indico-backup
|
indico/MaKaC/plugins/Collaboration/RecordingManager/fossils.py
|
Python
|
gpl-3.0
| 1,501 | 0.009993 |
# Copyright 2012 VMware, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
import time
import mock
from oslo_config import cfg
from oslo_log import log
from neutron.agent.common import ovs_lib
from neutron.agent.linux import ip_lib
from neutron.plugins.common import constants as p_const
from neutron.plugins.ml2.drivers.openvswitch.agent.common import constants
from neutron.tests.unit.plugins.ml2.drivers.openvswitch.agent \
import ovs_test_base
# Useful global dummy variables.
NET_UUID = '3faeebfe-5d37-11e1-a64b-000c29d5f0a7'
LS_ID = 420
LV_ID = 42
LV_IDS = [42, 43]
VIF_ID = '404deaec-5d37-11e1-a64b-000c29d5f0a8'
VIF_MAC = '3c:09:24:1e:78:23'
OFPORT_NUM = 1
VIF_PORT = ovs_lib.VifPort('port', OFPORT_NUM,
VIF_ID, VIF_MAC, 'switch')
VIF_PORTS = {VIF_ID: VIF_PORT}
FIXED_IPS = [{'subnet_id': 'my-subnet-uuid',
'ip_address': '1.1.1.1'}]
VM_DEVICE_OWNER = "compute:None"
TUN_OFPORTS = {p_const.TYPE_GRE: {'ip1': '11', 'ip2': '12'}}
BCAST_MAC = "01:00:00:00:00:00/01:00:00:00:00:00"
UCAST_MAC = "00:00:00:00:00:00/01:00:00:00:00:00"
class DummyPort(object):
def __init__(self, interface_id):
self.interface_id = interface_id
class DummyVlanBinding(object):
def __init__(self, network_id, vlan_id):
self.network_id = network_id
self.vlan_id = vlan_id
class TunnelTest(object):
USE_VETH_INTERCONNECTION = False
VETH_MTU = None
def setUp(self):
super(TunnelTest, self).setUp()
cfg.CONF.set_default('firewall_driver',
'neutron.agent.firewall.NoopFirewallDriver',
group='SECURITYGROUP')
cfg.CONF.set_override('report_interval', 0, 'AGENT')
self.INT_BRIDGE = 'integration_bridge'
self.TUN_BRIDGE = 'tunnel_bridge'
self.MAP_TUN_BRIDGE = 'tun_br_map'
self.NET_MAPPING = {'net1': self.MAP_TUN_BRIDGE}
self.INT_OFPORT = 11111
self.TUN_OFPORT = 22222
self.MAP_TUN_INT_OFPORT = 33333
self.MAP_TUN_PHY_OFPORT = 44444
self.LVM = self.mod_agent.LocalVLANMapping(
LV_ID, 'gre', None, LS_ID, VIF_PORTS)
self.LVM_FLAT = self.mod_agent.LocalVLANMapping(
LV_ID, 'flat', 'net1', LS_ID, VIF_PORTS)
self.LVM_VLAN = self.mod_agent.LocalVLANMapping(
LV_ID, 'vlan', 'net1', LS_ID, VIF_PORTS)
self.inta = mock.Mock()
self.intb = mock.Mock()
self.ovs_bridges = {
self.INT_BRIDGE: mock.create_autospec(
self.br_int_cls('br-int')),
self.TUN_BRIDGE: mock.create_autospec(
self.br_tun_cls('br-tun')),
self.MAP_TUN_BRIDGE: mock.create_autospec(
self.br_phys_cls('br-phys')),
}
self.ovs_int_ofports = {
'patch-tun': self.TUN_OFPORT,
'int-%s' % self.MAP_TUN_BRIDGE: self.MAP_TUN_INT_OFPORT
}
def lookup_br(br_name, *args, **kwargs):
return self.ovs_bridges[br_name]
self.mock_int_bridge_cls = mock.patch(self._BR_INT_CLASS,
autospec=True).start()
self.mock_int_bridge_cls.side_effect = lookup_br
self.mock_phys_bridge_cls = mock.patch(self._BR_PHYS_CLASS,
autospec=True).start()
self.mock_phys_bridge_cls.side_effect = lookup_br
self.mock_tun_bridge_cls = mock.patch(self._BR_TUN_CLASS,
autospec=True).start()
self.mock_tun_bridge_cls.side_effect = lookup_br
self.mock_int_bridge = self.ovs_bridges[self.INT_BRIDGE]
self.mock_int_bridge.add_port.return_value = self.MAP_TUN_INT_OFPORT
self.mock_int_bridge.add_patch_port.side_effect = (
lambda tap, peer: self.ovs_int_ofports[tap])
self.mock_int_bridge.get_vif_ports.return_value = []
self.mock_int_bridge.get_ports_attributes.return_value = []
self.mock_int_bridge.db_get_val.return_value = {}
self.mock_map_tun_bridge = self.ovs_bridges[self.MAP_TUN_BRIDGE]
self.mock_map_tun_bridge.br_name = self.MAP_TUN_BRIDGE
self.mock_map_tun_bridge.add_port.return_value = (
self.MAP_TUN_PHY_OFPORT)
self.mock_map_tun_bridge.add_patch_port.return_value = (
self.MAP_TUN_PHY_OFPORT)
self.mock_tun_bridge = self.ovs_bridges[self.TUN_BRIDGE]
self.mock_tun_bridge.add_port.return_value = self.INT_OFPORT
self.mock_tun_bridge.add_patch_port.return_value = self.INT_OFPORT
self.device_exists = mock.patch.object(ip_lib, 'device_exists').start()
self.device_exists.return_value = True
self.ipdevice = mock.patch.object(ip_lib, 'IPDevice').start()
self.ipwrapper = mock.patch.object(ip_lib, 'IPWrapper').start()
add_veth = self.ipwrapper.return_value.add_veth
add_veth.return_value = [self.inta, self.intb]
self.get_bridges = mock.patch.object(ovs_lib.BaseOVS,
'get_bridges').start()
self.get_bridges.return_value = [self.INT_BRIDGE,
self.TUN_BRIDGE,
self.MAP_TUN_BRIDGE]
self.execute = mock.patch('neutron.agent.common.utils.execute').start()
self._define_expected_calls()
def _define_expected_calls(self, arp_responder=False):
self.mock_int_bridge_cls_expected = [
mock.call(self.INT_BRIDGE),
]
self.mock_phys_bridge_cls_expected = [
mock.call(self.MAP_TUN_BRIDGE),
]
self.mock_tun_bridge_cls_expected = [
mock.call(self.TUN_BRIDGE),
]
self.mock_int_bridge = self.ovs_bridges[self.INT_BRIDGE]
self.mock_int_bridge_expected = [
mock.call.create(),
mock.call.set_secure_mode(),
mock.call.setup_controllers(mock.ANY),
mock.call.delete_port('patch-tun'),
mock.call.setup_default_table(),
]
self.mock_map_tun_bridge_expected = [
mock.call.setup_controllers(mock.ANY),
mock.call.setup_default_table(),
mock.call.delete_port('phy-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_patch_port('phy-%s' % self.MAP_TUN_BRIDGE,
constants.NONEXISTENT_PEER), ]
self.mock_int_bridge_expected += [
mock.call.delete_port('int-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_patch_port('int-%s' % self.MAP_TUN_BRIDGE,
constants.NONEXISTENT_PEER),
]
self.mock_int_bridge_expected += [
mock.call.drop_port(in_port=self.MAP_TUN_INT_OFPORT),
mock.call.set_db_attribute(
'Interface', 'int-%s' % self.MAP_TUN_BRIDGE,
'options:peer', 'phy-%s' % self.MAP_TUN_BRIDGE),
]
self.mock_map_tun_bridge_expected += [
mock.call.drop_port(in_port=self.MAP_TUN_PHY_OFPORT),
mock.call.set_db_attribute(
'Interface', 'phy-%s' % self.MAP_TUN_BRIDGE,
'options:peer', 'int-%s' % self.MAP_TUN_BRIDGE),
]
self.mock_tun_bridge_expected = [
mock.call.reset_bridge(secure_mode=True),
mock.call.setup_controllers(mock.ANY),
mock.call.add_patch_port('patch-int', 'patch-tun'),
]
self.mock_int_bridge_expected += [
mock.call.add_patch_port('patch-tun', 'patch-int'),
]
self.mock_int_bridge_expected += [
mock.call.get_vif_ports(),
mock.call.get_ports_attributes(
'Port', columns=['name', 'other_config', 'tag'], ports=[])
]
self.mock_tun_bridge_expected += [
mock.call.delete_flows(),
mock.call.setup_default_table(self.INT_OFPORT, arp_responder),
]
self.device_exists_expected = []
self.ipdevice_expected = []
self.ipwrapper_expected = [mock.call()]
self.get_bridges_expected = [mock.call(), mock.call()]
self.inta_expected = []
self.intb_expected = []
self.execute_expected = []
def _build_agent(self, **kwargs):
bridge_classes = {
'br_int': self.mock_int_bridge_cls,
'br_phys': self.mock_phys_bridge_cls,
'br_tun': self.mock_tun_bridge_cls,
}
kwargs.setdefault('bridge_classes', bridge_classes)
kwargs.setdefault('integ_br', self.INT_BRIDGE)
kwargs.setdefault('tun_br', self.TUN_BRIDGE)
kwargs.setdefault('local_ip', '10.0.0.1')
kwargs.setdefault('bridge_mappings', self.NET_MAPPING)
kwargs.setdefault('polling_interval', 2)
kwargs.setdefault('tunnel_types', ['gre'])
kwargs.setdefault('veth_mtu', self.VETH_MTU)
kwargs.setdefault('use_veth_interconnection',
self.USE_VETH_INTERCONNECTION)
return self.mod_agent.OVSNeutronAgent(**kwargs)
def _verify_mock_call(self, mock_obj, expected):
mock_obj.assert_has_calls(expected)
self.assertEqual(expected, mock_obj.mock_calls)
def _verify_mock_calls(self):
self._verify_mock_call(self.mock_int_bridge_cls,
self.mock_int_bridge_cls_expected)
self._verify_mock_call(self.mock_tun_bridge_cls,
self.mock_tun_bridge_cls_expected)
self._verify_mock_call(self.mock_phys_bridge_cls,
self.mock_phys_bridge_cls_expected)
self._verify_mock_call(self.mock_int_bridge,
self.mock_int_bridge_expected)
self._verify_mock_call(self.mock_map_tun_bridge,
self.mock_map_tun_bridge_expected)
self._verify_mock_call(self.mock_tun_bridge,
self.mock_tun_bridge_expected)
self._verify_mock_call(self.device_exists, self.device_exists_expected)
self._verify_mock_call(self.ipdevice, self.ipdevice_expected)
self._verify_mock_call(self.ipwrapper, self.ipwrapper_expected)
self._verify_mock_call(self.get_bridges, self.get_bridges_expected)
self._verify_mock_call(self.inta, self.inta_expected)
self._verify_mock_call(self.intb, self.intb_expected)
self._verify_mock_call(self.execute, self.execute_expected)
def test_construct(self):
agent = self._build_agent()
self.assertEqual(agent.agent_id, 'ovs-agent-%s' % cfg.CONF.host)
self._verify_mock_calls()
    # TODO(ethuleau): Initially, the local ARP responder is dependent on
    # the ML2 l2 population mechanism driver.
# The next two tests use l2_pop flag to test ARP responder
def test_construct_with_arp_responder(self):
self._build_agent(l2_population=True, arp_responder=True)
self._define_expected_calls(True)
self._verify_mock_calls()
def test_construct_without_arp_responder(self):
self._build_agent(l2_population=False, arp_responder=True)
self._verify_mock_calls()
def test_construct_vxlan(self):
self._build_agent(tunnel_types=['vxlan'])
self._verify_mock_calls()
def test_provision_local_vlan(self):
ofports = list(TUN_OFPORTS[p_const.TYPE_GRE].values())
self.mock_tun_bridge_expected += [
mock.call.install_flood_to_tun(LV_ID, LS_ID, ofports),
mock.call.provision_local_vlan(
network_type=p_const.TYPE_GRE,
lvid=LV_ID,
segmentation_id=LS_ID),
]
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.tun_br_ofports = TUN_OFPORTS
a.provision_local_vlan(NET_UUID, p_const.TYPE_GRE, None, LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_flat(self):
self.mock_map_tun_bridge_expected.append(
mock.call.provision_local_vlan(
port=self.MAP_TUN_PHY_OFPORT,
lvid=LV_ID,
segmentation_id=None,
distributed=False))
self.mock_int_bridge_expected.append(
mock.call.provision_local_vlan(
port=self.INT_OFPORT,
lvid=LV_ID,
segmentation_id=None))
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.provision_local_vlan(NET_UUID, p_const.TYPE_FLAT, 'net1', LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_flat_fail(self):
a = self._build_agent()
a.provision_local_vlan(NET_UUID, p_const.TYPE_FLAT, 'net2', LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_vlan(self):
self.mock_map_tun_bridge_expected.append(
mock.call.provision_local_vlan(
port=self.MAP_TUN_PHY_OFPORT,
lvid=LV_ID,
segmentation_id=LS_ID,
distributed=False))
self.mock_int_bridge_expected.append(
mock.call.provision_local_vlan(
port=self.INT_OFPORT,
lvid=LV_ID,
segmentation_id=LS_ID))
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.provision_local_vlan(NET_UUID, p_const.TYPE_VLAN, 'net1', LS_ID)
self._verify_mock_calls()
def test_provision_local_vlan_vlan_fail(self):
a = self._build_agent()
a.provision_local_vlan(NET_UUID, p_const.TYPE_VLAN, 'net2', LS_ID)
self._verify_mock_calls()
def test_reclaim_local_vlan(self):
self.mock_tun_bridge_expected += [
mock.call.reclaim_local_vlan(network_type='gre',
segmentation_id=LS_ID),
mock.call.delete_flood_to_tun(LV_ID),
mock.call.delete_unicast_to_tun(LV_ID, None),
mock.call.delete_arp_responder(LV_ID, None),
]
a = self._build_agent()
a.available_local_vlans = set()
a.local_vlan_map[NET_UUID] = self.LVM
a.reclaim_local_vlan(NET_UUID)
self.assertIn(self.LVM.vlan, a.available_local_vlans)
self._verify_mock_calls()
def test_reclaim_local_vlan_flat(self):
self.mock_map_tun_bridge_expected.append(
mock.call.reclaim_local_vlan(
port=self.MAP_TUN_PHY_OFPORT,
lvid=self.LVM_FLAT.vlan))
self.mock_int_bridge_expected.append(
mock.call.reclaim_local_vlan(
port=self.INT_OFPORT,
segmentation_id=None))
a = self._build_agent()
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.available_local_vlans = set()
a.local_vlan_map[NET_UUID] = self.LVM_FLAT
a.reclaim_local_vlan(NET_UUID)
self.assertIn(self.LVM_FLAT.vlan, a.available_local_vlans)
self._verify_mock_calls()
def test_reclaim_local_vlan_vlan(self):
self.mock_map_tun_bridge_expected.append(
mock.call.reclaim_local_vlan(
port=self.MAP_TUN_PHY_OFPORT,
lvid=self.LVM_VLAN.vlan))
self.mock_int_bridge_expected.append(
mock.call.reclaim_local_vlan(
port=self.INT_OFPORT,
segmentation_id=LS_ID))
a = self._build_agent()
a.phys_brs['net1'] = self.mock_map_tun_bridge
a.phys_ofports['net1'] = self.MAP_TUN_PHY_OFPORT
a.int_ofports['net1'] = self.INT_OFPORT
a.available_local_vlans = set()
a.local_vlan_map[NET_UUID] = self.LVM_VLAN
a.reclaim_local_vlan(NET_UUID)
self.assertIn(self.LVM_VLAN.vlan, a.available_local_vlans)
self._verify_mock_calls()
def test_port_bound(self):
vlan_mapping = {'segmentation_id': LS_ID,
'physical_network': None,
'net_uuid': NET_UUID,
'network_type': 'gre'}
self.mock_int_bridge_expected += [
mock.call.db_get_val('Port', 'port', 'other_config'),
mock.call.set_db_attribute('Port', VIF_PORT.port_name,
'other_config',
vlan_mapping)]
a = self._build_agent()
a.local_vlan_map[NET_UUID] = self.LVM
a.local_dvr_map = {}
self.ovs_bridges[self.INT_BRIDGE].db_get_val.return_value = {}
a.port_bound(VIF_PORT, NET_UUID, 'gre', None, LS_ID,
FIXED_IPS, VM_DEVICE_OWNER, False)
self._verify_mock_calls()
def test_port_unbound(self):
with mock.patch.object(self.mod_agent.OVSNeutronAgent,
'reclaim_local_vlan') as reclaim_local_vlan:
a = self._build_agent()
a.local_vlan_map[NET_UUID] = self.LVM
a.port_unbound(VIF_ID, NET_UUID)
reclaim_local_vlan.assert_called_once_with(NET_UUID)
self._verify_mock_calls()
def test_port_dead(self):
self.mock_int_bridge_expected += [
mock.call.db_get_val('Port', VIF_PORT.port_name, 'tag',
log_errors=True),
mock.call.set_db_attribute(
'Port', VIF_PORT.port_name,
'tag', self.mod_agent.DEAD_VLAN_TAG,
log_errors=True),
mock.call.drop_port(in_port=VIF_PORT.ofport),
]
a = self._build_agent()
a.available_local_vlans = set([LV_ID])
a.local_vlan_map[NET_UUID] = self.LVM
self.ovs_bridges[self.INT_BRIDGE].db_get_val.return_value = mock.Mock()
a.port_dead(VIF_PORT)
self._verify_mock_calls()
def test_tunnel_update(self):
tunnel_port = '9999'
self.mock_tun_bridge.add_tunnel_port.return_value = tunnel_port
self.mock_tun_bridge_expected += [
mock.call.add_tunnel_port('gre-0a000a01', '10.0.10.1', '10.0.0.1',
'gre', 4789, True),
mock.call.setup_tunnel_port('gre', tunnel_port),
]
a = self._build_agent()
a.tunnel_update(
mock.sentinel.ctx, tunnel_ip='10.0.10.1',
tunnel_type=p_const.TYPE_GRE)
self._verify_mock_calls()
def test_tunnel_update_self(self):
a = self._build_agent()
a.tunnel_update(
mock.sentinel.ctx, tunnel_ip='10.0.0.1')
self._verify_mock_calls()
def test_daemon_loop(self):
reply2 = {'current': set(['tap0']),
'added': set(['tap2']),
'removed': set([])}
reply3 = {'current': set(['tap2']),
'added': set([]),
'removed': set(['tap0'])}
self.mock_int_bridge_expected += [
mock.call.check_canary_table(),
mock.call.check_canary_table()
]
self.ovs_bridges[self.INT_BRIDGE].check_canary_table.return_value = \
constants.OVS_NORMAL
with mock.patch.object(log.KeywordArgumentAdapter,
'exception') as log_exception,\
mock.patch.object(self.mod_agent.OVSNeutronAgent,
'scan_ports') as scan_ports,\
mock.patch.object(
self.mod_agent.OVSNeutronAgent,
'process_network_ports') as process_network_ports,\
mock.patch.object(self.mod_agent.OVSNeutronAgent,
'tunnel_sync'),\
mock.patch.object(time, 'sleep'),\
mock.patch.object(self.mod_agent.OVSNeutronAgent,
'update_stale_ofport_rules') as update_stale:
log_exception.side_effect = Exception(
'Fake exception to get out of the loop')
scan_ports.side_effect = [reply2, reply3]
update_stale.return_value = []
process_network_ports.side_effect = [
False, Exception('Fake exception to get out of the loop')]
n_agent = self._build_agent()
            # Hack to test the loop: start the method and expect it to raise
            # after the 2nd iteration. If something goes wrong, the
            # assert_has_calls below will catch it.
try:
n_agent.daemon_loop()
except Exception:
pass
# FIXME(salv-orlando): There should not be assertions on log
# messages
log_exception.assert_called_once_with(
"Error while processing VIF ports")
scan_ports.assert_has_calls([
mock.call(set(), set()),
mock.call(set(['tap0']), set())
])
process_network_ports.assert_has_calls([
mock.call({'current': set(['tap0']),
'removed': set([]),
'added': set(['tap2'])}, False),
mock.call({'current': set(['tap2']),
'removed': set(['tap0']),
'added': set([])}, False)
])
self.assertTrue(update_stale.called)
self._verify_mock_calls()
class TunnelTestOFCtl(TunnelTest, ovs_test_base.OVSOFCtlTestBase):
pass
class TunnelTestUseVethInterco(TunnelTest):
USE_VETH_INTERCONNECTION = True
def _define_expected_calls(self, arp_responder=False):
self.mock_int_bridge_cls_expected = [
mock.call(self.INT_BRIDGE),
]
self.mock_phys_bridge_cls_expected = [
mock.call(self.MAP_TUN_BRIDGE),
]
self.mock_tun_bridge_cls_expected = [
mock.call(self.TUN_BRIDGE),
]
self.mock_int_bridge_expected = [
mock.call.create(),
mock.call.set_secure_mode(),
mock.call.setup_controllers(mock.ANY),
mock.call.delete_port('patch-tun'),
mock.call.setup_default_table(),
]
self.mock_map_tun_bridge_expected = [
mock.call.setup_controllers(mock.ANY),
mock.call.setup_default_table(),
mock.call.delete_port('phy-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_port(self.intb),
]
self.mock_int_bridge_expected += [
mock.call.delete_port('int-%s' % self.MAP_TUN_BRIDGE),
mock.call.add_port(self.inta)
]
self.mock_int_bridge_expected += [
mock.call.drop_port(in_port=self.MAP_TUN_INT_OFPORT),
]
self.mock_map_tun_bridge_expected += [
mock.call.drop_port(in_port=self.MAP_TUN_PHY_OFPORT),
]
self.mock_tun_bridge_expected = [
mock.call.reset_bridge(secure_mode=True),
mock.call.setup_controllers(mock.ANY),
mock.call.add_patch_port('patch-int', 'patch-tun'),
]
self.mock_int_bridge_expected += [
mock.call.add_patch_port('patch-tun', 'patch-int')
]
self.mock_int_bridge_expected += [
mock.call.get_vif_ports(),
mock.call.get_ports_attributes(
'Port', columns=['name', 'other_config', 'tag'], ports=[])
]
self.mock_tun_bridge_expected += [
mock.call.delete_flows(),
mock.call.setup_default_table(self.INT_OFPORT, arp_responder),
]
self.device_exists_expected = [
mock.call('int-%s' % self.MAP_TUN_BRIDGE),
]
self.ipdevice_expected = [
mock.call('int-%s' % self.MAP_TUN_BRIDGE),
mock.call().link.delete()
]
self.ipwrapper_expected = [
mock.call(),
mock.call().add_veth('int-%s' % self.MAP_TUN_BRIDGE,
'phy-%s' % self.MAP_TUN_BRIDGE)
]
self.get_bridges_expected = [mock.call(), mock.call()]
self.inta_expected = [mock.call.link.set_up()]
self.intb_expected = [mock.call.link.set_up()]
self.execute_expected = [mock.call(['udevadm', 'settle',
'--timeout=10'])]
class TunnelTestUseVethIntercoOFCtl(TunnelTestUseVethInterco,
ovs_test_base.OVSOFCtlTestBase):
pass
class TunnelTestWithMTU(TunnelTestUseVethInterco):
VETH_MTU = 1500
def _define_expected_calls(self, arp_responder=False):
super(TunnelTestWithMTU, self)._define_expected_calls(arp_responder)
self.inta_expected.append(mock.call.link.set_mtu(self.VETH_MTU))
self.intb_expected.append(mock.call.link.set_mtu(self.VETH_MTU))
class TunnelTestWithMTUOFCtl(TunnelTestWithMTU,
ovs_test_base.OVSOFCtlTestBase):
pass
| suneeth51/neutron | neutron/tests/unit/plugins/ml2/drivers/openvswitch/agent/test_ovs_tunnel.py | Python | apache-2.0 | 25,812 | 0 |
#!/usr/bin/env python2
# -*- mode: python -*-
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2016 The Electrum developers
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from struct import pack
from electrum_arg.wallet import BIP44_Wallet
class BIP44_HW_Wallet(BIP44_Wallet):
'''A BIP44 hardware wallet base class.'''
# Derived classes must set:
# - device
# - DEVICE_IDS
# - wallet_type
restore_wallet_class = BIP44_Wallet
max_change_outputs = 1
def __init__(self, storage):
BIP44_Wallet.__init__(self, storage)
# Errors and other user interaction is done through the wallet's
# handler. The handler is per-window and preserved across
# device reconnects
self.handler = None
def unpaired(self):
        '''A device paired with the wallet was disconnected. This can be
        called in any thread context.'''
self.print_error("unpaired")
def paired(self):
'''A device paired with the wallet was (re-)connected. This can be
called in any thread context.'''
self.print_error("paired")
def get_action(self):
pass
def can_create_accounts(self):
return True
def can_export(self):
return False
def is_watching_only(self):
'''The wallet is not watching-only; the user will be prompted for
pin and passphrase as appropriate when needed.'''
assert not self.has_seed()
return False
def can_change_password(self):
return False
def get_client(self, force_pair=True):
return self.plugin.get_client(self, force_pair)
def first_address(self):
'''Used to check a hardware wallet matches a software wallet'''
account = self.accounts.get('0')
derivation = self.address_derivation('0', 0, 0)
return (account.first_address()[0] if account else None, derivation)
def derive_xkeys(self, root, derivation, password):
if self.master_public_keys.get(self.root_name):
            return BIP44_Wallet.derive_xkeys(self, root, derivation, password)
# When creating a wallet we need to ask the device for the
# master public key
xpub = self.get_public_key(derivation)
return xpub, None
def i4b(self, x):
return pack('>I', x)
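# Illustrative sketch (added, not part of the original plugin): the class
# docstring above says derived classes must set `device`, `DEVICE_IDS` and
# `wallet_type`. A minimal hypothetical subclass would look like this --
# the attribute values below are placeholders, not a real device:
class Demo_HW_Wallet(BIP44_HW_Wallet):
    device = 'DemoDevice'              # display name (hypothetical)
    DEVICE_IDS = [(0x1209, 0x0001)]    # USB (vendor, product) ids (hypothetical)
    wallet_type = 'demo_hardware'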
| argentumproject/electrum-arg | plugins/hw_wallet/hw_wallet.py | Python | mit | 3,347 | 0.000299 |
# -*- coding: utf-8 -*-
from checker.backends import BaseBackend
from checker import logger
log = logger.getLogger(__name__)
class GnomeBackend(BaseBackend):
"""for projects hosted on gnome.org"""
name = 'Gnome'
domain = 'gnome.org'
example = 'https://download.gnome.org/sources/gnome-control-center'
def __init__(self, url):
super(GnomeBackend, self).__init__()
self._url = url
self._rule_type = "xpath"
def get_urls(self, branch=None):
return self._url,
def get_rules(self):
log.debug('use %s backend rule for %s package.' %
(self.name, self._url.split('/')[-1]))
return [("//tr/td[3][contains(text(), '-')]/text()", ""), ("", "")]
@classmethod
def isrelease(cls, url):
return True
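# Illustrative usage sketch (added, assuming the checker package is
# importable): `example` is the class's own sample URL, and get_rules()
# yields the xpath used to extract version strings from the listing.
if __name__ == '__main__':
    backend = GnomeBackend(GnomeBackend.example)
    print(backend.get_urls())
    print(backend.get_rules())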
| 1dot75cm/repo-checker | checker/backends/gnome.py | Python | mit | 803 | 0 |
# -*- coding: utf-8 -*-
import os
import sys
root = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
sys.path.append(root + '/python')
import ccxt # noqa: E402
exchanges = {} # a placeholder for your instances
for id in ccxt.exchanges:
exchange = getattr(ccxt, id)
exchanges[id] = exchange()
# now exchanges dictionary contains all exchange instances...
exchanges['bittrex'].fetch_order_book('ETH/BTC')
| ccxt/ccxt | examples/py/instantiate-all-at-once.py | Python | mit | 447 | 0.002237 |
# -*- coding: utf-8 -*-
# Define here the models for your scraped items
#
# See documentation in:
# http://doc.scrapy.org/en/latest/topics/items.html
import scrapy
class ListItem(scrapy.Item):
# define the fields for your item here like:
# name = scrapy.Field()
name = scrapy.Field()
location = scrapy.Field()
| ThunderShiviah/london-social-network-analysis | web_subscriptions/items.py | Python | mit | 330 | 0 |
import sys
import smtplib
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from config import config
def send_mail(message,receivers=config['smtp_receivers']):
"""
Module that sends emails.
Message should be a dictionary containing the following keys:
subject -> Subject of the email
text -> The plain text part of the email
uses options from config.py
"""
sender = config['smtp_sender']
print sender
print receivers
print message
try:
smtpObj = smtplib.SMTP_SSL(config['smtp_server'],config['smtp_port'])
smtpObj.login(config['smtp_login'],config['smtp_password'])
if not receivers:
return True
smtpObj.sendmail(sender, receivers, message)
smtpObj.quit()
return True
except Exception, exc:
print >> sys.stderr, "Exception From email_base"
print >> sys.stderr, exc
return False
if __name__ == '__main__':
send_mail('Test message')
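# Illustrative sketch (added): since sendmail() needs a flat message string,
# a caller would typically build one with the MIME classes imported above
# (the subject and body here are placeholders):
#
# msg = MIMEMultipart()
# msg['Subject'] = 'Daily NSE report'
# msg.attach(MIMEText('report body', 'plain'))
# send_mail(msg.as_string())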
| rajaram1990/GetNSEStockPrice | GetNSEStockPrice/email_base.py | Python | mit | 1,041 | 0.011527 |
from generator.actions import Actions
import random
import string
class Palindrome(Actions):
def start(self):
pass
def banner(self):
# Confirm the initial empty line
self.read(delim='\n', expect='\n')
# Confirm the actual banner
self.read(delim='\n', expect='Welcome to Palindrome Finder\n')
def request(self):
# Skip the empty line
self.read(delim='\n', expect='\n')
# Confirm the request
self.read(length=37, expect='\tPlease enter a possible palindrome: ')
def palindrome(self):
halfword = self.random_string(random.randint(1, 16))
self.write(halfword + halfword[::-1] + "\n")
self.read(delim='\n', expect="\t\tYes, that's a palindrome!\n")
def not_palindrome(self):
word = self.random_string(random.randint(2, 32))
while self.is_palindrome(word):
word = self.random_string(random.randint(2, 32))
self.write(word + "\n")
self.read(delim='\n', expect="\t\tNope, that's not a palindrome\n")
def is_palindrome(self, word):
for i in range(0, len(word) / 2):
if word[i] != word[-i - 1]:
return False
return True
def random_string(self, size):
chars = string.letters + string.digits
return ''.join(random.choice(chars) for _ in range(size))
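# Illustrative sketch (added; standalone logic only -- the poller itself must
# run inside the challenge-binary test framework): palindromes are built by
# mirroring a random half, which guarantees the property being tested.
if __name__ == '__main__':
    halfword = 'abc'
    word = halfword + halfword[::-1]   # mirroring always yields a palindrome
    print('%s %s' % (word, word == word[::-1]))  # abccba True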
| f0rki/cb-multios | original-challenges/Palindrome/poller/for-testing/machine.py | Python | mit | 1,305 | 0.011494 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-08-17 08:53
from __future__ import unicode_literals
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('cms', '0008_blogindexpage_blogpost_blogposttag'),
]
operations = [
migrations.AddField(
model_name='blogpost',
name='date',
field=models.DateField(default=django.utils.timezone.now, verbose_name='body'),
preserve_default=False,
),
]
| kingsdigitallab/kdl-django | cms/migrations/0009_blogpost_date.py | Python | mit | 565 | 0.00177 |
#
# Copyright (C) 2008, 2013 Red Hat, Inc.
# Copyright (C) 2008 Cole Robinson <crobinso@redhat.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301 USA.
#
# pylint: disable=E0611
from gi.repository import Gtk
from gi.repository import Gdk
# pylint: enable=E0611
import logging
from virtManager.baseclass import vmmGObjectUI
from virtManager.asyncjob import vmmAsyncJob
from virtManager import uiutil
from virtinst import StoragePool
PAGE_NAME = 0
PAGE_FORMAT = 1
class vmmCreatePool(vmmGObjectUI):
def __init__(self, conn):
vmmGObjectUI.__init__(self, "createpool.ui", "vmm-create-pool")
self.conn = conn
self._pool = None
self.builder.connect_signals({
"on_pool_forward_clicked" : self.forward,
"on_pool_back_clicked" : self.back,
"on_pool_cancel_clicked" : self.close,
"on_vmm_create_pool_delete_event" : self.close,
"on_pool_finish_clicked" : self.forward,
"on_pool_pages_change_page" : self.page_changed,
"on_pool_source_button_clicked" : self.browse_source_path,
"on_pool_target_button_clicked" : self.browse_target_path,
"on_pool_name_activate": self.forward,
"on_pool_hostname_activate" : self.hostname_changed,
"on_pool_iqn_chk_toggled": self.iqn_toggled,
})
self.bind_escape_key_close()
self.set_initial_state()
self.set_page(PAGE_NAME)
def show(self, parent):
logging.debug("Showing new pool wizard")
self.reset_state()
self.topwin.set_transient_for(parent)
self.topwin.present()
def close(self, ignore1=None, ignore2=None):
logging.debug("Closing new pool wizard")
self.topwin.hide()
return 1
def _cleanup(self):
self.conn = None
self._pool = None
def set_initial_state(self):
self.widget("pool-pages").set_show_tabs(False)
blue = Gdk.Color.parse("#0072A8")[1]
self.widget("header").modify_bg(Gtk.StateType.NORMAL, blue)
type_list = self.widget("pool-type")
type_model = Gtk.ListStore(str, str)
type_list.set_model(type_model)
uiutil.set_combo_text_column(type_list, 1)
format_list = self.widget("pool-format")
format_model = Gtk.ListStore(str, str)
format_list.set_model(format_model)
uiutil.set_combo_text_column(format_list, 1)
# Target path combo box entry
target_list = self.widget("pool-target-path")
# target_path, Label, pool class instance
target_model = Gtk.ListStore(str, str, object)
target_model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
target_list.set_model(target_model)
target_list.set_entry_text_column(0)
# Source path combo box entry
source_list = self.widget("pool-source-path")
# source_path, Label, pool class instance
source_model = Gtk.ListStore(str, str, object)
source_model.set_sort_column_id(0, Gtk.SortType.ASCENDING)
source_list.set_model(source_model)
source_list.set_entry_text_column(0)
self.populate_pool_type()
def reset_state(self):
self.widget("pool-pages").set_current_page(0)
self.widget("pool-forward").show()
self.widget("pool-finish").hide()
self.widget("pool-back").set_sensitive(False)
self.widget("pool-name").set_text("")
self.widget("pool-name").grab_focus()
self.widget("pool-type").set_active(0)
self.widget("pool-target-path").get_child().set_text("")
self.widget("pool-source-path").get_child().set_text("")
self.widget("pool-hostname").set_text("")
self.widget("pool-iqn-chk").set_active(False)
self.widget("pool-iqn-chk").toggled()
self.widget("pool-iqn").set_text("")
self.widget("pool-format").set_active(-1)
self.widget("pool-build").set_sensitive(True)
self.widget("pool-build").set_active(False)
self.widget("pool-details-grid").set_visible(False)
def hostname_changed(self, ignore):
# If a hostname was entered, try to lookup valid pool sources.
self.populate_pool_sources()
def iqn_toggled(self, src):
self.widget("pool-iqn").set_sensitive(src.get_active())
def populate_pool_type(self):
model = self.widget("pool-type").get_model()
model.clear()
types = StoragePool.get_pool_types()
types.sort()
for typ in types:
model.append([typ, "%s: %s" %
(typ, StoragePool.get_pool_type_desc(typ))])
def populate_pool_format(self, formats):
model = self.widget("pool-format").get_model()
model.clear()
for f in formats:
model.append([f, f])
def populate_pool_sources(self):
source_list = self.widget("pool-source-path")
source_model = source_list.get_model()
source_model.clear()
target_list = self.widget("pool-target-path")
target_model = target_list.get_model()
target_model.clear()
use_list = source_list
use_model = source_model
entry_list = []
if self._pool.type == StoragePool.TYPE_SCSI:
entry_list = self.list_scsi_adapters()
use_list = source_list
use_model = source_model
elif self._pool.type == StoragePool.TYPE_LOGICAL:
pool_list = self.list_pool_sources()
entry_list = [[p.target_path, p.target_path, p]
for p in pool_list]
use_list = target_list
use_model = target_model
elif self._pool.type == StoragePool.TYPE_DISK:
entry_list = self.list_disk_devs()
use_list = source_list
use_model = source_model
elif self._pool.type == StoragePool.TYPE_NETFS:
host = self.get_config_host()
if host:
pool_list = self.list_pool_sources(host=host)
entry_list = [[p.source_path, p.source_path, p]
for p in pool_list]
use_list = source_list
use_model = source_model
for e in entry_list:
use_model.append(e)
if entry_list:
use_list.set_active(0)
def list_scsi_adapters(self):
scsi_hosts = self.conn.get_nodedevs("scsi_host")
host_list = [dev.host for dev in scsi_hosts]
clean_list = []
for h in host_list:
name = "host%s" % h
tmppool = self._make_stub_pool()
tmppool.source_path = name
entry = [name, name, tmppool]
if name not in [l[0] for l in clean_list]:
clean_list.append(entry)
return clean_list
def list_disk_devs(self):
devs = self.conn.get_nodedevs("storage")
devlist = []
for dev in devs:
if dev.drive_type != "disk" or not dev.block:
continue
devlist.append(dev.block)
devlist.sort()
clean_list = []
for dev in devlist:
tmppool = self._make_stub_pool()
tmppool.source_path = dev
entry = [dev, dev, tmppool]
if dev not in [l[0] for l in clean_list]:
clean_list.append(entry)
return clean_list
def list_pool_sources(self, host=None):
pool_type = self._pool.type
plist = []
try:
plist = StoragePool.pool_list_from_sources(
self.conn.get_backend(),
pool_type,
host=host)
except Exception:
logging.exception("Pool enumeration failed")
return plist
def show_options_by_pool(self):
def show_row(base, do_show):
widget = self.widget(base + "-label")
uiutil.set_grid_row_visible(widget, do_show)
src = self._pool.supports_property("source_path")
src_b = src and not self.conn.is_remote()
src_name = self._pool.type == StoragePool.TYPE_GLUSTER
tgt = self._pool.supports_property("target_path")
tgt_b = tgt and not self.conn.is_remote()
host = self._pool.supports_property("host")
fmt = self._pool.supports_property("formats")
iqn = self._pool.supports_property("iqn")
builddef, buildsens = self.get_build_default()
        # Source path browsing is meaningless for net pools
if self._pool.type in [StoragePool.TYPE_NETFS,
StoragePool.TYPE_ISCSI,
StoragePool.TYPE_SCSI]:
src_b = False
show_row("pool-target", tgt)
show_row("pool-source", src)
show_row("pool-hostname", host)
show_row("pool-format", fmt)
show_row("pool-build", buildsens)
show_row("pool-iqn", iqn)
show_row("pool-source-name", src_name)
if tgt:
self.widget("pool-target-path").get_child().set_text(
self._pool.target_path)
self.widget("pool-target-button").set_sensitive(tgt_b)
self.widget("pool-source-button").set_sensitive(src_b)
self.widget("pool-build").set_active(builddef)
if src_name:
self.widget("pool-source-name").get_child().set_text(
self._pool.source_name)
self.widget("pool-format").set_active(-1)
if fmt:
self.populate_pool_format(self._pool.list_formats("formats"))
self.widget("pool-format").set_active(0)
self.populate_pool_sources()
def get_config_type(self):
return uiutil.get_list_selection(self.widget("pool-type"), 0)
def get_config_name(self):
return self.widget("pool-name").get_text()
def get_config_target_path(self):
src = self.widget("pool-target-path")
if not src.get_sensitive():
return None
ret = uiutil.get_list_selection(src, 1)
if ret is not None:
return ret
return src.get_child().get_text()
def get_config_source_path(self):
src = self.widget("pool-source-path")
if not src.get_sensitive():
return None
ret = uiutil.get_list_selection(src, 1)
if ret is not None:
return ret
return src.get_child().get_text().strip()
def get_config_host(self):
host = self.widget("pool-hostname")
if host.get_sensitive():
return host.get_text().strip()
return None
def get_config_source_name(self):
name = self.widget("pool-source-name")
if name.get_sensitive():
return name.get_text().strip()
return None
def get_config_format(self):
return uiutil.get_list_selection(self.widget("pool-format"), 0)
def get_config_iqn(self):
iqn = self.widget("pool-iqn")
if iqn.get_sensitive() and iqn.get_visible():
return iqn.get_text().strip()
return None
def get_build_default(self):
""" Return (default value, whether build option can be changed)"""
if not self._pool:
return (False, False)
if self._pool.type in [StoragePool.TYPE_DIR,
StoragePool.TYPE_FS,
StoragePool.TYPE_NETFS]:
# Building for these simply entails creating a directory
return (True, False)
elif self._pool.type in [StoragePool.TYPE_LOGICAL,
StoragePool.TYPE_DISK]:
            # Building is a dangerous (destructive) operation here; any
            # (False, True) return value should be assumed to be one.
return (False, True)
else:
return (False, False)
def browse_source_path(self, ignore1=None):
source = self._browse_file(_("Choose source path"),
startfolder="/dev", foldermode=False)
if source:
self.widget("pool-source-path").get_child().set_text(source)
def browse_target_path(self, ignore1=None):
target = self._browse_file(_("Choose target directory"),
startfolder="/var/lib/libvirt",
foldermode=True)
if target:
self.widget("pool-target-path").get_child().set_text(target)
def forward(self, ignore=None):
notebook = self.widget("pool-pages")
try:
if self.validate(notebook.get_current_page()) is not True:
return
if notebook.get_current_page() == PAGE_FORMAT:
self.finish()
else:
notebook.next_page()
except Exception, e:
self.err.show_err(_("Uncaught error validating input: %s") % str(e))
return
def back(self, ignore=None):
self.widget("pool-pages").prev_page()
def _finish_cb(self, error, details):
self.topwin.set_sensitive(True)
self.topwin.get_window().set_cursor(
Gdk.Cursor.new(Gdk.CursorType.TOP_LEFT_ARROW))
if error:
error = _("Error creating pool: %s") % error
self.err.show_err(error,
details=details)
else:
self.conn.schedule_priority_tick(pollpool=True)
self.close()
def finish(self):
self.topwin.set_sensitive(False)
self.topwin.get_window().set_cursor(
Gdk.Cursor.new(Gdk.CursorType.WATCH))
build = self.widget("pool-build").get_active()
progWin = vmmAsyncJob(self._async_pool_create, [build],
self._finish_cb, [],
_("Creating storage pool..."),
_("Creating the storage pool may take a "
"while..."),
self.topwin)
progWin.run()
def _async_pool_create(self, asyncjob, build):
meter = asyncjob.get_meter()
logging.debug("Starting backround pool creation.")
poolobj = self._pool.install(create=True, meter=meter, build=build)
poolobj.setAutostart(True)
logging.debug("Pool creation succeeded")
def set_page(self, page_number):
# Update page number
page_lbl = ("<span color='#59B0E2'>%s</span>" %
_("Step %(current_page)d of %(max_page)d") %
{'current_page': page_number + 1,
'max_page': PAGE_FORMAT + 1})
self.widget("header-pagenum").set_markup(page_lbl)
isfirst = (page_number == PAGE_NAME)
islast = (page_number == PAGE_FORMAT)
self.widget("pool-back").set_sensitive(not isfirst)
self.widget("pool-finish").set_visible(islast)
self.widget("pool-forward").set_visible(not islast)
self.widget(islast and "pool-finish" or "pool-forward").grab_focus()
self.widget("pool-details-grid").set_visible(islast)
if islast:
self.show_options_by_pool()
def page_changed(self, notebook_ignore, page_ignore, page_number):
self.set_page(page_number)
def get_pool_to_validate(self):
"""
Return a pool instance to use for parameter assignment validation.
For most pools this will be the one we built after step 1, but for
pools we find via FindPoolSources, this will be different
"""
source_list = self.widget("pool-source-path")
target_list = self.widget("pool-target-path")
pool = uiutil.get_list_selection(source_list, 2)
if pool is None:
pool = uiutil.get_list_selection(target_list, 2)
return pool
def _make_stub_pool(self):
pool = StoragePool(self.conn.get_backend())
pool.type = self.get_config_type()
return pool
def _validate_page_name(self, usepool=None):
try:
if usepool:
self._pool = usepool
else:
self._pool = self._make_stub_pool()
self._pool.name = self.get_config_name()
except ValueError, e:
return self.err.val_err(_("Pool Parameter Error"), e)
return True
def _validate_page_format(self):
target = self.get_config_target_path()
host = self.get_config_host()
source = self.get_config_source_path()
fmt = self.get_config_format()
iqn = self.get_config_iqn()
source_name = self.get_config_source_name()
if not self._validate_page_name(self.get_pool_to_validate()):
return
try:
self._pool.target_path = target
if host:
self._pool.host = host
if source:
self._pool.source_path = source
if fmt:
self._pool.format = fmt
if iqn:
self._pool.iqn = iqn
if source_name:
self._pool.source_name = source_name
self._pool.validate()
except ValueError, e:
return self.err.val_err(_("Pool Parameter Error"), e)
buildval = self.widget("pool-build").get_active()
buildsen = (self.widget("pool-build").get_sensitive() and
self.widget("pool-build").get_visible())
if buildsen and buildval:
ret = self.err.yes_no(_("Building a pool of this type will "
"format the source device. Are you "
"sure you want to 'build' this pool?"))
if not ret:
return ret
return True
def validate(self, page):
if page == PAGE_NAME:
return self._validate_page_name()
elif page == PAGE_FORMAT:
return self._validate_page_format()
def _browse_file(self, dialog_name, startfolder=None, foldermode=False):
mode = Gtk.FileChooserAction.OPEN
if foldermode:
mode = Gtk.FileChooserAction.SELECT_FOLDER
return self.err.browse_local(self.conn, dialog_name,
dialog_type=mode, start_folder=startfolder)
| aurex-linux/virt-manager | virtManager/createpool.py | Python | gpl-2.0 | 18,923 | 0.000951 |
# This Python file uses the following encoding: utf-8
"""
Copyright 2013 Giacomo Antolini <giacomo.antolini@gmail.com>.
This file is part of flocca_dot_com.
flocca_dot_com is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
flocca_dot_com is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with flocca_dot_com. If not, see <http://www.gnu.org/licenses/>.
"""
# Django settings for flocca_dot_com project.
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    ('Giacomo Antolini', 'giacomo.antolini@gmail.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(os.path.dirname(__file__), '..', 'flocca_dot_com.sql').replace('\\', '/'),
'USER': '',
'PASSWORD': '',
'HOST': '',
'PORT': '',
}
}
# Hosts/domain names that are valid for this site; required if DEBUG is False
# See https://docs.djangoproject.com/en/1.5/ref/settings/#allowed-hosts
ALLOWED_HOSTS = []
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# In a Windows environment this must be set to your system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale.
USE_L10N = True
# If you set this to False, Django will not use timezone-aware datetimes.
USE_TZ = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = '/static/'
# Additional locations of static files
STATICFILES_DIRS = (
# Put strings here, like "/home/html/static" or "C:/www/django/static".
# Always use forward slashes, even on Windows.
# Don't forget to use absolute paths, not relative paths.
os.path.join(os.path.dirname(__file__), '..', 'static').replace('\\', '/'),
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# Make this unique, and don't share it with anybody.
SECRET_KEY = '7x8i2+j(yn4(s^%)n6s+%ergrl9ba0e*0^1t+%cbhuvx#8k6(5'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
# Uncomment the next line for simple clickjacking protection:
# 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'flocca_dot_com.urls'
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'flocca_dot_com.wsgi.application'
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), '..', 'templates').replace('\\','/'),)
INSTALLED_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
# Uncomment the next line to enable the admin:
# 'django.contrib.admin',
# Uncomment the next line to enable admin documentation:
# 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error when DEBUG=False.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
}
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
}
},
'loggers': {
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True,
},
}
}
| flocca/flocca_dot_com | flocca_dot_com/settings.py | Python | gpl-3.0 | 5,929 | 0.001012 |
import sys
sys.path.insert(1, "../../../")
import h2o
def iris_ignore(ip,port):
# Connect to h2o
h2o.init(ip,port)
iris = h2o.import_frame(path=h2o.locate("smalldata/iris/iris2.csv"))
for maxx in range(4):
model = h2o.random_forest(y=iris[4], x=iris[range(maxx+1)], ntrees=50, max_depth=100)
model.show()
if __name__ == "__main__":
h2o.run_test(sys.argv, iris_ignore)
| bikash/h2o-dev | h2o-py/tests/testdir_algos/rf/pyunit_iris_ignoreRF.py | Python | apache-2.0 | 405 | 0.02716 |
# This file is part of Buildbot. Buildbot is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
# Copyright Buildbot Team Members
import sys
import twisted
from twisted.trial import unittest
from buildslave import monkeypatches
# apply the same patches the slave does when it starts
monkeypatches.patch_all(for_tests=True)
def add_debugging_monkeypatches():
"""
DO NOT CALL THIS DIRECTLY
This adds a few "harmless" monkeypatches which make it easier to debug
failing tests.
"""
from twisted.application.service import Service
old_startService = Service.startService
old_stopService = Service.stopService
def startService(self):
assert not self.running
return old_startService(self)
def stopService(self):
assert self.running
return old_stopService(self)
Service.startService = startService
Service.stopService = stopService
# versions of Twisted before 9.0.0 did not have a UnitTest.patch that worked
# on Python-2.7
if twisted.version.major <= 9 and sys.version_info[:2] == (2,7):
def nopatch(self, *args):
raise unittest.SkipTest('unittest.TestCase.patch is not available')
unittest.TestCase.patch = nopatch
add_debugging_monkeypatches()
__all__ = []
# import mock so we bail out early if it's not installed
try:
import mock
mock = mock
except ImportError:
raise ImportError("Buildbot tests require the 'mock' module; "
"try 'pip install mock'")
| denny820909/builder | lib/python2.7/site-packages/buildbot_slave-0.8.8-py2.7.egg/buildslave/test/__init__.py | Python | mit | 2,076 | 0.003372 |
# -*- coding: utf-8 -*-
#
# papyon - a python client library for Msn
#
# Copyright (C) 2005-2006 Ali Sabil <ali.sabil@gmail.com>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import xml.sax.saxutils as xml
class LiveService(object):
CONTACTS = ("contacts.msn.com", "MBI")
MESSENGER = ("messenger.msn.com", "?id=507")
MESSENGER_CLEAR = ("messengerclear.live.com", "MBI_KEY_OLD")
MESSENGER_SECURE = ("messengersecure.live.com", "MBI_SSL")
SPACES = ("spaces.live.com", "MBI")
STORAGE = ("storage.msn.com", "MBI")
TB = ("http://Passport.NET/tb", None)
VOICE = ("voice.messenger.msn.com", "?id=69264")
@classmethod
def url_to_service(cls, url):
for attr_name in dir(cls):
if attr_name.startswith('_'):
continue
attr = getattr(cls, attr_name)
if isinstance(attr, tuple) and attr[0] == url:
return attr
return None
def transport_headers():
"""Returns a dictionary, containing transport (http) headers
to use for the request"""
return {}
def soap_action():
"""Returns the SOAPAction value to pass to the transport
or None if no SOAPAction needs to be specified"""
return None
def soap_header(account, password):
"""Returns the SOAP xml header"""
return """
<ps:AuthInfo xmlns:ps="http://schemas.microsoft.com/Passport/SoapServices/PPCRL" Id="PPAuthInfo">
<ps:HostingApp>{7108E71A-9926-4FCB-BCC9-9A9D3F32E423}</ps:HostingApp>
<ps:BinaryVersion>4</ps:BinaryVersion>
<ps:UIVersion>1</ps:UIVersion>
<ps:Cookies/>
<ps:RequestParams>AQAAAAIAAABsYwQAAAAxMDMz</ps:RequestParams>
</ps:AuthInfo>
<wsse:Security xmlns:wsse="http://schemas.xmlsoap.org/ws/2003/06/secext">
<wsse:UsernameToken Id="user">
<wsse:Username>%(account)s</wsse:Username>
<wsse:Password>%(password)s</wsse:Password>
</wsse:UsernameToken>
</wsse:Security>""" % {'account': xml.escape(account),
'password': xml.escape(password)}
def soap_body(*tokens):
"""Returns the SOAP xml body"""
token_template = """
<wst:RequestSecurityToken xmlns:wst="http://schemas.xmlsoap.org/ws/2004/04/trust" Id="RST%(id)d">
<wst:RequestType>http://schemas.xmlsoap.org/ws/2004/04/security/trust/Issue</wst:RequestType>
<wsp:AppliesTo xmlns:wsp="http://schemas.xmlsoap.org/ws/2002/12/policy">
<wsa:EndpointReference xmlns:wsa="http://schemas.xmlsoap.org/ws/2004/03/addressing">
<wsa:Address>%(address)s</wsa:Address>
</wsa:EndpointReference>
</wsp:AppliesTo>
%(policy_reference)s
</wst:RequestSecurityToken>"""
policy_reference_template = """
<wsse:PolicyReference xmlns:wsse="http://schemas.xmlsoap.org/ws/2003/06/secext" URI=%(uri)s/>"""
tokens = list(tokens)
if LiveService.TB in tokens:
tokens.remove(LiveService.TB)
assert(len(tokens) >= 1)
body = token_template % \
{'id': 0,
'address': xml.escape(LiveService.TB[0]),
'policy_reference': ''}
for id, token in enumerate(tokens):
if token[1] is not None:
policy_reference = policy_reference_template % \
{'uri': xml.quoteattr(token[1])}
else:
policy_reference = ""
t = token_template % \
{'id': id + 1,
'address': xml.escape(token[0]),
'policy_reference': policy_reference}
body += t
return '<ps:RequestMultipleSecurityTokens ' \
'xmlns:ps="http://schemas.microsoft.com/Passport/SoapServices/PPCRL" ' \
'Id="RSTS">%s</ps:RequestMultipleSecurityTokens>' % body
def process_response(soap_response):
body = soap_response.body
return body.findall("./wst:RequestSecurityTokenResponseCollection/" \
"wst:RequestSecurityTokenResponse")
| billiob/papyon | papyon/service/description/SingleSignOn/RequestMultipleSecurityTokens.py | Python | gpl-2.0 | 4,656 | 0.004296 |
# -*- coding: utf-8 -*-
# @date 161103 - Export excel with get_work_order_report function
"""
Data exportor (Excel, CSV...)
"""
import io
import math
from datetime import datetime
from xlsxwriter.workbook import Workbook
import tablib
from utils.tools import get_product_size
def get_customers(customer_list=None, file_format='csv'):
"""Generate customer data file for download."""
if customer_list is None:
customer_list = []
data = tablib.Dataset()
data.headers = ('客戶代碼', '客戶名稱')
for c in customer_list:
data.append((c.c_code, c.c_name))
if file_format == 'csv':
return data.csv
return data
def get_maintenance_log(log_list=None, file_format='csv'):
"""Generate maintenance log to csv file for download."""
if log_list is None:
log_list = []
data = tablib.Dataset()
data.headers = ('機台', '維修項目', '開始時間',
'員工', '結束時間', '員工',
'總計時間')
for log in log_list:
m_code = log['m_code'].replace('<br>', '\n')
data.append((log['machine_id'], m_code, log['start_time'],
log['who_start'], log['end_time'], log['who_end'],
log['total_time'][0])
)
if file_format == 'csv':
return data.csv
return data
def get_w_m_performance_report(file_format='xls'):
"""Generate excel file for download by worker and machine performance."""
row_number = 11
data = tablib.Dataset()
data.append(['個人效率期間表 ({})'.format(
datetime.now().strftime("%Y/%m/%d"))] + [''] * (row_number - 1))
data.append(['工號', '姓名', '日期', '標準量', '效率標準量',
'實質生產量', '總稼動時間', '總停機時間', '稼動 %', '數量效率 %',
'平均效率 %'])
if file_format == 'xls':
return data.xls
return data
def get_loss_rate_report(report_data, file_format='csv'):
"""Generate csv file for download by machine loss rate."""
data = tablib.Dataset()
data.headers = ('機台', '機型', '良品數', '不良品數', '損耗率(%)',
'損耗金額(RMB)', '損耗率排名')
rank = 0
old_loss_rate = None
for r in sorted(report_data, key=lambda k: k['loss_rate'], reverse=True):
if old_loss_rate != r['loss_rate']:
rank += 1
old_loss_rate = r['loss_rate']
record = [r['machine_id'], r['machine_type'], r['count_qty'],
r['event_qty'], r['loss_rate'], r['total_loss_money'],
rank]
data.append(record)
if file_format == 'csv':
return data.csv
return data
def get_loss_rate_detail_report(report_data, file_format='csv'):
"""Generate csv file for download by machine loss rate detail."""
data = tablib.Dataset()
data.headers = ('日期', '良品數', '不良品數', '損耗率(%)',
'損耗金額(RMB)')
for r in sorted(report_data, key=lambda k: k['record_date']):
record = [r['record_date'], r['count_qty'], r['event_qty'],
r['loss_rate'], r['total_loss_money']]
data.append(record)
if file_format == 'csv':
return data.csv
return data
def get_uptime_report(report_data='', file_format='xls'):
"""Generate excel file for download by uptime information."""
data = tablib.Dataset()
data.append_separator('製造部各工程稼動率一覽表')
data.append(['月份:10', '星期', '', '', '', '', '',
'目標', '', '', '', ''])
data.append(['', '', '加締卷取(%)', '組立(%)', '老化(%)',
'CUTTING(%)', 'TAPPING(%)', '加締卷取',
'組立', '老化', 'CUTTING', 'TAPPING'])
if file_format == 'xls':
return data.xls
return data
def get_work_order_report(report_data, file_format='csv'):
"""Generate csv file for download by work order."""
# data = tablib.Dataset()
# data.headers = ('製令編號', '料號', '客戶', '產品規格',
# '投入數', '應繳庫數',
# '加締捲取', '組立', '老化', '選別', '加工切角')
# for r in sorted(report_data, key=lambda k: k['order_no']):
# try:
# intput_count = int(r['input_count'])
# except (TypeError, ValueError):
# intput_count = -1
# record = [r['order_no'], r['part_no'], r['customer'], r['product'],
# intput_count, math.floor(intput_count / 1.03),
# r['step1_status'], r['step2_status'], r['step3_status'],
# r['step4_status'], r['step5_status']]
# data.append(record)
# if file_format == 'csv':
# return data.csv
# return data
output = io.BytesIO()
if file_format == 'xls':
workbook = Workbook(output, {'in_memory': True})
worksheet = workbook.add_worksheet()
# merge_format = workbook.add_format({
# 'bold': 1,
# 'border': 1,
# 'align': 'center',
# 'valign': 'vcenter'})
worksheet.merge_range('A1:A3', '製令編號')
worksheet.merge_range('B1:B3', '料號')
worksheet.merge_range('C1:C3', '客戶')
worksheet.merge_range('D1:D3', '產品規格')
worksheet.merge_range('E1:E3', '投入數')
worksheet.merge_range('F1:F3', '應繳庫數')
worksheet.write('G1', '加締捲取')
worksheet.write('H1', '組立')
worksheet.write('I1', '老化')
worksheet.write('J1', '選別')
worksheet.write('K1', '加工切角')
for col_name in ('G', 'H', 'I', 'J', 'K'):
worksheet.write(col_name + '2', '機器')
worksheet.write(col_name + '3', '良品數')
row = 4
for r in sorted(report_data, key=lambda k: k['order_no']):
try:
intput_count = int(r['input_count'])
except (TypeError, ValueError):
intput_count = -1
worksheet.merge_range('A{}:A{}'.format(row, row + 2),
r['order_no'])
worksheet.merge_range('B{}:B{}'.format(row, row + 2), r['part_no'])
worksheet.merge_range('C{}:C{}'.format(row, row + 2),
r['customer'])
worksheet.merge_range('D{}:D{}'.format(row, row + 2), r['product'])
worksheet.merge_range('E{}:E{}'.format(row, row + 2), intput_count)
worksheet.merge_range('F{}:F{}'.format(row, row + 2),
math.floor(intput_count / 1.03))
for process in range(1, 6):
row_tag = chr(71 + process - 1)
worksheet.write_string('{}{}'.format(row_tag, row),
r['step{}_status'.format(process)])
machine = r['step{}_machine'.format(process)]
count = r['step{}_count'.format(process)]
worksheet.write_string('{}{}'.format(row_tag, row + 1),
machine if machine else '')
worksheet.write_string('{}{}'.format(row_tag, row + 2),
str(count) if count else '')
row += 3
workbook.close()
output.seek(0)
return output.read()
def get_order_report(report_data, file_format='csv'):
"""Generate csv file for download by machine loss rate detail."""
data = tablib.Dataset()
data.headers = ('製令編號', '客戶', '規格', '投入數', '需求數',
'加締捲曲', '組立', '老化', '選別', '加工切腳')
for r in sorted(report_data, key=lambda k: k['order_no']):
record = [r['order_no'], r['customer'], get_product_size(r['part_no']),
r['input_count'], r['require_count'],
r['step1_prod_qty'], r['step2_prod_qty'],
r['step3_prod_qty'], r['step4_prod_qty'],
r['step5_prod_qty']]
data.append(record)
if file_format == 'csv':
return data.csv
return data
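# Illustrative usage sketch (added): get_customers() only needs objects with
# `c_code`/`c_name` attributes, so a stub class (hypothetical) is enough to
# exercise the CSV export path.
if __name__ == '__main__':
    class _Customer(object):
        def __init__(self, c_code, c_name):
            self.c_code = c_code
            self.c_name = c_name
    print(get_customers([_Customer('C001', 'ACME')]))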
| grtfou/data-analytics-web | website/utils/data_exportor.py | Python | mit | 8,235 | 0 |
import sys
import time
sleep = time.sleep
if sys.platform == 'win32':
time = time.clock
else:
time = time.time
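# Illustrative usage sketch (added): time() is the platform-appropriate clock
# selected above, so only differences between calls are meaningful.
if __name__ == '__main__':
    t0 = time()
    sleep(0.1)
    print('elapsed: %.3fs' % (time() - t0))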
| egbertbouman/tribler-g | Tribler/Core/DecentralizedTracking/pymdht/core/ptime.py | Python | lgpl-2.1 | 124 | 0.008065 |
import sys
try:
from django.conf import settings
settings.configure(
DEBUG=True,
USE_TZ=True,
DATABASES={
"default": {
"ENGINE": "django.db.backends.sqlite3",
}
},
ROOT_URLCONF="modals.urls",
INSTALLED_APPS=[
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sites",
"modals",
"templates",
"images",
"attrs",
"tests",
"menus"
],
SITE_ID=1,
NOSE_ARGS=['-s'],
)
try:
import django
setup = django.setup
except AttributeError:
pass
else:
setup()
from django_nose import NoseTestSuiteRunner
except ImportError:
import traceback
traceback.print_exc()
raise ImportError("To fix this error, run: pip install -r requirements-test.txt")
def run_tests(*test_args):
if not test_args:
test_args = ['tests']
# Run tests
test_runner = NoseTestSuiteRunner(verbosity=1)
failures = test_runner.run_tests(test_args)
if failures:
sys.exit(failures)
if __name__ == '__main__':
run_tests(*sys.argv[1:])
| publica-io/django-publica-modals | runtests.py | Python | bsd-3-clause | 1,244 | 0.000804 |
import numpy
from chainer.backends import cuda
from chainer import initializer
# Original code forked from MIT licensed keras project
# https://github.com/fchollet/keras/blob/master/keras/initializations.py
class Orthogonal(initializer.Initializer):
"""Initializes array with an orthogonal system.
This initializer first makes a matrix of the same shape as the
array to be initialized whose elements are drawn independently from
    the standard Gaussian distribution.
Next, it applies Singular Value Decomposition (SVD) to the matrix.
Then, it initializes the array with either side of resultant
orthogonal matrices, depending on the shape of the input array.
Finally, the array is multiplied by the constant ``scale``.
If the ``ndim`` of the input array is more than 2, we consider the array
to be a matrix by concatenating all axes except the first one.
The number of vectors consisting of the orthogonal system
(i.e. first element of the shape of the array) must be equal to or smaller
than the dimension of each vector (i.e. second element of the shape of
the array).
Attributes:
~Orthogonal.scale (float): A constant to be multiplied by.
~Orthogonal.dtype: Data type specifier.
Reference: Saxe et al., https://arxiv.org/abs/1312.6120
"""
def __init__(self, scale=1.1, dtype=None):
self.scale = scale
super(Orthogonal, self).__init__(dtype)
# TODO(Kenta Oono)
# How do we treat overcomplete base-system case?
def __call__(self, array):
if self.dtype is not None:
assert array.dtype == self.dtype
xp = cuda.get_array_module(array)
if not array.shape: # 0-dim case
array[...] = self.scale
elif not array.size:
raise ValueError('Array to be initialized must be non-empty.')
else:
# numpy.prod returns float value when the argument is empty.
flat_shape = (len(array), int(numpy.prod(array.shape[1:])))
if flat_shape[0] > flat_shape[1]:
raise ValueError('Cannot make orthogonal system because'
' # of vectors ({}) is larger than'
' that of dimensions ({})'.format(
flat_shape[0], flat_shape[1]))
a = numpy.random.normal(size=flat_shape)
# we do not have cupy.linalg.svd for now
u, _, v = numpy.linalg.svd(a, full_matrices=False)
# pick the one with the correct shape
q = u if u.shape == flat_shape else v
array[...] = xp.asarray(q.reshape(array.shape))
array *= self.scale
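# Illustrative usage sketch (added, not part of the original module): the
# initializer fills a pre-allocated array in place; with scale=1.0 the rows
# form an orthonormal system.
if __name__ == '__main__':
    w = numpy.empty((3, 5), dtype=numpy.float32)
    Orthogonal(scale=1.0, dtype=numpy.float32)(w)
    print(numpy.allclose(w.dot(w.T), numpy.eye(3), atol=1e-5))  # True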
| aonotas/chainer | chainer/initializers/orthogonal.py | Python | mit | 2,709 | 0 |
from JumpScale import j
import JumpScale.baselib.watchdog.manager
import JumpScale.baselib.redis
import JumpScale.lib.rogerthat
descr = """
critical alert
"""
organization = "jumpscale"
enable = True
REDIS_PORT = 9999
# API_KEY = j.application.config.get('rogerthat.apikey')
redis_client = j.clients.credis.getRedisClient('127.0.0.1', REDIS_PORT)
# rogerthat_client = j.clients.rogerthat.get(API_KEY)
# ANSWERS = [{'id': 'yes', 'caption': 'Take', 'action': '', 'type': 'button'},]
# def _send_message(message, contacts, answers=ANSWERS, alert_flags=6):
# result = rogerthat_client.send_message(message, contacts, answers=answers, alert_flags=alert_flags)
# if result:
# if result['error']:
# j.logger.log('Could not send rogerthat message')
# return
# else:
# message_id = result['result']
# return message_id
def escalateL1(watchdogevent):
if not j.tools.watchdog.manager.inAlert(watchdogevent):
watchdogevent.escalationstate = 'L1'
# contact1 = redis_client.hget('contacts', '1')
message = str(watchdogevent)
# message_id = _send_message(message, [contact1,])
# watchdogevent.message_id = message_id
j.tools.watchdog.manager.setAlert(watchdogevent)
print "Escalate:%s"%message
def escalateL2(watchdogevent):
if watchdogevent.escalationstate == 'L1':
watchdogevent.escalationstate = 'L2'
contacts = redis_client.hgetall('contacts')
message = str(watchdogevent)
message_id = _send_message(message, [contacts['2'], contacts['3']])
watchdogevent.message_id = message_id
j.tools.watchdog.manager.setAlert(watchdogevent)
def escalateL3(watchdogevent):
if watchdogevent.escalationstate == 'L2':
watchdogevent.escalationstate = 'L3'
contacts = redis_client.hgetall('contacts')['all'].split(',')
message = str(watchdogevent)
        # As above, _send_message stays disabled until the rogerthat client is
        # re-enabled.
        # message_id = _send_message(message, contacts)
        # watchdogevent.message_id = message_id
j.tools.watchdog.manager.setAlert(watchdogevent)
|
Jumpscale/jumpscale6_core
|
apps/watchdogmanager/alerttypes/critical.py
|
Python
|
bsd-2-clause
| 2,105 | 0.003325 |
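A self-contained sketch of the L1 -> L2 -> L3 escalation ladder implemented above, with plain objects standing in for the watchdog event and the JumpScale manager; the names here are hypothetical:

class Event(object):
    def __init__(self, name):
        self.name = name
        self.escalationstate = None

LADDER = {None: 'L1', 'L1': 'L2', 'L2': 'L3'}

def escalate(event):
    # Each level only fires from the previous one, as in the handlers above.
    if event.escalationstate in LADDER:
        event.escalationstate = LADDER[event.escalationstate]
        print("Escalate %s to %s" % (event.name, event.escalationstate))

e = Event('disk full')
escalate(e)  # Escalate disk full to L1
escalate(e)  # Escalate disk full to L2
escalate(e)  # Escalate disk full to L3
escalate(e)  # already at L3: no further escalation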
import csv
import logging
import json
import sys
import urllib2
from django.conf import settings
from django.core.management.base import BaseCommand
from machine.models import Machine
logger = logging.getLogger(__name__)
XMAN_URL = "http://10.180.2.243/api/hostinfo.php?sql=hostname+=+'%s'"
IDC_ABBR = {
'shangdi': 'sd',
'lugu': 'lg',
'lugu6': 'lg',
'haihang': 'hh',
'wucaicheng': 'dp',
}
class Command(BaseCommand):
def handle(self, *args, **options):
changes = []
for machine in Machine.objects.order_by('hostname'):
hostname = machine.hostname
url = XMAN_URL % hostname
data = json.load(urllib2.urlopen(url))
xman = {}
if data and type(data) is dict:
k, v = data.popitem()
if v and type(v) is dict:
try:
xman = {
'ip': v['ipaddr'],
'idc': IDC_ABBR[v['site'].lower()],
'rack': v['location'].lower(),
}
except Exception as e:
print 'Error on host: %s' % hostname
raise
if not xman:
# the machine doesn't exist in xman, delete it later.
changes.append((machine, xman, ))
else:
# check if any field changed.
# can't use iteritems as the dict might change.
for k, v in xman.items():
if getattr(machine, k) == v:
del xman[k]
if xman:
# some fields changed.
changes.append((machine, xman, ))
if not changes:
print 'Nothing updated from xman, exiting.'
else:
print 'All changes from xman:'
for machine, xman in changes:
self.print_change(machine, xman)
print
print 'Confirm following changes...'
answer = None
for machine, xman in changes:
self.print_change(machine, xman)
while answer != 'a':
answer = raw_input('Apply this or all following change[s]? '
'<y[es]/n[o]/a[ll]>: ')
if answer in ['y', 'n', 'a']: break
if answer == 'n': continue
# apply change
self.apply_change(machine, xman)
def print_change(self, machine, xman):
if not xman:
action = 'host deleted'
else:
action = ', '.join(['%s: %s ==> %s' % (k, getattr(machine, k), v)
for k, v in xman.iteritems()])
print '%s: %s' % (machine.hostname, action)
def apply_change(self, machine, xman):
if not xman:
machine.delete()
else:
for k, v in xman.iteritems():
setattr(machine, k, v)
machine.save()
|
zxl200406/minos
|
owl/machine/management/commands/import_xman.py
|
Python
|
apache-2.0
| 2,543 | 0.012977 |
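The core of the command above is the field diff that prunes unchanged keys before prompting; a standalone sketch of that step, with a plain object standing in for the Django model (attribute names taken from the code above, values assumed):

class Machine(object):
    def __init__(self, ip, idc, rack):
        self.ip, self.idc, self.rack = ip, idc, rack

machine = Machine(ip='10.0.0.1', idc='sd', rack='a01')
xman = {'ip': '10.0.0.2', 'idc': 'sd', 'rack': 'a01'}

# Iterate over a copy so it is safe to delete keys while looping
# (the same reason the command above avoids iteritems()).
for k, v in list(xman.items()):
    if getattr(machine, k) == v:
        del xman[k]

print(xman)  # {'ip': '10.0.0.2'} -- only the changed field is left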
"""empty message
Revision ID: 6d8e9e4138bf
Revises: 445667ce6268
Create Date: 2016-03-03 10:36:03.205829
"""
# revision identifiers, used by Alembic.
revision = '6d8e9e4138bf'
down_revision = '445667ce6268'
from alembic import op
import app
import sqlalchemy as sa
def upgrade():
### commands auto generated by Alembic - please adjust! ###
op.add_column('provas', sa.Column('data_inicio', sa.DateTime(), nullable=True))
op.add_column('provas', sa.Column('tempo_execucao', sa.Integer(), nullable=True))
### end Alembic commands ###
def downgrade():
### commands auto generated by Alembic - please adjust! ###
op.drop_column('provas', 'tempo_execucao')
op.drop_column('provas', 'data_inicio')
### end Alembic commands ###
|
Maethorin/concept2
|
migrations/versions/6d8e9e4138bf_.py
|
Python
|
mit
| 760 | 0.011842 |
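For reference, a revision file like the one above is usually applied through the Alembic CLI (alembic upgrade 6d8e9e4138bf); the equivalent programmatic call, assuming an alembic.ini in the working directory, is a short sketch:

from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumed project configuration file
command.upgrade(cfg, "6d8e9e4138bf")      # runs upgrade(): adds both columns
# command.downgrade(cfg, "445667ce6268")  # reverts to the previous revision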
import re
from lxml import etree
from nxpy.util import tag_pattern, whitespace_pattern
class Flow(object):
def __init__(self):
self.routes = []
def export(self):
flow = etree.Element('flow')
if len(self.routes):
for route in self.routes:
flow.append(route.export())
return flow
else:
return False
def build(self, node):
for child in node:
nodeName_ = tag_pattern.match(child.tag).groups()[-1]
self.buildChildren(child, nodeName_)
def buildChildren(self, child_, nodeName_, from_subclass=False):
if nodeName_ == 'route':
obj_ = Route()
obj_.build(child_)
self.routes.append(obj_)
class Route(object):
def __init__(self):
self.name = ''
self.operation = None
self.match = {
"destination": [],
"source": [],
"protocol": [],
"port": [],
"destination-port": [],
"source-port": [],
"icmp-code": [],
"icmp-type": [],
"tcp-flags": [],
"packet-length": [],
"dscp": [],
"fragment": []
}
''' Match is a dict with list values
        example: self.match = {
"destination": [<ip-prefix(es)>],
"source": [<ip-prefix(es)>],
"protocol": [<numeric-expression(s)>],
"port": [<numeric-expression(s)>],
"destination-port": [<numeric-expression(s)>]
"source-port": [<numeric-expression(s)>],
"icmp-code": [<numeric-expression(s)>],
"icmp-type": [<numeric-expression(s)>],
"tcp-flags": [<bitwise-expression(s)>],
"packet-length": [<numeric-expression(s)>],
"dscp": [<numeric-expression(s)>],
"fragment": [
"dont-fragment"
"not-a-fragment"
"is-fragment"
"first-fragment"
"last-fragment"
]
'''
self.then = {
"accept": False,
"discard": False,
"community": False,
"next-term": False,
"rate-limit": False,
"sample": False,
"routing-instance": False
}
        '''Then is a dict (this may be revisited in the future):
self.then = {
"accept": True/False,
"discard": True/False,
"community": "<name>"/False,
"next-term": True/False,
"rate-limit": <rate>/False,
"sample": True/False,
"routing-instance": "<RouteTarget extended community>"
}
'''
def export(self):
if self.operation:
ro = etree.Element('route', {'operation': self.operation})
else:
ro = etree.Element('route')
if self.name:
etree.SubElement(ro, "name").text = self.name
match = etree.Element("match")
for key in self.match:
if self.match[key]:
for value in self.match[key]:
etree.SubElement(match, key).text = value
if match.getchildren():
ro.append(match)
then = etree.Element("then")
for key in self.then:
if self.then[key]:
if self.then[key] is not True and self.then[key] is not False:
etree.SubElement(then, key).text = self.then[key]
else:
etree.SubElement(then, key)
if then.getchildren():
ro.append(then)
if ro.getchildren():
return ro
else:
return False
def build(self, node):
for child in node:
nodeName_ = tag_pattern.match(child.tag).groups()[-1]
self.buildChildren(child, nodeName_)
def buildChildren(self, child_, nodeName_, from_subclass=False):
if nodeName_ == 'name':
name_ = child_.text
name_ = re.sub(whitespace_pattern, " ", name_).strip()
self.name = name_
elif nodeName_ == 'match':
for grandChild_ in child_:
grandChildName_ = tag_pattern.match(
grandChild_.tag).groups()[-1]
grandChildText = grandChild_.text
grandChildText = re.sub(
whitespace_pattern, " ", grandChildText).strip()
self.match[grandChildName_].append(grandChildText)
elif nodeName_ == 'then':
for grandChild_ in child_:
grandChildName_ = tag_pattern.match(
grandChild_.tag).groups()[-1]
self.then[grandChildName_] = True
|
Kent1/nxpy
|
nxpy/flow.py
|
Python
|
apache-2.0
| 4,735 | 0 |
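A short usage sketch for the classes above: fill in a Route's match/then dicts and serialize it with export(). The values are illustrative, and the import assumes the module above is available as nxpy.flow:

from lxml import etree
from nxpy.flow import Route  # the module above

route = Route()
route.name = 'block-icmp'
route.match['destination'].append('192.0.2.0/24')
route.match['protocol'].append('icmp')
route.then['discard'] = True   # boolean actions become empty elements

# export() only emits <match>/<then> when they have children.
print(etree.tostring(route.export(), pretty_print=True).decode())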
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2012 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import subprocess
import os
import re
import requests
from weboob.capabilities.radio import CapRadio, Radio
from weboob.capabilities.audio import CapAudio, BaseAudio, Playlist, Album
from weboob.capabilities.base import empty
from weboob.tools.application.repl import ReplApplication, defaultcount
from weboob.tools.application.media_player import InvalidMediaPlayer, MediaPlayer, MediaPlayerNotFound
from weboob.tools.application.formatters.iformatter import PrettyFormatter
__all__ = ['Radioob']
class RadioListFormatter(PrettyFormatter):
MANDATORY_FIELDS = ('id', 'title')
def get_title(self, obj):
return obj.title
def get_description(self, obj):
result = ''
if hasattr(obj, 'description') and not empty(obj.description):
result += '%-30s' % obj.description
if hasattr(obj, 'current') and not empty(obj.current):
if obj.current.who:
result += ' (Current: %s - %s)' % (obj.current.who, obj.current.what)
else:
result += ' (Current: %s)' % obj.current.what
return result
class SongListFormatter(PrettyFormatter):
MANDATORY_FIELDS = ('id', 'title')
def get_title(self, obj):
result = obj.title
if hasattr(obj, 'author') and not empty(obj.author):
result += ' (%s)' % obj.author
return result
def get_description(self, obj):
result = ''
if hasattr(obj, 'description') and not empty(obj.description):
result += '%-30s' % obj.description
return result
class AlbumTrackListInfoFormatter(PrettyFormatter):
MANDATORY_FIELDS = ('id', 'title', 'tracks_list')
def get_title(self, obj):
result = obj.title
if hasattr(obj, 'author') and not empty(obj.author):
result += ' (%s)' % obj.author
return result
def get_description(self, obj):
result = ''
for song in obj.tracks_list:
result += '- %s%-30s%s ' % (self.BOLD, song.title, self.NC)
if hasattr(song, 'duration') and not empty(song.duration):
result += '%-10s ' % song.duration
else:
result += '%-10s ' % ' '
result += '(%s)\r\n\t' % (song.id)
return result
class PlaylistTrackListInfoFormatter(PrettyFormatter):
MANDATORY_FIELDS = ('id', 'title', 'tracks_list')
def get_title(self, obj):
return obj.title
def get_description(self, obj):
result = ''
for song in obj.tracks_list:
result += '- %s%-30s%s ' % (self.BOLD, song.title, self.NC)
if hasattr(song, 'author') and not empty(song.author):
result += '(%-15s) ' % song.author
if hasattr(song, 'duration') and not empty(song.duration):
result += '%-10s ' % song.duration
else:
result += '%-10s ' % ' '
result += '(%s)\r\n\t' % (song.id)
return result
class Radioob(ReplApplication):
APPNAME = 'radioob'
VERSION = '1.1'
COPYRIGHT = 'Copyright(C) 2010-YEAR Romain Bignon\nCopyright(C) YEAR Pierre Maziere'
DESCRIPTION = "Console application allowing to search for web radio stations, listen to them and get information " \
"like the current song."
SHORT_DESCRIPTION = "search, show or listen to radio stations"
CAPS = (CapRadio, CapAudio)
EXTRA_FORMATTERS = {'radio_list': RadioListFormatter,
'song_list': SongListFormatter,
'album_tracks_list_info': AlbumTrackListInfoFormatter,
'playlist_tracks_list_info': PlaylistTrackListInfoFormatter,
}
COMMANDS_FORMATTERS = {'ls': 'radio_list',
'playlist': 'radio_list',
}
COLLECTION_OBJECTS = (Radio, BaseAudio, )
PLAYLIST = []
def __init__(self, *args, **kwargs):
ReplApplication.__init__(self, *args, **kwargs)
self.player = MediaPlayer(self.logger)
def main(self, argv):
self.load_config()
return ReplApplication.main(self, argv)
def complete_download(self, text, line, *ignored):
args = line.split(' ')
if len(args) == 2:
return self._complete_object()
elif len(args) >= 3:
return self.path_completer(args[2])
def do_download(self, line):
"""
download ID [DIRECTORY]
Download an audio file
"""
_id, dest = self.parse_command_args(line, 2, 1)
obj = self.retrieve_obj(_id)
if obj is None:
print('No object matches with this id:', _id, file=self.stderr)
return 3
if isinstance(obj, BaseAudio):
streams = [obj]
else:
streams = obj.tracks_list
if len(streams) == 0:
print('Radio or Audio file not found:', _id, file=self.stderr)
return 3
for stream in streams:
self.download_file(stream, dest)
def download_file(self, audio, dest):
_obj = self.get_object(audio.id, 'get_audio', ['url', 'title'])
if not _obj:
print('Audio file not found: %s' % audio.id, file=self.stderr)
return 3
if not _obj.url:
print('Error: the direct URL is not available.', file=self.stderr)
return 4
audio.url = _obj.url
def check_exec(executable):
with open('/dev/null', 'w') as devnull:
process = subprocess.Popen(['which', executable], stdout=devnull)
if process.wait() != 0:
print('Please install "%s"' % executable, file=self.stderr)
return False
return True
def audio_to_file(_audio):
ext = _audio.ext
if not ext:
ext = 'audiofile'
title = _audio.title if _audio.title else _audio.id
return '%s.%s' % (re.sub('[?:/]', '-', title), ext)
if dest is not None and os.path.isdir(dest):
dest += '/%s' % audio_to_file(audio)
if dest is None:
dest = audio_to_file(audio)
if audio.url.startswith('rtmp'):
if not check_exec('rtmpdump'):
return 1
args = ('rtmpdump', '-e', '-r', audio.url, '-o', dest)
elif audio.url.startswith('mms'):
if not check_exec('mimms'):
return 1
args = ('mimms', '-r', audio.url, dest)
else:
if check_exec('wget'):
args = ('wget', '-c', audio.url, '-O', dest)
elif check_exec('curl'):
args = ('curl', '-C', '-', audio.url, '-o', dest)
else:
return 1
os.spawnlp(os.P_WAIT, args[0], *args)
def complete_play(self, text, line, *ignored):
args = line.split(' ')
if len(args) == 2:
return self._complete_object()
def do_play(self, line):
"""
play ID [stream_id]
        Play a radio or an audio file with an available player (optionally specify the wanted stream).
"""
_id, stream_id = self.parse_command_args(line, 2, 1)
if not _id:
print('This command takes an argument: %s' % self.get_command_help('play', short=True), file=self.stderr)
return 2
try:
stream_id = int(stream_id)
except (ValueError, TypeError):
stream_id = 0
obj = self.retrieve_obj(_id)
if obj is None:
print('No object matches with this id:', _id, file=self.stderr)
return 3
if isinstance(obj, Radio):
try:
streams = [obj.streams[stream_id]]
except IndexError:
print('Stream %d not found' % stream_id, file=self.stderr)
return 1
elif isinstance(obj, BaseAudio):
streams = [obj]
else:
streams = obj.tracks_list
if len(streams) == 0:
print('Radio or Audio file not found:', _id, file=self.stderr)
return 3
try:
player_name = self.config.get('media_player')
media_player_args = self.config.get('media_player_args')
if not player_name:
self.logger.debug(u'You can set the media_player key to the player you prefer in the radioob '
'configuration file.')
for stream in streams:
if isinstance(stream, BaseAudio) and not stream.url:
stream = self.get_object(stream.id, 'get_audio')
else:
r = requests.get(stream.url, stream=True)
buf = r.iter_content(512).next()
r.close()
playlistFormat = None
for line in buf.split("\n"):
if playlistFormat is None:
if line == "[playlist]":
playlistFormat = "pls"
elif line == "#EXTM3U":
playlistFormat = "m3u"
else:
break
elif playlistFormat == "pls":
if line.startswith('File'):
stream.url = line.split('=', 1).pop(1).strip()
break
elif playlistFormat == "m3u":
if line[0] != "#":
stream.url = line.strip()
break
self.player.play(stream, player_name=player_name, player_args=media_player_args)
except (InvalidMediaPlayer, MediaPlayerNotFound) as e:
print('%s\nRadio URL: %s' % (e, stream.url))
def retrieve_obj(self, _id):
obj = None
if self.interactive:
try:
obj = self.objects[int(_id) - 1]
_id = obj.id
except (IndexError, ValueError):
pass
m = CapAudio.get_object_method(_id)
if m:
obj = self.get_object(_id, m)
return obj if obj is not None else self.get_object(_id, 'get_radio')
def do_playlist(self, line):
"""
playlist cmd [args]
playlist add ID [ID2 ID3 ...]
playlist remove ID [ID2 ID3 ...]
playlist export [FILENAME]
playlist display
"""
if not line:
print('This command takes an argument: %s' % self.get_command_help('playlist'), file=self.stderr)
return 2
cmd, args = self.parse_command_args(line, 2, req_n=1)
if cmd == "add":
_ids = args.strip().split(' ')
for _id in _ids:
audio = self.get_object(_id, 'get_audio')
if not audio:
print('Audio file not found: %s' % _id, file=self.stderr)
return 3
if not audio.url:
print('Error: the direct URL is not available.', file=self.stderr)
return 4
self.PLAYLIST.append(audio)
elif cmd == "remove":
_ids = args.strip().split(' ')
for _id in _ids:
audio_to_remove = self.get_object(_id, 'get_audio')
if not audio_to_remove:
print('Audio file not found: %s' % _id, file=self.stderr)
return 3
if not audio_to_remove.url:
print('Error: the direct URL is not available.', file=self.stderr)
return 4
for audio in self.PLAYLIST:
if audio.id == audio_to_remove.id:
self.PLAYLIST.remove(audio)
break
elif cmd == "export":
filename = "playlist.m3u"
if args:
filename = args
file = open(filename, 'w')
for audio in self.PLAYLIST:
file.write('%s\r\n' % audio.url)
file.close()
elif cmd == "display":
for audio in self.PLAYLIST:
self.cached_format(audio)
else:
            print('Playlist command only supports "add", "remove", "display" and "export" arguments.', file=self.stderr)
return 2
def complete_info(self, text, line, *ignored):
args = line.split(' ')
if len(args) == 2:
return self._complete_object()
def do_info(self, _id):
"""
info ID
Get information about a radio or an audio file.
"""
if not _id:
print('This command takes an argument: %s' % self.get_command_help('info', short=True), file=self.stderr)
return 2
obj = self.retrieve_obj(_id)
if isinstance(obj, Album):
self.set_formatter('album_tracks_list_info')
elif isinstance(obj, Playlist):
self.set_formatter('playlist_tracks_list_info')
if obj is None:
print('No object matches with this id:', _id, file=self.stderr)
return 3
self.format(obj)
@defaultcount(10)
def do_search(self, pattern=None):
"""
search (radio|song|file|album|playlist) PATTERN
List (radio|song|file|album|playlist) matching a PATTERN.
If PATTERN is not given, this command will list all the (radio|song|album|playlist).
"""
if not pattern:
print('This command takes an argument: %s' % self.get_command_help('playlist'), file=self.stderr)
return 2
cmd, args = self.parse_command_args(pattern, 2, req_n=1)
if not args:
args = ""
self.set_formatter_header(u'Search pattern: %s' % pattern if pattern else u'All radios')
self.change_path([u'search'])
if cmd == "radio":
self.set_formatter('radio_list')
for radio in self.do('iter_radios_search', pattern=args):
self.add_object(radio)
self.format(radio)
elif cmd == "song" or cmd == "file":
self.set_formatter('song_list')
for audio in self.do('search_audio', pattern=args):
self.add_object(audio)
self.format(audio)
elif cmd == "album":
self.set_formatter('song_list')
for album in self.do('search_album', pattern=args):
self.add_object(album)
self.format(album)
elif cmd == "playlist":
self.set_formatter('song_list')
for playlist in self.do('search_playlist', pattern=args):
self.add_object(playlist)
self.format(playlist)
else:
print('Search command only supports "radio", "song", "file", "album" and "playlist" arguments.', file=self.stderr)
return 2
def do_ls(self, line):
"""
ls
List radios
"""
ret = super(Radioob, self).do_ls(line)
return ret
|
willprice/weboob
|
weboob/applications/radioob/radioob.py
|
Python
|
agpl-3.0
| 15,978 | 0.001189 |
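do_play() above sniffs the first chunk of a stream to recognize .pls and .m3u playlists and extract the real stream URL; a standalone sketch of that detection logic, with the sample buffer as an assumed input:

def resolve_stream_url(buf, fallback_url):
    fmt = None
    for line in buf.split("\n"):
        if fmt is None:
            if line == "[playlist]":
                fmt = "pls"
            elif line == "#EXTM3U":
                fmt = "m3u"
            else:
                return fallback_url       # not a playlist: play directly
        elif fmt == "pls" and line.startswith("File"):
            return line.split("=", 1)[1].strip()   # File1=<url>
        elif fmt == "m3u" and line and not line.startswith("#"):
            return line.strip()           # first non-comment line is the URL
    return fallback_url

sample = "[playlist]\nFile1=http://example.com/stream.mp3\nLength1=-1"
print(resolve_stream_url(sample, "http://example.com/fallback"))
# -> http://example.com/stream.mp3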
# -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
from django.template import Context, Template
from pootle.core.delegate import scores
def _render_str(string, context=None):
context = context or {}
context = Context(context)
return Template(string).render(context)
def test_templatetag_progress_bar():
rendered = _render_str("{% load common_tags %}{% progress_bar 0 0 0 %}")
assert "<span class=\'value translated\'>0%</span>" in rendered
assert '<span class=\'value fuzzy\'>0%</span>' in rendered
assert '<span class=\'value untranslated\'>0%</span>' in rendered
rendered = _render_str(
"{% load common_tags %}{% progress_bar 123 23 73 %}")
assert "<span class=\'value translated\'>59.3%</span>" in rendered
assert "<span class=\'value fuzzy\'>18.7%</span>" in rendered
assert "<span class=\'value untranslated\'>22.0%</span>" in rendered
assert '<td class="translated" style="width: 59.3%">' in rendered
assert '<td class="fuzzy" style="width: 18.7%">' in rendered
assert '<td class="untranslated" style="width: 22.0%">' in rendered
@pytest.mark.django_db
def test_inclusion_tag_top_scorers(project_set, member):
score_data = scores.get(project_set.__class__)(project_set)
rendered = _render_str(
"{% load common_tags %}{% top_scorers user score_data %}",
context=dict(
user=member,
score_data=score_data.display()))
top_scorer = list(score_data.display())[0]
assert top_scorer["public_total_score"] in rendered
assert top_scorer["user"].email_hash in rendered
|
claudep/pootle
|
tests/pootle_misc/templatetags.py
|
Python
|
gpl-3.0
| 1,836 | 0 |
from ..base import AST
CLASS = "statements.skip"
class SkipStatement(AST):
def __init__(self):
super().__init__(CLASS, "skip_statement")
|
PetukhovVictor/compiler
|
src/Parser/AST/statements/skip.py
|
Python
|
mit
| 152 | 0 |
#!/usr/bin/env python -u
# -------------------------------------------------------------------------- #
# Copyright 2011, MadeiraCloud (support@madeiracloud.com) #
# -------------------------------------------------------------------------- #
"""MadeiraCloud Agent
"""
import os
import sys
import errno
import signal
import urllib
import logging
import logging.handlers
from optparse import Option, OptionParser
from madeiracloud import Log
from madeiracloud import Task
from madeiracloud import Health
from madeiracloud import Watcher
from madeiracloud import RTimer
__copyright__ = "Copyright 2011, MadeiraCloud (http://www.madeiracloud.com))"
__license__ = "GPL"
__version__ = "1.0.0"
__maintainer__ = "MadeiraCloud"
__author__ = "dev@madeiracloud.com"
__email__ = "support@madeiracloud.com"
__status__ = "Production"
# -----------------------------------------------------
# Exception
# -----------------------------------------------------
class MadeiraAgentException(Exception):
"""A simple exception class used for MadeiraAgent exceptions"""
pass
# ----------------------------------------------------------------------------------------------
# MadeiraAgent
# ----------------------------------------------------------------------------------------------
class MadeiraAgent(object):
log_level = 'INFO'
log_dir = '/var/log/madeiracloud/'
log_file = log_dir + 'madeiracloud.log'
log_rotate = 3
log_size = 10000000
pidfile = '/var/lock/subsys/madeiracloud'
endpoint_task = 'https://api.madeiracloud.com/agent/task/'
endpoint_health = 'https://api.madeiracloud.com/agent/health/'
interval_task = 6
interval_health = 60
url_metadata = 'http://169.254.169.254/latest/meta-data'
url_userdata = 'http://169.254.169.254/latest/user-data'
def __init__(self, daemon=True, no_task=False, no_health=False):
""" Initializes MadeiraAgent. """
self.__daemon = daemon
self.__no_task = no_task
self.__no_health= no_health
self.__timer = []
self.__metadata = {}
self.__userdata = None
self.__distro = None
# Log, Daemonize and Signal
self._log()
if daemon: self._daemonize()
signal.signal(signal.SIGTERM, self._signal)
signal.signal(signal.SIGINT, self._signal)
signal.signal(signal.SIGQUIT, self._signal)
signal.signal(signal.SIGHUP, self._signal)
signal.signal(signal.SIGCHLD, self._signal)
signal.signal(signal.SIGUSR2, self._signal)
try:
# metadata
            self.__metadata['instance-id'] = urllib.urlopen("%s/instance-id" % self.url_metadata).read()
            if self.__metadata['instance-id'].find("404") != -1:
                raise MadeiraAgentException("Failed to retrieve instance-id from metadata")
# userdata
self.__userdata = urllib.urlopen(self.url_userdata).read()
# distro
f = open('/etc/issue')
self.__distro = f.readlines()[0].split(' ')[0].lower()
f.close()
except Exception, e:
logging.fatal("Failed to initialize MadeiraAgent: %s" % e)
raise MadeiraAgentException
def _log(self):
# setup LOG
try:
level = logging.getLevelName(self.log_level)
logging.getLogger().setLevel(level)
logger = logging.getLogger()
if self.__daemon:
# Add the log message handler to the logger
                if not os.path.exists(self.log_dir):
                    os.makedirs(self.log_dir, 0755)
                fh = logging.handlers.RotatingFileHandler(
filename = self.log_file,
maxBytes = self.log_size,
backupCount = self.log_rotate
)
formatter = Log.LogFormatter(console=False)
else:
# Set up color if we are in a tty and curses is installed
fh = logging.StreamHandler()
formatter = Log.LogFormatter(console=True)
fh.setFormatter(formatter)
logger.addHandler(fh)
except OSError, msg:
raise MadeiraAgentException
def _signal(self, sig, frame):
if sig in (signal.SIGTERM, signal.SIGINT, signal.SIGQUIT): # exit
logging.info('caught signal %s' % sig)
self.exit()
elif sig == signal.SIGHUP: # reload
logging.info('caught signal %s' % sig)
self.reload()
elif sig == signal.SIGCHLD: # TODO:
pass
logging.debug('caught signal %s' % sig)
elif sig == signal.SIGUSR2: # TODO:
pass
logging.debug('caught signal %s' % sig)
else:
logging.warning('caught signal %s' % sig)
def _daemonize(self):
try:
# First fork
try:
pid = os.fork()
if pid > 0:
# Exit first parent
sys.exit(0)
except OSError, e:
logging.error("Cannot run MadeiraAgent in daemon mode: (%d) %s\n" % (e.errno, e.strerror))
raise MadeiraAgentException
# Decouple from parent environment.
os.chdir(".")
os.umask(0)
os.setsid()
# Second fork
try:
pid = os.fork()
if pid > 0:
# Exit second parent.
sys.exit(0)
except OSError, e:
logging.error("Cannot run MadeiraAgent in daemon mode: (%d) %s\n" % (e.errno, e.strerror))
raise MadeiraAgentException
# Open file descriptors and print start message
si = file('/dev/null', 'r')
so = file('/dev/null', 'a+')
se = file('/dev/null', 'a+', 0)
pid = os.getpid()
sys.stderr.write("\nStarted MadeiraAgent with pid %i\n\n" % pid)
sys.stderr.flush()
if not os.path.exists(os.path.dirname(self.pidfile)):
os.mkdir(os.path.dirname(self.pidfile))
file(self.pidfile,'w+').write("%i\n" % pid)
# Redirect standard file descriptors.
os.close(sys.stdin.fileno())
os.close(sys.stdout.fileno())
os.close(sys.stderr.fileno())
os.dup2(si.fileno(), sys.stdin.fileno())
os.dup2(so.fileno(), sys.stdout.fileno())
os.dup2(se.fileno(), sys.stderr.fileno())
except OSError, e:
logging.error("Cannot run MadeiraAgent as daemon: %s" % e)
raise MadeiraAgentException
def run(self):
logging.info("------------------------- Starting MadeiraAgent -------------------------")
logging.info("Log Level: %s" % self.log_level)
if self.__daemon:
logging.info("Log File: %s" % self.log_file)
logging.info("Log Size: %s" % self.log_size)
logging.info("Log Rotate: %s" % self.log_rotate)
try:
logging.info("Endpoint - Task: %s" % self.endpoint_task)
logging.info("Endpoint - Health: %s" % self.endpoint_health)
logging.info("Interval - Task: %d seconds" % self.interval_task)
logging.info("Interval - Health: %d seconds" % self.interval_health)
# task & health
if not self.__no_task: self.__timer.append(RTimer.RTimer(self.interval_task, Task.run, args=[self.endpoint_task, self.__metadata['instance-id'], self.__distro]))
            if not self.__no_health: self.__timer.append(RTimer.RTimer(self.interval_health, Health.run, args=[self.endpoint_health, self.__metadata, self.__distro]))
for t in self.__timer: t.run()
# monitor forever
self._monitor()
except Exception, e:
logging.error(e)
raise MadeiraAgentException
def reload(self):
# TODO
pass
def exit(self):
for t in self.__timer: t.cancel()
logging.info("------------------------- MadeiraAgent is stopped -----------------------")
exit()
def _monitor(self):
Watcher.run()
####################### main() #########################
if __name__ == "__main__":
# Check if a daemon is already running
pidfile = '/var/lock/subsys/madeiracloud'
if os.path.exists(pidfile):
pf = file(pidfile,'r')
pid = int(pf.read().strip())
pf.close()
try:
os.kill(pid, signal.SIG_DFL)
except OSError, (err, msg):
if err == errno.ESRCH:
# Pidfile is stale. Remove it.
os.remove(pidfile)
else:
msg = "Unexpected error when checking pid file '%s'.\n%s\n" %(pidfile, msg)
sys.stderr.write(msg)
sys.exit(1)
else:
msg = "MadeiraAgent is already running (pid %i)\n" % pid
sys.stderr.write(msg)
sys.exit(1)
# options
usage = "[-h] [-f] [-t] [-l]"
optparser = OptionParser(usage=usage)
optparser.add_option(Option("-f", "--fg", action="store_true", dest="foreground",
help = "Runs in the foreground. Default is background"))
optparser.add_option(Option("-t", "--no-task", action="store_true", dest="no_task",
help = "If True, the agent will not try to retrieve any task"))
optparser.add_option(Option("-l", "--no-health", action="store_true", dest="no_health",
help = "If True, the agent will not try to report system health"))
opt = optparser.parse_args(sys.argv)[0]
# run
try:
agent = MadeiraAgent(not opt.foreground, opt.no_task, opt.no_health)
agent.run()
except:
print >> sys.stderr, "Failed to launch MadeiraAgent, please check log file"
exit(1)
|
BillTheBest/MadeiraAgent
|
bin/madeira.py
|
Python
|
bsd-3-clause
| 8,479 | 0.034202 |
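The pidfile check in the main block above probes the process with os.kill(pid, signal.SIG_DFL), relying on SIG_DFL being 0 on most platforms; a clearer standalone sketch of the same stale-pidfile test, using signal 0 explicitly:

import errno
import os

def pid_running(pid):
    try:
        os.kill(pid, 0)        # signal 0 sends nothing; it only checks
    except OSError as e:
        if e.errno == errno.ESRCH:
            return False       # no such process: the pidfile is stale
        raise                  # e.g. EPERM: process exists but isn't ours
    return True

print(pid_running(os.getpid()))  # True: our own pid is always running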
# coding: utf-8
from __future__ import unicode_literals
import logging
import pytest
import requests
LOG = logging.getLogger("fm.crashmanager.tests.bugproviders.rest")
@pytest.mark.parametrize("method", ["delete", "get", "patch", "post", "put"])
def test_rest_bugproviders_no_auth(db, api_client, method):
"""must yield unauthorized without authentication"""
assert getattr(api_client, method)(
"/crashmanager/rest/bugproviders/", {}
).status_code == requests.codes['unauthorized']
@pytest.mark.parametrize("method", ["delete", "get", "patch", "post", "put"])
def test_rest_bugproviders_no_perm(user_noperm, api_client, method):
"""must yield forbidden without permission"""
assert getattr(api_client, method)(
"/crashmanager/rest/bugproviders/", {}
).status_code == requests.codes['forbidden']
@pytest.mark.parametrize("method, url, user", [
("delete", "/crashmanager/rest/bugproviders/", "normal"),
("delete", "/crashmanager/rest/bugproviders/", "restricted"),
("patch", "/crashmanager/rest/bugproviders/", "normal"),
("patch", "/crashmanager/rest/bugproviders/", "restricted"),
("post", "/crashmanager/rest/bugproviders/", "normal"),
("post", "/crashmanager/rest/bugproviders/", "restricted"),
("put", "/crashmanager/rest/bugproviders/", "normal"),
("put", "/crashmanager/rest/bugproviders/", "restricted"),
], indirect=["user"])
def test_rest_bugproviders_methods(api_client, user, method, url):
"""must yield method-not-allowed for unsupported methods"""
assert getattr(api_client, method)(url, {}).status_code == requests.codes['method_not_allowed']
@pytest.mark.parametrize("method, url, user", [
("get", "/crashmanager/rest/bugproviders/1/", "normal"),
("get", "/crashmanager/rest/bugproviders/1/", "restricted"),
("delete", "/crashmanager/rest/bugproviders/1/", "normal"),
("delete", "/crashmanager/rest/bugproviders/1/", "restricted"),
("patch", "/crashmanager/rest/bugproviders/1/", "normal"),
("patch", "/crashmanager/rest/bugproviders/1/", "restricted"),
("post", "/crashmanager/rest/bugproviders/1/", "normal"),
("post", "/crashmanager/rest/bugproviders/1/", "restricted"),
("put", "/crashmanager/rest/bugproviders/1/", "normal"),
("put", "/crashmanager/rest/bugproviders/1/", "restricted"),
], indirect=["user"])
def test_rest_bugproviders_methods_not_found(api_client, user, method, url):
"""must yield not-found for undeclared methods"""
assert getattr(api_client, method)(url, {}).status_code == requests.codes['not_found']
def _compare_rest_result_to_bugprovider(result, provider):
expected_fields = {"id", "classname", "hostname", "urlTemplate"}
assert set(result) == expected_fields
for key, value in result.items():
assert value == getattr(provider, key)
@pytest.mark.parametrize("user", ["normal", "restricted"], indirect=True)
def test_rest_bugproviders_list(api_client, user, cm):
"""test that list returns the right bug providers"""
expected = 4
providers = [cm.create_bugprovider(hostname="test-provider%d.com" % (i + 1),
urlTemplate="test-provider%d.com/template" % (i + 1))
for i in range(expected)]
resp = api_client.get("/crashmanager/rest/bugproviders/")
LOG.debug(resp)
assert resp.status_code == requests.codes['ok']
resp = resp.json()
assert set(resp) == {'count', 'next', 'previous', 'results'}
assert resp['count'] == expected
assert resp['next'] is None
assert resp['previous'] is None
assert len(resp['results']) == expected
for result, provider in zip(resp['results'], providers[:expected]):
_compare_rest_result_to_bugprovider(result, provider)
|
MozillaSecurity/FuzzManager
|
server/crashmanager/tests/test_bugproviders_rest.py
|
Python
|
mpl-2.0
| 3,764 | 0.001063 |
import Util
import time
import unittest
import selectBrowser
from selenium import webdriver
from flaky import flaky
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.common.by import By
# Test loading images.
class tLoadImage(unittest.TestCase):
def setUp(self):
browser = selectBrowser._getBrowser()
Util.setUp(self, browser)
# Test that an image can be loaded and then closed.
def test_load_image(self):
driver = self.driver
timeout = selectBrowser._getSleep()
# Load a specific image.
imageWindow = Util.load_image(self, driver, "Default")
time.sleep( timeout )
# Click on the Data->Close->Image button to close the image.
ActionChains(driver).double_click( imageWindow ).perform()
dataButton = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "//div[text()='Data']/..")))
ActionChains(driver).click( dataButton ).send_keys(Keys.ARROW_DOWN).send_keys(Keys.ARROW_DOWN).send_keys(
Keys.ARROW_RIGHT).send_keys(Keys.ENTER).perform()
time.sleep( timeout )
# Test was written in response to Issue 178. Loading a particular image produced
# a crash.
def test_load_image178(self):
driver = self.driver
timeout = selectBrowser._getSleep()
# Load a specific image.
imageWindow = Util.load_image(self, driver, "SI1.fits")
time.sleep( timeout )
# Make sure we have not crashed by closing the image
ActionChains(driver).double_click( imageWindow ).perform()
dataButton = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "//div[text()='Data']/..")))
ActionChains(driver).click( dataButton ).send_keys(Keys.ARROW_DOWN).send_keys(Keys.ARROW_DOWN).send_keys(
Keys.ARROW_RIGHT).send_keys(Keys.ENTER).perform()
time.sleep( timeout )
    # Test was written in response to Issue 152. Loading an image and then pressing the
    # 100% clip button produced an error rather than changing the clip.
def test_clip100(self):
driver = self.driver
timeout = selectBrowser._getSleep()
# Load a specific image.
imageWindow = Util.load_image(self, driver, "Default")
time.sleep( timeout )
# Store the minimum clip value
minClipText = driver.find_element_by_xpath("//div[@id='clipMinIntensity']/input")
driver.execute_script( "arguments[0].scrollIntoView(true);", minClipText )
minClip = minClipText.get_attribute( "value")
print "Min intensity", minClip
# Press the 100% clip button
ActionChains(driver).double_click( imageWindow ).perform()
clippingButton = WebDriverWait(driver, 20).until(EC.presence_of_element_located((By.XPATH, "//div[text()='Clipping']/..")))
ActionChains(driver).click( clippingButton ).send_keys(Keys.ARROW_RIGHT
).send_keys(Keys.ARROW_DOWN).send_keys(Keys.ARROW_DOWN
).send_keys(Keys.ARROW_DOWN).send_keys(Keys.ARROW_DOWN
).send_keys(Keys.ARROW_DOWN).send_keys(Keys.ARROW_DOWN
).send_keys(Keys.ENTER).perform()
time.sleep( timeout )
# Make sure the minimum clip value goes down.
newMinClip = minClipText.get_attribute( "value")
print "New min intensity", newMinClip
self.assertTrue( float(newMinClip) < float(minClip), "Minimum clip value did not go down")
# Test that we can load a large number of images, one after another
def test_load_images(self):
driver = self.driver
timeout = selectBrowser._getSleep()
# Load a specific image.
imageWindow = Util.load_image(self, driver, "aH.fits")
time.sleep( timeout )
Util.load_image( self, driver, "aJ.fits")
time.sleep( timeout )
Util.load_image( self, driver, "N15693D.fits")
time.sleep( timeout )
Util.load_image( self, driver, "Orion.cont.image.fits")
time.sleep( timeout )
Util.load_image( self, driver, "Orion.methanol.cbc.contsub.image.fits")
time.sleep( timeout )
Util.load_image( self, driver, "TWHydra_CO2_1line.image.fits")
time.sleep( timeout )
Util.load_image( self, driver, "br1202_wide.image")
time.sleep( timeout )
Util.load_image( self, driver, "TWHydra_CO3_2line.image")
time.sleep( timeout )
Util.load_image( self, driver, "TWHydra_cont1.3mm.image")
time.sleep( timeout )
Util.load_image( self, driver, "v2.0_ds2_l000_13pca_map20.fits")
time.sleep( timeout )
#Find the image animator and verify that there are 9 images loaded
upperBoundText = WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.XPATH, "//div[@id='ImageUpperBoundSpin']/input")))
driver.execute_script( "arguments[0].scrollIntoView(true);", upperBoundText)
imageCount = upperBoundText.get_attribute("value")
print "Image Count: ", imageCount
self.assertEqual( imageCount, str(9), "Wrong number of images were loaded")
def tearDown(self):
# Close the browser
self.driver.close()
# Allow browser to fully close before continuing
time.sleep(2)
# Close the session and delete temporary files
self.driver.quit()
if __name__ == "__main__":
unittest.main()
|
slovelan/NRAODev
|
carta/html5/common/skel/source/class/skel/simulation/tLoadImage.py
|
Python
|
gpl-2.0
| 5,677 | 0.01603 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main command group for cloud source command group."""
from googlecloudsdk.api_lib.source import source
from googlecloudsdk.api_lib.sourcerepo import sourcerepo
from googlecloudsdk.calliope import base
from googlecloudsdk.core import properties
from googlecloudsdk.core import resolvers
from googlecloudsdk.core import resources
from googlecloudsdk.core.credentials import store as c_store
@base.ReleaseTracks(base.ReleaseTrack.ALPHA, base.ReleaseTrack.BETA,
base.ReleaseTrack.GA)
class Source(base.Group):
"""Cloud git repository commands."""
def Filter(self, context, args):
"""Initialize context for source commands.
Args:
context: The current context.
args: The argparse namespace that was specified on the CLI or API.
Returns:
The updated context.
"""
resources.REGISTRY.SetParamDefault(
api='source', collection=None, param='projectId',
resolver=resolvers.FromProperty(properties.VALUES.core.project))
resources.REGISTRY.SetParamDefault(
api='sourcerepo', collection=None, param='projectId',
resolver=resolvers.FromProperty(properties.VALUES.core.project))
source.Source.SetResourceParser(resources.REGISTRY)
source.Source.SetApiEndpoint()
sourcerepo.Source.SetResourceParser(resources.REGISTRY)
sourcerepo.Source.SetApiEndpoint()
|
KaranToor/MA450
|
google-cloud-sdk/lib/surface/source/__init__.py
|
Python
|
apache-2.0
| 1,960 | 0.00102 |
from __future__ import print_function
try:
import astropy.io.fits as pyfits
except ImportError:
import pyfits
def intersection(header1, header2, if_conflict=None):
"""
Return a pyfits Header containing the intersection of two pyfits Headers
*if_conflict* [ '1'/1/'Header1' | '2'/2/'Header2' | None ]
        Defines the behavior when a keyword conflict is found. The default (None) is to drop the conflicting key.
"""
newheader = pyfits.Header()
for key,value in header1.items():
if key in header2:
try:
if value == header2[key]:
newheader[key] = value
elif if_conflict in ('1',1,'Header1'):
newheader[key] = value
elif if_conflict in ('2',2,'Header2'):
                    newheader[key] = header2[key]
except KeyError:
""" Assume pyfits doesn't want you to have that keyword
(because it shouldn't be possible to get here otherwise) """
pass
else:
try:
newheader[key] = value
except KeyError:
""" Assume pyfits doesn't want you to have that keyword
(because it shouldn't be possible to get here otherwise) """
pass
return newheader
|
vlas-sokolov/pyspeckit
|
pyspeckit/spectrum/headers.py
|
Python
|
mit
| 1,312 | 0.005335 |
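A quick usage sketch for intersection() above, with two small in-memory headers whose keyword values are assumptions; the import mirrors the fallback at the top of the module and assumes it is importable at the path shown:

try:
    import astropy.io.fits as pyfits
except ImportError:
    import pyfits
from pyspeckit.spectrum.headers import intersection  # the module above

h1 = pyfits.Header([('OBJECT', 'M31'), ('EXPTIME', 30.0)])
h2 = pyfits.Header([('OBJECT', 'M31'), ('EXPTIME', 60.0)])

# Shared keys with equal values survive; conflicting keys are dropped
# unless if_conflict picks a side.
print(list(intersection(h1, h2).keys()))                 # ['OBJECT']
print(intersection(h1, h2, if_conflict='1')['EXPTIME'])  # 30.0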
'''
This file is part of evopy.
Copyright 2012, Jendrik Poloczek
evopy is free software: you can redistribute it
and/or modify it under the terms of the GNU General Public License as published
by the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
evopy is distributed in the hope that it will be
useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
Public License for more details.
You should have received a copy of the GNU General Public License along with
evopy. If not, see <http://www.gnu.org/licenses/>.
'''
from sys import path
path.append("../../../..")
from numpy import matrix
from sklearn.cross_validation import KFold
from evopy.strategies.ori_dses_aligned_svc import ORIDSESAlignedSVC
from evopy.problems.tr_problem import TRProblem
from evopy.problems.schwefels_problem_26 import SchwefelsProblem26
from evopy.simulators.simulator import Simulator
from evopy.metamodel.dses_svc_linear_meta_model import DSESSVCLinearMetaModel
from evopy.operators.scaling.scaling_standardscore import ScalingStandardscore
from evopy.metamodel.cv.svc_cv_sklearn_grid_linear import SVCCVSkGridLinear
from evopy.operators.termination.accuracy import Accuracy
def get_method():
sklearn_cv = SVCCVSkGridLinear(\
C_range = [2 ** i for i in range(-1, 14, 2)],
cv_method = KFold(20, 5))
meta_model = DSESSVCLinearMetaModel(\
window_size = 10,
scaling = ScalingStandardscore(),
crossvalidation = sklearn_cv,
repair_mode = 'mirror')
method = ORIDSESAlignedSVC(\
mu = 15,
lambd = 100,
theta = 0.3,
pi = 70,
initial_sigma = matrix([[4.5, 4.5]]),
delta = 4.5,
tau0 = 0.5,
tau1 = 0.6,
initial_pos = matrix([[10.0, 10.0]]),
beta = 1.0,
meta_model = meta_model)
return method
if __name__ == "__main__":
optimizer = get_method()
problem = SchwefelsProblem26()
optfit = problem.optimum_fitness()
sim = Simulator(optimizer, problem, Accuracy(optfit, 10**(-6)))
results = sim.simulate()
|
jpzk/evopy
|
evopy/examples/problems/SchwefelsProblem26/ORIDSESAlignedSVC.py
|
Python
|
gpl-3.0
| 2,196 | 0.023679 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import functools
from typing import Any, AsyncIterable, Callable, Dict, Generic, List, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._app_service_plans_operations import build_create_or_update_request_initial, build_create_or_update_vnet_route_request, build_delete_hybrid_connection_request, build_delete_request, build_delete_vnet_route_request, build_get_hybrid_connection_plan_limit_request, build_get_hybrid_connection_request, build_get_request, build_get_route_for_vnet_request, build_get_server_farm_skus_request, build_get_vnet_from_server_farm_request, build_get_vnet_gateway_request, build_list_by_resource_group_request, build_list_capabilities_request, build_list_hybrid_connection_keys_request, build_list_hybrid_connections_request, build_list_request, build_list_routes_for_vnet_request, build_list_usages_request, build_list_vnets_request, build_list_web_apps_by_hybrid_connection_request, build_list_web_apps_request, build_reboot_worker_request, build_restart_web_apps_request, build_update_request, build_update_vnet_gateway_request, build_update_vnet_route_request
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AppServicePlansOperations:
"""AppServicePlansOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.web.v2021_01_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
@distributed_trace
def list(
self,
detailed: Optional[bool] = None,
**kwargs: Any
) -> AsyncIterable["_models.AppServicePlanCollection"]:
"""Get all App Service plans for a subscription.
Description for Get all App Service plans for a subscription.
:param detailed: Specify :code:`<code>true</code>` to return all App Service plan properties.
The default is :code:`<code>false</code>`, which returns a subset of the properties.
Retrieval of all properties may increase the API latency.
:type detailed: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AppServicePlanCollection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_01.models.AppServicePlanCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServicePlanCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_request(
subscription_id=self._config.subscription_id,
detailed=detailed,
template_url=self.list.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_request(
subscription_id=self._config.subscription_id,
detailed=detailed,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("AppServicePlanCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Web/serverfarms'} # type: ignore
@distributed_trace
def list_by_resource_group(
self,
resource_group_name: str,
**kwargs: Any
) -> AsyncIterable["_models.AppServicePlanCollection"]:
"""Get all App Service plans in a resource group.
Description for Get all App Service plans in a resource group.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AppServicePlanCollection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_01.models.AppServicePlanCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServicePlanCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=self.list_by_resource_group.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_by_resource_group_request(
resource_group_name=resource_group_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("AppServicePlanCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_by_resource_group.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms'} # type: ignore
@distributed_trace_async
async def get(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> Optional["_models.AppServicePlan"]:
"""Get an App Service plan.
Description for Get an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AppServicePlan, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.AppServicePlan or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.AppServicePlan"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.get.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('AppServicePlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}'} # type: ignore
async def _create_or_update_initial(
self,
resource_group_name: str,
name: str,
app_service_plan: "_models.AppServicePlan",
**kwargs: Any
) -> "_models.AppServicePlan":
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServicePlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(app_service_plan, 'AppServicePlan')
request = build_create_or_update_request_initial(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self._create_or_update_initial.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AppServicePlan', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AppServicePlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}'} # type: ignore
@distributed_trace_async
async def begin_create_or_update(
self,
resource_group_name: str,
name: str,
app_service_plan: "_models.AppServicePlan",
**kwargs: Any
) -> AsyncLROPoller["_models.AppServicePlan"]:
"""Creates or updates an App Service Plan.
Description for Creates or updates an App Service Plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param app_service_plan: Details of the App Service plan.
:type app_service_plan: ~azure.mgmt.web.v2021_01_01.models.AppServicePlan
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
this operation to not poll, or pass in your own initialized polling object for a personal
polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either AppServicePlan or the result of
cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.web.v2021_01_01.models.AppServicePlan]
:raises: ~azure.core.exceptions.HttpResponseError
"""
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
polling = kwargs.pop('polling', True) # type: Union[bool, azure.core.polling.AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServicePlan"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
name=name,
app_service_plan=app_service_plan,
content_type=content_type,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
def get_long_running_output(pipeline_response):
response = pipeline_response.http_response
deserialized = self._deserialize('AppServicePlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True: polling_method = AsyncARMPolling(lro_delay, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}'} # type: ignore
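    # Usage sketch (illustrative only, not part of the generated client): assuming
    # an authenticated aio WebSiteManagementClient named `client` and the
    # AppServicePlan model imported from the models namespace, the long-running
    # operation is driven through the returned poller:
    #
    #     poller = await client.app_service_plans.begin_create_or_update(
    #         resource_group_name="example-rg",                 # assumed name
    #         name="example-plan",                              # assumed name
    #         app_service_plan=AppServicePlan(location="westus2"),
    #     )
    #     plan = await poller.result()  # resolves once the LRO reaches a terminal state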
@distributed_trace_async
async def delete(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> None:
"""Delete an App Service plan.
Description for Delete an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.delete.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}'} # type: ignore
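    # Usage sketch (assumption: `client` is an authenticated aio
    # WebSiteManagementClient): the operation returns None on 200/204, and a
    # missing plan surfaces as ResourceNotFoundError through the error_map above.
    #
    #     await client.app_service_plans.delete("example-rg", "example-plan")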
@distributed_trace_async
async def update(
self,
resource_group_name: str,
name: str,
app_service_plan: "_models.AppServicePlanPatchResource",
**kwargs: Any
) -> "_models.AppServicePlan":
"""Creates or updates an App Service Plan.
Description for Creates or updates an App Service Plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param app_service_plan: Details of the App Service plan.
:type app_service_plan: ~azure.mgmt.web.v2021_01_01.models.AppServicePlanPatchResource
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AppServicePlan, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.AppServicePlan
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AppServicePlan"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(app_service_plan, 'AppServicePlanPatchResource')
request = build_update_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.update.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AppServicePlan', pipeline_response)
if response.status_code == 202:
deserialized = self._deserialize('AppServicePlan', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}'} # type: ignore
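    # Usage sketch (names are assumptions): unlike begin_create_or_update, `update`
    # sends an AppServicePlanPatchResource and returns the updated plan directly,
    # with no poller involved:
    #
    #     patch = AppServicePlanPatchResource(per_site_scaling=True)
    #     plan = await client.app_service_plans.update("example-rg", "example-plan",
    #                                                  app_service_plan=patch)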
@distributed_trace_async
async def list_capabilities(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> List["_models.Capability"]:
"""List all capabilities of an App Service plan.
Description for List all capabilities of an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of Capability, or the result of cls(response)
:rtype: list[~azure.mgmt.web.v2021_01_01.models.Capability]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.Capability"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_capabilities_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.list_capabilities.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[Capability]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_capabilities.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/capabilities'} # type: ignore
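    # Usage sketch (assumption: `client` as above): the response is deserialized as
    # a plain list of Capability models rather than a pager, so it is awaited once:
    #
    #     capabilities = await client.app_service_plans.list_capabilities(
    #         "example-rg", "example-plan")
    #     names = [c.name for c in capabilities]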
@distributed_trace_async
async def get_hybrid_connection(
self,
resource_group_name: str,
name: str,
namespace_name: str,
relay_name: str,
**kwargs: Any
) -> "_models.HybridConnection":
"""Retrieve a Hybrid Connection in use in an App Service plan.
Description for Retrieve a Hybrid Connection in use in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param namespace_name: Name of the Service Bus namespace.
:type namespace_name: str
:param relay_name: Name of the Service Bus relay.
:type relay_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: HybridConnection, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.HybridConnection
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.HybridConnection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_hybrid_connection_request(
resource_group_name=resource_group_name,
name=name,
namespace_name=namespace_name,
relay_name=relay_name,
subscription_id=self._config.subscription_id,
template_url=self.get_hybrid_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('HybridConnection', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_hybrid_connection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/hybridConnectionNamespaces/{namespaceName}/relays/{relayName}'} # type: ignore
@distributed_trace_async
async def delete_hybrid_connection(
self,
resource_group_name: str,
name: str,
namespace_name: str,
relay_name: str,
**kwargs: Any
) -> None:
"""Delete a Hybrid Connection in use in an App Service plan.
Description for Delete a Hybrid Connection in use in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param namespace_name: Name of the Service Bus namespace.
:type namespace_name: str
:param relay_name: Name of the Service Bus relay.
:type relay_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_hybrid_connection_request(
resource_group_name=resource_group_name,
name=name,
namespace_name=namespace_name,
relay_name=relay_name,
subscription_id=self._config.subscription_id,
template_url=self.delete_hybrid_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_hybrid_connection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/hybridConnectionNamespaces/{namespaceName}/relays/{relayName}'} # type: ignore
@distributed_trace_async
async def list_hybrid_connection_keys(
self,
resource_group_name: str,
name: str,
namespace_name: str,
relay_name: str,
**kwargs: Any
) -> "_models.HybridConnectionKey":
"""Get the send key name and value of a Hybrid Connection.
Description for Get the send key name and value of a Hybrid Connection.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param namespace_name: The name of the Service Bus namespace.
:type namespace_name: str
:param relay_name: The name of the Service Bus relay.
:type relay_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: HybridConnectionKey, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.HybridConnectionKey
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.HybridConnectionKey"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_hybrid_connection_keys_request(
resource_group_name=resource_group_name,
name=name,
namespace_name=namespace_name,
relay_name=relay_name,
subscription_id=self._config.subscription_id,
template_url=self.list_hybrid_connection_keys.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('HybridConnectionKey', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_hybrid_connection_keys.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/hybridConnectionNamespaces/{namespaceName}/relays/{relayName}/listKeys'} # type: ignore
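    # Usage sketch (names are assumptions): the response carries the relay's send
    # key, so treat `send_key_value` as a secret and avoid logging it:
    #
    #     key = await client.app_service_plans.list_hybrid_connection_keys(
    #         "example-rg", "example-plan", "example-namespace", "example-relay")
    #     print(key.send_key_name)  # do not print key.send_key_value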
@distributed_trace
def list_web_apps_by_hybrid_connection(
self,
resource_group_name: str,
name: str,
namespace_name: str,
relay_name: str,
**kwargs: Any
) -> AsyncIterable["_models.ResourceCollection"]:
"""Get all apps that use a Hybrid Connection in an App Service Plan.
Description for Get all apps that use a Hybrid Connection in an App Service Plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param namespace_name: Name of the Hybrid Connection namespace.
:type namespace_name: str
:param relay_name: Name of the Hybrid Connection relay.
:type relay_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ResourceCollection or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_01.models.ResourceCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ResourceCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_web_apps_by_hybrid_connection_request(
resource_group_name=resource_group_name,
name=name,
namespace_name=namespace_name,
relay_name=relay_name,
subscription_id=self._config.subscription_id,
template_url=self.list_web_apps_by_hybrid_connection.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_web_apps_by_hybrid_connection_request(
resource_group_name=resource_group_name,
name=name,
namespace_name=namespace_name,
relay_name=relay_name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("ResourceCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_web_apps_by_hybrid_connection.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/hybridConnectionNamespaces/{namespaceName}/relays/{relayName}/sites'} # type: ignore
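    # Usage sketch (assumption: `client` as above): paged operations return an
    # AsyncItemPaged without being awaited; iteration fetches pages lazily via the
    # next_link handling above, and each item is a resource name string per the
    # ResourceCollection model:
    #
    #     pager = client.app_service_plans.list_web_apps_by_hybrid_connection(
    #         "example-rg", "example-plan", "example-namespace", "example-relay")
    #     async for site_name in pager:
    #         print(site_name)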
@distributed_trace_async
async def get_hybrid_connection_plan_limit(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> "_models.HybridConnectionLimits":
"""Get the maximum number of Hybrid Connections allowed in an App Service plan.
Description for Get the maximum number of Hybrid Connections allowed in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: HybridConnectionLimits, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.HybridConnectionLimits
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.HybridConnectionLimits"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_hybrid_connection_plan_limit_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.get_hybrid_connection_plan_limit.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('HybridConnectionLimits', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_hybrid_connection_plan_limit.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/hybridConnectionPlanLimits/limit'} # type: ignore
@distributed_trace
def list_hybrid_connections(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> AsyncIterable["_models.HybridConnectionCollection"]:
"""Retrieve all Hybrid Connections in use in an App Service plan.
Description for Retrieve all Hybrid Connections in use in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either HybridConnectionCollection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_01.models.HybridConnectionCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.HybridConnectionCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_hybrid_connections_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.list_hybrid_connections.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_hybrid_connections_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("HybridConnectionCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_hybrid_connections.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/hybridConnectionRelays'} # type: ignore
@distributed_trace_async
async def restart_web_apps(
self,
resource_group_name: str,
name: str,
soft_restart: Optional[bool] = None,
**kwargs: Any
) -> None:
"""Restart all apps in an App Service plan.
Description for Restart all apps in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param soft_restart: Specify :code:`<code>true</code>` to perform a soft restart, applies the
configuration settings and restarts the apps if necessary. The default is
:code:`<code>false</code>`, which always restarts and reprovisions the apps.
:type soft_restart: bool
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_restart_web_apps_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
soft_restart=soft_restart,
template_url=self.restart_web_apps.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
restart_web_apps.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/restartSites'} # type: ignore
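    # Usage sketch (names are assumptions): a soft restart re-applies configuration
    # and restarts apps only when needed, whereas the default restarts and
    # reprovisions every app in the plan:
    #
    #     await client.app_service_plans.restart_web_apps(
    #         "example-rg", "example-plan", soft_restart=True)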
@distributed_trace
def list_web_apps(
self,
resource_group_name: str,
name: str,
skip_token: Optional[str] = None,
filter: Optional[str] = None,
top: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.WebAppCollection"]:
"""Get all apps associated with an App Service plan.
Description for Get all apps associated with an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
        :param skip_token: Skip to a web app in the list of web apps associated with the App
         Service plan. If specified, the resulting list will contain web apps starting from
         (and including) the skipToken. Otherwise, the resulting list contains web apps from
         the start of the list.
:type skip_token: str
:param filter: Supported filter: $filter=state eq running. Returns only web apps that are
currently running.
:type filter: str
:param top: List page size. If specified, results are paged.
:type top: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either WebAppCollection or the result of cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_01.models.WebAppCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.WebAppCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_web_apps_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
skip_token=skip_token,
filter=filter,
top=top,
template_url=self.list_web_apps.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_web_apps_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
skip_token=skip_token,
filter=filter,
top=top,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("WebAppCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_web_apps.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/sites'} # type: ignore
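    # Usage sketch (assumption: `client` as above): only the running-state filter
    # documented in the docstring is supported; the pager yields Site models:
    #
    #     async for site in client.app_service_plans.list_web_apps(
    #             "example-rg", "example-plan", filter="state eq running"):
    #         print(site.name)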
@distributed_trace_async
async def get_server_farm_skus(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> Any:
"""Gets all selectable SKUs for a given App Service Plan.
Description for Gets all selectable SKUs for a given App Service Plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of App Service Plan.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: any, or the result of cls(response)
:rtype: any
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Any]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_server_farm_skus_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.get_server_farm_skus.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('object', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_server_farm_skus.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/skus'} # type: ignore
@distributed_trace
def list_usages(
self,
resource_group_name: str,
name: str,
filter: Optional[str] = None,
**kwargs: Any
) -> AsyncIterable["_models.CsmUsageQuotaCollection"]:
"""Gets server farm usage information.
Description for Gets server farm usage information.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of App Service Plan.
:type name: str
:param filter: Return only usages/metrics specified in the filter. Filter conforms to odata
syntax. Example: $filter=(name.value eq 'Metric1' or name.value eq 'Metric2').
:type filter: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either CsmUsageQuotaCollection or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.web.v2021_01_01.models.CsmUsageQuotaCollection]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.CsmUsageQuotaCollection"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
def prepare_request(next_link=None):
if not next_link:
request = build_list_usages_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=self.list_usages.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
request = build_list_usages_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
filter=filter,
template_url=next_link,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("CsmUsageQuotaCollection", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list_usages.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/usages'} # type: ignore
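    # Usage sketch (names are assumptions): usages can be narrowed with the OData
    # metric-name filter shown in the docstring:
    #
    #     usage_filter = "(name.value eq 'Metric1' or name.value eq 'Metric2')"
    #     async for quota in client.app_service_plans.list_usages(
    #             "example-rg", "example-plan", filter=usage_filter):
    #         print(quota.name.value, quota.current_value, quota.limit)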
@distributed_trace_async
async def list_vnets(
self,
resource_group_name: str,
name: str,
**kwargs: Any
) -> List["_models.VnetInfo"]:
"""Get all Virtual Networks associated with an App Service plan.
Description for Get all Virtual Networks associated with an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VnetInfo, or the result of cls(response)
:rtype: list[~azure.mgmt.web.v2021_01_01.models.VnetInfo]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VnetInfo"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_vnets_request(
resource_group_name=resource_group_name,
name=name,
subscription_id=self._config.subscription_id,
template_url=self.list_vnets.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VnetInfo]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_vnets.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections'} # type: ignore
@distributed_trace_async
async def get_vnet_from_server_farm(
self,
resource_group_name: str,
name: str,
vnet_name: str,
**kwargs: Any
) -> Optional["_models.VnetInfo"]:
"""Get a Virtual Network associated with an App Service plan.
Description for Get a Virtual Network associated with an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VnetInfo, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.VnetInfo or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VnetInfo"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_vnet_from_server_farm_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
subscription_id=self._config.subscription_id,
template_url=self.get_vnet_from_server_farm.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VnetInfo', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_vnet_from_server_farm.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}'} # type: ignore
@distributed_trace_async
async def get_vnet_gateway(
self,
resource_group_name: str,
name: str,
vnet_name: str,
gateway_name: str,
**kwargs: Any
) -> "_models.VnetGateway":
"""Get a Virtual Network gateway.
Description for Get a Virtual Network gateway.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:param gateway_name: Name of the gateway. Only the 'primary' gateway is supported.
:type gateway_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VnetGateway, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.VnetGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VnetGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_vnet_gateway_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
gateway_name=gateway_name,
subscription_id=self._config.subscription_id,
template_url=self.get_vnet_gateway.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('VnetGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_vnet_gateway.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}/gateways/{gatewayName}'} # type: ignore
@distributed_trace_async
async def update_vnet_gateway(
self,
resource_group_name: str,
name: str,
vnet_name: str,
gateway_name: str,
connection_envelope: "_models.VnetGateway",
**kwargs: Any
) -> "_models.VnetGateway":
"""Update a Virtual Network gateway.
Description for Update a Virtual Network gateway.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:param gateway_name: Name of the gateway. Only the 'primary' gateway is supported.
:type gateway_name: str
:param connection_envelope: Definition of the gateway.
:type connection_envelope: ~azure.mgmt.web.v2021_01_01.models.VnetGateway
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VnetGateway, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.VnetGateway
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.VnetGateway"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(connection_envelope, 'VnetGateway')
request = build_update_vnet_gateway_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
gateway_name=gateway_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.update_vnet_gateway.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('VnetGateway', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_vnet_gateway.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}/gateways/{gatewayName}'} # type: ignore
@distributed_trace_async
async def list_routes_for_vnet(
self,
resource_group_name: str,
name: str,
vnet_name: str,
**kwargs: Any
) -> List["_models.VnetRoute"]:
"""Get all routes that are associated with a Virtual Network in an App Service plan.
Description for Get all routes that are associated with a Virtual Network in an App Service
plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VnetRoute, or the result of cls(response)
:rtype: list[~azure.mgmt.web.v2021_01_01.models.VnetRoute]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[List["_models.VnetRoute"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_list_routes_for_vnet_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
subscription_id=self._config.subscription_id,
template_url=self.list_routes_for_vnet.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = self._deserialize('[VnetRoute]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
list_routes_for_vnet.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}/routes'} # type: ignore
@distributed_trace_async
async def get_route_for_vnet(
self,
resource_group_name: str,
name: str,
vnet_name: str,
route_name: str,
**kwargs: Any
) -> Optional[List["_models.VnetRoute"]]:
"""Get a Virtual Network route in an App Service plan.
Description for Get a Virtual Network route in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:param route_name: Name of the Virtual Network route.
:type route_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: list of VnetRoute, or the result of cls(response)
:rtype: list[~azure.mgmt.web.v2021_01_01.models.VnetRoute] or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional[List["_models.VnetRoute"]]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_get_route_for_vnet_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
route_name=route_name,
subscription_id=self._config.subscription_id,
template_url=self.get_route_for_vnet.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('[VnetRoute]', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get_route_for_vnet.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}/routes/{routeName}'} # type: ignore
@distributed_trace_async
async def create_or_update_vnet_route(
self,
resource_group_name: str,
name: str,
vnet_name: str,
route_name: str,
route: "_models.VnetRoute",
**kwargs: Any
) -> Optional["_models.VnetRoute"]:
"""Create or update a Virtual Network route in an App Service plan.
Description for Create or update a Virtual Network route in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:param route_name: Name of the Virtual Network route.
:type route_name: str
:param route: Definition of the Virtual Network route.
:type route: ~azure.mgmt.web.v2021_01_01.models.VnetRoute
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VnetRoute, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.VnetRoute or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VnetRoute"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(route, 'VnetRoute')
request = build_create_or_update_vnet_route_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
route_name=route_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.create_or_update_vnet_route.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 400, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VnetRoute', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
create_or_update_vnet_route.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}/routes/{routeName}'} # type: ignore
@distributed_trace_async
async def delete_vnet_route(
self,
resource_group_name: str,
name: str,
vnet_name: str,
route_name: str,
**kwargs: Any
) -> None:
"""Delete a Virtual Network route in an App Service plan.
Description for Delete a Virtual Network route in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:param route_name: Name of the Virtual Network route.
:type route_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_delete_vnet_route_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
route_name=route_name,
subscription_id=self._config.subscription_id,
template_url=self.delete_vnet_route.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
delete_vnet_route.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}/routes/{routeName}'} # type: ignore
@distributed_trace_async
async def update_vnet_route(
self,
resource_group_name: str,
name: str,
vnet_name: str,
route_name: str,
route: "_models.VnetRoute",
**kwargs: Any
) -> Optional["_models.VnetRoute"]:
"""Create or update a Virtual Network route in an App Service plan.
Description for Create or update a Virtual Network route in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param vnet_name: Name of the Virtual Network.
:type vnet_name: str
:param route_name: Name of the Virtual Network route.
:type route_name: str
:param route: Definition of the Virtual Network route.
:type route: ~azure.mgmt.web.v2021_01_01.models.VnetRoute
:keyword callable cls: A custom type or function that will be passed the direct response
:return: VnetRoute, or the result of cls(response)
:rtype: ~azure.mgmt.web.v2021_01_01.models.VnetRoute or None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.VnetRoute"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
content_type = kwargs.pop('content_type', "application/json") # type: Optional[str]
_json = self._serialize.body(route, 'VnetRoute')
request = build_update_vnet_route_request(
resource_group_name=resource_group_name,
name=name,
vnet_name=vnet_name,
route_name=route_name,
subscription_id=self._config.subscription_id,
content_type=content_type,
json=_json,
template_url=self.update_vnet_route.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 400, 404]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('VnetRoute', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_vnet_route.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/virtualNetworkConnections/{vnetName}/routes/{routeName}'} # type: ignore
@distributed_trace_async
async def reboot_worker(
self,
resource_group_name: str,
name: str,
worker_name: str,
**kwargs: Any
) -> None:
"""Reboot a worker machine in an App Service plan.
Description for Reboot a worker machine in an App Service plan.
:param resource_group_name: Name of the resource group to which the resource belongs.
:type resource_group_name: str
:param name: Name of the App Service plan.
:type name: str
:param worker_name: Name of worker machine, which typically starts with RD.
:type worker_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
request = build_reboot_worker_request(
resource_group_name=resource_group_name,
name=name,
worker_name=worker_name,
subscription_id=self._config.subscription_id,
template_url=self.reboot_worker.metadata['url'],
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.DefaultErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
reboot_worker.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Web/serverfarms/{name}/workers/{workerName}/reboot'} # type: ignore
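    # Usage sketch (illustrative, all concrete values are assumptions): these
    # operations hang off the aio management client, which should be used as an
    # async context manager so the underlying transport is closed cleanly:
    #
    #     from azure.identity.aio import DefaultAzureCredential
    #     from azure.mgmt.web.aio import WebSiteManagementClient
    #
    #     async with WebSiteManagementClient(DefaultAzureCredential(),
    #                                        "<subscription-id>") as client:
    #         await client.app_service_plans.reboot_worker(
    #             "example-rg", "example-plan", "RD0000000000")  # worker names typically start with RD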
| Azure/azure-sdk-for-python | sdk/appservice/azure-mgmt-web/azure/mgmt/web/v2021_01_01/aio/operations/_app_service_plans_operations.py | Python | mit | 83,276 | 0.004731 |
# Copyright (c) 2016-2021 Renata Hodovan, Akos Kiss.
#
# Licensed under the BSD 3-Clause License
# <LICENSE.rst or https://opensource.org/licenses/BSD-3-Clause>.
# This file may not be copied, modified, or distributed except
# according to those terms.
from functools import cmp_to_key
from urwid import *
from .decor_widgets import PatternBox
from .graphics import fz_box_pattern
class TableRowsListWalker(ListWalker):
def __init__(self, table, sort=None):
self.table = table
self.sort = sort
self.focus = 0
self.rows = []
super().__init__()
def __getitem__(self, position):
if position < 0 or position >= len(self.rows):
raise IndexError
return self.rows[position]
def __delitem__(self, index):
if -1 < index < len(self.rows):
del self.rows[index]
self._modified()
def __len__(self):
return len(self.rows)
def add(self, item):
self.rows.append(item)
self._modified()
def insert(self, *args):
self.rows.insert(*args)
self._modified()
def clear(self):
self.focus = 0
del self.rows[:]
def remove(self, value):
self.rows.remove(value)
    # ListWalker API: return the position immediately after `position`, or raise
    # IndexError when walking past the end of the list.
    def next_position(self, position):
        index = position + 1
        if position >= len(self.rows):
            raise IndexError
        return index
    # ListWalker API: return the position immediately before `position`, or raise
    # IndexError when walking past the start of the list.
    def prev_position(self, position):
        index = position - 1
        if position < 0:
            raise IndexError
        return index
def set_focus(self, position):
self.rows[self.focus].unhighlight()
self.focus = position
self.rows[self.focus].highlight()
def set_sort_column(self, column, **kwargs):
self._modified()
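# A minimal sketch of how a walker like the one above is exercised (the names here
# are placeholders; real usage supplies row widgets that implement highlight() and
# unhighlight(), which set_focus() calls):
#
#     walker = TableRowsListWalker(table=my_table)  # my_table is assumed to exist
#     walker.add(some_row)                          # some_row: a TableRow-like widget
#     listbox = ListBox(walker)                     # urwid pulls rows via __getitem__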
# Wraps a ListBox. (The original version contained two columns: the row content and a scrollbar.)
class ScrollingListBox(WidgetWrap):
signals = ['select', 'load_more']
def __init__(self, body, infinite=False):
self.infinite = infinite
self.requery = False
self.height = 0
self.listbox = ListBox(body)
self.body = self.listbox.body
self.ends_visible = self.listbox.ends_visible
super().__init__(self.listbox)
def keypress(self, size, key):
if key == 'home':
if self.body: # len(self.body) != 0
self.focus_position = 0
self._invalidate()
return key
if key == 'end':
if self.body: # len(self.body) != 0
self.focus_position = len(self.body) - 1
self._invalidate()
return key
if key in ['page down', 'down'] and self.infinite and self.focus_position == len(self.body) - 1:
self.requery = True
self._invalidate()
return None
if key == 'enter':
if self.body: # len(self.body) != 0
emit_signal(self, 'select', self, self.selection)
return None
if key == 'left':
return None
return super().keypress(size, key)
def render(self, size, focus=False):
maxcol, maxrow = size
if self.requery and 'bottom' in self.ends_visible((maxcol, maxrow)):
self.requery = False
emit_signal(self, 'load_more', len(self.body))
self.height = maxrow
return super().render((maxcol, maxrow), focus)
@property
def focus(self):
return self.listbox.focus
@property
def focus_position(self):
if self.listbox.body: # len(self.listbox.body) != 0
return self.listbox.focus_position
return 0
@focus_position.setter
def focus_position(self, value):
self.listbox.focus_position = value
self.listbox._invalidate()
@property
def row_count(self):
return len(self.listbox.body)
@property
def selection(self):
if self.body: # len(self.body) != 0
return self.body[self.focus_position]
return None
class TableColumn(object):
align = 'left'
wrap = 'space'
padding = None
def __init__(self, name, label=None, width=('weight', 1),
format_fn=None,
sort_key=None, sort_fn=None, sort_reverse=False):
self.name = name
self.label = label if label else name
self.format_fn = format_fn
self.sort_key = sort_key
self.sort_fn = sort_fn
self.sort_reverse = sort_reverse
self.sizing, self.width = width
def _format(self, v):
if isinstance(v, str):
return Text(v, align=self.align, wrap=self.wrap)
# First, call the format function for the column, if there is one
if self.format_fn:
try:
v = self.format_fn(v)
except TypeError:
return Text('', align=self.align, wrap=self.wrap)
return self.format(v)
def format(self, v):
# Do our best to make the value into something presentable
if v is None:
v = ''
elif isinstance(v, int):
v = '%d' % v
elif isinstance(v, float):
v = '%.03f' % v
# If v doesn't match any of the previous options, then it might be a Widget.
if not isinstance(v, Widget):
return Text(v, align=self.align, wrap=self.wrap)
return v
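# e.g. format(3) -> Text('3'), format(3.14159) -> Text('3.142'),
# format(None) -> Text(''); Widget instances are returned unchanged.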
class HeaderColumns(Columns):
def __init__(self, contents):
self.selected_column = None
super().__init__(contents)
def __setitem__(self, i, v):
self.contents[i * 2] = (v, self.contents[i * 2][1])
class BodyColumns(Columns):
def __init__(self, contents, header=None):
self.header = header
super().__init__(contents)
@property
def selected_column(self):
return self.header.selected_column
@selected_column.setter
def selected_column(self, value):
self.header.selected_column = value
class TableCell(WidgetWrap):
signals = ['click', 'select']
def __init__(self, table, column, row, value):
self.table = table
self.column = column
self.row = row
self.value = value
self.contents = self.column._format(self.value)
padding = self.column.padding or self.table.padding
self.padding = Padding(self.contents, left=padding, right=padding)
self.attr = AttrMap(self.padding, attr_map=row.attr_map, focus_map=row.focus_map)
super().__init__(self.attr)
def selectable(self):
return isinstance(self.row, TableBodyRow)
def highlight(self):
self.attr.set_attr_map(self.row.focus_map)
def unhighlight(self):
self.attr.set_attr_map(self.row.attr_map)
def set_attr_map(self, attr_map):
self.attr.set_attr_map(attr_map)
def set_focus_map(self, focus_map):
self.attr.set_focus_map(focus_map)
def keypress(self, size, key):
if key == 'enter':
emit_signal(self, 'select')
return key
# Override the mouse_event method (the parameter list is fixed by urwid).
def mouse_event(self, size, event, button, col, row, focus):
if event == 'mouse press':
emit_signal(self, 'click')
class TableRow(WidgetWrap):
attr_map = {}
focus_map = {}
border_char = ' '
column_class = Columns # To be redefined by subclasses.
decorate = True
_selectable = True
def __init__(self, table, data,
header=None,
cell_click=None, cell_select=None,
attr_map=None, focus_map=None):
self.table = table
if isinstance(data, (list, tuple)):
self.data = dict(zip([c.name for c in self.table.columns], data))
elif isinstance(data, dict):
self.data = data
self.header = header
self.cell_click = cell_click
self.cell_select = cell_select
self.contents = []
if self.decorate:
if attr_map:
self.attr_map = attr_map
elif table.attr_map:
self.attr_map.update(table.attr_map)
if focus_map:
self.focus_map = focus_map
elif table.focus_map:
self.focus_map.update(table.focus_map)
# Create tuples to describe the sizing of the column.
for i, col in enumerate(self.table.columns):
lst = []
if col.sizing == 'weight':
lst.extend([col.sizing, col.width])
else:
lst.append(col.width)
cell = TableCell(self.table, col, self, self.data.get(col.name, None))
if self.cell_click:
connect_signal(cell, 'click', self.cell_click, i * 2)
if self.cell_select:
connect_signal(cell, 'select', self.cell_select, i * 2)
lst.append(cell)
self.contents.append(tuple(lst))
if isinstance(table.border, tuple):
border_width = table.border[0]
elif isinstance(table.border, int):
border_width = table.border
else:
raise Exception('Invalid border specification: %s' % table.border)
self.row = self.column_class(self.contents)
if self.header:
self.row.header = self.header
self.row.selected_column = None
# content sep content sep ...
self.row.contents = sum(([x, (Divider(self.border_char), ('given', border_width, False))] for x in self.row.contents), [])
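# The flattening turns [c0, c1, ...] into [c0, sep, c1, sep, ...], so cell i
# ends up at index 2*i -- which is why cell() and __setitem__ index with i * 2.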
self.attr = AttrMap(self.row, attr_map=self.attr_map, focus_map=self.focus_map)
super().__init__(self.attr)
def __len__(self):
return len(self.contents)
def __getitem__(self, key):
return self.data.get(key, None)
def __iter__(self):
return iter(self.data)
def __setitem__(self, i, v):
self.row.contents[i * 2] = (v, self.row.options(self.table.columns[i].sizing,
self.table.columns[i].width))
@property
def focus(self):
return self.row.focus
def set_attr_map(self, attr_map):
self.attr.set_attr_map(attr_map)
def set_focus_map(self, focus_map):
self.attr.set_focus_map(focus_map)
def get(self, key, default):
if key in self:
return self[key]
return default
def _key(self):
return frozenset([self.get(c, None) for c in self.table.key_columns])
def cell(self, i):
return self.row[i * 2]
def highlight(self):
for x in self.contents:
x[-1].highlight()
def unhighlight(self):
for x in self.contents:
x[-1].unhighlight()
class TableBodyRow(TableRow):
column_class = BodyColumns
attr_map = {None: 'default'}
focus_map = {None: 'selected'}
class TableHeaderRow(TableRow):
signals = ['column_click']
column_class = HeaderColumns
decorate = False
def __init__(self, table, *args, **kwargs):
self.row = None
self.attr_map = {None: 'table_head'}
self.focus_map = {None: 'table_head'}
self.table = table
self.contents = [str(x.label) for x in self.table.columns]
super().__init__(
self.table,
self.contents,
cell_click=self.header_clicked,
cell_select=self.header_clicked,
*args, **kwargs)
@property
def selected_column(self):
return self.row.selected_column
@selected_column.setter
def selected_column(self, value):
self.row.selected_column = value
def header_clicked(self, index):
emit_signal(self, 'column_click', index)
def highlight_column(self, index):
if self.selected_column is not None:
self.row[self.selected_column].unhighlight()
self.row[index].highlight()
self.selected_column = index
class Table(WidgetWrap):
signals = ['select', 'refresh', 'focus', 'delete']
attr_map = {}
focus_map = {}
row_dict = {}
title = ''
columns = []
query_data = []
key_columns = None
sort_field = None
_selectable = True
def __init__(self, initial_sort=None, limit=None):
self.border = (1, ' ', 'table_border')
self.padding = 1
self.initial_sort = initial_sort
self.limit = limit
if not self.key_columns:
self.key_columns = self.columns
self.walker = TableRowsListWalker(self, sort=self.initial_sort)
self.listbox = ScrollingListBox(self.walker, infinite=self.limit)
self.selected_column = None
self.sort_reverse = False
# Forward 'select' signal to the caller of table.
connect_signal(self.listbox, 'select',
lambda source, selection: emit_signal(self, 'select', self, selection))
if self.limit:
connect_signal(self.listbox, 'load_more', self.load_more)
self.offset = 0
self.header = TableHeaderRow(self)
self.pile = Pile([('pack', self.header),
('weight', 1, self.listbox)])
self.pattern_box = PatternBox(self.pile, title=['[', ('border_title', ' {title} (0) '.format(title=self.title)), ']'], **fz_box_pattern())
self.attr = AttrMap(self.pattern_box, attr_map=self.attr_map)
super().__init__(self.attr)
connect_signal(self.header, 'column_click', lambda index: self.sort_by_column(index, toggle=True))
if self.initial_sort and self.initial_sort in [c.name for c in self.columns]:
self.sort_by_column(self.initial_sort, toggle=False)
else:
self.requery(self.query_data)
def update_header(self):
self.pattern_box.set_title(['[', ('border_title', ' {title} ({cnt}) '.format(title=self.title, cnt=len(self.walker))), ']'])
def __delitem__(self, i):
del self.body[i]
def __iter__(self):
return iter(self.body)
def __len__(self):
return len(self.body)
def __getitem__(self, i):
return self.body[i]
def __setitem__(self, i, v):
self.body[i] = v
def insert(self, i, v):
self.body.insert(i, v)
@property
def body(self):
return self.listbox.body
@property
def contents(self):
return self.listbox.listbox.contents
@property
def focus(self):
return self.listbox.focus
@property
def height(self):
return self.listbox.row_count + 1  # row_count is defined on ScrollingListBox, not on its body walker
@property
def focus_position(self):
return self.listbox.focus_position
@focus_position.setter
def focus_position(self, value):
self.listbox.focus_position = value
@property
def selection(self):
if self.body: # len(self.body) != 0
return self.body[self.focus_position]
return None
def add_row(self, data, position=None, attr_map=None, focus_map=None):
row = TableBodyRow(self, data, header=self.header.row, attr_map=attr_map, focus_map=focus_map)
if '_id' in data:
self.row_dict[data['_id']] = row
if not position:
self.walker.add(row)
else:
self.walker.insert(position, row)
self.update_header()
def update_row_style(self, row_id, attr_map, focus_map):
if not self.attr_map:
self.row_dict[row_id].attr_map = attr_map
else:
self.row_dict[row_id].attr_map = self.attr_map
self.row_dict[row_id].attr_map.update(attr_map)
if not self.focus_map:
self.row_dict[row_id].focus_map = focus_map
else:
self.row_dict[row_id].focus_map = self.focus_map
self.row_dict[row_id].focus_map.update(focus_map)
self.row_dict[row_id]._wrapped_widget.set_attr_map(self.row_dict[row_id].attr_map)
self.row_dict[row_id]._wrapped_widget.set_focus_map(self.row_dict[row_id].focus_map)
def clear(self):
self.listbox.body.clear()
def highlight_column(self, index):
self.header.highlight_column(index)
def load_more(self, offset):
self.requery(self.query_data, offset)
self._invalidate()
self.listbox._invalidate()
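# Infinite-scroll flow: on the last row, 'down'/'page down' arms
# ScrollingListBox.requery; the next render that reaches the bottom emits
# 'load_more' with the current row count, which arrives here as `offset`.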
# These two methods may be overridden in subclasses.
def query(self, data, sort=(None, None), offset=None):
sort_field, sort_reverse = sort
if sort_field:
def sort_natural_none_last(a, b):
if a is None:
return 1
if b is None:
return -1
return (a > b) - (a < b)
def sort_reverse_none_last(a, b):
if a is None:
return 1
if b is None:
return -1
return (b > a) - (b < a)  # reversed comparison; None still sorts last
if not sort_reverse:
sort_fn = cmp_to_key(sort_natural_none_last)
else:
sort_fn = cmp_to_key(sort_reverse_none_last)
data.sort(key=lambda x: sort_fn(x[sort_field]))
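# Illustration: with values 3, None, 1 in sort_field, the natural order
# yields 1, 3, None and the reverse order 3, 1, None -- None sorts last
# either way.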
if offset is not None:
r = data[offset:offset + self.limit]
else:
r = data
for d in r:
yield d
def requery(self, data, offset=0):
kwargs = {'sort': (self.sort_field, self.sort_reverse)}
if self.limit:
kwargs['offset'] = offset
if not offset:
self.clear()
if self.selected_column is not None:
self.highlight_column(self.selected_column)
for r in self.query(data, **kwargs):
if isinstance(r, (tuple, list)):
r = dict(zip([c.name for c in self.columns], r))
self.add_row(r)
self.update_header()
def sort_by_column(self, index=None, reverse=None, toggle=False):
if index is None:
if self.sort_field is None:
return
index = self.sort_field
if isinstance(index, str):
sort_field = index
for i, col in enumerate(self.columns):
if col.name == sort_field:
index = i * 2
break
else:
sort_field = self.columns[index // 2].name
if not isinstance(index, int):
raise Exception('invalid column index: %s' % index)
if reverse is not None:
self.sort_reverse = reverse ^ self.columns[index // 2].sort_reverse
elif not toggle or sort_field != self.sort_field:
self.sort_reverse = self.columns[index // 2].sort_reverse
else:
self.sort_reverse = not self.sort_reverse
self.sort_field = sort_field
self.selected_column = index
self.walker.set_sort_column(self.columns[index // 2], reverse=self.sort_reverse)
self.requery(self.query_data)
|
renatahodovan/fuzzinator
|
fuzzinator/ui/tui/table.py
|
Python
|
bsd-3-clause
| 18,857 | 0.000955 |
"""
Tests for the Video Branding configuration.
"""
from django.test import TestCase
from django.core.exceptions import ValidationError
from nose.plugins.attrib import attr
from branding.models import BrandingInfoConfig
@attr(shard=1)
class BrandingInfoConfigTest(TestCase):
"""
Test the BrandingInfoConfig model.
"""
def setUp(self):
super(BrandingInfoConfigTest, self).setUp()
self.configuration_string = """{
"CN": {
"url": "http://www.xuetangx.com",
"logo_src": "http://www.xuetangx.com/static/images/logo.png",
"logo_tag": "Video hosted by XuetangX.com"
}
}"""
self.config = BrandingInfoConfig(configuration=self.configuration_string)
def test_create(self):
"""
Tests creation of configuration.
"""
self.config.save()
self.assertEquals(self.config.configuration, self.configuration_string)
def test_clean_bad_json(self):
"""
Tests if bad Json string was given.
"""
self.config = BrandingInfoConfig(configuration='{"bad":"test"')
self.assertRaises(ValidationError, self.config.clean)
def test_get(self):
"""
Tests get configuration from saved string.
"""
self.config.enabled = True
self.config.save()
expected_config = {
"CN": {
"url": "http://www.xuetangx.com",
"logo_src": "http://www.xuetangx.com/static/images/logo.png",
"logo_tag": "Video hosted by XuetangX.com"
}
}
self.assertEquals(self.config.get_config(), expected_config)
def test_get_not_enabled(self):
"""
Tests get configuration that is not enabled.
"""
self.config.enabled = False
self.config.save()
self.assertEquals(self.config.get_config(), {})
|
naresh21/synergetics-edx-platform
|
lms/djangoapps/branding/tests/test_models.py
|
Python
|
agpl-3.0
| 1,936 | 0.001033 |
odd = [1,3,5,7,9]
even = [2,4,6,8,10]
answer = input("Please enter your number! \n")
answer = int(answer)
if answer in odd:
print("That is quite odd!")
elif answer in even:
print("That's even")
else:
print("You are broken!")
|
UTC-Coding/Benji-s-Python
|
Rob/Odd or Even.py
|
Python
|
gpl-3.0
| 238 | 0.037815 |
import numpy as np
import colorsys
from numpy import sin, cos, tan
import core
from core.anim import Animation
from core.obj import Point, Line, Vector, Curve
def update_p1(p1, t, tmax):
p1.x = np.cos(t)
def update_p2(p2, t, tmax):
p2.y = np.sin(t/3)
def update_p3(p3, t, tmax):
p3.pos = (p1.x, p2.y)
c = colorsys.rgb_to_hsv(*p3.color)
c = ((c[0]+anim.dt/(2*np.pi)) % 1, c[1], c[2])
p3.color = colorsys.hsv_to_rgb(*c)
def update_line(l, t, tmax):
l.p1 = (np.cos(t)/2, np.sin(t)/2)
l.p2 = (-np.cos(t)/2, -np.sin(t)/2)
def update_v(v, t, tmax):
r2 = np.sqrt(2)/4
c = r2 * cos(2*t)
s = r2/3 * sin(2*t)
v.hx = s - c
v.hy = c + s
def update_c(c, t, tmax):
c.set_params(
tmin=min(v.hx, p3.x),
tmax=max(v.hx, p3.x)
)
def update_seg1(s, t, tmax):
s.p1 = c.p1
s.p2 = (c.p1[0], 0)
def update_seg2(s, t, tmax):
s.p1 = c.p2
s.p2 = (c.p2[0], 0)
def update_seg3(s, t, tmax):
s.p1 = seg1.p2
s.p2 = seg2.p2
def update_circumf(c, t, tmax):
c.set_params(
tmin=c.tmin+anim.dt,
tmax=c.tmax+anim.dt
)
col = colorsys.rgb_to_hsv(*c.color)
col = ((col[0]+anim.dt/(2*np.pi)) % 1, col[1], col[2])
c.color = colorsys.hsv_to_rgb(*col)
def init(anim):
global p1, p2, p3, v, c, seg1, seg2
p1 = anim.create(
Point,
0, 0,
color='g', size=10,
update=update_p1
)
p2 = anim.create(
Point,
0, 0,
color='b', size=10,
update=update_p2
)
p3 = anim.create(
Point,
0, 0,
color='r', size=7,
update=update_p3
)
anim.create(
Line,
0, 0, 0, 0,
color='r',
update=update_line
)
v = anim.create(
Vector,
-.05, -.25,
color='b',
update=update_v
)
c = anim.create(
Curve,
lambda t: (t, sin(np.pi*t)),
-1, 1,
color='w',
update=update_c
)
seg1 = anim.create(
Line,
0, 0, 0, 0,
color='w',
lw=1,
update=update_seg1
)
seg2 = anim.create(
Line,
0, 0, 0, 0,
color='w',
lw=1,
update=update_seg2
)
anim.create(
Line,
0, 0, 0, 0,
color='w',
lw=1,
update=update_seg3
)
anim.create(
Curve,
lambda t: (cos(t), sin(t)),
0, np.pi/3,
color='g',
update=update_circumf
)
if __name__ == '__main__':
global anim
anim = Animation(dt=0.01, length=6*np.pi, init_func=init, repeat=True)
anim.play()
|
wqferr/AniMathors
|
01.py
|
Python
|
mit
| 2,630 | 0.004563 |
from __future__ import unicode_literals
from optparse import make_option
from django.core.management.base import BaseCommand, CommandError
from django.db import transaction
from jenkins.management.helpers import import_jenkinsserver
# TODO: implement optional field updating...
class Command(BaseCommand):
help = "Import or update a JenkinsServer"
args = "[name] [url] [username] [password]"
option_list = BaseCommand.option_list + (
make_option(
"--update", action="store_true", dest="update",
default=False, help="Update if server already exists."),
)
def handle(self, *args, **options):
if len(args) != 4:
raise CommandError("must provide all parameters")
name, url, username, password = args
import_jenkinsserver(
name, url, username, password, update=options["update"],
stdout=self.stdout)
transaction.commit_unless_managed()
|
caio1982/capomastro
|
jenkins/management/commands/import_jenkinsserver.py
|
Python
|
mit
| 960 | 0 |
#!/usr/bin/env python
# coding: utf-8
# Copyright (c) 2013
# Gmail:liuzheng712
#
import time
def task():
print "task ..."
def timer(n):
while True:
print time.strftime('%Y-%m-%d %X', time.localtime())
task()
time.sleep(n)
if __name__ == '__main__':
timer(5)
|
windskyer/weibo
|
weibo/test/sleep.py
|
Python
|
apache-2.0
| 300 | 0.003333 |
import datetime
import re
import binascii
import random
import string
import logging
from django import forms
from django.db import models
from django.forms import fields
from django.utils.encoding import force_text, smart_bytes
import sys
from core.encryption.Factories import FactoryEncryptionServices as efac
from core.encryption.encryptionFieldsBase import encryptionBaseMethods as ebm
log = logging.getLogger(__name__)
class BaseField(models.Field):
def __init__(self, *args, **kwargs):
# Get the active encryption and key management services, if any
self.use_encryption = efac.use_encryption()
self.aes = efac.active_encryption_service() if self.use_encryption else None
self.akms = efac.active_key_management_service() if self.use_encryption else None
self.block_size = self.aes.block_size() if self.use_encryption else None
# Need to adjust the max length supplied in the user's field args to account for
# cipher block size and padding
if self.use_encryption:
user_specified_length = kwargs.get('max_length', 20)
unique = kwargs.get('unique', False)
max_length, usl = ebm._max_db_length (unique, user_specified_length, self.block_size, self.aes)
self.user_specified_max_length = usl
kwargs['max_length'] = max_length
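# Worked example (illustrative; the exact padding is up to ebm._max_db_length):
# with a 16-byte cipher block, a 20-character value pads up to 32 bytes of
# ciphertext, and the hex encoding in get_encrypted_value doubles that to
# 64 stored characters.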
models.Field.__init__(self, *args, **kwargs)
def _is_encrypted(self, value, key, iv):
'''
If value contains any non hex symbols or its length is odd, then it was
not encrypted because the encrypted values are all converted to ascii hex
before storing in db using the binascii.a2b_hex method which only operates
on even length values
'''
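# Illustration: 'deadbeef' is even-length hex, so it is handed on to the
# encryption service's own check; 'hello' (non-hex) or 'abc' (odd length)
# are treated as plaintext immediately.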
hexValues = True
# test to see if value is a hexadecimal
# get rid of extra spaces
value = value.strip()
try:
int(value, 16)
except ValueError:
hexValues = False
if not hexValues or (len(value) % 2) != 0:
return False
else:
# Have the encryption service verify if this is encrypted
return self.aes.is_encrypted(binascii.a2b_hex(value), key, iv)
def get_decrypted_value (self, value):
"""Converts the input value into the expected Python data type by
dehexifying and decrypting the value. It raises
django.core.exceptions.ValidationError if the data can't be converted.
Returns the converted value. """
if len(value.strip()) == 0:
return value
if self.use_encryption:
key = self.akms.get_key()
iv = self.akms.get_iv()
if self._is_encrypted(value, key, iv):
# dehexify and decrypt
decrypted_value = self.aes.decrypt(binascii.a2b_hex(value), key, iv)
# get rid of extra bytes
decrypted_value = decrypted_value.split(ebm._split_byte())
# forcing to string text
decrypted_value = force_text(decrypted_value[0])
return decrypted_value
else:
return value
else:
return value
def get_encrypted_value (self, value, connection=None, prepared=False):
'''
Perform preliminary non-db specific value checks and conversions:
convert value from unicode to full byte, encrypted string, otherwise encryption
service may fail according to django docs this is different than str(value)
and necessary to django internals
https://docs.djangoproject.com/en/dev/ref/unicode/
'''
if value is None:
return value
if len(value.strip()) == 0:
return value
# convert string value to bytes
value = smart_bytes(value, encoding='utf-8', strings_only=False, errors='strict')
if self.use_encryption:
key = self.akms.get_key()
iv = self.akms.get_iv()
if value and not self._is_encrypted(value, key, iv):
if len(value) > self.user_specified_max_length:
raise ValueError(
"Field value longer than max allowed: {0} > {1}".format(
str(len(value)),
self.user_specified_max_length
)
)
pad_length = ebm._padding_length(value, self.block_size)
if pad_length > 0:
value += ebm._split_byte() + ebm._semi_random_padding_string(pad_length-1)
value = self.aes.encrypt(value, key, iv)
if len(value) % 2 != 0:
# Some encryption services add a checksum byte which throws off the pad_length
value += ebm._split_byte()
value = binascii.b2a_hex(value)
# need to decode to string to store in database
value = value.decode("utf8")
return value
class EncryptCharField(BaseField):
# from_db_value is called in all circumstances when
# the data is loaded from the database
def from_db_value(self, value, expression, connection, context):
if value is None:
return value
return self.get_decrypted_value(value)
def get_internal_type(self):
return 'CharField'
def deconstruct(self):
name, path, args, kwargs = super(EncryptCharField, self).deconstruct()
kwargs["max_length"] = 255
return name, path, args, kwargs
def formfield(self, **kwargs):
"Returns a django.forms.Field instance for this database Field."
defaults = {'max_length': self.max_length}
defaults.update(kwargs)
return super(EncryptCharField, self).formfield(**defaults)
# method to convert data to encrypted format before they are stored in database
def get_db_prep_value(self, value, connection=None, prepared=False):
if self.use_encryption:
key = self.akms.get_key()
iv = self.akms.get_iv()
if value and not self._is_encrypted(value, key, iv):
if len(value) > self.user_specified_max_length:
raise ValueError(
"Field value longer than max allowed: {0} > {1}".format(
str(len(value)),
self.user_specified_max_length
)
)
return self.get_encrypted_value(value, connection=connection, prepared=prepared)
class EncryptDateField(BaseField):
def __init__(self, *args, **kwargs):
kwargs['max_length'] = 10 # YYYY:MM:DD format
super(EncryptDateField, self).__init__(*args, **kwargs)
# from_db_value is called in all circumstances
# when the data is loaded from the database
def from_db_value(self, value, expression, connection, context):
dv = None
if value in fields.EMPTY_VALUES:
dv = value
elif isinstance(value, datetime.date):
dv = value
else:
input_text = self.get_decrypted_value(value)
try:
dv = datetime.date(*[int(x) for x in input_text.split(':')])
except ValueError:
log.error("Decryption failed - old ehb values need to be updated")
return dv
def deconstruct(self):
name, path, args, kwargs = super(EncryptDateField, self).deconstruct()
kwargs["max_length"] = 10
return name, path, args, kwargs
def get_internal_type(self):
return 'CharField'
def formfield(self, **kwargs):
defaults = {'widget': forms.DateInput, 'form_class': forms.DateField}
defaults.update(kwargs)
return super(EncryptDateField, self).formfield(**defaults)
# for django custom fields, to_python() is called by deserialization
# and during the clean() method used from forms
def to_python(self, value):
dv = None
if value in fields.EMPTY_VALUES:
dv = value
elif isinstance(value, datetime.date):
dv = value
else:
input_text = self.get_decrypted_value(value)
try:
dv = datetime.date(*[int(x) for x in input_text.split('-')])
except:
dv = datetime.date(*[int(x) for x in input_text.split(':')])
return dv
# method to convert data to encrypted format before they are stored in database
def get_db_prep_value(self, value, connection=None, prepared=False):
if isinstance(value, datetime.date):
value = value.strftime('%Y:%m:%d')
return self.get_encrypted_value(value, connection=connection, prepared=prepared)
|
chop-dbhi/ehb-service
|
ehb_service/apps/core/encryption/encryptionFields.py
|
Python
|
bsd-2-clause
| 8,789 | 0.003186 |
# Task 3. Variant 10.
# Write a program that prints the name "Игорь Васильевич Лотарев", asks for his pen name, concatenates the two strings, and prints the result with the name and the pen name separated by a dash.
# Donkor A.H.
# 14.04.2016
print("So, today's guest is the Russian poet - Игорь Васильевич Лотарев")
print("Under what pen name do we know this man?")
input("\n\nYour answer: ")
print("\nCorrect: Игорь Васильевич Лотарев - Северянин, Игорь Васильевич")
input("\nPress Enter to exit")
|
Mariaanisimova/pythonintask
|
PMIa/2015/Donkor_A_H/task_3_10.py
|
Python
|
apache-2.0
| 928 | 0.018018 |
from __future__ import print_function
import sys, time
import requests, urllib
import demjson, shelve
import os.path
class Archiver:
def __init__(self):
"""
A class for archiving URLS into the wayback machine
"""
self._machine = "http://archive.org/wayback/available?url="
self._arch = "https://web.archive.org/save/"
self.archived_urls = []
# load data
if os.path.isfile("archived_urls.dat"):
self.archived_urls = self.load_data()
def available(self, url, silent=False):
"""
:param: url
:param: silent=False
Checks if the given URL exists in the wayback machine.
If the silent argument is True, nothing is printed to the console.
"""
print("[Checking]: %s\n" % url) if silent == False else 0
data = demjson.decode(requests.get(self._machine+url).text)["archived_snapshots"]
if "closest" in data:
print(self.print_item(data)) if silent == False else 0
return (data["closest"])["available"]
return False
def load_data(self):
"""
Loads the archived URLS from a file called archived_urls.dat
"""
return shelve.open("archived_urls.dat")["main"]
def out_text(self, filename):
"""
:param: filename
Outputs a list of archived urls into text format
"""
with open(filename, 'w') as f:
    f.writelines(url + "\n" for url in self.archived_urls)
print("Done.")
def save_data(self):
"""
Saves the archived urls into archived_urls.dat
"""
shelve.open("archived_urls.dat")["main"] = self.archived_urls
def archive(self, url):
"""
:param: url
Archives a URL into the wayback machine.
"""
l = requests.get(self._arch+url)
print("Archiving...")
self.archived_urls.append(url)
self.save_data()
def print_item(self, data):
"""
:param: data
Builds a printable status string from the archive JSON data
"""
dat = data["closest"]
stamp = "Archived:%s\nAvailable:%s\nURL:%s\nStatus:%s" % (dat["timestamp"], dat['available'], dat['url'], dat['status'])
return stamp
def save_webpage(self, url, filename):
"""
:param: url
:param: filename
Saves a webpage
"""
print("[OK]: Saving webpage..")
if not os.path.isdir(os.getcwd()+"\\saved_webpages"): os.mkdir("saved_webpages")
open(os.getcwd()+"\\saved_webpages\\"+filename, 'w').write((requests.get(url).text).encode("utf-8"))
if os.path.isfile(os.getcwd()+"\\saved_webpages\\"+filename): print("Done.")
Help = \
" \
Usage: archive.py [option] [option2]\n \
\
Options:\n \
-CH/ch [url] - Check if a URL already exists in the wayback machine and return it's information if it does\n \
-ARCH/arch [url] - Archive a URL\n \
-CHARCH/charch [url] - Archive a url if it doesn't already exists\n \
-OUTT/outt [filename] - Output a list of archived urls in text format\n \
-H/h - Print this help message\n \
-LARCH/larch - print out a list of urls you archived\n \
-SAVE/save [url] [filename] - Save a url into a file"
def main():
global Help
A = Archiver()
args = map(lambda x : x.lower(), sys.argv[1:len(sys.argv)])
print(args)
if len(args) == 2:
print(args[0])
if args[0] == "-ch":
if A.available(args[1]) is True:
print("URL found.")
else:
print("URL not found in wayback machine.")
sys.exit(0)
elif args[0] == "-arch":
A.archive(args[1])
if A.available(args[1], True) is True:
print("[Success]: Archiving is successful")
else:
print("[Error]: Archiving failed!")
failed_url = A.archived_urls.pop()  # prefix the string itself, not a list of its characters
A.archived_urls.append("FAILED TO ARCHIVE: " + failed_url)
sys.exit(0)
elif args[0] == "-charch":
main = A.available(args[1])
if main is True or main == "True":
print("URL exists.")
elif main is False:
print("URL does not exist.")
A.archive(args[1])
sys.exit(0)
elif args[0] == "-outt":
A.out_text(args[1])
sys.exit(0)
elif len(args) == 3:
if args[0] == "-save":
A.save_webpage(args[1], args[2])
sys.exit(0)
elif len(args) == 1:
if args[0] == "-h":
print("-h")
print(Help)
sys.exit(0)
elif args[0] == "-larch":
print("-larch")
map(lambda x : print(x), A.archived_urls)
sys.exit(0)
else:
print("[Error]: Unknown argument \'%s\'" % args[0])
sys.exit(0)
else:
print("Archiver: No arguments found.\n Type '-h' for help")
sys.exit(0)
if __name__ == "__main__":
main()
|
saberman888/Archiver
|
archive.py
|
Python
|
mit
| 5,894 | 0.023583 |
from .trainsession import TrainSession
from .helper import FixedNoise
class SmurffSession(TrainSession):
def __init__(self, Ytrain, priors, is_scarce = True, Ytest=None, side_info=None, direct=True, *args, **kwargs):
TrainSession.__init__(self, priors=priors, *args, **kwargs)
self.addTrainAndTest(Ytrain, Ytest, is_scarce = is_scarce)
if side_info is not None:
nmodes = len(Ytrain.shape)
assert len(side_info) == nmodes, "Too many side info, got %d, expected %d" % (len(side_info), nmodes)
for mode, si in enumerate(side_info):
if si is not None:
self.addSideInfo(mode, si, direct=direct)
class MacauSession(SmurffSession):
"""A train trainSession specialized for use with the Macau algorithm
Attributes
----------
Ytrain : :class: `numpy.ndarray`, :mod:`scipy.sparse` matrix or :class: `SparseTensor`
Train matrix/tensor
Ytest : :mod:`scipy.sparse` matrix or :class: `SparseTensor`
Test matrix/tensor. Mainly used for calculating RMSE.
side_info : list of :class: `numpy.ndarray`, :mod:`scipy.sparse` matrix or None
Side info matrix/tensor for each dimension
If there is no side info for a certain mode, pass `None`.
Each side info should have as many rows as you have elemnts in corresponding dimension of `Ytrain`.
direct : bool
Use Cholesky instead of CG solver
univariate : bool
Use univariate or multivariate sampling.
\*\*args:
Extra arguments are passed to the :class:`TrainSession`
"""
def __init__(self, Ytrain, is_scarce = True, Ytest=None, side_info=None, univariate=False, direct=True, *args, **kwargs):
nmodes = len(Ytrain.shape)
priors = ['normal'] * nmodes
if side_info is not None:
assert len(side_info) == nmodes
for d in range(nmodes):
if side_info[d] is not None:
priors[d] = 'macau'
if univariate:
priors = [p + "one" for p in priors]
SmurffSession.__init__(self, Ytrain, priors, is_scarce, Ytest, side_info, direct, *args, **kwargs)
class BPMFSession(MacauSession):
"""A train trainSession specialized for use with the BPMF algorithm
Attributes
----------
Ytrain : :class: `numpy.ndarray`, :mod:`scipy.sparse` matrix or :class: `SparseTensor`
Train matrix/tensor
Ytest : :mod:`scipy.sparse` matrix or :class: `SparseTensor`
Test matrix/tensor. Mainly used for calculating RMSE.
univariate : bool
Use univariate or multivariate sampling.
\*\*args:
Extra arguments are passed to the :class:`TrainSession`
"""
def __init__(self, Ytrain, is_scarce = True, Ytest=None, univariate=False, *args, **kwargs):
MacauSession.__init__(self, Ytrain, is_scarce, Ytest, None, univariate, *args, **kwargs)
class GFASession(SmurffSession):
def __init__(self, Views, Ytest=None, *args, noise = FixedNoise(), **kwargs):
Ytrain = Views[0]
nmodes = len(Ytrain.shape)
assert nmodes == 2
priors = ['normal', 'spikeandslab']
TrainSession.__init__(self, priors=priors, *args, **kwargs)
self.addTrainAndTest(Ytrain, Ytest, noise = noise)
for p in range(1, len(Views)):
self.addData([0, p], Views[p], noise = noise)
# old API -- for compatibility reasons
def smurff(*args, **kwargs):
return SmurffSession(*args, **kwargs).run()
def bpmf(*args, **kwargs):
return BPMFSession(*args, **kwargs).run()
def macau(*args, **kwargs):
return MacauSession(*args, **kwargs).run()
def gfa(*args, **kwargs):
return GFASession(*args, **kwargs).run()
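# --- Usage sketch (added for illustration; not part of the original module) ---
# Client code would typically drive MacauSession as below. The matrix shapes,
# the side-info array and the keyword values (num_latent, burnin, nsamples)
# are assumptions about the usual TrainSession signature, not values taken
# from this file:
#
#     import numpy as np
#     import scipy.sparse as sp
#     import smurff
#
#     Ytrain = sp.random(50, 40, density=0.2, format="csr")   # scarce train matrix
#     side = [np.random.randn(50, 5), None]                   # side info for mode 0 only
#     session = smurff.MacauSession(Ytrain, side_info=side, univariate=False,
#                                   num_latent=8, burnin=10, nsamples=20)
#     predictions = session.run()                             # sample, then predict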
|
ExaScience/smurff
|
python/smurff/smurff.py
|
Python
|
mit
| 3,868 | 0.009566 |
#!/usr/bin/env python
from __future__ import division
from std_msgs.msg import Int32
import roslib; roslib.load_manifest('beep_imu')
import rospy
import smbus
import time
bus = smbus.SMBus(1)
irAddress = 0x55
#pub_mag_xsens = rospy.Publisher('imu/mag', Vector3Stamped)
def decFrom2Compl(val, bitlen):
if val & (1 << bitlen - 1):
val = val - (1 << bitlen)
return val
# returns val scaled from an old range into a new continuous range
def scaleToRange(val, oldMin, oldMax, newMin, newMax):
val -= oldMin
val /= oldMax - oldMin
val *= newMax - newMin
val += newMin
return val
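# Worked example (illustrative values): scaleToRange(5, 0, 10, 0, 100) == 50.0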
def init():
bus.write_byte_data(irAddress, 0x80, 0x01) # start unit
bus.write_byte_data(irAddress, 0x81, 0xD0) # pulses
bus.write_byte_data(irAddress, 0x82, 0x20) # start messurement
# reading imu data and publishing Imu msg to topic Imu
def talker():
rospy.init_node('IRNode')
rospy.loginfo('starting IR_Node')
while not rospy.is_shutdown():
storeDistance()
rospy.sleep(0.05)
rospy.loginfo('IRNode shut down')
def storeDistance():
#read current linear accelerations
#msg = decFrom2Compl(msg,12)
#update acceleration in msg
#msg = scaleToRange(dist, -512, 511, -19.6133, 19.6133)
pub = rospy.Publisher('topic/IR3', Int32)
pub2 = rospy.Publisher('topic/IR0', Int32)
msg = Int32()
msg.data = bus.read_byte_data(irAddress, 0x85)
msg.data += bus.read_byte_data(irAddress, 0x86) * 0x100
pub.publish(msg)
pub2.publish(msg)
print msg
if __name__ == '__main__':
init()
talker()
pass
|
iti-luebeck/BEEP
|
Software/catkin_ws/src/beep_imu/ir_distance_zusmoro.py
|
Python
|
bsd-3-clause
| 1,517 | 0.039552 |
#encoding:utf-8
import Queue
import threading
import urllib2
# called by each thread
def get_url(q, url):
q.put(urllib2.urlopen(url).read())
theurls = '''http://google.com http://yahoo.com'''.split()
print theurls
q = Queue.Queue()
for u in theurls:
t = threading.Thread(target=get_url, args = (q,u))
t.daemon = True
t.start()
s = q.get()
print s
|
ubaldino/pyloop
|
prueba3.py
|
Python
|
agpl-3.0
| 369 | 0.01897 |
"""
Define common steps for instructor dashboard acceptance tests.
"""
# pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from __future__ import absolute_import
from lettuce import step, world
from mock import patch
from nose.tools import assert_in
from courseware.tests.factories import InstructorFactory, StaffFactory
@step(u'Given I am "([^"]*)" for a very large course')
def make_staff_or_instructor_for_large_course(step, role):
make_large_course(step, role)
@patch.dict('courseware.access.settings.FEATURES', {"MAX_ENROLLMENT_INSTR_BUTTONS": 0})
def make_large_course(step, role):
i_am_staff_or_instructor(step, role)
@step(u'Given I am "([^"]*)" for a course')
def i_am_staff_or_instructor(step, role): # pylint: disable=unused-argument
## In summary: makes a test course, makes a new Staff or Instructor user
## (depending on `role`), and logs that user in to the course
# Store the role
assert_in(role, ['instructor', 'staff'])
# Clear existing courses to avoid conflicts
world.clear_courses()
# Create a new course
course = world.CourseFactory.create(
org='edx',
number='999',
display_name='Test Course'
)
world.course_key = course.id
world.role = 'instructor'
# Log in as the an instructor or staff for the course
if role == 'instructor':
# Make & register an instructor for the course
world.instructor = InstructorFactory(course_key=world.course_key)
world.enroll_user(world.instructor, world.course_key)
world.log_in(
username=world.instructor.username,
password='test',
email=world.instructor.email,
name=world.instructor.profile.name
)
else:
world.role = 'staff'
# Make & register a staff member
world.staff = StaffFactory(course_key=world.course_key)
world.enroll_user(world.staff, world.course_key)
world.log_in(
username=world.staff.username,
password='test',
email=world.staff.email,
name=world.staff.profile.name
)
def go_to_section(section_name):
# section name should be one of
# course_info, membership, student_admin, data_download, analytics, send_email
world.visit(u'/courses/{}'.format(world.course_key))
world.css_click(u'a[href="/courses/{}/instructor"]'.format(world.course_key))
world.css_click('[data-section="{0}"]'.format(section_name))
@step(u'I click "([^"]*)"')
def click_a_button(step, button): # pylint: disable=unused-argument
if button == "Generate Grade Report":
# Go to the data download section of the instructor dash
go_to_section("data_download")
# Click generate grade report button
world.css_click('input[name="calculate-grades-csv"]')
# Expect to see a message that grade report is being generated
expected_msg = "The grade report is being created." \
" To view the status of the report, see" \
" Pending Tasks below."
world.wait_for_visible('#report-request-response')
assert_in(
expected_msg, world.css_text('#report-request-response'),
msg="Could not find grade report generation success message."
)
elif button == "Grading Configuration":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="dump-gradeconf"]')
elif button == "List enrolled students' profile information":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles"]')
elif button == "Download profile information as a CSV":
# Go to the data download section of the instructor dash
go_to_section("data_download")
world.css_click('input[name="list-profiles-csv"]')
else:
raise ValueError("Unrecognized button option " + button)
@step(u'I visit the "([^"]*)" tab')
def click_a_button(step, tab_name): # pylint: disable=unused-argument
# course_info, membership, student_admin, data_download, analytics, send_email
tab_name_dict = {
'Course Info': 'course_info',
'Membership': 'membership',
'Student Admin': 'student_admin',
'Data Download': 'data_download',
'Analytics': 'analytics',
'Email': 'send_email',
}
go_to_section(tab_name_dict[tab_name])
|
pepeportela/edx-platform
|
lms/djangoapps/instructor/features/common.py
|
Python
|
agpl-3.0
| 4,560 | 0.001316 |
#!/usr/bin/env python
#
# __COPYRIGHT__
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
__revision__ = "__FILE__ __REVISION__ __DATE__ __DEVELOPER__"
"""
Test that a module that we import into an SConscript file can itself
easily import the global SCons variables, and a handful of other variables
directly from SCons.Script modules.
"""
import TestSCons
test = TestSCons.TestSCons()
test.write('SConstruct', """\
import m1
""")
test.write("m1.py", """\
from SCons.Script import *
SConscript('SConscript')
""")
test.write('SConscript', """\
import m2
import m3
import m4
""")
test.write("m2.py", """\
from SCons.Script import *
Command("file.out", "file.in", Copy("$TARGET", "$SOURCE"))
""")
test.write("m3.py", """\
import SCons.Script
SCons.Script.BuildTask
SCons.Script.CleanTask
SCons.Script.QuestionTask
old_SCons_Script_variables = [
'PrintHelp',
'OptParser',
'keep_going_on_error',
'print_explanations',
'print_includes',
'print_objects',
'print_time',
'memory_stats',
'ignore_errors',
'repositories',
'print_dtree',
'print_tree',
'sconscript_time',
'command_time',
'exit_status',
'profiling',
]
for var in old_SCons_Script_variables:
try:
getattr(SCons.Script, var)
except AttributeError:
pass
else:
raise Exception("unexpected variable SCons.Script.%s" % var)
""")
test.write("m4.py", """\
import SCons.Script.SConscript
SCons.Script.SConscript.Arguments
SCons.Script.SConscript.ArgList
SCons.Script.SConscript.BuildTargets
SCons.Script.SConscript.CommandLineTargets
SCons.Script.SConscript.DefaultTargets
""")
test.write("file.in", "file.in\n")
test.run(arguments = '.')
test.must_match("file.out", "file.in\n")
test.pass_test()
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
|
Distrotech/scons
|
test/Script-import.py
|
Python
|
mit
| 2,878 | 0.000695 |
import defaulted_methods
if __name__ == "__main__":
defaulted_methods.call_main()
|
bozzzzo/quark
|
quarkc/test/emit/expected/py/defaulted-methods/defaulted_methods.py
|
Python
|
apache-2.0
| 87 | 0 |
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def sumOfLeftLeavesRec(self, root, left):
if root is None:
return 0
elif root.left is None and root.right is None:
if left:
return root.val
else:
return 0
else:
return self.sumOfLeftLeavesRec(root.left, True) + self.sumOfLeftLeavesRec(root.right, False)
def sumOfLeftLeaves(self, root):
"""
:type root: TreeNode
:rtype: int
"""
return self.sumOfLeftLeavesRec(root, False)
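# Worked example: for the tree 3 -> (9, 20) with 20 -> (15, 7), the left
# leaves are 9 and 15, so sumOfLeftLeaves returns 24.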
|
everyevery/programming_study
|
leetcode/404-SumOfLeftLeaves/sum_of_left_leaves.py
|
Python
|
mit
| 719 | 0.002782 |
__author__ = 'bharathramh'
EMAIL_VERIFICATION_EXPIRATION_DAYS = 1
FORGOT_PASSWORD_EXPIRATION_DAYS = 1
|
bharathramh92/easy-ecom
|
accounts/constants.py
|
Python
|
apache-2.0
| 102 | 0.009804 |
# -*- coding: utf-8 -*-
from .base_premierecinemas import BasePremierecinemasCinemaSpider
class Spider(BasePremierecinemasCinemaSpider):
name = 'praha-premierecinemas'
calendar_url = 'http://www.premierecinemas.cz/'
|
zitkino/backend
|
zitkino/spiders/praha_premierecinemas.py
|
Python
|
agpl-3.0
| 229 | 0 |
def omp_parallel_sections_reduction():
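# Pythran's legacy OpenMP annotation style: a string literal used as an `if`
# condition marks an OpenMP region (here, parallel sections with reduction
# clauses). Under plain CPython the strings are merely truthy, so every
# branch runs and the loops execute sequentially.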
import math
dt = 0.5
rounding_error = 1.E-9
sum = 7
dsum = 0
dt = 1. / 3.
result = True
product = 1
logic_and = 1
logic_or = 0
bit_and = 1
bit_or = 0
i = 0
exclusiv_bit_or = 0
known_sum = (1000 * 999) / 2 + 7
if 'omp parallel sections private(i) reduction(+:sum)':
if 'omp section':
for i in xrange(1,300):
sum += i
if 'omp section':
for i in xrange(300,700):
sum += i
if 'omp section':
for i in xrange(700,1000):
sum += i
if known_sum != sum:
print "E: reduction(+:sum)"
result = False
diff = (1000 * 999) / 2
if 'omp parallel sections private(i) reduction(-:diff)':
if 'omp section':
for i in xrange(1,300):
diff -= i
if 'omp section':
for i in xrange(300,700):
diff -= i
if 'omp section':
for i in xrange(700,1000):
diff -= i
if diff != 0:
print "E: reduction(-:diff)"
result = False
dsum = 0
dpt = 0
for i in xrange(0, 20):
dpt *= dt
dknown_sum = (1 - dpt) / (1 - dt)
if 'omp parallel sections private(i) reduction(+:dsum)':
if 'omp section':
for i in xrange(0,7):
dsum += math.pow(dt, i)
if 'omp section':
for i in xrange(7,14):
dsum += math.pow(dt, i)
if 'omp section':
for i in xrange(14,20):
dsum += math.pow(dt, i)
if abs(dsum-dknown_sum) > rounding_error:
print "E: reduction(+:dsum)"
result = False
dsum = 0
dpt = 0
for i in xrange(0, 20):
dpt *= dt
ddiff = (1 - dpt) / (1 - dt)
if 'omp parallel sections private(i) reduction(-:ddiff)':
if 'omp section':
for i in xrange(0,6):
ddiff -= math.pow(dt, i)
if 'omp section':
for i in xrange(6,12):
ddiff -= math.pow(dt, i)
if 'omp section':
for i in xrange(12,20):
ddiff -= math.pow(dt, i)
if abs(ddiff) > rounding_error:
print "E: reduction(-:ddiff)"
result = False
if 'omp parallel sections private(i) reduction(*:product)':
if 'omp section':
for i in xrange(1,3):
product *= i
if 'omp section':
for i in xrange(3,6):
product *= i
if 'omp section':
for i in xrange(6,11):
product *= i
known_product = 3628800
if known_product != product:
print "E: reduction(*:product)"
result = False
logics = [1 for i in xrange(0,1000)]
if 'omp parallel sections private(i) reduction(&&:logic_and)':
if 'omp section':
for i in xrange(0, 300):
logic_and = (logic_and and logics[i])
if 'omp section':
for i in xrange(300, 700):
logic_and = (logic_and and logics[i])
if 'omp section':
for i in xrange(700, 1000):
logic_and = (logic_and and logics[i])
if not logic_and:
print "E: reduction(&&:logic_and)"
result = False
logic_and = 1;
logics[1000/2]=0
if 'omp parallel sections private(i) reduction(&&:logic_and)':
if 'omp section':
for i in xrange(0, 300):
logic_and = (logic_and and logics[i])
if 'omp section':
for i in xrange(300, 700):
logic_and = (logic_and and logics[i])
if 'omp section':
for i in xrange(700, 1000):
logic_and = (logic_and and logics[i])
if logic_and:
print "E: reduction(&&:logic_and) with logics[1000/2]=0"
result = False
logics = [0 for i in xrange(0,1000)]
if 'omp parallel sections private(i) reduction(||:logic_or)':
if 'omp section':
for i in xrange(0, 300):
logic_or = (logic_or or logics[i])
if 'omp section':
for i in xrange(300, 700):
logic_or = (logic_or or logics[i])
if 'omp section':
for i in xrange(700, 1000):
logic_or = (logic_or or logics[i])
if logic_or:
print "E: reduction(||:logic_or)"
result = False
logic_or = 0;
logics[1000/2]=1
if 'omp parallel sections private(i) reduction(||:logic_or)':
if 'omp section':
for i in xrange(0, 300):
logic_or = (logic_or or logics[i])
if 'omp section':
for i in xrange(300, 700):
logic_or = (logic_or or logics[i])
if 'omp section':
for i in xrange(700, 1000):
logic_or = (logic_or or logics[i])
if not logic_or:
print "E: reduction(||:logic_or) with logics[1000/2]=1"
result = False
logics = [1 for i in xrange(0,1000)]
if 'omp parallel sections private(i) reduction(&:bit_and)':
if 'omp section':
for i in xrange(0, 300):
bit_and = (bit_and & logics[i])
if 'omp section':
for i in xrange(300, 700):
bit_and = (bit_and & logics[i])
if 'omp section':
for i in xrange(700, 1000):
bit_and = (bit_and & logics[i])
if not bit_and:
print "E: reduction(&:bit_and)"
result = False
bit_and = 1;
logics[1000/2]=0
if 'omp parallel sections private(i) reduction(&:bit_and)':
if 'omp section':
for i in xrange(0, 300):
bit_and = (bit_and & logics[i])
if 'omp section':
for i in xrange(300, 700):
bit_and = (bit_and & logics[i])
if 'omp section':
for i in xrange(700, 1000):
bit_and = (bit_and & logics[i])
if bit_and:
print "E: reduction(&:bit_and) with logics[1000/2]=0"
result = False
logics = [0 for i in xrange(0,1000)]
if 'omp parallel sections private(i) reduction(|:bit_or)':
if 'omp section':
for i in xrange(0, 300):
bit_or = (bit_or | logics[i])
if 'omp section':
for i in xrange(300, 700):
bit_or = (bit_or | logics[i])
if 'omp section':
for i in xrange(700, 1000):
bit_or = (bit_or | logics[i])
if bit_or:
print "E: reduction(|:bit_or)"
result = False
bit_or = 0;
logics[1000/2]=1
if 'omp parallel sections private(i) reduction(|:bit_or)':
if 'omp section':
for i in xrange(0, 300):
bit_or = (bit_or | logics[i])
if 'omp section':
for i in xrange(300, 700):
bit_or = (bit_or | logics[i])
if 'omp section':
for i in xrange(700, 1000):
bit_or = (bit_or | logics[i])
if not bit_or:
print "E: reduction(|:bit_or) with logics[1000/2]=1"
result = False
logics = [0 for i in xrange(0,1000)]
if 'omp parallel sections private(i) reduction(^:exclusiv_bit_or)':
if 'omp section':
for i in xrange(0, 300):
exclusiv_bit_or = (exclusiv_bit_or ^ logics[i])
if 'omp section':
for i in xrange(300, 700):
exclusiv_bit_or = (exclusiv_bit_or ^ logics[i])
if 'omp section':
for i in xrange(700, 1000):
exclusiv_bit_or = (exclusiv_bit_or ^ logics[i])
if exclusiv_bit_or:
print "E: reduction(^:exclusiv_bit_or)"
result = False
exclusiv_bit_or = 0;
logics[1000/2]=1
if 'omp parallel sections private(i) reduction(^:exclusiv_bit_or)':
if 'omp section':
for i in xrange(0, 300):
exclusiv_bit_or = (exclusiv_bit_or ^ logics[i])
if 'omp section':
for i in xrange(300, 700):
exclusiv_bit_or = (exclusiv_bit_or ^ logics[i])
if 'omp section':
for i in xrange(700, 1000):
exclusiv_bit_or = (exclusiv_bit_or ^ logics[i])
if not exclusiv_bit_or:
print "E: reduction(^:exclusiv_bit_or) with logics[1000/2]=1"
result = False
return result
|
hainm/pythran
|
pythran/tests/openmp.legacy/omp_parallel_sections_reduction.py
|
Python
|
bsd-3-clause
| 8,373 | 0.003583 |
from pycp2k.inputsection import InputSection
class _each126(InputSection):
def __init__(self):
InputSection.__init__(self)
self.Just_energy = None
self.Powell_opt = None
self.Qs_scf = None
self.Xas_scf = None
self.Md = None
self.Pint = None
self.Metadynamics = None
self.Geo_opt = None
self.Rot_opt = None
self.Cell_opt = None
self.Band = None
self.Ep_lin_solver = None
self.Spline_find_coeffs = None
self.Replica_eval = None
self.Bsse = None
self.Shell_opt = None
self.Tddft_scf = None
self._name = "EACH"
self._keywords = {'Bsse': 'BSSE', 'Cell_opt': 'CELL_OPT', 'Just_energy': 'JUST_ENERGY', 'Band': 'BAND', 'Xas_scf': 'XAS_SCF', 'Rot_opt': 'ROT_OPT', 'Replica_eval': 'REPLICA_EVAL', 'Tddft_scf': 'TDDFT_SCF', 'Shell_opt': 'SHELL_OPT', 'Md': 'MD', 'Pint': 'PINT', 'Metadynamics': 'METADYNAMICS', 'Geo_opt': 'GEO_OPT', 'Spline_find_coeffs': 'SPLINE_FIND_COEFFS', 'Powell_opt': 'POWELL_OPT', 'Qs_scf': 'QS_SCF', 'Ep_lin_solver': 'EP_LIN_SOLVER'}
|
SINGROUP/pycp2k
|
pycp2k/classes/_each126.py
|
Python
|
lgpl-3.0
| 1,114 | 0.001795 |
#!/usr/bin/python
#
# \file 0_setup.py
# \brief setup pacs_prim_list
# \date 2011-09-28 7:22GMT
# \author Jan Boon (Kaetemi)
# Python port of game data build pipeline.
# Setup pacs_prim_list
#
# NeL - MMORPG Framework <http://dev.ryzom.com/projects/nel/>
# Copyright (C) 2010 Winch Gate Property Limited
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import time, sys, os, shutil, subprocess, distutils.dir_util
sys.path.append("../../configuration")
if os.path.isfile("log.log"):
os.remove("log.log")
log = open("log.log", "w")
from scripts import *
from buildsite import *
from process import *
from tools import *
from directories import *
printLog(log, "")
printLog(log, "-------")
printLog(log, "--- Setup pacs_prim_list")
printLog(log, "-------")
printLog(log, time.strftime("%Y-%m-%d %H:%MGMT", time.gmtime(time.time())))
printLog(log, "")
# Setup source directories
printLog(log, ">>> Setup source directories <<<")
for dir in PacsPrimExportSourceDirectories:
mkPath(log, ExportBuildDirectory + "/" + dir)
# Setup build directories
printLog(log, ">>> Setup build directories <<<")
mkPath(log, DataCommonDirectory) # no choice
log.close()
# end of file
|
osgcc/ryzom
|
nel/tools/build_gamedata/processes/pacs_prim_list/0_setup.py
|
Python
|
agpl-3.0
| 1,786 | 0.006719 |
#!/usr/bin/env python2
from GSettingsWidgets import *
from ChooserButtonWidgets import TweenChooserButton, EffectChooserButton
EFFECT_SETS = {
"cinnamon": ("traditional", "traditional", "traditional", "none", "none", "none"),
"scale": ("scale", "scale", "scale", "scale", "scale", "scale"),
"fade": ("fade", "fade", "fade", "scale", "scale", "scale"),
"blend": ("blend", "blend", "blend", "scale", "scale", "scale"),
"move": ("move", "move", "move", "scale", "scale", "scale"),
"flyUp": ("flyUp", "flyDown", "flyDown", "scale", "scale", "scale"),
"flyDown": ("flyDown", "flyUp", "flyUp", "scale", "scale", "scale"),
"default": ("scale", "scale", "none", "none", "none", "none")
}
TRANSITIONS_SETS = {
"cinnamon": ("easeOutQuad", "easeOutQuad", "easeInQuad", "easeInExpo", "easeNone", "easeInQuad"),
"normal": ("easeOutSine", "easeInBack", "easeInSine", "easeInBack", "easeOutBounce", "easeInBack"),
"extra": ("easeOutElastic", "easeOutBounce", "easeOutExpo", "easeInExpo", "easeOutElastic", "easeInExpo"),
"fade": ("easeOutQuart", "easeInQuart", "easeInQuart", "easeInBack", "easeOutBounce", "easeInBack")
}
TIME_SETS = {
"cinnamon": (175, 175, 200, 100, 100, 100),
"slow": (400, 400, 400, 100, 100, 100),
"normal": (250, 250, 250, 100, 100, 100),
"fast": (100, 100, 100, 100, 100, 100),
"default": (250, 250, 150, 400, 400, 400)
}
COMBINATIONS = {
# name effect transition time
"cinnamon": ("cinnamon", "cinnamon", "cinnamon"),
"scale": ("scale", "normal", "normal"),
"fancyScale": ("scale", "extra", "slow"),
"fade": ("fade", "fade", "normal"),
"blend": ("blend", "fade", "normal"),
"move": ("move", "normal", "fast"),
"flyUp": ("flyUp", "normal", "fast"),
"flyDown": ("flyDown", "normal", "fast"),
#for previous versions
"default": ("default", "normal", "default")
}
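# Each COMBINATIONS value names an (effect set, transition set, time set)
# triple; e.g. "fancyScale" -> ("scale", "extra", "slow") expands to
# EFFECT_SETS["scale"], TRANSITIONS_SETS["extra"] and TIME_SETS["slow"]
# (see Module.on_module_selected below).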
OPTIONS = (
("cinnamon", _("Cinnamon")),
("scale", _("Scale")),
("fancyScale", _("Fancy Scale")),
("fade", _("Fade")),
("blend", _("Blend")),
("move", _("Move")),
("flyUp", _("Fly up, down")),
("flyDown", _("Fly down, up")),
#for previous versions
("default", _("Default"))
)
TYPES = ("map", "close", "minimize", "maximize", "unmaximize", "tile")
SCHEMA = "org.cinnamon"
DEP_PATH = "org.cinnamon/desktop-effects"
KEY_TEMPLATE = "desktop-effects-%s-%s"
class GSettingsTweenChooserButton(TweenChooserButton, CSGSettingsBackend):
def __init__(self, schema, key, dep_key):
self.key = key
self.bind_prop = "tween"
self.bind_dir = Gio.SettingsBindFlags.DEFAULT
self.bind_object = self
if schema not in settings_objects.keys():
settings_objects[schema] = Gio.Settings.new(schema)
self.settings = settings_objects[schema]
super(GSettingsTweenChooserButton, self).__init__()
self.bind_settings()
class GSettingsEffectChooserButton(EffectChooserButton, CSGSettingsBackend):
def __init__(self, schema, key, dep_key, options):
self.key = key
self.bind_prop = "effect"
self.bind_dir = Gio.SettingsBindFlags.DEFAULT
self.bind_object = self
if schema not in settings_objects.keys():
settings_objects[schema] = Gio.Settings.new(schema)
self.settings = settings_objects[schema]
super(GSettingsEffectChooserButton, self).__init__(options)
self.bind_settings()
class Module:
name = "effects"
category = "appear"
comment = _("Control Cinnamon visual effects.")
def __init__(self, content_box):
keywords = _("effects, fancy, window")
sidePage = SidePage(_("Effects"), "cs-desktop-effects", keywords, content_box, module=self)
self.sidePage = sidePage
def on_module_selected(self):
if not self.loaded:
print "Loading Effects module"
self.sidePage.stack = SettingsStack()
self.sidePage.add_widget(self.sidePage.stack)
self.schema = Gio.Settings(SCHEMA)
self.effect_sets = {}
for name, sets in COMBINATIONS.items():
self.effect_sets[name] = (EFFECT_SETS[sets[0]], TRANSITIONS_SETS[sets[1]], TIME_SETS[sets[2]])
# Enable effects
page = SettingsPage()
self.sidePage.stack.add_titled(page, "effects", _("Enable effects"))
settings = page.add_section(_("Enable Effects"))
widget = GSettingsSwitch(_("Window effects"), "org.cinnamon", "desktop-effects")
settings.add_row(widget)
widget = GSettingsSwitch(_("Effects on dialog boxes"), "org.cinnamon", "desktop-effects-on-dialogs")
settings.add_reveal_row(widget, "org.cinnamon", "desktop-effects")
widget = GSettingsSwitch(_("Effects on menus"), "org.cinnamon", "desktop-effects-on-menus")
settings.add_reveal_row(widget, "org.cinnamon", "desktop-effects")
self.chooser = GSettingsComboBox(_("Effects style"), "org.cinnamon", "desktop-effects-style", OPTIONS)
self.chooser.content_widget.connect("changed", self.on_value_changed)
settings.add_reveal_row(self.chooser, "org.cinnamon", "desktop-effects")
widget = GSettingsSwitch(_("Fade effect on Cinnamon scrollboxes (like the Menu application list)"), "org.cinnamon", "enable-vfade")
settings.add_row(widget)
widget = GSettingsSwitch(_("Session startup animation"), "org.cinnamon", "startup-animation")
settings.add_row(widget)
if Gtk.get_major_version() == 3 and Gtk.get_minor_version() >= 16:
widget = GSettingsSwitch(_("Overlay scroll bars (logout required)"), "org.cinnamon.desktop.interface", "gtk-overlay-scrollbars")
settings.add_row(widget)
self.schema.connect("changed::desktop-effects", self.on_desktop_effects_enabled_changed)
# Customize
page = SettingsPage()
self.sidePage.stack.add_titled(page, "customize", _("Customize"))
box = Gtk.Box(orientation=Gtk.Orientation.HORIZONTAL)
label = Gtk.Label()
label.set_markup("<b>%s</b>" % _("Customize settings"))
box.pack_start(label, False, False, 0)
self.custom_switch = Gtk.Switch(active = self.is_custom())
box.pack_end(self.custom_switch, False, False, 0)
self.custom_switch.connect("notify::active", self.update_effects)
page.add(box)
self.revealer = Gtk.Revealer()
self.revealer.set_transition_type(Gtk.RevealerTransitionType.SLIDE_DOWN)
self.revealer.set_transition_duration(150)
page.add(self.revealer)
settings = SettingsBox(_("Effect"))
self.revealer.add(settings)
self.size_group = Gtk.SizeGroup.new(Gtk.SizeGroupMode.HORIZONTAL)
effects = ["none", "scale", "fade", "blend", "move", "flyUp", "flyDown", "traditional"]
# MAPPING WINDOWS
widget = self.make_effect_group(_("Mapping windows"), "map", effects)
settings.add_row(widget)
# CLOSING WINDOWS
widget = self.make_effect_group(_("Closing windows"), "close", effects)
settings.add_row(widget)
# MINIMIZING WINDOWS
widget = self.make_effect_group(_("Minimizing windows"), "minimize", effects)
settings.add_row(widget)
# MAXIMIZING WINDOWS
# effects = ["none", _("None")], ["scale", _("Scale")]]
widget = self.make_effect_group(_("Maximizing windows"), "maximize")
settings.add_row(widget)
# UNMAXIMIZING WINDOWS
widget = self.make_effect_group(_("Unmaximizing windows"), "unmaximize")
settings.add_row(widget)
# TILING WINDOWS
widget = self.make_effect_group(_("Tiling and snapping windows"), "tile")
settings.add_row(widget)
self.update_effects(self.custom_switch, None)
def make_effect_group(self, group_label, key, effects=None):
tmin, tmax, tstep, tdefault = (0, 2000, 50, 200)
        row = SettingsWidget()
row.set_spacing(5)
label = Gtk.Label()
label.set_markup(group_label)
label.props.xalign = 0.0
row.pack_start(label, False, False, 0)
label = Gtk.Label(_("ms"))
row.pack_end(label, False, False, 0)
effect = GSettingsEffectChooserButton(SCHEMA, KEY_TEMPLATE % (key, "effect"), DEP_PATH, effects)
self.size_group.add_widget(effect)
tween = GSettingsTweenChooserButton(SCHEMA, KEY_TEMPLATE % (key, "transition"), DEP_PATH)
self.size_group.add_widget(tween)
time = GSettingsSpinButton("", SCHEMA, KEY_TEMPLATE % (key, "time"), dep_key=DEP_PATH, mini=tmin, maxi=tmax, step=tstep, page=tdefault)
time.set_border_width(0)
time.set_margin_right(0)
time.set_margin_left(0)
time.set_spacing(0)
row.pack_end(time, False, False, 0)
row.pack_end(tween, False, False, 0)
row.pack_end(effect, False, False, 0)
return row
def is_custom(self):
effects = []
transitions = []
times = []
for i in TYPES:
effects.append(self.schema.get_string(KEY_TEMPLATE % (i, "effect")))
transitions.append(self.schema.get_string(KEY_TEMPLATE % (i, "transition")))
times.append(self.schema.get_int(KEY_TEMPLATE % (i, "time")))
value = (tuple(effects), tuple(transitions), tuple(times))
return value != self.effect_sets[self.chooser.value]
def on_value_changed(self, widget):
value = self.effect_sets[self.schema.get_string("desktop-effects-style")]
j = 0
for i in TYPES:
self.schema.set_string(KEY_TEMPLATE % (i, "effect"), value[0][j])
self.schema.set_string(KEY_TEMPLATE % (i, "transition"), value[1][j])
self.schema.set_int(KEY_TEMPLATE % (i, "time"), value[2][j])
j += 1
def update_effects(self, switch, gparam):
active = switch.get_active()
self.revealer.set_reveal_child(active)
#when unchecking the checkbutton, reset the values
if not active:
self.on_value_changed(self.chooser)
def on_desktop_effects_enabled_changed(self, schema, key):
active = schema.get_boolean(key)
if not active and schema.get_boolean("desktop-effects-on-dialogs"):
schema.set_boolean("desktop-effects-on-dialogs", False)
self.update_effects(self.custom_switch, None)
|
pixunil/Cinnamon
|
files/usr/share/cinnamon/cinnamon-settings/modules/cs_effects.py
|
Python
|
gpl-2.0
| 10,983 | 0.004826 |
#!/usr/bin/python
# Youtube-dl-GUI provides a front-end GUI to youtube-dl
# Copyright (C) 2013 Amit Seal Ami
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see {http://www.gnu.org/licenses/}.
#
from PySide.QtGui import QMessageBox
from PySide import QtGui
from ui.main_window import Ui_MainWindow
import sys
from os import system
from urllib2 import urlopen
from urllib2 import HTTPError
class MyApplication(QtGui.QMainWindow):
format_selected = 35
def __init__(self, parent=None):
"""Initializes"""
QtGui.QMainWindow.__init__(self, parent)
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.ui.comboBoxFormats.activated[str].connect(self.combo_formats)
self.ui.btnDownload.clicked.connect(self.download_button_pressed)
def download_button_pressed(self):
if self.ui.textEditDownload is not None:
if self.check_url(self.ui.textEditDownload.toPlainText()):
#subprocess.Popen(self.return_youtube_dl_cmd())
system(self.return_youtube_dl_cmd())
else:
msgBox = QMessageBox()
msgBox.setIcon(QMessageBox.Critical)
msgBox.setText("Error in URL")
msgBox.setInformativeText("Please check the URL you provided.")
msgBox.setStandardButtons(QMessageBox.Ok)
msgBox.exec_()
def check_url(self, url_tobe_checked):
"""
@param url_tobe_checked:
@return:
"""
try:
code = urlopen(url_tobe_checked).code
except ValueError:
return False
except HTTPError:
return False
if (code / 100) >= 4:
return False
else:
return True
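    # Sketch of the expected behaviour (the URL below is illustrative only):
    #   check_url('http://example.com') -> True for a reachable page,
    #   False on malformed URLs (ValueError), on HTTPError, or on 4xx/5xx codes.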
def return_youtube_dl_cmd(self):
from os.path import expanduser
home = expanduser("~")
cmd = "gnome-terminal -e "
cmd += '"youtube-dl -f{0} -c -o {1}/Downloads/%(title)s-%(id)s.%(ext)s {2}"'.format(
self.format_selected,
home,
self.ui.textEditDownload.toPlainText()
)
return cmd
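    # With the class default (format_selected = 35) and a hypothetical home of
    # /home/user, the assembled command looks like:
    #   gnome-terminal -e "youtube-dl -f35 -c -o /home/user/Downloads/%(title)s-%(id)s.%(ext)s <url>"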
def combo_formats(self, text):
"""
checks the selected option
@param text: the selected option's text
"""
        if text == 'H264 MP4 1080p':
            self.format_selected = 37
        elif text == 'H264 MP4 720p':
            self.format_selected = 22
        elif text == 'WebM 720p':
            self.format_selected = 45
        elif text == 'WebM 480p':
            self.format_selected = 43
        elif text == 'H264 MP4 480p':
            self.format_selected = 18
        elif text == 'H264 FLV 480p':
            self.format_selected = 35
        elif text == 'H264 FLV 360p':
            self.format_selected = 34
        elif text == 'H263 240p':
            self.format_selected = 5
        elif text == '3GP video':
            self.format_selected = 17
if __name__ == "__main__":
APP = QtGui.QApplication(sys.argv)
WINDOW = MyApplication()
WINDOW.show()
sys.exit(APP.exec_())
|
lordamit/youtube-dl-gui
|
youtube-dl-gui.py
|
Python
|
gpl-3.0
| 3,654 | 0.001368 |
from .sql import *
__all__ = ['DBAdapter', 'get_db_adapter', 'async_atomic', 'async_atomic_func', 'get_db_settings']
|
technomaniac/trelliopg
|
trelliopg/__init__.py
|
Python
|
mit
| 118 | 0.008475 |
#-*- coding: utf-8 -*-
"""
envois.test
~~~~~~~~~~~~
nosetests for the envois pkg
:copyright: (c) 2012 by Mek
:license: BSD, see LICENSE for more details.
"""
import os
import json
import unittest
from envois import invoice
jsn = {"seller": {"name": "Lambda Labs, Inc.", "address": {"street": "857 Clay St. Suite 206", "city": "San Francisco", "state": "CA", "zip": "94108", "phone": "(555) 555-5555", "email": "some@email.com" }, "account": {"swift": "...", "number": "...", "name": "Lambda Labs Inc.", "same_address": True}}, "buyer": {"name": "Foo Corp", "address": {"street": "88 Foo Road, Foo Place", "city": "Fooville", "state": "BA", "zip": "31337"}, "logo": "http://lambdal.com/images/lambda-labs-logo.png"}, "items": [{"description": "Facial Detection & Landmark Recognition Perpetual License", "qty": 1, "unit_price": 32768}], "terms": {"days": 30, "string": ""}}
class Envois_Test(unittest.TestCase):
def test_invoice(self):
invoice.make_invoice(jsn)
|
lambdal/envois
|
test/test_envois.py
|
Python
|
mit
| 1,000 | 0.004 |
import re
text = 'This is some text -- with punctuation.'
pattern = 'is'
print('Text :', text)
print('Pattern :', pattern)
m = re.search(pattern, text)
print('Search :', m)
s = re.fullmatch(pattern, text)
print('Full match :', s)
|
jasonwee/asus-rt-n14uhp-mrtg
|
src/lesson_text/re_fullmatch.py
|
Python
|
apache-2.0
| 247 | 0.004049 |
from coalib.parsing.StringProcessing import position_is_escaped
from tests.parsing.StringProcessing.StringProcessingTestBase import (
StringProcessingTestBase)
class PositionIsEscapedTest(StringProcessingTestBase):
# Test the position_is_escaped() function.
def test_basic(self):
expected_results = [
30 * [False] + [True] + 7 * [False],
30 * [False] + [True] + 7 * [False],
30 * [False] + [True] + 7 * [False],
28 * [False] + [True, False, True] + 7 * [False],
31 * [False] + [True] + 6 * [False],
31 * [False] + [True] + 6 * [False],
38 * [False],
6 * [False] + [True] + 31 * [False],
6 * [False] + [True, False, True] + 29 * [False],
6 * [False] + [True] + 31 * [False],
6 * [False] + [True, False, True] + 29 * [False],
14 * [False] + [True] + 23 * [False],
12 * [False] + [True, False, True] + 23 * [False],
38 * [False],
[],
14 * [False],
[False],
[False, True]]
self.assertResultsEqual(
position_is_escaped,
{(test_string, position): result
for test_string, string_result in zip(self.test_strings,
expected_results)
for position, result in zip(range(len(test_string)),
string_result)})
# Test position_is_escaped() with a more special test string.
def test_extended(self):
test_string = r"\\\\\abcabccba###\\13q4ujsabbc\+'**'ac###.#.####-ba"
result_dict = {
0: False,
1: True,
2: False,
3: True,
4: False,
5: True,
6: False,
7: False,
17: False,
18: True,
19: False,
30: False,
31: True,
50: False,
51: False,
6666666: False,
-1: False,
-20: True,
-21: False}
self.assertResultsEqual(
position_is_escaped,
{(test_string, position): result
for position, result in result_dict.items()})
|
yland/coala
|
tests/parsing/StringProcessing/PositionIsEscapedTest.py
|
Python
|
agpl-3.0
| 2,282 | 0 |
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
from odoo.exceptions import ValidationError
from odoo import models, fields, api, _
from odoo.osv import expression
SII_VAT = '60805000-0'
class AccountMove(models.Model):
_inherit = "account.move"
partner_id_vat = fields.Char(related='partner_id.vat', string='VAT No')
l10n_latam_internal_type = fields.Selection(
related='l10n_latam_document_type_id.internal_type', string='L10n Latam Internal Type')
def _get_l10n_latam_documents_domain(self):
self.ensure_one()
if self.journal_id.company_id.account_fiscal_country_id != self.env.ref('base.cl') or not \
self.journal_id.l10n_latam_use_documents:
return super()._get_l10n_latam_documents_domain()
if self.journal_id.type == 'sale':
domain = [('country_id.code', '=', "CL"), ('internal_type', '!=', 'invoice_in')]
if self.company_id.partner_id.l10n_cl_sii_taxpayer_type == '1':
                domain += [('code', '!=', '71')]  # VAT-affected companies don't issue "Boleta de honorarios Electrónica"
return domain
domain = [
('country_id.code', '=', 'CL'),
('internal_type', 'in', ['invoice', 'debit_note', 'credit_note', 'invoice_in'])]
if self.partner_id.l10n_cl_sii_taxpayer_type == '1' and self.partner_id_vat != '60805000-0':
domain += [('code', 'not in', ['39', '70', '71', '914', '911'])]
elif self.partner_id.l10n_cl_sii_taxpayer_type == '1' and self.partner_id_vat == '60805000-0':
domain += [('code', 'not in', ['39', '70', '71'])]
if self.move_type == 'in_invoice':
domain += [('internal_type', '!=', 'credit_note')]
elif self.partner_id.l10n_cl_sii_taxpayer_type == '2':
domain += [('code', 'in', ['70', '71', '56', '61'])]
elif self.partner_id.l10n_cl_sii_taxpayer_type == '3':
domain += [('code', 'in', ['35', '38', '39', '41', '56', '61'])]
elif not self.partner_id.l10n_cl_sii_taxpayer_type or self.partner_id.country_id != self.env.ref(
'base.cl') or self.partner_id.l10n_cl_sii_taxpayer_type == '4':
domain += [('code', 'in', [])]
return domain
def _check_document_types_post(self):
for rec in self.filtered(
lambda r: r.company_id.account_fiscal_country_id.code == "CL" and
r.journal_id.type in ['sale', 'purchase']):
tax_payer_type = rec.partner_id.l10n_cl_sii_taxpayer_type
vat = rec.partner_id.vat
country_id = rec.partner_id.country_id
latam_document_type_code = rec.l10n_latam_document_type_id.code
if (not tax_payer_type or not vat) and (country_id.code == "CL" and latam_document_type_code
and latam_document_type_code not in ['35', '38', '39', '41']):
raise ValidationError(_('Tax payer type and vat number are mandatory for this type of '
'document. Please set the current tax payer type of this customer'))
if rec.journal_id.type == 'sale' and rec.journal_id.l10n_latam_use_documents:
if country_id.code != "CL":
if not ((tax_payer_type == '4' and latam_document_type_code in ['110', '111', '112']) or (
tax_payer_type == '3' and latam_document_type_code in ['39', '41', '61', '56'])):
raise ValidationError(_(
'Document types for foreign customers must be export type (codes 110, 111 or 112) or you \
should define the customer as an end consumer and use receipts (codes 39 or 41)'))
if rec.journal_id.type == 'purchase' and rec.journal_id.l10n_latam_use_documents:
if vat != SII_VAT and latam_document_type_code == '914':
raise ValidationError(_('The DIN document is intended to be used only with RUT 60805000-0'
' (Tesorería General de La República)'))
if not tax_payer_type or not vat:
if country_id.code == "CL" and latam_document_type_code not in [
'35', '38', '39', '41']:
raise ValidationError(_('Tax payer type and vat number are mandatory for this type of '
'document. Please set the current tax payer type of this supplier'))
if tax_payer_type == '2' and latam_document_type_code not in ['70', '71', '56', '61']:
raise ValidationError(_('The tax payer type of this supplier is incorrect for the selected type'
' of document.'))
if tax_payer_type in ['1', '3']:
if latam_document_type_code in ['70', '71']:
raise ValidationError(_('The tax payer type of this supplier is not entitled to deliver '
'fees documents'))
if latam_document_type_code in ['110', '111', '112']:
raise ValidationError(_('The tax payer type of this supplier is not entitled to deliver '
'imports documents'))
if tax_payer_type == '4' or country_id.code != "CL":
raise ValidationError(_('You need a journal without the use of documents for foreign '
'suppliers'))
@api.onchange('journal_id')
def _l10n_cl_onchange_journal(self):
if self.company_id.country_id.code == 'CL':
self.l10n_latam_document_type_id = False
def _post(self, soft=True):
self._check_document_types_post()
return super()._post(soft)
def _l10n_cl_get_formatted_sequence(self, number=0):
return '%s %06d' % (self.l10n_latam_document_type_id.doc_code_prefix, number)
def _get_starting_sequence(self):
""" If use documents then will create a new starting sequence using the document type code prefix and the
journal document number with a 6 padding number """
if self.journal_id.l10n_latam_use_documents and self.env.company.account_fiscal_country_id.code == "CL":
if self.l10n_latam_document_type_id:
return self._l10n_cl_get_formatted_sequence()
return super()._get_starting_sequence()
def _get_last_sequence_domain(self, relaxed=False):
where_string, param = super(AccountMove, self)._get_last_sequence_domain(relaxed)
if self.company_id.account_fiscal_country_id.code == "CL" and self.l10n_latam_use_documents:
where_string = where_string.replace('journal_id = %(journal_id)s AND', '')
where_string += ' AND l10n_latam_document_type_id = %(l10n_latam_document_type_id)s AND ' \
'company_id = %(company_id)s AND move_type IN %(move_type)s'
param['company_id'] = self.company_id.id or False
param['l10n_latam_document_type_id'] = self.l10n_latam_document_type_id.id or 0
param['move_type'] = (('in_invoice', 'in_refund') if
self.l10n_latam_document_type_id._is_doc_type_vendor() else ('out_invoice', 'out_refund'))
return where_string, param
def _get_name_invoice_report(self):
self.ensure_one()
if self.l10n_latam_use_documents and self.company_id.account_fiscal_country_id.code == 'CL':
return 'l10n_cl.report_invoice_document'
return super()._get_name_invoice_report()
def _l10n_cl_get_invoice_totals_for_report(self):
self.ensure_one()
tax_ids_filter = tax_line_id_filter = None
include_sii = self._l10n_cl_include_sii()
if include_sii:
tax_ids_filter = (lambda aml, tax: bool(tax.l10n_cl_sii_code != 14))
tax_line_id_filter = (lambda aml, tax: bool(tax.l10n_cl_sii_code != 14))
tax_lines_data = self._prepare_tax_lines_data_for_totals_from_invoice(
tax_ids_filter=tax_ids_filter, tax_line_id_filter=tax_line_id_filter)
if include_sii:
amount_untaxed = self.currency_id.round(
self.amount_total - sum([x['tax_amount'] for x in tax_lines_data if 'tax_amount' in x]))
else:
amount_untaxed = self.amount_untaxed
return self._get_tax_totals(self.partner_id, tax_lines_data, self.amount_total, amount_untaxed, self.currency_id)
def _l10n_cl_include_sii(self):
self.ensure_one()
return self.l10n_latam_document_type_id.code in ['39', '41', '110', '111', '112', '34']
def _is_manual_document_number(self):
if self.journal_id.company_id.country_id.code == 'CL':
return self.journal_id.type == 'purchase' and not self.l10n_latam_document_type_id._is_doc_type_vendor()
return super()._is_manual_document_number()
|
jeremiahyan/odoo
|
addons/l10n_cl/models/account_move.py
|
Python
|
gpl-3.0
| 9,118 | 0.005485 |
# -*- coding:utf-8 -*-
# Modified from https://github.com/tylerneylon/explacy
import io
from collections import defaultdict
from pprint import pprint
from phrasetree.tree import Tree
def make_table(rows, insert_header=False):
col_widths = [max(len(s) for s in col) for col in zip(*rows[1:])]
rows[0] = [x[:l] for x, l in zip(rows[0], col_widths)]
fmt = '\t'.join('%%-%ds' % width for width in col_widths)
if insert_header:
rows.insert(1, ['─' * width for width in col_widths])
return '\n'.join(fmt % tuple(row) for row in rows)
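# Minimal usage sketch (hypothetical rows; header widths follow the data rows):
#   make_table([['word', 'tag'], ['I', 'PRP'], ['like', 'VBP']], insert_header=True)
# joins the columns with tabs and, with insert_header=True, inserts a '─'
# ruling under the (possibly truncated) header row.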
def _start_end(arrow):
start, end = arrow['from'], arrow['to']
mn = min(start, end)
mx = max(start, end)
return start, end, mn, mx
def pretty_tree_horizontal(arrows, _do_print_debug_info=False):
"""Print the dependency tree horizontally
Args:
arrows:
_do_print_debug_info: (Default value = False)
Returns:
"""
# Set the base height; these may increase to allow room for arrowheads after this.
arrows_with_deps = defaultdict(set)
for i, arrow in enumerate(arrows):
arrow['underset'] = set()
if _do_print_debug_info:
print('Arrow %d: "%s" -> "%s"' % (i, arrow['from'], arrow['to']))
num_deps = 0
start, end, mn, mx = _start_end(arrow)
for j, other in enumerate(arrows):
if arrow is other:
continue
o_start, o_end, o_mn, o_mx = _start_end(other)
if ((start == o_start and mn <= o_end <= mx) or
(start != o_start and mn <= o_start <= mx)):
num_deps += 1
if _do_print_debug_info:
print('%d is over %d' % (i, j))
arrow['underset'].add(j)
arrow['num_deps_left'] = arrow['num_deps'] = num_deps
arrows_with_deps[num_deps].add(i)
if _do_print_debug_info:
print('')
print('arrows:')
pprint(arrows)
print('')
print('arrows_with_deps:')
pprint(arrows_with_deps)
# Render the arrows in characters. Some heights will be raised to make room for arrowheads.
sent_len = (max([max(arrow['from'], arrow['to']) for arrow in arrows]) if arrows else 0) + 1
lines = [[] for i in range(sent_len)]
num_arrows_left = len(arrows)
while num_arrows_left > 0:
assert len(arrows_with_deps[0])
arrow_index = arrows_with_deps[0].pop()
arrow = arrows[arrow_index]
src, dst, mn, mx = _start_end(arrow)
# Check the height needed.
height = 3
if arrow['underset']:
height = max(arrows[i]['height'] for i in arrow['underset']) + 1
height = max(height, 3, len(lines[dst]) + 3)
arrow['height'] = height
if _do_print_debug_info:
print('')
print('Rendering arrow %d: "%s" -> "%s"' % (arrow_index,
arrow['from'],
arrow['to']))
print(' height = %d' % height)
goes_up = src > dst
# Draw the outgoing src line.
if lines[src] and len(lines[src]) < height:
lines[src][-1].add('w')
while len(lines[src]) < height - 1:
lines[src].append(set(['e', 'w']))
if len(lines[src]) < height:
lines[src].append({'e'})
lines[src][height - 1].add('n' if goes_up else 's')
# Draw the incoming dst line.
lines[dst].append(u'►')
while len(lines[dst]) < height:
lines[dst].append(set(['e', 'w']))
lines[dst][-1] = set(['e', 's']) if goes_up else set(['e', 'n'])
# Draw the adjoining vertical line.
for i in range(mn + 1, mx):
while len(lines[i]) < height - 1:
lines[i].append(' ')
lines[i].append(set(['n', 's']))
# Update arrows_with_deps.
for arr_i, arr in enumerate(arrows):
if arrow_index in arr['underset']:
arrows_with_deps[arr['num_deps_left']].remove(arr_i)
arr['num_deps_left'] -= 1
arrows_with_deps[arr['num_deps_left']].add(arr_i)
num_arrows_left -= 1
return render_arrows(lines)
def render_arrows(lines):
arr_chars = {'ew': u'─',
'ns': u'│',
'en': u'└',
'es': u'┌',
'enw': u'┴',
'ensw': u'┼',
'ens': u'├',
'esw': u'┬'}
# Convert the character lists into strings.
max_len = max(len(line) for line in lines)
for i in range(len(lines)):
lines[i] = [arr_chars[''.join(sorted(ch))] if type(ch) is set else ch for ch in lines[i]]
lines[i] = ''.join(reversed(lines[i]))
lines[i] = ' ' * (max_len - len(lines[i])) + lines[i]
return lines
def render_span(begin, end, unidirectional=False):
if end - begin == 1:
return ['───►']
elif end - begin == 2:
return [
'──┐',
'──┴►',
] if unidirectional else [
'◄─┐',
'◄─┴►',
]
rows = []
for i in range(begin, end):
if i == (end - begin) // 2 + begin:
rows.append(' ├►')
elif i == begin:
rows.append('──┐' if unidirectional else '◄─┐')
elif i == end - 1:
rows.append('──┘' if unidirectional else '◄─┘')
else:
rows.append(' │')
return rows
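# For instance, render_span(1, 4) yields three rows shaped like
#   '◄─┐', ' ├►', '◄─┘'
# (exact padding depends on the literals above); unidirectional=True swaps
# the '◄' corners for plain '──' ones.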
def tree_to_list(T):
return [T.label(), [tree_to_list(t) if isinstance(t, Tree) else t for t in T]]
def list_to_tree(L):
if isinstance(L, str):
return L
return Tree(L[0], [list_to_tree(child) for child in L[1]])
def render_labeled_span(b, e, spans, labels, label, offset, unidirectional=False):
spans.extend([''] * (b - offset))
spans.extend(render_span(b, e, unidirectional))
center = b + (e - b) // 2
labels.extend([''] * (center - offset))
labels.append(label)
labels.extend([''] * (e - center - 1))
def main():
# arrows = [{'from': 1, 'to': 0}, {'from': 2, 'to': 1}, {'from': 2, 'to': 4}, {'from': 2, 'to': 5},
# {'from': 4, 'to': 3}]
# lines = pretty_tree_horizontal(arrows)
# print('\n'.join(lines))
# print('\n'.join([
# '◄─┐',
# ' │',
# ' ├►',
# ' │',
# '◄─┘',
# ]))
print('\n'.join(render_span(7, 12)))
if __name__ == '__main__':
main()
left_rule = {'<': ':', '^': ':', '>': '-'}
right_rule = {'<': '-', '^': ':', '>': ':'}
def evaluate_field(record, field_spec):
    """Evaluate a field of a record using the type of the field_spec as a guide.
Args:
record:
field_spec:
Returns:
"""
if type(field_spec) is int:
return str(record[field_spec])
elif type(field_spec) is str:
return str(getattr(record, field_spec))
else:
return str(field_spec(record))
def markdown_table(headings, records, fields=None, alignment=None, file=None):
"""Generate a Doxygen-flavor Markdown table from records.
See https://stackoverflow.com/questions/13394140/generate-markdown-tables
file -- Any object with a 'write' method that takes a single string
parameter.
records -- Iterable. Rows will be generated from this.
fields -- List of fields for each row. Each entry may be an integer,
string or a function. If the entry is an integer, it is assumed to be
an index of each record. If the entry is a string, it is assumed to be
a field of each record. If the entry is a function, it is called with
the record and its return value is taken as the value of the field.
headings -- List of column headings.
alignment - List of pairs alignment characters. The first of the pair
specifies the alignment of the header, (Doxygen won't respect this, but
it might look good, the second specifies the alignment of the cells in
the column.
Possible alignment characters are:
'<' = Left align
'>' = Right align (default for cells)
'^' = Center (default for column headings)
Args:
headings:
records:
fields: (Default value = None)
alignment: (Default value = None)
file: (Default value = None)
Returns:
"""
if not file:
file = io.StringIO()
num_columns = len(headings)
if not fields:
fields = list(range(num_columns))
assert len(headings) == num_columns
# Compute the table cell data
columns = [[] for i in range(num_columns)]
for record in records:
for i, field in enumerate(fields):
            columns[i].append(evaluate_field(record, field))
# Fill out any missing alignment characters.
extended_align = alignment if alignment is not None else [('^', '<')]
if len(extended_align) > num_columns:
extended_align = extended_align[0:num_columns]
elif len(extended_align) < num_columns:
extended_align += [('^', '>') for i in range(num_columns - len(extended_align))]
heading_align, cell_align = [x for x in zip(*extended_align)]
field_widths = [len(max(column, key=len)) if len(column) > 0 else 0
for column in columns]
heading_widths = [max(len(head), 2) for head in headings]
column_widths = [max(x) for x in zip(field_widths, heading_widths)]
_ = ' | '.join(['{:' + a + str(w) + '}'
for a, w in zip(heading_align, column_widths)])
heading_template = '| ' + _ + ' |'
_ = ' | '.join(['{:' + a + str(w) + '}'
for a, w in zip(cell_align, column_widths)])
row_template = '| ' + _ + ' |'
_ = ' | '.join([left_rule[a] + '-' * (w - 2) + right_rule[a]
for a, w in zip(cell_align, column_widths)])
ruling = '| ' + _ + ' |'
file.write(heading_template.format(*headings).rstrip() + '\n')
file.write(ruling.rstrip() + '\n')
for row in zip(*columns):
file.write(row_template.format(*row).rstrip() + '\n')
if isinstance(file, io.StringIO):
text = file.getvalue()
file.close()
return text
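# Usage sketch (hypothetical records; the call returns the table text because
# no file object is supplied, so an io.StringIO is created internally):
#   markdown_table(['File', 'Size'], [('a.txt', 120), ('b.txt', 42)])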
|
hankcs/HanLP
|
plugins/hanlp_common/hanlp_common/visualization.py
|
Python
|
apache-2.0
| 10,303 | 0.000883 |
import sys
sys.path.append('/Users/natj/projects/arcmancer/lib/')
import pyarcmancer as pyac
from img import Imgplane
from visualize_polar import Visualize
from lineprofile import *
import units
import numpy as np
import matplotlib as mpl
from pylab import *
import os
from matplotlib import cm
import scipy.interpolate as interp
#from joblib import Parallel, delayed
#import multiprocessing
outdir = 'out/lines2/'
##################################################
# Set up figure & layout
fig = figure(figsize=(6,10))
mpl.rc('font', family='serif')
mpl.rc('xtick', labelsize='x-small')
mpl.rc('ytick', labelsize='x-small')
mpl.rcParams['image.cmap'] = 'inferno'
#num_cores = multiprocessing.cpu_count()
#print "num of cores {}", num_cores
#Setup pyarcmancer
##################################################
conf = pyac.Configuration()
conf.absolute_tolerance = 1e-12
conf.relative_tolerance = 1e-12
conf.henon_tolerance = 1e-8
conf.sampling_interval = 1e-3
conf.minimum_stepsize = 1e-10
conf.maximum_steps = 10000
conf.enforce_maximum_stepsize = False
conf.enforce_minimum_stepsize = True
conf.enforce_maximum_steps = True
conf.store_only_endpoints = True
#pyac.Log.set_console()
pyac.Log.set_file()
##################################################
# Star parameters
#R = 12.0
#M = 1.6
freq = 700.0
#incl = 15.0
#for M in [1.5, 1.1, 1.8]:
for M in [1.4]:
print "##################################################"
print "M = ", M
for R in [10.0]:
print "##################################################"
print " R = ", R
#for incl in [90, 80, 70, 60, 50, 40, 30, 20, 15, 10, 5, 1]:
#for incl in [9, 8, 7, 6, 4, 3, 2, 0.5]:
for incl in [20.0]:
print "##################################################"
print " i = ",incl
fname = 'neutronstar_f{:03d}_bb_r{:02d}_m{:03.1f}_i{:02d}.png'.format( np.int(freq), np.int(R), M, np.int(incl))
if os.path.isfile( outdir+fname ):
continue
# Variables in units of solar mass are derived here
# and typically presented with full name
mass = M
radius = R * units.solar_mass_per_km / mass
angvel = freq * 2.0*np.pi / units.solar_mass_per_s * mass
imgscale = (mass/units.solar_mass_per_km*1.0e5)**2 #cm^2/Msun
compactness = np.sqrt(1 - 2/radius) #isotropic radius compactness
conf.absolute_tolerance = 1e-12 * radius
conf.minimum_stepsize = 1e-10 * radius
##################################################
#Define metric and surfaces of the spacetime
#S+D
metric = pyac.AGMMetric(radius, 1.0, angvel, pyac.AGMMetric.MetricType.agm_no_quadrupole)
ns_surface = pyac.AGMSurface(radius, 1.0, angvel, pyac.AGMSurface.SurfaceType.spherical)
#Oblate Sch #WORKS
#metric = pyac.AGMMetric(radius, 1.0, angvel, pyac.AGMMetric.MetricType.agm_no_quadrupole)
#ns_surface = pyac.AGMSurface(radius, 1.0, angvel, pyac.AGMSurface.SurfaceType.agm_no_quadrupole)
#Full AGM + oblate
#metric = pyac.AGMMetric(radius, 1.0, angvel, pyac.AGMMetric.MetricType.agm_standard)
#ns_surface = pyac.AGMSurface(radius, 1.0, angvel, pyac.AGMSurface.SurfaceType.agm)
surfaces = [ ns_surface ]
# Build and configure image plane by hand
img = Imgplane(conf, metric, surfaces)
img.verbose = 1
img.incl = np.deg2rad(incl) #set inclination
img.distance = 100000.0*mass #set distance
#Locate star edges
img.find_boundaries(Nedge=50, reltol=1.0e-4, max_iterations=30)
#Build internal coarse grid for the interpolation routines
img.generate_internal_grid(Nrad = 80, Nchi = 50 )
img.dissect_geos()
#Construct output xy image plane from img object
##################################################
ion()
visz = Visualize()
visz.gs.update(hspace = 0.5)
visz.compactness = compactness
visz.plot(img)
#prepare line profile axis object
visz.axs[6] = subplot( visz.gs[3,:] )
visz.axs[6].minorticks_on()
visz.axs[6].set_xlabel(r'Energy')
visz.axs[6].set_ylabel(r'Flux')
#Construct image
#visz.star(img, spot)
#visz.polar(img, spot)
visz.dissect(img)
visz.star_plot(0.0)
visz.polar_dissect(img)
visz.polar_plot(0.0)
##################################################
# Compute line profile
es, yy2 = lineprofile(visz.redshift**4, visz.redshift)
            dE = max(np.abs(es[0] - compactness), np.abs(compactness - es[-1]))  # builtin max: np.max's second positional argument is the axis
##################################################
#Save redshift into a file
fname = 'reds_f{:03d}_bb_r{:02d}_m{:03.1f}_i{:02d}.csv'.format(
np.int(freq),
np.int(R),
M,
np.int(incl),
)
print 'Saving to a file: '+fname
np.savetxt(outdir+fname,
visz.redshift.flatten(),
delimiter=',',
fmt = '%10.9e'
)
#Save thetas into a file
fname = 'thetas_f{:03d}_bb_r{:02d}_m{:03.1f}_i{:02d}.csv'.format(
np.int(freq),
np.int(R),
M,
np.int(incl),
)
print 'Saving to a file: '+fname
np.savetxt(outdir+fname,
visz.thetas.flatten(),
delimiter=',',
fmt = '%10.9e'
)
#Save phi into a file
fname = 'phis_f{:03d}_bb_r{:02d}_m{:03.1f}_i{:02d}.csv'.format(
np.int(freq),
np.int(R),
M,
np.int(incl),
)
print 'Saving to a file: '+fname
np.savetxt(outdir+fname,
visz.phis.flatten(),
delimiter=',',
fmt = '%10.9e'
)
#redshift limits
vmin = compactness - dE
vmax = compactness + dE
# Line profile
##################################################
#ax = subplot(gs[2,2])
#ax.set_xlim(0.8, 1.2)
visz.axs[6].plot(es, yy2, "b-")
pause(1.0)
fname = 'neutronstar_f{:03d}_bb_r{:02d}_m{:03.1f}_i{:02d}.png'.format(
np.int(freq),
np.int(R),
M,
np.int(incl),
)
savefig(outdir+fname)
#save lineprofile
##################################################
#Finally save to file
fname = 'lineprofile_f{:03d}_bb_r{:02d}_m{:03.1f}_i{:02d}.csv'.format(
np.int(freq),
np.int(R),
M,
np.int(incl),
)
print 'Saving to a file: '+fname
np.savetxt(outdir+fname,
np.vstack((es, yy2)).T,
delimiter=',',
fmt = '%10.9e',
header='Energy, pdf'
)
|
natj/bender
|
sweep.py
|
Python
|
mit
| 8,380 | 0.024105 |
from GangaCore.GPIDev.Schema import *
from GangaCore.GPIDev.Lib.Tasks.common import *
from GangaCore.GPIDev.Lib.Tasks.ITransform import ITransform
from GangaCore.GPIDev.Lib.Job.Job import JobError
from GangaCore.GPIDev.Lib.Registry.JobRegistry import JobRegistrySlice, JobRegistrySliceProxy
from GangaCore.Core.exceptions import ApplicationConfigurationError
from GangaCore.GPIDev.Lib.Tasks.ITransform import ITransform
from GangaCore.GPIDev.Lib.Tasks.TaskLocalCopy import TaskLocalCopy
from GangaCore.Utility.logging import getLogger
from .ND280Unit_CSVEvtList import ND280Unit_CSVEvtList
from GangaND280.ND280Dataset.ND280Dataset import ND280LocalDataset
from GangaND280.ND280Splitter import splitCSVFile
import GangaCore.GPI as GPI
import os
logger = getLogger()
class ND280Transform_CSVEvtList(ITransform):
_schema = Schema(Version(1,0), dict(list(ITransform._schema.datadict.items()) + list({
'nbevents' : SimpleItem(defvalue=-1,doc='The number of events for each unit'),
}.items())))
_category = 'transforms'
_name = 'ND280Transform_CSVEvtList'
_exportmethods = ITransform._exportmethods + [ ]
def __init__(self):
super(ND280Transform_CSVEvtList,self).__init__()
def createUnits(self):
"""Create new units if required given the inputdata"""
# call parent for chaining
super(ND280Transform_CSVEvtList,self).createUnits()
# Look at the application schema and check if there is a csvfile variable
try:
csvfile = self.application.csvfile
except AttributeError:
logger.error('This application doesn\'t contain a csvfile variable. Use another Transform !')
return
        subsets = splitCSVFile(csvfile, self.nbevents)
for s,sub in enumerate(subsets):
# check if this data is being run over by checking all the names listed
ok = False
for unit in self.units:
if unit.subpartid == s:
ok = True
if ok:
continue
# new unit required for this dataset
unit = ND280Unit_CSVEvtList()
unit.name = "Unit %d" % len(self.units)
unit.subpartid = s
unit.eventswanted = sub
unit.inputdata = self.inputdata[0]
self.addUnitToTRF( unit )
def createChainUnit( self, parent_units, use_copy_output = True ):
"""Create a chained unit using the output data from the given units"""
# check all parent units for copy_output
copy_output_ok = True
for parent in parent_units:
if not parent.copy_output:
copy_output_ok = False
# all parent units must be completed so the outputfiles are filled correctly
for parent in parent_units:
if parent.status != "completed":
return None
if not use_copy_output or not copy_output_ok:
unit = ND280Unit_CSVEvtList()
unit.inputdata = ND280LocalDataset()
for parent in parent_units:
# loop over the output files and add them to the ND280LocalDataset - THIS MIGHT NEED SOME WORK!
job = GPI.jobs(parent.active_job_ids[0])
for f in job.outputfiles:
# should check for different file types and add them as appropriate to the dataset
# self.inputdata (== TaskChainInput).include/exclude_file_mask could help with this
# This will be A LOT easier with Ganga 6.1 as you can easily map outputfiles -> inputfiles!
unit.inputdata.names.append( os.path.join( job.outputdir, f.namePattern ) )
else:
unit = ND280Unit_CSVEvtList()
unit.inputdata = ND280LocalDataset()
for parent in parent_units:
# unit needs to have completed and downloaded before we can get file list
if parent.status != "completed":
return None
# we should be OK so copy all output to the dataset
for f in parent.copy_output.files:
unit.inputdata.names.append( os.path.join( parent.copy_output.local_location, f ) )
return unit
|
ganga-devs/ganga
|
ganga/GangaND280/Tasks/ND280Transform_CSVEvtList.py
|
Python
|
gpl-3.0
| 4,112 | 0.020185 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A class that creates resource projection specification."""
import sys
from googlecloudsdk.third_party.py27 import py27_copy as copy
PROJECTION_ARG_DOC = ' projection: The parent ProjectionSpec.'
ALIGN_DEFAULT = 'left'
ALIGNMENTS = {'left': lambda s, w: s.ljust(w),
'center': lambda s, w: s.center(w),
'right': lambda s, w: s.rjust(w)}
class ProjectionSpec(object):
"""Creates a resource projection specification.
A resource projection is an expression string that contains a list of resource
keys with optional attributes. A projector is a method that takes a projection
specification and a resource object as input and produces a new
JSON-serializable object containing only the values corresponding to the keys
in the projection specification.
Optional projection key attributes may transform the values in the output
JSON-serializable object. Cloud SDK projection attributes are used for output
formatting.
A default or empty projection expression still produces a projector that
converts a resource to a JSON-serializable object.
This class is used by the resource projection expression parser to create a
resource projection specification from a projection expression string.
Attributes:
aliases: The short key name alias dictionary.
_active: The transform active level. Incremented each time Defaults() is
called. Used to determine active transforms.
attributes: Projection attributes dict indexed by attribute name.
_columns: A list of (key,_Attribute) tuples used to project a resource to
a list of columns.
_compiler: The projection compiler method for nested projections.
_empty: An empty projection _Tree used by Projector().
_name: The projection name from the expression string.
_tree: The projection _Tree root, used by
resource_projector.Evaluate() to efficiently project each resource.
symbols: Default and caller-defined transform function dict indexed by
function name.
"""
DEFAULT = 0 # _Attribute default node flag.
INNER = 1 # _Attribute inner node flag.
PROJECT = 2 # _Attribute project node flag.
class _Column(object):
"""Column key and transform attribute for self._columns.
Attributes:
key: The column key.
attribute: The column key _Attribute.
"""
def __init__(self, key, attribute):
self.key = key
self.attribute = attribute
def __init__(self, defaults=None, symbols=None, compiler=None):
"""Initializes a projection.
Args:
defaults: resource_projection_spec.ProjectionSpec defaults.
symbols: Transform function symbol table dict indexed by function name.
compiler: The projection compiler method for nested projections.
"""
self.aliases = {}
self.attributes = {}
self._columns = []
self._compiler = compiler
self._empty = None
self._name = None
self._snake_headings = {}
self._snake_re = None
if defaults:
self._active = defaults.active
self._tree = copy.deepcopy(defaults.GetRoot())
self.Defaults()
if defaults.symbols:
self.symbols = copy.deepcopy(defaults.symbols)
if symbols:
self.symbols.update(symbols)
else:
self.symbols = symbols if symbols else {}
self.aliases.update(defaults.aliases)
else:
self._active = 0
self._tree = None
self.symbols = symbols
@property
def active(self):
"""Gets the transform active level."""
return self._active
@property
def compiler(self):
"""Returns the projection compiler method for nested projections."""
return self._compiler
def _Defaults(self, projection):
"""Defaults() helper -- converts a projection to a default projection.
Args:
projection: A node in the original projection _Tree.
"""
projection.attribute.flag = self.DEFAULT
for node in projection.tree.values():
self._Defaults(node)
def _Print(self, projection, out, level):
"""Print() helper -- prints projection node p and its children.
Args:
projection: A _Tree node in the original projection.
out: The output stream.
level: The nesting level counting from 1 at the root.
"""
for key in projection.tree:
out.write('{indent} {key} : {attribute}\n'.format(
indent=' ' * level,
key=key,
attribute=projection.tree[key].attribute))
self._Print(projection.tree[key], out, level + 1)
def AddAttribute(self, name, value):
"""Adds name=value to the attributes.
Args:
name: The attribute name.
value: The attribute value
"""
self.attributes[name] = value
def DelAttribute(self, name):
"""Deletes name from the attributes if it is in the attributes.
Args:
name: The attribute name.
"""
if name in self.attributes:
del self.attributes[name]
def AddAlias(self, name, key):
"""Adds name as an alias for key to the projection.
Args:
name: The short (no dots) alias name for key.
key: The parsed key to add.
"""
self.aliases[name] = key
def AddKey(self, key, attribute):
"""Adds key and attribute to the projection.
Args:
key: The parsed key to add.
attribute: Parsed _Attribute to add.
"""
self._columns.append(self._Column(key, attribute))
def SetName(self, name):
"""Sets the projection name.
The projection name is the rightmost of the names in the expression.
Args:
name: The projection name.
"""
if self._name:
# Reset the name-specific attributes.
self.attributes = {}
self._name = name
def GetRoot(self):
"""Returns the projection root node.
Returns:
The resource_projector_parser._Tree root node.
"""
return self._tree
def SetRoot(self, root):
"""Sets the projection root node.
Args:
root: The resource_projector_parser._Tree root node.
"""
self._tree = root
def GetEmpty(self):
"""Returns the projector resource_projector_parser._Tree empty node.
Returns:
The projector resource_projector_parser._Tree empty node.
"""
return self._empty
def SetEmpty(self, node):
"""Sets the projector resource_projector_parser._Tree empty node.
The empty node is used by to apply [] empty slice projections.
Args:
node: The projector resource_projector_parser._Tree empty node.
"""
self._empty = node
def Columns(self):
"""Returns the projection columns.
Returns:
The columns in the projection, None if the entire resource is projected.
"""
return self._columns
def ColumnCount(self):
"""Returns the number of columns in the projection.
Returns:
The number of columns in the projection, 0 if the entire resource is
projected.
"""
return len(self._columns)
def Defaults(self):
"""Converts the projection to a default projection.
A default projection provides defaults for attribute values and function
symbols. An explicit non-default projection value always overrides the
corresponding default value.
"""
if self._tree:
self._Defaults(self._tree)
self._columns = []
self._active += 1
def Aliases(self):
"""Returns the short key name alias dictionary.
This dictionary maps short (no dots) names to parsed keys.
Returns:
The short key name alias dictionary.
"""
return self.aliases
def Attributes(self):
"""Returns the projection _Attribute dictionary.
Returns:
The projection _Attribute dictionary.
"""
return self.attributes
def Alignments(self):
"""Returns the projection column justfication list.
Returns:
The ordered list of alignment functions, where each function is one of
ljust [default], center, or rjust.
"""
return [ALIGNMENTS[col.attribute.align] for col in self._columns]
def Labels(self):
"""Returns the ordered list of projection labels.
Returns:
The ordered list of projection label strings, None if all labels are
empty.
"""
labels = [col.attribute.label or '' for col in self._columns]
return labels if any(labels) else None
def Name(self):
"""Returns the projection name.
The projection name is the rightmost of the names in the expression.
Returns:
The projection name, None if none was specified.
"""
return self._name
def Order(self):
"""Returns the projection sort key order suitable for use by sorted().
Example:
projection = resource_projector.Compile('...')
order = projection.Order()
if order:
rows = sorted(rows, key=itemgetter(*order))
Returns:
The list of (sort-key-index, reverse), [] if projection is None
or if all sort order indices in the projection are None (unordered).
"""
ordering = []
for i, col in enumerate(self._columns):
if col.attribute.order or col.attribute.reverse:
ordering.append(
(col.attribute.order or sys.maxint, i, col.attribute.reverse))
return [(i, reverse) for _, i, reverse in sorted(ordering)]
def Print(self, out=sys.stdout):
"""Prints the projection with indented nesting.
Args:
out: The output stream, sys.stdout if None.
"""
if self._tree:
self._Print(self._tree, out, 1)
def Tree(self):
"""Returns the projection tree root.
Returns:
The projection tree root.
"""
return self._tree
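# Minimal sketch of how callers typically drive this class (the 'basename'
# symbol is illustrative; keys and attributes normally come from the parser):
#   spec = ProjectionSpec(symbols={'basename': lambda x: x.split('/')[-1]})
#   spec.AddAttribute('format', 'table')
#   spec.Defaults()        # demote everything parsed so far to defaults
#   spec.ColumnCount()     # 0 until AddKey() has populated columns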
|
flgiordano/netcash
|
+/google-cloud-sdk/lib/googlecloudsdk/core/resource/resource_projection_spec.py
|
Python
|
bsd-3-clause
| 10,147 | 0.00542 |
import pytest
from eventum.lib.text import clean_markdown
@pytest.mark.parametrize(["markdown", "output"], [
('**Bold** text is unbolded.', 'Bold text is unbolded.'),
('So is *underlined* text.', 'So is underlined text.'),
('An [](http://empty-link).', 'An.'),
('A [test](https://adicu.com)', 'A test (https://adicu.com)'),
('A [test](http://adicu.com)', 'A test (http://adicu.com)'),
('A [test](garbage) passes.', 'A test passes.'),
('An  gets removed.', 'An gets removed.'),
('An , [link](http://adicu.com), and an '
'[](http://adicu.com).',
'An, link (http://adicu.com), and an.'),
])
def test_clean_markdown(markdown, output):
assert clean_markdown(markdown) == output
|
danrschlosser/eventum
|
tests/test_text.py
|
Python
|
mit
| 790 | 0 |
"""Provide access to Python's configuration information.
This is actually PyPy's minimal configuration information.
The specific configuration variables available depend heavily on the
platform and configuration. The values may be retrieved using
get_config_var(name), and the list of variables is available via
get_config_vars().keys(). Additional convenience functions are also
available.
"""
__revision__ = "$Id: sysconfig.py 85358 2010-10-10 09:54:59Z antoine.pitrou $"
import sys
import os
import shlex
from distutils.errors import DistutilsPlatformError
PREFIX = os.path.normpath(sys.prefix)
EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
project_base = os.path.dirname(os.path.abspath(sys.executable))
python_build = False
def get_python_inc(plat_specific=0, prefix=None):
from os.path import join as j
return j(sys.prefix, 'include')
def get_python_version():
"""Return a string containing the major and minor Python version,
leaving off the patchlevel. Sample return values could be '1.5'
or '2.2'.
"""
return sys.version[:3]
def get_python_lib(plat_specific=0, standard_lib=0, prefix=None):
"""Return the directory containing the Python library (standard or
site additions).
If 'plat_specific' is true, return the directory containing
platform-specific modules, i.e. any module from a non-pure-Python
module distribution; otherwise, return the platform-shared library
directory. If 'standard_lib' is true, return the directory
containing standard Python library modules; otherwise, return the
directory for site-specific modules.
If 'prefix' is supplied, use it instead of sys.prefix or
sys.exec_prefix -- i.e., ignore 'plat_specific'.
"""
if prefix is None:
prefix = PREFIX
if standard_lib:
return os.path.join(prefix, "lib-python", get_python_version())
return os.path.join(prefix, 'site-packages')
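# For example, on a PyPy install with sys.prefix == '/opt/pypy':
#   get_python_lib()                -> '/opt/pypy/site-packages'
#   get_python_lib(standard_lib=1)  -> '/opt/pypy/lib-python/<x.y>'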
_config_vars = None
def _init_posix():
"""Initialize the module as appropriate for POSIX systems."""
g = {}
g['EXE'] = ""
g['SO'] = ".so"
g['SOABI'] = g['SO'].rsplit('.')[0]
g['LIBDIR'] = os.path.join(sys.prefix, 'lib')
g['CC'] = "gcc -pthread" # -pthread might not be valid on OS/X, check
global _config_vars
_config_vars = g
def _init_nt():
"""Initialize the module as appropriate for NT"""
g = {}
g['EXE'] = ".exe"
g['SO'] = ".pyd"
g['SOABI'] = g['SO'].rsplit('.')[0]
global _config_vars
_config_vars = g
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform. Generally this includes
everything needed to build extensions and install both pure modules and
extensions. On Unix, this means every variable defined in Python's
installed Makefile; on Windows and Mac OS it's a much smaller set.
With arguments, return a list of values that result from looking up
each argument in the configuration variable dictionary.
"""
global _config_vars
if _config_vars is None:
func = globals().get("_init_" + os.name)
if func:
func()
else:
_config_vars = {}
_config_vars['prefix'] = PREFIX
_config_vars['exec_prefix'] = EXEC_PREFIX
if args:
vals = []
for name in args:
vals.append(_config_vars.get(name))
return vals
else:
return _config_vars
def get_config_var(name):
"""Return the value of a single variable using the dictionary
returned by 'get_config_vars()'. Equivalent to
get_config_vars().get(name)
"""
return get_config_vars().get(name)
def customize_compiler(compiler):
"""Dummy method to let some easy_install packages that have
optional C speedup components.
"""
if compiler.compiler_type == "unix":
compiler.compiler_so.extend(['-O2', '-fPIC', '-Wimplicit'])
compiler.shared_lib_extension = get_config_var('SO')
if "CPPFLAGS" in os.environ:
cppflags = shlex.split(os.environ["CPPFLAGS"])
compiler.compiler.extend(cppflags)
compiler.compiler_so.extend(cppflags)
compiler.linker_so.extend(cppflags)
if "CFLAGS" in os.environ:
cflags = shlex.split(os.environ["CFLAGS"])
compiler.compiler.extend(cflags)
compiler.compiler_so.extend(cflags)
compiler.linker_so.extend(cflags)
if "LDFLAGS" in os.environ:
ldflags = shlex.split(os.environ["LDFLAGS"])
compiler.linker_so.extend(ldflags)
from sysconfig_cpython import (
parse_makefile, _variable_rx, expand_makefile_vars)
|
jedie/pypyjs-standalone
|
website/js/pypy.js-0.3.0/lib/modules/distutils/sysconfig_pypy.py
|
Python
|
mit
| 4,713 | 0.001485 |
from ez_setup import use_setuptools
use_setuptools()
from setuptools import setup, find_packages
setup(name = 'Adafruit_Nokia_LCD',
version = '0.1.0',
author = 'Tony DiCola',
author_email = 'tdicola@adafruit.com',
description = 'Library to display images on the Nokia 5110/3110 LCD.',
license = 'MIT',
url = 'https://github.com/adafruit/Adafruit_Nokia_LCD/',
dependency_links = ['https://github.com/adafruit/Adafruit_Python_GPIO/tarball/master#egg=Adafruit-GPIO-0.1.0'],
install_requires = ['Adafruit-GPIO>=0.1.0'],
packages = find_packages())
|
projectbuendia/server-status
|
libraries/Adafruit_Nokia_LCD/setup.py
|
Python
|
apache-2.0
| 591 | 0.099831 |
"""
Plots Arctic sea ice extent from June 2002-present using JAXA metadata
Website : https://ads.nipr.ac.jp/vishop/vishop-extent.html
Author : Zachary M. Labe
Date : 4 August 2016
"""
### Import modules
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import matplotlib
import datetime
import urllib.request
import urllib as UL
### Directory and time
directory = './Figures/'
source = 'twitter'
now = datetime.datetime.now()
currentmn = str(now.month)
currentdy = str(now.day-1)
currentyr = str(now.year)
currenttime = currentmn + '_' + currentdy + '_' + currentyr
### Load url
url = 'https://ads.nipr.ac.jp/vishop.ver1/data/graph/plot_extent_n_v2.csv'
### Read file
raw_data = UL.request.urlopen(url)
dataset = np.genfromtxt(raw_data, skip_header=0,delimiter=",",)
### Set missing data to nan
dataset[np.where(dataset==-9999)] = np.nan
### Variables
month = dataset[1:,0] # 1-12, nan as month[0]
day = dataset[1:,1] # 1-31, nan as day[0]
mean1980 = dataset[1:,2] # km^2, nan as mean1980[0]
mean1990 = dataset[1:,3] # km^2, nan as mean1990[0]
mean2000 = dataset[1:,4] # km^2, nan as mean2000[0]
years = dataset[1:,5:]
doy = np.arange(0,len(day),1)
### Change units to million km^2
years = years/1e6
### Recent day of current year
currentyear = years[:,-1]
lastday = now.timetuple().tm_yday -1
currentice = currentyear[lastday]
currentanom = currentice - (mean1980[lastday]/1e6)
### Leap year
currentyear[59] = currentyear[58]
### Changes
weekchange = currentice - currentyear[lastday-7]
daychange = currentice - currentyear[lastday-1]
### Make plot
matplotlib.rc('savefig', facecolor='black')
matplotlib.rc('axes', edgecolor='white')
matplotlib.rc('xtick', color='white')
matplotlib.rc('ytick', color='white')
matplotlib.rc('axes', labelcolor='white')
matplotlib.rc('axes', facecolor='black')
plt.rc('text',usetex=True)
plt.rc('font',**{'family':'sans-serif','sans-serif':['Avant Garde']})
fig = plt.figure()
ax = plt.subplot(111)
### Adjust axes in time series plots
def adjust_spines(ax, spines):
for loc, spine in ax.spines.items():
if loc in spines:
spine.set_position(('outward', 5))
else:
spine.set_color('none')
if 'left' in spines:
ax.yaxis.set_ticks_position('left')
else:
ax.yaxis.set_ticks([])
if 'bottom' in spines:
ax.xaxis.set_ticks_position('bottom')
else:
ax.xaxis.set_ticks([])
plt.plot(doy,mean1980/1e6,linewidth=1,linestyle='--',
color='darkmagenta',label=r'1980s Mean',zorder=1)
plt.plot(doy,mean1990/1e6,linewidth=1,linestyle='--',
color='c',label=r'1990s Mean',zorder=1)
plt.plot(doy,mean2000/1e6,linewidth=1,linestyle='--',
color='dodgerblue',label=r'2000s Mean',zorder=1)
bar4, = ax.plot(doy,years[:,-2],color='salmon',label=r'Year 2017',linewidth=1.8,
alpha=1,zorder=3)
bar2, = ax.plot(doy,years[:,-7],color='gold',label=r'Year 2012',linewidth=1.8,
alpha=1,zorder=3)
bar3, = ax.plot(doy,years[:,5],color='white',label=r'Year 2007',linewidth=1.8,
alpha=1,zorder=2)
bar, = ax.plot(doy,currentyear,linewidth=2.5,zorder=4,color='r')
plt.scatter(doy[lastday],currentyear[lastday],
s=25,color='r',zorder=4)
xlabels = [r'Jan',r'Feb',r'Mar',r'Apr',r'May',r'Jun',r'Jul',
r'Aug',r'Sep',r'Oct',r'Nov',r'Dec',r'Jan']
strmonth = xlabels[int(currentmn)-1]
asof = strmonth + ' ' + currentdy + ', ' + currentyr
plt.text(0.6,3.9,r'\textbf{DATA:} JAXA (Arctic Data archive System, NIPR)',
fontsize=6,rotation='horizontal',ha='left',color='w',alpha=0.6)
plt.text(0.6,3.5,r'\textbf{SOURCE:} https://ads.nipr.ac.jp/vishop/vishop-extent.html',
fontsize=6,rotation='horizontal',ha='left',color='w',alpha=0.6)
plt.text(0.6,3.1,r'\textbf{GRAPHIC:} Zachary Labe (@ZLabe)',
fontsize=6,rotation='horizontal',ha='left',color='w',alpha=0.6)
### Insert sea ice text
if lastday <= 365:
xcord = 120
ycord = 10
plt.text(xcord-4,ycord-0.65,r'\textbf{%s}' '\n' r'\textbf{%s} \textbf{km}$^2$' \
% (asof,format(currentice*1e6,",f")[:-7]),fontsize=10,
rotation='horizontal',ha='right',color='w',alpha=0.6)
if lastday <= 365:
plt.text(xcord-4,ycord-2.5,r'\textbf{7--day change}'\
'\n' r'\textbf{%s} \textbf{km}$^2$'\
% (format(weekchange*1e6,",f")[:-7]),fontsize=10,
rotation='horizontal',ha='right',color='w',alpha=0.6)
plt.text(xcord-4,ycord-4,r'\textbf{1--day change}' \
'\n' r'\textbf{%s} \textbf{km}$^2$'\
% (format((daychange*1e6),",f")[:-7]),fontsize=10,
rotation='horizontal',ha='right',color='w',alpha=0.6)
adjust_spines(ax, ['left', 'bottom'])
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.spines['left'].set_linewidth(2)
ax.spines['bottom'].set_linewidth(2)
plt.ylabel(r'\textbf{Extent [$\bf{\times}$10$^{6}$\ \textbf{km}$^2$]}',
fontsize=15,alpha=0.6)
l = plt.legend(shadow=False,fontsize=6,loc='upper left',
bbox_to_anchor=(0.655, 1.013),fancybox=True,ncol=2)
for text in l.get_texts():
text.set_color('w')
plt.xticks(np.arange(0,361,30),xlabels,rotation=0,fontsize=10)
ylabels = list(map(str,np.arange(2,18,1)))
plt.yticks(np.arange(2,18,1),ylabels,fontsize=10)
plt.ylim([3,16])
plt.xlim([0,360])
ax.yaxis.grid(zorder=1,color='w',alpha=0.35)
fig.suptitle(r'\textbf{ARCTIC SEA ICE}',
fontsize=33,color='w',alpha=0.6)
ax.tick_params('both',length=5.5,width=2,which='major')
year2012 = years[:,-7]
year2007 = years[:,5]
year2017 = years[:,-2]
def update(num,doy,currentyear,year2017,year2012,year2007,bar,bar2,bar4):
bar.set_data(doy[:num+1],currentyear[:num+1])
bar.axes.axis([0,360,3,16])
bar2.set_data(doy[:num+1],year2012[:num+1])
bar2.axes.axis([0,360,3,16])
bar3.set_data(doy[:num+1],year2007[:num+1])
bar3.axes.axis([0,360,3,16])
bar4.set_data(doy[:num+1],year2017[:num+1])
bar4.axes.axis([0,360,3,16])
return bar,
ani = animation.FuncAnimation(fig,update,370,fargs=[doy,currentyear,year2017,year2012,year2007,bar,bar2,bar4],
interval=.001,blit=True)
ani.save(directory + 'moving_SIE_JAXA.gif',dpi=150)
print('\n')
print('JAXA Sea Ice Loss Missing Days')
print('Day 5 Loss = %s km^2' % ((currentyear[lastday-4] - currentyear[lastday-5])*1e6))
print('Day 4 Loss = %s km^2' % ((currentyear[lastday-3] - currentyear[lastday-4])*1e6))
print('Day 3 Loss = %s km^2' % ((currentyear[lastday-2] - currentyear[lastday-3])*1e6))
print('Day 2 Loss = %s km^2' % ((currentyear[lastday-1] - currentyear[lastday-2])*1e6))
print('Day 1 Loss = %s km^2' % ((currentyear[lastday] - currentyear[lastday-1])*1e6))
print('\n' 'Total 5-day Loss = %s km^2' % ((currentyear[lastday]-currentyear[lastday-5])*1e6))
print('\n')
|
zmlabe/IceVarFigs
|
Scripts/SeaIce/JAXA_seaice_movinglines.py
|
Python
|
mit
| 6,992 | 0.034611 |