Dataset columns:

  repo_name   stringlengths   5 - 100
  path        stringlengths   4 - 231
  language    stringclasses   1 value
  license     stringclasses   15 values
  size        int64           6 - 947k
  score       float64         0 - 0.34
  prefix      stringlengths   0 - 8.16k
  middle      stringlengths   3 - 512
  suffix      stringlengths   0 - 8.17k
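For orientation, here is a minimal sketch of how a single row of this table could be held in Python. The DatasetRow class name, the dataclass representation, and the reading of prefix/middle/suffix as a fill-in-the-middle split of each source file are illustrative assumptions, not something the dump itself defines.

from dataclasses import dataclass


@dataclass
class DatasetRow:
    """One sample row; prefix/middle/suffix appear to carve a source file into three pieces."""
    repo_name: str   # e.g. "geoenvo/opendims"
    path: str        # file path inside the repository
    language: str    # "Python" for every row shown below
    license: str     # license identifier, e.g. "gpl-3.0"
    size: int        # file size in bytes (6 to 947k in this split)
    score: float     # per-row score (0 to 0.34)
    prefix: str      # text before the held-out span (0 to 8.16k chars)
    middle: str      # the held-out span itself (3 to 512 chars)
    suffix: str      # text after the held-out span (0 to 8.17k chars)


# Hypothetical usage with the metadata of the first record below
# (prefix/middle/suffix elided):
row = DatasetRow("geoenvo/opendims",
                 "opendims/automaticweathersystem/migrations/0001_initial.py",
                 "Python", "gpl-3.0", 2669, 0.005995, "...", "...", "...")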
geoenvo/opendims
opendims/automaticweathersystem/migrations/0001_initial.py
Python
gpl-3.0
2,669
0.005995
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-04-21 07:08
from __future__ import unicode_literals

import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone


class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='AWSReport',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Created')),
                ('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
                ('temperature', models.PositiveIntegerField(default=0, verbose_name='Temperature')),
                ('humidity', models.PositiveIntegerField(default=0, verbose_name='Humidity')),
                ('pressure', models.PositiveIntegerField(default=0, verbose_name='Pressure')),
                ('wind_speed', models.PositiveIntegerField(default=0, verbose_name='Wind Speed')),
                ('wind_direction', models.PositiveIntegerField(default=0, verbose_name='Wind Direction')),
                ('day_rain', models.PositiveIntegerField(default=0, verbose_name='Day Rain')),
                ('rain_rate', models.PositiveIntegerField(default=0, verbose_name='Rain Rate')),
                ('uv_index', models.PositiveIntegerField(default=0, verbose_name='UV Index')),
                ('solar_radiation', models.PositiveIntegerField(default=0, verbose_name='Solar Radiation')),
            ],
            options={
                'ordering': ['-updated', '-created'],
                'get_latest_by': 'updated',
            },
        ),
        migrations.CreateModel(
            name='AWSStation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=100, verbose_name='Name')),
                ('point', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326, verbose_name='Point')),
                ('note', models.TextField(blank=True, verbose_name='Note')),
            ],
            options={
                'abstract': False,
            },
        ),
        migrations.AddField(
            model_name='awsreport',
            name='awsstation',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='automaticweathersystem.AWSStation', verbose_name='AWS Station'),
        ),
    ]
DiMartinoX/plugin.video.kinopoisk.ru
script.myshows/episode_sync.py
Python
gpl-3.0
16,581
0.024365
# -*- coding: utf-8 -*- import xbmc import xbmcgui import xbmcaddon from utilities import xbmcJsonRequest, Debug, notification, chunks, get_bool_setting __setting__ = xbmcaddon.Addon('script.myshows').getSetting __getstring__ = xbmcaddon.Addon('script.myshows').getLocalizedString add_episodes_to_myshows = get_bool_setting('add_episodes_to_myshows') myshows_episode_playcount = get_bool_setting('myshows_episode_playcount') xbmc_episode_playcount = get_bool_setting('xbmc_episode_playcount') clean_myshows_episodes = get_bool_setting('clean_myshows_episodes') progress = xbmcgui.DialogProgress() def compare_show(xbmc_show, myshows_show): missing = [] myshows_seasons = [x['season'] for x in myshows_show['seasons']] for xbmc_episode in xbmc_show['episodes']: if xbmc_episode['season'] not in myshows_seasons: missing.append(xbmc_episode) else: for myshows_season in myshows_show['seasons']: if xbmc_episode['season'] == myshows_season['season']: if xbmc_episode['episode'] not in myshows_season['episodes']: missing.append(xbmc_episode) return missing def compare_show_watched_myshows(xbmc_show, myshows_show): missing = [] for xbmc_episode in xbmc_show['episodes']: if xbmc_episode['playcount']: if xbmc_episode['season'] not in [x['season'] for x in myshows_show['seasons']]: missing.append(xbmc_episode) else: for myshows_season in myshows_show['seasons']: if xbmc_episode['season'] == myshows_season['season']: if xbmc_episode['episode'] not in myshows_season['episodes']: missing.append(xbmc_episode) return missing def compare_show_watched_xbmc(xbmc_show, myshows_show): missing = [] for xbmc_episode in xbmc_show['episodes']: if not xbmc_episode['playcount']: for myshows_season in myshows_show['seasons']: if xbmc_episode['season'] == myshows_season['season']: if xbmc_episode['episode'] in myshows_season['episodes']: missing.append(xbmc_episode) return missing class SyncEpisodes(): def __init__(self, show_progress=False, api=None): self.myshowsapi = api if self.myshowsapi == None: from myshowsapi import myshowsAPI self.myshowsapi = myshowsAPI() self.xbmc_shows = [] self.myshows_shows = {'collection': [], 'watched': []} self.notify = __setting__('show_sync_notifications') == 'true' self.show_progress = show_progress if self.show_progress: progress.create('%s %s' % (__getstring__(1400), __getstring__(1406)), line1=' ', line2=' ', line3=' ') def Canceled(self): if self.show_progress and progress.iscanceled(): Debug('[Episodes Sync] Sync was canceled by user') return True elif xbmc.abortRequested: Debug('XBMC abort requested') return True else: return False def GetFromXBMC(self): Debug('[Episodes Sync] Getting episodes from XBMC') if self.show_progress: progress.update(5, line1=__getstring__(1432), line2=' ', line3=' ') shows = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetTVShows', 'params': {'properties': ['title', 'imdbnumber']}, 'id': 0}) # sanity check, test for empty result if not shows: Debug("[Episodes Sync] xbmc json request was empty.") return # test to see if tvshows key exists in xbmc json request if 'tvshows' in shows: shows = shows['tvshows'] Debug("[Episodes Sync] XBMC JSON Result: '%s'" % str(shows)) else: Debug("[Episodes Sync] Key 'tvshows' not found") return if self.show_progress: progress.update(10, line1=__getstring__(1433), line2=' ', line3=' ') for show in shows: if self.Canceled(): return show['episodes'] = [] episodes = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetEpisodes', 'params': {'tvshowid': show['tvshowid'], 'properties': ['season', 'episode', 
'playcount', 'uniqueid']}, 'id': 0}) if 'episodes' in episodes: episodes = episodes['episodes'] show['episodes'] = [x for x in episodes if type(x) == type(dict())] self.xbmc_shows = [x for x in shows if x['episodes']] def GetCollectionFrommyshows(self): Debug('[Episodes Sync] Getting episode collection from myshows.tv') if self.show_progress: progress.update(15, line1=__getstring__(1434), line2=' ', line3=' ') self.myshows_shows['collection'] = self.m
yshowsapi.getShowLibrary() def AddTomyshows(self): Debug('[Episodes Sync] Checking for episodes missing from myshows.tv collection') if self.show_progress: progress.update(30, line1=__getstring__
(1435), line2=' ', line3=' ') add_to_myshows = [] myshows_imdb_index = {} myshows_tvdb_index = {} myshows_title_index = {} for i in range(len(self.myshows_shows['collection'])): if 'imdb_id' in self.myshows_shows['collection'][i]: myshows_imdb_index[self.myshows_shows['collection'][i]['imdb_id']] = i if 'tvdb_id' in self.myshows_shows['collection'][i]: myshows_tvdb_index[self.myshows_shows['collection'][i]['tvdb_id']] = i myshows_title_index[self.myshows_shows['collection'][i]['title']] = i for xbmc_show in self.xbmc_shows: missing = [] #IMDB ID if xbmc_show['imdbnumber'].startswith('tt'): if xbmc_show['imdbnumber'] not in myshows_imdb_index.keys(): missing = xbmc_show['episodes'] else: myshows_show = self.myshows_shows['collection'][myshows_imdb_index[xbmc_show['imdbnumber']]] missing = compare_show(xbmc_show, myshows_show) #TVDB ID elif xbmc_show['imdbnumber'].isdigit(): if xbmc_show['imdbnumber'] not in myshows_tvdb_index.keys(): missing = xbmc_show['episodes'] else: myshows_show = self.myshows_shows['collection'][myshows_tvdb_index[xbmc_show['imdbnumber']]] missing = compare_show(xbmc_show, myshows_show) #Title else: if xbmc_show['title'] not in myshows_title_index.keys(): missing = xbmc_show['episodes'] else: myshows_show = self.myshows_shows['collection'][myshows_title_index[xbmc_show['title']]] missing = compare_show(xbmc_show, myshows_show) if missing: show = {'title': xbmc_show['title'], 'episodes': [{'episode': x['episode'], 'season': x['season'], 'episode_tvdb_id': x['uniqueid']['unknown']} for x in missing]} Debug('[Episodes Sync][AddTomyshows] %s' % show) if xbmc_show['imdbnumber'].isdigit(): show['tvdb_id'] = xbmc_show['imdbnumber'] else: show['imdb_id'] = xbmc_show['imdbnumber'] add_to_myshows.append(show) if add_to_myshows: Debug('[Episodes Sync] %i shows(s) have episodes added to myshows.tv collection' % len(add_to_myshows)) if self.show_progress: progress.update(35, line1=__getstring__(1435), line2='%i %s' % (len(add_to_myshows), __getstring__(1436))) for show in add_to_myshows: if self.Canceled(): return if self.show_progress: progress.update(45, line1=__getstring__(1435), line2=show['title'].encode('utf-8', 'ignore'), line3='%i %s' % (len(show['episodes']), __getstring__(1437))) self.myshowsapi.addEpisode(show) else: Debug('[Episodes Sync] myshows.tv episode collection is up to date') def GetWatchedFrommyshows(self): Debug('[Episodes Sync] Getting watched episodes from myshows.tv') if self.show_progress: progress.update(50, line1=__getstring__(1438), line2=' ', line3=' ') self.myshows_shows['watched'] = self.myshowsapi.getWatchedEpisodeLibrary() def UpdatePlaysmyshows(self): Debug('[Episodes Sync] Checking watched episodes on myshows.tv') if self.show_progress: progress.update(60, line1=__getstring__(1438), line2=' ', line3=' ') update_playcount = [] myshows_imdb_index = {} myshows_tvdb_index = {} myshows_title_index = {} for i in range(len(self.myshows_shows['watched'])): if 'imdb_id' in self.myshows_shows['watched'][i]: myshows_imdb_index[self.myshows_shows['watched'][i]['imdb_id']] = i if 'tvdb_id' in self.myshows_shows['watched'][i]: myshows_tvdb_index[self.myshows_shows['watched'][i]['tvdb_id']] = i myshows_title_index[self.myshows_shows['watched'][i]['title']] = i xbmc_shows_watched = [] for show in self.xbmc_shows: watched_episodes = [x for x in show['episodes'] if x['playcount']] if watched_episodes: xbmc_shows_watched.append(show) for xbmc_show in xb
indictranstech/omnitech-frappe
setup.py
Python
mit
438
0.004566
from setuptools import setup, find_packages

version = "6.3.0"

with open("requirements.txt", "r") as f:
    install_requires = f.readlines()

setup(
    name='frappe',
    version=version,
    description='Metadata driven, full-stack web framework',
    author='Frappe Technologies',
    author_email='info@frappe.io',
    packages=find_packages(),
    zip_safe=False,
    include_package_data=True,
    install_requires=install_requires
)
thiagoferreiraw/mixapp
events/views/choose_image_view.py
Python
mit
1,771
0.003953
from django.shortcuts import render, redirect
from django.http import Http404
from events.forms import Event
from django.views.generic import View
from events.services import PlacesService
from events.forms import ImageUploadForm
from django.contrib import messages

import uuid


class ChooseImageView(View):
    template_name = "events/choose_image.html"
    form_action = "Edit"

    def __init__(self):
        self.places_service = PlacesService()

    def get(self, request, event_id):
        event = self.get_event_or_404(event_id, request.user.id)

        images = self.places_service.get_images_google_place(event.location.place_id, "en")
        images += self.places_service.get_images_street_view(event.location_lat, event.location_lng)
        images = [{'idx': idx, 'url': image} for idx, image in enumerate(images)]

        if "image_idx" in request.GET:
            event.get_remote_image(images[int(request.GET['image_idx'])]['url'])
            return redirect("list_events")

        return render(request, self.template_name, {'images': images, 'form': ImageUploadForm()})

    def post(self, request, event_id):
        form = ImageUploadForm(request.POST, request.FILES, instance=Event.objects.get(pk=event_id))
        if form.is_valid():
            form.save()
            messages.success(request, "Image uploaded successfully!")
            return redirect("list_events")
        else:
            messages.error(request, 'Invalid file, try again')
            return redirect("edit_event_image", event_id)

    @staticmethod
    def get_event_or_404(event_id, user_id):
        event = Event.objects.filter(pk=event_id, hosted_by=user_id).first()
        if not event:
            raise Http404()
        return event
DrewMcCarthy/dartboard
game.py
Python
apache-2.0
3,208
0.037406
import player import pygame import menu import settings as s pygame.init() pygame.mixer.init() class Game: def __init__(self, numplayers=2, doublebull='ON', mpcalc='ROUND'): #GAME INFO self.mpcalc = mpcalc self.doublebull = doublebull self.numplayers = numplayers self.mainLoop = True self.clock = pygame.time.Clock() self.valid_marks = [i for i in range(21)] self.valid_marks[0] = 'Bull' self.roundNum = 1 self.players = [] #ACTIVE PLAYER ID self.ap = 0 self.init_players() self.total_darts_thrown = 0 self.darts_remaining = 3 self.next_turn_flag = False self.last_dart = None #SOUNDS self.soundSingle = pygame.mixer.Sound(s.HIT_SOUND) #EVENT FOR NORMAL PROGRESSION TO NEXT TURN self.ENDTURN = pygame.USEREVENT+1 self.end_turn_event = pygame.event.Event(self.ENDTURN) #EVENT FOR FORCED PROGRESSION TO NEXT TURN self.forced_next_turn = False def init_players(self): for i in range(self.numplayers): self.players.append(player.Player(pnum=i)) def check_bust(self, prev_total): pass def check_winner(self): pass def update_current_score(self): pass def update_darts_remaining(self): self.darts_remaining = 3 - (self.total_darts_thrown % 3) def update_round(self): if self.total_darts_thrown % (3 * self.numplayers) == 0: self.roundNum += 1 def update_total_score(self, cur_score, direction): if direction == 'up': self.players[self.ap].total_score += cur_score elif direction == 'down': self.players[self.ap].total_score -= cur_score def update_turn(self): self.update_round() self.ap += 1 if self.ap > self.numplayers-1: self.ap = 0 message_text = ' Up Next' sleep_secs = 5 self.render_dropdown(message_text, sleep_secs, self.mpcalc) self.players[self.ap].prev_round_score = self.players[self.ap].total_score self.players[self.ap].current_score = 0 self.next_turn_flag = False def update_game(self, last_dart): if last_dart.segment[1] == 1: self.soundSingle.play() elif last_dart.segment[1] == 2: self.soundSingle.play(1) elif last_dart.segment[1] == 3: self.soundSingle.play(2) self.total_darts_thrown += 1 self.update_darts_remaining() self.update_current
_score() if self.total_darts_thrown % 3 == 0: pygame.event.post(self.end_turn_event) if self.forced_next_turn: self.next_turn_flag = False else: self.next_turn_flag = True self.players[self.ap].add_dart(self.roundNum, self.last_dart, self.valid_marks) self.check_winner() def on_event(self, events): for event in events: if event.type == pygame.QUIT: self.manager.go_to(me
nu.Menu()) if event.type == pygame.MOUSEMOTION: print(pygame.mouse.get_pos()) if event.type == self.ENDTURN: print('game on_event ENDTURN') self.update_turn() if event.type == pygame.KEYDOWN: if event.key == pygame.K_ESCAPE: self.manager.go_to(menu.Menu()) def on_gpio(self, segment): print('game on_gpio') self.players[self.ap].throw_dart(segment) self.last_dart = self.players[self.ap].last_dart self.update_game(self.last_dart)
ragb/sudoaudio
sudoaudio/core.py
Python
gpl-3.0
2,629
0.002282
# Copyright (c) 2011 - Rui Batista <ruiandrebatista@gmail.com> # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. import functools import inspect import sys import pygame def key_event(*keys): def wrap(f): f.__key_events__ = keys return f return wrap class _KeyHandlerMeta(type): def __new__(cls, name, bases, dct): if not '__key_handlers__' in dct: dct['__key_handlers__'] = {} for v in dct.values(): if hasattr(v, '__key_events__') and callable(v): for e in v.__key_events__: dct['__key_handlers__'][e] = v return type.__new__(cls, name, bases, dct) class PygameMainLoop(object): __metaclass__ = _KeyHandlerMeta def __init__(self): self._mainloop_running = False self._retval = None def run(self): self.on_run() self._mainloop_running = True while self._mainloop_running: self.run_before() for event in self.get_events(): self.dispatch_event(event) self.run_after() return self._retval def quit(self, retval=None): self._retval = retval self._mainloop_running = False def dispatch_event(self, event): if event.type == pygame.QUIT: self.on_quit_event() elif event.type == pygame.KEYDOWN and event.key in self.__key_handlers__: self.__key_handlers__[event.key](self,event) else: self.on_event_defa
ult(event) def on_quit_event(self): pygame.quit() sys.exit(0) def get_events(self): return pygame.event.get() def run_before(self): pass def run_after(self): pass
def on_run(self): pass def on_event_default(self, event): pass class VoiceDialog(PygameMainLoop): @key_event(pygame.K_ESCAPE) def escape(self, event): self.quit(None) def get_events(self): return [pygame.event.wait()]
rpiotti/Flask-AppBuilder
setup.py
Python
bsd-3-clause
1,758
0.003413
import os
import sys
import imp
import multiprocessing

from setuptools import setup, find_packages

version = imp.load_source('version', os.path.join('flask_appbuilder', 'version.py'))


def fpath(name):
    return os.path.join(os.path.dirname(__file__), name)


def read(fname):
    return open(fpath(fname)).read()


def desc():
    return read('README.rst')


setup(
    name='Flask-AppBuilder',
    version=version.VERSION_STRING,
    url='https://github.com/dpgaspar/flask-appbuilder/',
    license='BSD',
    author='Daniel Vaz Gaspar',
    author_email='danielvazgaspar@gmail.com',
    description='Simple and rapid application development framework, built on top of Flask. includes detailed security, auto CRUD generation for your models, google charts and much more.',
    long_description=desc(),
    packages=find_packages(),
    package_data={'': ['LICENSE']},
    entry_points={'console_scripts': [
        'fabmanager = flask_appbuilder.console:cli',
    ]},
    include_package_data=True,
    zip_safe=False,
    platforms='any',
    install_requires=[
        'colorama>=0.3',
        'click>=3.0',
        'Flask>=0.10',
        'Flask-BabelPkg>=0.9.4',
        'Flask-Login>=0.2.0',
        'Flask-OpenID>=1.1.0',
        'Flask-SQLAlchemy>=0.16',
        'Flask-WTF>=0.9.1',
    ],
    tests_require=[
        'nose>=1.0',
    ],
    classifiers=[
        'Development Status :: 5 - Production/Stable',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: BSD License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Topic :: Software Development :: Libraries :: Python Modules'
    ],
    test_suite='nose.collector'
)
quarkslab/irma
probe/tests/modules/antivirus/test_avg.py
Python
apache-2.0
4,618
0
from .test_antivirus import AbstractTests import modules.antivirus.avg.avg as module import modules.antivirus.base as base from mock import patch from pathlib import Path class TestAvg(Abstr
actTests.TestAntivirus): name = "AVG AntiVirus Free (Linux)" scan_path = Path("/usr/bin/avgscan") scan_args = ('--heur', '--paranoid', '--arc', '--macrow', '--pwdw', '--pup') module = module.AVGAntiVirusFree scan_clean_stdout = """AVG command line Anti-Virus scanner Copyright (c) 2013 AVG Technologies CZ Virus database version: 4793/15678 Vi
rus database release date: Mon, 21 May 2018 13:00:00 +0000 Files scanned : 1(1) Infections found : 0(0) PUPs found : 0 Files healed : 0 Warnings reported : 0 Errors reported : 0 """ scan_virus_retcode = 4 virusname = "EICAR_Test" scan_virus_stdout = """AVG command line Anti-Virus scanner Copyright (c) 2013 AVG Technologies CZ Virus database version: 4793/15678 Virus database release date: Mon, 21 May 2018 13:00:00 +0000 eicar.com.txt Virus identified EICAR_Test Files scanned : 1(1) Infections found : 1(1) PUPs found : 0 Files healed : 0 Warnings reported : 0 Errors reported : 0 """ version = "13.0.3118" virus_database_version = "4793/15678 (21 May 2018)" version_stdout = """AVG command line controller Copyright (c) 2013 AVG Technologies CZ ------ AVG status ------ AVG version : 13.0.3118 Components version : Aspam:3111, Cfg:3109, Cli:3115, Common:3110, Core:4793, Doc:3115, Ems:3111, Initd:3113, Lng:3112, Oad:3118, Other:3109, Scan:3115, Sched:3110, Update:3109 Last update : Tue, 22 May 2018 07:52:31 +0000 ------ License status ------ License number : LUOTY-674PL-VRWOV-APYEG-ZXHMA-E License version : 10 License type : FREE License expires on : Registered user : Registered company : ------ WD status ------ Component State Restarts UpTime Avid running 0 13 minute(s) Oad running 0 13 minute(s) Sched running 0 13 minute(s) Tcpd running 0 13 minute(s) Update stopped 0 - ------ Sched status ------ Task name Next runtime Last runtime Virus update Tue, 22 May 2018 18:04:00 +0000 Tue, 22 May 2018 07:46:29 +0000 Program update - - User counting Wed, 23 May 2018 07:46:29 +0000 Tue, 22 May 2018 07:46:29 +0000 ------ Tcpd status ------ E-mails checked : 0 SPAM messages : 0 Phishing messages : 0 E-mails infected : 0 E-mails dropped : 0 ------ Avid status ------ Virus database reload times : 0 Virus database version : 4793/15678 Virus database release date : Mon, 21 May 2018 13:00:00 +0000 Virus database shared in memory : yes ------ Oad status ------ Files scanned : 0(0) Infections found : 0(0) PUPs found : 0 Files healed : 0 Warnings reported : 0 Errors reported : 0 Operation successful. """ # nopep8 @patch.object(base.AntivirusUnix, "locate") @patch.object(base.AntivirusUnix, "locate_one") @patch.object(base.AntivirusUnix, "run_cmd") def setUp(self, m_run_cmd, m_locate_one, m_locate): m_run_cmd.return_value = 0, self.version_stdout, "" m_locate_one.return_value = self.scan_path m_locate.return_value = self.database super().setUp() @patch.object(module, "locate_one") @patch.object(base.AntivirusUnix, "run_cmd") def test_get_virus_db_error(self, m_run_cmd, m_locate_one): m_locate_one.return_value = self.scan_path m_run_cmd.return_value = -1, self.version_stdout, "" with self.assertRaises(RuntimeError): self.plugin.get_virus_database_version() @patch.object(module, "locate_one") @patch.object(base.AntivirusUnix, "run_cmd") def test_get_virus_db_no_version(self, m_run_cmd, m_locate_one): m_locate_one.return_value = self.scan_path wrong_stdout = "LOREM IPSUM" m_run_cmd.return_value = 0, wrong_stdout, "" with self.assertRaises(RuntimeError): self.plugin.get_virus_database_version() @patch.object(module, "locate_one") @patch.object(base.AntivirusUnix, "run_cmd") def test_get_virus_db_no_release(self, m_run_cmd, m_locate_one): m_locate_one.return_value = self.scan_path wrong_stdout = "Virus database version : 4793/15678" m_run_cmd.return_value = 0, wrong_stdout, "" version = self.plugin.get_virus_database_version() self.assertEquals(version, "4793/15678")
hftools/hftools
hftools/networks/tests/test_spar_functions.py
Python
bsd-3-clause
3,696
0.000812
#----------------------------------------------------------------------------- # Copyright (c) 2014, HFTools Development Team. # # Distributed under the terms of the Modified BSD License. # # The full license is in the file COPYING.txt, distributed with this software. #----------------------------------------------------------------------------- import os import numpy as np import hftools.dataset.arrayobj as aobj import hftools.networks.spar_functions as spfun from hftools.testing import TestCase basepath = os.path.split(__file__)[0]
def make_array(a): dims = (aobj.DimSweep("f", len(a)), aobj.DimMatrix_i("i", 2), aobj.DimMatrix_j("j", 2)) return aobj.hfarray(a, dims=dims) class Test_cascade(TestCase): def setUp(self): self.a = make_array([[[0, 1], [1, 0j]]]) self.b = make_array([[[0, 2], [2, 0j]]]) self.c = make_array([[[0.1, 0j], [0, 0.1]]]) def test_cascade_1(self): r = spfun.cascadeS(self.a, self.a) self.assertTrue(np.allclose(r, self.a)) def
test_cascade_2(self): r = spfun.cascadeS(self.a, self.b) self.assertTrue(np.allclose(r, self.b)) def test_cascade_3(self): r = spfun.cascadeS(self.b, self.b) self.assertTrue(np.allclose(r, self.a * 4)) def test_cascade_4(self): r = spfun.cascadeS(self.a, self.c) self.assertTrue(np.allclose(r, self.c)) def test_cascade_5(self): r = spfun.cascadeS(self.b, self.c) self.assertTrue(np.allclose(r, make_array([[[0.4, 0j], [0, 0.1]]]))) def test_cascade_6(self): r = spfun.cascadeS(self.c, self.b) self.assertTrue(np.allclose(r, make_array([[[0.1, 0j], [0, 0.4]]]))) class Test_deembedleft(TestCase): def setUp(self): self.a = make_array([[[0, 1], [1, 0j]]]) self.b = make_array([[[0, 2], [2, 0j]]]) self.c = make_array([[[0.1, 0j], [0, 0.1]]]) def test_cascade_1(self): r = spfun.deembedleft(self.a, self.a) self.assertTrue(np.allclose(r, self.a)) def test_cascade_2(self): r = spfun.deembedleft(self.b, self.b) self.assertTrue(np.allclose(r, self.a)) def test_cascade_3(self): r = spfun.deembedleft(self.b, self.c) self.assertTrue(np.allclose(r, make_array([[[0.025, 0j], [0, 0.1]]]))) class Test_deembedright(TestCase): def setUp(self): self.a = make_array([[[0, 1], [1, 0j]]]) self.b = make_array([[[0, 2], [2, 0j]]]) self.c = make_array([[[0.1, 0j], [0, 0.1]]]) def test_cascade_1(self): r = spfun.deembedright(self.a, self.a) self.assertTrue(np.allclose(r, self.a)) def test_cascade_2(self): r = spfun.deembedright(self.b, self.b) self.assertTrue(np.allclose(r, self.a)) def test_cascade_3(self): r = spfun.deembedright(self.c, self.b) self.assertTrue(np.allclose(r, make_array([[[0.1, 0j], [0, 0.025]]]))) class Test_deembed(TestCase): def setUp(self): self.a = make_array([[[0, 1], [1, 0j]]]) self.b = make_array([[[0, 2], [2, 0j]]]) self.c = make_array([[[0.1, 0j], [0, 0.1]]]) def test_cascade_1(self): r = spfun.deembed(self.a, self.a, self.a) self.assertTrue(np.allclose(r, self.a)) def test_cascade_2(self): r = spfun.deembed(self.b, self.b, self.a) self.assertTrue(np.allclose(r, self.a)) def test_cascade_3(self): r = spfun.deembed(self.a, self.b, self.b) self.assertTrue(np.allclose(r, self.a)) def test_cascade_4(self): r = spfun.deembed(self.b, self.c, self.b) self.assertAllclose(r, make_array([[[0.025, 0j], [0, 0.025]]]))
dasbruns/netzob
src/netzob/Common/Utils/NetzobRegex.py
Python
gpl-3.0
15,254
0.004132
#-*- coding: utf-8 -*- #+---------------------------------------------------------------------------+ #| 01001110 01100101 01110100 01111010 01101111 01100010 | #| | #| Netzob : Inferring communication protocols | #+---------------------------------------------------------------------------+ #| Copyright (C) 2011-2014 Georges Bossert and Frédéric Guihéry | #| This program is free software: you can redistribute it and/or modify | #| it under the terms of the GNU General Public License as published by | #| the Free Software Foundation, either version 3 of the License, or | #| (at your option) any later version. | #| | #| This program is distributed in the hope that it will be useful, | #| but WITHOUT ANY WARRANTY; without even the implied warranty of | #| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the | #| GNU General Public License for more details. | #| | #| You should have received a copy of the GNU General Public License | #| along with this program. If not, see <http://www.gnu.org/licenses/>. | #+---------------------------------------------------------------------------+ #| @url : http://www.netzob.org | #| @contact : contact@netzob.org | #| @sponsors : Amossys, http://www.amossys.fr | #| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ | #+---------------------------------------------------------------------------+ #+---------------------------------------------------------------------------+ #| File contributors : | #| - Georges Bossert <georges.bossert (a) supelec.fr> | #| - Frédéric Guihéry <frederic.guihery (a) amossys.fr> | #+---------------------------------------------------------------------------+ #+---------------------------------------------------------------------------+ #| Standard library imports | #+---------------------------------------------------------------------------+ import uuid #+---------------------------------------------------------------------------+ #| Related third party imports | #+---------------------------------------------------------------------------+ #+---------------------------------------------------------------------------+ #| Local application imports | #+---------------------------------------------------------------------------+ from netzob.Common.Utils.Decorators import typeCheck, NetzobLogger from netzob.Common.Utils.TypedList import TypedList from netzob.Common.Models.Types.TypeConverter import TypeConverter from netzob.Common.Models.Types.Raw import Raw from netzob.Common.Models.Types.HexaString import HexaString @NetzobLogger class NetzobRegex(object): """Represents a regex describing field boundaries. Static methods can be used to build the regex. Don't use the constructor unless you really know what you do. I
nstead we highly recommend the use of the static methods offered to build different types o
f regex. If still you want to use the constructor, don't specify the group since it will be automaticaly added. For example, if your regex is (.*), only specify .* and forget the () that will be added. In addition the constructor will also generate and add the group identifier. Your regex will therefore look like : (P<f45689676567987628>.*). """ DEFAULT_REGEX = '.*' def __init__(self): self.id = 'f' + str(uuid.uuid4().hex) self.regex = NetzobRegex.DEFAULT_REGEX @property def regex(self): return self.__regex @regex.setter @typeCheck(str) def regex(self, regex): if regex is None: raise TypeError("The regex cannot be None") self.__regex = "(?P<{0}>{1})".format(self.id, regex) @property def id(self): return self.__id @id.setter @typeCheck(str) def id(self, _id): if _id is None: raise TypeError("Id cannot be None") if len(_id) == 0: raise ValueError("Id must be defined (len>0)") self.__id = _id def __str__(self): return str(self.regex) def finalRegex(self): """This method returns the current regex with the starting and ending indicators added. For this reason, the returned regex can't be merged with others regexes. :return: a string with the final regex definition including start and end indicators :rtype: :class:`str`. """ return "^" + str(self) + "$" @staticmethod def buildDefaultRegex(): """It creates the default regex which means we have to knowledge over the format of the field. >>> from netzob.all import * >>> import regex as re >>> data = "Hello netzob, a default regex grabs everything." >>> hexData = TypeConverter.convert(data, ASCII, HexaString) >>> nRegex = NetzobRegex.buildDefaultRegex() >>> compiledRegex = re.compile(str(nRegex)) >>> dynamicDatas = compiledRegex.match(hexData) >>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII) Hello netzob, a default regex grabs everything. :return: a .* default NetzobRegex :rtype: :class:`netzob.Common.Utils.NetzobRegex.NetzobRegex` """ regex = NetzobRegex() regex.regex = '.*' return regex @staticmethod def buildRegexForStaticValue(value): """It creates a NetzobRegex which represents a regex with the specified Raw static value. >>> from netzob.all import * >>> import regex as re >>> data = "Hello netzob" >>> hexData = TypeConverter.convert(data, ASCII, HexaString) >>> nRegex = NetzobRegex.buildRegexForStaticValue(data) >>> compiledRegex = re.compile(str(nRegex)) >>> dynamicDatas = compiledRegex.match(hexData) >>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII) Hello netzob :param value: the static value the regex must represents :type value: python raw (will be encoded in HexaString in the regex) :return: the regex which represents the specified valued encoed in HexaString :type: :class:`netzob.Common.Utils.NetzobRegex.NetzobRegex` """ hexaStringValue = TypeConverter.convert(value, Raw, HexaString) return NetzobStaticRegex(hexaStringValue) @staticmethod def buildRegexForEol(): """It creates a NetzobRegex which represents an EOL :return: the regex which represents an EOL :type: :class:`netzob.Common.Utils.NetzobRegex.NetzobRegex` """ return NetzobEolRegex() @staticmethod def buildRegexForSizedValue(size): return NetzobSizedRegex(size) @staticmethod def buildRegexForAlternativeRegexes(regexes): return NetzobAlternativeRegex(regexes) @staticmethod def buildRegexForAggregateRegexes(regexes): return NetzobAggregateRegex(regexes) @NetzobLogger class NetzobSizedRegex(NetzobRegex): """Represents an aggregate regex. 
>>> from netzob.Common.Utils.NetzobRegex import NetzobRegex >>> from netzob.all import * >>> import regex as re >>> data = "Hello netzob" >>> hexData = TypeConverter.convert(data, ASCII, HexaString) >>> nRegex = NetzobRegex.buildRegexForSizedValue((8*4,8*5)) >>> compiledRegex
poojavade/Genomics_Docker
Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/pygsl/chebyshev.py
Python
apache-2.0
6,561
0.00381
#!/usr/bin/env python # Author : Pierre Schnizer """ This module describes routines for computing Chebyshev approximations to univariate functions. A Chebyshev approximation is a truncation of the series \M{f(x) = S{sum} c_n T_n(x)}, where the Chebyshev polynomials \M{T_n(x) = cos(n \arccos x)} provide an orthogonal basis of polynomials on the interval [-1,1] with the weight function \M{1 / sqrt{1-x^2}}. The first few Chebyshev polynomials are, T_0(x) = 1, T_1(x) = x, T_2(x) = 2 x^2 - 1. def f(x, p): if x < 0.5: return 0.25 else: return 0.75 n = 10000; cs = cheb_series(40) F = gsl_function(f, None) cs.init(F, 0.0, 1.0) nf = float(n) for i in range(100): x = i / nf r10 = cs.eval_n(10, x) r40 = cs.eval(x) print "%g %g %g %g" % (x, f(x, None), r10, r40) """ import _callback from _generic_solver import _workspace from gsl_function import gsl_function class cheb_series(_workspace): """ This class manages all internal detail. It provides the space for a Chebyshev series of order N. """ _alloc = _callback.gsl_cheb_alloc _free = _callback.gsl_cheb_free _init = _callback.gsl_cheb_init _eval = _callback.gsl_cheb_eval _eval_err = _callback.gsl_cheb_eval_err _eval_n = _callback.gsl_cheb_eval_n _eval_n_err = _callback.gsl_cheb_eval_n_err #_eval_mode = _callback.gsl_cheb_eval_mode #_eval_mode_e = _callback.gsl_cheb_eval_mode_e _calc_deriv = _callback.gsl_cheb_calc_deriv _calc_integ = _callback.gsl_cheb_calc_integ _get_coeff = _callback.pygsl_cheb_get_coefficients _set_coeff = _callback.pygsl_cheb_set_coefficients _get_a = _callback.pygsl_cheb_get_a _set_a = _callback.pygsl_cheb_set_a _get_b = _callback.pygsl_cheb_get_b _set_b = _callback.pygsl_cheb_set_b _get_f = _callback.pygsl_cheb_get_f _set_f = _callback.pygsl_cheb_set_f _get_order_sp = _callback.pygsl_cheb_get_order_sp _set_order_sp = _callback.pygsl_cheb_set_order_sp def __init__(self, size): """ input : n @params n : number of coefficients """ self._size = size _workspace.__init__(self, size) def init(self, f, a, b): """ This function computes the Chebyshev approximation for the function F over the range (a,b) to the previously specified order. The computation of the Chebyshev approximation is an \M{O(n^2)} process, and requires n function evaluations. input : f, a, b @params f : a gsl_function @params a : lower limit @params b : upper limit """ return self._init(self._ptr, f.get_ptr(), a, b) def eval(self, x): """ This function evaluates the Chebyshev series CS at a given point X input : x x ... value where the series shall be evaluated. """ return self._eval(self._ptr, x) def eval_err(self, x): """ This function computes the Chebyshev series at a given point X, estimating both the series RESULT and its absolute error ABSERR. The error estimate is made from the first neglected term in the series. input : x x ... value where the error shall be evaluated. """ return self._eval_err(self._ptr, x) def eval_n(self, order, x): """ This function evaluates the Chebyshev series CS at a given point N, to (at most) the given order ORDER. input : n, x n ... number of cooefficients x ... value where the series shall be evaluated. """ return self._eval_n(self._ptr, order, x) def eval_n_err(self, order, x): """ This function evaluates a Chebyshev series CS at a given point X, estimating both the series RESULT and its absolute error ABSERR, to (at most) the given order ORDER. The error estimate is made from the first neglected term in the series. input : n, x n ... number of cooefficients x ... value where the error shall be evaluated. 
""" return self._eval_n_err(self._ptr, order, x) # def eval_mode(self, x, mode): # """ # # """ # return self._eval(self._ptr, x,
mode) # # def eval_mode_e(self, x, mode): # return self._eval(self._ptr, x, mode) def calc_deriv(self): """ This method computes the derivative of the series CS. It returns a new instance of the cheb_series class. """ tmp = cheb_series(self._size) self._calc_deriv(tmp._ptr, self._ptr) return tmp def
calc_integ(self): """ This method computes the integral of the series CS. It returns a new instance of the cheb_series class. """ tmp = cheb_series(self._size) self._calc_integ(tmp._ptr, self._ptr) return tmp def get_coefficients(self): """ Get the chebyshev coefficients. """ return self._get_coeff(self._ptr) def set_coefficients(self, coefs): """ Sets the chebyshev coefficients. """ return self._set_coeff(self._ptr, coefs) def get_a(self): """ Get the lower boundary of the current representation """ return self._get_a(self._ptr) def set_a(self, a): """ Set the lower boundary of the current representation """ return self._set_a(self._ptr, a) def get_b(self): """ Get the upper boundary of the current representation """ return self._get_b(self._ptr) def set_b(self, a): """ Set the upper boundary of the current representation """ return self._set_b(self._ptr, a) def get_f(self): """ Get the value f (what is it ?) The documentation does not tell anything about it. """ return self._get_f(self._ptr) def set_f(self, a): """ Set the value f (what is it ?) """ return self._set_f(self._ptr, a) def get_order_sp(self): """ Get the value f (what is it ?) The documentation does not tell anything about it. """ return self._get_order_sp(self._ptr) def set_order_sp(self, a): """ Set the value f (what is it ?) """ return self._set_order_sp(self._ptr, a)
mysociety/yournextrepresentative
candidates/tests/test_search.py
Python
agpl-3.0
4,466
0.000672
import re from django.core.management import call_command from django_webtest import WebTest from .auth import TestUserMixin from .settings import SettingsMixin from popolo.models import Person from .uk_examples import UK2015ExamplesMixin class TestSearchView(TestUserMixin,
SettingsMixin, UK2015Example
sMixin, WebTest): def setUp(self): super(TestSearchView, self).setUp() call_command('rebuild_index', verbosity=0, interactive=False) def test_search_page(self): # we have to create the candidate by submitting the form as otherwise # we're not making sure the index update hook fires response = self.app.get('/search?q=Elizabeth') # have to use re to avoid matching search box self.assertFalse( re.search( r'''<a[^>]*>Elizabeth''', response.text ) ) self.assertFalse( re.search( r'''<a[^>]*>Mr Darcy''', response.text ) ) response = self.app.get( '/election/2015/post/65808/dulwich-and-west-norwood', user=self.user, ) form = response.forms['new-candidate-form'] form['name'] = 'Mr Darcy' form['email'] = 'darcy@example.com' form['source'] = 'Testing adding a new person to a post' form['party_gb_2015'] = self.labour_party_extra.base_id form.submit() response = self.app.get( '/election/2015/post/65808/dulwich-and-west-norwood', user=self.user, ) form = response.forms['new-candidate-form'] form['name'] = 'Elizabeth Bennet' form['email'] = 'lizzie@example.com' form['source'] = 'Testing adding a new person to a post' form['party_gb_2015'] = self.labour_party_extra.base_id form.submit() response = self.app.get( '/election/2015/post/65808/dulwich-and-west-norwood', user=self.user, ) form = response.forms['new-candidate-form'] form['name'] = "Charlotte O'Lucas" # testers license form['email'] = 'charlotte@example.com' form['source'] = 'Testing adding a new person to a post' form['party_gb_2015'] = self.labour_party_extra.base_id form.submit() # check searching finds them response = self.app.get('/search?q=Elizabeth') self.assertTrue( re.search( r'''<a[^>]*>Elizabeth''', response.text ) ) self.assertFalse( re.search( r'''<a[^>]*>Mr Darcy''', response.text ) ) response = self.app.get( '/election/2015/post/65808/dulwich-and-west-norwood', user=self.user, ) form = response.forms['new-candidate-form'] form['name'] = 'Elizabeth Jones' form['email'] = 'e.jones@example.com' form['source'] = 'Testing adding a new person to a post' form['party_gb_2015'] = self.labour_party_extra.base_id form.submit() response = self.app.get('/search?q=Elizabeth') self.assertTrue( re.search( r'''<a[^>]*>Elizabeth Bennet''', response.text ) ) self.assertTrue( re.search( r'''<a[^>]*>Elizabeth Jones''', response.text ) ) person = Person.objects.get(name='Elizabeth Jones') response = self.app.get( '/person/{0}/update'.format(person.id), user=self.user, ) form = response.forms['person-details'] form['name'] = 'Lizzie Jones' form['source'] = "Some source of this information" form.submit() response = self.app.get('/search?q=Elizabeth') self.assertTrue( re.search( r'''<a[^>]*>Elizabeth Bennet''', response.text ) ) self.assertFalse( re.search( r'''<a[^>]*>Elizabeth Jones''', response.text ) ) # check that searching for names with apostrophe works response = self.app.get("/search?q=O'Lucas") self.assertTrue( re.search( r'''<a[^>]*>Charlotte''', response.text ) )
vapkarian/soccer-analyzer
src/versions/f4.py
Python
mit
3,131
0.000639
from collections import OrderedDict from src.models import FlashscoreMatch from src.settings import match_cache, Colors @match_cache def is_available(match: FlashscoreMatch) -> bool: return ( match.ah_0_1_current_odds is not None and match.ah_0_2_current_odds is not None and match.home_draw_current_odds is not None and match.home_away_current_odds is not None and match.draw_away_current_odds is not None and match.home_team_rank_across_home is not None and match.away_team_rank_across_away is not None ) @match_cache def e1(match: FlashscoreMatch) -> bool: return ( 2.8 > match.home_current_odds > 1.9 and match.away_current_odds > 2.45 and match.draw_current_odds >=3 and match.prediction_a3 < 2 and match.prediction_a4 > 0 and match.home_team_rank_across_home < 18 and match.away_team_rank_across_away < 15 and 6.5 > (match.xxx or 0.) >= 3.67 ) @match_cache def e2(match: FlashscoreMatch) -> bool: return ( 3.4 > match.home_current_odds > 2.45 and 3 > match.away_current_odds > 1.59 and match.prediction_a4 == 0 and match.prediction_a3 != 1 and (match.xxx or 0.) >= 4 ) @match_cache def a1(match: FlashscoreMatch) -> bool: return ( match.away_current_odds < 2 and 2.5 < match.ah_0_1_current_odds < 5.25 and match.prediction_a3 < 3 and match.prediction_a4 < 2 and (match.xxx or 0.) > 3.5 ) @match_cache def h2(match: FlashscoreMatch) -> bool: return ( match.home_current_odds < 2 and match.prediction_a4 == 0 and (match.xxx or 0.) > 4.16 ) @match_cache def test(match: FlashscoreMatch) -> bool: return ( False) @match_cach
e def other(match: FlashscoreMatch) -> bool: return not(e1(match) or e2(match) or a1(match) or h2(match) or test(match)) @match_cache def bet(match: FlashscoreMatch) -> str: values = OrderedDict([ ('e1', e1(mat
ch)), ('e2', e2(match)), ('a1', a1(match)), ('h2', h2(match)), ('test', test(match))]) return ', '.join((key for key, value in values.items() if value)) @match_cache def ah_0_1_color(match: FlashscoreMatch) -> Colors: if e1(match) or a1(match): return Colors.GREEN return Colors.EMPTY @match_cache def ah_0_2_color(match: FlashscoreMatch) -> Colors: if e2(match) or h2(match): return Colors.GREEN return Colors.EMPTY @match_cache def total_score_color(match: FlashscoreMatch) -> Colors: if match.home_score is not None and match.away_score is not None: if e1(match) or a1(match): if match.home_score > match.away_score: return Colors.GREEN elif match.home_score < match.away_score: return Colors.RED elif e2(match) or h2(match): if match.away_score > match.home_score: return Colors.GREEN elif match.away_score < match.home_score: return Colors.RED return Colors.EMPTY
hydroffice/hyo_soundspeed
hyo2/soundspeed/atlas/regofsoffline.py
Python
lgpl-2.1
8,583
0.001748
from datetime import datetime as dt import os from enum import IntEnum import logging from typing import Optional from netCDF4 import Dataset, num2date from hyo2.soundspeed.base.geodesy import Geodesy from hyo2.soundspeed.profile.dicts import Dicts from hyo2.soundspeed.profile.profile import Profile from hyo2.soundspeed.profile.profilelist import ProfileList from hyo2.abc.lib.progress.cli_progress import CliProgress logger = logging.getLogger(__name__) class RegOfsOffline: class Model(IntEnum): # East Coast CBOFS = 10 # RG = True # Format is GoMOFS DBOFS = 11 # RG = True # Format is GoMOFS GoMOFS = 12 # RG = True # Format is GoMOFS NYOFS = 13 # RG = False SJROFS = 14 # RG = False
# Gulf of Mexico NGOFS = 20 # RG = True # Format is GoMOFS TBOFS = 21 # RG = True # Format is GoMOFS # Great Lakes LEOFS = 30 # RG = True # Format is GoMOFS LHOFS = 31 # RG = False LMOFS = 32 # RG = False LOOFS = 33 # RG = F
alse LSOFS = 34 # RG = False # Pacific Coast CREOFS = 40 # RG = True # Format is GoMOFS SFBOFS = 41 # RG = True # Format is GoMOFS # noinspection DuplicatedCode regofs_model_descs = \ { Model.CBOFS: "Chesapeake Bay Operational Forecast System", Model.DBOFS: "Delaware Bay Operational Forecast System", Model.GoMOFS: "Gulf of Maine Operational Forecast System", Model.NYOFS: "Port of New York and New Jersey Operational Forecast System", Model.SJROFS: "St. John's River Operational Forecast System", Model.NGOFS: "Northern Gulf of Mexico Operational Forecast System", Model.TBOFS: "Tampa Bay Operational Forecast System", Model.LEOFS: "Lake Erie Operational Forecast System", Model.LHOFS: "Lake Huron Operational Forecast System", Model.LMOFS: "Lake Michigan Operational Forecast System", Model.LOOFS: "Lake Ontario Operational Forecast System", Model.LSOFS: "Lake Superior Operational Forecast System", Model.CREOFS: "Columbia River Estuary Operational Forecast System", Model.SFBOFS: "San Francisco Bay Operational Forecast System" } def __init__(self, data_folder: str, prj: 'hyo2.soundspeed.soundspeed import SoundSpeedLibrary') -> None: self.name = self.__class__.__name__ self.desc = "Abstract atlas" # a human-readable description self.data_folder = data_folder self.prj = prj self.g = Geodesy() self._has_data_loaded = False # grids are "loaded" ? (netCDF files are opened) self._file = None self._day_idx = 0 self._timestamp = None self._zeta = None self._siglay = None self._h = None self._lats = None self._lons = None self._lat = None self._lon = None self._loc_idx = None self._d = None self._temp = None self._sal = None def query(self, nc_path: str, lat: float, lon: float) -> Optional[ProfileList]: if not os.path.exists(nc_path): raise RuntimeError('Unable to locate %s' % nc_path) logger.debug('nc path: %s' % nc_path) if (lat is None) or (lon is None): logger.error("invalid location query: (%s, %s)" % (lon, lat)) return None logger.debug('query location: %s, %s' % (lat, lon)) progress = CliProgress() try: self._file = Dataset(nc_path) progress.update(20) except (RuntimeError, IOError) as e: logger.warning("unable to access data: %s" % e) self.clear_data() progress.end() return None try: self.name = self._file.title time = self._file.variables['time'] self._timestamp = num2date(time[0], units=time.units) logger.debug("Retrieved time: %s" % self._timestamp.isoformat()) # Now get latitudes, longitudes and depths for x,y,z referencing self._lats = self._file.variables['lat'][:] self._lons = self._file.variables['lon'][:] # logger.debug('lat:(%s)\n%s' % (self._lats.shape, self._lats)) # logger.debug('lon:(%s)\n%s' % (self._lons.shape, self._lons)) self._zeta = self._file.variables['zeta'][0, :] self._siglay = self._file.variables['siglay'][:] self._h = self._file.variables['h'][:] # logger.debug('zeta:(%s)\n%s' % (self._zeta.shape, self._zeta)) # logger.debug('siglay:(%s)\n%s' % (self._siglay.shape, self._siglay[:, 0])) # logger.debug('h:(%s)\n%s' % (self._h.shape, self._h)) self._temp = self._file.variables['temp'][:] self._sal = self._file.variables['salinity'][:] # logger.debug('temp:(%s)\n%s' % (self._temp.shape, self._temp[:, 0])) # logger.debug('sal:(%s)\n%s' % (self._sal.shape, self._sal[:, 0])) except Exception as e: logger.error("troubles in variable lookup for lat/long grid and/or depth: %s" % e) self.clear_data() progress.end() return None min_dist = 100000.0 min_idx = None for idx, _ in enumerate(self._lats): nc_lat = self._lats[idx] nc_lon = self._lons[idx] if nc_lon > 180.0: nc_lon = nc_lon - 
360.0 nc_dist = self.g.distance(nc_lon, nc_lat, lon, lat) # logger.debug('loc: %.6f, %.6f -> %.6f' % (nc_lat, nc_lon, nc_dist)) if nc_dist < min_dist: min_dist = nc_dist min_idx = idx if min_dist >= 10000.0: logger.error("location too far from model nodes: %.f" % min_dist) self.clear_data() progress.end() return None self._loc_idx = min_idx self._lon = self._lons[self._loc_idx] if self._lon > 180.0: self._lon = self._lon - 360.0 self._lat = self._lats[self._loc_idx] logger.debug('closest node: %d [%s, %s] -> %s' % (self._loc_idx, self._lat, self._lon, min_dist)) zeta = self._zeta[self._loc_idx] h = self._h[self._loc_idx] siglay = -self._siglay[:, self._loc_idx] # logger.debug('zeta: %s, h: %s, siglay: %s' % (zeta, h, siglay)) self._d = siglay * (h + zeta) # logger.debug('d:(%s)\n%s' % (self._h.shape, self._d)) # Make a new SV object to return our query in ssp = Profile() ssp.meta.sensor_type = Dicts.sensor_types['Synthetic'] ssp.meta.probe_type = Dicts.probe_types[self.name] ssp.meta.latitude = self._lat ssp.meta.longitude = self._lon ssp.meta.utc_time = dt(year=self._timestamp.year, month=self._timestamp.month, day=self._timestamp.day, hour=self._timestamp.hour, minute=self._timestamp.minute, second=self._timestamp.second) ssp.meta.original_path = "%s_%s" % (self.name, self._timestamp.strftime("%Y%m%d_%H%M%S")) ssp.init_data(self._d.shape[0]) ssp.data.depth = self._d[:] ssp.data.temp = self._temp[0, :, self._loc_idx] ssp.data.sal = self._sal[0, :, self._loc_idx] ssp.calc_data_speed() ssp.clone_data_to_proc() ssp.init_sis() profiles = ProfileList() profiles.append_profile(ssp) progress.end() return profiles def clear_data(self) -> None: """Delete the data and reset the last loaded day""" logger.debug("clearing data") if self._has_data_loaded: if self._file: self._file.close() self._has_data_loaded = False # grids are "loaded" ? (netCDF files are opened) self._file = None self._day_idx = 0 self._timestamp = None self._zeta = None self._siglay = None self._h = None self._lats = None self._lons
macobo/python-grader
tasks/MTAT.03.100/2013/Midterm_1_resit/KT2_J1_vahenda_tester.py
Python
mit
1,967
0.028659
""" Task description (in Estonian): 3. Maatriksi vähendamine (6p) Kirjuta funktsioon vähenda, mis võtab argumendiks arvumaatriksi, milles ridu ja veerge on paarisarv, ning tagastab uue maatriksi, milles on kaks korda vähem ridu ja kaks korda vähem veerge, ja kus iga element on esialgse maatriksi nelja elemendi keskmine, järgnevas näites toodud skeemi järgi: See tähendab, et vähenda([[1,5,2,6,3,6], [1,3,2,7,3,3], [4,8,5,1,1,6], [4,4,9,5,6,1]]) peab tagastama [[2.5, 4.25, 3.75], [5.0, 5.0, 3.5]]. """ from grader import * from KT2_util import make_checker def vähenda(maatriks): tulemus = [] for r in range(0, len(maatriks), 2): rida = [] for c in range(0, len(maatriks[r]), 2): tul = 0 for i in range(4): tul += maatriks[r+i%2][c+i//2] rida.append(tul / 4.0) tulemus.append(rida) return tulemus checker = make_checker(vähenda) checker([[1, 2], [3, 4]], description="Ruudukujuline 2x2 maatriks- {function}({args}) == {expected}") checker([[1, 2, 3, 4], [5
, 6, 7, 8]], description="Mitte-ruudukujuline maatriks - {function}({args}) == {expected}") checker([[1,5,2,6,3,6], [1,3,2,7,3,3], [4,8,5,1,1,6], [4,4,9,5,6,1]]) checker([[1,5,2,6,3,6], [1,3,2,7,3,3], [4,8,5,1,1,6], [4,4,9,5,6,1]]) checker([], description="Erijuht, tühi maatriks- {function}({args}) == {expected}") random_tests = [ [[7, 5, 2, 6, 6, 9], [2, 8, 6, 3, 8, 7]], [[3, 1, 0, 9],
[0, 5, 1, 7]], [[4, 4], [0, 8], [4, 9], [3, 0], [3, 6], [8, 2]], [[9, 4, 6, 5, 4, 6], [3, 8, 7, 1, 2, 5], [8, 9, 8, 5, 0, 2], [2, 7, 2, 4, 3, 5], [2, 6, 8, 0, 2, 9], [7, 4, 6, 4, 8, 2]], [[-1, -3], [-6, 6], [5, -6], [1, 0]], [[-5, -10, 6, -1], [-8, -10, -5, 7], [-7, 9, -5, -5], [-8, -7, -10, 8]], [[-3, 6, -3, 6], [4, -6, 3, 8], [-9, -6, 7, -6], [6, 6, 4, -3]], [[1, 6], [2, -6]] ] for test_case in random_tests: checker(test_case)
kylon/pacman-fakeroot
test/pacman/tests/upgrade055.py
Python
gpl-2.0
618
0.001618
self.description = "Upgrade a package that provides one of two imaginary packages"

lp1 = pmpkg("pkg1")
lp1.depends = ["imaginary", "imaginary2"]
self.addpkg2db("local", lp1)

lp2 = pmpkg("pkg2")
lp2.provides = ["imaginary"]
self.addpkg2db("local", lp2)

lp3 = pmpkg("pkg3")
lp3.provides = ["imaginary2"]
self.addpkg2db("local", lp3)

p = pmpkg("pkg2", "1.0-2")
p.provides = ["imaginary"]
self.addpkg(p)

self.args = "-U %s" % p.filename()

self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_EXIST=pkg1")
self.addrule("PKG_VERSION=pkg2|1.0-2")
self.addrule("PKG_EXIST=pkg3")
self.addrule("PKG_DEPENDS=pkg1|imaginary")
sokil/VotingEngine
models/voting_variant.py
Python
mit
361
0.00277
from app import db
from sqlalchemy import Column, String, Integer, ForeignKey


class VotingVariant(db.Model):
    __tablename__ = 'voting_variants'

    id = Column(Integer, primary_key=True)
    voting_id = Column(Integer, ForeignKey('votings.id'))
    title = Column(String(255))
    description = Column(String(1000))

    voting = db.relationship('Voting')
DarknessSwitch/django-tutorial
catalog/admin.py
Python
cc0-1.0
2,208
0.005888
from django.contrib import admin

# Register your models here.

from .models import Author, Genre, Book, BookInstance, Language

"""
# Minimal registration of Models.
admin.site.register(Book)
admin.site.register(Author)
admin.site.register(BookInstance)
admin.site.register(Genre)
admin.site.register(Language)
"""

admin.site.register(Genre)
admin.site.register(Language)


class BooksInline(admin.TabularInline):
    """
    Defines format of inline book insertion (used in AuthorAdmin)
    """
    model = Book


@admin.register(Author)
class AuthorAdmin(admin.ModelAdmin):
    """
    Administration object for Author models.
    Defines:
     - fields to be displayed in list view (list_display)
     - orders fields in detail view (fields), grouping the date fields horizontally
     - adds inline addition of books in author view (inlines)
    """
    list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
    fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
    inlines = [BooksInline]


class BooksInstanceInline(admin.TabularInline):
    """
    Defines format of inline book instance insertion (used in BookAdmin)
    """
    model = BookInstance


class BookAdmin(admin.ModelAdmin):
    """
    Administration object for Book models.
    Defines:
     - fields to be displayed in list view (list_display)
     - adds inline addition of book instances in book view (inlines)
    """
    list_display = ('title', 'author', 'display_genre')
    inlines = [BooksInstanceInline]


admin.site.register(Book, BookAdmin)


@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
    """
    Administration object for BookInstance models.
    Defines:
     - fields to be displayed in list view (list_display)
     - filters that will be displayed in sidebar (list_filter)
     - grouping of fields into sections (fieldsets)
    """
    list_display = ('book', 'status', 'borrower', 'due_back', 'id')
    list_filter = ('status', 'due_back')

    fieldsets = (
        (None, {
            'fields': ('book', 'imprint', 'id')
        }),
        ('Availability', {
            'fields': ('status', 'due_back', 'borrower')
        }),
    )
endlessm/chromium-browser
build/run_swarming_xcode_install.py
Python
bsd-3-clause
3,039
0.005923
#!/usr/bin/env python # Copyright 2017 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """ This script runs swarming_xcode_install on the bots. It should be run when we need to upgrade all the swarming testers. It: 1) Packages two python files into an isolate. 2) Runs the isolate on swarming machines that satisfy certain dimensions. Example usage: $ ./build/run_swarming_xcode_install.py --luci_path ~/work/luci-py \ --swarming-server touch-swarming.appspot.com \ --isolate-server touch-isolate.appspot.com """ from __future__ import print_function import argparse import os import shutil import subprocess import sys import tempfile def main(): parser = argparse.ArgumentParser( description='Run swarming_xcode_install on the bots.') parser.add_argument('--luci_path', required=True, type=os.path.abspath) parser.add_argument('--swarming-server', required=True, type=str) parser.add_argument('--isolate-server', required=True, type=str) parser.add_argument('--batches', type=int, default=25, help="Run xcode install in batches of size |batches|.") parser.add_argument('--dimension', nargs=2, action='append') args = parser.parse_args() args.dimension = args.dimension or [] script_dir = os.path.dirname(os.path.abspath(__file__)) tmp_dir = tempfile.mkdtemp(prefix='swarming_xcode') try: print('Making isolate.') shutil.copyfile(os.path.join(script_dir, 'swarming_xcode_install.py'), os.path.join(tmp_dir, 'swarming_xcode_install.py')) shutil.copyfile(os.path.join(script_dir, 'mac_toolchain.py'), os.path.join(tmp_dir, 'mac_toolchain.py')) luci_client = os.path.join(args.luci_path, 'client') cmd = [ sys.executable, os.path.join(luci_client, 'isolateserver.py'), 'archive', '-I', args.isolate_server, tmp_dir, ] isolate_hash = subprocess.check_output(cmd).split()[0] print('Running swarming_xcode_install.') # TODO(crbug.com/765361): The dimensions below should be updated once # swarming for iOS is fleshed out, likely removing xcode_version 9 and # adding different dimensions. luci_tools = os.path.join(luci_client, 'tools') dimensions = [['pool', 'Chrome'], ['xcode_version', '9.0']] + args.dimension dim_args = [] for d in dimensions: dim_args += ['--dimension'] + d cmd = [ sys.executable, os.path.join(luci_tools, 'run_on_bots.py'), '--swarming', args.swarming_server, '--isolate-server', a
rgs.isolate_server, '--priority', '20', '--batches', str(args.batches), '--tags', 'name:run_swarming_xcode_install', ] + dim_args + ['--name', 'run_swarming_xcode_install', '--', isolate_hash, 'pyt
hon', 'swarming_xcode_install.py', ] subprocess.check_call(cmd) print('All tasks completed.') finally: shutil.rmtree(tmp_dir) return 0 if __name__ == '__main__': sys.exit(main())
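As a side note, the --dimension handling in the script above simply flattens each ('key', 'value') pair into a repeated flag; a minimal standalone illustration (the 'os' pair is a made-up extra dimension):

# Each pair expands to its own '--dimension key value' argument group.
dimensions = [['pool', 'Chrome'], ['xcode_version', '9.0'], ['os', 'Mac-10.13']]
dim_args = []
for d in dimensions:
    dim_args += ['--dimension'] + d
print(dim_args)
# -> ['--dimension', 'pool', 'Chrome', '--dimension', 'xcode_version', '9.0',
#     '--dimension', 'os', 'Mac-10.13']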
sebastic/QGIS
python/plugins/processing/algs/qgis/VariableDistanceBuffer.py
Python
gpl-2.0
3,079
0.001299
# -*- coding: utf-8 -*- """ *************************************************************************** VariableDistanceBuffer.py --------------------- Date : August 2012 Copyright : (C) 2012 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'August 2012' __copyright__ = '(C) 2012, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' from qgis.core import QGis from processing.core.GeoAlgorithm import GeoAlgorithm from processing.core.parameters import ParameterVector from processing.core.parameters import ParameterBoolean from processing.core.parameters import ParameterNumber from processing.core.parameters import ParameterTableField from processing.core.outputs import OutputVector import Buffer as buff from processing.tools import dataobjects class VariableDistanceBuffer(GeoAlgorithm): INPUT = 'INPUT' OUTPUT = 'OUTPUT' FIELD = 'FIELD' SEGMENTS = 'SEGMENTS' DISSOLVE = 'DISSOLVE' def defineCharacteristics(self): self.name, self.i18n_name = self.trAlgorithm('Variable distance buffer') self.group, self.i18n_group = self.trAlgorithm('Vector geometry tools') self.addParameter(ParameterVector(self.INPUT, self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY])) self.addParameter(ParameterTableField(self.FIELD, self.tr('Distance field'), self.INPUT)) self.addParameter(ParameterNumber(self.SEGMENTS, self.tr('Segments'), 1, default=5)) self.addParameter(ParameterBoolean(self.DISSOLVE, self.tr('Dissolve result'), False)) self.addOutput(OutputVector(self.OUTPUT, self.tr('Buffer'))) def processAlgorithm(self, progress): layer = dataob
jects.getObjectFromUri(self.getParameterValue(self.INPUT)) dissolve = self.getParameterValue(self.DISSOLVE) field = self.getParameterValue(self.FIELD) segments = int(self.getParameterValue(self.SEGMENTS)) writer = self.getOutputFromName(self.OUTPUT).getVectorWriter( layer.pendingFields().toList(), QGis.WKBPolygon, layer.crs()) buff.buffering(progress, writer, 0, field, True, layer, dissolve,
segments)
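For orientation, a hedged sketch of how this Processing algorithm might be invoked from the QGIS 2.x Python console; the algorithm id and the positional parameter order are assumptions inferred from defineCharacteristics() above, and the file paths are placeholders.

import processing

processing.runalg(
    'qgis:variabledistancebuffer',  # assumed algorithm id
    '/path/to/points.shp',          # INPUT layer
    'radius',                       # FIELD holding per-feature buffer distances
    5,                              # SEGMENTS
    False,                          # DISSOLVE
    '/tmp/buffers.shp',             # OUTPUT
)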
arubertoson/maya-mamprefs
mamprefs/markingmenus.py
Python
mit
11,879
0.000253
""" """ import sys import re import keyword import logging import traceback from functools import partial from PySide import QtCore from PySide.QtCore import QObject from maya import cmds from mampy.pyside.utils import get_qt_object from mamprefs import config from mamprefs.base import BaseManager, deleteUI, file_to_pyobject logger = logging.getLogger(__name__) logger.setLevel(logging.INFO) ACTIVE_MENU = None def get_parent_panel(): """ Return current panels parent. """ panel = cmds.getPanel(up=True) if cmds.panel(panel, q=True, ex=True): panel_layout = cmds.layout(panel, q=True, p=True) while not cmds.paneLayout(panel_layout, q=True, ex=True): panel_layout = cmds.control(panel_layout, q=True, p=True) if cmds.paneLayout(panel_layout, q=True, ex=True): return panel_layout else: return 'viewPanes' class MarkingMenuManager(BaseManager): """ """ def __init__(self): super(MarkingMenuManager, self).__init__('.markingmenu') def __getitem__(self, key): for menu_list in self.map.itervalues(): for menu in menu_list: if menu.name == key: return menu else: raise KeyError('"{}" is not in menu map.'.format(key)) def add_menu_items(self): """ Create menu items for every existing marking menu. """ cmds.menuItem(divider=True) for file_name, menu_list in self.map.iteritems(): for menu in menu_list: cmds.menuItem( l=menu.name.title(), c=partial(self.output, menu), ) cmds.menuItem(ob=True, c=partial(self.edit, file_name)) cmds.menuItem(divider=True) def initUI(self): """ Creates the user interface, can be used to update it aswell. """ super(MarkingMenuManager, self).initUI() # UI element names main_menu = config['MENU_MAIN_NAME'] marking_menu = config['MENU_MARKING_NAME'] layout_menu = config['MENU_LAYOUT_NAME'] # Delete UI elements if they exists. deleteUI(marking_menu) # Create the UI cmds.menuItem( marking_menu, label='Marking Menus', subMenu=True, allowOptionBoxes=True, insertAfter=layout_menu, parent=main_menu, tearOff=True, ) cmds.menuItem(l='Update', c
=lambda *args: self.reload_marking_menus()) if self.map: self.add_menu_items() else: cmds.menuItem(l='No Marking Menus', enable=False) cmds.menuItem(l='Clean Scene', c=lambda *args: self.clean_menu()) def parse_files(self): for file_name, f in self.files.iteritems(): file_map = file_to_pyobject(f) self.map[file_nam
e] = [ MarkingMenu(**menu) for menu in file_map # for name, item in menu.iteritems() ] def reload_marking_menus(self): """ Rebuild menus and re-parse files. Then rebuild the UI. """ self.reload() self.initUI() def clean_menu(self): """ .. note:: Might be redundant. """ deleteUI(config['MENU_MARKING_POPUP_NAME']) def output(self, menu, *args): """ Outputs to script editor. """ if not any('radialPosition' in item for item in menu.items): for item in menu.items: print item else: for radial in ["N", "NW", "W", "SW", "S", "SE", "E", "NE"]: for item in menu.items: try: if radial == item['radialPosition']: print '{}: {}'.format(radial, item) except KeyError: pass class MarkingMenu(object): """ """ def __init__(self, name, button, marking_menu, modifiers, items, option_boxes=False): self.name = name self.marking_menu = marking_menu self.button = button self.option_boxes = option_boxes self.items = list() self.modifiers = {'{}Modifier'.format(i): True for i in modifiers} self.pane_widget = None self.closing_event = MarkingMenuEventFilter() self.parse_items(items) logger.debug([name, button, marking_menu, modifiers, items]) def __str__(self): return '{}({})'.format(self.__class__.__name__, self.name) __repr__ = __str__ def parse_items(self, items): logger.debug('New menu.') for item in items: logger.debug(item) if 'sub_menu' in item: logging.debug('building sub menu') sub_list = item.pop('sub_menu', []) sub_list.append({'set_parent': True}) logging.debug(sub_list) item['subMenu'] = True self.items.append(MarkingMenuItem(**item)) self.parse_items(sub_list) else: self.items.append(MarkingMenuItem(**item)) def build_menu(self): """ Creates menu items. """ try: cmds.popupMenu( config['MENU_MARKING_POPUP_NAME'], button=self.button, allowOptionBoxes=self.option_boxes, markingMenu=self.marking_menu, parent=get_parent_panel(), **self.modifiers ) logger.debug('building menu items:') for item in self.items: logger.debug(item) if 'set_parent' in item: cmds.setParent('..', m=True) else: cmds.menuItem(**item.unpack()) except: traceback.print_exc(file=sys.stdout) def show(self): """ Shows marking menu on hotkey press. """ try: self.pane_widget.removeEventFilter(self.closing_event) except AttributeError: pass deleteUI(config['MENU_MARKING_POPUP_NAME']) self.build_menu() self.pane_widget = get_qt_object(get_parent_panel()) self.pane_widget.installEventFilter(self.closing_event) def hide(self): try: self.pane_widget.removeEventFilter(self.closing_event) except AttributeError: pass deleteUI(config['MENU_MARKING_POPUP_NAME']) class MarkingMenuEventFilter(QObject): """ Filter to handle events when building and hiding marking menus. """ key_release = False is_child = False destroy = False def eventFilter(self, obj, event): """Make marking menus behave like other maya marking menus.""" # Destroy the menu in a new event cycle. If we don't do this we will # delete the menu before the commands or sub menus are shown and crash # maya. if self.destroy: self.destroy = False hide_menu() etype = event.type() if etype == QtCore.QEvent.ChildRemoved: self.is_child = False if self.key_release: self.destroy = True if etype == QtCore.QEvent.ChildAdded: self.is_child = True else: if etype == QtCore.QEvent.ShortcutOverride: if event.isAutoRepeat(): self.key_release = False return True elif etype == QtCore.QEvent.KeyRelease: if not self.is_child: hide_menu() self.key_release = True return super(MarkingMenuEventFilter, self).eventFilter(obj, event) class MarkingMenuItem(object): """ """ default_menu = {
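One detail worth making explicit: MarkingMenu.__init__ above turns the list of modifier names into keyword flags for cmds.popupMenu. A tiny standalone sketch of that expansion (the modifier names are example values):

# ['ctrl', 'alt'] becomes the keyword flags accepted by cmds.popupMenu.
modifiers = ['ctrl', 'alt']
kwargs = {'{}Modifier'.format(m): True for m in modifiers}
print(kwargs)  # {'ctrlModifier': True, 'altModifier': True}
# later forwarded as cmds.popupMenu(..., **kwargs)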
zstackio/zstack-woodpecker
integrationtest/vm/vm_password/test_chg_unexist_usr_passwd_u14.py
Python
apache-2.0
2,941
0.011561
''' test for changing unexisted user password @author: SyZhao ''' import apibinding.inventory as inventory import zstackwoodpecker.test_util as test_util import zstackwoodpecker.test_lib as test_lib import zstackwoodpecker.test_state as test_state import zstackwoodpecker.operations.vm_operations as vm_ops import zstacklib.utils.ssh as ssh import test_stub exist_users = ["root"] users = ["a_", "aa" ] passwds = ["0aIGFDFBB_N", "a1_" ] vm = None def test(): global vm, exist_users test_util.test_dsc('change unexisted user password test') vm = test_stub.create_vm(vm_name = 'cknewusrvmpswd-u14-64', image_name = "imageName_i_u14") vm.check() backup_storage_list = test_lib.lib_get_backup_storage_list_by_vm(vm.vm) for bs in backup_storage_list: if bs.type == inventory.IMAGE_STORE_BACKUP_STORAGE_TYPE: break if bs.type == inventory.SFTP_BACKUP_STORAGE_TYPE: break if bs.type == inventory.CEPH_BACKUP_STORAGE_TYPE: break else: vm.destroy() test_util.test_skip('Not find image store type backup storage.') for (usr,passwd) in zip(users, passwds): if usr not in exist_users: test_util.test_logger("un-existed user:%s change vm password" %(usr)) #if the user is not existed, it should report #try: # vm_ops.change_vm_password(vm.get_vm().uuid, usr, passwd, skip_stopped_vm = None, session_uuid = None) #except Exception,e: # test_util.test_logger("unexisted user change vm password exception is %s" %(str(e)))
# normal_failed_string = "not exist" # if normal_failed_string
in str(e): # test_util.test_logger("unexisted user return correct, create a the user for it.") #else: # test_util.test_fail("user not exist in this OS, it should not raise exception, but return a failure.") test_stub.create_user_in_vm(vm.get_vm(), usr, passwd) exist_users.append(usr) #When vm is running: vm_ops.change_vm_password(vm.get_vm().uuid, usr, passwd, skip_stopped_vm = None, session_uuid = None) if not test_lib.lib_check_login_in_vm(vm.get_vm(), usr, passwd): test_util.test_fail("create vm with user:%s password: %s failed", usr, passwd) #When vm is stopped: #vm.stop() vm_ops.change_vm_password(vm.get_vm().uuid, "root", test_stub.original_root_password) #vm.start() vm.check() vm.destroy() vm.check() vm.expunge() vm.check() test_util.test_pass('Set password when VM is creating is successful.') #Will be called only if exception happens in test(). def error_cleanup(): global vm if vm: vm.destroy() vm.expunge()
dantebarba/docker-media-server
plex/Subliminal.bundle/Contents/Libraries/Shared/guessit/transfo/split_on_dash.py
Python
gpl-3.0
1,659
0.000603
#!/usr/bin/env python # -*- coding: utf-8 -*- # # GuessIt - A library for guessing information from filenames # Copyright (c) 2013 Nicolas Wa
ck <wackou@gmail.com> # # GuessIt is free software; you can redistribute it and/or modify it under # the terms of the Lesser GNU General Public
License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # GuessIt is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # Lesser GNU General Public License for more details. # # You should have received a copy of the Lesser GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # from __future__ import absolute_import, division, print_function, unicode_literals from guessit.plugins.transformers import Transformer from guessit.patterns import sep import re class SplitOnDash(Transformer): def __init__(self): Transformer.__init__(self, 245) def process(self, mtree, options=None): """split into '-' separated subgroups (with required separator chars around the dash) """ for node in mtree.unidentified_leaves(): indices = [] pattern = re.compile(sep + '-' + sep) match = pattern.search(node.value) while match: span = match.span() indices.extend([span[0], span[1]]) match = pattern.search(node.value, span[1]) if indices: node.partition(indices)
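A self-contained sketch of the separator-scanning loop in process() above; the sep pattern here is a simplified stand-in for guessit.patterns.sep, and the sample string is made up.

import re

sep = r'[ \._]'  # simplified stand-in for guessit.patterns.sep
value = 'Show Name - S01E02 - Episode Title'
indices = []
pattern = re.compile(sep + '-' + sep)
match = pattern.search(value)
while match:
    span = match.span()
    indices.extend([span[0], span[1]])
    match = pattern.search(value, span[1])
print(indices)  # offsets around each ' - ' separator, here [9, 12, 18, 21]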
RivuletStudio/rivuletpy
tests/testssm.py
Python
bsd-3-clause
624
0.001603
from filtering.morphology import ssm from rivuletpy.utils.io import * import matplotlib.pyplot as plt import skfmm ITER = 30 img = loadimg('/home/siqi/ncidata/rivuletpy/tests/data/test-crop.tif') bimg = (img > 0).astype('int') dt = skfmm.distance(bimg, dx=1) sdt = ssm(dt, anisotro
pic=True, iterations=ITER) try: from skimage import filters except ImportError: from skimage import filter as filters s_seg = sdt > filters.threshold_otsu(sdt) plt.figure() plt.title('DT') plt.imshow(dt.max(-1)) plt.figure() plt
.title('img > 0') plt.imshow((img > 0).max(-1)) plt.figure() plt.title('SSM-DT') plt.imshow(sdt.max(-1))
yavalvas/yav_com
build/matplotlib/lib/matplotlib/tight_bbox.py
Python
mit
2,604
0
""" This module is to support *bbox_inches* option in savefig command. """ from __future__ import (absolute_import, division, print_function, unicode_literals) import six import warnings from matplotlib.transforms import Bbox, TransformedBbox, Affine2D def adjust_bbox(fig, bbox_inches, fixed_dpi=None): """ Temporarily adjust the figure so that only the specified area (bbox_inches) is saved. It modifies fig.bbox, fig.bbox_inches, fig.transFigure._boxout, and fig.patch. While the figure size changes, the scale of the original figure is conserved. A function which restores the original values are returned. """ origBbox = fig.bbox ori
gBboxInches = fig.bbox_inches _boxout = fig.transFigure._boxout asp_list = [] locator_list = [] for ax in fig.axes: pos = ax.get_position(original=False).frozen() locator_list.append(ax.get_axes_locator()) asp_list.append(ax.get_aspect()) def _l(a, r, pos=pos): return pos ax.set_a
xes_locator(_l) ax.set_aspect("auto") def restore_bbox(): for ax, asp, loc in zip(fig.axes, asp_list, locator_list): ax.set_aspect(asp) ax.set_axes_locator(loc) fig.bbox = origBbox fig.bbox_inches = origBboxInches fig.transFigure._boxout = _boxout fig.transFigure.invalidate() fig.patch.set_bounds(0, 0, 1, 1) if fixed_dpi is not None: tr = Affine2D().scale(fixed_dpi) dpi_scale = fixed_dpi / fig.dpi else: tr = Affine2D().scale(fig.dpi) dpi_scale = 1. _bbox = TransformedBbox(bbox_inches, tr) fig.bbox_inches = Bbox.from_bounds(0, 0, bbox_inches.width, bbox_inches.height) x0, y0 = _bbox.x0, _bbox.y0 w1, h1 = fig.bbox.width * dpi_scale, fig.bbox.height * dpi_scale fig.transFigure._boxout = Bbox.from_bounds(-x0, -y0, w1, h1) fig.transFigure.invalidate() fig.bbox = TransformedBbox(fig.bbox_inches, tr) fig.patch.set_bounds(x0 / w1, y0 / h1, fig.bbox.width / w1, fig.bbox.height / h1) return restore_bbox def process_figure_for_rasterizing(fig, bbox_inches_restore, fixed_dpi=None): """ This needs to be called when the figure dpi changes during drawing (e.g., rasterizing). It recovers the bbox and re-adjusts it with the new dpi. """ bbox_inches, restore_bbox = bbox_inches_restore restore_bbox() r = adjust_bbox(fig, bbox_inches, fixed_dpi) return bbox_inches, r
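For context, a hedged sketch of how adjust_bbox is consumed by callers such as savefig's bbox_inches handling: it mutates the figure in place and the returned callable undoes the changes. The bbox values and output filename are arbitrary examples.

import matplotlib
matplotlib.use('Agg')  # headless backend, just for the sketch
import matplotlib.pyplot as plt
from matplotlib.transforms import Bbox
from matplotlib.tight_bbox import adjust_bbox

fig, ax = plt.subplots()
ax.plot([0, 1], [0, 1])
bbox_inches = Bbox.from_bounds(0, 0, 3, 2)    # region to keep, in inches
restore_bbox = adjust_bbox(fig, bbox_inches)  # temporarily resize the figure
fig.savefig('cropped.png')
restore_bbox()                                # put the figure back as it was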
rohitsinha54/Learning-Python
algorithms/stack.py
Python
mit
716
0
#!/usr/bin/env python """stack.py: Stack implementation""" __author__ = 'Rohit Sinha' class Stack:
def __init__(self): self.items = [] def isEmpty(self): return self.items == [] def push(self, item): self.items.append(item) def pop(self): return self.items.pop() def peek(self): return self.items[le
n(self.items) - 1] def size(self): return len(self.items) def __str__(self): return str(self.items).strip('[]') if __name__ == '__main__': s = Stack() print(s.isEmpty()) s.push(5) s.push('Hello') print(s.peek()) s.push(True) print(s.peek()) print(s.size()) print(s.pop()) print(s)
fruce-ki/utility_scripts
chromosome_surgery.py
Python
mit
3,387
0.006791
#!/usr/bin/env python3 import sys from Bio import SeqIO from argparse import ArgumentParser, RawDescriptionHelpFormatter usage = "Chromosome surgery: Splice something into and/or out of a chromosome." # Main Parsers parser = ArgumentParser(description=usage, formatter_class=RawDescriptionHelpFormatter) parser.add_argument("-f", "--fasta", type=str, required=True, help="Input FASTA.") parser.add_argument("-o", "--output", type=str, required=False, help="Output FASTA.") parser.add_argument("-c", "--cid", type=str, required=True, help="Chromosome ID to edit.") parser.add_argument("-i", "--insert", type=str, required=False, help="FASTA of sequence to insert.") parser.add_argument("-I", "--incision", type=int, required=False, help="1-based nucleotide after which to insert the insert.") parser.add_argument("-e", "--excision_start", type=int, required=False, help="1-based nucleotide that is the first to delete (0).") parser.add_argument("-E", "--excision_end", type=int, required=False, help="1-based nucleotide that is the last to delete (0).") args = parser.parse_args() # Harmless defaults splice_in = '' incision = 0 excision_start = 0 excision_end = 0 no_insert = (not args.incision) or (not args.insert) no_excision = (not args.excision_start) or (not args.excision_end) if no_insert and no_excision: sys.stderr.write("Incomplete edit arguments!\n") exit(0) # Get insert if not no_insert: incision = args.incision with open(args.insert, 'r') as splicein: record = list(SeqIO.parse(splicein, 'fasta'))[0] splice_in = record.seq # No need to shift the incision coordinate. # The 1-based right-closed index after which to cut is the same location as the 0-based right-open substring end before the cut. if not no_excision: excision_start = args.excision_start excision_end = args.excision_end # Pythonize start coordinate from 1-based left-closed to 0-based left-closed. excision_start -= 1 # No need to change the end coordinate. The 1-based right-closed index is the same location as the 0-based right-open substring end. if (not no_insert) and not (no_excision): # Do excision after the incision. # Adjust coord
inates. if args.excision_start > args.incision and args.excision_end > args.incision: excision_start = args.excision_start + len(splice_in) excision_end = args.excision_end + len(splice_in) elif args.excision_start < incision and args.excision_end < incision: pass
# The incision will be applied first, no need to adjust it. The excision is unaffected by the incision anyway. else: sys.stderr.write('Error: Cannot apply the specified coordinates. Excision end must be after excision start, and the incision cannot be inside the excision.') # Parse and apply edit with open(args.fasta, 'r') as genome: if args.output: out = open(args.output, 'w') else: out =sys.stdout for record in SeqIO.parse(genome, 'fasta'): # Only edit the relevant entry if (record.id == args.cid): # Splice-in record.seq = record.seq[:incision] + splice_in + record.seq[incision:] # Splice-out record.seq = record.seq[:excision_start] + record.seq[excision_end:] # Output all the entries SeqIO.write(record, out, 'fasta') print("Done")
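Since the 1-based coordinate handling above is the easiest place to slip, here is the convention restated as a tiny worked example (sequence and coordinates are invented):

# A 1-based closed interval [start, end] maps to the Python slice
# seq[start-1:end], so only the start coordinate needs shifting.
seq = 'ABCDEFGHIJ'
excision_start, excision_end = 3, 5           # delete bases 3..5 (C, D, E)
kept = seq[:excision_start - 1] + seq[excision_end:]
print(kept)                                    # -> 'ABFGHIJ'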
SteveDiamond/cvxpy
examples/extensions/sudoku_admm.py
Python
gpl-3.0
2,414
0.018641
""" Copyright 2013 Ste
ven Diamond Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governi
ng permissions and limitations under the License. """ from cvxpy import * from mixed_integer import * import cvxopt import numpy as np n = 9 # 9x9 sudoku grid numbers = Variable(n,n) # TODO: 9*[Boolean(9,9)] doesn't work.... solution = cvxopt.matrix([ [0, 5, 2, 3, 7, 1, 8, 6, 4], [6, 3, 7, 8, 0, 4, 5, 2, 1], [1, 4, 8, 5, 2 ,6, 3, 0, 7], [4, 7, 1, 2, 3, 0, 6, 5, 8], [3, 6, 5, 1, 4, 8, 0, 7, 2], [8, 2, 0, 6, 5, 7, 4, 1, 3], [5, 1, 6, 7, 8, 3, 2, 4, 0], [7, 0, 3, 4, 6, 2, 1, 8, 5], [2, 8, 4, 0, 1, 5, 7, 3, 6] ]) # partial grid known =[(0,6), (0,7), (1,4), (1,5), (1,8), (2,0), (2,2), (2,7), (2,8), (3,0), (3,1), (4,0), (4,2), (4,4), (4,6), (4,8), (5,7), (5,8), (6,0), (6,1), (6,6), (6,8), (7,0), (7,3), (7,4), (8,1), (8,2)] def row(x,r): m, n = x.size for i in range(m): for j in range(n): if i == r: yield x[i,j] def col(x,c): m, n = x.size for i in range(m): for j in range(n): if j == c: yield x[i,j] def block(x,b): m, n = x.size for i in range(m): for j in range(n): # 0 block is r = 0,1, c = 0,1 # 1 block is r = 0,1, c = 2,3 # 2 block is r = 2,3, c = 0,1 # 3 block is r = 2,3, c = 2,3 if i // 3 == b // 3 and j // 3 == b % 3: yield x[i,j] # create the suboku constraints perms = lambda: Assign(n, n)*cvxopt.matrix(range(1,10)) constraints = [] for i in range(n): constraints += [vstack(*list(row(numbers, i))) == perms()] constraints += [vstack(*list(col(numbers, i))) == perms()] constraints += [vstack(*list(block(numbers, i))) == perms()] #constraints.extend(numbers[k] == solution[k] for k in known) # attempt to solve p = Problem(Minimize(sum(abs(numbers-solution))), constraints) p.solve(method="admm2", rho=0.5, iterations=25) print(sum(numbers.value - solution))
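The block() generator above picks cells with the test i // 3 == b // 3 and j // 3 == b % 3; equivalently, the block index of a cell can be computed directly, as this small sketch shows.

# Cell (i, j) lies in 3x3 block b exactly when b == (i // 3) * 3 + (j // 3).
def block_of(i, j):
    return (i // 3) * 3 + (j // 3)

print(block_of(0, 0))  # 0, top-left block
print(block_of(4, 4))  # 4, centre block
print(block_of(8, 0))  # 6, bottom-left block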
Hoshiyo/fabric_navitia
integration_tests/test_tyr/test_setup.py
Python
agpl-3.0
5,824
0.00206
# encoding: utf-8 from ..docker import docker_exec from ..utils import filter_column, python_requirements_compare from ..test_common import skipifdev # @skipifdev def test_update_tyr_config_file(distributed_undeployed): platform, fabric = distributed_undeployed # create empty directory for task under test platform.docker_exec("mkdir -p /srv/tyr") value, exception, stdout, stderr = fabric.execute_forked('update_tyr_config_file') assert exception is None assert stderr == '' # check fabric tasks execution count assert stdout.count("Executing task 'update_tyr_config_file'") == 2 # check existence of files created by the task under test assert platform.path_exists('
/srv/tyr/settings.py') assert platform.path_exists('/srv/tyr/se
ttings.wsgi') env = fabric.get_object('env') env.tyr_broker_username = 'toto' value, exception, stdout, stderr = fabric.execute_forked('update_tyr_config_file') assert exception is None assert stderr.count("Warning: run() received nonzero return code 1 while executing " "'diff /srv/tyr/settings.py /srv/tyr/settings.py.temp'") == 2 assert stdout.count("> CELERY_BROKER_URL = 'amqp://toto:guest@localhost:5672//'") == 2 assert stdout.count("< CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'") == 2 @skipifdev def test_setup_tyr(distributed_undeployed): platform, fabric = distributed_undeployed # create some objects used (symlinked) by the task under test platform.docker_exec('mkdir -p /usr/share/tyr/migrations/') platform.docker_exec('touch /usr/bin/manage_tyr.py') value, exception, stdout, stderr = fabric.execute_forked('setup_tyr') assert exception is None # check fabric tasks execution count assert stdout.count("Executing task 'setup_tyr'") == 2 assert stdout.count("Executing task 'update_cities_conf'") == 2 assert stdout.count("Executing task 'update_tyr_config_file'") == 0 assert stdout.count("Executing task 'update_tyr_instance_conf'") == 0 # check that user www-data exists assert filter_column(platform.get_data('/etc/passwd', 'host1'), 0, startswith='www-data') assert filter_column(platform.get_data('/etc/passwd', 'host2'), 0, startswith='www-data') # check existence of directories and files created by the task under test assert platform.path_exists('/etc/tyr.d') assert platform.path_exists('/srv/tyr') assert platform.path_exists('/var/log/tyr') assert platform.path_exists('/srv/ed/data/') assert platform.path_exists('/var/log/tyr/tyr.log') for instance in fabric.env.instances: assert platform.path_exists('/srv/ed/{}/'.format(instance)) assert platform.path_exists('/srv/ed/{}/alembic.ini'.format(instance)) assert platform.path_exists('/srv/ed/{}/settings.sh'.format(instance)) assert platform.path_exists('/etc/tyr.d/{}.ini'.format(instance)) assert platform.path_exists('/etc/init.d/tyr_worker') assert platform.path_exists('/srv/tyr/migrations') assert platform.path_exists('/srv/tyr/manage.py') assert platform.path_exists('/srv/tyr/cities_alembic.ini', 'host1') @skipifdev def test_update_tyr_confs(distributed_undeployed): platform, fabric = distributed_undeployed # create empty directories for task under test platform.docker_exec("mkdir -p /etc/tyr.d /srv/tyr") value, exception, stdout, stderr = fabric.execute_forked('update_tyr_confs') assert exception is None assert stderr == '' # check fabric tasks execution count assert stdout.count("Executing task 'update_tyr_config_file'") == 2 assert stdout.count("Executing task 'update_tyr_instance_conf'") == 2 * len(fabric.env.instances) assert stdout.count("Executing task 'update_cities_conf'") == 1 @skipifdev def test_upgrade_tyr_packages(distributed_undeployed): platform, fabric = distributed_undeployed fabric.execute('upgrade_tyr_packages') assert platform.get_version('python', 'host1').startswith('2.7') assert platform.get_version('python', 'host2').startswith('2.7') assert docker_exec(platform.containers['host1'], 'pip -V', return_code_only=True) == 0 assert docker_exec(platform.containers['host2'], 'pip -V', return_code_only=True) == 0 assert platform.get_version('navitia-tyr', 'host1') assert platform.get_version('navitia-tyr', 'host2') assert platform.get_version('navitia-common', 'host1') assert platform.get_version('navitia-common', 'host2') known_missing = ['argparse==1.2.1', 'wsgiref==0.1.2'] for host in ('host1', 'host2'): 
assert python_requirements_compare( platform.docker_exec('pip freeze', host), platform.get_data('/usr/share/tyr/requirements.txt', host) ) == known_missing # TODO this seems redundant with setup_tyr assert platform.path_exists('/etc/init.d/tyr_worker') @skipifdev def test_setup_tyr_master(distributed_undeployed): platform, fabric = distributed_undeployed fabric.execute('setup_tyr_master') assert platform.path_exists('/srv/ed/', 'host1') assert platform.path_exists('/srv/ed/', 'host2', negate=True) assert platform.path_exists('/etc/init.d/tyr_beat', 'host1') assert platform.path_exists('/etc/init.d/tyr_beat', 'host2', negate=True) @skipifdev def test_upgrade_ed_packages(distributed_undeployed): platform, fabric = distributed_undeployed fabric.execute('upgrade_ed_packages') assert platform.get_version('navitia-ed', 'host1') assert platform.get_version('navitia-ed', 'host2') assert platform.get_version('navitia-common', 'host1') assert platform.get_version('navitia-common', 'host2') assert platform.get_version('navitia-cities', 'host1') assert platform.get_version('navitia-cities', 'host2')
jgmanzanas/CMNT_004_15
project-addons/purchase_picking/purchase.py
Python
agpl-3.0
5,513
0.000908
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2014 Pexego Sistemas Informáticos All Rights Reserved # $Jesús Ventosinos Mayor <jesus@pexego.es>$ # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as published # by the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp import models, fields, api, _ from openerp.exceptions import except_orm class purchase_order(models.Model): _inherit = 'purchase.order' picking_created = fields.Boolean('Picking created', compute='is_picking_created') @api.multi def test_moves_done(self): '''PO is done at the delivery side if all the incoming shipments
are done''' for purchase in self: for line in purchase.order_line: for move in line.move_ids: if move.state != 'done': return False return True def is_picking_created(self): self.picking_created = self.picking_ids and True or False def _prepare_order_line_move(self, cr, uid, order, order_line
, picking_id, group_id, context=None): """ prepare the stock move data from the PO line. This function returns a list of dictionary ready to be used in stock.move's create() """ purchase_line_obj = self.pool['purchase.order.line'] res = super(purchase_order, self)._prepare_order_line_move( cr, uid, order, order_line, picking_id, group_id, context) for move_dict in res: move_dict.pop('picking_id', None) move_dict.pop('product_uos_qty', None) move_dict.pop('product_uos', None) move_dict['partner_id'] = order.partner_id.id if order.partner_ref: move_dict['origin'] += ":" + order.partner_ref return res def action_picking_create(self, cr, uid, ids, context=None): """ Se sobreescribe la función para que no se cree el picking. """ for order in self.browse(cr, uid, ids): self._create_stock_moves(cr, uid, order, order.order_line, False, context=context) def _create_stock_moves(self, cr, uid, order, order_lines, picking_id=False, context=None): """ MOD: Se sobreescribe la función para no confirmar los movimientos. """ stock_move = self.pool.get('stock.move') todo_moves = [] new_group = self.pool.get("procurement.group").create( cr, uid, {'name': order.name, 'partner_id': order.partner_id.id}, context=context) for order_line in order_lines: if not order_line.product_id: continue if order_line.product_id.type in ('product', 'consu'): for vals in self._prepare_order_line_move( cr, uid, order, order_line, picking_id, new_group, context=context): move = stock_move.create(cr, uid, vals, context=context) todo_moves.append(move) def move_lines_create_picking(self, cr, uid, ids, context=None): mod_obj = self.pool.get('ir.model.data') act_obj = self.pool.get('ir.actions.act_window') moves = self.pool('stock.move') result = mod_obj.get_object_reference(cr, uid, 'stock', 'action_receive_move') id = result and result[1] or False result = act_obj.read(cr, uid, [id], context=context)[0] self_purchase = self.browse(cr, uid, ids) move_lines = moves.search(cr, uid, [('origin', 'like', self_purchase.name + '%'), ('picking_id', '=', False)], context=context) if len(move_lines) < 1: raise except_orm(_('Warning'), _('There is any move line without associated picking')) result['context'] = [] if len(move_lines) > 1: result['domain'] = "[('id','in',[" + ','.join(map(str, move_lines)) + "])]" else: result['domain'] = "[('id','='," + str(move_lines[0]) + ")]" return result class purchase_order_line(models.Model): _inherit = 'purchase.order.line' @api.multi def write(self, vals): res = super(purchase_order_line, self).write(vals) for line in self: if line.move_ids and vals.get('date_planned', False): for move in line.move_ids: if move.state not in ['cancel',u'done'] and \ not move.container_id: move.date_expected = vals['date_planned'] return res
slozier/ironpython2
Src/StdLib/Lib/clrtype.py
Python
apache-2.0
27,760
0.006484
# Licensed to the .NET Foundation under one or more agreements. # The .NET Foundation licenses this file to you under the Apache 2.0 License. # See the LICENSE file in the project root for more information. __all__ = ["ClrClass", "ClrInterface", "accepts", "returns", "attribute", "propagate_attributes"] import clr clr.AddReference("Microsoft.Dynamic") clr.AddReference("Microsoft.Scripting") clr.AddReference("IronPython") if clr.IsNetCoreApp: clr.AddReference("System.Reflection.Emit") import System from System import Char, Void, Boolean, Array, Type, AppDomain from System.Reflection import FieldAttributes, MethodAttributes, PropertyAttributes, ParameterAttributes from System.Reflection import CallingConventions, TypeAttributes, AssemblyName from System.Reflection.Emit import OpCodes, CustomAttributeBuilder, AssemblyBuilder, AssemblyBuilderAccess from System.Runtime.InteropServices import DllImportAttribute, CallingConvention, CharSet from Microsoft.Scripting.Generation import Snippets from Microsoft.Scripting.Runtime import DynamicOperations from Microsoft.Scripting.Utils import ReflectionUtils from IronPython.Runtime import NameType, PythonContext from IronPython.Runtime.Types import PythonType, ReflectedField, ReflectedProperty def validate_clr_types(signature_types, var_signature = False): if not isinstance(signature_types, tuple): signature_types = (signature_types,) for t in signature_types: if type(t) is type(System.IComparable): # type overloaded on generic arity, eg IComparable and IComparable[T] t = t[()] # select non-generic version clr_type = clr.GetClrType(t) if t == Void: raise TypeError("Void cannot be used in signature") is_typed = clr.GetPythonType(clr_type) == t # is_typed needs to be weakened unt
il the generated
type # gets explicitly published as the underlying CLR type is_typed = is_typed or (hasattr(t, "__metaclass__") and t.__metaclass__ in [ClrInterface, ClrClass]) if not is_typed: raise Exception, "Invalid CLR type %s" % str(t) if not var_signature: if clr_type.IsByRef: raise TypeError("Byref can only be used as arguments and locals") # ArgIterator is not present in Silverlight if hasattr(System, "ArgIterator") and t == System.ArgIterator: raise TypeError("Stack-referencing types can only be used as arguments and locals") class TypedFunction(object): """ A strongly-typed function can get wrapped up as a staticmethod, a property, etc. This class represents the raw function, but with the type information it is decorated with. Other information is stored as attributes on the function. See propagate_attributes """ def __init__(self, function, is_static = False, prop_name_if_prop_get = None, prop_name_if_prop_set = None): self.function = function self.is_static = is_static self.prop_name_if_prop_get = prop_name_if_prop_get self.prop_name_if_prop_set = prop_name_if_prop_set class ClrType(type): """ Base metaclass for creating strongly-typed CLR types """ def is_typed_method(self, function): if hasattr(function, "arg_types") != hasattr(function, "return_type"): raise TypeError("One of @accepts and @returns is missing for %s" % function.func_name) return hasattr(function, "arg_types") def get_typed_properties(self): for item_name, item in self.__dict__.items(): if isinstance(item, property): if item.fget: if not self.is_typed_method(item.fget): continue prop_type = item.fget.return_type else: if not self.is_typed_method(item.fset): continue prop_type = item.fset.arg_types[0] validate_clr_types(prop_type) clr_prop_type = clr.GetClrType(prop_type) yield item, item_name, clr_prop_type def emit_properties(self, typebld): for prop, prop_name, clr_prop_type in self.get_typed_properties(): self.emit_property(typebld, prop, prop_name, clr_prop_type) def emit_property(self, typebld, prop, name, clrtype): prpbld = typebld.DefineProperty(name, PropertyAttributes.None, clrtype, None) if prop.fget: getter = self.emitted_methods[(prop.fget.func_name, prop.fget.arg_types)] prpbld.SetGetMethod(getter) if prop.fset: setter = self.emitted_methods[(prop.fset.func_name, prop.fset.arg_types)] prpbld.SetSetMethod(setter) def dummy_function(self): raise RuntimeError("this should not get called") def get_typed_methods(self): """ Get all the methods with @accepts (and @returns) decorators Functions are assumed to be instance methods, unless decorated with @staticmethod """ # We avoid using the "types" library as it is not a builtin FunctionType = type(ClrType.__dict__["dummy_function"]) for item_name, item in self.__dict__.items(): function = None is_static = False if isinstance(item, FunctionType): function, is_static = item, False elif isinstance(item, staticmethod): function, is_static = getattr(self, item_name), True elif isinstance(item, property): if item.fget and self.is_typed_method(item.fget): if item.fget.func_name == item_name: # The property hides the getter. So yield the getter yield TypedFunction(item.fget, False, item_name, None) if item.fset and self.is_typed_method(item.fset): if item.fset.func_name == item_name: # The property hides the setter. 
So yield the setter yield TypedFunction(item.fset, False, None, item_name) continue else: continue if self.is_typed_method(function): yield TypedFunction(function, is_static) def emit_methods(self, typebld): # We need to track the generated methods so that we can emit properties # referring these methods. # Also, the hash is indexed by name *and signature*. Even though Python does # not have method overloading, property getter and setter functions can have # the same func_name attribute self.emitted_methods = {} for function_info in self.get_typed_methods(): method_builder = self.emit_method(typebld, function_info) function = function_info.function if self.emitted_methods.has_key((function.func_name, function.arg_types)): raise TypeError("methods with clashing names") self.emitted_methods[(function.func_name, function.arg_types)] = method_builder def emit_classattribs(self, typebld): if hasattr(self, '_clrclassattribs'): for attrib_info in self._clrclassattribs: if isinstance(attrib_info, type): ci = clr.GetClrType(attrib_info).GetConstructor(()) cab = CustomAttributeBuilder(ci, ()) elif isinstance(attrib_info, CustomAttributeDecorator): cab = attrib_info.GetBuilder() else: make_decorator = attrib_info() cab = make_decorator.GetBuilder() typebld.SetCustomAttribute(cab) def get_clr_type_name(self): if hasattr(self, "_clrnamespace"): return self._clrnamespace + "." + self.__name__ else: return self.__name__ def create_type(self, typebld): self.emit_members(typebld) new_type = typebld.CreateType() self.map_members(new_type) return new_type class ClrInterface(ClrType): """ Set __metaclass__ in a Python class declaration to declare a CLR interface type. You need t
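To make the module docstring's intent concrete, a hedged IronPython-only sketch of declaring a typed method with this module; the class body is illustrative and the decorator usage follows the accepts/returns machinery shown above rather than any external documentation.

# Runs under IronPython only (requires the CLR).
import clrtype
from System import Int32

class Calculator(object):
    __metaclass__ = clrtype.ClrClass   # emit a real CLR type for this class

    @clrtype.accepts(Int32, Int32)     # CLR parameter types
    @clrtype.returns(Int32)            # CLR return type
    def add(self, a, b):
        return a + b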
tjwei/HackNTU_Data_2017
Week09/q_posts_friends.py
Python
mit
367
0.00545
from collections import Counter from pprint import pprint count = Counter() posts = gra
ph.get_object('me', fields=['posts.limit(100)'])['posts']['data'] for i, p in enumerate(posts): likes = get_all_data(graph, p['id']+"/likes") print(i, p['id'], len(
likes)) for x in likes: name = x['name'] count[name] += 1 pprint(count.most_common(15))
obulpathi/cdn1
cdn/transport/pecan/controllers/__init__.py
Python
apache-2.0
879
0
# Copyright (c) 2014 Rackspace, Inc. # # Licensed und
er the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permi
ssions and # limitations under the License. """Pecan Controllers""" from cdn.transport.pecan.controllers import root from cdn.transport.pecan.controllers import services from cdn.transport.pecan.controllers import v1 # Hoist into package namespace Root = root.RootController Services = services.ServicesController V1 = v1.ControllerV1
citrix-openstack-build/tempest
tempest/services/compute/xml/interfaces_client.py
Python
apache-2.0
4,377
0
# Copyright 2013 IBM Corp. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import time from lxml import etree from tempest.common.rest_client import RestClientXML from tempest import exceptions from tempest.services.compute.xml.common import Document from tempest.services.compute.xml.common import Element from tempest.services.compute.xml.common import Text from tempest.services.compute.xml.common import xml_to_json class InterfacesClientXML(RestClientXML): def __init__(self, config, username, password, auth_url, tenant_name=None): super(InterfacesClientXML, self).__init__(config, username, password,
auth
_url, tenant_name) self.service = self.config.compute.catalog_type def _process_xml_interface(self, node): iface = xml_to_json(node) # NOTE(danms): if multiple addresses per interface is ever required, # xml_to_json will need to be fixed or replaced in this case iface['fixed_ips'] = [dict(iface['fixed_ips']['fixed_ip'].items())] return iface def list_interfaces(self, server): resp, body = self.get('servers/%s/os-interface' % server, self.headers) node = etree.fromstring(body) interfaces = [self._process_xml_interface(x) for x in node.getchildren()] return resp, interfaces def create_interface(self, server, port_id=None, network_id=None, fixed_ip=None): doc = Document() iface = Element('interfaceAttachment') if port_id: _port_id = Element('port_id') _port_id.append(Text(port_id)) iface.append(_port_id) if network_id: _network_id = Element('net_id') _network_id.append(Text(network_id)) iface.append(_network_id) if fixed_ip: _fixed_ips = Element('fixed_ips') _fixed_ip = Element('fixed_ip') _ip_address = Element('ip_address') _ip_address.append(Text(fixed_ip)) _fixed_ip.append(_ip_address) _fixed_ips.append(_fixed_ip) iface.append(_fixed_ips) doc.append(iface) resp, body = self.post('servers/%s/os-interface' % server, headers=self.headers, body=str(doc)) body = self._process_xml_interface(etree.fromstring(body)) return resp, body def show_interface(self, server, port_id): resp, body = self.get('servers/%s/os-interface/%s' % (server, port_id), self.headers) body = self._process_xml_interface(etree.fromstring(body)) return resp, body def delete_interface(self, server, port_id): resp, body = self.delete('servers/%s/os-interface/%s' % (server, port_id)) return resp, body def wait_for_interface_status(self, server, port_id, status): """Waits for a interface to reach a given status.""" resp, body = self.show_interface(server, port_id) interface_status = body['port_state'] start = int(time.time()) while(interface_status != status): time.sleep(self.build_interval) resp, body = self.show_interface(server, port_id) interface_status = body['port_state'] timed_out = int(time.time()) - start >= self.build_timeout if interface_status != status and timed_out: message = ('Interface %s failed to reach %s status within ' 'the required time (%s s).' % (port_id, status, self.build_timeout)) raise exceptions.TimeoutException(message) return resp, body
googleads/google-ads-python
google/ads/googleads/v9/services/services/product_bidding_category_constant_service/transports/__init__.py
Python
apache-2.0
1,165
0.000858
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the
License. # from collections import OrderedDict from typing import Dict, Type from .base import ProductBiddingCategoryConstantS
erviceTransport from .grpc import ProductBiddingCategoryConstantServiceGrpcTransport # Compile a registry of transports. _transport_registry = ( OrderedDict() ) # type: Dict[str, Type[ProductBiddingCategoryConstantServiceTransport]] _transport_registry["grpc"] = ProductBiddingCategoryConstantServiceGrpcTransport __all__ = ( "ProductBiddingCategoryConstantServiceTransport", "ProductBiddingCategoryConstantServiceGrpcTransport", )
ara-ta3/jupyter-gauche
jupyter_gauche/install.py
Python
mit
1,024
0.006836
import json import os import sys from jupyter_client.kernelspec import install_kernel_spec from IPython.utils.tempdir import TemporaryDirectory kernel_json = { "display_name": "Gauche", "language": "gauche", "argv": [sys.executable, "-m", "jupyter_gauche", "-f", "{connection_file}"], "codemirror_mode": "scheme" } def install_gauche_kernel_spec(user=True): with TemporaryDirectory() as td: os.chmod(td, 0o755) #
Starts off as 700, not user readable with open(os.path.join(td, 'kernel.json'), 'w') as f: json.dump(kernel_json, f, sort_keys=True) print('Installing IPython kernel spec') install_kernel_spec(td, 'gauche', user=user, replace=True) def _is_root(): try: return os.geteuid() == 0 except AttributeError: return False # assume not an admin on non-Unix platforms def
main(argv=[]): user = '--user' in argv or not _is_root() install_gauche_kernel_spec(user=user) if __name__ == '__main__': main(argv=sys.argv)
darren-rogan/CouchPotatoServer
couchpotato/core/providers/base.py
Python
gpl-3.0
2,933
0.004773
from couchpotato.core.event import addEvent from couchpotato.core.helpers.variable import tryFloat from couchpotato.core.logger import CPLog from couchpotato.core.plugins.base import Plugin from couchpotato.environment import Env from urlparse import urlparse import re import time log = CPLog(__name__) class Provider(Plugin): type = None # movie, nzb, torrent, subtitle, trailer http_time_between_calls = 10 # Default timeout for url requests last_available_check = {} is_available = {} def isAvailable(self, test_url): if Env.get('dev'): return True now = time.time() host = urlparse(test_url).hostname if self.last_available_check.get(host) < now - 900: self.last_available_check[host] = now try: self.urlopen(test_url, 30) self.is_available[host] = True except: log.error('"%s" unavailable, trying again in an 15 minutes.', host) self.is_available[host] = False return self.is_available.get(host, False) class YarrProvider(Provider): cat_ids = [] sizeGb = ['gb', 'gib'] sizeMb = ['mb', 'mib'] sizeKb = ['kb', 'kib'] def __init__(self): addEvent('provider.belongs_to', self.belongsTo) addEvent('%s.search' % self.type, self.search) addEvent('yarr.search', self.search) addEvent('nzb.feed', self.feed) def download(self, url = '', nzb_id = ''): return self.urlopen(url) def feed(self): return [] def search(self, movie, quality): return [] def belongsTo(self, url, provider = None, host = None): try: if provider and provider == self.getName(): return self hostname = urlparse(url).hostname if host and hostname in host: return self else: for url_type in self.urls: download_url = self.urls[url_type] if hostname in download_url: return self except: log.debug('Url % s doesn\'t belong to %s', (url, self.getName())) return def parseSize(self, size): sizeRaw = size.lower() size = tryFloat(re.sub(r'[^0-9.]', '', size).strip()) for s in self.sizeGb: if s in sizeRaw: return s
ize * 1024 for s in self.sizeMb: if s in sizeRaw: return size for s in self.sizeKb: if s in sizeRaw: return size / 1024 return 0 def getCatId(self, identifier): for cats in self.ca
t_ids: ids, qualities = cats if identifier in qualities: return ids return [self.cat_backup_id] def found(self, new): log.info('Found: score(%(score)s) on %(provider)s: %(name)s', new)
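parseSize above normalises human-readable sizes to megabytes; the same logic re-expressed as a standalone function for clarity (the function name and sample inputs are illustrative):

import re

def parse_size_mb(size):
    # Return the size in megabytes, mirroring YarrProvider.parseSize.
    size_raw = size.lower()
    value = float(re.sub(r'[^0-9.]', '', size).strip())
    if any(unit in size_raw for unit in ('gb', 'gib')):
        return value * 1024
    if any(unit in size_raw for unit in ('mb', 'mib')):
        return value
    if any(unit in size_raw for unit in ('kb', 'kib')):
        return value / 1024
    return 0

print(parse_size_mb('1.4 GB'))   # 1433.6
print(parse_size_mb('700 MiB'))  # 700.0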
Levis0045/MetaLex
metalex/xmlised/makeBalise.py
Python
agpl-3.0
26,115
0.01425
#! usr/bin/env python # coding: utf8 from __future__ import print_function from __future__ import absolute_import from __future__ import unicode_literals """metalex is general tool for lexicographic and metalexicographic activities Copyright (C) 2017 by Elvis MBONING This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for more details. You should have received a copy of the GNU Affero General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. Contact: levismboning@yahoo.fr --------------------------------------------------------------------------- makeBalise transform extracted articles into well formed xml file. It can also generate HTML file for article edition Packages: >>> sudo apt-get install python-html5lib >>> sudo apt-get install python-lxml >>> sudo apt-get install python-bs4 Usage: >>> from metalex.dicXmilised import * >>> dicoHtml(save=True) """ # ----Internal Modules------------------------------------------------------ import metalex from .composeArticle import * from .dicXmlTool import * # ----External Modules------------------------------------------------------ import re import sys import codecs import os from bs4 import BeautifulSoup from random import sample from shutil import copyfile from lxml import etree from termcolor import colored # -----Exported Functions----------------------------------------------------- __all__ = ['BaliseXML', 'dico_html'] # -----Global Variables----------------------------------------------------- components = { 'xml' : { 'metalexMetadata' : ['metalexMetadata', 'projectName', 'author', 'dateCreation', 'comment', 'contributors', 'candidate'], 'metalexContent' : ['article', 'definition', 'example', 'figured', 'contrary', 'entry', 'flexion', 'category', 'gender', 'rection', 'phonetic', 'identificationComponent', 'treatmentComponent', 'cte_cat', 'processingUnit', 'cgr_pt', 'cgr_vrg', 'cgr_fpar', 'cgr_opar', 'cgr_ocrch', 'cgr_fcrch', 'metalexContent', 'cte_gender', 'metalexResultDictionary'] }, 'tei' : { 'teiHeader' : ['teiHeader', 'text', 'TEI', 'fileDesc', 'titleStmt', 'title', 'publicationStmt', 'p', 'sourceDesc', 'author'], 'text' : ['body', 'head', 'entry', 'form', 'orth', 'gramGrp', 'sense', 'def', 'cite', 'quote', 'span', 'usg', 'bibl', 'pos', 'genre', 'number', 'pron', 'etym'] }, 'lmf' : { 'GlobalInformation' : ['LexicalResource', 'feat', 'p', 'GlobalInformation'], 'Lexicon' : ['Lexicon', 'feat', 'LexicalEntry', 'WordForm', 'Definition', 'Sense', 'Lexicon'] }, 'dtd' : ['ELEMENT', 'ATTRIBUTE', 'PCDATA', 'CDATA', 'REQUIRED', 'IMPLIED'], 'xsd' : [] } codifArticles = [] # ---------------------------------------------------------- def dico_html(save=False): """Bu
ild HTML editor file of the all articles :return file: metalexViewerEditor.html """ print('\n --- %s %s \n\n' %(colored('Part 4: Generate Output formats', attrs=['bold']), '--'*25)) metalex.plugins instanceHtml = BaliseHTML() filepath = metalex.html_template metalex.utils.create_temp() if metalex.utils.in_dir('CopymetalexTemplate.html'): copyfile(filepath, 'CopymetalexTemplate.html')
souphtl = instanceHtml.html_inject('CopymetalexTemplate.html') if save: metalex.utils.go_to_dicresult() name = metalex.currentOcr+'_metalexViewerEditor.html' with codecs.open(name, 'w') as htmlresult: htmlresult.write(souphtl) metalex.utils.create_temp() os.remove('CopymetalexTemplate.html') message = "*"+name+"* has correctly been generated > Saved in dicTemp folder" metalex.logs.manageLog.write_log(message) else: souphtl = instanceHtml.html_inject('CopymetalexTemplate.html') if save: metalex.utils.go_to_dicresult() with codecs.open(name, 'w') as htmlresult: htmlresult.write(souphtl) metalex.utils.create_temp() os.remove('CopymetalexTemplate.html') message = "*"+name+"* has correctly been generated > Saved in dicTemp folder" metalex.logs.manageLog.write_log(message) print('\n\n --- %s --------------- \n\n' %colored('MetaLex Processes was ended: consult results data in "dicTemp" folder', 'green', attrs=['bold'])) class BaliseHTML(): def __init__(self): self.resultHtml = '' def html_inject(self, template): """Create prettify HTML file all previous data generated :return str: html (prettify by BeautifulSoup) """ instanceXml = BaliseXML() contentxml = instanceXml.put_xml(typ='xml', save=True) metalex.utils.create_temp() soupXml = BeautifulSoup(contentxml, "html.parser") projectconf = metalex.utils.read_conf() Hauthor, Hname = projectconf['Author'], projectconf['Projectname'], Hdate,Hcomment = projectconf['Creationdate'], projectconf['Comment'] Hcontrib = projectconf['Contributors'] filetemplate = codecs.open(template, 'r', 'utf-8') souphtml = BeautifulSoup(filetemplate, "html5lib") content = souphtml.find('div', attrs={'id': 'all-articles'}) author = content.find('h3', attrs={'id': 'author'}) author.string = 'main: '+Hauthor date = content.find('h5', attrs={'id': 'date'}) date.string = Hdate descipt = content.find('p', attrs={'id': 'description'}) descipt.string = Hcomment contrib = content.find('h4', attrs={'id': 'contributors'}) contrib.string = 'contributors: '+Hcontrib project = content.find('h4', attrs={'id': 'projetname'}) project.string = Hname articlesxml = soupXml.findAll('article') articleshtml = souphtml.find('div', attrs={'id': 'mtl:articles'}) for x in articlesxml: elementart = BeautifulSoup('<article id=""></article>', 'html5lib') idart = x.get('id') artlem = x.get_text() elementart.article.append(artlem) elementart.article['id'] = idart articleshtml.append(elementart.find('article')) listlemme = souphtml.find('ul', attrs={'id': 'list-articles'}) for x in articlesxml: art = x.get_text() idart = x.get('id') lem = x.find('entry').get_text() lemme = BeautifulSoup('<li class="w3-hover-light-grey"><span class="lemme" onclick="changeImage('+ "'"+idart+"'"+')">'+lem+'</span><span class="fa fa-plus w3-closebtn" onclick="add('+ "'"+idart+"'"+')"/></li>', 'html5lib') listlemme.append(lemme.find('li')) filetemplate.close() self.resultHtml = souphtml.prettify('utf-8') return self.resultHtml class BaliseXML (): """Build XML file type (xml|tei|lmf) with global metadata of the project :param typ: str :return obj: instance of Bali
Alberto-Beralix/Beralix
i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/internet/iocpreactor/tcp.py
Python
gpl-3.0
68
0.014706
../../../
../../../share/pyshared/twisted/
internet/iocpreactor/tcp.py
rcosnita/fantastico
fantastico/middleware/tests/test_routing_middleware.py
Python
mit
4,545
0.011441
'''
Copyright 2013 Cosnita Radu Viorel

Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.

.. codeauthor:: Radu Viorel Cosnita <radu.cosnita@gmail.com>
.. py:module:: fantastico.middleware.tests.test_routing_middleware
'''

from fantastico.exceptions import FantasticoNoRequestError, FantasticoError
from fantastico.middleware.routing_middleware import RoutingMiddleware
from fantastico.tests.base_case import FantasticoUnitTestsCase
from mock import Mock
from webob.request import Request


class RoutingMiddlewareTests(FantasticoUnitTestsCase):
    '''This class provides all test cases required to make sure routing middleware is working as expected.'''

    def init(self):
        self._environ = {}
        self._app = Mock()
        self._router = Mock()
        self._router_cls = Mock(return_value=self._router)
        self._routing_middleware = RoutingMiddleware(self._app, self._router_cls)

    def test_route_handled_correctly(self):
        '''This test case ensures a route is handled correctly using the middleware.'''

        self._environ["fantastico.request"] = Request.blank("/simple/request")

        def handle_route(url, environ):
            if url == "/simple/request":
                environ["route_/simple/request_handler"] = {"controller": SimpleController(), "method": "do_request"}

        self._router.handle_route = handle_route

        self._routing_middleware(self._environ, Mock())

        route_handler = self._environ.get("route_/simple/request_handler")

        self.assertIsNotNone(route_handler)
        self.assertIsInstance(route_handler.get("controller"), SimpleController)
        self.assertEqual("do_request", route_handler.get("method"))

    def test_route_norequest_built(self):
        '''This test case ensures an exception is raised if no request is available in wsgi environ.'''

        self.assertRaises(FantasticoNoRequestError, self._routing_middleware, *[self._environ, Mock()])

    def test_route_unhandled_exception(self):
        '''This test case ensures that unhandled exceptions are correctly transformed to fantastico exceptions.'''

        self._environ["fantastico.request"] = Request.blank("/simple/request")
        self._router.handle_route = Mock(side_effect=Exception("Unhandled error"))

        with self.assertRaises(FantasticoError) as cm:
            self._routing_middleware(self._environ, Mock())

        self.assertTrue(str(cm.exception).find("Unhandled error") > -1)

    def test_router_registration_ok(self):
        '''This test case ensures that routing middleware correctly calls underlying methods from the given router
        so that it correctly discovers all available routes.'''

        self.get_loaders_invoked = False
        self.register_routes_invoked = False

        def get_loaders():
            self.get_loaders_invoked = True

        def register_routes():
            self.register_routes_invoked = True

        router = Mock()
        router_cls = Mock(return_value=router)
        router.get_loaders = lambda: get_loaders()
        router.register_routes = lambda: register_routes()

        RoutingMiddleware(Mock(), router_cls)

        self.assertTrue(self.get_loaders_invoked)
        self.assertTrue(self.register_routes_invoked)


class SimpleController(object):
    '''Class used to simulate a controller that can handle certain requests.'''
    pass
jeffery-do/Vizdoombot
doom/lib/python3.5/site-packages/dask/dataframe/tests/test_utils_dataframe.py
Python
mit
7,032
0
import numpy as np import pandas as pd import pandas.util.testing as tm import dask.dataframe as dd from dask.dataframe.utils import (shard_df_on_index, meta_nonempty, make_meta, raise_on_meta_error) import pytest def test_shard_df_on_index(): df = pd.DataFrame({'x': [1, 2, 3, 4, 5, 6], 'y': list('abdabd')}, index=[10, 20, 30, 40, 50, 60]) result = list(shard_df_on_index(df, [20, 50])) assert list(result[0].index) == [10] assert list(result[1].index) == [20, 30, 40] assert list(result[2].index) == [50, 60] def test_make_meta(): df = pd.DataFrame({'a': [1, 2, 3], 'b': list('abc'), 'c': [1., 2., 3.]}, index=[10, 20, 30]) # Pandas dataframe meta = make_meta(df) assert len(meta) == 0 assert (meta.dtypes == df.dtypes).all() assert isinstance(meta.index, type(df.index)) # Pandas series meta = make_meta(df.a) assert len(meta) == 0 assert meta.dtype == df.a.dtype assert isinstance(meta.index, type(df.index)) # Pandas index meta = make_meta(df.index) assert isinstance(meta, type(df.index)) assert len(meta) == 0 # Dask object ddf = dd.from_pandas(df, npartitions=2) assert make_meta(ddf) is ddf._meta # Dict meta = make_meta({'a': 'i8', 'b': 'O', 'c': 'f8'}) assert isinstance(meta, pd.DataFrame) assert len(meta) == 0 assert (meta.dtypes == df.dtypes).all() assert isinstance(meta.index, pd.RangeIndex) # Iterable meta = make_meta([('a', 'i8'), ('c', 'f8'), ('b', 'O')]) assert (meta.columns == ['a', 'c', 'b']).all() assert len(meta) == 0 assert (meta.dtypes == df.dtypes[meta.dtypes.index]).all() assert isinstance(meta.index, pd.RangeIndex) # Tuple meta = make_meta(('a', 'i8')) assert isinstance(meta, pd.Series) assert len(meta) == 0 assert meta.dtype == 'i8' assert meta.name == 'a' # With index meta = make_meta({'a': 'i8', 'b': 'i4'}, pd.Int64Index([1, 2], name='foo')) assert isinstance(meta.index, pd.Int64Index) assert len(meta.index) == 0 meta = make_meta(('a', 'i8'), pd.Int64Index([1, 2], name='foo')) assert isinstance(meta.index, pd.Int64Index) assert len(meta.index) == 0 # Numpy scalar meta = make_meta(np.float64(1.0)) assert isinstance(meta, np.float64) # Python scalar meta = make_meta(1.0) assert isinstance(meta, np.float64) # Timestamp x = pd.Timestamp(2000, 1, 1) meta = make_meta(x) assert meta is x # Dtype expressions meta = make_meta('i8') assert isinstance(meta, np.int64) meta = make_meta(float) assert isinstance(meta, np.dtype(float).type) meta = make_meta(np.dtype('bool')) assert isinstance(meta, np.bool_) assert pytest.raises(TypeError, lambda: make_meta(None)) def test_meta_nonempty(): df1 = pd.DataFrame({'A': pd.Categorical(['Alice', 'Bob', 'Carol']), 'B': list('abc'), 'C': 'bar', 'D': np.float32(1), 'E': np.int32(1), 'F': pd.Timestamp('2016-01-01'), 'G': pd.date_range('2016-01-01', periods=3, tz='America/New_York'), 'H': pd.Timedelta('1 hours', 'ms'), 'I': np.void(b' ')}, columns=list('DCBAHGFEI')) df2 = df1.iloc[0:0] df3 = meta_nonempty(df2) assert (df3.dtypes == df2.dtypes).all() assert df3['A'][0] == 'Alice' assert df3['B'][0] == 'foo' assert df3['C'][0] == 'foo' assert df3['D'][0] == np.float32(1) assert df3['D'][0].dtype == 'f4' assert df3['E'][0] == np.int32(1) assert df3['E'][0].dtype == 'i4' assert df3['F'][0] == pd.Timestamp('1970-01-01 00:00:00') assert df3['G'][0] == pd.Timestamp('1970-01-01 00:00:00', tz='America/New_York') assert df3['H'][0] == pd.Timedelta('1', 'ms') assert df3['I'][0] == 'foo' s = meta_nonempty(df2['A']) assert s.dtype == df2['A'].dtype assert (df3['A'] == s).all() def test_meta_duplicated(): df = pd.DataFrame(columns=['A', 'A', 'B']) res = 
meta_nonempty(df) exp = pd.DataFrame([['foo', 'foo', 'foo'], ['foo', 'foo', 'foo']], index=['a', 'b'], columns=['A', 'A', 'B']) tm.assert_frame_equal(res, exp) def test_meta_nonempty_index(): idx = pd.RangeIndex(1, name='foo') res = meta_nonempty(idx) assert type(res) is pd.RangeIndex assert res.n
ame == idx.name idx = pd.Int64Index([1], name='foo') res = meta_nonempty(idx) assert type(res) is pd.Int64Index assert res.name == idx.name idx = pd.Index(['a'], name='foo') res = meta_nonempty(idx) assert type(res) is pd.Index assert res.name == idx.name idx = pd.DatetimeIndex(['1970-01-01'], freq='d', tz='America/New_York', name='foo') res = meta_nonempty(idx) assert type(res
) is pd.DatetimeIndex assert res.tz == idx.tz assert res.freq == idx.freq assert res.name == idx.name idx = pd.PeriodIndex(['1970-01-01'], freq='d', name='foo') res = meta_nonempty(idx) assert type(res) is pd.PeriodIndex assert res.freq == idx.freq assert res.name == idx.name idx = pd.TimedeltaIndex([np.timedelta64(1, 'D')], freq='d', name='foo') res = meta_nonempty(idx) assert type(res) is pd.TimedeltaIndex assert res.freq == idx.freq assert res.name == idx.name idx = pd.CategoricalIndex(['a'], ['a', 'b'], ordered=True, name='foo') res = meta_nonempty(idx) assert type(res) is pd.CategoricalIndex assert (res.categories == idx.categories).all() assert res.ordered == idx.ordered assert res.name == idx.name levels = [pd.Int64Index([1], name='a'), pd.Float64Index([1.0], name='b')] idx = pd.MultiIndex(levels=levels, labels=[[0], [0]], names=['a', 'b']) res = meta_nonempty(idx) assert type(res) is pd.MultiIndex for idx1, idx2 in zip(idx.levels, res.levels): assert type(idx1) is type(idx2) assert idx1.name == idx2.name assert res.names == idx.names def test_meta_nonempty_scalar(): meta = meta_nonempty(np.float64(1.0)) assert isinstance(meta, np.float64) x = pd.Timestamp(2000, 1, 1) meta = meta_nonempty(x) assert meta is x def test_raise_on_meta_error(): try: with raise_on_meta_error(): raise RuntimeError("Bad stuff") except Exception as e: assert e.args[0].startswith("Metadata inference failed.\n") assert 'RuntimeError' in e.args[0] try: with raise_on_meta_error("myfunc"): raise RuntimeError("Bad stuff") except Exception as e: assert e.args[0].startswith("Metadata inference failed in `myfunc`.\n") assert 'RuntimeError' in e.args[0]
houseurmusic/my-swift
swift/obj/updater.py
Python
apache-2.0
9,252
0.001405
# Copyright (c) 2010-2011 OpenStack, LLC. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import cPickle as pickle import os import signal import sys import time from random import random from eventlet import patcher, Timeout, TimeoutError from swift.common.bufferedhttp import http_connect from swift.common.exceptions import ConnectionTimeout from swift.common.ring import Ring from swift.common.utils import get_logger, renamer, write_pickle from swift.common.daemon import Daemon from swift.obj.server import ASYNCDIR class ObjectUpdater(Daemon): """Update object information in container listings.""" def __init__(self, conf): self.conf = conf self.logger = get_logger(conf, log_route='object-updater') self.devices = conf.get('devices', '/srv/node') self.mount_check = conf.get('mount_check', 'true').lower() in \ ('true', 't', '1', 'on', 'yes', 'y') swift_dir = conf.get('swift_dir', '/etc/swift') self.interval = int(conf.get('interval', 300)) self.container_ring_path = os.path.join(swift_dir, 'container.ring.gz') self.container_ring = None self.concurrency = int(conf.get('concurrency', 1)) self.slowdown = float(conf.get('slowdown', 0.01)) self.node_timeout = int(conf.get('node_timeout', 10)) self.conn_timeout = float(conf.get('conn_timeout', 0.5)) self.successes = 0 self.failures = 0 def get_container_ring(self): """Get the container ring. 
Load it, if it hasn't been yet.""" if not self.container_ring: self.logger.debug( _('Loading container ring from %s'), self.container_ring_path) self.container_ring = Ring(self.container_ring_path) return self.container_ring def run_forever(self, *args, **kwargs): """Run the updater continuously.""" time.sleep(random() * self.interval) while True: self.logger.info(_('Begin object update sweep')) begin = time.time() pids = [] # read from container ring to ensure it's fresh self.get_container_ring().get_nodes('') for device in os.listdir(self.devices): if self.mount_check and not \ os.path.ismount(os.path.join(self.devices, device)): self.logger.warn( _('Skipping %s as it is not mounted'), device) continue while len(pids) >= self.concurrency: pids.remove(os.wait()[0]) pid = os.fork() if pid: pids.append(pid) else: signal.signal(signal.SIGTERM, signal.SIG_DFL) patcher.monkey_patch(all=False, socket=True) self.successes = 0 self.failures = 0 forkbegin = time.time() self.object_sweep(os.path.join(self.devices, device)) elapsed = time.time() - forkbegin self.logger.info(_('Object update sweep of %(device)s' ' completed: %(elapsed).02fs, %(success)s successes' ', %(fail)s failures'), {'device': device, 'elapsed': elapsed, 'success': self.successes, 'fail': self.failures}) sys.exit() while pids: pids.remove(os.wait()[0]) elapsed = time.time() - begin self.logger.info(_('Object update sweep completed: %.02fs'), elapsed) if elapsed < self.interval: time.sleep(self.interval - elapsed) def run_once(self, *args, **kwargs): """Run the updater once""" self.logger.info(_('Begin object update single threaded sweep')) begin = time.time() self.successes = 0 self.failures = 0 for device in os.listdir(self.devices): if self.mount_check and \ not os.path.ismount(os.path.join(self.devices, device)): self.logger.warn( _('Skipping %s as it is not mounted'), device) continue self.object_sweep(os.path.join(self.devices, device)) elapsed = time.time() - begin self.logger.info(_('Object update single threaded sweep completed: ' '%(elapsed).02fs, %(success)s successes, %(fail)s failures'), {'elapsed': elapsed, 'success': self.successes, 'fail': self.failures}) def object_sweep(self, device): """ If there are async pendings on the device, walk each one and update. :param device: path to device """
async_pending = os.path.join(device, ASYNCDIR) if not os.path.isdir(async_pending): return for prefix in os.listdir(async_pending): prefix_path = os.path.join(async_pending, prefix) if not os.path.isdir(prefix_path): continue last_obj_hash = None for update in sorted(os.listdir(prefix_path), reverse=True): updat
e_path = os.path.join(prefix_path, update) if not os.path.isfile(update_path): continue try: obj_hash, timestamp = update.split('-') except ValueError: self.logger.error( _('ERROR async pending file with unexpected name %s') % (update_path)) continue if obj_hash == last_obj_hash: os.unlink(update_path) else: self.process_object_update(update_path, device) last_obj_hash = obj_hash time.sleep(self.slowdown) try: os.rmdir(prefix_path) except OSError: pass def process_object_update(self, update_path, device): """ Process the object information to be updated and update. :param update_path: path to pickled object update file :param device: path to device """ try: update = pickle.load(open(update_path, 'rb')) except Exception: self.logger.exception( _('ERROR Pickle problem, quarantining %s'), update_path) renamer(update_path, os.path.join(device, 'quarantined', 'objects', os.path.basename(update_path))) return successes = update.get('successes', []) part, nodes = self.get_container_ring().get_nodes( update['account'], update['container']) obj = '/%s/%s/%s' % \ (update['account'], update['container'], update['obj']) success = True for node in nodes: if node['id'] not in successes: status = self.object_update(node, part, update['op'], obj, update['headers']) if not (200 <= status < 300) and status != 404: success = False else: successes.append(node['id']) if success: self.successes += 1 self.logger.debug(_('Update sent for %(obj)s %(path)s'), {'obj': obj, 'path': update_path}) os.unlink(update_path) else: self.failures += 1 self.logger.debug(_('Update failed for %(obj)s %(path)s'), {'obj': obj, 'path'
shakamunyi/fig
tests/integration/project_test.py
Python
apache-2.0
55,924
0.00059
from __future__ import absolute_import from __future__ import unicode_literals import os.path import random import py import pytest from docker.errors import APIError from docker.errors import NotFound from .. import mock from ..helpers import build_config as load_config from ..helpers import create_host_file from .testcases import DockerClientTestCase from .testcases import SWARM_SKIP_CONTAINERS_ALL from compose.config import config from compose.config import ConfigurationError from compose.config import types from compose.config.types import VolumeFromSpec from compose.config.types import VolumeSpec from compose.const import COMPOSEFILE_V2_0 as V2_0 from compose.const import COMPOSEFILE_V2_1 as V2_1 from compose.const import COMPOSEFILE_V2_2 as V2_2 from compose.const import COMPOSEFILE_V3_1 as V3_1 from compose.const import LABEL_PROJECT from compose.const import LABEL_SERVICE from compose.container import Container from compose.errors import HealthCheckFailed from compose.errors import NoHealthCheckConfigured from compose.project import Project from compose.project import ProjectError from compose.service import ConvergenceStrategy from tests.integration.testcases import is_cluster from tests.integration.testcases import no_cluster from tests.integration.testcases import v2_1_only from tests.integration.testcases import v2_2_only from tests.integration.testcases import v2_only from tests.integration.testcases import v3_only def build_config(**kwargs): return config.Config( version=kwargs.get('version'), services=kwargs.get('services'), volumes=kwargs.get('volumes'), networks=kwargs.get('networks'), secrets=kwargs.get('secrets'), configs=kwargs.get('configs'), ) class ProjectTest(DockerClientTestCase): def test_containers(self): web = self.create_service('web') db = self.create_service('db') project = Project('composetest', [web, db], self.client) project.up() containers = project.containers() self.assertEqual(len(containers), 2) @pytest.mark.skipif(SWARM_SKIP_CONTAINERS_ALL, reason='Swarm /containers/json bug') def test_containers_stopped(self): web = self.create_service('web') db = self.create_service('db') project = Project('composetest', [web, db], self.client) project.up() assert len(project.containers()) == 2 assert len(project.containers(stopped=True)) == 2 project.stop() assert len(project.containers()) == 0 assert len(project.containers(stopped=True)) == 2 def test_containers_with_service_names(self): web = self.create_service('web') db = self.create_service('db') project = Project('composetest', [web, db], self.client) project.up() containers = project.containers(['web']) self.assertEqual( [c.name for c in containers], ['composetest_web_1']) def test_containers_with_extra_service(self): web = self.create_service('web') web_1 = web.create_container() db = self.create_service('db') db_1 = db.create_container() self.create_service('extra').create_container() project = Project('composetest', [web, db], self.client) self.assertEqual( set(project.containers(stopped=True)), set([web_1, db_1]), ) def test_volumes_from_service(self): project = Project.from_config( name='composetest', config_data=load_config({ 'data': { 'image': 'busybox:latest', 'volumes': ['/var/data'], }, 'db': { 'image': 'busybox:latest', 'volumes_from': ['data'], }, }), client=self.client, ) db = project.get_service('db') data = project.get_service('data') self.assertEqual(db.volumes_from, [VolumeFromSpec(data, 'rw', 'service')]) def test_volumes_from_container(self): data_container = Container.create( self.client, 
image='busybox:latest', volumes=['/var/data'], name='composetest_data_container', labels={LABEL_PROJECT: 'composetest'}, host_config={}, ) project = Project.from_config( name='composetest', config_data=load_config({ 'db': { 'image': 'busybox:latest', 'volumes_from': ['composetest_data_container'], }, }), client=self.client, ) db = project.get_service('db') self.assertEqual(db._get_volumes_from(), [data_container.id + ':rw']) @v2_only() @no_cluster('container networks not supported in Swarm') def test_network_mode_from_service(self): project = Project.from_config( name='composetest', client=self.client, config_data=load_config({ 'version': str(V2_0), 'services': { 'net': { 'image': 'busybox:latest', 'command': ["top"] }, 'web': { 'image': 'busybox:latest', 'network_mode': 'service:net', 'command': ["top"] }, }, }), ) project.up() web = project.get_service('web') net = project.get_service('net') self.assertEqual(web.network_mode.mode, 'container:' + net.containers()[0].id) @v2_only() @no_cluster('container networks not supported in Swarm') def test_network_mode_from_container(self): def get_project(): return Project.from_config( name='composetest', config_data=load_config({ 'version': str(V2_0), 'services': { 'web': { 'image': 'busybox:latest', 'network_mode': 'container:composetest_net_container' }, }, }), client=self.client, ) with pytest.raises(ConfigurationError) as excinfo: get_project() assert "container 'composetest_net_container' which does not exist" in excinfo.exconly() net_container = Container.create( self.client, image='busybox:latest', name='composetest_net_container', command='top', labels={LABEL_PROJECT: 'composetest'}, host_config={}, ) net_container.start() project = get_project() project.up() web = project.get_service('web') self.assertEqual(web.network_mode.mode, 'container:' + net_container.id) @no_cluster('container networks not supported in Swarm') def test_net_from_service_v1(self): project = Project.from_config( name='composetest', config_data=load_config({ 'net': { 'image': 'busybox:latest', 'command': ["top"] }, 'web': { 'image': 'busybox:latest', 'net': 'container:net', 'command': ["top"] }, }), client=sel
f.client, ) project.up() web = project.get_service('web') net = project.get_service('net') self.assertEqual(web.network_mode.mode, 'container:' + net.containers()[0].id) @no_cluster('container networks not supported in Swarm') def test_net_from_container_v1(self): def get_project(): return Project.from_config( name='composete
st', config_data=load_config({ 'web': { 'image': 'busybox:latest', 'net': 'con
smarrazzo/pyava
consts.py
Python
mit
1,919
0.039083
USER_ID_LEN = 64;
NR_WAVELEN_POL_COEF = 5;
NR_NONLIN_POL_COEF = 8;
NR_DEFECTIVE_PIXELS = 30;
MAX_NR_PIXELS = 4096;
MEAS_DEAD_PIX = 13;# 18
MEAS_PIXELS = 3648;
SIZE_PREFIX = 6;
NR_TEMP_POL_COEF = 5;
MAX_TEMP_SENSORS = 3;
ROOT_NAME_LEN = 6;
AVS_SERIAL_LEN = 9; # 10
MAX_PIXEL_VALUE = 0xFFFC;
MAX_VIDEO_CHANNELS = 2;
MAX_LASER_WIDTH = 0xFFFF;
HW_TRIGGER_MODE = 1;
SW_TRIGGER_MODE = 0;
EDGE_TRIGGER_SOURCE = 0;
LEVEL_TRIGGER_SOURCE = 1;
MAX_TRIGGER_MODE = 1;
MAX_TRIGGER_SOURCE = 1;
MAX_TRIGGER_SOURCE_TYPE = 1;
MAX_INTEGRATION_TIME = 600000;
SAT_DISABLE_DET = 0;
SAT_ENABLE_DET = 1;
SAT_PEAK_INVERSION = 2;
NR_DAC_POL_COEF = 2;
TIMEOUT = 1000000;
ADR_WRITE = 0x02;
ADR_READ = 0x86;
ID_VENDOR = 0x1992;
ID_PRODUCT = 0x0667;

ERR_CODE = [
    'CODE 0x00 : UNKNOW',
    'CODE 0x01 : INVALID PARAMETER',
    'CODE 0x02 : INVALID PASSWORD',
    'CODE 0x03 : INVALID COMMAND',
    'CODE 0x04 : INVALID SIZE',
    'CODE 0x05 : MEASUREMENT PENDING',
    'CODE 0x06 : INVALID PIXEL RANGE',
    'CODE 0x07 : INVALID INTEGRATION TIME',
    'CODE 0x08 : OPERATION NOT SUPPORTED',
    'CODE 0x09 : INVALID COMBINATION',
    'CODE 0x0A : NO BUFFER AVAIBLE',
    'CODE 0x0B : NO SPECTRA AVAIBLE',
    'CODE 0x0C : INVALID STATE',
    'CODE 0x0D : UNEXPECTED DMA INT',
    'CODE 0x0E : INVALID FPGA FILE'
]

SENSOR_TYPE = ['RESERVED', 'Hams 8378-256', 'Hams 8378-1024', 'ILX554', 'Hams 9201', 'Toshiba TCD 1304', 'TSL 1301', 'TSL 1401', 'Hams 8378-512', 'Hams 9840']
maxdrib/Trump-Twitter
keywords/parse_companies.py
Python
mit
633
0.004739
import re

useless_words = ['Inc', 'Corporation', 'Company', 'Corp', 'Co', 'Energy', '&', 'The', '.com', "Inc.", "Corp.", "Co.", "of", "Ltd.", "Ltd"]

with open('companies.txt', 'r') as f:
    companies = f.readlines()

new_companies = []
for company in companies:
    company = company[:-1]
    company_words = company.split()
    new_company = [word for word in company_words if word not in useless_words]
    new_companies.append([word.lower() for word in new_company if len(word)>1])

print new_companies

with open('parsed_companies.txt', 'w') as f:
    for company in new_companies:
        f.write(' '.join(company)+'\n')
mpascu/SmartThermostatServer
server.py
Python
mit
6,963
0.012638
from flask import Flask, request, json import RPi.GPIO as GPIO import threading import time import socket import ast import Adafruit_DHT GPIO.setmode(GPIO.BCM) USE_TEST_TEMPERATURES = False app = Flask(__name__) class sensorReader(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.exitapp = False print ('SENSOR SERVER STARTED') if USE_TEST_TEMPERATURES: global server_socket server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM) server_socket.bind(('localhost', 5001)) server_socket.listen(5) def run(self): global data if USE_TEST_TEMPERATURES: (client_socket, address) = server_socket.accept() while not self.exitapp: size = len(data['sensors']) if (size!=0): client_socket.send (str(size)) values = client_socket.recv(512) #print "RECEIVED:" , values parsedValues = json.loads(values) for x in range(size): data['sensors'][x][str(x+1)]['value'] = parsedValues[x] else: while not self.exitapp: humidity, temperature = Adafruit_DHT.read_retry(Adafruit_DHT.DHT11, 5) data['sensors'][0]['1']['value'] = str(int(temperature)) print 'Temp={0:0.1f}*C Humidity={1:0.1f}%'.format(temperature, humidity) time.sleep(1) class actuatorTrigger(threading.Thread): def __init__(self): threading.Thread.__init__(self) self.exitapp = False GPIO.setup(4, GPIO.OUT) GPIO.setup(17, GPIO.OUT) GPIO.setup(27, GPIO.OUT) def run(self): global data pin = [4,17,27] while not self.exitapp: x=1 tempCount = 0 for t in data['thermostats']: mode=t.get(str(x))['mode'] if mode == 'ON': GPIO.output(pin[x-1], True) if mode == 'OFF': GPIO.output(pin[x-1], False) if mode == 'AUTO': for s in t.get(str(x))['sensors']: tempCount += int(data['sensors'][s-1][str(s)]['value']) '''print tempCount''' avg = tempCount / float(len(t.get(str(x))['sensors'])) '''print avg''' '''print t.get(str(x))['temperature']''' if (t.get(str(x))['hot']!='true'): if (float(t.get(str(x))['temperature'])-avg)<0.5: GPIO.output(pin[x-1], True) else: GPIO.output(pin[x-1], False) else: if (float(t.get(str(x))['temperature'])-avg)<0.5: GPIO.output(pin[x-1], False) else: GPIO.output(pin[x-1], True) x=x+1 time.sleep(1) @app.route("/") def hello(): """Brief introduction message""" return "Hello this is the API server of a smart thermostate!" 
@app.route('/temp', methods=['GET','DELETE','POST']) def showTemp(): """Offers the three available methods of the api for the temperature sensors GET - Lists all the sensors values POST - Adds a new temperature sensor DELETE - Delete all sensors """ global data if request.method == 'GET': return json.dumps(data.get('sensors'), indent=4) if request.method == 'DELETE': data['sensors'] = [] file = open('testData.json','w') json.dump(data,file,indent=4) file.close() return "All sensors deleted successfully" if request.method == 'POST': id = len(data['sensors'])+1 temp= {str(id) : {"value":"0", "name":request.form['name']}} data['sensors'].append(temp) file = open('testData.json','w') json.dump(data,file,indent=4) file.close() return "New temperature value created successfully" else: return "Not a valid method" @app.route('/thermo/<thermid>', methods=['GET','PUT']) def getThermostate(thermid): """Retunrs the thermostat data specified by <thermid>""" global data id = int(thermid) if request.method == 'GET': return json.dumps(data['thermostats'][id-1].get(str(id)), indent=4) if request.method == 'PUT': temp = request.form['temperature'] data['thermostats'][id-1].get(str(id))['temperature']=temp mode = request.form['mode'] data['thermostats'][id-1].get(str(id))['mode']=mode sensors = request.form['sensors'] sensors= ast.literal_eval(sensors) data['thermostats'][id-1].get(str(id))['sensors']=sensors time_programming = (request.form['time_programming']) print (time_programming) ''' n=json.dumps(time_programming)''' data['thermostats'][id-1].get(str(id))['time']=json.loads(time_programming) hot = (request.form['hot']) data['thermostats'][id-1].get(str(id))['hot']=hot file = open('testData.json','w') json.dump(data,file,indent=4) file.close() return ' ' @app.route('/thermo', methods=['GET','POST','DELETE']) def showThermo(): """Offers the three available methods of the api for the thermostates GET - Lists all thermostates POST - Adds a default thermostate with no sensors assigned and 21 degree DELETE - Delete all thermostates """ global data if request.method == 'GET': return json.dumps(data['thermostats'], indent=4) if request.method == 'POST': id = len(data['thermostats'])+1 thermo= {str(id) : {"name":request.form['name'], 'sensors':[], 'temperature':'21', 'mode':'OFF'}} data['thermostats'].append(thermo) file = open('testData.json','w') json.dump(data,file,indent=4) file.close() return "New thermostate created successfully" if request.method == 'DELETE': data['thermostats']=[] file = open('testData.json','w') json.dump(data
,file,indent=4) file.close() return "All thermostates deleted successfully" else: return "Not a valid method" def main(): global data file=open('testData.json','r') data = json.load(file) file.close() mySensorReader = sensorReader() mySensorReader.start() myActuatorTrigger = actuatorTrigger() myActuatorTrigger.start() app.run(host='0.0.0.0', port=6789,threaded=True, debug=False) try: mySensorReader.join
() myActuatorTrigger.join() except KeyboardInterrupt: mySensorReader.exitapp = True myActuatorTrigger.exitapp = True GPIO.cleanup() if __name__ == "__main__": main()
Canpio/Paddle
python/paddle/fluid/tests/unittests/test_im2sequence_op.py
Python
apache-2.0
5,232
0.004014
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # #Licensed under the Apache License, Version 2.0 (the "License"); #you may not use this file except in compliance with the License. #You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # #Unless required by applicable law or agreed to in writing, software #distributed under the License is distributed on an "AS IS" BASIS, #WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. #See the License for the specific language governing permissions and #limitations under the License. import unittest import numpy as np from op_test import OpTest def get_output_shape(attrs, in_shape): img_height = in_shape[2] img_width = in_shape[3] paddings = attrs['paddings'] kernels = attrs['kernels'] strides = attrs['strides'] output_height = \ 1 + \ (img_height + paddings[0] + paddings[2] - kernels[0] + strides[0] - 1) / \ strides[0] output_width = \ 1 + \ (img_width + paddings[1] + paddings[3] - kernels[1] + strides[1] - 1) / \ strides[1] return output_height, output_width def im2col(attrs, im, col): """ im: {CHW} col: {outputHeight, outputWidth, inputChannels, filterHeight, filterWidth} """ input_channels, input_height, input_width = im.shape output_height, output_width, _, filter_height, filter_width = col.shape stride_height, stride_width = attrs['strides'] padding_height, padding_width = attrs['paddings'][0:2] for col_row_idx in range(0, output_height): for col_col_idx in range(0, output_width): for channel in range(0, input_channels): for filter_row_idx in range(0, filter_height): for filter_col_idx in range(0, filter_width): im_row_offset = col_row_idx * stride_height \ + filter_row_idx - padding_height im_col_offset = col_col_idx * stride_width \ + filter_col_idx - padding_width if (im_row_offset < 0 or im_row_offset >= input_height or im_col_offset < 0 or im_col_offset >= input_width): col[col_row_idx][col_col_idx][channel][\ filter_row_idx][filter_col_idx] = 0.0 else: im_offset = (channel * input_height + im_row_offset \ ) * input_width + im_col_offset col[col_row_idx][col_col_idx][channel][\ filter_row_idx][filter_col_idx] = im[channel][ \ im_row_offset][im_col_offset] def Im2Sequence(inputs, attrs): output_height, output_width = get_output_shape(attrs, inputs.shape) img_channels = inputs.shape[1] batch_size = inputs.shape[0] out = np.zeros([ batch_size, output_height, output_width, img_channels, attrs['kernels'][0], attrs['kernels'][1] ]).astype("float32") for i in range(len(inputs)): im2col(attrs, inputs[i], out[i]) out = out.reshape([ batch_size * output_height * output_width, img_channels * attrs['kernels'][0] * attrs['kernels'][1] ]) return out class TestBlockExpandOp(OpTest): def config(self): self.batch_size = 1 self.img_channels = 3 self.img_height = 4 self.img_width = 4 self.attrs = { 'kernels': [2, 2], 'strides': [1, 1], 'paddings': [1, 1, 1, 1] } def setUp(self): self.config() self.op_type = "im2sequence" x = np.random.uniform(0.1, 1, [ self.batch_size, self.img_channels, self.img_height, self.img_width ]).astype("float32") out = Im2Sequence(x, self.attrs) self.inputs = {'X': x} self.outputs = {'Out': out} def test_check_output(self): self.check_output() def test_check_grad_normal(self): self.check_grad(['X'], 'Out') class TestBlockExpandOpCase2(TestBlockExpandOp): def config(self): self.batch_size = 2 self.img_channels = 3 self.img_height = 4 self.img_width = 5 self.attrs = { 'kernels': [2, 1], 'strides': [2, 1], 'paddings': [2, 1, 2, 1] } class 
TestBlockExpandOpCase3(TestBlockExpandOp): def config(self): self.batch_size = 3 self.img_channels = 1 self.img_height = 4 self.img_width = 5 self.attrs = { 'kernels': [2, 1], 'strides': [2, 1], 'paddings': [2, 0, 2, 0] } class TestBlockExpandOpCase4(TestBlockExpandOp): def config(self): self.batch_size = 2 self.img_channels =
2 self.img_height = 3 self.img_wi
dth = 3 self.attrs = { 'kernels': [2, 2], 'strides': [1, 1], 'paddings': [0, 0, 0, 0] } if __name__ == '__main__': unittest.main()
naikymen/QB9
uniprot_parser/uniprot_parser_v01.py
Python
gpl-3.0
14,072
0.004417
__author__ = 'nicolas' # coding=utf-8 from os.path import expanduser from ordereddict import OrderedDict from Bio import SwissProt import time import MySQLdb as mdb """ Fuck! from ordereddict import OrderedDict import MySQLdb as mdb dicc = {} dictdebug_empty = OrderedDict() dictdebug = dictdebug_empty dictdebug['hola'] = 'chau' print(dictdebug.items()) print(dictdebug_empty.items()) dictdebug_empty.clear() print(dictdebug_empty.items()) print(dictdebug.items()) """ # Establecer el tiempo de inicio del script start_time = time.time() # Variables del script database = "ptmdb" tabla_cuentas = "sprot_count1" tabla_ptms = "sprot_ptms1" file_name = "uniprot_sprot.dat" desde = 0 hasta = 542783 # Hay 542782 entradas de AC?? # Conectar a la base de datos con = mdb.connect('localhost', 'nicolas', passwd="nicolaslfp", db=database) cur = con.cursor() cur.execute("SELECT VERSION()") cur.execute("USE " + database) print("USE ptmdb;") # Abrir el .dat de uniprot uniprot_file = expanduser("~") + '/QB9_Files/' + file_name output_file = expanduser("~") + '/QB9-git/QB9/resources/output.txt' def count_amino_acids_ext(seq): # Defino una función que toma una secuencia y los cuenta prot_dic2 = prot_dic for aa in prot_dic2: prot_dic2[aa] = seq.count(aa) return prot_dic2 # y devuelve un dict ordenado con pares AA, #AA # Armo un diccionario con los AAs que voy a contar abc = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' prot_dic = OrderedDict((k, 0) for k in abc) # Interesting feature types ptmrecords = ["MOD_RES", "LIPID", "CARBOHYD", "DISULFID", "CROSSLNK"] # Non-experimental qualifiers for feature annotations neqs = ["Probable", "Potential", "By similar
ity"] # Y "Experimental" # Las categorías están en un diccionario con su type de mysql todo volar categories = OrderedDict() categories['AC'] = "varchar(30) NOT NULL" # accesion number categories['FT'] = "varchar(30) NOT NULL" categories['STATUS'] = "varchar(30) NOT NULL" categories['PTM'] = "varchar(100) NOT NULL" categories['FROM_RES'] = "varchar(10) NOT NULL" categories['TO_RES'] = "varchar(10) NOT NULL" categories['FROM_AA'] = "varchar(10) NOT NULL" # vamo a
implementar el target directamente!!!! =D categories['TO_AA'] = "varchar(10) NOT NULL" categories['SQ'] = "text(45000) NOT NULL" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64; categories['LENGTH'] = "varchar(200) NOT NULL" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64; categories['ORG'] = "text(500) NOT NULL" # organism categories['OC'] = "varchar(30) NOT NULL" # organism classification, vamos solo con el dominio categories['OX'] = "varchar(200) NOT NULL" # taxonomic ID categories['HO'] = "text(500)" # host organism categories['inumber'] = "varchar(200) NOT NULL" # categories['CC'] = "varchar(200)" # comments section, nos interesa el campo "PTM" # categories['SQi'] = "varchar(200)" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64; # Defino un diccionario modelo donde cargar los valores que voy a extraer de la lista empty_data = OrderedDict() for gato in categories: # usando las keys de categories y un valor por defecto todo vacío no es nulo ¿cómo hago? empty_data[gato] = 'NOFT' empty_data['FROM_RES'] = '?' empty_data['TO_RES'] = '?' empty_data['FROM_AA'] = '?' empty_data['TO_AA'] = '?' data = empty_data.copy() # este es el diccionario de registros vacío que voy a usar print("DROP TABLE " + tabla_cuentas + ";") print("DROP TABLE " + tabla_ptms + ";") # Crear la tabla de cuentas prot_dic_def_items = [] prot_dic_def = OrderedDict((k, 'SMALLINT') for k in abc) for cat, value in prot_dic_def.items(): # concatenaciones key y valor prot_dic_def_items.append(cat + ' ' + value) # guardadaes en la lista table_def = ', '.join(prot_dic_def_items) # definicion de la tabla print("CREATE TABLE IF NOT EXISTS " + tabla_cuentas + " (AC VARCHAR(30) UNIQUE, OC_ID VARCHAR(30), LENGTH MEDIUMINT," + table_def + ") ENGINE=InnoDB;") print("commit;") # con.commit() # Crear la tabla de ptms table_def_items = [] # lista para concatenaciones de key y valor for cat, value in categories.items(): # concatenaciones key y valor table_def_items.append(cat + ' ' + value) # guardadaes en la lista table_def_2 = ', '.join(table_def_items) # definicion de la tabla print("CREATE TABLE IF NOT EXISTS " + tabla_ptms + " (" + table_def_2 + ") ENGINE=InnoDB;") print("commit;") # con.commit() # Variables del loop i = 0 j = 0 ptm = '' out = [] listap = [] listaq = [] listar = [] olista = [] interes = [] with open(uniprot_file) as uniprot: # esto me abre y cierra el archivo al final for record in SwissProt.parse(uniprot): # parseando los records de uniprot i += 1 if i % 100 == 0: print("commit;") data = empty_data.copy() # en vez de vaciar el diccionario, le asigno el dafault sin enlazarlo al vacío # Acá cargo los datos generales para las PTMs de una proteína/entrada de uniprot (instancias de entradas) # tienen que cargarse en el orden de las columnas en la ptmdb y el del insert # print(record.accessions[0]) data['AC'] = record.accessions[0] # solo el principal, el resto nose. data['SQ'] = record.sequence data['LENGTH'] = record.sequence_length # todo acá hay un problema? no entran las de mas de 999 residuos data['ORG'] = record.organism # el bicho data['OC'] = record.organism_classification[0] # el dominio del bicho data['OX'] = record.taxonomy_id[0] # Id taxonomica del bicho del olista[:] if not record.host_organism: data['HO'] = 'No host' else: for o in record.host_organism: olista.append((o.split(";"))[0]) data['HO'] = ', '.join(olista) # y esto el host del virus ¿o parásito? 
data['inumber'] = str(i) # solo para debuguear =) ver hasta donde llegó # Generar y guardar el insert del #AA en la secuencia del listaq[:] contenido_aa = count_amino_acids_ext(record.sequence) # Guardo el dict con partes AA, #AA de la secuencia for q in contenido_aa.itervalues(): listaq.append(str(q)) # y los pongo en una lista sql_insert_values_q = ', '.join(listaq) if i >= desde: print("INSERT INTO " + tabla_cuentas + " VALUES ('" + record.accessions[0] + "', '" + record.organism_classification[0] + "', " + str(record.sequence_length) + ", " + sql_insert_values_q + ");") # print("commit;") # con.commit() # Acá empiezo con los features, hay alguno interesante? features = record.features # todo insertar los FTs en otra tabla junto con OC; OX, OR...? del out[:] del interes[:] for a in range(0, len(features)): # guardar los campos "candidato" del FT en una lista llamada out out.append(features[a][0]) interes = list(set(out).intersection(ptmrecords)) # armar un set con los interesantes y hacerlo lista interes if interes: # si interes no está vacía, entonces hay algo para cargar # todo evitar duplicados de secuencia, relacion via AC? # ahora cargo cada PTM en data (subinstancias de entrada) for feature in features: # iterar los features de la entrada if feature[0] in interes: # si el titulo del FT interesa, proseguir ¡mejora un poco! =D for tipo in interes: # iterear los tipos interesantes encontrados en el feature if feature[0] in tipo: # si el feature evaluado interesante, cargar los datos en data[] A = feature[1] # de el residuo tal (va a ser el mismo que el siguiente si está solo) B = feature[2] # hacia el otro. OJO hay algunos desconocidos indicados con un "?" C = feature[3] # este tiene la posta? D = feature[4] # este aparece a veces? todo wtf? # reiniciar FT, FROM y
mitsei/dlkit
dlkit/json_/relationship/searches.py
Python
mit
11,651
0.001717
"""JSON implementations of relationship searches.""" # pylint: disable=no-init # Numerous classes don't require __init__. # pylint: disable=too-many-public-methods,too-few-public-methods # Number of methods are defined in specification # pylint: disable=protected-access # Access to protected methods allowed in package json package scope # pylint: disable=too-many-ancestors # Inheritance defined in specification from . import objects from . import queries from .. import utilities from ..osid import searches as osid_searches from ..primitives import Id from ..utilities import get_registry from dlkit.abstract_osid.osid import errors from dlkit.abstract_osid.relationship import searches as abc_relationship_searches class RelationshipSearch(abc_relationship_searches.RelationshipSearch, osid_searches.OsidSearch): """The search interface for governing relationship searches.""" def __init__(self, runtime): self._namespace = 'relationship.Relationship' self._runtime = runtime record_type_data_sets = get_registry('RESOURCE_RECORD_TYPES', runtime) self._record_type_data_sets = record_type_data_sets self._all_supported_record_type_data_sets = record_type_data_sets self._all_supported_record_type_ids = [] self._id_list = None for data_set in record_type_data_sets: self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set]))) osid_searches.OsidSearch.__init__(self, runtime) @utilities.arguments_not_none def search_among_relationships(self, relationship_ids): """Execute this search among the given list of relationships. arg: relationship_ids (osid.id.IdList): list of relationships raise: NullArgument - ``relationship_ids`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ self._id_list = relationship_ids @utilities.arguments_not_none def order_relationship_results(self, relationship_search_order): """Specify an ordering to the search results. arg: relationship_search_order (osid.relationship.RelationshipSearchOrder): relationship search order raise: NullArgument - ``relationship_search_order`` is ``null`` raise: Unsupported - ``relationship_search_order`` is not of this service *compliance: mandatory -- This method must be implemented.* """ raise errors.Unimplemented() @utilities.arguments_not_none def get_relationship_search_record(self, relationship_search_record_type): """Gets the relationship search record corresponding to the given relationship search record ``Type``. This method is used to retrieve an object implementing the requested record. arg: relationship_search_record_type (osid.type.Type): a relationship search record type return: (osid.relationship.records.RelationshipSearchRecord) - the relationship search record
raise: NullArgument - ``relationship_search_record_type`` is ``null`` raise: OperationFailed - unable to complete requ
est raise: PermissionDenied - authorization failure occurred raise: Unsupported - ``has_record_type(relationship_search_record_type)`` is ``false`` *compliance: mandatory -- This method must be implemented.* """ raise errors.Unimplemented() class RelationshipSearchResults(abc_relationship_searches.RelationshipSearchResults, osid_searches.OsidSearchResults): """This interface provides a means to capture results of a search.""" def __init__(self, results, query_terms, runtime): # if you don't iterate, then .count() on the cursor is an inaccurate representation of limit / skip # self._results = [r for r in results] self._namespace = 'relationship.Relationship' self._results = results self._query_terms = query_terms self._runtime = runtime self.retrieved = False def get_relationships(self): """Gets the relationship list resulting from a search. return: (osid.relationship.RelationshipList) - the relationship list raise: IllegalState - list already retrieved *compliance: mandatory -- This method must be implemented.* """ if self.retrieved: raise errors.IllegalState('List has already been retrieved.') self.retrieved = True return objects.RelationshipList(self._results, runtime=self._runtime) relationships = property(fget=get_relationships) def get_relationship_query_inspector(self): """Gets the inspector for the query to examine the terms used in the search. return: (osid.relationship.RelationshipQueryInspector) - the relationship query inspector *compliance: mandatory -- This method must be implemented.* """ return queries.RelationshipQueryInspector(self._query_terms, runtime=self._runtime) relationship_query_inspector = property(fget=get_relationship_query_inspector) @utilities.arguments_not_none def get_relationship_search_results_record(self, relationship_search_record_type): """Gets the relationship search results record corresponding to the given relationship search record ``Type``. This method must be used to retrieve an object implementing the requested record interface along with all of its ancestor interfaces. arg: relationship_search_record_type (osid.type.Type): a relationship search record type return: (osid.relationship.records.RelationshipSearchResultsReco rd) - the relationship search results record raise: NullArgument - ``relationship_search_record_type`` is ``null`` raise: OperationFailed - unable to complete request raise: PermissionDenied - authorization failure occurred raise: Unsupported - ``has_record_type(relationship_search_record_type)`` is ``false`` *compliance: mandatory -- This method must be implemented.* """ raise errors.Unimplemented() class FamilySearch(abc_relationship_searches.FamilySearch, osid_searches.OsidSearch): """The search interface for governing family searches.""" def __init__(self, runtime): self._namespace = 'relationship.Family' self._runtime = runtime record_type_data_sets = get_registry('RESOURCE_RECORD_TYPES', runtime) self._record_type_data_sets = record_type_data_sets self._all_supported_record_type_data_sets = record_type_data_sets self._all_supported_record_type_ids = [] self._id_list = None for data_set in record_type_data_sets: self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set]))) osid_searches.OsidSearch.__init__(self, runtime) @utilities.arguments_not_none def search_among_families(self, family_ids): """Execute this search among the given list of families. 
arg: family_ids (osid.id.IdList): list of families raise: NullArgument - ``family_ids`` is ``null`` *compliance: mandatory -- This method must be implemented.* """ self._id_list = family_ids @utilities.arguments_not_none def order_family_results(self, family_search_order): """Specify an ordering to the search results. arg: family_search_order (osid.relationship.FamilySearchOrder): family search order raise: NullArgument - ``family_search_order`` is ``null`` raise: Unsupported - ``family_search_order`` is not of this service *compliance: mandatory -- This method must be implemented.* """ raise errors.Unimplemented() @utilities.arguments_not_none def get_family_search_record(self, family_search_record_type
liyu1990/tensorflow
tensorflow/python/platform/default/_app.py
Python
apache-2.0
214
0
"""Generic entry point script."""
import sys

from tensorflow.python.platform import flags


def run():
  f = flags.FLAGS
  f._parse_flags()
  main = sys.modules['__main__'].main
  sys.exit(main(sys.argv))
Elchi3/kuma
kuma/users/signup.py
Python
mpl-2.0
1,747
0.001145
from allauth.socialaccount.forms import SignupForm as BaseSignupForm
from django import forms
from django.core import validators
from django.utils.translation import gettext_lazy as _

USERNAME_REQUIRED = _("Username is required.")
USERNAME_SHORT = _(
    "Username is too short (%(show_value)s characters). "
    "It must be at least %(limit_value)s characters."
)
USERNAME_LONG = _(
    "Username is too long (%(show_value)s characters). "
    "It must be %(limit_value)s characters or less."
)
TERMS_REQUIRED = _("You must agree to the terms of use.")


class SignupForm(BaseSignupForm):
    """
    The user registration form for allauth.

    This overrides the default error messages for the username form field
    with our own strings. The heavy lifting happens in the view.
    """

    terms = forms.BooleanField(
        label=_("I agree"), required=True, error_messages={"required": TERMS_REQUIRED}
    )
    is_github_url_public = forms.BooleanField(
        label=_("I would like to make my GitHub profile URL public"), required=False
    )
    is_newsletter_subscribed = forms.BooleanField(required=False)

    duplicate_email_error_label = "_duplicate_email"

    def __init__(self, *args, **kwargs):
        super(SignupForm, self).__init__(*args, **kwargs)
        self.fields["username"].error_messages = {
            "required": USERNAME_REQUIRED,
            "min_length": USERNAME_SHORT,
            "max_length": USERNAME_LONG,
        }

    def clean_email(self):
        value = self.cleaned_data["email"]
        validators.validate_email(value)
        return super(SignupForm, self).clean_email()

    def raise_duplicate_email_error(self):
        raise forms.ValidationError(self.duplicate_email_error_label)
yiwen-luo/LeetCode
Python/search-in-rotated-sorted-array-ii.py
Python
mit
1,138
0.006151
# Time: O(logn)
# Space: O(1)
#
# Follow up for "Search in Rotated Sorted Array":
# What if duplicates are allowed?
#
# Would this affect the run-time complexity? How and why?
#
# Write a function to determine if a given target is in the array.
#

class Solution(object):
    def search(self, nums, target):
        """
        :type nums: List[int]
        :type target: int
        :rtype: int
        """
        left, right = 0, len(nums) - 1
        while left <= right:
            mid = left + (right - left) / 2
            if nums[mid] == target:
                return True
            elif nums[mid] == nums[left]:
                left += 1
            elif (nums[mid] > nums[left] and nums[left] <= target < nums[mid]) or \
                 (nums[mid] < nums[left] and not (nums[mid] < target <= nums[right])):
                right = mid - 1
            else:
                left = mid + 1
        return False


if __name__ == "__main__":
    print Solution().search([3, 5, 1], 3)
    print Solution().search([2, 2, 3, 3, 4, 1], 1)
    print Solution().search([4, 4, 5, 6, 7, 0, 1, 2], 5)
ProgramRepair/experiments-infrastructure
awsscripts-defects4j/fullrepairtesting.py
Python
gpl-2.0
8,390
0.019547
from time import sleep import boto.ec2 import sys import subprocess import argparse import os # specify AWS keys auth = {"aws_access_key_id": "<aws key>", "aws_secret_access_key": "<value>"} # create the connection object conn = boto.ec2.connect_to_region("us-east-1", **auth) # status of vms instancefree={} # status of projects evaluated. set it False for the projects which need to be evaluated projects={} projects['Chart']=False projects['Closure']=False projects['Lang']=False projects['Math']=False projects['Time']=False # parameters required for launching experiment faultlocflag="false" startseed=1 endseed=20 startdefectid=1 enddefectid=2 testtype="allHuman" genprogpath="/home/ubuntu/genprog4java" defects4jpath="/home/ubuntu/defects4j" testpercent=100 # parameters required for vm alldone=False vmcount=1 ami='ami-<id>' # also need to specify this in create_instances method instancetype='c4.xlarge' # also need to specify this in create_instances method # number of defects to run for each project plus one. Here we are testing 15 defects (3 defects from each project). # We can randomize the defects by modifying startdefect variable above and accordingly modify the values below defects={} defects['Chart']=4 defects['Closure']=4 defects['Lang']=4 defects['Math']=4 defects['Time']=4 def main(): if len(sys.argv) < 2: create_script() print "Usage: python fullrepairtesting.py {start|stop|terminate} path-to-key-file \n" sys.exit(0) else: action = sys.argv[1] keypath = sys.argv[2] if action == "start": terminate_instances() sleep(30) delete_volumes() create_instances(keypath) elif action == "stop": stopInstances() elif action == "terminate": terminate_instances() sleep(30) delete_volumes() else: print "Usage: python faultloctesting.py {start|stop|terminate} path-to-key-file \n" # method to create instances def create_instances(vm_key): # create instances. specify ami, key, type, min and max count instances_resv = conn.run_instances('ami-8385f094',key_name='defect4jvm',instance_type='c4.xlarge',security_group_ids=["sg-6a3e5112"], min_count = 5, max_count = 5) print instances_resv print "number of instances created = ", len(instances_resv.instances) for i in instances_resv.instances: print "creating instance ", i.id while i.state == u'pending': # wait until instance gets created print("Instance state: %s" % i.state) sleep(10) i.update() global alldone global vmcount if projects['Closure'] and alldone: # if all experiments are launched and there are unused vms and unattached volumes then delete them conn.terminate_instances(instance_ids=[i.id]) delete_volumes() else: # setup the instance setupInstance(i, vm_key) vmcount = vmcount + 1 print("Instance state: %s" % i.state) print("Public dns: %s" % i.public_dns_name) return i.public_dns_name # method to setup an instance for running the experiment def setupInstance(i, vm_key): print "Starting instance", i.id if i.state == "stopped": i.start() while i.state == "pending": sleep(1) i.update() status=conn.get_all_instance_status(instance_ids=[i.id]) print "system status is: ",status[0].system_status.status, status[0].instance_status.status # wait until instance is initialized and reachable while status[0].system_status.status != 'ok' and status[0].instance_status.status != 'ok': status=conn.get_all_instance_status(instance_ids=[i.id])
print "system status is: ",status[0].system_status.status, status[0].instance_status.status sleep(10) print "instance started = ", i.id, " ip address is ", i.ip_address instancefree[i.ip_address]=True # launch experiment on instance
if i.ip_address != None and i.id!="i-10fa21c8": print "copying launch-repair script to ", i.ip_address do_scp(i.ip_address,"~/", vm_key) print "set permissions of script on ", i.ip_address set_permissions(i.ip_address, vm_key) if not projects['Chart']: # launch chart defects startdefectid = 1 enddefectid=defects['Chart'] if instancefree[i.ip_address] is True: vmname="vm%s-Chart-%s-%s" %(vmcount, startdefectid, enddefectid) i.add_tag("Name", vmname) run(i.ip_address, vm_key, "Chart", startdefectid, enddefectid, "chart") instancefree[i.ip_address]=False if not projects['Lang']: # launch lang defects startdefectid = 1 enddefectid=defects['Lang'] if instancefree[i.ip_address] is True: vmname="vm%s-Lang-%s-%s" %(vmcount, startdefectid, enddefectid) i.add_tag("Name", vmname) run(i.ip_address, vm_key, "Lang", startdefectid, enddefectid, "lang") instancefree[i.ip_address]=False if not projects['Time']: # launch time defects startdefectid = 1 enddefectid=defects['Time'] if instancefree[i.ip_address] is True: vmname="vm%s-Time-%s-%s" %(vmcount, startdefectid, enddefectid) i.add_tag("Name", vmname) run(i.ip_address, vm_key, "Time", startdefectid, enddefectid, "time") instancefree[i.ip_address]=False if not projects['Math']: # launch math defects startdefectid = 1 enddefectid=defects['Math'] if instancefree[i.ip_address] is True: vmname="vm%s-Math-%s-%s" %(vmcount, startdefectid, enddefectid) i.add_tag("Name", vmname) run(i.ip_address, vm_key, "Math", startdefectid, enddefectid, "math") instancefree[i.ip_address]=False if not projects['Closure']: # launch closure defects startdefectid = 1 enddefectid=defects['Closure'] if instancefree[i.ip_address] is True: vmname="vm%s-Closure-%s-%s" %(vmcount, startdefectid, enddefectid) i.add_tag("Name", vmname) run(i.ip_address, vm_key, "Closure", startdefectid, enddefectid, "closure") instancefree[i.ip_address]=False # method to shutdown instances def stopInstances(): print "stopping instances" reservations = conn.get_all_reservations() for reservation in reservations: for instance in reservation.instances: if instance.image_id == ami and instance.instance_type == instancetype and instance.state == "running": print "stopping instance ", instance.id instance.stop() # method to terminate instances def terminate_instances(): print "terminating not required instances" reservations = conn.get_all_reservations() for reservation in reservations: for instance in reservation.instances: if instance.image_id == ami and instance.instance_type == instancetype and instance.state == "stopped": print "terminating instance ", instance.id conn.terminate_instances(instance_ids=[instance.id]) # method to delete unattached volumes def delete_volumes(): for vol in conn.get_all_volumes(): state = vol.attachment_state() if state == None: print vol.id, state print "deleting volume = ", vol.id conn.delete_volume(vol.id) # method to run the script to launch an experiment on vm def run(vmip, vm_key, project, startdefectid, enddefectid, folder): ssh_cmd = "ssh -o StrictHostKeyChecking=false -n -i %s ubuntu@%s \"/bin/bash launch-repair.sh %s %s %s %s %s %s %s %s %s %s %s\" &" ssh_str = ssh_cmd % (vm_key, vmip, project, startdefectid, enddefectid, folder, startseed, endseed, faultlocflag, genprogpath, defects4jpath, testtype, testpercent) print "executing script remotely using ", ssh_str FNULL = open(os.devnull, 'w') return (subprocess.call(ssh_str,shell=True, stdout=FNULL, stderr=subprocess.STDOUT) == 0) # method to copy the script to the instance def do_scp(to_scp, where_scp, vm_key): 
script_path = "./launch-repair.sh" scp_script_cmd = "scp -o StrictHostKeyChecking=false -i %s %s %s ubuntu@%s:%s" scp_str = scp_script_cmd % (vm_key, vm_key, script_path, to_scp, where_scp) print "copying script and key file to vm using:", scp_str return (subprocess.call(scp_str,shell=True) == 0) # method to set appropriate permissions to run the script def set_permissions(vmip, vm_key): ssh_cmd = "ssh -o StrictHostKeyChecking=false -n -i %s ubuntu@%s \"chmod +x /home/ubuntu/
AlexMog/IRCPokemonBot
commands/classes/Battle.py
Python
mit
1,766
0.003964
#!/usr/bin/env python2
import time
import random


class Battle:
    def __init__(self, user1, user2):
        self.user1 = user1
        self.user2 = user2
        self.turn = user1
        self.notTurn = user2
        self.accepted = False
        self.finished = False
        self.auto = False
        self.turnCount = 1

    def fight(self, spell):
        attacker = self.turn.getActivePokemon()
        defender = self.notTurn.getActivePokemon()
        message = attacker.fight(spell, defender)
        if defender.life <= 0:
            message += defender.name + " n'a plus de points de vie. "
            if self.notTurn.hasAlivePokemon():
                message += self.notTurn.username + " doit invoquer un nouveau pokemon. "
            else:
                message += self.notTurn.username + " a perdu. " + self.turn.username + " a gagne. "
                message += attacker.name + " gagne " + str(attacker.calcGainedExp(defender)) + " points d'experience. "
                old = attacker.level
                attacker.gainExp(defender)
                if attacker.level != old:
                    message += attacker.name + " passe niveau " + str(attacker.level) + "!"
                self.finished = True
        self.turn, self.notTurn = self.notTurn, self.turn
        self.turnCount += 1
        return message

    def itemUsed(self):
        self.turn, self.notTurn = self.notTurn, self.turn

    def nextStep(self):
        if self.finished:
            self.user1.battle = None
            self.user2.battle = None
            return False
        elif self.auto and self.turnCount % 2 == 0:
            time.sleep(2)
            return self.fight(self.turn.getActivePokemon().spells[random.randint(0, len(self.turn.getActivePokemon().spells) - 1)].name)
RevansChen/online-judge
Codefights/arcade/python-arcade/level-13/90.Url-Similarity/Python/test.py
Python
mit
1,205
0.00249
# Python3
from solution1 import urlSimilarity as f

qa = [
    ('https://codesignal.com/home/test?param1=42&param3=testing&login=admin',
     'https://codesignal.com/home/secret/test?param3=fish&param1=42&password=admin',
     19),
    ('https://codesignal.com/home/test?param1=42&param3=testing&login=admin',
     'http://codesignal.org/about?42=param1&tesing=param3&admin=login',
     0),
    ('https://www.google.com/search?q=codesignal',
     'http://www.google.com/search?q=codesignal',
     13),
    ('ftp://www.example.com/query?varName=value',
     'http://example.com/query?varName=value',
     3),
    ('ftp://www',
     'http://anotherexample.com/www?ftp=http',
     0),
    ('https://codesignal.com/home/test?param1=42&param3=testing&login=admin&param4=abc&param5=codesignal',
     'https://codesignal.com/home/secret/test?param3=fish&param1=42&codesignal=admin&param5=test',
     20)
]

for *q, a in qa:
    for i, e in enumerate(q):
        print('input{0}: {1}'.format(i + 1, e))
    ans = f(*q)
    if ans != a:
        print(' [failed]')
        print(' output:', ans)
        print(' expected:', a)
    else:
        print(' [ok]')
        print(' output:', ans)
    print()
eevee/cocos2d-mirror
test/test_moveby.py
Python
bsd-3-clause
867
0.026528
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
testinfo = "s, t 3, s, t 6.1, s, q"
tags = "MoveBy"

import cocos
from cocos.director import director
from cocos.actions import MoveBy
from cocos.sprite import Sprite
import pyglet


class TestLayer(cocos.layer.Layer):
    def __init__(self):
        super( TestLayer, self ).__init__()
        x,y = director.get_window_size()
        self.sprite = Sprite( 'grossini.png', (x/2, y/2) )
        self.add( self.sprite, name='sprite' )
        self.sprite.do( MoveBy( (x/2,y/2), 6 ) )


def main():
    director.init()
    test_layer = TestLayer ()
    main_scene = cocos.scene.Scene()
    main_scene.add(test_layer, name='test_layer')
    director.run (main_scene)


if __name__ == '__main__':
    main()
ovresko/erpnext
erpnext/accounts/doctype/cash_flow_mapper/cash_flow_mapper.py
Python
gpl-3.0
267
0.003745
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt

from __future__ import unicode_literals
from frappe.model.document import Document


class CashFlowMapper(Document):
    pass
deltaburnt/LameBot
api/loader.py
Python
gpl-2.0
1,994
0.037111
import os, importlib, inspect
import api.plugin, util.logger_factory


class Loader():
    def __init__(self, scheduler, bot, sql_conn):
        filenames = os.listdir(os.path.abspath(__file__+'/../../ext'))
        self.ext_names = [x[:-3] for x in filenames if x[-3:] == '.py' and x != '__init__.py']
        self.scheduler = scheduler
        self.bot = bot
        self.sql_conn = sql_conn
        self.sql = sql_conn.cursor()
        self.logger = util.logger_factory.instance().getLogger('api.loader')
        self.sql.execute('CREATE TABLE IF NOT EXISTS `__plugins` (name)')
        self.sql_conn.commit()

    def load_all(self, load_extensions = None):
        self.logger.debug('Loading all extensions')
        self.plugins = []
        for module in self.ext_names:
            module = importlib.import_module('ext.'+module)
            class_info = self._get_class(module)
            if class_info is None:
                continue
            if load_extensions != '~~All~~' and class_info[0] not in load_extensions:
                self.logger.debug('Skipping extension %s, not included in load_extensions config value', class_info[0])
                continue
            logger = util.logger_factory.instance().getLogger('ext.'+class_info[0])
            class_obj = class_info[1](self.scheduler, self.bot.network_list, self.sql, logger)
            self.plugins.append({'name':class_info[0], 'object':class_obj, 'module': module})
        self._install_plugins()
        self._start_plugins()
        self.sql_conn.commit()

    def _get_class(self, module):
        for info in inspect.getmembers(module):
            if issubclass(info[1], api.plugin.Plugin) and info[1] is not api.plugin.Plugin:
                return info

    def _install_plugins(self):
        for plugin in self.plugins:
            self.sql.execute('SELECT * FROM `__plugins` WHERE name = ?', (plugin['name'],))
            if self.sql.fetchone() is None:
                self.logger.info('Installing extension %s', plugin['name'])
                plugin['object']._install_()
                self.sql.execute('INSERT INTO `__plugins`(name) values (?)', (plugin['name'],))

    def _start_plugins(self):
        for plugin in self.plugins:
            plugin['object']._start_()
Gab-km/papylon
setup.py
Python
mit
1,466
0
# -*- coding: utf-8 -*-
import os

from setuptools import setup, find_packages

root = os.path.abspath(os.path.dirname(__file__))

try:
    with open(os.path.join(root, 'README.rst')) as f:
        README = f.read()
    with open(os.path.join(root, 'CHANGES.rst')) as f:
        CHANGES = f.read()
except IOError:
    README, CHANGES = '', ''

install_requires = [
    'setuptools'
]

tests_require = [
    'pytest >= 2.6.4'
]

setup(name='papylon',
      version='0.6.1',
      description='Random testing for Python',
      long_description=README + '\n\n' + CHANGES,
      classifiers=[
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "License :: OSI Approved :: MIT License",
          "Programming Language :: Python",
          "Programming Language :: Python :: 3.3",
          "Programming Language :: Python :: 3.4",
          "Programming Language :: Python :: 3.5",
          "Topic :: Software Development :: Libraries",
          "Topic :: Software Development :: Testing",
          "Topic :: Utilities"
      ],
      keywords='papylon quickcheck random test',
      author='Kazuhiro Matsushima',
      author_email='the25thcromosome@gmail.com',
      license='The MIT License (MIT)',
      url='https://github.com/Gab-km/papylon',
      packages=find_packages(),
      include_package_data=True,
      install_requires=install_requires,
      tests_require=tests_require,
      test_suite='py.test')
catapult-project/catapult
third_party/gsutil/gslib/tests/test_notification.py
Python
bsd-3-clause
4,981
0.002409
# -*- coding: utf-8 -*- # Copyright 2013 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Integration tests for notification command.""" from __future__ import absolute_import from __future__ import print_function from __future__ import division from __future__ import unicode_literals import re import time import uuid import boto import gslib.tests.testcase as testcase from gslib.tests.util import ObjectToURI as suri from gslib.tests.util import unittest from gslib.utils.retry_util import Retry def _LoadNotificationUrl(): return boto.config.get_value('GSUtil', 'test_notification_url') NOTIFICATION_URL = _LoadNotificationUrl() class TestNotification(testcase.GsUtilIntegrationTestCase): """Integration tests for notification command.""" @unittest.skipUnless(NOTIFICATION_URL, 'Test requires notification URL configuration.') def test_watch_bucket(self): """Tests creating a notification channel on a bucket.""" bucket_uri = self.CreateBucket() self.RunGsUtil( ['notification', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)]) identifier = str(uuid.uuid4()) token = str(uuid.uuid4()) stderr = self.RunGsUtil([ 'notification', 'watchbucket', '-i', identifier, '-t', token, NOTIFICATION_URL, suri(bucket_uri) ], return_stderr=True) self.assertIn('token: %s' % token, stderr) self.assertIn('identifier: %s' % identifier, stderr) @unittest.skipUnless(NOTIFICATION_URL, 'Test requires notification URL configuration.') def test_stop_channel(self): """Tests stopping a notification channel on a bucket.""" bucket_uri = self.CreateBucket() stderr = self.RunGsUtil( ['notification', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)], return_stderr=True) channel_id = re.findall(r'channel identifier: (?P<id>.*)', stderr) self.assertEqual(len(channel_id), 1) resource_id = re.findall(r'resource identifier: (?P<id>.*)', stderr) self.assertEqual(len(resource_id), 1) channel_id = channel_id[0] resource_id = resource_id[0] self.RunGsUtil(['notification', 'stopchannel', channel_id, resource_id]) @unittest.skipUnless(NOTIFICATION_URL, 'Test requires notification URL configuration.') def test_list_one_channel(self): """Tests listing notification channel on a bucket.""" # TODO(b/132277269): Re-enable these once the service-side bug
is fixed. return unittest.skip('Functionality has been disabled due to b/132277269') bucket_uri = self.CreateBucket() # Set up an OCN (object change notification) on the newly created bucket. self.RunGsUtil( ['notific
ation', 'watchbucket', NOTIFICATION_URL, suri(bucket_uri)], return_stderr=False) # The OCN listing in the service is eventually consistent. In initial # tests, it almost never was ready immediately after calling WatchBucket # above, so we A) sleep for a few seconds before the first OCN listing # attempt, and B) wrap the OCN listing attempt in retry logic in case # it raises a BucketNotFoundException (note that RunGsUtil will raise this # as an AssertionError due to the exit status not being 0). @Retry(AssertionError, tries=3, timeout_secs=5) def _ListObjectChangeNotifications(): stderr = self.RunGsUtil(['notification', 'list', '-o', suri(bucket_uri)], return_stderr=True) return stderr time.sleep(5) stderr = _ListObjectChangeNotifications() channel_id = re.findall(r'Channel identifier: (?P<id>.*)', stderr) self.assertEqual(len(channel_id), 1) resource_id = re.findall(r'Resource identifier: (?P<id>.*)', stderr) self.assertEqual(len(resource_id), 1) push_url = re.findall(r'Application URL: (?P<id>.*)', stderr) self.assertEqual(len(push_url), 1) subscriber_email = re.findall(r'Created by: (?P<id>.*)', stderr) self.assertEqual(len(subscriber_email), 1) creation_time = re.findall(r'Creation time: (?P<id>.*)', stderr) self.assertEqual(len(creation_time), 1) def test_invalid_subcommand(self): stderr = self.RunGsUtil(['notification', 'foo', 'bar', 'baz'], return_stderr=True, expected_status=1) self.assertIn('Invalid subcommand', stderr)
apagac/robottelo
tests/foreman/api/test_discovery.py
Python
gpl-3.0
7,489
0
# -*- encoding: utf-8 -*- """API Tests for foreman discovery feature""" from robottelo.common.decorators import stubbed from robottelo.test import APITestCase class Discovery(APITestCase): """Implements tests for foreman discovery feature""" @stubbed() def test_list_all_discovered_host(self): """@Test: List all discovered hosts @Feature: Foreman Discovery @Setup: Provisioning should be configured and a host should be discovered @Steps: 1. GET /api/v2/discovered_hosts @Assert: List of all discovered hosts are retrieved @Status: Manual """ @stubbed() def test_show_discovered_host(self): """@Test: Show a specific discovered hosts @Feature: Foreman Discovery @Setup: Provisioning should be configured and a host should be discovered @Steps: 1. GET /api/v2/discovered_hosts/:id @Assert: Selected host is retrieved @Status: Manual """ @stubbed() def test_create_discovered_host(self): """@Test: Create a discovered hosts @Feature: Foreman Discovery @Setup: Provisioning should be configured and a host should be discovered @Steps: 1. POST /api/v2/discovered_hosts @Assert: Host should be created successfully @Status: Manual """ @stubbed() def test_provision_discovered_host(self): """@Test: Provision a discovered hosts @Feature: Foreman Discovery @Setup: Provisioning should be configured and a host should be discovered @Steps: 1. PUT /api/v2/discovered_hosts/:id @Assert: Host should be provisioned successfully @Status: Manual """ @stubbed() def test_delete_discovered_host(self): """@Test: Delete a discovered hosts @Feature: Foreman Discovery @Setup: Provisioning should be configured and a host should be discovered @Steps: 1. DELETE /api/v2/discovered_hosts/:id @Assert: Discovered Host should be deleted successfully
@Status: Manual """ @stubbed() def test_auto_provision_host(self): """@Test: Auto provision a host by executing discovery rules @Feature: Foreman Discovery @Setup: Provisioning should be configured and a host should be disco
vered @Steps: 1. POST /api/v2/discovered_hosts/:id/auto_provision @Assert: Selected Host should be auto-provisioned successfully @Status: Manual """ @stubbed() def test_auto_provision_all_host(self): """@Test: Auto provision all host by executing discovery rules @Feature: Foreman Discovery @Setup: Provisioning should be configured and more than one host should be discovered @Steps: 1. POST /api/v2/discovered_hosts/auto_provision_all @Assert: All discovered hosts should be auto-provisioned successfully @Status: Manual """ @stubbed() def test_refresh_facts(self): """@Test: Refreshing the facts of discovered host @Feature: Foreman Discovery @Setup: 1. Provisioning should be configured and more than one host should be discovered 2. Add a NIC on discovered host @Steps: 1. PUT /api/v2/discovered_hosts/:id/refresh_facts @Assert: Added Fact should be displayed on refreshing the facts @Status: Manual """ @stubbed() def test_reboot_host(self): """@Test: Rebooting a discovered host @Feature: Foreman Discovery @Setup: Provisioning should be configured and more than one host should be discovered @Steps: 1. PUT /api/v2/discovered_hosts/:id/reboot @Assert: Selected host should be rebooted successfully @Status: Manual """ @stubbed() def test_create_discovery_rule_1(self): """@Test: Create a new discovery rule Set query as (e.g IP=IP_of_discovered_host) @Feature: Foreman Discovery @Setup: Host should already be discovered @Assert: Host should reboot and provision @Status: Manual """ @stubbed() def test_create_discovery_rule_2(self): """@Test: Create a new discovery rule with (host_limit = 0) that applies to multi hosts. Set query as cpu_count = 1 OR mem > 500 @Feature: Foreman Discovery @Setup: Host should already be discovered @Assert: All Hosts of same subnet should reboot and provision @Status: Manual """ @stubbed() def test_create_discovery_rule_3(self): """@Test: Create multiple discovery rules with different priority @Feature: Foreman Discovery @Setup: Multiple hosts should already be discovered @Assert: Host with lower count have higher priority and that rule should be executed first @Status: Manual """ @stubbed() def test_create_discovery_rule_4(self): """@Test: Create a discovery rule (CPU_COUNT = 2) with host limit 1 and provision more than one host with same rule @Feature: Foreman Discovery @Setup: Host with two CPUs should already be discovered @Assert: Rule should only be applied to one discovered host and for other rule should already be skipped. 
@Status: Manual """ @stubbed() def test_rule_with_invalid_host_limit(self): """@Test: Create a discovery rule with invalid(-ve/text value) host limit @Feature: Foreman Discovery @Setup: Host with two CPUs should already be discovered @Assert: Validation error should be raised @Status: Manual """ @stubbed() def test_rule_with_invalid_priority(self): """@Test: Create a discovery rule with invalid(text value) priority @Feature: Foreman Discovery @Setup: Host with two CPUs should already be discovered @Assert: Validation error should be raised @Status: Manual """ @stubbed() def test_create_rule_with_long_name(self): """@Test: Create a discovery rule with more than 255 char @Feature: Foreman Discovery @Setup: Host with two CPUs should already be discovered @Assert: Validation error should be raised @Status: Manual """ @stubbed() def test_delete_discovery_rule(self): """@Test: Delete a discovery rule @Feature: Foreman Discovery @Assert: Rule should be deleted successfully @Status: Manual """ @stubbed() def test_update_discovery_rule_1(self): """@Test: Update an existing rule and execute it @Feature: Foreman Discovery @Setup: Host should already be discovered @Assert: User should be able to update the rule and it should be executed on discovered host @Status: Manual """ @stubbed() def test_update_discovery_rule_2(self): """@Test: Update the discovered host name and provision it @Feature: Foreman Discovery @Setup: Host should already be discovered @Assert: The host name should be updated and host should be provisioned @Status: Manual """
google/citest
tests/json_contract/observation_verifier_test.py
Python
apache-2.0
14,132
0.004033
# Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # pylint: disable=missing-docstring # pylint: disable=invalid-name import unittest from citest.base import ( ExecutionContext, JsonSnapshotHelper) import citest.json_contract as jc import citest.json_predicate as jp _called_verifiers = [] _TEST_FOUND_ERROR_COMMENT='Found error.' class TestObsoleteObservationFailureVerifier(jc.ObservationFailureVerifier): def __init__(self, title, expect): super(TestObsoleteObservationFailureVerifier, self).__init__(title) self.__expect = expect def _error_comment_or_none(self, error): if error.args[0] == self.__expect: return _TEST_FOUND_ERROR_COMMENT return None def _makeObservationVerifyResult( valid, observation=None, good_results=None, bad_results=None, failed_constraints=None): default_result = jp.PredicateResult(valid=valid) good_results = good_results or ([default_result] if valid else []) bad_results = bad_results or ([] if valid else [default_result]) failed_constraints = failed_constraints or [] observation = observation or jc.Observation() good_attempt_results = [jp.ObjectResultMapAttempt(observation, result) for result in good_results] bad_attempt_results = [jp.ObjectResultMapAttempt(observation, result) for result in bad_results] return jc.ObservationVerifyResult( valid=valid, observation=observation, good_results=good_attempt_results, bad_results=bad_attempt_results, failed_constraints=failed_constraints) class FakeObservationVerifier(jc.ObservationVerifier): def __init__(self, title, dnf_verifier, result): super(FakeObservationVerifier, self).__init__( title=title, dnf_verifiers=dnf_verifier) self.__result = result def __call__(self, context, observation): _called_verifiers.append(self) return self.__result class ObservationVerifierTest(unittest.TestCase): def assertEqual(self, expect, have, msg=''): if not msg: msg = 'EXPECTED\n{0!r}\nGOT\n{1!r}'.format(expect, have) JsonSnapshotHelper.AssertExpectedValue(expect, have, msg) def test_result_builder_add_good_result(self): context = ExecutionContext() observation = jc.Observation() observation.add_object('A') pred = jp.PathPredicate(None, jp.STR_EQ('A')) builder = jc.ObservationVerifyResultBuilder(observation) map_pred = jp.MapPredicate(pred) map_result = map_pred(context, observation.objects) builder.add_map_result(map_result) verify_results = builder.build(True) self.assertTrue(verify_results) self.assertEqual(observation, verify_results.observation) self.assertEqual([], verify_results.bad_results) self.assertEqual([], verify_results.failed_constraints) self.assertEqual(map_result.good_object_result_mappings, verify_results.good_results) def test_result_builder_add_bad_result(self): context = ExecutionContext() observation = jc.Observation() observation.add_object('A') pred = jp.PathPredicate(None, jp.STR_EQ('B')) builder = jc.ObservationVerifyResultBuilder(observation) map_pred = jp.MapPredicate(pred) map_result = map_pred(context, observation.objects) builder.add_map_result(map_result) verify_results = 
builder.build(False) self.assertFalse(verify_results) self.assertEqual(observation, verify_results.observation) self.assertEqual([], verify_results.good_results) self.assertEqual([pred], verify_results.failed_constraints) self.assertEqual(map_result.bad_object_result_mappings, verify_results.bad_results) def test_result_builder_add_mixed_results(self): context = ExecutionContext() observation = jc.Observation() observation.add_object('GOOD') observation.add_object('BAD') pred = jp.PathPredicate(None, jp.STR_EQ('GOOD')) builder = jc.ObservationVerifyResultBuilder(observation) map_pred = jp.MapPredicate(pred) map_result = map_pred(context, observation.objects) builder.add_map_result(map_result) verify_results = builder.build(False) self.assertFalse(verify_results) self.assertEqual(observation, verify_results.observation) self.assertEqual(map_result.good_object_result_mappings, verify_results.good_results) self.assertEqual([], verify_results.failed_constraints) self.assertEqual(map_result.bad_object_result_mappings, verify_results.bad_results) def test_result_observation_verifier_conjunction_ok(self): context = ExecutionContext() builder = jc.ObservationVerifierBuilder(title='Test') verifiers = [] pred_results = [] for i in range(3): this_result = jp.PredicateResult(True, comment='Pred {0}'.format(i)) pred_results.append(this_result) result = _makeObservationVerifyResult( valid=True, good_results=[this_result]) fake_verifier = FakeObservationVerifier( title=i, dnf_verifier=[], result=result) verifiers.append(fake_verifier) builder.AND(fake_verifier) # verify build can work multiple times self.assertEqual(builder.build(), builder.build()) verifier = builder.build() self.assertEqual([verifiers], verifier.dnf_verifiers) expect = _makeObservationVerifyResult(True, good_results=pred_results) global _called_verifiers _called_verifiers = [] got = verifier(context, jc.Observation()) self.assertEqual(expect, got) self.assertEqual(verifiers, _called_verifiers) def test_result_observation_verifier_conjunction_failure_aborts_early(self): context = ExecutionContext() builder = jc.ObservationVerifierBuilder(title='Test') verifiers = [] results = [] pred_results = [jp.PredicateResult(False, comment='Result %d' % i) for i in range(3)] for i in range(3): result = _makeObservationVerifyResult( valid=False, bad_results=[pred_results[i]]) fake_verifier = FakeObservationVerifier( title=i, dnf_verifier=[], result=result) verifiers.append(fake_verifier) results.append(result) builder.AND(fake_verifier) # verify build can work multiple times self.assertEqual(builder.build(), builder.build()) verifier = builder.build() self.assertEqual([verifier
s], verifier.dnf_verifiers) expect = _makeObservationVerifyResult( False, bad_results=[pred_results[0]]) global _called_verifiers _called_verifiers = [] got = verifier(context, jc.Observation()) self.assertEqual(expect, got) self.assertEqual(verifiers[:1], _called_verifiers) def test_result_obser
vation_verifier_disjunction_success_aborts_early(self): context = ExecutionContext() builder = jc.ObservationVerifierBuilder(title='Test') verifiers = [] results = [] pred_results = [jp.PredicateResult(False, comment='Result %d' % i) for i in range(2)] for i in range(2): result = _makeObservationVerifyResult( valid=True, good_results=[pred_results[i]]) fake_verifier = FakeObservationVerifier( title=i, dnf_verifier=[], result=result) verifiers.append(fake_verifier) results.append(result) builder.OR(fake_verifier) verifier = builder.build() self.assertEqual([verifiers[0:1], verifiers[1:2]], verifier.dnf_verifiers) expect = _makeObservationVerifyResult(True, good_results=[pred_results[0]]) global _called_verifiers _called_verifiers = [] got = verifier(contex
pkgw/pwkit
pwkit/environments/ciao/analysis.py
Python
mit
6,635
0.008747
# -*- mode: python; coding: utf-8 -*- # Copyright 2016-2017 Peter Williams <peter@newton.cx> and collaborators # Licensed under the MIT License """Various helpers for X-ray analysis that rely on CIAO tools. """ from __future__ import absolute_import, division, print_function, unicode_literals __all__ = str(''' get_region_area count_events compute_bgband simple_srcflux ''').split () def get_region_area (env, evtpath, region): with env.slurp (argv=['dmlist', '%s[sky=%s]' % (evtpath, region), 'subspace'], linebreak=True) as s: for etype, payload in s: if etype != 'stdout': continue if b'Region area' not in payload: continue return float (payload.split ()[-1]) raise Exception ('parsing of dmlist output failed') def count_events (env, evtpath, filter): """TODO: this can probably be replaced with simply reading the file ourselves! """ with env.slurp (argv=['dmstat', '%s%s[cols energy]' % (evtpath, filter)], linebreak=True) as s: for etype, payload in s: if etype != 'stdout': continue if b'good:' not in payload: continue return int (payload.split ()[-1]) raise Exception ('parsing of dmlist output failed') def compute_bgband (evtpath, srcreg, bkgreg, ebins, env=None): """Compute background information for a source in one or more energy bands. evtpath Path to a CIAO events file srcreg String specifying the source region to consider; use 'region(path.reg)' if you have the region saved in a file. bkgreg String specifying the background region to consider; same format as srcreg ebins Iterable of 2-tuples giving low and high bounds of the energy bins to consider, measured in eV. env An optional CiaoEnvironment instance; default settings are used if unspecified. Returns a DataFrame containing at least the following columns: elo The low bound of this energy bin, in eV. ehi The high bound of this energy bin, in eV. ewidth The width of the bin in eV; simply `abs(ehi - elo)`. nsrc The number of events within the specified source region and energy range. nbkg The number of events within the specified background region and energy range. nbkg_scaled The number of background events scaled to the source area; not an integer. nsrc_subbed The estimated number of non-background events in the source region; simply `nsrc - nbkg_scaled`. log_prob_bkg The logarithm of the probability that
all counts in the source region are due to background events. src_sigma The confidence of source detection in sigma inferred from log_prob_bkg. The proba
bility of backgrounditude is computed as: b^s * exp (-b) / s! where `b` is `nbkg_scaled` and `s` is `nsrc`. The confidence of source detection is computed as: sqrt(2) * erfcinv (prob_bkg) where `erfcinv` is the inverse complementary error function. """ import numpy as np import pandas as pd from scipy.special import erfcinv, gammaln if env is None: from . import CiaoEnvironment env = CiaoEnvironment () srcarea = get_region_area (env, evtpath, srcreg) bkgarea = get_region_area (env, evtpath, bkgreg) srccounts = [count_events (env, evtpath, '[sky=%s][energy=%d:%d]' % (srcreg, elo, ehi)) for elo, ehi in ebins] bkgcounts = [count_events (env, evtpath, '[sky=%s][energy=%d:%d]' % (bkgreg, elo, ehi)) for elo, ehi in ebins] df = pd.DataFrame ({ 'elo': [t[0] for t in ebins], 'ehi': [t[1] for t in ebins], 'nsrc': srccounts, 'nbkg': bkgcounts }) df['ewidth'] = np.abs (df['ehi'] - df['elo']) df['nbkg_scaled'] = df['nbkg'] * srcarea / bkgarea df['log_prob_bkg'] = df['nsrc'] * np.log (df['nbkg_scaled']) - df['nbkg_scaled'] - gammaln (df['nsrc'] + 1) df['src_sigma'] = np.sqrt (2) * erfcinv (np.exp (df['log_prob_bkg'])) df['nsrc_subbed'] = df['nsrc'] - df['nbkg_scaled'] return df def _rmtree_error (func, path, excinfo): from ...cli import warn warn ('couldn\'t delete temporary file %s: %s (%s)', path, excinfo[0], func) def simple_srcflux(env, infile=None, psfmethod='arfcorr', conf=0.68, verbose=0, **kwargs): """Run the CIAO "srcflux" script and retrieve its results. *infile* The input events file; must be specified. The computation is done in a temporary directory, so this path — and all others passed in as arguments — **must be made absolute**. *psfmethod* = "arfcorr" The PSF modeling method to be used; see the "srcflux" documentation. *conf* = 0.68 The confidence limit to detect. We default to 1 sigma, instead of the 90% mark, which is the srcflux default. *verbose* = 0 The level of verbosity to be used by the tool. *kwargs* Remaining keyword arguments are passed to the tool as command-line keyword arguments, with values stringified. Returns: A :class:`pandas.DataFrame` extracted from the results table generated by the tool. There is one row for each source analyzed; in common usage, this means that there will be one row. """ from ...io import Path import shutil, signal, tempfile if infile is None: raise ValueError('must specify infile') kwargs.update(dict( infile = infile, psfmethod = psfmethod, conf = conf, verbose = verbose, clobber = 'yes', outroot = 'sf', )) argv = ['srcflux'] + ['%s=%s' % t for t in kwargs.items()] argstr = ' '.join(argv) tempdir = None try: tempdir = tempfile.mkdtemp(prefix='srcflux') proc = env.launch(argv, cwd=tempdir, shell=False) retcode = proc.wait() if retcode > 0: raise RuntimeError('command "%s" failed with exit code %d' % (argstr, retcode)) elif retcode == -signal.SIGINT: raise KeyboardInterrupt() elif retcode < 0: raise RuntimeError('command "%s" killed by signal %d' % (argstr, -retcode)) tables = list(Path(tempdir).glob('*.flux')) if len(tables) != 1: raise RuntimeError('expected exactly one flux table from srcflux; got %d' % len(tables)) return tables[0].read_fits_bintable(hdu=1) finally: if tempdir is not None: shutil.rmtree(tempdir, onerror=_rmtree_error)
Etharr/plugin.video.youtube
resources/lib/youtube_plugin/youtube/helper/yt_context_menu.py
Python
gpl-2.0
9,739
0.009241
__author__ = 'bromix' from ... import kodion def append_more_for_video(context_menu, provider, context, video_id, is_logged_in=False, refresh_container=False): _is_logged_in = '0' if is_logged_in: _is_logged_in = '1' _refresh_container = '0' if refresh_container: _refresh_container = '1' context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.more']), 'Container.Update(%s)' % context.create_uri(['video', 'more'], {'video_id': video_id, 'logged_in': _is_logged_in, 'refresh_container': _refresh_container}))) def append_content_from_description(context_menu, provider, context, video_id): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.description.links']), 'Container.Update(%s)' % context.create_uri(['special', 'description_links'], {'video_id': video_id}))) def append_play_with(context_menu, provider, context): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.play_with']), 'Action(SwitchPlayer)')) def append_queue_video(context_menu, provider, context): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.queue']), 'Action(Queue)')) def append_play_all_from_playlist(context_menu, provider, context, playlist_id, video_id=''): if video_id: context_menu.append((context.localize(provider.LOCAL_MAP['youtube.playlist.play.from_here']), 'RunPlugin(%s)' % context.create_uri(['play'], {'playlist_id': playlist_id, 'video_id': video_id, 'play': '1'}))) else: context_menu.append((context.localize(provider.LOCAL_MAP['youtube.playlist.play.all']), 'RunPlugin(%s)' % context.create_uri(['play'], {'playlist_id': playlist_id, 'play': '1'}))) def append_add_video_to_playlist(context_menu, provider, context, video_id): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.add_to_playlist']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'select', 'playlist'], {'video_id': video_id}))) def append_rename_playlist(context_menu, provider, context, playlist_id, playlist_name): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.rename']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'rename', 'playlist'], {'playlist_id': playlist_id, 'playlist_name': playlist_name}))) def append_delete_playlist(context_menu, provider, context, playlist_id, playlist_name): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.delete']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'remove', 'playlist'], {'playlist_id': playlist_id, 'playlist_name': playlist_name}))) def append_remove_as_watchlater(context_menu, provider, context, playlist_id, playlist_name): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.remove.as.watchlater']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'remove', 'watchlater'], {'playlist_id': playlist_id, 'playlist_name': playlist_name}))) def append_set_as_watchlater(context_menu, provider, context, playlist_id, playlist_name): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.set.as.watchlater']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'set', 'watchlater'], {'playlist_id': playlist_id, 'playlist_name': playlist_name}))) def append_remove_as_history(context_menu, provider, context, playlist_id, playlist_name): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.remove.as.history']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'remove', 'history'], {'playlist_id': playlist_id, 'playlist_name': playlist_name}))) def append_set_as_history(context_menu, provider, context, playlist_id, playlist_name): 
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.set.as.history']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'set', 'history'], {'playlist_id': playlist_id, 'playlist_name': playlist_name}))) def append_remove_my_subscriptions_filter(context_menu, provider, context, channel_name): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.remove.my_subscriptions.filter']), 'RunPlugin(%s)' % context.create_uri(['my_subscriptions', 'filter'], {'channel_name': channel_name,
'action
': 'remove'}))) def append_add_my_subscriptions_filter(context_menu, provider, context, channel_name): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.add.my_subscriptions.filter']), 'RunPlugin(%s)' % context.create_uri(['my_subscriptions', 'filter'], {'channel_name': channel_name, 'action': 'add'}))) def append_rate_video(context_menu, provider, context, video_id, refresh_container=False): if refresh_container: refresh_container = '1' else: refresh_container = '0' context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.rate']), 'RunPlugin(%s)' % context.create_uri(['video', 'rate'], {'video_id': video_id, 'refresh_container': refresh_container}))) def append_watch_later(context_menu, provider, context, playlist_id, video_id): playlist_path = kodion.utils.create_path('channel', 'mine', 'playlist', playlist_id) if playlist_id and playlist_path != context.get_path(): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.watch_later']), 'RunPlugin(%s)' % context.create_uri(['playlist', 'add', 'video'], {'playlist_id': playlist_id, 'video_id': video_id}))) def append_go_to_channel(context_menu, provider, context, channel_id, channel_name): text = context.localize(provider.LOCAL_MAP['youtube.go_to_channel']) % ('[B]%s[/B]' % channel_name) context_menu.append((text, 'Container.Update(%s)' % context.create_uri(['channel', channel_id]))) def append_related_videos(context_menu, provider, context, video_id): context_menu.append((context.localize(provider.LOCAL_MAP['youtube.related_videos']), 'Container.Update(%s)' % context.create_uri(['special', 'related_videos'],
mbauskar/helpdesk-erpnext
erpnext/accounts/report/profit_and_loss_statement/profit_and_loss_statement.py
Python
agpl-3.0
1,352
0.022189
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt

from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
from erpnext.accounts.report.financial_statements import (get_period_list, get_columns, get_data)


def execute(filters=None):
    period_list = get_period_list(filters.fiscal_year, filters.periodicity)

    income = get_data(filters.company, "Income", "Credit", period_list, ignore_closing_entries=True)
    expense = get_data(filters.company, "Expense", "Debit", period_list, ignore_closing_entries=True)

    net_profit_loss = get_net_profit_loss(income, expense, period_list, filters.company)

    data = []
    data.extend(income or [])
    data.extend(expense or [])
    if net_profit_loss:
        data.append(net_profit_loss)

    columns = get_columns(period_list, filters.company)

    return columns, data


def get_net_profit_loss(income, expense, period_list, company):
    if income and expense:
        net_profit_loss = {
            "account_name": "'" + _("Net Profit / Loss") + "'",
            "account": None,
            "warn_if_negative": True,
            "currency": frappe.db.get_value("Company", company, "default_currency")
        }

        for period in period_list:
            net_profit_loss[period.key] = flt(income[-2][period.key] - expense[-2][period.key], 3)

        return net_profit_loss
dbrattli/RxPY
tests/test_observable/test_forin.py
Python
apache-2.0
1,468
0.004768
import unittest

from rx import Observable
from rx.testing import TestScheduler, ReactiveTest, is_prime, MockDisposable
from rx.disposables import Disposable, SerialDisposable

on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created


class TestForIn(unittest.TestCase):
    def test_for_basic(self):
        scheduler = TestScheduler()

        def create():
            def selector(x):
                return scheduler.create_cold_observable(on_next(x * 100 + 10, x * 10 + 1),
                                                        on_next(x * 100 + 20, x * 10 + 2),
                                                        on_next(x * 100 + 30, x * 10 + 3),
                                                        on_completed(x * 100 + 40))
            return Observable.for_in([1, 2, 3], selector)

        results = scheduler.start(create=create)
        results.messages.assert_equal(on_next(310, 11), on_next(320, 12), on_next(330, 13),
                                      on_next(550, 21), on_next(560, 22), on_next(570, 23),
                                      on_next(890, 31), on_next(900, 32), on_next(910, 33),
                                      on_completed(920))

    def test_for_throws(self):
        ex = 'ex'
        scheduler = TestScheduler()

        def create():
            def selector(x):
                raise Exception(ex)
            return Observable.for_in([1, 2, 3], selector)

        results = scheduler.start(create=create)
        results.messages.assert_equal(on_error(200, ex))
yujikato/DIRAC
src/DIRAC/ResourceStatusSystem/Agent/ElementInspectorAgent.py
Python
gpl-3.0
9,113
0.008449
""" ElementInspectorAgent This agent inspect Resources (or maybe Nodes), and evaluates policies that apply. The following options can be set for the ElementInspectorAgent. .. literalinclude:: ../ConfigTemplate.cfg :start-after: ##BEGIN ElementInspectorAgent :end-before: ##END :dedent: 2 :caption: ElementInspectorAgent options """ from __future__ import absolute_import from __future__ import division from __future__ import print_function __RCSID__ = '$Id$' import datetime import math from six.moves import queue as Queue from DIRAC import S_ERROR, S_OK from DIRAC.Core.Base.AgentModule import AgentModule from DIRAC.Core.Utilities.ThreadPool import ThreadPool from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader from DIRAC.ResourceStatusSystem.PolicySystem.PEP import PEP AGENT_NAME = 'ResourceStatus/ElementInspectorAgent' class ElementInspectorAgent(AgentModule): """ ElementInspectorAgent The ElementInspector agent is a generic agent used to check the elements of type "Resource" -- which includes ComputingElement, StorageElement, and other types This Agent takes care of the Elements. In order to do so, it gathers the eligible ones and then evaluates their statuses with the PEP. """ # Max number of worker threads by default __maxNumberOfThreads = 15 # Inspection freqs, defaults, the lower, the higher priority to be checked. # Error state usually means there is a glitch somewhere, so it has the highest # priority. __checkingFreqs = {'Active': 20, 'Degraded': 20, 'Probing': 20, 'Banned': 15, 'Unknown': 10, 'Error': 5} def __init__(self, *args, **kwargs): """ c'tor """ AgentModule.__init__(self, *args, **kwargs) # ElementType, to be defined among Resource or Node self.elementType = 'Resource' self.elementsToBeChecked = None self.threadPool = None self.rsClient = None self.clients = {} def initialize(self): """ Standard initialize. """ maxNumberOfThreads = self.am_getOption('maxNumberOfThreads', self.__maxNumberOfThreads) self.threadPool = ThreadPool(maxNumberOfThreads, maxNumberOfThreads) self.elementType = self.am_getOption('elementType', self.elementType) res = ObjectLoader().loadObject('DIRAC.ResourceStatusSystem.Client.ResourceStatusClient') if not res['OK']: self.log.error('Failed to load ResourceStatusClient class: %s' % res['Message']) return res rsClass = res['Value'] res = ObjectLoader().loadObject('DIRAC.ResourceStatusSystem.Client.ResourceManagementClient') if not res['OK']: self.log.error('Failed to load ResourceManagementClient class: %s' % res['Message']) return res rmClass = res['Value'] self.rsClient = rsClass() self.clients['ResourceStatusClient'] = rsClass() self.clients['ResourceManagementClient'] = rmClass() if not self.elementType: return S_ERROR('Missing elementType') return S_OK() def execute(self): """ execute This is the main method of the agent. It gets the elements from the Database which are eligible to be re-checked, calculates how many threads should be started and spawns them. Each thread will get an element from the queue until it is empty. At the end, the method will join the queue such that the agent will not terminate a cycle until all elements have been processed. 
""" # Gets elements to be checked (returns a Queue) elementsToBeChecked = self.getElementsToBeChecked() if not elementsToBeChecked['OK']: self.log.error(elementsToBeChecked['Message']) return elementsToBeChecked self.elementsToBeChecked = elementsToBeChecked['Value'] queueSize = self.elementsToBeChecked.qsize() pollingTime = self.am_getPollingTime() # Assigns number of threads on the fly such that we exhaust the PollingTime # without having to spawn too many threads. We assume 10 seconds per element # to be processed ( actually, it takes something like 1 sec per element ): # numberOfThreads = elements * 10(s/element) / pollingTime numberOfThreads = int(math.ceil(queueSize * 10. / pollingTime)) self.log.info('Needed %d threads to process %d elements' % (numberOfThreads, queueSize)) for _x in range(numberOfThreads): jobUp = self.threadPool.generateJobAndQueueIt(self._execute) if not jobUp['OK']: self.log.error(jobUp['Message']) self.log.info('blocking until all elements have been processed') # block until all tasks are done self.elementsToBeChecked.join() self.log.info('done') return S_OK() def getElementsToBeChecked(self): """ getElementsToBeChecked This method gets all the rows in the <self.elementType>Status table, and then discards entries with TokenOwner != rs_svc. On top of that, there are check frequencies that are applied: depending on the current status of the element, they will be checked more or less often. """ toBeChecked = Queue.Queue() # We get all the elements, then we filter. elements = self.rsClient.selectStatusElement(self.elementType, 'Status') if not elements['OK']: return elements utcnow = datetime.datetime.utcnow().replace(microsecond=0) # filter elements by Type for element in elements['Value']: # Maybe an overkill, but this way I have NEVER again to worry about order # of elements returned by mySQL on tuples elemDict = dict(zip(elements['Columns'], element)) # This if-clause skips all the elements that should not be checked yet timeToNextCheck = self.__checkingFreqs[elemDict['Status']] if utcnow <= elemDict['LastCheckTime'] + datetime.timedelta(minutes=timeToNextCheck): continue # We skip the elements with token different than "rs_svc" if elemDict['TokenOwner'] != 'rs_svc': self.log.verbose('Skipping %s ( %s ) with token %s' % (elemDict['Name'], elemDict['StatusType'], elemDict['TokenOwner'])) continue # We are not checking if the item
is already on the queue or not. It may # be there, but in any case, it is not a big problem. lowerElementDict = {'element': self.elementType} for key, value in elemDict.items(): if len(key) >= 2: # VO ! lowerElementDict[key[0].lower() + key[1:]] = value
# We add lowerElementDict to the queue toBeChecked.put(lowerElementDict) self.log.verbose('%s # "%s" # "%s" # %s # %s' % (elemDict['Name'], elemDict['ElementType'], elemDict['StatusType'], elemDict['Status'], elemDict['LastCheckTime'])) return S_OK(toBeChecked) def _execute(self): """ Method run by the thread pool. It enters a loop until there are no elements on the queue. On each iteration, it evaluates the policies for such element and enforces the necessary actions. If there are no more elements in the queue, the loop is finished. """ pep = PEP(clients=self.clients) while True: try: element = self.elementsToBeChecked.get_nowait() except Queue.Empty: return S_OK() self.log.verbose('%s ( VO=%s / status=%s / statusType=%s ) being processed' % (element['name'], element['vO'], element['status'], element['statusType'])) try: resEnforce = pep.enforce(element) except Exception as e: self.log.exception('Exception during enforcement') resEnforce = S_ERROR('Exception during enforcement')
kd0aij/matrixpilot_old
Tools/MAVLink/MAVProxy/modules/lib/mp_tile.py
Python
gpl-3.0
18,202
0.028843
#!/usr/bin/env python ''' access satellite map tile database some functions are based on code from mapUtils.py in gmapcatcher Andrew Tridgell May 2012 released under GNU GPL v3 or later ''' import math, cv, sys, os, mp_util, httplib2, threading, time, collections, string, hashlib, errno, tempfile class TileException(Exception): '''tile error class''' def __init__(self, msg): Exception.__init__(self, msg) TILE_SERVICES = { # thanks to http://go2log.com/2011/09/26/fetching-tiles-for-offline-map/ # for the URL mapping info "GoogleSat" : "http://khm${GOOG_DIGIT}.google.com/kh/v=113&src=app&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}", "GoogleMap" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=m@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}", "GoogleHyb" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=h@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}", "GoogleTer" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=t@108,r@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}", "GoogleChina" : "http://mt${GOOG_DIGIT}.google.cn/vt/lyrs=m@121&hl=en&gl=cn&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}", "YahooMap" : "http://maps${Y_DIGIT}.yimg.com/hx/tl?v=4.3&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1", "YahooSat" : "http://maps${Y_DIGIT}.yimg.com/ae/ximg?v=1.9&t=a&s=256&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1", "YahooInMap" : "http://maps.yimg.com/hw/tile?locale=en&imgtype=png&yimgv=1.2&v=4.1&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM_2}", "YahooInHyb" : "http://maps.yimg.com/hw/tile?imgtype=png&yimgv=0.95&t=h&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM_2}", "YahooHyb" : "http://maps${Y_DIGIT}.yimg.com/hx/tl?v=4.3&t=h&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1", "MicrosoftBrMap" : "http://imakm${MS_DIGITBR}.maplink3.com.br/maps.ashx?v=${QUAD}|t&call=2.2.4", "MicrosoftHyb" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/h${QUAD}.png?g=441&mkt=en-us&n=z", "MicrosoftSat" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/a${QUAD}.png?g=441&mkt=en-us&n=z", "MicrosoftMap" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/r${QUAD}.png?g=441&mkt=en-us&n=z", "MicrosoftTer" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/r${QUAD}.png?g=441&mkt=en-us&shading=hill&n=z", "OpenStreetMap" : "http://tile.openstreetmap.org/${ZOOM}/${X}/${Y}.png", "OSMARender" : "http://tah.openstreetmap.org/Tiles/tile/${ZOOM}/${X}/${Y}.png", "OpenAerialMap" : "http://tile.openaerialmap.org/tiles/?v=mgm&layer=openaerialmap-900913&x=${X}&y=${Y}&zoom=${OAM_ZOOM}", "OpenCycleMap" : "http://andy.sandbox.cloudmade.com/tiles/cycle/${ZOOM}/${X}/${Y}.png" } # these are the md5sums of "unavailable" tiles BLANK_TILES = set(["d16657bbee25d7f15c583f5c5bf23f50", "c0e76e6e90ff881da047c15dbea380c7", "d41d8cd98f00b204e9800998ecf8427e"]) # all tiles are 256x256 TILES_WIDTH = 256 TILES_HEIGHT = 256 class TileServiceInfo: '''a lookup object for the URL templates''' def __init__(self, x, y, zoom): self.X = x self.Y = y self.Z = zoom quadcode = '' for i in range(zoom - 1, -1, -1): quadcode += str((((((y >> i) & 1) << 1) + ((x >> i) & 1)))) self.ZOOM = zoom self.QUAD = quadcode self.YAHOO_Y = 2**(zoom-1) - 1 - y self.YAHOO_ZOOM = zoom + 1 self.YAHOO_ZOOM_2 = 17 - zoom + 1 self.OAM_ZOOM = 17 - zoom self.GOOG_DIGIT = (x + y) & 3 self.MS_DIGITBR = (((y & 1) << 1) + (x & 1)) + 1 self.MS_DIGIT = (((y & 3) << 1) + (x & 1)) self.Y_DIGIT = (x + y + zoom) % 3 + 1 self.GALILEO = "Galileo"[0:(3 * x + y) & 7] def __getitem__(self, a): return str(getattr(self, a)) class TileInfo: '''description of a tile''' def __init__(self, tile, zoom, offset=(0,0)): self.tile = tile 
(self.x, self.y) = tile self.zoom = zoom (self.offsetx, self.offsety) = offset self.refresh_time() def key(self): '''tile cache key''' return (self.tile, se
lf.zoom) def refresh_time(self): '''reset the request time''' self.request_time = time.time() def coord(self, offset=(0,0)): '''return lat,lon within a tile given (offsetx,offsety)''' (tilex, tiley) = self.tile (o
ffsetx, offsety) = offset world_tiles = 1<<self.zoom x = ( tilex + 1.0*offsetx/TILES_WIDTH ) / (world_tiles/2.) - 1 y = ( tiley + 1.0*offsety/TILES_HEIGHT) / (world_tiles/2.) - 1 lon = x * 180.0 y = math.exp(-y*2*math.pi) e = (y-1)/(y+1) lat = 180.0/math.pi * math.asin(e) return (lat, lon) def size(self): '''return tile size as (width,height) in meters''' (lat1, lon1) = self.coord((0,0)) (lat2, lon2) = self.coord((TILES_WIDTH,0)) width = mp_util.gps_distance(lat1, lon1, lat2, lon2) (lat2, lon2) = self.coord((0,TILES_HEIGHT)) height = mp_util.gps_distance(lat1, lon1, lat2, lon2) return (width,height) def distance(self, lat, lon): '''distance of this tile from a given lat/lon''' (tlat, tlon) = self.coord((TILES_WIDTH/2,TILES_HEIGHT/2)) return mp_util.gps_distance(lat, lon, tlat, tlon) def path(self): '''return relative path of tile image''' (x, y) = self.tile return "%u/%u/%u.img" % (self.zoom, y, x) def url(self, service): '''return URL for a tile''' url = string.Template(TILE_SERVICES[service]) (x,y) = self.tile tile_info = TileServiceInfo(x, y, self.zoom) return url.substitute(tile_info) class TileInfoScaled(TileInfo): '''information on a tile with scale information and placement''' def __init__(self, tile, zoom, scale, src, dst): TileInfo.__init__(self, tile, zoom) self.scale = scale (self.srcx, self.srcy) = src (self.dstx, self.dsty) = dst class MPTile: '''map tile object''' def __init__(self, cache_path=None, download=True, cache_size=500, service="MicrosoftSat", tile_delay=0.3, debug=False, max_zoom=19): if cache_path is None: try: cache_path = os.path.join(os.environ['HOME'], '.tilecache') except Exception: cache_path = os.path.join(tempfile.gettempdir(), 'MAVtilecache') if not os.path.exists(cache_path): mp_util.mkdir_p(cache_path) self.cache_path = cache_path self.max_zoom = max_zoom self.min_zoom = 1 self.download = download self.cache_size = cache_size self.tile_delay = tile_delay self.service = service self.debug = debug if service not in TILE_SERVICES: raise TileException('unknown tile service %s' % service) # _download_pending is a dictionary of TileInfo objects self._download_pending = {} self._download_thread = None self._loading = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'loading.jpg') self._unavailable = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'unavailable.jpg') self._tile_cache = collections.OrderedDict() def coord_to_tile(self, lat, lon, zoom): '''convert lat/lon/zoom to a TileInfo''' world_tiles = 1<<zoom x = world_tiles / 360.0 * (lon + 180.0) tiles_pre_radian = world_tiles / (2 * math.pi) e = math.sin(lat * (1/180.*math.pi)) y = world_tiles/2 + 0.5*math.log((1+e)/(1-e)) * (-tiles_pre_radian) offsetx = int((x - int(x)) * TILES_WIDTH) offsety = int((y - int(y)) * TILES_HEIGHT) return TileInfo((int(x) % world_tiles, int(y) % world_tiles), zoom, offset=(offsetx, offsety)) def tile_to_path(self, tile): '''return full path to a tile''' return os.path.join(self.cache_path, self.service, tile.path()) def coord_to_tilepath(self, lat, lon, zoom): '''return the tile ID that covers a latitude/longitude at a specified zoom level ''' tile = self.coord_to_tile(lat, lon, zoom) return self.tile_to_path(tile) def tiles_pending(self): '''return number of tiles pending download''' return len(self._download_pending) def downloader(self): '''the download thread''' http = httplib2.Http() while self.tiles_pending() > 0: time.sleep(self.tile_delay) keys = self._download_pending.keys()[:] # work out which one to download next, 
choosing by request_time tile_info = self._download_pending[keys[0]] for key
napalm-automation/napalm-yang
napalm_yang/models/openconfig/interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/__init__.py
Python
apache-2.0
15,915
0.00132
# -*- coding: utf-8 -*- from operator import attrgetter from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType from pyangbind.lib.yangtypes import RestrictedClassType from pyangbind.lib.yangtypes import TypedListType from pyangbind.lib.yangtypes import YANGBool from pyangbind.lib.yangtypes import YANGListType from pyangbind.lib.yangtypes import YANGDynClass from pyangbind.lib.yangtypes import ReferenceType from pyangbind.lib.base import PybindBase from collections import OrderedDict from decimal import Decimal from bitarray import bitarray import six # PY3 support of some PY2 keywords (needs improved) if six.PY3: import builtins as __builtin__ long = int elif six.PY2: import __builtin__ from . import config from . import state from . import vrrp class address(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module openconfig-interfaces - based on the path /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address. Each member element of the container is represented as a class variable - with a specific YANG type. YANG Description: The list of configured IPv4 addresses on the interface. """ __slots__ = ("_path_helper", "_extmethods", "__ip", "__config", "__state", "__vrrp") _yang_name = "address" _pybind_generated_by = "container" def __init__(self, *args, **kwargs): self._path_helper = False self._extmethods = False self.__ip = YANGDynClass( base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="leafref", is_config=True, ) self.__config = YANGDynClass( base=config.config, is_container="container", yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) self.__state = YANGDynClass( base=state.state, is_container="container", yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) self.__vrrp = YANGDynClass( base=vrrp.vrrp, is_container="container", yang_name="vrrp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="container", is_config=True, ) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path() + [self._yang_name] else: return [ "interfaces", "interface", "subinterfaces", "subinterface", "ipv4", "addresses", "address", ] def _get_ip(self): """ Getter method for ip, mapped from YANG variable 
/interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/ip (leafref) YANG Description: References the configured IP address """ return self.__ip def _set_ip(self, v, load=False): """ Setter method for ip, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/ip (leafref) If this variable is read-only (config: false) in the source YANG file, then _set_ip is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_ip() directly. YANG Description: References the configured IP address """ parent = getattr(self, "_parent", None) if parent is not None and load is False: raise AttributeError( "Cannot set keys directly when" + " within an instantiated list" ) if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass( v, base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="leafref", is_config=True, ) except (TypeError, ValueError): raise ValueError( { "error-string": """ip must be of a type compatible with leafref""", "defined-type": "leafref", "generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)""", } ) self.__ip = t if hasattr(self, "_set"): self._set() def _unset_ip(self): self.__ip = YANGDynClass( base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethod
s, register_paths=True, is_keyval=True, namespace="http://openconfig.net/yang/interfaces/ip", defining_module="openconfig-if-ip", yang_type="leafre
f", is_config=True, ) def _get_config(self): """ Getter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/config (container) YANG Description: Configuration data for each configured IPv4 address on the interface """ return self.__config def _set_config(self, v, load=False): """ Setter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/config (container) If this variable is read-only (config: false) in the source YANG file, then _set_config is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._se
supriyagarg/pydatalab
datalab/bigquery/_view.py
Python
apache-2.0
11,620
0.007831
# Copyright 2015 Google Inc. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except # in compliance with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed under the License # is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express # or implied. See the License for the specific language governing permissions and limitations under # the License. """Implements BigQuery Views.""" from __future__ import absolute_import from __future__ import unicode_literals from b
uiltins import str from builtins import object import datalab.context from . import _query from . import _tabl
e # Query import is at end to avoid issues with circular dependencies. class View(object): """ An implementation of a BigQuery View. """ # Views in BigQuery are virtual tables, but it is useful to have a mixture of both Table and # Query semantics; our version thus internally has a BaseTable and a Query (for materialization; # not the same as the view query), and exposes a number of the same APIs as Table and Query # through wrapper functions around these. def __init__(self, name, context=None): """Initializes an instance of a View object. Args: name: the name of the view either as a string or a 3-part tuple (projectid, datasetid, name). If a string, it must have the form '<project>:<dataset>.<view>' or '<dataset>.<view>'. context: an optional Context object providing project_id and credentials. If a specific project id or credentials are unspecified, the default ones configured at the global level are used. Raises: Exception if the name is invalid. """ if context is None: context = datalab.context.Context.default() self._context = context self._table = _table.Table(name, context=context) self._materialization = _query.Query('SELECT * FROM %s' % self._repr_sql_(), context=context) def __str__(self): """The full name for the view as a string.""" return str(self._table) @property def name(self): """The name for the view as a named tuple.""" return self._table.name @property def description(self): """The description of the view if it exists.""" return self._table.metadata.description @property def friendly_name(self): """The friendly name of the view if it exists.""" return self._table.metadata.friendly_name @property def query(self): """The Query that defines the view.""" if not self.exists(): return None self._table._load_info() if 'view' in self._table._info and 'query' in self._table._info['view']: return _query.Query(self._table._info['view']['query'], context=self._context) return None def exists(self): """Whether the view's Query has been executed and the view is available or not.""" return self._table.exists() def delete(self): """Removes the view if it exists.""" self._table.delete() def create(self, query): """ Creates the view with the specified query. Args: query: the query to use to for the View; either a string containing a SQL query or a Query object. Returns: The View instance. Raises: Exception if the view couldn't be created or already exists and overwrite was False. """ if isinstance(query, _query.Query): query = query.sql try: response = self._table._api.tables_insert(self._table.name, query=query) except Exception as e: raise e if 'selfLink' in response: return self raise Exception("View %s could not be created as it already exists" % str(self)) def sample(self, fields=None, count=5, sampling=None, use_cache=True, dialect=None, billing_tier=None): """Retrieves a sampling of data from the view. Args: fields: an optional list of field names to retrieve. count: an optional count of rows to retrieve which is used if a specific sampling is not specified. sampling: an optional sampling strategy to apply to the view. use_cache: whether to use cached results or not. dialect : {'legacy', 'standard'}, default 'legacy' 'legacy' : Use BigQuery's legacy SQL dialect. 'standard' : Use BigQuery's standard SQL (beta), which is compliant with the SQL 2011 standard. billing_tier: Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge). If unspecified, this will be set to your project default. 
This can also be used to override your project-wide default billing tier on a per-query basis. Returns: A QueryResultsTable object containing the resulting data. Raises: Exception if the sample query could not be executed or the query response was malformed. """ return self._table.sample(fields=fields, count=count, sampling=sampling, use_cache=use_cache, dialect=dialect, billing_tier=billing_tier) @property def schema(self): """Retrieves the schema of the table. Returns: A Schema object containing a list of schema fields and associated metadata. Raises Exception if the request could not be executed or the response was malformed. """ return self._table.schema def update(self, friendly_name=None, description=None, query=None): """ Selectively updates View information. Any parameters that are None (the default) are not applied in the update. Args: friendly_name: if not None, the new friendly name. description: if not None, the new description. query: if not None, a new query string for the View. """ self._table._load_info() if query is not None: if isinstance(query, _query.Query): query = query.sql self._table._info['view'] = {'query': query} self._table.update(friendly_name=friendly_name, description=description) def results(self, use_cache=True, dialect=None, billing_tier=None): """Materialize the view synchronously. If you require more control over the execution, use execute() or execute_async(). Args: use_cache: whether to use cached results or not. dialect : {'legacy', 'standard'}, default 'legacy' 'legacy' : Use BigQuery's legacy SQL dialect. 'standard' : Use BigQuery's standard SQL (beta), which is compliant with the SQL 2011 standard. billing_tier: Limits the billing tier for this job. Queries that have resource usage beyond this tier will fail (without incurring a charge). If unspecified, this will be set to your project default. This can also be used to override your project-wide default billing tier on a per-query basis. Returns: A QueryResultsTable containing the result set. Raises: Exception if the query could not be executed or query response was malformed. """ return self._materialization.results(use_cache=use_cache, dialect=dialect, billing_tier=billing_tier) def execute_async(self, table_name=None, table_mode='create', use_cache=True, priority='high', allow_large_results=False, dialect=None, billing_tier=None): """Materialize the View asynchronously. Args: table_name: the result table name; if None, then a temporary table will be used. table_mode: one of 'create', 'overwrite' or 'append'. If 'create' (the default), the request will fail if the table exists. use_cache: whether to use past query results or ignore cache. Has no effect if destination is specified (default True). priority:one of 'low' or 'high' (default). Note that 'high' is more expensive, but is better suited to exploratory analysis. allow_large_results: whether to allow large results; i.e. compressed data over 100MB. This is
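The View class in the row above wraps a BigQuery view in Table-like and Query-like APIs (exists/create/sample/update/results). A minimal driver sketch follows, assuming a configured default datalab Context with valid BigQuery credentials and that View is re-exported at package level as datalab.bigquery.View; the project, dataset and SQL below are made-up placeholders, not part of the row above.

import datalab.bigquery as bq

# Placeholder names; a default Context and working credentials are assumed.
view = bq.View('my_project:my_dataset.daily_events_view')

if not view.exists():
    # create() accepts either a SQL string or a Query object.
    view.create('SELECT user_id, event_ts FROM my_dataset.events')

# sample() delegates to the wrapped Table and returns a QueryResultsTable.
print(view.sample(count=5))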
akissa/baruwa2
baruwa/tasks/status.py
Python
gpl-3.0
13,729
0.003933
# -*- coding: utf-8 -*- # Baruwa - Web 2.0 MailScanner front-end. # Copyright (C) 2010-2015 Andrew Colin Kissa <andrew@topdog.za.net> # vim: ai ts=4 sts=4 et sw=4 "status tasks" import os import datetime import psutil from StringIO import StringIO from pylons import config from celery.task import task from sqlalchemy.pool import NullPool from eventlet.green import subprocess from sqlalchemy import desc from sqlalchemy import engine_from_config from sqlalchemy.exc import DatabaseError from sphinxapi import SphinxClient, SPH_MATCH_EXTENDED2 from reportlab.lib import colors from reportlab.lib.units import inch from reportlab.lib.styles import getSampleStyleSheet from reportlab.platypus import Table from reportlab.lib.styles import ParagraphStyle from reportlab.platypus import Paragraph, Image, Spacer, TableStyle from baruwa.model.meta import Session from baruwa.lib.graphs import PIE_TABLE from baruwa.lib.net import system_hostname from baruwa.lib.misc import extract_sphinx_opts from baruwa.lib.query import clean_sphinx_q from baruwa.lib.mail.queue.exim import EximQueue from baruwa.lib.mail.message import PreviewMessage from baruwa.lib.mail.queue.convert import Exim2Mbox from baruwa.lib.mail.queue.search import search_queue from baruwa.model.status import AuditLog, CATEGORY_MAP from baruwa.commands.queuestats import update_queue_stats from baruwa.lib.regex import EXIM_MSGID_RE, BAYES_INFO_RE from baruwa.lib.outputformats import build_csv, BaruwaPDFTemplate from baruwa.lib.misc import get_processes, get_config_option, wrap_string, _ STYLES = getSampleStyleSheet() if not Session.registry.has(): try: engine = engine_from_config(config, 'sqlalchemy.', poolclass=NullPool) Session.configure(bind=engine) except KeyError: pass @task(name="get-system-status") def systemstatus(): "process via mq" logger = systemstatus.get_logger() logger.info("Checking system status") stats = dict(mem=None, cpu=None, load=None, net=[], mta=None, scanners=None, time=None, uptime=None, av=None, partitions=[]) def _obj2dict(obj): "convert object attribs to dict" val = {} for key in obj._fields: val[key] = getattr(obj, key) return val pipe = subprocess.Popen(["uptime"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) upt = pipe.communicate()[0].split() pipe.wait(timeout=2) pipe = subprocess.Popen(["date"], stdout=subprocess.PIPE, stderr=subprocess.PIPE) stats['time'] = pipe.communicate()[0] pipe.wait(timeout=2) stats['uptime'] = "%s %s" % (upt[2], upt[3].rstrip(',')) stats['mem'] = _obj2dict(psutil.virtual_memory()) stats['mem']['percent'] = ((stats['mem']['used'] / float(stats['mem']['total'])) * 100) stats['cpu'] = psutil.cpu_percent() stats['load'] = os.getloadavg() net = psutil.network_io_counters(True) infs = {} for inf in net: infs[inf] = _obj2dict(net[inf]) stats['net'] = infs partitions = [] for part in psutil.disk_partitions(all=False): usage = psutil.disk_usage(part.mountpoint) dpart = _obj2dict(part) dpart.update(_obj2dict(usage)) partitions.append(dpart) stats['partitions'] = partitions stats['mta'] = get_processes('exim') stats['scanners'] = get_processes('MailScanner') stats['av'] = get_processes('clamd') return stats @task(name="spamassassin-lint") def salint(): "Spamassassin lint" logger = salint.get_logger() logger.info("Running Spamassassin lint checks") lint = [] pipe1 = subprocess.Popen(['spamassassin', '-x', '-D', '--lint'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) while True: line = pipe1.stderr.readline() if not line: break lint.append(line) pipe1.wait(timeout=2) return lint 
@task(name="get-bayes-info") def bayesinfo(): "Get bayes info" logger = bayesinfo.get_logger() logger.info("Generating Bayesian stats") info = {} saprefs = config.get( 'ms.saprefs', '/etc/MailScanner/spam.assassin.prefs.conf' ) pipe1 = subprocess.Popen(['sa-learn', '-p', saprefs, '--dump', 'magic'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) while True: line = pipe1.stdout.readline() if not line: break match = BAYES_INFO_RE.match(line) if match: if match.group(5) == 'bayes db version': info['version'] = match.group(3) elif match.group(5) == 'nspam': info['spam'] = match.group(3) elif match.group(5) == 'nham': info['ham'] = match.group(3) elif match.group(5) == 'ntokens': info['tokens'] = match.group(3) elif match.group(5) == 'oldest atime': info['otoken'] = datetime.datetime\ .fromtimestamp(float(match.group(3))) elif match.group(5) == 'newest atime': info['ntoken'] = datetime.datetime\ .fromtimestamp(float(match.group(3))) elif match.group(5) == 'last journal sync atime': info['ljournal'] = datetime.datetime\ .fromtimestamp(float(match.group(3))) elif match.group(5) == 'last expiry atime': info['expiry'] = datetime.datetime\ .fromtimestamp(float(match.group(3))) elif match.group(5) == 'last expire reduction count': info['rcount'] = match.group(3) pipe1.wait(timeout=2) return info @task(name="p
review-queued-msg") def preview_queued_msg(msgid, direction, attachid=None, imgid=None): "Preview a queued message" try: logger = preview_queued_msg.get_logger()
header = search_queue(msgid, int(direction)) convertor = Exim2Mbox(header) mbox = convertor() msgfile = StringIO(mbox) previewer = PreviewMessage(msgfile) if attachid: logger.info("Download attachment: %(attachid)s of " "message: %(id)s", dict(id=msgid, attachid=attachid)) return previewer.attachment(attachid) if imgid: logger.info("Image access: %(img)s", dict(img=imgid)) return previewer.img(imgid) logger.info("Preview of message: %(id)s", dict(id=msgid)) return previewer.preview() except TypeError, type_error: logger.info("Error occured: %s" % str(type_error)) return {} except (AssertionError, IOError), error: logger.info("Accessing message: %(id)s, Failed: %(error)s", dict(id=msgid, error=error)) return None finally: if 'msgfile' in locals(): msgfile.close() @task(name='process-queued-msgs', ignore_result=True) def process_queued_msgs(msgids, action, direction, *args): "Process queued messages" try: logger = process_queued_msgs.get_logger() eximcmd = get_config_option('Sendmail2') if direction == 2 else 'exim' if 'exim' not in eximcmd: logger.info("Invalid exim command: %s" % eximcmd) return if direction == 1 and action not in ['bounce', 'delete']: logger.info("Invalid action: %s" % action) return exim_user = config.get('baruwa.mail.user', 'exim') queue = EximQueue('sudo -u %s %s' % (exim_user, eximcmd)) func = getattr(queue, action) msgids = [msgid for msgid in msgids if EXIM_MSGID_RE.match(msgid)
robotarium/robotarium-python-simulator
examples/barrierCertificates.py
Python
mit
2,240
0
import numpy as np from robotarium import Robotarium, transformations, controllers
# Get Robotarium object used to communicate with the robots/simulator. r = Robotarium() # Get the number of available agents from the Robotarium. We don't need a # specific value from this algorithm. n = r.get_available_agents() # Number of iterations. iterations = 20000 # Initialize the Robotarium object with the desired number of agents. r.initialize(n)
# Initialize velocity vector for agents. Each agent expects a 2x1 velocity # vector containing the linear and angular velocity, respectively. dx = np.zeros((2, n)) xy_bound = np.array([-0.5, 0.5, -0.3, 0.3]) p_theta = (np.arange(1, 2*n, 2)) / (2 * n) * 2 * np.pi p_circ = np.vstack( [np.hstack([xy_bound[1] * np.cos(p_theta), xy_bound[1] * np.cos(p_theta + np.pi)]), np.hstack([xy_bound[3] * np.sin(p_theta), xy_bound[3] * np.sin(p_theta + np.pi)])]) x_goal = p_circ[:, 0:n] flag = 0 # Flag of task completion # iterate for the previously specified number of iterations. for _ in range(0, iterations): # Retrieve teh most recent poses from teh Robotarium. The time delay is # approximately 0.033 seconds. x = r.get_poses() x_temp = x[0:2, :] # ALGORITHM # Nominal controller, go2goal if np.linalg.norm(x_goal-x_temp, ord=1) < 0.08: flag = 1 - flag if flag == 0: x_goal = p_circ[:, 0:n] else: x_goal = p_circ[:, n:2*n] # Use different go-to-goal dx = controllers.position_int(x, x_goal, 0.05) # Saturation of controls dx_max = 0.1 for i in range(0, n): if np.linalg.norm(dx[:, i]) > dx_max: dx[:, i] = dx[:, i] / np.linalg.norm(dx[:, i]) * dx_max # END ALGORITHM # Ensure the robots don't collide dx = transformations.barrier_certificate(dx, x, ds=0.1) # Transform the single-integrator dynamics to unicycle dynamics using a # diffeomorphism, which can be found in the utilities. dx = transformations.int_to_uni2(dx, x, 0.75, np.pi) # Set velocities of agents 1,...,n r.set_velocities(range(0, n), dx) # Send the previously set velocities to the agents. # This function must be called. r.step()
rdoh/pixelated-user-agent
service/pixelated/config/services.py
Python
agpl-3.0
2,909
0.002063
from pixelated.adapter.mailstore.searchable_mailstore import SearchableMailStore from pixelated.adapter.services.mail_service import MailService from pixelated.adapter.model.mail import InputMail from pixelated.adapter.services.mail_sender import MailSender from pixelated.adapter.search import SearchEngine from pixelated.adapter.services.draft_service import DraftService from pixelated.adapter.listeners.mailbox_indexer_listener import listen_all_mailboxes from twisted.internet import defer from pixelated.adapter.search.index_storage_key import SearchIndexStorageKey from pixelated.adapter.services.feedback_service import FeedbackService class Services(object): def __init__(self, leap_home, leap_session): pass @defer.inlineCallbacks def setup(self, leap_home, leap_session): InputMail.FROM_EMAIL_ADDRESS = leap_session.account_email() search_index_storage_key = self.setup_search_index_storage_key(leap_session.soledad) yield self.setup_search_engine( leap_home, search_index_storage_key) self.wrap_mail_store_with_indexing_mail_store(leap_session) yield listen_all_mailboxes(leap_session.account, self.search_engine, leap_session.mail_store) self.mail_service = self.setup_mail_service( leap_session, self.search_engine) self.keymanager = leap_session.nicknym self.draft_service = self.setup_draft_service(leap_session.mail_store) self.feedback_service = self.setup_feedback_service(leap_session) yield self.index_all_mails() def wrap_mail_store_with_indexing_mail_s
tore(self, leap_session): leap_session.ma
il_store = SearchableMailStore(leap_session.mail_store, self.search_engine) @defer.inlineCallbacks def index_all_mails(self): all_mails = yield self.mail_service.all_mails() self.search_engine.index_mails(all_mails) @defer.inlineCallbacks def setup_search_engine(self, leap_home, search_index_storage_key): key_unicode = yield search_index_storage_key.get_or_create_key() key = str(key_unicode) print 'The key len is: %s' % len(key) search_engine = SearchEngine(key, agent_home=leap_home) self.search_engine = search_engine def setup_mail_service(self, leap_session, search_engine): pixelated_mail_sender = MailSender(leap_session.smtp_config, leap_session.nicknym.keymanager) return MailService( pixelated_mail_sender, leap_session.mail_store, search_engine, leap_session.account_email()) def setup_draft_service(self, mail_store): return DraftService(mail_store) def setup_search_index_storage_key(self, soledad): return SearchIndexStorageKey(soledad) def setup_feedback_service(self, leap_session): return FeedbackService(leap_session)
diego0020/PySurfer
examples/save_movie.py
Python
bsd-3-clause
1,110
0
""" Create movie from MEG inverse solution ======================================= Data were computed using mne-python (http://martinos.org/mne) """ import os
import numpy as np from surfer import Brain from surfer.io import read_stc print(__doc__) """ create Brain object for visualization """ brain = Brain('fsaverage', 'split', 'inflated', size=(800, 400)) """ read and display MNE dSPM inverse solution """ stc_fname = os.path.join('example_data', 'meg_source_estimate-%s.stc') for hemi in ['lh', 'rh']: stc = read_stc(stc_fname % hemi) data = s
tc['data'] times = np.arange(data.shape[1]) * stc['tstep'] + stc['tmin'] brain.add_data(data, colormap='hot', vertices=stc['vertices'], smoothing_steps=10, time=times, hemi=hemi, time_label=lambda t: '%s ms' % int(round(t * 1e3))) """ scale colormap """ brain.scale_data_colormap(fmin=13, fmid=18, fmax=22, transparent=True) """ Save a movie. Use a large value for time_dilation because the sample stc only covers 30 ms. """ brain.save_movie('example_current.mov', time_dilation=30) brain.close()
jcchoiling/learningPython
books/learn-python-the-hard-way/ex17_More_Files.py
Python
gpl-3.0
645
0.009302
#!/usr/bin/env python # -*- coding: utf-8 -*- # Author: Janice Cheng # ex17_inFile.txt # ex17_outFile.txt from sys import argv from os.path import exists script,from_file,to_file = argv print("Copying from {} to {}".format(from_file,to_file)) # we could do these two on one line too, how? in_file = open(from_file) indata = in_file.read() print("The input file is {} b
ytes long".format(len(indata))) print("Does the output file exists? %r" %exists(to_file)) print("Ready, hit RETURN to continue, CTRL-C to abort.") i
nput() out_file = open(to_file,'w') out_file.write(indata) print("Alright, all done.") out_file.close() in_file.close()
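The exercise above opens and closes both file handles by hand. A small alternative sketch of the same copy using with-blocks, so the handles are closed automatically even if the copy fails part-way; it keeps the same argv contract as the script above.

from sys import argv

script, from_file, to_file = argv

print("Copying from {} to {}".format(from_file, to_file))

# "with" closes both files automatically when the block exits.
with open(from_file) as in_file, open(to_file, 'w') as out_file:
    out_file.write(in_file.read())

print("Alright, all done.")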
corcra/UMLS
parse_metamap.py
Python
mit
8,989
0.00534
#!/bin/python # The purpose of this script is to take the *machine-readable* output of UMLS # MetaMap and convert it to something that looks like a sentence of UMLS CUIs, # if possible. Ideally there would be an option in MetaMap to do this, assuming # it is sensible. import re import sys #INTERACTIVE = True INTERACTIVE = False # "hacks" to fix metamap weirdness POSTPROC = True if POSTPROC: print 'WARNING: Performing dataset-specific postprocessing.' # --- some regexes --- # utterance_re = re.compile('^utterance\(') phrase_re = re.compile('^phrase\(') mappings_re = re.compile('^mappings\(') candidates_re = re.compile('^candidates\(') EOU_re = re.compile('^\'EOU') # this is a file of sentences, fed into metamap raw_data_path = '' # --- grab in paths --- # # this is the metamap output. YMMV # created by the command: # metamap14 -q -Q 3 --word_sense_disambiguation raw_data_path metamap_output_path # must provide an input path assert len(sys.argv) >= 2 metamap_output_path = sys.argv[1] # optionally provide output path # (this is the processed data path, the output of this script) try: proc_data_path = sys.argv[2] # do not write over the input, please assert not proc_data_path == metamap_output_path except IndexError: # not provided proc_data_path = metamap_output_path + '.reform' # --- open files --- # metamap_output = open(metamap_output_path, 'r') proc_data = open(proc_data_path, 'w') # --- the first line is 'args', pop that --- # args_line = metamap_output.readline() # not sure what second line is but pop it too unknown_line = metamap_output.readline() # --- the relevant and important functions --- # def parse_phrase(line, neg_dict={}): """ Takes a phrase from machine-readable format, parses its mappings, returns a string of mapped terms (into CUIs, when possible). """ wordmap = dict() # list of words in the phrase # (note: the phrase looks like phrase('PHRASEHERE', [sometext(... ) phrase = re.sub('[\'\.]','',re.split(',\[[a-zA-Z]+\(', re.sub('phrase\(','', line))[0]) # get the candidates (and most importantly, their numbers) candidates = metamap_output.readline() if candidates == '' or not candidates_re.match(candidates): parsed_phrase = phrase + ' ' return parsed_phrase TotalCandidateCount = int(re.sub('candidates\(','',candidates).split(',')[0]) # get the mappings mappings = metamap_output.readline() if mappings == '' or not mappings_re.match(mappings): parsed_phrase = phrase + ' ' return parsed_phrase if TotalCandidateCount == 0: # there were no mappings for this phrase parsed_phrase = phrase + ' ' else: # accounted for by other words delwords = [] parsed_phrase = '' # split the mappings up into 'ev's split_mappings = mappings.split('ev(') outstring = '' for mapping in split_mappings[1:]: CUI = mapping.split(',')[1].strip('\'') try: words = re.split('[\[\]]',','.join(mapping.split(',')[4:]))[1].split(',') except IndexError: # ugh, mapping is messed up print 'WARNING: input is messed up' return parsed_phrase umls_strings = mapping.split(',')[2:4] # CPI is the final [] in this mapping, I think/believe ConceptPositionalInfo = mapping.split('[')[-1].split(']')[0] if ConceptPositionalInfo in neg_dict: # this concept has been negated! # make sure it's the same one... 
assert CUI in neg_dict[ConceptPositionalInfo] # need to make sure it's ONE of the CUIs which was negated at this location CUI = 'NOT_' + CUI if INTERACTIVE: outstring += '\n\tAssociation between '+ CUI + ' and ' + ', '.join(map(lambda x: '"'+x+'"',words)) if len(words) > 1: outstring += ' (subsuming ' + ' '.join(map(lambda x: '"'+x+'"', words[1:])) + ')' outstring += '\n\tbased on UMLS strings ' + ', '.join(umls_strings) +'\n' wordmap[words[0]] = CUI # if multiple words mapped to this CUI, remember to delete the rest # that is: when we consume the sentence later we will 'replace' the # first word in this list with the CUI, then delete the rest # brittleness: delwords may appear elsewhere in the sentence delwords += words[1:] # split on spaces, commas for word in re.split(', | ', phrase): try: # lowercase word, cause it is represented in the prolog that way parsed_phrase += wordmap[word.lower()] + ' ' except KeyError: if word.lower() in delwords: continue else: parsed_phrase += word + ' ' if INTERACTIVE: if len(wordmap) > 0: # yolo print '\nMapping phrase:', print phrase, '...' print outstring print 'Mapped:', phrase, '--->', print parsed_phrase print '' eh = raw_input('') return parsed_phrase def postproc_utterance(parsed_utterance): """ HACKS! Do some 'manual' post-processing to make up for MetaMap peculiarity. WARNING: dataset specific. """ # _ S__ DEID --> _S__DEID parsed_utterance = re.sub('_ S__ DEID', '_S__DEID', parsed_utterance) # _ S__ C2825141 --> _S__FINDING (FINDING...) parsed_utterance = re.sub('_ S__ C2825141', '_S__FINDING', parsed_utterance) return parsed_utterance def parse_utterance(neg_dict={}): """ Suck in an utterance from the machine-readable format, parse its mapping and then return a string of mapped terms (into CUIs). May not be the same length as the input sentence. """ phrases = '' line = metamap_output.readline() while not EOU_re.match(line): if phrase_re.match(line): parsed_phrase = parse_phrase(line, neg_dict) phrases += parsed_phrase e
lif line == '': # EOF I guess... return phrases elif not EOU_re.match(line): print'ERROR: utterance not followed
by EOU line, followed by:' print line sys.exit('ERROR: missing EOU') line = metamap_output.readline() return phrases def parse_negline(neg_line): """ Parse the THIRD line of the .mmo file, where the negations are stored. Why does it not do this per-phrase? Mystery. We connect the negated-CUI to its appearance in the text using the ConceptPositionalInfo which _appears_ to correspond to the PosInfo field which appears in the ev found in a mapping. The output is neg_dict which maps these ConceptPositionalInfos into the associated CUIs :we use this for sanity checking while parsing the mappings; the position should be enough to identify it, but for extra-safety we assert that the CUIs are matching. """ assert 'neg_list([' in neg_line neg_dict = dict() # strip things out # (removing "neg_list(["... and ..."]).\n") l_stripped = neg_line[10:][:-5] # split into seprate 'negations'... # split on ( and then remove the training ", negation(" at the end, first entry is useless negations = map(lambda x: x.rstrip(')')[:-10] if 'negation' in x else x.rstrip(')'), l_stripped.split('('))[1:] # for each negation, grab its location and CUI for neg in negations: # strip the string part of the CUI: we know it's between the SECOND pair of [], and before a : NegatedConcept = neg.split('[')[2].split(':')[0].strip('\'') # now get the concept... we know it's in the THIRD set of []... and there may be several separated by , ConceptPositionalInfo = neg.split('[')[3].rstrip(']') try: neg_dict[ConceptPositionalInfo].add(NegatedConcept) except KeyError: neg_dict[ConceptPositionalInfo] = set([NegatedConcept]) return neg_dict # --- run through the
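The heart of parse_phrase above is the final loop that swaps mapped words for CUIs and drops the words they subsume. A toy, self-contained sketch of just that substitution step; the phrase, CUI and word lists below are invented for illustration and are not taken from the module above.

import re

phrase = 'severe chest pain on exertion'    # made-up input phrase
wordmap = {'chest': 'C0008031'}             # hypothetical word -> CUI mapping
delwords = ['pain']                         # words subsumed by that mapping

parsed_phrase = ''
for word in re.split(', | ', phrase):
    if word.lower() in wordmap:
        parsed_phrase += wordmap[word.lower()] + ' '
    elif word.lower() in delwords:
        continue
    else:
        parsed_phrase += word + ' '

print(parsed_phrase)    # "severe C0008031 on exertion "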
lixiangning888/whole_project
modules/signatures_merge_tmp/ek_silverlight.py
Python
lgpl-3.0
1,071
0.009794
# -*- coding: utf-8 -*- try: import re2 as re except ImportError: import re from lib.cuckoo.common.abstracts import Signature class Silverlight_JS(Signature): name = "silverlight_js" description = "Executes obfuscated JavaScript containing a Silverlight object, possibly used in an exploit attempt" weight = 3 severity = 3 categories = ["exploit_kit", "silverlight"] authors = ["Kevin Ross"] minimum = "1.3" evented = True def __init__(self, *args, **kwargs): Signature.__init__(self, *args, **kwargs) filter_categories = set
(["browser"]) # backward compat filter_apinames = set(["JsEval", "COleScript_Compile", "COleScript_ParseScriptText"]) def on_call(self, call, process): if call["api"] == "JsEval": buf = self.g
et_argument(call, "Javascript") else: buf = self.get_argument(call, "Script") if re.search("application\/x\-silverlight.*?\<param name[ \t\n]*=.*?value[ \t\n]*=.*?\<\/object\>.*", buf, re.IGNORECASE|re.DOTALL): return True
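The signature fires when evaluated script text embeds a Silverlight <object> with the param/value markup matched by the regex above. A standalone sketch of that check against a fabricated buffer (the HTML below is invented for illustration, not a real exploit sample).

import re

# Fabricated script buffer for illustration only.
buf = ('document.write(\'<object data="data:application/x-silverlight-2,">'
       '<param name="source" value="payload.xap"/></object>\');')

pattern = (r"application\/x\-silverlight.*?\<param name[ \t\n]*=.*?"
           r"value[ \t\n]*=.*?\<\/object\>.*")

print(bool(re.search(pattern, buf, re.IGNORECASE | re.DOTALL)))    # True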
anhstudios/swganh
data/scripts/templates/object/tangible/lair/base/shared_poi_all_lair_insecthill_large_fog_gray.py
Python
mit
469
0.046908
#### NOTICE: THIS FILE IS AUTOGENERATED #### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY #### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES from swgpy.object import * def create(kernel): result = Tangible() result.template = "object/tangible/lair/base/shared_poi_all_lair_insecthill_large_fog_gray
.iff" result.attribute_template_id = -1 result.stfName("lair_n","insecthill") #### BEGIN MODIFICATIONS #### #### END MODIFICATI
ONS #### return result
lu-ci/apex-sigma-plugins
core_functions/stats/ev_mention.py
Python
gpl-3.0
572
0
async def ev_mention(ev, message):
def_stat_data = { 'event': 'mention', 'count': 0 } collection = 'EventStats' database = ev.bot.cfg.db.database
check = ev.db[database][collection].find_one({"event": 'mention'}) if not check: ev.db[database][collection].insert_one(def_stat_data) ev_count = 0 else: ev_count = check['count'] ev_count += 1 update_target = {"event": 'mention'} update_data = {"$set": {'count': ev_count}} ev.db[database][collection].update_one(update_target, update_data)
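The handler above reads the current count and writes it back in two round trips. Assuming ev.db is a pymongo-style client (as the find_one/update_one calls suggest), the same bump can be expressed as one atomic upsert; the function name below is a hypothetical alternative, not part of the row above.

async def ev_mention_atomic(ev, message):
    database = ev.bot.cfg.db.database
    collection = 'EventStats'
    # $inc with upsert=True creates the document on first use and
    # increments it atomically afterwards (pymongo-style API assumed).
    ev.db[database][collection].update_one(
        {'event': 'mention'},
        {'$inc': {'count': 1}},
        upsert=True
    )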
nawawi/poedit
deps/boost/tools/build/test/lib_source_property.py
Python
mit
917
0
#!/usr/bin/python # Copyright (C) Vladimir Prus 2006. # Distributed under the Boost Software License, Version 1.0. (See # accompanying file LICENSE_1_0.txt or copy at # http://www.boost.org/LICENSE_1_0.txt) # Regression test: if a library had no explicit sources, but only <source> # properties, it was built as if it were a searched library, and the specified # sources were not compiled. import BoostBuild t = BoostBuild.Tester(use_test_config=False) t.write("jamroot.jam", """ lib a : : <source>a.cpp ; """) t.write("a.cpp", """ #ifdef _WIN32 __declspec(dllexport) #endif void foo() {} """) t.run_build_system() t.expect_addition("bin/$toolset/debug
*/a.obj") t.rm("bin") # N
ow try with <conditional>. t.write("jamroot.jam", """ rule test ( properties * ) { return <source>a.cpp ; } lib a : : <conditional>@test ; """) t.run_build_system() t.expect_addition("bin/$toolset/debug*/a.obj") t.cleanup()
slackhq/python-slackclient
slack_sdk/rtm_v2/__init__.py
Python
mit
16,519
0.001574
"""
A Pyth
on module for interacting with Slack's RTM API.""" import inspect import json import logging import time from concurrent.futures.thread import ThreadPoolExecutor from logging import Logger from queue import Queue, Empty from ssl import SSLContext from threading import Lock, Event from typing import Optional, Callable, List, Union from slack_sdk.errors import SlackApiError, SlackClientError from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env from slack_sdk.socket_mode.builtin.connection import Connection, ConnectionState from slack_sdk.socket_mode.interval_runner import IntervalRunner from slack_sdk.web import WebClient class RTMClient: token: Optional[str] bot_id: Optional[str] default_auto_reconnect_enabled: bool auto_reconnect_enabled: bool ssl: Optional[SSLContext] proxy: str timeout: int base_url: str ping_interval: int logger: Logger web_client: WebClient current_session: Optional[Connection] current_session_state: Optional[ConnectionState] wss_uri: Optional[str] message_queue: Queue message_listeners: List[Callable[["RTMClient", dict], None]] message_processor: IntervalRunner message_workers: ThreadPoolExecutor closed: bool connect_operation_lock: Lock on_message_listeners: List[Callable[[str], None]] on_error_listeners: List[Callable[[Exception], None]] on_close_listeners: List[Callable[[int, Optional[str]], None]] def __init__( self, *, token: Optional[str] = None, web_client: Optional[WebClient] = None, auto_reconnect_enabled: bool = True, ssl: Optional[SSLContext] = None, proxy: Optional[str] = None, timeout: int = 30, base_url: str = WebClient.BASE_URL, headers: Optional[dict] = None, ping_interval: int = 5, concurrency: int = 10, logger: Optional[logging.Logger] = None, on_message_listeners: Optional[List[Callable[[str], None]]] = None, on_error_listeners: Optional[List[Callable[[Exception], None]]] = None, on_close_listeners: Optional[List[Callable[[int, Optional[str]], None]]] = None, trace_enabled: bool = False, all_message_trace_enabled: bool = False, ping_pong_trace_enabled: bool = False, ): self.token = token.strip() if token is not None else None self.bot_id = None self.default_auto_reconnect_enabled = auto_reconnect_enabled # You may want temporarily turn off the auto_reconnect as necessary self.auto_reconnect_enabled = self.default_auto_reconnect_enabled self.ssl = ssl self.proxy = proxy self.timeout = timeout self.base_url = base_url self.headers = headers self.ping_interval = ping_interval self.logger = logger or logging.getLogger(__name__) if self.proxy is None or len(self.proxy.strip()) == 0: env_variable = load_http_proxy_from_env(self.logger) if env_variable is not None: self.proxy = env_variable self.web_client = web_client or WebClient( token=self.token, base_url=self.base_url, timeout=self.timeout, ssl=self.ssl, proxy=self.proxy, headers=self.headers, logger=logger, ) self.on_message_listeners = on_message_listeners or [] self.on_error_listeners = on_error_listeners or [] self.on_close_listeners = on_close_listeners or [] self.trace_enabled = trace_enabled self.all_message_trace_enabled = all_message_trace_enabled self.ping_pong_trace_enabled = ping_pong_trace_enabled self.message_queue = Queue() def goodbye_listener(_self, event: dict): if event.get("type") == "goodbye": message = "Got a goodbye message. Reconnecting to the server ..." 
self.logger.info(message) self.connect_to_new_endpoint(force=True) self.message_listeners = [goodbye_listener] self.socket_mode_request_listeners = [] self.current_session = None self.current_session_state = ConnectionState() self.current_session_runner = IntervalRunner( self._run_current_session, 0.1 ).start() self.wss_uri = None self.current_app_monitor_started = False self.current_app_monitor = IntervalRunner( self._monitor_current_session, self.ping_interval, ) self.closed = False self.connect_operation_lock = Lock() self.message_processor = IntervalRunner(self.process_messages, 0.001).start() self.message_workers = ThreadPoolExecutor(max_workers=concurrency) # -------------------------------------------------------------- # Decorator to register listeners # -------------------------------------------------------------- def on(self, event_type: str) -> Callable: """Registers a new event listener. Args: event_type: str representing an event's type (e.g., message, reaction_added) """ def __call__(*args, **kwargs): func = args[0] if func is not None: if isinstance(func, Callable): name = ( func.__name__ if hasattr(func, "__name__") else f"{func.__class__.__module__}.{func.__class__.__name__}" ) inspect_result: inspect.FullArgSpec = inspect.getfullargspec(func) if inspect_result is not None and len(inspect_result.args) != 2: actual_args = ", ".join(inspect_result.args) error = f"The listener '{name}' must accept two args: client, event (actual: {actual_args})" raise SlackClientError(error) def new_message_listener(_self, event: dict): actual_event_type = event.get("type") if event.get("bot_id") == self.bot_id: # SKip the events generated by this bot user return # https://github.com/slackapi/python-slack-sdk/issues/533 if event_type == "*" or ( actual_event_type is not None and actual_event_type == event_type ): func(_self, event) self.message_listeners.append(new_message_listener) else: error = f"The listener '{func}' is not a Callable (actual: {type(func).__name__})" raise SlackClientError(error) # Not to cause modification to the decorated method return func return __call__ # -------------------------------------------------------------- # Connections # -------------------------------------------------------------- def is_connected(self) -> bool: """Returns True if this client is connected.""" return self.current_session is not None and self.current_session.is_active() def issue_new_wss_url(self) -> str: """Acquires a new WSS URL using rtm.connect API method""" try: api_response = self.web_client.rtm_connect() return api_response["url"] except SlackApiError as e: if e.response["error"] == "ratelimited": delay = int(e.response.headers.get("Retry-After", "30")) # Tier1 self.logger.info(f"Rate limited. Retrying in {delay} seconds...") time.sleep(delay) # Retry to issue a new WSS URL return self.issue_new_wss_url() else: # other errors self.logger.error(f"Failed to retrieve WSS URL: {e}") raise e def connect_to_new_endpoint(self, force: bool = False): """Acquires a new WSS URL and tries to connect to the endpoi
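The on() decorator above registers two-argument listeners (client, event) and rejects anything else. A minimal wiring sketch follows; SLACK_BOT_TOKEN is a placeholder, and the blocking rtm.start() entry point is assumed from the released slack_sdk package rather than shown in this excerpt.

import os
from slack_sdk.rtm_v2 import RTMClient

rtm = RTMClient(token=os.environ["SLACK_BOT_TOKEN"])    # placeholder env var

@rtm.on("message")
def handle(client: RTMClient, event: dict):
    # Listeners must accept exactly two args: client and event.
    if "hello" in event.get("text", ""):
        client.web_client.chat_postMessage(
            channel=event["channel"],
            text="Hi there!",
        )

rtm.start()    # assumed blocking entry point, not part of the excerpt above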
noironetworks/neutron
neutron/tests/functional/agent/linux/helpers.py
Python
apache-2.0
2,881
0
# Copyright (c) 2014 Red Hat, Inc. # All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import multiprocessing import os import time import fixtures from neutron.agent.linux import utils from neutron.tests import tools class RecursivePermDirFixture(fixtures.Fixture): """Ensure at least perms permissions on directory and ancestors.""" def __init__(self, directory, perms): super(RecursivePermDirFixture, self).__init__() self.directory = directory self.least_perms = perms def _setUp(self): previous_directory = None current_directory = self.directory while previous_directory != current_directory: perms = os.stat(current_directory).st_mode if perms & self.least_perms != self.least_perms: os.chmod(current_directory, perms | self.least_perms) previous_directory = current_directory current_directory = os.path.dirname(current_directory) class AdminDirFixture(fixtures.Fixture): """Handle directory create/delete with admin permissions required""" def __init__(self, directory): super(AdminDirFixture, self).__init__() self.directory = directory def _setUp(self): # NOTE(cbrandily): Ensure we will not delete a directory existing # before test run during cleanup. if os.path.exists(self.directory): tools.fail('%s already exists' % self.directory) create_cmd = ['mkdir', '-p', self.
directory] delete_cmd = ['rm', '-r', self.directory] utils.execute(create_cmd, run_as_root=True) self.addCleanup(utils.execute, delete_cmd, run_as_root=True) class SleepyProcessFixture(fixtures.Fixture): """Process fixture to perform time.sleep for a given numbe
r of seconds.""" def __init__(self, timeout=60): super(SleepyProcessFixture, self).__init__() self.timeout = timeout @staticmethod def yawn(seconds): time.sleep(seconds) def _setUp(self): self.process = multiprocessing.Process(target=self.yawn, args=[self.timeout]) self.process.start() self.addCleanup(self.destroy) def destroy(self): self.process.terminate() @property def pid(self): return self.process.pid
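These fixtures are normally consumed through useFixture() in a fixtures/testtools test case. A hedged sketch of driving RecursivePermDirFixture that way, with a throwaway temp directory standing in for a real agent data directory.

import os
import stat

import fixtures
import testtools

from neutron.tests.functional.agent.linux.helpers import RecursivePermDirFixture


class PermDirTestCase(testtools.TestCase):
    def test_least_perms_are_applied(self):
        # Throwaway directory standing in for a real data dir.
        workdir = self.useFixture(fixtures.TempDir()).path
        target = os.path.join(workdir, 'state')
        os.mkdir(target, 0o700)
        self.useFixture(RecursivePermDirFixture(target, stat.S_IROTH))
        self.assertTrue(os.stat(target).st_mode & stat.S_IROTH)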
dbinetti/barberscore-django
project/apps/bhs/config.py
Python
bsd-2-clause
898
0
# Django from django.apps import AppConfig class BhsConfig(AppConfig): name = 'apps.bhs' verbose_name = 'Base' def ready(self): import algoliasearch_django as algoliasearch from .indexes import AwardIndex Award = self.get_model('award') algoliasearch.register(Award, AwardIndex) from .indexes import ChartIndex Chart = self.get_model('chart') algoliasearch.register(Chart, ChartIndex) from .indexes import GroupIndex
Group = self.get_model('group') algoliasearch.register(Group, GroupIndex) from .indexes import PersonIndex
Person = self.get_model('person') algoliasearch.register(Person, PersonIndex) from .indexes import ConventionIndex Convention = self.get_model('convention') algoliasearch.register(Convention, ConventionIndex) return
mweb/python
exercises/circular-buffer/circular_buffer_test.py
Python
mit
3,083
0
import unittest from circular_buffer import ( CircularBuffer, BufferFullException, BufferEmptyException ) class CircularBufferTest(unittest.TestCase): def test_read_empty_buffer(self): buf
= CircularBuffer(1) with self.assertRaises(BufferEmptyException): buf.read() def test_write_and_read_back_one_item(self): buf = CircularBuffer(1) buf.write('1') self.assertEqual('1', buf.read())
with self.assertRaises(BufferEmptyException): buf.read() def test_write_and_read_back_multiple_items(self): buf = CircularBuffer(2) buf.write('1') buf.write('2') self.assertEqual(buf.read(), '1') self.assertEqual(buf.read(), '2') with self.assertRaises(BufferEmptyException): buf.read() def test_clearing_buffer(self): buf = CircularBuffer(3) for c in '123': buf.write(c) buf.clear() with self.assertRaises(BufferEmptyException): buf.read() buf.write('1') buf.write('2') self.assertEqual(buf.read(), '1') buf.write('3') self.assertEqual(buf.read(), '2') def test_alternate_write_and_read(self): buf = CircularBuffer(2) buf.write('1') self.assertEqual(buf.read(), '1') buf.write('2') self.assertEqual(buf.read(), '2') def test_read_back_oldest_item(self): buf = CircularBuffer(3) buf.write('1') buf.write('2') buf.read() buf.write('3') buf.read() self.assertEqual(buf.read(), '3') def test_write_full_buffer(self): buf = CircularBuffer(2) buf.write('1') buf.write('2') with self.assertRaises(BufferFullException): buf.write('A') def test_overwrite_full_buffer(self): buf = CircularBuffer(2) buf.write('1') buf.write('2') buf.overwrite('A') self.assertEqual(buf.read(), '2') self.assertEqual(buf.read(), 'A') with self.assertRaises(BufferEmptyException): buf.read() def test_overwrite_non_full_buffer(self): buf = CircularBuffer(2) buf.overwrite('1') buf.overwrite('2') self.assertEqual(buf.read(), '1') self.assertEqual(buf.read(), '2') with self.assertRaises(BufferEmptyException): buf.read() def test_alternate_read_and_overwrite(self): buf = CircularBuffer(5) for c in '123': buf.write(c) buf.read() buf.read() buf.write('4') buf.read() for c in '5678': buf.write(c) buf.overwrite('A') buf.overwrite('B') self.assertEqual(buf.read(), '6') self.assertEqual(buf.read(), '7') self.assertEqual(buf.read(), '8') self.assertEqual(buf.read(), 'A') self.assertEqual(buf.read(), 'B') with self.assertRaises(BufferEmptyException): buf.read() if __name__ == '__main__': unittest.main()
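The tests above fully pin down the expected circular_buffer API. One possible minimal implementation that satisfies them, sketched with collections.deque; the exercise accepts any equivalent approach.

from collections import deque


class BufferFullException(Exception):
    pass


class BufferEmptyException(Exception):
    pass


class CircularBuffer(object):
    def __init__(self, capacity):
        self._capacity = capacity
        self._data = deque()

    def read(self):
        if not self._data:
            raise BufferEmptyException('circular buffer is empty')
        return self._data.popleft()

    def write(self, item):
        if len(self._data) >= self._capacity:
            raise BufferFullException('circular buffer is full')
        self._data.append(item)

    def overwrite(self, item):
        # Like write(), but evicts the oldest item instead of raising.
        if len(self._data) >= self._capacity:
            self._data.popleft()
        self._data.append(item)

    def clear(self):
        self._data.clear()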
varunarya10/ironic
ironic/api/config.py
Python
apache-2.0
1,302
0
# All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. from oslo.config import cfg # Server Specific Configurations # See https://pecan.readthedocs.org/en/latest/configuration.html#server-configuration # noqa server = { 'port': '6385', 'host': '0.0.0.0' } # Pecan Application Configurations # See https://pecan.readthedocs.org/en/latest/configuration.html#application-configuration # noqa app = { 'root': 'ironic.api.controllers.root.RootController', 'modules': ['ironic.api'], 'static_root': '%(confdir)s/public', 'debug': False, 'enable_acl': True, 'acl_public_routes': ['/', '/v1'], } # WSME Configurations # See https://wsme.
readthedocs.org/en/latest/integrate.html#configuration wsme = { 'debug':
cfg.CONF.debug, }
lich-uct/molpher-lib
src/python/molpher/algorithms/pathfinders.py
Python
gpl-3.0
1,908
0.003145
from molpher.algorithms.functions import find_path from molpher.core import ExplorationTree as ETree class BasicPathfinder: """ :param settings: settings to use in the search :type settings: `Settings` A very basic pathfinder class that can be used to run exploration with any combination of operations. """ class MaxItersReachedException(Exception): def __init__(self, tree): super(BasicPathfinder.MaxItersReachedException, self).__init__( "Maximum number of iterations reached while searching " "for a path\n\t source: {0}\n\t target: {1}".format(tree.source, tree.target)) def __init__(self, settings, operations): self.settings =
settings """a settings class (should be a subclass of `Settings`)""" self.tree = ETree.create(source=self.settings.source, target=self.settings.target) """:class:`~molpher.core.ExplorationTree.ExplorationTree` used in the search""" if self.settings.tree_params:
self.tree.params = self.settings.tree_params self.tree.thread_count = self.settings.max_threads self._iteration = operations self.path = None """a list of SMILES strings if a path was found, `None` otherwise""" def __call__(self): """ Executes the search :return: discovered path :rtype: `list` of `str` """ counter = 0 while not self.tree.path_found: counter+=1 if counter > self.settings.max_iters: raise BasicPathfinder.MaxItersReachedException(self.tree) print('Iteration {0}'.format(counter)) for oper in self._iteration: self.tree.runOperation(oper) self.path = find_path(self.tree, self.tree.params['target']) print('Path found:', self.path) return self.path
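BasicPathfinder only needs a Settings-like object (source, target, tree_params, max_threads, max_iters) and a list of tree operations to run each iteration. A hypothetical driver sketch follows; the operation classes are assumed to come from molpher-lib's molpher.core.operations module, and the SMILES strings and limits are placeholders, not taken from the module above.

from molpher.core.operations import (GenerateMorphsOper, SortMorphsOper,
                                     FilterMorphsOper, ExtendTreeOper,
                                     PruneTreeOper)


class DemoSettings(object):
    # Placeholder molecules and limits for illustration only.
    source = 'CCO'
    target = 'C1=CC=CC=C1O'
    tree_params = None
    max_threads = 4
    max_iters = 100


operations = [GenerateMorphsOper(), SortMorphsOper(), FilterMorphsOper(),
              ExtendTreeOper(), PruneTreeOper()]

pathfinder = BasicPathfinder(DemoSettings(), operations)
path = pathfinder()    # list of SMILES strings if a path is found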
girasquid/cab
cab/models.py
Python
bsd-3-clause
7,952
0.006916
""" Models for code snippets and related data. Most of these models also have custom managers defined which add convenient shortcuts for repetitive or common bits of logic; see ``managers.py`` in this directory. """ import datetime, re from django.db import connection, models from django.template.defaultfilters import slugify from django.contrib.auth.models import User from django.core.urlresolvers import reverse import managers from markdown import markdown from pygments import highlight, lexers, formatters RATING_CHOICES = ( (-1, 'Not useful'), (1, 'Useful') ) class Language(models.Model): """ A language in which a Snippet can be written. The ``language_code`` field should be set to an alias of a Pygments lexer which is capable of processing this language. The ``file_extension`` and ``mime_type`` fields will be used when users download Snippets, to set the filename and HTTP Content-Type of the download appropriately. """ name = models.CharField(max_length=50) slug = models.SlugField(editable=False) language_code = models.CharField(max_length=50, help_text="This should be an alias of a Pygments lexer which can handle this language.") file_extension = models.CharField(max_length=10, help_text="The file extension to use when downloading Snippets in this Language; leave out the dot.") mime_type = models.CharField(max_length=100, help_text="The HTTP Content-Type to use when downloading Snippets in this Language.") class Meta: ordering = ('name',) def save(self, *args, **kwargs): if not self.id: self.slug = slugify(self.name) super(Language, self).save(*args, **kwargs) def get_absolute_url(self): return reverse('cab:snippets_by_language', kwargs={'slug': self.slug}) def __unicode__(self): return self.name def get_lexer(self): """ Returns an instance of the Pygments lexer for this language. """ return lexers.get_lexer_by_name(self.language_code) class Tag(models.Model): """ A descriptive tag to be applied to a Snippet. """ name = models.CharField(max_length=50, unique=True) slug = models.SlugField(editable=False) class Meta: ordering = ('name',) def save(self, *args, **kwargs): if not self.id: self.slug = slugify(self.name) super(Tag, self).save(*args, **kwargs) def get_absolute_url(self): return reverse('cab:snippets_by_tag', kwargs={'slug':self.slug}) def __unicode__(self): return self.name class Snippet(models.Model): """ A snippet of code in some Language. This is slightly denormalized in two ways: 1. Because it's wasteful to run Pygments over the code each time the Snippet is viewed, it is instead run on save, and two copies of the code -- one the original input, the other highlighted by Pygments -- are stored. 2. For much the same reason, Markdown is run over the Snippet's description on save, instead of on each view, and the result is stored in a separate column. Also, Tags are added through the ``tag_list`` field which, after the Snippet has been saved, will be iterated over to set up the relationships to actual Tag objects. """ title = models.CharField(max_length=250) language = models.ForeignKey(Language) description = models.TextField(help_text="Accepts HTML.") description_html = models.TextField(editable=False) code = models.TextField() highlighted_code = models.TextField(editable=False) pub_date = models.DateTimeField(editable=False) updated_date = models.DateTimeField(editable=False) author = models.ForeignKey(User) tag_list = models.CharField(max_length=250, help_text="Separate tags with spaces. 
Maximum 250 characters.") tags = models.ManyToManyField(Tag, editable=False) original = models.ForeignKey('self', null=True, blan
k=True, help_text="Optional. Fill this in if this Snippet is based on another.") objects = managers.SnippetsManager() class Meta: ordering = ('-pub_date',) def save(self, *args, **kwargs): if not self.id: self.pub_date = datetime.datetime.now() self.updated_date = datetime.datetime.now()
self.description_html = self.sanitize(self.description) # Use safe_mode in Markdown to prevent arbitrary tags. # self.description_html = markdown(self.description, safe_mode=True) self.highlighted_code = self.highlight() self.tag_list = self.tag_list.lower() # Normalize to lower-case super(Snippet, self).save(*args, **kwargs) # Now that the Snippet is saved, deal with the tags. current_tags = list(self.tags.all()) # We only want to query this once. # Splitting to get the new tag list is tricky, because people # will stick commas and other whitespace in the darndest places. new_tag_list = [t for t in re.split('[\s,]+', self.tag_list) if t] # First, clear out tags that aren't on the Snippet anymore. for tag in current_tags: if tag.name not in new_tag_list: self.tags.remove(tag) # Then add any new tags. for tag_name in new_tag_list: if tag_name not in [tag.name for tag in current_tags]: tag, created = Tag.objects.get_or_create(name=tag_name) self.tags.add(tag) def sanitize(self, value): from BeautifulSoup import BeautifulSoup, Comment import re js_regex = re.compile(r'[\s]*(&#x.{1,7})?'.join(list('javascript'))) allowed_tags = 'strong em a p br img'.split() soup = BeautifulSoup(value) for comment in soup.findAll(text=lambda text: isinstance(text, Comment)): comment.extract() for tag in soup.findAll(True): if tag.name not in allowed_tags: tag.hidden = True return soup.renderContents().decode('utf8') def __unicode__(self): return self.title def get_absolute_url(self): return reverse('cab:snippet_detail', kwargs={'snippet_id': self.id}) def highlight(self): """ Returns this Snippet's originally-input code, highlighted via Pygments. """ return highlight(self.code, self.language.get_lexer(), formatters.HtmlFormatter(linenos=True)) class Rating(models.Model): """ A particular User's rating of a particular Snippet. """ snippet = models.ForeignKey(Snippet) user = models.ForeignKey(User) date = models.DateTimeField(editable=False) score = models.IntegerField(choices=RATING_CHOICES) objects = managers.RatingsManager() def save(self, *args, **kwargs): if not self.id: self.date = datetime.datetime.now() super(Rating, self).save(*args, **kwargs) def __unicode__(self): return "%s rating '%s'" % (self.user.username, self.snippet.title) class Bookmark(models.Model): """ A Snippet bookmarked by a User. """ snippet = models.ForeignKey(Snippet) user = models.ForeignKey(User) date = models.DateTimeField(editable=False, auto_now_add=True) objects = managers.BookmarksManager() class Meta: ordering = ('date',) def __unicode__(self): return "%s bookmarked by %s" % (self.snippet.title, self.user.username)
shajoezhu/server
tests/unit/test_simulated_stack.py
Python
apache-2.0
6,031
0
""" End-to-end tests for the simulator configuration. Sets up a server with the backend, sends some basic queries to that server and verifies results are as expected. """ from __future__ import division from __future__ import print_function from __future__ import unicode_literals import unittest import ga4gh.frontend as frontend import ga4gh.protocol as protocol import tests.utils as utils class TestSimulatedStack(unittest.TestCase): """ Tests the full stack for the Simulated backend by using the Flask testing client. """ @classmethod def setUpClass(cls): config = { "DATA_SOURCE": "__SIMULATED__", "SIMULATED_BACKEND_RANDOM_SEED": 1111, "SIMULATED_BACKEND_NUM_CALLS": 0, "SIMULATED_BACKEND_VARIANT_DENSITY": 1.0, "SIMULATED_BACKEND_NUM_VARIANT_SETS": 10, } frontend.configure( baseConfig="TestConfig", extraConfig=config) cls.app = frontend.app.test_client() @classmethod def tearDownClass(cls): cls.app = None def setUp(self): self.backend = frontend.app.backend self.variantSetIds = [ variantSet.getId() for variantSet in self.backend.getDataset().getVariantSets()] def sendJsonPostRequest(self, path, data): return self.app.post( path, headers={'Content-type': 'application/json'}, data=data) def testVariantSetsSearch(self): expectedIds = self.variantSetIds request = protocol.SearchVariantSetsRequest() request.pageSize = len(expectedIds) path = utils.applyVersion('/variantsets/search') response = self.sendJsonPostRequest( path, request.toJsonString()) self.assertEqual(200, response.status_code) responseData = protocol.SearchVariantSetsResponse.fromJsonString( response.data) self.assertTrue(protocol.SearchVariantSetsResponse.validate( responseData.toJsonDict())) self.assertIsNone(responseData.nextPageToken) self.assertEqual(len(expectedIds), len(responseData.variantSets)) for variantSet in responseData.variantSets: self.assertTrue(variantSet.id in expectedIds) def testVariantsSearch(self): expectedIds = self.variantSetIds[:1] referenceName = '1' request = protocol.SearchVariantsRequest() request.referenceName = referenceName request.start = 0 request.end = 0 request.variantSetIds = expectedIds # Request windows is too small, no results path = utils.applyVersion('/variants/search') response = self.sendJsonPostRequest( path, request.toJsonString()) self.assertEqual(200, response.status_code) responseData = protocol.SearchVariantsResponse.fromJsonString( response.data) self.assertIsNone(responseData.nextPageToken) self.assertEqual([], responseData.variants) # Larger request window, expect results request.end = 2 ** 16 path = utils.applyVersion('/variants/search') response = self.sendJsonPostRequest( path, request.toJsonString()) self.assertEqual(200, response.status_code) responseData = protocol.SearchVariantsResponse.fromJsonString( response.data) self.assertTrue(protocol.SearchVariantsResponse.validate( responseData.toJsonDict())) self.assertGreater(len(responseData.variants), 0) # Verify all results are in the correct range, set and reference for variant in responseData.variants: self.assertGreaterEqual(variant.start, 0) self.assertLessEqual(variant.end, 2 ** 16) self.assertTrue(variant.variantSetId in expectedIds) self.assertEqual(variant.referenceName, referenceName) # TODO: Add more useful test scenarios, including some covering # pagination behavior. # TODO: Add test cases for other methods when they are implemented. @unittest.skipIf(True, "") def testCallSetsSearch(self): # TODO remove the @skipIf decorator here once calls have been # properly implemented in the simulator. 
request = protocol.SearchCallSetsRequest() request.name = None path = utils.applyVersion('/callsets/search') # when variantSetIds are wrong, no results request.variantSetIds = ["xxxx"] response = self.sendJsonPostRequest( path, request.toJsonString()) self.assertEqual(200, response.status_code) responseData = protocol.SearchCallSetsResponse.fromJsonString( response.data) self.assertIsNone(responseData.nextPageToken) self.assertEqual([], responseData.callSets) # if no callset name is given return all callsets request.variantSetIds = self.variantSetIds[:1] response = self.sendJsonPostRequest( path, request.toJsonString()) self.assertEqual(200, response.status_code) responseData = protocol.SearchCallSetsResponse.fromJsonString( response.data) self.assertTrue(protocol.SearchCallSetsResponse.validate( responseData.toJsonDict())) self.assertNotEqual([], responseData.callSets) # TODO test the length of responseData.callSets equal to all callsets # Verify all results are of t
he correct type and range for callSet in responseData.callSets: self.assertIs(type(callSet.info), dict) self.assertIs(type(callSet.
variantSetIds), list) splits = callSet.id.split(".") variantSetId = '.'.join(splits[:2]) callSetName = splits[-1] self.assertIn(variantSetId, callSet.variantSetIds) self.assertEqual(callSetName, callSet.name) self.assertEqual(callSetName, callSet.sampleId) # TODO add tests after name string search schemas is implemented
samklr/spark-gce
setup.py
Python
apache-2.0
1,354
0.00517
# Copyright 2015 Michael Broxton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

from setuptools import setup

import spark_gce
import fnmatch
import os

support_files = []
for root, dirnames, filenames in os.walk('spark_gce/support_files'):
    for filename in fnmatch.filter(filenames, '*'):
        support_files.append(os.path.join(root, filename)[10:])

setup(
    name='spark-gce',
    packages=['spark_gce'],
    version=str(spark_gce.__version__),
    description='This script helps you create a Spark cluster on Google Compute Engine.',
    author='Michael Broxton',
    author_email='broxton@gmail.com',
    url='https://github.com/broxtronix/spark-gce',
    download_url='https://github.com/broxtronix/spark-gce/tarball/1.0.6',
    scripts=['bin/spark-gce'],
    package_data={'spark_gce': support_files},
    install_requires=['boto']
)
hockeybuggy/bigcommerce-api-python
bigcommerce/__init__.py
Python
mit
225
0.004444
# from bigcommerce.connection import Connection, OAuthConnection, HttpException, ClientRequestException, \
#     EmptyResponseWarning, RedirectionException, ServerException

import bigcommerce.resources
import bigcommerce.api
wagoodman/bridgy
bridgy/inventory/source.py
Python
mit
6,467
0.003402
import re
import abc
import warnings
import collections
from functools import partial

from bridgy.error import MissingBastionHost

with warnings.catch_warnings():
    # This warns about using the slow implementation of SequenceMatcher
    # instead of the python-Levenshtein module, which requires compilation.
    # I'd prefer for users to simply use this tool without the need to
    # compile since the search space is probably fairly small
    warnings.filterwarnings("ignore", category=UserWarning)
    from fuzzywuzzy import fuzz


class InstanceType:
    ALL = 'ALL'
    VM = 'VM'
    ECS = 'ECS'


Bastion = collections.namedtuple("Bastion", "destination options")
Instance = collections.namedtuple("Instance", "name address aliases source container_id type")

# allow there to be optional kwargs that default to None
Instance.__new__.__defaults__ = (None,) * len(Instance._fields)


class InventorySource(object):
    __metaclass__ = abc.ABCMeta

    name = "Invalid"
    bastion = None
    ssh_user = None
    ssh_options = None
    include_pattern = None
    exclude_pattern = None

    def __init__(self, *args, **kwargs):
        if 'name' in kwargs:
            self.name = "%s (%s)" % (kwargs['name'], self.name)
            self.source = kwargs['name']

        if 'bastion' in kwargs:
            if 'address' not in kwargs['bastion']:
                raise MissingBastionHost

            if 'user' in kwargs['bastion']:
                destination = '{user}@{host}'.format(user=kwargs['bastion']['user'],
                                                     host=kwargs['bastion']['address'])
            else:
                destination = kwargs['bastion']['address']

            bastion_options = ''
            if 'options' in kwargs['bastion']:
                bastion_options = kwargs['bastion']['options']

            self.bastion = Bastion(destination=destination, options=bastion_options)

        if 'ssh' in kwargs:
            if 'user' in kwargs['ssh']:
                self.ssh_user = kwargs['ssh']['user']
            if 'options' in kwargs['ssh']:
                self.ssh_options = kwargs['ssh']['options']
            else:
                self.ssh_options = ''

        if 'include_pattern' in kwargs:
            self.include_pattern = kwargs['include_pattern']
        if 'exclude_pattern' in kwargs:
            self.exclude_pattern = kwargs['exclude_pattern']

    def instance_filter(self, instance, include_re=None, exclude_re=None):
        comparables = [instance.name, instance.address]
        if instance.aliases:
            comparables.extend(list(instance.aliases))

        if include_re:
            for name in comparables:
                if include_re.search(name):
                    return True
            return False
        elif exclude_re:
            for name in comparables:
                if exclude_re.search(name):
                    return False
            return True
        else:
            return True

    def filter(self, all_instances):
        include_re, exclude_re = None, None
        if self.include_pattern:
            include_re = re.compile(self.include_pattern)
        if self.exclude_pattern:
            exclude_re = re.compile(self.exclude_pattern)

        config_instance_filter = partial(self.instance_filter, include_re=include_re, exclude_re=exclude_re)
        return list(filter(config_instance_filter, all_instances))

    @abc.abstractmethod
    def update(self):
        pass

    @abc.abstractmethod
    def instances(self, stub=True):
        pass

    def search(self, targets, partial=True, fuzzy=False):
        allInstances = self.instances()
        matchedInstances = set()

        for host in targets:
            for instance in allInstances:
                names = [instance.name]
                if instance.aliases != None:
                    names += list(instance.aliases)

                for name in names:
                    if host.lower() == name.lower():
                        matchedInstances.add((100, instance))
                    elif partial and host.lower() in name.lower():
                        matchedInstances.add((99, instance))

                    if fuzzy:
                        score = fuzz.partial_ratio(host.lower(), name.lower())
                        if score > 85 or host.lower() in name.lower():
                            matchedInstances.add((score, instance))

        # it is possible for the same instance to be matched, if so, it should only
        # appear on the return list once (still ordered by the most probable match)
        return list(collections.OrderedDict([(v, None) for k, v in sorted(list(matchedInstances))]).keys())


class InventorySet(InventorySource):

    def __init__(self, inventories=None, **kwargs):
        super(InventorySet, self).__init__(inventories, **kwargs)
        self.inventories = []

        if inventories != None:
            if not isinstance(inventories, list) and not isinstance(inventories, tuple):
                raise RuntimeError("InventorySet only takes a list of inventories. Given: %s" % repr(type(inventories)))

            for inventory in inventories:
                self.add(inventory)

    def add(self, inventory):
        if not isinstance(inventory, InventorySource):
            raise RuntimeError("InventorySet item is not an inventory. Given: %s" % repr(type(inventory)))
        self.inventories.append(inventory)

    @property
    def name(self):
        return " + ".join([inventory.name for inventory in self.inventories])

    def update(self, filter_sources=tuple()):
        for inventory in self.inventories:
            if len(filter_sources) == 0 or (len(filter_sources) > 0 and inventory.source in filter_sources):
                inventory.update()

    def instances(self, stub=True, filter_sources=tuple()):
        instances = []
        for inventory in self.inventories:
            if len(filter_sources) == 0 or (len(filter_sources) > 0 and inventory.source in filter_sources):
                instances.extend(inventory.instances())
        return instances

    def search(self, targets, partial=True, fuzzy=False, filter_sources=tuple()):
        instances = []
        for inventory in self.inventories:
            if len(filter_sources) == 0 or (len(filter_sources) > 0 and inventory.source in filter_sources):
                instances.extend(inventory.search(targets, partial, fuzzy))
        return instances
bigown/SOpt
Python/Algorithm/Payroll.py
Python
mit
159
0.006289
salario = 1000
ano = 1996
while ano <= 2020:
    salario *= 1.015
    ano += 1
print("{0:.2f}".format(salario))

#https://pt.stackoverflow.com/q/432854/101
nkremerh/cctools
work_queue/src/bindings/python2/work_queue_futures.py
Python
gpl-2.0
15,412
0.003634
## @package work_queue_futures
# Python Work Queue bindings.
#
# This is a library on top of work_queue which replaces q.wait with the concept
# of futures.
#
# This is experimental.
#
# - @ref work_queue_futures::WorkQueue
# - @ref work_queue::Task

import work_queue
import multiprocessing
import os
import subprocess
import sys
import threading
import time
import traceback
import concurrent.futures as futures
import atexit

try:
    # from py3
    import queue as ThreadQueue
except ImportError:
    # from py2
    import Queue as ThreadQueue


##
# Python Work Queue object
#
# Implements an asynchronous WorkQueueFutures object.
# @ref work_queue_futures::WorkQueueFutures.
class WorkQueueFutures(object):
    def __init__(self, *args, **kwargs):
        local_worker_args = kwargs.get('local_worker', None)
        if local_worker_args:
            del kwargs['local_worker']
            if local_worker_args is True:
                # local_worker_args can be a dictionary of worker options, or
                # simply 'True' to get the defaults (1 core, 512MB memory,
                # 1000MB of disk)
                local_worker_args = {}

        # calls to synchronous WorkQueueFutures are coordinated with _queue_lock
        self._queue_lock = threading.Lock()
        self._stop_queue_event = threading.Event()

        # set when queue is empty
        self._join_event = threading.Event()

        self._tasks_to_submit = ThreadQueue.Queue()
        self._tasks_before_callbacks = ThreadQueue.Queue()

        self._sync_loop = threading.Thread(target = self._sync_loop)
        self._sync_loop.daemon = True

        self._callback_loop = threading.Thread(target = self._callback_loop)
        self._callback_loop.daemon = True

        self._local_worker = None

        self._queue = work_queue.WorkQueue(*args, **kwargs)

        if local_worker_args:
            self._local_worker = Worker(self.port, **local_worker_args)

        self._sync_loop.start()
        self._callback_loop.start()

        atexit.register(self._terminate)

    # methods not explicitly defined we route to synchronous WorkQueue, using a lock.
    def __getattr__(self, name):
        attr = getattr(self._queue, name)
        if callable(attr):
            def method_wrapped(*args, **kwargs):
                result = None
                with self._queue_lock:
                    result = attr(*args, **kwargs)
                return result
            return method_wrapped
        else:
            return attr

    ##
    # Submit a task to the queue.
    #
    # @param self   Reference to the current work queue object.
    # @param task   A task description created from @ref work_queue::Task.
    def submit(self, future_task):
        if isinstance(future_task, FutureTask):
            self._tasks_to_submit.put(future_task, False)
        else:
            raise TypeError("{} is not a WorkQueue.Task".format(future_task))

    ##
    # Disable wait when using the futures interface
    def wait(self, *args, **kwargs):
        raise AttributeError('wait cannot be used with the futures interface.')

    ##
    # Determine whether there are any known tasks queued, running, or waiting to be collected.
    #
    # Returns 0 if there are tasks remaining in the system, 1 if the system is "empty".
    #
    # @param self   Reference to the current work queue object.
    def empty(self):
        if self._tasks_to_submit.empty():
            return self._queue.empty()
        else:
            return 0

    def _callback_loop(self):
        while not self._stop_queue_event.is_set():
            task = None
            try:
                task = self._tasks_before_callbacks.get(True, 1)
                task.set_result_or_exception()
                self._tasks_before_callbacks.task_done()
            except ThreadQueue.Empty:
                pass
            except Exception as e:
                err = traceback.format_exc()
                if task:
                    task.set_exception(FutureTaskError(task, err))
                else:
                    print(err)

    def _sync_loop(self):
        # map from taskids to FutureTask objects
        active_tasks = {}

        while True:
            try:
                if self._stop_queue_event.is_set():
                    return

                # if the queue is empty, we wait for tasks to be declared for
                # submission, otherwise _queue.wait return immediately and we
                # busy-wait
                submit_timeout = 1
                if len(active_tasks.keys()) > 0:
                    submit_timeout = 0

                # do the submits, if any
                empty = False
                while not empty:
                    try:
                        task = self._tasks_to_submit.get(True, submit_timeout)
                        if not task.cancelled():
                            with self._queue_lock:
                                submit_timeout = 0
                                taskid = self._queue.submit(task)
                                task._set_queue(self)
                                active_tasks[task.id] = task
                        self._tasks_to_submit.task_done()
                    except ThreadQueue.Empty:
                        empty = True

                # wait for any task
                with self._queue_lock:
                    if not self._queue.empty():
                        task = self._queue.wait(1)
                        if task:
                            self._tasks_before_callbacks.put(task, False)
                            del active_tasks[task.id]

                if len(active_tasks) == 0 and self._tasks_to_submit.empty():
                    self._join_event.set()

                if self._local_worker:
                    self._local_worker.check_alive()

            except Exception as e:
                # on error, we set exception to all the known tasks so that .result() does not block
                err = traceback.format_exc()
                while not self._tasks_to_submit.empty():
                    try:
                        t = self._tasks_to_submit.get(False)
                        t.set_exception(FutureTaskError(t, err))
                        self._tasks_to_submit.task_done()
                    except ThreadQueue.Empty:
                        pass
                while not self._tasks_before_callbacks.empty():
                    try:
                        t = self._tasks_before_callbacks.get(False)
                        t.set_exception(FutureTaskError(t, err))
                        self._tasks_before_callbacks.task_done()
                    except ThreadQueue.Empty:
                        pass
                for t in active_tasks.values():
                    t.set_exception(FutureTaskError(t, err))
                active_tasks.clear()
                self._stop_queue_event.set()

    def join(self, timeout=None):
        now = time.time()
        self._join_event.clear()
        return self._join_event.wait(timeout)

    def _terminate(self):
        self._stop_queue_event.set()

        for thread in [self._sync_loop, self._callback_loop]:
            try:
                thread.join()
            except RuntimeError:
                pass

        if self._local_worker:
            try:
                self._local_worker.shutdown()
            except Exception as e:
                pass

    def __del__(self):
        self._terminate()


class FutureTask(work_queue.Task):
    valid_runtime_envs = ['conda', 'singularity']

    def __init__(self, command):
        super(FutureTask, self).__init__(command)

        self._queue = None
        self._cancelled = False
        self._exception = None

        self._done_event = threading.Event()
        self._callbacks = []

        self._runtime_env_type = None

    @property
    def queue(self):
        return self._queue

    def _set_queue(self, queue):
        self._queue = queue
        self.set_running_or_notify_cancel()

    def cancel(self):
        if self.queue:
            self.queue.cancel_by_taskid(self.id)
        self._cancelled = True
maxis1314/pyutils
web/views/base.py
Python
apache-2.0
1,087
0.014719
# coding: utf-8
from flask import Flask, session, redirect, url_for, request, abort
import config

config = config.rec()


def on_finish():
    None


def currentUserGet():
    if 'user' in session:
        user = session['user']
        return user['username']
    else:
        return None


def currentUserSet(username):
    if username:
        session['user'] = dict({'username': username})
    else:
        session.pop('user', None)


def replyerSet(name, email, website):
    if name:
        session['replyer'] = dict({'name': name, 'email': email, 'website': website})
    else:
        session.pop('replyer', None)


def replyerGet():
    if 'replyer' in session:
        reply = session['replyer']
        name = reply['name']
        return name
    else:
        return None


def userAuth(username, password):
    return username == config.admin_username and password == config.admin_password


def isAdmin():
    return currentUserGet() == config.admin_username


def checkAdmin():
    if not isAdmin():
        abort(404)


def get_current_user():
    return currentUserGet()
nlgcoin/guldencoin-official
test/functional/p2p_disconnect_ban.py
Python
mit
5,333
0.003375
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node disconnect and ban behavior"""
import time

from test_framework.test_framework import GuldenTestFramework
from test_framework.util import (
    assert_equal,
    assert_raises_rpc_error,
    connect_nodes_bi,
    wait_until,
)

class DisconnectBanTest(GuldenTestFramework):
    def set_test_params(self):
        self.num_nodes = 2

    def run_test(self):
        self.log.info("Test setban and listbanned RPCs")

        self.log.info("setban: successfully ban single IP address")
        assert_equal(len(self.nodes[1].getpeerinfo()), 2)  # node1 should have 2 connections to node0 at this point
        self.nodes[1].setban(subnet="127.0.0.1", command="add")
        wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
        assert_equal(len(self.nodes[1].getpeerinfo()), 0)  # all nodes must be disconnected at this point
        assert_equal(len(self.nodes[1].listbanned()), 1)

        self.log.info("clearbanned: successfully clear ban list")
        self.nodes[1].clearbanned()
        assert_equal(len(self.nodes[1].listbanned()), 0)
        self.nodes[1].setban("127.0.0.0/24", "add")

        self.log.info("setban: fail to ban an already banned subnet")
        assert_equal(len(self.nodes[1].listbanned()), 1)
        assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")

        self.log.info("setban: fail to ban an invalid subnet")
        assert_raises_rpc_error(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
        assert_equal(len(self.nodes[1].listbanned()), 1)  # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24

        self.log.info("setban remove: fail to unban a non-banned subnet")
        assert_raises_rpc_error(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
        assert_equal(len(self.nodes[1].listbanned()), 1)

        self.log.info("setban remove: successfully unban subnet")
        self.nodes[1].setban("127.0.0.0/24", "remove")
        assert_equal(len(self.nodes[1].listbanned()), 0)
        self.nodes[1].clearbanned()
        assert_equal(len(self.nodes[1].listbanned()), 0)

        self.log.info("setban: test persistence across node restart")
        self.nodes[1].setban("127.0.0.0/32", "add")
        self.nodes[1].setban("127.0.0.0/24", "add")
        # Set the mocktime so we can control when bans expire
        old_time = int(time.time())
        self.nodes[1].setmocktime(old_time)
        self.nodes[1].setban("192.168.0.1", "add", 1)  # ban for 1 seconds
        self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000)  # ban for 1000 seconds
        listBeforeShutdown = self.nodes[1].listbanned()
        assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
        # Move time forward by 3 seconds so the third ban has expired
        self.nodes[1].setmocktime(old_time + 3)
        assert_equal(len(self.nodes[1].listbanned()), 3)

        self.stop_node(1)
        self.start_node(1)

        listAfterShutdown = self.nodes[1].listbanned()
        assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
        assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
        assert_equal("/19" in listAfterShutdown[2]['address'], True)

        # Clear ban lists
        self.nodes[1].clearbanned()
        connect_nodes_bi(self.nodes, 0, 1)

        self.log.info("Test disconnectnode RPCs")

        self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid")
        address1 = self.nodes[0].getpeerinfo()[0]['addr']
        node1 = self.nodes[0].getpeerinfo()[0]['addr']
        assert_raises_rpc_error(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, node_id=node1)

        self.log.info("disconnectnode: fail to disconnect when calling with junk address")
        assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street")

        self.log.info("disconnectnode: successfully disconnect node by address")
        address1 = self.nodes[0].getpeerinfo()[0]['addr']
        self.nodes[0].disconnectnode(address=address1)
        wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
        assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]

        self.log.info("disconnectnode: successfully reconnect node")
        connect_nodes_bi(self.nodes, 0, 1)  # reconnect the node
        assert_equal(len(self.nodes[0].getpeerinfo()), 2)
        assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]

        self.log.info("disconnectnode: successfully disconnect node by node id")
        id1 = self.nodes[0].getpeerinfo()[0]['id']
        self.nodes[0].disconnectnode(node_id=id1)
        wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
        assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]

if __name__ == '__main__':
    DisconnectBanTest().main()
PNNutkung/Coursing-Field
coursing_field/wsgi.py
Python
apache-2.0
406
0
""" WSGI config for coursing_field project.
It exposes the WSGI callable as a module-level variable named ``application``. For more information on this file, see https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/ """ import os from django.core.wsgi import get_wsgi_application os.environ.setdefault("DJANGO_SETTINGS_MO
DULE", "coursing_field.settings") application = get_wsgi_application()
abramhindle/UnnaturalCodeFork
python/testdata/launchpad/lib/lp/app/widgets/tests/test_widget_doctests.py
Python
agpl-3.0
498
0
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).

__metaclass__ = type

import doctest
import unittest

from lp.testing.layers import DatabaseFunctionalLayer


def test_suite():
    suite = unittest.TestSuite()
    suite.layer = DatabaseFunctionalLayer
    suite.addTest(doctest.DocTestSuite('lp.app.widgets.textwidgets'))
    suite.addTest(doctest.DocTestSuite('lp.app.widgets.date'))
    return suite
GreatLakesEnergy/sesh-dash-beta
seshdash/api/forecast.py
Python
mit
1,952
0.019467
import forecastio


class ForecastAPI:
    _API_KEY = "8eefab4d187a39b993ca9c875fef6159"
    _LAZY = False
    _LAT = 0
    _LNG = 0
    _forecast = ()

    def __init__(self,key,lat,lng,lazy=False):
        self._LAT = lat
        self._LNG = lng
        self._API_KEY = key
        self._LAZY = lazy
        self._forecast = forecastio.load_forecast(self._API_KEY,self._LAT,self._LNG,lazy=lazy)

    def get_7day_forecast_detailed(self):
        return self._forecast.daily().data

    """
    Help getting cloud data from the future
    """
    def get_7_day_cloudCover(self):
        c_data = self._forecast.daily().data
        cloud_results = {}
        for day in c_data:
            cloud_results[day.time.isoformat()] = day.cloudCover
        return cloud_results

    """
    Helper on getting cloud sunrise and sunset data
    """
    def get_n_day_minimal_solar(self,n_days):
        c_data = self._forecast.daily().data
        sun_results = {}
        count = 0
        for day in c_data:
            if count < n_days:
                sun_results[day.time.isoformat()] = {"sunrise":day.sunriseTime,"sunset":day.sunsetTime,"stat":day.icon,"cloudcover":day.cloudCover}
                count = count + 1
        return sun_results

    """
    Helper on getting cloud sunrise and sunset data from the past
    """
    def get_historical_day_minimal_solar(self,days):
        #TODO get temp just for reference
        sun_results = {}
        for day in days:
            print "getting date for %s"%day
            self._forecast = forecastio.load_forecast(self._API_KEY,self._LAT,self._LNG,lazy=self._LAZY,time=day)
            c_data = self._forecast.daily().data
            for f_day in c_data:
                print "adding date for %s"%f_day
                sun_results[day.isoformat()] = {"sunrise":f_day.sunriseTime,"sunset":f_day.sunsetTime,"stat":f_day.icon,"cloudcover":f_day.cloudCover}
        return sun_results
becm/meson
mesonbuild/mconf.py
Python
apache-2.0
12,095
0.002232
# Copyright 2014-2016 The Meson development team

# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at

#     http://www.apache.org/licenses/LICENSE-2.0

# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
from . import coredata, environment, mesonlib, build, mintro, mlog
from .ast import AstIDGenerator

def add_arguments(parser):
    coredata.register_builtin_arguments(parser)
    parser.add_argument('builddir', nargs='?', default='.')
    parser.add_argument('--clearcache', action='store_true', default=False,
                        help='Clear cached state (e.g. found dependencies)')

def make_lower_case(val):
    if isinstance(val, bool):
        return str(val).lower()
    elif isinstance(val, list):
        return [make_lower_case(i) for i in val]
    else:
        return str(val)

class ConfException(mesonlib.MesonException):
    pass

class Conf:
    def __init__(self, build_dir):
        self.build_dir = os.path.abspath(os.path.realpath(build_dir))
        if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
            self.build_dir = os.path.dirname(self.build_dir)
        self.build = None
        self.max_choices_line_length = 60
        self.name_col = []
        self.value_col = []
        self.choices_col = []
        self.descr_col = []
        self.has_choices = False
        self.all_subprojects = set()
        self.yielding_options = set()

        if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
            self.build = build.load(self.build_dir)
            self.source_dir = self.build.environment.get_source_dir()
            self.coredata = coredata.load(self.build_dir)
            self.default_values_only = False
        elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
            # Make sure that log entries in other parts of meson don't interfere with the JSON output
            mlog.disable()
            self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
            intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
            intr.analyze()
            # Re-enable logging just in case
            mlog.enable()
            self.coredata = intr.coredata
            self.default_values_only = True
        else:
            raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))

    def clear_cache(self):
        self.coredata.deps.host.clear()
        self.coredata.deps.build.clear()

    def set_options(self, options):
        self.coredata.set_options(options)

    def save(self):
        # Do nothing when using introspection
        if self.default_values_only:
            return
        # Only called if something has changed so overwrite unconditionally.
        coredata.save(self.coredata, self.build_dir)
        # We don't write the build file because any changes to it
        # are erased when Meson is executed the next time, i.e. when
        # Ninja is run.

    def print_aligned(self):
        col_widths = (max([len(i) for i in self.name_col], default=0),
                      max([len(i) for i in self.value_col], default=0),
                      max([len(i) for i in self.choices_col], default=0))

        for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
            if self.has_choices:
                print('{0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3}'.format(*line, width=col_widths))
            else:
                print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))

    def split_options_per_subproject(self, options):
        result = {}
        for k, o in options.items():
            subproject = ''
            if ':' in k:
                subproject, optname = k.split(':')
                if o.yielding and optname in options:
                    self.yielding_options.add(k)
            self.all_subprojects.add(subproject)
            result.setdefault(subproject, {})[k] = o
        return result

    def _add_line(self, name, value, choices, descr):
        self.name_col.append(' ' * self.print_margin + name)
        self.value_col.append(value)
        self.choices_col.append(choices)
        self.descr_col.append(descr)

    def add_option(self, name, descr, value, choices):
        if isinstance(value, list):
            value = '[{0}]'.format(', '.join(make_lower_case(value)))
        else:
            value = make_lower_case(value)

        if choices:
            self.has_choices = True
            if isinstance(choices, list):
                choices_list = make_lower_case(choices)
                current = '['
                while choices_list:
                    i = choices_list.pop(0)
                    if len(current) + len(i) >= self.max_choices_line_length:
                        self._add_line(name, value, current + ',', descr)
                        name = ''
                        value = ''
                        descr = ''
                        current = ' '
                    if len(current) > 1:
                        current += ', '
                    current += i
                choices = current + ']'
            else:
                choices = make_lower_case(choices)
        else:
            choices = ''

        self._add_line(name, value, choices, descr)

    def add_title(self, title):
        titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
        if self.default_values_only:
            titles['value'] = 'Default Value'
        self._add_line('', '', '', '')
        self._add_line(title, titles['value'], titles['choices'], titles['descr'])
        self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))

    def add_section(self, section):
        self.print_margin = 0
        self._add_line('', '', '', '')
        self._add_line(section + ':', '', '', '')
        self.print_margin = 2

    def print_options(self, title, options):
        if not options:
            return
        if title:
            self.add_title(title)
        for k, o in sorted(options.items()):
            printable_value = o.printable_value()
            if k in self.yielding_options:
                printable_value = '<inherited from main project>'
            self.add_option(k, o.description, printable_value, o.choices)

    def print_conf(self):
        def print_default_values_warning():
            mlog.warning('The source directory instead of the build directory was specified.')
            mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')

        if self.default_values_only:
            print_default_values_warning()
            print('')

        print('Core properties:')
        print(' Source dir', self.source_dir)
        if not self.default_values_only:
            print(' Build dir ', self.build_dir)

        dir_option_names = ['bindir', 'datadir', 'includedir', 'infodir',
                            'libdir', 'libexecdir', 'localedir',
                            'localstatedir', 'mandir', 'prefix', 'sbindir',
                            'sharedstatedir', 'sysconfdir']
        test_option_names = ['errorlogs', 'stdsplit']
        core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]

        dir_options =
FluidityStokes/fluidity
tests/lagrangian_detectors_3d_1e2/get_RK_traj.py
Python
lgpl-2.1
640
0.029688
from scipy import *
from pylab import *

num_detectors = 100
x = 0.5+0.25*arange(0,float(num_detectors))/float(num_detectors)
y = zeros(num_detectors) + 0.5
t = 0.
n_cycles = 1
dt = 0.1/n_cycles
tmax = 8

def vel(x,y):
    return [-(y-0.5),x-0.5]

while(t<tmax):
    t = t + dt
    [k1_x,k1_y] = vel(x,y)
    [k2_x,k2_y] = vel(x+0.5*dt*k1_x,y+0.5*dt*k1_y)
    [k3_x,k3_y] = vel(x+0.5*dt*k2_x,y+0.5*dt*k2_y)
    [k4_x,k4_y] = vel(x+dt*k3_x,y+dt*k3_y)
    x = x + dt*(k1_x/6.+k2_x/3. + k3_x/3. + k4_x/6.)
    y = y + dt*(k1_y/6.+k2_y/3. + k3_y/3. + k4_y/6.)
    plot(x,y,'.')
#show()
x.tofile('Xvals.txt',sep=' ')
y.tofile('Yvals.txt',sep=' ')