| text stringlengths 6-947k | repo_name stringlengths 5-100 | path stringlengths 4-231 | language stringclasses 1 value | license stringclasses 15 values | size int64 6-947k | score float64 0-0.34 |
|---|---|---|---|---|---|---|
# -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2016-04-21 07:08
from __future__ import unicode_literals
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='AWSReport',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created', models.DateTimeField(default=django.utils.timezone.now, verbose_name='Created')),
('updated', models.DateTimeField(auto_now=True, verbose_name='Updated')),
('temperature', models.PositiveIntegerField(default=0, verbose_name='Temperature')),
('humidity', models.PositiveIntegerField(default=0, verbose_name='Humidity')),
('pressure', models.PositiveIntegerField(default=0, verbose_name='Pressure')),
('wind_speed', models.PositiveIntegerField(default=0, verbose_name='Wind Speed')),
('wind_direction', models.PositiveIntegerField(default=0, verbose_name='Wind Direction')),
('day_rain', models.PositiveIntegerField(default=0, verbose_name='Day Rain')),
('rain_rate', models.PositiveIntegerField(default=0, verbose_name='Rain Rate')),
('uv_index', models.PositiveIntegerField(default=0, verbose_name='UV Index')),
('solar_radiation', models.PositiveIntegerField(default=0, verbose_name='Solar Radiation')),
],
options={
'ordering': ['-updated', '-created'],
'get_latest_by': 'updated',
},
),
migrations.CreateModel(
name='AWSStation',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100, verbose_name='Name')),
('point', django.contrib.gis.db.models.fields.PointField(blank=True, null=True, srid=4326, verbose_name='Point')),
('note', models.TextField(blank=True, verbose_name='Note')),
],
options={
'abstract': False,
},
),
migrations.AddField(
model_name='awsreport',
name='awsstation',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='automaticweathersystem.AWSStation', verbose_name='AWS Station'),
),
]
| geoenvo/opendims | opendims/automaticweathersystem/migrations/0001_initial.py | Python | gpl-3.0 | 2,669 | 0.005995 |
# -*- coding: utf-8 -*-
import xbmc
import xbmcgui
import xbmcaddon
from utilities import xbmcJsonRequest, Debug, notification, chunks, get_bool_setting
__setting__ = xbmcaddon.Addon('script.myshows').getSetting
__getstring__ = xbmcaddon.Addon('script.myshows').getLocalizedString
add_episodes_to_myshows = get_bool_setting('add_episodes_to_myshows')
myshows_episode_playcount = get_bool_setting('myshows_episode_playcount')
xbmc_episode_playcount = get_bool_setting('xbmc_episode_playcount')
clean_myshows_episodes = get_bool_setting('clean_myshows_episodes')
progress = xbmcgui.DialogProgress()
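# Each compare_* helper below walks one XBMC show's episode list and returns
# the episodes that are missing from, or out of sync with, the corresponding
# myshows.tv season/episode data.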
def compare_show(xbmc_show, myshows_show):
missing = []
myshows_seasons = [x['season'] for x in myshows_show['seasons']]
for xbmc_episode in xbmc_show['episodes']:
if xbmc_episode['season'] not in myshows_seasons:
missing.append(xbmc_episode)
else:
for myshows_season in myshows_show['seasons']:
if xbmc_episode['season'] == myshows_season['season']:
if xbmc_episode['episode'] not in myshows_season['episodes']:
missing.append(xbmc_episode)
return missing
def compare_show_watched_myshows(xbmc_show, myshows_show):
missing = []
for xbmc_episode in xbmc_show['episodes']:
if xbmc_episode['playcount']:
if xbmc_episode['season'] not in [x['season'] for x in myshows_show['seasons']]:
missing.append(xbmc_episode)
else:
for myshows_season in myshows_show['seasons']:
if xbmc_episode['season'] == myshows_season['season']:
if xbmc_episode['episode'] not in myshows_season['episodes']:
missing.append(xbmc_episode)
return missing
def compare_show_watched_xbmc(xbmc_show, myshows_show):
missing = []
for xbmc_episode in xbmc_show['episodes']:
if not xbmc_episode['playcount']:
for myshows_season in myshows_show['seasons']:
if xbmc_episode['season'] == myshows_season['season']:
if xbmc_episode['episode'] in myshows_season['episodes']:
missing.append(xbmc_episode)
return missing
class SyncEpisodes():
def __init__(self, show_progress=False, api=None):
self.myshowsapi = api
if self.myshowsapi is None:
from myshowsapi import myshowsAPI
self.myshowsapi = myshowsAPI()
self.xbmc_shows = []
self.myshows_shows = {'collection': [], 'watched': []}
self.notify = __setting__('show_sync_notifications') == 'true'
self.show_progress = show_progress
if self.show_progress:
progress.create('%s %s' % (__getstring__(1400), __getstring__(1406)), line1=' ', line2=' ', line3=' ')
def Canceled(self):
if self.show_progress and progress.iscanceled():
Debug('[Episodes Sync] Sync was canceled by user')
return True
elif xbmc.abortRequested:
Debug('XBMC abort requested')
return True
else:
return False
def GetFromXBMC(self):
Debug('[Episodes Sync] Getting episodes from XBMC')
if self.show_progress:
progress.update(5, line1=__getstring__(1432), line2=' ', line3=' ')
shows = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetTVShows', 'params': {'properties': ['title', 'imdbnumber']}, 'id': 0})
# sanity check, test for empty result
if not shows:
Debug("[Episodes Sync] xbmc json request was empty.")
return
# test to see if tvshows key exists in xbmc json request
if 'tvshows' in shows:
shows = shows['tvshows']
Debug("[Episodes Sync] XBMC JSON Result: '%s'" % str(shows))
else:
Debug("[Episodes Sync] Key 'tvshows' not found")
return
if self.show_progress:
progress.update(10, line1=__getstring__(1433), line2=' ', line3=' ')
for show in shows:
if self.Canceled():
return
show['episodes'] = []
episodes = xbmcJsonRequest({'jsonrpc': '2.0', 'method': 'VideoLibrary.GetEpisodes', 'params': {'tvshowid': show['tvshowid'], 'properties': ['season', 'episode', 'playcount', 'uniqueid']}, 'id': 0})
if 'episodes' in episodes:
episodes = episodes['episodes']
show['episodes'] = [x for x in episodes if type(x) == type(dict())]
self.xbmc_shows = [x for x in shows if x['episodes']]
def GetCollectionFrommyshows(self):
Debug('[Episodes Sync] Getting episode collection from myshows.tv')
if self.show_progress:
progress.update(15, line1=__getstring__(1434), line2=' ', line3=' ')
self.myshows_shows['collection'] = self.myshowsapi.getShowLibrary()
def AddTomyshows(self):
Debug('[Episodes Sync] Checking for episodes missing from myshows.tv collection')
if self.show_progress:
progress.update(30, line1=__getstring__(1435), line2=' ', line3=' ')
add_to_myshows = []
myshows_imdb_index = {}
myshows_tvdb_index = {}
myshows_title_index = {}
for i in range(len(self.myshows_shows['collection'])):
if 'imdb_id' in self.myshows_shows['collection'][i]:
myshows_imdb_index[self.myshows_shows['collection'][i]['imdb_id']] = i
if 'tvdb_id' in self.myshows_shows['collection'][i]:
myshows_tvdb_index[self.myshows_shows['collection'][i]['tvdb_id']] = i
myshows_title_index[self.myshows_shows['collection'][i]['title']] = i
for xbmc_show in self.xbmc_shows:
missing = []
#IMDB ID
if xbmc_show['imdbnumber'].startswith('tt'):
if xbmc_show['imdbnumber'] not in myshows_imdb_index.keys():
missing = xbmc_show['episodes']
else:
myshows_show = self.myshows_shows['collection'][myshows_imdb_index[xbmc_show['imdbnumber']]]
missing = compare_show(xbmc_show, myshows_show)
#TVDB ID
elif xbmc_show['imdbnumber'].isdigit():
if xbmc_show['imdbnumber'] not in myshows_tvdb_index.keys():
missing = xbmc_show['episodes']
else:
myshows_show = self.myshows_shows['collection'][myshows_tvdb_index[xbmc_show['imdbnumber']]]
missing = compare_show(xbmc_show, myshows_show)
#Title
else:
if xbmc_show['title'] not in myshows_title_index.keys():
missing = xbmc_show['episodes']
else:
myshows_show = self.myshows_shows['collection'][myshows_title_index[xbmc_show['title']]]
missing = compare_show(xbmc_show, myshows_show)
if missing:
show = {'title': xbmc_show['title'], 'episodes': [{'episode': x['episode'], 'season': x['season'], 'episode_tvdb_id': x['uniqueid']['unknown']} for x in missing]}
Debug('[Episodes Sync][AddTomyshows] %s' % show)
if xbmc_show['imdbnumber'].isdigit():
show['tvdb_id'] = xbmc_show['imdbnumber']
else:
show['imdb_id'] = xbmc_show['imdbnumber']
add_to_myshows.append(show)
if add_to_myshows:
Debug('[Episodes Sync] %i show(s) have episodes added to myshows.tv collection' % len(add_to_myshows))
if self.show_progress:
progress.update(35, line1=__getstring__(1435), line2='%i %s' % (len(add_to_myshows), __getstring__(1436)))
for show in add_to_myshows:
if self.Canceled():
return
if self.show_progress:
progress.update(45, line1=__getstring__(1435), line2=show['title'].encode('utf-8', 'ignore'), line3='%i %s' % (len(show['episodes']), __getstring__(1437)))
self.myshowsapi.addEpisode(show)
else:
Debug('[Episodes Sync] myshows.tv episode collection is up to date')
def GetWatchedFrommyshows(self):
Debug('[Episodes Sync] Getting watched episodes from myshows.tv')
if self.show_progress:
progress.update(50, line1=__getstring__(1438), line2=' ', line3=' ')
self.myshows_shows['watched'] = self.myshowsapi.getWatchedEpisodeLibrary()
def UpdatePlaysmyshows(self):
Debug('[Episodes Sync] Checking watched episodes on myshows.tv')
if self.show_progress:
progress.update(60, line1=__getstring__(1438), line2=' ', line3=' ')
update_playcount = []
myshows_imdb_index = {}
myshows_tvdb_index = {}
myshows_title_index = {}
for i in range(len(self.myshows_shows['watched'])):
if 'imdb_id' in self.myshows_shows['watched'][i]:
myshows_imdb_index[self.myshows_shows['watched'][i]['imdb_id']] = i
if 'tvdb_id' in self.myshows_shows['watched'][i]:
myshows_tvdb_index[self.myshows_shows['watched'][i]['tvdb_id']] = i
myshows_title_index[self.myshows_shows['watched'][i]['title']] = i
xbmc_shows_watched = []
for show in self.xbmc_shows:
watched_episodes = [x for x in show['episodes'] if x['playcount']]
if watched_episodes:
xbmc_shows_watched.append(show)
for xbmc_show in xbmc_shows_watched:
missing = []
myshows_show = {}
#IMDB ID
if xbmc_show['imdbnumber'].startswith('tt') and xbmc_show['imdbnumber'] in myshows_imdb_index.keys():
myshows_show = self.myshows_shows['watched'][myshows_imdb_index[xbmc_show['imdbnumber']]]
#TVDB ID
elif xbmc_show['imdbnumber'].isdigit() and xbmc_show['imdbnumber'] in myshows_tvdb_index.keys():
myshows_show = self.myshows_shows['watched'][myshows_tvdb_index[xbmc_show['imdbnumber']]]
#Title
else:
if xbmc_show['title'] in myshows_title_index.keys():
myshows_show = self.myshows_shows['watched'][myshows_title_index[xbmc_show['title']]]
if myshows_show:
missing = compare_show_watched_myshows(xbmc_show, myshows_show)
else:
missing = [x for x in xbmc_show['episodes'] if x['playcount']]
if missing:
show = {'title': xbmc_show['title'], 'episodes': [{'episode': x['episode'], 'season': x['season'], 'episode_tvdb_id': x['uniqueid']['unknown']} for x in missing]}
Debug('[Episodes Sync][UpdatePlaysmyshows] %s' % show)
if xbmc_show['imdbnumber'].isdigit():
show['tvdb_id'] = xbmc_show['imdbnumber']
else:
show['imdb_id'] = xbmc_show['imdbnumber']
update_playcount.append(show)
if update_playcount:
Debug('[Episodes Sync] %i show(s) are missing playcounts on myshows.tv' % len(update_playcount))
if self.show_progress:
progress.update(65, line1=__getstring__(1438), line2='%i %s' % (len(update_playcount), __getstring__(1439)))
for show in update_playcount:
if self.Canceled():
return
if self.show_progress:
progress.update(70, line1=__getstring__(1438), line2=show['title'].encode('utf-8', 'ignore'), line3='%i %s' % (len(show['episodes']), __getstring__(1440)))
self.myshowsapi.updateSeenEpisode(show)
else:
Debug('[Episodes Sync] myshows.tv episode playcounts are up to date')
def UpdatePlaysXBMC(self):
Debug('[Episodes Sync] Checking watched episodes on XBMC')
if self.show_progress:
progress.update(80, line1=__getstring__(1441), line2=' ', line3=' ')
update_playcount = []
myshows_imdb_index = {}
myshows_tvdb_index = {}
myshows_title_index = {}
for i in range(len(self.myshows_shows['watched'])):
if 'imdb_id' in self.myshows_shows['watched'][i]:
myshows_imdb_index[self.myshows_shows['watched'][i]['imdb_id']] = i
if 'tvdb_id' in self.myshows_shows['watched'][i]:
myshows_tvdb_index[self.myshows_shows['watched'][i]['tvdb_id']] = i
myshows_title_index[self.myshows_shows['watched'][i]['title']] = i
for xbmc_show in self.xbmc_shows:
missing = []
myshows_show = None
#IMDB ID
if xbmc_show['imdbnumber'].startswith('tt') and xbmc_show['imdbnumber'] in myshows_imdb_index.keys():
myshows_show = self.myshows_shows['watched'][myshows_imdb_index[xbmc_show['imdbnumber']]]
#TVDB ID
elif xbmc_show['imdbnumber'].isdigit() and xbmc_show['imdbnumber'] in myshows_tvdb_index.keys():
myshows_show = self.myshows_shows['watched'][myshows_tvdb_index[xbmc_show['imdbnumber']]]
#Title
else:
if xbmc_show['title'] in myshows_title_index.keys():
myshows_show = self.myshows_shows['watched'][myshows_title_index[xbmc_show['title']]]
if myshows_show:
missing = compare_show_watched_xbmc(xbmc_show, myshows_show)
else:
Debug('[Episodes Sync] Failed to find %s on myshows.tv' % xbmc_show['title'])
if missing:
show = {'title': xbmc_show['title'], 'episodes': [{'episodeid': x['episodeid'], 'playcount': 1} for x in missing]}
update_playcount.append(show)
if update_playcount:
Debug('[Episodes Sync] %i show(s) are missing playcounts on XBMC' % len(update_playcount))
if self.show_progress:
progress.update(85, line1=__getstring__(1441), line2='%i %s' % (len(update_playcount), __getstring__(1439)))
for show in update_playcount:
if self.show_progress:
progress.update(85, line1=__getstring__(1441), line2=show['title'].encode('utf-8', 'ignore'), line3='%i %s' % (len(show['episodes']), __getstring__(1440)))
#split episode list into chunks of 50
chunked_episodes = chunks([{"jsonrpc": "2.0", "method": "VideoLibrary.SetEpisodeDetails", "params": show['episodes'][i], "id": i} for i in range(len(show['episodes']))], 50)
for chunk in chunked_episodes:
if self.Canceled():
return
xbmcJsonRequest(chunk)
else:
Debug('[Episodes Sync] XBMC episode playcounts are up to date')
def RemoveFrommyshows(self):
Debug('[Episodes Sync] Cleaning myshows tvshow collection')
if self.show_progress:
progress.update(90, line1=__getstring__(1445), line2=' ', line3=' ')
def convert_seasons(show):
episodes = []
if 'seasons' in show and show['seasons']:
for season in show['seasons']:
for episode in season['episodes']:
episodes.append({'season': season['season'], 'episode': episode})
return episodes
remove_from_myshows = []
indices = {'imdb_id': {}, 'tvdb_id': {}, 'title': {}}
for i in range(len(self.xbmc_shows)):
if self.xbmc_shows[i]['imdbnumber'].startswith('tt'):
indices['imdb_id'][self.xbmc_shows[i]['imdbnumber']] = i
if self.xbmc_shows[i]['imdbnumber'].isdigit():
indices['tvdb_id'][self.xbmc_shows[i]['imdbnumber']] = i
indices['title'][self.xbmc_shows[i]['title']] = i
for myshows_show in self.myshows_shows['collection']:
matched = False
remove = []
if 'tvdb_id' in myshows_show:
if myshows_show['tvdb_id'] in indices['tvdb_id']:
matched = 'tvdb_id'
if not matched and 'imdb_id' in myshows_show:
if myshows_show['imdb_id'] in indices['imdb_id']:
matched = 'imdb_id'
if not matched:
if myshows_show['title'] in indices['title']:
matched = 'title'
if matched:
xbmc_show = self.xbmc_shows[indices[matched][myshows_show[matched]]]
myshows_episodes = convert_seasons(myshows_show)
xbmc_episodes = [{'season': x['season'], 'episode': x['episode']} for x in xbmc_show['episodes']]
for episode in myshows_episodes:
if episode not in xbmc_episodes:
remove.append(episode)
else:
remove = convert_seasons(myshows_show)
if remove:
show = {'title': myshows_show['title'], 'year': myshows_show['year'], 'episodes': remove}
if matched:
show[matched] = myshows_show[matched]
remove_from_myshows.append(show)
if remove_from_myshows:
Debug('[Episodes Sync] %i show(s) will have episodes removed from myshows.tv collection' % len(remove_from_myshows))
if self.show_progress:
progress.update(90, line1=__getstring__(1445), line2='%i %s' % (len(remove_from_myshows), __getstring__(1446)))
for show in remove_from_myshows:
if self.Canceled():
return
if self.show_progress:
progress.update(95, line1=__getstring__(1445), line2=show['title'].encode('utf-8', 'ignore'), line3='%i %s' % (len(show['episodes']), __getstring__(1447)))
self.myshowsapi.removeEpisode(show)
else:
Debug('[Episodes Sync] myshows.tv episode collection is clean')
def Run(self):
if not self.show_progress and __setting__('sync_on_update') == 'true' and self.notify:
notification('%s %s' % (__getstring__(1400), __getstring__(1406)), __getstring__(1420)) #Sync started
self.GetFromXBMC()
# sanity check, test for non-empty xbmc movie list
if self.xbmc_shows:
if not self.Canceled() and add_episodes_to_myshows:
self.GetCollectionFrommyshows()
if not self.Canceled():
self.AddTomyshows()
if myshows_episode_playcount or xbmc_episode_playcount:
if not self.Canceled():
self.GetWatchedFrommyshows()
if not self.Canceled() and myshows_episode_playcount:
self.UpdatePlaysmyshows()
if xbmc_episode_playcount:
if not self.Canceled():
self.UpdatePlaysXBMC()
if clean_myshows_episodes:
if not self.Canceled() and not add_episodes_to_myshows:
self.GetCollectionFrommyshows()
if not self.Canceled():
self.RemoveFrommyshows()
else:
Debug("[Episodes Sync] XBMC Show list is empty, aborting Episodes Sync.")
if not self.show_progress and __setting__('sync_on_update') == 'true' and self.notify:
notification('%s %s' % (__getstring__(1400), __getstring__(1406)), __getstring__(1421)) #Sync complete
if not self.Canceled() and self.show_progress:
progress.update(100, line1=__getstring__(1442), line2=' ', line3=' ')
progress.close()
Debug('[Episodes Sync] Complete') | DiMartinoX/plugin.video.kinopoisk.ru | script.myshows/episode_sync.py | Python | gpl-3.0 | 16,581 | 0.024365 |
from setuptools import setup, find_packages
version = "6.3.0"
with open("requirements.txt", "r") as f:
install_requires = f.readlines()
setup(
name='frappe',
version=version,
description='Metadata driven, full-stack web framework',
author='Frappe Technologies',
author_email='info@frappe.io',
packages=find_packages(),
zip_safe=False,
include_package_data=True,
install_requires=install_requires
)
| indictranstech/omnitech-frappe | setup.py | Python | mit | 438 | 0.004566 |
from django.shortcuts import render, redirect
from django.http import Http404
from events.forms import Event
from django.views.generic import View
from events.services import PlacesService
from events.forms import ImageUploadForm
from django.contrib import messages
import uuid
class ChooseImageView(View):
template_name = "events/choose_image.html"
form_action = "Edit"
def __init__(self):
self.places_service = PlacesService()
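# GET: fetch candidate images for the event location from Google Places and
# Street View; when an "image_idx" query parameter is present, attach the
# selected remote image to the event and go back to the event list.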
def get(self, request, event_id):
event = self.get_event_or_404(event_id, request.user.id)
images = self.places_service.get_images_google_place(event.location.place_id, "en")
images += self.places_service.get_images_street_view(event.location_lat, event.location_lng)
images = [{'idx': idx, 'url': image} for idx, image in enumerate(images)]
if "image_idx" in request.GET:
event.get_remote_image(images[int(request.GET['image_idx'])]['url'])
return redirect("list_events")
return render(request, self.template_name,
{'images': images, 'form': ImageUploadForm()})
def post(self, request, event_id):
form = ImageUploadForm(request.POST, request.FILES, instance=Event.objects.get(pk=event_id))
if form.is_valid():
form.save()
messages.success(request, "Image uploaded successfully!")
return redirect("list_events")
else:
messages.error(request, 'Invalid file, try again')
return redirect("edit_event_image", event_id)
@staticmethod
def get_event_or_404(event_id, user_id):
event = Event.objects.filter(pk=event_id, hosted_by=user_id).first()
if not event:
raise Http404()
return event | thiagoferreiraw/mixapp | events/views/choose_image_view.py | Python | mit | 1,771 | 0.003953 |
import player
import pygame
import menu
import settings as s
pygame.init()
pygame.mixer.init()
class Game:
def __init__(self, numplayers=2, doublebull='ON', mpcalc='ROUND'):
#GAME INFO
self.mpcalc = mpcalc
self.doublebull = doublebull
self.numplayers = numplayers
self.mainLoop = True
self.clock = pygame.time.Clock()
self.valid_marks = [i for i in range(21)]
self.valid_marks[0] = 'Bull'
self.roundNum = 1
self.players = []
#ACTIVE PLAYER ID
self.ap = 0
self.init_players()
self.total_darts_thrown = 0
self.darts_remaining = 3
self.next_turn_flag = False
self.last_dart = None
#SOUNDS
self.soundSingle = pygame.mixer.Sound(s.HIT_SOUND)
#EVENT FOR NORMAL PROGRESSION TO NEXT TURN
self.ENDTURN = pygame.USEREVENT+1
self.end_turn_event = pygame.event.Event(self.ENDTURN)
#EVENT FOR FORCED PROGRESSION TO NEXT TURN
self.forced_next_turn = False
def init_players(self):
for i in range(self.numplayers):
self.players.append(player.Player(pnum=i))
def check_bust(self, prev_total):
pass
def check_winner(self):
pass
def update_current_score(self):
pass
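# Turn bookkeeping: each player throws 3 darts per turn, so the remaining
# darts and the current round are both derived from total_darts_thrown.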
def update_darts_remaining(self):
self.darts_remaining = 3 - (self.total_darts_thrown % 3)
def update_round(self):
if self.total_darts_thrown % (3 * self.numplayers) == 0:
self.roundNum += 1
def update_total_score(self, cur_score, direction):
if direction == 'up':
self.players[self.ap].total_score += cur_score
elif direction == 'down':
self.players[self.ap].total_score -= cur_score
def update_turn(self):
self.update_round()
self.ap += 1
if self.ap > self.numplayers-1:
self.ap = 0
message_text = ' Up Next'
sleep_secs = 5
self.render_dropdown(message_text, sleep_secs, self.mpcalc)
self.players[self.ap].prev_round_score = self.players[self.ap].total_score
self.players[self.ap].current_score = 0
self.next_turn_flag = False
def update_game(self, last_dart):
if last_dart.segment[1] == 1:
self.soundSingle.play()
elif last_dart.segment[1] == 2:
self.soundSingle.play(1)
elif last_dart.segment[1] == 3:
self.soundSingle.play(2)
self.total_darts_thrown += 1
self.update_darts_remaining()
self.update_current_score()
if self.total_darts_thrown % 3 == 0:
pygame.event.post(self.end_turn_event)
if self.forced_next_turn:
self.next_turn_flag = False
else:
self.next_turn_flag = True
self.players[self.ap].add_dart(self.roundNum, self.last_dart, self.valid_marks)
self.check_winner()
def on_event(self, events):
for event in events:
if event.type == pygame.QUIT:
self.manager.go_to(menu.Menu())
if event.type == pygame.MOUSEMOTION:
print(pygame.mouse.get_pos())
if event.type == self.ENDTURN:
print('game on_event ENDTURN')
self.update_turn()
if event.type == pygame.KEYDOWN:
if event.key == pygame.K_ESCAPE:
self.manager.go_to(menu.Menu())
def on_gpio(self, segment):
print('game on_gpio')
self.players[self.ap].throw_dart(segment)
self.last_dart = self.players[self.ap].last_dart
self.update_game(self.last_dart)
| DrewMcCarthy/dartboard | game.py | Python | apache-2.0 | 3,208 | 0.037406 |
# Copyright (c) 2011 - Rui Batista <ruiandrebatista@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import functools
import inspect
import sys
import pygame
def key_event(*keys):
def wrap(f):
f.__key_events__ = keys
return f
return wrap
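# _KeyHandlerMeta collects every method tagged with @key_event into a
# class-level __key_handlers__ mapping (pygame key constant -> handler
# function), which PygameMainLoop.dispatch_event uses to route KEYDOWN events.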
class _KeyHandlerMeta(type):
def __new__(cls, name, bases, dct):
if not '__key_handlers__' in dct:
dct['__key_handlers__'] = {}
for v in dct.values():
if hasattr(v, '__key_events__') and callable(v):
for e in v.__key_events__:
dct['__key_handlers__'][e] = v
return type.__new__(cls, name, bases, dct)
class PygameMainLoop(object):
__metaclass__ = _KeyHandlerMeta
def __init__(self):
self._mainloop_running = False
self._retval = None
def run(self):
self.on_run()
self._mainloop_running = True
while self._mainloop_running:
self.run_before()
for event in self.get_events():
self.dispatch_event(event)
self.run_after()
return self._retval
def quit(self, retval=None):
self._retval = retval
self._mainloop_running = False
def dispatch_event(self, event):
if event.type == pygame.QUIT:
self.on_quit_event()
elif event.type == pygame.KEYDOWN and event.key in self.__key_handlers__:
self.__key_handlers__[event.key](self,event)
else:
self.on_event_default(event)
def on_quit_event(self):
pygame.quit()
sys.exit(0)
def get_events(self):
return pygame.event.get()
def run_before(self):
pass
def run_after(self):
pass
def on_run(self):
pass
def on_event_default(self, event):
pass
class VoiceDialog(PygameMainLoop):
@key_event(pygame.K_ESCAPE)
def escape(self, event):
self.quit(None)
def get_events(self):
return [pygame.event.wait()]
| ragb/sudoaudio | sudoaudio/core.py | Python | gpl-3.0 | 2,629 | 0.002282 |
import os
import sys
import imp
import multiprocessing
from setuptools import setup, find_packages
version = imp.load_source('version', os.path.join('flask_appbuilder', 'version.py'))
def fpath(name):
return os.path.join(os.path.dirname(__file__), name)
def read(fname):
return open(fpath(fname)).read()
def desc():
return read('README.rst')
setup(
name='Flask-AppBuilder',
version=version.VERSION_STRING,
url='https://github.com/dpgaspar/flask-appbuilder/',
license='BSD',
author='Daniel Vaz Gaspar',
author_email='danielvazgaspar@gmail.com',
description='Simple and rapid application development framework, built on top of Flask. includes detailed security, auto CRUD generation for your models, google charts and much more.',
long_description=desc(),
packages=find_packages(),
package_data={'': ['LICENSE']},
entry_points={'console_scripts': [
'fabmanager = flask_appbuilder.console:cli',
]},
include_package_data=True,
zip_safe=False,
platforms='any',
install_requires=[
'colorama>=0.3',
'click>=3.0',
'Flask>=0.10',
'Flask-BabelPkg>=0.9.4',
'Flask-Login>=0.2.0',
'Flask-OpenID>=1.1.0',
'Flask-SQLAlchemy>=0.16',
'Flask-WTF>=0.9.1',
],
tests_require=[
'nose>=1.0',
],
classifiers=[
'Development Status :: 5 - Production/Stable',
'Environment :: Web Environment',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules'
],
test_suite='nose.collector'
)
| rpiotti/Flask-AppBuilder | setup.py | Python | bsd-3-clause | 1,758 | 0.003413 |
from .test_antivirus import AbstractTests
import modules.antivirus.avg.avg as module
import modules.antivirus.base as base
from mock import patch
from pathlib import Path
class TestAvg(AbstractTests.TestAntivirus):
name = "AVG AntiVirus Free (Linux)"
scan_path = Path("/usr/bin/avgscan")
scan_args = ('--heur', '--paranoid', '--arc', '--macrow', '--pwdw',
'--pup')
module = module.AVGAntiVirusFree
scan_clean_stdout = """AVG command line Anti-Virus scanner
Copyright (c) 2013 AVG Technologies CZ
Virus database version: 4793/15678
Virus database release date: Mon, 21 May 2018 13:00:00 +0000
Files scanned : 1(1)
Infections found : 0(0)
PUPs found : 0
Files healed : 0
Warnings reported : 0
Errors reported : 0
"""
scan_virus_retcode = 4
virusname = "EICAR_Test"
scan_virus_stdout = """AVG command line Anti-Virus scanner
Copyright (c) 2013 AVG Technologies CZ
Virus database version: 4793/15678
Virus database release date: Mon, 21 May 2018 13:00:00 +0000
eicar.com.txt Virus identified EICAR_Test
Files scanned : 1(1)
Infections found : 1(1)
PUPs found : 0
Files healed : 0
Warnings reported : 0
Errors reported : 0
"""
version = "13.0.3118"
virus_database_version = "4793/15678 (21 May 2018)"
version_stdout = """AVG command line controller
Copyright (c) 2013 AVG Technologies CZ
------ AVG status ------
AVG version : 13.0.3118
Components version : Aspam:3111, Cfg:3109, Cli:3115, Common:3110, Core:4793, Doc:3115, Ems:3111, Initd:3113, Lng:3112, Oad:3118, Other:3109, Scan:3115, Sched:3110, Update:3109
Last update : Tue, 22 May 2018 07:52:31 +0000
------ License status ------
License number : LUOTY-674PL-VRWOV-APYEG-ZXHMA-E
License version : 10
License type : FREE
License expires on :
Registered user :
Registered company :
------ WD status ------
Component State Restarts UpTime
Avid running 0 13 minute(s)
Oad running 0 13 minute(s)
Sched running 0 13 minute(s)
Tcpd running 0 13 minute(s)
Update stopped 0 -
------ Sched status ------
Task name Next runtime Last runtime
Virus update Tue, 22 May 2018 18:04:00 +0000 Tue, 22 May 2018 07:46:29 +0000
Program update - -
User counting Wed, 23 May 2018 07:46:29 +0000 Tue, 22 May 2018 07:46:29 +0000
------ Tcpd status ------
E-mails checked : 0
SPAM messages : 0
Phishing messages : 0
E-mails infected : 0
E-mails dropped : 0
------ Avid status ------
Virus database reload times : 0
Virus database version : 4793/15678
Virus database release date : Mon, 21 May 2018 13:00:00 +0000
Virus database shared in memory : yes
------ Oad status ------
Files scanned : 0(0)
Infections found : 0(0)
PUPs found : 0
Files healed : 0
Warnings reported : 0
Errors reported : 0
Operation successful.
""" # nopep8
@patch.object(base.AntivirusUnix, "locate")
@patch.object(base.AntivirusUnix, "locate_one")
@patch.object(base.AntivirusUnix, "run_cmd")
def setUp(self, m_run_cmd, m_locate_one, m_locate):
m_run_cmd.return_value = 0, self.version_stdout, ""
m_locate_one.return_value = self.scan_path
m_locate.return_value = self.database
super().setUp()
@patch.object(module, "locate_one")
@patch.object(base.AntivirusUnix, "run_cmd")
def test_get_virus_db_error(self, m_run_cmd, m_locate_one):
m_locate_one.return_value = self.scan_path
m_run_cmd.return_value = -1, self.version_stdout, ""
with self.assertRaises(RuntimeError):
self.plugin.get_virus_database_version()
@patch.object(module, "locate_one")
@patch.object(base.AntivirusUnix, "run_cmd")
def test_get_virus_db_no_version(self, m_run_cmd, m_locate_one):
m_locate_one.return_value = self.scan_path
wrong_stdout = "LOREM IPSUM"
m_run_cmd.return_value = 0, wrong_stdout, ""
with self.assertRaises(RuntimeError):
self.plugin.get_virus_database_version()
@patch.object(module, "locate_one")
@patch.object(base.AntivirusUnix, "run_cmd")
def test_get_virus_db_no_release(self, m_run_cmd, m_locate_one):
m_locate_one.return_value = self.scan_path
wrong_stdout = "Virus database version : 4793/15678"
m_run_cmd.return_value = 0, wrong_stdout, ""
version = self.plugin.get_virus_database_version()
self.assertEquals(version, "4793/15678")
| quarkslab/irma | probe/tests/modules/antivirus/test_avg.py | Python | apache-2.0 | 4,618 | 0 |
#-----------------------------------------------------------------------------
# Copyright (c) 2014, HFTools Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#-----------------------------------------------------------------------------
import os
import numpy as np
import hftools.dataset.arrayobj as aobj
import hftools.networks.spar_functions as spfun
from hftools.testing import TestCase
basepath = os.path.split(__file__)[0]
def make_array(a):
dims = (aobj.DimSweep("f", len(a)),
aobj.DimMatrix_i("i", 2),
aobj.DimMatrix_j("j", 2))
return aobj.hfarray(a, dims=dims)
class Test_cascade(TestCase):
def setUp(self):
self.a = make_array([[[0, 1], [1, 0j]]])
self.b = make_array([[[0, 2], [2, 0j]]])
self.c = make_array([[[0.1, 0j], [0, 0.1]]])
def test_cascade_1(self):
r = spfun.cascadeS(self.a, self.a)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_2(self):
r = spfun.cascadeS(self.a, self.b)
self.assertTrue(np.allclose(r, self.b))
def test_cascade_3(self):
r = spfun.cascadeS(self.b, self.b)
self.assertTrue(np.allclose(r, self.a * 4))
def test_cascade_4(self):
r = spfun.cascadeS(self.a, self.c)
self.assertTrue(np.allclose(r, self.c))
def test_cascade_5(self):
r = spfun.cascadeS(self.b, self.c)
self.assertTrue(np.allclose(r, make_array([[[0.4, 0j], [0, 0.1]]])))
def test_cascade_6(self):
r = spfun.cascadeS(self.c, self.b)
self.assertTrue(np.allclose(r, make_array([[[0.1, 0j], [0, 0.4]]])))
class Test_deembedleft(TestCase):
def setUp(self):
self.a = make_array([[[0, 1], [1, 0j]]])
self.b = make_array([[[0, 2], [2, 0j]]])
self.c = make_array([[[0.1, 0j], [0, 0.1]]])
def test_cascade_1(self):
r = spfun.deembedleft(self.a, self.a)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_2(self):
r = spfun.deembedleft(self.b, self.b)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_3(self):
r = spfun.deembedleft(self.b, self.c)
self.assertTrue(np.allclose(r, make_array([[[0.025, 0j], [0, 0.1]]])))
class Test_deembedright(TestCase):
def setUp(self):
self.a = make_array([[[0, 1], [1, 0j]]])
self.b = make_array([[[0, 2], [2, 0j]]])
self.c = make_array([[[0.1, 0j], [0, 0.1]]])
def test_cascade_1(self):
r = spfun.deembedright(self.a, self.a)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_2(self):
r = spfun.deembedright(self.b, self.b)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_3(self):
r = spfun.deembedright(self.c, self.b)
self.assertTrue(np.allclose(r, make_array([[[0.1, 0j], [0, 0.025]]])))
class Test_deembed(TestCase):
def setUp(self):
self.a = make_array([[[0, 1], [1, 0j]]])
self.b = make_array([[[0, 2], [2, 0j]]])
self.c = make_array([[[0.1, 0j], [0, 0.1]]])
def test_cascade_1(self):
r = spfun.deembed(self.a, self.a, self.a)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_2(self):
r = spfun.deembed(self.b, self.b, self.a)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_3(self):
r = spfun.deembed(self.a, self.b, self.b)
self.assertTrue(np.allclose(r, self.a))
def test_cascade_4(self):
r = spfun.deembed(self.b, self.c, self.b)
self.assertAllclose(r, make_array([[[0.025, 0j], [0, 0.025]]]))
| hftools/hftools | hftools/networks/tests/test_spar_functions.py | Python | bsd-3-clause | 3,696 | 0.000812 |
#-*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011-2014 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : contact@netzob.org |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| File contributors : |
#| - Georges Bossert <georges.bossert (a) supelec.fr> |
#| - Frédéric Guihéry <frederic.guihery (a) amossys.fr> |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Standard library imports |
#+---------------------------------------------------------------------------+
import uuid
#+---------------------------------------------------------------------------+
#| Related third party imports |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Local application imports |
#+---------------------------------------------------------------------------+
from netzob.Common.Utils.Decorators import typeCheck, NetzobLogger
from netzob.Common.Utils.TypedList import TypedList
from netzob.Common.Models.Types.TypeConverter import TypeConverter
from netzob.Common.Models.Types.Raw import Raw
from netzob.Common.Models.Types.HexaString import HexaString
@NetzobLogger
class NetzobRegex(object):
"""Represents a regex describing field boundaries. Static methods
can be used to build the regex. Don't use the constructor unless you really
know what you are doing. Instead we highly recommend the use of the static methods offered to
build different types of regex.
If you still want to use the constructor, don't specify the group since it will
be automatically added. For example, if your regex is (.*), only specify .* and forget the
() that will be added. In addition the constructor will also generate and add the group identifier. Your
regex will therefore look like : (?P<f45689676567987628>.*).
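A minimal illustration (an added sketch: the group identifier is generated
from uuid4, so only its shape is checked here):
>>> nRegex = NetzobRegex.buildDefaultRegex()
>>> str(nRegex).startswith("(?P<f") and str(nRegex).endswith(">.*)")
True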
"""
DEFAULT_REGEX = '.*'
def __init__(self):
self.id = 'f' + str(uuid.uuid4().hex)
self.regex = NetzobRegex.DEFAULT_REGEX
@property
def regex(self):
return self.__regex
@regex.setter
@typeCheck(str)
def regex(self, regex):
if regex is None:
raise TypeError("The regex cannot be None")
self.__regex = "(?P<{0}>{1})".format(self.id, regex)
@property
def id(self):
return self.__id
@id.setter
@typeCheck(str)
def id(self, _id):
if _id is None:
raise TypeError("Id cannot be None")
if len(_id) == 0:
raise ValueError("Id must be defined (len>0)")
self.__id = _id
def __str__(self):
return str(self.regex)
def finalRegex(self):
"""This method returns the current regex with the starting and ending indicators
added. For this reason, the returned regex can't be merged with other regexes.
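A short added sketch of the indicators:
>>> nRegex = NetzobRegex.buildDefaultRegex()
>>> nRegex.finalRegex()[0], nRegex.finalRegex()[-1]
('^', '$')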
:return: a string with the final regex definition including start and end indicators
:rtype: :class:`str`.
"""
return "^" + str(self) + "$"
@staticmethod
def buildDefaultRegex():
"""It creates the default regex which means
we have no knowledge of the format of the field.
>>> from netzob.all import *
>>> import regex as re
>>> data = "Hello netzob, a default regex grabs everything."
>>> hexData = TypeConverter.convert(data, ASCII, HexaString)
>>> nRegex = NetzobRegex.buildDefaultRegex()
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello netzob, a default regex grabs everything.
:return: a .* default NetzobRegex
:rtype: :class:`netzob.Common.Utils.NetzobRegex.NetzobRegex`
"""
regex = NetzobRegex()
regex.regex = '.*'
return regex
@staticmethod
def buildRegexForStaticValue(value):
"""It creates a NetzobRegex which represents
a regex with the specified Raw static value.
>>> from netzob.all import *
>>> import regex as re
>>> data = "Hello netzob"
>>> hexData = TypeConverter.convert(data, ASCII, HexaString)
>>> nRegex = NetzobRegex.buildRegexForStaticValue(data)
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello netzob
:param value: the static value the regex must represent
:type value: python raw (will be encoded in HexaString in the regex)
:return: the regex which represents the specified value encoded as a HexaString
:type: :class:`netzob.Common.Utils.NetzobRegex.NetzobRegex`
"""
hexaStringValue = TypeConverter.convert(value, Raw, HexaString)
return NetzobStaticRegex(hexaStringValue)
@staticmethod
def buildRegexForEol():
"""It creates a NetzobRegex which represents an EOL
:return: the regex which represents an EOL
:type: :class:`netzob.Common.Utils.NetzobRegex.NetzobRegex`
"""
return NetzobEolRegex()
@staticmethod
def buildRegexForSizedValue(size):
return NetzobSizedRegex(size)
@staticmethod
def buildRegexForAlternativeRegexes(regexes):
return NetzobAlternativeRegex(regexes)
@staticmethod
def buildRegexForAggregateRegexes(regexes):
return NetzobAggregateRegex(regexes)
@NetzobLogger
class NetzobSizedRegex(NetzobRegex):
"""Represents a sized regex.
>>> from netzob.Common.Utils.NetzobRegex import NetzobRegex
>>> from netzob.all import *
>>> import regex as re
>>> data = "Hello netzob"
>>> hexData = TypeConverter.convert(data, ASCII, HexaString)
>>> nRegex = NetzobRegex.buildRegexForSizedValue((8*4,8*5))
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello
>>> nRegex = NetzobRegex.buildRegexForSizedValue((None, None))
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello netzob
>>> nRegex = NetzobRegex.buildRegexForSizedValue((16, None))
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello netzob
>>> nRegex = NetzobRegex.buildRegexForSizedValue((None, 80))
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello netz
"""
def __init__(self, size):
super(NetzobSizedRegex, self).__init__()
self.size = size
def __updateRegex(self):
(minSize, maxSize) = self.size
if maxSize is None:
maxSize = ''
self.regex = ".{" + str(minSize) + "," + str(maxSize) + "}"
@property
def size(self):
return self.__size
@size.setter
def size(self, size):
(minSize, maxSize) = size
if minSize is None:
minSize = 0
minSize = minSize / 4
if maxSize is not None:
maxSize = maxSize / 4
if minSize < 0 or maxSize < 0:
raise ValueError("The value min and max cannot be inferior to 0")
if maxSize < minSize:
raise ValueError("The max size must be superior to the min size")
self.__size = (minSize, maxSize)
self.__updateRegex()
@NetzobLogger
class NetzobAggregateRegex(NetzobRegex):
"""Represents an aggregate regex.
Below is an example of such aggregate regex with two aggregated regexes..
>>> from netzob.all import *
>>> import regex as re
>>> data = "Hello netzob, what's up ?"
>>> hexData = TypeConverter.convert(data, ASCII, HexaString)
>>> regex1 = NetzobRegex.buildRegexForStaticValue("Hello netzob")
>>> regex2 = NetzobRegex.buildRegexForStaticValue(", what's up ?")
>>> nRegex = NetzobRegex.buildRegexForAggregateRegexes([regex1, regex2])
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(regex1.id):dynamicDatas.end(regex1.id)], HexaString, ASCII)
Hello netzob
>>> print TypeConverter.convert(hexData[dynamicDatas.start(regex2.id):dynamicDatas.end(regex2.id)], HexaString, ASCII)
, what's up ?
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello netzob, what's up ?
"""
def __init__(self, children):
super(NetzobAggregateRegex, self).__init__()
self.__children = TypedList(NetzobRegex)
self.children = children
def __updateRegex(self):
self.regex = "".join([str(child) for child in self.children])
@property
def children(self):
return self.__children
@children.setter
def children(self, children):
self._logger.debug("PAN {0}".format(children))
# for child in children:
# if child is None:
# raise TypeError("No child can be None")
for child in children:
if child is not None:
self.__children.append(child)
self.__updateRegex()
@NetzobLogger
class NetzobAlternativeRegex(NetzobRegex):
"""Represents an alternative regex.
>>> from netzob.all import *
>>> import random
>>> import regex as re
>>> possibleData =["Netzob", "Zoby"]
>>> data = random.choice(possibleData)
>>> hexData = TypeConverter.convert(data, ASCII, HexaString)
>>> regex1 = NetzobRegex.buildRegexForStaticValue("Netzob")
>>> regex2 = NetzobRegex.buildRegexForStaticValue("Zoby")
>>> nRegex = NetzobRegex.buildRegexForAlternativeRegexes([regex1, regex2])
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> matchedData = TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
>>> matchedData in possibleData
True
"""
def __init__(self, children):
super(NetzobAlternativeRegex, self).__init__()
self.__children = TypedList(NetzobRegex)
self.children = children
def __updateRegex(self):
self.regex = "|".join([str(child) for child in self.children])
@property
def children(self):
return self.__children
@children.setter
def children(self, children):
for child in children:
if child is None:
raise TypeError("No child can be None")
for child in children:
self.__children.append(child)
self.__updateRegex()
@NetzobLogger
class NetzobStaticRegex(NetzobRegex):
"""Represents a regex with a static value.
Example of a static regex:
>>> from netzob.all import *
>>> import regex as re
>>> data = "Hello netzob !"
>>> hexData = TypeConverter.convert(data, ASCII, HexaString)
>>> nRegex = NetzobRegex.buildRegexForStaticValue("Hello netzob !")
>>> compiledRegex = re.compile(str(nRegex))
>>> dynamicDatas = compiledRegex.match(hexData)
>>> print TypeConverter.convert(hexData[dynamicDatas.start(nRegex.id):dynamicDatas.end(nRegex.id)], HexaString, ASCII)
Hello netzob !
To illustrate that only a hexastring can be specified
>>> regex = NetzobStaticRegex("toto")
Traceback (most recent call last):
...
ValueError: The specified value cannot be parse as an HexaString: toto
"""
def __init__(self, staticValue):
super(NetzobStaticRegex, self).__init__()
self.staticValue = staticValue
def __updateRegex(self):
"""This method must be called every time the static
value is modified"""
self.regex = self.staticValue
@property
def staticValue(self):
"""The static value which current regex represents.
:type: str: an hexastring
:raise: TypeError is param not valid
"""
return self.__staticValue
@staticValue.setter
@typeCheck(str)
def staticValue(self, staticValue):
if staticValue is None:
raise TypeError("Static value cannot be None")
if not HexaString().canParse(staticValue):
raise ValueError("The specified value cannot be parse as an HexaString: {0}".format(str(staticValue)))
self.__staticValue = staticValue.lower()
self.__updateRegex()
| dasbruns/netzob | src/netzob/Common/Utils/NetzobRegex.py | Python | gpl-3.0 | 15,254 | 0.004132 |
#!/usr/bin/env python
# Author : Pierre Schnizer
"""
This module describes routines for computing Chebyshev
approximations to univariate functions. A Chebyshev approximation is a
truncation of the series \M{f(x) = S{sum} c_n T_n(x)}, where the Chebyshev
polynomials \M{T_n(x) = cos(n \arccos x)} provide an orthogonal basis of
polynomials on the interval [-1,1] with the weight function
\M{1 / sqrt{1-x^2}}. The first few Chebyshev polynomials are, T_0(x) = 1,
T_1(x) = x, T_2(x) = 2 x^2 - 1.
def f(x, p):
if x < 0.5:
return 0.25
else:
return 0.75
n = 10000;
cs = cheb_series(40)
F = gsl_function(f, None)
cs.init(F, 0.0, 1.0)
nf = float(n)
for i in range(100):
x = i / nf
r10 = cs.eval_n(10, x)
r40 = cs.eval(x)
print "%g %g %g %g" % (x, f(x, None), r10, r40)
"""
import _callback
from _generic_solver import _workspace
from gsl_function import gsl_function
class cheb_series(_workspace):
"""
This class manages all internal detail. It provides the space for a
Chebyshev series of order N.
"""
_alloc = _callback.gsl_cheb_alloc
_free = _callback.gsl_cheb_free
_init = _callback.gsl_cheb_init
_eval = _callback.gsl_cheb_eval
_eval_err = _callback.gsl_cheb_eval_err
_eval_n = _callback.gsl_cheb_eval_n
_eval_n_err = _callback.gsl_cheb_eval_n_err
#_eval_mode = _callback.gsl_cheb_eval_mode
#_eval_mode_e = _callback.gsl_cheb_eval_mode_e
_calc_deriv = _callback.gsl_cheb_calc_deriv
_calc_integ = _callback.gsl_cheb_calc_integ
_get_coeff = _callback.pygsl_cheb_get_coefficients
_set_coeff = _callback.pygsl_cheb_set_coefficients
_get_a = _callback.pygsl_cheb_get_a
_set_a = _callback.pygsl_cheb_set_a
_get_b = _callback.pygsl_cheb_get_b
_set_b = _callback.pygsl_cheb_set_b
_get_f = _callback.pygsl_cheb_get_f
_set_f = _callback.pygsl_cheb_set_f
_get_order_sp = _callback.pygsl_cheb_get_order_sp
_set_order_sp = _callback.pygsl_cheb_set_order_sp
def __init__(self, size):
"""
input : n
@params n : number of coefficients
"""
self._size = size
_workspace.__init__(self, size)
def init(self, f, a, b):
"""
This function computes the Chebyshev approximation for the
function F over the range (a,b) to the previously specified order.
The computation of the Chebyshev approximation is an \M{O(n^2)}
process, and requires n function evaluations.
input : f, a, b
@params f : a gsl_function
@params a : lower limit
@params b : upper limit
"""
return self._init(self._ptr, f.get_ptr(), a, b)
def eval(self, x):
"""
This function evaluates the Chebyshev series CS at a given point X
input : x
x ... value where the series shall be evaluated.
"""
return self._eval(self._ptr, x)
def eval_err(self, x):
"""
This function computes the Chebyshev series at a given point X,
estimating both the series RESULT and its absolute error ABSERR.
The error estimate is made from the first neglected term in the
series.
input : x
x ... value where the error shall be evaluated.
"""
return self._eval_err(self._ptr, x)
def eval_n(self, order, x):
"""
This function evaluates the Chebyshev series CS at a given point
X, to (at most) the given order ORDER.
input : n, x
n ... number of cooefficients
x ... value where the series shall be evaluated.
"""
return self._eval_n(self._ptr, order, x)
def eval_n_err(self, order, x):
"""
This function evaluates a Chebyshev series CS at a given point X,
estimating both the series RESULT and its absolute error ABSERR,
to (at most) the given order ORDER. The error estimate is made
from the first neglected term in the series.
input : n, x
n ... number of coefficients
x ... value where the error shall be evaluated.
"""
return self._eval_n_err(self._ptr, order, x)
# def eval_mode(self, x, mode):
# """
#
# """
# return self._eval(self._ptr, x, mode)
#
# def eval_mode_e(self, x, mode):
# return self._eval(self._ptr, x, mode)
def calc_deriv(self):
"""
This method computes the derivative of the series CS. It returns
a new instance of the cheb_series class.
"""
tmp = cheb_series(self._size)
self._calc_deriv(tmp._ptr, self._ptr)
return tmp
def calc_integ(self):
"""
This method computes the integral of the series CS. It returns
a new instance of the cheb_series class.
"""
tmp = cheb_series(self._size)
self._calc_integ(tmp._ptr, self._ptr)
return tmp
def get_coefficients(self):
"""
Get the chebyshev coefficients.
"""
return self._get_coeff(self._ptr)
def set_coefficients(self, coefs):
"""
Sets the chebyshev coefficients.
"""
return self._set_coeff(self._ptr, coefs)
def get_a(self):
"""
Get the lower boundary of the current representation
"""
return self._get_a(self._ptr)
def set_a(self, a):
"""
Set the lower boundary of the current representation
"""
return self._set_a(self._ptr, a)
def get_b(self):
"""
Get the upper boundary of the current representation
"""
return self._get_b(self._ptr)
def set_b(self, a):
"""
Set the upper boundary of the current representation
"""
return self._set_b(self._ptr, a)
def get_f(self):
"""
Get the value f (what is it ?) The documentation does not tell anything
about it.
"""
return self._get_f(self._ptr)
def set_f(self, a):
"""
Set the value f (what is it ?)
"""
return self._set_f(self._ptr, a)
def get_order_sp(self):
"""
Get the value f (what is it ?) The documentation does not tell anything
about it.
"""
return self._get_order_sp(self._ptr)
def set_order_sp(self, a):
"""
Set the value f (what is it ?)
"""
return self._set_order_sp(self._ptr, a)
| poojavade/Genomics_Docker | Dockerfiles/gedlab-khmer-filter-abund/pymodules/python2.7/lib/python/pygsl/chebyshev.py | Python | apache-2.0 | 6,561 | 0.00381 |
import re
from django.core.management import call_command
from django_webtest import WebTest
from .auth import TestUserMixin
from .settings import SettingsMixin
from popolo.models import Person
from .uk_examples import UK2015ExamplesMixin
class TestSearchView(TestUserMixin, SettingsMixin, UK2015ExamplesMixin, WebTest):
def setUp(self):
super(TestSearchView, self).setUp()
call_command('rebuild_index', verbosity=0, interactive=False)
def test_search_page(self):
# we have to create the candidate by submitting the form as otherwise
# we're not making sure the index update hook fires
response = self.app.get('/search?q=Elizabeth')
# have to use re to avoid matching search box
self.assertFalse(
re.search(
r'''<a[^>]*>Elizabeth''',
response.text
)
)
self.assertFalse(
re.search(
r'''<a[^>]*>Mr Darcy''',
response.text
)
)
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user,
)
form = response.forms['new-candidate-form']
form['name'] = 'Mr Darcy'
form['email'] = 'darcy@example.com'
form['source'] = 'Testing adding a new person to a post'
form['party_gb_2015'] = self.labour_party_extra.base_id
form.submit()
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user,
)
form = response.forms['new-candidate-form']
form['name'] = 'Elizabeth Bennet'
form['email'] = 'lizzie@example.com'
form['source'] = 'Testing adding a new person to a post'
form['party_gb_2015'] = self.labour_party_extra.base_id
form.submit()
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user,
)
form = response.forms['new-candidate-form']
form['name'] = "Charlotte O'Lucas" # testers license
form['email'] = 'charlotte@example.com'
form['source'] = 'Testing adding a new person to a post'
form['party_gb_2015'] = self.labour_party_extra.base_id
form.submit()
# check searching finds them
response = self.app.get('/search?q=Elizabeth')
self.assertTrue(
re.search(
r'''<a[^>]*>Elizabeth''',
response.text
)
)
self.assertFalse(
re.search(
r'''<a[^>]*>Mr Darcy''',
response.text
)
)
response = self.app.get(
'/election/2015/post/65808/dulwich-and-west-norwood',
user=self.user,
)
form = response.forms['new-candidate-form']
form['name'] = 'Elizabeth Jones'
form['email'] = 'e.jones@example.com'
form['source'] = 'Testing adding a new person to a post'
form['party_gb_2015'] = self.labour_party_extra.base_id
form.submit()
response = self.app.get('/search?q=Elizabeth')
self.assertTrue(
re.search(
r'''<a[^>]*>Elizabeth Bennet''',
response.text
)
)
self.assertTrue(
re.search(
r'''<a[^>]*>Elizabeth Jones''',
response.text
)
)
person = Person.objects.get(name='Elizabeth Jones')
response = self.app.get(
'/person/{0}/update'.format(person.id),
user=self.user,
)
form = response.forms['person-details']
form['name'] = 'Lizzie Jones'
form['source'] = "Some source of this information"
form.submit()
response = self.app.get('/search?q=Elizabeth')
self.assertTrue(
re.search(
r'''<a[^>]*>Elizabeth Bennet''',
response.text
)
)
self.assertFalse(
re.search(
r'''<a[^>]*>Elizabeth Jones''',
response.text
)
)
# check that searching for names with apostrophe works
response = self.app.get("/search?q=O'Lucas")
self.assertTrue(
re.search(
r'''<a[^>]*>Charlotte''',
response.text
)
)
| mysociety/yournextrepresentative | candidates/tests/test_search.py | Python | agpl-3.0 | 4,466 | 0.000672 |
from collections import OrderedDict
from src.models import FlashscoreMatch
from src.settings import match_cache, Colors
@match_cache
def is_available(match: FlashscoreMatch) -> bool:
return (
match.ah_0_1_current_odds is not None and
match.ah_0_2_current_odds is not None and
match.home_draw_current_odds is not None and
match.home_away_current_odds is not None and
match.draw_away_current_odds is not None and
match.home_team_rank_across_home is not None and
match.away_team_rank_across_away is not None
)
@match_cache
def e1(match: FlashscoreMatch) -> bool:
return (
2.8 > match.home_current_odds > 1.9 and
match.away_current_odds > 2.45 and
match.draw_current_odds >=3 and
match.prediction_a3 < 2 and
match.prediction_a4 > 0 and
match.home_team_rank_across_home < 18 and
match.away_team_rank_across_away < 15 and
6.5 > (match.xxx or 0.) >= 3.67
)
@match_cache
def e2(match: FlashscoreMatch) -> bool:
return (
3.4 > match.home_current_odds > 2.45 and
3 > match.away_current_odds > 1.59 and
match.prediction_a4 == 0 and
match.prediction_a3 != 1 and
(match.xxx or 0.) >= 4
)
@match_cache
def a1(match: FlashscoreMatch) -> bool:
return (
match.away_current_odds < 2 and
2.5 < match.ah_0_1_current_odds < 5.25 and
match.prediction_a3 < 3 and
match.prediction_a4 < 2 and
(match.xxx or 0.) > 3.5
)
@match_cache
def h2(match: FlashscoreMatch) -> bool:
return (
match.home_current_odds < 2 and
match.prediction_a4 == 0 and
(match.xxx or 0.) > 4.16
)
@match_cache
def test(match: FlashscoreMatch) -> bool:
    return False
@match_cache
def other(match: FlashscoreMatch) -> bool:
    return not (e1(match) or e2(match) or a1(match) or h2(match) or test(match))
@match_cache
def bet(match: FlashscoreMatch) -> str:
values = OrderedDict([
('e1', e1(match)),
('e2', e2(match)),
('a1', a1(match)),
('h2', h2(match)),
('test', test(match))])
return ', '.join((key for key, value in values.items() if value))
@match_cache
def ah_0_1_color(match: FlashscoreMatch) -> Colors:
if e1(match) or a1(match):
return Colors.GREEN
return Colors.EMPTY
@match_cache
def ah_0_2_color(match: FlashscoreMatch) -> Colors:
if e2(match) or h2(match):
return Colors.GREEN
return Colors.EMPTY
@match_cache
def total_score_color(match: FlashscoreMatch) -> Colors:
if match.home_score is not None and match.away_score is not None:
if e1(match) or a1(match):
if match.home_score > match.away_score:
return Colors.GREEN
elif match.home_score < match.away_score:
return Colors.RED
elif e2(match) or h2(match):
if match.away_score > match.home_score:
return Colors.GREEN
elif match.away_score < match.home_score:
return Colors.RED
return Colors.EMPTY
| vapkarian/soccer-analyzer | src/versions/f4.py | Python | mit | 3,131 | 0.000639 |
from datetime import datetime as dt
import os
from enum import IntEnum
import logging
from typing import Optional
from netCDF4 import Dataset, num2date
from hyo2.soundspeed.base.geodesy import Geodesy
from hyo2.soundspeed.profile.dicts import Dicts
from hyo2.soundspeed.profile.profile import Profile
from hyo2.soundspeed.profile.profilelist import ProfileList
from hyo2.abc.lib.progress.cli_progress import CliProgress
logger = logging.getLogger(__name__)
class RegOfsOffline:
class Model(IntEnum):
# East Coast
CBOFS = 10 # RG = True # Format is GoMOFS
DBOFS = 11 # RG = True # Format is GoMOFS
GoMOFS = 12 # RG = True # Format is GoMOFS
NYOFS = 13 # RG = False
SJROFS = 14 # RG = False
# Gulf of Mexico
NGOFS = 20 # RG = True # Format is GoMOFS
TBOFS = 21 # RG = True # Format is GoMOFS
# Great Lakes
LEOFS = 30 # RG = True # Format is GoMOFS
LHOFS = 31 # RG = False
LMOFS = 32 # RG = False
LOOFS = 33 # RG = False
LSOFS = 34 # RG = False
# Pacific Coast
CREOFS = 40 # RG = True # Format is GoMOFS
SFBOFS = 41 # RG = True # Format is GoMOFS
# noinspection DuplicatedCode
regofs_model_descs = \
{
Model.CBOFS: "Chesapeake Bay Operational Forecast System",
Model.DBOFS: "Delaware Bay Operational Forecast System",
Model.GoMOFS: "Gulf of Maine Operational Forecast System",
Model.NYOFS: "Port of New York and New Jersey Operational Forecast System",
Model.SJROFS: "St. John's River Operational Forecast System",
Model.NGOFS: "Northern Gulf of Mexico Operational Forecast System",
Model.TBOFS: "Tampa Bay Operational Forecast System",
Model.LEOFS: "Lake Erie Operational Forecast System",
Model.LHOFS: "Lake Huron Operational Forecast System",
Model.LMOFS: "Lake Michigan Operational Forecast System",
Model.LOOFS: "Lake Ontario Operational Forecast System",
Model.LSOFS: "Lake Superior Operational Forecast System",
Model.CREOFS: "Columbia River Estuary Operational Forecast System",
Model.SFBOFS: "San Francisco Bay Operational Forecast System"
}
def __init__(self, data_folder: str, prj: 'hyo2.soundspeed.soundspeed import SoundSpeedLibrary') -> None:
self.name = self.__class__.__name__
self.desc = "Abstract atlas" # a human-readable description
self.data_folder = data_folder
self.prj = prj
self.g = Geodesy()
self._has_data_loaded = False # grids are "loaded" ? (netCDF files are opened)
self._file = None
self._day_idx = 0
self._timestamp = None
self._zeta = None
self._siglay = None
self._h = None
self._lats = None
self._lons = None
self._lat = None
self._lon = None
self._loc_idx = None
self._d = None
self._temp = None
self._sal = None
def query(self, nc_path: str, lat: float, lon: float) -> Optional[ProfileList]:
if not os.path.exists(nc_path):
raise RuntimeError('Unable to locate %s' % nc_path)
logger.debug('nc path: %s' % nc_path)
if (lat is None) or (lon is None):
logger.error("invalid location query: (%s, %s)" % (lon, lat))
return None
logger.debug('query location: %s, %s' % (lat, lon))
progress = CliProgress()
try:
self._file = Dataset(nc_path)
progress.update(20)
except (RuntimeError, IOError) as e:
logger.warning("unable to access data: %s" % e)
self.clear_data()
progress.end()
return None
try:
self.name = self._file.title
time = self._file.variables['time']
self._timestamp = num2date(time[0], units=time.units)
logger.debug("Retrieved time: %s" % self._timestamp.isoformat())
# Now get latitudes, longitudes and depths for x,y,z referencing
self._lats = self._file.variables['lat'][:]
self._lons = self._file.variables['lon'][:]
# logger.debug('lat:(%s)\n%s' % (self._lats.shape, self._lats))
# logger.debug('lon:(%s)\n%s' % (self._lons.shape, self._lons))
self._zeta = self._file.variables['zeta'][0, :]
self._siglay = self._file.variables['siglay'][:]
self._h = self._file.variables['h'][:]
# logger.debug('zeta:(%s)\n%s' % (self._zeta.shape, self._zeta))
# logger.debug('siglay:(%s)\n%s' % (self._siglay.shape, self._siglay[:, 0]))
# logger.debug('h:(%s)\n%s' % (self._h.shape, self._h))
self._temp = self._file.variables['temp'][:]
self._sal = self._file.variables['salinity'][:]
# logger.debug('temp:(%s)\n%s' % (self._temp.shape, self._temp[:, 0]))
# logger.debug('sal:(%s)\n%s' % (self._sal.shape, self._sal[:, 0]))
except Exception as e:
logger.error("troubles in variable lookup for lat/long grid and/or depth: %s" % e)
self.clear_data()
progress.end()
return None
min_dist = 100000.0
min_idx = None
for idx, _ in enumerate(self._lats):
nc_lat = self._lats[idx]
nc_lon = self._lons[idx]
if nc_lon > 180.0:
nc_lon = nc_lon - 360.0
nc_dist = self.g.distance(nc_lon, nc_lat, lon, lat)
# logger.debug('loc: %.6f, %.6f -> %.6f' % (nc_lat, nc_lon, nc_dist))
if nc_dist < min_dist:
min_dist = nc_dist
min_idx = idx
if min_dist >= 10000.0:
logger.error("location too far from model nodes: %.f" % min_dist)
self.clear_data()
progress.end()
return None
self._loc_idx = min_idx
self._lon = self._lons[self._loc_idx]
if self._lon > 180.0:
self._lon = self._lon - 360.0
self._lat = self._lats[self._loc_idx]
logger.debug('closest node: %d [%s, %s] -> %s' % (self._loc_idx, self._lat, self._lon, min_dist))
zeta = self._zeta[self._loc_idx]
h = self._h[self._loc_idx]
siglay = -self._siglay[:, self._loc_idx]
# logger.debug('zeta: %s, h: %s, siglay: %s' % (zeta, h, siglay))
self._d = siglay * (h + zeta)
# logger.debug('d:(%s)\n%s' % (self._h.shape, self._d))
# Make a new SV object to return our query in
ssp = Profile()
ssp.meta.sensor_type = Dicts.sensor_types['Synthetic']
ssp.meta.probe_type = Dicts.probe_types[self.name]
ssp.meta.latitude = self._lat
ssp.meta.longitude = self._lon
ssp.meta.utc_time = dt(year=self._timestamp.year, month=self._timestamp.month,
day=self._timestamp.day, hour=self._timestamp.hour,
minute=self._timestamp.minute, second=self._timestamp.second)
ssp.meta.original_path = "%s_%s" % (self.name, self._timestamp.strftime("%Y%m%d_%H%M%S"))
ssp.init_data(self._d.shape[0])
ssp.data.depth = self._d[:]
ssp.data.temp = self._temp[0, :, self._loc_idx]
ssp.data.sal = self._sal[0, :, self._loc_idx]
ssp.calc_data_speed()
ssp.clone_data_to_proc()
ssp.init_sis()
profiles = ProfileList()
profiles.append_profile(ssp)
progress.end()
return profiles
def clear_data(self) -> None:
"""Delete the data and reset the last loaded day"""
logger.debug("clearing data")
if self._has_data_loaded:
if self._file:
self._file.close()
self._has_data_loaded = False # grids are "loaded" ? (netCDF files are opened)
self._file = None
self._day_idx = 0
self._timestamp = None
self._zeta = None
self._siglay = None
self._h = None
self._lats = None
self._lons = None
self._lat = None
self._lon = None
self._loc_idx = None
self._d = None
self._temp = None
self._sal = None
def __repr__(self):
msg = "%s" % super().__repr__()
msg += " <has data loaded: %s>\n" % (self._has_data_loaded,)
msg += " <loaded day: %s>\n" % (self._timestamp.strftime(r"%d\%m\%Y"),)
return msg
| hydroffice/hyo_soundspeed | hyo2/soundspeed/atlas/regofsoffline.py | Python | lgpl-2.1 | 8,583 | 0.001748 |
"""
Task description (translated from Estonian):
3. Matrix reduction (6p)
Write a function vähenda that takes as its argument a numeric matrix with an
even number of rows and columns, and returns a new matrix with half as many
rows and half as many columns, in which every element is the average of four
elements of the original matrix, following the scheme shown in the example
below. That is,
vähenda([[1,5,2,6,3,6], [1,3,2,7,3,3], [4,8,5,1,1,6], [4,4,9,5,6,1]])
must return
[[2.5, 4.25, 3.75], [5.0, 5.0, 3.5]].
"""
from grader import *
from KT2_util import make_checker
def vähenda(maatriks):
tulemus = []
for r in range(0, len(maatriks), 2):
rida = []
for c in range(0, len(maatriks[r]), 2):
tul = 0
for i in range(4):
tul += maatriks[r+i%2][c+i//2]
rida.append(tul / 4.0)
tulemus.append(rida)
return tulemus
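# Worked check against the example in the docstring above: the first element of
# the result averages the top-left 2x2 block [[1, 5], [1, 3]] of the input,
# i.e. (1 + 5 + 1 + 3) / 4.0 == 2.5.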
checker = make_checker(vähenda)
checker([[1, 2], [3, 4]],
description="Ruudukujuline 2x2 maatriks- {function}({args}) == {expected}")
checker([[1, 2, 3, 4], [5, 6, 7, 8]],
description="Mitte-ruudukujuline maatriks - {function}({args}) == {expected}")
checker([[1,5,2,6,3,6], [1,3,2,7,3,3], [4,8,5,1,1,6], [4,4,9,5,6,1]])
checker([[1,5,2,6,3,6], [1,3,2,7,3,3], [4,8,5,1,1,6], [4,4,9,5,6,1]])
checker([],
description="Erijuht, tühi maatriks- {function}({args}) == {expected}")
random_tests = [
[[7, 5, 2, 6, 6, 9], [2, 8, 6, 3, 8, 7]],
[[3, 1, 0, 9], [0, 5, 1, 7]],
[[4, 4], [0, 8], [4, 9], [3, 0], [3, 6], [8, 2]],
[[9, 4, 6, 5, 4, 6],
[3, 8, 7, 1, 2, 5],
[8, 9, 8, 5, 0, 2],
[2, 7, 2, 4, 3, 5],
[2, 6, 8, 0, 2, 9],
[7, 4, 6, 4, 8, 2]],
[[-1, -3], [-6, 6], [5, -6], [1, 0]],
[[-5, -10, 6, -1], [-8, -10, -5, 7], [-7, 9, -5, -5], [-8, -7, -10, 8]],
[[-3, 6, -3, 6], [4, -6, 3, 8], [-9, -6, 7, -6], [6, 6, 4, -3]],
[[1, 6], [2, -6]]
]
for test_case in random_tests:
checker(test_case) | macobo/python-grader | tasks/MTAT.03.100/2013/Midterm_1_resit/KT2_J1_vahenda_tester.py | Python | mit | 1,967 | 0.028659 |
self.description = "Upgrade a package that provides one of two imaginary packages"
lp1 = pmpkg("pkg1")
lp1.depends = ["imaginary", "imaginary2"]
self.addpkg2db("local", lp1)
lp2 = pmpkg("pkg2")
lp2.provides = ["imaginary"]
self.addpkg2db("local", lp2)
lp3 = pmpkg("pkg3")
lp3.provides = ["imaginary2"]
self.addpkg2db("local", lp3)
p = pmpkg("pkg2", "1.0-2")
p.provides = ["imaginary"]
self.addpkg(p)
self.args = "-U %s" % p.filename()
self.addrule("PACMAN_RETCODE=0")
self.addrule("PKG_EXIST=pkg1")
self.addrule("PKG_VERSION=pkg2|1.0-2")
self.addrule("PKG_EXIST=pkg3")
self.addrule("PKG_DEPENDS=pkg1|imaginary")
| kylon/pacman-fakeroot | test/pacman/tests/upgrade055.py | Python | gpl-2.0 | 618 | 0.001618 |
from app import db
from sqlalchemy import Column, String, Integer, ForeignKey
class VotingVariant(db.Model):
__tablename__ = 'voting_variants'
id = Column(Integer, primary_key=True)
voting_id = Column(Integer, ForeignKey('votings.id'))
title = Column(String(255))
description = Column(String(1000))
voting = db.relationship('Voting') | sokil/VotingEngine | models/voting_variant.py | Python | mit | 361 | 0.00277 |
from django.contrib import admin
# Register your models here.
from .models import Author, Genre, Book, BookInstance, Language
"""
# Minimal registration of Models.
admin.site.register(Book)
admin.site.register(Author)
admin.site.register(BookInstance)
admin.site.register(Genre)
admin.site.register(Language)
"""
admin.site.register(Genre)
admin.site.register(Language)
class BooksInline(admin.TabularInline):
"""
Defines format of inline book insertion (used in AuthorAdmin)
"""
model = Book
@admin.register(Author)
class AuthorAdmin(admin.ModelAdmin):
"""
Administration object for Author models.
Defines:
- fields to be displayed in list view (list_display)
- orders fields in detail view (fields), grouping the date fields horizontally
- adds inline addition of books in author view (inlines)
"""
list_display = ('last_name', 'first_name', 'date_of_birth', 'date_of_death')
fields = ['first_name', 'last_name', ('date_of_birth', 'date_of_death')]
inlines = [BooksInline]
class BooksInstanceInline(admin.TabularInline):
"""
Defines format of inline book instance insertion (used in BookAdmin)
"""
model = BookInstance
class BookAdmin(admin.ModelAdmin):
"""
Administration object for Book models.
Defines:
- fields to be displayed in list view (list_display)
- adds inline addition of book instances in book view (inlines)
"""
list_display = ('title', 'author', 'display_genre')
inlines = [BooksInstanceInline]
admin.site.register(Book, BookAdmin)
@admin.register(BookInstance)
class BookInstanceAdmin(admin.ModelAdmin):
"""
Administration object for BookInstance models.
Defines:
- fields to be displayed in list view (list_display)
- filters that will be displayed in sidebar (list_filter)
- grouping of fields into sections (fieldsets)
"""
list_display = ('book', 'status', 'borrower','due_back', 'id')
list_filter = ('status', 'due_back')
fieldsets = (
(None, {
'fields': ('book','imprint', 'id')
}),
('Availability', {
'fields': ('status', 'due_back','borrower')
}),
) | DarknessSwitch/django-tutorial | catalog/admin.py | Python | cc0-1.0 | 2,208 | 0.005888 |
#!/usr/bin/env python
# Copyright 2017 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
This script runs swarming_xcode_install on the bots. It should be run when we
need to upgrade all the swarming testers. It:
1) Packages two python files into an isolate.
2) Runs the isolate on swarming machines that satisfy certain dimensions.
Example usage:
$ ./build/run_swarming_xcode_install.py --luci_path ~/work/luci-py \
--swarming-server touch-swarming.appspot.com \
--isolate-server touch-isolate.appspot.com
"""
from __future__ import print_function
import argparse
import os
import shutil
import subprocess
import sys
import tempfile
def main():
parser = argparse.ArgumentParser(
description='Run swarming_xcode_install on the bots.')
parser.add_argument('--luci_path', required=True, type=os.path.abspath)
parser.add_argument('--swarming-server', required=True, type=str)
parser.add_argument('--isolate-server', required=True, type=str)
parser.add_argument('--batches', type=int, default=25,
help="Run xcode install in batches of size |batches|.")
parser.add_argument('--dimension', nargs=2, action='append')
args = parser.parse_args()
args.dimension = args.dimension or []
script_dir = os.path.dirname(os.path.abspath(__file__))
tmp_dir = tempfile.mkdtemp(prefix='swarming_xcode')
try:
print('Making isolate.')
shutil.copyfile(os.path.join(script_dir, 'swarming_xcode_install.py'),
os.path.join(tmp_dir, 'swarming_xcode_install.py'))
shutil.copyfile(os.path.join(script_dir, 'mac_toolchain.py'),
os.path.join(tmp_dir, 'mac_toolchain.py'))
luci_client = os.path.join(args.luci_path, 'client')
cmd = [
sys.executable, os.path.join(luci_client, 'isolateserver.py'), 'archive',
'-I', args.isolate_server, tmp_dir,
]
isolate_hash = subprocess.check_output(cmd).split()[0]
print('Running swarming_xcode_install.')
# TODO(crbug.com/765361): The dimensions below should be updated once
# swarming for iOS is fleshed out, likely removing xcode_version 9 and
# adding different dimensions.
luci_tools = os.path.join(luci_client, 'tools')
dimensions = [['pool', 'Chrome'], ['xcode_version', '9.0']] + args.dimension
dim_args = []
for d in dimensions:
dim_args += ['--dimension'] + d
cmd = [
sys.executable, os.path.join(luci_tools, 'run_on_bots.py'),
'--swarming', args.swarming_server, '--isolate-server',
args.isolate_server, '--priority', '20', '--batches', str(args.batches),
'--tags', 'name:run_swarming_xcode_install',
] + dim_args + ['--name', 'run_swarming_xcode_install', '--', isolate_hash,
'python', 'swarming_xcode_install.py',
]
subprocess.check_call(cmd)
print('All tasks completed.')
finally:
shutil.rmtree(tmp_dir)
return 0
if __name__ == '__main__':
sys.exit(main())
| endlessm/chromium-browser | build/run_swarming_xcode_install.py | Python | bsd-3-clause | 3,039 | 0.005923 |
# -*- coding: utf-8 -*-
"""
***************************************************************************
VariableDistanceBuffer.py
---------------------
Date : August 2012
Copyright : (C) 2012 by Victor Olaya
Email : volayaf at gmail dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Victor Olaya'
__date__ = 'August 2012'
__copyright__ = '(C) 2012, Victor Olaya'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.core import QGis
from processing.core.GeoAlgorithm import GeoAlgorithm
from processing.core.parameters import ParameterVector
from processing.core.parameters import ParameterBoolean
from processing.core.parameters import ParameterNumber
from processing.core.parameters import ParameterTableField
from processing.core.outputs import OutputVector
import Buffer as buff
from processing.tools import dataobjects
class VariableDistanceBuffer(GeoAlgorithm):
INPUT = 'INPUT'
OUTPUT = 'OUTPUT'
FIELD = 'FIELD'
SEGMENTS = 'SEGMENTS'
DISSOLVE = 'DISSOLVE'
def defineCharacteristics(self):
self.name, self.i18n_name = self.trAlgorithm('Variable distance buffer')
self.group, self.i18n_group = self.trAlgorithm('Vector geometry tools')
self.addParameter(ParameterVector(self.INPUT,
self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY]))
self.addParameter(ParameterTableField(self.FIELD,
self.tr('Distance field'), self.INPUT))
self.addParameter(ParameterNumber(self.SEGMENTS,
self.tr('Segments'), 1, default=5))
self.addParameter(ParameterBoolean(self.DISSOLVE,
self.tr('Dissolve result'), False))
self.addOutput(OutputVector(self.OUTPUT, self.tr('Buffer')))
def processAlgorithm(self, progress):
layer = dataobjects.getObjectFromUri(self.getParameterValue(self.INPUT))
dissolve = self.getParameterValue(self.DISSOLVE)
field = self.getParameterValue(self.FIELD)
segments = int(self.getParameterValue(self.SEGMENTS))
writer = self.getOutputFromName(self.OUTPUT).getVectorWriter(
layer.pendingFields().toList(), QGis.WKBPolygon, layer.crs())
buff.buffering(progress, writer, 0, field, True, layer, dissolve,
segments)
| sebastic/QGIS | python/plugins/processing/algs/qgis/VariableDistanceBuffer.py | Python | gpl-2.0 | 3,079 | 0.001299 |
"""
"""
import sys
import re
import keyword
import logging
import traceback
from functools import partial
from PySide import QtCore
from PySide.QtCore import QObject
from maya import cmds
from mampy.pyside.utils import get_qt_object
from mamprefs import config
from mamprefs.base import BaseManager, deleteUI, file_to_pyobject
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)
ACTIVE_MENU = None
def get_parent_panel():
"""
    Return the parent layout of the current panel.
"""
panel = cmds.getPanel(up=True)
if cmds.panel(panel, q=True, ex=True):
panel_layout = cmds.layout(panel, q=True, p=True)
while not cmds.paneLayout(panel_layout, q=True, ex=True):
panel_layout = cmds.control(panel_layout, q=True, p=True)
if cmds.paneLayout(panel_layout, q=True, ex=True):
return panel_layout
else:
return 'viewPanes'
class MarkingMenuManager(BaseManager):
"""
"""
def __init__(self):
super(MarkingMenuManager, self).__init__('.markingmenu')
def __getitem__(self, key):
for menu_list in self.map.itervalues():
for menu in menu_list:
if menu.name == key:
return menu
else:
raise KeyError('"{}" is not in menu map.'.format(key))
def add_menu_items(self):
"""
Create menu items for every existing marking menu.
"""
cmds.menuItem(divider=True)
for file_name, menu_list in self.map.iteritems():
for menu in menu_list:
cmds.menuItem(
l=menu.name.title(),
c=partial(self.output, menu),
)
cmds.menuItem(ob=True, c=partial(self.edit, file_name))
cmds.menuItem(divider=True)
def initUI(self):
"""
        Creates the user interface; can be used to update it as well.
"""
super(MarkingMenuManager, self).initUI()
# UI element names
main_menu = config['MENU_MAIN_NAME']
marking_menu = config['MENU_MARKING_NAME']
layout_menu = config['MENU_LAYOUT_NAME']
# Delete UI elements if they exists.
deleteUI(marking_menu)
# Create the UI
cmds.menuItem(
marking_menu,
label='Marking Menus',
subMenu=True,
allowOptionBoxes=True,
insertAfter=layout_menu,
parent=main_menu,
tearOff=True,
)
cmds.menuItem(l='Update', c=lambda *args: self.reload_marking_menus())
if self.map:
self.add_menu_items()
else:
cmds.menuItem(l='No Marking Menus', enable=False)
cmds.menuItem(l='Clean Scene', c=lambda *args: self.clean_menu())
def parse_files(self):
for file_name, f in self.files.iteritems():
file_map = file_to_pyobject(f)
self.map[file_name] = [
MarkingMenu(**menu)
for menu in file_map
# for name, item in menu.iteritems()
]
def reload_marking_menus(self):
"""
Rebuild menus and re-parse files. Then rebuild the UI.
"""
self.reload()
self.initUI()
def clean_menu(self):
"""
.. note:: Might be redundant.
"""
deleteUI(config['MENU_MARKING_POPUP_NAME'])
def output(self, menu, *args):
"""
Outputs to script editor.
"""
if not any('radialPosition' in item for item in menu.items):
for item in menu.items:
print item
else:
for radial in ["N", "NW", "W", "SW", "S", "SE", "E", "NE"]:
for item in menu.items:
try:
if radial == item['radialPosition']:
print '{}: {}'.format(radial, item)
except KeyError:
pass
class MarkingMenu(object):
"""
"""
def __init__(self, name, button, marking_menu, modifiers, items,
option_boxes=False):
self.name = name
self.marking_menu = marking_menu
self.button = button
self.option_boxes = option_boxes
self.items = list()
self.modifiers = {'{}Modifier'.format(i): True for i in modifiers}
self.pane_widget = None
self.closing_event = MarkingMenuEventFilter()
self.parse_items(items)
logger.debug([name, button, marking_menu, modifiers, items])
def __str__(self):
return '{}({})'.format(self.__class__.__name__, self.name)
__repr__ = __str__
def parse_items(self, items):
logger.debug('New menu.')
for item in items:
logger.debug(item)
if 'sub_menu' in item:
logging.debug('building sub menu')
sub_list = item.pop('sub_menu', [])
sub_list.append({'set_parent': True})
logging.debug(sub_list)
item['subMenu'] = True
self.items.append(MarkingMenuItem(**item))
self.parse_items(sub_list)
else:
self.items.append(MarkingMenuItem(**item))
def build_menu(self):
"""
Creates menu items.
"""
try:
cmds.popupMenu(
config['MENU_MARKING_POPUP_NAME'],
button=self.button,
allowOptionBoxes=self.option_boxes,
markingMenu=self.marking_menu,
parent=get_parent_panel(),
**self.modifiers
)
logger.debug('building menu items:')
for item in self.items:
logger.debug(item)
if 'set_parent' in item:
cmds.setParent('..', m=True)
else:
cmds.menuItem(**item.unpack())
except:
traceback.print_exc(file=sys.stdout)
def show(self):
"""
Shows marking menu on hotkey press.
"""
try:
self.pane_widget.removeEventFilter(self.closing_event)
except AttributeError:
pass
deleteUI(config['MENU_MARKING_POPUP_NAME'])
self.build_menu()
self.pane_widget = get_qt_object(get_parent_panel())
self.pane_widget.installEventFilter(self.closing_event)
def hide(self):
try:
self.pane_widget.removeEventFilter(self.closing_event)
except AttributeError:
pass
deleteUI(config['MENU_MARKING_POPUP_NAME'])
class MarkingMenuEventFilter(QObject):
"""
Filter to handle events when building and hiding marking menus.
"""
key_release = False
is_child = False
destroy = False
def eventFilter(self, obj, event):
"""Make marking menus behave like other maya marking menus."""
# Destroy the menu in a new event cycle. If we don't do this we will
# delete the menu before the commands or sub menus are shown and crash
# maya.
if self.destroy:
self.destroy = False
hide_menu()
etype = event.type()
if etype == QtCore.QEvent.ChildRemoved:
self.is_child = False
if self.key_release:
self.destroy = True
if etype == QtCore.QEvent.ChildAdded:
self.is_child = True
else:
if etype == QtCore.QEvent.ShortcutOverride:
if event.isAutoRepeat():
self.key_release = False
return True
elif etype == QtCore.QEvent.KeyRelease:
if not self.is_child:
hide_menu()
self.key_release = True
return super(MarkingMenuEventFilter, self).eventFilter(obj, event)
class MarkingMenuItem(object):
"""
"""
default_menu = {
# Requiered
# 'label': None,
# 'command': None,
# 'radialPosition': None,
# Optional
'divider': False,
'subMenu': False,
'tearOff': False,
'altModifier': False,
'ctrlModifier': False,
'shiftModifier': False,
'optionModifier': False,
'commandModifier': False,
'optionBox': False,
'enable': True,
'data': False,
'allowOptionBoxes': True,
'postMenuCommandOnce': False,
'enableCommandRepeat': True,
'echoCommand': False,
'italicized': False,
'boldFont': True,
'sourceType': 'python',
}
def __init__(self, **kwargs):
self.menu_kwargs = {}
if 'divider' in kwargs:
self.menu_kwargs = {'divider': True}
elif 'set_parent' in kwargs:
self.menu_kwargs['set_parent'] = '..'
# self.menu_kwargs['label'] = 'set_parent'
else:
self.menu_kwargs = self.default_menu.copy()
if 'position' in kwargs:
kwargs['radialPosition'] = kwargs.pop('position', None)
if 'command' in kwargs:
kwargs['command'] = str(Command(kwargs['command']))
self.menu_kwargs.update(kwargs)
def __str__(self):
if 'label' not in self.menu_kwargs:
return '{}()'.format(self.__class__.__name__)
return '{}({})'.format(self.__class__.__name__, self.menu_kwargs['label'])
def __getitem__(self, key):
return self.menu_kwargs[key]
def __contains__(self, key):
return key in self.menu_kwargs
__repr__ = __str__
def unpack(self):
return self.menu_kwargs
class Command(object):
regex = re.compile(ur'^\w+')
def __init__(self, command_string):
self.command_string = command_string
self._module = None
self._parsed_command = None
def __str__(self):
return '{}'.format(self.parsed_command)
@property
def module(self):
if self._module is None:
try:
_module = re.findall(self.regex, self.command_string)[0]
except IndexError:
_module = None
return _module
@property
def is_module_keyword(self):
return keyword.iskeyword(self.module)
@property
def is_maya_keyword(self):
return self.module in ['cmds', 'mel']
@property
def parsed_command(self):
if self._parsed_command is None:
self._parsed_command = self.parse()
return self._parsed_command
def parse(self):
tmpcommand = ''
if self.module is None:
return 'null'
if self.is_module_keyword or self.is_maya_keyword:
tmpcommand = self.command_string
else:
tmpcommand = 'import {0.module}; {0.command_string}'.format(self)
logger.debug('parsed command to: {}'.format(tmpcommand))
return tmpcommand
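# A minimal usage sketch of Command (the module/function names below are made
# up for illustration):
#
#   Command("mampy.do_something()").parsed_command
#   -> "import mampy; mampy.do_something()"
#   Command("cmds.select(clear=True)").parsed_command
#   -> "cmds.select(clear=True)"   # cmds/mel commands pass through unchanged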
MARKING_MENU_MANAGER = MarkingMenuManager()
def init():
MARKING_MENU_MANAGER.initUI()
def show_menu(menu):
global ACTIVE_MENU
ACTIVE_MENU = menu
logger.debug(MARKING_MENU_MANAGER[menu])
try:
MARKING_MENU_MANAGER[menu].show()
except KeyError:
logger.exception(traceback.format_exc())
def hide_menu():
logger.debug(MARKING_MENU_MANAGER[ACTIVE_MENU])
try:
MARKING_MENU_MANAGER[ACTIVE_MENU].hide()
except KeyError:
logger.exception(traceback.format_exc())
if __name__ == '__main__':
pass
| arubertoson/maya-mamprefs | mamprefs/markingmenus.py | Python | mit | 11,879 | 0.000253 |
'''
test for changing the password of a non-existent user
@author: SyZhao
'''
import apibinding.inventory as inventory
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.operations.vm_operations as vm_ops
import zstacklib.utils.ssh as ssh
import test_stub
exist_users = ["root"]
users = ["a_", "aa" ]
passwds = ["0aIGFDFBB_N", "a1_" ]
vm = None
def test():
global vm, exist_users
test_util.test_dsc('change unexisted user password test')
vm = test_stub.create_vm(vm_name = 'cknewusrvmpswd-u14-64', image_name = "imageName_i_u14")
vm.check()
backup_storage_list = test_lib.lib_get_backup_storage_list_by_vm(vm.vm)
for bs in backup_storage_list:
if bs.type == inventory.IMAGE_STORE_BACKUP_STORAGE_TYPE:
break
if bs.type == inventory.SFTP_BACKUP_STORAGE_TYPE:
break
if bs.type == inventory.CEPH_BACKUP_STORAGE_TYPE:
break
else:
vm.destroy()
test_util.test_skip('Not find image store type backup storage.')
for (usr,passwd) in zip(users, passwds):
if usr not in exist_users:
test_util.test_logger("un-existed user:%s change vm password" %(usr))
#if the user is not existed, it should report
#try:
# vm_ops.change_vm_password(vm.get_vm().uuid, usr, passwd, skip_stopped_vm = None, session_uuid = None)
#except Exception,e:
# test_util.test_logger("unexisted user change vm password exception is %s" %(str(e)))
# normal_failed_string = "not exist"
# if normal_failed_string in str(e):
# test_util.test_logger("unexisted user return correct, create a the user for it.")
#else:
# test_util.test_fail("user not exist in this OS, it should not raise exception, but return a failure.")
test_stub.create_user_in_vm(vm.get_vm(), usr, passwd)
exist_users.append(usr)
#When vm is running:
vm_ops.change_vm_password(vm.get_vm().uuid, usr, passwd, skip_stopped_vm = None, session_uuid = None)
if not test_lib.lib_check_login_in_vm(vm.get_vm(), usr, passwd):
test_util.test_fail("create vm with user:%s password: %s failed", usr, passwd)
#When vm is stopped:
#vm.stop()
vm_ops.change_vm_password(vm.get_vm().uuid, "root", test_stub.original_root_password)
#vm.start()
vm.check()
vm.destroy()
vm.check()
vm.expunge()
vm.check()
test_util.test_pass('Set password when VM is creating is successful.')
#Will be called only if exception happens in test().
def error_cleanup():
global vm
if vm:
vm.destroy()
vm.expunge()
| zstackio/zstack-woodpecker | integrationtest/vm/vm_password/test_chg_unexist_usr_passwd_u14.py | Python | apache-2.0 | 2,941 | 0.011561 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# GuessIt - A library for guessing information from filenames
# Copyright (c) 2013 Nicolas Wack <wackou@gmail.com>
#
# GuessIt is free software; you can redistribute it and/or modify it under
# the terms of the Lesser GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# GuessIt is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# Lesser GNU General Public License for more details.
#
# You should have received a copy of the Lesser GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from __future__ import absolute_import, division, print_function, unicode_literals
from guessit.plugins.transformers import Transformer
from guessit.patterns import sep
import re
class SplitOnDash(Transformer):
def __init__(self):
Transformer.__init__(self, 245)
def process(self, mtree, options=None):
"""split into '-' separated subgroups (with required separator chars
around the dash)
"""
for node in mtree.unidentified_leaves():
indices = []
pattern = re.compile(sep + '-' + sep)
match = pattern.search(node.value)
while match:
span = match.span()
indices.extend([span[0], span[1]])
match = pattern.search(node.value, span[1])
if indices:
node.partition(indices)
| dantebarba/docker-media-server | plex/Subliminal.bundle/Contents/Libraries/Shared/guessit/transfo/split_on_dash.py | Python | gpl-3.0 | 1,659 | 0.000603 |
from filtering.morphology import ssm
from rivuletpy.utils.io import *
import matplotlib.pyplot as plt
import skfmm
ITER = 30
img = loadimg('/home/siqi/ncidata/rivuletpy/tests/data/test-crop.tif')
bimg = (img > 0).astype('int')
dt = skfmm.distance(bimg, dx=1)
sdt = ssm(dt, anisotropic=True, iterations=ITER)
try:
from skimage import filters
except ImportError:
from skimage import filter as filters
s_seg = sdt > filters.threshold_otsu(sdt)
plt.figure()
plt.title('DT')
plt.imshow(dt.max(-1))
plt.figure()
plt.title('img > 0')
plt.imshow((img > 0).max(-1))
plt.figure()
plt.title('SSM-DT')
plt.imshow(sdt.max(-1))
| RivuletStudio/rivuletpy | tests/testssm.py | Python | bsd-3-clause | 624 | 0.001603 |
"""
This module is to support *bbox_inches* option in savefig command.
"""
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import six
import warnings
from matplotlib.transforms import Bbox, TransformedBbox, Affine2D
def adjust_bbox(fig, bbox_inches, fixed_dpi=None):
"""
Temporarily adjust the figure so that only the specified area
(bbox_inches) is saved.
It modifies fig.bbox, fig.bbox_inches,
fig.transFigure._boxout, and fig.patch. While the figure size
changes, the scale of the original figure is conserved. A
function which restores the original values are returned.
"""
origBbox = fig.bbox
origBboxInches = fig.bbox_inches
_boxout = fig.transFigure._boxout
asp_list = []
locator_list = []
for ax in fig.axes:
pos = ax.get_position(original=False).frozen()
locator_list.append(ax.get_axes_locator())
asp_list.append(ax.get_aspect())
def _l(a, r, pos=pos):
return pos
ax.set_axes_locator(_l)
ax.set_aspect("auto")
def restore_bbox():
for ax, asp, loc in zip(fig.axes, asp_list, locator_list):
ax.set_aspect(asp)
ax.set_axes_locator(loc)
fig.bbox = origBbox
fig.bbox_inches = origBboxInches
fig.transFigure._boxout = _boxout
fig.transFigure.invalidate()
fig.patch.set_bounds(0, 0, 1, 1)
if fixed_dpi is not None:
tr = Affine2D().scale(fixed_dpi)
dpi_scale = fixed_dpi / fig.dpi
else:
tr = Affine2D().scale(fig.dpi)
dpi_scale = 1.
_bbox = TransformedBbox(bbox_inches, tr)
fig.bbox_inches = Bbox.from_bounds(0, 0,
bbox_inches.width, bbox_inches.height)
x0, y0 = _bbox.x0, _bbox.y0
w1, h1 = fig.bbox.width * dpi_scale, fig.bbox.height * dpi_scale
fig.transFigure._boxout = Bbox.from_bounds(-x0, -y0, w1, h1)
fig.transFigure.invalidate()
fig.bbox = TransformedBbox(fig.bbox_inches, tr)
fig.patch.set_bounds(x0 / w1, y0 / h1,
fig.bbox.width / w1, fig.bbox.height / h1)
return restore_bbox
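# A minimal usage sketch (hypothetical values, not part of this module): the
# callable returned by adjust_bbox undoes the temporary adjustment.
#
#   from matplotlib.transforms import Bbox
#   restore_bbox = adjust_bbox(fig, Bbox.from_bounds(0, 0, 4, 3), fixed_dpi=100)
#   fig.canvas.print_figure("cropped.png")  # render while the bbox is adjusted
#   restore_bbox()                          # restore the original figure state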
def process_figure_for_rasterizing(fig, bbox_inches_restore, fixed_dpi=None):
"""
This need to be called when figure dpi changes during the drawing
(e.g., rasterizing). It recovers the bbox and re-adjust it with
the new dpi.
"""
bbox_inches, restore_bbox = bbox_inches_restore
restore_bbox()
    r = adjust_bbox(fig, bbox_inches, fixed_dpi)
return bbox_inches, r
| yavalvas/yav_com | build/matplotlib/lib/matplotlib/tight_bbox.py | Python | mit | 2,604 | 0 |
#!/usr/bin/env python
"""stack.py: Stack implementation"""
__author__ = 'Rohit Sinha'
class Stack:
def __init__(self):
self.items = []
def isEmpty(self):
return self.items == []
def push(self, item):
self.items.append(item)
def pop(self):
return self.items.pop()
def peek(self):
return self.items[len(self.items) - 1]
def size(self):
return len(self.items)
def __str__(self):
return str(self.items).strip('[]')
if __name__ == '__main__':
s = Stack()
print(s.isEmpty())
s.push(5)
s.push('Hello')
print(s.peek())
s.push(True)
print(s.peek())
print(s.size())
print(s.pop())
print(s)
| rohitsinha54/Learning-Python | algorithms/stack.py | Python | mit | 716 | 0 |
#!/usr/bin/env python3
import sys
from Bio import SeqIO
from argparse import ArgumentParser, RawDescriptionHelpFormatter
usage = "Chromosome surgery: Splice something into and/or out of a chromosome."
# Main Parsers
parser = ArgumentParser(description=usage, formatter_class=RawDescriptionHelpFormatter)
parser.add_argument("-f", "--fasta", type=str, required=True, help="Input FASTA.")
parser.add_argument("-o", "--output", type=str, required=False, help="Output FASTA.")
parser.add_argument("-c", "--cid", type=str, required=True, help="Chromosome ID to edit.")
parser.add_argument("-i", "--insert", type=str, required=False, help="FASTA of sequence to insert.")
parser.add_argument("-I", "--incision", type=int, required=False, help="1-based nucleotide after which to insert the insert.")
parser.add_argument("-e", "--excision_start", type=int, required=False, help="1-based nucleotide that is the first to delete (0).")
parser.add_argument("-E", "--excision_end", type=int, required=False, help="1-based nucleotide that is the last to delete (0).")
args = parser.parse_args()
# Harmless defaults
splice_in = ''
incision = 0
excision_start = 0
excision_end = 0
no_insert = (not args.incision) or (not args.insert)
no_excision = (not args.excision_start) or (not args.excision_end)
if no_insert and no_excision:
sys.stderr.write("Incomplete edit arguments!\n")
exit(0)
# Get insert
if not no_insert:
incision = args.incision
with open(args.insert, 'r') as splicein:
record = list(SeqIO.parse(splicein, 'fasta'))[0]
splice_in = record.seq
# No need to shift the incision coordinate.
# The 1-based right-closed index after which to cut is the same location as the 0-based right-open substring end before the cut.
if not no_excision:
excision_start = args.excision_start
excision_end = args.excision_end
# Pythonize start coordinate from 1-based left-closed to 0-based left-closed.
excision_start -= 1
# No need to change the end coordinate. The 1-based right-closed index is the same location as the 0-based right-open substring end.
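    # Worked example (numbers chosen for illustration): excising 1-based
    # positions 10..12 gives excision_start = 9 and excision_end = 12, so the
    # slice record.seq[:9] + record.seq[12:] removes exactly bases 10, 11, 12.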
if (not no_insert) and not (no_excision):
# Do excision after the incision.
# Adjust coordinates.
if args.excision_start > args.incision and args.excision_end > args.incision:
excision_start = args.excision_start + len(splice_in)
excision_end = args.excision_end + len(splice_in)
elif args.excision_start < incision and args.excision_end < incision:
pass # The incision will be applied first, no need to adjust it. The excision is unaffected by the incision anyway.
else:
sys.stderr.write('Error: Cannot apply the specified coordinates. Excision end must be after excision start, and the incision cannot be inside the excision.')
# Parse and apply edit
with open(args.fasta, 'r') as genome:
if args.output:
out = open(args.output, 'w')
else:
        out = sys.stdout
for record in SeqIO.parse(genome, 'fasta'):
# Only edit the relevant entry
if (record.id == args.cid):
# Splice-in
record.seq = record.seq[:incision] + splice_in + record.seq[incision:]
# Splice-out
record.seq = record.seq[:excision_start] + record.seq[excision_end:]
# Output all the entries
SeqIO.write(record, out, 'fasta')
print("Done")
| fruce-ki/utility_scripts | chromosome_surgery.py | Python | mit | 3,387 | 0.006791 |
"""
Copyright 2013 Steven Diamond
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from cvxpy import *
from mixed_integer import *
import cvxopt
import numpy as np
n = 9
# 9x9 sudoku grid
numbers = Variable(n,n)
# TODO: 9*[Boolean(9,9)] doesn't work....
solution = cvxopt.matrix([
[0, 5, 2, 3, 7, 1, 8, 6, 4],
[6, 3, 7, 8, 0, 4, 5, 2, 1],
[1, 4, 8, 5, 2 ,6, 3, 0, 7],
[4, 7, 1, 2, 3, 0, 6, 5, 8],
[3, 6, 5, 1, 4, 8, 0, 7, 2],
[8, 2, 0, 6, 5, 7, 4, 1, 3],
[5, 1, 6, 7, 8, 3, 2, 4, 0],
[7, 0, 3, 4, 6, 2, 1, 8, 5],
[2, 8, 4, 0, 1, 5, 7, 3, 6]
])
# partial grid
known =[(0,6), (0,7), (1,4), (1,5), (1,8), (2,0), (2,2), (2,7), (2,8),
(3,0), (3,1), (4,0), (4,2), (4,4), (4,6), (4,8), (5,7), (5,8),
(6,0), (6,1), (6,6), (6,8), (7,0), (7,3), (7,4), (8,1), (8,2)]
def row(x,r):
m, n = x.size
for i in range(m):
for j in range(n):
if i == r: yield x[i,j]
def col(x,c):
m, n = x.size
for i in range(m):
for j in range(n):
if j == c: yield x[i,j]
def block(x,b):
m, n = x.size
for i in range(m):
for j in range(n):
            # blocks are the nine 3x3 subgrids: block b covers
            # rows 3*(b//3)..3*(b//3)+2 and cols 3*(b%3)..3*(b%3)+2
if i // 3 == b // 3 and j // 3 == b % 3:
yield x[i,j]
# create the sudoku constraints
perms = lambda: Assign(n, n)*cvxopt.matrix(range(1,10))
constraints = []
for i in range(n):
constraints += [vstack(*list(row(numbers, i))) == perms()]
constraints += [vstack(*list(col(numbers, i))) == perms()]
constraints += [vstack(*list(block(numbers, i))) == perms()]
#constraints.extend(numbers[k] == solution[k] for k in known)
# attempt to solve
p = Problem(Minimize(sum(abs(numbers-solution))), constraints)
p.solve(method="admm2", rho=0.5, iterations=25)
print(sum(numbers.value - solution))
| SteveDiamond/cvxpy | examples/extensions/sudoku_admm.py | Python | gpl-3.0 | 2,414 | 0.018641 |
# encoding: utf-8
from ..docker import docker_exec
from ..utils import filter_column, python_requirements_compare
from ..test_common import skipifdev
# @skipifdev
def test_update_tyr_config_file(distributed_undeployed):
platform, fabric = distributed_undeployed
# create empty directory for task under test
platform.docker_exec("mkdir -p /srv/tyr")
value, exception, stdout, stderr = fabric.execute_forked('update_tyr_config_file')
assert exception is None
assert stderr == ''
# check fabric tasks execution count
assert stdout.count("Executing task 'update_tyr_config_file'") == 2
# check existence of files created by the task under test
assert platform.path_exists('/srv/tyr/settings.py')
assert platform.path_exists('/srv/tyr/settings.wsgi')
env = fabric.get_object('env')
env.tyr_broker_username = 'toto'
value, exception, stdout, stderr = fabric.execute_forked('update_tyr_config_file')
assert exception is None
assert stderr.count("Warning: run() received nonzero return code 1 while executing "
"'diff /srv/tyr/settings.py /srv/tyr/settings.py.temp'") == 2
assert stdout.count("> CELERY_BROKER_URL = 'amqp://toto:guest@localhost:5672//'") == 2
assert stdout.count("< CELERY_BROKER_URL = 'amqp://guest:guest@localhost:5672//'") == 2
@skipifdev
def test_setup_tyr(distributed_undeployed):
platform, fabric = distributed_undeployed
# create some objects used (symlinked) by the task under test
platform.docker_exec('mkdir -p /usr/share/tyr/migrations/')
platform.docker_exec('touch /usr/bin/manage_tyr.py')
value, exception, stdout, stderr = fabric.execute_forked('setup_tyr')
assert exception is None
# check fabric tasks execution count
assert stdout.count("Executing task 'setup_tyr'") == 2
assert stdout.count("Executing task 'update_cities_conf'") == 2
assert stdout.count("Executing task 'update_tyr_config_file'") == 0
assert stdout.count("Executing task 'update_tyr_instance_conf'") == 0
# check that user www-data exists
assert filter_column(platform.get_data('/etc/passwd', 'host1'), 0, startswith='www-data')
assert filter_column(platform.get_data('/etc/passwd', 'host2'), 0, startswith='www-data')
# check existence of directories and files created by the task under test
assert platform.path_exists('/etc/tyr.d')
assert platform.path_exists('/srv/tyr')
assert platform.path_exists('/var/log/tyr')
assert platform.path_exists('/srv/ed/data/')
assert platform.path_exists('/var/log/tyr/tyr.log')
for instance in fabric.env.instances:
assert platform.path_exists('/srv/ed/{}/'.format(instance))
assert platform.path_exists('/srv/ed/{}/alembic.ini'.format(instance))
assert platform.path_exists('/srv/ed/{}/settings.sh'.format(instance))
assert platform.path_exists('/etc/tyr.d/{}.ini'.format(instance))
assert platform.path_exists('/etc/init.d/tyr_worker')
assert platform.path_exists('/srv/tyr/migrations')
assert platform.path_exists('/srv/tyr/manage.py')
assert platform.path_exists('/srv/tyr/cities_alembic.ini', 'host1')
@skipifdev
def test_update_tyr_confs(distributed_undeployed):
platform, fabric = distributed_undeployed
# create empty directories for task under test
platform.docker_exec("mkdir -p /etc/tyr.d /srv/tyr")
value, exception, stdout, stderr = fabric.execute_forked('update_tyr_confs')
assert exception is None
assert stderr == ''
# check fabric tasks execution count
assert stdout.count("Executing task 'update_tyr_config_file'") == 2
assert stdout.count("Executing task 'update_tyr_instance_conf'") == 2 * len(fabric.env.instances)
assert stdout.count("Executing task 'update_cities_conf'") == 1
@skipifdev
def test_upgrade_tyr_packages(distributed_undeployed):
platform, fabric = distributed_undeployed
fabric.execute('upgrade_tyr_packages')
assert platform.get_version('python', 'host1').startswith('2.7')
assert platform.get_version('python', 'host2').startswith('2.7')
assert docker_exec(platform.containers['host1'], 'pip -V', return_code_only=True) == 0
assert docker_exec(platform.containers['host2'], 'pip -V', return_code_only=True) == 0
assert platform.get_version('navitia-tyr', 'host1')
assert platform.get_version('navitia-tyr', 'host2')
assert platform.get_version('navitia-common', 'host1')
assert platform.get_version('navitia-common', 'host2')
known_missing = ['argparse==1.2.1', 'wsgiref==0.1.2']
for host in ('host1', 'host2'):
assert python_requirements_compare(
platform.docker_exec('pip freeze', host),
platform.get_data('/usr/share/tyr/requirements.txt', host)
) == known_missing
# TODO this seems redundant with setup_tyr
assert platform.path_exists('/etc/init.d/tyr_worker')
@skipifdev
def test_setup_tyr_master(distributed_undeployed):
platform, fabric = distributed_undeployed
fabric.execute('setup_tyr_master')
assert platform.path_exists('/srv/ed/', 'host1')
assert platform.path_exists('/srv/ed/', 'host2', negate=True)
assert platform.path_exists('/etc/init.d/tyr_beat', 'host1')
assert platform.path_exists('/etc/init.d/tyr_beat', 'host2', negate=True)
@skipifdev
def test_upgrade_ed_packages(distributed_undeployed):
platform, fabric = distributed_undeployed
fabric.execute('upgrade_ed_packages')
assert platform.get_version('navitia-ed', 'host1')
assert platform.get_version('navitia-ed', 'host2')
assert platform.get_version('navitia-common', 'host1')
assert platform.get_version('navitia-common', 'host2')
assert platform.get_version('navitia-cities', 'host1')
assert platform.get_version('navitia-cities', 'host2')
| Hoshiyo/fabric_navitia | integration_tests/test_tyr/test_setup.py | Python | agpl-3.0 | 5,824 | 0.00206 |
# -*- coding: utf-8 -*-
##############################################################################
#
# Copyright (C) 2014 Pexego Sistemas Informáticos All Rights Reserved
# $Jesús Ventosinos Mayor <jesus@pexego.es>$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import models, fields, api, _
from openerp.exceptions import except_orm
class purchase_order(models.Model):
_inherit = 'purchase.order'
picking_created = fields.Boolean('Picking created',
compute='is_picking_created')
@api.multi
def test_moves_done(self):
'''PO is done at the delivery side if all the incoming shipments
are done'''
for purchase in self:
for line in purchase.order_line:
for move in line.move_ids:
if move.state != 'done':
return False
return True
def is_picking_created(self):
self.picking_created = self.picking_ids and True or False
def _prepare_order_line_move(self, cr, uid, order, order_line, picking_id,
group_id, context=None):
"""
prepare the stock move data from the PO line.
This function returns a list of dictionary ready to be used in
stock.move's create()
"""
purchase_line_obj = self.pool['purchase.order.line']
res = super(purchase_order, self)._prepare_order_line_move(
cr, uid, order, order_line, picking_id, group_id, context)
for move_dict in res:
move_dict.pop('picking_id', None)
move_dict.pop('product_uos_qty', None)
move_dict.pop('product_uos', None)
move_dict['partner_id'] = order.partner_id.id
if order.partner_ref:
move_dict['origin'] += ":" + order.partner_ref
return res
def action_picking_create(self, cr, uid, ids, context=None):
"""
        The function is overridden so that no picking is created.
"""
for order in self.browse(cr, uid, ids):
self._create_stock_moves(cr, uid, order, order.order_line,
False, context=context)
def _create_stock_moves(self, cr, uid, order, order_lines,
picking_id=False, context=None):
"""
        MOD: The function is overridden so that the stock moves are not confirmed.
"""
stock_move = self.pool.get('stock.move')
todo_moves = []
new_group = self.pool.get("procurement.group").create(
cr, uid, {'name': order.name, 'partner_id': order.partner_id.id},
context=context)
for order_line in order_lines:
if not order_line.product_id:
continue
if order_line.product_id.type in ('product', 'consu'):
for vals in self._prepare_order_line_move(
cr, uid, order, order_line, picking_id, new_group,
context=context):
move = stock_move.create(cr, uid, vals, context=context)
todo_moves.append(move)
def move_lines_create_picking(self, cr, uid, ids, context=None):
mod_obj = self.pool.get('ir.model.data')
act_obj = self.pool.get('ir.actions.act_window')
moves = self.pool('stock.move')
result = mod_obj.get_object_reference(cr, uid, 'stock', 'action_receive_move')
id = result and result[1] or False
result = act_obj.read(cr, uid, [id], context=context)[0]
self_purchase = self.browse(cr, uid, ids)
move_lines = moves.search(cr, uid,
[('origin', 'like', self_purchase.name + '%'),
('picking_id', '=', False)],
context=context)
if len(move_lines) < 1:
            raise except_orm(_('Warning'), _('There are no move lines without an associated picking'))
result['context'] = []
if len(move_lines) > 1:
result['domain'] = "[('id','in',[" + ','.join(map(str, move_lines)) + "])]"
else:
result['domain'] = "[('id','='," + str(move_lines[0]) + ")]"
return result
class purchase_order_line(models.Model):
_inherit = 'purchase.order.line'
@api.multi
def write(self, vals):
res = super(purchase_order_line, self).write(vals)
for line in self:
if line.move_ids and vals.get('date_planned', False):
for move in line.move_ids:
if move.state not in ['cancel',u'done'] and \
not move.container_id:
move.date_expected = vals['date_planned']
return res
| jgmanzanas/CMNT_004_15 | project-addons/purchase_picking/purchase.py | Python | agpl-3.0 | 5,513 | 0.000908 |
# Licensed to the .NET Foundation under one or more agreements.
# The .NET Foundation licenses this file to you under the Apache 2.0 License.
# See the LICENSE file in the project root for more information.
__all__ = ["ClrClass", "ClrInterface", "accepts", "returns", "attribute", "propagate_attributes"]
import clr
clr.AddReference("Microsoft.Dynamic")
clr.AddReference("Microsoft.Scripting")
clr.AddReference("IronPython")
if clr.IsNetCoreApp:
clr.AddReference("System.Reflection.Emit")
import System
from System import Char, Void, Boolean, Array, Type, AppDomain
from System.Reflection import FieldAttributes, MethodAttributes, PropertyAttributes, ParameterAttributes
from System.Reflection import CallingConventions, TypeAttributes, AssemblyName
from System.Reflection.Emit import OpCodes, CustomAttributeBuilder, AssemblyBuilder, AssemblyBuilderAccess
from System.Runtime.InteropServices import DllImportAttribute, CallingConvention, CharSet
from Microsoft.Scripting.Generation import Snippets
from Microsoft.Scripting.Runtime import DynamicOperations
from Microsoft.Scripting.Utils import ReflectionUtils
from IronPython.Runtime import NameType, PythonContext
from IronPython.Runtime.Types import PythonType, ReflectedField, ReflectedProperty
def validate_clr_types(signature_types, var_signature = False):
if not isinstance(signature_types, tuple):
signature_types = (signature_types,)
for t in signature_types:
if type(t) is type(System.IComparable): # type overloaded on generic arity, eg IComparable and IComparable[T]
t = t[()] # select non-generic version
clr_type = clr.GetClrType(t)
if t == Void:
raise TypeError("Void cannot be used in signature")
is_typed = clr.GetPythonType(clr_type) == t
# is_typed needs to be weakened until the generated type
# gets explicitly published as the underlying CLR type
is_typed = is_typed or (hasattr(t, "__metaclass__") and t.__metaclass__ in [ClrInterface, ClrClass])
if not is_typed:
raise Exception, "Invalid CLR type %s" % str(t)
if not var_signature:
if clr_type.IsByRef:
raise TypeError("Byref can only be used as arguments and locals")
# ArgIterator is not present in Silverlight
if hasattr(System, "ArgIterator") and t == System.ArgIterator:
raise TypeError("Stack-referencing types can only be used as arguments and locals")
class TypedFunction(object):
"""
A strongly-typed function can get wrapped up as a staticmethod, a property, etc.
This class represents the raw function, but with the type information
it is decorated with.
Other information is stored as attributes on the function. See propagate_attributes
"""
def __init__(self, function, is_static = False, prop_name_if_prop_get = None, prop_name_if_prop_set = None):
self.function = function
self.is_static = is_static
self.prop_name_if_prop_get = prop_name_if_prop_get
self.prop_name_if_prop_set = prop_name_if_prop_set
class ClrType(type):
"""
Base metaclass for creating strongly-typed CLR types
"""
def is_typed_method(self, function):
if hasattr(function, "arg_types") != hasattr(function, "return_type"):
raise TypeError("One of @accepts and @returns is missing for %s" % function.func_name)
return hasattr(function, "arg_types")
def get_typed_properties(self):
for item_name, item in self.__dict__.items():
if isinstance(item, property):
if item.fget:
if not self.is_typed_method(item.fget): continue
prop_type = item.fget.return_type
else:
if not self.is_typed_method(item.fset): continue
prop_type = item.fset.arg_types[0]
validate_clr_types(prop_type)
clr_prop_type = clr.GetClrType(prop_type)
yield item, item_name, clr_prop_type
def emit_properties(self, typebld):
for prop, prop_name, clr_prop_type in self.get_typed_properties():
self.emit_property(typebld, prop, prop_name, clr_prop_type)
def emit_property(self, typebld, prop, name, clrtype):
prpbld = typebld.DefineProperty(name, PropertyAttributes.None, clrtype, None)
if prop.fget:
getter = self.emitted_methods[(prop.fget.func_name, prop.fget.arg_types)]
prpbld.SetGetMethod(getter)
if prop.fset:
setter = self.emitted_methods[(prop.fset.func_name, prop.fset.arg_types)]
prpbld.SetSetMethod(setter)
def dummy_function(self): raise RuntimeError("this should not get called")
def get_typed_methods(self):
"""
Get all the methods with @accepts (and @returns) decorators
Functions are assumed to be instance methods, unless decorated with @staticmethod
"""
# We avoid using the "types" library as it is not a builtin
FunctionType = type(ClrType.__dict__["dummy_function"])
for item_name, item in self.__dict__.items():
function = None
is_static = False
if isinstance(item, FunctionType):
function, is_static = item, False
elif isinstance(item, staticmethod):
function, is_static = getattr(self, item_name), True
elif isinstance(item, property):
if item.fget and self.is_typed_method(item.fget):
if item.fget.func_name == item_name:
# The property hides the getter. So yield the getter
yield TypedFunction(item.fget, False, item_name, None)
if item.fset and self.is_typed_method(item.fset):
if item.fset.func_name == item_name:
# The property hides the setter. So yield the setter
yield TypedFunction(item.fset, False, None, item_name)
continue
else:
continue
if self.is_typed_method(function):
yield TypedFunction(function, is_static)
def emit_methods(self, typebld):
# We need to track the generated methods so that we can emit properties
# referring these methods.
# Also, the hash is indexed by name *and signature*. Even though Python does
# not have method overloading, property getter and setter functions can have
# the same func_name attribute
self.emitted_methods = {}
for function_info in self.get_typed_methods():
method_builder = self.emit_method(typebld, function_info)
function = function_info.function
if self.emitted_methods.has_key((function.func_name, function.arg_types)):
raise TypeError("methods with clashing names")
self.emitted_methods[(function.func_name, function.arg_types)] = method_builder
def emit_classattribs(self, typebld):
if hasattr(self, '_clrclassattribs'):
for attrib_info in self._clrclassattribs:
if isinstance(attrib_info, type):
ci = clr.GetClrType(attrib_info).GetConstructor(())
cab = CustomAttributeBuilder(ci, ())
elif isinstance(attrib_info, CustomAttributeDecorator):
cab = attrib_info.GetBuilder()
else:
make_decorator = attrib_info()
cab = make_decorator.GetBuilder()
typebld.SetCustomAttribute(cab)
def get_clr_type_name(self):
if hasattr(self, "_clrnamespace"):
return self._clrnamespace + "." + self.__name__
else:
return self.__name__
def create_type(self, typebld):
self.emit_members(typebld)
new_type = typebld.CreateType()
self.map_members(new_type)
return new_type
class ClrInterface(ClrType):
"""
Set __metaclass__ in a Python class declaration to declare a
CLR interface type.
    You need to specify object as the base type if you do not specify any other
    interfaces as the base interfaces.
"""
def __init__(self, *args):
return super(ClrInterface, self).__init__(*args)
def emit_method(self, typebld, function_info):
assert(not function_info.is_static)
function = function_info.function
attributes = MethodAttributes.Public | MethodAttributes.Virtual | MethodAttributes.Abstract
method_builder = typebld.DefineMethod(
function.func_name,
attributes,
function.return_type,
function.arg_types)
instance_offset = 0 if function_info.is_static else 1
arg_names = function.func_code.co_varnames
for i in xrange(len(function.arg_types)):
# TODO - set non-trivial ParameterAttributes, default value and custom attributes
p = method_builder.DefineParameter(i + 1, ParameterAttributes.None, arg_names[i + instance_offset])
if hasattr(function, "CustomAttributeBuilders"):
for cab in function.CustomAttributeBuilders:
method_builder.SetCustomAttribute(cab)
return method_builder
def emit_members(self, typebld):
self.emit_methods(typebld)
self.emit_properties(typebld)
self.emit_classattribs(typebld)
def map_members(self, new_type): pass
interface_module_builder = None
@staticmethod
def define_interface(typename, bases):
for b in bases:
validate_clr_types(b)
if not ClrInterface.interface_module_builder:
name = AssemblyName("interfaces")
access = AssemblyBuilderAccess.Run
assembly_builder = ReflectionUtils.DefineDynamicAssembly(name, access)
ClrInterface.interface_module_builder = assembly_builder.DefineDynamicModule("interfaces")
attrs = TypeAttributes.Public | TypeAttributes.Interface | TypeAttributes.Abstract
return ClrInterface.interface_module_builder.DefineType(typename, attrs, None, bases)
def map_clr_type(self, clr_type):
"""
TODO - Currently "t = clr.GetPythonType(clr.GetClrType(C)); t == C" will be False
for C where C.__metaclass__ is ClrInterface, even though both t and C
represent the same CLR type. This can be fixed by publishing a mapping
between t and C in the IronPython runtime.
"""
pass
def __clrtype__(self):
# CFoo below will use ClrInterface as its metaclass, but the user will not expect CFoo
# to be an interface in this case:
#
# class IFoo(object):
# __metaclass__ = ClrInterface
# class CFoo(IFoo): pass
if not "__metaclass__" in self.__dict__:
return super(ClrInterface, self).__clrtype__()
bases = list(self.__bases__)
bases.remove(object)
bases = tuple(bases)
if False: # Snippets currently does not support creating interfaces
typegen = Snippets.Shared.DefineType(self.get_clr_type_name(), bases, True, False)
typebld = typegen.TypeBuilder
else:
typebld = ClrInterface.define_interface(self.get_clr_type_name(), bases)
clr_type = self.create_type(typebld)
self.map_clr_type(clr_type)
return clr_type
# Note that ClrClass inherits from ClrInterface to satisfy Python requirements of metaclasses.
# A metaclass of a subtype has to be subtype of the metaclass of a base type. As a result,
# if you define a type hierarchy as shown below, it requires ClrClass to be a subtype
# of ClrInterface:
#
# class IFoo(object):
# __metaclass__ = ClrInterface
# class CFoo(IFoo):
# __metaclass__ = ClrClass
class ClrClass(ClrInterface):
"""
Set __metaclass__ in a Python class declaration to specify strong-type
information for the class or its attributes. The Python class
retains its Python attributes, like being able to add or remove methods.
"""
# Holds the FieldInfo for a static CLR field which points to a
# Microsoft.Scripting.Runtime.DynamicOperations corresponding to the current ScriptEngine
dynamic_operations_field = None
def emit_fields(self, typebld):
if hasattr(self, "_clrfields"):
for fldname in self._clrfields:
field_type = self._clrfields[fldname]
validate_clr_types(field_type)
typebld.DefineField(
fldname,
clr.GetClrType(field_type),
FieldAttributes.Public)
def map_fields(self, new_type):
if hasattr(self, "_clrfields"):
for fldname in self._clrfields:
fldinfo = new_type.GetField(fldname)
setattr(self, fldname, ReflectedField(fldinfo))
@staticmethod
def get_dynamic_operations_field():
if ClrClass.dynamic_operations_field:
return ClrClass.dynamic_operations_field
python_context = clr.GetCurrentRuntime().GetLanguage(PythonContext)
dynamic_operations = DynamicOperations(python_context)
typegen = Snippets.Shared.DefineType(
"DynamicOperationsHolder" + str(hash(python_context)),
object,
True,
False)
typebld = typegen.TypeBuilder
typebld.DefineField(
"DynamicOperations",
DynamicOperations,
FieldAttributes.Public | FieldAttributes.Static)
new_type = typebld.CreateType()
ClrClass.dynamic_operations_field = new_type.GetField("DynamicOperations")
ClrClass.dynamic_operations_field.SetValue(None, dynamic_operations)
return ClrClass.dynamic_operations_field
def emit_typed_stub_to_python_method(self, typebld, function_info):
        """
        Generate a stub method that repushes all the arguments and
        dispatches to DynamicOperations.InvokeMember
        """
        function = function_info.function
invoke_member = clr.GetClrType(DynamicOperations).GetMethod(
"InvokeMember",
Array[Type]((object, str, Array[object])))
# Type.GetMethod raises an AmbiguousMatchException if there is a generic and a non-generic method
# (like DynamicOperations.GetMember) with the same name and signature. So we have to do things
# the hard way
get_member_search = [m for m in clr.GetClrType(DynamicOperations).GetMethods() if m.Name == "GetMember" and not m.IsGenericMethod and m.GetParameters().Length == 2]
assert(len(get_member_search) == 1)
get_member = get_member_search[0]
set_member_search = [m for m in clr.GetClrType(DynamicOperations).GetMethods() if m.Name == "SetMember" and not m.IsGenericMethod and m.GetParameters().Length == 3]
assert(len(set_member_search) == 1)
set_member = set_member_search[0]
convert_to = clr.GetClrType(DynamicOperations).GetMethod(
"ConvertTo",
Array[Type]((object, Type)))
get_type_from_handle = clr.GetClrType(Type).GetMethod("GetTypeFromHandle")
attributes = MethodAttributes.Public
if function_info.is_static: attributes |= MethodAttributes.Static
if function.func_name == "__new__":
if function_info.is_static: raise TypeError
method_builder = typebld.DefineConstructor(
attributes,
CallingConventions.HasThis,
function.arg_types)
raise NotImplementedError("Need to call self.baseType ctor passing in self.get_python_type_field()")
else:
method_builder = typebld.DefineMethod(
function.func_name,
attributes,
function.return_type,
function.arg_types)
instance_offset = 0 if function_info.is_static else 1
arg_names = function.func_code.co_varnames
for i in xrange(len(function.arg_types)):
# TODO - set non-trivial ParameterAttributes, default value and custom attributes
p = method_builder.DefineParameter(i + 1, ParameterAttributes.None, arg_names[i + instance_offset])
ilgen = method_builder.GetILGenerator()
args_array = ilgen.DeclareLocal(Array[object])
args_count = len(function.arg_types)
ilgen.Emit(OpCodes.Ldc_I4, args_count)
ilgen.Emit(OpCodes.Newarr, object)
ilgen.Emit(OpCodes.Stloc, args_array)
for i in xrange(args_count):
arg_type = function.arg_types[i]
if clr.GetClrType(arg_type).IsByRef:
raise NotImplementedError("byref params not supported")
ilgen.Emit(OpCodes.Ldloc, args_array)
ilgen.Emit(OpCodes.Ldc_I4, i)
ilgen.Emit(OpCodes.Ldarg, i + int(not function_info.is_static))
ilgen.Emit(OpCodes.Box, arg_type)
ilgen.Emit(OpCodes.Stelem_Ref)
has_return_value = True
if function_info.prop_name_if_prop_get:
ilgen.Emit(OpCodes.Ldsfld, ClrClass.get_dynamic_operations_field())
ilgen.Emit(OpCodes.Ldarg, 0)
ilgen.Emit(OpCodes.Ldstr, function_info.prop_name_if_prop_get)
ilgen.Emit(OpCodes.Callvirt, get_member)
elif function_info.prop_name_if_prop_set:
ilgen.Emit(OpCodes.Ldsfld, ClrClass.get_dynamic_operations_field())
ilgen.Emit(OpCodes.Ldarg, 0)
ilgen.Emit(OpCodes.Ldstr, function_info.prop_name_if_prop_set)
ilgen.Emit(OpCodes.Ldarg, 1)
ilgen.Emit(OpCodes.Callvirt, set_member)
has_return_value = False
else:
ilgen.Emit(OpCodes.Ldsfld, ClrClass.get_dynamic_operations_field())
if function_info.is_static:
raise NotImplementedError("need to load Python class object from a CLR static field")
# ilgen.Emit(OpCodes.Ldsfld, class_object)
else:
ilgen.Emit(OpCodes.Ldarg, 0)
ilgen.Emit(OpCodes.Ldstr, function.func_name)
ilgen.Emit(OpCodes.Ldloc, args_array)
ilgen.Emit(OpCodes.Callvirt, invoke_member)
if has_return_value:
if function.return_type == Void:
ilgen.Emit(OpCodes.Pop)
else:
ret_val = ilgen.DeclareLocal(object)
ilgen.Emit(OpCodes.Stloc, ret_val)
ilgen.Emit(OpCodes.Ldsfld, ClrClass.get_dynamic_operations_field())
ilgen.Emit(OpCodes.Ldloc, ret_val)
ilgen.Emit(OpCodes.Ldtoken, clr.GetClrType(function.return_type))
ilgen.Emit(OpCodes.Call, get_type_from_handle)
ilgen.Emit(OpCodes.Callvirt, convert_to)
ilgen.Emit(OpCodes.Unbox_Any, function.return_type)
ilgen.Emit(OpCodes.Ret)
return method_builder
def emit_method(self, typebld, function_info):
function = function_info.function
if hasattr(function, "DllImportAttributeDecorator"):
dllImportAttributeDecorator = function.DllImportAttributeDecorator
name = function.func_name
dllName = dllImportAttributeDecorator.args[0]
entryName = function.func_name
attributes = MethodAttributes.Public | MethodAttributes.Static | MethodAttributes.PinvokeImpl
callingConvention = CallingConventions.Standard
returnType = function.return_type
returnTypeRequiredCustomModifiers = ()
returnTypeOptionalCustomModifiers = ()
parameterTypes = function.arg_types
parameterTypeRequiredCustomModifiers = None
parameterTypeOptionalCustomModifiers = None
nativeCallConv = CallingConvention.Winapi
nativeCharSet = CharSet.Auto
method_builder = typebld.DefinePInvokeMethod(
name,
dllName,
entryName,
attributes,
callingConvention,
returnType,
returnTypeRequiredCustomModifiers,
returnTypeOptionalCustomModifiers,
parameterTypes,
parameterTypeRequiredCustomModifiers,
parameterTypeOptionalCustomModifiers,
nativeCallConv,
nativeCharSet)
else:
method_builder = self.emit_typed_stub_to_python_method(typebld, function_info)
if hasattr(function, "CustomAttributeBuilders"):
for cab in function.CustomAttributeBuilders:
method_builder.SetCustomAttribute(cab)
return method_builder
def map_pinvoke_methods(self, new_type):
pythonType = clr.GetPythonType(new_type)
for function_info in self.get_typed_methods():
function = function_info.function
if hasattr(function, "DllImportAttributeDecorator"):
# Overwrite the Python function with the pinvoke_method
pinvoke_method = getattr(pythonType, function.func_name)
setattr(self, function.func_name, pinvoke_method)
def emit_python_type_field(self, typebld):
return typebld.DefineField(
"PythonType",
PythonType,
FieldAttributes.Public | FieldAttributes.Static)
def set_python_type_field(self, new_type):
self.PythonType = new_type.GetField("PythonType")
self.PythonType.SetValue(None, self)
def add_wrapper_ctors(self, baseType, typebld):
python_type_field = self.emit_python_type_field(typebld)
for ctor in baseType.GetConstructors():
ctorparams = ctor.GetParameters()
# leave out the PythonType argument
assert(ctorparams[0].ParameterType == clr.GetClrType(PythonType))
ctorparams = ctorparams[1:]
ctorbld = typebld.DefineConstructor(
ctor.Attributes,
ctor.CallingConvention,
tuple([p.ParameterType for p in ctorparams]))
ilgen = ctorbld.GetILGenerator()
ilgen.Emit(OpCodes.Ldarg, 0)
ilgen.Emit(OpCodes.Ldsfld, python_type_field)
for index in xrange(len(ctorparams)):
ilgen.Emit(OpCodes.Ldarg, index + 1)
ilgen.Emit(OpCodes.Call, ctor)
ilgen.Emit(OpCodes.Ret)
def emit_members(self, typebld):
self.emit_fields(typebld)
self.add_wrapper_ctors(self.baseType, typebld)
super(ClrClass, self).emit_members(typebld)
def map_members(self, new_type):
self.map_fields(new_type)
self.map_pinvoke_methods(new_type)
self.set_python_type_field(new_type)
super(ClrClass, self).map_members(new_type)
def __clrtype__(self):
# CDerived below will use ClrClass as its metaclass, but the user may not expect CDerived
# to be a typed .NET class in this case:
#
# class CBase(object):
# __metaclass__ = ClrClass
# class CDerived(CBase): pass
if not "__metaclass__" in self.__dict__:
return super(ClrClass, self).__clrtype__()
# Create a simple Python type first.
self.baseType = super(ClrType, self).__clrtype__()
# We will now subtype it to create a customized class with the
# CLR attributes as defined by the user
typegen = Snippets.Shared.DefineType(self.get_clr_type_name(), self.baseType, True, False)
typebld = typegen.TypeBuilder
return self.create_type(typebld)
def make_cab(attrib_type, *args, **kwds):
clrtype = clr.GetClrType(attrib_type)
argtypes = tuple(map(lambda x:clr.GetClrType(type(x)), args))
ci = clrtype.GetConstructor(argtypes)
props = ([],[])
fields = ([],[])
for kwd in kwds:
pi = clrtype.GetProperty(kwd)
if pi is not None:
props[0].append(pi)
props[1].append(kwds[kwd])
else:
fi = clrtype.GetField(kwd)
if fi is not None:
fields[0].append(fi)
fields[1].append(kwds[kwd])
else:
raise TypeError("No %s Member found on %s" % (kwd, clrtype.Name))
return CustomAttributeBuilder(ci, args,
tuple(props[0]), tuple(props[1]),
tuple(fields[0]), tuple(fields[1]))
def accepts(*args):
"""
TODO - needs to be merged with clr.accepts
"""
validate_clr_types(args, True)
def decorator(function):
function.arg_types = args
return function
return decorator
def returns(return_type = Void):
"""
TODO - needs to be merged with clr.returns
"""
if return_type != Void:
validate_clr_types(return_type)
def decorator(function):
function.return_type = return_type
return function
return decorator
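# A short sketch (hypothetical example) of how the two decorators are meant to be
# stacked on a method of a class whose metaclass is ClrClass or ClrInterface:
#
#   @accepts(str, int)
#   @returns(str)
#   def Repeat(self, text, count):
#       return text * count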
class CustomAttributeDecorator(object):
"""
This represents information about a custom-attribute applied to a type or a method
Note that we cannot use an instance of System.Attribute to capture this information
as it is not possible to go from an instance of System.Attribute to an instance
of System.Reflection.Emit.CustomAttributeBuilder as the latter needs to know
how to represent information in metadata to later *recreate* a similar instance of
System.Attribute.
Also note that once a CustomAttributeBuilder is created, it is not possible to
query it. Hence, we need to store the arguments required to store the
CustomAttributeBuilder so that pseudo-custom-attributes can get to the information.
"""
def __init__(self, attrib_type, *args, **kwargs):
self.attrib_type = attrib_type
self.args = args
self.kwargs = kwargs
def __call__(self, function):
if self.attrib_type == DllImportAttribute:
function.DllImportAttributeDecorator = self
else:
if not hasattr(function, "CustomAttributeBuilders"):
function.CustomAttributeBuilders = []
function.CustomAttributeBuilders.append(self.GetBuilder())
return function
def GetBuilder(self):
assert not self.attrib_type in [DllImportAttribute]
return make_cab(self.attrib_type, *self.args, **self.kwargs)
def attribute(attrib_type):
"""
This decorator is used to specify a CustomAttribute for a type or method.
"""
def make_decorator(*args, **kwargs):
return CustomAttributeDecorator(attrib_type, *args, **kwargs)
return make_decorator
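# Usage sketch (assumes System.ObsoleteAttribute has been imported): the factory
# returns a decorator that records the custom attribute on the typed method.
#
#   Obsolete = attribute(ObsoleteAttribute)
#
#   class CFoo(object):
#       __metaclass__ = ClrClass
#       @Obsolete("use the new API instead")
#       @accepts()
#       @returns(str)
#       def Name(self): return "foo"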
def propagate_attributes(old_function, new_function):
"""
Use this if you replace a function in a type with ClrInterface or ClrClass as the metaclass.
This will typically be needed if you are defining a decorator which wraps functions with
new functions, and want it to work in conjunction with clrtype
"""
if hasattr(old_function, "return_type"):
new_function.func_name = old_function.func_name
new_function.return_type = old_function.return_type
new_function.arg_types = old_function.arg_types
if hasattr(old_function, "CustomAttributeBuilders"):
new_function.CustomAttributeBuilders = old_function.CustomAttributeBuilders
if hasattr(old_function, "CustomAttributeBuilders"):
new_function.DllImportAttributeDecorator = old_function.DllImportAttributeDecorator
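# Example (a hypothetical sketch): a wrapping decorator applied on top of
# @accepts/@returns that calls propagate_attributes so the emitted CLR method
# still sees the clrtype metadata of the original function:
#
#   def traced(function):
#       def wrapper(*args):
#           print "calling", function.func_name
#           return function(*args)
#       propagate_attributes(function, wrapper)
#       return wrapper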
| slozier/ironpython2 | Src/StdLib/Lib/clrtype.py | Python | apache-2.0 | 27,760 | 0.006484 |
from collections import Counter
from pprint import pprint
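# Note: this excerpt assumes `graph` is an authenticated facebook.GraphAPI
# instance and `get_all_data` is a pagination helper, both defined earlier in
# the notebook this cell comes from; neither is part of this snippet.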
count = Counter()
posts = graph.get_object('me', fields=['posts.limit(100)'])['posts']['data']
for i, p in enumerate(posts):
likes = get_all_data(graph, p['id']+"/likes")
print(i, p['id'], len(likes))
for x in likes:
name = x['name']
count[name] += 1
pprint(count.most_common(15)) | tjwei/HackNTU_Data_2017 | Week09/q_posts_friends.py | Python | mit | 367 | 0.00545 |
# Copyright (c) 2014 Rackspace, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Pecan Controllers"""
from cdn.transport.pecan.controllers import root
from cdn.transport.pecan.controllers import services
from cdn.transport.pecan.controllers import v1
# Hoist into package namespace
Root = root.RootController
Services = services.ServicesController
V1 = v1.ControllerV1
| obulpathi/cdn1 | cdn/transport/pecan/controllers/__init__.py | Python | apache-2.0 | 879 | 0 |
# Copyright 2013 IBM Corp.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import time
from lxml import etree
from tempest.common.rest_client import RestClientXML
from tempest import exceptions
from tempest.services.compute.xml.common import Document
from tempest.services.compute.xml.common import Element
from tempest.services.compute.xml.common import Text
from tempest.services.compute.xml.common import xml_to_json
class InterfacesClientXML(RestClientXML):
def __init__(self, config, username, password, auth_url, tenant_name=None):
super(InterfacesClientXML, self).__init__(config, username, password,
auth_url, tenant_name)
self.service = self.config.compute.catalog_type
def _process_xml_interface(self, node):
iface = xml_to_json(node)
# NOTE(danms): if multiple addresses per interface is ever required,
# xml_to_json will need to be fixed or replaced in this case
iface['fixed_ips'] = [dict(iface['fixed_ips']['fixed_ip'].items())]
return iface
def list_interfaces(self, server):
resp, body = self.get('servers/%s/os-interface' % server, self.headers)
node = etree.fromstring(body)
interfaces = [self._process_xml_interface(x)
for x in node.getchildren()]
return resp, interfaces
def create_interface(self, server, port_id=None, network_id=None,
fixed_ip=None):
doc = Document()
iface = Element('interfaceAttachment')
if port_id:
_port_id = Element('port_id')
_port_id.append(Text(port_id))
iface.append(_port_id)
if network_id:
_network_id = Element('net_id')
_network_id.append(Text(network_id))
iface.append(_network_id)
if fixed_ip:
_fixed_ips = Element('fixed_ips')
_fixed_ip = Element('fixed_ip')
_ip_address = Element('ip_address')
_ip_address.append(Text(fixed_ip))
_fixed_ip.append(_ip_address)
_fixed_ips.append(_fixed_ip)
iface.append(_fixed_ips)
doc.append(iface)
resp, body = self.post('servers/%s/os-interface' % server,
headers=self.headers,
body=str(doc))
body = self._process_xml_interface(etree.fromstring(body))
return resp, body
def show_interface(self, server, port_id):
resp, body = self.get('servers/%s/os-interface/%s' % (server, port_id),
self.headers)
body = self._process_xml_interface(etree.fromstring(body))
return resp, body
def delete_interface(self, server, port_id):
resp, body = self.delete('servers/%s/os-interface/%s' % (server,
port_id))
return resp, body
def wait_for_interface_status(self, server, port_id, status):
"""Waits for a interface to reach a given status."""
resp, body = self.show_interface(server, port_id)
interface_status = body['port_state']
start = int(time.time())
while(interface_status != status):
time.sleep(self.build_interval)
resp, body = self.show_interface(server, port_id)
interface_status = body['port_state']
timed_out = int(time.time()) - start >= self.build_timeout
if interface_status != status and timed_out:
message = ('Interface %s failed to reach %s status within '
'the required time (%s s).' %
(port_id, status, self.build_timeout))
raise exceptions.TimeoutException(message)
return resp, body
| citrix-openstack-build/tempest | tempest/services/compute/xml/interfaces_client.py | Python | apache-2.0 | 4,377 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
from typing import Dict, Type
from .base import ProductBiddingCategoryConstantServiceTransport
from .grpc import ProductBiddingCategoryConstantServiceGrpcTransport
# Compile a registry of transports.
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[ProductBiddingCategoryConstantServiceTransport]]
_transport_registry["grpc"] = ProductBiddingCategoryConstantServiceGrpcTransport
__all__ = (
"ProductBiddingCategoryConstantServiceTransport",
"ProductBiddingCategoryConstantServiceGrpcTransport",
)
| googleads/google-ads-python | google/ads/googleads/v9/services/services/product_bidding_category_constant_service/transports/__init__.py | Python | apache-2.0 | 1,165 | 0.000858 |
import json
import os
import sys
from jupyter_client.kernelspec import install_kernel_spec
from IPython.utils.tempdir import TemporaryDirectory
kernel_json = {
"display_name": "Gauche",
"language": "gauche",
"argv": [sys.executable, "-m", "jupyter_gauche", "-f", "{connection_file}"],
"codemirror_mode": "scheme"
}
def install_gauche_kernel_spec(user=True):
with TemporaryDirectory() as td:
os.chmod(td, 0o755) # Starts off as 700, not user readable
with open(os.path.join(td, 'kernel.json'), 'w') as f:
json.dump(kernel_json, f, sort_keys=True)
print('Installing IPython kernel spec')
install_kernel_spec(td, 'gauche', user=user, replace=True)
def _is_root():
try:
return os.geteuid() == 0
except AttributeError:
return False # assume not an admin on non-Unix platforms
def main(argv=[]):
user = '--user' in argv or not _is_root()
install_gauche_kernel_spec(user=user)
if __name__ == '__main__':
main(argv=sys.argv)
| ara-ta3/jupyter-gauche | jupyter_gauche/install.py | Python | mit | 1,024 | 0.006836 |
from couchpotato.core.event import addEvent
from couchpotato.core.helpers.variable import tryFloat
from couchpotato.core.logger import CPLog
from couchpotato.core.plugins.base import Plugin
from couchpotato.environment import Env
from urlparse import urlparse
import re
import time
log = CPLog(__name__)
class Provider(Plugin):
type = None # movie, nzb, torrent, subtitle, trailer
http_time_between_calls = 10 # Default timeout for url requests
last_available_check = {}
is_available = {}
def isAvailable(self, test_url):
if Env.get('dev'): return True
now = time.time()
host = urlparse(test_url).hostname
if self.last_available_check.get(host) < now - 900:
self.last_available_check[host] = now
try:
self.urlopen(test_url, 30)
self.is_available[host] = True
except:
log.error('"%s" unavailable, trying again in an 15 minutes.', host)
self.is_available[host] = False
return self.is_available.get(host, False)
class YarrProvider(Provider):
cat_ids = []
sizeGb = ['gb', 'gib']
sizeMb = ['mb', 'mib']
sizeKb = ['kb', 'kib']
def __init__(self):
addEvent('provider.belongs_to', self.belongsTo)
addEvent('%s.search' % self.type, self.search)
addEvent('yarr.search', self.search)
addEvent('nzb.feed', self.feed)
def download(self, url = '', nzb_id = ''):
return self.urlopen(url)
def feed(self):
return []
def search(self, movie, quality):
return []
def belongsTo(self, url, provider = None, host = None):
try:
if provider and provider == self.getName():
return self
hostname = urlparse(url).hostname
if host and hostname in host:
return self
else:
for url_type in self.urls:
download_url = self.urls[url_type]
if hostname in download_url:
return self
except:
            log.debug('Url %s doesn\'t belong to %s', (url, self.getName()))
return
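    # Expected behaviour of parseSize below (values normalised to megabytes),
    # assuming tryFloat parses the numeric part of the string:
    #   parseSize('700 MB')  -> 700.0
    #   parseSize('1.4 GiB') -> 1433.6  (1.4 * 1024)
    #   parseSize('512 kb')  -> 0.5     (512 / 1024)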
def parseSize(self, size):
sizeRaw = size.lower()
size = tryFloat(re.sub(r'[^0-9.]', '', size).strip())
for s in self.sizeGb:
if s in sizeRaw:
return size * 1024
for s in self.sizeMb:
if s in sizeRaw:
return size
for s in self.sizeKb:
if s in sizeRaw:
return size / 1024
return 0
def getCatId(self, identifier):
for cats in self.cat_ids:
ids, qualities = cats
if identifier in qualities:
return ids
return [self.cat_backup_id]
def found(self, new):
log.info('Found: score(%(score)s) on %(provider)s: %(name)s', new)
| darren-rogan/CouchPotatoServer | couchpotato/core/providers/base.py | Python | gpl-3.0 | 2,933 | 0.004773 |
#! usr/bin/env python
# coding: utf8
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
"""metalex is general tool for lexicographic and metalexicographic activities
Copyright (C) 2017 by Elvis MBONING
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Contact: levismboning@yahoo.fr
---------------------------------------------------------------------------
makeBalise transforms extracted articles into a well-formed XML file.
It can also generate an HTML file for article editing.
Packages:
>>> sudo apt-get install python-html5lib
>>> sudo apt-get install python-lxml
>>> sudo apt-get install python-bs4
Usage:
>>> from metalex.dicXmilised import *
>>> dicoHtml(save=True)
"""
# ----Internal Modules------------------------------------------------------
import metalex
from .composeArticle import *
from .dicXmlTool import *
# ----External Modules------------------------------------------------------
import re
import sys
import codecs
import os
from bs4 import BeautifulSoup
from random import sample
from shutil import copyfile
from lxml import etree
from termcolor import colored
# -----Exported Functions-----------------------------------------------------
__all__ = ['BaliseXML', 'dico_html']
# -----Global Variables-----------------------------------------------------
components = {
'xml' : {
'metalexMetadata' : ['metalexMetadata', 'projectName', 'author',
'dateCreation', 'comment', 'contributors', 'candidate'],
'metalexContent' : ['article', 'definition', 'example', 'figured', 'contrary',
'entry', 'flexion', 'category', 'gender', 'rection', 'phonetic',
'identificationComponent', 'treatmentComponent', 'cte_cat',
'processingUnit', 'cgr_pt', 'cgr_vrg', 'cgr_fpar', 'cgr_opar',
'cgr_ocrch', 'cgr_fcrch', 'metalexContent', 'cte_gender',
'metalexResultDictionary']
},
'tei' : {
'teiHeader' : ['teiHeader', 'text', 'TEI', 'fileDesc', 'titleStmt',
'title', 'publicationStmt', 'p', 'sourceDesc', 'author'],
'text' : ['body', 'head', 'entry', 'form', 'orth', 'gramGrp',
'sense', 'def', 'cite', 'quote', 'span', 'usg', 'bibl',
'pos', 'genre', 'number', 'pron', 'etym']
},
'lmf' : {
'GlobalInformation' : ['LexicalResource', 'feat', 'p', 'GlobalInformation'],
'Lexicon' : ['Lexicon', 'feat', 'LexicalEntry', 'WordForm',
'Definition', 'Sense', 'Lexicon']
},
'dtd' : ['ELEMENT', 'ATTRIBUTE', 'PCDATA', 'CDATA', 'REQUIRED', 'IMPLIED'],
'xsd' : []
}
codifArticles = []
# ----------------------------------------------------------
def dico_html(save=False):
"""Build HTML editor file of the all articles
:return file: metalexViewerEditor.html
"""
print('\n --- %s %s \n\n' %(colored('Part 4: Generate Output formats', attrs=['bold']), '--'*25))
metalex.plugins
instanceHtml = BaliseHTML()
filepath = metalex.html_template
metalex.utils.create_temp()
if metalex.utils.in_dir('CopymetalexTemplate.html'):
copyfile(filepath, 'CopymetalexTemplate.html')
souphtl = instanceHtml.html_inject('CopymetalexTemplate.html')
if save:
metalex.utils.go_to_dicresult()
name = metalex.currentOcr+'_metalexViewerEditor.html'
with codecs.open(name, 'w') as htmlresult:
htmlresult.write(souphtl)
metalex.utils.create_temp()
os.remove('CopymetalexTemplate.html')
message = "*"+name+"* has correctly been generated > Saved in dicTemp folder"
metalex.logs.manageLog.write_log(message)
else:
souphtl = instanceHtml.html_inject('CopymetalexTemplate.html')
if save:
metalex.utils.go_to_dicresult()
            name = metalex.currentOcr+'_metalexViewerEditor.html'
            with codecs.open(name, 'w') as htmlresult:
htmlresult.write(souphtl)
metalex.utils.create_temp()
os.remove('CopymetalexTemplate.html')
message = "*"+name+"* has correctly been generated > Saved in dicTemp folder"
metalex.logs.manageLog.write_log(message)
print('\n\n --- %s --------------- \n\n' %colored('MetaLex Processes was ended: consult results data in "dicTemp" folder',
'green', attrs=['bold']))
class BaliseHTML():
def __init__(self):
self.resultHtml = ''
def html_inject(self, template):
"""Create prettify HTML file all previous data generated
:return str: html (prettify by BeautifulSoup)
"""
instanceXml = BaliseXML()
contentxml = instanceXml.put_xml(typ='xml', save=True)
metalex.utils.create_temp()
soupXml = BeautifulSoup(contentxml, "html.parser")
projectconf = metalex.utils.read_conf()
Hauthor, Hname = projectconf['Author'], projectconf['Projectname'],
Hdate,Hcomment = projectconf['Creationdate'], projectconf['Comment']
Hcontrib = projectconf['Contributors']
filetemplate = codecs.open(template, 'r', 'utf-8')
souphtml = BeautifulSoup(filetemplate, "html5lib")
content = souphtml.find('div', attrs={'id': 'all-articles'})
author = content.find('h3', attrs={'id': 'author'})
author.string = 'main: '+Hauthor
date = content.find('h5', attrs={'id': 'date'})
date.string = Hdate
descipt = content.find('p', attrs={'id': 'description'})
descipt.string = Hcomment
contrib = content.find('h4', attrs={'id': 'contributors'})
contrib.string = 'contributors: '+Hcontrib
project = content.find('h4', attrs={'id': 'projetname'})
project.string = Hname
articlesxml = soupXml.findAll('article')
articleshtml = souphtml.find('div', attrs={'id': 'mtl:articles'})
for x in articlesxml:
elementart = BeautifulSoup('<article id=""></article>', 'html5lib')
idart = x.get('id')
artlem = x.get_text()
elementart.article.append(artlem)
elementart.article['id'] = idart
articleshtml.append(elementart.find('article'))
listlemme = souphtml.find('ul', attrs={'id': 'list-articles'})
for x in articlesxml:
art = x.get_text()
idart = x.get('id')
lem = x.find('entry').get_text()
lemme = BeautifulSoup('<li class="w3-hover-light-grey"><span class="lemme" onclick="changeImage('+
"'"+idart+"'"+')">'+lem+'</span><span class="fa fa-plus w3-closebtn" onclick="add('+
"'"+idart+"'"+')"/></li>', 'html5lib')
listlemme.append(lemme.find('li'))
filetemplate.close()
self.resultHtml = souphtml.prettify('utf-8')
return self.resultHtml
class BaliseXML ():
"""Build XML file type (xml|tei|lmf) with global metadata of the project
:param typ: str
:return obj: instance of BaliseXML
"""
def __init__(self, typ="xml"):
self.typ = typ
def build_structure(self, data, Sfile=None, typ='dtd'):
return False
def message(self, name):
return "*"+name+"* dictionary articles formated in xml is"+\
" created > Saved in dicTemp folder"
def put_xml(self, typ='xml', save=False):
"""Create well formed (xml|tei|lmf) file with metadata and content xml
:return metalexXml
"""
metadata = self.xml_metadata(typ)
content = self.xml_content(typ)
metalex.utils.go_to_dicresult()
if typ == 'xml':
if save:
name = 'metalex-'+metalex.projectName+'_'+metalex.currentOcr+'.xml'
metalexXml = self.balise(metadata+content, 'metalexResultDictionary',
attr={'xmlns':'https://www.w3schools.com',
'xmlns:xsi':'http://www.w3.org/2001/XMLSchema-in',
'xsi:schemaLocation':'metalexSchemaXML.xsd'})
metalexXml = '<?xml version="1.0" encoding="UTF-8" ?>'+metalexXml
metalexXmlTree = BeautifulSoup(metalexXml, 'xml')
if metalex.utils.in_dir(name):
with codecs.open(name, 'w', 'utf-8') as fle:
fle.write(metalexXmlTree.prettify(formatter=None))
mge = self.message(name)
metalex.logs.manageLog.write_log(mge)
else:
mge = self.message(name)
metalex.logs.manageLog.write_log(mge)
return metalexXml
else:
metalexXml = self.balise(metadata+content, 'metalexResultDictionary', attr={})
metalexXml = '<?xml version="1.0" encoding="UTF-8" ?>'+metalexXml
metalexXmlTree = BeautifulSoup(metalexXml, 'xml')
print(metalexXmlTree.prettify(formatter=None))
if typ == 'tei':
if save:
name = 'metalex-'+metalex.projectName+'_'+metalex.currentOcr+'-TEI.xml'
metalexXml = self.balise(metadata+content, 'TEI', typ= 'tei')
metalexXml = '<?xml version="1.0" encoding="UTF-8" ?>'+metalexXml
metalexXmlTree = BeautifulSoup(metalexXml, 'xml')
if metalex.utils.in_dir(name):
with codecs.open(name, 'w', 'utf-8') as fle:
fle.write(metalexXmlTree.prettify(formatter=None))
mge = self.message(name)
metalex.logs.manageLog.write_log(mge)
else:
mge = self.message(name)
metalex.logs.manageLog.write_log(mge)
return metalexXml
else:
metalexXml = self.balise(metadata+content, 'TEI', typ= 'tei')
metalexXml = '<?xml version="1.0" encoding="UTF-8" ?>'+metalexXml
metalexXmlTree = BeautifulSoup(metalexXml, 'xml')
print(metalexXmlTree.prettify(formatter=None))
if typ == 'lmf':
os.listdir('.')
if save:
name = 'metalex-'+metalex.projectName+'_'+metalex.currentOcr+'-LMF.xml'
metalexXml = self.balise(metadata+content, 'LexicalResource', attr={'dtdVersion':'15'}, typ= 'lmf')
metalexXml = '<?xml version="1.0" encoding="UTF-8" ?>'+metalexXml
metalexXmlTree = BeautifulSoup(metalexXml, 'xml')
if metalex.utils.in_dir(name):
with codecs.open(name, 'w', 'utf-8') as fle:
fle.write(metalexXmlTree.prettify(formatter=None))
mge = self.message(name)
metalex.logs.manageLog.write_log(mge)
else:
mge = self.message(name)
metalex.logs.manageLog.write_log(mge)
return metalexXml
else:
metalexXml = self.balise(metadata+content, 'LexicalResource', attr={'dtdVersion':'15'}, typ= 'lmf')
metalexXml = '<?xml version="1.0" encoding="UTF-8" ?>'+metalexXml
metalexXmlTree = BeautifulSoup(metalexXml, 'xml')
print(metalexXmlTree.prettify(formatter=None))
def xml_metadata(self, typ='xml'):
"""Create xml metadata file with configuration of the project
:return str: metadata
"""
metalex.utils.create_temp()
projectconf = metalex.utils.read_conf()
        contribtab = projectconf['Contributors'].split(',') \
            if projectconf['Contributors'].find(',') != -1 else projectconf['Contributors']
contrib = ''
if typ == 'xml':
author = self.balise(projectconf['Author'], 'author', typ)
name = self.balise(projectconf['Projectname'].strip(), 'projectName', typ)
date = self.balise(projectconf['Creationdate'].strip(), 'dateCreation', typ)
comment = self.balise(projectconf['Comment'], 'comment', typ)
if len(contribtab) > 1:
for data in contribtab: contrib += self.balise(data.strip(), 'candidate', typ)
else: contrib = self.balise(''.join(contribtab), 'candidate', typ)
contrib = self.balise(contrib, 'contributors', typ)
cont = name+author+date+comment+contrib
metadataxml = self.balise(cont, 'metalexMetadata', typ)
return metadataxml
if typ == 'tei':
if len(contribtab) > 1:
for data in contribtab:
if len(data) > 2: contrib += self.balise(data.strip(), 'span',
attr={'content':'contributor'}, typ='tei')
else: contrib = self.balise(''.join(contribtab), 'span', typ='tei')
author = self.balise(projectconf['Author'], 'author', typ='tei')
title = self.balise(projectconf['Projectname'], 'title', typ='tei')
RtitleStmt = self.balise(title, 'titleStmt', typ='tei')
pdate = self.balise(projectconf['Creationdate'], 'p', typ='tei')
pcomment = self.balise(projectconf['Comment'], 'p', typ='tei')
pcontrib = self.balise(contrib, 'p', attr={'content':'contributors'}, typ='tei')
Rpubli = self.balise(author+pdate+pcomment+pcontrib, 'publicationStmt', typ='tei')
sourc = self.balise('TEI metadata for metalex project output', 'p', typ='tei')
Rsourc = self.balise(sourc, 'sourceDesc', typ='tei')
RfilD = self.balise(RtitleStmt+Rpubli+Rsourc, 'fileDesc', typ='tei')
metadatatei = self.balise(RfilD, 'teiHeader', typ='tei')
return metadatatei
if typ == 'lmf':
if len(contribtab) > 1:
for data in contribtab:
if len(data) > 2: contrib += data.strip()+', '
else: contrib = ', '.join(contribtab)
enc = self.balise('', 'feat', attr={'att':'languageCoding', 'val':'utf-8'},
typ='lmf', sclose=True)
pauthor = self.balise('', 'feat', attr={'att':'author', 'val':projectconf['Author'].strip()},
typ='lmf', sclose=True)
pdate = self.balise('', 'feat', attr={'att':'dateCreation', 'val':projectconf['Creationdate'].strip()},
typ='lmf', sclose=True)
pname = self.balise('', 'feat', attr={'att':'projectName', 'val':projectconf['Projectname'].strip()},
typ='lmf', sclose=True)
pcomment = self.balise('', 'feat', attr={'att':'comment', 'val':projectconf['Comment'].strip()},
typ='lmf', sclose=True)
pcontrib = self.balise('', 'feat', attr={'att':'contributors', 'val':contrib.strip(', ')},
typ='lmf', sclose=True)
meta = self.balise('', 'p', attr={'att':'meta', 'val':'TEI metadata for metalex project output'},
typ='lmf', sclose=True)
metadatalmf = self.balise(enc+pauthor+pname+meta+pdate+pcomment+pcontrib, 'GlobalInformation', typ='lmf')
return metadatalmf
def balise_content_article (self):
data = get_data_articles('text')
cod = StructuredWithCodif(data, 'xml')
resultArticles = []
for art in cod.format_articles():
article_type_form(art)
if article_type_form(art) == '1':
partArt = re.search(r'(([a-zéèàûô]+)\s(<cte_cat>.+</cte_cat>)\s(.+)<cgr_pt>\.</cgr_pt>)', art, re.I)
if partArt != None:
ident, entry, cat, treat = partArt.group(1), partArt.group(2), partArt.group(3), partArt.group(4)
id = generate_id()
entry = self.balise(entry, 'entry')
ident = self.balise(entry+cat, 'identificationComponent')
treat = self.balise(self.balise(treat, 'definition'), 'processingUnit')
article = self.balise(ident+self.balise(treat, 'treatmentComponent'), 'article', attr={'id':id})
resultArticles.append(article)
if article_type_form(art) == '2':
research = r'(([a-zéèàûô]+)\s(<cte_cat>.+</cte_cat>\s<cte_gender>..</cte_gender>)\s(.+)<cgr_pt>\.</cgr_pt>)'
partArt = re.search(research, art, re.I)
if partArt != None:
ident, entry, cat, treat = partArt.group(1), partArt.group(2), partArt.group(3), partArt.group(4)
id = generate_id()
entry = self.balise(entry, 'entry')
ident = self.balise(entry+cat, 'identificationComponent')
if not re.search(r'(<cgr_pt>\.</cgr_pt>|<cte_cat>.+</cte_cat>|<cgr_vrg>,</cgr_vrg>)', partArt.group(4), re.I):
treat = self.balise(self.balise(treat+'.', 'definition'), 'processingUnit')
article = self.balise(ident+self.balise(treat, 'treatmentComponent'), 'article', attr={'id':id})
resultArticles.append(article)
elif partArt.group(4).find(' et ') != -1:
suite = 'hahaha'
return resultArticles
def xml_content(self, typ='xml', forme='text'):
"""Create xml content file (representing articles) with data articles extracting
:return str: contentXml
"""
content = ''
contentXml = ''
data = self.balise_content_article()
if typ == 'xml':
if forme == 'pickle':
data = get_data_articles('pickle')
for dicart in data:
for art in dicart.keys():
art = self.balise(dicart[art], 'article', art=True)
content += art
contentXml = self.balise(content, 'metalexContent')
return contentXml
else:
for art in data: content += art
contentXml = self.balise(content, 'metalexContent', attr={'totalArticle': str(len(data))})
return contentXml
if typ == 'tei':
for art in data:
soupart = BeautifulSoup(art, 'html.parser')
orth = soupart.find('entry').getText()
atOrth = soupart.find('article').get('id')
orth = self.balise(orth, 'orth', {'id': atOrth}, typ='tei')
formB = self.balise(orth, 'form', attr={'xml:lang':'fr', 'type':'lemma'}, typ='tei')
pos = soupart.find('cte_cat').getText()
posB = self.balise(pos, 'pos', typ='tei')
genB = ''
if soupart.find('cte_gender'): genB = soupart.find('cte_gender').getText().strip()
if genB == 'f.' or genB == 'm.': genB = self.balise(genB, 'genre', typ='tei')
gramgrp = self.balise(posB+genB, 'gramGrp', typ='tei')
sens = soupart.find('processingunit').getText().replace(' .', '.')
defi = self.balise(sens, 'def', typ='tei')
if sens != None: sens = self.balise(defi, 'sense', typ='tei')
entry = self.balise(formB+gramgrp+sens, 'entry', typ='tei')
content += entry
body = self.balise(content, 'body', typ='tei')
contentXml = self.balise(body, 'text', attr={'totalArticle': str(len(data))}, typ='tei')
return contentXml
if typ == 'lmf':
for art in data:
soupart = BeautifulSoup(art, 'html.parser')
orth = soupart.find('entry').getText()
atOrth = soupart.find('article').get('id')
orth = self.balise('', 'feat', attr={'att':'writtenForm','val':orth},
typ='lmf', sclose=True)
wordF = self.balise(orth, 'WordForm', attr={'id': atOrth}, typ='lmf')
pos = soupart.find('cte_cat').getText()
posB = self.balise('', 'feat', attr={'att':'partOfSpeech','val':pos},
typ='lmf', sclose=True)
genB = ''
if soupart.find('cte_gender'): genB = soupart.find('cte_gender').getText().strip()
if genB == 'f.' or genB == 'm.':
genB = self.balise('', 'feat', attr={'att':'grammaticalNumber','val': genB},
typ='lmf', sclose=True)
sens = soupart.find('processingunit').getText().replace(' .', '.')
sensnb = self.balise('', 'feat', attr={'att':'sensNumber','val':'1'},
typ='lmf', sclose=True)
definb = self.balise('', 'feat', attr={'att':'text','val':sens.strip()},
typ='lmf', sclose=True)
defi = self.balise(definb, 'Definition', typ='lmf')
if sens != None: sens = self.balise(sensnb+defi, 'Sense', typ='lmf')
entry = self.balise(wordF+posB+genB+sens, 'LexicalEntry', typ='lmf')
content += entry
body = self.balise('', 'feat', attr={'att':'language','val':'fra'},
typ='lmf', sclose=True)+content
contentXml = self.balise(body, 'Lexicon', attr={'totalArticle': str(len(data))}, typ='lmf')
return contentXml
def balise(self, element, markup, sclose=False, attr=None, typ='xml', art=False):
"""Markup data with a specific format type (xml|tei|lmf)
:return str: balised element
"""
if typ == 'xml':
if markup in components['xml']['metalexContent'] or markup \
in components['xml']['metalexMetadata']:
if art:
element = self.chevron(markup, attr, art=True)+element+self.chevron(markup, attr, False)
return element
else:
element = self.chevron(markup, attr)+element+self.chevron(markup, attr, False)
return element
if typ == 'tei':
if markup in components['tei']['text'] or markup in components['tei']['teiHeader']:
if art:
element = self.chevron(markup, attr, art=True)+element+self.chevron(markup, attr, False)
return element
else:
element = self.chevron(markup, attr)+element+self.chevron(markup, attr, False)
return element
if typ == 'lmf':
if markup in components['lmf']['GlobalInformation'] \
or components['lmf']['Lexicon']:
if sclose:
element = self.chevron(markup, attr, True, sclose=True)
return element
else:
element = self.chevron(markup, attr)+element+self.chevron(markup, attr, False)
return element
def chevron(self, el, attr, openchev=True, art=False, sclose=False):
"""Put tag around the data of element
:return str: tagging element
"""
idart = generate_id()
if art and attr == None:
if openchev : return "<"+el+" id='"+idart+"' class='data-entry'"+">"
if not openchev: return "</"+el+">"
if sclose : return "<"+el+" id='"+idart+"'/>"
if art and attr != None:
allattrib = ''
for at in attr.keys():
allattrib += ' '+at+'="'+attr[at]+'"'
if openchev and not sclose : return "<"+el+" id='"+idart+"' class='data-entry'"+' '+allattrib+">"
if openchev and sclose: return "<"+el+" id='"+idart+"' class='data-entry'"+' '+allattrib+"/>"
if not openchev: return "</"+el+">"
elif art == False and attr != None:
#print openchev
allattrib = ''
for at in attr.keys(): allattrib += ' '+at+'="'+attr[at]+'"'
if openchev and not sclose: return "<"+el+' '+allattrib+">"
if openchev and sclose: return "<"+el+' '+allattrib+"/>"
if not openchev: return "</"+el+">"
elif art == False and attr == None:
if openchev : return "<"+el+">"
if sclose : return "<"+el+"/>"
if not openchev: return "</"+el+">"
| Levis0045/MetaLex | metalex/xmlised/makeBalise.py | Python | agpl-3.0 | 26,115 | 0.01425 |
../../../../../../share/pyshared/twisted/internet/iocpreactor/tcp.py | Alberto-Beralix/Beralix | i386-squashfs-root/usr/lib/python2.7/dist-packages/twisted/internet/iocpreactor/tcp.py | Python | gpl-3.0 | 68 | 0.014706 |
'''
Copyright 2013 Cosnita Radu Viorel
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
.. codeauthor:: Radu Viorel Cosnita <radu.cosnita@gmail.com>
.. py:module:: fantastico.middleware.tests.test_routing_middleware
'''
from fantastico.exceptions import FantasticoNoRequestError, FantasticoError
from fantastico.middleware.routing_middleware import RoutingMiddleware
from fantastico.tests.base_case import FantasticoUnitTestsCase
from mock import Mock
from webob.request import Request
class RoutingMiddlewareTests(FantasticoUnitTestsCase):
'''This class provides all test cases required to make sure routing middleware is working as expected.'''
def init(self):
self._environ = {}
self._app = Mock()
self._router = Mock()
self._router_cls = Mock(return_value=self._router)
self._routing_middleware = RoutingMiddleware(self._app, self._router_cls)
def test_route_handled_correctly(self):
'''This test case ensures a route is handled correctly using the middleware.'''
self._environ["fantastico.request"] = Request.blank("/simple/request")
def handle_route(url, environ):
if url == "/simple/request":
environ["route_/simple/request_handler"] = {"controller": SimpleController(), "method": "do_request"}
self._router.handle_route = handle_route
self._routing_middleware(self._environ, Mock())
route_handler = self._environ.get("route_/simple/request_handler")
self.assertIsNotNone(route_handler)
self.assertIsInstance(route_handler.get("controller"), SimpleController)
self.assertEqual("do_request", route_handler.get("method"))
def test_route_norequest_built(self):
'''This test case ensures an exception is raised if no request is available in wsgi environ.'''
self.assertRaises(FantasticoNoRequestError, self._routing_middleware, *[self._environ, Mock()])
def test_route_unhandled_exception(self):
'''This test case ensures that unhandled exceptions are correctly transformed to fantastico exceptions.'''
self._environ["fantastico.request"] = Request.blank("/simple/request")
self._router.handle_route = Mock(side_effect=Exception("Unhandled error"))
with self.assertRaises(FantasticoError) as cm:
self._routing_middleware(self._environ, Mock())
self.assertTrue(str(cm.exception).find("Unhandled error") > -1)
def test_router_registration_ok(self):
'''This test case ensures that routing middleware correctly calls underlining methods from the given router
so that it correctly discovers all available routes.'''
self.get_loaders_invoked = False
self.register_routes_invoked = False
def get_loaders():
self.get_loaders_invoked = True
def register_routes():
self.register_routes_invoked = True
router = Mock()
router_cls = Mock(return_value=router)
router.get_loaders = lambda: get_loaders()
router.register_routes = lambda: register_routes()
RoutingMiddleware(Mock(), router_cls)
self.assertTrue(self.get_loaders_invoked)
self.assertTrue(self.register_routes_invoked)
class SimpleController(object):
'''Class used to simulate a controller that can handle certain requests.'''
pass | rcosnita/fantastico | fantastico/middleware/tests/test_routing_middleware.py | Python | mit | 4,545 | 0.011441 |
import numpy as np
import pandas as pd
import pandas.util.testing as tm
import dask.dataframe as dd
from dask.dataframe.utils import (shard_df_on_index, meta_nonempty, make_meta,
raise_on_meta_error)
import pytest
def test_shard_df_on_index():
df = pd.DataFrame({'x': [1, 2, 3, 4, 5, 6], 'y': list('abdabd')},
index=[10, 20, 30, 40, 50, 60])
result = list(shard_df_on_index(df, [20, 50]))
assert list(result[0].index) == [10]
assert list(result[1].index) == [20, 30, 40]
assert list(result[2].index) == [50, 60]
def test_make_meta():
df = pd.DataFrame({'a': [1, 2, 3], 'b': list('abc'), 'c': [1., 2., 3.]},
index=[10, 20, 30])
# Pandas dataframe
meta = make_meta(df)
assert len(meta) == 0
assert (meta.dtypes == df.dtypes).all()
assert isinstance(meta.index, type(df.index))
# Pandas series
meta = make_meta(df.a)
assert len(meta) == 0
assert meta.dtype == df.a.dtype
assert isinstance(meta.index, type(df.index))
# Pandas index
meta = make_meta(df.index)
assert isinstance(meta, type(df.index))
assert len(meta) == 0
# Dask object
ddf = dd.from_pandas(df, npartitions=2)
assert make_meta(ddf) is ddf._meta
# Dict
meta = make_meta({'a': 'i8', 'b': 'O', 'c': 'f8'})
assert isinstance(meta, pd.DataFrame)
assert len(meta) == 0
assert (meta.dtypes == df.dtypes).all()
assert isinstance(meta.index, pd.RangeIndex)
# Iterable
meta = make_meta([('a', 'i8'), ('c', 'f8'), ('b', 'O')])
assert (meta.columns == ['a', 'c', 'b']).all()
assert len(meta) == 0
assert (meta.dtypes == df.dtypes[meta.dtypes.index]).all()
assert isinstance(meta.index, pd.RangeIndex)
# Tuple
meta = make_meta(('a', 'i8'))
assert isinstance(meta, pd.Series)
assert len(meta) == 0
assert meta.dtype == 'i8'
assert meta.name == 'a'
# With index
meta = make_meta({'a': 'i8', 'b': 'i4'}, pd.Int64Index([1, 2], name='foo'))
assert isinstance(meta.index, pd.Int64Index)
assert len(meta.index) == 0
meta = make_meta(('a', 'i8'), pd.Int64Index([1, 2], name='foo'))
assert isinstance(meta.index, pd.Int64Index)
assert len(meta.index) == 0
# Numpy scalar
meta = make_meta(np.float64(1.0))
assert isinstance(meta, np.float64)
# Python scalar
meta = make_meta(1.0)
assert isinstance(meta, np.float64)
# Timestamp
x = pd.Timestamp(2000, 1, 1)
meta = make_meta(x)
assert meta is x
# Dtype expressions
meta = make_meta('i8')
assert isinstance(meta, np.int64)
meta = make_meta(float)
assert isinstance(meta, np.dtype(float).type)
meta = make_meta(np.dtype('bool'))
assert isinstance(meta, np.bool_)
assert pytest.raises(TypeError, lambda: make_meta(None))
def test_meta_nonempty():
df1 = pd.DataFrame({'A': pd.Categorical(['Alice', 'Bob', 'Carol']),
'B': list('abc'),
'C': 'bar',
'D': np.float32(1),
'E': np.int32(1),
'F': pd.Timestamp('2016-01-01'),
'G': pd.date_range('2016-01-01', periods=3,
tz='America/New_York'),
'H': pd.Timedelta('1 hours', 'ms'),
'I': np.void(b' ')},
columns=list('DCBAHGFEI'))
df2 = df1.iloc[0:0]
df3 = meta_nonempty(df2)
assert (df3.dtypes == df2.dtypes).all()
assert df3['A'][0] == 'Alice'
assert df3['B'][0] == 'foo'
assert df3['C'][0] == 'foo'
assert df3['D'][0] == np.float32(1)
assert df3['D'][0].dtype == 'f4'
assert df3['E'][0] == np.int32(1)
assert df3['E'][0].dtype == 'i4'
assert df3['F'][0] == pd.Timestamp('1970-01-01 00:00:00')
assert df3['G'][0] == pd.Timestamp('1970-01-01 00:00:00',
tz='America/New_York')
assert df3['H'][0] == pd.Timedelta('1', 'ms')
assert df3['I'][0] == 'foo'
s = meta_nonempty(df2['A'])
assert s.dtype == df2['A'].dtype
assert (df3['A'] == s).all()
def test_meta_duplicated():
df = pd.DataFrame(columns=['A', 'A', 'B'])
res = meta_nonempty(df)
exp = pd.DataFrame([['foo', 'foo', 'foo'],
['foo', 'foo', 'foo']],
index=['a', 'b'],
columns=['A', 'A', 'B'])
tm.assert_frame_equal(res, exp)
def test_meta_nonempty_index():
idx = pd.RangeIndex(1, name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.RangeIndex
assert res.name == idx.name
idx = pd.Int64Index([1], name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.Int64Index
assert res.name == idx.name
idx = pd.Index(['a'], name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.Index
assert res.name == idx.name
idx = pd.DatetimeIndex(['1970-01-01'], freq='d',
tz='America/New_York', name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.DatetimeIndex
assert res.tz == idx.tz
assert res.freq == idx.freq
assert res.name == idx.name
idx = pd.PeriodIndex(['1970-01-01'], freq='d', name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.PeriodIndex
assert res.freq == idx.freq
assert res.name == idx.name
idx = pd.TimedeltaIndex([np.timedelta64(1, 'D')], freq='d', name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.TimedeltaIndex
assert res.freq == idx.freq
assert res.name == idx.name
idx = pd.CategoricalIndex(['a'], ['a', 'b'], ordered=True, name='foo')
res = meta_nonempty(idx)
assert type(res) is pd.CategoricalIndex
assert (res.categories == idx.categories).all()
assert res.ordered == idx.ordered
assert res.name == idx.name
levels = [pd.Int64Index([1], name='a'),
pd.Float64Index([1.0], name='b')]
idx = pd.MultiIndex(levels=levels, labels=[[0], [0]], names=['a', 'b'])
res = meta_nonempty(idx)
assert type(res) is pd.MultiIndex
for idx1, idx2 in zip(idx.levels, res.levels):
assert type(idx1) is type(idx2)
assert idx1.name == idx2.name
assert res.names == idx.names
def test_meta_nonempty_scalar():
meta = meta_nonempty(np.float64(1.0))
assert isinstance(meta, np.float64)
x = pd.Timestamp(2000, 1, 1)
meta = meta_nonempty(x)
assert meta is x
def test_raise_on_meta_error():
try:
with raise_on_meta_error():
raise RuntimeError("Bad stuff")
except Exception as e:
assert e.args[0].startswith("Metadata inference failed.\n")
assert 'RuntimeError' in e.args[0]
try:
with raise_on_meta_error("myfunc"):
raise RuntimeError("Bad stuff")
except Exception as e:
assert e.args[0].startswith("Metadata inference failed in `myfunc`.\n")
assert 'RuntimeError' in e.args[0]
| jeffery-do/Vizdoombot | doom/lib/python3.5/site-packages/dask/dataframe/tests/test_utils_dataframe.py | Python | mit | 7,032 | 0 |
# Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import cPickle as pickle
import os
import signal
import sys
import time
from random import random
from eventlet import patcher, Timeout, TimeoutError
from swift.common.bufferedhttp import http_connect
from swift.common.exceptions import ConnectionTimeout
from swift.common.ring import Ring
from swift.common.utils import get_logger, renamer, write_pickle
from swift.common.daemon import Daemon
from swift.obj.server import ASYNCDIR
class ObjectUpdater(Daemon):
"""Update object information in container listings."""
def __init__(self, conf):
self.conf = conf
self.logger = get_logger(conf, log_route='object-updater')
self.devices = conf.get('devices', '/srv/node')
self.mount_check = conf.get('mount_check', 'true').lower() in \
('true', 't', '1', 'on', 'yes', 'y')
swift_dir = conf.get('swift_dir', '/etc/swift')
self.interval = int(conf.get('interval', 300))
self.container_ring_path = os.path.join(swift_dir, 'container.ring.gz')
self.container_ring = None
self.concurrency = int(conf.get('concurrency', 1))
self.slowdown = float(conf.get('slowdown', 0.01))
self.node_timeout = int(conf.get('node_timeout', 10))
self.conn_timeout = float(conf.get('conn_timeout', 0.5))
self.successes = 0
self.failures = 0
def get_container_ring(self):
"""Get the container ring. Load it, if it hasn't been yet."""
if not self.container_ring:
self.logger.debug(
_('Loading container ring from %s'), self.container_ring_path)
self.container_ring = Ring(self.container_ring_path)
return self.container_ring
def run_forever(self, *args, **kwargs):
"""Run the updater continuously."""
time.sleep(random() * self.interval)
while True:
self.logger.info(_('Begin object update sweep'))
begin = time.time()
pids = []
# read from container ring to ensure it's fresh
self.get_container_ring().get_nodes('')
for device in os.listdir(self.devices):
if self.mount_check and not \
os.path.ismount(os.path.join(self.devices, device)):
self.logger.warn(
_('Skipping %s as it is not mounted'), device)
continue
while len(pids) >= self.concurrency:
pids.remove(os.wait()[0])
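                # fork one child per device: the parent tracks pids while the child runs the sweep and exits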
pid = os.fork()
if pid:
pids.append(pid)
else:
signal.signal(signal.SIGTERM, signal.SIG_DFL)
patcher.monkey_patch(all=False, socket=True)
self.successes = 0
self.failures = 0
forkbegin = time.time()
self.object_sweep(os.path.join(self.devices, device))
elapsed = time.time() - forkbegin
self.logger.info(_('Object update sweep of %(device)s'
' completed: %(elapsed).02fs, %(success)s successes'
', %(fail)s failures'),
{'device': device, 'elapsed': elapsed,
'success': self.successes, 'fail': self.failures})
sys.exit()
while pids:
pids.remove(os.wait()[0])
elapsed = time.time() - begin
self.logger.info(_('Object update sweep completed: %.02fs'),
elapsed)
if elapsed < self.interval:
time.sleep(self.interval - elapsed)
def run_once(self, *args, **kwargs):
"""Run the updater once"""
self.logger.info(_('Begin object update single threaded sweep'))
begin = time.time()
self.successes = 0
self.failures = 0
for device in os.listdir(self.devices):
if self.mount_check and \
not os.path.ismount(os.path.join(self.devices, device)):
self.logger.warn(
_('Skipping %s as it is not mounted'), device)
continue
self.object_sweep(os.path.join(self.devices, device))
elapsed = time.time() - begin
self.logger.info(_('Object update single threaded sweep completed: '
'%(elapsed).02fs, %(success)s successes, %(fail)s failures'),
{'elapsed': elapsed, 'success': self.successes,
'fail': self.failures})
def object_sweep(self, device):
"""
If there are async pendings on the device, walk each one and update.
:param device: path to device
"""
async_pending = os.path.join(device, ASYNCDIR)
if not os.path.isdir(async_pending):
return
for prefix in os.listdir(async_pending):
prefix_path = os.path.join(async_pending, prefix)
if not os.path.isdir(prefix_path):
continue
last_obj_hash = None
for update in sorted(os.listdir(prefix_path), reverse=True):
update_path = os.path.join(prefix_path, update)
if not os.path.isfile(update_path):
continue
try:
obj_hash, timestamp = update.split('-')
except ValueError:
self.logger.error(
_('ERROR async pending file with unexpected name %s')
% (update_path))
continue
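                # files are walked newest-first, so any further pending update for the same object hash is stale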
if obj_hash == last_obj_hash:
os.unlink(update_path)
else:
self.process_object_update(update_path, device)
last_obj_hash = obj_hash
time.sleep(self.slowdown)
try:
os.rmdir(prefix_path)
except OSError:
pass
def process_object_update(self, update_path, device):
"""
Process the object information to be updated and update.
:param update_path: path to pickled object update file
:param device: path to device
"""
try:
update = pickle.load(open(update_path, 'rb'))
except Exception:
self.logger.exception(
_('ERROR Pickle problem, quarantining %s'), update_path)
renamer(update_path, os.path.join(device,
'quarantined', 'objects', os.path.basename(update_path)))
return
successes = update.get('successes', [])
part, nodes = self.get_container_ring().get_nodes(
update['account'], update['container'])
obj = '/%s/%s/%s' % \
(update['account'], update['container'], update['obj'])
success = True
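        # only contact container nodes that have not already acknowledged this update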
for node in nodes:
if node['id'] not in successes:
status = self.object_update(node, part, update['op'], obj,
update['headers'])
if not (200 <= status < 300) and status != 404:
success = False
else:
successes.append(node['id'])
if success:
self.successes += 1
self.logger.debug(_('Update sent for %(obj)s %(path)s'),
{'obj': obj, 'path': update_path})
os.unlink(update_path)
else:
self.failures += 1
self.logger.debug(_('Update failed for %(obj)s %(path)s'),
{'obj': obj, 'path': update_path})
update['successes'] = successes
write_pickle(update, update_path, os.path.join(device, 'tmp'))
def object_update(self, node, part, op, obj, headers):
"""
Perform the object update to the container
:param node: node dictionary from the container ring
:param part: partition that holds the container
:param op: operation performed (ex: 'POST' or 'DELETE')
:param obj: object name being updated
:param headers: headers to send with the update
"""
try:
with ConnectionTimeout(self.conn_timeout):
conn = http_connect(node['ip'], node['port'], node['device'],
part, op, obj, headers)
with Timeout(self.node_timeout):
resp = conn.getresponse()
resp.read()
return resp.status
except (Exception, TimeoutError):
self.logger.exception(_('ERROR with remote server '
'%(ip)s:%(port)s/%(device)s'), node)
return 500
| houseurmusic/my-swift | swift/obj/updater.py | Python | apache-2.0 | 9,252 | 0.001405 |
from __future__ import absolute_import
from __future__ import unicode_literals
import os.path
import random
import py
import pytest
from docker.errors import APIError
from docker.errors import NotFound
from .. import mock
from ..helpers import build_config as load_config
from ..helpers import create_host_file
from .testcases import DockerClientTestCase
from .testcases import SWARM_SKIP_CONTAINERS_ALL
from compose.config import config
from compose.config import ConfigurationError
from compose.config import types
from compose.config.types import VolumeFromSpec
from compose.config.types import VolumeSpec
from compose.const import COMPOSEFILE_V2_0 as V2_0
from compose.const import COMPOSEFILE_V2_1 as V2_1
from compose.const import COMPOSEFILE_V2_2 as V2_2
from compose.const import COMPOSEFILE_V3_1 as V3_1
from compose.const import LABEL_PROJECT
from compose.const import LABEL_SERVICE
from compose.container import Container
from compose.errors import HealthCheckFailed
from compose.errors import NoHealthCheckConfigured
from compose.project import Project
from compose.project import ProjectError
from compose.service import ConvergenceStrategy
from tests.integration.testcases import is_cluster
from tests.integration.testcases import no_cluster
from tests.integration.testcases import v2_1_only
from tests.integration.testcases import v2_2_only
from tests.integration.testcases import v2_only
from tests.integration.testcases import v3_only
def build_config(**kwargs):
return config.Config(
version=kwargs.get('version'),
services=kwargs.get('services'),
volumes=kwargs.get('volumes'),
networks=kwargs.get('networks'),
secrets=kwargs.get('secrets'),
configs=kwargs.get('configs'),
)
class ProjectTest(DockerClientTestCase):
def test_containers(self):
web = self.create_service('web')
db = self.create_service('db')
project = Project('composetest', [web, db], self.client)
project.up()
containers = project.containers()
self.assertEqual(len(containers), 2)
@pytest.mark.skipif(SWARM_SKIP_CONTAINERS_ALL, reason='Swarm /containers/json bug')
def test_containers_stopped(self):
web = self.create_service('web')
db = self.create_service('db')
project = Project('composetest', [web, db], self.client)
project.up()
assert len(project.containers()) == 2
assert len(project.containers(stopped=True)) == 2
project.stop()
assert len(project.containers()) == 0
assert len(project.containers(stopped=True)) == 2
def test_containers_with_service_names(self):
web = self.create_service('web')
db = self.create_service('db')
project = Project('composetest', [web, db], self.client)
project.up()
containers = project.containers(['web'])
self.assertEqual(
[c.name for c in containers],
['composetest_web_1'])
def test_containers_with_extra_service(self):
web = self.create_service('web')
web_1 = web.create_container()
db = self.create_service('db')
db_1 = db.create_container()
self.create_service('extra').create_container()
project = Project('composetest', [web, db], self.client)
self.assertEqual(
set(project.containers(stopped=True)),
set([web_1, db_1]),
)
def test_volumes_from_service(self):
project = Project.from_config(
name='composetest',
config_data=load_config({
'data': {
'image': 'busybox:latest',
'volumes': ['/var/data'],
},
'db': {
'image': 'busybox:latest',
'volumes_from': ['data'],
},
}),
client=self.client,
)
db = project.get_service('db')
data = project.get_service('data')
self.assertEqual(db.volumes_from, [VolumeFromSpec(data, 'rw', 'service')])
def test_volumes_from_container(self):
data_container = Container.create(
self.client,
image='busybox:latest',
volumes=['/var/data'],
name='composetest_data_container',
labels={LABEL_PROJECT: 'composetest'},
host_config={},
)
project = Project.from_config(
name='composetest',
config_data=load_config({
'db': {
'image': 'busybox:latest',
'volumes_from': ['composetest_data_container'],
},
}),
client=self.client,
)
db = project.get_service('db')
self.assertEqual(db._get_volumes_from(), [data_container.id + ':rw'])
@v2_only()
@no_cluster('container networks not supported in Swarm')
def test_network_mode_from_service(self):
project = Project.from_config(
name='composetest',
client=self.client,
config_data=load_config({
'version': str(V2_0),
'services': {
'net': {
'image': 'busybox:latest',
'command': ["top"]
},
'web': {
'image': 'busybox:latest',
'network_mode': 'service:net',
'command': ["top"]
},
},
}),
)
project.up()
web = project.get_service('web')
net = project.get_service('net')
self.assertEqual(web.network_mode.mode, 'container:' + net.containers()[0].id)
@v2_only()
@no_cluster('container networks not supported in Swarm')
def test_network_mode_from_container(self):
def get_project():
return Project.from_config(
name='composetest',
config_data=load_config({
'version': str(V2_0),
'services': {
'web': {
'image': 'busybox:latest',
'network_mode': 'container:composetest_net_container'
},
},
}),
client=self.client,
)
with pytest.raises(ConfigurationError) as excinfo:
get_project()
assert "container 'composetest_net_container' which does not exist" in excinfo.exconly()
net_container = Container.create(
self.client,
image='busybox:latest',
name='composetest_net_container',
command='top',
labels={LABEL_PROJECT: 'composetest'},
host_config={},
)
net_container.start()
project = get_project()
project.up()
web = project.get_service('web')
self.assertEqual(web.network_mode.mode, 'container:' + net_container.id)
@no_cluster('container networks not supported in Swarm')
def test_net_from_service_v1(self):
project = Project.from_config(
name='composetest',
config_data=load_config({
'net': {
'image': 'busybox:latest',
'command': ["top"]
},
'web': {
'image': 'busybox:latest',
'net': 'container:net',
'command': ["top"]
},
}),
client=self.client,
)
project.up()
web = project.get_service('web')
net = project.get_service('net')
self.assertEqual(web.network_mode.mode, 'container:' + net.containers()[0].id)
@no_cluster('container networks not supported in Swarm')
def test_net_from_container_v1(self):
def get_project():
return Project.from_config(
name='composetest',
config_data=load_config({
'web': {
'image': 'busybox:latest',
'net': 'container:composetest_net_container'
},
}),
client=self.client,
)
with pytest.raises(ConfigurationError) as excinfo:
get_project()
assert "container 'composetest_net_container' which does not exist" in excinfo.exconly()
net_container = Container.create(
self.client,
image='busybox:latest',
name='composetest_net_container',
command='top',
labels={LABEL_PROJECT: 'composetest'},
host_config={},
)
net_container.start()
project = get_project()
project.up()
web = project.get_service('web')
self.assertEqual(web.network_mode.mode, 'container:' + net_container.id)
def test_start_pause_unpause_stop_kill_remove(self):
web = self.create_service('web')
db = self.create_service('db')
project = Project('composetest', [web, db], self.client)
project.start()
self.assertEqual(len(web.containers()), 0)
self.assertEqual(len(db.containers()), 0)
web_container_1 = web.create_container()
web_container_2 = web.create_container()
db_container = db.create_container()
project.start(service_names=['web'])
self.assertEqual(
set(c.name for c in project.containers() if c.is_running),
set([web_container_1.name, web_container_2.name]))
project.start()
self.assertEqual(
set(c.name for c in project.containers() if c.is_running),
set([web_container_1.name, web_container_2.name, db_container.name]))
project.pause(service_names=['web'])
self.assertEqual(
set([c.name for c in project.containers() if c.is_paused]),
set([web_container_1.name, web_container_2.name]))
project.pause()
self.assertEqual(
set([c.name for c in project.containers() if c.is_paused]),
set([web_container_1.name, web_container_2.name, db_container.name]))
project.unpause(service_names=['db'])
self.assertEqual(len([c.name for c in project.containers() if c.is_paused]), 2)
project.unpause()
self.assertEqual(len([c.name for c in project.containers() if c.is_paused]), 0)
project.stop(service_names=['web'], timeout=1)
self.assertEqual(
set(c.name for c in project.containers() if c.is_running), set([db_container.name])
)
project.kill(service_names=['db'])
self.assertEqual(len([c for c in project.containers() if c.is_running]), 0)
self.assertEqual(len(project.containers(stopped=True)), 3)
project.remove_stopped(service_names=['web'])
self.assertEqual(len(project.containers(stopped=True)), 1)
project.remove_stopped()
self.assertEqual(len(project.containers(stopped=True)), 0)
def test_create(self):
web = self.create_service('web')
db = self.create_service('db', volumes=[VolumeSpec.parse('/var/db')])
project = Project('composetest', [web, db], self.client)
project.create(['db'])
containers = project.containers(stopped=True)
assert len(containers) == 1
assert not containers[0].is_running
db_containers = db.containers(stopped=True)
assert len(db_containers) == 1
assert not db_containers[0].is_running
assert len(web.containers(stopped=True)) == 0
def test_create_twice(self):
web = self.create_service('web')
db = self.create_service('db', volumes=[VolumeSpec.parse('/var/db')])
project = Project('composetest', [web, db], self.client)
project.create(['db', 'web'])
project.create(['db', 'web'])
containers = project.containers(stopped=True)
assert len(containers) == 2
db_containers = db.containers(stopped=True)
assert len(db_containers) == 1
assert not db_containers[0].is_running
web_containers = web.containers(stopped=True)
assert len(web_containers) == 1
assert not web_containers[0].is_running
def test_create_with_links(self):
db = self.create_service('db')
web = self.create_service('web', links=[(db, 'db')])
project = Project('composetest', [db, web], self.client)
project.create(['web'])
# self.assertEqual(len(project.containers()), 0)
assert len(project.containers(stopped=True)) == 2
assert not [c for c in project.containers(stopped=True) if c.is_running]
assert len(db.containers(stopped=True)) == 1
assert len(web.containers(stopped=True)) == 1
def test_create_strategy_always(self):
db = self.create_service('db')
project = Project('composetest', [db], self.client)
project.create(['db'])
old_id = project.containers(stopped=True)[0].id
project.create(['db'], strategy=ConvergenceStrategy.always)
assert len(project.containers(stopped=True)) == 1
db_container = project.containers(stopped=True)[0]
assert not db_container.is_running
assert db_container.id != old_id
def test_create_strategy_never(self):
db = self.create_service('db')
project = Project('composetest', [db], self.client)
project.create(['db'])
old_id = project.containers(stopped=True)[0].id
project.create(['db'], strategy=ConvergenceStrategy.never)
assert len(project.containers(stopped=True)) == 1
db_container = project.containers(stopped=True)[0]
assert not db_container.is_running
assert db_container.id == old_id
def test_project_up(self):
web = self.create_service('web')
db = self.create_service('db', volumes=[VolumeSpec.parse('/var/db')])
project = Project('composetest', [web, db], self.client)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up(['db'])
self.assertEqual(len(project.containers()), 1)
self.assertEqual(len(db.containers()), 1)
self.assertEqual(len(web.containers()), 0)
def test_project_up_starts_uncreated_services(self):
db = self.create_service('db')
web = self.create_service('web', links=[(db, 'db')])
project = Project('composetest', [db, web], self.client)
project.up(['db'])
self.assertEqual(len(project.containers()), 1)
project.up()
self.assertEqual(len(project.containers()), 2)
self.assertEqual(len(db.containers()), 1)
self.assertEqual(len(web.containers()), 1)
def test_recreate_preserves_volumes(self):
web = self.create_service('web')
db = self.create_service('db', volumes=[VolumeSpec.parse('/etc')])
project = Project('composetest', [web, db], self.client)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up(['db'])
self.assertEqual(len(project.containers()), 1)
old_db_id = project.containers()[0].id
db_volume_path = project.containers()[0].get('Volumes./etc')
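        # force a recreate; the new db container should still point at the same /etc volume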
project.up(strategy=ConvergenceStrategy.always)
self.assertEqual(len(project.containers()), 2)
db_container = [c for c in project.containers() if 'db' in c.name][0]
self.assertNotEqual(db_container.id, old_db_id)
self.assertEqual(db_container.get('Volumes./etc'), db_volume_path)
def test_project_up_with_no_recreate_running(self):
web = self.create_service('web')
db = self.create_service('db', volumes=[VolumeSpec.parse('/var/db')])
project = Project('composetest', [web, db], self.client)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up(['db'])
self.assertEqual(len(project.containers()), 1)
old_db_id = project.containers()[0].id
container, = project.containers()
db_volume_path = container.get_mount('/var/db')['Source']
project.up(strategy=ConvergenceStrategy.never)
self.assertEqual(len(project.containers()), 2)
db_container = [c for c in project.containers() if 'db' in c.name][0]
self.assertEqual(db_container.id, old_db_id)
self.assertEqual(
db_container.get_mount('/var/db')['Source'],
db_volume_path)
def test_project_up_with_no_recreate_stopped(self):
web = self.create_service('web')
db = self.create_service('db', volumes=[VolumeSpec.parse('/var/db')])
project = Project('composetest', [web, db], self.client)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up(['db'])
project.kill()
old_containers = project.containers(stopped=True)
self.assertEqual(len(old_containers), 1)
old_container, = old_containers
old_db_id = old_container.id
db_volume_path = old_container.get_mount('/var/db')['Source']
project.up(strategy=ConvergenceStrategy.never)
new_containers = project.containers(stopped=True)
self.assertEqual(len(new_containers), 2)
self.assertEqual([c.is_running for c in new_containers], [True, True])
db_container = [c for c in new_containers if 'db' in c.name][0]
self.assertEqual(db_container.id, old_db_id)
self.assertEqual(
db_container.get_mount('/var/db')['Source'],
db_volume_path)
def test_project_up_without_all_services(self):
console = self.create_service('console')
db = self.create_service('db')
project = Project('composetest', [console, db], self.client)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up()
self.assertEqual(len(project.containers()), 2)
self.assertEqual(len(db.containers()), 1)
self.assertEqual(len(console.containers()), 1)
def test_project_up_starts_links(self):
console = self.create_service('console')
db = self.create_service('db', volumes=[VolumeSpec.parse('/var/db')])
web = self.create_service('web', links=[(db, 'db')])
project = Project('composetest', [web, db, console], self.client)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up(['web'])
self.assertEqual(len(project.containers()), 2)
self.assertEqual(len(web.containers()), 1)
self.assertEqual(len(db.containers()), 1)
self.assertEqual(len(console.containers()), 0)
def test_project_up_starts_depends(self):
project = Project.from_config(
name='composetest',
config_data=load_config({
'console': {
'image': 'busybox:latest',
'command': ["top"],
},
'data': {
'image': 'busybox:latest',
'command': ["top"]
},
'db': {
'image': 'busybox:latest',
'command': ["top"],
'volumes_from': ['data'],
},
'web': {
'image': 'busybox:latest',
'command': ["top"],
'links': ['db'],
},
}),
client=self.client,
)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up(['web'])
self.assertEqual(len(project.containers()), 3)
self.assertEqual(len(project.get_service('web').containers()), 1)
self.assertEqual(len(project.get_service('db').containers()), 1)
self.assertEqual(len(project.get_service('data').containers()), 1)
self.assertEqual(len(project.get_service('console').containers()), 0)
def test_project_up_with_no_deps(self):
project = Project.from_config(
name='composetest',
config_data=load_config({
'console': {
'image': 'busybox:latest',
'command': ["top"],
},
'data': {
'image': 'busybox:latest',
'command': ["top"]
},
'db': {
'image': 'busybox:latest',
'command': ["top"],
'volumes_from': ['data'],
},
'web': {
'image': 'busybox:latest',
'command': ["top"],
'links': ['db'],
},
}),
client=self.client,
)
project.start()
self.assertEqual(len(project.containers()), 0)
project.up(['db'], start_deps=False)
self.assertEqual(len(project.containers(stopped=True)), 2)
self.assertEqual(len(project.get_service('web').containers()), 0)
self.assertEqual(len(project.get_service('db').containers()), 1)
self.assertEqual(len(project.get_service('data').containers(stopped=True)), 1)
assert not project.get_service('data').containers(stopped=True)[0].is_running
self.assertEqual(len(project.get_service('console').containers()), 0)
def test_project_up_recreate_with_tmpfs_volume(self):
# https://github.com/docker/compose/issues/4751
project = Project.from_config(
name='composetest',
config_data=load_config({
'version': '2.1',
'services': {
'foo': {
'image': 'busybox:latest',
'tmpfs': ['/dev/shm'],
'volumes': ['/dev/shm']
}
}
}), client=self.client
)
project.up()
project.up(strategy=ConvergenceStrategy.always)
def test_unscale_after_restart(self):
web = self.create_service('web')
project = Project('composetest', [web], self.client)
project.start()
service = project.get_service('web')
service.scale(1)
self.assertEqual(len(service.containers()), 1)
service.scale(3)
self.assertEqual(len(service.containers()), 3)
project.up()
service = project.get_service('web')
self.assertEqual(len(service.containers()), 1)
service.scale(1)
self.assertEqual(len(service.containers()), 1)
project.up(scale_override={'web': 3})
service = project.get_service('web')
self.assertEqual(len(service.containers()), 3)
        # does scale=0 make any sense? after recreating, at least 1 container is running
service.scale(0)
project.up()
service = project.get_service('web')
self.assertEqual(len(service.containers()), 1)
@v2_only()
def test_project_up_networks(self):
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top',
'networks': {
'foo': None,
'bar': None,
'baz': {'aliases': ['extra']},
},
}],
networks={
'foo': {'driver': 'bridge'},
'bar': {'driver': None},
'baz': {},
},
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up()
containers = project.containers()
assert len(containers) == 1
container, = containers
for net_name in ['foo', 'bar', 'baz']:
full_net_name = 'composetest_{}'.format(net_name)
network_data = self.client.inspect_network(full_net_name)
assert network_data['Name'] == full_net_name
aliases_key = 'NetworkSettings.Networks.{net}.Aliases'
assert 'web' in container.get(aliases_key.format(net='composetest_foo'))
assert 'web' in container.get(aliases_key.format(net='composetest_baz'))
assert 'extra' in container.get(aliases_key.format(net='composetest_baz'))
foo_data = self.client.inspect_network('composetest_foo')
assert foo_data['Driver'] == 'bridge'
@v2_only()
def test_up_with_ipam_config(self):
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'networks': {'front': None},
}],
networks={
'front': {
'driver': 'bridge',
'driver_opts': {
"com.docker.network.bridge.enable_icc": "false",
},
'ipam': {
'driver': 'default',
'config': [{
"subnet": "172.28.0.0/16",
"ip_range": "172.28.5.0/24",
"gateway": "172.28.5.254",
"aux_addresses": {
"a": "172.28.1.5",
"b": "172.28.1.6",
"c": "172.28.1.7",
},
}],
},
},
},
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up()
network = self.client.networks(names=['composetest_front'])[0]
assert network['Options'] == {
"com.docker.network.bridge.enable_icc": "false"
}
assert network['IPAM'] == {
'Driver': 'default',
'Options': None,
'Config': [{
'Subnet': "172.28.0.0/16",
'IPRange': "172.28.5.0/24",
'Gateway': "172.28.5.254",
'AuxiliaryAddresses': {
'a': '172.28.1.5',
'b': '172.28.1.6',
'c': '172.28.1.7',
},
}],
}
@v2_only()
def test_up_with_ipam_options(self):
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'networks': {'front': None},
}],
networks={
'front': {
'driver': 'bridge',
'ipam': {
'driver': 'default',
'options': {
"com.docker.compose.network.test": "9-29-045"
}
},
},
},
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up()
network = self.client.networks(names=['composetest_front'])[0]
assert network['IPAM']['Options'] == {
"com.docker.compose.network.test": "9-29-045"
}
@v2_1_only()
def test_up_with_network_static_addresses(self):
config_data = build_config(
version=V2_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top',
'networks': {
'static_test': {
'ipv4_address': '172.16.100.100',
'ipv6_address': 'fe80::1001:102'
}
},
}],
networks={
'static_test': {
'driver': 'bridge',
'driver_opts': {
"com.docker.network.enable_ipv6": "true",
},
'ipam': {
'driver': 'default',
'config': [
{"subnet": "172.16.100.0/24",
"gateway": "172.16.100.1"},
{"subnet": "fe80::/64",
"gateway": "fe80::1001:1"}
]
},
'enable_ipv6': True,
}
}
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up(detached=True)
service_container = project.get_service('web').containers()[0]
IPAMConfig = (service_container.inspect().get('NetworkSettings', {}).
get('Networks', {}).get('composetest_static_test', {}).
get('IPAMConfig', {}))
assert IPAMConfig.get('IPv4Address') == '172.16.100.100'
assert IPAMConfig.get('IPv6Address') == 'fe80::1001:102'
@v2_1_only()
def test_up_with_enable_ipv6(self):
self.require_api_version('1.23')
config_data = build_config(
version=V2_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top',
'networks': {
'static_test': {
'ipv6_address': 'fe80::1001:102'
}
},
}],
networks={
'static_test': {
'driver': 'bridge',
'enable_ipv6': True,
'ipam': {
'driver': 'default',
'config': [
{"subnet": "fe80::/64",
"gateway": "fe80::1001:1"}
]
}
}
}
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up(detached=True)
network = [n for n in self.client.networks() if 'static_test' in n['Name']][0]
service_container = project.get_service('web').containers()[0]
assert network['EnableIPv6'] is True
ipam_config = (service_container.inspect().get('NetworkSettings', {}).
get('Networks', {}).get('composetest_static_test', {}).
get('IPAMConfig', {}))
assert ipam_config.get('IPv6Address') == 'fe80::1001:102'
@v2_only()
def test_up_with_network_static_addresses_missing_subnet(self):
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'networks': {
'static_test': {
'ipv4_address': '172.16.100.100',
'ipv6_address': 'fe80::1001:101'
}
},
}],
networks={
'static_test': {
'driver': 'bridge',
'driver_opts': {
"com.docker.network.enable_ipv6": "true",
},
'ipam': {
'driver': 'default',
},
},
},
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
with self.assertRaises(ProjectError):
project.up()
@v2_1_only()
def test_up_with_network_link_local_ips(self):
config_data = build_config(
version=V2_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'networks': {
'linklocaltest': {
'link_local_ips': ['169.254.8.8']
}
}
}],
networks={
'linklocaltest': {'driver': 'bridge'}
}
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data
)
project.up(detached=True)
service_container = project.get_service('web').containers(stopped=True)[0]
ipam_config = service_container.inspect().get(
'NetworkSettings', {}
).get(
'Networks', {}
).get(
'composetest_linklocaltest', {}
).get('IPAMConfig', {})
assert 'LinkLocalIPs' in ipam_config
assert ipam_config['LinkLocalIPs'] == ['169.254.8.8']
@v2_1_only()
def test_up_with_isolation(self):
self.require_api_version('1.24')
config_data = build_config(
version=V2_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'isolation': 'default'
}],
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data
)
project.up(detached=True)
service_container = project.get_service('web').containers(stopped=True)[0]
assert service_container.inspect()['HostConfig']['Isolation'] == 'default'
@v2_1_only()
def test_up_with_invalid_isolation(self):
self.require_api_version('1.24')
config_data = build_config(
version=V2_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'isolation': 'foobar'
}],
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data
)
with self.assertRaises(ProjectError):
project.up()
@v2_only()
def test_project_up_with_network_internal(self):
self.require_api_version('1.23')
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'networks': {'internal': None},
}],
networks={
'internal': {'driver': 'bridge', 'internal': True},
},
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up()
network = self.client.networks(names=['composetest_internal'])[0]
assert network['Internal'] is True
@v2_1_only()
def test_project_up_with_network_label(self):
self.require_api_version('1.23')
network_name = 'network_with_label'
config_data = build_config(
version=V2_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'networks': {network_name: None}
}],
networks={
network_name: {'labels': {'label_key': 'label_val'}}
}
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data
)
project.up()
networks = [
n for n in self.client.networks()
if n['Name'].startswith('composetest_')
]
assert [n['Name'] for n in networks] == ['composetest_{}'.format(network_name)]
assert 'label_key' in networks[0]['Labels']
assert networks[0]['Labels']['label_key'] == 'label_val'
@v2_only()
def test_project_up_volumes(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name)
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={vol_name: {'driver': 'local'}},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
project.up()
self.assertEqual(len(project.containers()), 1)
volume_data = self.get_volume_data(full_vol_name)
assert volume_data['Name'].split('/')[-1] == full_vol_name
self.assertEqual(volume_data['Driver'], 'local')
@v2_1_only()
def test_project_up_with_volume_labels(self):
self.require_api_version('1.23')
volume_name = 'volume_with_label'
config_data = build_config(
version=V2_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'volumes': [VolumeSpec.parse('{}:/data'.format(volume_name))]
}],
volumes={
volume_name: {
'labels': {
'label_key': 'label_val'
}
}
},
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up()
volumes = [
v for v in self.client.volumes().get('Volumes', [])
if v['Name'].split('/')[-1].startswith('composetest_')
]
assert set([v['Name'].split('/')[-1] for v in volumes]) == set(
['composetest_{}'.format(volume_name)]
)
assert 'label_key' in volumes[0]['Labels']
assert volumes[0]['Labels']['label_key'] == 'label_val'
@v2_only()
def test_project_up_logging_with_multiple_files(self):
base_file = config.ConfigFile(
'base.yml',
{
'version': str(V2_0),
'services': {
'simple': {'image': 'busybox:latest', 'command': 'top'},
'another': {
'image': 'busybox:latest',
'command': 'top',
'logging': {
'driver': "json-file",
'options': {
'max-size': "10m"
}
}
}
}
})
override_file = config.ConfigFile(
'override.yml',
{
'version': str(V2_0),
'services': {
'another': {
'logging': {
'driver': "none"
}
}
}
})
details = config.ConfigDetails('.', [base_file, override_file])
tmpdir = py.test.ensuretemp('logging_test')
self.addCleanup(tmpdir.remove)
with tmpdir.as_cwd():
config_data = config.load(details)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
project.up()
containers = project.containers()
self.assertEqual(len(containers), 2)
another = project.get_service('another').containers()[0]
log_config = another.get('HostConfig.LogConfig')
self.assertTrue(log_config)
self.assertEqual(log_config.get('Type'), 'none')
@v2_only()
def test_project_up_port_mappings_with_multiple_files(self):
base_file = config.ConfigFile(
'base.yml',
{
'version': str(V2_0),
'services': {
'simple': {
'image': 'busybox:latest',
'command': 'top',
'ports': ['1234:1234']
},
},
})
override_file = config.ConfigFile(
'override.yml',
{
'version': str(V2_0),
'services': {
'simple': {
'ports': ['1234:1234']
}
}
})
details = config.ConfigDetails('.', [base_file, override_file])
config_data = config.load(details)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
project.up()
containers = project.containers()
self.assertEqual(len(containers), 1)
@v2_2_only()
def test_project_up_config_scale(self):
config_data = build_config(
version=V2_2,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top',
'scale': 3
}]
)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
project.up()
assert len(project.containers()) == 3
project.up(scale_override={'web': 2})
assert len(project.containers()) == 2
project.up(scale_override={'web': 4})
assert len(project.containers()) == 4
project.stop()
project.up()
assert len(project.containers()) == 3
@v2_only()
def test_initialize_volumes(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name)
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={vol_name: {}},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
project.volumes.initialize()
volume_data = self.get_volume_data(full_vol_name)
assert volume_data['Name'].split('/')[-1] == full_vol_name
assert volume_data['Driver'] == 'local'
@v2_only()
def test_project_up_implicit_volume_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name)
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={vol_name: {}},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
project.up()
volume_data = self.get_volume_data(full_vol_name)
assert volume_data['Name'].split('/')[-1] == full_vol_name
self.assertEqual(volume_data['Driver'], 'local')
@v3_only()
def test_project_up_with_secrets(self):
node = create_host_file(self.client, os.path.abspath('tests/fixtures/secrets/default'))
config_data = build_config(
version=V3_1,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'cat /run/secrets/special',
'secrets': [
types.ServiceSecret.parse({'source': 'super', 'target': 'special'}),
],
'environment': ['constraint:node=={}'.format(node if node is not None else '*')]
}],
secrets={
'super': {
'file': os.path.abspath('tests/fixtures/secrets/default'),
},
},
)
project = Project.from_config(
client=self.client,
name='composetest',
config_data=config_data,
)
project.up()
project.stop()
containers = project.containers(stopped=True)
assert len(containers) == 1
container, = containers
output = container.logs()
assert output == b"This is the secret\n"
@v2_only()
def test_initialize_volumes_invalid_volume_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={vol_name: {'driver': 'foobar'}},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
with self.assertRaises(APIError if is_cluster(self.client) else config.ConfigurationError):
project.volumes.initialize()
@v2_only()
@no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_updated_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name)
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={vol_name: {'driver': 'local'}},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
project.volumes.initialize()
volume_data = self.get_volume_data(full_vol_name)
assert volume_data['Name'].split('/')[-1] == full_vol_name
self.assertEqual(volume_data['Driver'], 'local')
config_data = config_data._replace(
volumes={vol_name: {'driver': 'smb'}}
)
project = Project.from_config(
name='composetest',
config_data=config_data,
client=self.client
)
with self.assertRaises(config.ConfigurationError) as e:
project.volumes.initialize()
assert 'Configuration for volume {0} specifies driver smb'.format(
vol_name
) in str(e.exception)
@v2_only()
def test_initialize_volumes_updated_blank_driver(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name)
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={vol_name: {'driver': 'local'}},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
project.volumes.initialize()
volume_data = self.get_volume_data(full_vol_name)
assert volume_data['Name'].split('/')[-1] == full_vol_name
self.assertEqual(volume_data['Driver'], 'local')
config_data = config_data._replace(
volumes={vol_name: {}}
)
project = Project.from_config(
name='composetest',
config_data=config_data,
client=self.client
)
project.volumes.initialize()
volume_data = self.get_volume_data(full_vol_name)
assert volume_data['Name'].split('/')[-1] == full_vol_name
self.assertEqual(volume_data['Driver'], 'local')
@v2_only()
@no_cluster('inspect volume by name defect on Swarm Classic')
def test_initialize_volumes_external_volumes(self):
# Use composetest_ prefix so it gets garbage-collected in tearDown()
vol_name = 'composetest_{0:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name)
self.client.create_volume(vol_name)
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={
vol_name: {'external': True, 'name': vol_name}
},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
project.volumes.initialize()
with self.assertRaises(NotFound):
self.client.inspect_volume(full_vol_name)
@v2_only()
def test_initialize_volumes_inexistent_external_volume(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
config_data = build_config(
version=V2_0,
services=[{
'name': 'web',
'image': 'busybox:latest',
'command': 'top'
}],
volumes={
vol_name: {'external': True, 'name': vol_name}
},
)
project = Project.from_config(
name='composetest',
config_data=config_data, client=self.client
)
with self.assertRaises(config.ConfigurationError) as e:
project.volumes.initialize()
assert 'Volume {0} declared as external'.format(
vol_name
) in str(e.exception)
@v2_only()
def test_project_up_named_volumes_in_binds(self):
vol_name = '{0:x}'.format(random.getrandbits(32))
full_vol_name = 'composetest_{0}'.format(vol_name)
base_file = config.ConfigFile(
'base.yml',
{
'version': str(V2_0),
'services': {
'simple': {
'image': 'busybox:latest',
'command': 'top',
'volumes': ['{0}:/data'.format(vol_name)]
},
},
'volumes': {
vol_name: {'driver': 'local'}
}
})
config_details = config.ConfigDetails('.', [base_file])
config_data = config.load(config_details)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
service = project.services[0]
self.assertEqual(service.name, 'simple')
volumes = service.options.get('volumes')
self.assertEqual(len(volumes), 1)
self.assertEqual(volumes[0].external, full_vol_name)
project.up()
engine_volumes = self.client.volumes()['Volumes']
container = service.get_container()
assert [mount['Name'] for mount in container.get('Mounts')] == [full_vol_name]
assert next((v for v in engine_volumes if v['Name'] == vol_name), None) is None
def test_project_up_orphans(self):
config_dict = {
'service1': {
'image': 'busybox:latest',
'command': 'top',
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
project.up()
config_dict['service2'] = config_dict['service1']
del config_dict['service1']
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
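        # renaming service1 to service2 leaves the old container orphaned: up() should only warn, remove_orphans should delete it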
with mock.patch('compose.project.log') as mock_log:
project.up()
mock_log.warning.assert_called_once_with(mock.ANY)
assert len([
ctnr for ctnr in project._labeled_containers()
if ctnr.labels.get(LABEL_SERVICE) == 'service1'
]) == 1
project.up(remove_orphans=True)
assert len([
ctnr for ctnr in project._labeled_containers()
if ctnr.labels.get(LABEL_SERVICE) == 'service1'
]) == 0
@v2_1_only()
def test_project_up_healthy_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
'image': 'busybox:latest',
'command': 'top',
'healthcheck': {
'test': 'exit 0',
'retries': 1,
'timeout': '10s',
'interval': '1s'
},
},
'svc2': {
'image': 'busybox:latest',
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_healthy'},
}
}
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
project.up()
containers = project.containers()
assert len(containers) == 2
svc1 = project.get_service('svc1')
svc2 = project.get_service('svc2')
assert 'svc1' in svc2.get_dependency_names()
assert svc1.is_healthy()
@v2_1_only()
def test_project_up_unhealthy_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
'image': 'busybox:latest',
'command': 'top',
'healthcheck': {
'test': 'exit 1',
'retries': 1,
'timeout': '10s',
'interval': '1s'
},
},
'svc2': {
'image': 'busybox:latest',
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_healthy'},
}
}
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
with pytest.raises(ProjectError):
project.up()
containers = project.containers()
assert len(containers) == 1
svc1 = project.get_service('svc1')
svc2 = project.get_service('svc2')
assert 'svc1' in svc2.get_dependency_names()
with pytest.raises(HealthCheckFailed):
svc1.is_healthy()
@v2_1_only()
def test_project_up_no_healthcheck_dependency(self):
config_dict = {
'version': '2.1',
'services': {
'svc1': {
'image': 'busybox:latest',
'command': 'top',
'healthcheck': {
'disable': True
},
},
'svc2': {
'image': 'busybox:latest',
'command': 'top',
'depends_on': {
'svc1': {'condition': 'service_healthy'},
}
}
}
}
config_data = load_config(config_dict)
project = Project.from_config(
name='composetest', config_data=config_data, client=self.client
)
with pytest.raises(ProjectError):
project.up()
containers = project.containers()
assert len(containers) == 1
svc1 = project.get_service('svc1')
svc2 = project.get_service('svc2')
assert 'svc1' in svc2.get_dependency_names()
with pytest.raises(NoHealthCheckConfigured):
svc1.is_healthy()
| shakamunyi/fig | tests/integration/project_test.py | Python | apache-2.0 | 55,924 | 0.00059 |
USER_ID_LEN = 64;
NR_WAVELEN_POL_COEF = 5;
NR_NONLIN_POL_COEF = 8;
NR_DEFECTIVE_PIXELS = 30;
MAX_NR_PIXELS = 4096;
MEAS_DEAD_PIX = 13;# 18
MEAS_PIXELS = 3648;
SIZE_PREFIX = 6;
NR_TEMP_POL_COEF = 5;
MAX_TEMP_SENSORS = 3;
ROOT_NAME_LEN = 6;
AVS_SERIAL_LEN = 9; # 10
MAX_PIXEL_VALUE = 0xFFFC;
MAX_VIDEO_CHANNELS = 2;
MAX_LASER_WIDTH = 0xFFFF;
HW_TRIGGER_MODE = 1;
SW_TRIGGER_MODE = 0;
EDGE_TRIGGER_SOURCE = 0;
LEVEL_TRIGGER_SOURCE = 1;
MAX_TRIGGER_MODE = 1;
MAX_TRIGGER_SOURCE = 1;
MAX_TRIGGER_SOURCE_TYPE = 1;
MAX_INTEGRATION_TIME = 600000;
SAT_DISABLE_DET = 0;
SAT_ENABLE_DET = 1;
SAT_PEAK_INVERSION = 2;
NR_DAC_POL_COEF = 2;
TIMEOUT = 1000000;
ADR_WRITE = 0x02;
ADR_READ = 0x86;
ID_VENDOR = 0x1992;
ID_PRODUCT = 0x0667;
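# Human-readable labels for the device error codes, indexed by error code value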
ERR_CODE = [
    'CODE 0x00 : UNKNOWN',
'CODE 0x01 : INVALID PARAMETER',
'CODE 0x02 : INVALID PASSWORD',
'CODE 0x03 : INVALID COMMAND',
'CODE 0x04 : INVALID SIZE',
'CODE 0x05 : MEASUREMENT PENDING',
'CODE 0x06 : INVALID PIXEL RANGE',
'CODE 0x07 : INVALID INTEGRATION TIME',
'CODE 0x08 : OPERATION NOT SUPPORTED',
'CODE 0x09 : INVALID COMBINATION',
    'CODE 0x0A : NO BUFFER AVAILABLE',
    'CODE 0x0B : NO SPECTRA AVAILABLE',
'CODE 0x0C : INVALID STATE',
'CODE 0x0D : UNEXPECTED DMA INT',
'CODE 0x0E : INVALID FPGA FILE'
]
SENSOR_TYPE = ['RESERVED',
'Hams 8378-256',
'Hams 8378-1024',
'ILX554',
'Hams 9201',
'Toshiba TCD 1304',
'TSL 1301',
'TSL 1401',
'Hams 8378-512',
'Hams 9840']
| smarrazzo/pyava | consts.py | Python | mit | 1,919 | 0.039083 |
import re
useless_words = ['Inc', 'Corporation', 'Company', 'Corp', 'Co', 'Energy', '&', 'The', '.com', "Inc.", "Corp.", "Co.", "of", "Ltd.", "Ltd"]
with open('companies.txt', 'r') as f:
companies = f.readlines()
new_companies = []
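# Drop corporate suffixes and single-character tokens, keeping the remaining words lowercased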
for company in companies:
company = company[:-1]
company_words = company.split()
new_company = [word for word in company_words if word not in useless_words]
new_companies.append([word.lower() for word in new_company if len(word)>1])
print new_companies
with open('parsed_companies.txt', 'w') as f:
for company in new_companies:
f.write(' '.join(company)+'\n')
| maxdrib/Trump-Twitter | keywords/parse_companies.py | Python | mit | 633 | 0.004739 |
from flask import Flask, request, json
import RPi.GPIO as GPIO
import threading
import time
import socket
import ast
import Adafruit_DHT
GPIO.setmode(GPIO.BCM)
USE_TEST_TEMPERATURES = False
app = Flask(__name__)
class sensorReader(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.exitapp = False
print ('SENSOR SERVER STARTED')
if USE_TEST_TEMPERATURES:
global server_socket
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
server_socket.bind(('localhost', 5001))
server_socket.listen(5)
def run(self):
global data
if USE_TEST_TEMPERATURES:
(client_socket, address) = server_socket.accept()
while not self.exitapp:
size = len(data['sensors'])
if (size!=0):
client_socket.send (str(size))
values = client_socket.recv(512)
#print "RECEIVED:" , values
parsedValues = json.loads(values)
for x in range(size):
data['sensors'][x][str(x+1)]['value'] = parsedValues[x]
else:
while not self.exitapp:
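                # poll the DHT11 temperature/humidity sensor wired to GPIO pin 5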
                humidity, temperature = Adafruit_DHT.read_retry(Adafruit_DHT.DHT11, 5)
                if humidity is not None and temperature is not None:
                    data['sensors'][0]['1']['value'] = str(int(temperature))
                    print 'Temp={0:0.1f}*C Humidity={1:0.1f}%'.format(temperature, humidity)
time.sleep(1)
class actuatorTrigger(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.exitapp = False
GPIO.setup(4, GPIO.OUT)
GPIO.setup(17, GPIO.OUT)
GPIO.setup(27, GPIO.OUT)
def run(self):
global data
pin = [4,17,27]
while not self.exitapp:
            x=1
            for t in data['thermostats']:
                tempCount = 0  # reset per thermostat so AUTO averages do not accumulate across thermostats
                mode=t.get(str(x))['mode']
if mode == 'ON':
GPIO.output(pin[x-1], True)
if mode == 'OFF':
GPIO.output(pin[x-1], False)
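                # AUTO: average the readings of the assigned sensors and switch the relay
                # around the setpoint; the 'hot' flag flips the logic between heating and cooling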
if mode == 'AUTO':
for s in t.get(str(x))['sensors']:
tempCount += int(data['sensors'][s-1][str(s)]['value'])
'''print tempCount'''
avg = tempCount / float(len(t.get(str(x))['sensors']))
'''print avg'''
'''print t.get(str(x))['temperature']'''
if (t.get(str(x))['hot']!='true'):
if (float(t.get(str(x))['temperature'])-avg)<0.5:
GPIO.output(pin[x-1], True)
else:
GPIO.output(pin[x-1], False)
else:
if (float(t.get(str(x))['temperature'])-avg)<0.5:
GPIO.output(pin[x-1], False)
else:
GPIO.output(pin[x-1], True)
x=x+1
time.sleep(1)
@app.route("/")
def hello():
"""Brief introduction message"""
return "Hello this is the API server of a smart thermostate!"
@app.route('/temp', methods=['GET','DELETE','POST'])
def showTemp():
"""Offers the three available methods of the api for the temperature sensors
GET - Lists all the sensors values
POST - Adds a new temperature sensor
DELETE - Delete all sensors
"""
global data
if request.method == 'GET':
return json.dumps(data.get('sensors'), indent=4)
if request.method == 'DELETE':
data['sensors'] = []
file = open('testData.json','w')
json.dump(data,file,indent=4)
file.close()
return "All sensors deleted successfully"
if request.method == 'POST':
id = len(data['sensors'])+1
temp= {str(id) : {"value":"0", "name":request.form['name']}}
data['sensors'].append(temp)
file = open('testData.json','w')
json.dump(data,file,indent=4)
file.close()
return "New temperature value created successfully"
else:
return "Not a valid method"
@app.route('/thermo/<thermid>', methods=['GET','PUT'])
def getThermostate(thermid):
"""Retunrs the thermostat data specified by <thermid>"""
global data
id = int(thermid)
if request.method == 'GET':
return json.dumps(data['thermostats'][id-1].get(str(id)), indent=4)
if request.method == 'PUT':
temp = request.form['temperature']
data['thermostats'][id-1].get(str(id))['temperature']=temp
mode = request.form['mode']
data['thermostats'][id-1].get(str(id))['mode']=mode
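        # 'sensors' arrives as a string such as "[1, 2]"; literal_eval turns it back into a list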
sensors = request.form['sensors']
sensors= ast.literal_eval(sensors)
data['thermostats'][id-1].get(str(id))['sensors']=sensors
time_programming = (request.form['time_programming'])
print (time_programming)
''' n=json.dumps(time_programming)'''
data['thermostats'][id-1].get(str(id))['time']=json.loads(time_programming)
hot = (request.form['hot'])
data['thermostats'][id-1].get(str(id))['hot']=hot
file = open('testData.json','w')
json.dump(data,file,indent=4)
file.close()
return ' '
@app.route('/thermo', methods=['GET','POST','DELETE'])
def showThermo():
"""Offers the three available methods of the api for the thermostates
GET - Lists all thermostates
POST - Adds a default thermostate with no sensors assigned and 21 degree
DELETE - Delete all thermostates
"""
global data
if request.method == 'GET':
return json.dumps(data['thermostats'], indent=4)
if request.method == 'POST':
id = len(data['thermostats'])+1
thermo= {str(id) : {"name":request.form['name'], 'sensors':[], 'temperature':'21', 'mode':'OFF'}}
data['thermostats'].append(thermo)
file = open('testData.json','w')
json.dump(data,file,indent=4)
file.close()
return "New thermostate created successfully"
if request.method == 'DELETE':
data['thermostats']=[]
file = open('testData.json','w')
json.dump(data,file,indent=4)
file.close()
return "All thermostates deleted successfully"
else:
return "Not a valid method"
def main():
global data
file=open('testData.json','r')
data = json.load(file)
file.close()
mySensorReader = sensorReader()
mySensorReader.start()
myActuatorTrigger = actuatorTrigger()
myActuatorTrigger.start()
app.run(host='0.0.0.0', port=6789,threaded=True, debug=False)
try:
mySensorReader.join()
myActuatorTrigger.join()
except KeyboardInterrupt:
mySensorReader.exitapp = True
myActuatorTrigger.exitapp = True
GPIO.cleanup()
if __name__ == "__main__":
main()
| mpascu/SmartThermostatServer | server.py | Python | mit | 6,963 | 0.012638 |
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
#Licensed under the Apache License, Version 2.0 (the "License");
#you may not use this file except in compliance with the License.
#You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#Unless required by applicable law or agreed to in writing, software
#distributed under the License is distributed on an "AS IS" BASIS,
#WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
#See the License for the specific language governing permissions and
#limitations under the License.
import unittest
import numpy as np
from op_test import OpTest
def get_output_shape(attrs, in_shape):
img_height = in_shape[2]
img_width = in_shape[3]
paddings = attrs['paddings']
kernels = attrs['kernels']
strides = attrs['strides']
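    # one plus a ceiling division of the padded span (minus the kernel) by the stride, using integer arithmetic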
output_height = \
1 + \
(img_height + paddings[0] + paddings[2] - kernels[0] + strides[0] - 1) / \
strides[0]
output_width = \
1 + \
(img_width + paddings[1] + paddings[3] - kernels[1] + strides[1] - 1) / \
strides[1]
return output_height, output_width
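# e.g. a 4x4 input with 2x2 kernels, stride 1 and padding 1 on every side yields a 5x5 grid of windows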
def im2col(attrs, im, col):
"""
im: {CHW}
col:
{outputHeight, outputWidth, inputChannels, filterHeight, filterWidth}
"""
input_channels, input_height, input_width = im.shape
output_height, output_width, _, filter_height, filter_width = col.shape
stride_height, stride_width = attrs['strides']
padding_height, padding_width = attrs['paddings'][0:2]
for col_row_idx in range(0, output_height):
for col_col_idx in range(0, output_width):
for channel in range(0, input_channels):
for filter_row_idx in range(0, filter_height):
for filter_col_idx in range(0, filter_width):
im_row_offset = col_row_idx * stride_height \
+ filter_row_idx - padding_height
im_col_offset = col_col_idx * stride_width \
+ filter_col_idx - padding_width
if (im_row_offset < 0 or
im_row_offset >= input_height or
im_col_offset < 0 or
im_col_offset >= input_width):
col[col_row_idx][col_col_idx][channel][\
filter_row_idx][filter_col_idx] = 0.0
else:
im_offset = (channel * input_height + im_row_offset \
) * input_width + im_col_offset
col[col_row_idx][col_col_idx][channel][\
filter_row_idx][filter_col_idx] = im[channel][ \
im_row_offset][im_col_offset]
def Im2Sequence(inputs, attrs):
output_height, output_width = get_output_shape(attrs, inputs.shape)
img_channels = inputs.shape[1]
batch_size = inputs.shape[0]
out = np.zeros([
batch_size, output_height, output_width, img_channels,
attrs['kernels'][0], attrs['kernels'][1]
]).astype("float32")
for i in range(len(inputs)):
im2col(attrs, inputs[i], out[i])
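    # flatten to one row per output window: (batch * out_h * out_w, channels * kernel_h * kernel_w)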
out = out.reshape([
batch_size * output_height * output_width,
img_channels * attrs['kernels'][0] * attrs['kernels'][1]
])
return out
class TestBlockExpandOp(OpTest):
def config(self):
self.batch_size = 1
self.img_channels = 3
self.img_height = 4
self.img_width = 4
self.attrs = {
'kernels': [2, 2],
'strides': [1, 1],
'paddings': [1, 1, 1, 1]
}
def setUp(self):
self.config()
self.op_type = "im2sequence"
x = np.random.uniform(0.1, 1, [
self.batch_size, self.img_channels, self.img_height, self.img_width
]).astype("float32")
out = Im2Sequence(x, self.attrs)
self.inputs = {'X': x}
self.outputs = {'Out': out}
def test_check_output(self):
self.check_output()
def test_check_grad_normal(self):
self.check_grad(['X'], 'Out')
class TestBlockExpandOpCase2(TestBlockExpandOp):
def config(self):
self.batch_size = 2
self.img_channels = 3
self.img_height = 4
self.img_width = 5
self.attrs = {
'kernels': [2, 1],
'strides': [2, 1],
'paddings': [2, 1, 2, 1]
}
class TestBlockExpandOpCase3(TestBlockExpandOp):
def config(self):
self.batch_size = 3
self.img_channels = 1
self.img_height = 4
self.img_width = 5
self.attrs = {
'kernels': [2, 1],
'strides': [2, 1],
'paddings': [2, 0, 2, 0]
}
class TestBlockExpandOpCase4(TestBlockExpandOp):
def config(self):
self.batch_size = 2
self.img_channels = 2
self.img_height = 3
self.img_width = 3
self.attrs = {
'kernels': [2, 2],
'strides': [1, 1],
'paddings': [0, 0, 0, 0]
}
if __name__ == '__main__':
unittest.main()
| Canpio/Paddle | python/paddle/fluid/tests/unittests/test_im2sequence_op.py | Python | apache-2.0 | 5,232 | 0.004014 |
__author__ = 'nicolas'
# coding=utf-8
from os.path import expanduser
from ordereddict import OrderedDict
from Bio import SwissProt
import time
import MySQLdb as mdb
"""
Fuck!
from ordereddict import OrderedDict
import MySQLdb as mdb
dicc = {}
dictdebug_empty = OrderedDict()
dictdebug = dictdebug_empty
dictdebug['hola'] = 'chau'
print(dictdebug.items())
print(dictdebug_empty.items())
dictdebug_empty.clear()
print(dictdebug_empty.items())
print(dictdebug.items())
"""
# Record the script start time
start_time = time.time()
# Script variables
database = "ptmdb"
tabla_cuentas = "sprot_count1"
tabla_ptms = "sprot_ptms1"
file_name = "uniprot_sprot.dat"
desde = 0
hasta = 542783  # there are 542782 AC entries??
# Connect to the database
con = mdb.connect('localhost', 'nicolas', passwd="nicolaslfp", db=database)
cur = con.cursor()
cur.execute("SELECT VERSION()")
cur.execute("USE " + database)
print("USE ptmdb;")
# Open the uniprot .dat file
uniprot_file = expanduser("~") + '/QB9_Files/' + file_name
output_file = expanduser("~") + '/QB9-git/QB9/resources/output.txt'
def count_amino_acids_ext(seq):  # function that takes a sequence and counts its amino acids
    prot_dic2 = prot_dic.copy()  # work on a copy so the template dict is not mutated
    for aa in prot_dic2:
        prot_dic2[aa] = seq.count(aa)
    return prot_dic2  # and returns an ordered dict of (AA, #AA) pairs
# Build the dictionary with the amino acids that will be counted
abc = 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
prot_dic = OrderedDict((k, 0) for k in abc)
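# Example of the counter above (hypothetical sequence, not taken from the data file):
#   count_amino_acids_ext("MKKA") -> OrderedDict with A=1, K=2, M=1 and 0 for every other letter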
# Interesting feature types
ptmrecords = ["MOD_RES", "LIPID", "CARBOHYD", "DISULFID", "CROSSLNK"]
# Non-experimental qualifiers for feature annotations
neqs = ["Probable", "Potential", "By similarity"]  # plus "Experimental"
# The categories are kept in a dictionary together with their mysql type  # TODO: get rid of this
categories = OrderedDict()
categories['AC'] = "varchar(30) NOT NULL"  # accession number
categories['FT'] = "varchar(30) NOT NULL"
categories['STATUS'] = "varchar(30) NOT NULL"
categories['PTM'] = "varchar(100) NOT NULL"
categories['FROM_RES'] = "varchar(10) NOT NULL"
categories['TO_RES'] = "varchar(10) NOT NULL"
categories['FROM_AA'] = "varchar(10) NOT NULL"  # we are going to implement the target residue directly
categories['TO_AA'] = "varchar(10) NOT NULL"
categories['SQ'] = "text(45000) NOT NULL" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
categories['LENGTH'] = "varchar(200) NOT NULL" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
categories['ORG'] = "text(500) NOT NULL" # organism
categories['OC'] = "varchar(30) NOT NULL"  # organism classification, we only keep the domain
categories['OX'] = "varchar(200) NOT NULL" # taxonomic ID
categories['HO'] = "text(500)" # host organism
categories['inumber'] = "varchar(200) NOT NULL"
# categories['CC'] = "varchar(200)" # comments section, the "PTM" field is the one of interest
# categories['SQi'] = "varchar(200)" # SQ SEQUENCE XXXX AA; XXXXX MW; XXXXXXXXXXXXXXXX CRC64;
# Template dictionary used to hold the values extracted from each record
empty_data = OrderedDict()
for gato in categories:  # use the keys from categories with a default value  # TODO: empty is not null, how should this be handled?
empty_data[gato] = 'NOFT'
empty_data['FROM_RES'] = '?'
empty_data['TO_RES'] = '?'
empty_data['FROM_AA'] = '?'
empty_data['TO_AA'] = '?'
data = empty_data.copy()  # this is the empty record dictionary that will be reused
print("DROP TABLE " + tabla_cuentas + ";")
print("DROP TABLE " + tabla_ptms + ";")
# Create the amino-acid counts table
prot_dic_def_items = []
prot_dic_def = OrderedDict((k, 'SMALLINT') for k in abc)
for cat, value in prot_dic_def.items(): # key and value concatenations
    prot_dic_def_items.append(cat + ' ' + value) # stored in the list
table_def = ', '.join(prot_dic_def_items) # table definition
print("CREATE TABLE IF NOT EXISTS "
+ tabla_cuentas
+ " (AC VARCHAR(30) UNIQUE, OC_ID VARCHAR(30), LENGTH MEDIUMINT,"
+ table_def
+ ") ENGINE=InnoDB;")
print("commit;")
# con.commit()
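# For reference, the statement printed above comes out roughly as (abbreviated):
#   CREATE TABLE IF NOT EXISTS sprot_count1 (AC VARCHAR(30) UNIQUE, OC_ID VARCHAR(30),
#       LENGTH MEDIUMINT, A SMALLINT, B SMALLINT, ..., Z SMALLINT) ENGINE=InnoDB;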
# Create the PTMs table
table_def_items = [] # list for the key and value concatenations
for cat, value in categories.items(): # key and value concatenations
    table_def_items.append(cat + ' ' + value) # stored in the list
table_def_2 = ', '.join(table_def_items) # table definition
print("CREATE TABLE IF NOT EXISTS " + tabla_ptms + " (" + table_def_2 + ") ENGINE=InnoDB;")
print("commit;")
# con.commit()
# Loop variables
i = 0
j = 0
ptm = ''
out = []
listap = []
listaq = []
listar = []
olista = []
interes = []
with open(uniprot_file) as uniprot: # opens the file and closes it at the end
    for record in SwissProt.parse(uniprot): # parse the uniprot records
i += 1
if i % 100 == 0:
print("commit;")
        data = empty_data.copy() # instead of clearing the dict, assign the default copy so it is not tied to the empty one
        # Load the general data for the PTMs of a protein/uniprot entry (entry instances)
        # they must be loaded in the order of the columns in the ptmdb and of the INSERT
        # print(record.accessions[0])
        data['AC'] = record.accessions[0] # only the primary one, not sure about the rest.
        data['SQ'] = record.sequence
        data['LENGTH'] = record.sequence_length # TODO: is there a problem here? entries with more than 999 residues do not fit
        data['ORG'] = record.organism # the organism
        data['OC'] = record.organism_classification[0] # the organism's domain
        data['OX'] = record.taxonomy_id[0] # the organism's taxonomic id
del olista[:]
if not record.host_organism:
data['HO'] = 'No host'
else:
for o in record.host_organism:
olista.append((o.split(";"))[0])
            data['HO'] = ', '.join(olista) # and this is the host of the virus (or parasite?)
        data['inumber'] = str(i) # only for debugging, to see how far the script got
        # Generate and store the INSERT with the amino-acid counts of the sequence
del listaq[:]
        contenido_aa = count_amino_acids_ext(record.sequence) # keep the dict with (AA, #AA) pairs for the sequence
for q in contenido_aa.itervalues():
            listaq.append(str(q)) # and put them in a list
sql_insert_values_q = ', '.join(listaq)
if i >= desde:
print("INSERT INTO " + tabla_cuentas + " VALUES ('"
+ record.accessions[0] + "', '"
+ record.organism_classification[0] + "', "
+ str(record.sequence_length)
+ ", " + sql_insert_values_q + ");")
# print("commit;")
# con.commit()
        # Now start on the features: is any of them interesting?
        features = record.features # TODO: insert the FTs in another table together with OC, OX, OR...?
del out[:]
del interes[:]
        for a in range(0, len(features)): # store the candidate FT names in a list called out
            out.append(features[a][0])
        interes = list(set(out).intersection(ptmrecords)) # build a set of the interesting ones and turn it into the list interes
        if interes: # if interes is not empty, there is something to load
            # TODO: avoid sequence duplicates, maybe relate them via AC?
            # now load each PTM into data (entry sub-instances)
            for feature in features: # iterate over the entry features
                if feature[0] in interes: # if the FT title is of interest, continue (small speedup)
                    for tipo in interes: # iterate over the interesting types found in the features
                        if feature[0] in tipo: # if the evaluated feature is interesting, load its data into data[]
                            A = feature[1] # from this residue (it will equal the next one if it stands alone)
                            B = feature[2] # to the other one. NOTE: some unknown positions are indicated with "?"
                            C = feature[3] # this one holds the actual annotation?
                            D = feature[4] # this one shows up sometimes? TODO: wtf?
                            # reset FT, FROM and TO
data['FT'] = 'NOFT'
data['FROM_RES'] = '?'
data['TO_RES'] = '?'
data['FROM_AA'] = '?'
data['TO_AA'] = '?'
                            # Assign FT
data['FT'] = feature[0]
data['FROM_RES'] = A
data['TO_RES'] = B
                            # reset PTM and STATUS
ptm = ''
data['PTM'] = 'NOFT'
data['STATUS'] = "Experimental"
                            # Assign STATUS and PTM
                            if C: # if C (the field holding the PTM name and the STATUS) contains something
                                for neq in neqs: # iterate over the possible STATUS values
                                    if neq in C: # if C contains this STATUS
                                        data['STATUS'] = neq # assign the value to STATUS
                                        C = C.replace('(' + neq + ")", '') # strip this junk out
                                        C = C.replace(neq, '')
                                        # strip it as well when it appears without parentheses
                                        break # this breaks out of the innermost loop
ptm = ((C.split(" /"))[0].split(';')[0]). \
rstrip(" ").rstrip(".").rstrip(" ")
                                # Note: sometimes the mods carry stable identifiers that start with "/",
                                # so that part has to be removed, along with anything after a ";".
                                # There are also CROSSLNKs with extra annotations, which makes them look unique
                                # when counted even though they are almost identical. TODO: does this happen with others?
                                # See http://web.expasy.org/docs/userman.html#FT_line
                                # Trailing spaces and dots are stripped as well.
                                # I hate this format... TODO: is there nothing that handles it better?
                            if tipo == 'DISULFID': # if the type is a disulfide bond, there is not much to say.
ptm = "S-cysteinyl 3-(oxidosulfanyl)alanine (Cys-Cys)"
data['FROM_AA'] = 'C'
data['TO_AA'] = 'C'
                            else: # but if it is not, store things normally.
                                # Assign the target residue
if A != '?':
data['FROM_AA'] = data['SQ'][int(data['FROM_RES'])-1]
else:
data['FROM_AA'] = '?'
if B != '?':
data['TO_AA'] = data['SQ'][int(data['TO_RES'])-1]
else:
data['TO_AA'] = '?'
if ptm.find("with") != -1: # si la ptm contiene la palabra "with" (caso crosslink)
ptm = ptm.split(" (with")[0].split(" (int")[0] # pero si la contiene, recortar
data['PTM'] = ptm
del listap[:]
                            for p in data.itervalues(): # iterate over the values that have been loaded into the dict
                                listap.append(str(p).replace("'", "''")) # and put them in a list
sql_insert_values_p = '\'' + \
'\', \''.join(listap) + \
'\''
                            # which later get joined the way they go into the INSERT
                            # The INSERT, where ' is replaced by '' to escape it for sql
                            if i >= desde: # to run the job in chunks
print(("INSERT INTO " + tabla_ptms + " VALUES (%r);"
% sql_insert_values_p).replace("-...", "").replace("\"", '').replace('.', ''))
# print("commit;")
# con.commit()
                            # join the value elements with commas
else:
            # If, instead, the entry has no interesting FTs, only the general data and defaults are loaded
del listar[:]
for r in data.itervalues():
listar.append(str(r).replace("'", "''"))
sql_insert_values_r = '\'' + '\', \''.join(listar) + '\''
            if i >= desde: # to run the job in chunks
print(("INSERT INTO " + tabla_ptms + " VALUES (%r);"
% sql_insert_values_r).replace("\"", '').replace('.', ''))
# print("commit;")
# con.commit()
        if i >= hasta: # according to uniprot the number of sequence entries is 54247468
# print("\n")
# print(i)
break
# The sequence counts 60 amino acids per line, in groups of 10 amino acids, beginning in position 6 of the line.
# http://www.uniprot.org/manual/
# General Annotation: cofactors, mass spectrometry data, PTM (complementary to MOD_RES and other PTMs..?)
# Sequence Annotation (Features): Sites (cleavage sites?), non-standard residue,
# MOD_RES (excludes lipids, crosslinks and glycans), lipidation, disulfide bond, cross-link, glycosylation
# TODO: consider PE "protein existence", KW contains "glycoprotein", what else?
# TODO: also inside FT
# output.close()
# print('\n')
# print(time.time() - start_time)
# """
| naikymen/QB9 | uniprot_parser/uniprot_parser_v01.py | Python | gpl-3.0 | 14,072 | 0.004417 |
"""JSON implementations of relationship searches."""
# pylint: disable=no-init
# Numerous classes don't require __init__.
# pylint: disable=too-many-public-methods,too-few-public-methods
# Number of methods are defined in specification
# pylint: disable=protected-access
# Access to protected methods allowed in package json package scope
# pylint: disable=too-many-ancestors
# Inheritance defined in specification
from . import objects
from . import queries
from .. import utilities
from ..osid import searches as osid_searches
from ..primitives import Id
from ..utilities import get_registry
from dlkit.abstract_osid.osid import errors
from dlkit.abstract_osid.relationship import searches as abc_relationship_searches
class RelationshipSearch(abc_relationship_searches.RelationshipSearch, osid_searches.OsidSearch):
"""The search interface for governing relationship searches."""
def __init__(self, runtime):
self._namespace = 'relationship.Relationship'
self._runtime = runtime
record_type_data_sets = get_registry('RESOURCE_RECORD_TYPES', runtime)
self._record_type_data_sets = record_type_data_sets
self._all_supported_record_type_data_sets = record_type_data_sets
self._all_supported_record_type_ids = []
self._id_list = None
for data_set in record_type_data_sets:
self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
osid_searches.OsidSearch.__init__(self, runtime)
@utilities.arguments_not_none
def search_among_relationships(self, relationship_ids):
"""Execute this search among the given list of relationships.
arg: relationship_ids (osid.id.IdList): list of relationships
raise: NullArgument - ``relationship_ids`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
self._id_list = relationship_ids
@utilities.arguments_not_none
def order_relationship_results(self, relationship_search_order):
"""Specify an ordering to the search results.
arg: relationship_search_order
(osid.relationship.RelationshipSearchOrder):
relationship search order
raise: NullArgument - ``relationship_search_order`` is ``null``
raise: Unsupported - ``relationship_search_order`` is not of
this service
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_relationship_search_record(self, relationship_search_record_type):
"""Gets the relationship search record corresponding to the given relationship search record ``Type``.
This method is used to retrieve an object implementing the
requested record.
arg: relationship_search_record_type (osid.type.Type): a
relationship search record type
return: (osid.relationship.records.RelationshipSearchRecord) -
the relationship search record
raise: NullArgument - ``relationship_search_record_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported -
``has_record_type(relationship_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
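# Minimal usage sketch for the search objects in this module (the search session,
# query and id list are assumptions about the surrounding OSID wiring, not defined here):
#   search = RelationshipSearch(runtime)
#   search.search_among_relationships(relationship_id_list)
#   results = session.get_relationships_by_search(query, search)
#   for relationship in results.get_relationships():
#       ...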
class RelationshipSearchResults(abc_relationship_searches.RelationshipSearchResults, osid_searches.OsidSearchResults):
"""This interface provides a means to capture results of a search."""
def __init__(self, results, query_terms, runtime):
# if you don't iterate, then .count() on the cursor is an inaccurate representation of limit / skip
# self._results = [r for r in results]
self._namespace = 'relationship.Relationship'
self._results = results
self._query_terms = query_terms
self._runtime = runtime
self.retrieved = False
def get_relationships(self):
"""Gets the relationship list resulting from a search.
return: (osid.relationship.RelationshipList) - the relationship
list
raise: IllegalState - list already retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.')
self.retrieved = True
return objects.RelationshipList(self._results, runtime=self._runtime)
relationships = property(fget=get_relationships)
def get_relationship_query_inspector(self):
"""Gets the inspector for the query to examine the terms used in the search.
return: (osid.relationship.RelationshipQueryInspector) - the
relationship query inspector
*compliance: mandatory -- This method must be implemented.*
"""
return queries.RelationshipQueryInspector(self._query_terms, runtime=self._runtime)
relationship_query_inspector = property(fget=get_relationship_query_inspector)
@utilities.arguments_not_none
def get_relationship_search_results_record(self, relationship_search_record_type):
"""Gets the relationship search results record corresponding to the given relationship search record ``Type``.
This method must be used to retrieve an object implementing the
requested record interface along with all of its ancestor
interfaces.
arg: relationship_search_record_type (osid.type.Type): a
relationship search record type
return:
(osid.relationship.records.RelationshipSearchResultsReco
rd) - the relationship search results record
raise: NullArgument - ``relationship_search_record_type`` is
``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported -
``has_record_type(relationship_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class FamilySearch(abc_relationship_searches.FamilySearch, osid_searches.OsidSearch):
"""The search interface for governing family searches."""
def __init__(self, runtime):
self._namespace = 'relationship.Family'
self._runtime = runtime
record_type_data_sets = get_registry('RESOURCE_RECORD_TYPES', runtime)
self._record_type_data_sets = record_type_data_sets
self._all_supported_record_type_data_sets = record_type_data_sets
self._all_supported_record_type_ids = []
self._id_list = None
for data_set in record_type_data_sets:
self._all_supported_record_type_ids.append(str(Id(**record_type_data_sets[data_set])))
osid_searches.OsidSearch.__init__(self, runtime)
@utilities.arguments_not_none
def search_among_families(self, family_ids):
"""Execute this search among the given list of families.
arg: family_ids (osid.id.IdList): list of families
raise: NullArgument - ``family_ids`` is ``null``
*compliance: mandatory -- This method must be implemented.*
"""
self._id_list = family_ids
@utilities.arguments_not_none
def order_family_results(self, family_search_order):
"""Specify an ordering to the search results.
arg: family_search_order
(osid.relationship.FamilySearchOrder): family search
order
raise: NullArgument - ``family_search_order`` is ``null``
raise: Unsupported - ``family_search_order`` is not of this
service
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
@utilities.arguments_not_none
def get_family_search_record(self, family_search_record_type):
"""Gets the family search record corresponding to the given family search record ``Type``.
This method is used to retrieve an object implementing the
requested record.
arg: family_search_record_type (osid.type.Type): a family
search record type
return: (osid.relationship.records.FamilySearchRecord) - the
family search record
raise: NullArgument - ``family_search_record_type`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported -
``has_record_type(family_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
class FamilySearchResults(abc_relationship_searches.FamilySearchResults, osid_searches.OsidSearchResults):
"""This interface provides a means to capture results of a search and is used as a vehicle to perform a search within a previous result set."""
def __init__(self, results, query_terms, runtime):
# if you don't iterate, then .count() on the cursor is an inaccurate representation of limit / skip
# self._results = [r for r in results]
self._namespace = 'relationship.Family'
self._results = results
self._query_terms = query_terms
self._runtime = runtime
self.retrieved = False
def get_families(self):
"""Gets the family list resulting from a search.
return: (osid.relationship.FamilyList) - the family list
raise: IllegalState - list already retrieved
*compliance: mandatory -- This method must be implemented.*
"""
if self.retrieved:
raise errors.IllegalState('List has already been retrieved.')
self.retrieved = True
return objects.FamilyList(self._results, runtime=self._runtime)
families = property(fget=get_families)
def get_family_query_inspector(self):
"""Gets the inspector for the query to examine the terms used in the search.
return: (osid.relationship.FamilyQueryInspector) - the family
query inspector
*compliance: mandatory -- This method must be implemented.*
"""
return queries.FamilyQueryInspector(self._query_terms, runtime=self._runtime)
family_query_inspector = property(fget=get_family_query_inspector)
@utilities.arguments_not_none
def get_family_search_results_record(self, family_search_record_type):
"""Gets the family search results record corresponding to the given family search record Type.
This method is used to retrieve an object implementing the
requested record.
arg: family_search_record_type (osid.type.Type): a family
search record type
return: (osid.relationship.records.FamilySearchResultsRecord) -
the family search results record
raise: NullArgument - ``FamilySearchRecordType`` is ``null``
raise: OperationFailed - unable to complete request
raise: PermissionDenied - authorization failure occurred
raise: Unsupported -
``has_record_type(family_search_record_type)`` is
``false``
*compliance: mandatory -- This method must be implemented.*
"""
raise errors.Unimplemented()
| mitsei/dlkit | dlkit/json_/relationship/searches.py | Python | mit | 11,651 | 0.001717 |
"""Generic entry point script."""
import sys
from tensorflow.python.platform import flags
def run():
f = flags.FLAGS
f._parse_flags()
main = sys.modules['__main__'].main
sys.exit(main(sys.argv))
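# Sketch of the entry-point pattern this helper supports (the importing module
# and its main() are placeholders for the caller's own code, not part of this file):
#   from tensorflow.python.platform import app
#   def main(argv):
#     ...
#   if __name__ == '__main__':
#     app.run()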
| liyu1990/tensorflow | tensorflow/python/platform/default/_app.py | Python | apache-2.0 | 214 | 0 |
from allauth.socialaccount.forms import SignupForm as BaseSignupForm
from django import forms
from django.core import validators
from django.utils.translation import gettext_lazy as _
USERNAME_REQUIRED = _("Username is required.")
USERNAME_SHORT = _(
"Username is too short (%(show_value)s characters). "
"It must be at least %(limit_value)s characters."
)
USERNAME_LONG = _(
"Username is too long (%(show_value)s characters). "
"It must be %(limit_value)s characters or less."
)
TERMS_REQUIRED = _("You must agree to the terms of use.")
class SignupForm(BaseSignupForm):
"""
The user registration form for allauth.
This overrides the default error messages for the username form field
with our own strings.
The heavy lifting happens in the view.
"""
terms = forms.BooleanField(
label=_("I agree"), required=True, error_messages={"required": TERMS_REQUIRED}
)
is_github_url_public = forms.BooleanField(
label=_("I would like to make my GitHub profile URL public"), required=False
)
is_newsletter_subscribed = forms.BooleanField(required=False)
duplicate_email_error_label = "_duplicate_email"
def __init__(self, *args, **kwargs):
super(SignupForm, self).__init__(*args, **kwargs)
self.fields["username"].error_messages = {
"required": USERNAME_REQUIRED,
"min_length": USERNAME_SHORT,
"max_length": USERNAME_LONG,
}
def clean_email(self):
value = self.cleaned_data["email"]
validators.validate_email(value)
return super(SignupForm, self).clean_email()
def raise_duplicate_email_error(self):
raise forms.ValidationError(self.duplicate_email_error_label)
| Elchi3/kuma | kuma/users/signup.py | Python | mpl-2.0 | 1,747 | 0.001145 |
# Time:  O(logn) on average, O(n) in the worst case (duplicates)
# Space: O(1)
#
# Follow up for "Search in Rotated Sorted Array":
# What if duplicates are allowed?
#
# Would this affect the run-time complexity? How and why?
#
# Write a function to determine if a given target is in the array.
#
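# Worked example: nums = [1, 1, 3, 1], target = 3. When nums[mid] == nums[left]
# we cannot tell which half is sorted, so the range can only shrink by one
# (left += 1); this duplicate case is what degrades the worst case to O(n).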
class Solution(object):
def search(self, nums, target):
"""
:type nums: List[int]
:type target: int
        :rtype: bool
"""
left, right = 0, len(nums) - 1
while left <= right:
mid = left + (right - left) / 2
if nums[mid] == target:
return True
elif nums[mid] == nums[left]:
left += 1
elif (nums[mid] > nums[left] and nums[left] <= target < nums[mid]) or \
(nums[mid] < nums[left] and not (nums[mid] < target <= nums[right])):
right = mid - 1
else:
left = mid + 1
return False
if __name__ == "__main__":
print Solution().search([3, 5, 1], 3)
print Solution().search([2, 2, 3, 3, 4, 1], 1)
print Solution().search([4, 4, 5, 6, 7, 0, 1, 2], 5)
| yiwen-luo/LeetCode | Python/search-in-rotated-sorted-array-ii.py | Python | mit | 1,138 | 0.006151 |
from time import sleep
import boto.ec2
import sys
import subprocess
import argparse
import os
# specify AWS keys
auth = {"aws_access_key_id": "<aws key>", "aws_secret_access_key": "<value>"}
# create the connection object
conn = boto.ec2.connect_to_region("us-east-1", **auth)
# status of vms
instancefree={}
# status of projects evaluated. set it False for the projects which need to be evaluated
projects={}
projects['Chart']=False
projects['Closure']=False
projects['Lang']=False
projects['Math']=False
projects['Time']=False
# parameters required for launching experiment
faultlocflag="false"
startseed=1
endseed=20
startdefectid=1
enddefectid=2
testtype="allHuman"
genprogpath="/home/ubuntu/genprog4java"
defects4jpath="/home/ubuntu/defects4j"
testpercent=100
# parameters required for vm
alldone=False
vmcount=1
ami='ami-<id>' # also need to specify this in create_instances method
instancetype='c4.xlarge' # also need to specify this in create_instances method
# number of defects to run for each project plus one. Here we are testing 15 defects (3 defects from each project).
# We can randomize the defects by modifying startdefect variable above and accordingly modify the values below
defects={}
defects['Chart']=4
defects['Closure']=4
defects['Lang']=4
defects['Math']=4
defects['Time']=4
def main():
    if len(sys.argv) < 3:  # need both an action and the path to the key file
        print "Usage: python fullrepairtesting.py {start|stop|terminate} path-to-key-file \n"
        sys.exit(0)
else:
action = sys.argv[1]
keypath = sys.argv[2]
if action == "start":
terminate_instances()
sleep(30)
delete_volumes()
create_instances(keypath)
elif action == "stop":
stopInstances()
elif action == "terminate":
terminate_instances()
sleep(30)
delete_volumes()
else:
print "Usage: python faultloctesting.py {start|stop|terminate} path-to-key-file \n"
# method to create instances
def create_instances(vm_key):
# create instances. specify ami, key, type, min and max count
instances_resv = conn.run_instances('ami-8385f094',key_name='defect4jvm',instance_type='c4.xlarge',security_group_ids=["sg-6a3e5112"], min_count = 5, max_count = 5)
print instances_resv
print "number of instances created = ", len(instances_resv.instances)
for i in instances_resv.instances:
print "creating instance ", i.id
while i.state == u'pending': # wait until instance gets created
print("Instance state: %s" % i.state)
sleep(10)
i.update()
global alldone
global vmcount
if projects['Closure'] and alldone: # if all experiments are launched and there are unused vms and unattached volumes then delete them
conn.terminate_instances(instance_ids=[i.id])
delete_volumes()
else: # setup the instance
setupInstance(i, vm_key)
vmcount = vmcount + 1
print("Instance state: %s" % i.state)
print("Public dns: %s" % i.public_dns_name)
return i.public_dns_name
# method to setup an instance for running the experiment
def setupInstance(i, vm_key):
print "Starting instance", i.id
if i.state == "stopped":
i.start()
while i.state == "pending":
sleep(1)
i.update()
status=conn.get_all_instance_status(instance_ids=[i.id])
print "system status is: ",status[0].system_status.status, status[0].instance_status.status
# wait until instance is initialized and reachable
while status[0].system_status.status != 'ok' and status[0].instance_status.status != 'ok':
status=conn.get_all_instance_status(instance_ids=[i.id])
print "system status is: ",status[0].system_status.status, status[0].instance_status.status
sleep(10)
print "instance started = ", i.id, " ip address is ", i.ip_address
instancefree[i.ip_address]=True
# launch experiment on instance
if i.ip_address != None and i.id!="i-10fa21c8":
print "copying launch-repair script to ", i.ip_address
do_scp(i.ip_address,"~/", vm_key)
print "set permissions of script on ", i.ip_address
set_permissions(i.ip_address, vm_key)
if not projects['Chart']: # launch chart defects
startdefectid = 1
enddefectid=defects['Chart']
if instancefree[i.ip_address] is True:
vmname="vm%s-Chart-%s-%s" %(vmcount, startdefectid, enddefectid)
i.add_tag("Name", vmname)
run(i.ip_address, vm_key, "Chart", startdefectid, enddefectid, "chart")
instancefree[i.ip_address]=False
if not projects['Lang']: # launch lang defects
startdefectid = 1
enddefectid=defects['Lang']
if instancefree[i.ip_address] is True:
vmname="vm%s-Lang-%s-%s" %(vmcount, startdefectid, enddefectid)
i.add_tag("Name", vmname)
run(i.ip_address, vm_key, "Lang", startdefectid, enddefectid, "lang")
instancefree[i.ip_address]=False
if not projects['Time']: # launch time defects
startdefectid = 1
enddefectid=defects['Time']
if instancefree[i.ip_address] is True:
vmname="vm%s-Time-%s-%s" %(vmcount, startdefectid, enddefectid)
i.add_tag("Name", vmname)
run(i.ip_address, vm_key, "Time", startdefectid, enddefectid, "time")
instancefree[i.ip_address]=False
if not projects['Math']: # launch math defects
startdefectid = 1
enddefectid=defects['Math']
if instancefree[i.ip_address] is True:
vmname="vm%s-Math-%s-%s" %(vmcount, startdefectid, enddefectid)
i.add_tag("Name", vmname)
run(i.ip_address, vm_key, "Math", startdefectid, enddefectid, "math")
instancefree[i.ip_address]=False
if not projects['Closure']: # launch closure defects
startdefectid = 1
enddefectid=defects['Closure']
if instancefree[i.ip_address] is True:
vmname="vm%s-Closure-%s-%s" %(vmcount, startdefectid, enddefectid)
i.add_tag("Name", vmname)
run(i.ip_address, vm_key, "Closure", startdefectid, enddefectid, "closure")
instancefree[i.ip_address]=False
# method to shutdown instances
def stopInstances():
print "stopping instances"
reservations = conn.get_all_reservations()
for reservation in reservations:
for instance in reservation.instances:
if instance.image_id == ami and instance.instance_type == instancetype and instance.state == "running":
print "stopping instance ", instance.id
instance.stop()
# method to terminate instances
def terminate_instances():
print "terminating not required instances"
reservations = conn.get_all_reservations()
for reservation in reservations:
for instance in reservation.instances:
if instance.image_id == ami and instance.instance_type == instancetype and instance.state == "stopped":
print "terminating instance ", instance.id
conn.terminate_instances(instance_ids=[instance.id])
# method to delete unattached volumes
def delete_volumes():
for vol in conn.get_all_volumes():
state = vol.attachment_state()
if state == None:
print vol.id, state
print "deleting volume = ", vol.id
conn.delete_volume(vol.id)
# method to run the script to launch an experiment on vm
def run(vmip, vm_key, project, startdefectid, enddefectid, folder):
ssh_cmd = "ssh -o StrictHostKeyChecking=false -n -i %s ubuntu@%s \"/bin/bash launch-repair.sh %s %s %s %s %s %s %s %s %s %s %s\" &"
ssh_str = ssh_cmd % (vm_key, vmip, project, startdefectid, enddefectid, folder, startseed, endseed, faultlocflag, genprogpath, defects4jpath, testtype, testpercent)
print "executing script remotely using ", ssh_str
FNULL = open(os.devnull, 'w')
return (subprocess.call(ssh_str,shell=True, stdout=FNULL, stderr=subprocess.STDOUT) == 0)
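# For reference, a call like run("10.0.0.5", "defect4jvm.pem", "Chart", 1, 4, "chart")
# expands (with the module-level defaults above) to roughly:
#   ssh -o StrictHostKeyChecking=false -n -i defect4jvm.pem ubuntu@10.0.0.5 \
#       "/bin/bash launch-repair.sh Chart 1 4 chart 1 20 false /home/ubuntu/genprog4java \
#        /home/ubuntu/defects4j allHuman 100" &
# (the IP address and key path here are made-up examples)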
# method to copy the script to the instance
def do_scp(to_scp, where_scp, vm_key):
script_path = "./launch-repair.sh"
scp_script_cmd = "scp -o StrictHostKeyChecking=false -i %s %s %s ubuntu@%s:%s"
scp_str = scp_script_cmd % (vm_key, vm_key, script_path, to_scp, where_scp)
print "copying script and key file to vm using:", scp_str
return (subprocess.call(scp_str,shell=True) == 0)
# method to set appropriate permissions to run the script
def set_permissions(vmip, vm_key):
ssh_cmd = "ssh -o StrictHostKeyChecking=false -n -i %s ubuntu@%s \"chmod +x /home/ubuntu/launch-repair.sh\" &"
ssh_str = ssh_cmd % (vm_key,vmip)
print "setting permission on script remotely using ",ssh_str
return (subprocess.call(ssh_str,shell=True) == 0)
if __name__ == '__main__':
main()
| ProgramRepair/experiments-infrastructure | awsscripts-defects4j/fullrepairtesting.py | Python | gpl-2.0 | 8,390 | 0.019547 |
#!/usr/bin/env python2
import time
import random
class Battle:
def __init__(self, user1, user2):
self.user1 = user1
self.user2 = user2
self.turn = user1
self.notTurn = user2
self.accepted = False
self.finished = False
self.auto = False
self.turnCount = 1
def fight(self, spell):
attacker = self.turn.getActivePokemon()
defender = self.notTurn.getActivePokemon()
message = attacker.fight(spell, defender)
if defender.life <= 0:
message += defender.name + " n'a plus de points de vie. "
if self.notTurn.hasAlivePokemon():
message += self.notTurn.username + " doit invoquer un nouveau pokemon. "
else:
message += self.notTurn.username + " a perdu. " + self.turn.username + " a gagne. "
message += attacker.name + " gagne " + str(attacker.calcGainedExp(defender)) + " points d'experience. "
old = attacker.level
attacker.gainExp(defender)
if attacker.level != old:
message += attacker.name + " passe niveau " + str(attacker.level) + "!"
self.finished = True
self.turn, self.notTurn = self.notTurn, self.turn
self.turnCount += 1
return message
def itemUsed(self):
self.turn, self.notTurn = self.notTurn, self.turn
def nextStep(self):
if self.finished:
self.user1.battle = None
self.user2.battle = None
return False
elif self.auto and self.turnCount % 2 == 0:
time.sleep(2)
return self.fight(self.turn.getActivePokemon().spells[random.randint(0, len(self.turn.getActivePokemon().spells) - 1)].name)
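# Rough usage sketch (the user objects, their pokemon methods and the spell name are
# assumptions about the rest of the bot, not defined in this file):
#   battle = Battle(user1, user2)
#   battle.accepted = True
#   message = battle.fight("charge")  # user1's active pokemon attacks, turn passes to user2
#   battle.nextStep()                 # plays the bot's turn when auto is set, or
#                                     # unbinds both users and returns False once finished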
| AlexMog/IRCPokemonBot | commands/classes/Battle.py | Python | mit | 1,766 | 0.003964 |
# Python3
from solution1 import urlSimilarity as f
qa = [
('https://codesignal.com/home/test?param1=42¶m3=testing&login=admin',
'https://codesignal.com/home/secret/test?param3=fish¶m1=42&password=admin',
19),
('https://codesignal.com/home/test?param1=42¶m3=testing&login=admin',
'http://codesignal.org/about?42=param1&tesing=param3&admin=login',
0),
('https://www.google.com/search?q=codesignal',
'http://www.google.com/search?q=codesignal',
13),
('ftp://www.example.com/query?varName=value',
'http://example.com/query?varName=value',
3),
('ftp://www',
'http://anotherexample.com/www?ftp=http',
0),
('https://codesignal.com/home/test?param1=42¶m3=testing&login=admin¶m4=abc¶m5=codesignal',
'https://codesignal.com/home/secret/test?param3=fish¶m1=42&codesignal=admin¶m5=test',
20)
]
for *q, a in qa:
for i, e in enumerate(q):
print('input{0}: {1}'.format(i + 1, e))
ans = f(*q)
if ans != a:
print(' [failed]')
print(' output:', ans)
print(' expected:', a)
else:
print(' [ok]')
print(' output:', ans)
print()
| RevansChen/online-judge | Codefights/arcade/python-arcade/level-13/90.Url-Similarity/Python/test.py | Python | mit | 1,205 | 0.00249 |
# This code is so you can run the samples without installing the package
import sys
import os
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
#
testinfo = "s, t 3, s, t 6.1, s, q"
tags = "MoveBy"
import cocos
from cocos.director import director
from cocos.actions import MoveBy
from cocos.sprite import Sprite
import pyglet
class TestLayer(cocos.layer.Layer):
def __init__(self):
super( TestLayer, self ).__init__()
x,y = director.get_window_size()
self.sprite = Sprite( 'grossini.png', (x/2, y/2) )
self.add( self.sprite, name='sprite' )
self.sprite.do( MoveBy( (x/2,y/2), 6 ) )
def main():
director.init()
test_layer = TestLayer ()
main_scene = cocos.scene.Scene()
main_scene.add(test_layer, name='test_layer')
director.run (main_scene)
if __name__ == '__main__':
main()
| eevee/cocos2d-mirror | test/test_moveby.py | Python | bsd-3-clause | 867 | 0.026528 |
# -*- coding: utf-8 -*-
# Copyright (c) 2018, Frappe Technologies Pvt. Ltd. and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
from frappe.model.document import Document
class CashFlowMapper(Document):
pass
| ovresko/erpnext | erpnext/accounts/doctype/cash_flow_mapper/cash_flow_mapper.py | Python | gpl-3.0 | 267 | 0.003745 |
import os, importlib, inspect
import api.plugin, util.logger_factory
class Loader():
def __init__(self, scheduler, bot, sql_conn):
filenames = os.listdir(os.path.abspath(__file__+'/../../ext'))
self.ext_names = [x[:-3] for x in filenames if x[-3:] == '.py' and x != '__init__.py']
self.scheduler = scheduler
self.bot = bot
self.sql_conn = sql_conn
self.sql = sql_conn.cursor()
self.logger = util.logger_factory.instance().getLogger('api.loader')
self.sql.execute('CREATE TABLE IF NOT EXISTS `__plugins` (name)')
self.sql_conn.commit()
def load_all(self, load_extensions = None):
self.logger.debug('Loading all extensions')
self.plugins = []
for module in self.ext_names:
module = importlib.import_module('ext.'+module)
class_info = self._get_class(module)
if class_info is None:
continue
if load_extensions != '~~All~~' and class_info[0] not in load_extensions:
self.logger.debug('Skipping extension %s, not included in load_extensions config value', class_info[0])
continue
logger = util.logger_factory.instance().getLogger('ext.'+class_info[0])
class_obj = class_info[1](self.scheduler, self.bot.network_list, self.sql, logger)
self.plugins.append({'name':class_info[0], 'object':class_obj, 'module': module})
self._install_plugins()
self._start_plugins()
self.sql_conn.commit()
def _get_class(self, module):
		for info in inspect.getmembers(module, inspect.isclass):  # classes only, so issubclass below cannot fail
if issubclass(info[1], api.plugin.Plugin) and info[1] is not api.plugin.Plugin:
return info
def _install_plugins(self):
for plugin in self.plugins:
self.sql.execute('SELECT * FROM `__plugins` WHERE name = ?', (plugin['name'],))
if self.sql.fetchone() is None:
self.logger.info('Installing extension %s', plugin['name'])
plugin['object']._install_()
self.sql.execute('INSERT INTO `__plugins`(name) values (?)', (plugin['name'],))
def _start_plugins(self):
for plugin in self.plugins:
plugin['object']._start_() | deltaburnt/LameBot | api/loader.py | Python | gpl-2.0 | 1,994 | 0.037111 |
# -*- coding: utf-8 -*-
import os
from setuptools import setup, find_packages
root = os.path.abspath(os.path.dirname(__file__))
try:
with open(os.path.join(root, 'README.rst')) as f:
README = f.read()
with open(os.path.join(root, 'CHANGES.rst')) as f:
CHANGES = f.read()
except IOError:
README, CHANGES = '', ''
install_requires = [
'setuptools'
]
tests_require = [
'pytest >= 2.6.4'
]
setup(name='papylon',
version='0.6.1',
description='Random testing for Python',
long_description=README + '\n\n' + CHANGES,
classifiers=[
"Development Status :: 4 - Beta",
"Intended Audience :: Developers",
"License :: OSI Approved :: MIT License",
"Programming Language :: Python",
"Programming Language :: Python :: 3.3",
"Programming Language :: Python :: 3.4",
"Programming Language :: Python :: 3.5",
"Topic :: Software Development :: Libraries",
"Topic :: Software Development :: Testing",
"Topic :: Utilities"
],
keywords='papylon quickcheck random test',
author='Kazuhiro Matsushima',
author_email='the25thcromosome@gmail.com',
license='The MIT License (MIT)',
url='https://github.com/Gab-km/papylon',
packages=find_packages(),
include_package_data=True,
install_requires=install_requires,
tests_require=tests_require,
test_suite='py.test')
| Gab-km/papylon | setup.py | Python | mit | 1,466 | 0 |
# -*- coding: utf-8 -*-
# Copyright 2013 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration tests for notification command."""
from __future__ import absolute_import
from __future__ import print_function
from __future__ import division
from __future__ import unicode_literals
import re
import time
import uuid
import boto
import gslib.tests.testcase as testcase
from gslib.tests.util import ObjectToURI as suri
from gslib.tests.util import unittest
from gslib.utils.retry_util import Retry
def _LoadNotificationUrl():
return boto.config.get_value('GSUtil', 'test_notification_url')
NOTIFICATION_URL = _LoadNotificationUrl()
class TestNotification(testcase.GsUtilIntegrationTestCase):
"""Integration tests for notification command."""
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_watch_bucket(self):
"""Tests creating a notification channel on a bucket."""
bucket_uri = self.CreateBucket()
self.RunGsUtil(
['notification', 'watchbucket', NOTIFICATION_URL,
suri(bucket_uri)])
identifier = str(uuid.uuid4())
token = str(uuid.uuid4())
stderr = self.RunGsUtil([
'notification', 'watchbucket', '-i', identifier, '-t', token,
NOTIFICATION_URL,
suri(bucket_uri)
],
return_stderr=True)
self.assertIn('token: %s' % token, stderr)
self.assertIn('identifier: %s' % identifier, stderr)
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_stop_channel(self):
"""Tests stopping a notification channel on a bucket."""
bucket_uri = self.CreateBucket()
stderr = self.RunGsUtil(
['notification', 'watchbucket', NOTIFICATION_URL,
suri(bucket_uri)],
return_stderr=True)
channel_id = re.findall(r'channel identifier: (?P<id>.*)', stderr)
self.assertEqual(len(channel_id), 1)
resource_id = re.findall(r'resource identifier: (?P<id>.*)', stderr)
self.assertEqual(len(resource_id), 1)
channel_id = channel_id[0]
resource_id = resource_id[0]
self.RunGsUtil(['notification', 'stopchannel', channel_id, resource_id])
@unittest.skipUnless(NOTIFICATION_URL,
'Test requires notification URL configuration.')
def test_list_one_channel(self):
"""Tests listing notification channel on a bucket."""
# TODO(b/132277269): Re-enable these once the service-side bug is fixed.
    raise unittest.SkipTest('Functionality has been disabled due to b/132277269')
bucket_uri = self.CreateBucket()
# Set up an OCN (object change notification) on the newly created bucket.
self.RunGsUtil(
['notification', 'watchbucket', NOTIFICATION_URL,
suri(bucket_uri)],
return_stderr=False)
# The OCN listing in the service is eventually consistent. In initial
# tests, it almost never was ready immediately after calling WatchBucket
# above, so we A) sleep for a few seconds before the first OCN listing
# attempt, and B) wrap the OCN listing attempt in retry logic in case
# it raises a BucketNotFoundException (note that RunGsUtil will raise this
# as an AssertionError due to the exit status not being 0).
@Retry(AssertionError, tries=3, timeout_secs=5)
def _ListObjectChangeNotifications():
stderr = self.RunGsUtil(['notification', 'list', '-o',
suri(bucket_uri)],
return_stderr=True)
return stderr
time.sleep(5)
stderr = _ListObjectChangeNotifications()
channel_id = re.findall(r'Channel identifier: (?P<id>.*)', stderr)
self.assertEqual(len(channel_id), 1)
resource_id = re.findall(r'Resource identifier: (?P<id>.*)', stderr)
self.assertEqual(len(resource_id), 1)
push_url = re.findall(r'Application URL: (?P<id>.*)', stderr)
self.assertEqual(len(push_url), 1)
subscriber_email = re.findall(r'Created by: (?P<id>.*)', stderr)
self.assertEqual(len(subscriber_email), 1)
creation_time = re.findall(r'Creation time: (?P<id>.*)', stderr)
self.assertEqual(len(creation_time), 1)
def test_invalid_subcommand(self):
stderr = self.RunGsUtil(['notification', 'foo', 'bar', 'baz'],
return_stderr=True,
expected_status=1)
self.assertIn('Invalid subcommand', stderr)
| catapult-project/catapult | third_party/gsutil/gslib/tests/test_notification.py | Python | bsd-3-clause | 4,981 | 0.002409 |
# -*- encoding: utf-8 -*-
"""API Tests for foreman discovery feature"""
from robottelo.common.decorators import stubbed
from robottelo.test import APITestCase
class Discovery(APITestCase):
"""Implements tests for foreman discovery feature"""
@stubbed()
def test_list_all_discovered_host(self):
"""@Test: List all discovered hosts
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and a host should be
discovered
@Steps:
1. GET /api/v2/discovered_hosts
@Assert: List of all discovered hosts are retrieved
@Status: Manual
"""
@stubbed()
def test_show_discovered_host(self):
"""@Test: Show a specific discovered hosts
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and a host should be
discovered
@Steps:
1. GET /api/v2/discovered_hosts/:id
@Assert: Selected host is retrieved
@Status: Manual
"""
@stubbed()
def test_create_discovered_host(self):
"""@Test: Create a discovered hosts
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and a host should be
discovered
@Steps:
1. POST /api/v2/discovered_hosts
@Assert: Host should be created successfully
@Status: Manual
"""
@stubbed()
def test_provision_discovered_host(self):
"""@Test: Provision a discovered hosts
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and a host should be
discovered
@Steps:
1. PUT /api/v2/discovered_hosts/:id
@Assert: Host should be provisioned successfully
@Status: Manual
"""
@stubbed()
def test_delete_discovered_host(self):
"""@Test: Delete a discovered hosts
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and a host should be
discovered
@Steps:
1. DELETE /api/v2/discovered_hosts/:id
@Assert: Discovered Host should be deleted successfully
@Status: Manual
"""
@stubbed()
def test_auto_provision_host(self):
"""@Test: Auto provision a host by executing discovery rules
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and a host should be
discovered
@Steps:
1. POST /api/v2/discovered_hosts/:id/auto_provision
@Assert: Selected Host should be auto-provisioned successfully
@Status: Manual
"""
@stubbed()
def test_auto_provision_all_host(self):
"""@Test: Auto provision all host by executing discovery rules
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and more than one host should
be discovered
@Steps:
1. POST /api/v2/discovered_hosts/auto_provision_all
@Assert: All discovered hosts should be auto-provisioned successfully
@Status: Manual
"""
@stubbed()
def test_refresh_facts(self):
"""@Test: Refreshing the facts of discovered host
@Feature: Foreman Discovery
@Setup:
1. Provisioning should be configured and more than one host should
be discovered
2. Add a NIC on discovered host
@Steps:
1. PUT /api/v2/discovered_hosts/:id/refresh_facts
@Assert: Added Fact should be displayed on refreshing the facts
@Status: Manual
"""
@stubbed()
def test_reboot_host(self):
"""@Test: Rebooting a discovered host
@Feature: Foreman Discovery
@Setup: Provisioning should be configured and more than one host should
be discovered
@Steps:
1. PUT /api/v2/discovered_hosts/:id/reboot
@Assert: Selected host should be rebooted successfully
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_1(self):
"""@Test: Create a new discovery rule
Set query as (e.g IP=IP_of_discovered_host)
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: Host should reboot and provision
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_2(self):
"""@Test: Create a new discovery rule with (host_limit = 0)
        that applies to multiple hosts.
Set query as cpu_count = 1 OR mem > 500
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: All Hosts of same subnet should reboot and provision
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_3(self):
"""@Test: Create multiple discovery rules with different priority
@Feature: Foreman Discovery
@Setup: Multiple hosts should already be discovered
@Assert: Host with lower count have higher priority
and that rule should be executed first
@Status: Manual
"""
@stubbed()
def test_create_discovery_rule_4(self):
"""@Test: Create a discovery rule (CPU_COUNT = 2) with host limit 1 and
        provision more than one host with the same rule
@Feature: Foreman Discovery
@Setup: Host with two CPUs should already be discovered
        @Assert: Rule should only be applied to one discovered host; for the
        other host the rule should be skipped.
@Status: Manual
"""
@stubbed()
def test_rule_with_invalid_host_limit(self):
"""@Test: Create a discovery rule with invalid(-ve/text value) host
limit
@Feature: Foreman Discovery
@Setup: Host with two CPUs should already be discovered
@Assert: Validation error should be raised
@Status: Manual
"""
@stubbed()
def test_rule_with_invalid_priority(self):
"""@Test: Create a discovery rule with invalid(text value) priority
@Feature: Foreman Discovery
@Setup: Host with two CPUs should already be discovered
@Assert: Validation error should be raised
@Status: Manual
"""
@stubbed()
def test_create_rule_with_long_name(self):
"""@Test: Create a discovery rule with more than 255 char
@Feature: Foreman Discovery
@Setup: Host with two CPUs should already be discovered
@Assert: Validation error should be raised
@Status: Manual
"""
@stubbed()
def test_delete_discovery_rule(self):
"""@Test: Delete a discovery rule
@Feature: Foreman Discovery
@Assert: Rule should be deleted successfully
@Status: Manual
"""
@stubbed()
def test_update_discovery_rule_1(self):
"""@Test: Update an existing rule and execute it
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: User should be able to update the rule and it should be
executed on discovered host
@Status: Manual
"""
@stubbed()
def test_update_discovery_rule_2(self):
"""@Test: Update the discovered host name and provision it
@Feature: Foreman Discovery
@Setup: Host should already be discovered
@Assert: The host name should be updated and host should be provisioned
@Status: Manual
"""
| apagac/robottelo | tests/foreman/api/test_discovery.py | Python | gpl-3.0 | 7,489 | 0 |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
import unittest
from citest.base import (
ExecutionContext,
JsonSnapshotHelper)
import citest.json_contract as jc
import citest.json_predicate as jp
_called_verifiers = []
_TEST_FOUND_ERROR_COMMENT='Found error.'
class TestObsoleteObservationFailureVerifier(jc.ObservationFailureVerifier):
def __init__(self, title, expect):
super(TestObsoleteObservationFailureVerifier, self).__init__(title)
self.__expect = expect
def _error_comment_or_none(self, error):
if error.args[0] == self.__expect:
return _TEST_FOUND_ERROR_COMMENT
return None
def _makeObservationVerifyResult(
valid, observation=None,
good_results=None, bad_results=None, failed_constraints=None):
default_result = jp.PredicateResult(valid=valid)
good_results = good_results or ([default_result] if valid else [])
bad_results = bad_results or ([] if valid else [default_result])
failed_constraints = failed_constraints or []
observation = observation or jc.Observation()
good_attempt_results = [jp.ObjectResultMapAttempt(observation, result)
for result in good_results]
bad_attempt_results = [jp.ObjectResultMapAttempt(observation, result)
for result in bad_results]
return jc.ObservationVerifyResult(
valid=valid, observation=observation,
good_results=good_attempt_results,
bad_results=bad_attempt_results,
failed_constraints=failed_constraints)
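# Note on the helper above: good_results/bad_results are wrapped into
# ObjectResultMapAttempt instances, so e.g. _makeObservationVerifyResult(True)
# yields a valid result with a single generic good PredicateResult and no bad ones.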
class FakeObservationVerifier(jc.ObservationVerifier):
def __init__(self, title, dnf_verifier, result):
super(FakeObservationVerifier, self).__init__(
title=title, dnf_verifiers=dnf_verifier)
self.__result = result
def __call__(self, context, observation):
_called_verifiers.append(self)
return self.__result
class ObservationVerifierTest(unittest.TestCase):
def assertEqual(self, expect, have, msg=''):
if not msg:
msg = 'EXPECTED\n{0!r}\nGOT\n{1!r}'.format(expect, have)
JsonSnapshotHelper.AssertExpectedValue(expect, have, msg)
def test_result_builder_add_good_result(self):
context = ExecutionContext()
observation = jc.Observation()
observation.add_object('A')
pred = jp.PathPredicate(None, jp.STR_EQ('A'))
builder = jc.ObservationVerifyResultBuilder(observation)
map_pred = jp.MapPredicate(pred)
map_result = map_pred(context, observation.objects)
builder.add_map_result(map_result)
verify_results = builder.build(True)
self.assertTrue(verify_results)
self.assertEqual(observation, verify_results.observation)
self.assertEqual([], verify_results.bad_results)
self.assertEqual([], verify_results.failed_constraints)
self.assertEqual(map_result.good_object_result_mappings,
verify_results.good_results)
def test_result_builder_add_bad_result(self):
context = ExecutionContext()
observation = jc.Observation()
observation.add_object('A')
pred = jp.PathPredicate(None, jp.STR_EQ('B'))
builder = jc.ObservationVerifyResultBuilder(observation)
map_pred = jp.MapPredicate(pred)
map_result = map_pred(context, observation.objects)
builder.add_map_result(map_result)
verify_results = builder.build(False)
self.assertFalse(verify_results)
self.assertEqual(observation, verify_results.observation)
self.assertEqual([], verify_results.good_results)
self.assertEqual([pred], verify_results.failed_constraints)
self.assertEqual(map_result.bad_object_result_mappings,
verify_results.bad_results)
def test_result_builder_add_mixed_results(self):
context = ExecutionContext()
observation = jc.Observation()
observation.add_object('GOOD')
observation.add_object('BAD')
pred = jp.PathPredicate(None, jp.STR_EQ('GOOD'))
builder = jc.ObservationVerifyResultBuilder(observation)
map_pred = jp.MapPredicate(pred)
map_result = map_pred(context, observation.objects)
builder.add_map_result(map_result)
verify_results = builder.build(False)
self.assertFalse(verify_results)
self.assertEqual(observation, verify_results.observation)
self.assertEqual(map_result.good_object_result_mappings,
verify_results.good_results)
self.assertEqual([], verify_results.failed_constraints)
self.assertEqual(map_result.bad_object_result_mappings,
verify_results.bad_results)
def test_result_observation_verifier_conjunction_ok(self):
context = ExecutionContext()
builder = jc.ObservationVerifierBuilder(title='Test')
verifiers = []
pred_results = []
for i in range(3):
this_result = jp.PredicateResult(True, comment='Pred {0}'.format(i))
pred_results.append(this_result)
result = _makeObservationVerifyResult(
valid=True, good_results=[this_result])
fake_verifier = FakeObservationVerifier(
title=i, dnf_verifier=[], result=result)
verifiers.append(fake_verifier)
builder.AND(fake_verifier)
# verify build can work multiple times
self.assertEqual(builder.build(), builder.build())
verifier = builder.build()
self.assertEqual([verifiers], verifier.dnf_verifiers)
expect = _makeObservationVerifyResult(True, good_results=pred_results)
global _called_verifiers
_called_verifiers = []
got = verifier(context, jc.Observation())
self.assertEqual(expect, got)
self.assertEqual(verifiers, _called_verifiers)
def test_result_observation_verifier_conjunction_failure_aborts_early(self):
context = ExecutionContext()
builder = jc.ObservationVerifierBuilder(title='Test')
verifiers = []
results = []
pred_results = [jp.PredicateResult(False, comment='Result %d' % i)
for i in range(3)]
for i in range(3):
result = _makeObservationVerifyResult(
valid=False, bad_results=[pred_results[i]])
fake_verifier = FakeObservationVerifier(
title=i, dnf_verifier=[], result=result)
verifiers.append(fake_verifier)
results.append(result)
builder.AND(fake_verifier)
# verify build can work multiple times
self.assertEqual(builder.build(), builder.build())
verifier = builder.build()
self.assertEqual([verifiers], verifier.dnf_verifiers)
expect = _makeObservationVerifyResult(
False, bad_results=[pred_results[0]])
global _called_verifiers
_called_verifiers = []
got = verifier(context, jc.Observation())
self.assertEqual(expect, got)
self.assertEqual(verifiers[:1], _called_verifiers)
def test_result_observation_verifier_disjunction_success_aborts_early(self):
context = ExecutionContext()
builder = jc.ObservationVerifierBuilder(title='Test')
verifiers = []
results = []
pred_results = [jp.PredicateResult(False, comment='Result %d' % i)
for i in range(2)]
for i in range(2):
result = _makeObservationVerifyResult(
valid=True, good_results=[pred_results[i]])
fake_verifier = FakeObservationVerifier(
title=i, dnf_verifier=[], result=result)
verifiers.append(fake_verifier)
results.append(result)
builder.OR(fake_verifier)
verifier = builder.build()
self.assertEqual([verifiers[0:1], verifiers[1:2]], verifier.dnf_verifiers)
expect = _makeObservationVerifyResult(True, good_results=[pred_results[0]])
global _called_verifiers
_called_verifiers = []
got = verifier(context, jc.Observation())
self.assertEqual(expect, got)
self.assertEqual(verifiers[:1], _called_verifiers)
def test_result_observation_verifier_disjunction_failure(self):
context = ExecutionContext()
observation = jc.Observation()
builder = jc.ObservationVerifierBuilder(title='Test')
verifiers = []
results = []
pred_results = [jp.PredicateResult(False, comment='Result %d' % i)
for i in range(2)]
for i in range(2):
result = _makeObservationVerifyResult(observation=observation,
valid=False, bad_results=[pred_results[i]])
fake_verifier = FakeObservationVerifier(
title=i, dnf_verifier=[], result=result)
verifiers.append(fake_verifier)
results.append(result)
builder.OR(fake_verifier)
verifier = builder.build()
self.assertEqual([verifiers[0:1], verifiers[1:2]], verifier.dnf_verifiers)
expect = _makeObservationVerifyResult(
False, observation=observation, bad_results=pred_results)
global _called_verifiers
_called_verifiers = []
got = verifier(context, observation)
self.assertEqual(expect, got)
self.assertEqual(verifiers, _called_verifiers)
def test_obsolete_observation_failure_ok(self):
error_text = 'the error'
context = ExecutionContext()
observation = jc.Observation()
error = ValueError(error_text)
observation.add_error(error)
failure_verifier = TestObsoleteObservationFailureVerifier(
'Test', error_text)
failure_pred_result = jc.ObservationFailedError([error], valid=True)
expect_failure = jc.ObservationVerifyResult(
valid=True, observation=observation,
good_results=[jp.ObjectResultMapAttempt(observation,
failure_pred_result)],
bad_results=[], failed_constraints=[],
comment=_TEST_FOUND_ERROR_COMMENT)
got = failure_verifier(context, observation)
self.assertEqual(expect_failure, got)
builder = jc.ObservationVerifierBuilder(title='Test')
builder.EXPECT(failure_verifier)
verifier = builder.build()
expect = jc.ObservationVerifyResult(
valid=True, observation=observation,
good_results=expect_failure.good_results,
bad_results=[], failed_constraints=[])
got = verifier(context, observation)
self.assertEqual(expect, got)
def test_observation_failure_ok(self):
error_text = 'the error'
context = ExecutionContext()
observation = jc.Observation()
error = ValueError(error_text)
observation.add_error(error)
exception_pred = jp.ExceptionMatchesPredicate(
ValueError, regex=error_text)
builder = jc.ObservationVerifierBuilder(title='Test')
builder.EXPECT(jc.ObservationErrorPredicate(jp.LIST_MATCHES([exception_pred])))
failure_verifier = builder.build()
observation_predicate_result = jc.ObservationPredicateResult(
True, observation, jp.LIST_MATCHES([exception_pred]),
jp.LIST_MATCHES([exception_pred])(context, [error]))
expect_failure = jc.ObservationVerifyResult(
True, observation,
good_results=[observation_predicate_result],
bad_results=[], failed_constraints=[])
got = failure_verifier(context, observation)
self.assertEqual(expect_failure, got)
def test_obsolete_observation_failure_not_ok(self):
error_text = 'the error'
context = ExecutionContext()
observation = jc.Observation()
error = ValueError('not the error')
observation.add_error(error)
failure_verifier = TestObsoleteObservationFailureVerifier(
'Test', error_text)
comment = failure_verifier._error_not_found_comment(observation)
failure_pred_result = jp.PredicateResult(valid=False, comment=comment)
expect_failure = jc.ObservationVerifyResult(
valid=False, observation=observation,
bad_results=[jp.ObjectResultMapAttempt(observation,
failure_pred_result)],
good_results=[], failed_constraints=[],
comment=comment)
self.assertEqual(expect_failure, failure_verifier(context, observation))
builder = jc.ObservationVerifierBuilder(title='Test Verifier')
builder.EXPECT(failure_verifier)
verifier = builder.build()
expect = jc.ObservationVerifyResult(
valid=False, observation=observation,
bad_results=expect_failure.bad_results,
good_results=[], failed_constraints=[])
got = verifier(context, observation)
self.assertEqual(expect, got)
def test_obsolete_observation_failure_or_found(self):
context = ExecutionContext()
observation = jc.Observation()
observation.add_error(ValueError('not the error'))
failure_verifier = TestObsoleteObservationFailureVerifier(
'Verify', 'NotFound')
comment = failure_verifier._error_not_found_comment(observation)
failure_result = jp.PredicateResult(valid=False, comment=comment)
# We've already established this result is what we expect
bad_observation_result = failure_verifier(context, observation)
success_pred_result = jp.PredicateResult(valid=True)
good_observation_result = _makeObservationVerifyResult(
valid=True,
good_results=[success_pred_result],
observation=observation)
success_verifier = FakeObservationVerifier(
'Found', dnf_verifier=[], result=good_observation_result)
builder = jc.ObservationVerifierBuilder(title='Observation Verifier')
builder.EXPECT(failure_verifier).OR(success_verifier)
verifier = builder.build()
expect = jc.ObservationVerifyResult(
valid=True, observation=observation,
bad_results=bad_observation_result.bad_results,
good_results=good_observation_result.good_results,
failed_constraints=[])
got = verifier(context, observation)
self.assertEqual(expect, got)
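# Note (not part of the original tests, added for clarity): the assertions above
# encode the builder's disjunctive-normal-form layout. Chaining AND(...) extends a
# single clause, giving dnf_verifiers == [[a, b, c]], while chaining OR(...) adds
# new one-verifier clauses, giving [[a], [b]].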
if __name__ == '__main__':
unittest.main()
| google/citest | tests/json_contract/observation_verifier_test.py | Python | apache-2.0 | 14,132 | 0.004033 |
# -*- mode: python; coding: utf-8 -*-
# Copyright 2016-2017 Peter Williams <peter@newton.cx> and collaborators
# Licensed under the MIT License
"""Various helpers for X-ray analysis that rely on CIAO tools.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
__all__ = str('''
get_region_area
count_events
compute_bgband
simple_srcflux
''').split ()
def get_region_area (env, evtpath, region):
with env.slurp (argv=['dmlist', '%s[sky=%s]' % (evtpath, region), 'subspace'], linebreak=True) as s:
for etype, payload in s:
if etype != 'stdout':
continue
if b'Region area' not in payload:
continue
return float (payload.split ()[-1])
raise Exception ('parsing of dmlist output failed')
def count_events (env, evtpath, filter):
"""TODO: this can probably be replaced with simply reading the file
ourselves!
"""
with env.slurp (argv=['dmstat', '%s%s[cols energy]' % (evtpath, filter)], linebreak=True) as s:
for etype, payload in s:
if etype != 'stdout':
continue
if b'good:' not in payload:
continue
return int (payload.split ()[-1])
    raise Exception ('parsing of dmstat output failed')
def compute_bgband (evtpath, srcreg, bkgreg, ebins, env=None):
"""Compute background information for a source in one or more energy bands.
evtpath
Path to a CIAO events file
srcreg
String specifying the source region to consider; use 'region(path.reg)' if you
have the region saved in a file.
bkgreg
String specifying the background region to consider; same format as srcreg
ebins
Iterable of 2-tuples giving low and high bounds of the energy bins to
consider, measured in eV.
env
An optional CiaoEnvironment instance; default settings are used if unspecified.
Returns a DataFrame containing at least the following columns:
elo
The low bound of this energy bin, in eV.
ehi
The high bound of this energy bin, in eV.
ewidth
The width of the bin in eV; simply `abs(ehi - elo)`.
nsrc
The number of events within the specified source region and energy range.
nbkg
The number of events within the specified background region and energy range.
nbkg_scaled
The number of background events scaled to the source area; not an integer.
nsrc_subbed
The estimated number of non-background events in the source region; simply
`nsrc - nbkg_scaled`.
log_prob_bkg
The logarithm of the probability that all counts in the source region are due
to background events.
src_sigma
The confidence of source detection in sigma inferred from log_prob_bkg.
The probability of backgrounditude is computed as:
b^s * exp (-b) / s!
where `b` is `nbkg_scaled` and `s` is `nsrc`. The confidence of source detection is
computed as:
sqrt(2) * erfcinv (prob_bkg)
where `erfcinv` is the inverse complementary error function.
"""
import numpy as np
import pandas as pd
from scipy.special import erfcinv, gammaln
if env is None:
from . import CiaoEnvironment
env = CiaoEnvironment ()
srcarea = get_region_area (env, evtpath, srcreg)
bkgarea = get_region_area (env, evtpath, bkgreg)
srccounts = [count_events (env, evtpath, '[sky=%s][energy=%d:%d]' % (srcreg, elo, ehi))
for elo, ehi in ebins]
bkgcounts = [count_events (env, evtpath, '[sky=%s][energy=%d:%d]' % (bkgreg, elo, ehi))
for elo, ehi in ebins]
df = pd.DataFrame ({
'elo': [t[0] for t in ebins],
'ehi': [t[1] for t in ebins],
'nsrc': srccounts,
'nbkg': bkgcounts
})
df['ewidth'] = np.abs (df['ehi'] - df['elo'])
df['nbkg_scaled'] = df['nbkg'] * srcarea / bkgarea
df['log_prob_bkg'] = df['nsrc'] * np.log (df['nbkg_scaled']) - df['nbkg_scaled'] - gammaln (df['nsrc'] + 1)
df['src_sigma'] = np.sqrt (2) * erfcinv (np.exp (df['log_prob_bkg']))
df['nsrc_subbed'] = df['nsrc'] - df['nbkg_scaled']
return df
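# Illustrative sketch, not part of the original module: a hedged example of how
# compute_bgband() is meant to be called; the event file and region strings are
# hypothetical placeholders.
def _example_compute_bgband():
    ebins = [(500, 2000), (2000, 7000)]  # 0.5-2 keV and 2-7 keV, in eV
    df = compute_bgband('evt2.fits', 'region(src.reg)', 'region(bkg.reg)', ebins)
    # For, say, nsrc = 12 and nbkg_scaled = 3.4 the columns defined above reduce to
    #   log_prob_bkg = 12 * ln(3.4) - 3.4 - ln(12!) ~= -8.7
    #   src_sigma    = sqrt(2) * erfcinv(exp(-8.7)) ~= 3.8
    return df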
def _rmtree_error (func, path, excinfo):
from ...cli import warn
warn ('couldn\'t delete temporary file %s: %s (%s)', path, excinfo[0], func)
def simple_srcflux(env, infile=None, psfmethod='arfcorr', conf=0.68,
verbose=0, **kwargs):
"""Run the CIAO "srcflux" script and retrieve its results.
*infile*
The input events file; must be specified. The computation is done
in a temporary directory, so this path — and all others passed in
as arguments — **must be made absolute**.
*psfmethod* = "arfcorr"
The PSF modeling method to be used; see the "srcflux" documentation.
*conf* = 0.68
The confidence limit to detect. We default to 1 sigma, instead of
the 90% mark, which is the srcflux default.
*verbose* = 0
The level of verbosity to be used by the tool.
*kwargs*
Remaining keyword arguments are passed to the tool as command-line
keyword arguments, with values stringified.
Returns:
A :class:`pandas.DataFrame` extracted from the results table generated
by the tool. There is one row for each source analyzed; in common usage,
this means that there will be one row.
"""
from ...io import Path
import shutil, signal, tempfile
if infile is None:
raise ValueError('must specify infile')
kwargs.update(dict(
infile = infile,
psfmethod = psfmethod,
conf = conf,
verbose = verbose,
clobber = 'yes',
outroot = 'sf',
))
argv = ['srcflux'] + ['%s=%s' % t for t in kwargs.items()]
argstr = ' '.join(argv)
tempdir = None
try:
tempdir = tempfile.mkdtemp(prefix='srcflux')
proc = env.launch(argv, cwd=tempdir, shell=False)
retcode = proc.wait()
if retcode > 0:
raise RuntimeError('command "%s" failed with exit code %d' % (argstr, retcode))
elif retcode == -signal.SIGINT:
raise KeyboardInterrupt()
elif retcode < 0:
raise RuntimeError('command "%s" killed by signal %d' % (argstr, -retcode))
tables = list(Path(tempdir).glob('*.flux'))
if len(tables) != 1:
raise RuntimeError('expected exactly one flux table from srcflux; got %d' % len(tables))
return tables[0].read_fits_bintable(hdu=1)
finally:
if tempdir is not None:
shutil.rmtree(tempdir, onerror=_rmtree_error)
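# Illustrative sketch, not part of the original module: a minimal, hedged example of
# driving simple_srcflux(). The CiaoEnvironment construction mirrors compute_bgband()
# above; the event-file path is a hypothetical placeholder and, as documented, must
# be absolute because the tool runs in a temporary directory.
def _example_simple_srcflux():
    from . import CiaoEnvironment
    env = CiaoEnvironment()
    flux = simple_srcflux(env,
                          infile='/data/obs1234/evt2.fits',
                          psfmethod='arfcorr',
                          conf=0.68)
    return flux  # table with one row per analyzed source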
| pkgw/pwkit | pwkit/environments/ciao/analysis.py | Python | mit | 6,635 | 0.008747 |
__author__ = 'bromix'
from ... import kodion
def append_more_for_video(context_menu, provider, context, video_id, is_logged_in=False, refresh_container=False):
_is_logged_in = '0'
if is_logged_in:
_is_logged_in = '1'
_refresh_container = '0'
if refresh_container:
_refresh_container = '1'
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.more']),
'Container.Update(%s)' % context.create_uri(['video', 'more'],
{'video_id': video_id,
'logged_in': _is_logged_in,
'refresh_container': _refresh_container})))
def append_content_from_description(context_menu, provider, context, video_id):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.description.links']),
'Container.Update(%s)' % context.create_uri(['special', 'description_links'],
{'video_id': video_id})))
def append_play_with(context_menu, provider, context):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.play_with']), 'Action(SwitchPlayer)'))
def append_queue_video(context_menu, provider, context):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.queue']), 'Action(Queue)'))
def append_play_all_from_playlist(context_menu, provider, context, playlist_id, video_id=''):
if video_id:
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.playlist.play.from_here']),
'RunPlugin(%s)' % context.create_uri(['play'],
{'playlist_id': playlist_id,
'video_id': video_id,
'play': '1'})))
else:
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.playlist.play.all']),
'RunPlugin(%s)' % context.create_uri(['play'],
{'playlist_id': playlist_id,
'play': '1'})))
def append_add_video_to_playlist(context_menu, provider, context, video_id):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.add_to_playlist']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'select', 'playlist'],
{'video_id': video_id})))
def append_rename_playlist(context_menu, provider, context, playlist_id, playlist_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.rename']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'rename', 'playlist'],
{'playlist_id': playlist_id,
'playlist_name': playlist_name})))
def append_delete_playlist(context_menu, provider, context, playlist_id, playlist_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.delete']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'remove', 'playlist'],
{'playlist_id': playlist_id,
'playlist_name': playlist_name})))
def append_remove_as_watchlater(context_menu, provider, context, playlist_id, playlist_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.remove.as.watchlater']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'remove', 'watchlater'],
{'playlist_id': playlist_id,
'playlist_name': playlist_name})))
def append_set_as_watchlater(context_menu, provider, context, playlist_id, playlist_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.set.as.watchlater']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'set', 'watchlater'],
{'playlist_id': playlist_id,
'playlist_name': playlist_name})))
def append_remove_as_history(context_menu, provider, context, playlist_id, playlist_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.remove.as.history']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'remove', 'history'],
{'playlist_id': playlist_id,
'playlist_name': playlist_name})))
def append_set_as_history(context_menu, provider, context, playlist_id, playlist_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.set.as.history']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'set', 'history'],
{'playlist_id': playlist_id,
'playlist_name': playlist_name})))
def append_remove_my_subscriptions_filter(context_menu, provider, context, channel_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.remove.my_subscriptions.filter']),
'RunPlugin(%s)' % context.create_uri(['my_subscriptions', 'filter'],
{'channel_name': channel_name,
'action': 'remove'})))
def append_add_my_subscriptions_filter(context_menu, provider, context, channel_name):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.add.my_subscriptions.filter']),
'RunPlugin(%s)' % context.create_uri(['my_subscriptions', 'filter'],
{'channel_name': channel_name,
'action': 'add'})))
def append_rate_video(context_menu, provider, context, video_id, refresh_container=False):
if refresh_container:
refresh_container = '1'
else:
refresh_container = '0'
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.video.rate']),
'RunPlugin(%s)' % context.create_uri(['video', 'rate'],
{'video_id': video_id,
'refresh_container': refresh_container})))
def append_watch_later(context_menu, provider, context, playlist_id, video_id):
playlist_path = kodion.utils.create_path('channel', 'mine', 'playlist', playlist_id)
if playlist_id and playlist_path != context.get_path():
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.watch_later']),
'RunPlugin(%s)' % context.create_uri(['playlist', 'add', 'video'],
{'playlist_id': playlist_id, 'video_id': video_id})))
def append_go_to_channel(context_menu, provider, context, channel_id, channel_name):
text = context.localize(provider.LOCAL_MAP['youtube.go_to_channel']) % ('[B]%s[/B]' % channel_name)
context_menu.append((text, 'Container.Update(%s)' % context.create_uri(['channel', channel_id])))
def append_related_videos(context_menu, provider, context, video_id):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.related_videos']),
'Container.Update(%s)' % context.create_uri(['special', 'related_videos'],
{'video_id': video_id})))
def append_clear_watch_history(context_menu, provider, context):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.clear_history']),
'Container.Update(%s)' % context.create_uri(['history', 'clear'])))
def append_refresh(context_menu, provider, context):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.refresh']), 'Container.Refresh'))
def append_subscribe_to_channel(context_menu, provider, context, channel_id, channel_name=u''):
text = u''
if channel_name:
text = context.localize(provider.LOCAL_MAP['youtube.subscribe_to']).replace('%s', '[B]' + channel_name + '[/B]')
context_menu.append(
(text, 'RunPlugin(%s)' % context.create_uri(['subscriptions', 'add'], {'subscription_id': channel_id})))
else:
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.subscribe']),
'RunPlugin(%s)' % context.create_uri(['subscriptions', 'add'],
{'subscription_id': channel_id})))
def append_unsubscribe_from_channel(context_menu, provider, context, channel_id, channel_name=u''):
context_menu.append((context.localize(provider.LOCAL_MAP['youtube.unsubscribe']),
'RunPlugin(%s)' % context.create_uri(['subscriptions', 'remove'],
{'subscription_id': channel_id})))
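# Illustrative sketch, not part of the original module: a hedged example of how a
# provider typically combines these helpers when building a video list item. The
# provider/context arguments stand for the usual plugin objects.
def _example_build_video_context_menu(provider, context, video_id, channel_id, channel_name):
    context_menu = []
    append_play_with(context_menu, provider, context)
    append_queue_video(context_menu, provider, context)
    append_rate_video(context_menu, provider, context, video_id)
    append_related_videos(context_menu, provider, context, video_id)
    append_go_to_channel(context_menu, provider, context, channel_id, channel_name)
    return context_menu  # list of (label, builtin command) tuples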
| Etharr/plugin.video.youtube | resources/lib/youtube_plugin/youtube/helper/yt_context_menu.py | Python | gpl-2.0 | 9,739 | 0.009241 |
# Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.utils import flt
from erpnext.accounts.report.financial_statements import (get_period_list, get_columns, get_data)
def execute(filters=None):
period_list = get_period_list(filters.fiscal_year, filters.periodicity)
income = get_data(filters.company, "Income", "Credit", period_list, ignore_closing_entries=True)
expense = get_data(filters.company, "Expense", "Debit", period_list, ignore_closing_entries=True)
net_profit_loss = get_net_profit_loss(income, expense, period_list, filters.company)
data = []
data.extend(income or [])
data.extend(expense or [])
if net_profit_loss:
data.append(net_profit_loss)
columns = get_columns(period_list, filters.company)
return columns, data
def get_net_profit_loss(income, expense, period_list, company):
if income and expense:
net_profit_loss = {
"account_name": "'" + _("Net Profit / Loss") + "'",
"account": None,
"warn_if_negative": True,
"currency": frappe.db.get_value("Company", company, "default_currency")
}
for period in period_list:
net_profit_loss[period.key] = flt(income[-2][period.key] - expense[-2][period.key], 3)
return net_profit_loss
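# Illustrative sketch, not part of the original module: a hedged example of the
# filters this report expects; the company and fiscal year values are placeholders.
def _example_run_report():
    filters = frappe._dict({
        "company": "Example Company",
        "fiscal_year": "2015-2016",
        "periodicity": "Quarterly"
    })
    columns, data = execute(filters)
    return columns, data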
| mbauskar/helpdesk-erpnext | erpnext/accounts/report/profit_and_loss_statement/profit_and_loss_statement.py | Python | agpl-3.0 | 1,352 | 0.022189 |
import unittest
from rx import Observable
from rx.testing import TestScheduler, ReactiveTest, is_prime, MockDisposable
from rx.disposables import Disposable, SerialDisposable
on_next = ReactiveTest.on_next
on_completed = ReactiveTest.on_completed
on_error = ReactiveTest.on_error
subscribe = ReactiveTest.subscribe
subscribed = ReactiveTest.subscribed
disposed = ReactiveTest.disposed
created = ReactiveTest.created
class TestForIn(unittest.TestCase):
def test_for_basic(self):
scheduler = TestScheduler()
def create():
def selector(x):
return scheduler.create_cold_observable(on_next(x * 100 + 10, x * 10 + 1), on_next(x * 100 + 20, x * 10 + 2), on_next(x * 100 + 30, x * 10 + 3), on_completed(x * 100 + 40))
return Observable.for_in([1, 2, 3], selector)
results = scheduler.start(create=create)
results.messages.assert_equal(on_next(310, 11), on_next(320, 12), on_next(330, 13), on_next(550, 21), on_next(560, 22), on_next(570, 23), on_next(890, 31), on_next(900, 32), on_next(910, 33), on_completed(920))
def test_for_throws(self):
ex = 'ex'
scheduler = TestScheduler()
def create():
def selector(x):
raise Exception(ex)
return Observable.for_in([1, 2, 3], selector)
results = scheduler.start(create=create)
results.messages.assert_equal(on_error(200, ex))
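# Illustrative sketch, not part of the original tests: the semantics exercised above,
# assuming the RxPY 1.x factory Observable.return_value is available. for_in simply
# concatenates the observables produced by the selector for each source item.
def _example_for_in():
    return Observable.for_in([1, 2, 3], lambda x: Observable.return_value(x * 10))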
| dbrattli/RxPY | tests/test_observable/test_forin.py | Python | apache-2.0 | 1,468 | 0.004768 |
""" ElementInspectorAgent
This agent inspect Resources (or maybe Nodes), and evaluates policies that apply.
The following options can be set for the ElementInspectorAgent.
.. literalinclude:: ../ConfigTemplate.cfg
:start-after: ##BEGIN ElementInspectorAgent
:end-before: ##END
:dedent: 2
:caption: ElementInspectorAgent options
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = '$Id$'
import datetime
import math
from six.moves import queue as Queue
from DIRAC import S_ERROR, S_OK
from DIRAC.Core.Base.AgentModule import AgentModule
from DIRAC.Core.Utilities.ThreadPool import ThreadPool
from DIRAC.Core.Utilities.ObjectLoader import ObjectLoader
from DIRAC.ResourceStatusSystem.PolicySystem.PEP import PEP
AGENT_NAME = 'ResourceStatus/ElementInspectorAgent'
class ElementInspectorAgent(AgentModule):
""" ElementInspectorAgent
The ElementInspector agent is a generic agent used to check the elements
of type "Resource" -- which includes ComputingElement, StorageElement, and other types
This Agent takes care of the Elements. In order to do so, it gathers
the eligible ones and then evaluates their statuses with the PEP.
"""
# Max number of worker threads by default
__maxNumberOfThreads = 15
# Inspection freqs, defaults, the lower, the higher priority to be checked.
# Error state usually means there is a glitch somewhere, so it has the highest
# priority.
__checkingFreqs = {'Active': 20,
'Degraded': 20,
'Probing': 20,
'Banned': 15,
'Unknown': 10,
'Error': 5}
def __init__(self, *args, **kwargs):
""" c'tor
"""
AgentModule.__init__(self, *args, **kwargs)
# ElementType, to be defined among Resource or Node
self.elementType = 'Resource'
self.elementsToBeChecked = None
self.threadPool = None
self.rsClient = None
self.clients = {}
def initialize(self):
""" Standard initialize.
"""
maxNumberOfThreads = self.am_getOption('maxNumberOfThreads', self.__maxNumberOfThreads)
self.threadPool = ThreadPool(maxNumberOfThreads, maxNumberOfThreads)
self.elementType = self.am_getOption('elementType', self.elementType)
res = ObjectLoader().loadObject('DIRAC.ResourceStatusSystem.Client.ResourceStatusClient')
if not res['OK']:
self.log.error('Failed to load ResourceStatusClient class: %s' % res['Message'])
return res
rsClass = res['Value']
res = ObjectLoader().loadObject('DIRAC.ResourceStatusSystem.Client.ResourceManagementClient')
if not res['OK']:
self.log.error('Failed to load ResourceManagementClient class: %s' % res['Message'])
return res
rmClass = res['Value']
self.rsClient = rsClass()
self.clients['ResourceStatusClient'] = rsClass()
self.clients['ResourceManagementClient'] = rmClass()
if not self.elementType:
return S_ERROR('Missing elementType')
return S_OK()
def execute(self):
""" execute
This is the main method of the agent. It gets the elements from the Database
which are eligible to be re-checked, calculates how many threads should be
started and spawns them. Each thread will get an element from the queue until
it is empty. At the end, the method will join the queue such that the agent
will not terminate a cycle until all elements have been processed.
"""
# Gets elements to be checked (returns a Queue)
elementsToBeChecked = self.getElementsToBeChecked()
if not elementsToBeChecked['OK']:
self.log.error(elementsToBeChecked['Message'])
return elementsToBeChecked
self.elementsToBeChecked = elementsToBeChecked['Value']
queueSize = self.elementsToBeChecked.qsize()
pollingTime = self.am_getPollingTime()
# Assigns number of threads on the fly such that we exhaust the PollingTime
# without having to spawn too many threads. We assume 10 seconds per element
    # to be processed (in practice it takes about 1 second per element):
# numberOfThreads = elements * 10(s/element) / pollingTime
numberOfThreads = int(math.ceil(queueSize * 10. / pollingTime))
self.log.info('Needed %d threads to process %d elements' % (numberOfThreads, queueSize))
for _x in range(numberOfThreads):
jobUp = self.threadPool.generateJobAndQueueIt(self._execute)
if not jobUp['OK']:
self.log.error(jobUp['Message'])
self.log.info('blocking until all elements have been processed')
# block until all tasks are done
self.elementsToBeChecked.join()
self.log.info('done')
return S_OK()
def getElementsToBeChecked(self):
""" getElementsToBeChecked
This method gets all the rows in the <self.elementType>Status table, and then
discards entries with TokenOwner != rs_svc. On top of that, there are check
frequencies that are applied: depending on the current status of the element,
they will be checked more or less often.
"""
toBeChecked = Queue.Queue()
# We get all the elements, then we filter.
elements = self.rsClient.selectStatusElement(self.elementType, 'Status')
if not elements['OK']:
return elements
utcnow = datetime.datetime.utcnow().replace(microsecond=0)
# filter elements by Type
for element in elements['Value']:
      # Maybe overkill, but this way I never again have to worry about the order
      # of elements returned by MySQL in the tuples
elemDict = dict(zip(elements['Columns'], element))
# This if-clause skips all the elements that should not be checked yet
timeToNextCheck = self.__checkingFreqs[elemDict['Status']]
if utcnow <= elemDict['LastCheckTime'] + datetime.timedelta(minutes=timeToNextCheck):
continue
# We skip the elements with token different than "rs_svc"
if elemDict['TokenOwner'] != 'rs_svc':
self.log.verbose('Skipping %s ( %s ) with token %s' % (elemDict['Name'],
elemDict['StatusType'],
elemDict['TokenOwner']))
continue
# We are not checking if the item is already on the queue or not. It may
# be there, but in any case, it is not a big problem.
lowerElementDict = {'element': self.elementType}
for key, value in elemDict.items():
if len(key) >= 2: # VO !
lowerElementDict[key[0].lower() + key[1:]] = value
# We add lowerElementDict to the queue
toBeChecked.put(lowerElementDict)
self.log.verbose('%s # "%s" # "%s" # %s # %s' % (elemDict['Name'],
elemDict['ElementType'],
elemDict['StatusType'],
elemDict['Status'],
elemDict['LastCheckTime']))
return S_OK(toBeChecked)
def _execute(self):
"""
Method run by the thread pool. It enters a loop until there are no elements
on the queue. On each iteration, it evaluates the policies for such element
and enforces the necessary actions. If there are no more elements in the
queue, the loop is finished.
"""
pep = PEP(clients=self.clients)
while True:
try:
element = self.elementsToBeChecked.get_nowait()
except Queue.Empty:
return S_OK()
self.log.verbose('%s ( VO=%s / status=%s / statusType=%s ) being processed' % (element['name'],
element['vO'],
element['status'],
element['statusType']))
try:
resEnforce = pep.enforce(element)
except Exception as e:
self.log.exception('Exception during enforcement')
resEnforce = S_ERROR('Exception during enforcement')
if not resEnforce['OK']:
self.log.error('Failed policy enforcement', resEnforce['Message'])
self.elementsToBeChecked.task_done()
continue
resEnforce = resEnforce['Value']
oldStatus = resEnforce['decisionParams']['status']
statusType = resEnforce['decisionParams']['statusType']
newStatus = resEnforce['policyCombinedResult']['Status']
reason = resEnforce['policyCombinedResult']['Reason']
if oldStatus != newStatus:
self.log.info('%s (%s) is now %s ( %s ), before %s' % (element['name'],
statusType,
newStatus,
reason,
oldStatus))
# Used together with join !
self.elementsToBeChecked.task_done()
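# Illustrative note, not part of the original module: with the sizing formula in
# execute(), a queue of 120 eligible elements and a 300 second polling time give
# int(math.ceil(120 * 10. / 300)) = 4 generated jobs; actual concurrency remains
# bounded by the maxNumberOfThreads value used to build the ThreadPool.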
| yujikato/DIRAC | src/DIRAC/ResourceStatusSystem/Agent/ElementInspectorAgent.py | Python | gpl-3.0 | 9,113 | 0.008449 |
#!/usr/bin/env python
'''
access satellite map tile database
some functions are based on code from mapUtils.py in gmapcatcher
Andrew Tridgell
May 2012
released under GNU GPL v3 or later
'''
import math, cv, sys, os, mp_util, httplib2, threading, time, collections, string, hashlib, errno, tempfile
class TileException(Exception):
'''tile error class'''
def __init__(self, msg):
Exception.__init__(self, msg)
TILE_SERVICES = {
# thanks to http://go2log.com/2011/09/26/fetching-tiles-for-offline-map/
# for the URL mapping info
"GoogleSat" : "http://khm${GOOG_DIGIT}.google.com/kh/v=113&src=app&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleMap" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=m@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleHyb" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=h@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleTer" : "http://mt${GOOG_DIGIT}.google.com/vt/lyrs=t@108,r@121&hl=en&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"GoogleChina" : "http://mt${GOOG_DIGIT}.google.cn/vt/lyrs=m@121&hl=en&gl=cn&x=${X}&y=${Y}&z=${ZOOM}&s=${GALILEO}",
"YahooMap" : "http://maps${Y_DIGIT}.yimg.com/hx/tl?v=4.3&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1",
"YahooSat" : "http://maps${Y_DIGIT}.yimg.com/ae/ximg?v=1.9&t=a&s=256&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1",
"YahooInMap" : "http://maps.yimg.com/hw/tile?locale=en&imgtype=png&yimgv=1.2&v=4.1&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM_2}",
"YahooInHyb" : "http://maps.yimg.com/hw/tile?imgtype=png&yimgv=0.95&t=h&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM_2}",
"YahooHyb" : "http://maps${Y_DIGIT}.yimg.com/hx/tl?v=4.3&t=h&.intl=en&x=${X}&y=${YAHOO_Y}&z=${YAHOO_ZOOM}&r=1",
"MicrosoftBrMap" : "http://imakm${MS_DIGITBR}.maplink3.com.br/maps.ashx?v=${QUAD}|t&call=2.2.4",
"MicrosoftHyb" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/h${QUAD}.png?g=441&mkt=en-us&n=z",
"MicrosoftSat" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/a${QUAD}.png?g=441&mkt=en-us&n=z",
"MicrosoftMap" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/r${QUAD}.png?g=441&mkt=en-us&n=z",
"MicrosoftTer" : "http://ecn.t${MS_DIGIT}.tiles.virtualearth.net/tiles/r${QUAD}.png?g=441&mkt=en-us&shading=hill&n=z",
"OpenStreetMap" : "http://tile.openstreetmap.org/${ZOOM}/${X}/${Y}.png",
"OSMARender" : "http://tah.openstreetmap.org/Tiles/tile/${ZOOM}/${X}/${Y}.png",
"OpenAerialMap" : "http://tile.openaerialmap.org/tiles/?v=mgm&layer=openaerialmap-900913&x=${X}&y=${Y}&zoom=${OAM_ZOOM}",
"OpenCycleMap" : "http://andy.sandbox.cloudmade.com/tiles/cycle/${ZOOM}/${X}/${Y}.png"
}
# these are the md5sums of "unavailable" tiles
BLANK_TILES = set(["d16657bbee25d7f15c583f5c5bf23f50",
"c0e76e6e90ff881da047c15dbea380c7",
"d41d8cd98f00b204e9800998ecf8427e"])
# all tiles are 256x256
TILES_WIDTH = 256
TILES_HEIGHT = 256
class TileServiceInfo:
'''a lookup object for the URL templates'''
def __init__(self, x, y, zoom):
self.X = x
self.Y = y
self.Z = zoom
quadcode = ''
for i in range(zoom - 1, -1, -1):
quadcode += str((((((y >> i) & 1) << 1) + ((x >> i) & 1))))
self.ZOOM = zoom
self.QUAD = quadcode
self.YAHOO_Y = 2**(zoom-1) - 1 - y
self.YAHOO_ZOOM = zoom + 1
self.YAHOO_ZOOM_2 = 17 - zoom + 1
self.OAM_ZOOM = 17 - zoom
self.GOOG_DIGIT = (x + y) & 3
self.MS_DIGITBR = (((y & 1) << 1) + (x & 1)) + 1
self.MS_DIGIT = (((y & 3) << 1) + (x & 1))
self.Y_DIGIT = (x + y + zoom) % 3 + 1
self.GALILEO = "Galileo"[0:(3 * x + y) & 7]
def __getitem__(self, a):
return str(getattr(self, a))
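# Illustrative sketch, not part of the original module: TileServiceInfo is designed to
# be passed straight to string.Template.substitute(), which is what TileInfo.url()
# below does. The expected URL is derived from the OpenStreetMap template above.
def _example_tile_url():
    template = string.Template(TILE_SERVICES["OpenStreetMap"])
    return template.substitute(TileServiceInfo(1, 2, 3))  # http://tile.openstreetmap.org/3/1/2.png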
class TileInfo:
'''description of a tile'''
def __init__(self, tile, zoom, offset=(0,0)):
self.tile = tile
(self.x, self.y) = tile
self.zoom = zoom
(self.offsetx, self.offsety) = offset
self.refresh_time()
def key(self):
'''tile cache key'''
return (self.tile, self.zoom)
def refresh_time(self):
'''reset the request time'''
self.request_time = time.time()
def coord(self, offset=(0,0)):
'''return lat,lon within a tile given (offsetx,offsety)'''
(tilex, tiley) = self.tile
(offsetx, offsety) = offset
world_tiles = 1<<self.zoom
x = ( tilex + 1.0*offsetx/TILES_WIDTH ) / (world_tiles/2.) - 1
y = ( tiley + 1.0*offsety/TILES_HEIGHT) / (world_tiles/2.) - 1
lon = x * 180.0
y = math.exp(-y*2*math.pi)
e = (y-1)/(y+1)
lat = 180.0/math.pi * math.asin(e)
return (lat, lon)
def size(self):
'''return tile size as (width,height) in meters'''
(lat1, lon1) = self.coord((0,0))
(lat2, lon2) = self.coord((TILES_WIDTH,0))
width = mp_util.gps_distance(lat1, lon1, lat2, lon2)
(lat2, lon2) = self.coord((0,TILES_HEIGHT))
height = mp_util.gps_distance(lat1, lon1, lat2, lon2)
return (width,height)
def distance(self, lat, lon):
'''distance of this tile from a given lat/lon'''
(tlat, tlon) = self.coord((TILES_WIDTH/2,TILES_HEIGHT/2))
return mp_util.gps_distance(lat, lon, tlat, tlon)
def path(self):
'''return relative path of tile image'''
(x, y) = self.tile
return "%u/%u/%u.img" % (self.zoom, y, x)
def url(self, service):
'''return URL for a tile'''
url = string.Template(TILE_SERVICES[service])
(x,y) = self.tile
tile_info = TileServiceInfo(x, y, self.zoom)
return url.substitute(tile_info)
class TileInfoScaled(TileInfo):
'''information on a tile with scale information and placement'''
def __init__(self, tile, zoom, scale, src, dst):
TileInfo.__init__(self, tile, zoom)
self.scale = scale
(self.srcx, self.srcy) = src
(self.dstx, self.dsty) = dst
class MPTile:
'''map tile object'''
def __init__(self, cache_path=None, download=True, cache_size=500,
service="MicrosoftSat", tile_delay=0.3, debug=False,
max_zoom=19):
if cache_path is None:
try:
cache_path = os.path.join(os.environ['HOME'], '.tilecache')
except Exception:
cache_path = os.path.join(tempfile.gettempdir(), 'MAVtilecache')
if not os.path.exists(cache_path):
mp_util.mkdir_p(cache_path)
self.cache_path = cache_path
self.max_zoom = max_zoom
self.min_zoom = 1
self.download = download
self.cache_size = cache_size
self.tile_delay = tile_delay
self.service = service
self.debug = debug
if service not in TILE_SERVICES:
raise TileException('unknown tile service %s' % service)
# _download_pending is a dictionary of TileInfo objects
self._download_pending = {}
self._download_thread = None
self._loading = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'loading.jpg')
self._unavailable = os.path.join(os.path.dirname(os.path.realpath(__file__)), '..', 'data', 'unavailable.jpg')
self._tile_cache = collections.OrderedDict()
def coord_to_tile(self, lat, lon, zoom):
'''convert lat/lon/zoom to a TileInfo'''
world_tiles = 1<<zoom
x = world_tiles / 360.0 * (lon + 180.0)
tiles_pre_radian = world_tiles / (2 * math.pi)
e = math.sin(lat * (1/180.*math.pi))
y = world_tiles/2 + 0.5*math.log((1+e)/(1-e)) * (-tiles_pre_radian)
offsetx = int((x - int(x)) * TILES_WIDTH)
offsety = int((y - int(y)) * TILES_HEIGHT)
return TileInfo((int(x) % world_tiles, int(y) % world_tiles), zoom, offset=(offsetx, offsety))
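    # Illustrative note, not part of the original module: a worked example of the
    # conversion above. At zoom 2 there are 1<<2 = 4 world tiles, so lat=0, lon=0
    # gives x = 4/360 * 180 = 2.0 and y = 2 + 0 = 2.0, i.e. TileInfo((2, 2), 2)
    # with offset (0, 0): the tile whose top-left corner sits on the equator at
    # the prime meridian.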
def tile_to_path(self, tile):
'''return full path to a tile'''
return os.path.join(self.cache_path, self.service, tile.path())
def coord_to_tilepath(self, lat, lon, zoom):
'''return the tile ID that covers a latitude/longitude at
a specified zoom level
'''
tile = self.coord_to_tile(lat, lon, zoom)
return self.tile_to_path(tile)
def tiles_pending(self):
'''return number of tiles pending download'''
return len(self._download_pending)
def downloader(self):
'''the download thread'''
http = httplib2.Http()
while self.tiles_pending() > 0:
time.sleep(self.tile_delay)
keys = self._download_pending.keys()[:]
# work out which one to download next, choosing by request_time
tile_info = self._download_pending[keys[0]]
for key in keys:
if self._download_pending[key].request_time > tile_info.request_time:
tile_info = self._download_pending[key]
url = tile_info.url(self.service)
path = self.tile_to_path(tile_info)
key = tile_info.key()
try:
if self.debug:
print("Downloading %s [%u left]" % (url, len(keys)))
resp,img = http.request(url)
except httplib2.HttpLib2Error as e:
#print('Error loading %s' % url)
self._tile_cache[key] = self._unavailable
self._download_pending.pop(key)
if self.debug:
print("Failed %s: %s" % (url, str(e)))
continue
if 'content-type' not in resp or resp['content-type'].find('image') == -1:
self._tile_cache[key] = self._unavailable
self._download_pending.pop(key)
if self.debug:
print("non-image response %s" % url)
continue
# see if its a blank/unavailable tile
md5 = hashlib.md5(img).hexdigest()
if md5 in BLANK_TILES:
if self.debug:
print("blank tile %s" % url)
self._tile_cache[key] = self._unavailable
self._download_pending.pop(key)
continue
mp_util.mkdir_p(os.path.dirname(path))
h = open(path+'.tmp','wb')
h.write(img)
h.close()
os.rename(path+'.tmp', path)
self._download_pending.pop(key)
self._download_thread = None
def start_download_thread(self):
'''start the downloader'''
if self._download_thread:
return
t = threading.Thread(target=self.downloader)
t.daemon = True
self._download_thread = t
t.start()
def load_tile_lowres(self, tile):
'''load a lower resolution tile from cache to fill in a
map while waiting for a higher resolution tile'''
if tile.zoom == self.min_zoom:
return None
# find the equivalent lower res tile
(lat,lon) = tile.coord()
width2 = TILES_WIDTH
height2 = TILES_HEIGHT
for zoom2 in range(tile.zoom-1, self.min_zoom-1, -1):
width2 /= 2
height2 /= 2
if width2 == 0 or height2 == 0:
break
tile_info = self.coord_to_tile(lat, lon, zoom2)
# see if its in the tile cache
key = tile_info.key()
if key in self._tile_cache:
img = self._tile_cache[key]
if img == self._unavailable:
continue
else:
path = self.tile_to_path(tile_info)
try:
img = cv.LoadImage(path)
# add it to the tile cache
self._tile_cache[key] = img
while len(self._tile_cache) > self.cache_size:
self._tile_cache.popitem(0)
except IOError as e:
continue
# copy out the quadrant we want
cv.SetImageROI(img, (tile_info.offsetx, tile_info.offsety, width2, height2))
img2 = cv.CreateImage((width2,height2), 8, 3)
cv.Copy(img, img2)
cv.ResetImageROI(img)
# and scale it
scaled = cv.CreateImage((TILES_WIDTH, TILES_HEIGHT), 8, 3)
cv.Resize(img2, scaled)
#cv.Rectangle(scaled, (0,0), (255,255), (0,255,0), 1)
return scaled
return None
def load_tile(self, tile):
'''load a tile from cache or tile server'''
# see if its in the tile cache
key = tile.key()
if key in self._tile_cache:
img = self._tile_cache[key]
if img == self._unavailable:
img = self.load_tile_lowres(tile)
if img is None:
img = cv.LoadImage(self._unavailable)
return img
path = self.tile_to_path(tile)
try:
ret = cv.LoadImage(path)
# add it to the tile cache
self._tile_cache[key] = ret
while len(self._tile_cache) > self.cache_size:
self._tile_cache.popitem(0)
return ret
except IOError as e:
if not e.errno in [errno.ENOENT]:
raise
pass
if not self.download:
img = self.load_tile_lowres(tile)
if img is None:
img = cv.LoadImage(self._unavailable)
return img
try:
self._download_pending[key].refresh_time()
except Exception:
self._download_pending[key] = tile
self.start_download_thread()
img = self.load_tile_lowres(tile)
if img is None:
img = cv.LoadImage(self._loading)
return img
def scaled_tile(self, tile):
'''return a scaled tile'''
width = int(TILES_WIDTH / tile.scale)
height = int(TILES_HEIGHT / tile.scale)
scaled_tile = cv.CreateImage((width,height), 8, 3)
full_tile = self.load_tile(tile)
cv.Resize(full_tile, scaled_tile)
return scaled_tile
def coord_from_area(self, x, y, lat, lon, width, ground_width):
'''return (lat,lon) for a pixel in an area image'''
pixel_width = ground_width / float(width)
dx = x * pixel_width
dy = y * pixel_width
return mp_util.gps_offset(lat, lon, dx, -dy)
def coord_to_pixel(self, lat, lon, width, ground_width, lat2, lon2):
'''return pixel coordinate (px,py) for position (lat2,lon2)
in an area image. Note that the results are relative to top,left
and may be outside the image'''
pixel_width = ground_width / float(width)
if lat is None or lon is None or lat2 is None or lon2 is None:
return (0,0)
dx = mp_util.gps_distance(lat, lon, lat, lon2)
if lon2 < lon:
dx = -dx
dy = mp_util.gps_distance(lat, lon, lat2, lon)
if lat2 > lat:
dy = -dy
dx /= pixel_width
dy /= pixel_width
return (int(dx), int(dy))
def area_to_tile_list(self, lat, lon, width, height, ground_width, zoom=None):
'''return a list of TileInfoScaled objects needed for
an area of land, with ground_width in meters, and
width/height in pixels.
lat/lon is the top left corner. If unspecified, the
zoom is automatically chosen to avoid having to grow
the tiles
'''
pixel_width = ground_width / float(width)
ground_height = ground_width * (height/(float(width)))
top_right = mp_util.gps_newpos(lat, lon, 90, ground_width)
bottom_left = mp_util.gps_newpos(lat, lon, 180, ground_height)
bottom_right = mp_util.gps_newpos(bottom_left[0], bottom_left[1], 90, ground_width)
# choose a zoom level if not provided
if zoom is None:
zooms = range(self.min_zoom, self.max_zoom+1)
else:
zooms = [zoom]
for zoom in zooms:
tile_min = self.coord_to_tile(lat, lon, zoom)
(twidth,theight) = tile_min.size()
tile_pixel_width = twidth / float(TILES_WIDTH)
scale = pixel_width / tile_pixel_width
if scale >= 1.0:
break
scaled_tile_width = int(TILES_WIDTH / scale)
scaled_tile_height = int(TILES_HEIGHT / scale)
# work out the bottom right tile
tile_max = self.coord_to_tile(bottom_right[0], bottom_right[1], zoom)
ofsx = int(tile_min.offsetx / scale)
ofsy = int(tile_min.offsety / scale)
srcy = ofsy
dsty = 0
ret = []
# place the tiles
for y in range(tile_min.y, tile_max.y+1):
srcx = ofsx
dstx = 0
for x in range(tile_min.x, tile_max.x+1):
if dstx < width and dsty < height:
ret.append(TileInfoScaled((x,y), zoom, scale, (srcx,srcy), (dstx,dsty)))
dstx += scaled_tile_width-srcx
srcx = 0
dsty += scaled_tile_height-srcy
srcy = 0
return ret
def area_to_image(self, lat, lon, width, height, ground_width, zoom=None, ordered=True):
'''return an RGB image for an area of land, with ground_width
in meters, and width/height in pixels.
lat/lon is the top left corner. The zoom is automatically
chosen to avoid having to grow the tiles'''
img = cv.CreateImage((width,height),8,3)
tlist = self.area_to_tile_list(lat, lon, width, height, ground_width, zoom)
# order the display by distance from the middle, so the download happens
# close to the middle of the image first
if ordered:
(midlat, midlon) = self.coord_from_area(width/2, height/2, lat, lon, width, ground_width)
tlist.sort(key=lambda d: d.distance(midlat, midlon), reverse=True)
for t in tlist:
scaled_tile = self.scaled_tile(t)
w = min(width - t.dstx, scaled_tile.width - t.srcx)
h = min(height - t.dsty, scaled_tile.height - t.srcy)
if w > 0 and h > 0:
cv.SetImageROI(scaled_tile, (t.srcx, t.srcy, w, h))
cv.SetImageROI(img, (t.dstx, t.dsty, w, h))
cv.Copy(scaled_tile, img)
cv.ResetImageROI(img)
cv.ResetImageROI(scaled_tile)
# return as an RGB image
cv.CvtColor(img, img, cv.CV_BGR2RGB)
return img
if __name__ == "__main__":
from optparse import OptionParser
parser = OptionParser("mp_tile.py [options]")
parser.add_option("--lat", type='float', default=-35.362938, help="start latitude")
parser.add_option("--lon", type='float', default=149.165085, help="start longitude")
parser.add_option("--width", type='float', default=1000.0, help="width in meters")
parser.add_option("--service", default="YahooSat", help="tile service")
parser.add_option("--zoom", default=None, type='int', help="zoom level")
parser.add_option("--max-zoom", type='int', default=19, help="maximum tile zoom")
parser.add_option("--delay", type='float', default=1.0, help="tile download delay")
parser.add_option("--boundary", default=None, help="region boundary")
parser.add_option("--debug", action='store_true', default=False, help="show debug info")
(opts, args) = parser.parse_args()
lat = opts.lat
lon = opts.lon
ground_width = opts.width
if opts.boundary:
boundary = mp_util.polygon_load(opts.boundary)
bounds = mp_util.polygon_bounds(boundary)
lat = bounds[0]+bounds[2]
lon = bounds[1]
ground_width = max(mp_util.gps_distance(lat, lon, lat, lon+bounds[3]),
mp_util.gps_distance(lat, lon, lat-bounds[2], lon))
print lat, lon, ground_width
mt = MPTile(debug=opts.debug, service=opts.service,
tile_delay=opts.delay, max_zoom=opts.max_zoom)
if opts.zoom is None:
zooms = range(mt.min_zoom, mt.max_zoom+1)
else:
zooms = [opts.zoom]
for zoom in zooms:
tlist = mt.area_to_tile_list(lat, lon, width=1024, height=1024,
ground_width=ground_width, zoom=zoom)
print("zoom %u needs %u tiles" % (zoom, len(tlist)))
for tile in tlist:
mt.load_tile(tile)
while mt.tiles_pending() > 0:
time.sleep(2)
print("Waiting on %u tiles" % mt.tiles_pending())
print('Done')
| kd0aij/matrixpilot_old | Tools/MAVLink/MAVProxy/modules/lib/mp_tile.py | Python | gpl-3.0 | 18,202 | 0.028843 |
# -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
from . import vrrp
class address(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-interfaces - based on the path /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The list of configured IPv4 addresses on the interface.
"""
__slots__ = ("_path_helper", "_extmethods", "__ip", "__config", "__state", "__vrrp")
_yang_name = "address"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__ip = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="ip",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="leafref",
is_config=True,
)
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
self.__vrrp = YANGDynClass(
base=vrrp.vrrp,
is_container="container",
yang_name="vrrp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"interfaces",
"interface",
"subinterfaces",
"subinterface",
"ipv4",
"addresses",
"address",
]
def _get_ip(self):
"""
Getter method for ip, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/ip (leafref)
YANG Description: References the configured IP address
"""
return self.__ip
def _set_ip(self, v, load=False):
"""
Setter method for ip, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/ip (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip() directly.
YANG Description: References the configured IP address
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError(
"Cannot set keys directly when" + " within an instantiated list"
)
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=six.text_type,
is_leaf=True,
yang_name="ip",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="leafref",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ip must be of a type compatible with leafref""",
"defined-type": "leafref",
"generated-type": """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)""",
}
)
self.__ip = t
if hasattr(self, "_set"):
self._set()
def _unset_ip(self):
self.__ip = YANGDynClass(
base=six.text_type,
is_leaf=True,
yang_name="ip",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
is_keyval=True,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="leafref",
is_config=True,
)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/config (container)
YANG Description: Configuration data for each configured IPv4
address on the interface
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration data for each configured IPv4
address on the interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """config must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
}
)
self.__config = t
if hasattr(self, "_set"):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(
base=config.config,
is_container="container",
yang_name="config",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/state (container)
YANG Description: Operational state data for each IPv4 address
configured on the interface
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: Operational state data for each IPv4 address
configured on the interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """state must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
}
)
self.__state = t
if hasattr(self, "_set"):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(
base=state.state,
is_container="container",
yang_name="state",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
def _get_vrrp(self):
"""
Getter method for vrrp, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/vrrp (container)
YANG Description: Enclosing container for VRRP groups handled by this
IP interface
"""
return self.__vrrp
def _set_vrrp(self, v, load=False):
"""
Setter method for vrrp, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/vrrp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrrp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vrrp() directly.
YANG Description: Enclosing container for VRRP groups handled by this
IP interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=vrrp.vrrp,
is_container="container",
yang_name="vrrp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """vrrp must be of a type compatible with container""",
"defined-type": "container",
"generated-type": """YANGDynClass(base=vrrp.vrrp, is_container='container', yang_name="vrrp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
}
)
self.__vrrp = t
if hasattr(self, "_set"):
self._set()
def _unset_vrrp(self):
self.__vrrp = YANGDynClass(
base=vrrp.vrrp,
is_container="container",
yang_name="vrrp",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
extensions=None,
namespace="http://openconfig.net/yang/interfaces/ip",
defining_module="openconfig-if-ip",
yang_type="container",
is_config=True,
)
ip = __builtin__.property(_get_ip, _set_ip)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
vrrp = __builtin__.property(_get_vrrp, _set_vrrp)
_pyangbind_elements = OrderedDict(
[("ip", ip), ("config", config), ("state", state), ("vrrp", vrrp)]
)
| napalm-automation/napalm-yang | napalm_yang/models/openconfig/interfaces/interface/subinterfaces/subinterface/ipv4/addresses/address/__init__.py | Python | apache-2.0 | 15,915 | 0.00132 |
# Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software distributed under the License
# is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
# or implied. See the License for the specific language governing permissions and limitations under
# the License.
"""Implements BigQuery Views."""
from __future__ import absolute_import
from __future__ import unicode_literals
from builtins import str
from builtins import object
import datalab.context
from . import _query
from . import _table
# Query import is at end to avoid issues with circular dependencies.
class View(object):
""" An implementation of a BigQuery View. """
# Views in BigQuery are virtual tables, but it is useful to have a mixture of both Table and
# Query semantics; our version thus internally has a BaseTable and a Query (for materialization;
# not the same as the view query), and exposes a number of the same APIs as Table and Query
# through wrapper functions around these.
def __init__(self, name, context=None):
"""Initializes an instance of a View object.
Args:
name: the name of the view either as a string or a 3-part tuple
(projectid, datasetid, name). If a string, it must have the form
'<project>:<dataset>.<view>' or '<dataset>.<view>'.
context: an optional Context object providing project_id and credentials. If a specific
project id or credentials are unspecified, the default ones configured at the global
level are used.
Raises:
Exception if the name is invalid.
"""
if context is None:
context = datalab.context.Context.default()
self._context = context
self._table = _table.Table(name, context=context)
self._materialization = _query.Query('SELECT * FROM %s' % self._repr_sql_(), context=context)
def __str__(self):
"""The full name for the view as a string."""
return str(self._table)
@property
def name(self):
"""The name for the view as a named tuple."""
return self._table.name
@property
def description(self):
"""The description of the view if it exists."""
return self._table.metadata.description
@property
def friendly_name(self):
"""The friendly name of the view if it exists."""
return self._table.metadata.friendly_name
@property
def query(self):
"""The Query that defines the view."""
if not self.exists():
return None
self._table._load_info()
if 'view' in self._table._info and 'query' in self._table._info['view']:
return _query.Query(self._table._info['view']['query'], context=self._context)
return None
def exists(self):
"""Whether the view's Query has been executed and the view is available or not."""
return self._table.exists()
def delete(self):
"""Removes the view if it exists."""
self._table.delete()
def create(self, query):
""" Creates the view with the specified query.
Args:
      query: the query to use for the View; either a string containing a SQL query or
a Query object.
Returns:
The View instance.
Raises:
Exception if the view couldn't be created or already exists and overwrite was False.
"""
if isinstance(query, _query.Query):
query = query.sql
    response = self._table._api.tables_insert(self._table.name, query=query)
if 'selfLink' in response:
return self
raise Exception("View %s could not be created as it already exists" % str(self))
def sample(self, fields=None, count=5, sampling=None, use_cache=True, dialect=None,
billing_tier=None):
"""Retrieves a sampling of data from the view.
Args:
fields: an optional list of field names to retrieve.
count: an optional count of rows to retrieve which is used if a specific
sampling is not specified.
sampling: an optional sampling strategy to apply to the view.
use_cache: whether to use cached results or not.
dialect : {'legacy', 'standard'}, default 'legacy'
'legacy' : Use BigQuery's legacy SQL dialect.
'standard' : Use BigQuery's standard SQL (beta), which is
compliant with the SQL 2011 standard.
billing_tier: Limits the billing tier for this job. Queries that have resource
usage beyond this tier will fail (without incurring a charge). If unspecified, this
will be set to your project default. This can also be used to override your
project-wide default billing tier on a per-query basis.
Returns:
A QueryResultsTable object containing the resulting data.
Raises:
Exception if the sample query could not be executed or the query response was malformed.
"""
return self._table.sample(fields=fields, count=count, sampling=sampling, use_cache=use_cache,
dialect=dialect, billing_tier=billing_tier)
@property
def schema(self):
"""Retrieves the schema of the table.
Returns:
A Schema object containing a list of schema fields and associated metadata.
    Raises:
Exception if the request could not be executed or the response was malformed.
"""
return self._table.schema
def update(self, friendly_name=None, description=None, query=None):
""" Selectively updates View information.
Any parameters that are None (the default) are not applied in the update.
Args:
friendly_name: if not None, the new friendly name.
description: if not None, the new description.
query: if not None, a new query string for the View.
"""
self._table._load_info()
if query is not None:
if isinstance(query, _query.Query):
query = query.sql
self._table._info['view'] = {'query': query}
self._table.update(friendly_name=friendly_name, description=description)
def results(self, use_cache=True, dialect=None, billing_tier=None):
"""Materialize the view synchronously.
If you require more control over the execution, use execute() or execute_async().
Args:
use_cache: whether to use cached results or not.
dialect : {'legacy', 'standard'}, default 'legacy'
'legacy' : Use BigQuery's legacy SQL dialect.
'standard' : Use BigQuery's standard SQL (beta), which is
compliant with the SQL 2011 standard.
billing_tier: Limits the billing tier for this job. Queries that have resource
usage beyond this tier will fail (without incurring a charge). If unspecified, this
will be set to your project default. This can also be used to override your
project-wide default billing tier on a per-query basis.
Returns:
A QueryResultsTable containing the result set.
Raises:
Exception if the query could not be executed or query response was malformed.
"""
return self._materialization.results(use_cache=use_cache, dialect=dialect,
billing_tier=billing_tier)
def execute_async(self, table_name=None, table_mode='create', use_cache=True, priority='high',
allow_large_results=False, dialect=None, billing_tier=None):
"""Materialize the View asynchronously.
Args:
table_name: the result table name; if None, then a temporary table will be used.
table_mode: one of 'create', 'overwrite' or 'append'. If 'create' (the default), the request
will fail if the table exists.
use_cache: whether to use past query results or ignore cache. Has no effect if destination is
specified (default True).
      priority: one of 'low' or 'high' (default). Note that 'high' is more expensive, but is
better suited to exploratory analysis.
allow_large_results: whether to allow large results; i.e. compressed data over 100MB. This is
          slower and requires a table_name to be specified (default False).
dialect : {'legacy', 'standard'}, default 'legacy'
'legacy' : Use BigQuery's legacy SQL dialect.
'standard' : Use BigQuery's standard SQL (beta), which is
compliant with the SQL 2011 standard.
billing_tier: Limits the billing tier for this job. Queries that have resource
usage beyond this tier will fail (without incurring a charge). If unspecified, this
will be set to your project default. This can also be used to override your
project-wide default billing tier on a per-query basis.
Returns:
A QueryJob for the materialization
Raises:
Exception (KeyError) if View could not be materialized.
"""
return self._materialization.execute_async(table_name=table_name, table_mode=table_mode,
use_cache=use_cache, priority=priority,
allow_large_results=allow_large_results,
dialect=dialect, billing_tier=billing_tier)
def execute(self, table_name=None, table_mode='create', use_cache=True, priority='high',
allow_large_results=False, dialect=None, billing_tier=None):
"""Materialize the View synchronously.
Args:
table_name: the result table name; if None, then a temporary table will be used.
table_mode: one of 'create', 'overwrite' or 'append'. If 'create' (the default), the request
will fail if the table exists.
use_cache: whether to use past query results or ignore cache. Has no effect if destination is
specified (default True).
      priority: one of 'low' or 'high' (default). Note that 'high' is more expensive, but is
better suited to exploratory analysis.
allow_large_results: whether to allow large results; i.e. compressed data over 100MB. This is
          slower and requires a table_name to be specified (default False).
dialect : {'legacy', 'standard'}, default 'legacy'
'legacy' : Use BigQuery's legacy SQL dialect.
'standard' : Use BigQuery's standard SQL (beta), which is
compliant with the SQL 2011 standard.
billing_tier: Limits the billing tier for this job. Queries that have resource
usage beyond this tier will fail (without incurring a charge). If unspecified, this
will be set to your project default. This can also be used to override your
project-wide default billing tier on a per-query basis.
Returns:
A QueryJob for the materialization
Raises:
Exception (KeyError) if View could not be materialized.
"""
return self._materialization.execute(table_name=table_name, table_mode=table_mode,
use_cache=use_cache, priority=priority,
allow_large_results=allow_large_results,
dialect=dialect, billing_tier=billing_tier)
def _repr_sql_(self):
"""Returns a representation of the view for embedding into a SQL statement.
Returns:
A formatted table name for use within SQL statements.
"""
return '[' + str(self) + ']'
def __repr__(self):
"""Returns a representation for the view for showing in the notebook.
"""
return 'View %s\n%s' % (self._table, self.query)
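# Illustrative usage sketch (not part of this module). It assumes a default
# Context is configured and that the dataset/table names below exist; the
# names are placeholders only.
#
#   import datalab.bigquery as bq
#
#   view = bq.View('mydataset.busy_days')
#   if not view.exists():
#     view.create('SELECT date, COUNT(*) AS visits FROM [mydataset.log] GROUP BY date')
#   results = view.results()         # materializes the view synchronously
#   preview = view.sample(count=10)  # small peek at the view's data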
| supriyagarg/pydatalab | datalab/bigquery/_view.py | Python | apache-2.0 | 11,620 | 0.007831 |
# -*- coding: utf-8 -*-
# Baruwa - Web 2.0 MailScanner front-end.
# Copyright (C) 2010-2015 Andrew Colin Kissa <andrew@topdog.za.net>
# vim: ai ts=4 sts=4 et sw=4
"status tasks"
import os
import datetime
import psutil
from StringIO import StringIO
from pylons import config
from celery.task import task
from sqlalchemy.pool import NullPool
from eventlet.green import subprocess
from sqlalchemy import desc
from sqlalchemy import engine_from_config
from sqlalchemy.exc import DatabaseError
from sphinxapi import SphinxClient, SPH_MATCH_EXTENDED2
from reportlab.lib import colors
from reportlab.lib.units import inch
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.platypus import Table
from reportlab.lib.styles import ParagraphStyle
from reportlab.platypus import Paragraph, Image, Spacer, TableStyle
from baruwa.model.meta import Session
from baruwa.lib.graphs import PIE_TABLE
from baruwa.lib.net import system_hostname
from baruwa.lib.misc import extract_sphinx_opts
from baruwa.lib.query import clean_sphinx_q
from baruwa.lib.mail.queue.exim import EximQueue
from baruwa.lib.mail.message import PreviewMessage
from baruwa.lib.mail.queue.convert import Exim2Mbox
from baruwa.lib.mail.queue.search import search_queue
from baruwa.model.status import AuditLog, CATEGORY_MAP
from baruwa.commands.queuestats import update_queue_stats
from baruwa.lib.regex import EXIM_MSGID_RE, BAYES_INFO_RE
from baruwa.lib.outputformats import build_csv, BaruwaPDFTemplate
from baruwa.lib.misc import get_processes, get_config_option, wrap_string, _
STYLES = getSampleStyleSheet()
if not Session.registry.has():
try:
engine = engine_from_config(config, 'sqlalchemy.', poolclass=NullPool)
Session.configure(bind=engine)
except KeyError:
pass
@task(name="get-system-status")
def systemstatus():
"process via mq"
logger = systemstatus.get_logger()
logger.info("Checking system status")
stats = dict(mem=None,
cpu=None,
load=None,
net=[],
mta=None,
scanners=None,
time=None,
uptime=None,
av=None,
partitions=[])
def _obj2dict(obj):
"convert object attribs to dict"
val = {}
for key in obj._fields:
val[key] = getattr(obj, key)
return val
pipe = subprocess.Popen(["uptime"], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
upt = pipe.communicate()[0].split()
pipe.wait(timeout=2)
pipe = subprocess.Popen(["date"], stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
stats['time'] = pipe.communicate()[0]
pipe.wait(timeout=2)
stats['uptime'] = "%s %s" % (upt[2], upt[3].rstrip(','))
stats['mem'] = _obj2dict(psutil.virtual_memory())
stats['mem']['percent'] = ((stats['mem']['used']
/ float(stats['mem']['total'])) * 100)
stats['cpu'] = psutil.cpu_percent()
stats['load'] = os.getloadavg()
net = psutil.network_io_counters(True)
infs = {}
for inf in net:
infs[inf] = _obj2dict(net[inf])
stats['net'] = infs
partitions = []
for part in psutil.disk_partitions(all=False):
usage = psutil.disk_usage(part.mountpoint)
dpart = _obj2dict(part)
dpart.update(_obj2dict(usage))
partitions.append(dpart)
stats['partitions'] = partitions
stats['mta'] = get_processes('exim')
stats['scanners'] = get_processes('MailScanner')
stats['av'] = get_processes('clamd')
return stats
@task(name="spamassassin-lint")
def salint():
"Spamassassin lint"
logger = salint.get_logger()
logger.info("Running Spamassassin lint checks")
lint = []
pipe1 = subprocess.Popen(['spamassassin',
'-x',
'-D',
'--lint'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
while True:
line = pipe1.stderr.readline()
if not line:
break
lint.append(line)
pipe1.wait(timeout=2)
return lint
@task(name="get-bayes-info")
def bayesinfo():
"Get bayes info"
logger = bayesinfo.get_logger()
logger.info("Generating Bayesian stats")
info = {}
saprefs = config.get(
'ms.saprefs',
'/etc/MailScanner/spam.assassin.prefs.conf'
)
pipe1 = subprocess.Popen(['sa-learn',
'-p',
saprefs,
'--dump',
'magic'],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
while True:
line = pipe1.stdout.readline()
if not line:
break
match = BAYES_INFO_RE.match(line)
if match:
if match.group(5) == 'bayes db version':
info['version'] = match.group(3)
elif match.group(5) == 'nspam':
info['spam'] = match.group(3)
elif match.group(5) == 'nham':
info['ham'] = match.group(3)
elif match.group(5) == 'ntokens':
info['tokens'] = match.group(3)
elif match.group(5) == 'oldest atime':
info['otoken'] = datetime.datetime\
.fromtimestamp(float(match.group(3)))
elif match.group(5) == 'newest atime':
info['ntoken'] = datetime.datetime\
.fromtimestamp(float(match.group(3)))
elif match.group(5) == 'last journal sync atime':
info['ljournal'] = datetime.datetime\
.fromtimestamp(float(match.group(3)))
elif match.group(5) == 'last expiry atime':
info['expiry'] = datetime.datetime\
.fromtimestamp(float(match.group(3)))
elif match.group(5) == 'last expire reduction count':
info['rcount'] = match.group(3)
pipe1.wait(timeout=2)
return info
@task(name="preview-queued-msg")
def preview_queued_msg(msgid, direction, attachid=None, imgid=None):
"Preview a queued message"
try:
logger = preview_queued_msg.get_logger()
header = search_queue(msgid, int(direction))
convertor = Exim2Mbox(header)
mbox = convertor()
msgfile = StringIO(mbox)
previewer = PreviewMessage(msgfile)
if attachid:
logger.info("Download attachment: %(attachid)s of "
"message: %(id)s",
dict(id=msgid, attachid=attachid))
return previewer.attachment(attachid)
if imgid:
logger.info("Image access: %(img)s", dict(img=imgid))
return previewer.img(imgid)
logger.info("Preview of message: %(id)s", dict(id=msgid))
return previewer.preview()
except TypeError, type_error:
logger.info("Error occured: %s" % str(type_error))
return {}
except (AssertionError, IOError), error:
logger.info("Accessing message: %(id)s, Failed: %(error)s",
dict(id=msgid, error=error))
return None
finally:
if 'msgfile' in locals():
msgfile.close()
@task(name='process-queued-msgs', ignore_result=True)
def process_queued_msgs(msgids, action, direction, *args):
"Process queued messages"
try:
logger = process_queued_msgs.get_logger()
eximcmd = get_config_option('Sendmail2') if direction == 2 else 'exim'
if 'exim' not in eximcmd:
logger.info("Invalid exim command: %s" % eximcmd)
return
if direction == 1 and action not in ['bounce', 'delete']:
logger.info("Invalid action: %s" % action)
return
exim_user = config.get('baruwa.mail.user', 'exim')
queue = EximQueue('sudo -u %s %s' % (exim_user, eximcmd))
func = getattr(queue, action)
msgids = [msgid for msgid in msgids if EXIM_MSGID_RE.match(msgid)]
func(msgids, *args)
for result in queue.results:
logger.info("STDOUT: %s" % result)
if queue.errors:
for errmsg in queue.errors:
logger.info("STDERR: %s" % errmsg)
hostname = system_hostname()
update_queue_stats(hostname)
except TypeError, error:
logger.info("Invalid input: %s" % error)
except AttributeError:
logger.info("Invalid action: %s" % action)
@task(name='update-audit-log', ignore_result=True)
def update_audit_log(username,
category,
info,
hostname,
remoteip,
timestamp=None):
"Update the audit log"
logger = update_audit_log.get_logger()
try:
entry = AuditLog(username,
category,
info,
hostname,
remoteip)
if timestamp:
entry.timestamp = timestamp
Session.add(entry)
Session.commit()
logger.info("Audit Log update for: %s from: %s" %
(username, remoteip))
except DatabaseError, err:
logger.error("Audit Log FAILURE: %s %s %s %s %s %s Error: %s" %
(username,
category,
info,
hostname,
remoteip,
timestamp,
err))
finally:
Session.close()
def build_pdf(rows):
"Build PDF"
pdffile = StringIO()
doc = BaruwaPDFTemplate(pdffile, topMargin=50, bottomMargin=18)
import baruwa
here = os.path.dirname(
os.path.dirname(os.path.abspath(baruwa.__file__))
)
logo = os.path.join(here, 'baruwa', 'public', 'imgs', 'logo.png')
img = Image(logo)
logobj = [(img, _('Audit Log exported report'))]
logo_table = Table(logobj, [2.0 * inch, 5.4 * inch])
logo_table.setStyle(PIE_TABLE)
parts = [logo_table]
parts.append(Spacer(1, 20))
parts.append(Paragraph(_('Audit Logs'), STYLES['Heading1']))
heading = ((Paragraph(_('Date/Time'), STYLES["Heading6"]),
Paragraph(_('Username'), STYLES["Heading6"]),
Paragraph(_('Info'), STYLES["Heading6"]),
Paragraph(_('Hostname'), STYLES["Heading6"]),
Paragraph(_('Remote IP'), STYLES["Heading6"]),
Paragraph(_('Action'), STYLES["Heading6"]), ))
rows.insert(0, heading)
table = Table(rows, [1.10 * inch, 1.23 * inch,
1.96 * inch, 1.69 * inch,
0.95 * inch, 0.45 * inch, ])
table.setStyle(TableStyle([
('FONTSIZE', (0, 0), (-1, -1), 8),
('FONT', (0, 0), (-1, -1), 'Helvetica'),
('FONT', (0, 0), (-1, 0), 'Helvetica-Bold'),
('GRID', (0, 0), (-1, -1), 0.15, colors.black),
]))
parts.append(table)
doc.title = _('Baruwa Audit log export')
doc.build(parts)
return pdffile.getvalue()
@task(name='export-audit-log')
def export_auditlog(format, query):
"Export the audit log"
logger = export_auditlog.get_logger()
filename = 'auditlog-%s.%s' % (export_auditlog.request.id, format)
content_type = 'text/csv' if format == 'csv' else 'application/pdf'
results = dict(id=export_auditlog.request.id,
f=None,
content_type=content_type,
filename=filename,
errormsg='')
try:
dbquery = Session.query(AuditLog)
if query:
conn = SphinxClient()
sphinxopts = extract_sphinx_opts(config['sphinx.url'])
conn.SetServer(sphinxopts.get('host', '127.0.0.1'))
conn.SetMatchMode(SPH_MATCH_EXTENDED2)
conn.SetLimits(0, 500, 500)
query = clean_sphinx_q(query)
qresults = conn.Query(query, 'auditlog, auditlog_rt')
if qresults and qresults['matches']:
ids = [hit['id'] for hit in qresults['matches']]
dbquery = dbquery.filter(AuditLog.id.in_(ids))
dbquery = dbquery.order_by(desc('timestamp')).all()
if format == 'pdf':
PS = ParagraphStyle('auditlogp',
fontName='Helvetica',
fontSize=8,
borderPadding=(2, 2, 2, 2))
rows = [(Paragraph(item.timestamp.strftime('%Y-%m-%d %H:%M'), PS),
Paragraph(wrap_string(item.username, 27), PS),
Paragraph(wrap_string(item.info, 33), PS),
Paragraph(wrap_string(item.hostname, 27), PS),
Paragraph(wrap_string(item.remoteip, 15), PS),
Paragraph(CATEGORY_MAP[item.category], PS))
for item in dbquery]
pdf = build_pdf(rows)
results['f'] = pdf
elif format == 'csv':
rows = [item.tojson() for item in dbquery]
keys = ('timestamp',
'username',
'info',
'hostname',
'remoteip',
'category')
results['f'] = build_csv(rows, keys)
logger.info("Audit Log export complete: %s" % results['filename'])
return results
except (DatabaseError), err:
results['errormsg'] = str(err)
logger.info("Audit Log export FAILURE: %s" % str(err))
return results
finally:
Session.close()
| akissa/baruwa2 | baruwa/tasks/status.py | Python | gpl-3.0 | 13,729 | 0.003933 |
import numpy as np
from robotarium import Robotarium, transformations, controllers
# Get Robotarium object used to communicate with the robots/simulator.
r = Robotarium()
# Get the number of available agents from the Robotarium. We don't need a
# specific value for this algorithm.
n = r.get_available_agents()
# Number of iterations.
iterations = 20000
# Initialize the Robotarium object with the desired number of agents.
r.initialize(n)
# Initialize velocity vector for agents. Each agent expects a 2x1 velocity
# vector containing the linear and angular velocity, respectively.
dx = np.zeros((2, n))
xy_bound = np.array([-0.5, 0.5, -0.3, 0.3])
p_theta = (np.arange(1, 2*n, 2)) / (2 * n) * 2 * np.pi
p_circ = np.vstack(
[np.hstack([xy_bound[1] * np.cos(p_theta),
xy_bound[1] * np.cos(p_theta + np.pi)]),
np.hstack([xy_bound[3] * np.sin(p_theta),
xy_bound[3] * np.sin(p_theta + np.pi)])])
x_goal = p_circ[:, 0:n]
flag = 0 # Flag of task completion
# iterate for the previously specified number of iterations.
for _ in range(0, iterations):
    # Retrieve the most recent poses from the Robotarium. The time delay is
    # approximately 0.033 seconds.
x = r.get_poses()
x_temp = x[0:2, :]
# ALGORITHM
# Nominal controller, go2goal
if np.linalg.norm(x_goal-x_temp, ord=1) < 0.08:
flag = 1 - flag
if flag == 0:
x_goal = p_circ[:, 0:n]
else:
x_goal = p_circ[:, n:2*n]
# Use different go-to-goal
dx = controllers.position_int(x, x_goal, 0.05)
# Saturation of controls
dx_max = 0.1
for i in range(0, n):
if np.linalg.norm(dx[:, i]) > dx_max:
dx[:, i] = dx[:, i] / np.linalg.norm(dx[:, i]) * dx_max
# END ALGORITHM
# Ensure the robots don't collide
dx = transformations.barrier_certificate(dx, x, ds=0.1)
# Transform the single-integrator dynamics to unicycle dynamics using a
# diffeomorphism, which can be found in the utilities.
dx = transformations.int_to_uni2(dx, x, 0.75, np.pi)
# Set velocities of agents 1,...,n
r.set_velocities(range(0, n), dx)
# Send the previously set velocities to the agents.
# This function must be called.
r.step()
| robotarium/robotarium-python-simulator | examples/barrierCertificates.py | Python | mit | 2,240 | 0 |
from pixelated.adapter.mailstore.searchable_mailstore import SearchableMailStore
from pixelated.adapter.services.mail_service import MailService
from pixelated.adapter.model.mail import InputMail
from pixelated.adapter.services.mail_sender import MailSender
from pixelated.adapter.search import SearchEngine
from pixelated.adapter.services.draft_service import DraftService
from pixelated.adapter.listeners.mailbox_indexer_listener import listen_all_mailboxes
from twisted.internet import defer
from pixelated.adapter.search.index_storage_key import SearchIndexStorageKey
from pixelated.adapter.services.feedback_service import FeedbackService
class Services(object):
def __init__(self, leap_home, leap_session):
pass
@defer.inlineCallbacks
def setup(self, leap_home, leap_session):
InputMail.FROM_EMAIL_ADDRESS = leap_session.account_email()
search_index_storage_key = self.setup_search_index_storage_key(leap_session.soledad)
yield self.setup_search_engine(
leap_home,
search_index_storage_key)
self.wrap_mail_store_with_indexing_mail_store(leap_session)
yield listen_all_mailboxes(leap_session.account, self.search_engine, leap_session.mail_store)
self.mail_service = self.setup_mail_service(
leap_session,
self.search_engine)
self.keymanager = leap_session.nicknym
self.draft_service = self.setup_draft_service(leap_session.mail_store)
self.feedback_service = self.setup_feedback_service(leap_session)
yield self.index_all_mails()
def wrap_mail_store_with_indexing_mail_store(self, leap_session):
leap_session.mail_store = SearchableMailStore(leap_session.mail_store, self.search_engine)
@defer.inlineCallbacks
def index_all_mails(self):
all_mails = yield self.mail_service.all_mails()
self.search_engine.index_mails(all_mails)
@defer.inlineCallbacks
def setup_search_engine(self, leap_home, search_index_storage_key):
key_unicode = yield search_index_storage_key.get_or_create_key()
key = str(key_unicode)
print 'The key len is: %s' % len(key)
search_engine = SearchEngine(key, agent_home=leap_home)
self.search_engine = search_engine
def setup_mail_service(self, leap_session, search_engine):
pixelated_mail_sender = MailSender(leap_session.smtp_config, leap_session.nicknym.keymanager)
return MailService(
pixelated_mail_sender,
leap_session.mail_store,
search_engine,
leap_session.account_email())
def setup_draft_service(self, mail_store):
return DraftService(mail_store)
def setup_search_index_storage_key(self, soledad):
return SearchIndexStorageKey(soledad)
def setup_feedback_service(self, leap_session):
return FeedbackService(leap_session)
| rdoh/pixelated-user-agent | service/pixelated/config/services.py | Python | agpl-3.0 | 2,909 | 0.002063 |
"""
Create movie from MEG inverse solution
=======================================
Data were computed using mne-python (http://martinos.org/mne)
"""
import os
import numpy as np
from surfer import Brain
from surfer.io import read_stc
print(__doc__)
"""
create Brain object for visualization
"""
brain = Brain('fsaverage', 'split', 'inflated', size=(800, 400))
"""
read and display MNE dSPM inverse solution
"""
stc_fname = os.path.join('example_data', 'meg_source_estimate-%s.stc')
for hemi in ['lh', 'rh']:
stc = read_stc(stc_fname % hemi)
data = stc['data']
times = np.arange(data.shape[1]) * stc['tstep'] + stc['tmin']
brain.add_data(data, colormap='hot', vertices=stc['vertices'],
smoothing_steps=10, time=times, hemi=hemi,
time_label=lambda t: '%s ms' % int(round(t * 1e3)))
"""
scale colormap
"""
brain.scale_data_colormap(fmin=13, fmid=18, fmax=22, transparent=True)
"""
Save a movie. Use a large value for time_dilation because the sample stc only
covers 30 ms.
"""
brain.save_movie('example_current.mov', time_dilation=30)
brain.close()
| diego0020/PySurfer | examples/save_movie.py | Python | bsd-3-clause | 1,110 | 0 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Author: Janice Cheng
# ex17_inFile.txt
# ex17_outFile.txt
from sys import argv
from os.path import exists
script,from_file,to_file = argv
print("Copying from {} to {}".format(from_file,to_file))
# we could do these two on one line too, how?
in_file = open(from_file)
indata = in_file.read()
print("The input file is {} bytes long".format(len(indata)))
print("Does the output file exists? %r" %exists(to_file))
print("Ready, hit RETURN to continue, CTRL-C to abort.")
input()
out_file = open(to_file,'w')
out_file.write(indata)
print("Alright, all done.")
out_file.close()
in_file.close() | jcchoiling/learningPython | books/learn-python-the-hard-way/ex17_More_Files.py | Python | gpl-3.0 | 645 | 0.009302 |
#!/bin/python
# The purpose of this script is to take the *machine-readable* output of UMLS
# MetaMap and convert it to something that looks like a sentence of UMLS CUIs,
# if possible. Ideally there would be an option in MetaMap to do this, assuming
# it is sensible.
import re
import sys
#INTERACTIVE = True
INTERACTIVE = False
# "hacks" to fix metamap weirdness
POSTPROC = True
if POSTPROC:
print 'WARNING: Performing dataset-specific postprocessing.'
# --- some regexes --- #
utterance_re = re.compile('^utterance\(')
phrase_re = re.compile('^phrase\(')
mappings_re = re.compile('^mappings\(')
candidates_re = re.compile('^candidates\(')
EOU_re = re.compile('^\'EOU')
# this is a file of sentences, fed into metamap
raw_data_path = ''
# --- grab in paths --- #
# this is the metamap output. YMMV
# created by the command:
# metamap14 -q -Q 3 --word_sense_disambiguation raw_data_path metamap_output_path
# must provide an input path
assert len(sys.argv) >= 2
metamap_output_path = sys.argv[1]
# optionally provide output path
# (this is the processed data path, the output of this script)
try:
proc_data_path = sys.argv[2]
# do not write over the input, please
assert not proc_data_path == metamap_output_path
except IndexError:
# not provided
proc_data_path = metamap_output_path + '.reform'
# --- open files --- #
metamap_output = open(metamap_output_path, 'r')
proc_data = open(proc_data_path, 'w')
# --- the first line is 'args', pop that --- #
args_line = metamap_output.readline()
# not sure what second line is but pop it too
unknown_line = metamap_output.readline()
# --- the relevant and important functions --- #
def parse_phrase(line, neg_dict={}):
"""
Takes a phrase from machine-readable format, parses its mappings, returns
a string of mapped terms (into CUIs, when possible).
"""
wordmap = dict()
# list of words in the phrase
# (note: the phrase looks like phrase('PHRASEHERE', [sometext(... )
phrase = re.sub('[\'\.]','',re.split(',\[[a-zA-Z]+\(', re.sub('phrase\(','', line))[0])
# get the candidates (and most importantly, their numbers)
candidates = metamap_output.readline()
if candidates == '' or not candidates_re.match(candidates):
parsed_phrase = phrase + ' '
return parsed_phrase
TotalCandidateCount = int(re.sub('candidates\(','',candidates).split(',')[0])
# get the mappings
mappings = metamap_output.readline()
if mappings == '' or not mappings_re.match(mappings):
parsed_phrase = phrase + ' '
return parsed_phrase
if TotalCandidateCount == 0:
# there were no mappings for this phrase
parsed_phrase = phrase + ' '
else:
# accounted for by other words
delwords = []
parsed_phrase = ''
# split the mappings up into 'ev's
split_mappings = mappings.split('ev(')
outstring = ''
for mapping in split_mappings[1:]:
CUI = mapping.split(',')[1].strip('\'')
try:
words = re.split('[\[\]]',','.join(mapping.split(',')[4:]))[1].split(',')
except IndexError:
# ugh, mapping is messed up
print 'WARNING: input is messed up'
return parsed_phrase
umls_strings = mapping.split(',')[2:4]
# CPI is the final [] in this mapping, I think/believe
ConceptPositionalInfo = mapping.split('[')[-1].split(']')[0]
if ConceptPositionalInfo in neg_dict:
# this concept has been negated!
# make sure it's the same one...
assert CUI in neg_dict[ConceptPositionalInfo]
# need to make sure it's ONE of the CUIs which was negated at this location
CUI = 'NOT_' + CUI
if INTERACTIVE:
outstring += '\n\tAssociation between '+ CUI + ' and ' + ', '.join(map(lambda x: '"'+x+'"',words))
if len(words) > 1:
outstring += ' (subsuming ' + ' '.join(map(lambda x: '"'+x+'"', words[1:])) + ')'
outstring += '\n\tbased on UMLS strings ' + ', '.join(umls_strings) +'\n'
wordmap[words[0]] = CUI
# if multiple words mapped to this CUI, remember to delete the rest
# that is: when we consume the sentence later we will 'replace' the
# first word in this list with the CUI, then delete the rest
# brittleness: delwords may appear elsewhere in the sentence
delwords += words[1:]
# split on spaces, commas
for word in re.split(', | ', phrase):
try:
                # lowercase word, because it is represented in the prolog that way
parsed_phrase += wordmap[word.lower()] + ' '
except KeyError:
if word.lower() in delwords:
continue
else:
parsed_phrase += word + ' '
if INTERACTIVE:
if len(wordmap) > 0:
# yolo
print '\nMapping phrase:',
print phrase, '...'
print outstring
print 'Mapped:', phrase, '--->',
print parsed_phrase
print ''
eh = raw_input('')
return parsed_phrase
def postproc_utterance(parsed_utterance):
"""
HACKS!
Do some 'manual' post-processing to make up for MetaMap peculiarity.
WARNING: dataset specific.
"""
# _ S__ DEID --> _S__DEID
parsed_utterance = re.sub('_ S__ DEID', '_S__DEID', parsed_utterance)
# _ S__ C2825141 --> _S__FINDING (FINDING...)
parsed_utterance = re.sub('_ S__ C2825141', '_S__FINDING', parsed_utterance)
return parsed_utterance
def parse_utterance(neg_dict={}):
"""
Suck in an utterance from the machine-readable format, parse its mapping
and then return a string of mapped terms (into CUIs).
May not be the same length as the input sentence.
"""
phrases = ''
line = metamap_output.readline()
while not EOU_re.match(line):
if phrase_re.match(line):
parsed_phrase = parse_phrase(line, neg_dict)
phrases += parsed_phrase
elif line == '':
# EOF I guess...
return phrases
elif not EOU_re.match(line):
            print 'ERROR: utterance not followed by EOU line, followed by:'
print line
sys.exit('ERROR: missing EOU')
line = metamap_output.readline()
return phrases
def parse_negline(neg_line):
"""
Parse the THIRD line of the .mmo file, where the negations are stored.
Why does it not do this per-phrase? Mystery.
We connect the negated-CUI to its appearance in the text using the
ConceptPositionalInfo which _appears_ to correspond to the PosInfo field
which appears in the ev found in a mapping.
The output is neg_dict which maps these ConceptPositionalInfos into the
associated CUIs :we use this for sanity checking while parsing the mappings;
the position should be enough to identify it, but for extra-safety we assert
that the CUIs are matching.
"""
assert 'neg_list([' in neg_line
neg_dict = dict()
# strip things out
# (removing "neg_list(["... and ..."]).\n")
l_stripped = neg_line[10:][:-5]
    # split into separate 'negations'...
# split on ( and then remove the training ", negation(" at the end, first entry is useless
negations = map(lambda x: x.rstrip(')')[:-10] if 'negation' in x else x.rstrip(')'), l_stripped.split('('))[1:]
# for each negation, grab its location and CUI
for neg in negations:
# strip the string part of the CUI: we know it's between the SECOND pair of [], and before a :
NegatedConcept = neg.split('[')[2].split(':')[0].strip('\'')
# now get the concept... we know it's in the THIRD set of []... and there may be several separated by ,
ConceptPositionalInfo = neg.split('[')[3].rstrip(']')
try:
neg_dict[ConceptPositionalInfo].add(NegatedConcept)
except KeyError:
neg_dict[ConceptPositionalInfo] = set([NegatedConcept])
return neg_dict
# --- run through the file --- #
# --- get the neglist --- #
neg_line = metamap_output.readline()
neg_dict = parse_negline(neg_line)
# the first line
n = 0
while True:
line = metamap_output.readline()
if not line: break
if utterance_re.match(line):
# we are now in an utterance!
parsed_utterance = parse_utterance(neg_dict)
if POSTPROC:
# hacky post-processing
parsed_utterance = postproc_utterance(parsed_utterance)
print 'Parsed utterance:'
print '\t','"'.join(line.split('"')[1:2]).strip('[]')
print '=====>'
print '\t',parsed_utterance
proc_data.write(parsed_utterance+'\n')
n += 1
else:
# not interested in this line
continue
proc_data.close()
print '\nWrote', n, 'sentences to', proc_data_path
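# Example invocation (illustrative; file names are placeholders). The .mmo file
# is the machine-readable MetaMap output described in the comments at the top
# of this script:
#   metamap14 -q -Q 3 --word_sense_disambiguation sentences.txt sentences.mmo
#   python parse_metamap.py sentences.mmo sentences.mmo.reform
# The second argument is optional; when omitted the output path defaults to
# <input>.reform, as handled above.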
| corcra/UMLS | parse_metamap.py | Python | mit | 8,989 | 0.00534 |
# -*- coding: utf-8 -*-
try:
import re2 as re
except ImportError:
import re
from lib.cuckoo.common.abstracts import Signature
class Silverlight_JS(Signature):
name = "silverlight_js"
description = "执行伪装过的包含一个Silverlight对象的JavaScript,可能被用于漏洞攻击尝试"
weight = 3
severity = 3
categories = ["exploit_kit", "silverlight"]
authors = ["Kevin Ross"]
minimum = "1.3"
evented = True
def __init__(self, *args, **kwargs):
Signature.__init__(self, *args, **kwargs)
filter_categories = set(["browser"])
# backward compat
filter_apinames = set(["JsEval", "COleScript_Compile", "COleScript_ParseScriptText"])
def on_call(self, call, process):
if call["api"] == "JsEval":
buf = self.get_argument(call, "Javascript")
else:
buf = self.get_argument(call, "Script")
if re.search("application\/x\-silverlight.*?\<param name[ \t\n]*=.*?value[ \t\n]*=.*?\<\/object\>.*", buf, re.IGNORECASE|re.DOTALL):
return True
| lixiangning888/whole_project | modules/signatures_merge_tmp/ek_silverlight.py | Python | lgpl-3.0 | 1,071 | 0.009794 |
#### NOTICE: THIS FILE IS AUTOGENERATED
#### MODIFICATIONS MAY BE LOST IF DONE IMPROPERLY
#### PLEASE SEE THE ONLINE DOCUMENTATION FOR EXAMPLES
from swgpy.object import *
def create(kernel):
result = Tangible()
result.template = "object/tangible/lair/base/shared_poi_all_lair_insecthill_large_fog_gray.iff"
result.attribute_template_id = -1
result.stfName("lair_n","insecthill")
#### BEGIN MODIFICATIONS ####
#### END MODIFICATIONS ####
return result | anhstudios/swganh | data/scripts/templates/object/tangible/lair/base/shared_poi_all_lair_insecthill_large_fog_gray.py | Python | mit | 469 | 0.046908 |
async def ev_mention(ev, message):
def_stat_data = {
'event': 'mention',
'count': 0
}
collection = 'EventStats'
database = ev.bot.cfg.db.database
check = ev.db[database][collection].find_one({"event": 'mention'})
if not check:
ev.db[database][collection].insert_one(def_stat_data)
ev_count = 0
else:
ev_count = check['count']
ev_count += 1
update_target = {"event": 'mention'}
update_data = {"$set": {'count': ev_count}}
ev.db[database][collection].update_one(update_target, update_data)
| lu-ci/apex-sigma-plugins | core_functions/stats/ev_mention.py | Python | gpl-3.0 | 572 | 0 |
#!/usr/bin/python
# Copyright (C) Vladimir Prus 2006.
# Distributed under the Boost Software License, Version 1.0. (See
# accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
# Regression test: if a library had no explicit sources, but only <source>
# properties, it was built as if it were a searched library, and the specified
# sources were not compiled.
import BoostBuild
t = BoostBuild.Tester(use_test_config=False)
t.write("jamroot.jam", """
lib a : : <source>a.cpp ;
""")
t.write("a.cpp", """
#ifdef _WIN32
__declspec(dllexport)
#endif
void foo() {}
""")
t.run_build_system()
t.expect_addition("bin/$toolset/debug*/a.obj")
t.rm("bin")
# Now try with <conditional>.
t.write("jamroot.jam", """
rule test ( properties * )
{
return <source>a.cpp ;
}
lib a : : <conditional>@test ;
""")
t.run_build_system()
t.expect_addition("bin/$toolset/debug*/a.obj")
t.cleanup()
| nawawi/poedit | deps/boost/tools/build/test/lib_source_property.py | Python | mit | 917 | 0 |
"""A Python module for interacting with Slack's RTM API."""
import inspect
import json
import logging
import time
from concurrent.futures.thread import ThreadPoolExecutor
from logging import Logger
from queue import Queue, Empty
from ssl import SSLContext
from threading import Lock, Event
from typing import Optional, Callable, List, Union
from slack_sdk.errors import SlackApiError, SlackClientError
from slack_sdk.proxy_env_variable_loader import load_http_proxy_from_env
from slack_sdk.socket_mode.builtin.connection import Connection, ConnectionState
from slack_sdk.socket_mode.interval_runner import IntervalRunner
from slack_sdk.web import WebClient
class RTMClient:
token: Optional[str]
bot_id: Optional[str]
default_auto_reconnect_enabled: bool
auto_reconnect_enabled: bool
ssl: Optional[SSLContext]
proxy: str
timeout: int
base_url: str
ping_interval: int
logger: Logger
web_client: WebClient
current_session: Optional[Connection]
current_session_state: Optional[ConnectionState]
wss_uri: Optional[str]
message_queue: Queue
message_listeners: List[Callable[["RTMClient", dict], None]]
message_processor: IntervalRunner
message_workers: ThreadPoolExecutor
closed: bool
connect_operation_lock: Lock
on_message_listeners: List[Callable[[str], None]]
on_error_listeners: List[Callable[[Exception], None]]
on_close_listeners: List[Callable[[int, Optional[str]], None]]
def __init__(
self,
*,
token: Optional[str] = None,
web_client: Optional[WebClient] = None,
auto_reconnect_enabled: bool = True,
ssl: Optional[SSLContext] = None,
proxy: Optional[str] = None,
timeout: int = 30,
base_url: str = WebClient.BASE_URL,
headers: Optional[dict] = None,
ping_interval: int = 5,
concurrency: int = 10,
logger: Optional[logging.Logger] = None,
on_message_listeners: Optional[List[Callable[[str], None]]] = None,
on_error_listeners: Optional[List[Callable[[Exception], None]]] = None,
on_close_listeners: Optional[List[Callable[[int, Optional[str]], None]]] = None,
trace_enabled: bool = False,
all_message_trace_enabled: bool = False,
ping_pong_trace_enabled: bool = False,
):
self.token = token.strip() if token is not None else None
self.bot_id = None
self.default_auto_reconnect_enabled = auto_reconnect_enabled
        # You may want to temporarily turn off auto_reconnect as necessary
self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
self.ssl = ssl
self.proxy = proxy
self.timeout = timeout
self.base_url = base_url
self.headers = headers
self.ping_interval = ping_interval
self.logger = logger or logging.getLogger(__name__)
if self.proxy is None or len(self.proxy.strip()) == 0:
env_variable = load_http_proxy_from_env(self.logger)
if env_variable is not None:
self.proxy = env_variable
self.web_client = web_client or WebClient(
token=self.token,
base_url=self.base_url,
timeout=self.timeout,
ssl=self.ssl,
proxy=self.proxy,
headers=self.headers,
logger=logger,
)
self.on_message_listeners = on_message_listeners or []
self.on_error_listeners = on_error_listeners or []
self.on_close_listeners = on_close_listeners or []
self.trace_enabled = trace_enabled
self.all_message_trace_enabled = all_message_trace_enabled
self.ping_pong_trace_enabled = ping_pong_trace_enabled
self.message_queue = Queue()
def goodbye_listener(_self, event: dict):
if event.get("type") == "goodbye":
message = "Got a goodbye message. Reconnecting to the server ..."
self.logger.info(message)
self.connect_to_new_endpoint(force=True)
self.message_listeners = [goodbye_listener]
self.socket_mode_request_listeners = []
self.current_session = None
self.current_session_state = ConnectionState()
self.current_session_runner = IntervalRunner(
self._run_current_session, 0.1
).start()
self.wss_uri = None
self.current_app_monitor_started = False
self.current_app_monitor = IntervalRunner(
self._monitor_current_session,
self.ping_interval,
)
self.closed = False
self.connect_operation_lock = Lock()
self.message_processor = IntervalRunner(self.process_messages, 0.001).start()
self.message_workers = ThreadPoolExecutor(max_workers=concurrency)
# --------------------------------------------------------------
# Decorator to register listeners
# --------------------------------------------------------------
def on(self, event_type: str) -> Callable:
"""Registers a new event listener.
Args:
event_type: str representing an event's type (e.g., message, reaction_added)
"""
def __call__(*args, **kwargs):
func = args[0]
if func is not None:
if isinstance(func, Callable):
name = (
func.__name__
if hasattr(func, "__name__")
else f"{func.__class__.__module__}.{func.__class__.__name__}"
)
inspect_result: inspect.FullArgSpec = inspect.getfullargspec(func)
if inspect_result is not None and len(inspect_result.args) != 2:
actual_args = ", ".join(inspect_result.args)
error = f"The listener '{name}' must accept two args: client, event (actual: {actual_args})"
raise SlackClientError(error)
def new_message_listener(_self, event: dict):
actual_event_type = event.get("type")
if event.get("bot_id") == self.bot_id:
                            # Skip the events generated by this bot user
return
# https://github.com/slackapi/python-slack-sdk/issues/533
if event_type == "*" or (
actual_event_type is not None
and actual_event_type == event_type
):
func(_self, event)
self.message_listeners.append(new_message_listener)
else:
error = f"The listener '{func}' is not a Callable (actual: {type(func).__name__})"
raise SlackClientError(error)
# Not to cause modification to the decorated method
return func
return __call__
# --------------------------------------------------------------
# Connections
# --------------------------------------------------------------
def is_connected(self) -> bool:
"""Returns True if this client is connected."""
return self.current_session is not None and self.current_session.is_active()
def issue_new_wss_url(self) -> str:
"""Acquires a new WSS URL using rtm.connect API method"""
try:
api_response = self.web_client.rtm_connect()
return api_response["url"]
except SlackApiError as e:
if e.response["error"] == "ratelimited":
delay = int(e.response.headers.get("Retry-After", "30")) # Tier1
self.logger.info(f"Rate limited. Retrying in {delay} seconds...")
time.sleep(delay)
# Retry to issue a new WSS URL
return self.issue_new_wss_url()
else:
# other errors
self.logger.error(f"Failed to retrieve WSS URL: {e}")
raise e
def connect_to_new_endpoint(self, force: bool = False):
"""Acquires a new WSS URL and tries to connect to the endpoint."""
with self.connect_operation_lock:
if force or not self.is_connected():
self.logger.info("Connecting to a new endpoint...")
self.wss_uri = self.issue_new_wss_url()
self.connect()
self.logger.info("Connected to a new endpoint...")
def connect(self):
"""Starts talking to the RTM server through a WebSocket connection"""
if self.bot_id is None:
self.bot_id = self.web_client.auth_test()["bot_id"]
old_session: Optional[Connection] = self.current_session
old_current_session_state: ConnectionState = self.current_session_state
if self.wss_uri is None:
self.wss_uri = self.issue_new_wss_url()
current_session = Connection(
url=self.wss_uri,
logger=self.logger,
ping_interval=self.ping_interval,
trace_enabled=self.trace_enabled,
all_message_trace_enabled=self.all_message_trace_enabled,
ping_pong_trace_enabled=self.ping_pong_trace_enabled,
receive_buffer_size=1024,
proxy=self.proxy,
on_message_listener=self.run_all_message_listeners,
on_error_listener=self.run_all_error_listeners,
on_close_listener=self.run_all_close_listeners,
connection_type_name="RTM",
)
current_session.connect()
if old_current_session_state is not None:
old_current_session_state.terminated = True
if old_session is not None:
old_session.close()
self.current_session = current_session
self.current_session_state = ConnectionState()
self.auto_reconnect_enabled = self.default_auto_reconnect_enabled
if not self.current_app_monitor_started:
self.current_app_monitor_started = True
self.current_app_monitor.start()
self.logger.info(
f"A new session has been established (session id: {self.session_id()})"
)
def disconnect(self):
"""Disconnects the current session."""
self.current_session.disconnect()
def close(self) -> None:
"""
Closes this instance and cleans up underlying resources.
After calling this method, this instance is no longer usable.
"""
self.closed = True
self.disconnect()
self.current_session.close()
def start(self) -> None:
"""Establishes an RTM connection and blocks the current thread."""
self.connect()
Event().wait()
def send(self, payload: Union[dict, str]) -> None:
if payload is None:
return
if self.current_session is None or not self.current_session.is_active():
raise SlackClientError(
"The RTM client is not connected to the Slack servers"
)
if isinstance(payload, str):
self.current_session.send(payload)
else:
self.current_session.send(json.dumps(payload))
# --------------------------------------------------------------
# WS Message Processor
# --------------------------------------------------------------
def enqueue_message(self, message: str):
self.message_queue.put(message)
if self.logger.level <= logging.DEBUG:
self.logger.debug(
f"A new message enqueued (current queue size: {self.message_queue.qsize()})"
)
def process_message(self):
try:
raw_message = self.message_queue.get(timeout=1)
if self.logger.level <= logging.DEBUG:
self.logger.debug(
f"A message dequeued (current queue size: {self.message_queue.qsize()})"
)
if raw_message is not None:
message: dict = {}
if raw_message.startswith("{"):
message = json.loads(raw_message)
def _run_message_listeners():
self.run_message_listeners(message)
self.message_workers.submit(_run_message_listeners)
except Empty:
pass
def process_messages(self) -> None:
while not self.closed:
try:
self.process_message()
except Exception as e:
self.logger.exception(f"Failed to process a message: {e}")
def run_message_listeners(self, message: dict) -> None:
type = message.get("type")
if self.logger.level <= logging.DEBUG:
self.logger.debug(f"Message processing started (type: {type})")
try:
for listener in self.message_listeners:
try:
listener(self, message)
except Exception as e:
self.logger.exception(f"Failed to run a message listener: {e}")
except Exception as e:
self.logger.exception(f"Failed to run message listeners: {e}")
finally:
if self.logger.level <= logging.DEBUG:
self.logger.debug(f"Message processing completed (type: {type})")
# --------------------------------------------------------------
# Internals
# --------------------------------------------------------------
def session_id(self) -> Optional[str]:
if self.current_session is not None:
return self.current_session.session_id
return None
def run_all_message_listeners(self, message: str):
if self.logger.level <= logging.DEBUG:
self.logger.debug(f"on_message invoked: (message: {message})")
self.enqueue_message(message)
for listener in self.on_message_listeners:
listener(message)
def run_all_error_listeners(self, error: Exception):
self.logger.exception(
f"on_error invoked (session id: {self.session_id()}, "
f"error: {type(error).__name__}, message: {error})"
)
for listener in self.on_error_listeners:
listener(error)
def run_all_close_listeners(self, code: int, reason: Optional[str] = None):
if self.logger.level <= logging.DEBUG:
self.logger.debug(f"on_close invoked (session id: {self.session_id()})")
if self.auto_reconnect_enabled:
self.logger.info(
"Received CLOSE event. Going to reconnect... "
f"(session id: {self.session_id()})"
)
self.connect_to_new_endpoint()
for listener in self.on_close_listeners:
listener(code, reason)
def _run_current_session(self):
if self.current_session is not None and self.current_session.is_active():
session_id = self.session_id()
try:
self.logger.info(
"Starting to receive messages from a new connection"
f" (session id: {session_id})"
)
self.current_session_state.terminated = False
self.current_session.run_until_completion(self.current_session_state)
self.logger.info(
"Stopped receiving messages from a connection"
f" (session id: {session_id})"
)
except Exception as e:
self.logger.exception(
"Failed to start or stop the current session"
f" (session id: {session_id}, error: {e})"
)
def _monitor_current_session(self):
if self.current_app_monitor_started:
try:
self.current_session.check_state()
if self.auto_reconnect_enabled and (
self.current_session is None or not self.current_session.is_active()
):
self.logger.info(
"The session seems to be already closed. Going to reconnect... "
f"(session id: {self.session_id()})"
)
self.connect_to_new_endpoint()
except Exception as e:
self.logger.error(
"Failed to check the current session or reconnect to the server "
f"(session id: {self.session_id()}, error: {type(e).__name__}, message: {e})"
)
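# Illustrative usage sketch (not part of the published module). It assumes a
# valid bot token in the SLACK_BOT_TOKEN environment variable and that the bot
# has been invited to the channels it should listen to.
if __name__ == "__main__":
    import os
    rtm = RTMClient(token=os.environ["SLACK_BOT_TOKEN"])
    @rtm.on("message")
    def _echo(client: RTMClient, event: dict):
        # Post the received text back to the channel it came from.
        client.web_client.chat_postMessage(
            channel=event["channel"],
            text=f"You said: {event.get('text', '')}",
        )
    # Establishes the RTM connection and blocks the current thread.
    rtm.start()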
| slackhq/python-slackclient | slack_sdk/rtm_v2/__init__.py | Python | mit | 16,519 | 0.001574 |
# Copyright (c) 2014 Red Hat, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import multiprocessing
import os
import time
import fixtures
from neutron.agent.linux import utils
from neutron.tests import tools
class RecursivePermDirFixture(fixtures.Fixture):
"""Ensure at least perms permissions on directory and ancestors."""
def __init__(self, directory, perms):
super(RecursivePermDirFixture, self).__init__()
self.directory = directory
self.least_perms = perms
def _setUp(self):
previous_directory = None
current_directory = self.directory
while previous_directory != current_directory:
perms = os.stat(current_directory).st_mode
if perms & self.least_perms != self.least_perms:
os.chmod(current_directory, perms | self.least_perms)
previous_directory = current_directory
current_directory = os.path.dirname(current_directory)
class AdminDirFixture(fixtures.Fixture):
"""Handle directory create/delete with admin permissions required"""
def __init__(self, directory):
super(AdminDirFixture, self).__init__()
self.directory = directory
def _setUp(self):
# NOTE(cbrandily): Ensure we will not delete a directory existing
# before test run during cleanup.
if os.path.exists(self.directory):
tools.fail('%s already exists' % self.directory)
create_cmd = ['mkdir', '-p', self.directory]
delete_cmd = ['rm', '-r', self.directory]
utils.execute(create_cmd, run_as_root=True)
self.addCleanup(utils.execute, delete_cmd, run_as_root=True)
class SleepyProcessFixture(fixtures.Fixture):
"""Process fixture to perform time.sleep for a given number of seconds."""
def __init__(self, timeout=60):
super(SleepyProcessFixture, self).__init__()
self.timeout = timeout
@staticmethod
def yawn(seconds):
time.sleep(seconds)
def _setUp(self):
self.process = multiprocessing.Process(target=self.yawn,
args=[self.timeout])
self.process.start()
self.addCleanup(self.destroy)
def destroy(self):
self.process.terminate()
@property
def pid(self):
return self.process.pid
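# Illustrative usage sketch (assumed test code, not part of this module):
#
#     class MyAgentTestCase(base.BaseTestCase):
#         def setUp(self):
#             super(MyAgentTestCase, self).setUp()
#             # Make every ancestor of the state path at least world-accessible.
#             self.useFixture(RecursivePermDirFixture('/tmp/agent-state', 0o555))
#             # A throwaway process whose pid the test can monitor.
#             self.sleepy = self.useFixture(SleepyProcessFixture(timeout=30))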
| noironetworks/neutron | neutron/tests/functional/agent/linux/helpers.py | Python | apache-2.0 | 2,881 | 0 |
# Django
from django.apps import AppConfig
class BhsConfig(AppConfig):
name = 'apps.bhs'
verbose_name = 'Base'
def ready(self):
import algoliasearch_django as algoliasearch
from .indexes import AwardIndex
Award = self.get_model('award')
algoliasearch.register(Award, AwardIndex)
from .indexes import ChartIndex
Chart = self.get_model('chart')
algoliasearch.register(Chart, ChartIndex)
from .indexes import GroupIndex
Group = self.get_model('group')
algoliasearch.register(Group, GroupIndex)
from .indexes import PersonIndex
Person = self.get_model('person')
algoliasearch.register(Person, PersonIndex)
from .indexes import ConventionIndex
Convention = self.get_model('convention')
algoliasearch.register(Convention, ConventionIndex)
return
| dbinetti/barberscore-django | project/apps/bhs/config.py | Python | bsd-2-clause | 898 | 0 |
import unittest
from circular_buffer import (
CircularBuffer,
BufferFullException,
BufferEmptyException
)
class CircularBufferTest(unittest.TestCase):
def test_read_empty_buffer(self):
buf = CircularBuffer(1)
with self.assertRaises(BufferEmptyException):
buf.read()
def test_write_and_read_back_one_item(self):
buf = CircularBuffer(1)
buf.write('1')
self.assertEqual('1', buf.read())
with self.assertRaises(BufferEmptyException):
buf.read()
def test_write_and_read_back_multiple_items(self):
buf = CircularBuffer(2)
buf.write('1')
buf.write('2')
self.assertEqual(buf.read(), '1')
self.assertEqual(buf.read(), '2')
with self.assertRaises(BufferEmptyException):
buf.read()
def test_clearing_buffer(self):
buf = CircularBuffer(3)
for c in '123':
buf.write(c)
buf.clear()
with self.assertRaises(BufferEmptyException):
buf.read()
buf.write('1')
buf.write('2')
self.assertEqual(buf.read(), '1')
buf.write('3')
self.assertEqual(buf.read(), '2')
def test_alternate_write_and_read(self):
buf = CircularBuffer(2)
buf.write('1')
self.assertEqual(buf.read(), '1')
buf.write('2')
self.assertEqual(buf.read(), '2')
def test_read_back_oldest_item(self):
buf = CircularBuffer(3)
buf.write('1')
buf.write('2')
buf.read()
buf.write('3')
buf.read()
self.assertEqual(buf.read(), '3')
def test_write_full_buffer(self):
buf = CircularBuffer(2)
buf.write('1')
buf.write('2')
with self.assertRaises(BufferFullException):
buf.write('A')
def test_overwrite_full_buffer(self):
buf = CircularBuffer(2)
buf.write('1')
buf.write('2')
buf.overwrite('A')
self.assertEqual(buf.read(), '2')
self.assertEqual(buf.read(), 'A')
with self.assertRaises(BufferEmptyException):
buf.read()
def test_overwrite_non_full_buffer(self):
buf = CircularBuffer(2)
buf.overwrite('1')
buf.overwrite('2')
self.assertEqual(buf.read(), '1')
self.assertEqual(buf.read(), '2')
with self.assertRaises(BufferEmptyException):
buf.read()
def test_alternate_read_and_overwrite(self):
buf = CircularBuffer(5)
for c in '123':
buf.write(c)
buf.read()
buf.read()
buf.write('4')
buf.read()
for c in '5678':
buf.write(c)
buf.overwrite('A')
buf.overwrite('B')
self.assertEqual(buf.read(), '6')
self.assertEqual(buf.read(), '7')
self.assertEqual(buf.read(), '8')
self.assertEqual(buf.read(), 'A')
self.assertEqual(buf.read(), 'B')
with self.assertRaises(BufferEmptyException):
buf.read()
if __name__ == '__main__':
unittest.main()
| mweb/python | exercises/circular-buffer/circular_buffer_test.py | Python | mit | 3,083 | 0 |
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
# Server Specific Configurations
# See https://pecan.readthedocs.org/en/latest/configuration.html#server-configuration # noqa
server = {
'port': '6385',
'host': '0.0.0.0'
}
# Pecan Application Configurations
# See https://pecan.readthedocs.org/en/latest/configuration.html#application-configuration # noqa
app = {
'root': 'ironic.api.controllers.root.RootController',
'modules': ['ironic.api'],
'static_root': '%(confdir)s/public',
'debug': False,
'enable_acl': True,
'acl_public_routes': ['/', '/v1'],
}
# WSME Configurations
# See https://wsme.readthedocs.org/en/latest/integrate.html#configuration
wsme = {
'debug': cfg.CONF.debug,
}
| varunarya10/ironic | ironic/api/config.py | Python | apache-2.0 | 1,302 | 0 |
from molpher.algorithms.functions import find_path
from molpher.core import ExplorationTree as ETree
class BasicPathfinder:
"""
:param settings: settings to use in the search
:type settings: `Settings`
A very basic pathfinder class that can be used to run exploration with
any combination of operations.
"""
class MaxItersReachedException(Exception):
def __init__(self, tree):
super(BasicPathfinder.MaxItersReachedException, self).__init__(
"Maximum number of iterations reached while searching "
"for a path\n\t source: {0}\n\t target: {1}".format(tree.source, tree.target))
def __init__(self, settings, operations):
self.settings = settings
"""a settings class (should be a subclass of `Settings`)"""
self.tree = ETree.create(source=self.settings.source, target=self.settings.target)
""":class:`~molpher.core.ExplorationTree.ExplorationTree` used in the search"""
if self.settings.tree_params:
self.tree.params = self.settings.tree_params
self.tree.thread_count = self.settings.max_threads
self._iteration = operations
self.path = None
"""a list of SMILES strings if a path was found, `None` otherwise"""
def __call__(self):
"""
Executes the search
:return: discovered path
:rtype: `list` of `str`
"""
counter = 0
while not self.tree.path_found:
counter+=1
if counter > self.settings.max_iters:
raise BasicPathfinder.MaxItersReachedException(self.tree)
print('Iteration {0}'.format(counter))
for oper in self._iteration:
self.tree.runOperation(oper)
self.path = find_path(self.tree, self.tree.params['target'])
print('Path found:', self.path)
return self.path | lich-uct/molpher-lib | src/python/molpher/algorithms/pathfinders.py | Python | gpl-3.0 | 1,908 | 0.003145 |
"""
Models for code snippets and related data.
Most of these models also have custom managers defined which add
convenient shortcuts for repetitive or common bits of logic; see
``managers.py`` in this directory.
"""
import datetime, re
from django.db import connection, models
from django.template.defaultfilters import slugify
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
import managers
from markdown import markdown
from pygments import highlight, lexers, formatters
RATING_CHOICES = (
(-1, 'Not useful'),
(1, 'Useful')
)
class Language(models.Model):
"""
A language in which a Snippet can be written.
The ``language_code`` field should be set to an alias of a
Pygments lexer which is capable of processing this language.
The ``file_extension`` and ``mime_type`` fields will be used when
users download Snippets, to set the filename and HTTP Content-Type
of the download appropriately.
"""
name = models.CharField(max_length=50)
slug = models.SlugField(editable=False)
language_code = models.CharField(max_length=50,
help_text="This should be an alias of a Pygments lexer which can handle this language.")
file_extension = models.CharField(max_length=10,
help_text="The file extension to use when downloading Snippets in this Language; leave out the dot.")
mime_type = models.CharField(max_length=100,
help_text="The HTTP Content-Type to use when downloading Snippets in this Language.")
class Meta:
ordering = ('name',)
def save(self, *args, **kwargs):
if not self.id:
self.slug = slugify(self.name)
super(Language, self).save(*args, **kwargs)
def get_absolute_url(self):
return reverse('cab:snippets_by_language', kwargs={'slug': self.slug})
def __unicode__(self):
return self.name
def get_lexer(self):
"""
Returns an instance of the Pygments lexer for this language.
"""
return lexers.get_lexer_by_name(self.language_code)
class Tag(models.Model):
"""
A descriptive tag to be applied to a Snippet.
"""
name = models.CharField(max_length=50, unique=True)
slug = models.SlugField(editable=False)
class Meta:
ordering = ('name',)
def save(self, *args, **kwargs):
if not self.id:
self.slug = slugify(self.name)
super(Tag, self).save(*args, **kwargs)
def get_absolute_url(self):
return reverse('cab:snippets_by_tag', kwargs={'slug':self.slug})
def __unicode__(self):
return self.name
class Snippet(models.Model):
"""
A snippet of code in some Language.
This is slightly denormalized in two ways:
1. Because it's wasteful to run Pygments over the code each time
the Snippet is viewed, it is instead run on save, and two
copies of the code -- one the original input, the other
highlighted by Pygments -- are stored.
2. For much the same reason, Markdown is run over the Snippet's
description on save, instead of on each view, and the result
is stored in a separate column.
Also, Tags are added through the ``tag_list`` field which, after
the Snippet has been saved, will be iterated over to set up the
relationships to actual Tag objects.
"""
title = models.CharField(max_length=250)
language = models.ForeignKey(Language)
description = models.TextField(help_text="Accepts HTML.")
description_html = models.TextField(editable=False)
code = models.TextField()
highlighted_code = models.TextField(editable=False)
pub_date = models.DateTimeField(editable=False)
updated_date = models.DateTimeField(editable=False)
author = models.ForeignKey(User)
tag_list = models.CharField(max_length=250,
help_text="Separate tags with spaces. Maximum 250 characters.")
tags = models.ManyToManyField(Tag, editable=False)
original = models.ForeignKey('self', null=True, blank=True,
help_text="Optional. Fill this in if this Snippet is based on another.")
objects = managers.SnippetsManager()
class Meta:
ordering = ('-pub_date',)
def save(self, *args, **kwargs):
if not self.id:
self.pub_date = datetime.datetime.now()
self.updated_date = datetime.datetime.now()
self.description_html = self.sanitize(self.description)
# Use safe_mode in Markdown to prevent arbitrary tags.
# self.description_html = markdown(self.description, safe_mode=True)
self.highlighted_code = self.highlight()
self.tag_list = self.tag_list.lower() # Normalize to lower-case
super(Snippet, self).save(*args, **kwargs)
# Now that the Snippet is saved, deal with the tags.
current_tags = list(self.tags.all()) # We only want to query this once.
# Splitting to get the new tag list is tricky, because people
# will stick commas and other whitespace in the darndest places.
new_tag_list = [t for t in re.split('[\s,]+', self.tag_list) if t]
# First, clear out tags that aren't on the Snippet anymore.
for tag in current_tags:
if tag.name not in new_tag_list:
self.tags.remove(tag)
# Then add any new tags.
for tag_name in new_tag_list:
if tag_name not in [tag.name for tag in current_tags]:
tag, created = Tag.objects.get_or_create(name=tag_name)
self.tags.add(tag)
def sanitize(self, value):
from BeautifulSoup import BeautifulSoup, Comment
import re
js_regex = re.compile(r'[\s]*(&#x.{1,7})?'.join(list('javascript')))
allowed_tags = 'strong em a p br img'.split()
soup = BeautifulSoup(value)
for comment in soup.findAll(text=lambda text: isinstance(text, Comment)):
comment.extract()
for tag in soup.findAll(True):
if tag.name not in allowed_tags:
tag.hidden = True
return soup.renderContents().decode('utf8')
def __unicode__(self):
return self.title
def get_absolute_url(self):
return reverse('cab:snippet_detail', kwargs={'snippet_id': self.id})
def highlight(self):
"""
Returns this Snippet's originally-input code, highlighted via
Pygments.
"""
return highlight(self.code,
self.language.get_lexer(),
formatters.HtmlFormatter(linenos=True))
class Rating(models.Model):
"""
A particular User's rating of a particular Snippet.
"""
snippet = models.ForeignKey(Snippet)
user = models.ForeignKey(User)
date = models.DateTimeField(editable=False)
score = models.IntegerField(choices=RATING_CHOICES)
objects = managers.RatingsManager()
def save(self, *args, **kwargs):
if not self.id:
self.date = datetime.datetime.now()
super(Rating, self).save(*args, **kwargs)
def __unicode__(self):
return "%s rating '%s'" % (self.user.username, self.snippet.title)
class Bookmark(models.Model):
"""
A Snippet bookmarked by a User.
"""
snippet = models.ForeignKey(Snippet)
user = models.ForeignKey(User)
date = models.DateTimeField(editable=False, auto_now_add=True)
objects = managers.BookmarksManager()
class Meta:
ordering = ('date',)
def __unicode__(self):
return "%s bookmarked by %s" % (self.snippet.title, self.user.username)
| girasquid/cab | cab/models.py | Python | bsd-3-clause | 7,952 | 0.006916 |
"""
End-to-end tests for the simulator configuration. Sets up a server with
the backend, sends some basic queries to that server and verifies results
are as expected.
"""
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import unittest
import ga4gh.frontend as frontend
import ga4gh.protocol as protocol
import tests.utils as utils
class TestSimulatedStack(unittest.TestCase):
"""
Tests the full stack for the Simulated backend by using the Flask
testing client.
"""
@classmethod
def setUpClass(cls):
config = {
"DATA_SOURCE": "__SIMULATED__",
"SIMULATED_BACKEND_RANDOM_SEED": 1111,
"SIMULATED_BACKEND_NUM_CALLS": 0,
"SIMULATED_BACKEND_VARIANT_DENSITY": 1.0,
"SIMULATED_BACKEND_NUM_VARIANT_SETS": 10,
}
frontend.configure(
baseConfig="TestConfig", extraConfig=config)
cls.app = frontend.app.test_client()
@classmethod
def tearDownClass(cls):
cls.app = None
def setUp(self):
self.backend = frontend.app.backend
self.variantSetIds = [
variantSet.getId() for variantSet in
self.backend.getDataset().getVariantSets()]
def sendJsonPostRequest(self, path, data):
return self.app.post(
path, headers={'Content-type': 'application/json'},
data=data)
def testVariantSetsSearch(self):
expectedIds = self.variantSetIds
request = protocol.SearchVariantSetsRequest()
request.pageSize = len(expectedIds)
path = utils.applyVersion('/variantsets/search')
response = self.sendJsonPostRequest(
path, request.toJsonString())
self.assertEqual(200, response.status_code)
responseData = protocol.SearchVariantSetsResponse.fromJsonString(
response.data)
self.assertTrue(protocol.SearchVariantSetsResponse.validate(
responseData.toJsonDict()))
self.assertIsNone(responseData.nextPageToken)
self.assertEqual(len(expectedIds), len(responseData.variantSets))
for variantSet in responseData.variantSets:
self.assertTrue(variantSet.id in expectedIds)
def testVariantsSearch(self):
expectedIds = self.variantSetIds[:1]
referenceName = '1'
request = protocol.SearchVariantsRequest()
request.referenceName = referenceName
request.start = 0
request.end = 0
request.variantSetIds = expectedIds
        # Request window is too small, no results
path = utils.applyVersion('/variants/search')
response = self.sendJsonPostRequest(
path, request.toJsonString())
self.assertEqual(200, response.status_code)
responseData = protocol.SearchVariantsResponse.fromJsonString(
response.data)
self.assertIsNone(responseData.nextPageToken)
self.assertEqual([], responseData.variants)
# Larger request window, expect results
request.end = 2 ** 16
path = utils.applyVersion('/variants/search')
response = self.sendJsonPostRequest(
path, request.toJsonString())
self.assertEqual(200, response.status_code)
responseData = protocol.SearchVariantsResponse.fromJsonString(
response.data)
self.assertTrue(protocol.SearchVariantsResponse.validate(
responseData.toJsonDict()))
self.assertGreater(len(responseData.variants), 0)
# Verify all results are in the correct range, set and reference
for variant in responseData.variants:
self.assertGreaterEqual(variant.start, 0)
self.assertLessEqual(variant.end, 2 ** 16)
self.assertTrue(variant.variantSetId in expectedIds)
self.assertEqual(variant.referenceName, referenceName)
# TODO: Add more useful test scenarios, including some covering
# pagination behavior.
# TODO: Add test cases for other methods when they are implemented.
@unittest.skipIf(True, "")
def testCallSetsSearch(self):
# TODO remove the @skipIf decorator here once calls have been
# properly implemented in the simulator.
request = protocol.SearchCallSetsRequest()
request.name = None
path = utils.applyVersion('/callsets/search')
# when variantSetIds are wrong, no results
request.variantSetIds = ["xxxx"]
response = self.sendJsonPostRequest(
path, request.toJsonString())
self.assertEqual(200, response.status_code)
responseData = protocol.SearchCallSetsResponse.fromJsonString(
response.data)
self.assertIsNone(responseData.nextPageToken)
self.assertEqual([], responseData.callSets)
# if no callset name is given return all callsets
request.variantSetIds = self.variantSetIds[:1]
response = self.sendJsonPostRequest(
path, request.toJsonString())
self.assertEqual(200, response.status_code)
responseData = protocol.SearchCallSetsResponse.fromJsonString(
response.data)
self.assertTrue(protocol.SearchCallSetsResponse.validate(
responseData.toJsonDict()))
self.assertNotEqual([], responseData.callSets)
# TODO test the length of responseData.callSets equal to all callsets
# Verify all results are of the correct type and range
for callSet in responseData.callSets:
self.assertIs(type(callSet.info), dict)
self.assertIs(type(callSet.variantSetIds), list)
splits = callSet.id.split(".")
variantSetId = '.'.join(splits[:2])
callSetName = splits[-1]
self.assertIn(variantSetId, callSet.variantSetIds)
self.assertEqual(callSetName, callSet.name)
self.assertEqual(callSetName, callSet.sampleId)
# TODO add tests after name string search schemas is implemented
| shajoezhu/server | tests/unit/test_simulated_stack.py | Python | apache-2.0 | 6,031 | 0 |
# Copyright 2015 Michael Broxton
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from setuptools import setup
import spark_gce
import fnmatch
import os
support_files = []
for root, dirnames, filenames in os.walk('spark_gce/support_files'):
for filename in fnmatch.filter(filenames, '*'):
support_files.append(os.path.join(root, filename)[10:])
setup(
name='spark-gce',
packages=['spark_gce'],
version=str(spark_gce.__version__),
description='This script helps you create a Spark cluster on Google Compute Engine.',
author='Michael Broxton',
author_email='broxton@gmail.com',
url='https://github.com/broxtronix/spark-gce',
download_url = 'https://github.com/broxtronix/spark-gce/tarball/1.0.6',
scripts = ['bin/spark-gce'],
package_data = {'spark_gce': support_files},
install_requires=['boto']
)
| samklr/spark-gce | setup.py | Python | apache-2.0 | 1,354 | 0.00517 |
# from bigcommerce.connection import Connection, OAuthConnection, HttpException, ClientRequestException, \
# EmptyResponseWarning, RedirectionException, ServerException
import bigcommerce.resources
import bigcommerce.api
| hockeybuggy/bigcommerce-api-python | bigcommerce/__init__.py | Python | mit | 225 | 0.004444 |
import re
import abc
import warnings
import collections
from functools import partial
from bridgy.error import MissingBastionHost
with warnings.catch_warnings():
    # This warns about using the slow implementation of SequenceMatcher
# instead of the python-Levenshtein module, which requires compilation.
    # I'd prefer for users to simply use this tool without the need to
# compile since the search space is probably fairly small
warnings.filterwarnings("ignore", category=UserWarning)
from fuzzywuzzy import fuzz
class InstanceType:
ALL = 'ALL'
VM = 'VM'
ECS = 'ECS'
Bastion = collections.namedtuple("Bastion", "destination options")
Instance = collections.namedtuple("Instance", "name address aliases source container_id type")
# allow there to be optional kwargs that default to None
Instance.__new__.__defaults__ = (None,) * len(Instance._fields)
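# Illustrative example (not part of the original module): thanks to the
# defaults above, an Instance can be built from only the fields that are known,
# e.g. Instance(name='web-1', address='10.0.0.5', source='aws', type=InstanceType.VM);
# the remaining fields (aliases, container_id) simply default to None.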
class InventorySource(object):
__metaclass__ = abc.ABCMeta
name = "Invalid"
bastion = None
ssh_user = None
ssh_options = None
include_pattern = None
exclude_pattern = None
def __init__(self, *args, **kwargs):
if 'name' in kwargs:
self.name = "%s (%s)" % (kwargs['name'], self.name)
self.source = kwargs['name']
if 'bastion' in kwargs:
if 'address' not in kwargs['bastion']:
raise MissingBastionHost
if 'user' in kwargs['bastion']:
destination = '{user}@{host}'.format(user=kwargs['bastion']['user'],
host=kwargs['bastion']['address'])
else:
destination = kwargs['bastion']['address']
bastion_options = ''
if 'options' in kwargs['bastion']:
bastion_options = kwargs['bastion']['options']
self.bastion = Bastion(destination=destination, options=bastion_options)
if 'ssh' in kwargs:
if 'user' in kwargs['ssh']:
self.ssh_user = kwargs['ssh']['user']
if 'options' in kwargs['ssh']:
self.ssh_options = kwargs['ssh']['options']
else:
self.ssh_options = ''
if 'include_pattern' in kwargs:
self.include_pattern = kwargs['include_pattern']
if 'exclude_pattern' in kwargs:
self.exclude_pattern = kwargs['exclude_pattern']
def instance_filter(self, instance, include_re=None, exclude_re=None):
comparables = [instance.name, instance.address]
if instance.aliases:
comparables.extend(list(instance.aliases))
if include_re:
for name in comparables:
if include_re.search(name):
return True
return False
elif exclude_re:
for name in comparables:
if exclude_re.search(name):
return False
return True
else:
return True
def filter(self, all_instances):
include_re, exclude_re = None, None
if self.include_pattern:
include_re = re.compile(self.include_pattern)
if self.exclude_pattern:
exclude_re = re.compile(self.exclude_pattern)
config_instance_filter = partial(self.instance_filter, include_re=include_re, exclude_re=exclude_re)
return list(filter(config_instance_filter, all_instances))
@abc.abstractmethod
def update(self): pass
@abc.abstractmethod
def instances(self, stub=True): pass
def search(self, targets, partial=True, fuzzy=False):
allInstances = self.instances()
matchedInstances = set()
for host in targets:
for instance in allInstances:
names = [instance.name]
if instance.aliases != None:
names += list(instance.aliases)
for name in names:
if host.lower() == name.lower():
matchedInstances.add((100, instance))
elif partial and host.lower() in name.lower():
matchedInstances.add((99, instance))
if fuzzy:
score = fuzz.partial_ratio(host.lower(), name.lower())
if score > 85 or host.lower() in name.lower():
matchedInstances.add((score, instance))
# it is possible for the same instance to be matched, if so, it should only
# appear on the return list once (still ordered by the most probable match)
return list(collections.OrderedDict([(v, None) for k, v in sorted(list(matchedInstances))]).keys())
class InventorySet(InventorySource):
def __init__(self, inventories=None, **kwargs):
super(InventorySet, self).__init__(inventories, **kwargs)
self.inventories = []
if inventories != None:
if not isinstance(inventories, list) and not isinstance(inventories, tuple):
raise RuntimeError("InventorySet only takes a list of inventories. Given: %s" % repr(type(inventories)))
for inventory in inventories:
self.add(inventory)
def add(self, inventory):
if not isinstance(inventory, InventorySource):
raise RuntimeError("InventorySet item is not an inventory. Given: %s" % repr(type(inventory)))
self.inventories.append(inventory)
@property
def name(self):
return " + ".join([inventory.name for inventory in self.inventories])
def update(self, filter_sources=tuple()):
for inventory in self.inventories:
if len(filter_sources) == 0 or (len(filter_sources) > 0 and inventory.source in filter_sources):
inventory.update()
def instances(self, stub=True, filter_sources=tuple()):
instances = []
for inventory in self.inventories:
if len(filter_sources) == 0 or (len(filter_sources) > 0 and inventory.source in filter_sources):
instances.extend(inventory.instances())
return instances
def search(self, targets, partial=True, fuzzy=False, filter_sources=tuple()):
instances = []
for inventory in self.inventories:
if len(filter_sources) == 0 or (len(filter_sources) > 0 and inventory.source in filter_sources):
instances.extend(inventory.search(targets, partial, fuzzy))
return instances | wagoodman/bridgy | bridgy/inventory/source.py | Python | mit | 6,467 | 0.003402 |
salario = 1000
ano = 1996
while ano <= 2020:
salario *= 1.015
ano += 1
print("{0:.2f}".format(salario, 2))
#https://pt.stackoverflow.com/q/432854/101
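# Sanity check (illustrative): the loop applies 25 yearly raises of 1.5%
# (years 1996 through 2020 inclusive), so the printed value should equal the
# closed form 1000 * 1.015 ** 25.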
| bigown/SOpt | Python/Algorithm/Payroll.py | Python | mit | 159 | 0.006289 |
## @package work_queue_futures
# Python Work Queue bindings.
#
# This is a library on top of work_queue which replaces q.wait with the concept
# of futures.
#
# This is experimental.
#
# - @ref work_queue_futures::WorkQueue
# - @ref work_queue::Task
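#
# Illustrative usage sketch (not part of the original module); the port,
# command and callback below are hypothetical:
#
#     def report(task):
#         print(task.result())
#
#     q = WorkQueueFutures(port=9123, local_worker=True)
#     t = FutureTask('/bin/echo hello')
#     t.add_done_callback(report)
#     q.submit(t)
#     q.join(timeout=30)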
import work_queue
import multiprocessing
import os
import subprocess
import sys
import threading
import time
import traceback
import concurrent.futures as futures
import atexit
try:
# from py3
import queue as ThreadQueue
except ImportError:
# from py2
import Queue as ThreadQueue
##
# Python Work Queue object
#
# Implements an asynchronous WorkQueueFutures object.
# @ref work_queue_futures::WorkQueueFutures.
class WorkQueueFutures(object):
def __init__(self, *args, **kwargs):
local_worker_args = kwargs.get('local_worker', None)
if local_worker_args:
del kwargs['local_worker']
if local_worker_args is True:
# local_worker_args can be a dictionary of worker options, or
# simply 'True' to get the defaults (1 core, 512MB memory,
# 1000MB of disk)
local_worker_args = {}
# calls to synchronous WorkQueueFutures are coordinated with _queue_lock
self._queue_lock = threading.Lock()
self._stop_queue_event = threading.Event()
# set when queue is empty
self._join_event = threading.Event()
self._tasks_to_submit = ThreadQueue.Queue()
self._tasks_before_callbacks = ThreadQueue.Queue()
self._sync_loop = threading.Thread(target = self._sync_loop)
self._sync_loop.daemon = True
self._callback_loop = threading.Thread(target = self._callback_loop)
self._callback_loop.daemon = True
self._local_worker = None
self._queue = work_queue.WorkQueue(*args, **kwargs)
if local_worker_args:
self._local_worker = Worker(self.port, **local_worker_args)
self._sync_loop.start()
self._callback_loop.start()
atexit.register(self._terminate)
# methods not explicitly defined we route to synchronous WorkQueue, using a lock.
def __getattr__(self, name):
attr = getattr(self._queue, name)
if callable(attr):
def method_wrapped(*args, **kwargs):
result = None
with self._queue_lock:
result = attr(*args, **kwargs)
return result
return method_wrapped
else:
return attr
##
# Submit a task to the queue.
#
# @param self Reference to the current work queue object.
# @param task A task description created from @ref work_queue::Task.
def submit(self, future_task):
if isinstance(future_task, FutureTask):
self._tasks_to_submit.put(future_task, False)
else:
raise TypeError("{} is not a WorkQueue.Task")
##
# Disable wait when using the futures interface
def wait(self, *args, **kwargs):
raise AttributeError('wait cannot be used with the futures interface.')
##
# Determine whether there are any known tasks queued, running, or waiting to be collected.
#
# Returns 0 if there are tasks remaining in the system, 1 if the system is "empty".
#
# @param self Reference to the current work queue object.
def empty(self):
if self._tasks_to_submit.empty():
return self._queue.empty()
else:
return 0
def _callback_loop(self):
while not self._stop_queue_event.is_set():
task = None
try:
task = self._tasks_before_callbacks.get(True, 1)
task.set_result_or_exception()
self._tasks_before_callbacks.task_done()
except ThreadQueue.Empty:
pass
except Exception as e:
err = traceback.format_exc()
if task:
                    task.set_exception(FutureTaskError(task, err))
else:
print(err)
def _sync_loop(self):
# map from taskids to FutureTask objects
active_tasks = {}
while True:
try:
if self._stop_queue_event.is_set():
return
# if the queue is empty, we wait for tasks to be declared for
                # submission, otherwise _queue.wait returns immediately and we
# busy-wait
submit_timeout = 1
if len(active_tasks.keys()) > 0:
submit_timeout = 0
# do the submits, if any
empty = False
while not empty:
try:
task = self._tasks_to_submit.get(True, submit_timeout)
if not task.cancelled():
with self._queue_lock:
submit_timeout = 0
taskid = self._queue.submit(task)
task._set_queue(self)
active_tasks[task.id] = task
self._tasks_to_submit.task_done()
except ThreadQueue.Empty:
empty = True
# wait for any task
with self._queue_lock:
if not self._queue.empty():
task = self._queue.wait(1)
if task:
self._tasks_before_callbacks.put(task, False)
del active_tasks[task.id]
if len(active_tasks) == 0 and self._tasks_to_submit.empty():
self._join_event.set()
if self._local_worker:
self._local_worker.check_alive()
except Exception as e:
# on error, we set exception to all the known tasks so that .result() does not block
err = traceback.format_exc()
while not self._tasks_to_submit.empty():
try:
t = self._tasks_to_submit.get(False)
t.set_exception(FutureTaskError(t, err))
self._tasks_to_submit.task_done()
except ThreadQueue.Empty:
pass
while not self._tasks_before_callbacks.empty():
try:
t = self._tasks_before_callbacks.get(False)
t.set_exception(FutureTaskError(t, err))
self._tasks_before_callbacks.task_done()
except ThreadQueue.Empty:
pass
for t in active_tasks.values():
t.set_exception(FutureTaskError(t, err))
active_tasks.clear()
self._stop_queue_event.set()
def join(self, timeout=None):
now = time.time()
self._join_event.clear()
return self._join_event.wait(timeout)
def _terminate(self):
self._stop_queue_event.set()
for thread in [self._sync_loop, self._callback_loop]:
try:
thread.join()
except RuntimeError:
pass
if self._local_worker:
try:
self._local_worker.shutdown()
except Exception as e:
pass
def __del__(self):
self._terminate()
class FutureTask(work_queue.Task):
valid_runtime_envs = ['conda', 'singularity']
def __init__(self, command):
super(FutureTask, self).__init__(command)
self._queue = None
self._cancelled = False
self._exception = None
self._done_event = threading.Event()
self._callbacks = []
self._runtime_env_type = None
@property
def queue(self):
return self._queue
def _set_queue(self, queue):
self._queue = queue
self.set_running_or_notify_cancel()
def cancel(self):
if self.queue:
self.queue.cancel_by_taskid(self.id)
self._cancelled = True
self._done_event.set()
self._invoke_callbacks()
return self.cancelled()
def cancelled(self):
return self._cancelled
def done(self):
return self._done_event.is_set()
def running(self):
return (self._queue is not None) and (not self.done())
def result(self, timeout=None):
if self.cancelled():
raise futures.CancelledError
# wait for task to be done event if not done already
self._done_event.wait(timeout)
if self.done():
if self._exception is not None:
raise self._exception
else:
return self._result
else:
# Raise exception if task not done by timeout
raise futures.TimeoutError(timeout)
def exception(self, timeout=None):
if self.cancelled():
raise futures.CancelledError
self._done_event.wait(timeout)
if self.done():
return self._exception
else:
raise futures.TimeoutError(timeout)
def add_done_callback(self, fn):
"""
Attaches the callable fn to the future. fn will be called, with the
future as its only argument, when the future is cancelled or finishes
running. Added callables are called in the order that they were added
and are always called in a thread belonging to the process that added
them.
If the callable raises an Exception subclass, it will be logged and
ignored. If the callable raises a BaseException subclass, the behavior
is undefined.
If the future has already completed or been cancelled, fn will be
called immediately.
"""
if self.done():
fn(self)
else:
self._callbacks.append(fn)
def _invoke_callbacks(self):
self._done_event.set()
for fn in self._callbacks:
try:
fn(self)
except Exception as e:
sys.stderr.write('Error when executing future object callback:\n')
traceback.print_exc()
def set_result_or_exception(self):
result = self._task.result
if result == work_queue.WORK_QUEUE_RESULT_SUCCESS and self.return_status == 0:
self.set_result(True)
else:
self.set_exception(FutureTaskError(self))
def set_running_or_notify_cancel(self):
if self.cancelled():
return False
else:
return True
def set_result(self, result):
self._result = result
self._invoke_callbacks()
def set_exception(self, exception):
self._exception = exception
self._invoke_callbacks()
def specify_runtime_env(self, type, filename):
import _work_queue
if type not in FutureTask.valid_runtime_envs:
raise FutureTaskError("Runtime '{}' type is not one of {}".format(type, FutureTask.valid_runtime_envs))
self._runtime_env_type = type
if type == 'conda':
conda_env = 'conda_env.tar.gz'
self.specify_input_file(filename, conda_env, cache = True)
command = 'mkdir -p conda_env && tar xf {} -C conda_env && source conda_env/bin/activate && {}'.format(conda_env, self.command)
_work_queue.work_queue_task_command_line_set(self._task, command)
elif type == 'singularity':
sin_env = 'sin_env.img'
self.specify_input_file(filename, sin_env, cache = True)
command = 'singularity exec -B $(pwd):/wq-sandbox --pwd /wq-sandbox {} -- {}'.format(sin_env, self.command)
_work_queue.work_queue_task_command_line_set(self._task, command)
class Worker(object):
def __init__(self, port, executable='work_queue_worker', cores=1, memory=512, disk=1000):
self._proc = None
self._port = port
self._executable = executable
self._cores = cores
self._memory = memory
self._disk = disk
self._permanent_error = None
self.devnull = open(os.devnull, 'w')
self.check_alive()
def check_alive(self):
if self._permanent_error is not None:
            raise Exception(self._permanent_error)
if self._proc and self._proc.is_alive():
return True
if self._proc:
self._proc.join()
if self._proc.exitcode != 0:
self._permanent_error = self._proc.exitcode
return False
return self._launch_worker()
def shutdown(self):
if not self._proc:
return
if self._proc.is_alive():
self._proc.terminate()
self._proc.join()
def _launch_worker(self):
args = [self._executable,
'--single-shot',
'--cores', self._cores,
'--memory', self._memory,
'--disk', self._disk,
'--timeout', 300,
'localhost',
self._port]
args = [str(x) for x in args]
self._proc = multiprocessing.Process(target=lambda: subprocess.check_call(args, stderr=self.devnull, stdout=self.devnull), daemon=True)
self._proc.start()
return self.check_alive()
class FutureTaskError(Exception):
_state_to_msg = {
work_queue.WORK_QUEUE_RESULT_SUCCESS: 'Success',
work_queue.WORK_QUEUE_RESULT_INPUT_MISSING: 'Input file is missing',
work_queue.WORK_QUEUE_RESULT_OUTPUT_MISSING: 'Output file is missing',
work_queue.WORK_QUEUE_RESULT_STDOUT_MISSING: 'stdout is missing',
work_queue.WORK_QUEUE_RESULT_SIGNAL: 'Signal received',
work_queue.WORK_QUEUE_RESULT_RESOURCE_EXHAUSTION: 'Resources exhausted',
work_queue.WORK_QUEUE_RESULT_TASK_TIMEOUT: 'Task timed-out before completion',
work_queue.WORK_QUEUE_RESULT_UNKNOWN: 'Unknown error',
work_queue.WORK_QUEUE_RESULT_FORSAKEN: 'Internal error',
work_queue.WORK_QUEUE_RESULT_MAX_RETRIES: 'Maximum number of retries reached',
work_queue.WORK_QUEUE_RESULT_TASK_MAX_RUN_TIME: 'Task did not finish before deadline',
work_queue.WORK_QUEUE_RESULT_DISK_ALLOC_FULL: 'Disk allocation for the task is full'
}
def __init__(self, task, exception = None):
self.task = task
self.exit_status = None
self.state = None
self.exception = None
if exception:
self.exception = exception
else:
self.exit_status = task.return_status
self.state = task._task.result
def __str__(self):
if self.exception:
return str(self.exception)
msg = self._state_to_str()
if not msg:
return str(self.state)
if self.state != work_queue.WORK_QUEUE_RESULT_SUCCESS or self.exit_status == 0:
return msg
else:
return 'Execution completed with exit status {}'.format(self.exit_status)
def _state_to_str(self):
return FutureTaskError._state_to_msg.get(self.state, None)
| nkremerh/cctools | work_queue/src/bindings/python2/work_queue_futures.py | Python | gpl-2.0 | 15,412 | 0.003634 |
# coding: utf-8
from flask import Flask, session, redirect, url_for, request,abort
import config
config = config.rec()
def on_finish():
    pass
def currentUserGet():
if 'user' in session:
user = session['user']
return user['username']
else:
return None
def currentUserSet(username):
if username:
session['user'] = dict({'username':username})
else:
session.pop('user',None)
def replyerSet(name, email, website):
if name:
session['replyer'] = dict({'name': name, 'email': email,'website': website})
else:
session.pop('replyer',None)
def replyerGet():
if 'replyer' in session:
reply = session['replyer']
name = reply['name']
return name
else:
return None
def userAuth(username, password):
return username == config.admin_username and password == config.admin_password
def isAdmin():
return currentUserGet() == config.admin_username
def checkAdmin():
if not isAdmin():
abort(404)
def get_current_user():
return currentUserGet()
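# Illustrative usage sketch (not part of the original module); the app object,
# route and form fields below are hypothetical:
#
#     @app.route('/login', methods=['POST'])
#     def login():
#         if userAuth(request.form['username'], request.form['password']):
#             currentUserSet(request.form['username'])
#             return redirect(url_for('index'))
#         abort(403)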
| maxis1314/pyutils | web/views/base.py | Python | apache-2.0 | 1,087 | 0.014719 |
#!/usr/bin/env python3
# Copyright (c) 2014-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test node disconnect and ban behavior"""
import time
from test_framework.test_framework import GuldenTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
connect_nodes_bi,
wait_until,
)
class DisconnectBanTest(GuldenTestFramework):
def set_test_params(self):
self.num_nodes = 2
def run_test(self):
self.log.info("Test setban and listbanned RPCs")
self.log.info("setban: successfully ban single IP address")
assert_equal(len(self.nodes[1].getpeerinfo()), 2) # node1 should have 2 connections to node0 at this point
self.nodes[1].setban(subnet="127.0.0.1", command="add")
wait_until(lambda: len(self.nodes[1].getpeerinfo()) == 0, timeout=10)
assert_equal(len(self.nodes[1].getpeerinfo()), 0) # all nodes must be disconnected at this point
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("clearbanned: successfully clear ban list")
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].setban("127.0.0.0/24", "add")
self.log.info("setban: fail to ban an already banned subnet")
assert_equal(len(self.nodes[1].listbanned()), 1)
assert_raises_rpc_error(-23, "IP/Subnet already banned", self.nodes[1].setban, "127.0.0.1", "add")
self.log.info("setban: fail to ban an invalid subnet")
assert_raises_rpc_error(-30, "Error: Invalid IP/Subnet", self.nodes[1].setban, "127.0.0.1/42", "add")
assert_equal(len(self.nodes[1].listbanned()), 1) # still only one banned ip because 127.0.0.1 is within the range of 127.0.0.0/24
self.log.info("setban remove: fail to unban a non-banned subnet")
assert_raises_rpc_error(-30, "Error: Unban failed", self.nodes[1].setban, "127.0.0.1", "remove")
assert_equal(len(self.nodes[1].listbanned()), 1)
self.log.info("setban remove: successfully unban subnet")
self.nodes[1].setban("127.0.0.0/24", "remove")
assert_equal(len(self.nodes[1].listbanned()), 0)
self.nodes[1].clearbanned()
assert_equal(len(self.nodes[1].listbanned()), 0)
self.log.info("setban: test persistence across node restart")
self.nodes[1].setban("127.0.0.0/32", "add")
self.nodes[1].setban("127.0.0.0/24", "add")
# Set the mocktime so we can control when bans expire
old_time = int(time.time())
self.nodes[1].setmocktime(old_time)
self.nodes[1].setban("192.168.0.1", "add", 1) # ban for 1 seconds
self.nodes[1].setban("2001:4d48:ac57:400:cacf:e9ff:fe1d:9c63/19", "add", 1000) # ban for 1000 seconds
listBeforeShutdown = self.nodes[1].listbanned()
assert_equal("192.168.0.1/32", listBeforeShutdown[2]['address'])
# Move time forward by 3 seconds so the third ban has expired
self.nodes[1].setmocktime(old_time + 3)
assert_equal(len(self.nodes[1].listbanned()), 3)
self.stop_node(1)
self.start_node(1)
listAfterShutdown = self.nodes[1].listbanned()
assert_equal("127.0.0.0/24", listAfterShutdown[0]['address'])
assert_equal("127.0.0.0/32", listAfterShutdown[1]['address'])
assert_equal("/19" in listAfterShutdown[2]['address'], True)
# Clear ban lists
self.nodes[1].clearbanned()
connect_nodes_bi(self.nodes, 0, 1)
self.log.info("Test disconnectnode RPCs")
self.log.info("disconnectnode: fail to disconnect when calling with address and nodeid")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
node1 = self.nodes[0].getpeerinfo()[0]['addr']
assert_raises_rpc_error(-32602, "Only one of address and nodeid should be provided.", self.nodes[0].disconnectnode, address=address1, node_id=node1)
self.log.info("disconnectnode: fail to disconnect when calling with junk address")
assert_raises_rpc_error(-29, "Node not found in connected nodes", self.nodes[0].disconnectnode, address="221B Baker Street")
self.log.info("disconnectnode: successfully disconnect node by address")
address1 = self.nodes[0].getpeerinfo()[0]['addr']
self.nodes[0].disconnectnode(address=address1)
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully reconnect node")
connect_nodes_bi(self.nodes, 0, 1) # reconnect the node
assert_equal(len(self.nodes[0].getpeerinfo()), 2)
assert [node for node in self.nodes[0].getpeerinfo() if node['addr'] == address1]
self.log.info("disconnectnode: successfully disconnect node by node id")
id1 = self.nodes[0].getpeerinfo()[0]['id']
self.nodes[0].disconnectnode(node_id=id1)
wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 1, timeout=10)
assert not [node for node in self.nodes[0].getpeerinfo() if node['id'] == id1]
if __name__ == '__main__':
DisconnectBanTest().main()
| nlgcoin/guldencoin-official | test/functional/p2p_disconnect_ban.py | Python | mit | 5,333 | 0.003375 |
"""
WSGI config for coursing_field project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "coursing_field.settings")
application = get_wsgi_application()
| PNNutkung/Coursing-Field | coursing_field/wsgi.py | Python | apache-2.0 | 406 | 0 |
# Copyright 2009-2011 Canonical Ltd. This software is licensed under the
# GNU Affero General Public License version 3 (see the file LICENSE).
__metaclass__ = type
import doctest
import unittest
from lp.testing.layers import DatabaseFunctionalLayer
def test_suite():
suite = unittest.TestSuite()
suite.layer = DatabaseFunctionalLayer
suite.addTest(doctest.DocTestSuite('lp.app.widgets.textwidgets'))
suite.addTest(doctest.DocTestSuite('lp.app.widgets.date'))
return suite
| abramhindle/UnnaturalCodeFork | python/testdata/launchpad/lib/lp/app/widgets/tests/test_widget_doctests.py | Python | agpl-3.0 | 498 | 0 |
import forecastio
class ForecastAPI:
_API_KEY = "8eefab4d187a39b993ca9c875fef6159"
_LAZY = False
_LAT = 0
_LNG = 0
_forecast = ()
def __init__(self,key,lat,lng,lazy=False):
self._LAT = lat
self._LNG = lng
self._API_KEY = key
self._LAZY = lazy
self._forecast = forecastio.load_forecast(self._API_KEY,self._LAT,self._LNG,lazy=lazy)
def get_7day_forecast_detailed(self):
return self._forecast.daily().data
"""
Help getting cloud data from the future
"""
def get_7_day_cloudCover(self):
c_data = self._forecast.daily().data
cloud_results = {}
for day in c_data:
cloud_results[day.time.isoformat()] = day.cloudCover
return cloud_results
"""
Helper on getting cloud sunrise and sunset data
"""
def get_n_day_minimal_solar(self,n_days):
c_data = self._forecast.daily().data
sun_results = {}
count = 0
for day in c_data:
if count < n_days:
sun_results[day.time.isoformat()] = {"sunrise":day.sunriseTime,"sunset":day.sunsetTime,"stat":day.icon,"cloudcover":day.cloudCover}
count = count + 1
return sun_results
"""
Helper on getting cloud sunrise and sunset data from the past
"""
def get_historical_day_minimal_solar(self,days):
#TODO get temp just for reference
sun_results = {}
for day in days:
print "getting date for %s"%day
self._forecast = forecastio.load_forecast(self._API_KEY,self._LAT,self._LNG,lazy=self._LAZY,time=day)
c_data = self._forecast.daily().data
for f_day in c_data:
print "adding date for %s"%f_day
sun_results[day.isoformat()] = {"sunrise":f_day.sunriseTime,"sunset":f_day.sunsetTime,"stat":f_day.icon,"cloudcover":f_day.cloudCover}
return sun_results
| GreatLakesEnergy/sesh-dash-beta | seshdash/api/forecast.py | Python | mit | 1,952 | 0.019467 |
# Copyright 2014-2016 The Meson development team
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from . import coredata, environment, mesonlib, build, mintro, mlog
from .ast import AstIDGenerator
def add_arguments(parser):
coredata.register_builtin_arguments(parser)
parser.add_argument('builddir', nargs='?', default='.')
parser.add_argument('--clearcache', action='store_true', default=False,
help='Clear cached state (e.g. found dependencies)')
def make_lower_case(val):
if isinstance(val, bool):
return str(val).lower()
elif isinstance(val, list):
return [make_lower_case(i) for i in val]
else:
return str(val)
class ConfException(mesonlib.MesonException):
pass
class Conf:
def __init__(self, build_dir):
self.build_dir = os.path.abspath(os.path.realpath(build_dir))
if 'meson.build' in [os.path.basename(self.build_dir), self.build_dir]:
self.build_dir = os.path.dirname(self.build_dir)
self.build = None
self.max_choices_line_length = 60
self.name_col = []
self.value_col = []
self.choices_col = []
self.descr_col = []
self.has_choices = False
self.all_subprojects = set()
self.yielding_options = set()
if os.path.isdir(os.path.join(self.build_dir, 'meson-private')):
self.build = build.load(self.build_dir)
self.source_dir = self.build.environment.get_source_dir()
self.coredata = coredata.load(self.build_dir)
self.default_values_only = False
elif os.path.isfile(os.path.join(self.build_dir, environment.build_filename)):
# Make sure that log entries in other parts of meson don't interfere with the JSON output
mlog.disable()
self.source_dir = os.path.abspath(os.path.realpath(self.build_dir))
intr = mintro.IntrospectionInterpreter(self.source_dir, '', 'ninja', visitors = [AstIDGenerator()])
intr.analyze()
# Re-enable logging just in case
mlog.enable()
self.coredata = intr.coredata
self.default_values_only = True
else:
raise ConfException('Directory {} is neither a Meson build directory nor a project source directory.'.format(build_dir))
def clear_cache(self):
self.coredata.deps.host.clear()
self.coredata.deps.build.clear()
def set_options(self, options):
self.coredata.set_options(options)
def save(self):
# Do nothing when using introspection
if self.default_values_only:
return
# Only called if something has changed so overwrite unconditionally.
coredata.save(self.coredata, self.build_dir)
# We don't write the build file because any changes to it
# are erased when Meson is executed the next time, i.e. when
# Ninja is run.
def print_aligned(self):
col_widths = (max([len(i) for i in self.name_col], default=0),
max([len(i) for i in self.value_col], default=0),
max([len(i) for i in self.choices_col], default=0))
for line in zip(self.name_col, self.value_col, self.choices_col, self.descr_col):
if self.has_choices:
print('{0:{width[0]}} {1:{width[1]}} {2:{width[2]}} {3}'.format(*line, width=col_widths))
else:
print('{0:{width[0]}} {1:{width[1]}} {3}'.format(*line, width=col_widths))
def split_options_per_subproject(self, options):
result = {}
for k, o in options.items():
subproject = ''
if ':' in k:
subproject, optname = k.split(':')
if o.yielding and optname in options:
self.yielding_options.add(k)
self.all_subprojects.add(subproject)
result.setdefault(subproject, {})[k] = o
return result
def _add_line(self, name, value, choices, descr):
self.name_col.append(' ' * self.print_margin + name)
self.value_col.append(value)
self.choices_col.append(choices)
self.descr_col.append(descr)
def add_option(self, name, descr, value, choices):
if isinstance(value, list):
value = '[{0}]'.format(', '.join(make_lower_case(value)))
else:
value = make_lower_case(value)
if choices:
self.has_choices = True
if isinstance(choices, list):
choices_list = make_lower_case(choices)
current = '['
while choices_list:
i = choices_list.pop(0)
if len(current) + len(i) >= self.max_choices_line_length:
self._add_line(name, value, current + ',', descr)
name = ''
value = ''
descr = ''
current = ' '
if len(current) > 1:
current += ', '
current += i
choices = current + ']'
else:
choices = make_lower_case(choices)
else:
choices = ''
self._add_line(name, value, choices, descr)
def add_title(self, title):
titles = {'descr': 'Description', 'value': 'Current Value', 'choices': 'Possible Values'}
if self.default_values_only:
titles['value'] = 'Default Value'
self._add_line('', '', '', '')
self._add_line(title, titles['value'], titles['choices'], titles['descr'])
self._add_line('-' * len(title), '-' * len(titles['value']), '-' * len(titles['choices']), '-' * len(titles['descr']))
def add_section(self, section):
self.print_margin = 0
self._add_line('', '', '', '')
self._add_line(section + ':', '', '', '')
self.print_margin = 2
def print_options(self, title, options):
if not options:
return
if title:
self.add_title(title)
for k, o in sorted(options.items()):
printable_value = o.printable_value()
if k in self.yielding_options:
printable_value = '<inherited from main project>'
self.add_option(k, o.description, printable_value, o.choices)
def print_conf(self):
def print_default_values_warning():
mlog.warning('The source directory instead of the build directory was specified.')
mlog.warning('Only the default values for the project are printed, and all command line parameters are ignored.')
if self.default_values_only:
print_default_values_warning()
print('')
print('Core properties:')
print(' Source dir', self.source_dir)
if not self.default_values_only:
print(' Build dir ', self.build_dir)
dir_option_names = ['bindir',
'datadir',
'includedir',
'infodir',
'libdir',
'libexecdir',
'localedir',
'localstatedir',
'mandir',
'prefix',
'sbindir',
'sharedstatedir',
'sysconfdir']
test_option_names = ['errorlogs',
'stdsplit']
core_option_names = [k for k in self.coredata.builtins if k not in dir_option_names + test_option_names]
dir_options = {k: o for k, o in self.coredata.builtins.items() if k in dir_option_names}
test_options = {k: o for k, o in self.coredata.builtins.items() if k in test_option_names}
core_options = {k: o for k, o in self.coredata.builtins.items() if k in core_option_names}
def insert_build_prefix(k):
idx = k.find(':')
if idx < 0:
return 'build.' + k
return k[:idx + 1] + 'build.' + k[idx + 1:]
core_options = self.split_options_per_subproject(core_options)
host_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
self.coredata.compiler_options.host.items())))
build_compiler_options = self.split_options_per_subproject(
dict(self.coredata.flatten_lang_iterator(
(insert_build_prefix(k), o)
for k, o in self.coredata.compiler_options.build.items())))
project_options = self.split_options_per_subproject(self.coredata.user_options)
show_build_options = self.default_values_only or self.build.environment.is_cross_build()
self.add_section('Main project options')
self.print_options('Core options', core_options[''])
self.print_options('', self.coredata.builtins_per_machine.host)
if show_build_options:
self.print_options('', {insert_build_prefix(k): o for k, o in self.coredata.builtins_per_machine.build.items()})
self.print_options('Backend options', self.coredata.backend_options)
self.print_options('Base options', self.coredata.base_options)
self.print_options('Compiler options', host_compiler_options.get('', {}))
if show_build_options:
self.print_options('', build_compiler_options.get('', {}))
self.print_options('Directories', dir_options)
self.print_options('Testing options', test_options)
self.print_options('Project options', project_options.get('', {}))
for subproject in sorted(self.all_subprojects):
if subproject == '':
continue
self.add_section('Subproject ' + subproject)
if subproject in core_options:
self.print_options('Core options', core_options[subproject])
if subproject in host_compiler_options:
self.print_options('Compiler options', host_compiler_options[subproject])
if subproject in build_compiler_options and show_build_options:
self.print_options('', build_compiler_options[subproject])
if subproject in project_options:
self.print_options('Project options', project_options[subproject])
self.print_aligned()
# Print the warning twice so that the user shouldn't be able to miss it
if self.default_values_only:
print('')
print_default_values_warning()
def run(options):
coredata.parse_cmd_line_options(options)
builddir = os.path.abspath(os.path.realpath(options.builddir))
c = None
try:
c = Conf(builddir)
if c.default_values_only:
c.print_conf()
return 0
save = False
if len(options.cmd_line_options) > 0:
c.set_options(options.cmd_line_options)
coredata.update_cmd_line_file(builddir, options)
save = True
elif options.clearcache:
c.clear_cache()
save = True
else:
c.print_conf()
if save:
c.save()
mintro.update_build_options(c.coredata, c.build.environment.info_dir)
mintro.write_meson_info_file(c.build, [])
except ConfException as e:
print('Meson configurator encountered an error:')
if c is not None and c.build is not None:
mintro.write_meson_info_file(c.build, [e])
raise e
return 0
| becm/meson | mesonbuild/mconf.py | Python | apache-2.0 | 12,095 | 0.002232 |
from scipy import *
from pylab import *
num_detectors = 100
x = 0.5+0.25*arange(0,float(num_detectors))/float(num_detectors)
y = zeros(num_detectors) + 0.5
t = 0.
n_cycles = 1
dt = 0.1/n_cycles
tmax = 8
def vel(x,y):
return [-(y-0.5),x-0.5]
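# The velocity field above is a rigid rotation about (0.5, 0.5), so every
# detector should trace a circle. The loop below advances the positions with
# the classical fourth-order Runge-Kutta scheme:
#   x_{n+1} = x_n + dt*(k1 + 2*k2 + 2*k3 + k4)/6
# with k1..k4 the velocities sampled at the usual RK4 sub-stages.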
while(t<tmax):
t = t + dt
[k1_x,k1_y] = vel(x,y)
[k2_x,k2_y] = vel(x+0.5*dt*k1_x,y+0.5*dt*k1_y)
[k3_x,k3_y] = vel(x+0.5*dt*k2_x,y+0.5*dt*k2_y)
[k4_x,k4_y] = vel(x+dt*k3_x,y+dt*k3_y)
x = x + dt*(k1_x/6.+k2_x/3. + k3_x/3. + k4_x/6.)
y = y + dt*(k1_y/6.+k2_y/3. + k3_y/3. + k4_y/6.)
plot(x,y,'.')
#show()
x.tofile('Xvals.txt',sep=' ')
y.tofile('Yvals.txt',sep=' ')
| FluidityStokes/fluidity | tests/lagrangian_detectors_3d_1e2/get_RK_traj.py | Python | lgpl-2.1 | 640 | 0.029688 |