repo_name
stringlengths 7
111
| __id__
int64 16.6k
19,705B
| blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
151
| content_id
stringlengths 40
40
| detected_licenses
list | license_type
stringclasses 2
values | repo_url
stringlengths 26
130
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
42
| visit_date
timestamp[ns] | revision_date
timestamp[ns] | committer_date
timestamp[ns] | github_id
int64 14.6k
687M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 12
values | gha_fork
bool 2
classes | gha_event_created_at
timestamp[ns] | gha_created_at
timestamp[ns] | gha_updated_at
timestamp[ns] | gha_pushed_at
timestamp[ns] | gha_size
int64 0
10.2M
⌀ | gha_stargazers_count
int32 0
178k
⌀ | gha_forks_count
int32 0
88.9k
⌀ | gha_open_issues_count
int32 0
2.72k
⌀ | gha_language
stringlengths 1
16
⌀ | gha_archived
bool 1
class | gha_disabled
bool 1
class | content
stringlengths 10
2.95M
| src_encoding
stringclasses 5
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 10
2.95M
| extension
stringclasses 19
values | num_repo_files
int64 1
202k
| filename
stringlengths 4
112
| num_lang_files
int64 1
202k
| alphanum_fraction
float64 0.26
0.89
| alpha_fraction
float64 0.2
0.89
| hex_fraction
float64 0
0.09
| num_lines
int32 1
93.6k
| avg_line_length
float64 4.57
103
| max_line_length
int64 7
931
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
vanessamaike/mc102 | 2,800,318,701,523 | 9a14c3bc9de403a3a2a6ca4cf5d713b0a35aa10c | adda4511e32ded29f982340a1a45c422be277ea1 | /code/aula15/a15-ex1.py | bc7718545c56f2e661d3c60b2007a57956cd27b3 | []
| no_license | https://github.com/vanessamaike/mc102 | a1b1bf5e883c763de1be7d9616016e1acbb5a9a2 | 4f4d49bb5f3c6261de575e847f6100751112eed0 | refs/heads/master | 2020-06-29T05:44:35.710792 | 2019-12-16T12:55:53 | 2019-12-16T12:55:53 | 200,454,546 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | n = 4
# Build the n x n matrix whose (i, j) entry is i + j, then print it.
mat = [[i + j for j in range(n)] for i in range(n)]
print(mat)
| UTF-8 | Python | false | false | 134 | py | 162 | a15-ex1.py | 161 | 0.514925 | 0.507463 | 0 | 10 | 12.4 | 25 |
vishalchotara/patient_management_system | 18,339,510,386,173 | 7779647e20b5daf5d52d8b4982926eb972beda6f | 1e86acf1ad511803e7e2c4ab22b364d2bba2d3c9 | /hospitals.py | 6d00609d17fcf0301d0968494e387263c32fb877 | []
| no_license | https://github.com/vishalchotara/patient_management_system | 4bd584fe07d8d0ce0352452136ad7e67a4ca7b5a | d5fdae95bfb10d5d5424432a6eb496eb21e1465a | refs/heads/main | 2023-05-31T05:59:07.631242 | 2021-06-26T17:16:39 | 2021-06-26T17:16:39 | 380,553,708 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Hospital:
    def __init__(self, name, total_beds, num_beds_occupied, curr_patients):
        """
        Initialize a Hospital.
        :param name: str - hospital name
        :param total_beds: int - total bed capacity
        :param num_beds_occupied: int - beds already in use
        :param curr_patients: dict - patient_id -> Patient currently admitted
        """
        self.__name = name
        self.__total_beds = total_beds
        self.__occupied_beds = num_beds_occupied
        self.__patients_list = curr_patients
def get_name(self):
"""
get_name returns the name of the Hospital capitalized
:return: str
"""
return self.__name.capitalize()
def add_patients(self, patient_id, patient):
"""
add_patients adds the patient_id and Patient object, patient, to the dictionary of patients stored in the
hospital object
:param patient_id: str
:param patient: Patient
:return: None
"""
self.__patients_list[patient_id] = patient
self.__occupied_beds += 1
    def get_patients(self):
        """
        get_patients returns the dictionary of Patients
        :return: dictionary of Patients
        """
        # NOTE: hands out the internal dict itself (not a copy), so callers
        # can mutate the hospital's registry directly.
        return self.__patients_list
    def get_patient(self, patient_id):
        """
        get_patient returns the Patient object with the id, patient_id.
        :param patient_id: str
        :return: Patient
        """
        # dict.get: returns None instead of raising when the id is unknown.
        return self.__patients_list.get(patient_id)
def discharge_patient(self, patient_id):
"""
discharge_patient pops the Patient object that corresponds with the patient_id from the dictionary of
Patients in the hospital object and returns the Patient object
:param patient_id: str
:return: Patient
"""
self.__occupied_beds -= 1
return self.__patients_list.pop(patient_id)
def available_beds(self):
"""
available_beds returns the number of beds still available in the Hospital
:return: int
"""
return self.__total_beds - self.__occupied_beds
| UTF-8 | Python | false | false | 1,906 | py | 6 | hospitals.py | 5 | 0.579224 | 0.578174 | 0 | 56 | 32.035714 | 113 |
flaviocardoso/uri204719 | 2,035,814,501,975 | 2dd37ce1f84232e6445e3c770ec7d83a05b8e843 | 0c79382188946515059a26b636ea4689edc24faa | /feitos/jogotempocomminutos.py | 0c30e80d06335f4c073b7cd0cd85cb2ad1f5ad01 | []
| no_license | https://github.com/flaviocardoso/uri204719 | cbd2b9cb5fbb183d891128c05eb63ee8e4cb78c0 | 9ec7a9c9e7bb788f61a80dce203b9a0aea9d93f3 | refs/heads/master | 2021-09-11T19:51:14.831377 | 2018-04-11T17:31:05 | 2018-04-11T17:31:05 | 113,496,241 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/bin/python3
#uri 1047
#tempo de jogo com minutos
'''
7 8 9 10 - O JOGO DUROU 2 HORA(S) E 2 MINUTOS
7 7 7 7 - O JOGO DUROU 24 HORA(S) E 0 MINUTOS
7 10 8 9 - O JOGO DUROU 0 HORA(S) E 59 MINUTO(S)
'''
# Read the start (HI:MI) and end (HF:MF) clock times of the match.
HI, MI, HF, MF = map(int, input().split())
# BUGFIX: work in total minutes.  The original test `HF <= HI` added a day
# to any game that starts and ends within the same hour, so 7:10 -> 7:20
# printed "24 HORA(S) E 10 MINUTO(S)" instead of "0 HORA(S) E 10 MINUTO(S)".
total = (HF * 60 + MF) - (HI * 60 + MI)
if total <= 0:
    # Non-positive span: the game ran into the next day; identical start
    # and end times count as a full 24-hour game (second example above).
    total += 24 * 60
DH, DM = divmod(total, 60)
print("O JOGO DUROU {0} HORA(S) E {1} MINUTO(S)".format(DH, DM))
| UTF-8 | Python | false | false | 416 | py | 85 | jogotempocomminutos.py | 83 | 0.5625 | 0.480769 | 0 | 20 | 19.8 | 64 |
harshp8l/deep-learning-lang-detection | 4,526,895,543,398 | e27d3996552b8640be4fdaef3c49ef01f797a1f6 | 6f05f7d5a67b6bb87956a22b988067ec772ba966 | /data/train/python/207aac546ce116bdfab1a5ebae1cb7d25ed8bf1cpostService.py | 207aac546ce116bdfab1a5ebae1cb7d25ed8bf1c | [
"MIT"
]
| permissive | https://github.com/harshp8l/deep-learning-lang-detection | 93b6d24a38081597c610ecf9b1f3b92c7d669be5 | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | refs/heads/master | 2020-04-07T18:07:00.697994 | 2018-11-29T23:21:23 | 2018-11-29T23:21:23 | 158,597,498 | 0 | 0 | MIT | true | 2018-11-21T19:36:42 | 2018-11-21T19:36:41 | 2018-10-25T04:08:03 | 2018-08-24T13:01:04 | 42,815 | 0 | 0 | 0 | null | false | null | from domain.repositories.postRepository import PostRepository
__author__ = 'jean'
class PostService:
    """Service layer that forwards post CRUD operations to PostRepository."""

    def __init__(self, post=None):
        # Wrap the (optional) post entity in its repository; every operation
        # below delegates to this instance.
        self.repository = PostRepository(post)
    def create(self):
        # Persist the wrapped post; returns whatever the repository returns.
        return self.repository.create()
    def get_by_id(self, _id, entity):
        # Fetch a single record of the given entity type by primary key.
        return self.repository.get_by_id(_id, entity)
    def save(self):
        # Flush pending changes on the wrapped post (repository update).
        self.repository.update()
    def delete(self, _id=None):
        # Remove by id; with _id=None, behavior depends on
        # PostRepository.remove (presumably deletes the wrapped post --
        # verify against its implementation).
        self.repository.remove(_id)
def set_published(self, value, _id=None):
if type(value) != bool:
raise NameError('O valor passado deve ser booleano - Valor passado: ' + str(type(value)))
self.repository.set_published(value, _id)
def list(self, amount):
return self.repository.list(amount) | UTF-8 | Python | false | false | 769 | py | 11,818 | 207aac546ce116bdfab1a5ebae1cb7d25ed8bf1cpostService.py | 6,904 | 0.639792 | 0.639792 | 0 | 29 | 25.551724 | 101 |
thevasudevgupta/accelerate | 8,787,503,111,495 | 4bc524d6f7a4daad12dbc305e72c19dc80ef5684 | ed98c033fab3373398be21e8999c22d229ad4e0b | /src/accelerate/utils/transformer_engine.py | a6342d7150f1abf083bed7a19c2a5e299e29e3e3 | [
"Apache-2.0"
]
| permissive | https://github.com/thevasudevgupta/accelerate | 41555c5a42f669ed2aee76aa5e6f86898087519f | 4d13e4e474def51a67ca23cc7356926d45cd63bc | refs/heads/main | 2023-08-17T03:41:54.697915 | 2023-08-11T07:15:15 | 2023-08-11T07:15:15 | 361,257,528 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Copyright 2022 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch.nn as nn
from .imports import is_fp8_available
if is_fp8_available():
import transformer_engine.pytorch as te
def convert_model(model, to_transformer_engine=True, _convert_linear=True, _convert_ln=True):
    """
    Recursively converts the linear and layernorm layers of a model to their `transformers_engine` counterpart.

    Args:
        model: module whose children are converted in place (via setattr).
        to_transformer_engine: nn -> te when True, te -> nn when False.
        _convert_linear / _convert_ln: restrict conversion to Linear / LayerNorm.

    Raises:
        ImportError: if transformer_engine is not installed.
    """
    if not is_fp8_available():
        raise ImportError("Using `convert_model` requires transformer_engine to be installed.")
    for name, module in model.named_children():
        if isinstance(module, nn.Linear) and to_transformer_engine and _convert_linear:
            # Return early if the linear layer weights are not multiples of 16
            # NOTE(review): this aborts the whole traversal, not just this
            # layer, so later siblings stay unconverted; `continue` may be
            # the intent.  Kept as-is to preserve existing behavior.
            if any(p % 16 != 0 for p in module.weight.shape):
                return
            has_bias = module.bias is not None
            te_module = te.Linear(
                module.in_features, module.out_features, bias=has_bias, params_dtype=module.weight.dtype
            )
            te_module.weight.data = module.weight.data.clone()
            if has_bias:
                te_module.bias.data = module.bias.data.clone()

            setattr(model, name, te_module)
        elif isinstance(module, nn.LayerNorm) and to_transformer_engine and _convert_ln:
            te_module = te.LayerNorm(module.normalized_shape[0], eps=module.eps, params_dtype=module.weight.dtype)
            te_module.weight.data = module.weight.data.clone()
            te_module.bias.data = module.bias.data.clone()

            setattr(model, name, te_module)
        elif isinstance(module, te.Linear) and not to_transformer_engine and _convert_linear:
            has_bias = module.bias is not None
            # BUGFIX: torch.nn.Linear takes `dtype`, not `params_dtype` (that
            # keyword exists only on transformer_engine modules), so the
            # original raised TypeError on every te -> nn conversion.
            new_module = nn.Linear(
                module.in_features, module.out_features, bias=has_bias, dtype=module.weight.dtype
            )
            new_module.weight.data = module.weight.data.clone()
            if has_bias:
                new_module.bias.data = module.bias.data.clone()

            setattr(model, name, new_module)
        elif isinstance(module, te.LayerNorm) and not to_transformer_engine and _convert_ln:
            # BUGFIX: torch.nn.LayerNorm also takes `dtype`, not `params_dtype`.
            new_module = nn.LayerNorm(module.normalized_shape[0], eps=module.eps, dtype=module.weight.dtype)
            new_module.weight.data = module.weight.data.clone()
            new_module.bias.data = module.bias.data.clone()

            setattr(model, name, new_module)
        else:
            # Not a convertible leaf: recurse into the submodule.
            convert_model(
                module,
                to_transformer_engine=to_transformer_engine,
                _convert_linear=_convert_linear,
                _convert_ln=_convert_ln,
            )
def has_transformer_engine_layers(model):
    """
    Returns whether a given model has some `transformer_engine` layer or not.
    """
    if not is_fp8_available():
        raise ImportError("Using `has_transformer_engine_layers` requires transformer_engine to be installed.")
    # any() short-circuits on the first te layer found, like the original loop.
    return any(isinstance(m, (te.LayerNorm, te.Linear)) for m in model.modules())
| UTF-8 | Python | false | false | 3,665 | py | 142 | transformer_engine.py | 88 | 0.65075 | 0.645566 | 0 | 84 | 42.630952 | 115 |
ThachNgocTran/CodingPractice | 18,159,121,762,256 | 56644a26bb49c53e32002f656319706b8510d942 | 2c8980dba95faa493b83c3d3c1e1590d82ddea7b | /PythonPractice/MyDataScienceTest.py | aafa4a58b90da7298f67337819492157e99b18ac | []
| no_license | https://github.com/ThachNgocTran/CodingPractice | fbed03f6a7995a5d8bd7f590a87297e2d91eeb60 | 7cc7a31151e654ed2a6b7232242c5da4174eaa3e | refs/heads/master | 2021-01-19T04:50:06.555675 | 2017-07-26T21:01:44 | 2017-07-26T21:01:44 | 60,353,942 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import os.path
import pandas as pd
import numpy as np
DATA_FILE_PATH = "Data/Indian Liver Patient Dataset (ILPD).csv"
CLEAN_DATA_FILE_PATH = "Data/Cleaned_Indian Liver Patient Dataset (ILPD).csv"
def test_data_preprocessing():
    """Clean the ILPD liver-patient CSV: name the columns, binarize the
    target (label 2 -> 0), drop rows missing the albumin/globulin ratio and
    write the result to CLEAN_DATA_FILE_PATH."""
    if DATA_FILE_PATH is None or len(DATA_FILE_PATH) == 0:
        raise Exception("Input is invalid")
    if not os.path.isfile(DATA_FILE_PATH):
        raise Exception("File not exists")
    # Read CSV file (no header row in the raw dataset)
    temp_df = pd.read_csv(DATA_FILE_PATH, header=None)
    # Check its numbers of rows and columns
    (num_row, num_col) = temp_df.shape
    # Add meaningful columns' names.
    temp_df.columns = ['age', 'gender', 'total_bilirubin', 'direct_bilirubin', 'alkaline_phosphotase',
                       'alamine_aminotransferase',
                       'aspartate_aminotransferase', 'total_protiens', 'albumin',
                       'ratio_albumin_and_globulin_ratio', 'liver_res']
    # Transform categorical target variable to numberic one. 1 means "positive", 0 means "negative".
    # temp_df['liver_res'] returns a Series. Modify the Series, return another Series. Assign that to the original Series.
    temp_df['liver_res'] = temp_df['liver_res'].apply(lambda x: 0 if (x == 2) else x)
    # Get the index that is null
    findNull = temp_df['ratio_albumin_and_globulin_ratio'].isnull() # Return a Series (TRUE, TRUE, FALSE...)
    # Get the index based on a Series of Boolean type ==> Int64Index
    index_null = temp_df['ratio_albumin_and_globulin_ratio'].index[findNull]
    # Drop rows based on Index
    # (equivalent to dropna(subset=['ratio_albumin_and_globulin_ratio']))
    temp_df = temp_df.drop(index_null)
    # Drop columns based on names
    # temp_df.drop(['direct_bilirubin', 'aspartate_aminotransferase', 'albumin'], axis=1, inplace=True)
    # Add new column
    # temp_df['is_train'] = np.random.uniform(0, 1, len(temp_df)) <= 0.75
    # Split dataset based on values of a specific column
    # train, test = temp_df[temp_df['is_train'] == True], temp_df[temp_df['is_train'] == False]
    # Save to CSV
    temp_df.to_csv(CLEAN_DATA_FILE_PATH, index=False)
| UTF-8 | Python | false | false | 2,060 | py | 14 | MyDataScienceTest.py | 11 | 0.656796 | 0.650485 | 0 | 51 | 39.392157 | 122 |
KrisSoto/Python-Stuff | 15,693,810,515,840 | 829178dd68d4341b49ae4ea6fe4e35a00c9e5a4b | 959355cec2a1483739610396a060377a46b0aa72 | /basic data visualizations/lab4.py | 83f2adfe3c037f07417e9af891e10d21608c1b5d | []
| no_license | https://github.com/KrisSoto/Python-Stuff | 587a447f0b4b6455d672b0b20a7242839a56d8ea | 80f9c759f8b173d96748f82245b625fbe406d2e8 | refs/heads/master | 2017-10-07T17:41:46.677759 | 2017-02-08T15:14:44 | 2017-02-08T15:14:44 | 81,342,074 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import random
import pandas
import matplotlib.pyplot as plt
# Four basic visualizations of the BoardGameGeek dataset.
data = pandas.read_csv("bgg_db.csv", encoding="ISO-8859-1")
# 1) Comparison - display a bar chart that displays how many games came out from the year 2000 to present day
comparison_data = data[data["year"] >= 2000][["year", "names"]]
comparison_data.hist()
plt.title("Games after year 2000")
plt.xlabel("Year")
plt.ylabel("Num. Games")
plt.show()
# 2) Distribution - display two overlapping histograms that display the average rating and geek rating for all games
avg_data = data[["avg rating", "geek rating", "names"]]
avg_data.plot(alpha = 0.5, kind="hist")
plt.show()
# 3) Relationship - display a scatter plot showing the relationship between the number of votes and average rating
data.plot(kind='scatter', x='avg rating', y='num votes')
plt.show()
# 4) Composition – create a stacked column chart for the years 2000 to present day,
# the stack will represent game quality ( score >= 8 good, score >= 4 average, score > = 0 bad).
# new column with good, avg, bad. for year list count of good avg and bad games based on score
games_data = pandas.DataFrame()
years = data[data["year"] >= 2000][["year"]]
years = years["year"].unique()
games_data.insert(0, "Year", years)
games_data.insert(0, "Good", 0)
games_data.insert(0, "Bad", 0)
games_data.insert(0, "Average", 0)
print("Loading last graph")
# NOTE(review): iterrows() + per-row .loc updates is O(rows * years); a
# vectorized groupby/cut would be much faster on large datasets.
for index, value in data.iterrows():
    rating, year = value["avg rating"], value["year"]
    if year >= 2000:
        if rating >= 8:
            games_data.loc[games_data["Year"] == year, 'Good'] += 1
        elif rating >= 4:
            games_data.loc[games_data["Year"] == year, 'Average'] += 1
        else:
            games_data.loc[games_data["Year"] == year, 'Bad'] += 1
# NOTE(review): badList/goodList/AvgList are built but never used.
badList = list(games_data["Bad"])
goodList = list(games_data["Good"])
AvgList = list(games_data["Average"])
games_data.plot(kind="hist", stacked=True)
plt.show() | UTF-8 | Python | false | false | 1,913 | py | 9 | lab4.py | 7 | 0.674516 | 0.648352 | 0 | 51 | 36.490196 | 116 |
ptsiampas/Exercises_Learning_Python3 | 9,113,920,608,070 | 1fa82210bd6114c6af22bfa1cf12b4aff1253ab0 | 57b39dee1fd5f18823d13240bdbc61be0414e1da | /14_List Algorithms/Example_14.7c.py | 8af39bd237298a511e541672723f5a09119aae72 | []
| no_license | https://github.com/ptsiampas/Exercises_Learning_Python3 | d7c2e7b965419f21e28a70f8e03b8793ab3b8b9f | 666744116ac37c70fa8b9a0d8e109b85504788c1 | refs/heads/master | 2021-01-10T04:40:46.840585 | 2015-12-06T14:39:22 | 2015-12-06T14:39:22 | 45,291,974 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from unit_tester import test
from queens_functions import *
# 7|_|_|_|_|_|X|_|_|
# 6|X|_|_|_|_|_|_|_|
# 5|_|_|_|_|X|_|_|_|
# 4|_|X|_|_|_|_|_|_|
# 3|_|_|_|_|_|_|_|X|
# 2|_|_|X|_|_|_|_|_|
# 1|_|_|_|_|_|_|X|_|
# 0|_|_|_|X|_|_|_|_|
# _|0|1|2|3|4|5|6|7|
test(has_clashes([6, 4, 2, 0, 5, 7, 1, 3]), False) # Solution from above
# 7|_|_|_|_|_|X|_|_|
# 6|_|X|_|_|_|_|_|_|
# 5|_|_|_|_|X|_|_|_|
# 4|X|_|_|_|_|_|_|_|
# 3|_|_|_|_|_|_|_|X|
# 2|_|_|X|_|_|_|_|_|
# 1|_|_|_|_|_|_|X|_|
# 0|_|_|_|X|_|_|_|_|
# _|0|1|2|3|4|5|6|7|
test(has_clashes([4, 6, 2, 0, 5, 7, 1, 3]), True) # Swap rows of first two
# 3|_|_|_|X|
# 2|_|_|X|_|
# 1|_|X|_|_|
# 0|X|_|_|_|
# _|0|1|2|3|
test(has_clashes([0, 1, 2, 3]), True) # Try small 4x4 board
# 3|_|_|X|_|
# 2|X|_|_|_|
# 1|_|_|_|X|
# 0|_|X|_|_|
# _|0|1|2|3|
test(has_clashes([2, 0, 3, 1]), False) # Solution to 4x4 case
| UTF-8 | Python | false | false | 845 | py | 164 | Example_14.7c.py | 147 | 0.339645 | 0.249704 | 0 | 37 | 21.837838 | 75 |
xiangzz159/flask_graphql_example | 13,460,427,519,809 | c9bc2fe0bed6c5cb3de4e032d7b1f6e2fc15a1e6 | 20f2ed1942faa4881c77d75ad77cc6f945fbefbc | /app.py | 76bd3ef89179a89109db5c3d77e29d9d1d305f3c | []
| no_license | https://github.com/xiangzz159/flask_graphql_example | 477e8d9651fd3053b93b59862728bc9c366ae7ec | d7edf850b79f768b9bddec43c74347c9d32b32cf | refs/heads/master | 2022-07-24T17:28:35.834901 | 2018-09-12T05:41:43 | 2018-09-12T05:41:43 | 130,650,794 | 1 | 0 | null | false | 2022-06-21T21:20:55 | 2018-04-23T06:36:18 | 2018-09-12T05:41:56 | 2022-06-21T21:20:55 | 39 | 1 | 1 | 4 | Python | false | false | # !/usr/bin/env python
# _*_ coding:utf-8 _*_
'''
@author: yerik
@contact: xiangzz159@qq.com
@time: 2018/4/18 11:06
@desc: python启动入口
'''
import numpy as np
import json
import flask
import flask_graphql
from flask_cors import CORS
import api
import config
import mutations
import jsonify
from auths import Auth
from flask_httpauth import HTTPTokenAuth
# NumpyEncoder: useful for JSON serializing
# Dictionaries that contain Numpy Arrays
class NumpyEncoder(json.JSONEncoder):
    """json.JSONEncoder that understands NumPy scalars and arrays.

    Integer/float scalars are unboxed to their Python equivalents and
    ndarrays become (nested) lists; anything else falls through to the base
    class, which raises TypeError for unserializable values.
    """

    def default(self, obj):
        for np_type, convert in ((np.integer, int),
                                 (np.floating, float),
                                 (np.ndarray, lambda arr: arr.tolist())):
            if isinstance(obj, np_type):
                return convert(obj)
        return super().default(obj)
# Flask application wired with CORS, token auth and NumPy-aware JSON output.
app = flask.Flask(__name__)
app.debug = True  # NOTE(review): debug mode enabled; disable for production
app.json_encoder = NumpyEncoder
cors = CORS(app)
auth = HTTPTokenAuth()
@app.after_request
def after_request(response):
    """Attach permissive CORS headers to every response; answer CORS
    preflight (OPTIONS) requests with the allowed methods/headers."""
    response.headers.add('Access-Control-Allow-Origin', '*')
    if flask.request.method != 'OPTIONS':
        return response
    # Preflight: advertise the verbs we accept and echo the requested headers.
    response.headers['Access-Control-Allow-Methods'] = 'DELETE, GET, POST, PUT'
    requested = flask.request.headers.get('Access-Control-Request-Headers')
    if requested:
        response.headers['Access-Control-Allow-Headers'] = requested
    return response
# Pre-request interception hook
@app.before_request
def befor_request():
    """Authentication gate run before every request (the 'befor' typo in the
    name is kept; Flask dispatches on the decorator, not the name)."""
    if config.BEFOR_REQUEST:
        if '/graphql' in flask.request.url:
            result = Auth.identify(Auth, flask.request)
            if result['status'] == False:
                # NOTE(review): returns str(dict) as the response body; a
                # JSON response (flask.jsonify) is probably intended.
                return str(result)
@app.route('/')
def index():
    # Root simply redirects to the GraphQL explorer.
    return flask.redirect("/graphql", code=302)
@app.route('/apps/')
def apps():
    # Simple liveness / identification endpoint.
    return "Apps: flask_graphql_example"
@app.route('/login', methods=['POST'])
def login():
    """Issue an auth token for valid email/password form credentials."""
    email = flask.request.form.get('email')
    # NOTE(review): MD5 password hashing is weak; a dedicated KDF is advised.
    password = mutations.encryption_md5(flask.request.form.get('password'))
    if (not email or not password):
        # Error message reads: "username and password must not be empty".
        # NOTE(review): `jsonify` here is the imported *module* (see imports),
        # which is not callable -- flask.jsonify is probably intended.
        return jsonify(config.falseReturn('', '用户名和密码不能为空'))
    else:
        return Auth.authenticate(Auth, email, password)
def graphql_view():
    """Build the GraphQL endpoint view (GraphiQL UI enabled) bound to the
    project's schema and the shared DB session."""
    view = flask_graphql.GraphQLView.as_view(
        'graphql',
        schema=api.schema,
        graphiql=True,
        context={
            'session': config.db_session,
        }
    )
    return view
# Register the GraphQL endpoint (built above) under /graphql.
app.add_url_rule('/graphql',
                 view_func=graphql_view()
                 )
if __name__ == '__main__':
    # CLI entry point: optional -s/--debug-sql flag turns on SQL echo logging.
    import optparse
    parser = optparse.OptionParser()
    parser.add_option('-s',
                      '--debug-sql',
                      help="Print executed SQL statement to commandline",
                      dest="debug_sql",
                      action="store_true",
                      default=False)
    options, args = parser.parse_args()
    import logging
    logging.basicConfig()
    if options.debug_sql:
        logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO)
    # else:
    #     from raven.contrib.flask import Sentry
    #     sentry = Sentry(app, logging=True, level=logging.WARN)
app.run(
host=config.APP_HOST,
port=config.APP_PORT)
| UTF-8 | Python | false | false | 3,211 | py | 11 | app.py | 8 | 0.617471 | 0.611479 | 0 | 136 | 22.316176 | 83 |
simhaonline/dots-backend | 19,619,410,614,812 | c8c491a0c9c564821f18340c7ec0ec7f4af69431 | fae9915a6090f526da8820249295602825242e5e | /tables/views.py | 2df35d0e5dad4555e96d2a2c38d074d7f4c1950b | []
| no_license | https://github.com/simhaonline/dots-backend | f7cf106f1eba02fbdd11ae5024197e64d68e4e82 | 65668688a018cdc96581ff21f182d78f93f41e06 | refs/heads/master | 2022-09-29T05:46:27.500890 | 2020-04-25T18:43:43 | 2020-04-25T18:43:43 | 269,731,286 | 0 | 1 | null | true | 2020-06-05T18:08:18 | 2020-06-05T18:08:17 | 2020-06-05T17:05:56 | 2020-06-05T17:06:35 | 10,103 | 0 | 0 | 0 | null | false | false | from django.db import IntegrityError
from django.conf import settings as app_settings
from rest_framework import viewsets
from rest_framework import permissions
from rest_framework.response import Response
from rest_framework import status
from pymongo import MongoClient
from .models import Table
from .serializers import TableSerializer
from .utils import (process_data, connect_to_mongo, )
class TableViewSet(viewsets.ModelViewSet):
    """
    Tables view

    CRUD endpoint for Table models; on create, the uploaded dataset is
    processed and dumped into MongoDB alongside the relational record.
    """
    # DRF configuration: authenticated users only, objects keyed by table_uuid.
    permission_classes = (permissions.IsAuthenticated,)
    queryset = Table.objects.all()
    serializer_class = TableSerializer
    lookup_field = 'table_uuid'
    def create(self, request, *args, **kwargs):
        """Create a Table row and store its dataset in MongoDB.

        Expects request.data to carry a 'data' payload plus table metadata
        ('source', 'name', ...).  Returns 201 with the serialized table, or
        400 on malformed payloads / integrity errors.
        """
        try:
            request_data = request.data
            # NOTE(review): pop() mutates request.data in place.
            data = process_data(
                request_data.pop('data'), request_data.get('source')
            )
        except (ValueError, KeyError) as e:
            # NOTE(review): passes the exception object itself as the body;
            # str(e) would serialize more predictably.
            return Response(e, status=status.HTTP_400_BAD_REQUEST)
        request_data['owner'] = request.user.id
        serializer = self.get_serializer(
            data=request_data, context={'request': self.request}
        )
        serializer.is_valid(raise_exception=True)
        try:
            self.perform_create(serializer)
            # generate mongoData
            mongo_data = {
                'data': data,
                'table_uuid': str(serializer.data.get('table_uuid'))
            }
            # One Mongo collection per table, named after the sanitized name.
            mongo_client = connect_to_mongo()
            connection = mongo_client[request_data.get('name').replace(' ', '_')]
            connection.insert_one(mongo_data)
            return Response(
                serializer.data, status=status.HTTP_201_CREATED
            )
        except IntegrityError as e:
            return Response(e, status=status.HTTP_400_BAD_REQUEST)
def perform_create(self, serializer):
# set owner
serializer.save()
| UTF-8 | Python | false | false | 1,943 | py | 28 | views.py | 20 | 0.62841 | 0.623778 | 0 | 61 | 30.852459 | 81 |
antwyh/rmcmd | 16,131,897,190,932 | 50c3da6f8b3bd4f556e5d3de7302965ef4b3ae08 | 4270b9ed5c7b9512d384968e7c699f1cfe36620f | /libcode/LoggerUtils.py | eccfbe1c31ba58cfcfd08cce5949fe4100a0b27b | [
"Apache-2.0"
]
| permissive | https://github.com/antwyh/rmcmd | 8de8ad507cda5419e520633faefdc85149a25e91 | 990f266ddfcfb282fec799645cb2debe950c6fde | refs/heads/master | 2022-09-09T04:41:42.656173 | 2020-06-05T10:18:35 | 2020-06-05T10:18:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/local/bin/python3.7
import logging
class LoggerUtils:
    """Helper that wires up an INFO-level, file-backed logger."""

    def __init__(self, name, path):
        """
        :param name: logger name (loggers are process-wide singletons per name)
        :param path: file the log records are appended to
        """
        self.logger = logging.getLogger(name)
        self.logger.setLevel(level=logging.INFO)
        # BUGFIX: guard against attaching a second FileHandler when the same
        # logger name is configured twice -- the original duplicated every
        # log line on repeated createLogger() calls.
        if not self.logger.handlers:
            handler = logging.FileHandler(path)
            handler.setLevel(logging.INFO)
            formatter = logging.Formatter('%(asctime)s %(pathname)s(%(lineno)d): %(levelname)s %(message)s')
            handler.setFormatter(formatter)
            self.logger.addHandler(handler)

    @staticmethod
    def createLogger(name, path):
        """Factory: configure and return the logging.Logger for `name`."""
        return LoggerUtils(name, path).logger
class CmdLogger:
    # Shared command logger.  NOTE(review): the FileHandler for "log/cmd.log"
    # is created at class-definition (import) time and fails if the log/
    # directory does not exist -- confirm the caller guarantees it.
    cmdLoger = LoggerUtils.createLogger(__name__, "log/cmd.log")
@staticmethod
def logCmd(arguments, cmd):
cmdPara = []
for argob in arguments:
if arguments[argob] is not False and arguments[argob] is not None:
if (argob=='--index'):
cmdPara.append("{0}:{1}".format(argob, arguments[argob]))
elif arguments[argob] != 0:
cmdPara.append("{0}:{1}".format(argob, arguments[argob]))
CmdLogger.cmdLoger.info("------------------------------->执行命令[%s]参数如下:\n%s", cmd, cmdPara) | UTF-8 | Python | false | false | 1,163 | py | 13 | LoggerUtils.py | 8 | 0.596338 | 0.590235 | 0 | 29 | 38.586207 | 104 |
hippopotamusinflight/sudoku_python | 6,622,839,594,485 | 382c298435aa38e15d3f68c95ebbfded09fa6bb6 | 0c58a8f215bda4bd6aaf5efa8a2734dfae0ddddc | /sudoku_solver_clean/block_rm_line_single/block_rm_line_single_v1.py | b6fc09f2402471f6f1c21c04011c6fd5f5f074af | []
| no_license | https://github.com/hippopotamusinflight/sudoku_python | 9748d7ec1f17092de1c0bba1213c561f7ce31a0c | 2d679994c41febfbaa149eae394d0bd5d14fdf42 | refs/heads/master | 2021-07-13T23:14:19.375103 | 2020-06-20T22:23:37 | 2020-06-20T22:23:37 | 173,583,698 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 7 20:50:40 2019
@author: minghan
"""
#%%
from collections import Counter
from copy import deepcopy
#%%
def block_rm_line_single(dictionary, len_trkr):
    """Box/line interaction ("pointing pair") elimination pass for sudoku.

    For every 3x3 box, any digit whose candidates are confined to exactly two
    cells that share a row (or column) is erased from that row (or column)
    outside the pair.  Works on a deep copy of the candidate map; prints the
    eliminations and returns the updated map together with len_trkr increased
    by the number of cells that lost at least one candidate.
    """
    candidates = deepcopy(dictionary)
    tracker = {}

    def _erase(cell, digit):
        # Log the elimination, then drop the digit from the cell's candidates.
        tracker.setdefault(cell, []).append(digit)
        candidates[cell].remove(digit)

    for col in (0, 3, 6):
        for row in (0, 3, 6):
            in_box = [c for c in candidates
                      if c[0] in range(row, row + 3) and c[1] in range(col, col + 3)]
            # Tally every candidate digit appearing in this box.
            counts = Counter(d for c in in_box for d in candidates[c])
            for digit, seen in counts.items():
                if seen != 2:
                    continue
                pair = [c for c in in_box if digit in candidates[c]]
                if pair[0][0] == pair[1][0]:
                    # Pair shares a row: clear the digit from the rest of it.
                    for c in candidates:
                        if c[0] == pair[0][0] and c not in pair and digit in candidates[c]:
                            _erase(c, digit)
                if pair[0][1] == pair[1][1]:
                    # Pair shares a column: clear the digit from the rest of it.
                    for c in candidates:
                        if c[1] == pair[0][1] and c not in pair and digit in candidates[c]:
                            _erase(c, digit)
    print(tracker)
    len_trkr += len(tracker)
    return (candidates, len_trkr)
kuhball/drehding | 8,512,625,190,796 | f993a13a913b9da50e81567692ea63375022a6ca | da46ea52f29433bf7eed8ad26500a7143f977f6e | /ui/tickbased_ui.py | e028ea6bd361b6ab1138ec40429d91b5bb8b09ca | []
| no_license | https://github.com/kuhball/drehding | 44596c54ba54c6c7d3e5d66b42c06f63010b0acf | 7b5ad72f2ea4d04c5c67450e6d0d8ce27806beba | refs/heads/main | 2023-03-07T05:37:15.906716 | 2021-02-18T15:43:45 | 2021-02-18T15:43:45 | 319,995,885 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# Simple UI to steer the `tickbased_ui` sketch
import time
import serial
import PySimpleGUI as sg
# Config
MAX_SKIP_TURNS = 10
HALL_PULSE_PER_TURN = 9 # see HALL_PULSE_PER_TURN setting in your sketch
SERIAL_PORT = '/dev/ttyACM1' # see bottom right of Arduino IDE
SERIAL_PORT_BAUD = 9600 # see Serial.begin(SERIAL_PORT_BAUD) in your sketch
# Init connection to Arduino
arduino = serial.Serial(SERIAL_PORT, SERIAL_PORT_BAUD, timeout=.1)
time.sleep(1) # give the connection a second to settle
# Functions
def mkslider(min, max, default, key):
    """Build a horizontal, event-emitting PySimpleGUI slider over [min, max].

    Note: `min`/`max` shadow the builtins; kept for signature compatibility.
    """
    return sg.Slider(
        enable_events=True,
        range=(min, max),
        default_value=default,
        orientation='horizontal',
        key=key
    )
def mkled(id):
    """Layout rows for one LED: a flash-position slider (key led_<id>_pos)
    and a rotations-to-skip slider (key led_<id>_cooldown)."""
    return [
        [sg.Text(f"LED {id}")],
        [sg.Text(f"At which tick of the {HALL_PULSE_PER_TURN} rotation ticks to flash:")],
        [mkslider(0, HALL_PULSE_PER_TURN - 1, 0, f'led_{id}_pos')],
        [sg.Text("Rotations to skip after each flash:")],
        [mkslider(0, MAX_SKIP_TURNS, 0, f'led_{id}_cooldown')]
    ]
def main():
    """Run the control window: every slider change is sent to the Arduino as
    '<key>:<value>' over serial, then any reply lines are echoed."""
    layout = [
        [sg.Text("Controls for tickbased_ui sketch")],
        [sg.Text('_'*30)],
        [sg.Text("Motor speed")],
        [mkslider(0, 255, 40, 'motor_pulse_length')],
        [sg.Text('_'*30)],
        *mkled(1),
        [sg.Text('_'*30)],
        *mkled(2),
        [sg.Text('_'*30)],
        *mkled(3),
        [sg.Button("Close")]
    ]
    # Create the window
    window = sg.Window("Controls for tickbased_ui sketch", layout)
    # Create an event loop
    while True:
        event, values = window.read()
        # End program if user closes window or
        # presses the OK button
        if event == "Close" or event == sg.WIN_CLOSED:
            break
        # Event key doubles as the command name understood by the sketch.
        msg = f'{event}:{int(values[event])}'
        arduino.write(str.encode(msg))
        print(f'Send: {msg}')
        while True:
            data = arduino.readline()[:-2] #the last bit gets rid of the new-line chars
            if data:
                print('data', data)
            else:
                break
    window.close()
if __name__ == "__main__":
main()
| UTF-8 | Python | false | false | 2,163 | py | 79 | tickbased_ui.py | 63 | 0.569117 | 0.553398 | 0 | 82 | 25.378049 | 90 |
WheatonCS/Lexos | 9,981,503,999,164 | e96f4c6d9dc5803ac5009940c5fe639a24311160 | 0fca12a004a92b37a3baa39e01eea3ec2cc89f0e | /lexos/receivers/statistics_receiver.py | f241a89b85aa988476bf4cd57d35f8ad3d6e4af0 | [
"MIT"
]
| permissive | https://github.com/WheatonCS/Lexos | 976811e266539aea07abb7b958c45aed9b5d1b7b | 0950b8e9c672142ba0cccd3066db188e965380f9 | refs/heads/master | 2023-04-28T01:00:50.316325 | 2022-02-14T17:08:33 | 2022-02-14T17:08:33 | 10,040,275 | 113 | 25 | MIT | false | 2023-04-15T12:24:28 | 2013-05-13T20:16:10 | 2023-03-08T16:02:19 | 2023-04-15T12:24:28 | 87,888 | 111 | 19 | 37 | Python | false | false | """This is the receiver for the stats model."""
from typing import List, NamedTuple
from lexos.receivers.base_receiver import BaseReceiver
from lexos.managers.utility import load_file_manager
class StatsFrontEndOption(NamedTuple):
"""The typed tuple to hold stats front end option."""
# This is the list of active file ids.
active_file_ids: List[int]
# The column to sort by
sort_column: int
# The sort method
sort_ascending: bool
# The colors
text_color: str
highlight_color: str
class StatsReceiver(BaseReceiver):
"""This is the class that gets front end options for the stats model."""
def __init__(self):
"""Get stats front end options using the receiver."""
super().__init__()
def options_from_front_end(self) -> StatsFrontEndOption:
"""Get the options from front end.
The only option is selected file ids.
"""
# Force file ids to be integer type and remove extra blank.
active_file_ids = [file.id for file in
load_file_manager().get_active_files()]
# Get the selected column
sort_column = int(self._front_end_data[
"statistics_table_selected_column"])
# Get the sort column
sort_ascending = bool(self._front_end_data[
"statistics_table_sort_mode"] == "Ascending")
# Get the colors
text_color = self._front_end_data.get("text_color")
highlight_color = self._front_end_data.get("highlight_color")
# Return stats front end option.
return StatsFrontEndOption(active_file_ids=active_file_ids,
sort_column=sort_column,
sort_ascending=sort_ascending,
text_color=text_color,
highlight_color=highlight_color)
| UTF-8 | Python | false | false | 1,893 | py | 637 | statistics_receiver.py | 104 | 0.603275 | 0.603275 | 0 | 58 | 31.637931 | 76 |
fishstamp82/moltools | 14,353,780,751,725 | b3822b39ea8bae8a9f143096a149f0da661530f8 | a8d73a6b9dfa6b61a16aefd9011146011b402c56 | /moltools/test/test_bonds.py | efa506b8c970d88d62769cb71b6c40b053e02021 | [
"MIT"
]
| permissive | https://github.com/fishstamp82/moltools | 53a4996a8ac7c3ba8ad38986ecfae16b4b5fb8a1 | 5104878674c78a0337426de08b10028ac9736914 | refs/heads/master | 2020-12-22T07:01:15.008885 | 2018-02-25T21:52:40 | 2018-02-25T21:52:40 | 34,782,928 | 0 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null | import unittest, os, warnings
import numpy as np
from moltools import Cluster, Atom, Molecule, Water, Generator
warnings.simplefilter('error')
from nose.plugins.attrib import attr
FILE_MOL = os.path.join( os.path.dirname(__file__), 'tip3p44_10qm.mol' )
FILE_PDB = os.path.join( os.path.dirname(__file__), 'tip3p0.pdb' )
@attr(speed = 'fast' )
class BondTestCase( unittest.TestCase ):
def test_populate_bonds(self):
w = Water.get_standard()
w.populate_bonds()
assert len( w.o.bonds ) == 2
assert len( w.h1.bonds ) == 1
assert len( w.h2.bonds ) == 1
#def test_transfer_props_1(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# w.o.transfer_props( {'beta' : 1} )
# np.testing.assert_allclose( w.o.p.b, np.zeros(10), atol = 1e-7 )
#def test_transfer_props_2(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# B = w.p.b.copy()
# w.h1.transfer_props()
# w.o.transfer_props()
# np.testing.assert_allclose( w.p.b, B, atol = 1e-7 )
#def test_transfer_props_3(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# C = w.p.q.copy()
# #C = w.p.q.copy()
# #D = w.p.d.copy()
# #Q = w.p.Q.copy()
# A = w.p.a.copy()
# B = w.p.b.copy()
# transfer = { 'charge' : 1,
# 'quadrupole' : 1,
# 'dipole' : 1,
# 'alpha' : 1,
# 'beta' : 1 }
# w.transfer_props( [w.o, w.h1], transfer = transfer )
# #np.testing.assert_allclose( w.p.q, C, atol = 1e-7 )
# #np.testing.assert_allclose( w.p.d, D, atol = 1e-7 )
# #np.testing.assert_allclose( w.p.Q, Q, atol = 1e-7 )
# np.testing.assert_allclose( w.p.a, A, atol = 1e-7 )
# np.testing.assert_allclose( w.p.b, B, atol = 1e-7 )
#def test_transfer_props_4(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# q_h1 = w.h1.p.q
# d_h1 = w.h1.p.d.copy()
# Q_h1 = w.h1.p.Q.copy()
# a_h1 = w.h1.p.a.copy()
# b_h1 = w.h1.p.b.copy()
#
# q_o = w.o.p.q
# w.h1.transfer_props( {'charge' : 1 } )
# np.testing.assert_allclose( w.h1.p.q, 0.0, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.d, d_h1, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.Q, Q_h1, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.a, a_h1, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.b, b_h1, atol = 1e-7 )
# np.testing.assert_allclose( w.o.p.q, (q_h1 + q_o), atol = 1e-7 )
#
#def test_transfer_props_5(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# q_h1 = w.h1.p.q
# d_h1 = w.h1.p.d.copy()
# Q_h1 = w.h1.p.Q.copy()
# a_h1 = w.h1.p.a.copy()
# b_h1 = w.h1.p.b.copy()
# w.h1.transfer_props( { 'charge':1, 'dipole' : 1 } )
# np.testing.assert_allclose( w.h1.p.q, 0.0, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.d, np.zeros(3), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.Q, Q_h1, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.a, a_h1, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.b, b_h1, atol = 1e-7 )
#def test_transfer_props_6(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# q_h1 = w.h1.p.q
# d_h1 = w.h1.p.d.copy()
# Q_h1 = w.h1.p.Q.copy()
# a_h1 = w.h1.p.a.copy()
# b_h1 = w.h1.p.b.copy()
# w.h1.transfer_props( { 'charge':1, 'dipole' : 1, 'quadurpole':1 } )
# np.testing.assert_allclose( w.h1.p.q, 0.0, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.d, np.zeros(3), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.Q, np.zeros(6), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.a, a_h1, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.b, b_h1, atol = 1e-7 )
#def test_transfer_props_7(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# q_h1 = w.h1.p.q
# d_h1 = w.h1.p.d.copy()
# Q_h1 = w.h1.p.Q.copy()
# a_h1 = w.h1.p.a.copy()
# b_h1 = w.h1.p.b.copy()
# w.h1.transfer_props( { 'charge':1,
# 'alpha' : 1,
# 'dipole' : 1, 'quadurpole':1 } )
# np.testing.assert_allclose( w.h1.p.q, 0.0, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.d, np.zeros(3), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.Q, np.zeros(6), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.a, np.zeros(6), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.b, b_h1, atol = 1e-7 )
#def test_transfer_props_8(self):
# w = Water.get_standard()
# w.attach_properties()
# w.populate_bonds()
# q_h1 = w.h1.p.q
# d_h1 = w.h1.p.d.copy()
# Q_h1 = w.h1.p.Q.copy()
# a_h1 = w.h1.p.a.copy()
# b_h1 = w.h1.p.b.copy()
# w.h1.transfer_props( { 'charge':1,
# 'alpha' : 1, 'beta' : 1,
# 'dipole' : 1, 'quadurpole':1 } )
# np.testing.assert_allclose( w.h1.p.q, 0.0, atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.d, np.zeros(3), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.Q, np.zeros(6), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.a, np.zeros(6), atol = 1e-7 )
# np.testing.assert_allclose( w.h1.p.b, np.zeros(10), atol = 1e-7 )
if __name__ == '__main__':
unittest.main()
| UTF-8 | Python | false | false | 5,705 | py | 78 | test_bonds.py | 47 | 0.516039 | 0.47695 | 0 | 162 | 34.216049 | 77 |
AverageTryHard/DjangoTaskEnjoyPro | 8,203,387,551,097 | 2e3c1d286a80db5446b9aadf6f2c6e6ee41f7edb | 7eadf82706d40fe4138fcb04ae552c6a22551d23 | /rest_messaging/migrations/0002_auto_20210311_2343.py | b54abd976c2e3f4a31d92da61c0c61737dc25d9b | []
| no_license | https://github.com/AverageTryHard/DjangoTaskEnjoyPro | acee5bbb21f72e02301b01ba7e95c0317d895930 | 71f0b9370fb8dc21b4c43c49388573d136cb061b | refs/heads/master | 2023-03-14T02:23:08.158347 | 2021-03-15T18:35:56 | 2021-03-15T18:35:56 | 347,116,228 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 3.1.7 on 2021-03-11 21:43
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('rest_messaging', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='message',
name='changed_at',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterField(
model_name='message',
name='created_at',
field=models.DateTimeField(auto_created=True),
),
migrations.AlterField(
model_name='message',
name='is_read',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='message',
name='is_sent',
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name='message',
name='text_body',
field=models.CharField(blank=True, default='', max_length=200),
),
]
| UTF-8 | Python | false | false | 1,055 | py | 12 | 0002_auto_20210311_2343.py | 9 | 0.546919 | 0.526066 | 0 | 38 | 26.763158 | 75 |
RounakChatterjee/SEM2_FinalExam | 9,517,647,578,866 | ff24e218c4f2cf91958eda141c4637563a8fd267 | d4af77ea1a945e39c2bb6d049fa9f2af23075f78 | /Codes/Ques_6_solveIvp.py | 39ed97acf6f54252fa031b5050cf2f75f41b6ddc | []
| no_license | https://github.com/RounakChatterjee/SEM2_FinalExam | 2744f840f2aceaa8f2c0df92ae447bfcaa10e1c4 | 87af490ad730aeaeb4933b29988222da7c723bcc | refs/heads/master | 2022-10-03T11:41:27.731593 | 2020-06-05T16:57:13 | 2020-06-05T16:57:13 | 269,700,874 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
SOLVING THE SET OF EQUATIONS
==============================================================================
Author : Rounak Chatterjee
Date : 05/05/2020
==============================================================================
Given this set of simultaneous Equation 1st Order, the best way to solve it is by using
4th Order Runge Kutta Method. Here we'll use a custom code to better
handle the vectorial nature of the problem.
In this we denote r = [y1,y2] so r[0] = y1 and r[1] = y2
'''
import numpy as np
import matplotlib.pyplot as plt
h = 0.001 # step value
a = 0.0 #Initial Value
b = 0.5 #Final Value
def f(r,x): #The Vector function
return np.array([32.0*r[0]+66.0*r[1]+(2.0/3.0)*x+(2.0/3.0),-66.0*r[0]-133.0*r[1]-(1.0/3.0)*x-(1.0/3.0)])
def next_value(wi,xi): # This Executes a step of The RK Algorithm
k1 = h*f(wi,xi)
k2 = h*f(wi+k1/2.0,xi+h/2.0)
k3 = h*f(wi+k2/2,xi+h/2.0)
k4 = h*f(wi+k3,xi+h)
return (wi+(1.0/6.0)*(k1+2.0*k2+2.0*k3+k4))
n = int((b-a)/h) # total number of steps
x = np.arange(a,b,h)
w = np.zeros(shape=(n,2),dtype=np.float64) # Solution set
w[0]=[(1.0/3.0),(1.0/3.0)] # Initial values
for i in range(n-1):
w[i+1] = next_value(w[i],a+i*h)
plt.title("Plot of Computed Solutions",size = 15)
plt.plot(x,w[:,0],color = 'blue',label = 'y$_1$(x)')
plt.plot(x,w[:,1],color = 'red',label = 'y$_2$(x)')
plt.legend()
plt.xlabel("x",size = 13)
plt.ylabel("y(x)",size = 13)
plt.grid()
plt.show()
'''
This Part of Code Produces the graph that justifies that the solution
obtained is correct. As analytically obtained we see that the function 2y1 + y2 is of form
2y1 + y2 = exp(-x)+x
here we try to verify it.
'''
plt.title("Justification Plot",size = 15)
plt.plot(x,2.0*w[:,0]+w[:,1],lw =5 ,color = 'black',label = '2y$_1$(x)+y$_2$(x)')
plt.plot(x,np.exp(-x)+x,color = '#00FF00',label = "Analytical Solution")
plt.legend()
plt.xlabel("x",size = 13)
plt.ylabel("y(x)",size = 13)
plt.grid()
plt.show()
| UTF-8 | Python | false | false | 2,015 | py | 9 | Ques_6_solveIvp.py | 6 | 0.565757 | 0.500744 | 0 | 63 | 29.952381 | 105 |
dnkzsmp/IrregularVerbs | 17,506,286,737,557 | f4bfe2c79f821383d5c65969e0f0686fa772d405 | df0ca6f9d14b7ee093e1ccfc3ae33ebd2d28fcaa | /src/startwindow.py | 05740b43fa2d697d0847e4069a210cab1378f3ca | []
| no_license | https://github.com/dnkzsmp/IrregularVerbs | 085b322f4a5f71c35f2b1364c8cabc9e6e76f54e | 61831af0fc0e811e2fe883538ddcde430059ebad | refs/heads/master | 2021-02-12T04:29:40.283865 | 2020-05-21T15:03:24 | 2020-05-21T15:03:24 | 249,629,558 | 3 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
import sys
import os
from tkinter import Label, Entry, Button
from tkinter import Tk, PhotoImage
class StartWindow(Tk):
def __init__(self, lines):
super().__init__()
self.s = 0
self.verbs = lines
self.hello1 = Label(self)
self.hello2 = Label(self)
self.hello3 = Label(self)
self.verbs_entry = Entry(self, bd=4)
self.start = Button(self, text='Начать',
command=self.start_check)
self.Exit = Button(self, text='Выйти',
command=self.exit_app)
self.Exit.config(font=2)
self.PS = Label(self)
self.hello1.pack()
self.hello2.pack()
self.hello3.pack()
self.verbs_entry.pack()
self.start.pack()
self.Exit.pack()
self.PS.pack()
self.protocol("WM_DELETE_WINDOW", self.exit_app)
self.initUI()
def initUI(self):
self.iconphoto(True, PhotoImage(file=os.path.join(sys.path[0],
'../style/icon.png')))
self.geometry('600x230')
x = (self.winfo_screenwidth() - self.winfo_reqwidth()) / 2
y = (self.winfo_screenheight() - self.winfo_reqheight()) / 2
self.wm_geometry("+%d+%d" % (x, y))
self.resizable(False, False)
self.title('Тренажер форм глагола "IrregularVerbs"')
self.config(bg='#3A3A3A')
self.verbs_entry.config(bg='#DADADA')
self.start.config(bg='#DADADA')
self.Exit.config(bg='#DADADA')
self.hello1.config(text='Добро пожаловать в тренажер', bg='#3A3A3A',
font=('Arial', 13, 'bold'), fg='#DADADA')
self.hello2.config(text='форм неправильного глагола '
'"IrregularVerbs"\n', bg='#3A3A3A',
font=('Arial', 13, 'bold'), fg='#DADADA')
self.hello3.config(text='Введите кол-во глаголов для тренировки'
'(от 1 до 10)', bg='#3A3A3A', font=5,
fg='#11FF00')
self.start.config(font=2)
self.PS.config(text='', fg='#00FF38', bg='#3A3A3A')
def start_check(self):
if self.verbs_entry.get().isdigit() and \
not self.verbs_entry.get().isspace():
self.s = int(self.verbs_entry.get())
if self.s > 10:
self.verbs_entry.delete(0, 'end')
self.PS.config(text='\n*максимальное число: 10',
font=3, fg='#FF79E8')
if self.s == 0:
self.verbs_entry.delete(0, 'end')
self.PS.config(text='\n*минимальное число: 1',
fg='#FF79E8', font=3)
elif 1 <= self.s <= 10:
sys.path.append('..')
from src.graphic import Graphic
self.destroy()
root = Graphic(self.s, self.verbs)
root.mainloop()
elif not self.verbs_entry.get().isdigit() or \
self.verbs_entry.get().isspace():
self.verbs_entry.delete(0, 'end')
self.PS.config(text='\n*надо вести число '
'число от 1 до 10', fg='#FF79E8', font=2)
def exit_app(self):
self.destroy()
sys.exit()
| UTF-8 | Python | false | false | 3,519 | py | 10 | startwindow.py | 5 | 0.501493 | 0.4773 | 0 | 83 | 39.337349 | 80 |
ivyliu1205/comp3311 | 12,498,354,869,069 | 6753ad5991edaf263d4fa3045369fd325f9cef93 | fd16fda59f2839defb4003b6f9e65ecd9687e81c | /Assignment/Assignment3/q3.py | 42234dcc7b1b7e3f5bf801b2c1cc19c841415b47 | []
| no_license | https://github.com/ivyliu1205/comp3311 | 25767bcf342366026723bbce4190d88ca6383e77 | 8ea8da20e06c91c36a327a10fbc315dc8353b342 | refs/heads/master | 2022-04-10T18:57:28.312914 | 2020-02-23T14:44:28 | 2020-02-23T14:44:28 | 242,533,561 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # COMP3311 19T3 Assignment 3
import cs3311
import sys
#define CODE
code = 'ENGG'
if len(sys.argv) == 2:
code = sys.argv[1]
conn = cs3311.connect()
cur = conn.cursor()
cur.execute(
"SELECT DISTINCT bname, array_agg(DISTINCT subjectcode) \
FROM helper_q3_buildings\
WHERE term = '19T2' AND alpha = '{}' AND bname <> 'none'\
GROUP BY bname\
".format(code)
)
for tup in cur.fetchall():
bname, subjectcode = tup
print(bname)
for course in subjectcode:
print(" " + course)
cur.close()
conn.close()
| UTF-8 | Python | false | false | 577 | py | 22 | q3.py | 16 | 0.601386 | 0.563258 | 0 | 29 | 17.689655 | 61 |
testgitmuseum/Museum | 7,816,840,503,850 | c66f9200fb47f7f12238464b912a2061b21a2d33 | 42ba5a1f8f84bac7f248dd7d90f16fb518f986ff | /Utilities/WebConnectDownload/DownloadStockMarketData.py | 61cb1799a48ba96667ccd81918c2e0a50f290a30 | []
| no_license | https://github.com/testgitmuseum/Museum | 0c0684a1d5316c299319061ff691f27c55230fa5 | 34a8d9fd86a70ba738c3df85a73fe5c99a370fc4 | refs/heads/master | 2021-04-15T06:14:13.333360 | 2018-03-22T17:42:41 | 2018-03-22T17:42:41 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from datetime import datetime
from Utilities.TypeConverter import convert_date_to_int_yahoo_api_format
import json
import requests
import pandas as pd
import urllib2
__author__ = 'vfourrier'
GLOBAL_PATH = r'https://finance.yahoo.com/quote/'
path_results = r'/Volumes/SECURITY 1/Vincent/BSCM/ATM/Results/Data/'
"""
This class is used to download data related to a single stock or multiple stocks
Important: Date in yahoo api are calculated as the total number of seconds since
January 1st 1970 00:00:00.000
Use Utilities.TypeConverter to convert your date
"""
class DownloadStockMarketData:
def __init__(self, list_ticker, frequency, start_date, end_date):
self.global_path = GLOBAL_PATH
self.list_ticker = list_ticker
self.frequency = frequency
self.start_downloading_date = start_date
self.end_downloading_date = end_date
self.dict_url = {}
self.dict_object = {}
def create_base_stock(self):
self.get_url()
self.get_json_from_api()
def get_json_from_api(self):
for url in self.dict_url.keys():
try:
resp = requests.get(url)
r = resp.text.encode('utf-8')
i1 = 0
i1 = r.find('root.App.main', i1)
i1 = r.find('{', i1)
i2 = r.find("\n", i1)
i2 = r.rfind(';', i1, i2)
jsonstr = r[i1:i2]
data = json.loads(jsonstr)
self.dict_object[ticker] = data
except:
self.dict_object[ticker] = 'connection failed'
def get_url(self):
interval = self.get_interval(self.frequency)
temp_yahoo_start_date = convert_date_to_int_yahoo_api_format(self.start_downloading_date)
temp_yahoo_end_date = convert_date_to_int_yahoo_api_format(self.end_downloading_date)
if self.start_downloading_date is None and self.end_downloading_date is None:
for ticker in self.list_ticker:
self.dict_url[ticker] = self.global_path + ticker
else:
for ticker in self.list_ticker:
self.dict_url[ticker] = self.global_path + ticker +\
'/history?period1=' + temp_yahoo_start_date + '&period2=' +\
temp_yahoo_end_date + '&interval=' + interval + \
'&filter=history&frequency=' + interval
def get_historical_stock_df_between_dates(self, req_data=None):
if req_data is None:
return self.get_all_stock_data()
else:
return None
# get the list of required data for download
def get_all_stock_data(self):
return None
@staticmethod
def get_interval(frequency):
if frequency == 'daily':
return '1d'
if frequency == 'weekly':
return '1w'
if frequency == 'monthly':
return '1m'
#
# import requests
# import json
# symbol='MSFT'
# url='https://finance.yahoo.com/quote/' + symbol
# resp = requests.get(url)
# r=resp.text.encode('utf-8')
# i1=0
# i1=r.find('root.App.main', i1)
# i1=r.find('{', i1)
# i2=r.find("\n", i1)
# i2=r.rfind(';', i1, i2)
# jsonstr=r[i1:i2]
# data = json.loads(jsonstr)
# name=data['context']['dispatcher']['stores']['QuoteSummaryStore']['price']['shortName']
# price=data['context']['dispatcher']['stores']['QuoteSummaryStore']['price']['regularMarketPrice']['raw']
# change=data['context']['dispatcher']['stores']['QuoteSummaryStore']['price']['regularMarketChange']['raw']
# shares_outstanding=data['context']['dispatcher']['stores']['QuoteSummaryStore']['defaultKeyStatistics']['sharesOutstanding']['raw']
# market_cap=data['context']['dispatcher']['stores']['QuoteSummaryStore']['summaryDetail']['marketCap']['raw']
# trailing_pe=data['context']['dispatcher']['stores']['QuoteSummaryStore']['summaryDetail']['trailingPE']['raw']
# earnings_per_share=data['context']['dispatcher']['stores']['QuoteSummaryStore']['defaultKeyStatistics']['trailingEps']['raw']
# forward_annual_dividend_rate=data['context']['dispatcher']['stores']['QuoteSummaryStore']['summaryDetail']['dividendRate']['raw']
# forward_annual_dividend_yield=data['context']['dispatcher']['stores']['QuoteSummaryStore']['summaryDetail']['dividendYield']['raw']
#
#
# from datetime import datetime
# global_path = r'https://finance.yahoo.com/quote/'
# start_downloading_date = datetime(2018,1,1)
# end_downloading_date = datetime(2018,2,1)
# dict_url = {}
# frequency = 'daily'
# interval = get_interval(frequency)
# temp_yahoo_start_date = convert_date_to_int_yahoo_api_format(start_downloading_date)
# temp_yahoo_end_date = convert_date_to_int_yahoo_api_format(end_downloading_date)
# if start_downloading_date == None and end_downloading_date == None:
# for ticker in list_ticker:
# dict_url[ticker] = global_path + ticker
# else:
# for ticker in list_ticker:
# dict_url[ticker] = global_path + ticker + '/history?period1=' + str(temp_yahoo_start_date) + '&period2=' + str(temp_yahoo_end_date) + '&interval=' + interval + '&filter=history&frequency=' + interval
#
# list_
# dict_object = {}
# for url in dict_url.keys():
# try:
# resp = requests.get(url)
# r = resp.text.encode('utf-8')
# i1 = 0
# i1 = r.find('root.App.main', i1)
# i1 = r.find('{', i1)
# i2 = r.find("\n", i1)
# i2 = r.rfind(';', i1, i2)
# jsonstr = r[i1:i2]
# data = json.loads(jsonstr)
# dict_object[ticker] = data
# except:
# dict_object[ticker] = 'connection failed' | UTF-8 | Python | false | false | 5,674 | py | 20 | DownloadStockMarketData.py | 18 | 0.610328 | 0.596757 | 0 | 142 | 38.964789 | 209 |
h2oai/h2o-3 | 9,612,136,855,453 | c8585b6325ee2c36a662e1c11ccd65fa7f1c54e0 | db12b990924703cd74748d8585cd9c11fafa6746 | /h2o-py/tests/testdir_misc/pyunit_mojo_predict.py | 740e651316d3ea5cda752950b9e0cbf44d821f97 | [
"Apache-2.0"
]
| permissive | https://github.com/h2oai/h2o-3 | 919019a8f297eec676011a9cfd2cc2d97891ce14 | d817ab90c8c47f6787604a0b9639b66234158228 | refs/heads/master | 2023-08-17T18:50:17.732191 | 2023-08-17T16:44:42 | 2023-08-17T16:44:42 | 17,371,412 | 6,872 | 2,345 | Apache-2.0 | false | 2023-09-14T18:05:40 | 2014-03-03T16:08:07 | 2023-09-13T16:15:03 | 2023-09-14T18:05:38 | 619,600 | 6,476 | 1,997 | 2,722 | Jupyter Notebook | false | false | import sys
import tempfile
import shutil
import time
import os
import pandas
from pandas.testing import assert_frame_equal
sys.path.insert(1, "../../")
import h2o
from tests import pyunit_utils
from h2o.estimators.gbm import H2OGradientBoostingEstimator
genmodel_name = "h2o-genmodel.jar"
def download_mojo(model, mojo_zip_path, genmodel_path=None):
mojo_zip_path = os.path.abspath(mojo_zip_path)
parent_dir = os.path.dirname(mojo_zip_path)
print("\nDownloading MOJO @... " + parent_dir)
time0 = time.time()
if genmodel_path is None:
genmodel_path = os.path.join(parent_dir, genmodel_name)
mojo_file = model.download_mojo(path=mojo_zip_path, get_genmodel_jar=True, genmodel_name=genmodel_path)
print(" => %s (%d bytes)" % (mojo_file, os.stat(mojo_file).st_size))
assert os.path.exists(mojo_file)
print(" Time taken = %.3fs" % (time.time() - time0))
assert os.path.exists(mojo_zip_path)
print(" => %s (%d bytes)" % (mojo_zip_path, os.stat(mojo_zip_path).st_size))
assert os.path.exists(genmodel_path)
print(" => %s (%d bytes)" % (genmodel_path, os.stat(genmodel_path).st_size))
def mojo_predict_api_test(sandbox_dir):
data = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
input_csv = "%s/in.csv" % sandbox_dir
output_csv = "%s/prediction.csv" % sandbox_dir
h2o.export_file(data[1, 2:], input_csv)
data[1] = data[1].asfactor()
model = H2OGradientBoostingEstimator(distribution="bernoulli")
model.train(x=[2, 3, 4, 5, 6, 7, 8], y=1, training_frame=data)
# download mojo
model_zip_path = os.path.join(sandbox_dir, 'model.zip')
genmodel_path = os.path.join(sandbox_dir, 'h2o-genmodel.jar')
download_mojo(model, model_zip_path)
assert os.path.isfile(model_zip_path)
assert os.path.isfile(genmodel_path)
# test that we can predict using default paths
h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=model_zip_path, verbose=True)
h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=model_zip_path, genmodel_jar_path=genmodel_path,
verbose=True)
assert os.path.isfile(output_csv)
os.remove(model_zip_path)
os.remove(genmodel_path)
os.remove(output_csv)
# test that we can predict using custom genmodel path
other_sandbox_dir = tempfile.mkdtemp()
try:
genmodel_path = os.path.join(other_sandbox_dir, 'h2o-genmodel-custom.jar')
download_mojo(model, model_zip_path, genmodel_path)
assert os.path.isfile(model_zip_path)
assert os.path.isfile(genmodel_path)
try:
h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=model_zip_path, verbose=True)
assert False, "There should be no h2o-genmodel.jar at %s" % sandbox_dir
except RuntimeError:
pass
assert not os.path.isfile(output_csv)
h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=model_zip_path,
genmodel_jar_path=genmodel_path, verbose=True)
assert os.path.isfile(output_csv)
os.remove(output_csv)
output_csv = "%s/out.prediction" % other_sandbox_dir
# test that we can predict using default paths
h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=model_zip_path,
genmodel_jar_path=genmodel_path, verbose=True, output_csv_path=output_csv)
assert os.path.isfile(output_csv)
os.remove(model_zip_path)
os.remove(genmodel_path)
os.remove(output_csv)
finally:
shutil.rmtree(other_sandbox_dir)
def mojo_predict_csv_test(target_dir):
mojo_file_name = "prostate_gbm_model.zip"
mojo_zip_path = os.path.join(target_dir, mojo_file_name)
prostate = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
r = prostate[0].runif()
train = prostate[r < 0.70]
test = prostate[r >= 0.70]
# Getting first row from test data frame
pdf = test[1, 2:]
input_csv = "%s/in.csv" % target_dir
output_csv = "%s/output.csv" % target_dir
h2o.export_file(pdf, input_csv)
# =================================================================
# Regression
# =================================================================
regression_gbm1 = H2OGradientBoostingEstimator(distribution="gaussian")
regression_gbm1.train(x=[2, 3, 4, 5, 6, 7, 8], y=1, training_frame=train)
pred_reg = regression_gbm1.predict(pdf)
contribs_reg = regression_gbm1.predict_contributions(pdf)
p1 = pred_reg[0, 0]
print("Regression prediction: " + str(p1))
download_mojo(regression_gbm1, mojo_zip_path)
print("\nPerforming Regression Prediction using MOJO @... " + target_dir)
prediction_result = h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=mojo_zip_path,
output_csv_path=output_csv)
print("Prediction result: " + str(prediction_result))
assert p1 == float(prediction_result[0]['predict']), "expected predictions to be the same for binary and MOJO model for regression"
print("\nComparing Regression Contributions using MOJO @... " + target_dir)
contributions_result = h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=mojo_zip_path,
output_csv_path=output_csv, predict_contributions=True)
assert contributions_result is not None
contributions_pandas = pandas.read_csv(output_csv)
assert_frame_equal(contribs_reg.as_data_frame(use_pandas=True), contributions_pandas, check_dtype=False)
# =================================================================
# Binomial
# =================================================================
train[1] = train[1].asfactor()
bernoulli_gbm1 = H2OGradientBoostingEstimator(distribution="bernoulli")
bernoulli_gbm1.train(x=[2, 3, 4, 5, 6, 7, 8], y=1, training_frame=train)
pred_bin = bernoulli_gbm1.predict(pdf)
contribs_bin = bernoulli_gbm1.predict_contributions(pdf)
binary_prediction_0 = pred_bin[0, 1]
binary_prediction_1 = pred_bin[0, 2]
print("Binomial prediction: p0: " + str(binary_prediction_0))
print("Binomial prediction: p1: " + str(binary_prediction_1))
download_mojo(bernoulli_gbm1, mojo_zip_path)
print("\nPerforming Binomial Prediction using MOJO @... " + target_dir)
prediction_result = h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=mojo_zip_path,
output_csv_path=output_csv)
mojo_prediction_0 = float(prediction_result[0]['p0'])
mojo_prediction_1 = float(prediction_result[0]['p1'])
print("Binomial prediction: p0: " + str(mojo_prediction_0))
print("Binomial prediction: p1: " + str(mojo_prediction_1))
assert abs(binary_prediction_0 - mojo_prediction_0) < 1e-15, "expected predictions to be the same for binary and MOJO model for Binomial - p0"
assert abs(binary_prediction_1 - mojo_prediction_1) < 1e-15, "expected predictions to be the same for binary and MOJO model for Binomial - p1"
print("\nComparing Binary Classification Contributions using MOJO @... " + target_dir)
contributions_bin_result = h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=mojo_zip_path,
output_csv_path=output_csv, predict_contributions=True)
assert contributions_bin_result is not None
contributions_bin_pandas = pandas.read_csv(output_csv)
print(contributions_bin_pandas)
print(contribs_bin.as_data_frame(use_pandas=True))
assert_frame_equal(contribs_bin.as_data_frame(use_pandas=True), contributions_bin_pandas, check_dtype=False)
# =================================================================
# Multinomial
# =================================================================
iris = h2o.import_file(path=pyunit_utils.locate("smalldata/iris/iris.csv"))
r = iris[0].runif()
train = iris[r < 0.90]
test = iris[r >= 0.10]
# Getting first row from test data frame
pdf = test[1, 0:4]
input_csv = "%s/in-multi.csv" % target_dir
output_csv = "%s/output.csv" % target_dir
h2o.export_file(pdf, input_csv)
multi_gbm = H2OGradientBoostingEstimator()
multi_gbm.train(x=['C1', 'C2', 'C3', 'C4'], y='C5', training_frame=train)
pred_multi = multi_gbm.predict(pdf)
multinomial_prediction_1 = pred_multi[0, 1]
multinomial_prediction_2 = pred_multi[0, 2]
multinomial_prediction_3 = pred_multi[0, 3]
print("Multinomial prediction (Binary): p0: " + str(multinomial_prediction_1))
print("Multinomial prediction (Binary): p1: " + str(multinomial_prediction_2))
print("Multinomial prediction (Binary): p2: " + str(multinomial_prediction_3))
download_mojo(multi_gbm, mojo_zip_path)
print("\nPerforming Multinomial Prediction using MOJO @... " + target_dir)
prediction_result = h2o.mojo_predict_csv(input_csv_path=input_csv, mojo_zip_path=mojo_zip_path,
output_csv_path=output_csv)
mojo_prediction_1 = float(prediction_result[0]['Iris-setosa'])
mojo_prediction_2 = float(prediction_result[0]['Iris-versicolor'])
mojo_prediction_3 = float(prediction_result[0]['Iris-virginica'])
print("Multinomial prediction (MOJO): p0: " + str(mojo_prediction_1))
print("Multinomial prediction (MOJO): p1: " + str(mojo_prediction_2))
print("Multinomial prediction (MOJO): p2: " + str(mojo_prediction_3))
assert abs(multinomial_prediction_1 - mojo_prediction_1) < 1e-15, "expected predictions to be the same for binary and MOJO model for Multinomial - p0"
assert abs(multinomial_prediction_2 - mojo_prediction_2) < 1e-15, "expected predictions to be the same for binary and MOJO model for Multinomial - p1"
assert abs(multinomial_prediction_3 - mojo_prediction_3) < 1e-15, "expected predictions to be the same for binary and MOJO model for Multinomial - p2"
def mojo_predict_pandas_test(sandbox_dir):
data = h2o.import_file(path=pyunit_utils.locate("smalldata/logreg/prostate.csv"))
input_csv = "%s/in.csv" % sandbox_dir
pdf = data[1, 2:]
h2o.export_file(pdf, input_csv)
data[1] = data[1].asfactor()
model = H2OGradientBoostingEstimator(distribution="bernoulli")
model.train(x=[2, 3, 4, 5, 6, 7, 8], y=1, training_frame=data)
h2o_prediction = model.predict(pdf)
h2o_contributions = model.predict_contributions(pdf)
# download mojo
model_zip_path = os.path.join(sandbox_dir, 'model.zip')
genmodel_path = os.path.join(sandbox_dir, 'h2o-genmodel.jar')
download_mojo(model, model_zip_path)
assert os.path.isfile(model_zip_path)
assert os.path.isfile(genmodel_path)
pandas_frame = pandas.read_csv(input_csv)
mojo_prediction = h2o.mojo_predict_pandas(dataframe=pandas_frame, mojo_zip_path=model_zip_path, genmodel_jar_path=genmodel_path)
print("Binomial Prediction (Binary) - p0: %f" % h2o_prediction[0,1])
print("Binomial Prediction (Binary) - p1: %f" % h2o_prediction[0,2])
print("Binomial Prediction (MOJO) - p0: %f" % mojo_prediction['p0'].iloc[0])
print("Binomial Prediction (MOJO) - p1: %f" % mojo_prediction['p1'].iloc[0])
assert abs(h2o_prediction[0,1] - mojo_prediction['p0'].iloc[0]) < 1e-15, "expected predictions to be the same for binary and MOJO model - p0"
assert abs(h2o_prediction[0,2] - mojo_prediction['p1'].iloc[0]) < 1e-15, "expected predictions to be the same for binary and MOJO model - p0"
mojo_contributions = h2o.mojo_predict_pandas(dataframe=pandas_frame, mojo_zip_path=model_zip_path,
genmodel_jar_path=genmodel_path, predict_contributions=True)
assert_frame_equal(h2o_contributions.as_data_frame(use_pandas=True), mojo_contributions, check_dtype=False)
csv_test_dir = tempfile.mkdtemp()
api_test_dir = tempfile.mkdtemp()
pandas_test_dir = tempfile.mkdtemp()
try:
if __name__ == "__main__":
pyunit_utils.standalone_test(lambda: mojo_predict_api_test(api_test_dir))
pyunit_utils.standalone_test(lambda: mojo_predict_csv_test(csv_test_dir))
pyunit_utils.standalone_test(lambda: mojo_predict_pandas_test(pandas_test_dir))
else:
mojo_predict_api_test(api_test_dir)
mojo_predict_csv_test(csv_test_dir)
mojo_predict_pandas_test(pandas_test_dir)
finally:
shutil.rmtree(csv_test_dir)
shutil.rmtree(api_test_dir)
shutil.rmtree(pandas_test_dir)
| UTF-8 | Python | false | false | 12,620 | py | 6,198 | pyunit_mojo_predict.py | 5,451 | 0.647781 | 0.629477 | 0 | 269 | 45.914498 | 154 |
Satvik782/Youtube-Downloader | 14,800,457,329,691 | 6697589b7ba91dec8dfbf498a9c52d0301a01a34 | 29fb58ca30c888cf600a21ff49a7489f412e76b7 | /finalcode.py | 1d684193d579feaa4022a09e1fcf42ab9437bdc7 | []
| no_license | https://github.com/Satvik782/Youtube-Downloader | ad6f967b5bd342bad1fe510d5d928d4fd92e8239 | ded83b75e175dcd6c2bac896f2bd441b7b01c773 | refs/heads/main | 2023-05-22T22:11:42.070216 | 2021-05-29T14:57:27 | 2021-05-29T14:57:27 | 372,000,447 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from pytube import YouTube
print("Enter Link")
link = input()
print("Enter file name")
s=input()
yt = YouTube(link)
try:
yt.streams.filter(progressive = True,
file_extension = "mp4").first().download(output_path = "C:/YouTube Videos/",
filename = s)
except:
print("Some Error!")
print('Task Completed!') | UTF-8 | Python | false | false | 318 | py | 1 | finalcode.py | 1 | 0.679245 | 0.676101 | 0 | 16 | 18.9375 | 77 |
tsutterley/read-ICESat-2 | 2,611,340,119,363 | fc4b95c6bf265b2754c2fc5c41952df8011ea54f | ea81b6f47aa652f5713109dd4d4eb4623b5dd92c | /icesat2_toolkit/spatial.py | d1ddb8f6df2509247858c943b5b4171b0b445afa | [
"MIT",
"CC-BY-4.0"
]
| permissive | https://github.com/tsutterley/read-ICESat-2 | 8c7132e1215d92ee5ff79ba2ccac486bc218c768 | dd950b3569f6d4dab504c1d9905827e6a3c2876c | refs/heads/main | 2023-08-22T13:19:59.404879 | 2023-08-03T23:22:57 | 2023-08-03T23:22:57 | 193,143,942 | 66 | 21 | MIT | false | 2023-09-14T15:59:15 | 2019-06-21T18:32:36 | 2023-06-08T06:52:36 | 2023-09-14T15:59:14 | 4,689 | 56 | 22 | 4 | Python | false | false | #!/usr/bin/env python
u"""
spatial.py
Written by Tyler Sutterley (05/2023)
Utilities for reading and operating on spatial data
PYTHON DEPENDENCIES:
numpy: Scientific Computing Tools For Python
https://numpy.org
https://numpy.org/doc/stable/user/numpy-for-matlab-users.html
netCDF4: Python interface to the netCDF C library
https://unidata.github.io/netcdf4-python/netCDF4/index.html
h5py: Pythonic interface to the HDF5 binary data format
https://www.h5py.org/
gdal: Pythonic interface to the Geospatial Data Abstraction Library (GDAL)
https://pypi.python.org/pypi/GDAL
UPDATE HISTORY:
Updated 05/2023: using pathlib to define and expand paths
Updated 04/2023: copy inputs in cartesian to not modify original arrays
added iterative methods for converting from cartesian to geodetic
Updated 03/2023: add basic variable typing to function inputs
Updated 12/2022: place some imports behind try/except statements
Updated 10/2022: verify data variable in netCDF4/HDF5 read functions
Updated 07/2022: filter warnings after import attempts
Updated 06/2022: place netCDF4 import behind try/except statements
added field_mapping options to netCDF4 and HDF5 reads
Updated 04/2022: updated docstrings to numpy documentation format
Updated 01/2022: use iteration breaks in convert ellipsoid function
Written 11/2021
"""
from __future__ import annotations
import re
import io
import copy
import gzip
import uuid
import logging
import pathlib
import warnings
import numpy as np
# attempt imports
try:
import osgeo.gdal, osgeo.osr, osgeo.gdalconst
except (ImportError, ModuleNotFoundError) as exc:
warnings.filterwarnings("module")
warnings.warn("GDAL not available", ImportWarning)
try:
import h5py
except (ImportError, ModuleNotFoundError) as exc:
warnings.filterwarnings("module")
warnings.warn("h5py not available", ImportWarning)
try:
import netCDF4
except (ImportError, ModuleNotFoundError) as exc:
warnings.filterwarnings("module")
warnings.warn("netCDF4 not available", ImportWarning)
# ignore warnings
warnings.filterwarnings("ignore")
def case_insensitive_filename(filename: str | pathlib.Path):
"""
Searches a directory for a filename without case dependence
Parameters
----------
filename: str
input filename
"""
# check if file presently exists with input case
filename = pathlib.Path(filename).expanduser().absolute()
if not filename.exists():
# search for filename without case dependence
f = [f.name for f in filename.parent.iterdir() if
re.match(filename.name, f.name, re.I)]
# raise error if no file found
if not f:
raise FileNotFoundError(str(filename))
filename = filename.with_name(f.pop())
# return the matched filename
return filename
def from_file(filename: str, format: str, **kwargs):
"""
Wrapper function for reading data from an input format
Parameters
----------
filename: str
full path of input file
format: str
format of input file
**kwargs: dict
Keyword arguments for file reader
"""
# read input file to extract spatial coordinates and data
if (format == 'netCDF4'):
dinput = from_netCDF4(filename, **kwargs)
elif (format == 'HDF5'):
dinput = from_HDF5(filename, **kwargs)
elif (format == 'geotiff'):
dinput = from_geotiff(filename, **kwargs)
else:
raise ValueError(f'Invalid format {format}')
return dinput
def from_netCDF4(filename: str, **kwargs):
"""
Read data from a netCDF4 file
Parameters
----------
filename: str
full path of input netCDF4 file
compression: str or NoneType, default None
file compression type
timename: str, default 'time'
name for time-dimension variable
xname: str, default 'lon'
name for x-dimension variable
yname: str, default 'lat'
name for y-dimension variable
varname: str, default 'data'
name for data variable
field_mapping: dict, default {}
mapping between output variables and input netCDF4
"""
# set default keyword arguments
kwargs.setdefault('compression', None)
kwargs.setdefault('timename', 'time')
kwargs.setdefault('xname', 'lon')
kwargs.setdefault('yname', 'lat')
kwargs.setdefault('varname', 'data')
kwargs.setdefault('field_mapping', {})
# read data from netCDF4 file
# Open the NetCDF4 file for reading
if (kwargs['compression'] == 'gzip'):
# read as in-memory (diskless) netCDF4 dataset
with gzip.open(case_insensitive_filename(filename), 'r') as f:
fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=f.read())
elif (kwargs['compression'] == 'bytes'):
# read as in-memory (diskless) netCDF4 dataset
fileID = netCDF4.Dataset(uuid.uuid4().hex, memory=filename.read())
else:
# read netCDF4 dataset
fileID = netCDF4.Dataset(case_insensitive_filename(filename), 'r')
# Output NetCDF file information
logging.info(fileID.filepath())
logging.info(list(fileID.variables.keys()))
# create python dictionary for output variables and attributes
dinput = {}
dinput['attributes'] = {}
# get attributes for the file
for attr in ['title', 'description', 'projection']:
# try getting the attribute
try:
ncattr, = [s for s in fileID.ncattrs() if re.match(attr, s, re.I)]
dinput['attributes'][attr] = fileID.getncattr(ncattr)
except (ValueError, AttributeError):
pass
# list of attributes to attempt to retrieve from included variables
attributes_list = ['description', 'units', 'long_name', 'calendar',
'standard_name', 'grid_mapping', '_FillValue']
# mapping between netCDF4 variable names and output names
if not kwargs['field_mapping']:
kwargs['field_mapping']['x'] = copy.copy(kwargs['xname'])
kwargs['field_mapping']['y'] = copy.copy(kwargs['yname'])
if kwargs['varname'] is not None:
kwargs['field_mapping']['data'] = copy.copy(kwargs['varname'])
if kwargs['timename'] is not None:
kwargs['field_mapping']['time'] = copy.copy(kwargs['timename'])
# for each variable
for key, nc in kwargs['field_mapping'].items():
# Getting the data from each NetCDF variable
dinput[key] = fileID.variables[nc][:]
# get attributes for the included variables
dinput['attributes'][key] = {}
for attr in attributes_list:
# try getting the attribute
try:
ncattr, = [s for s in fileID.variables[nc].ncattrs()
if re.match(attr, s, re.I)]
dinput['attributes'][key][attr] = \
fileID.variables[nc].getncattr(ncattr)
except (ValueError, AttributeError):
pass
# get projection information if there is a grid_mapping attribute
if 'data' in dinput.keys() and 'grid_mapping' in dinput['attributes']['data'].keys():
# try getting the attribute
grid_mapping = dinput['attributes']['data']['grid_mapping']
# get coordinate reference system attributes
dinput['attributes']['crs'] = {}
for att_name in fileID[grid_mapping].ncattrs():
dinput['attributes']['crs'][att_name] = \
fileID.variables[grid_mapping].getncattr(att_name)
# get the spatial projection reference information from wkt
# and overwrite the file-level projection attribute (if existing)
srs = osgeo.osr.SpatialReference()
srs.ImportFromWkt(dinput['attributes']['crs']['crs_wkt'])
dinput['attributes']['projection'] = srs.ExportToProj4()
# convert to masked array if fill values
if 'data' in dinput.keys() and '_FillValue' in dinput['attributes']['data'].keys():
dinput['data'] = np.ma.asarray(dinput['data'])
dinput['data'].fill_value = dinput['attributes']['data']['_FillValue']
dinput['data'].mask = (dinput['data'].data == dinput['data'].fill_value)
# Closing the NetCDF file
fileID.close()
# return the spatial variables
return dinput
def from_HDF5(filename: str | pathlib.Path, **kwargs):
"""
Read data from a HDF5 file
Parameters
----------
filename: str
full path of input HDF5 file
compression: str or NoneType, default None
file compression type
timename: str, default 'time'
name for time-dimension variable
xname: str, default 'lon'
name for x-dimension variable
yname: str, default 'lat'
name for y-dimension variable
varname: str, default 'data'
name for data variable
field_mapping: dict, default {}
mapping between output variables and input HDF5
"""
# set default keyword arguments
kwargs.setdefault('compression', None)
kwargs.setdefault('timename', 'time')
kwargs.setdefault('xname', 'lon')
kwargs.setdefault('yname', 'lat')
kwargs.setdefault('varname', 'data')
kwargs.setdefault('field_mapping', {})
# read data from HDF5 file
# Open the HDF5 file for reading
if (kwargs['compression'] == 'gzip'):
# read gzip compressed file and extract into in-memory file object
with gzip.open(case_insensitive_filename(filename), 'r') as f:
fid = io.BytesIO(f.read())
# set filename of BytesIO object
fid.filename = filename.name
# rewind to start of file
fid.seek(0)
# read as in-memory (diskless) HDF5 dataset from BytesIO object
fileID = h5py.File(fid, 'r')
elif (kwargs['compression'] == 'bytes'):
# read as in-memory (diskless) HDF5 dataset
fileID = h5py.File(filename, 'r')
else:
# read HDF5 dataset
fileID = h5py.File(case_insensitive_filename(filename), 'r')
# Output HDF5 file information
logging.info(fileID.filename)
logging.info(list(fileID.keys()))
# create python dictionary for output variables and attributes
dinput = {}
dinput['attributes'] = {}
# get attributes for the file
for attr in ['title', 'description', 'projection']:
# try getting the attribute
try:
dinput['attributes'][attr] = fileID.attrs[attr]
except (KeyError, AttributeError):
pass
# list of attributes to attempt to retrieve from included variables
attributes_list = ['description', 'units', 'long_name', 'calendar',
'standard_name', 'grid_mapping', '_FillValue']
# mapping between HDF5 variable names and output names
if not kwargs['field_mapping']:
kwargs['field_mapping']['x'] = copy.copy(kwargs['xname'])
kwargs['field_mapping']['y'] = copy.copy(kwargs['yname'])
if kwargs['varname'] is not None:
kwargs['field_mapping']['data'] = copy.copy(kwargs['varname'])
if kwargs['timename'] is not None:
kwargs['field_mapping']['time'] = copy.copy(kwargs['timename'])
# for each variable
for key, h5 in kwargs['field_mapping'].items():
# Getting the data from each HDF5 variable
dinput[key] = np.copy(fileID[h5][:])
# get attributes for the included variables
dinput['attributes'][key] = {}
for attr in attributes_list:
# try getting the attribute
try:
dinput['attributes'][key][attr] = fileID[h5].attrs[attr]
except (KeyError, AttributeError):
pass
# get projection information if there is a grid_mapping attribute
if 'data' in dinput.keys() and 'grid_mapping' in dinput['attributes']['data'].keys():
# try getting the attribute
grid_mapping = dinput['attributes']['data']['grid_mapping']
# get coordinate reference system attributes
dinput['attributes']['crs'] = {}
for att_name, att_val in fileID[grid_mapping].attrs.items():
dinput['attributes']['crs'][att_name] = att_val
# get the spatial projection reference information from wkt
# and overwrite the file-level projection attribute (if existing)
srs = osgeo.osr.SpatialReference()
srs.ImportFromWkt(dinput['attributes']['crs']['crs_wkt'])
dinput['attributes']['projection'] = srs.ExportToProj4()
# convert to masked array if fill values
if 'data' in dinput.keys() and '_FillValue' in dinput['attributes']['data'].keys():
dinput['data'] = np.ma.asarray(dinput['data'])
dinput['data'].fill_value = dinput['attributes']['data']['_FillValue']
dinput['data'].mask = (dinput['data'].data == dinput['data'].fill_value)
# Closing the HDF5 file
fileID.close()
# return the spatial variables
return dinput
def from_geotiff(filename: str, **kwargs):
"""
Read data from a geotiff file
Parameters
----------
filename: str
full path of input geotiff file
compression: str or NoneType, default None
file compression type
bounds: list or NoneType, default bounds
extent of the file to read: ``[xmin, xmax, ymin, ymax]``
"""
# set default keyword arguments
kwargs.setdefault('compression', None)
kwargs.setdefault('bounds', None)
# Open the geotiff file for reading
if (kwargs['compression'] == 'gzip'):
# read as GDAL gzip virtual geotiff dataset
mmap_name = f"/vsigzip/{str(case_insensitive_filename(filename))}"
ds = osgeo.gdal.Open(mmap_name)
elif (kwargs['compression'] == 'bytes'):
# read as GDAL memory-mapped (diskless) geotiff dataset
mmap_name = f"/vsimem/{uuid.uuid4().hex}"
osgeo.gdal.FileFromMemBuffer(mmap_name, filename.read())
ds = osgeo.gdal.Open(mmap_name)
else:
# read geotiff dataset
ds = osgeo.gdal.Open(str(case_insensitive_filename(filename)),
osgeo.gdalconst.GA_ReadOnly)
# print geotiff file if verbose
logging.info(str(filename))
# create python dictionary for output variables and attributes
dinput = {}
dinput['attributes'] = {c:dict() for c in ['x', 'y', 'data']}
# get the spatial projection reference information
srs = ds.GetSpatialRef()
dinput['attributes']['projection'] = srs.ExportToProj4()
dinput['attributes']['wkt'] = srs.ExportToWkt()
# get dimensions
xsize = ds.RasterXSize
ysize = ds.RasterYSize
bsize = ds.RasterCount
# get geotiff info
info_geotiff = ds.GetGeoTransform()
dinput['attributes']['spacing'] = (info_geotiff[1], info_geotiff[5])
# calculate image extents
xmin = info_geotiff[0]
ymax = info_geotiff[3]
xmax = xmin + (xsize-1)*info_geotiff[1]
ymin = ymax + (ysize-1)*info_geotiff[5]
# x and y pixel center coordinates (converted from upper left)
x = xmin + info_geotiff[1]/2.0 + np.arange(xsize)*info_geotiff[1]
y = ymax + info_geotiff[5]/2.0 + np.arange(ysize)*info_geotiff[5]
# if reducing to specified bounds
if kwargs['bounds'] is not None:
# reduced x and y limits
xlimits = (kwargs['bounds'][0], kwargs['bounds'][1])
ylimits = (kwargs['bounds'][2], kwargs['bounds'][3])
# Specify offset and rows and columns to read
xoffset = int((xlimits[0] - xmin)/info_geotiff[1])
yoffset = int((ymax - ylimits[1])/np.abs(info_geotiff[5]))
xcount = int((xlimits[1] - xlimits[0])/info_geotiff[1]) + 1
ycount = int((ylimits[1] - ylimits[0])/np.abs(info_geotiff[5])) + 1
# reduced x and y pixel center coordinates
dinput['x'] = x[slice(xoffset, xoffset + xcount, None)]
dinput['y'] = y[slice(yoffset, yoffset + ycount, None)]
# read reduced image with GDAL
dinput['data'] = ds.ReadAsArray(xoff=xoffset, yoff=yoffset,
xsize=xcount, ysize=ycount)
# reduced image extent (converted back to upper left)
xmin = np.min(dinput['x']) - info_geotiff[1]/2.0
xmax = np.max(dinput['x']) - info_geotiff[1]/2.0
ymin = np.min(dinput['y']) - info_geotiff[5]/2.0
ymax = np.max(dinput['y']) - info_geotiff[5]/2.0
else:
# x and y pixel center coordinates
dinput['x'] = np.copy(x)
dinput['y'] = np.copy(y)
# read full image with GDAL
dinput['data'] = ds.ReadAsArray()
# image extent
dinput['attributes']['extent'] = (xmin, xmax, ymin, ymax)
# set default time to zero for each band
dinput.setdefault('time', np.zeros((bsize)))
# check if image has fill values
dinput['data'] = np.ma.asarray(dinput['data'])
dinput['data'].mask = np.zeros_like(dinput['data'], dtype=bool)
if ds.GetRasterBand(1).GetNoDataValue():
# mask invalid values
dinput['data'].fill_value = ds.GetRasterBand(1).GetNoDataValue()
# create mask array for bad values
dinput['data'].mask[:] = (dinput['data'].data == dinput['data'].fill_value)
# set attribute for fill value
dinput['attributes']['data']['_FillValue'] = dinput['data'].fill_value
# close the dataset
ds = None
# return the spatial variables
return dinput
def convert_ellipsoid(
phi1: np.ndarray,
h1: np.ndarray,
a1: float,
f1: float,
a2: float,
f2: float,
eps: float = 1e-12,
itmax: int = 10
):
"""
Convert latitudes and heights to a different ellipsoid using Newton-Raphson
Parameters
----------
phi1: np.ndarray
latitude of input ellipsoid in degrees
h1: np.ndarray
height above input ellipsoid in meters
a1: float
semi-major axis of input ellipsoid
f1: float
flattening of input ellipsoid
a2: float
semi-major axis of output ellipsoid
f2: float
flattening of output ellipsoid
eps: float, default 1e-12
tolerance to prevent division by small numbers and
to determine convergence
itmax: int, default 10
maximum number of iterations to use in Newton-Raphson
Returns
-------
phi2: np.ndarray
latitude of output ellipsoid in degrees
h2: np.ndarray
height above output ellipsoid in meters
References
----------
.. [1] J. Meeus, *Astronomical Algorithms*, 2nd edition, 477 pp., (1998).
"""
if (len(phi1) != len(h1)):
raise ValueError('phi and h have incompatable dimensions')
# semiminor axis of input and output ellipsoid
b1 = (1.0 - f1)*a1
b2 = (1.0 - f2)*a2
# initialize output arrays
npts = len(phi1)
phi2 = np.zeros((npts))
h2 = np.zeros((npts))
# for each point
for N in range(npts):
# force phi1 into range -90 <= phi1 <= 90
if (np.abs(phi1[N]) > 90.0):
phi1[N] = np.sign(phi1[N])*90.0
# handle special case near the equator
# phi2 = phi1 (latitudes congruent)
# h2 = h1 + a1 - a2
if (np.abs(phi1[N]) < eps):
phi2[N] = np.copy(phi1[N])
h2[N] = h1[N] + a1 - a2
# handle special case near the poles
# phi2 = phi1 (latitudes congruent)
# h2 = h1 + b1 - b2
elif ((90.0 - np.abs(phi1[N])) < eps):
phi2[N] = np.copy(phi1[N])
h2[N] = h1[N] + b1 - b2
# handle case if latitude is within 45 degrees of equator
elif (np.abs(phi1[N]) <= 45):
# convert phi1 to radians
phi1r = phi1[N] * np.pi/180.0
sinphi1 = np.sin(phi1r)
cosphi1 = np.cos(phi1r)
# prevent division by very small numbers
cosphi1 = np.copy(eps) if (cosphi1 < eps) else cosphi1
# calculate tangent
tanphi1 = sinphi1 / cosphi1
u1 = np.arctan(b1 / a1 * tanphi1)
hpr1sin = b1 * np.sin(u1) + h1[N] * sinphi1
hpr1cos = a1 * np.cos(u1) + h1[N] * cosphi1
# set initial value for u2
u2 = np.copy(u1)
# setup constants
k0 = b2 * b2 - a2 * a2
k1 = a2 * hpr1cos
k2 = b2 * hpr1sin
# perform newton-raphson iteration to solve for u2
# cos(u2) will not be close to zero since abs(phi1) <= 45
for i in range(0, itmax+1):
cosu2 = np.cos(u2)
fu2 = k0 * np.sin(u2) + k1 * np.tan(u2) - k2
fu2p = k0 * cosu2 + k1 / (cosu2 * cosu2)
if (np.abs(fu2p) < eps):
break
else:
delta = fu2 / fu2p
u2 -= delta
if (np.abs(delta) < eps):
break
# convert latitude to degrees and verify values between +/- 90
phi2r = np.arctan(a2 / b2 * np.tan(u2))
phi2[N] = phi2r*180.0/np.pi
if (np.abs(phi2[N]) > 90.0):
phi2[N] = np.sign(phi2[N])*90.0
# calculate height
h2[N] = (hpr1cos - a2 * np.cos(u2)) / np.cos(phi2r)
# handle final case where latitudes are between 45 degrees and pole
else:
# convert phi1 to radians
phi1r = phi1[N] * np.pi/180.0
sinphi1 = np.sin(phi1r)
cosphi1 = np.cos(phi1r)
# prevent division by very small numbers
cosphi1 = np.copy(eps) if (cosphi1 < eps) else cosphi1
# calculate tangent
tanphi1 = sinphi1 / cosphi1
u1 = np.arctan(b1 / a1 * tanphi1)
hpr1sin = b1 * np.sin(u1) + h1[N] * sinphi1
hpr1cos = a1 * np.cos(u1) + h1[N] * cosphi1
# set initial value for u2
u2 = np.copy(u1)
# setup constants
k0 = a2 * a2 - b2 * b2
k1 = b2 * hpr1sin
k2 = a2 * hpr1cos
# perform newton-raphson iteration to solve for u2
# sin(u2) will not be close to zero since abs(phi1) > 45
for i in range(0, itmax+1):
sinu2 = np.sin(u2)
fu2 = k0 * np.cos(u2) + k1 / np.tan(u2) - k2
fu2p = -1 * (k0 * sinu2 + k1 / (sinu2 * sinu2))
if (np.abs(fu2p) < eps):
break
else:
delta = fu2 / fu2p
u2 -= delta
if (np.abs(delta) < eps):
break
# convert latitude to degrees and verify values between +/- 90
phi2r = np.arctan(a2 / b2 * np.tan(u2))
phi2[N] = phi2r*180.0/np.pi
if (np.abs(phi2[N]) > 90.0):
phi2[N] = np.sign(phi2[N])*90.0
# calculate height
h2[N] = (hpr1sin - b2 * np.sin(u2)) / np.sin(phi2r)
# return the latitude and height
return (phi2, h2)
def compute_delta_h(
a1: float,
f1: float,
a2: float,
f2: float,
lat: np.ndarray
):
"""
Compute difference in elevation for two ellipsoids at a given
latitude using a simplified empirical equation
Parameters
----------
a1: float
semi-major axis of input ellipsoid
f1: float
flattening of input ellipsoid
a2: float
semi-major axis of output ellipsoid
f2: float
flattening of output ellipsoid
lat: np.ndarray
latitudes (degrees north)
Returns
-------
delta_h: np.ndarray
difference in elevation for two ellipsoids
References
----------
.. [1] J Meeus, *Astronomical Algorithms*, pp. 77--82, (1991).
"""
# force phi into range -90 <= phi <= 90
gt90, = np.nonzero((lat < -90.0) | (lat > 90.0))
lat[gt90] = np.sign(lat[gt90])*90.0
# semiminor axis of input and output ellipsoid
b1 = (1.0 - f1)*a1
b2 = (1.0 - f2)*a2
# compute delta_a and delta_b coefficients
delta_a = a2 - a1
delta_b = b2 - b1
# compute differences between ellipsoids
# delta_h = -(delta_a * cos(phi)^2 + delta_b * sin(phi)^2)
phi = lat * np.pi/180.0
delta_h = -(delta_a*np.cos(phi)**2 + delta_b*np.sin(phi)**2)
return delta_h
def wrap_longitudes(lon: float | np.ndarray):
"""
Wraps longitudes to range from -180 to +180
Parameters
----------
lon: float or np.ndarray
longitude (degrees east)
"""
phi = np.arctan2(np.sin(lon*np.pi/180.0), np.cos(lon*np.pi/180.0))
# convert phi from radians to degrees
return phi*180.0/np.pi
def to_cartesian(
lon: np.ndarray,
lat: np.ndarray,
h: float | np.ndarray = 0.0,
a_axis: float = 6378137.0,
flat: float = 1.0/298.257223563
):
"""
Converts geodetic coordinates to Cartesian coordinates
Parameters
----------
lon: np.ndarray
longitude (degrees east)
lat: np.ndarray
latitude (degrees north)
h: float or np.ndarray, default 0.0
height above ellipsoid (or sphere)
a_axis: float, default 6378137.0
semimajor axis of the ellipsoid
for spherical coordinates set to radius of the Earth
flat: float, default 1.0/298.257223563
ellipsoidal flattening
for spherical coordinates set to 0
"""
# verify axes and copy to not modify inputs
lon = np.atleast_1d(np.copy(lon))
lat = np.atleast_1d(np.copy(lat))
# fix coordinates to be 0:360
lon[lon < 0] += 360.0
# Linear eccentricity and first numerical eccentricity
lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2)
ecc1 = lin_ecc/a_axis
# convert from geodetic latitude to geocentric latitude
dtr = np.pi/180.0
# geodetic latitude in radians
latitude_geodetic_rad = lat*dtr
# prime vertical radius of curvature
N = a_axis/np.sqrt(1.0 - ecc1**2.0*np.sin(latitude_geodetic_rad)**2.0)
# calculate X, Y and Z from geodetic latitude and longitude
X = (N + h) * np.cos(latitude_geodetic_rad) * np.cos(lon*dtr)
Y = (N + h) * np.cos(latitude_geodetic_rad) * np.sin(lon*dtr)
Z = (N * (1.0 - ecc1**2.0) + h) * np.sin(latitude_geodetic_rad)
# return the cartesian coordinates
return (X, Y, Z)
def to_sphere(x: np.ndarray, y: np.ndarray, z: np.ndarray):
"""
Convert from cartesian coordinates to spherical coordinates
Parameters
----------
x, np.ndarray
cartesian x-coordinates
y, np.ndarray
cartesian y-coordinates
z, np.ndarray
cartesian z-coordinates
"""
# verify axes and copy to not modify inputs
x = np.atleast_1d(np.copy(x))
y = np.atleast_1d(np.copy(y))
z = np.atleast_1d(np.copy(z))
# calculate radius
rad = np.sqrt(x**2.0 + y**2.0 + z**2.0)
# calculate angular coordinates
# phi: azimuthal angle
phi = np.arctan2(y, x)
# th: polar angle
th = np.arccos(z/rad)
# convert to degrees and fix to 0:360
lon = 180.0*phi/np.pi
if np.any(lon < 0):
lt0 = np.nonzero(lon < 0)
lon[lt0] += 360.0
# convert to degrees and fix to -90:90
lat = 90.0 - (180.0*th/np.pi)
np.clip(lat, -90, 90, out=lat)
# return latitude, longitude and radius
return (lon, lat, rad)
def to_geodetic(
x: np.ndarray,
y: np.ndarray,
z: np.ndarray,
a_axis: float = 6378137.0,
flat: float = 1.0/298.257223563,
method: str = 'bowring',
eps: float = np.finfo(np.float64).eps,
iterations: int = 10
):
"""
Convert from cartesian coordinates to geodetic coordinates
using either iterative or closed-form methods
Parameters
----------
x, float
cartesian x-coordinates
y, float
cartesian y-coordinates
z, float
cartesian z-coordinates
a_axis: float, default 6378137.0
semimajor axis of the ellipsoid
flat: float, default 1.0/298.257223563
ellipsoidal flattening
method: str, default 'bowring'
method to use for conversion
- ``'moritz'``: iterative solution
- ``'bowring'``: iterative solution
- ``'zhu'``: closed-form solution
eps: float, default np.finfo(np.float64).eps
tolerance for iterative methods
iterations: int, default 10
maximum number of iterations
"""
# verify axes and copy to not modify inputs
x = np.atleast_1d(np.copy(x))
y = np.atleast_1d(np.copy(y))
z = np.atleast_1d(np.copy(z))
# calculate the geodetic coordinates using the specified method
if (method.lower() == 'moritz'):
return _moritz_iterative(x, y, z, a_axis=a_axis, flat=flat,
eps=eps, iterations=iterations)
elif (method.lower() == 'bowring'):
return _bowring_iterative(x, y, z, a_axis=a_axis, flat=flat,
eps=eps, iterations=iterations)
elif (method.lower() == 'zhu'):
return _zhu_closed_form(x, y, z, a_axis=a_axis, flat=flat)
else:
raise ValueError(f'Unknown conversion method: {method}')
def _moritz_iterative(
x: np.ndarray,
y: np.ndarray,
z: np.ndarray,
a_axis: float = 6378137.0,
flat: float = 1.0/298.257223563,
eps: float = np.finfo(np.float64).eps,
iterations: int = 10
):
"""
Convert from cartesian coordinates to geodetic coordinates
using the iterative solution of [1]_
Parameters
----------
x, float
cartesian x-coordinates
y, float
cartesian y-coordinates
z, float
cartesian z-coordinates
a_axis: float, default 6378137.0
semimajor axis of the ellipsoid
flat: float, default 1.0/298.257223563
ellipsoidal flattening
eps: float, default np.finfo(np.float64).eps
tolerance for iterative method
iterations: int, default 10
maximum number of iterations
References
----------
.. [1] B. Hofmann-Wellenhof and H. Moritz,
*Physical Geodesy*, 2nd Edition, 403 pp., (2006).
`doi: 10.1007/978-3-211-33545-1
<https://doi.org/10.1007/978-3-211-33545-1>`_
"""
# Linear eccentricity and first numerical eccentricity
lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2)
ecc1 = lin_ecc/a_axis
# degrees to radians
dtr = np.pi/180.0
# calculate longitude
lon = np.arctan2(y, x)/dtr
# set initial estimate of height to 0
h = np.zeros_like(lon)
h0 = np.inf*np.ones_like(lon)
# calculate radius of parallel
p = np.sqrt(x**2 + y**2)
# initial estimated value for phi using h=0
phi = np.arctan(z/(p*(1.0 - ecc1**2)))
# iterate to tolerance or to maximum number of iterations
i = 0
while np.any(np.abs(h - h0) > eps) and (i <= iterations):
# copy previous iteration of height
h0 = np.copy(h)
# calculate radius of curvature
N = a_axis/np.sqrt(1.0 - ecc1**2 * np.sin(phi)**2)
# estimate new value of height
h = p/np.cos(phi) - N
# estimate new value for latitude using heights
phi = np.arctan(z/(p*(1.0 - ecc1**2*N/(N + h))))
# add to iterator
i += 1
# return latitude, longitude and height
return (lon, phi/dtr, h)
def _bowring_iterative(
x: np.ndarray,
y: np.ndarray,
z: np.ndarray,
a_axis: float = 6378137.0,
flat: float = 1.0/298.257223563,
eps: float = np.finfo(np.float64).eps,
iterations: int = 10
):
"""
Convert from cartesian coordinates to geodetic coordinates
using the iterative solution of [1]_ [2]_
Parameters
----------
x, float
cartesian x-coordinates
y, float
cartesian y-coordinates
z, float
cartesian z-coordinates
a_axis: float, default 6378137.0
semimajor axis of the ellipsoid
flat: float, default 1.0/298.257223563
ellipsoidal flattening
eps: float, default np.finfo(np.float64).eps
tolerance for iterative method
iterations: int, default 10
maximum number of iterations
References
----------
.. [1] B. R. Bowring, "Transformation from spatial
to geodetic coordinates," *Survey Review*, 23(181),
323--327, (1976). `doi: 10.1179/sre.1976.23.181.323
<https://doi.org/10.1179/sre.1976.23.181.323>`_
.. [2] B. R. Bowring, "The Accuracy Of Geodetic
Latitude and Height Equations," *Survey Review*, 28(218),
202--206, (1985). `doi: 10.1179/sre.1985.28.218.202
<https://doi.org/10.1179/sre.1985.28.218.202>`_
"""
# semiminor axis of the WGS84 ellipsoid [m]
b_axis = (1.0 - flat)*a_axis
# Linear eccentricity
lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2)
# square of first and second numerical eccentricity
e12 = lin_ecc**2/a_axis**2
e22 = lin_ecc**2/b_axis**2
# degrees to radians
dtr = np.pi/180.0
# calculate longitude
lon = np.arctan2(y, x)/dtr
# calculate radius of parallel
p = np.sqrt(x**2 + y**2)
# initial estimated value for reduced parametric latitude
u = np.arctan(a_axis*z/(b_axis*p))
# initial estimated value for latitude
phi = np.arctan((z + e22*b_axis*np.sin(u)**3) /
(p - e12*a_axis*np.cos(u)**3))
phi0 = np.inf*np.ones_like(lon)
# iterate to tolerance or to maximum number of iterations
i = 0
while np.any(np.abs(phi - phi0) > eps) and (i <= iterations):
# copy previous iteration of phi
phi0 = np.copy(phi)
# calculate reduced parametric latitude
u = np.arctan(b_axis*np.tan(phi)/a_axis)
# estimate new value of latitude
phi = np.arctan((z + e22*b_axis*np.sin(u)**3) /
(p - e12*a_axis*np.cos(u)**3))
# add to iterator
i += 1
# calculate final radius of curvature
N = a_axis/np.sqrt(1.0 - e12 * np.sin(phi)**2)
# estimate final height (Bowring, 1985)
h = p*np.cos(phi) + z*np.sin(phi) - a_axis**2/N
# return latitude, longitude and height
return (lon, phi/dtr, h)
def _zhu_closed_form(
x: np.ndarray,
y: np.ndarray,
z: np.ndarray,
a_axis: float = 6378137.0,
flat: float = 1.0/298.257223563,
):
"""
Convert from cartesian coordinates to geodetic coordinates
using the closed-form solution of [1]_
Parameters
----------
x, float
cartesian x-coordinates
y, float
cartesian y-coordinates
z, float
cartesian z-coordinates
a_axis: float, default 6378137.0
semimajor axis of the ellipsoid
flat: float, default 1.0/298.257223563
ellipsoidal flattening
References
----------
.. [1] J. Zhu, "Exact conversion of Earth-centered,
Earth-fixed coordinates to geodetic coordinates,"
*Journal of Guidance, Control, and Dynamics*,
16(2), 389--391, (1993). `doi: 10.2514/3.21016
<https://arc.aiaa.org/doi/abs/10.2514/3.21016>`_
"""
# semiminor axis of the WGS84 ellipsoid [m]
b_axis = (1.0 - flat)*a_axis
# Linear eccentricity
lin_ecc = np.sqrt((2.0*flat - flat**2)*a_axis**2)
# square of first numerical eccentricity
e12 = lin_ecc**2/a_axis**2
# degrees to radians
dtr = np.pi/180.0
# calculate longitude
lon = np.arctan2(y, x)/dtr
# calculate radius of parallel
w = np.sqrt(x**2 + y**2)
# allocate for output latitude and height
lat = np.zeros_like(lon)
h = np.zeros_like(lon)
if np.any(w == 0):
# special case where w == 0 (exact polar solution)
ind, = np.nonzero(w == 0)
h[ind] = np.sign(z[ind])*z[ind] - b_axis
lat[ind] = 90.0*np.sign(z[ind])
else:
# all other cases
ind, = np.nonzero(w != 0)
l = e12/2.0
m = (w[ind]/a_axis)**2.0
n = ((1.0 - e12)*z[ind]/b_axis)**2.0
i = -(2.0*l**2 + m + n)/2.0
k = (l**2.0 - m - n)*l**2.0
q = (1.0/216.0)*(m + n - 4.0*l**2)**3.0 + m*n*l**2.0
D = np.sqrt((2.0*q - m*n*l**2)*m*n*l**2)
B = i/3.0 - (q + D)**(1.0/3.0) - (q - D)**(1.0/3.0)
t = np.sqrt(np.sqrt(B**2-k) - (B + i)/2.0) - \
np.sign(m - n)*np.sqrt((B - i)/2.0)
wi = w/(t + l)
zi = (1.0 - e12)*z[ind]/(t - l)
# calculate latitude and height
lat[ind] = np.arctan2(zi, ((1.0 - e12)*wi))/dtr
h[ind] = np.sign(t-1.0+l)*np.sqrt((w-wi)**2.0 + (z[ind]-zi)**2.0)
# return latitude, longitude and height
return (lon, lat, h)
def scale_areas(
lat: np.ndarray,
flat: float=1.0/298.257223563,
ref: float=70.0
):
"""
Calculates area scaling factors for a polar stereographic projection
including special case of at the exact pole [1]_ [2]_
Parameters
----------
lat: np.ndarray
latitude (degrees north)
flat: float, default 1.0/298.257223563
ellipsoidal flattening
ref: float, default 70.0
reference latitude (true scale latitude)
Returns
-------
scale: np.ndarray
area scaling factors at input latitudes
References
----------
.. [1] J. P. Snyder, *Map Projections used by the U.S. Geological Survey*,
Geological Survey Bulletin 1532, U.S. Government Printing Office, (1982).
.. [2] JPL Technical Memorandum 3349-85-101
"""
# convert latitude from degrees to positive radians
theta = np.abs(lat)*np.pi/180.0
# convert reference latitude from degrees to positive radians
theta_ref = np.abs(ref)*np.pi/180.0
# square of the eccentricity of the ellipsoid
# ecc2 = (1-b**2/a**2) = 2.0*flat - flat^2
ecc2 = 2.0*flat - flat**2
# eccentricity of the ellipsoid
ecc = np.sqrt(ecc2)
# calculate ratio at input latitudes
m = np.cos(theta)/np.sqrt(1.0 - ecc2*np.sin(theta)**2)
t = np.tan(np.pi/4.0 - theta/2.0)/((1.0 - ecc*np.sin(theta)) / \
(1.0 + ecc*np.sin(theta)))**(ecc/2.0)
# calculate ratio at reference latitude
mref = np.cos(theta_ref)/np.sqrt(1.0 - ecc2*np.sin(theta_ref)**2)
tref = np.tan(np.pi/4.0 - theta_ref/2.0)/((1.0 - ecc*np.sin(theta_ref)) / \
(1.0 + ecc*np.sin(theta_ref)))**(ecc/2.0)
# distance scaling
k = (mref/m)*(t/tref)
kp = 0.5*mref*np.sqrt(((1.0+ecc)**(1.0+ecc))*((1.0-ecc)**(1.0-ecc)))/tref
# area scaling
scale = np.where(np.isclose(theta, np.pi/2.0), 1.0/(kp**2), 1.0/(k**2))
return scale
# PURPOSE: check a specified 2D point is inside a specified 2D polygon
def inside_polygon(
        x: np.ndarray,
        y: np.ndarray,
        xpts: np.ndarray,
        ypts: np.ndarray,
        threshold: float = 0.01
    ):
    """
    Indicates whether a specified 2D point is inside a specified 2D polygon

    Uses the winding-angle method: the angles subtended at the query point
    by each polygon edge sum to +/-2*pi for interior points and to ~0 for
    exterior points.

    Parameters
    ----------
    x: np.ndarray
        x-coordinates of the 2D point(s) to check
    y: np.ndarray
        y-coordinates of the 2D point(s) to check
    xpts: np.ndarray
        x-coordinates of the 2D polygon.
    ypts: np.ndarray
        y-coordinates of the 2D polygon
    threshold: float, default 0.01
        minimum angle for checking if inside polygon

    Returns
    -------
    flag: np.ndarray
        flag denoting if points are within polygon

        - ``True``: within polygon
        - ``False``: outside polygon
    """
    # create numpy arrays for 2D points
    x = np.atleast_1d(x)
    y = np.atleast_1d(y)
    nn = len(x)
    # create numpy arrays for polygon points
    xpts = np.array(xpts)
    ypts = np.array(ypts)
    # check dimensions of polygon points
    if (xpts.ndim != 1):
        raise ValueError('X coordinates of polygon not a vector.')
    if (ypts.ndim != 1):
        raise ValueError('Y coordinates of polygon not a vector.')
    if (len(xpts) != len(ypts)):
        raise ValueError('Incompatable vector dimensions.')
    # maximum possible number of vertices in polygon
    N = len(xpts)
    # Close the polygon if not already closed.  The polygon is only closed
    # when BOTH coordinates of the last vertex match the first vertex;
    # bug fix: the previous "and" test wrongly treated the polygon as
    # closed when only one coordinate matched, dropping its final edge
    if not np.isclose(xpts[-1],xpts[0]) or not np.isclose(ypts[-1],ypts[0]):
        xpts = np.concatenate((xpts,[xpts[0]]),axis=0)
        ypts = np.concatenate((ypts,[ypts[0]]),axis=0)
    else:
        # remove 1 from number of vertices (closing vertex is a duplicate)
        N -= 1
    # Calculate dot and cross products of points to neighboring polygon points
    i = np.arange(N)
    X1 = np.dot(xpts[i][:,np.newaxis],np.ones((1,nn))) - \
        np.dot(np.ones((N,1)),x[np.newaxis,:])
    Y1 = np.dot(ypts[i][:,np.newaxis],np.ones((1,nn))) - \
        np.dot(np.ones((N,1)),y[np.newaxis,:])
    X2 = np.dot(xpts[i+1][:,np.newaxis],np.ones((1,nn))) - \
        np.dot(np.ones((N,1)),x[np.newaxis,:])
    Y2 = np.dot(ypts[i+1][:,np.newaxis],np.ones((1,nn))) - \
        np.dot(np.ones((N,1)),y[np.newaxis,:])
    # Dot-product
    dp = X1*X2 + Y1*Y2
    # Cross-product
    cp = X1*Y2 - Y1*X2
    # Calculate tangent of the angle between the two nearest adjacent points
    theta = np.arctan2(cp,dp)
    # If point is outside polygon then summation over all possible
    # angles will equal a small number (e.g. 0.01)
    flag = np.where(np.abs(np.sum(theta,axis=0)) > threshold, True, False)
    # Make a scalar value if there was only one input value
    if (nn == 1):
        return flag[0]
    else:
        return flag
| UTF-8 | Python | false | false | 41,157 | py | 82 | spatial.py | 30 | 0.596278 | 0.561921 | 0 | 1,117 | 35.846016 | 89 |
PauloVilarinho/algoritmos | 5,274,219,886,493 | 151f28b1a0780180d1a8e73ac838aac3f1224827 | 8a2f6867eca5d40d1770b58ab73e8a9f01a9ddc5 | /ListasFabio/lista3/fabio03_q12.py | 7ada99780db863e16eb40a93161bed071632fbf5 | []
| no_license | https://github.com/PauloVilarinho/algoritmos | 822eaae631de1660df553d87d2f168c2646efa60 | a3cfed6081bbf5ae0b17766a027abcd558b9e9d0 | refs/heads/master | 2020-03-13T18:31:50.980839 | 2018-06-21T21:17:28 | 2018-06-21T21:17:28 | 131,236,984 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def main():
    """Read an integer n, then read n integers and print their sum and mean."""
    n = int(input("insira o numero n"))
    aux = 1
    soma = 0
    # accumulate the n values typed by the user
    while aux <= n :
        soma += int(input("digite 1 numero"))
        aux += 1
    # print the total and the average (divides by zero if n == 0)
    print(soma)
    print(soma/n)
if __name__ == '__main__':
    main()
| UTF-8 | Python | false | false | 234 | py | 339 | fabio03_q12.py | 338 | 0.470085 | 0.452991 | 0 | 12 | 18.5 | 45 |
wearhacks/main_wearhacks | 19,499,151,557,216 | 70be0a3f61056c4792ef0e00fa4f9c5431c0f5db | b4bbd4785529df30bd1b2bacf79be05f36b8bfdb | /events/forms.py | ec1932fb8905a9386d02f5d287faa87510535b8d | []
| no_license | https://github.com/wearhacks/main_wearhacks | 9c45080e2c74183e2359833d69c79d8befda5504 | 01f3745c9d786d6f12429b60473355f8d7aa44ed | refs/heads/master | 2021-01-10T01:37:52.114207 | 2018-10-09T20:50:34 | 2018-10-09T20:50:34 | 44,467,237 | 0 | 1 | null | false | 2016-03-31T01:21:21 | 2015-10-18T05:50:16 | 2016-01-29T04:09:33 | 2016-03-31T01:21:21 | 40,248 | 0 | 1 | 20 | CSS | null | null | from django import forms
class PartnerForm(forms.Form):
    # Contact form for prospective partners: organization, email, message.
    # NOTE(review): 'required' here is passed as a widget HTML attribute,
    # not as the field-level required flag -- presumably for client-side
    # validation only; confirm the server-side requirement is intended.
    organization_name = forms.CharField(widget=forms.TextInput(attrs={'required':True}))
    email = forms.EmailField(widget=forms.EmailInput(attrs={'required':True}))
    message = forms.CharField(widget=forms.Textarea(attrs={'required':True}))
| UTF-8 | Python | false | false | 303 | py | 55 | forms.py | 26 | 0.752475 | 0.752475 | 0 | 6 | 49.5 | 88 |
pav3lo/joke-dataset | 16,080,357,573,199 | 9e039c774b72049636763ca76c9dafd4164726bd | 295e3f9daeed9955263dbe3661dba246ba9e8ba3 | /etc/joke_mover.py | 83acd98f434489da1b6634db76eb1d585918eb1f | []
| no_license | https://github.com/pav3lo/joke-dataset | 1df94f45ed5a1ceb682dcab173b676cc5a1fe078 | bf4a61e97cfd0628f0ce1661ca598dfa99ecdae2 | refs/heads/master | 2021-09-07T11:03:52.448534 | 2018-02-22T01:25:48 | 2018-02-22T01:25:48 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # INPUT: .csv with "Question", "Answer", and "meta" fields.
# OUTPUT: .csv with only the rows with the specified "meta" tags.
# NOTE(review): this script uses the Python 2 print statement ("print idx"
# below), so it will not run unmodified under Python 3.
import pandas as pd
import sys
from time import strftime
# source file to filter; rows are only removed when DELETE_AFTER_MOVING is set
INFILE = 'pull2.csv'
DELETE_AFTER_MOVING = False
df = pd.read_csv(INFILE)
cols = (["Question", "Answer", "meta"])
newdf = pd.DataFrame(columns=cols)
# tag searched for inside each row's "meta" field; the first command-line
# argument overrides the default
tag = "tag:nosubject"
if len(sys.argv) > 1:
    tag = sys.argv[1]
# output filename embeds the tag and a timestamp
outfile = tag + "_at_" + strftime("%Y-%m-%d_%H:%M") + ".csv"
insertion_row = 1
# copy every row whose meta string contains the tag into newdf
for idx, tags in enumerate(df["meta"]):
    if tag in tags:
        newdf.loc[insertion_row] = df.loc[idx]
        if DELETE_AFTER_MOVING:
            # drop() returns a new frame; the enumerate above keeps
            # iterating the originally-read column
            df = df.drop(idx)
        insertion_row += 1
    # progress indicator every 5000 rows
    if (idx % 5000 == 0):
        print idx
# write the (possibly reduced) source back, and the matching rows out
df.to_csv(INFILE, encoding='utf-8', index=False)
newdf.to_csv(outfile, encoding='utf-8', index=False)
| UTF-8 | Python | false | false | 830 | py | 25 | joke_mover.py | 16 | 0.622892 | 0.608434 | 0 | 36 | 22.055556 | 65 |
cseelhoff/tensorflow | 1,090,921,723,330 | aefa5bc23b3309793ef43dc85d03a862997507f9 | ba9a512d0403231f3b8e82befb2dea6a042b0c06 | /ttt.py | 9952f1909e0d6d0539e7c67875d33c51d3907e76 | []
| no_license | https://github.com/cseelhoff/tensorflow | fd1b084273a5751f00d06f79b781ba38bb0232b6 | 21e3097cbb9cc3be47038efbdd5d1be70bf0e5b8 | refs/heads/master | 2018-01-01T12:27:33.568177 | 2017-05-11T13:04:31 | 2017-05-11T13:04:31 | 70,170,974 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import copy
# an observation is 9 board cells plus 1 turn flag (see get_agent_observation)
observation_size = 10
# one action per board cell
total_possible_actions = 9
# the two player indices
agents_list = [0, 1]
class GameState:
    """Mutable tic-tac-toe position: 9-cell board, side to move, end flag."""

    def __init__(self):
        # cells hold 0 (empty), 1 (player 0's piece) or -1 (player 1's piece)
        self.board = np.array([0] * 9)
        # index of the agent whose turn it is (0 or 1)
        self.turn = 0
        # set True by take_action once the game is decided or the board fills
        self.terminal = False
def get_agent_observation(state, agent_index):
    """Return the 10-element observation for an agent: the board seen from
    that agent's perspective followed by the current turn indicator."""
    # agent 0 sees the board as stored; agent 1 sees it sign-flipped so
    # that its own pieces always read as +1
    if agent_index == 1:
        perspective = np.multiply(state.board, -1)
    elif agent_index == 0:
        perspective = state.board
    else:
        # any other index yields an empty board (mirrors the original
        # default initialization)
        perspective = np.array([0] * 9)
    return np.concatenate((perspective, np.array([state.turn])))
def get_human_readable_observation(observation):
    """Render an observation as text: a 'Turn: n' line followed by the
    3x3 board drawn with X, O and _ characters."""
    # the turn indicator lives in the last observation slot (same value
    # get_turn_from_observation extracts)
    pieces = ['Turn: ' + str(observation[9]) + "\n"]
    for cell_index in range(9):
        cell = observation[cell_index]
        if cell == 1:
            pieces.append('X')
        elif cell == -1:
            pieces.append('O')
        else:
            pieces.append('_')
        # break the line after every third cell
        if cell_index % 3 == 2:
            pieces.append("\n")
    return ''.join(pieces)
def get_turn_from_observation(observation):
    """Extract the turn indicator stored in the final (10th) observation slot."""
    turn_slot = 9
    return observation[turn_slot]
def get_valid_actions(observation):
    """List the board indices (0-8) that are still empty and playable."""
    return [square for square in range(9) if observation[square] == 0]
def get_value_from_observation(observation):
    """Return +1 if player 0 completed a line, -1 if player 1 did, else 0.

    Lines are examined in the same order as the original cascade and the
    first decided line determines the result (the +3/-3 -> +1/-1 mapping
    of get_reward_from_combo is inlined here).
    """
    # all eight winning lines, in the original evaluation order
    win_lines = (
        (0, 1, 2), (0, 3, 6), (0, 4, 8), (1, 4, 7),
        (2, 4, 6), (2, 5, 8), (3, 4, 5), (6, 7, 8),
    )
    for a, b, c in win_lines:
        combo = observation[a] + observation[b] + observation[c]
        if combo == 3:
            return 1
        if combo == -3:
            return -1
    return 0
def get_reward_from_combo(combo):
    """Map the sum of one board line to a reward: +3 -> +1 (player 0 won
    the line), -3 -> -1 (player 1 won the line), anything else -> 0."""
    if combo == 3:
        return 1
    if combo == -3:
        return -1
    return 0
def take_action(preaction_state, action):
    """Return a deep copy of *preaction_state* with the current player's
    piece placed at board index *action*.

    The copy's turn indicator is flipped to the other player and its
    terminal flag is set when a line is completed or the board is full.
    The action is NOT validated against get_valid_actions.
    """
    next_state = copy.deepcopy(preaction_state)
    # player 0 places +1 pieces, player 1 places -1 pieces
    if preaction_state.turn == 1:
        piece, next_state.turn = -1, 0
    else:
        piece, next_state.turn = 1, 1
    next_state.board[action] = piece
    # the game ends on a decided line or when no empty cell remains
    outcome = get_value_from_observation(next_state.board)
    if outcome != 0 or 0 not in next_state.board:
        next_state.terminal = True
    return next_state
| UTF-8 | Python | false | false | 2,829 | py | 35 | ttt.py | 30 | 0.696006 | 0.668434 | 0 | 99 | 27.555556 | 60 |
Evan-Wang-ch/twitterAnalysis | 15,470,472,241,495 | b53c8da8573049c937a10d2ae13a238f7b0a9e9d | c5916d82b1ec0b9585d0d431cfaf9d5db422cea8 | /NDCG_Graph_Auto.py | 4f3817af405237d3eaebdbde122e8a371925d53e | []
| no_license | https://github.com/Evan-Wang-ch/twitterAnalysis | 7a0440c16389bc1422ed8f0ebe08bd6c1ce25489 | fb35769f624143ae58e1689590f79719262d2cb2 | refs/heads/master | 2018-05-13T12:27:18.597714 | 2017-05-28T11:16:40 | 2017-05-28T11:16:40 | 92,656,623 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #Draw a picture about NDCG@k of different feature sets in auto-labeling
import re,collections
import matplotlib.pyplot as plt
import numpy as np
def get_lines(file):
    """Read up to the first 10 lines of *file*, echoing each one and
    returning them parsed as floats."""
    losses = []
    with open(file) as handle:
        for line_number, line in enumerate(handle, start=1):
            # stop after ten values: the eleventh line is neither echoed
            # nor parsed (matches the original early return)
            if line_number > 10:
                return losses
            print(line)
            losses.append(float(line))
    return losses
# NDCG@k values (k = 1..10), one file per feature set
path1 = 'Data//tryOut11.dat'
path2 = 'Data//tryOut22.dat'
path3 = 'Data//tryOut33.dat'
path4 = 'Data//tryOut44.dat'
# NOTE(review): keywords2 is read from path1 and keywords1 from path2 --
# presumably intentional (the legend pairing below relies on it); confirm.
keywords2 = get_lines(path1)
keywords1 = get_lines(path2)
keywords3 = get_lines(path3)
keywords4 = get_lines(path4)
# plt.style.use('fivethirtyeight')
# plt.style.use("ggplot")
# n_bins = 50
# one curve per feature set; the baseline is drawn dotted without markers
l1, = plt.plot(range(1,11), keywords1, 'o-', color = 'red', linewidth = 1)
l2, = plt.plot(range(1,11), keywords2, 'o-', color = 'blue', linewidth = 1)
l3, = plt.plot(range(1,11), keywords3, 'o-', color = 'orange', linewidth = 1)
l4, = plt.plot(range(1,11), keywords4, color = 'green', linewidth = 1, linestyle = ':')
plt.legend((l3, l1, l2, l4), ('Lexical and Graph Embedding Features', 'All Features', 'Lexical Features', 'Baseline(Random Ranking)'), loc = 'lower right', shadow=True)
plt.xlabel('k')
plt.ylabel('NDCG@k')
plt.title('Performance NDCG@k Changes with Different k(Auto Labeled)')
plt.grid(True, linestyle='-', which='major', color='lightgrey',
         alpha=0.5)
plt.grid(True)
plt.show()
| UTF-8 | Python | false | false | 1,424 | py | 33 | NDCG_Graph_Auto.py | 23 | 0.638343 | 0.599017 | 0 | 44 | 31.363636 | 168 |
rg3915/spark | 11,647,951,313,772 | d35f3e0405a06336fff86917111347889cc1c268 | cab88de907ce8bd8d5e8a1850a29e6753457bb40 | /spark/selenium/gen_random_values.py | a9992a5f384083c9479f8dd504b879e50617f1da | [
"MIT"
]
| permissive | https://github.com/rg3915/spark | 1734bcd9d15353a24a0b04c7521fe3cca7fc54db | dc93564cebb0e5d025eeed819b7fe0fe2edca5a5 | refs/heads/master | 2021-01-02T08:43:47.172425 | 2017-08-02T00:48:46 | 2017-08-02T00:48:46 | 87,489,867 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import string
from random import random, randrange, choice
from datetime import date, datetime, timedelta
def gen_string(max_length):
    """Return a random string of *max_length* ASCII letters."""
    letters = [choice(string.ascii_letters) for _ in range(max_length)]
    return str(''.join(letters))
# metadata consumed elsewhere: names of the required arguments
gen_string.required = ['max_length']
def gen_cpf():
    """Return a random, checksum-valid Brazilian CPF as an 11-digit string.

    The first nine digits are random; the two trailing verification
    digits are derived from them with the standard mod-11 rule.
    """
    def calcula_digito(digs):
        # weighted mod-11 checksum; weights run from (len(digs)+1) down to 2
        s = 0
        qtd = len(digs)
        for i in range(qtd):
            # bug fix: use the digs parameter instead of closing over the
            # enclosing list n (the parameter was previously ignored)
            s += digs[i] * (1 + qtd - i)
        res = 11 - s % 11
        # checksum results of 10 or 11 collapse to the digit 0
        if res >= 10:
            return 0
        return res
    n = [randrange(10) for i in range(9)]
    n.append(calcula_digito(n))
    n.append(calcula_digito(n))
    return "%d%d%d%d%d%d%d%d%d%d%d" % tuple(n)
def gen_digits(max_length):
    """Return a random string of *max_length* decimal digits."""
    picked = (choice(string.digits) for _ in range(max_length))
    return str(''.join(picked))
def gen_phone():
    """Return a random phone number formatted as 'xx 9xxxx-xxxx'."""
    raw = gen_digits(11)
    # the digit at index 2 is skipped by the original slicing; the mobile
    # prefix '9' is a literal in the template
    return '{} 9{}-{}'.format(raw[:2], raw[3:7], raw[7:])
def gen_date(min_year=1900, max_year=datetime.now().year):
    """Return a random datetime.date between Jan 1 of *min_year* and
    roughly the end of *max_year* (years counted as 365 days).

    Note: the default max_year is fixed once, at import time.
    """
    span_years = max_year - min_year + 1
    first = date(min_year, 1, 1)
    last = first + timedelta(days=365 * span_years)
    return first + (last - first) * random()
def convert_date(d):
    """Format a date as day/month/year (dd/mm/yyyy)."""
    return d.strftime('%d/%m/%Y')
def gen_datetime(min_year=1900, max_year=datetime.now().year):
    """Return a random timestamp string 'yyyy-mm-dd hh:mm:ss[.ffffff]'
    between Jan 1 of *min_year* and ~365 days per covered year later."""
    first = datetime(min_year, 1, 1)
    span = timedelta(days=365 * (max_year - min_year + 1))
    moment = first + span * random()
    return moment.isoformat(" ")
def gen_timestamp(min_year=1915, max_year=datetime.now().year):
    """Return a random timestamp string between Jan 1 of *min_year* and
    Jan 1 of the year after *max_year* (calendar-accurate bounds)."""
    lower = datetime(min_year, 1, 1)
    upper = datetime(max_year + 1, 1, 1)
    offset = random() * (upper - lower).total_seconds()
    return (lower + timedelta(seconds=offset)).isoformat(" ")
# See more gen_random_values in
# https://gist.github.com/rg3915/744aacde209046901748
| UTF-8 | Python | false | false | 2,011 | py | 38 | gen_random_values.py | 27 | 0.622886 | 0.583582 | 0 | 66 | 29.454545 | 80 |
Francis1998/leetcode | 12,627,203,859,230 | 45b3cc5d157c097a6210282567ff3230f3cf3e19 | e545395ef78b1e396076dddd726444be28576290 | /7.py | b1ec49abfaf08cd96cfb50897ba8e7f20290932f | []
| no_license | https://github.com/Francis1998/leetcode | bb38fe91145b57e657ec812bebd95d3e30be3b96 | 9fbab5fe97026f7fa6e7124321480a7e20d5b72a | refs/heads/master | 2023-04-22T02:45:57.223755 | 2021-05-08T12:47:23 | 2021-05-08T12:47:23 | 318,083,503 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Solution:
def slidingPuzzle(self, board):
s = ''.join(str(e) for row in board for e in row)
if s == "123450":
return 0
bq, eq, nq, visited, res = {(s, s.index('0'))},{('123450',5)},set(),set(),0
moves = [[1, 3], [0, 2, 4], [1, 5], [0, 4], [1, 3, 5], [2, 4]]
while bq:
visited |= bq
res += 1
for x, idx in bq:
for nidx in moves[idx]:
print(idx)
lx = [*x]
lx[idx], lx[nidx] = lx[nidx],lx[idx]
nx = ''.join(lx)
if (nx,nidx) not in visited:
if (nx,nidx) in eq:
return res
nq.add((nx,nidx))
bq,nq = nq,set()
if len(bq) > len(eq):
bq, eq = eq, bq
return -1
dp = Solution()
print(ord('a')-ord('b')) | UTF-8 | Python | false | false | 942 | py | 71 | 7.py | 57 | 0.363057 | 0.329087 | 0 | 27 | 33.925926 | 86 |
dnonatar/Racquet_Selector | 4,045,859,241,640 | fe17217a98e77c05d01c35ca4c1dbc960a928e5e | 5a02166ac190e29258ba6ecf8e22c0f958e8d16e | /Cleaning/cleanup_1.py | 138d8b4cf3d4e0f3bac3c0e969be03e0799a2db6 | []
| no_license | https://github.com/dnonatar/Racquet_Selector | 0cbd01660894b5c55190a369d884a3f824a27a18 | d5dfa628b724571a18ab8f618863fae5eb2cb8d6 | refs/heads/master | 2020-03-22T07:32:25.086776 | 2018-07-04T10:48:55 | 2018-07-04T10:48:55 | 139,706,782 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pandas as pd
import os
# work inside the folder that holds the scraped CSV files
os.chdir('/home/ratanond/Desktop/DataScience/Projects/Racquet_Selector/Cleaning/scraped_data')
babolat = pd.read_csv('babolat.csv')
wilson = pd.read_csv('wilson.csv')
head = pd.read_csv('head.csv')
yonex = pd.read_csv('yonex.csv')
# tag each frame with its brand via an ad-hoc attribute (used below for
# the output filenames)
babolat.name = 'babolat'
wilson.name = 'wilson'
head.name = 'head'
yonex.name = 'yonex'
# columns dropped from every brand before export
cols_drop = ['Tension','Grip_Type', 'Composition','Weight','Beam_Width']
def exclude_junior(df):
    """Return *df* without the rows whose Racket_Name contains 'Junior'."""
    keep = ~df['Racket_Name'].str.contains('Junior')
    return df[keep]
## remove the word 'Tennis Racquet'
def fix_racket_name(df):
    """Strip the trailing ' Tennis Racquet' from every Racket_Name entry.

    Rewritten as one vectorized column assignment: the previous chained
    item assignment (``df['Racket_Name'][i] = ...``) triggered pandas'
    SettingWithCopy warning and broke whenever the frame's index was not
    the default 0..n-1 range (the loop iterated positions but indexed by
    label).  The frame is modified in place and returned, as before.
    """
    df['Racket_Name'] = df['Racket_Name'].str.replace(" Tennis Racquet", "", regex=False)
    return df
racket_brands = [babolat,wilson,head,yonex]
# clean each brand frame and write it under cleaned_output/
for brand in racket_brands:
    # grab the name first: drop() returns a *new* DataFrame that no longer
    # carries the ad-hoc .name attribute set above -- which is presumably
    # why the reordering mentioned in the questions below fails
    file_name = brand.name
    brand = brand.drop(cols_drop,axis=1)
    brand = fix_racket_name(brand)
    brand = exclude_junior(brand)
    brand.to_csv('./cleaned_output/'+file_name+'.csv',index=False)
# Question..
# If put file_name just before to_csv, it wouldn't work
# If drop and exclude_junior first, also doesn't work
| UTF-8 | Python | false | false | 1,133 | py | 21 | cleanup_1.py | 5 | 0.671668 | 0.66902 | 0 | 40 | 27.2 | 94 |
preetiduhan/DataStructuresInPython | 8,658,654,096,595 | 776cd825ad6338cd046fd4b2559b8bb26e2af98d | e19f387a6f4458357b1685985306a3105f77d00d | /preserve_spaces_reverse_string.py | 22d60553890c0061c60653814df54979e13230a5 | []
| no_license | https://github.com/preetiduhan/DataStructuresInPython | 4073a86f33d53ca987350de64bebd0d16f360cd0 | 989aa4cb322508fe3706d942a333cd5e1c861222 | refs/heads/master | 2020-05-27T15:06:05.899949 | 2020-05-20T02:56:02 | 2020-05-20T02:56:02 | 188,673,970 | 1 | 0 | null | false | 2020-03-22T05:50:40 | 2019-05-26T11:24:35 | 2020-03-22T05:50:13 | 2020-03-22T05:50:39 | 72 | 0 | 0 | 0 | Python | false | false | '''
Write a program to Reverse the given string while preserving the position of spaces.
Examples:
Input : "abc de"
Output : edc ba
Input : "intern at geeks"
Output : skeegt an retni
Input : "Help others"
Output : sreh topleH
'''
def preserve(sent):
    """Reverse *sent* while keeping every space at its original index.

    Examples (from the module docstring): 'abc de' -> 'edc ba',
    'Help others' -> 'sreh topleH'.

    The previous implementation failed those examples: its countdown loop
    stopped at index 1 (dropping the first character), it misplaced
    spaces, and it printed the raw character list.  The corrected result
    is printed without a trailing newline and also returned.
    """
    # non-space characters, consumed from the end to produce the reversal
    letters = [ch for ch in sent if ch != ' ']
    pieces = []
    for ch in sent:
        if ch == ' ':
            # spaces keep their original positions
            pieces.append(' ')
        else:
            pieces.append(letters.pop())
    reversed_text = ''.join(pieces)
    print(reversed_text, end='')
    return reversed_text
preserve('Help others')
| UTF-8 | Python | false | false | 644 | py | 43 | preserve_spaces_reverse_string.py | 42 | 0.569876 | 0.562112 | 0 | 29 | 21 | 84 |
MHKiT-Software/MHKiT-Python | 8,375,186,266,648 | 3ff5cd3b31a2275e3a181a821011d57d93fdaaf1 | f60537ca1e0ed0d5b8a6e33298603190a2f71ce3 | /mhkit/wave/io/swan.py | c71a1a5142008c5d86bb134403d57a801963ab8d | [
"BSD-3-Clause",
"Python-2.0"
]
| permissive | https://github.com/MHKiT-Software/MHKiT-Python | 70a02c53a1bf10e1d6e5f80717f084220acc716e | 3e66f8f9513fc9c20deda300e748cb1269cd2b29 | refs/heads/master | 2023-08-31T07:43:25.456003 | 2023-08-29T19:13:16 | 2023-08-29T19:13:16 | 228,677,190 | 37 | 43 | BSD-3-Clause | false | 2023-08-31T13:11:45 | 2019-12-17T18:24:24 | 2023-07-11T04:29:20 | 2023-08-31T13:11:43 | 450,655 | 38 | 41 | 25 | Python | false | false | from scipy.io import loadmat
from os.path import isfile
import pandas as pd
import numpy as np
import re
def read_table(swan_file):
    '''
    Reads in SWAN table format output

    Parameters
    ----------
    swan_file: str
        filename to import

    Returns
    -------
    swan_data: DataFrame
        Dataframe of swan output
    metaDict: Dictionary
        Dictionary of metaData
    '''
    assert isinstance(swan_file, str), 'swan_file must be of type str'
    assert isfile(swan_file)==True, f'File not found: {swan_file}'

    header_line_number = 4
    # a with-block guarantees the handle is released even when a header
    # line is malformed (the bare open/close it replaces leaked the file
    # descriptor on any exception)
    with open(swan_file, 'r') as f:
        for i in range(header_line_number+2):
            line = f.readline()
            if line.startswith('% Run'):
                # run metadata line, e.g. "% Run:01 Table:tab SWAN version:..."
                # NOTE(review): metaDict is only bound here; a file without
                # a '% Run' line raises NameError below
                metaDict = _parse_line_metadata(line)
                if metaDict['Table'].endswith('SWAN'):
                    metaDict['Table'] = metaDict['Table'].split(' SWAN')[:-1]
            if i == header_line_number:
                # column-names row (commented with %)
                header = re.split(r"\s+", line.rstrip().strip('%').lstrip())
                metaDict['header'] = header
            if i == header_line_number+1:
                # units row, with the surrounding brackets stripped
                units = re.split(r'\s+', line.strip(' %\n').replace('[','').replace(']',''))
                metaDict['units'] = units
    # re-read the file as whitespace-separated data, skipping the
    # commented header block
    swan_data = pd.read_csv(swan_file, sep=r'\s+', comment='%',
                            names=metaDict['header'])
    return swan_data, metaDict
def read_block(swan_file):
    '''
    Reads in SWAN block output with headers and creates a dictionary
    of DataFrames for each SWAN output variable in the output file.

    Parameters
    ----------
    swan_file: str
        swan block file to import

    Returns
    -------
    data: Dictionary
        Dictionary of DataFrame of swan output variables
    metaDict: Dictionary
        Dictionary of metaData dependent on file type
    '''
    assert isinstance(swan_file, str), 'swan_file must be of type str'
    assert isfile(swan_file)==True, f'File not found: {swan_file}'

    # dispatch on the extension: MATLAB binary vs. text block output.
    # NOTE(review): split('.')[1] assumes exactly one dot in the path
    extension = swan_file.split('.')[1].lower()
    if extension == 'mat':
        dataDict = _read_block_mat(swan_file)
        metaData = {'filetype': 'mat',
                    'variables': list(dataDict.keys())}
    else:
        dataDict, metaData = _read_block_txt(swan_file)
    return dataDict, metaData
def _read_block_txt(swan_file):
    '''
    Reads in SWAN block output with headers and creates a dictionary
    of DataFrames for each SWAN output variable in the output file.
    Parameters
    ----------
    swan_file: str
        swan block file to import (must be written with headers)
    Returns
    -------
    dataDict: Dictionary
        Dictionary of DataFrame of swan output variables
    metaDict: Dictionary
        Dictionary of metaData dependent on file type
    '''
    assert isinstance(swan_file, str), 'swan_file must be of type str'
    assert isfile(swan_file)==True, f'File not found: {swan_file}'
    f = open(swan_file)
    # line numbers at which each "% Run" variable section starts
    runLines=[]
    metaDict = {}
    column_position = None
    dataDict={}
    for position, line in enumerate(f):
        if line.startswith('% Run'):
            # start of a new variable section: parse its metadata and
            # remember where its column-name row will appear (5 lines on)
            varPosition = position
            runLines.extend([position])
            column_position = position + 5
            varDict = _parse_line_metadata(line)
            # data rows are scaled by this multiplier from the Unit field
            varDict['unitMultiplier'] = float(varDict['Unit'].split(' ')[0])
            metaDict[varPosition] = varDict
            variable = varDict['vars']
            dataDict[variable] = {}
        if position==column_position and column_position!=None:
            # column-name row of the current variable section
            columns = line.strip('% \n').split()
            metaDict[varPosition]['cols'] = columns
            N_columns = len(columns)
            # NOTE(review): 'columns_position' (with an s) is a new,
            # unused name -- if this was meant to reset column_position,
            # that reset never happens; confirm intended
            columns_position = None
        if not line.startswith('%'):
            # data row: first token is the row index, the rest are values.
            # NOTE(review): the split also breaks on periods -- presumably
            # block rows are integer-valued (scaled by the unit multiplier
            # above); confirm against a sample file
            raw_data = ' '.join(re.split(' |\.', line.strip(' \n'))).split()
            index_number = int(raw_data[0])
            columns_data = raw_data[1:]
            data=[]
            # '****' marks overflowed/missing values in SWAN output
            possibleNaNs = ['****']
            NNaNsTotal = sum([line.count(nanVal) for nanVal in possibleNaNs])
            if NNaNsTotal>0:
                # replace each masked value with NaN, keeping column count
                for vals in columns_data:
                    NNaNs = 0
                    for nanVal in possibleNaNs:
                        NNaNs += vals.count(nanVal)
                    if NNaNs > 0:
                        for i in range(NNaNs):
                            data.extend([np.nan])
                    else:
                        data.extend([float(vals)])
            else:
                data.extend([float(val) for val in columns_data])
            dataDict[variable][index_number] = data
    metaData = pd.DataFrame(metaDict).T
    f.close()
    # turn each variable's {row_index: values} mapping into a scaled frame
    for var in metaData.vars.values:
        df = pd.DataFrame(dataDict[var]).T
        varCols = metaData[metaData.vars == var].cols.values.tolist()[0]
        colsDict = dict(zip(df.columns.values.tolist(), varCols))
        # NOTE(review): rename() is not in-place and its result is
        # discarded, so the columns are never actually renamed; confirm
        df.rename(columns=colsDict)
        unitMultiplier = metaData[metaData.vars == var].unitMultiplier.values[0]
        dataDict[var] = df * unitMultiplier
    metaData.pop('cols')
    metaData = metaData.set_index('vars').T.to_dict()
    return dataDict, metaData
def _read_block_mat(swan_file):
    '''
    Reads in SWAN matlab output and creates a dictionary of DataFrames
    for each swan output variable.

    Parameters
    ----------
    swan_file: str
        filename to import

    Returns
    -------
    dataDict: Dictionary
        Dictionary of DataFrame of swan output variables
    '''
    assert isinstance(swan_file, str), 'swan_file must be of type str'
    assert isfile(swan_file)==True, f'File not found: {swan_file}'

    # load the .mat contents and drop scipy's bookkeeping entries
    raw = loadmat(swan_file, struct_as_record=False, squeeze_me=True)
    bookkeeping = ('__header__', '__version__', '__globals__')
    dataDict = {key: pd.DataFrame(value) for key, value in raw.items()
                if key not in bookkeeping}
    return dataDict
def _parse_line_metadata(line):
    '''
    Parses the variable metadata into a dictionary
    Parameters
    ----------
    line: str
        line from block swan data to parse
    Returns
    -------
    metaDict: Dictionary
        Dictionary of variable metadata
    '''
    assert isinstance(line, str), 'line must be of type str'
    metaDict={}
    # normalize: strip the leading "%", collapse whitespace and commas,
    # and rewrite the "**" variable marker as a "vars:" key
    meta=re.sub('\s+', " ", line.replace(',', ' ').strip('% \n').replace('**', 'vars:'))
    # split on ":": each chunk carries the previous key's value followed
    # by the next key, e.g. "Run:01 Table:tab" -> ['Run', '01 Table', 'tab']
    mList = meta.split(':')
    elms = [elm.split(' ') for elm in mList]
    for elm in elms:
        try:
            elm.remove('')
        except:
            pass
    # the last token of chunk i is a key; everything but the last token
    # of chunk i+1 is that key's value
    for i in range(len(elms)-1):
        elm = elms[i]
        key = elm[-1]
        val = ' '.join(elms[i+1][:-1])
        metaDict[key] = val
    # the final chunk is entirely the value of the last key.
    # NOTE(review): a line without any ":" leaves key unbound and raises
    # NameError here; callers always pass "% Run ..." header lines
    metaDict[key] = ' '.join(elms[-1])
    return metaDict
def dictionary_of_block_to_table(dictionary_of_DataFrames, names=None):
    '''
    Converts a dictionary of structured 2D grid SWAN block format
    x (columns), y (index) DataFrames into a single SWAN table format
    DataFrame with x and y columns plus one value column per variable.

    Parameters
    ----------
    dictionary_of_DataFrames: Dictionary
        Dictionary of DataFrames in with columns as X indicie and Y as index.
    names: List (Optional)
        Name of data column in returned table. Default=Dictionary.keys()

    Returns
    -------
    swanTables: DataFrame
        DataFrame with columns x,y,values where values = Dictionary.keys()
        or names
    '''
    assert isinstance(dictionary_of_DataFrames, dict), (
        'dictionary_of_DataFrames must be of type Dict')
    assert bool(dictionary_of_DataFrames), 'dictionary_of_DataFrames is empty'
    for key in dictionary_of_DataFrames:
        assert isinstance(dictionary_of_DataFrames[key],pd.DataFrame), (
            f'Dictionary key:{key} must be of type pd.DataFrame')
    if not isinstance(names, type(None)):
        assert isinstance(names, list), (
            'If specified names must be of type list')
        assert all([isinstance(elm, str) for elm in names]), (
            'If specified all elements in names must be of type string')
        assert len(names) == len(dictionary_of_DataFrames), (
            'If specified names must the same length as dictionary_of_DataFrames')

    # NOTE(review): when names is given it is also used to index the
    # dictionary below, so it must match the dictionary keys -- confirm
    variables = list(dictionary_of_DataFrames.keys()) if names == None else names
    # the first variable seeds the x/y/value table; every remaining
    # variable contributes one additional value column
    first, *others = variables
    swanTables = block_to_table(dictionary_of_DataFrames[first], name=first)
    for var in others:
        converted = block_to_table(dictionary_of_DataFrames[var], name=var)
        swanTables[var] = converted[var]
    return swanTables
def block_to_table(data, name='values'):
    '''
    Converts structured 2D grid SWAN block format x (columns), y (index)
    to SWAN table format x (column), y (column), values (column)
    DataFrame.

    Parameters
    ----------
    data: DataFrame
        DataFrame in with columns as X indicie and Y as index.
    name: string (Optional)
        Name of data column in returned table. Default='values'

    Returns
    -------
    table: DataFrame
        DataFrame with columns x,y,values
    '''
    assert isinstance(data,pd.DataFrame), 'data must be of type pd.DataFrame'
    assert isinstance(name, str), 'Name must be of type str'

    # unstack yields a Series keyed by (column, index); resetting the
    # index exposes those keys as the x and y columns
    stacked = data.unstack().reset_index(name=name)
    table = stacked.rename(columns={'level_0': 'x', 'level_1': 'y'})
    table.sort_values(['x', 'y'], ascending=[True, True], inplace=True)
    return table
| UTF-8 | Python | false | false | 9,951 | py | 151 | swan.py | 72 | 0.558939 | 0.556125 | 0 | 294 | 32.843537 | 88 |
dsunder/wg21 | 15,272,903,746,507 | 11bab2e9a68543181c71053a349d780bf4ffd0d3 | 5ae49ee1781f7f0e0909a8291ea53d88e8f457c9 | /data/refs.py | 7def883ef77d731321f0fc2911c9a737f8a650f2 | []
| no_license | https://github.com/dsunder/wg21 | fa4d894c28eb220a371f9d65a7a3d3e3e2d54484 | 634b0154e3a5721be5534b602b991e200eaba8b3 | refs/heads/master | 2021-12-14T03:29:17.538690 | 2021-10-19T20:13:06 | 2021-10-19T20:13:06 | 220,190,186 | 0 | 0 | null | true | 2019-11-07T08:44:14 | 2019-11-07T08:44:13 | 2019-11-07T08:44:07 | 2019-10-25T18:33:54 | 11,384 | 0 | 0 | 0 | null | false | false | #!/usr/bin/env python3
import sys
from datetime import datetime
import requests
from bs4 import BeautifulSoup
import json
import yaml
# wg21.link index endpoints: the .html page carries per-paper dates, the
# .yaml file carries the citation records
url = 'https://wg21.link/index'
dates = {}
# scrape each list item's date (when present), keyed by the paper id
for elem in BeautifulSoup(requests.get(url + '.html').text, 'lxml').find_all('li'):
    date = elem.find(class_='date')
    if date is not None:
        dates[elem['id']] = date.get_text()
index_yaml = yaml.safe_load(requests.get(url + '.yaml').text)['references']
# rewrite every 'issued' field into CSL-JSON date-parts form using the
# scraped dates
for item in index_yaml:
    if item.pop('issued', None) is not None:
        date = datetime.strptime(dates[item['id']], '%Y-%m-%d')
        item['issued'] = { 'date-parts' : [[ date.year, date.month, date.day ]] }
json.dump(index_yaml, sys.stdout, ensure_ascii=False, indent=2)
| UTF-8 | Python | false | false | 709 | py | 5 | refs.py | 3 | 0.672779 | 0.665726 | 0 | 25 | 27.36 | 83 |
Guoami/spider | 5,781,026,001,625 | 7e92b8f64558178cf43ed6775cfca43624364391 | e8c4b34455efb444e16b28f953a72ec941594308 | /com/yifeng/spider/picture/wallpaper.py | e1bd71ee1152b2eea070af53dd968c5f23d203ed | []
| no_license | https://github.com/Guoami/spider | 8d7d344e34195f009be97b3008f91c81989dd0c4 | 612b33154eb1e64067ce00b1a2bebf815856a57d | refs/heads/master | 2022-01-13T06:25:13.331286 | 2019-05-13T01:17:34 | 2019-05-13T01:17:34 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from urllib import request
import math
import re
'''
爬取小鸟壁纸高清美女大图
'''
# 360 wallpaper API: pages of 10 items starting at the given offset
base_url = 'http://wallpaper.apc.360.cn/index.php?c=WallPaper&a=getAppsByCategory&cid=6&count=10&start='
header = {
    'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36'
}
# local directory the downloaded wallpapers are saved into
base_filepath = 'E:/wallpaper/'
# total number of wallpapers reported by the API, fetched 10 per page
total = 6705
count = 10
totalPage = math.ceil(total / count)
for i in range(0, totalPage):
    try:
        print("======开始下载第"+str(i+1)+"页的图片======")
        url = base_url + str(i * 10)
        req = request.Request(url, headers=header)
        resp = request.urlopen(req).read().decode('utf-8')
        # regexes for the mobile image url, the fallback url and the tag
        pat = 'url_mobile":"(.*?)",'
        pat1 = '"url":"(.*?)",'
        pat2 = 'utag":"(.*?)",'
        url_mobile_list = re.compile(pat).findall(resp)
        url__list = re.compile(pat1).findall(resp)
        #utag_list = re.compile(pat2).findall(resp)
        for j in range(0, len(url_mobile_list)):
            download_url = url_mobile_list[j]
            # fall back to the plain "url" field when "url_mobile" is empty
            if (download_url == ""):
                download_url = url__list[j]
            # unescape "\/" into "/"
            reurl = download_url.replace('\/', '/')
            # (disabled) decode the Unicode-escaped tag and use it as the
            # file name
            #utag = utag_list[j].encode('utf-8').decode('unicode_escape')
            #request.urlretrieve(reurl, base_filepath + utag + '.jpg')
            # use page and item numbers as the image name instead
            request.urlretrieve(reurl, base_filepath + str(i+1) + str(j) + '.jpg')
            print("======第" + str(i + 1) + "页的图片下载完成======")
    except Exception as err:
print(err) | UTF-8 | Python | false | false | 1,819 | py | 2 | wallpaper.py | 1 | 0.565058 | 0.532071 | 0 | 50 | 31.76 | 135 |
skchandra/SoftDesSp15 | 16,664,473,144,906 | c5b873d94b570323eae39f3048d89cf161242a37 | 4f2aebb4fc5aa0780c9d2f7fb2adcfc9aa3ffe03 | /finalProjectWorkDir/math.py | fab5eaa26be7d5f32044922fff7eef000170e007 | []
| no_license | https://github.com/skchandra/SoftDesSp15 | 6922fddb1e5c5e7bd4432b4024c7c4db64279d54 | 09a2ccfebc982686d7b88b9b0beed8c404aa5516 | refs/heads/master | 2021-01-17T22:17:47.190433 | 2015-05-05T00:22:02 | 2015-05-05T00:22:02 | 30,161,313 | 0 | 0 | null | true | 2015-02-01T21:57:44 | 2015-02-01T21:57:44 | 2015-02-01T18:20:06 | 2015-02-01T18:20:06 | 3,839 | 0 | 0 | 0 | null | null | null | from scipy.optimize import fsolve
import math
"""def equations(p,*args):
x, y = p
return (x+y**2-4, math.exp(x) + x*y - 3)
slope = 7
dist = 3
x, y = fsolve(equations,(1,1),args = (slope,dist))
print equations((x,y))"""
import warnings
warnings.filterwarnings('ignore', 'The iteration is not making good progress')
def find_coordinates(p1,x1,y1,axis_x1,axis_y1,axis_x2,axis_y2,theta):
    """Map point p1 to its location after a fold.

    (x1, y1) lies on the base (pre-fold) axis; (axis_x1, axis_y1) to
    (axis_x2, axis_y2) is the actual folding line; theta is the fold
    angle in radians (compared against 90 degrees to pick the side).
    NOTE(review): the print statements below use Python 2 syntax, so this
    module does not run under Python 3.
    """
    # axis-aligned fold lines: mirror the offset from the base axis,
    # flipping direction when the fold angle exceeds 90 degrees
    if (axis_x2>axis_x1) and (axis_y2-axis_y1==0):
        if math.degrees(theta) <= 90:
            x,y = p1[0],axis_y2+math.fabs(p1[1]-y1)
        else:
            x,y = p1[0],axis_y2-math.fabs(p1[1]-y1)
    elif (axis_x2<axis_x1) and (axis_y2-axis_y1==0):
        if math.degrees(theta) <= 90:
            x,y = p1[0],axis_y2-math.fabs(p1[1]-y1)
        else:
            x,y = p1[0],axis_y2+math.fabs(p1[1]-y1)
    elif (axis_y2>axis_y1) and (axis_x2-axis_x1==0):
        if math.degrees(theta) <= 90:
            x,y = axis_x2-math.fabs(p1[0]-x1),p1[1]
        else:
            x,y = axis_x2+math.fabs(p1[0]-x1),p1[1]
    elif (axis_y2<axis_y1) and (axis_x2-axis_x1==0):
        if math.degrees(theta) <= 90:
            x,y = axis_x2+math.fabs(p1[0]-x1),p1[1]
        else:
            x,y = axis_x2-math.fabs(p1[0]-x1),p1[1]
    else:
        # general (slanted) fold line
        #slope of actual folding line
        axis_slope = (axis_y2-axis_y1)/(axis_x2-axis_x1)
        print 'axis',axis_slope
        #slope of final coordinate (perpendicular to the fold line)
        point_slope = -1/axis_slope
        print 'point',point_slope
        #intersection point of not-translated coordinate, base folding line
        # (assumes the base folding line is the x axis, hence y = 0)
        int_x,int_y = p1[0],0
        #distance between intersection point and right axis coordinate (not-translated)
        axis_dist = x1-int_x
        point_dist = p1[1]-y1
        print axis_dist,point_dist
        #calculating intersection point on actual folding line
        intersect_x = axis_x2 - (axis_dist*(1/math.sqrt(1+(point_slope**2))))
        intersect_y = axis_y2 - (axis_dist*(point_slope/math.sqrt(1+(point_slope**2))))
        print intersect_x,intersect_y
        #find final translated x,y coordinates, offset along the
        #perpendicular direction; the side depends on the fold angle
        if math.degrees(theta) <= 90:
            x = axis_x2 - (point_dist*(1/math.sqrt(1+(point_slope**2))))
            y = axis_y2 - (point_dist*(point_slope/math.sqrt(1+(point_slope**2))))
        else:
            x = axis_x2 + (point_dist*(1/math.sqrt(1+(point_slope**2))))
            y = axis_y2 + (point_dist*(point_slope/math.sqrt(1+(point_slope**2))))
    return x,y
print find_coordinates((4.0,3.0),5.0,0.0,3.0,0.0,6.0,4.0,1.57) | UTF-8 | Python | false | false | 2,231 | py | 14 | math.py | 11 | 0.649933 | 0.585388 | 0 | 65 | 33.338462 | 81 |
JavierAntoran/pytorch_fun | 5,042,291,647,024 | f19f5b6fc929489711ef8f8b9c3944ee44f17c62 | 6c9bc0ad6cb8b4f199eb0d58bf8930c34d87c02a | /closs/2_btk_dim2_closs0/src/read_cifar.py | 89aca512759c18b9965ad8c0eeb91880adb6362c | []
| no_license | https://github.com/JavierAntoran/pytorch_fun | 8f4550bb796acc4730c866a0e3765f1bee48bcb5 | bbe6588eb99b6f359ac57cdb4ec065f0ea79d7ec | refs/heads/master | 2021-09-15T04:31:51.412372 | 2018-05-25T22:03:54 | 2018-05-25T22:03:54 | 119,742,644 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from __future__ import print_function
from utils import *
def read_train_cifar10():
xtr = []
ytr = []
for j in range(5):
d = load_obj('../data/cifar-10-batches-py/data_batch_%d' % (j+1) )
xtr.append(d['data'])
ytr.append(d['labels'])
xtr = np.concatenate(xtr)
ytr = np.concatenate(ytr)
ytr = np.asarray(ytr, dtype=np.int64)
return xtr, ytr
def read_test_cifar10():
d = load_obj('../data/cifar-10-batches-py/test_batch')
xts = d['data']
yts = d['labels']
yts = np.asarray(yts, dtype=np.int64)
return xts, yts
def read_train_test_cifar10():
xtr, ytr = read_train_cifar10()
xts, yts = read_test_cifar10()
return xtr, ytr, xts, yts
# ----------------------------------------------------------------------------------------------------------------------
def read_train_cifar100():
d = load_obj('../data/cifar-100-python/train')
xtr = d['data']
ytr = d['fine_labels']
ytr = np.asarray(ytr, dtype=np.int64)
return xtr, ytr
def read_test_cifar100():
d = load_obj('../data/cifar-100-python/test')
xts = d['data']
yts = d['fine_labels']
yts = np.asarray(yts, dtype=np.int64)
return xts, yts
def read_train_test_cifar100():
xtr, ytr = read_train_cifar100()
xts, yts = read_test_cifar100()
return xtr, ytr, xts, yts
| UTF-8 | Python | false | false | 1,351 | py | 55 | read_cifar.py | 41 | 0.542561 | 0.509252 | 0 | 48 | 27.104167 | 120 |
pius-ng3a/MambuSiteV1 | 14,937,896,289,605 | 75ce1f1baf7cb9c7a677bc3e7f80a08ff161d3af | 98d41ace3bc5e82323c4cd21b6e7d16ea9d16f69 | /Mambu/quarters/models.py | dbaa01d303875ea62bd6a8ec5112fd9d377c909c | []
| no_license | https://github.com/pius-ng3a/MambuSiteV1 | 1152b15f99ae10918bb3404311e98cbf478e0661 | 0af11fb5e9da003098c1f9d9306d9529d7f0f1e1 | refs/heads/master | 2022-12-04T02:07:37.703323 | 2020-08-30T16:52:39 | 2020-08-30T16:52:39 | 289,721,269 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.db import models
from time import time
from django.urls import reverse
from django.contrib.auth.models import User
from django.forms import ModelForm
#from members.models import Member
# Create your models here.
class Quarter(models.Model):
"""Quarter"""
name=models.CharField(max_length=30)
#leader_id= models.ForeignKey(Member,on_delete=models.CASCADE, default=1) #1 = audio, 2=video, 3=text
description = models.CharField(max_length=250,null=True)
population = models.IntegerField(default=200)
area = models.CharField(max_length=100,default="600 Squared KM")
created_at = models.DateField(blank=True,null=True ,auto_now_add=True)
updated_at= models.DateField(blank=True, null=True,auto_now=True)
def __unicode__(self):
return self.name
def get_absolute_url(self):
return reverse('quarter')
def __str__(self):
return self.name
class ProjectForm(ModelForm):
"""docstring for ProjectForm"""
class Meta:
model = Quarter
fields = ['name','area','description','population'] #'leader_id',
class AddQuarter(ModelForm):
"""docstring for AddQuarter"""
def __init__(self, arg):
super(AddQuarter, self).__init__()
self.arg = arg
| UTF-8 | Python | false | false | 1,179 | py | 31 | models.py | 23 | 0.729432 | 0.714165 | 0 | 36 | 31.666667 | 102 |
KevinLoveGitHub/python-utils | 10,419,590,706,864 | c222154bf9586bce53524aa2d71d845eb223cf6f | 2ef26a93f452b0f1bd2bb16e2ce61f0c1d6255a9 | /del_node_modules.py | 33b235407d17c308fb394eecbb7d18d604b04515 | []
| no_license | https://github.com/KevinLoveGitHub/python-utils | 1e8af8b8f308470804b2c866a6afdb64542303e0 | c185a8f9a6bdc42d2d29d24cca72a478cd776b4d | refs/heads/master | 2022-11-05T09:53:23.553312 | 2022-10-27T10:12:43 | 2022-10-27T10:12:43 | 148,456,517 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
# -*- coding: utf-8 -*-
import os
import shutil
root_dir = '/Users/Kevin/Workspaces/node'
def get_file_name(path):
""" 删除所有项目下的 build 文件夹 """
for root, dirs, files in os.walk(path, topdown=False):
if root.endswith('/node_modules'):
print(root)
shutil.rmtree(root)
if __name__ == '__main__':
get_file_name(root_dir)
| UTF-8 | Python | false | false | 405 | py | 17 | del_node_modules.py | 12 | 0.587467 | 0.584856 | 0 | 19 | 19.157895 | 58 |
SRechenberger/sat_entropy | 5,935,644,813,081 | e30a042e0f5e0a207ff14ed5e5a8a94fca787e74 | e209877d6f980e50147ec34544fd98d7c1d1db5a | /sat/experiment.py | 737515f6fc890118e8b52362e6a90b2f14ebc3b7 | [
"MIT"
]
| permissive | https://github.com/SRechenberger/sat_entropy | fcfcd6546850b095af9f8dc777104fc17a25a64d | 9e0c083aa30b183d1ff7c7e14db9252c915a3836 | refs/heads/master | 2021-01-24T12:15:11.554849 | 2018-10-03T13:40:30 | 2018-10-03T13:40:30 | 123,127,100 | 0 | 0 | MIT | false | 2018-04-20T15:02:43 | 2018-02-27T12:38:10 | 2018-03-13T22:09:32 | 2018-04-20T15:02:42 | 6,936 | 0 | 0 | 0 | Python | false | null | import os
import sys
import time
import random
import json
from io import IOBase
from sat.utils import CNF
from multiprocessing import Pool
class Experiment:
# TODO needs overhaul!!!
def __init__(self,
directories = [],
poolsize = 1,
solver = None,
config = dict(),
verbose = False,
seed = None,
prob = None,
log = sys.stdout):
if type(prob) is not int:
raise ValueError(
'prob :: {} should be an int.'
.format(type(prob)))
self.verbose=verbose
self.log=log
self.setupFormulae(prob, directories)
self.poolsize = poolsize
self.config = config
self.seed = None
if solver:
self.solver=solver
self.ready=True
else:
self.ready=False
self.executed = False
def setupSolvers(self, solver, config = dict()):
if self.verbose:
print('Setting up {} solvers... '.format(solver.__name__),
flush=True,
file=self.log)
if type(config) is not dict:
raise TypeError('kwargs=({}:{}) should be a dict.'
.format(kwargs, type(kwargs)))
self.solvers = list(
map(lambda cnf: solver(cnf, **config),
self.formulae))
self.ready = True
self.run = False
if self.verbose:
print(' ...solvers set up.',
flush=True,
file=self.log)
def setupFormulae(self, prob, directories):
if self.verbose:
print('Setting up formulae... ',
flush=True,
file=self.log)
# Check the argument type.
if type(directories) is not list:
raise TypeError('directory=({}::{}) should be a list.'
.format(directories, type(directories)))
# Load the formulae
# os.listdir directory
# >>> filter (\f -> f.endswith('.cnf'))
# >>> map CNF
self.formulae = []
for directory in directories:
self.formulae += list(
map(
lambda f: os.path.join(directory, f),
filter(
lambda f: f.endswith('.cnf'),
os.listdir(directory)
)
)
)
self.formulae = random.sample(
self.formulae,
prob
)
# Raise a waring, if the directory is empty,
# and no output is to be expected.
if len(self.formulae) <= 0:
raise RuntimeWarning(
'There are no test files: there will be no output.')
if self.verbose:
print(' ...formulae set up.',
flush=True,
file=self.log)
def _runSolver(self, filepath):
def prepare_config():
empty_config = {}
for k,v in self.config.items():
if type(v) is list:
empty_config[k] = v[random.randrange(0,len(v))]
else:
empty_config[k] = v
return empty_config
solver = self.solver(filepath,**prepare_config())
solver.solve(self.seed)
return dict(
formula_fname = solver.formula_fname,
max_clause_len = solver.formula.maxClauseLength,
variables = solver.formula.numVars,
clauses = solver.formula.numClauses,
cb = solver.cb,
time = solver.time,
runs = solver.runs,
sat = solver.sat,
)
def runExperiment(self):
if self.verbose:
print('Running Solvers... ',
file=self.log,
flush=True)
if not self.ready:
raise RuntimeError('First run prepareSolvers.')
if self.executed:
raise RuntimeWarning('Experiment already run!')
with Pool(processes=self.poolsize) as pool:
log = self.log
del self.log
begin = time.time()
self.results = pool.map(self._runSolver, self.formulae)
end = time.time()
self.log = log
totalSecs = int(end - begin)
secs = totalSecs % 60
mins = (totalSecs // 60) % 60
hours = totalSecs // (60*60)
if self.verbose:
print(' ...solvers run; took {}h{}m{}s'.format(hours, mins, secs),
file=self.log,
flush=True)
self.executed = True
def getResultsAsString(self,
requestedColumns=None,
pretty=False,
label=False):
if len(self.results) <= 0:
raise RuntimeWarning(
'There are no results, maybe due to missing test files.')
return ''
if requestedColumns:
columns = requestedColumns
else:
columns = list(self.results[0].keys())
def formatField(field):
width = 1 + max(list(map(len, columns)))
template = '{:'
if pretty:
if type(field) == str:
template += '>'
template += str(width)
if type(field) == float:
template += '.4f'
template += '}'
return template.format(field)
toReturn = ""
if label:
for c in columns[:-1]:
toReturn += formatField(c) + ','
toReturn += formatField(columns[-1]) + '\n'
for r in self.results:
for c in columns[:-1]:
field = formatField(r[c])
toReturn += field + ','
toReturn += formatField(r[columns[-1]]) + '\n'
return toReturn
def printResults(self,
outfile=None,
requestedColumns=None,
pretty=False,
label=False,
csv=False,
res=None):
if outfile and (type(outfile) is not str and not isinstance(outfile, IOBase)):
raise TypeError('outfile::{} should be a str or a IOBase.'
.format(type(outfile)))
ownFile = False
if outfile:
if type(outfile) == str:
f = open(outfile, 'w')
ownFile = True
elif isinstance(outfile, IOBase):
f = outfile
else:
f = sys.stdout
if csv:
f.write(
self.getResultsAsString(
requestedColumns=requestedColumns,
pretty=pretty,
label=label
)
)
else:
f.write(
json.dumps(
res if res else self.results,
indent=2,
)
)
if ownFile:
f.close()
| UTF-8 | Python | false | false | 7,223 | py | 13 | experiment.py | 12 | 0.457289 | 0.454243 | 0 | 253 | 27.521739 | 86 |
zabuldon/pytables | 5,935,644,808,127 | ed56a39c8f3061f408bc3abc4719550fa347ab99 | 86df653eedd97cd69010c4650188134066f7be01 | /pytables/__main__.py | 55b8598bd9f9703edbb2bd4fabbc57c712efe04e | []
| no_license | https://github.com/zabuldon/pytables | b8d979c44f7839096f20ef4ea8687444f48b330f | 7288639a3af8c43b86a1d2c64a2d452e95836de7 | refs/heads/master | 2020-04-22T15:52:43.232962 | 2019-02-15T09:18:26 | 2019-02-15T09:21:00 | 170,490,454 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """Start PyTables."""
import sys
from pytables.const import REQUIRED_PYTHON_VER
def validate_python() -> None:
"""Validate that the right Python version is running."""
if sys.version_info[:3] < REQUIRED_PYTHON_VER:
print("Home Assistant requires at least Python {}.{}.{}".format(*REQUIRED_PYTHON_VER))
sys.exit(1)
| UTF-8 | Python | false | false | 341 | py | 12 | __main__.py | 6 | 0.671554 | 0.665689 | 0 | 11 | 30 | 94 |
hong8240/OPS435 | 19,679,540,178,385 | 9d07156affb35e5eb4cd7b7ade627297b2717c5a | ee9928bcde26f2009b7129280c06fb4a0b8bd7f2 | /lab2/lab2f.py | eb32d0613c0894c017e02c8f36b6bba0d9acb9d1 | []
| no_license | https://github.com/hong8240/OPS435 | 48061b6e83ef177135dbbb1fca7a7bf97e89b481 | 0f12801a89e05a3476ae52b8335cac6f93350050 | refs/heads/master | 2022-09-15T20:21:20.922883 | 2020-06-05T02:28:58 | 2020-06-05T02:28:58 | 269,511,056 | 2 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
import sys
count = int(sys.argv[1])
while count !=0:
print(count)
count = count - 1
print('blast off!')
| UTF-8 | Python | false | false | 135 | py | 43 | lab2f.py | 42 | 0.637037 | 0.607407 | 0 | 7 | 18.285714 | 24 |
razorblack/python_programming | 17,119,739,648,307 | b0a322957d408d2b0013ac86302d53f69e7c1a01 | c33e3729160318c382b7a8e40692b915b47a20ca | /PythonLab/Program3.py | bdbfe75762a4cdba8dcdb95887ee02e4849b8da2 | []
| no_license | https://github.com/razorblack/python_programming | 230c857404dd0f14da69dfe7ea81d98f3458065f | 32703c8c5ae0c7811b2d0d494ad2c019dcd0d299 | refs/heads/master | 2023-08-03T08:03:01.908187 | 2021-08-31T16:14:28 | 2021-08-31T16:14:28 | 321,036,634 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Method to find most frequent word
def most_frequent(fname):
count = 0 # count of a specific word
maxcount = 0 # maximum among the count of each words
l = [] # list to store the words with maximum count
with open(fname, 'r') as f:
contents = f.read()
words = contents.split()
# Logic for max occurring word
for i in range(len(words)):
for j in range(len(words)):
if words[i] == words[j]:
count += 1
if count >= maxcount:
l.clear()
l.append(words[i])
maxcount = count
count = 0
# Printing the list of most occurring word
print(l)
file = input("Enter the file name\n")
print("Most frequent word in the text file is:")
most_frequent(file)
| UTF-8 | Python | false | false | 821 | py | 46 | Program3.py | 42 | 0.54933 | 0.544458 | 0 | 28 | 28.321429 | 57 |
maiorem/Practice-to-JAVA | 15,247,133,943,661 | 4496deb8cf1de7648e2c270fe81b5bc5b56c5e7a | bfa0643f7d116eab256c83e0cac61af0f4716488 | /python/test.py | 32d1a0d3acb71d5173183a0a44ba4c3ec0669918 | []
| no_license | https://github.com/maiorem/Practice-to-JAVA | ab6edba8d3a72b9a678c512b4b2e228b1adaacbe | 743c643fd90d125fe75dc3df098fcb7277c659fb | refs/heads/master | 2023-03-05T11:22:15.192226 | 2023-02-26T11:28:05 | 2023-02-26T11:28:05 | 253,974,237 | 1 | 0 | null | false | 2023-03-09T16:15:46 | 2020-04-08T03:19:55 | 2023-03-07T08:42:38 | 2023-03-09T16:15:44 | 13,814 | 1 | 0 | 17 | Java | false | false | student_number=12
# 학생의 번호를 출력합니다.
print('학생 번호 :',student_number)
st_number_str=str(student_number) # 문자열로 변경해주는 형변환 함수 str
print(type(student_number)) # 변수의 타입 확인
print(type(st_number_str)) # 변수의 타입 확인
number_list1=range(10) # 0~9까지의 숫자 리스트 반환
number_list2=range(5,10) # 5~9까지의 숫자 리스트 반환
number_list3=range(0,10,2) # 0~9까지의 짝수 리스트 반환
print('number_list1 :', type(number_list1))
print('number_list2 :', number_list2)
print('number_list3 :', number_list3)
for i in number_list1 :
print(i)
del student_number # 변수 삭제
| UTF-8 | Python | false | false | 693 | py | 430 | test.py | 353 | 0.684601 | 0.634508 | 0 | 23 | 22.391304 | 57 |
CPYcpyCPY/amorino | 2,027,224,581,501 | 8ce80896149c52542045ae7ccb74faa653089f64 | 7bb3ab7a19f7ff28133e947dbeeea9f5937fd4ea | /myApp/manager/member_manager.py | bb83586a85769a660308b5d241894f6ff8e44407 | []
| no_license | https://github.com/CPYcpyCPY/amorino | 6f2086a110ae7433afa94d8ec85019a111fe6c0b | 64e0fc893bd079163fe589ada5b1630c25bb470f | refs/heads/master | 2020-07-08T20:50:02.172784 | 2017-03-03T00:34:02 | 2017-03-03T00:34:02 | 67,266,491 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # coding=utf-8
import json
# import sqlite3
from collect_manager import collectManager
from json_manager import jsonManager
# 会员管理
class MemberManager(object):
def __init__(self):
self.users_path = 'static/data/users.json'
self.users = []
self.get_user()
self.infos = {
"err": "用户名或密码不能为空",
'exist': "注册用户名已存在",
'noExist': "删除的用户名不存在",
'addSuccess': "注册成功",
'deleteSuccess': "删除成功"
}
self.result = {
"info": "",
"users": ""
}
def get_user(self):
data = jsonManager.read_json(self.users_path)
for user in data['users']:
self.users.append(user)
def operate(self, request, operation):
if operation == 'add':
self.result['info'] = self.add_user(request)
else:
self.result['info'] = self.delete_user(request)
self.result['users'] = self.users
return json.dumps(self.result)
def add_user(self, request):
username = request['username']
password = request['password']
if username == '' or password == '':
return self.infos['err']
if self.exist(username):
return self.infos['exist']
user = {
'username': username,
'password': password
}
data = jsonManager.read_json(self.users_path)
data['users'].append(user)
jsonManager.write_json(self.users_path, data)
self.users = data['users']
collectManager.add_user(username)
return self.infos['addSuccess']
def delete_user(self, request):
username = request['username']
if self.exist(username) == 0:
return self.infos['noExist']
data = jsonManager.read_json(self.users_path)
for user in data['users']:
if user['username'] == username:
data['users'].remove(user)
pass
jsonManager.write_json(self.users_path, data)
self.users = data['users']
collectManager.delete_user(username)
return self.infos['deleteSuccess']
def exist(self, username):
for user in self.users:
if user['username'] == username:
return 1
return 0
def valid(self, username, password):
for user in self.users:
if user['username'] == username and user['password'] == password:
return 1
return 0
def get_members(self):
return self.users
memberManager = MemberManager()
| UTF-8 | Python | false | false | 2,672 | py | 71 | member_manager.py | 33 | 0.551272 | 0.548574 | 0 | 89 | 28.146067 | 77 |
Cesar4280/cine | 4,990,752,000,019 | 40f9862630c1f8e897e874d31926e64bfe181f66 | b8a47a3fd7afc51efb230c2578611e37b724287f | /src/config.py | 4bf460fbd3f67ba327c93fefcde22b093b47c164 | []
| no_license | https://github.com/Cesar4280/cine | 7a011e76caacb1183b80c9161d874a2d06107c65 | 4f06770efaf3850f2011354ff0bd0dee248301c6 | refs/heads/main | 2023-08-28T13:05:36.803433 | 2021-11-02T11:56:20 | 2021-11-02T11:56:20 | 416,561,187 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | class Config:
#Creamos una llave secreta para las cookies
SECRET_KEY = 'CINEFILOS SECRET' | UTF-8 | Python | false | false | 97 | py | 26 | config.py | 15 | 0.731959 | 0.731959 | 0 | 3 | 31.666667 | 47 |
neophack/bf3-bots | 11,089,605,559,152 | 8ba2d47f7101a5de14dea57430d3e3c17c36316d | 6141530d1a92ba34da031ac2381dd708effea772 | /tests/test_vectorization.py | 15225322ec69c81c3b73c2d9d08d60e713367d91 | [
"MIT"
]
| permissive | https://github.com/neophack/bf3-bots | 4e576a4867a6b8068827d973583d736cd938d008 | 8a802a8c0eeb055e1edc16e18c9944cfc0126bfd | refs/heads/main | 2023-03-15T18:35:16.798919 | 2021-03-06T22:41:27 | 2021-03-06T22:41:27 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from numba import njit
import numpy as np
import time
@njit(parallel=True)
def vectorized_add(a, b):
return a + b
@njit(parallel=True)
def vectorized_subtract(a, b):
return a + b
@njit(parallel=True)
def vectorized_mul(a, b):
return a * b
@njit(parallel=True)
def vectorized_div(a, b):
return a / b
@njit(parallel=True)
def vectorized_power(a, b):
return np.power(a, b)
def test_vectorization():
a = np.full([256, 256], 1.0, dtype=np.float32)
b = np.full([256, 256], 2.0, dtype=np.float32)
ts = time.time()
vectorized_add(a, b)
te = time.time()
print(f"Completed vectorized_add in {te-ts}s")
ts = time.time()
vectorized_subtract(a, b)
te = time.time()
print(f"Completed vectorized_subtract in {te-ts}s")
ts = time.time()
vectorized_mul(a, b)
te = time.time()
print(f"Completed vectorized_mul in {te-ts}s")
ts = time.time()
vectorized_div(a, b)
te = time.time()
print(f"Completed vectorized_div in {te-ts}s")
ts = time.time()
vectorized_power(a, b)
te = time.time()
print(f"Completed vectorized_power in {te-ts}s") | UTF-8 | Python | false | false | 1,132 | py | 94 | test_vectorization.py | 49 | 0.627208 | 0.609541 | 0 | 52 | 20.788462 | 55 |
scrambldchannel/berlin-trees-api-pywrapper | 3,143,916,071,320 | 36a55bd7a8d7aed4d782a9ce199c040afb67b477 | 96248533e1f59cfb6fd553d67f8badf63fa80b99 | /berlintreesapiwrapper/__init__.py | d5c4a0ab48e7c249f4e3f13bbdfb64dff4ec97e9 | [
"MIT"
]
| permissive | https://github.com/scrambldchannel/berlin-trees-api-pywrapper | 88d6ab84b5f8b11f1a59b7ac7ba0f32492a12acf | c6eec35f8f68aba70983b175adfbfcc235a6f114 | refs/heads/master | 2020-07-31T12:45:58.845672 | 2019-11-16T19:53:20 | 2019-11-16T19:53:20 | 210,607,828 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from .api import TreesWrapper
| UTF-8 | Python | false | false | 30 | py | 5 | __init__.py | 4 | 0.833333 | 0.833333 | 0 | 1 | 29 | 29 |
Seanny123/POVME | 10,453,950,445,823 | 6a9c70792e02463c72b14c9b2ac6490256ba85ae | c990d51a499641eea86cb59b6846abe67bc3db3e | /POVME/packages/in_progress/pocket_alignment_and_clustering/hardTestCase/makeTestCase.py | 3e60116cd01db499b3e03ec2c52489595ec90540 | [
"MIT"
]
| permissive | https://github.com/Seanny123/POVME | 04484a271a4dbf7b2498a815f996489680a0492a | 253dc95fe5001ee489b511488b4564bd6257f80c | refs/heads/master | 2020-11-24T01:35:40.301944 | 2019-12-13T19:33:01 | 2019-12-13T19:33:01 | 227,907,245 | 1 | 0 | MIT | true | 2019-12-13T19:20:20 | 2019-12-13T19:20:19 | 2019-08-19T21:59:47 | 2019-07-18T14:33:38 | 25,165 | 0 | 0 | 0 | null | false | false | import POVME.packages.binana.peel as peel
import numpy as np
import pylab
import sys
import random
randomFactor = 0.75
plot = False
createPdbs = True
createNpys = True
mainPocket = [peel.point([0,0,0]),7]
sidePocket1 = [peel.point([0,0,6]),3]
sidePocket2 = [peel.point([0,6,0]),3]
sidePocket3 = [peel.point([6,0,0]),3]
sidePocket4 = [peel.point([0,4,4]),3]
families = {}
# families[1] = [21,mainPocket,sidePocket1]
# families[2] = [21,mainPocket,sidePocket2]
# families[3] = [21,mainPocket,sidePocket3]
# families[4] = [20,mainPocket,sidePocket4]
# families[5] = [20,mainPocket]
# families[6] = [20,mainPocket,sidePocket1,sidePocket2]
# families[7] = [20,mainPocket,sidePocket1,sidePocket4]
# families[8] = [20,mainPocket,sidePocket1,sidePocket2,sidePocket4]
# families[9] = [20,mainPocket,sidePocket1,sidePocket2,sidePocket3,sidePocket4]
families[1] = [10,mainPocket,sidePocket1]
families[2] = [31,mainPocket,sidePocket2]
families[3] = [7,mainPocket,sidePocket3]
families[4] = [46,mainPocket,sidePocket4]
families[5] = [17,mainPocket]
families[6] = [25,mainPocket,sidePocket1,sidePocket2]
families[7] = [2,mainPocket,sidePocket1,sidePocket4]
families[8] = [18,mainPocket,sidePocket1,sidePocket2,sidePocket4]
families[9] = [27,mainPocket,sidePocket1,sidePocket2,sidePocket3,sidePocket4]
familyMembership = []
#for index, family in enumerate(families.keys()):
for family in list(families.keys()):
for replicate in range(families[family][0]):
outPrefix = 'family%i_rep%02i' %(family, replicate)
this_fm = peel.featureMap([-20,20,-20,20,-20,20],1)
for pocket in families[family][1:]:
randomShift = peel.point(np.random.normal(0,randomFactor,(3)))
this_fm.add_sphere(pocket[0].point_sum_new(randomShift),
pocket[1])
familyMembership.append((family, outPrefix, this_fm))
#if 'createPdbs' in sys.argv:
if createPdbs == True:
this_fm.write_pdb(outPrefix+'.pdb')
if createNpys == True:
np.save(outPrefix+'.npy', this_fm.toPovmeList()[:,:3])
familyMap = np.zeros((len(familyMembership),len(familyMembership)))
for iInd, i in enumerate(familyMembership):
for jInd, j in enumerate(familyMembership):
if i[0] == j[0]:
familyMap[iInd, jInd] = 1
if plot:
pylab.imshow(familyMap, interpolation='nearest')
pylab.show()
familyMembershipShuffled = [i for i in familyMembership]
random.shuffle(familyMembershipShuffled)
familyMap = np.zeros((len(familyMembershipShuffled),len(familyMembershipShuffled)))
for iInd, i in enumerate(familyMembershipShuffled):
if createNpys == True:
np.save('shuffled%03i.npy' %(iInd),i[2].toPovmeList()[:,:3])
if createPdbs == True:
i[2].write_pdb('shuffled%03i.pdb' %(iInd))
for jInd, j in enumerate(familyMembershipShuffled):
if i[0] == j[0]:
familyMap[iInd, jInd] = 1
if plot:
pylab.imshow(familyMap, interpolation='nearest')
pylab.show()
import time
with open('key_%s' %(time.strftime('%y_%m_%d_%H_%M_%S')),'wb') as of:
of.write('\n'.join(['%r %r' %(i[0],i[1]) for i in familyMembershipShuffled]))
| UTF-8 | Python | false | false | 3,183 | py | 67 | makeTestCase.py | 51 | 0.674835 | 0.628966 | 0 | 86 | 35.837209 | 83 |
takenmore/Leetcode_record | 5,772,436,050,518 | ce1e74eb519e43ccfd6a4a4ce693582421e306f1 | bd0fb69648ff4516e8e1ac3840509d22c5e4e6fa | /offer_jz/39.MajorElement.py | 7f2923283e60e8d5e719c628e3d4aa2a3daaebe9 | []
| no_license | https://github.com/takenmore/Leetcode_record | d523b997f7e1b80e1841f007e48b6ed84b38e6c5 | 7ebe6f3a373403125549346c49a08f9c554dafac | refs/heads/master | 2022-12-02T04:50:02.000415 | 2020-08-08T05:45:20 | 2020-08-08T05:45:20 | 262,273,250 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
数组中有一个数字出现的次数超过数组长度的一半,请找出这个数字。
'''
from typing import List
import random
'''
两种解法 摩尔投票法 / 快速选择 partition / 哈希
快速选择 超时。 但渐进时间复杂度应该是一致的 且修改了数组
摩尔投票法 : 极限一换一,换到最后的就是众数
哈希 : 未写 通过记录次数。 申请了额外空间
'''
class Solution:
def majorityElement(self, nums: List[int]) -> int:
res = nums[0]
times = 1
for i in range(1, len(nums)):
if times == 0:
res = nums[i]
times = 1
else:
times = times + 1 if nums[i] == res else times - 1
return res
def majorityElement_p(self, nums: List[int]) -> int:
n = len(nums)
if n <= 0:
return None
target = n >> 1
left, right = 0, n - 1
index = self.partition(nums, left, right)
while index != target:
index = self.partition(nums, index + 1, right) \
if index < target else self.partition(nums, left, index - 1)
return nums[index]
def partition(self, nums, first, last):
r_index = random.randint(first, last)
nums[first], nums[r_index] = nums[r_index], nums[first]
pivot = nums[first]
pos = first
for i in range(first + 1, last + 1):
if nums[i] < pivot:
pos += 1
if pos != i:
nums[i], nums[pos] = nums[pos], nums[i]
nums[first], nums[pos] = nums[pos], nums[first]
return pos
S = Solution()
print(S.majorityElement([2, 1, 3, 3]))
| UTF-8 | Python | false | false | 1,730 | py | 117 | 39.MajorElement.py | 116 | 0.513963 | 0.500665 | 0 | 52 | 27.923077 | 76 |
fromkotadd/instagram_bot_remove_freeloaders | 16,630,113,376,676 | 7f42e68a264792ed4038533640827704b7694b7b | bc3974d65b1769801e4f34dea1cb4f2077c030e0 | /remove_freeloaders_user.py | ad87a729559271577d98401d8f37b9664a024cf0 | []
| no_license | https://github.com/fromkotadd/instagram_bot_remove_freeloaders | e1555150925f59099d79590fc5bca681dccbb796 | 445a0a575f9ac3395048556d6ac82fb0f2d00609 | refs/heads/main | 2023-07-24T05:04:11.084652 | 2021-09-06T08:04:04 | 2021-09-06T08:04:04 | 398,222,795 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from auto_login_insta import login
from selenium import webdriver
from auth_data import username, password, nickname
import time
import random
# метод отписки, отписываемся от всех кто не подписан на нас
def smart_unsubscribe(username):
browser = webdriver.Chrome(r'chromedriver.exe')
login(username, password, browser)
browser.get(f"https://www.instagram.com/{nickname}/")
time.sleep(random.randrange(3, 6))
followers_button = browser.find_element_by_xpath(
"/html/body/div[1]/section/main/div/header/section/ul/li[2]/a/span")
followers_count = followers_button.get_attribute("title")
following_button = browser.find_element_by_xpath("/html/body/div[1]/section/main/div/header/section/ul/li[3]/a")
following_count = following_button.find_element_by_tag_name("span").text
time.sleep(random.randrange(3, 6))
# если количество подписчиков больше 999, убираем из числа запятые
if ',' in followers_count or following_count:
followers_count, following_count = int(''.join(followers_count.split(','))), int(
''.join(following_count.split(',')))
else:
followers_count, following_count = int(followers_count), int(following_count)
print(f"Количество подписчиков: {followers_count}")
followers_loops_count = int(followers_count / 12) + 1
print(f"Число итераций для сбора подписчиков: {followers_loops_count}")
print(f"Количество подписок: {following_count}")
following_loops_count = int(following_count / 12) + 1
print(f"Число итераций для сбора подписок: {following_loops_count}")
# собираем список подписчиков
followers_button.click()
time.sleep(random.randrange(4, 6))
followers_ul = browser.find_element_by_class_name("isgrP")
time.sleep(random.randrange(4, 6))
try:
followers_urls = []
print("Запускаем сбор подписчиков...")
for i in range(1, followers_loops_count + 1):
browser.execute_script("arguments[0].scrollTop = arguments[0].scrollHeight", followers_ul)
time.sleep(random.randrange(2, 4))
print(f"Итерация #{i}")
all_urls_div = followers_ul.find_elements_by_tag_name("li")
for url in all_urls_div:
url = url.find_element_by_tag_name("a").get_attribute("href")
followers_urls.append(url)
# сохраняем всех подписчиков пользователя в файл
with open(f"{nickname}_followers_list.txt", "w") as followers_file:
for link in followers_urls:
followers_file.write(link + "\n")
except Exception as ex:
print(ex)
browser.close()
browser.quit()
time.sleep(random.randrange(4, 6))
browser.get(f"https://www.instagram.com/{nickname}/")
time.sleep(random.randrange(3, 6))
# собираем список подписок
following_button = browser.find_element_by_xpath("/html/body/div[1]/section/main/div/header/section/ul/li[3]/a")
following_button.click()
time.sleep(random.randrange(3, 5))
following_ul = browser.find_element_by_class_name("isgrP")
try:
following_urls = []
print("Запускаем сбор подписок")
for i in range(1, following_loops_count + 1):
browser.execute_script("arguments[0].scrollTop = arguments[0].scrollHeight", following_ul)
time.sleep(random.randrange(2, 4))
print(f"Итерация #{i}")
all_urls_div = following_ul.find_elements_by_tag_name("li")
for url in all_urls_div:
url = url.find_element_by_tag_name("a").get_attribute("href")
following_urls.append(url)
# сохраняем всех подписок пользователя в файл
with open(f"{nickname}_following_list.txt", "w") as following_file:
for link in following_urls:
following_file.write(link + "\n")
"""Сравниваем два списка, если пользователь есть в подписках, но его нет в подписчиках,
заносим его в отдельный список"""
count = 0
unfollow_list = []
for user in following_urls:
if user not in followers_urls:
count += 1
unfollow_list.append(user)
print(f"Нужно отписаться от {count} пользователей")
# сохраняем всех от кого нужно отписаться в файл
with open(f"{nickname}_unfollow_list.txt", "w") as unfollow_file:
for user in unfollow_list:
unfollow_file.write(user + "\n")
print('Список создан')
print("Запускаем отписку...")
time.sleep(2)
# заходим к каждому пользователю на страницу и отписываемся
with open(f"{nickname}_unfollow_list.txt") as unfollow_file:
unfollow_users_list = unfollow_file.readlines()
unfollow_users_list = [row.strip() for row in unfollow_users_list]
try:
count = len(unfollow_users_list)
for user_url in unfollow_users_list:
browser.get(user_url)
time.sleep(random.randrange(4, 6))
# кнопка отписки
unfollow_button = browser.find_element_by_class_name("glyphsSpriteFriend_Follow")
time.sleep(random.randrange(4, 6))
unfollow_button.click()
time.sleep(random.randrange(4, 6))
# подтверждение отписки
unfollow_button_confirm = browser.find_element_by_class_name("mt3GC").find_element_by_class_name(
"aOOlW")
unfollow_button_confirm.click()
print(f"Отписались от {user_url}")
count -= 1
if not count:
print('удаление завершенно')
print(f"Осталось отписаться от: {count} пользователей")
time.sleep(random.randrange(20, 30))
except Exception as ex:
print(ex)
browser.close()
browser.quit()
except Exception as ex:
print(ex)
browser.close()
browser.quit()
time.sleep(random.randrange(4, 6))
browser.close()
browser.quit()
if __name__ == "__main__":
smart_unsubscribe(username)
| UTF-8 | Python | false | false | 6,228 | py | 4 | remove_freeloaders_user.py | 3 | 0.689624 | 0.679084 | 0 | 170 | 30.370588 | 113 |
chine007/BL_code | 446,676,637,404 | b4d5bf7cf900dadb458ee24f2828b18e5cfc551e | 097e7d0adf1f69367e386ba8c742bb41ec81927f | /读观点参数表格进行BL计算.py | eef673b6599d668672f18518c9cfcb9984b7e5a3 | []
| no_license | https://github.com/chine007/BL_code | 1578a010d62076c3b5971adfae6af21aaff5141b | 6b3f31c68e9edcf37e119bb98397a8c819939c3f | refs/heads/master | 2021-04-25T05:37:59.320913 | 2017-09-12T16:52:35 | 2017-09-12T16:52:35 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Mon Sep 4 16:04:48 2017
@author: skofield
"""
import os
from bl_funcs import getpqc
from bl_funcs import bl
import xlrd
import numpy as np
import pandas as pd
from WindPy import w
import datetime
from xlrd import xldate_as_tuple
import xlwt
# Use the current directory as the working directory
working_dir=os.getcwd()
# Path of the BL initialization parameter file
bl_ini_filepath=working_dir+"\\"+"bl_ini.xls"
# Path of the daily asset return spreadsheet
daily_r_path=working_dir+"\\"+"资产日收益率.xls"
# Path of the BL view (investor opinion) parameter spreadsheet
bl_view_filepath=working_dir+"\\"+"bl_view.xls"
# Open the BL initialization file, the daily return file and the BL view file
bl_ini_data=xlrd.open_workbook(bl_ini_filepath)
daily_r_data=xlrd.open_workbook(daily_r_path)
bl_view_data=xlrd.open_workbook(bl_view_filepath)
# Read the trading start/end dates from the BL initialization file and
# normalize them (Excel serial date -> datetime.date -> ISO string)
bl_ini_table=bl_ini_data.sheets()[0]
trade_start_date=bl_ini_table.cell(11,1).value
trade_end_date=bl_ini_table.cell(13,1).value
trade_start_date=datetime.datetime(*xldate_as_tuple(trade_start_date,0)).date()
trade_end_date=datetime.datetime(*xldate_as_tuple(trade_end_date,0)).date()
trade_start_date=str(trade_start_date)
trade_end_date=str(trade_end_date)
# Initial market weights from the BL initialization file (column vector)
w_mkt=bl_ini_table.row_values(3)[1:]
w_mkt=np.mat(w_mkt).T
# Stock code list from the BL initialization file, and its length
stock_list=bl_ini_table.row_values(0)[1:]
stock_count=len(stock_list)
# Asset display names (stock_name)
stock_name=bl_ini_table.row_values(1)[1:]
# Risk aversion coefficient delta
delta=bl_ini_table.cell(5,1).value
# Look-back window length, in days
recall_days=bl_ini_table.cell(15,1).value
# Build a dataframe from the daily asset return spreadsheet
# NOTE(review): 'sheetname' was renamed 'sheet_name' in newer pandas -
# confirm which pandas version this project pins
daily_r_df=pd.read_excel(daily_r_path,sheetname=0,header=0,index_col=0)
daily_r_df.index=pd.to_datetime(daily_r_df.index)
# Trading days inside the trading window, as a time-series index
trade_series=daily_r_df[trade_start_date:trade_end_date].index
###############################################################################
#                                                                             #
#                               Trading starts                                #
#                                                                             #
###############################################################################
# Within the trading window, rebalance monthly. On entering a new month, take
# the trailing history window and read a new set of views when one is provided.
view_sheet=-1
cur_month=''
# Daily net values of the BL portfolio
port_netval=[]
# New excel workbook recording the BL weights computed each month
monthly_bl_w=xlwt.Workbook()
# Output path of that workbook
bl_result_filepath=working_dir+'\\output\\bl_result.xls'
# Loop over the trading days
for i in range(0,len(trade_series)):
    # Check whether a new month has started
    if(str(trade_series[i])[0:7]!=cur_month):
        # On a new month, rebuild the look-back window of recall_days
        recall_start_date=trade_series[i]-datetime.timedelta(recall_days)
        recall_end_date=trade_series[i]-datetime.timedelta(1)
        # Remember the current month
        cur_month=str(trade_series[i])[0:7]
        # Advance to this month's view sheet
        view_sheet+=1
        # If this month's sheet carries new views, read them; otherwise the
        # previously read views stay in effect
        # NOTE(review): if the very first sheet carries no views, P/Q/LC and
        # view_count are unbound when bl() is called below - confirm
        # bl_view.xls always provides views in sheet 0
        if bl_view_data.sheets()[view_sheet].cell(1,0).value==1:
            P,Q,LC,view_count=getpqc(bl_view_filepath,view_sheet,stock_count)
        # Compute this month's BL weights
        w_bl=bl(daily_r_path,recall_start_date,recall_end_date,delta,w_mkt,P,Q,LC,view_count)
        # Add a sheet to the workbook recording this month's weight result
        new_sheet=monthly_bl_w.add_sheet(cur_month,cell_overwrite_ok=True)
        # Display BL results with four decimals when written to excel
        style1=xlwt.XFStyle()
        fmt='##0.0000'
        style1.num_format_str=fmt
        new_sheet.write(0,0,'资产列表: ')
        new_sheet.write(3,0,'当月BL权重: ')
        new_sheet.col(0).width = (len('当月BL权重')*460)
        for j in range(0,len(stock_list)):
            new_sheet.write(0,j+1,stock_list[j])
            new_sheet.write(1,j+1,stock_name[j])
            new_sheet.write(3,j+1,float(w_bl[j]),style1)
            new_sheet.col(j+1).width = (len('沪深300工业')*460) #set excel column width
    # Daily portfolio net value under the current weights
    net_val=float(np.mat(daily_r_df[str(trade_series[i])]*0.01+1)*w_bl)
    port_netval.append(net_val)
# Save the monthly BL weight allocations to file
monthly_bl_w.save(bl_result_filepath)
# Attach the trading-day index to the daily net values, building a Series
port_netval=pd.Series(port_netval,index=trade_series,name='BL组合日净值')
# Output path for the daily net values, then write them to csv
port_netval_filepath=working_dir+'\\output\\port_netval.csv'
port_netval.to_csv(port_netval_filepath)
| UTF-8 | Python | false | false | 5,083 | py | 4 | 读观点参数表格进行BL计算.py | 3 | 0.617455 | 0.598061 | 0 | 140 | 27.364286 | 89 |
katajakasa/aetherguild4 | 16,003,048,170,232 | bcc40b3348d8e7a68d493c0999d72c79a5926ca8 | c62bd77742f921b8f50b886db7488ce03725f5ab | /aether/gallery/__init__.py | 5216e250601645afd35620cae089929a9f01287a | [
"MIT"
]
| permissive | https://github.com/katajakasa/aetherguild4 | a361688a87d86ae2284a4c07aa9fe9d6b91d2fbb | 2d51f73fad15bfa9a0da052f2509b308d566fafa | refs/heads/master | 2023-08-03T19:51:43.808931 | 2023-07-28T17:35:01 | 2023-07-28T17:35:01 | 143,641,102 | 0 | 0 | MIT | false | 2023-05-09T22:42:13 | 2018-08-05T19:17:15 | 2022-01-07T22:54:12 | 2023-05-09T22:42:10 | 1,349 | 0 | 0 | 3 | Python | false | false | default_app_config = "aether.gallery.apps.GalleryConfig"
| UTF-8 | Python | false | false | 57 | py | 70 | __init__.py | 50 | 0.807018 | 0.807018 | 0 | 1 | 56 | 56 |
achim0308/finance | 15,178,414,472,704 | a9236c7ddb1aba1b6657e5122af36c535713c45b | ff6b338bc2cdb4799242b53898c5fbb772400134 | /returns/calc.py | 883aa54f21a8a86a1ab46ed1225db9cfed4aff5e | []
| no_license | https://github.com/achim0308/finance | 572123d76c0ad48fa92ffa587469d58c2f5bce32 | c9454cbfe1cf8f0d88e1087616a80d0982346dee | refs/heads/master | 2023-08-19T21:39:28.792736 | 2023-08-09T20:34:38 | 2023-08-09T20:34:38 | 54,742,400 | 0 | 0 | null | false | 2021-12-17T22:06:49 | 2016-03-25T19:20:48 | 2021-12-16T00:51:31 | 2021-12-17T22:06:48 | 6,170 | 0 | 0 | 0 | HTML | false | false | from datetime import date
def newtonSolve(f, df, x0, absTol=1E-4, relTol=1E-4, itMax=50, damping=0.70,
                lowerBound=-1.0, upperBound=10.0):
    """Find a root of f via damped Newton iteration.

    Arguments:
        f, df    -- the function and its first derivative
        x0       -- initial guess
        absTol   -- absolute tolerance on the last step size
        relTol   -- relative tolerance on the last step size
        itMax    -- maximum number of iterations
        damping  -- fraction of the full Newton step taken per iteration
        lowerBound, upperBound -- iterates outside [lowerBound, upperBound]
                    abort the search; the defaults suit rate-of-return
                    solving (rates below -100% or above 1000% are nonsense)

    Returns the approximate root.

    Raises StopIteration (kept for backward compatibility with existing
    callers) when the iteration budget is exhausted or the iterate leaves
    the allowed interval.
    """
    lastX = x0
    nextX = lastX + 10.0 * absTol  # force at least one loop pass
    it = 0
    # Stop only once the last step is small both absolutely and relatively.
    while (abs(lastX - nextX) > absTol or abs(lastX - nextX) > relTol*abs(lastX)):
        it += 1
        if it > itMax:
            raise StopIteration('Exceed iteration count')
        newY = f(nextX)
        lastX = nextX
        # Divergence guard on the current iterate (checked before stepping,
        # matching the original behavior of this function).
        if nextX > upperBound or nextX < lowerBound:
            raise StopIteration('Diverging')
        try:
            nextX = lastX - damping * newY / df(nextX)
        except ZeroDivisionError:
            # Flat derivative: nudge the iterate instead of dividing by zero.
            nextX = lastX + absTol
    return nextX
def solverF2(rate, cashflowList):
    """Net present value of dated cashflows at the given annual rate.

    Each entry of cashflowList is a dict with 'date' and 'cashflow' keys;
    discounting uses years of 365 days measured from the first entry.
    """
    base_date = cashflowList[0]['date']
    npv = 0.0
    for entry in cashflowList:
        years = (entry['date'] - base_date).days / 365.0
        npv += float(entry['cashflow']) / (1 + rate) ** years
    return npv
def solverDF2(rate, cashflowList):
    """Derivative of solverF2 with respect to the rate (same conventions)."""
    base_date = cashflowList[0]['date']
    total = 0.0
    for entry in cashflowList:
        years = (entry['date'] - base_date).days / 365.0
        total -= years * float(entry['cashflow']) / (1 + rate) ** (years + 1.0)
    return total
def callSolver2(cashflowList):
    """Return the annualized internal rate of return (in percent).

    Raises RuntimeError when no cashflows are given or when the Newton
    iteration does not converge.
    """
    if not cashflowList:
        raise RuntimeError('Empty list')

    def npv(rate):
        return solverF2(rate, cashflowList)

    def npv_prime(rate):
        return solverDF2(rate, cashflowList)

    try:
        rate = newtonSolve(npv, npv_prime, 0)
    except StopIteration:
        raise RuntimeError('Iteration limit exceeded')
    return float(rate) * 100.0
class Solver():
    """Solve for the internal rate of return of dated cashflows.

    Cashflows are accumulated per day offset from the first date seen;
    calcRateOfReturn() then finds the annualized rate (in percent) that
    makes their net present value zero.
    """

    def __init__(self):
        # Maps day offset (from the first cashflow date) -> summed cashflow.
        self.cashflowList = {}
        # Reference date; replaced by the date of the first cashflow added.
        self.date0 = date(2000,1,1)

    def __str__(self):
        # One "offset: amount" line per registered day offset.
        return "".join(
            str(d) + ": " + str(self.cashflowList[d]) + "\n"
            for d in self.cashflowList
        )

    def addCashflow(self, cashflow, date):
        """Register a cashflow on the given date, summing same-day flows.

        Note: the parameter name 'date' shadows datetime.date inside this
        method; it is kept for backward compatibility with keyword callers.
        """
        if not self.cashflowList:
            # The first cashflow defines the reference date.
            self.date0 = date
        diffDate = (date - self.date0).days
        self.cashflowList[diffDate] = self.cashflowList.get(diffDate, 0.0) + float(cashflow)

    def calcRateOfReturn(self):
        """Return the annualized internal rate of return, in percent.

        Raises RuntimeError if no cashflows were added or the Newton
        iteration fails to converge (original cause is chained).
        """
        if not self.cashflowList:
            raise RuntimeError('Empty list')
        try:
            r = self._newtonSolve(f=self._solverF, df=self._solverDF, x0=0.0)
        except StopIteration as e:
            raise RuntimeError('Iteration limit exceeded') from e
        return float(r)*100.0

    # private functions
    def _solverF(self, rate):
        """Net present value of the stored cashflows at the given rate."""
        return sum(cashflow / (1 + rate)**(diffDays / 365.0)
                   for diffDays, cashflow in self.cashflowList.items())

    def _solverDF(self, rate):
        """Derivative of _solverF with respect to the rate."""
        return sum(-diffDays/365.0 * cashflow / (1 + rate)**(diffDays / 365.0 + 1.0)
                   for diffDays, cashflow in self.cashflowList.items())

    def _newtonSolve(self, f, df, x0, absTol=1E-4, relTol=1E-4, itMax=50, damping=0.70):
        """Damped Newton iteration; raises StopIteration on failure."""
        lastX = x0
        nextX = lastX + 10.0 * absTol  # force at least one loop pass
        it = 0
        # Stop when the last step is small both absolutely and relatively.
        while abs(lastX - nextX) > absTol or abs(lastX - nextX) > relTol*abs(lastX):
            it += 1
            if it > itMax:
                raise StopIteration('Exceed iteration count')
            newY = f(nextX)
            lastX = nextX
            try:
                nextX = lastX - damping * newY / df(nextX)
            except ZeroDivisionError:
                # Flat derivative: nudge the iterate instead of dividing by zero.
                nextX = lastX + absTol
            # Rates below -100% or above 1000% mean the search diverged.
            if nextX > 10.0 or nextX < -1.0:
                raise StopIteration('Diverging')
        return nextX
| UTF-8 | Python | false | false | 3,616 | py | 52 | calc.py | 23 | 0.553374 | 0.5224 | 0 | 104 | 33.769231 | 143 |
BarracudaPff/code-golf-data-python | 15,496,242,004,348 | e0d8f720778c4c887c3f7b80a0f3837ba6a18600 | 7357d367b0af4650ccc5b783b7a59090fdde47bb | /py-appscript/tags/py-appscript-0.17.3/Lib/osaterminology/dom/applescripttypes.py | 3e538c28020ad4e851ab5651d3849f95b636ac87 | [
"MIT"
]
| permissive | https://github.com/BarracudaPff/code-golf-data-python | fb0cfc74d1777c4246d56a5db8525432bf37ab1a | 42e8858c2ebc6a061012bcadb167d29cebb85c5e | refs/heads/main | 2023-05-29T05:52:22.856551 | 2020-05-23T22:12:48 | 2020-05-23T22:12:48 | 378,832,634 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | """applescripttypes -- used to get default AppleScript type/class/enum names from AppleScript component; used by aeteparser.AppleScriptParser, sdefparser.AppscriptHandler"""
__all__ = ["typebycode", "enumerationbycode", "typebyname"]
class AeutTypesParser(aeteparser.Receiver):
    """Collects type/class names and enumerations from an 'aeut' resource.

    Feed an instance to aeteparser.parse(), then call result() to obtain
    the (typesbycode, enumerationsbycode, typesbyname) mappings.
    """

    def __init__(self):
        self._typesbycode = {}
        self._typesbyname = {}
        self._enumerationsbycode = {}

    def start_class(self, code, name, description):
        # Remember the class currently being parsed; treated as singular
        # until is_plural() is called.
        self._name = name
        self._code = code
        self._isplural = False

    def is_plural(self):
        self._isplural = True

    def end_class(self):
        # Only singular class names are recorded as type names.
        if self._isplural:
            return
        self._typesbycode[self._code] = self._name
        self._typesbyname[self._name] = self._code

    def start_enumeration(self, code):
        # The same list object is kept both on the instance (for appending)
        # and in the by-code mapping.
        self._enumeration = []
        self._enumerationsbycode[code] = self._enumeration

    def add_enumerator(self, code, name, description):
        self._enumeration.append((name, code))

    def result(self):
        # Ensure the file/furl alias is always present in both directions.
        self._typesbycode["furl"] = "file"
        self._typesbyname["file"] = "furl"
        return self._typesbycode, self._enumerationsbycode, self._typesbyname
p = AeutTypesParser()
aeteparser.parse(getterminology.getaeut(), p)
typebycode, enumerationbycode, typebyname = p.result() | UTF-8 | Python | false | false | 1,154 | py | 444 | applescripttypes.py | 437 | 0.725303 | 0.725303 | 0 | 28 | 40.25 | 173 |
absurdjaden/PAINT-PROJECT | 6,880,537,637,858 | 9891634cfe0c6a52d708b844e1e3c4326b835851 | eedc702c68afc931c7867ce5cd785f525050eeb2 | /program.py | a7c4f29ddd5e30c0e64df08b1aa683797608da75 | []
| no_license | https://github.com/absurdjaden/PAINT-PROJECT | dec6a316e652442476abaf7d334cc9d3b87eb865 | 5332fdde2116bd1c0020162c2f19e84346d64820 | refs/heads/master | 2020-11-25T05:43:35.479285 | 2020-01-24T04:36:08 | 2020-01-24T04:36:08 | 228,525,596 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from pygame import *
from random import *
from tkinter import *
from tkinter import filedialog
from math import *
# Window size and main drawing surface
width,height=1200,800
screen=display.set_mode((width,height)) #setting screen
# Hidden Tk root so tkinter.filedialog can run without an extra window
root=Tk()
root.withdraw() #hides extra window
# Font setup for the UI text
font.init()
arialFont=font.SysFont('Arial',15) #font of the paint project
font.get_fonts()
# Common colour constants (RGB)
BLACK=(0,0,0)
WHITE=(255,255,255)
###loading images
# All art assets are loaded once at startup. Names ending in T are
# thumbnails; Rest/Use pairs are the idle and active button states.
paletteWheel=image.load('images/colourWheel.jpg')
programTitle=image.load('images/kspLogo2.png')
programBackground=image.load('images/programLayout.png')
colDisplayFrame=image.load('images/colDisplay.png')
# Hover overlays for the three button sizes
hover=image.load('tools/hover.png')
hover2=image.load('tools/hover2.png')
hover3=image.load('tools/hover3.png')
# Canvas backgrounds and their thumbnails
bg1=image.load('backgrounds/bg1.png')
bg2=image.load('backgrounds/bg2.png')
bg3=image.load('backgrounds/bg3.png')
bg4=image.load('backgrounds/bg4.png')
bg5=image.load('backgrounds/bg5.png')
bg0T=image.load('backgrounds/bg0T.png')
bg1T=image.load('backgrounds/bg1T.png')
bg2T=image.load('backgrounds/bg2T.png')
bg3T=image.load('backgrounds/bg3T.png')
bg4T=image.load('backgrounds/bg4T.png')
bg5T=image.load('backgrounds/bg5T.png')
# Stamps and their thumbnails
stamp1=image.load('stamps/stamp1.png')
stamp2=image.load('stamps/stamp2.png')
stamp3=image.load('stamps/stamp3.png')
stamp4=image.load('stamps/stamp4.png')
stamp5=image.load('stamps/stamp5.png')
stamp6=image.load('stamps/stamp6.png')
stamp7=image.load('stamps/stamp7.png')
stamp8=image.load('stamps/stamp8.png')
stamp1T=image.load('stamps/stamp1T.png')
stamp2T=image.load('stamps/stamp2T.png')
stamp3T=image.load('stamps/stamp3T.png')
stamp4T=image.load('stamps/stamp4T.png')
stamp5T=image.load('stamps/stamp5T.png')
stamp6T=image.load('stamps/stamp6T.png')
stamp7T=image.load('stamps/stamp7T.png')
stamp8T=image.load('stamps/stamp8T.png')
# Mascot sprites
jeb1=image.load('extras/jebediahView1.png')
jeb2=image.load('extras/jebediahView2.png')
# Button icons, idle state (the original loaded rectRest twice; the
# redundant second load has been removed)
pencilRest=image.load('tools/pencilRest.png')
eraserRest=image.load('tools/eraserRest.png')
sprayRest=image.load('tools/sprayRest.png')
shapeRest=image.load('tools/shapeRest.png')
lineRest=image.load('tools/lineRest.png')
rectRest=image.load('tools/rectRest.png')
brushRest=image.load('tools/brushRest.png')
undoRest=image.load('tools/undoRest.png')
redoRest=image.load('tools/redoRest.png')
openRest=image.load('tools/openRest.png')
saveRest=image.load('tools/saveRest.png')
eliRest=image.load('tools/eliRest.png')
rectFRest=image.load('tools/rectFRest.png')
eliFRest=image.load('tools/eliFRest.png')
leftRest=image.load('tools/leftRest.png')
rightRest=image.load('tools/rightRest.png')
# Button icons, active state
pencilUse=image.load('tools/pencilUse.png')
eraserUse=image.load('tools/eraserUse.png')
sprayUse=image.load('tools/sprayUse.png')
shapeUse=image.load('tools/shapeUse.png')
lineUse=image.load('tools/lineUse.png')
rectUse=image.load('tools/rectUse.png')
brushUse=image.load('tools/brushUse.png')
undoUse=image.load('tools/undoUse.png')
redoUse=image.load('tools/redoUse.png')
openUse=image.load('tools/openUse.png')
saveUse=image.load('tools/saveUse.png')
eliUse=image.load('tools/eliUse.png')
rectFUse=image.load('tools/rectFUse.png')
eliFUse=image.load('tools/eliFUse.png')
leftUse=image.load('tools/leftUse.png')
rightUse=image.load('tools/rightUse.png')
#setting default state for each icon
pencilIcon=pencilRest
eraserIcon=eraserRest
sprayIcon=sprayRest
shapeIcon=shapeRest
lineIcon=lineRest
rectIcon=rectRest
brushIcon=brushRest
undoIcon=undoRest
redoIcon=redoRest
openIcon=openRest
saveIcon=saveRest
eliIcon=eliRest
rectFIcon=rectFRest
eliFIcon=eliFRest
leftIcon=leftRest
rightIcon=rightRest
jebIcon=jeb1
### Rect hitboxes for every UI region (pixel coordinates: x, y, w, h)
# Canvas and dialogue box
canvasRect = Rect(150, 80, 900, 500)
diaRect = Rect(920, 610, 185, 155)
diaRect2 = Rect(920, 610, 185, 155)  # second object with the same geometry
# Individual tool buttons (two rows of four)
pencilRect = Rect(50, 600, 85, 85)
eraserRect = Rect(50, 700, 85, 85)
sprayRect = Rect(150, 600, 85, 85)
shapeRect = Rect(150, 700, 85, 85)
lineRect = Rect(250, 600, 85, 85)
rectRect = Rect(250, 700, 85, 85)
brushRect = Rect(350, 600, 85, 85)
eliRect = Rect(350, 700, 85, 85)
# Filled rect/ellipse modes share the buttons of their outline variants
rectFRect = rectRect
eliFRect = eliRect
# Top-bar and side buttons
undoRect = Rect(50, 15, 85, 50)
redoRect = Rect(150, 15, 85, 50)
openRect = Rect(1065, 80, 85, 85)
saveRect = Rect(1065, 180, 85, 85)
leftRect = Rect(915, 15, 50, 50)
rightRect = Rect(1085, 15, 50, 50)
bgRect = Rect(980, 15, 90, 50)
# Stamp picker and its four slots
stampRect = Rect(730, 595, 175, 175)
stamp1Rect = Rect(738, 603, 75, 75)
stamp2Rect = Rect(738, 687, 75, 75)
stamp3Rect = Rect(823, 603, 75, 75)
stamp4Rect = Rect(823, 687, 75, 75)
# Colour palette and its history strip
paletteRect = Rect(510, 595, 200, 200)
colDisplayRect = Rect(460, 595, 30, 200)
# Aggregate regions used for coarse hit-testing
toolRect = Rect(50, 600, 385, 185)
topRect = Rect(50, 15, 385, 50)
openSaveRect = Rect(1065, 80, 85, 185)
leftRightRect = Rect(915, 15, 220, 50)
###displaying images
# Paint the static UI chrome and a blank white canvas
screen.blit(programBackground,(0,0))
screen.blit(paletteWheel,paletteRect)
draw.rect(screen,WHITE,canvasRect)
#setting initial values
# UI hit-state flags (the *1 copies remember where a drag started)
canvasIn=False
rightClick=False
paletteSel=False
paletteSel1=False
toolSel=False
topSel=False
topSel1=False
openSave=False
openSave1=False
leftRight=False
leftRight1=False
stampSel=False
stampSel1=False
colDisplay=False
opened=False
openChange=False
bgChanged=False
stampDone=False
running=True
endShape=False
pastScreen=False
mxInCanvas=False
lineCopy=False
shapeDone=False
shapeUndoStep=False
bgChange=False
tool='none' #tool selection default is none
action='none' #action selection default is none
omx,omy=0,0 #setting value
tTool=tool #temporary tool for stamps
oTool=tool
size=5 #default size
col=0,0,0,255 #default colour black
oCol=col
r,g,b,a=0,0,0,0 #default values for rgb and alpha(a)
mx,my=0,0 #default values for mouse pos
sx,sy=0,0 #default value for starting position
empty=screen.subsurface(canvasRect).copy() #default starting canvas
screenCap=empty #original canvas
bg0=empty #empty screen for default background
jebCount=0
jebMulti=1
stampPg='1'
stamp=stamp1 #active stamp is first stamp
# NOTE(review): these near-black RGBA entries appear deliberate - each value
# is distinct so colList.index(col) can locate it later; confirm before
# "fixing" them to bright colours
colList=[(1,0,0,255),(0,1,0,255),(0,0,1,255),(1,1,0,255),(0,0,0,255)] #first colour on colour history palette
undoList=[empty] #undo when you press undo the most recent item added is blitted
redoList=[empty] #redo when you press redo the most recent item added is blitted
shapeList=[] #coordinates for the shape tool
bgList=[bg0,bg1,bg2,bg3,bg4,bg5] #list of backgrounds
bgTList=[bg0T,bg1T,bg2T,bg3T,bg4T,bg5T] #list of background thumbnails
toolList=['pencil','eraser','spray','shape','line','rect','brush','eli','open','save'] #list of each tool
toolButton=[pencilRect,eraserRect,sprayRect,shapeRect,lineRect,rectRect,brushRect,eliRect,openRect,saveRect]#list of each tool Rect
iconList=[pencilIcon,eraserIcon,sprayIcon,shapeIcon,lineIcon,rectIcon,brushIcon,eliIcon,openIcon,saveIcon]#list of each icon
iconListRest=[pencilRest,eraserRest,sprayRest,shapeRest,lineRest,rectRest,brushRest,eliRest,openRest,saveRest] #list of icons in rest
iconListUse=[pencilUse,eraserUse,sprayUse,shapeUse,lineUse,rectUse,brushUse,eliUse,openUse,saveUse] #list of icon in use
stampList=[stamp1,stamp2,stamp3,stamp4,stamp5,stamp6,stamp7,stamp8] #list of stamps
stampTList=[stamp1T,stamp2T,stamp3T,stamp4T,stamp5T,stamp6T,stamp7T,stamp8T] #list of stamp thumbnails
# Dialogue-box help text: diaText is line 1, diaText2 is line 2, same indices
diaText=['Pencil in fine details with this', #dialogue box text for line 1
         'Erase your mistakes with tool',
         'Spray paint the canvas with this',
         'Left click to select points,',
         'Draw a line with this tool',
         'Draw an rectangle with this tool,',
         'Draw using a paintbrush with',
         'Draw an ellipse with this tool,',
         'Open an image file',
         'Save an image file as a .png',
         'Undo the latest step',
         'Redo the latest step',
         'Cycle left in the background',
         'Cycle right in the background',
         'Select a colour from within the',
         'Select a colour from the colour',
         'Select a stamp with left click,']
diaText2=['tool', #dialogue box text for line 2
          '',
          'tool',
          'then right click to complete',
          '',
          'and right click to switch modes',
          'this tool',
          'and right click to switch modes',
          '',
          '',
          '',
          '',
          'selection',
          'selection',
          'range',
          'history',
          'scroll to change the pg. number']
#more default values
diaDisp=diaText[0]
diaDisp2=diaText2[0]
diaDone=False
diaChange=True
jebTimerCount=0
#blitting more images
# Background thumbnails beside the canvas header, plus the mascot sprite
screen.blit(bgTList[1],(955,15,90,50))
screen.blit(bgTList[-1],(1025,15,90,50))
screen.blit(jeb1,(1120,650,175,155))
while running:
for evt in event.get():
if evt.type==QUIT:
running=False
if evt.type==MOUSEBUTTONDOWN:
if evt.button==1:
sx,sy=evt.pos #starting position of tool
click=True #mouse has been clicked
print("mouse down")
if paletteRect.collidepoint(mx,my): #to see if user is selecting the colour palette
paletteSel=True
else:
paleteSel=False
if colDisplayRect.collidepoint(mx,my): #to see if user is selecting the colour history palette
colDisplay=True
else:
colDisplay=False
if toolRect.collidepoint(mx,my): #to see if the user is selecting from tools
toolSel=True
else:
toolSel=False
if topRect.collidepoint(mx,my): #to see if user is selecting from the top buttons
topSel=True
else:
topSel=False
if openSaveRect.collidepoint(mx,my):#to see if user is selecting from open and save buttons
openSave=True
else:
openSave=False
if leftRightRect.collidepoint(mx,my): #to see if the user is selecting from the left and right buttons
leftRight=True
else:
leftRight=False
if stampRect.collidepoint(mx,my):
stampSel=True
else:
stampSel=False
topSel1=topSel #making seperate copies of variables
openSave1=openSave
leftRight1=leftRight
paletteSel1=paletteSel
stampSel1=stampSel
if evt.button==3: #right clicking
if tool=='shape': #shape tool adding points for each click
print(len(shapeList))
if len(shapeList)>2: #polygon needs to have at least 3 points
shapeDone=True #booleans for setting values later on
shapeUndoStep=True
draw.polygon(screen,col,shapeList,size)
screenCap=screen.subsurface(canvasRect).copy() #copies the screen
screen.blit(screenCap,(canvasRect)) #updating the screen
###for dialogue text box
if tool=='rect':
tool='rectF'
iconList[5]=rectFUse
elif tool=='rectF':
tool='rect'
iconList[5]=rectIcon
elif tool=='eli':
tool='eliF'
iconList[7]=eliFUse
elif tool=='eliF':
tool='eli'
iconList[7]=eliIcon
if evt.button==4: #scrolling up
if stampRect.collidepoint(mx,my): #for stamp page scrolling
stampPg='1' #first stamp page
else:
if size+1<41: #increases thickness
size+=1
if evt.button==5: #scrolling down
if stampRect.collidepoint(mx,my):
stampPg='2' #second stamp page
else:
if size-1>0: #decreases thickness
size-=1
if evt.type==MOUSEBUTTONUP: #release of the mouse
if evt.button==1: #to prevent scroll wheel from interfering
if tool!='shape' and tool!='none' or action=='stamp': #all tools + stamps
if mxInCanvas: #copying screen if the canvas has been altered
fwdScreen=screen.subsurface(canvasRect).copy() #adding the copied screen to the undo list
undoList.append(fwdScreen)
mxInCanvas=False
if paletteRect.collidepoint(mx,my) and paletteSel:
colList.append(col) #adding colour to the colour history list
colList.pop(0) #removing the oldest colour from history
if colDisplayRect.collidepoint(mx,my) and colDisplay:
colPick=colList.index(col) #finds colour that needs to be swapped
colList[colPick],colList[-1]=colList[-1],colList[colPick] #swapping the colours with the one that colour that is picked
if tool=='shape' and shapeUndoStep: #removing the undo steps when the shape tool is used
shapeUndoStep=False
fwdScreen=screen.subsurface(canvasRect).copy()
undoList.append(fwdScreen)
if shapeDone or tool!='shape': #setting the list of points for the shape to be nothing when the old shape is finished
shapeList*=0
shapeDone=False
if evt.button==1:
screen.set_clip(canvasRect)
if tool=='line' and canvasRect.collidepoint(mx,my): #drawing the actual line in the program
screenCap=screen.subsurface(canvasRect).copy()
draw.line(screen,col,(sx,sy),(mx,my),size)
fwdScreen=screen.subsurface(canvasRect).copy()
if tool=='shape' and canvasRect.collidepoint(mx,my) and shapeDone==False:
shapeList.append(mouse.get_pos())
for term in range(len(shapeList)): #drawing each vertice of each shape
draw.circle(screen,col,shapeList[term],1)
print(shapeDone)
if tool=='rect' and canvasRect.collidepoint(mx,my): #drawing the rectangle
screenCap=screen.subsurface(canvasRect).copy()
draw.rect(screen,col,(sx,sy,mx-sx,my-sy),size)
draw.rect(screen,col,(sx-round(size/2,1)+1,round(sy-size/2,1)+1,size,size))
draw.rect(screen,col,(mx-round(size/2,1),round(my-size/2,1),size,size))
draw.rect(screen,col,(mx-round(size/2,1),round(sy-size/2,1)+1,size,size))
draw.rect(screen,col,(sx-round(size/2,1)+1,round(my-size/2,1),size,size))
screenCap=screen.subsurface(canvasRect).copy()
if tool=='rectF' and canvasRect.collidepoint(mx,my): #drawing the filled rectangle
screenCap=screen.subsurface(canvasRect).copy()
draw.rect(screen,col,(sx,sy,mx-sx,my-sy))
screenCap=screen.subsurface(canvasRect).copy()
if tool=='eli' and canvasRect.collidepoint(mx,my): #drawing the unfilled ellipse
screenCap=screen.subsurface(canvasRect).copy()
if size>10: #setting max size for ellipse tool
size=10
sxEli=sx
syEli=sy
wEli=mx-sx
hEli=my-sy
eliDrawRect=Rect(sxEli,syEli,wEli,hEli)
eliDrawRect.normalize()
try:
draw.ellipse(screen,col,(eliDrawRect),size) #incase where the size of ellipse collides within the walls
except:
draw.ellipse(screen,col,(eliDrawRect))
screenCap=screen.subsurface(canvasRect).copy()
if tool=='eliF' and canvasRect.collidepoint(mx,my): #drawing the filled ellipses
if size>10: #setting max size for filled ellipse tool
size=10
sxEli=sx
syEli=sy
wEli=mx-sx
hEli=my-sy
eliDrawRect=Rect(sxEli,syEli,wEli,hEli)
eliDrawRect.normalize()
try:
draw.ellipse(screen,col,(eliDrawRect),size)
except:
pass
screenCap=screen.subsurface(canvasRect).copy()
if stampSel: #when stamp is selected, blitting the proper stamps for each page number
action='stamp'
diaDone=True
if stampPg=='1': #determining what stamp is selected
if stamp1Rect.collidepoint(mx,my):
stamp=stampList[0]
if stamp2Rect.collidepoint(mx,my):
stamp=stampList[1]
if stamp3Rect.collidepoint(mx,my):
stamp=stampList[2]
if stamp4Rect.collidepoint(mx,my):
stamp=stampList[3]
if stampPg=='2':
if stamp1Rect.collidepoint(mx,my):
stamp=stampList[4]
if stamp2Rect.collidepoint(mx,my):
stamp=stampList[5]
if stamp3Rect.collidepoint(mx,my):
stamp=stampList[6]
if stamp4Rect.collidepoint(mx,my):
stamp=stampList[7]
if action=='stamp': #blitting the stamps
if mb[0]==1 and canvasRect.collidepoint(mx,my):
cw=stamp.get_width()
ch=stamp.get_height()
screen.blit(stamp,(mx-cw/2,my-ch/2)) #centering the stamps
screen.set_clip(None)
if bgChanged: #when the background is changed
fwdScreen=screen.subsurface(canvasRect).copy() #added in later codes to undolist
bgChanged=False
paletteSel=False
print("mouse up")
mx,my=mouse.get_pos()
mb=mouse.get_pressed()
if mb[0]==1 and canvasRect.collidepoint(mx,my):
mxInCanvas=True
if canvasRect.collidepoint(mx,my):
canvasIn=True #boolean for the stamps
else:
canvasIn=False
###drawing the rectangles and blitting images on screen
for q in range(len(iconList)): #connected lists, blitted
screen.blit(iconList[q],toolButton[q])
screen.blit(undoIcon,(undoRect))
screen.blit(redoIcon,(redoRect))
screen.blit(leftIcon,(leftRect))
screen.blit(rightIcon,(rightRect))
screen.blit(jebIcon,(1120,650,175,155)) #jebediah character in bottom right
draw.rect(screen,WHITE,stampRect)
draw.rect(screen,WHITE,diaRect)
if stampPg=='1':
screen.blit(stampTList[0],(738,603,75,75)) #blitting the stamps page 1
screen.blit(stampTList[1],(738,687,75,75))
screen.blit(stampTList[2],(823,603,75,75))
screen.blit(stampTList[3],(823,687,75,75))
if stampPg=='2':
screen.blit(stampTList[4],(738,603,75,75)) #blitting the stamps on page 2
screen.blit(stampTList[5],(738,687,75,75))
screen.blit(stampTList[6],(823,603,75,75))
screen.blit(stampTList[7],(823,687,75,75))
if openChange: #changing bg list to default values
bgList=[bg0,bg1,bg2,bg3,bg4,bg5]
bgTList=[bg0T,bg1T,bg2T,bg3T,bg4T,bg5T]
screen.blit(bgTList[1],(955,15,90,50)) #default thumbnails for background in the top right corner
screen.blit(bgTList[-1],(1025,15,90,50))
screen.blit(bgList[0],canvasRect)
screen.blit(myPic,(150+distX,80+distY,900,500)) #putting image in the center of canvas
redoList*=0 #clearing the list
redoList.append(bgList[-1])
undoList*=0
fwdScreen=screen.subsurface(canvasRect).copy()
undoList.append(fwdScreen)
openChange=False
if bgChange:
screen.blit(bgTList[1],(955,15,90,50))
screen.blit(bgTList[-1],(1025,15,90,50))
screen.blit(bgList[0],canvasRect)
bgChange=False
###hovering over objects
for q in range(len(toolList)): #list of each tool and its rect location for hovering
if toolButton[q].collidepoint(mx,my):
screen.blit(hover,(toolButton[q]))
if undoRect.collidepoint(mx,my): #undo,redo,right,and left arent in a list because they are smaller icons
screen.blit(hover2,(undoRect))
if redoRect.collidepoint(mx,my):
screen.blit(hover2,(redoRect))
if rightRect.collidepoint(mx,my):
screen.blit(hover3,(rightRect))
if leftRect.collidepoint(mx,my):
screen.blit(hover3,(leftRect))
###selecting
if toolSel:
if mb[0]==1:
for q in range(len(toolList)): #changing the icon to be what is selected
if toolButton[q].collidepoint(mx,my):
tool=toolList[q]
iconList=iconListRest[:]
iconList[q]=iconListUse[q]
if mb[0]==1 and redoRect.collidepoint(mx,my): #special cases for different sied icons
redoIcon=redoUse
else:
redoIcon=redoRest
if mb[0]==1 and undoRect.collidepoint(mx,my):
undoIcon=undoUse
else:
undoIcon=undoRest
if mb[0]==1 and leftRect.collidepoint(mx,my):
leftIcon=leftUse
else:
leftIcon=leftRest
if mb[0]==1 and rightRect.collidepoint(mx,my):
rightIcon=rightUse
else:
rightIcon=rightRest
###using buttons
if topSel: #to see if the cursor is in the top button range
if mb[0]==1 and undoRect.collidepoint(mx,my):
action='undo'
if len(undoList)>1: #base canvas cant be deleted
undone=undoList[-1] #undo step capture saved
undoList.pop() #removing the undo step
screen.blit(undoList[-1],canvasRect) #putting the previous screencap on canvas
redoList.append(undone) #adding the removed capture
pastScreen=True #current canvas exists in the past
topSel=False #setting back to default value
screenCap=screen.subsurface(canvasRect).copy()
if mb[0]==1 and redoRect.collidepoint(mx,my):
action='redo'
if len(redoList)>1: #base canvas cant be deleted
redone=redoList[-1] #redo step capture saved
screen.blit(redoList[-1],canvasRect) #putting the redone screencap on canvas
redoList.pop() #removing the redo step and
undoList.append(redone) #adding it to the undo step
topSel=False #setting back to default value
screenCap=screen.subsurface(canvasRect).copy()
if openSave:
if mb[0]==1 and openRect.collidepoint(mx,my):
action='open'
try:
screen.set_clip(canvasRect)
fName=filedialog.askopenfilename()
print(fName)
myPic=image.load(fName)
widthPic=myPic.get_width()
heightPic=myPic.get_height() #cropping the image below
if widthPic>900: #if width of image is greater than the canvas size,
factorW=widthPic/900 #image is scalled so the width fits on canvas
muPic=transform.scale(myPic,(int(widthPic/factorW),int(heightPic/factorW)))
widthPic=myPic.get_width()
heightPic=myPic.get_height()
if heightPic>500: #if height of image is greater than canvas size,
factorH=heightPic/500 #image is scalled so the height fits on canvas,
myPic=transform.smoothscale(myPic,(int(widthPic/factorH),int(heightPic/factorH)))
widthPic=myPic.get_width()
heightPic=myPic.get_height()
distX=(900-widthPic)/2 #calculating distance to be moved to the right to center
distY=(500-heightPic)/2 #calculating distance to be moved to down to center
screen.blit(myPic,(150+distX,80+distY,900,500)) #putting image in the center of canvas
print('opened',fName)
screen.set_clip(None)
openChange=True #booleans for further action in event loop+regular loop
opened=True
diaDisp=diaText[8] #dialogue changed to the index for open
diaDisp2=diaText2[8]
diaDone=True
except:
print('opening error')
diaDisp=diaText[8]
diaDisp2=diaText2[8]
diaDone=True
if mb[0]==1 and saveRect.collidepoint(mx,my):
action='save'
try:
fName=filedialog.asksaveasfilename(defaultextension='.png') #saving image as .png
image.save(screen.subsurface(canvasRect).copy(),fName)
print('saved as',fName)
except:
print('saving error')
diaDisp=diaText[9]
diaDone=True
else:
diaDone=False #if these actions arent completed, then the dialogue to be displayed should be from the regular tools
if leftRight: #changing backgrounds
if mb[0]==1 and leftRect.collidepoint(mx,my):
bgTList.append(bgTList[0]) #swapping the last with the first backgrounds
bgTList.pop(0)
bgList.append(bgList[0])
bgList.pop(0)
leftRight=False #booleans for further ifs
bgChange=True
bgChanged=True
action='left'
if mb[0]==1 and rightRect.collidepoint(mx,my):
bgTadd=bgTList[-1] #swapping the first with the last backgrounds
bgTList.reverse()
bgTList.append(bgTadd)
bgTList.reverse()
bgTList.pop()
bgadd=bgList[-1]
bgList.reverse()
bgList.append(bgadd)
bgList.reverse()
bgList.pop()
leftRight=False
bgChange=True
bgChanged=True
action='right'
###colour picking
if paletteRect.collidepoint(mx,my) and paletteSel: #selecting colours
action='col palette'
if mb[0]==1:
col=screen.get_at((mx,my))
r,g,b,a=screen.get_at((mx,my))
diaDisp=diaText[14] #dialogue is colour
diaDisp2=diaText2[14]
diaDone=True #makes it so tool dialogue will be skipped
else:
diaDone=False
###displaying colours
for rectCol in range(5):
draw.rect(screen,(colList[rectCol]),(460,755-40*rectCol,30,40)) #from colour history palette
if colDisplayRect.collidepoint(mx,my) and colDisplay:
action='col pick'
if mb[0]==1:
col=screen.get_at((mx,my)) #setting the colour to be the one chosen in the colour history palette
diaDone=True
diaDisp=diaText[15]
diaDisp2=diaText2[15]
screen.blit(colDisplayFrame,colDisplayRect) #colour history frame
###spray paint tool
if tool=='spray':
sprayX=randint(-size-20,size+20) #random points for the x
sprayY=randint(-size-20,size+20) #random points for the y
if (sprayX)**2+(sprayY)**2<=2*(size+6)**2: #to see if those random points as coordinates fit inside circle
sprayX=mx+sprayX #if true, the spray can coordinates will be
sprayY=my+sprayY #sprayXco and sprayYco
sprayX2=randint(-size-20,size+20) #same code as above, but with a smaller radius
sprayY2=randint(-size-20,size+20)
if (sprayX2)**2+(sprayY2)**2<=2*(size+3)**2:
sprayX2=mx+sprayX2
sprayY2=my+sprayY2
sprayX3=randint(-size-20,size+20) #same code as above, but with an even smaller radius
sprayY3=randint(-size-20,size+20)
if (sprayX3)**2+(sprayY3)**2<=2*(size)**2:
sprayX3=mx+sprayX3
sprayY3=my+sprayY3
###size of tool
if tool=='pencil': #setting maximum sizes for pencil
if size>5:
size=5
if tool=='eli' or tool=='eliF' or tool=='shape': #and for ellipse and shape tools
if size>10:
size=10
tTool=tool #temporary tool saved for when the tool is set to none to make the stamps not have tools underneath them
if action=='stamp':
tTool=tool
tool='none'
screen.set_clip(canvasRect)
screen.blit(undoList[-1],canvasRect) #whole block is a preview of where the stamp is/stamp follows cursor
cw=stamp.get_width()
ch=stamp.get_height()
screen.blit(stamp,(mx-cw/2,my-ch/2)) #centering the stamp
screen.set_clip(None)
stampDone=True
if toolRect.collidepoint(mx,my) and mb[0]==1 or col!=oCol or colDisplay: #checks to see if any tools/actions/buttons are used
if action!='none': #stops using the stamp if new tool/action/col is picked
action='none'
screen.blit(undoList[-1],canvasRect)
###using tool
if canvasRect.collidepoint(mx,my) and mb[0]==1:
screen.set_clip(canvasRect)
if tool=='pencil': #pencil tool
draw.line(screen,col,(omx,omy),(mx,my),size)
if tool=='eraser': #eraser tool
dx=mx-omx
dy=my-omy
hyp=sqrt(dx**2+dy**2)
if hyp%1!=0:
hyp+=0.5
for i in range(0,int(hyp)):
dotx=int(omx-size//2+i*dx/hyp)
doty=int(omy-size//2+i*dy/hyp)
screen.blit(bgList[0],(dotx,doty,size*2,size*2),(dotx-150,doty-80,size*2,size*2)) #draws the background image in a square on top of the image
if tool=='spray': #spray paint tool
draw.circle(screen,col,(sprayX,sprayY),0) #outer circle
draw.circle(screen,col,(sprayX2,sprayY2),0) #middle circle
draw.circle(screen,col,(sprayX3,sprayY3),0) #inner circle
if tool=='line': #preview of line tool
screen.blit(undoList[-1],canvasRect)
draw.line(screen,col,(sx,sy),(mx,my),size)
if tool=='rect': #preview of rectangle tool
screen.blit(undoList[-1],canvasRect)
draw.rect(screen,col,(sx,sy,mx-sx,my-sy),size)
draw.rect(screen,col,(sx-round(size/2,1)+1,round(sy-size/2,1)+1,size,size))
draw.rect(screen,col,(mx-round(size/2,1),round(my-size/2,1),size,size))
draw.rect(screen,col,(mx-round(size/2,1),round(sy-size/2,1)+1,size,size))
draw.rect(screen,col,(sx-round(size/2,1)+1,round(my-size/2,1),size,size))
if tool=='brush':
if omx!=mx or omy!=my:
dx=mx-omx
dy=my-omy
hyp=sqrt(dx**2+dy**2)
if hyp%1!=0:
hyp+=0.5
draw.circle(screen,col,(mx,my),size)
for i in range(0,int(hyp),1):
dotx=int(omx+i*dx/hyp)
doty=int(omy+i*dy/hyp)
draw.circle(screen,col,(dotx,doty),size)
if tool=='eli': #preview of ellipse tool
screen.blit(undoList[-1],canvasRect)
sxEli=sx
syEli=sy
wEli=mx-sx
hEli=my-sy
eliDrawRect=Rect(sxEli,syEli,wEli,hEli)
eliDrawRect.normalize()
print(eliDrawRect,size)
try:
draw.ellipse(screen,col,(eliDrawRect),size)
except:
draw.ellipse(screen,col,(eliDrawRect))
if tool=='rectF': #preview of filled rectangle tool
screen.blit(undoList[-1],canvasRect)
draw.rect(screen,col,(sx,sy,mx-sx,my-sy))
if tool=='eliF': #preview of filled ellipse tool
screen.blit(undoList[-1],canvasRect)
sxEli=sx
syEli=sy
wEli=mx-sx
hEli=my-sy
eliDrawRect=Rect(sxEli,syEli,wEli,hEli)
eliDrawRect.normalize()
print(eliDrawRect)
try:
draw.ellipse(screen,col,(eliDrawRect))
except:
pass
screenCap=screen.subsurface(canvasRect).copy()
if pastScreen: #if there is drawing on the canvas when it is done in the past/undo steps
redoList*=0 #clearing the list
redoList.append(bgList[-1])
pastScreen=False
screen.set_clip(None)
##blitting final information
toolWord='Tool: '+tool.upper() #setting up the pregenerated text with the dynamic values (size,mx and my,tool,action)
actionWord='Latest Action: '+action.upper()
sizeWord='Size: '+str(size)+' (scroll to change size)'
posWord='Position: '+str(mx)+','+str(my)
sizePic=arialFont.render(sizeWord,True,(BLACK)) #rendering and blitting each dialogue + texts in the text box
screen.blit(sizePic,(925,660,185,155))
diaPic=arialFont.render(diaDisp,True,(BLACK))
screen.blit(diaPic,(925,675+20,185,155))
diaPic2=arialFont.render(diaDisp2,True,(BLACK))
screen.blit(diaPic2,(925,690+20,185,155))
toolPic=arialFont.render(toolWord,True,(BLACK))
screen.blit(toolPic,(925,615,185,155))
actionPic=arialFont.render(actionWord,True,(BLACK))
screen.blit(actionPic,(925,635,185,155))
posPic=arialFont.render(posWord,True,(BLACK))
screen.blit(posPic,(925,675,185,155))
display.flip()
if stampDone: #setting the tool back to its original value, before using the stamp tool
tool=tTool
stampDone=False
toolSel=False
omx=mx #updating tools with the new tools
omy=my
oTool=tool
oCol=col
oDiaDisp=diaDisp
if diaDone!=True:
if topSel1 or openSave1 or leftRight1 or paletteSel1 or colDisplay or stampSel1: #dialogue box is the text from before
if action=='open':
diaDisp=diaText[8]
diaDisp2=diaText2[8]
if action=='save':
diaDisp=diaText[9]
diaDisp2=diaText2[9]
if action=='undo':
diaDisp=diaText[10]
diaDisp2=diaText2[10]
if action=='redo':
diaDisp=diaText[11]
diaDisp2=diaText2[11]
if action=='left':
diaDisp=diaText[12]
diaDisp2=diaText2[12]
if action=='right':
diaDisp=diaText[13]
diaDisp2=diaText2[13]
if action=='stamp':
diaDisp=diaText[16]
diaDisp2=diaText2[16]
else:
for x in range(len(toolList)): #dialogue is the tools
if tool==toolList[x]:
diaDisp=diaText[x]
diaDisp2=diaText2[x]
if bgChanged: #when you change the background undo and redo are reset *new image
undoList*=0
undoList.append(bgList[0])
redoList*=0
redoList.append(bgList[0])
if oDiaDisp!=diaDisp: #when the dialogue changes, character is allowed to speak
diaChange=True
jebTimerCount=0 #number counter that ranges from positive to negative integers
###counter of time for Jebediah at the bottom right corner
jebIcon=jeb1 #default state of Jebediah is closing mouth
if diaChange:
jebTimerCount+=1
if diaChange:
jebCount+=jebMulti
if jebCount>8:
jebMulti*=-1
if jebCount<-15:
jebMulti*=-1
if jebCount>0: #when counter is positive character opens their mouth
jebIcon=jeb1
else: #otherwise character is closing their mouth
jebIcon=jeb2
if jebTimerCount>200:
diaChange=False
jebTimerCount=0
quit()
| UTF-8 | Python | false | false | 37,513 | py | 4 | program.py | 2 | 0.578573 | 0.547863 | 0 | 903 | 39.542636 | 157 |
nmmmnu/redis-rdb-tools | 2,035,814,519,911 | da02abb1d81fef190526d2562d83662fa6681478 | d3c01e4bf7cd86e5a146e52dac6bb8d0f846b2ba | /rdbtools/cli/aggregate.py | 21d94a6ce95646a4080905055a6d27a9999ef4e4 | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | https://github.com/nmmmnu/redis-rdb-tools | b7e917e49cfa6cbf232cf16129a1ebe246f034df | 9aeaf545cd31c1f29327612986f75a69ec785652 | refs/heads/master | 2021-01-18T03:04:45.977076 | 2013-04-23T14:37:17 | 2013-04-23T14:37:17 | 9,621,129 | 1 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/python
import sys
try:
filename = sys.argv[1]
except IndexError:
print "Usage %s filename [max_rows]" % sys.argv[0]
sys.exit(1)
try:
maxbr = long(sys.argv[2])
except IndexError:
maxbr = None
br = 0
br_size_in_bytes = 0
br_max = 0
s_db = {}
s_tipe = {}
s_hist = {}
s_hist_levels = []
for i in range(32 + 4):
s_hist_levels.append(2 << i)
for line in open(filename):
if br :
if br % 1000 == 0 :
print "\r%12d records processed" % br ,
try:
x = line.strip().split(",")
(db, tipe, key, size_in_bytes, encoding, num_elements, len_largest_element) = x
db = int(db)
size_in_bytes = int(size_in_bytes)
except ValueError :
continue
if db == "database" :
continue
# Global count
br += 1
br_size_in_bytes += size_in_bytes
if br_max < size_in_bytes :
br_max = size_in_bytes
# Database
try:
s_db[db]["br"] += 1
s_db[db]["size_in_bytes"] += size_in_bytes
if s_db[db]["max"] < size_in_bytes :
s_db[db]["max"] = size_in_bytes
except KeyError:
s_db[db] = {
"br" : 1 ,
"size_in_bytes" : long(size_in_bytes) ,
"max" : long(size_in_bytes) ,
}
# Key type
try:
s_tipe[tipe]["br"] += 1
s_tipe[tipe]["size_in_bytes"] += size_in_bytes
if s_tipe[tipe]["max"] < size_in_bytes :
s_tipe[tipe]["max"] = size_in_bytes
except KeyError:
s_tipe[tipe] = {
"br" : 1 ,
"size_in_bytes" : long(size_in_bytes) ,
"max" : long(size_in_bytes) ,
}
# histogram
for i in reversed(s_hist_levels) :
if size_in_bytes > i :
try:
s_hist[i]["br"] += 1
s_hist[i]["size_in_bytes"] += size_in_bytes
if s_hist[i]["max"] < size_in_bytes :
s_hist[i]["max"] = size_in_bytes
except KeyError:
s_hist[i] = {
"br" : 1 ,
"size_in_bytes" : long(size_in_bytes) ,
"max" : long(size_in_bytes) ,
}
break
#print x
if maxbr :
if br >= maxbr :
break
def table_head(title):
tit = "%12s : %12s : %6s : %12s : %6s : %12s : %12s" % (
"#" ,
"count" ,
"count%" ,
"size bytes" ,
"size%" ,
"avg size" ,
"max size"
)
tit_line = "-" * 91
print
print
print title
print tit_line
print tit
print tit_line
def table_foot():
tit_line = "-" * 91
print tit_line
print "%12s : %12d : %6.2f : %12d : %6.2f : %12d : %12d" % (
"total" ,
br ,
100.00 ,
br_size_in_bytes ,
100.00 ,
br_size_in_bytes / br ,
br_max
)
def table(title, m):
table_head(title)
for i in sorted(m.keys()) :
x = m[i]
print "%12s : %12d : %6.2f : %12d : %6.2f : %12d : %12d" % (
i ,
x["br"] ,
x["br"] / float(br) * 100 ,
x["size_in_bytes"] ,
x["size_in_bytes"] / float(br_size_in_bytes) * 100 ,
x["size_in_bytes"] / x["br"] ,
x["max"]
)
table_foot()
# Emit the three summary tables once aggregation is complete.
table("Distribution by Database", s_db )
table("Distribution by key type", s_tipe )
table("Distribution by sizetype", s_hist )
| UTF-8 | Python | false | false | 3,014 | py | 1 | aggregate.py | 1 | 0.518248 | 0.490046 | 0 | 174 | 16.298851 | 81 |
x19290-not4u/area_total----- | 16,295,105,927,197 | e3a907c8967ca34b355f17e2072ba3028897515e | a0b4bc7fdd96506a70f8fc72db1379a757a427b5 | /x_lib/zz3x_csv_to_shapes.py | 4d8d69246308791380c8f6220392139c38cff407 | []
| no_license | https://github.com/x19290-not4u/area_total----- | a7fa67616d31317321808b127affc8a761122195 | 2cf9bbf1c0b85e4379ae808f2cc1b5c1aa79a4de | refs/heads/master | 2020-01-24T07:08:16.762452 | 2016-08-30T13:38:41 | 2016-08-30T13:38:41 | 73,982,425 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from unittest import TestCase
from x_lib import csv_to_shapes, py2decode
from e_better import area_total, Circle, Square
from io import StringIO
from math import pi
# Two-row shape fixture: a circle of radius 10 and a square of side 5.
# The [1:] slice drops the leading newline of the raw literal.
_CSV = r'''
circle,10
square,5
'''[1:]
# py2decode presumably converts to unicode on Python 2 -- see x_lib (TODO confirm).
_CSV = py2decode(_CSV)
class T(TestCase):
    """csv_to_shapes should yield shapes whose areas sum correctly."""

    def test(self):
        # Expected total: pi * 10^2 for the circle plus 5^2 for the square.
        want = pi * 10 * 10 + 5 * 5
        parsed = csv_to_shapes(Circle, Square, StringIO(_CSV))
        self.assertEqual(want, area_total(parsed))
| UTF-8 | Python | false | false | 446 | py | 52 | zz3x_csv_to_shapes.py | 39 | 0.659193 | 0.632287 | 0 | 20 | 21.3 | 62 |
BrunoHenryCDMal/python_plots | 781,684,065,668 | 0dce9bdcd00c838634c3f664fbb854bb02c88c09 | 40e4bd46dd3f08c0f6e6355a4a580dc56803b45a | /plots_rings.py | 06e341605e028ccb9b6bf55db9d56baaa61068d4 | []
| no_license | https://github.com/BrunoHenryCDMal/python_plots | d8f9ce397519d6579849e57d9af3021432979a70 | e61ea128e718592992f74364274a73b10b5e3f22 | refs/heads/master | 2021-01-17T07:04:17.914428 | 2019-10-23T13:11:48 | 2019-10-23T13:11:48 | 45,528,470 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | '''
gasfractions_vs_stellarmass
milkyway_sfr_and_gas_profiles
evo_milkyway_gas_profile
gas_metallicity_gradients
SFR_gradients
'''
import numpy as np
import pandas as pd
#import seaborn as sns
#sns.set_style('darkgrid')
import matplotlib.pyplot as plt
import matplotlib.gridspec as gridspec
from astropy.table import Table
from astropy.io import fits
from matplotlib.ticker import MultipleLocator, FormatStrFormatter
from matplotlib.backends.backend_pdf import PdfPages
from matplotlib.colors import LogNorm
import sys
from scipy.ndimage import zoom
from importlib import reload
import inspect
from scipy import interpolate
import random
import procedures
reload (procedures)
from procedures import *
import plots_input
reload (plots_input)
from plots_input import *
def milkyway_sfr_and_gas_profiles(ThisRedshiftList):
    """Plot radial surface-density profiles (stars, HI, H2, total cold gas)
    for Milky-Way-like model galaxies against observations.

    For each redshift in *ThisRedshiftList*, selects MW analogues from the
    module-global catalogue ``G_MR`` (central, star-forming, disc-dominated,
    200 < Vvir < 235 km/s, 10.3 < log10(M*) < 10.7) and draws a 2x2 figure:
    one panel per quantity, comparing model ring medians/percentiles with
    Leroy (2008) per-galaxy profiles and BlueDiscs profiles read from
    ``Datadir``.  Saves PDFs to ./fig/ and, when ``write_to_file==1``, dumps
    the model gradients to CSV.
    """
    labels_to_write=['sigma_star', 'sigma_HI', 'sigma_H2', 'sigma_Cold']
    plot_color=['red','purple']

    fig = plt.figure(figsize=(two_two_size_small[0],two_two_size_small[1]))
    grid = gridspec.GridSpec(2, 2)
    grid.update(wspace=0.0, hspace=0.0)

    # Observed galaxy names and flat rotation velocities (presumably km/s --
    # values match the Vvir window used for the model selection below).
    List_Names_Massive=['NGC0628','NGC3198','NGC3184','NGC4736','NGC3351','NGC6946',
                        'NGC3627','NGC5194','NGC3521','NGC2841','NGC5055','NGC7331']
    List_Vflat_Massive=np.array([217.,150.,210.,156.,196.,186.,
                                 192.,219.,227.,302.,192.,244.])

    # Dwarf sample is defined but not used in this routine.
    List_Names_Dwarfs=['DDO145','HoI','HoII','IC2574','NGC4214','NGC2976',
                       'NGC4449','NGC3077','NGC7793','NGC2403','NGC0925']
    List_Vflat_Dwarfs=np.array([50.,53.,36.,134.,37.,92.,
                                -99.,-99.,115.,134.,136.,])
    Ngals=12

    #READ Observations
    #BLUEDISKS
    '''df_bd = pd.read_csv(Datadir+'bluedisks_props.csv', index_col=0)
    #display(df_bd[:3])
    #df_bd.to_csv(Datadir+'bluedisks_props.csv',index=False)
    sel = (df_bd['log_Mstar']>10.8) & (df_bd['log_M_HI']>9.5)
    bd_indexes = df_bd[sel].index.values.tolist()
    print(bd_indexes)
    df_bd_profs = pd.read_csv(Datadir+'bluedisks_profiles.csv')
    #display(df_bd_profs[:3])'''

    #LEROY
    # Parse the Leroy (2008) gradients file: a header count on the first line,
    # the record count on line n_header+1, then one row per (galaxy, radius).
    index=0
    fa = open(Datadir+"/leroy2008_gradients.txt", "r")
    Leroy_Names=[]
    for line in fa:
        if(index==0):
            fields = line.strip().split()
            n_header=int(fields[0])
        if(index==n_header+1):
            fields = line.strip().split()
            # Flat structured array holding every row of the file.
            Aux_Leroy = np.zeros(int(fields[0]), dtype=[('radius',np.float32),('normradius',np.float32),
                                                        ('SigmaHI',np.float32),('SigmaHI_err',np.float32),
                                                        ('SigmaH2',np.float32),('SigmaH2_err',np.float32),
                                                        ('SigmaStar',np.float32),('SigmaStar_err',np.float32),
                                                        ('SigmaSFR',np.float32),('SigmaSFR_err',np.float32),
                                                        ('SigmaSFR_FUV',np.float32),('SigmaSFR_24',np.float32)])
        if(index>n_header+1):
            fields = line.strip().split()
            Leroy_Names.append(fields[0])
            #Leroy['name'][index-n_header-2]=str(fields[0])
            Aux_Leroy['radius'][index-n_header-2]=float(fields[1])
            Aux_Leroy['normradius'][index-n_header-2]=float(fields[2])
            Aux_Leroy['SigmaHI'][index-n_header-2]=float(fields[3])
            Aux_Leroy['SigmaHI_err'][index-n_header-2]=float(fields[4])
            Aux_Leroy['SigmaH2'][index-n_header-2]=float(fields[5])
            Aux_Leroy['SigmaH2_err'][index-n_header-2]=float(fields[6])
            Aux_Leroy['SigmaStar'][index-n_header-2]=float(fields[7])
            Aux_Leroy['SigmaStar_err'][index-n_header-2]=float(fields[8])
            Aux_Leroy['SigmaSFR'][index-n_header-2]=float(fields[9])
            Aux_Leroy['SigmaSFR_err'][index-n_header-2]=float(fields[10])
            Aux_Leroy['SigmaSFR_FUV'][index-n_header-2]=float(fields[11])
            Aux_Leroy['SigmaSFR_24'][index-n_header-2]=float(fields[12])
        index+=1
    #endfor
    #print(Leroy['name'])
    fa.close()

    # Regroup the flat rows into one fixed-length (100-point) profile per
    # galaxy, in the order of List_Names_Massive.
    Obs_Gal=[('radius',np.float32,100),('normradius',np.float32,100),('SigmaHI',np.float32,100),
             ('SigmaHI_err',np.float32,100),('SigmaH2',np.float32,100),('SigmaH2_err',np.float32,100),
             ('SigmaStar',np.float32,100),('SigmaStar_err',np.float32,100),('SigmaSFR',np.float32,100),
             ('SigmaSFR_err',np.float32,100),('SigmaSFR_FUV',np.float32,100),('SigmaSFR_24',np.float32,100)]
    Leroy = np.zeros(Ngals,dtype=Obs_Gal)
    #next lines are to avoid have a lot of zeros at zero radius
    Leroy['radius'][:,:]=-99.
    Leroy['normradius'][:,:]=-99.

    for ii in range(0,len(List_Names_Massive)):
        index=0
        for jj in range(0,len(Leroy_Names)):
            if(Leroy_Names[jj]==List_Names_Massive[ii]):
                Leroy['radius'][ii,index] = Aux_Leroy['radius'][jj]
                Leroy['normradius'][ii,index] = Aux_Leroy['normradius'][jj]
                Leroy['SigmaHI'][ii,index] = Aux_Leroy['SigmaHI'][jj]
                Leroy['SigmaHI_err'][ii,index] = Aux_Leroy['SigmaHI_err'][jj]
                Leroy['SigmaH2'][ii,index] = Aux_Leroy['SigmaH2'][jj]
                Leroy['SigmaH2_err'][ii,index] = Aux_Leroy['SigmaH2_err'][jj]
                Leroy['SigmaStar'][ii,index] = Aux_Leroy['SigmaStar'][jj]
                Leroy['SigmaStar_err'][ii,index] = Aux_Leroy['SigmaStar_err'][jj]
                Leroy['SigmaSFR'][ii,index] = Aux_Leroy['SigmaSFR'][jj]
                Leroy['SigmaSFR_err'][ii,index] = Aux_Leroy['SigmaSFR_err'][jj]
                Leroy['SigmaSFR_FUV'][ii,index] = Aux_Leroy['SigmaSFR_FUV'][jj]
                Leroy['SigmaSFR_24'][ii,index] = Aux_Leroy['SigmaSFR_24'][jj]
                index+=1
                #radius.append(Leroy['radius'][jj])
                #sigma.append(Leroy['SigmaStar'][jj])

    for ii in range(0,len(ThisRedshiftList)):
        # Select the model snapshot for this redshift.
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR=G_MR[sel]

        SSFR = np.log10(G0_MR['Sfr']/(G0_MR['StellarMass']*1.e10/Hubble_h))

        '''G0_MR=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['DiskMass']>0.) &
                    (G0_MR['Vvir']>150.) & (G0_MR['Vvir']<235.) & (G0_MR['Type']==0) &
                    (SSFR>np.log10(2.*(1+ThisRedshiftList[ii])**2./(1.37e10))-1.5) &
                    (G0_MR['BulgeMass']/G0_MR['StellarMass']<0.3)]'''

        # Milky-Way analogue selection: star-forming central disc galaxies
        # in a narrow Vvir and stellar-mass window with non-zero H2.
        G0_MR=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['DiskMass']>0.) & (G0_MR['Type']==0) &
                    (SSFR>np.log10(2.*(1+ThisRedshiftList[ii])**2./(1.37e10))-1.0) &
                    #(G0_MR['Sfr']>0.01) &
                    (G0_MR['Vvir']>200.) & (G0_MR['Vvir']<235.) &
                    (G0_MR['BulgeMass']/G0_MR['StellarMass']<0.15) &
                    (np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)>10.3) &
                    (np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)<10.7) &
                    (G0_MR['H2fraction']>0.)]
        #180, 235
        #G0_MR=G0_MR[(np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)>10.) & (G0_MR['DiskMass']>0.) &
        #            (G0_MR['Vmax']>125.) & (G0_MR['MagDust'][:,1]<-20.) &
        #            ((G0_MR['BulgeMass']/G0_MR['StellarMass'])<0.15)]

        print("Ngals in Milkyway selection:",len(G0_MR))
        #print(G0_MR['Vvir'],G0_MR['Mvir']*1e10/Hubble_h)

        xlim=[0.0,19.0]
        ylim=[0.,3.8]
        bin_obs=2.

        #0->Sigmastar, 1->SigmaHI, 2->SigmaH2, 3->SigmaGas
        for i_property in range(0,4):
            subplot=plt.subplot(grid[i_property])
            subplot.set_ylim(ylim), subplot.set_xlim(xlim)
            majorFormatter = FormatStrFormatter('%d')
            subplot.xaxis.set_major_locator(MultipleLocator(5))
            subplot.xaxis.set_minor_locator(MultipleLocator(1.))
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))

            # Axis labels only on the outer edges of the shared 2x2 grid.
            if((i_property==0) or (i_property==2)):
                ylab='$\log_{10}(\Sigma/(M_{\odot} \mathrm{pc^{-2}}))$'
                subplot.set_ylabel(ylab, fontsize=14)
            if((i_property==2) or (i_property==3)):
                xlab='$r/\mathrm{kpc}$'
                subplot.set_xlabel(xlab, fontsize=14)
            if (i_property==0) or (i_property==1):
                plt.tick_params(axis='x', which='both', bottom='on', labelbottom='off')
            if (i_property==1) or (i_property==3):
                plt.tick_params(axis='y', which='both', left='on', labelleft='off')

            #PLOT OBSERVATIONS
            #MEDIAN
            # Pick the observed quantity matching this panel.
            if(i_property==0):
                Sigma=Leroy['SigmaStar']
                Sigma_err=Leroy['SigmaStar_err']
            if(i_property==1):
                Sigma=Leroy['SigmaHI']
                Sigma_err=Leroy['SigmaHI_err']
            if(i_property==2):
                Sigma=Leroy['SigmaH2']
                Sigma_err=Leroy['SigmaH2_err']
            if(i_property==3):
                Sigma=Leroy['SigmaHI']+Leroy['SigmaH2']
                Sigma_err=Leroy['SigmaHI_err']+Leroy['SigmaH2_err']
            sel=Sigma>0.

            '''(x_binned,median,mean,pc16,pc84,rms)=median_and_percentiles(bin_obs,xlim[0]+0.5,xlim[1],
                                                       Leroy['radius'][sel],Sigma[sel])
            subplot.plot(x_binned, np.log10(median),color='black', linewidth=2)
            #subplot.plot(x_binned, pc16,color='black', linewidth=2)
            #subplot.plot(x_binned, pc84,color='black', linewidth=2)
            subplot.fill_between(x_binned,np.log10(pc16),np.log10(pc84),facecolor='lightgrey',
                                interpolate=True,alpha=0.8,edgecolor='black')'''

            #DATA POINTS
            #error=[np.log10((Leroy['SigmaStar']+Leroy['SigmaStar_err'])/Leroy['SigmaStar']),
            #       np.log10(Leroy['SigmaStar']/(Leroy['SigmaStar']-Leroy['SigmaStar_err']))]
            #subplot.errorbar(Leroy['radius'], np.log10(Leroy['SigmaStar']), error,
            #                 fmt='o', markersize=3, ecolor='blue', color='blue')

            #ONE LINE PER GALAXY
            # Only observed galaxies inside the model Vvir window are drawn.
            for igal in range(0,len(List_Names_Massive)):
                if((List_Vflat_Massive[igal]>200.) & (List_Vflat_Massive[igal]<235.)):
                    subplot.plot(Leroy['radius'][igal,:], np.log10(Sigma[igal,:]),
                                 color='blue', linewidth=1, linestyle='-')
                    # Asymmetric errors transformed into log space.
                    y_err=[np.log10(Sigma[igal,:]/(Sigma[igal,:]-Sigma_err[igal,:])),
                           np.log10((Sigma[igal,:]+Sigma_err[igal,:])/Sigma[igal,:])]
                    subplot.errorbar(Leroy['radius'][igal,:], np.log10(Sigma[igal,:]),yerr=y_err,color='blue',zorder=-1)

            #BLUE DISKS
            # Pre-extracted BlueDiscs profiles (HI for panel 1, H2 for panel 2).
            if(i_property==1):
                for jj in range(0,5):
                    df = pd.read_csv(Datadir+'Bluediscs_HI_profiles_'+str(jj+1)+'.csv')
                    subplot.plot(df['x'], np.log10(df['y']), color='seagreen', linewidth=2, linestyle='-')
            if(i_property==2):
                for jj in range(0,4):
                    df = pd.read_csv(Datadir+'Bluediscs_H2profiles_'+str(jj+1)+'.csv')
                    subplot.plot(df['x'], np.log10(df['y']), color='seagreen', linewidth=2, linestyle='-')

            #MODEL
            # Per-ring surface density: mass in annulus / annulus area (pc^2).
            Sigma=np.zeros(RNUM,dtype=np.float32)
            Sigma_mean=np.zeros(RNUM,dtype=np.float32)
            Bulge_density=np.zeros(RNUM,dtype=np.float32)
            pc16=np.zeros(RNUM,dtype=np.float32)
            pc84=np.zeros(RNUM,dtype=np.float32)

            for kk in range(0,RNUM):
                if(kk==0):
                    r_in = 0.
                else:
                    r_in = (RingRadius[kk-1])
                r_out = RingRadius[kk]
                if(i_property==0):
                    Mass=G0_MR['DiskMassRings'][:,kk]*1e10/Hubble_h + G0_MR['BulgeMassRings'][:,kk]*1e10/Hubble_h
                if(i_property==1):
                    Mass=G0_MR['ColdGasRings'][:,kk]*1e10/Hubble_h*(1.-G0_MR['H2fractionRings'][:,kk])
                if(i_property==2):
                    Mass=G0_MR['ColdGasRings'][:,kk]*1e10/Hubble_h*G0_MR['H2fractionRings'][:,kk]
                if(i_property==3):
                    Mass=G0_MR['ColdGasRings'][:,kk]*1e10/Hubble_h
                # Annulus area in pc^2 (radii presumably in kpc -- 1e6 pc^2/kpc^2).
                y_variable=Mass/(3.14*(r_out**2-r_in**2)*1e6)
                Sigma[kk]=np.median(y_variable)
                y_sorted = np.sort(y_variable)
                pc16[kk] = y_sorted[int(16*len(y_variable)/100)]
                pc84[kk] = y_sorted[int(84*len(y_variable)/100)]
                Sigma_mean[kk]=np.mean(y_variable)

            if(i_property==2):
                print(Sigma)
            # Mean (dotted), median (solid) and 16/84 percentiles (dashed).
            subplot.plot(RingRadius, np.log10(Sigma_mean),color='red', linewidth=2, linestyle=':')
            subplot.plot(RingRadius, np.log10(Sigma),color='red', linewidth=2)
            subplot.plot(RingRadius, np.log10(pc16),color='red', linestyle='--')
            subplot.plot(RingRadius, np.log10(pc84),color='red', linestyle='--')

            #WRITE OUTPUT
            if(write_to_file==1):
                df = pd.DataFrame({'r_kpc':RingRadius, 'mean':np.log10(Sigma_mean), 'median':np.log10(Sigma),
                                   'pc16':np.log10(pc16), 'pc84':np.log10(pc84)})
                df.to_csv(Datadir + file_to_write + 'MilkyWay_Gradients_' + labels_to_write[i_property] +
                          str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv', index=False)
                #df = pd.read_csv(Datadir + file_to_write + 'MilkyWay_Gradients_' + labels_to_write[i_property] +
                #                 str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv')
                #subplot.plot(df['r_kpc'], df['mean'],color='black', linestyle='--')

            # Panel label and (on panel 2 only) the figure legend.
            if(i_property==0):
                label='$\Sigma_{*}$'
            if(i_property==1):
                label='$\Sigma_{\mathrm{HI}}$'
            if(i_property==2):
                label='$\Sigma_{\mathrm{H_2}}$'
            if(i_property==3):
                label='$\Sigma_{\mathrm{cold}}$'
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.75, y_percentage=0.84,
                        color='black', xlog=0, ylog=0, label=label,
                        fontsize=16, fontweight='normal')

            if(i_property==2):
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.1, y_percentage=0.9, color='black',
                            xlog=0, ylog=0, label=prefix_this_model, fontsize=13, fontweight='normal')
                plot_label (subplot,'line',xlim,ylim,x_percentage=0.02,y_percentage=0.92,
                            color='red',x2_percentage=0.08,xlog=0,ylog=0,linestyle='-',linewidth=2)

                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.1, y_percentage=0.81,
                            color='black', xlog=0, ylog=0, label='Leroy 2008', fontsize=13, fontweight='normal')
                plot_label (subplot,'line',xlim,ylim,x_percentage=0.02,y_percentage=0.83,
                            color='blue',x2_percentage=0.08,xlog=0,ylog=0,linestyle='-',linewidth=2)

                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.1, y_percentage=0.72,
                            color='black', xlog=0, ylog=0, label='BlueDiscs', fontsize=13, fontweight='normal')
                plot_label (subplot,'line',xlim,ylim,x_percentage=0.02,y_percentage=0.74,
                            color='seagreen',x2_percentage=0.08,xlog=0,ylog=0,linestyle='-',linewidth=2)

    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYF19_milkyway_sfr_and_gas_profiles.pdf')
    plt.close()

    return
#end milkyway_sfr_and_gas_profiles
def milkyway_gradients(ThisRedshiftList):
    """Plot radial gas- and stellar-metallicity gradients of Milky-Way-like
    model galaxies against Moran (2012) and Fu (2013, Cepheids) data.

    Uses only the first redshift in *ThisRedshiftList* (ii=0).  Selects
    star-forming, disc-dominated centrals with 200 < Vvir < 235 km/s and
    10.5 < log10(M*) < 11 from the module-global catalogue ``G_MR``, then
    draws a 2x1 figure: gas Z profile on top, stellar Z profile below.
    Saves PDFs to ./fig/.
    """
    #Model SELECTION
    ii=0
    (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    G0_MR_unsel=G_MR[sel]

    # With detailed chemical enrichment the metals carry an extra axis of
    # three components, which must be summed before cuts are applied.
    if(opt_detailed_enrichment==1):
        G0_MR=G0_MR_unsel[(G0_MR_unsel['StellarMass']>0.) & (G0_MR_unsel['DiskMass']>0.) &
                          ((G0_MR_unsel['MetalsDiskMass'][:,0] + G0_MR_unsel['MetalsDiskMass'][:,1] +
                            G0_MR_unsel['MetalsDiskMass'][:,2])>0.) & (G0_MR_unsel['Type']==0) &
                          (np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1e10/Hubble_h))>-10.5) &
                          (np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h)>10.5) &
                          (np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h)<11.) &
                          (G0_MR_unsel['Vvir']>200.) & (G0_MR_unsel['Vvir']<235.) &
                          ((G0_MR_unsel['BulgeMass']/G0_MR_unsel['StellarMass'])<0.15)]
    else:
        G0_MR=G0_MR_unsel[(G0_MR_unsel['StellarMass']>0.) & (G0_MR_unsel['DiskMass']>0.) &
                          (G0_MR_unsel['MetalsDiskMass']>0.) & (G0_MR_unsel['Type']==0) &
                          (np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1e10/Hubble_h))>-10.5) &
                          (np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h)>10.5) &
                          (np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h)<11.) &
                          (G0_MR_unsel['Vvir']>200.) & (G0_MR_unsel['Vvir']<235.) &
                          ((G0_MR_unsel['BulgeMass']/G0_MR_unsel['StellarMass'])<0.15)]

    fig = plt.figure(figsize=(two_one_size_small[0],two_one_size_small[1]))
    grid = gridspec.GridSpec(2,1)
    grid.update(wspace=0.0, hspace=0.0)

    # i_property: 0 -> gas metallicity panel, 1 -> stellar metallicity panel.
    for i_property in range(0,2):
        subplot=plt.subplot(grid[i_property])
        xlim=[0.0,15.0]
        ylim=[-0.8, 0.7]
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)

        xlab='$r$[kpc]'
        subplot.set_xlabel(xlab, fontsize=14)
        if(i_property==0):
            ylab='$\log_{10}$$(Z_{\mathrm{gas}}/Z_\odot)$'
            # Hide x tick labels on the top panel of the shared column.
            plt.tick_params(axis='x', which='both', bottom='on', labelbottom='off')
        else:
            ylab='$\log_{10}$$(Z_*/Z_\odot)$'
        subplot.set_ylabel(ylab, fontsize=14)

        majorFormatter = FormatStrFormatter('%d')
        subplot.xaxis.set_major_locator(MultipleLocator(5))
        subplot.xaxis.set_minor_locator(MultipleLocator(1))
        subplot.yaxis.set_major_locator(MultipleLocator(0.2))
        subplot.yaxis.set_minor_locator(MultipleLocator(0.1))

        #OBSERVATIONS
        # NOTE(review): local name `file` shadows the builtin.
        if(i_property==0):
            file = Datadir + '/Moran12.txt'
            Moran12 = Table.read(file, format='ascii')
            subplot.fill_between(Moran12['radius'],Moran12['log10_Zgas']-Moran12['err_down'],
                                 Moran12['log10_Zgas']+Moran12['err_up'], facecolor='lightgrey',
                                 interpolate=True, alpha=0.8, edgecolor='black')
        else:
            file = Datadir + 'Fu2013_Cepheids_Metals.txt'
            Fu13_Ceph = Table.read(file, format='ascii')
            subplot.errorbar(Fu13_Ceph['radius'], Fu13_Ceph['log10_Zstars'],
                             yerr=[Fu13_Ceph['err_down'],Fu13_Ceph['err_up']],
                             fmt='o', markersize=5, ecolor='blue', color='blue')

        #MODEL
        # mean_metallicity holds log10 of the per-ring median; pc16/pc84 hold
        # linear percentile values and are log-scaled only at plot time.
        mean_metallicity=np.zeros(RNUM,dtype=np.float32)
        pc16=np.zeros(RNUM,dtype=np.float32)
        pc84=np.zeros(RNUM,dtype=np.float32)

        for jj in range(0,RNUM):
            #gas metallicity
            if(i_property==0):
                if(opt_detailed_enrichment==1):
                    MetalsRing=(G0_MR['MetalsColdGasRings'][:,jj,0] +
                                G0_MR['MetalsColdGasRings'][:,jj,1] +
                                G0_MR['MetalsColdGasRings'][:,jj,2])
                else:
                    MetalsRing=G0_MR['MetalsColdGasRings'][:,jj]
                MassRing=G0_MR['ColdGasRings'][:,jj]
            #stellar metallicity
            else:
                if(opt_detailed_enrichment==1):
                    MetalsRing=(G0_MR['MetalsDiskMassRings'][:,jj,0] +
                                G0_MR['MetalsDiskMassRings'][:,jj,1] +
                                G0_MR['MetalsDiskMassRings'][:,jj,2])
                else:
                    MetalsRing=G0_MR['MetalsDiskMassRings'][:,jj]
                MassRing=G0_MR['DiskMassRings'][:,jj]
                # Optionally include the bulge contribution in the stars.
                if(opt_rings_in_bulges==1):
                    if(opt_detailed_enrichment==1):
                        MetalsRing+=(G0_MR['MetalsBulgeMassRings'][:,jj,0] +
                                     G0_MR['MetalsBulgeMassRings'][:,jj,1] +
                                     G0_MR['MetalsBulgeMassRings'][:,jj,2])
                    else:
                        MetalsRing+=G0_MR['MetalsBulgeMassRings'][:,jj]
                    MassRing+=G0_MR['BulgeMassRings'][:,jj]

            # Metal mass fraction normalised by 0.016, matching the Z/Z_sun
            # axis labels; only galaxies with metals in this ring contribute.
            metallicity=MetalsRing[MetalsRing>0.]/MassRing[MetalsRing>0.]/0.016
            mean_metallicity[jj]=np.log10(np.median(metallicity))
            sel=metallicity>0.
            y_sorted = np.sort(metallicity[sel])
            if(len(metallicity[sel])>0.):
                pc16[jj] = y_sorted[int(16*len(metallicity[sel])/100)]
                pc84[jj] = y_sorted[int(84*len(metallicity[sel])/100)]
            else:
                pc16[jj] = 0.
                pc84[jj] = 0.
        #endfor

        # Median (solid) and 16/84 percentile envelope (dashed).
        subplot.plot(RingRadius,mean_metallicity,color='red',linewidth=2)
        subplot.plot(RingRadius,np.log10(pc16),color='red',linewidth=2,linestyle='--')
        subplot.plot(RingRadius,np.log10(pc84),color='red',linewidth=2,linestyle='--')

        #LABELS
        if(i_property==0):
            plot_label (subplot,'label',xlim,ylim,x_percentage=0.12,y_percentage=0.9,
                        color='black',xlog=0,ylog=0,label=prefix_this_model,fontsize=13,fontweight='normal')
            plot_label (subplot,'line',xlim,ylim,x_percentage=0.02,y_percentage=0.92,
                        color='red',x2_percentage=0.10,xlog=0,ylog=0,linestyle='-',linewidth=2)
            plot_label (subplot, 'label', xlim, ylim,
                        x_percentage=0.12, y_percentage=0.82, color='black', xlog=0, ylog=0,
                        label='Moran 2012', fontsize=13, fontweight='normal')
            plot_label (subplot,'line',xlim,ylim,x_percentage=0.03,y_percentage=0.845,
                        color='lightgrey',x2_percentage=0.09,xlog=0,ylog=0,linestyle='-',linewidth=6)
        else:
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.08, y_percentage=0.9,
                        color='blue', xlog=0, ylog=0, label='Fu 2013 (Cepheids)',
                        fontsize=13, fontweight='normal')
            plot_label (subplot, 'symbol', xlim, ylim, x_percentage=0.05, y_percentage=0.92,
                        color='blue', xlog=0, ylog=0, sym='o', sym_size=5, err_size=0.04)

    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_plots_milkyway_gradients.pdf')
    plt.close()

    return
#end gas_milkyway_gradients
def gas_metallicity_gradients_mass_bins(ThisRedshiftList):
ii=0
plot_color=['blue','green','red']
fig = plt.figure(figsize=(two_one_size_small[0],two_one_size_small[1]))
grid = gridspec.GridSpec(2, 1)
grid.update(wspace=0.0, hspace=0.0)
#OBSERVATIONS
Bresolin_Names=['NGC1512','NGC3621','M83','NGC4625']
Bresolin_logMass=[11.4771,10.3010,]
#haynes_MHI=haynes.data['HI']
#haynes_Magr=haynes.data['mr']-5*(np.log10(haynes.data['distance']*1.e6)-1)
'''Moran_Names=['G10019', 'G11845', 'G23315', 'G3505', 'G41323', 'G10218', 'G11956', 'G23408', 'G3509', 'G4137',
'G10358', 'G11989', 'G23419', 'G3519', 'G41783', 'G10404', 'G12002', 'G23443', 'G3524', 'G4195',
'G10447', 'G12025', 'G23450', 'G3537', 'G41969', 'G10813', 'G12069', 'G23453', 'G3593', 'G42013',
'G10817', 'G12318', 'G23496', 'G35981', 'G42025', 'G12460', 'G23685', 'G42140', 'G10827', 'G12970',
'G24064', 'G36307', 'G42141', 'G10831', 'G13037', 'G24094', 'G4216s', 'G10836', 'G13227', 'G24149',
'G3631', 'G4223', 'G10841', 'G13775', 'G24168', 'G3645', 'G4228', 'G10844', 'G13976', 'G24183',
'G3757', 'G4230', 'G10850', 'G14017', 'G24366', 'G3777', 'G4233', 'G10872', 'G14247', 'G24496',
'G3817', 'G4239', 'G10884', 'G14260', 'G25214', 'G3819', 'G42402', 'G10889', 'G14288', 'G4130',
'G25347', 'G3821', 'G47221', 'G10942', 'G14831', 'G25763', 'G38472', 'G48356', 'G10943', 'G14943',
'G26221', 'G3859', 'G51351', 'G10944', 'G15166', 'G26311', 'G38758', 'G51416', 'G10948', 'G15181',
'G26368', 'G3879', 'G51563', 'G10949', 'G15257', 'G26602', 'G3880', 'G51899', 'G10950', 'G16695',
'G26650', 'G38964', 'G52045', 'G11004', 'G17029', 'G26822', 'G39119', 'G52163', 'G11015', 'G17640',
'G27167', 'G39270', 'G52297', 'G11016b', 'G17659', 'G28168', 'G39548', 'G53795', 'G11016', 'G17684',
'G28461', 'G39567', 'G55745', 'G11019', 'G18202', 'G29487', 'G39595', 'G56307', 'G11071', 'G18335',
'G29510', 'G3971', 'G56312', 'G11087', 'G18421', 'G29555', 'G3976', 'G56320', 'G11109', 'G18900',
'G29596', 'G3977', 'G56375', 'G11112', 'G19132', 'G29699', 'G4017', 'G56612', 'G11120', 'G19672',
'G29842', 'G4018', 'G56737', 'G11126', 'G1977', 'G29892', 'G40247', 'G57017', 'G11223', 'G19852',
'G30175', 'G40257', 'G6583', 'G11270', 'G19918', 'G30338', 'G4030', 'G7031', 'G11295', 'G19950',
'G30479', 'G40317', 'G7286', 'G11311', 'G20026', 'G30508', 'G4037', 'G8096', 'G11349', 'G20041',
'G30811', 'G4038', 'G8634', 'G11386', 'G20042', 'G3189', 'G4039', 'G9109', 'G11437', 'G20133',
'G3261', 'G4040', 'G9463', 'G11488', 'G20144', 'G32937', 'G4048', 'G9551', 'G11513', 'G20183',
'G3293', 'G40500', 'G9814', 'G11514', 'G20292', 'G3301', 'G40501', 'G9891', 'G11794', 'G21842',
'G3435', 'G4054', 'G9948', 'G11808', 'G22999', 'G3439', 'G4057', 'G11817', 'G23120', 'G3465',
'G4094', 'G11824', 'G23194', 'G3504']
file=Datadir+'Moran/individual/'
file=file+Moran_Names[10]+'spec_cat.fits'
#file=file+'G10019spec_cat.fits'
fits_table=fits.open(file)
cols = fits_table[1].columns
cols.info()
print("")
print("")
Moran = fits_table[1]
print(Moran.data['METALLICITY'])
print(Moran.data['R_ASEC_IN'])
file=Datadir+'Moran/combined_cats/combined_cat_DR2DR3.fits'
fits_table=fits.open(file)
cols = fits_table[1].columns
cols.info()
Moran = fits_table[1]
print(Moran.data['GASS_ID'])'''
for i_radius in range (0,1+1):
subplot=plt.subplot(grid[i_radius])
if(i_radius==0):
xlim=[0.0,15.0]
xlab='$r$[kpc]'
subplot.xaxis.set_label_position('top')
plt.tick_params(axis='x',which='both',top='on',labeltop='on',bottom='off',labelbottom='off')
subplot.xaxis.set_major_locator(MultipleLocator(5.))
subplot.xaxis.set_minor_locator(MultipleLocator(1.))
else:
xlim=[-1.0,1.]
xlab='$r/r_{d}$'
plt.tick_params(axis='x',which='both',top='off',labeltop='off')
subplot.xaxis.set_major_locator(MultipleLocator(0.5))
subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
ylim=[8.0, 9.1]
ylab='$12+\log_{10}(O/H)_{\mathrm{gas}}$'
subplot.set_ylim(ylim),subplot.set_xlim(xlim)
subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
subplot.yaxis.set_major_locator(MultipleLocator(0.2))
subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
median_metallicity=np.zeros(RNUM,dtype=np.float32)
low_mass_limits=[9.5,10.0,10.5]
massbin=0.5
#***************
#* MODEL *
#***************
(sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
G0_MR_unsel=G_MR[sel]
if(opt_detailed_enrichment==1):
G0_MR_unsel=G0_MR_unsel[((G0_MR_unsel['MetalsColdGas'][:,0] +
G0_MR_unsel['MetalsColdGas'][:,1] +
G0_MR_unsel['MetalsColdGas'][:,2])>.0) &
(np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1e10/Hubble_h))>-11.) &
(G0_MR_unsel['BulgeMass']/G0_MR_unsel['StellarMass']<0.15)]
else:
G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['MetalsColdGas']>.0) &
(np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1e10/Hubble_h))>-11.) &
(G0_MR_unsel['BulgeMass']/G0_MR_unsel['StellarMass']<0.15)] #&
#(G0_MR_unsel['BulgeMass']/G0_MR_unsel['StellarMass']<.3)]
for kk in range(0,len(low_mass_limits)):
G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h) > low_mass_limits[kk]) &
(np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h) < low_mass_limits[kk]+massbin)]
NGals=len(G0_MR)
x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
for jj in range(0,RNUM):
ColdGasRing=G0_MR['ColdGasRings'][:,jj]
if(opt_individual_elements==1):
'''MetalsColdGasRing=(G0_MR['MetalsColdGasRings'][:,jj,0] +
G0_MR['MetalsColdGasRings'][:,jj,1] +
G0_MR['MetalsColdGasRings'][:,jj,2])
sel=MetalsColdGasRing>0.
Metallicity=MetalsColdGasRing[sel]/ColdGasRing[sel]/0.0134*10**8.69'''
N_H=G0_MR['ColdGasRings_elements'][:,jj,0]/1.
N_O=G0_MR['ColdGasRings_elements'][:,jj,4]/16.
sel=(N_O>0.) & (N_H>0.)
Metallicity=1e12*(N_O[sel]/N_H[sel])
else:
MetalsColdGasRing=G0_MR['MetalsColdGasRings'][:,jj]
sel=MetalsColdGasRing>0.
Metallicity=MetalsColdGasRing[sel]/ColdGasRing[sel]/0.0134*10**8.69
if(i_radius==0):
median_metallicity[jj]=np.log10(np.median(Metallicity))
else:
#aux needed to because len(Metallicity)!= NGals
aux_x=np.zeros(int(NGals),dtype=np.float32)
aux_y=np.zeros(int(NGals),dtype=np.float32)
aux_x[sel]=np.log10(RingRadius[jj]/(G0_MR['StellarHalfMassRadius'][sel]*1000./Hubble_h))
aux_y[sel]=Metallicity #no [sel] here because the selection was applied above
x_variable[NGals*jj:NGals*(jj+1)]=aux_x
y_variable[NGals*jj:NGals*(jj+1)]=aux_y
#metallicity versus physical radius
if(i_radius==0):
subplot.plot(RingRadius, median_metallicity,color=plot_color[kk], linewidth=2)
#metallicity versus physical radius/rd
else:
bin=0.1
sel=y_variable>0.
if(len(y_variable[sel])>0.):
(x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles (bin, xlim[0], xlim[1],
x_variable[sel], y_variable[sel])
#print(np.log10(median))
subplot.plot(x_binned, np.log10(median), color=plot_color[kk], linewidth=2)
#median_radius=np.median(G0_MR['GasDiskRadius'][G0_MR['GasDiskRadius']>0.]*1000./Hubble_h)
#Rings=np.log10(RingRadius/median_radius)
#subplot.plot(Rings, median_metallicity, color=plot_color[kk], linewidth=2)
#labels
if(i_radius==1):
label="%0.1f" % low_mass_limits[kk] + "<$M_{\star}[M_{\odot}]$<" + "%0.1f" % (low_mass_limits[kk]+massbin)
plot_label (subplot, 'label',xlim,ylim,x_percentage=0.15,y_percentage=0.05+(kk*0.09),
color='black',xlog=0,ylog=0,label=label,fontsize=12,fontweight='normal')
plot_label (subplot,'line',xlim,ylim,x_percentage=0.05,y_percentage=0.075+(kk*0.09),
color=plot_color[kk],x2_percentage=0.12,xlog=0,ylog=0,linestyle='-',linewidth=2)
#****************
#* OBSERVATIONS *
#****************
if((low_mass_limits[kk]==10.) & (i_radius==0)):
file = Datadir + '/bresolin12_gradients_NGC3621.txt'
obs = Table.read(file, format='ascii')
subplot.plot([obs['x_1'],obs['x_2']], [obs['y_1'],obs['y_2']],
color=plot_color[kk],linestyle='--',linewidth=2)
plot_label (subplot, 'label',xlim,ylim,x_percentage=0.05,y_percentage=0.32,
color=plot_color[kk],xlog=0,ylog=0,label='B12-NGC3621',
fontsize=12,fontweight='normal')
if((low_mass_limits[kk]==10.5) & (i_radius==0)):
file = Datadir + '/bresolin12_gradients_NGC1512.txt'
obs = Table.read(file, format='ascii')
subplot.plot([obs['x_1'],obs['x_2']], [obs['y_1'],obs['y_2']],
color=plot_color[kk],linestyle='--',linewidth=2)
plot_label (subplot, 'label',xlim,ylim,x_percentage=0.6,y_percentage=0.32,
color=plot_color[kk],xlog=0,ylog=0,label='B12-NGC1512',
fontsize=12,fontweight='normal')
'''if((low_mass_limits[kk]==10.5) & (i_radius==0)):
file = Datadir + '/bresolin12_gradients_M83.txt'
obs = Table.read(file, format='ascii')
subplot.plot([obs['x_1'],obs['x_2']], [obs['y_1'],obs['y_2']], color=plot_color[kk],linestyle=':')
if((low_mass_limits[kk]==10.5) & (i_radius==0)):
file = Datadir + '/bresolin12_gradients_NGC4625.txt'
obs = Table.read(file, format='ascii')
subplot.plot([obs['x_1'],obs['x_2']], [obs['y_1'],obs['y_2']], color=plot_color[kk],linestyle=':')'''
#endfor -> MASS
#endfor -> i_radius
plt.tight_layout()
current_function = inspect.getframeinfo(inspect.currentframe()).function
plt.savefig('./fig/plots_'+current_function+'.pdf')
plt.savefig('./fig/HYW17_plots_gas_metallicity_gradients_mass_bins.pdf')
plt.close()
return
#end gas_metallicity_gradients
def stellar_metallicity_gradients_mass_bins(ThisRedshiftList):
    """Plot median radial stellar-metallicity profiles in stellar-mass bins.

    For the first redshift in ThisRedshiftList, model galaxies are split into
    stellar-mass bins and, for each bin, the median log10(Z_*/Z_sun) of the
    disc is computed ring by ring and plotted against r/r_d (ring radius in
    units of the bin's median stellar half-light radius).

    Parameters
    ----------
    ThisRedshiftList : sequence
        List of redshifts; only element 0 (``ii=0``) is used here.

    Side effects
    ------------
    Writes two PDF figures under ./fig/ and closes the current figure.
    """
    ii = 0  # index into ThisRedshiftList of the snapshot to plot
    plot_color = ['purple', 'blue', 'lightblue', 'green', 'orange', 'red', 'brown']
    fig = plt.figure(figsize=(one_one_size_small[0], one_one_size_small[1]))
    subplot = plt.subplot()

    # --- axes setup ---
    xlim = [0.0, 3.]
    xlab = '$r/r_{d}$'
    plt.tick_params(axis='x', which='both', top='off', labeltop='off')
    subplot.xaxis.set_major_locator(MultipleLocator(1.0))
    subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
    ylim = [-1.0, 1.0]
    ylab = '$\mathrm{log_{10}}(Z_*/Z_{\odot})$'
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    subplot.yaxis.set_major_locator(MultipleLocator(0.2))
    subplot.yaxis.set_minor_locator(MultipleLocator(0.1))

    # Edges of the stellar-mass bins, in log10(M*[Msun])
    mass_bins = [9.1, 9.6, 10.1, 10.6, 10.9, 11.2, 11.5, 11.8]

    # --- model selection: galaxies at the requested snapshot with metals in the disc ---
    sel = select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    G0_MR_unsel = G_MR[sel]
    if(opt_detailed_enrichment == 1):
        # Detailed enrichment stores disc metals in three channels; require a
        # positive total.
        G0_MR_unsel = G0_MR_unsel[((G0_MR_unsel['MetalsDiskMass'][:, 0] +
                                    G0_MR_unsel['MetalsDiskMass'][:, 1] +
                                    G0_MR_unsel['MetalsDiskMass'][:, 2]) > .0)]
    else:
        # Single-channel metals; additionally require star-forming,
        # disc-dominated galaxies.
        G0_MR_unsel = G0_MR_unsel[(G0_MR_unsel['MetalsDiskMass'] > .0) &
                                  (np.log10(G0_MR_unsel['Sfr'] / (G0_MR_unsel['StellarMass'] * 1e10 / Hubble_h)) > -10.5) &
                                  (G0_MR_unsel['BulgeMass'] / G0_MR_unsel['StellarMass'] < 0.15)]

    for kk in range(0, len(mass_bins) - 1):
        # BUGFIX: re-initialise the profile for every mass bin. Previously a
        # single array was allocated once before the loop, so a ring with no
        # galaxies in bin kk silently kept the stale median from bin kk-1.
        median_metallicity = np.zeros(RNUM, dtype=np.float32)

        G0_MR = G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass'] * 1.e10 / Hubble_h) > mass_bins[kk]) &
                            (np.log10(G0_MR_unsel['StellarMass'] * 1.e10 / Hubble_h) < mass_bins[kk + 1])]

        for jj in range(0, RNUM):
            if(opt_detailed_enrichment == 1):
                MetalsDiskRing = (G0_MR['MetalsDiskMassRings'][:, jj, 0] +
                                  G0_MR['MetalsDiskMassRings'][:, jj, 1] +
                                  G0_MR['MetalsDiskMassRings'][:, jj, 2])
            else:
                MetalsDiskRing = G0_MR['MetalsDiskMassRings'][:, jj]
            DiskMassRing = G0_MR['DiskMassRings'][:, jj]
            # Metal mass fraction of the ring over 0.02 (solar normalisation
            # used throughout this branch)
            metallicity = MetalsDiskRing[MetalsDiskRing > 0.] / DiskMassRing[MetalsDiskRing > 0.] / 0.02
            if len(metallicity) > 0:
                median_metallicity[jj] = np.log10(np.median(metallicity))

        # Normalise ring radii by the median half-light radius of this bin
        # (converted from Mpc/h to kpc)
        median_radius = np.median(G0_MR['StellarHalfLightRadius'][G0_MR['StellarHalfLightRadius'] > 0.] *
                                  1000. / Hubble_h)
        # NOTE: the old dead assignment Rings=np.log10(...) was removed; the
        # plotted x-axis is the linear ratio, matching the '$r/r_{d}$' label.
        Rings = RingRadius / median_radius
        subplot.plot(Rings, median_metallicity, color=plot_color[kk], linewidth=2)

        # labels
        label = "%0.1f" % mass_bins[kk] + "<$M_{\star}[M_{\odot}]$<" + "%0.1f" % mass_bins[kk + 1]
        plot_label(subplot, 'label', xlim, ylim, x_percentage=0.15, y_percentage=0.05 + (kk * 0.09),
                   color='black', xlog=0, ylog=0, label=label, fontsize=12, fontweight='normal')
        plot_label(subplot, 'line', xlim, ylim, x_percentage=0.05, y_percentage=0.075 + (kk * 0.09),
                   color=plot_color[kk], x2_percentage=0.12, xlog=0, ylog=0, linestyle='-', linewidth=2)
    # endfor -> MASS

    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_' + current_function + '.pdf')
    plt.savefig('./fig/HYW17_plots_stellar_metallicity_gradients_mass_bins.pdf')
    plt.close()
    return
#end stellar_metallicity_gradients
# NOTE(review): the bare string literal opened below wraps dead diagnostic
# code (a per-ring stellar surface-density computation including the bulge
# contribution). It is never assigned or executed, so it has no runtime
# effect; kept for reference only.
'''
for kk in range(0,RNUM):
if(kk==0):
r_in=0.
else:
r_in=(RingRadius[kk-1]+RingRadius[kk])/2.
if(kk==RNUM-1):
r_out=RingRadius[kk]+((RingRadius[kk]-RingRadius[kk-1])/2.)
else:
r_out=(RingRadius[kk]+RingRadius[kk+1])/2.
if(i_property==0):
r_bulge=G0_MR['BulgeSize']*1000./Hubble_h #From Mpc/h to Kpc
if(opt_rings_in_bulges==1):
BulgeMass_this_ring=G0_MR['BulgeMassRings'][:,kk]*1e10/Hubble_h
else:
BulgeMass_this_ring=((G0_MR['BulgeMass']*1e10/Hubble_h)*
((1+(r_in/r_bulge))**(-1.) - (1.+(r_out/r_bulge))**(-1.)) )
Mass=G0_MR['DiskMassRings'][:,kk]*1e10/Hubble_h
sel=(r_bulge>0.)
Mass[sel]+=BulgeMass_this_ring[sel]
y_variable=Mass/(3.14*(r_out**2-r_in**2)*1e6)
sel=y_variable>0.
if(len(y_variable[sel])>0.):
Sigma[kk]=np.median(y_variable[sel])
Sigma_mean[kk]=np.mean(y_variable[sel])
y_sorted = np.sort(y_variable[sel])
pc16[kk] = y_sorted[int(16*len(y_variable[sel])/100)]
pc84[kk] = y_sorted[int(84*len(y_variable[sel])/100)]
'''
def gradients_enci(ThisRedshiftList):
    """Plot median Sigma_SFR profiles binned by offset from the main sequence.

    Galaxies at the first redshift with 10.5 < log10(M*[Msun]) < 11.0 are
    grouped by the offset of their global SFR from an assumed main-sequence
    relation (log10 SFR = global_a*log10 M* + global_b).  For each offset bin,
    the SFR surface-density profile of up to 1000 randomly chosen galaxies is
    spline-interpolated onto a common r/r_e grid and the median profile is
    plotted.

    Parameters
    ----------
    ThisRedshiftList : sequence
        List of redshifts; only element 0 (``i_z=0``) is used.

    Side effects
    ------------
    Writes two PDF figures under ./fig/ and closes the current figure.
    (A large block of commented-out main-sequence diagnostic code that
    previously occupied the docstring slot was removed.)
    """
    plot_color = ['purple', 'red', 'orange', 'black', 'green', 'blue']
    fig = plt.figure(figsize=(one_one_size_small[0], one_one_size_small[1]))

    # Adopted global main sequence: log10(SFR) = global_a*log10(M*) + global_b
    global_a = 1.0
    global_b = -9.65
    # Spaxel-level main-sequence fits per offset bin; referenced only by
    # commented-out code, kept for when that analysis is re-enabled.
    spaxel_a = [1.3, 1.17, 1.14, 1.14, 1.14, 1.1, 1.1, 0.9, 0.9, 0.85, 0.85, 0.85]
    spaxel_b = [-12.45, -11.3, -11.05, -11.05, -11.05, -10.7, -10.6, -8.95, -8.95, -8.47, -8.47, -8.47]

    # Area of each radial ring (annulus): pi*(r_out^2 - r_in^2)
    area = np.zeros(RNUM, dtype=np.float32)
    for jj in range(0, RNUM):
        if(jj == 0):
            area[jj] = (3.14 * RingRadius[jj] * RingRadius[jj])
        else:
            area[jj] = (3.14 * (RingRadius[jj] * RingRadius[jj] - RingRadius[jj - 1] * RingRadius[jj - 1]))

    # Lower/upper edges of the bins in main-sequence offset
    delta_low = [-5.0, -1.0, -0.3, -0.1, 0.0, 0.3]
    delta_high = [-1.0, -0.3, 0.0, 0.1, 0.3, 1.0]

    for delta_i in range(0, len(delta_low)):
        subplot = plt.subplot()
        xlim = [0., 2.]
        ylim = [-3, 0.5]
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab = '$r/r_e$'
        ylab = '$\log_{10}(\Sigma_{SFR}[\mathrm{M}_{\odot}\mathrm{yr}^{-1}\mathrm{Kpc^{-2}}])$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        subplot.yaxis.set_major_locator(MultipleLocator(1.0))
        subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        subplot.xaxis.set_major_locator(MultipleLocator(0.5))
        subplot.xaxis.set_minor_locator(MultipleLocator(0.1))

        i_z = 0
        sel = select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
        G0_MR_unsel = G_MR[sel]
        # Central (Type==0), star-forming galaxies in a fixed stellar-mass range
        G0_MR_unsel = G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass'] * 1.e10 / Hubble_h) > 10.5) &
                                  (np.log10(G0_MR_unsel['StellarMass'] * 1.e10 / Hubble_h) < 11.0) &
                                  (G0_MR_unsel['Sfr'] > 0.) & (G0_MR_unsel['Type'] == 0)]
        Mass = np.log10(G0_MR_unsel['StellarMass'] * 1e10 / Hubble_h)
        SFR = np.log10(G0_MR_unsel['Sfr'])

        # Keep galaxies whose global SFR offset from the main sequence falls
        # inside this delta bin
        G0_MR = G0_MR_unsel[(SFR > Mass * global_a + global_b + delta_low[delta_i]) &
                            (SFR < Mass * global_a + global_b + delta_high[delta_i])]
        NGals = len(G0_MR)

        # Flattened (ring, galaxy) grids: entries [NGals*jj : NGals*(jj+1)]
        # hold ring jj for all galaxies.
        x_variable = np.zeros(int(RNUM * NGals), dtype=np.float32)
        y_variable = np.zeros(int(RNUM * NGals), dtype=np.float32)
        new_x_var = np.arange(xlim[0], xlim[1] + 1.0, 0.1)

        # Disc radius used to normalise ring radii (converted from Mpc/h to
        # kpc); renamed from 're' to avoid shadowing the stdlib 're' module.
        r_disk = G0_MR['StellarDiskRadius'] * 1000. / Hubble_h

        # loop on radial bins
        for jj in range(0, RNUM):
            x_variable[NGals * jj:NGals * (jj + 1)] = RingRadius[jj] / r_disk
            # Sigma_SFR rescaled by (0.2*r_disk)^2 — presumably a per-galaxy
            # aperture normalisation; TODO confirm against the target relation.
            y_variable[NGals * jj:NGals * (jj + 1)] = np.log10(G0_MR['SfrRings'][:, jj] / area[jj] * ((0.2 * r_disk) ** 2))
        # endfor RNUM

        # Interpolate up to 1000 random galaxies onto the common grid
        N_random_gals = 1000
        if(N_random_gals > NGals):
            N_random_gals = NGals
        random.seed(a=2)  # fixed seed -> reproducible galaxy sample
        random_list = random.sample(range(0, NGals), N_random_gals)
        interpol_x_variable = np.zeros(int(len(new_x_var) * N_random_gals), dtype=np.float32)
        interpol_y_variable = np.zeros(int(len(new_x_var) * N_random_gals), dtype=np.float32)
        to_do = np.zeros(int(len(new_x_var) * N_random_gals), dtype=np.float32)
        i_index = 0
        for ii in random_list:
            # Gather the 12 ring entries belonging to galaxy ii from the
            # flattened arrays
            slice_ii = [x * NGals + ii for x in range(0, 12)]
            xx = x_variable[slice_ii]
            yy = y_variable[slice_ii]
            # ignore galaxies without halflightradius or with nan on the y_variable
            sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
            if(len(xx[sel]) > 3):
                to_do[i_index * len(new_x_var):(i_index + 1) * len(new_x_var)] = 1
                # s=0 -> exact interpolating spline through the valid points
                f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                interpol_y_variable[i_index * len(new_x_var):(i_index + 1) * len(new_x_var)] = f(new_x_var)
                interpol_x_variable[i_index * len(new_x_var):(i_index + 1) * len(new_x_var)] = new_x_var
            i_index += 1

        sel = to_do == 1
        (x_binned, median, mean, pc16, pc84, rms) = median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                    interpol_y_variable[sel], non_zero=0)
        # Drop the first binned point when plotting
        subplot.plot(x_binned[1:], median[1:], color=plot_color[delta_i], linewidth=2, linestyle='-')
    # end loop on offset bins

    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_' + current_function + '.pdf')
    # BUGFIX: this figure previously overwrote HYJ18_gradients_ellison.pdf
    # (copy-paste from gradients_ellison); save under this function's own name.
    plt.savefig('./fig/HYJ18_gradients_enci.pdf')
    plt.close()
    return
#end gradients_enci
def gradients_ellison(ThisRedshiftList):
    """Plot median Sigma_SFR-offset profiles binned by distance from the main sequence.

    Galaxies at the first redshift with log10(M*[Msun]) > 8.5 are grouped by
    the offset of their global SFR from an assumed main sequence
    (log10(SFR) = global_a*log10(M*) + global_b).  For each offset bin, the
    per-ring quantity log10(Sigma_SFR) - resolved_MS[ring] is computed,
    spline-interpolated onto a common radius grid for up to 1000 randomly
    chosen galaxies, and the median profile is plotted.

    Parameters
    ----------
    ThisRedshiftList : sequence
        List of redshifts; only element 0 (``i_z=0``) is used.

    Side effects: writes two PDF figures under ./fig/ and closes the figure.
    (This docstring replaces a large block of commented-out main-sequence
    diagnostic code that previously occupied the docstring slot.)
    """
    plot_color=['red', 'orange', 'yellow', 'green', 'blue', 'darkblue','purple']
    fig = plt.figure(figsize=(one_one_size_small[0],one_one_size_small[1]))
    # Adopted global main sequence: log10(SFR) = global_a*log10(M*) + global_b
    global_a = 1.0
    global_b = -9.65
    # Spaxel-level main-sequence fits per offset bin; referenced only by
    # commented-out code below.
    spaxel_a = [1.3, 1.17, 1.14, 1.14, 1.14, 1.1, 1.1, 0.9, 0.9, 0.85, 0.85, 0.85]
    spaxel_b = [-12.45, -11.3, -11.05, -11.05, -11.05, -10.7, -10.6, -8.95, -8.95, -8.47, -8.47, -8.47]
    # Area of each radial ring (annulus): pi*(r_out^2 - r_in^2)
    area = np.zeros(RNUM,dtype=np.float32)
    for jj in range(0,RNUM):
        if(jj==0):
            area[jj]=(3.14*RingRadius[jj]*RingRadius[jj])
        else:
            area[jj]=(3.14*(RingRadius[jj]*RingRadius[jj]-RingRadius[jj-1]*RingRadius[jj-1]))
    # Lower/upper edges of the bins in main-sequence offset
    delta_low = [-3.0,-0.5,-0.3,-0.1,0.1,0.3,0.5]
    delta_high = [-0.5,-0.3,-0.1,0.1,0.3,0.5,3.0]
    #delta_low = [-0.1]
    #delta_high = [0.1]
    for delta_i in range(0,len(delta_low)):
        subplot=plt.subplot()
        xlim=[0., 10.]
        ylim=[-4, 4.]
        subplot.set_ylim(ylim),subplot.set_xlim(xlim)
        # NOTE(review): the x label says log10(r) but x_variable below is the
        # linear RingRadius — the label and axis look inconsistent; confirm.
        xlab='$\log_{10}(r \mathrm{[kpc]})$'
        ylab='$\log_{10}(\Sigma_{SFR}[\mathrm{yr}^{-1}\mathrm{Kpc^{-2}}])$'
        subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
        subplot.yaxis.set_major_locator(MultipleLocator(1.0))
        subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        subplot.xaxis.set_major_locator(MultipleLocator(5.0))
        subplot.xaxis.set_minor_locator(MultipleLocator(1.0))
        i_z=0
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
        G0_MR_unsel = G_MR[sel]
        # Central (Type==0), star-forming galaxies above the mass limit
        G0_MR_unsel = G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h)>8.5) &
                                  (G0_MR_unsel['Sfr']>0.) & (G0_MR_unsel['Type']==0)]
        Mass = np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)
        SFR = np.log10(G0_MR_unsel['Sfr'])
        #global deviation from main squence
        G0_MR=G0_MR_unsel[(SFR>Mass*global_a+global_b+delta_low[delta_i]) & (SFR<Mass*global_a+global_b+delta_high[delta_i])]
        NGals=len(G0_MR)
        # Flattened (ring, galaxy) grids: entries [NGals*jj : NGals*(jj+1)]
        # hold ring jj for all galaxies.
        x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
        y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
        new_x_var = np.arange(xlim[0],xlim[1]+1.0,0.05)
        # Hard-coded reference "resolved main sequence" value per ring; the
        # origin of these numbers is not visible here — TODO confirm source.
        resolved_MS = np.array([-4.870e-01, -7.104e-01, -8.779e-01, -1.045e+00, -1.225e+00, -1.490e+00,
                                -1.856e+00, -2.426e+00, -6.482e+00, -5.002e+01, -4.590e+02, -4.060e+03])
        # Disc radius in kpc (computed but not used in the active code below)
        re = G0_MR['StellarDiskRadius']*1000./Hubble_h
        #loop on radial bins
        for jj in range(0,RNUM):
            x_variable[NGals*jj:NGals*(jj+1)] = RingRadius[jj]
            SFR_this_ring=G0_MR['SfrRings'][:,jj]/area[jj]
            #SFR_Main_Sequence = spaxel_a[delta_i] * np.log10(G0_MR['DiskMassRings'][:,jj]/area[jj]*1.e10/Hubble_h)+ spaxel_b[delta_i]
            #y_variable[NGals*jj:NGals*(jj+1)]= np.log10(10**resolved_MS[jj]-SFR_this_ring)
            # Offset of each ring's log Sigma_SFR from the resolved reference
            y_variable[NGals*jj:NGals*(jj+1)]= - resolved_MS[jj] + np.log10(SFR_this_ring)
            #y_variable[NGals*jj:NGals*(jj+1)]= np.log10(SFR_this_ring)
        #endfor RNUM
        # Interpolate up to 1000 random galaxies onto the common radius grid
        N_random_gals = 1000
        if(N_random_gals>NGals):
            N_random_gals=NGals
        random.seed(a=2)  # fixed seed -> reproducible galaxy sample
        random_list = random.sample(range(0, NGals), N_random_gals)
        #N_random_gals = 2
        #random_list = random_list[0:1]
        interpol_x_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
        interpol_y_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
        to_do = np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
        i_index = 0
        for ii in random_list:
            # Gather the 12 ring entries belonging to galaxy ii
            slice_ii = [x*NGals+ii for x in range(0,12)]
            xx = x_variable[slice_ii]
            yy = y_variable[slice_ii]
            #ignore galaxies without halflightradius or with nan on the y_variable
            sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
            if(len(xx[sel])>3):
                to_do[i_index*len(new_x_var):(i_index+1)*len(new_x_var)]=1
                # s=0 -> exact interpolating spline through the valid points
                f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                interpol_y_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = f(new_x_var)
                interpol_x_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = new_x_var
            i_index += 1
        sel = to_do ==1
        (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                  interpol_y_variable[sel], non_zero=0)
        '''for kk in range(0, RNUM):
        print(f'{RingRadius[kk]:0.3f}')
        for kk in range(0, len(x_binned)):
        print(f'{x_binned[kk]:0.3f} {median[kk]:0.3e}')'''
        subplot.plot(x_binned, median, color=plot_color[delta_i], linewidth=2, linestyle='-')
    #end loop on mass bins
    #end loop on properties to plot
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_gradients_ellison.pdf')
    plt.close()
    return
#end gradients_ellison
# NOTE(review): the bare string literal opened below disables the entire
# 'gradients_mean_evo' function by wrapping it in triple quotes; it is never
# executed and has no runtime effect. Kept for reference only.
'''def gradients_mean_evo (ThisRedshiftList):
plot_color=['orange', 'red']
mass_low = 11.0
mass_high = 11.5
fig = plt.figure(figsize=(one_one_size_small[0],one_one_size_small[1]))
subplot=plt.subplot()
xlim=[-0.4, 1.5]
ylim=[-3, 2.]
#ylim=[-14, -8.5]
subplot.set_ylim(ylim),subplot.set_xlim(xlim)
xlab='$\log_{10}(r \mathrm{[kpc]})$'
ylab='$\log_{10}(\Sigma_{SFR}[M_{\odot} \mathrm{yr}^{-1}\mathrm{Kpc^{-2}}])$'
#if(i_z==1):
# ylab=''
# plt.tick_params(axis='y', which='both', left=True, labelleft=False)
subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
subplot.yaxis.set_major_locator(MultipleLocator(1.0))
subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
subplot.xaxis.set_major_locator(MultipleLocator(0.5))
subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
for i_z in range (0,len(ThisRedshiftList)):
(sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
G0_MR_unsel = G_MR[sel]
G0_MR = G0_MR_unsel[(G0_MR_unsel['Sfr']>0.) & (G0_MR_unsel['Type']==0) &
(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low) &
(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high)]
print(ThisRedshiftList[i_z])
#if(i_z==0):
# SFR_cut = -10.5
#else:
# SFR_cut = -10.0
#if(quenched_state==0):
# sel = np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))>SFR_cut
#else:
# sel = np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))<SFR_cut
#G0_MR_unsel = G0_MR_unsel[sel]
NGals=len(G0_MR)
print(NGals)
x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
new_x_var = np.arange(xlim[0],xlim[1]+1.0,0.2)
#loop on radial bins
for jj in range(0,RNUM):
x_variable[NGals*jj:NGals*(jj+1)]=np.log10(RingRadius[jj])
SFR_this_ring=G0_MR['SfrRings'][:,jj]
#1e6 -> from kpc^2 to pc^2
if(jj==0):
y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*RingRadius[0]**2)
else:
y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2))
#endfor RNUM
N_random_gals = 50000
if(N_random_gals>NGals):
N_random_gals=NGals
random.seed(a=2)
random_list = random.sample(range(0, NGals), N_random_gals)
#N_random_gals = 2
#random_list = random_list[0:1]
interpol_x_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
interpol_y_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
to_do = np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
i_index = 0
for ii in random_list:
slice_ii = [x*NGals+ii for x in range(0,12)]
xx = x_variable[slice_ii]
yy = np.log10(y_variable[slice_ii])
#ignore galaxies without halflightradius or with nan on the y_variable
sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
if(len(xx[sel])>3):
to_do[i_index*len(new_x_var):(i_index+1)*len(new_x_var)]=1
f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
interpol_y_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = f(new_x_var)
interpol_x_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = new_x_var
i_index += 1
sel = to_do ==1
(x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
interpol_y_variable[sel], non_zero=0)
subplot.plot(x_binned, median, color='red', linewidth=2, linestyle='-')
#WRITE OUTPUT
if(write_to_file==1):
df = pd.DataFrame({'log10_r':x_binned, 'log10_SigmaSFR':median})
file = Datadir+file_to_write+'gradients_mean_evo'+str(f'_M{mass_low:0.2f}') + \
str(f'_{mass_high:0.2f}') + str(f'_Qstate{quenched_state:d}') + \
str(f'_z{ThisRedshiftList[i_z]:0.2f}')+'.csv'
df.to_csv(file,index=False)
#df = pd.read_csv(file)
#subplot.plot(df['log10_r'],df['log10_SigmaSFR'], color='black')
#end loop on mass bins
if(i_z==0):
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.5, y_percentage=0.5, color='black', xlog=0,
ylog=0,label='Star Forming', fontsize=11, fontweight='normal')
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.15, y_percentage=0.28, color='black', xlog=0,
ylog=0,label='Passive', fontsize=11, fontweight='normal')
else:
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.59, y_percentage=0.62, color='black', xlog=0,
ylog=0,label='Star Forming', fontsize=11, fontweight='normal')
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.28, y_percentage=0.3, color='black', xlog=0,
ylog=0,label='Passive', fontsize=11, fontweight='normal')
if(i_z==1):
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.2, y_percentage=0.9, color='black', xlog=0,
ylog=0,label='$10.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<11.0$', fontsize=11, fontweight='normal')
plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.93,
color='orange',x2_percentage=0.18,xlog=0,ylog=0,linestyle='-',linewidth=2)
plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.91,
color='orange',x2_percentage=0.18,xlog=0,ylog=0,linestyle='--',linewidth=2)
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.2, y_percentage=0.82, color='black', xlog=0,
ylog=0,label='$11.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<12.0$', fontsize=11, fontweight='normal')
plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.85,
color='red',x2_percentage=0.18,xlog=0,ylog=0,linestyle='-',linewidth=2)
plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.83,
color='red',x2_percentage=0.18,xlog=0,ylog=0,linestyle='--',linewidth=2)
if(i_z==0):
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.1, y_percentage=0.1, color='black', xlog=0,
ylog=0,label='z=0', fontsize=13, fontweight='normal')
else:
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.1, y_percentage=0.1, color='black', xlog=0,
ylog=0,label='z=2', fontsize=13, fontweight='normal')
#end loop on properties to plot
plt.tight_layout()
current_function = inspect.getframeinfo(inspect.currentframe()).function
plt.savefig('./fig/plots_'+current_function+'.pdf')
plt.savefig('./fig/HYF19_gradients_mean_evo.pdf')
plt.close()
return
#end gradients_mean_evo
'''
def gradients_insideout_quenching_combined(ThisRedshiftList):
    '''fig = plt.figure(figsize=(one_one_size_small[0],one_one_size_small[1]))
    subplot=plt.subplot()
    xlim=[9.0, 12.0]
    ylim=[-13.0, -8.0]
    subplot.set_ylim(ylim),subplot.set_xlim(xlim)
    xlab='Mass'
    ylab='SFR'
    subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
    subplot.yaxis.set_major_locator(MultipleLocator(1.0))
    subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
    subplot.xaxis.set_major_locator(MultipleLocator(0.5))
    subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
    i_z=0
    (sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
    G0_MR = G_MR[sel]
    G0_MR = G0_MR[(np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)>8.5) & (G0_MR['Sfr']>0) & (G0_MR['Type']==0)]
    Mass = np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)
    SFR = np.log10(G0_MR['Sfr']/(G0_MR['StellarMass']*1.e10/Hubble_h))
    subplot.scatter(Mass[:10000], SFR[:10000], s=2)
    bin=[0.2,0.1]
    Nbins=[int((xlim[1]-xlim[0])/bin[0]),int((ylim[1]-ylim[0])/bin[1])]
    Ngals=len(Mass)/10.
    H, xedges, yedges = np.histogram2d(Mass, SFR, bins=Nbins)
    extent = [xedges[0], xedges[-1],yedges[0], yedges[-1]]
    plt.subplots_adjust(bottom=0.15, left=0.15)
    mylevels = np.linspace(1., Nbins[0], Nbins[0])*Ngals/(Nbins[0]**2/3.)
    H = zoom(H, 20)
    cont=plt.contourf(H.transpose()[::], origin='lower', cmap='Greys_r', levels=mylevels, extent=extent)
    plt.savefig('./fig/HYJ18_main_sequence.pdf')
    plt.close()'''
    # Purpose: one panel per redshift in ThisRedshiftList (expected: z=0 and z=2,
    # per the hard-coded panel labels below). In each panel, plot the median
    # radial Sigma_SFR profile of central galaxies (Type==0), split into
    # star-forming (dashed) vs passive (solid) by a redshift-dependent sSFR cut,
    # in two stellar-mass bins.  Optionally writes the medians to CSV.
    # The triple-quoted block above is retired scratch code kept by the author
    # (it sits in docstring position, so it is never executed).
    plot_color=['orange', 'red']            # one colour per stellar-mass bin
    fig = plt.figure(figsize=(one_two_size_small[0],one_two_size_small[1]))
    grid = gridspec.GridSpec(1, 2)
    grid.update(wspace=0.0, hspace=0.0)     # panels share the y axis flush
    for i_z in range (0,len(ThisRedshiftList)):
        subplot=plt.subplot(grid[i_z])
        xlim=[-0.4, 1.5]                    # log10(r/kpc)
        ylim=[-4, 2.]                       # log10(Sigma_SFR)
        #ylim=[-14, -8.5]
        subplot.set_ylim(ylim),subplot.set_xlim(xlim)
        xlab='$\log_{10}(r \mathrm{[kpc]})$'
        ylab='$\log_{10}(\Sigma_{SFR}[M_{\odot} \mathrm{yr}^{-1}\mathrm{Kpc^{-2}}])$'
        if(i_z==1):
            # right-hand panel: hide the duplicated y axis labelling
            ylab=''
            plt.tick_params(axis='y', which='both', left=True, labelleft=False)
        subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
        subplot.yaxis.set_major_locator(MultipleLocator(1.0))
        subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        subplot.xaxis.set_major_locator(MultipleLocator(0.5))
        subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
        MassBins = 2                        # NOTE(review): unused; bins come from mass_low/mass_high
        mass_low = [10.,11.]                # stellar-mass bin edges, log10(M*/Msun)
        mass_high = [11.,12.]
        for quenched_state in range(0,2):   # 0: star-forming, 1: passive
            for k_mass in range(0,len(mass_low)):
                (sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
                G0_MR_unsel = G_MR[sel]
                # centrals with non-zero SFR only
                G0_MR_unsel = G0_MR_unsel[(G0_MR_unsel['Sfr']>0.) & (G0_MR_unsel['Type']==0)]
                # sSFR threshold separating star-forming from passive;
                # looser cut at the higher redshift
                if(i_z==0):
                    SFR_cut = -10.5
                else:
                    SFR_cut = -10.0
                if(quenched_state==0):
                    sel = np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))>SFR_cut
                else:
                    sel = np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))<SFR_cut
                G0_MR_unsel = G0_MR_unsel[sel]
                #mass bins
                G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
                                  (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
                NGals=len(G0_MR)
                # profiles stacked ring-major: element jj*NGals+ii is ring jj of galaxy ii
                x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
                y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
                new_x_var = np.arange(xlim[0],xlim[1]+1.0,0.2)  # common radial grid, log10(kpc)
                #loop on radial bins
                for jj in range(0,RNUM):
                    x_variable[NGals*jj:NGals*(jj+1)]=np.log10(RingRadius[jj])
                    SFR_this_ring=G0_MR['SfrRings'][:,jj]
                    #1e6 -> from kpc^2 to pc^2
                    # divide ring SFR by annulus area (3.14 approximates pi)
                    if(jj==0):
                        y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*RingRadius[0]**2)
                    else:
                        y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2))
                #endfor RNUM
                # fixed-seed random subsample to bound the per-galaxy spline cost
                N_random_gals = 50000
                if(N_random_gals>NGals):
                    N_random_gals=NGals
                random.seed(a=2)
                random_list = random.sample(range(0, NGals), N_random_gals)
                #N_random_gals = 2
                #random_list = random_list[0:1]
                interpol_x_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
                interpol_y_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
                # to_do marks slots actually filled by a successful spline fit
                to_do = np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
                i_index = 0
                for ii in random_list:
                    # NOTE(review): hard-coded 12 here presumably equals RNUM
                    # (all_gradients uses range(0,RNUM)) — confirm RNUM==12
                    slice_ii = [x*NGals+ii for x in range(0,12)]
                    xx = x_variable[slice_ii]
                    yy = np.log10(y_variable[slice_ii])
                    #ignore galaxies without halflightradius or with nan on the y_variable
                    sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                    if(len(xx[sel])>3):
                        to_do[i_index*len(new_x_var):(i_index+1)*len(new_x_var)]=1
                        # s=0 -> exact interpolating spline through the valid rings
                        f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                        interpol_y_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = f(new_x_var)
                        interpol_x_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = new_x_var
                    i_index += 1
                sel = to_do ==1
                (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                          interpol_y_variable[sel], non_zero=0)
                # dashed: star-forming, solid: passive
                if(quenched_state==0):
                    subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2, linestyle='--')
                else:
                    subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2, linestyle='-')
                #WRITE OUTPUT
                if(write_to_file==1):
                    df = pd.DataFrame({'log10_r':x_binned, 'log10_SigmaSFR':median})
                    file = Datadir+file_to_write+'Gradients_insideout_quenching'+str(f'_M{mass_low[k_mass]:0.2f}') + \
                           str(f'_{mass_high[k_mass]:0.2f}') + str(f'_Qstate{quenched_state:d}') + \
                           str(f'_z{ThisRedshiftList[i_z]:0.2f}')+'.csv'
                    df.to_csv(file,index=False)
                    #df = pd.read_csv(file)
                    #subplot.plot(df['log10_r'],df['log10_SigmaSFR'], color='black')
        #end loop on mass bins
        # in-panel text labels (positions hand-tuned per panel)
        if(i_z==0):
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.5, y_percentage=0.5, color='black', xlog=0,
                        ylog=0,label='Star Forming', fontsize=11, fontweight='normal')
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.15, y_percentage=0.28, color='black', xlog=0,
                        ylog=0,label='Passive', fontsize=11, fontweight='normal')
        else:
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.59, y_percentage=0.62, color='black', xlog=0,
                        ylog=0,label='Star Forming', fontsize=11, fontweight='normal')
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.28, y_percentage=0.3, color='black', xlog=0,
                        ylog=0,label='Passive', fontsize=11, fontweight='normal')
        # dead code kept by the author (bare string literal, never executed)
        '''plot_label (subplot, 'label', xlim, ylim, x_percentage=0.57, y_percentage=0.73, color='black', xlog=0,
        ylog=0,label='$SSFR[\mathrm{yr}^{-1}]>10^{-10}$', fontsize=11, fontweight='normal')
        plot_label (subplot,'line',xlim,ylim,x_percentage=0.48,y_percentage=0.76,
        color='red',x2_percentage=0.55,xlog=0,ylog=0,linestyle='--',linewidth=2)
        plot_label (subplot,'line',xlim,ylim,x_percentage=0.48,y_percentage=0.74,
        color='orange',x2_percentage=0.55,xlog=0,ylog=0,linestyle='--',linewidth=2)
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.57, y_percentage=0.65, color='black', xlog=0,
        ylog=0,label='$SSFR[\mathrm{yr}^{-1}]<10^{-11}$', fontsize=11, fontweight='normal')
        plot_label (subplot,'line',xlim,ylim,x_percentage=0.48,y_percentage=0.68,
        color='red',x2_percentage=0.55,xlog=0,ylog=0,linestyle='-',linewidth=2)
        plot_label (subplot,'line',xlim,ylim,x_percentage=0.48,y_percentage=0.66,
        color='orange',x2_percentage=0.55,xlog=0,ylog=0,linestyle='-',linewidth=2)'''
        # legend for the two mass bins (drawn once, on the right panel)
        if(i_z==1):
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.2, y_percentage=0.9, color='black', xlog=0,
                        ylog=0,label='$10.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<11.0$', fontsize=11, fontweight='normal')
            plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.93,
                        color='orange',x2_percentage=0.18,xlog=0,ylog=0,linestyle='-',linewidth=2)
            plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.91,
                        color='orange',x2_percentage=0.18,xlog=0,ylog=0,linestyle='--',linewidth=2)
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.2, y_percentage=0.82, color='black', xlog=0,
                        ylog=0,label='$11.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<12.0$', fontsize=11, fontweight='normal')
            plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.85,
                        color='red',x2_percentage=0.18,xlog=0,ylog=0,linestyle='-',linewidth=2)
            plot_label (subplot,'line',xlim,ylim,x_percentage=0.1,y_percentage=0.83,
                        color='red',x2_percentage=0.18,xlog=0,ylog=0,linestyle='--',linewidth=2)
        # redshift tag; assumes ThisRedshiftList == [0, 2] — TODO confirm at call site
        if(i_z==0):
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.1, y_percentage=0.1, color='black', xlog=0,
                        ylog=0,label='z=0', fontsize=13, fontweight='normal')
        else:
            plot_label (subplot, 'label', xlim, ylim, x_percentage=0.1, y_percentage=0.1, color='black', xlog=0,
                        ylog=0,label='z=2', fontsize=13, fontweight='normal')
    #end loop on properties to plot
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYF19_gradients_insideout_quenching_combined.pdf')
    plt.close()

    return
#end gradients_insideout_quenching_combined
def gradients_insideout_quenching_SSFR(ThisRedshiftList):
    """Plot median radial sSFR-surface-density profiles at ThisRedshiftList[0].

    Central galaxies (Type==0) with non-zero SFR are split into star-forming
    (log10 sSFR > -10, dashed lines) and passive (log10 sSFR < -11, solid
    lines) populations, in three stellar-mass bins.  For each selection the
    per-ring sSFR is divided by the ring annulus area, every galaxy's log
    profile is spline-interpolated onto a common radial grid, and the median
    across a fixed-seed random subsample of galaxies is plotted.

    Parameters
    ----------
    ThisRedshiftList : sequence of float
        Redshifts available in the loaded snapshot; only index 0 is used here.

    Side effects: writes ./fig/plots_<function>.pdf and
    ./fig/HYJ18_gradients_insideout_quenching_SSFR.pdf, then closes the figure.

    Relies on module-level globals (G_MR, FullSnapshotList_MR, Hubble_h, RNUM,
    RingRadius, one_one_size_small, select_current_redshift,
    median_and_percentiles_fixed_xx, plot_label).
    """
    plot_color=['blue', 'orange', 'red']    # one colour per stellar-mass bin
    fig = plt.figure(figsize=(one_one_size_small[0],one_one_size_small[1]))
    # stellar-mass bin edges, log10(M*/Msun)
    mass_low = [9., 10., 11.]
    mass_high = [10., 11., 12.]
    for quenched_state in range(0,2):       # 0: star-forming, 1: passive
        for k_mass in range(0,len(mass_low)):
            subplot=plt.subplot()
            xlim=[-0.5, 1.5]                # log10(r/kpc)
            ylim=[-14, -8.5]                # log10(Sigma_sSFR)
            subplot.set_ylim(ylim),subplot.set_xlim(xlim)
            xlab='$\log_{10}(r \mathrm{[kpc]})$'
            ylab='$\log_{10}(\Sigma_{SSFR}[\mathrm{yr}^{-1}\mathrm{Kpc^{-2}}])$'
            subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
            subplot.xaxis.set_major_locator(MultipleLocator(0.5))
            subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
            # centrals with non-zero SFR at the first redshift in the list
            i_z=0
            (sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
            G0_MR_unsel = G_MR[sel]
            G0_MR_unsel = G0_MR_unsel[(G0_MR_unsel['Sfr']>0.) & (G0_MR_unsel['Type']==0)]
            # split by global sSFR (note the gap between the two cuts)
            if(quenched_state==0):
                G0_MR_unsel=G0_MR_unsel[np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))>-10.]
            else:
                G0_MR_unsel=G0_MR_unsel[np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))<-11.]
            #mass bins
            G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
            NGals=len(G0_MR)
            # profiles stacked ring-major: element jj*NGals+ii is ring jj of galaxy ii
            x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            new_x_var = np.arange(xlim[0],xlim[1]+1.0,0.2)  # common radial grid, log10(kpc)
            #loop on radial bins
            for jj in range(0,RNUM):
                x_variable[NGals*jj:NGals*(jj+1)]=np.log10(RingRadius[jj])
                # per-ring sSFR = ring SFR / ring stellar mass (disk + bulge)
                SFR_this_ring=(G0_MR['SfrRings'][:,jj]/(G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h+G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h))
                # divide by annulus area in kpc^2 (3.14 approximates pi)
                if(jj==0):
                    y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*RingRadius[0]**2)
                else:
                    y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2))
            #endfor RNUM
            # fixed-seed random subsample to bound the per-galaxy spline cost
            N_random_gals = 50000
            if(N_random_gals>NGals):
                N_random_gals=NGals
            random.seed(a=2)
            random_list = random.sample(range(0, NGals), N_random_gals)
            interpol_x_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            interpol_y_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            # to_do marks the slots actually filled by a successful spline fit
            to_do = np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            i_index = 0
            for ii in random_list:
                # gather this galaxy's rings (was hard-coded range(0,12);
                # use RNUM so the slice always matches the array layout)
                slice_ii = [x*NGals+ii for x in range(0,RNUM)]
                xx = x_variable[slice_ii]
                yy = np.log10(y_variable[slice_ii])
                #ignore galaxies without halflightradius or with nan on the y_variable
                sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                if(len(xx[sel])>3):
                    to_do[i_index*len(new_x_var):(i_index+1)*len(new_x_var)]=1
                    # s=0 -> exact interpolating spline through the valid rings
                    f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                    interpol_y_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = f(new_x_var)
                    interpol_x_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = new_x_var
                i_index += 1
            sel = to_do ==1
            (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                      interpol_y_variable[sel], non_zero=0)
            # dashed: star-forming, solid: passive
            if(quenched_state==0):
                subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2, linestyle='--')
            else:
                subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2, linestyle='-')
    #end loop on mass bins
    # legend for the three mass bins
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.35, y_percentage=0.88, color='black', xlog=0,
                ylog=0,label='$9.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<10.0$', fontsize=11, fontweight='normal')
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.27,y_percentage=0.9,
                color='blue',x2_percentage=0.33,xlog=0,ylog=0,linestyle='-',linewidth=2)
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.35, y_percentage=0.78, color='black', xlog=0,
                ylog=0,label='$10.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<11.0$', fontsize=11, fontweight='normal')
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.27,y_percentage=0.8,
                color='orange',x2_percentage=0.33,xlog=0,ylog=0,linestyle='-',linewidth=2)
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.35, y_percentage=0.68, color='black', xlog=0,
                ylog=0,label='$11.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<12.0$', fontsize=11, fontweight='normal')
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.27,y_percentage=0.7,
                color='red',x2_percentage=0.33,xlog=0,ylog=0,linestyle='-',linewidth=2)
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_gradients_insideout_quenching_SSFR.pdf')
    plt.close()

    return
#end gradients_insideout_quenching_SSFR
def gradients_insideout_quenching_SFR(ThisRedshiftList):
    """Plot median radial SFR-surface-density profiles at ThisRedshiftList[0].

    Central galaxies (Type==0) with non-zero SFR are split into star-forming
    (log10 sSFR > -10, dashed lines) and passive (log10 sSFR < -11, solid
    lines) populations, in two stellar-mass bins.  Each galaxy's per-ring SFR
    is divided by the ring annulus area, the log profile is
    spline-interpolated onto a common radial grid, and the median across a
    fixed-seed random subsample of galaxies is plotted.

    Parameters
    ----------
    ThisRedshiftList : sequence of float
        Redshifts available in the loaded snapshot; only index 0 is used here.

    Side effects: writes ./fig/plots_<function>.pdf and
    ./fig/HYJ18_gradients_insideout_quenching_SFR.pdf, then closes the figure.

    Relies on module-level globals (G_MR, FullSnapshotList_MR, Hubble_h, RNUM,
    RingRadius, one_one_size_small, select_current_redshift,
    median_and_percentiles_fixed_xx, plot_label).
    """
    plot_color=['orange', 'red']            # one colour per stellar-mass bin
    fig = plt.figure(figsize=(one_one_size_small[0],one_one_size_small[1]))
    # stellar-mass bin edges, log10(M*/Msun)
    mass_low = [10., 11.]
    mass_high = [11., 12.]
    for quenched_state in range(0,2):       # 0: star-forming, 1: passive
        for k_mass in range(0,len(mass_low)):
            subplot=plt.subplot()
            xlim=[-0.5, 1.5]                # log10(r/kpc)
            ylim=[-4, 1.]                   # log10(Sigma_SFR)
            subplot.set_ylim(ylim),subplot.set_xlim(xlim)
            xlab='$\log_{10}(r \mathrm{[kpc]})$'
            ylab='$\log_{10}(\Sigma_{SFR}[M_{\odot} \mathrm{yr}^{-1}\mathrm{Kpc^{-2}}])$'
            subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
            subplot.xaxis.set_major_locator(MultipleLocator(0.5))
            subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
            # centrals with non-zero SFR at the first redshift in the list
            i_z=0
            (sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
            G0_MR_unsel = G_MR[sel]
            G0_MR_unsel = G0_MR_unsel[(G0_MR_unsel['Sfr']>0.) & (G0_MR_unsel['Type']==0)]
            # split by global sSFR (note the gap between the two cuts)
            if(quenched_state==0):
                G0_MR_unsel=G0_MR_unsel[np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))>-10.]
            else:
                G0_MR_unsel=G0_MR_unsel[np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))<-11.]
            #mass bins
            G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
            NGals=len(G0_MR)
            # profiles stacked ring-major: element jj*NGals+ii is ring jj of galaxy ii
            x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            new_x_var = np.arange(xlim[0],xlim[1]+1.0,0.2)  # common radial grid, log10(kpc)
            #loop on radial bins
            for jj in range(0,RNUM):
                x_variable[NGals*jj:NGals*(jj+1)]=np.log10(RingRadius[jj])
                SFR_this_ring=G0_MR['SfrRings'][:,jj]
                # divide by annulus area in kpc^2 (3.14 approximates pi)
                if(jj==0):
                    y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*RingRadius[0]**2)
                else:
                    y_variable[NGals*jj:NGals*(jj+1)]=SFR_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2))
            #endfor RNUM
            # fixed-seed random subsample to bound the per-galaxy spline cost
            N_random_gals = 50000
            if(N_random_gals>NGals):
                N_random_gals=NGals
            random.seed(a=2)
            random_list = random.sample(range(0, NGals), N_random_gals)
            interpol_x_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            interpol_y_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            # to_do marks the slots actually filled by a successful spline fit
            to_do = np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            i_index = 0
            for ii in random_list:
                # gather this galaxy's rings (was hard-coded range(0,12);
                # use RNUM so the slice always matches the array layout)
                slice_ii = [x*NGals+ii for x in range(0,RNUM)]
                xx = x_variable[slice_ii]
                yy = np.log10(y_variable[slice_ii])
                #ignore galaxies without halflightradius or with nan on the y_variable
                sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                if(len(xx[sel])>3):
                    to_do[i_index*len(new_x_var):(i_index+1)*len(new_x_var)]=1
                    # s=0 -> exact interpolating spline through the valid rings
                    f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                    interpol_y_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = f(new_x_var)
                    interpol_x_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = new_x_var
                i_index += 1
            sel = to_do ==1
            (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                      interpol_y_variable[sel], non_zero=0)
            # dashed: star-forming, solid: passive
            if(quenched_state==0):
                subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2, linestyle='--')
            else:
                subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2, linestyle='-')
    #end loop on mass bins
    # population labels and mass-bin legend
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.57, y_percentage=0.62, color='black', xlog=0,
                ylog=0,label='Star Forming', fontsize=11, fontweight='normal')
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.23, y_percentage=0.3, color='black', xlog=0,
                ylog=0,label='Passive', fontsize=11, fontweight='normal')
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.38, y_percentage=0.9, color='black', xlog=0,
                ylog=0,label='$10.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<11.0$', fontsize=11, fontweight='normal')
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.3,y_percentage=0.93,
                color='orange',x2_percentage=0.36,xlog=0,ylog=0,linestyle='-',linewidth=2)
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.3,y_percentage=0.91,
                color='orange',x2_percentage=0.36,xlog=0,ylog=0,linestyle='--',linewidth=2)
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.38, y_percentage=0.82, color='black', xlog=0,
                ylog=0,label='$11.0<\log_{10}(M_*[\mathrm{M}_{\odot}])<12.0$', fontsize=11, fontweight='normal')
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.3,y_percentage=0.85,
                color='red',x2_percentage=0.36,xlog=0,ylog=0,linestyle='-',linewidth=2)
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.3,y_percentage=0.83,
                color='red',x2_percentage=0.36,xlog=0,ylog=0,linestyle='--',linewidth=2)
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_gradients_insideout_quenching_SFR.pdf')
    plt.close()

    return
#end gradients_insideout_quenching_SFR
def gradients_insideout_quenching(ThisRedshiftList):
    # Purpose: compare model median radial stellar-surface-density profiles
    # against the Tacchella et al. (2015) observations ("tachella*" CSVs), in
    # two stellar-mass bins, for two redshift-dependent sSFR selections
    # (i_z==0: passive, log10 sSFR < -10.5; i_z>0: star-forming,
    # log10 sSFR > -9).  Both mass bins are drawn on the same single panel.
    plot_color=['red', 'blue']                          # model curves per i_z
    plot_color_obs=['darkorange', 'cornflowerblue']     # observation curves
    linestyles=['--','-']                               # obs dashed, model solid
    fig = plt.figure(figsize=(one_one_size_small[0],one_one_size_small[1]))
    MassBins = 2        # NOTE(review): unused; bins come from mass_low/mass_high
    mass_low = [9.5, 10.85]     # stellar-mass bin edges, log10(M*/Msun)
    mass_high = [10.3, 11.7]
    for k_mass in range(0,len(mass_low)):
        subplot=plt.subplot()
        xlim=[-0.5, 1.4]        # log10(r/kpc)
        ylim=[7.0, 10.5]        # log10(Sigma_*)
        subplot.set_ylim(ylim),subplot.set_xlim(xlim)
        xlab='$r \mathrm{(kpc)}$'
        ylab='$\log_{10}(\Sigma_*[M_{\odot} \mathrm{Kpc^{-2}}])$'
        subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
        subplot.yaxis.set_major_locator(MultipleLocator(1.0))
        subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        subplot.xaxis.set_major_locator(MultipleLocator(0.5))
        subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
        for i_z in range(0, len(ThisRedshiftList)):
            (sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
            G0_MR_unsel = G_MR[sel]
            #main sequence selection
            # i_z==0 selects passive galaxies, later redshifts star-forming
            if(i_z==0):
                G0_MR_unsel=G0_MR_unsel[np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))<-10.5]
            else:
                G0_MR_unsel=G0_MR_unsel[np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))>-9.]
            #mass bins
            G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
            NGals=len(G0_MR)
            # profiles stacked ring-major: element jj*NGals+ii is ring jj of galaxy ii
            x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            new_x_var = np.arange(xlim[0],xlim[1]+1.0,0.1)  # common radial grid, log10(kpc)
            #loop on radial bins
            for jj in range(0,RNUM):
                x_variable[NGals*jj:NGals*(jj+1)]=np.log10(RingRadius[jj])
                # ring stellar mass = disk + bulge components, in Msun
                Mass_this_ring=(G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h)+(G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h)
                #1e6 -> from kpc^2 to pc^2
                # divide by annulus area (3.14 approximates pi)
                if(jj==0):
                    y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*RingRadius[0]**2)
                else:
                    y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2))
            #endfor RNUM
            # fixed-seed random subsample to bound the per-galaxy spline cost
            N_random_gals = 20000
            if(N_random_gals>NGals):
                N_random_gals=NGals
            random.seed(a=2)
            random_list = random.sample(range(0, NGals), N_random_gals)
            interpol_x_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            interpol_y_variable=np.zeros(int(len(new_x_var)*N_random_gals),dtype=np.float32)
            i_index = 0
            for ii in random_list:
                # NOTE(review): hard-coded 12 here presumably equals RNUM
                # (all_gradients uses range(0,RNUM)) — confirm RNUM==12
                slice_ii = [x*NGals+ii for x in range(0,12)]
                xx = x_variable[slice_ii]
                yy = np.log10(y_variable[slice_ii])
                #ignore galaxies without halflightradius or with nan on the y_variable
                sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                if(len(xx[sel])>3):
                    # s=0 -> exact interpolating spline through the valid rings
                    f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                    interpol_y_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = f(new_x_var)
                    interpol_x_variable[i_index*len(new_x_var):(i_index+1)*len(new_x_var)] = new_x_var
                # NOTE(review): unlike the sibling functions there is no to_do
                # mask here, so slots from skipped galaxies stay zero and are
                # passed to the median below — confirm this is intended
                i_index += 1
            (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable,
                                                                                      interpol_y_variable, non_zero=0)
            subplot.plot(x_binned, median, color=plot_color[i_z], linewidth=2, linestyle=linestyles[1])
        #Observations
        # Tacchella et al. 2015 profiles; 'tachella' spelling matches the data files
        if(k_mass==0):
            df = pd.read_csv(Datadir+'tachella2015_highz_massbin1.csv')
            error_down = np.log10(df['y']/(df['y']+df['err_down']))
            error_up = np.log10((df['y']+df['err_down'])/df['y'])
            #subplot.errorbar(np.log10(df['x']), np.log10(df['y']), yerr=[error_down,error_up], fmt='o', markersize=5,
            #                 color=plot_color[0], ecolor=plot_color[0])
            subplot.plot(np.log10(df['x']),np.log10(df['y']),color=plot_color_obs[0],linewidth=2, linestyle=linestyles[0])
            df = pd.read_csv(Datadir+'tachella2015_lowz_massbin1.csv')
            #subplot.fill_between(np.log10(df['x']),np.log10(df['y']+df['err_down']),np.log10(df['y']+df['err_up']),
            #                     facecolor='grey', interpolate=True, alpha=0.3)
            subplot.plot(np.log10(df['x']),np.log10(df['y']),color=plot_color_obs[1],linewidth=2, linestyle=linestyles[0])
        if(k_mass==1):
            df = pd.read_csv(Datadir+'tachella2015_highz_massbin2.csv')
            error_down = np.log10(df['y']/(df['y']+df['err_down']))
            error_up = np.log10((df['y']+df['err_down'])/df['y'])
            #subplot.errorbar(np.log10(df['x']), np.log10(df['y']), yerr=[error_down,error_up], fmt='o', markersize=5,
            #                 color=plot_color[0], ecolor=plot_color[0])
            subplot.plot(np.log10(df['x']),np.log10(df['y']),color=plot_color_obs[0],linewidth=2, linestyle=linestyles[0])
            df = pd.read_csv(Datadir+'tachella2015_lowz_massbin2.csv')
            #subplot.fill_between(np.log10(df['x']),np.log10(df['y']+df['err_down']),np.log10(df['y']+df['err_up']),
            #                     facecolor='grey', interpolate=True, alpha=0.3)
            subplot.plot(np.log10(df['x']),np.log10(df['y']),color=plot_color_obs[1],linewidth=2, linestyle=linestyles[0])
    #end loop on mass bins
    # legend: observations vs model
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.7, y_percentage=0.78,
                color='black', xlog=0, ylog=0,label='Tachella 2015', fontsize=11, fontweight='normal')
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.62,y_percentage=0.81,
                color=plot_color_obs[0],x2_percentage=0.68,xlog=0,ylog=0,linestyle='--',linewidth=2)
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.62,y_percentage=0.79,
                color=plot_color_obs[1],x2_percentage=0.68,xlog=0,ylog=0,linestyle='--',linewidth=2)
    plot_label (subplot, 'label', xlim, ylim, x_percentage=0.7, y_percentage=0.88,
                color='black', xlog=0, ylog=0,label='This Work', fontsize=11, fontweight='normal')
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.62,y_percentage=0.91,
                color='red',x2_percentage=0.68,xlog=0,ylog=0,linestyle='-',linewidth=2)
    plot_label (subplot,'line',xlim,ylim,x_percentage=0.62,y_percentage=0.89,
                color='blue',x2_percentage=0.68,xlog=0,ylog=0,linestyle='-',linewidth=2)
    #end loop on properties to plot
    #SSFR distribution at z=2 for selection
    #subplot=plt.subplot(grid[2])
    #xlim=[0.0, 12.0]
    #ylim=[-13.0, -8.0]
    #G0_MR=G0_MR_unsel[np.log10(G0_MR_unsel['Sfr']/(G0_MR_unsel['StellarMass']*1.e10/Hubble_h))>-9.5]
    #subplot.scatter(np.log10(G0_MR['StellarMass']*1.e10/Hubble_h),
    #                np.log10(G0_MR['Sfr']/(G0_MR['StellarMass']*1.e10/Hubble_h)), marker='o',s=1)
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_gradients_insideout_quenching.pdf')
    plt.close()

    return
#end gradients_insideout_quenching
def all_gradients(ThisRedshiftList):
i_z=0
labels = ['SigmaStar','StellarMetallicity','SigmaGas','GasMetallicity']
plot_color=['purple', 'blue', 'green', 'darkorange', 'red']
fig = plt.figure(figsize=(two_two_size_large[0],two_two_size_large[1]))
grid = gridspec.GridSpec(2, 2)
grid.update(wspace=0.4, hspace=0.3)
plt.subplots_adjust(left=0.12, right=0.97, top=0.97, bottom=0.09)
MassBins = 5
mass_low = [9.0, 9.5, 10.0, 10.5, 11.0]
mass_high = [9.5, 10.0, 10.5, 11.0, 11.5]
#mass_low = [9.0]
#mass_high = [9.5]
(sel)=select_current_redshift(G_MR, ThisRedshiftList, i_z, FullSnapshotList_MR)
G0_MR_unsel = G_MR[sel]
G0_MR_unsel = G0_MR_unsel[np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>9.0]
#SFH structure needed for age gradients
fa = open(DirName_MR+"SFH_Bins","rb")
nbins = np.fromfile(fa,np.int32,1)
template = np.dtype([('SnapNum',np.int32,1), ('Bin',np.int32,1), ('Lookbacktime',np.float64,1),
('dt',np.float64,1),('nbins',np.int32,1)])
SFH = np.fromfile(fa,template,int(nbins))
fa.close()
SFH=SFH[SFH['SnapNum']==G0_MR_unsel['SnapNum'][0]]
Nprops=4
#loop on quantity to plot
for plot_prop in range (0, Nprops):
subplot=plt.subplot(grid[plot_prop])
if(plot_prop==0):
#***************************
#* stellar surface density *
#***************************
ylim=[1.0, 5.0]
ylab='$\log_{10}(\Sigma_*/(M_{\odot}\mathrm{pc^{-2}}))$'
subplot.yaxis.set_major_locator(MultipleLocator(1.0))
subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
if(plot_prop==1):
#***************************
#* stellar metallicity *
#***************************
ylim=[-1., 0.4]
ylab='$\mathrm{log_{10}}(Z_*/Z_{\odot})$'
subplot.yaxis.set_major_locator(MultipleLocator(0.5))
subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
if(plot_prop==2):
#***************************
#* ColdGas *
#***************************
ylim=[-0.5, 2.5]
ylab='$\log_{10}(\Sigma_{\mathrm{cold}}/(M_{\odot} \mathrm{pc^{-2}}))$'
subplot.yaxis.set_major_locator(MultipleLocator(1.0))
subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
if(plot_prop==3):
#***************************
#* Cold Gas metallicity *
#***************************
ylim=[7.5, 9.4]
ylab='$12 + \log_{10}(\mathrm{O/H})_{\mathrm{cold}}$'
subplot.yaxis.set_major_locator(MultipleLocator(0.5))
subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
xlim=[0.0, 3.0]
subplot.set_ylim(ylim),subplot.set_xlim(xlim)
xlab='$r/r_{e}$'
subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
subplot.xaxis.set_major_locator(MultipleLocator(0.5))
subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
#For comparison with MaNGA metallicity gradients select only galaxies with non-zero metallicity
if(plot_prop==2):
if(opt_detailed_enrichment==1):
G0_MR_unsel=G0_MR_unsel[((G0_MR_unsel['MetalsColdGas'][:,0] +
G0_MR_unsel['MetalsColdGas'][:,1] +
G0_MR_unsel['MetalsColdGas'][:,2])>.0)]
else:
G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['MetalsColdGas']>.0)]
#if(plot_prop==0 or plot_prop==1):
# G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['DiskMass']/G0_MR_unsel['StellarMass']>0.9)]
'''if(plot_prop==0):
G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[0]) &
(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[0])]
area=np.zeros(int(RNUM),dtype=np.float32)
for jj in range(0,12):
#1e6 -> from kpc^2 to pc^2
if(jj==0):
area[jj]=(3.14*(RingRadius[0]**2*1e6))
else:
area[jj]=(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)
#for i_gal in range(0,len(G0_MR)):
for i_gal in range(0,500):
x_variable=RingRadius/(G0_MR['StellarHalfLightRadius'][i_gal]*1000./Hubble_h)
Mass = (G0_MR['DiskMassRings'][i_gal,:]*1e10/Hubble_h)+(G0_MR['BulgeMassRings'][i_gal,:]*1e10/Hubble_h)
y_variable=Mass/area
subplot.plot(x_variable,np.log10(y_variable),color='blue')'''
#loop on mass bins
for k_mass in range(0,len(mass_low)):
G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
NGals=len(G0_MR)
x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
new_x_var = np.arange(xlim[0],xlim[1],0.1)
interpol_x_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)
interpol_y_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)
#loop on radial bins
for jj in range(0,RNUM):
if(plot_prop==0):
x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfMassRadius']*1000./Hubble_h)
elif(plot_prop==1):
x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfMassRadius']*1000./Hubble_h)
#x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarDiskRadius']*1000./Hubble_h)
else:
x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['GasDiskRadius']*1000./Hubble_h)
x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfMassRadius']*1000./Hubble_h)
#***************************
#* Stellar surface density *
#***************************
if(plot_prop==0):
Mass_this_ring=(G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h)+(G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h)
#Mass_this_ring=(G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h)
#1e6 -> from kpc^2 to pc^2
if(jj==0):
y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*(RingRadius[0]**2*1e6))
else:
y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)
#***************************
#* stellar metallicity *
#***************************
if(plot_prop==1):
if(opt_detailed_enrichment==1):
Metals_this_ring = (G0_MR['MetalsDiskMassRings'][:,jj,0] +
G0_MR['MetalsDiskMassRings'][:,jj,1] +
G0_MR['MetalsDiskMassRings'][:,jj,2])
Metals_this_ring += (G0_MR['MetalsBulgeMassRings'][:,jj,0] +
G0_MR['MetalsBulgeMassRings'][:,jj,1] +
G0_MR['MetalsBulgeMassRings'][:,jj,2])
else:
MetalsDiskMass_this_ring=G0_MR['MetalsDiskMassRings'][:,jj]
Mass_this_Ring = G0_MR['DiskMassRings'][:,jj] + G0_MR['BulgeMassRings'][:,jj]
y_variable[NGals*jj:NGals*(jj+1)]= Metals_this_ring/Mass_this_Ring/0.02
#***************************
#* ColdGas surface density *
#***************************
if(plot_prop==2):
Mass_this_ring=(G0_MR['ColdGasRings'][:,jj]*1e10/Hubble_h)
#1e6 -> from kpc^2 to pc^2
if(jj==0):
y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*RingRadius[0]**2*1e6)
else:
y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)
#***************************
#* Gas metallicity *
#***************************
if(plot_prop==3):
if(opt_detailed_enrichment==1):
MetalsColdGas_this_ring=(G0_MR['MetalsColdGasRings'][:,jj,0] +
G0_MR['MetalsColdGasRings'][:,jj,1] +
G0_MR['MetalsColdGasRings'][:,jj,2])
else:
MetalsColdGas_this_ring=G0_MR['MetalsColdGasRings'][:,jj]
y_variable[NGals*jj:NGals*(jj+1)] = 10**(np.log10(MetalsColdGas_this_ring /
G0_MR['ColdGasRings'][:,jj]/0.0134) + 8.69)
#endfor RNUM
N_random_gals = 20000
if(N_random_gals>NGals):
N_random_gals=NGals
random.seed(a=1)
random_list = random.sample(range(0, NGals), N_random_gals)
for ii in random_list:
slice_ii = [x*NGals+ii for x in range(0,RNUM)]
xx = x_variable[slice_ii]
yy = np.log10(y_variable[slice_ii])
#ignore galaxies without halflightradius or with nan on the y_variable
sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
if(len(xx[sel])>3):
f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
interpol_y_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = f(new_x_var)
interpol_x_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = new_x_var
sel = interpol_x_variable>0.
(x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
interpol_y_variable[sel])
subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2)
#WRITE OUTPUT
if(write_to_file==1):
df = pd.DataFrame({'log10_M':x_binned, 'HalfMassRadius':median, 'pc16':median-rms, 'pc84':median+rms})
file = Datadir+file_to_write+'All_Gradients_'+labels[plot_prop] + str(f'_M{mass_low[k_mass]:0.2f}') + \
str(f'_{mass_high[k_mass]:0.2f}') + str(f'_z{ThisRedshiftList[i_z]:0.2f}') + '.csv'
df.to_csv(file,index=False)
#df = pd.read_csv(file)
#subplot.plot(df['log10_M'],df['HalfMassRadius'], color='black')
#end loop on mass bins
#Observations
if(k_mass == MassBins-1):
if(plot_prop==0 or plot_prop==1):
#CALIFA (stellar surface densities)
CALIFA_mass_low =[9.1,9.6,10.1,10.6,11.2]
CALIFA_mass_high=[9.6,10.1,10.6,10.9,11.5]
CALIFA_mass_index=[0,1,2,3,5]
obs_rings=np.arange(0.05,2.8,0.1)
for k_mass in range(0,len(CALIFA_mass_low)):
char_mass_low="%0.1f" % CALIFA_mass_low[k_mass]
char_mass_high="%0.1f" % CALIFA_mass_high[k_mass]
char_k_mass="%d" % CALIFA_mass_index[k_mass]
k_color = plot_color[k_mass]
if(plot_prop==0):
file = Datadir+'/CALIFA_rosa_stellar_density_'+char_k_mass+'_'+char_mass_low+'_'+char_mass_high+'.txt'
obs = Table.read(file, format='ascii')
sel=obs['stellar_dens']!=0.0
subplot.plot(obs_rings[sel],obs['stellar_dens'][sel], color=k_color, linestyle='--',linewidth=2)
if(plot_prop==1):
file = Datadir + '/CALIFA_rosa_metals_'+char_k_mass+'_'+char_mass_low+'_'+char_mass_high+'.txt'
obs = Table.read(file, format='ascii')
sel=obs['metallicity']!=0.0
subplot.plot(obs_rings[sel],obs['metallicity'][sel], color=k_color,linestyle='--',linewidth=2)
'''file = Datadir + '/MANGA_gradients/goddard2016_LT_new_MAD.txt'
obs = Table.read(file, format='ascii')
subplot.plot(obs['Radius'],obs['MW_Metallicity_M1'], linewidth=2, linestyle=':', color=plot_color[0])
subplot.plot(obs['Radius'],obs['MW_Metallicity_M2'], linewidth=2, linestyle=':', color=plot_color[2])
subplot.plot(obs['Radius'],obs['MW_Metallicity_M3'], linewidth=2, linestyle=':', color=plot_color[3])'''
if(plot_prop==3):
df = pd.read_csv(Datadir+'ferrer2019/ferrer2019_dop_lowmass.csv')
subplot.errorbar(df['radius']+0.1, df['metallicity'], xerr=df['err_x'],
yerr=[df['err_y_down'],df['err_y_up']], fmt='o', markersize=5,
color=plot_color[1], ecolor=plot_color[1],capsize=2)
df = pd.read_csv(Datadir+'ferrer2019/ferrer2019_dop_intermass.csv')
subplot.errorbar(df['radius']+0.1, df['metallicity'], xerr=df['err_x'],
yerr=[df['err_y_down'],df['err_y_up']], fmt='o', markersize=5,
color=plot_color[2], ecolor=plot_color[2],capsize=2)
df = pd.read_csv(Datadir+'ferrer2019/ferrer2019_dop_highmass.csv')
subplot.errorbar(df['radius']+0.1, df['metallicity'], xerr=df['err_x'],
yerr=[df['err_y_down'],df['err_y_up']], fmt='o', markersize=5,
color=plot_color[4], ecolor=plot_color[4],capsize=2)
#labels
if(plot_prop==0):
plot_label (subplot, 'label',xlim,ylim,x_percentage=0.26,y_percentage=0.9,
color='black',xlog=0,ylog=0,label='$\log_{10}(M_*/M_{\odot})=$', fontsize=10,fontweight='normal')
for k_mass in range(0,MassBins):
char_mass_low="%0.1f" % mass_low[k_mass]
char_mass_high="%0.1f" % mass_high[k_mass]
x_values=[0.72, 0.36, 0.655, 0.33, 0.655]
y_values=[0.9,0.82,0.82,0.74,0.74]
label='['+char_mass_low+','+char_mass_high+']'
plot_label (subplot, 'label',xlim,ylim,x_percentage=x_values[k_mass],y_percentage=y_values[k_mass],
color=plot_color[k_mass],xlog=0,ylog=0,label=label,fontsize=10,fontweight='normal')
if(plot_prop==1):
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.7, y_percentage=0.85,
color='black', xlog=0, ylog=0,label='CALIFA', fontsize=11, fontweight='normal')
plot_label (subplot,'line',xlim,ylim,x_percentage=0.57,y_percentage=0.88,
color='red',x2_percentage=0.67,xlog=0,ylog=0,linestyle='--',linewidth=2)
'''plot_label (subplot, 'label', xlim, ylim, x_percentage=0.7, y_percentage=0.78,
color='black', xlog=0, ylog=0,label='MaNGA', fontsize=11, fontweight='normal')
plot_label (subplot,'line',xlim,ylim,x_percentage=0.57,y_percentage=0.8,
color='darkorange',x2_percentage=0.67,xlog=0,ylog=0,linestyle=':',linewidth=2)'''
if(plot_prop==3):
plot_label (subplot, 'label', xlim, ylim, x_percentage=0.55, y_percentage=0.87,
color='black', xlog=0, ylog=0,label='MUSE - MAD', fontsize=11, fontweight='normal')
plot_label (subplot, 'symbol', xlim, ylim, x_percentage=0.5, y_percentage=0.9,
color='red', xlog=0, ylog=0, sym='o', sym_size=5, err_size=0.1)
#end loop on properties to plot
plt.tight_layout()
current_function = inspect.getframeinfo(inspect.currentframe()).function
plt.savefig('./fig/plots_'+current_function+'.pdf')
plt.savefig('./fig/HYF19_all_gradients.pdf')
plt.close()
return
#end all_gradients
def gas_gradients(ThisRedshiftList):
    """Plot radial profiles in stellar-mass bins at the first redshift in
    ThisRedshiftList:
      panel 0 - stellar surface density
      panel 1 - cold-gas surface density
      panel 2 - cold-gas oxygen abundance, 12+log10(O/H)

    Each galaxy's ring profile is spline-interpolated onto a common r/r_e grid
    (for a seeded random subsample) and the median profile per mass bin is
    drawn; MUSE gas-metallicity data (Erroz-Ferrer et al.) are overplotted on
    the metallicity panel.

    Writes ./fig/plots_gas_gradients.pdf and ./fig/HYJ18_gas_gradients.pdf.
    """
    ii=0
    plot_color=['purple', 'blue', 'green', 'darkorange', 'red']

    fig = plt.figure(figsize=(three_one_size_small[0],three_one_size_small[1]))
    grid = gridspec.GridSpec(3, 1)
    grid.update(wspace=0.0, hspace=0.0)

    MassBins = 5
    mass_low  = [9.0, 9.5, 10.0, 10.5, 11.0]
    mass_high = [9.5, 10.0, 10.5, 11.0, 11.5]

    (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    G0_MR_unsel = G_MR[sel]
    G0_MR_unsel = G0_MR_unsel[np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>9.0]

    #SFH bin structure (lookback time of each star-formation-history bin)
    #NOTE(review): SFH is read but not used below (no age panel in this plot)
    fa = open(DirName_MR+"SFH_Bins","rb")
    nbins = np.fromfile(fa,np.int32,1)
    template = np.dtype([('SnapNum',np.int32,1), ('Bin',np.int32,1), ('Lookbacktime',np.float64,1),
                         ('dt',np.float64,1),('nbins',np.int32,1)])
    SFH = np.fromfile(fa,template,int(nbins))
    fa.close()
    SFH=SFH[SFH['SnapNum']==G0_MR_unsel['SnapNum'][0]]

    i_grid=0
    Nprops=3
    #loop on quantity to plot
    for plot_prop in range (0, Nprops):
        subplot=plt.subplot(grid[i_grid])
        i_grid+=1

        if(plot_prop==0):
            #***************************
            #* Stellar surface density *
            #***************************
            ylim=[1.5, 5.0]
            ylab='$\log_{10}(\Sigma_*[M_{\odot}/\mathrm{pc^2}])$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        if(plot_prop==1):
            #***************************
            #* ColdGas surface density *
            #***************************
            ylim=[0.0, 2.5]
            ylab='$\log_{10}(\Sigma_{\mathrm{cold}}[M_{\odot}/\mathrm{pc^2}])$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.5))
        if(plot_prop==2):
            #***************************
            #* Cold Gas metallicity    *
            #***************************
            ylim=[7.5, 9.4]
            ylab='$12 + \log_{10}(\mathrm{O/H})_{\mathrm{cold}}$'
            subplot.yaxis.set_major_locator(MultipleLocator(0.5))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))

        xlim=[0.0, 2.0]
        subplot.set_ylim(ylim),subplot.set_xlim(xlim)
        xlab='$r/r_{e}$'
        subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
        if((plot_prop==0) or (plot_prop==1)):
            #only the bottom panel keeps its x tick labels
            plt.tick_params(axis='x', which='both', bottom=True, labelbottom=False)

        #For comparison with MaNGA metallicity gradients select only galaxies with non-zero metallicity
        if(plot_prop==2):
            if(opt_detailed_enrichment==1):
                G0_MR_unsel=G0_MR_unsel[((G0_MR_unsel['MetalsColdGas'][:,0] +
                                          G0_MR_unsel['MetalsColdGas'][:,1] +
                                          G0_MR_unsel['MetalsColdGas'][:,2])>.0)]
            else:
                G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['MetalsColdGas']>.0)]

        #loop on mass bins
        for k_mass in range(0,len(mass_low)):
            G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
            NGals=len(G0_MR)
            x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)

            new_x_var = np.arange(xlim[0],xlim[1],0.1)
            interpol_x_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)
            interpol_y_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)

            #loop on radial bins: entries [NGals*jj:NGals*(jj+1)] hold ring jj for all galaxies
            for jj in range(0,RNUM):
                #x axis: ring radius normalised by half-light (stellar) or gas-disk radius
                if(plot_prop==0):
                    x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfLightRadius']*1000./Hubble_h)
                else:
                    x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['GasDiskRadius']*1000./Hubble_h)

                #***************************
                #* Stellar surface density *
                #***************************
                if(plot_prop==0):
                    Mass_this_ring=(G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h)+(G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h)
                    #1e6 -> from kpc^2 to pc^2
                    if(jj==0):
                        y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*RingRadius[0]**2*1e6)
                    else:
                        y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)

                #***************************
                #* ColdGas surface density *
                #***************************
                if(plot_prop==1):
                    Mass_this_ring=(G0_MR['ColdGasRings'][:,jj]*1e10/Hubble_h)
                    #1e6 -> from kpc^2 to pc^2
                    if(jj==0):
                        y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*RingRadius[0]**2*1e6)
                    else:
                        y_variable[NGals*jj:NGals*(jj+1)]=Mass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)

                #***************************
                #* Gas metallicity         *
                #***************************
                if(plot_prop==2):
                    #converted to 12+log10(O/H) assuming Z_sun=0.0134 and 12+log10(O/H)_sun=8.69
                    if(opt_detailed_enrichment==1):
                        MetalsColdGas_this_ring=(G0_MR['MetalsColdGasRings'][:,jj,0] +
                                                 G0_MR['MetalsColdGasRings'][:,jj,1] +
                                                 G0_MR['MetalsColdGasRings'][:,jj,2])
                    else:
                        MetalsColdGas_this_ring=G0_MR['MetalsColdGasRings'][:,jj]
                    y_variable[NGals*jj:NGals*(jj+1)] = 10**(np.log10(MetalsColdGas_this_ring /
                                                                     G0_MR['ColdGasRings'][:,jj]/0.0134) + 8.69)
            #endfor RNUM

            #interpolate each galaxy's profile onto the common grid (random subsample, fixed seed)
            N_random_gals = 20000
            if(N_random_gals>NGals):
                N_random_gals=NGals
            random.seed(a=1)
            random_list = random.sample(range(0, NGals), N_random_gals)
            for ii in random_list:
                #all RNUM rings of galaxy ii (was hard-coded 12; use RNUM for consistency
                #with how x_variable/y_variable are filled above)
                slice_ii = [x*NGals+ii for x in range(0,RNUM)]
                xx = x_variable[slice_ii]
                yy = np.log10(y_variable[slice_ii])
                #ignore galaxies without halflightradius or with nan on the y_variable
                sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                #default-cubic UnivariateSpline needs at least 4 points
                if(len(xx[sel])>3):
                    f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                    interpol_y_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = f(new_x_var)
                    interpol_x_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = new_x_var

            sel = interpol_x_variable>0.
            (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                     interpol_y_variable[sel])
            subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2)
        #end loop on mass bins

        #Observations (k_mass keeps its final loop value here, so the second
        #condition is always true and this reduces to plot_prop==2)
        if(plot_prop==2 and k_mass ==MassBins-1):
            df = pd.read_csv(Datadir+'erroz_ferrer_dopita_bin1.csv')
            subplot.errorbar(df['radius'], df['metallicity'], xerr=0.1, yerr=[-df['error_down'],df['error_up']],
                             fmt='o', markersize=5, color=plot_color[1], ecolor=plot_color[1])
            df = pd.read_csv(Datadir+'erroz_ferrer_dopita_bin3.csv')
            subplot.errorbar(df['radius'], df['metallicity'], xerr=0.1, yerr=[-df['error_down'],df['error_up']],
                             fmt='o', markersize=5, color=plot_color[4], ecolor=plot_color[4])

        #labels (mass-bin legend, top panel only)
        if(plot_prop==0):
            plot_label (subplot, 'label',xlim,ylim,x_percentage=0.17,y_percentage=0.9,
                        color='black',xlog=0,ylog=0,label='$\log_{10}(M_*[M_{\odot}])=$', fontsize=12,fontweight='normal')
            for k_mass in range(0,MassBins):
                char_mass_low="%0.1f" % mass_low[k_mass]
                char_mass_high="%0.1f" % mass_high[k_mass]
                x_values=[0.54, 0.745, 0.2, 0.46, 0.72]
                y_values=[0.9,0.9,0.8,0.8,0.8]
                label='['+char_mass_low+','+char_mass_high+']'
                plot_label (subplot, 'label',xlim,ylim,x_percentage=x_values[k_mass],y_percentage=y_values[k_mass],
                            color=plot_color[k_mass],xlog=0,ylog=0,label=label,fontsize=12,fontweight='normal')
    #end loop on properties to plot

    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_gas_gradients.pdf')
    plt.close()
    return
#end gas_gradients
def MANGA_CALIFA_gradients(ThisRedshiftList):
    """Compare model radial gradients with CALIFA and MaNGA in three
    stellar-mass bins:
      panel 0 - stellar surface density (vs CALIFA)
      panel 1 - mass-weighted stellar age (vs MaNGA, late types only)
      panel 2 - disk stellar metallicity (vs MaNGA, late types only)

    Per-galaxy ring profiles are spline-interpolated onto a common r/r_e grid
    for a random subsample and the median per mass bin is plotted.

    Writes ./fig/plots_MANGA_CALIFA_gradients.pdf and
    ./fig/HYJ18_MANGA_CALIFA_gradients.pdf.
    """
    ii=0
    plot_color=['blue', 'green', 'red']

    fig = plt.figure(figsize=(three_one_size_small[0],three_one_size_small[1]))
    grid = gridspec.GridSpec(3, 1)
    grid.update(wspace=0.0, hspace=0.0)

    MassBins = 3
    mass_low  = [9.0,10.0,10.5]
    mass_high = [10.0,10.5,11.0]

    (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    G0_MR_unsel = G_MR[sel]
    G0_MR_unsel = G0_MR_unsel[np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>9.0]

    #SFH structure needed for age gradients
    fa = open(DirName_MR+"SFH_Bins","rb")
    nbins = np.fromfile(fa,np.int32,1)
    template = np.dtype([('SnapNum',np.int32,1), ('Bin',np.int32,1), ('Lookbacktime',np.float64,1),
                         ('dt',np.float64,1),('nbins',np.int32,1)])
    SFH = np.fromfile(fa,template,int(nbins))
    fa.close()
    SFH=SFH[SFH['SnapNum']==G0_MR_unsel['SnapNum'][0]]

    i_grid=0
    Nprops=3
    #loop on quantity to plot
    for plot_prop in range (0, Nprops):
        subplot=plt.subplot(grid[i_grid])
        i_grid+=1

        if(plot_prop==0):
            #***************************
            #* stellar surface density *
            #***************************
            ylim=[0.5, 5.0]
            ylab='$\log_{10}(\Sigma_*[M_{\odot}/\mathrm{pc^2}])$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        if(plot_prop==1):
            #***************************
            #* Mass-weighted age       *
            #***************************
            ylim=[3.0, 9.5]
            ylab='$Age_{MW}(Gyr)$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.5))
        if(plot_prop==2):
            #***************************
            #* stellar metallicity     *
            #***************************
            ylim=[-1., 0.4]
            ylab='$\mathrm{log_{10}}(Z_*/Z_{\odot})$'
            subplot.yaxis.set_major_locator(MultipleLocator(0.5))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))

        xlim=[0.0, 2.0]
        subplot.set_ylim(ylim),subplot.set_xlim(xlim)
        xlab='$r/r_{e}$'
        subplot.set_xlabel(xlab,fontsize=14), subplot.set_ylabel(ylab,fontsize=14)
        if((plot_prop==0) or (plot_prop==1)):
            #only the bottom panel keeps its x tick labels
            plt.tick_params(axis='x', which='both', bottom=True, labelbottom=False)

        #OBSERVATIONS
        CALIFA_mass_low=[9.1,10.1,10.6]
        CALIFA_mass_high=[9.6,10.6,10.9]
        CALIFA_mass_index=[0,2,3]
        obs_rings=np.arange(0.05,2.8,0.1)
        #CALIFA (surface densities)
        if(plot_prop==0):
            for k_mass in range(0,len(CALIFA_mass_low)):
                char_mass_low="%0.1f" % CALIFA_mass_low[k_mass]
                char_mass_high="%0.1f" % CALIFA_mass_high[k_mass]
                char_k_mass="%d" % CALIFA_mass_index[k_mass]
                file = Datadir+'/CALIFA_rosa_stellar_density_'+char_k_mass+'_'+char_mass_low+'_'+char_mass_high+'.txt'
                obs = Table.read(file, format='ascii')
                sel=obs['stellar_dens']!=0.0
                subplot.plot(obs_rings[sel],obs['stellar_dens'][sel], color=plot_color[k_mass],linestyle='--',linewidth=2)
        #MaNGA (ages and metallicities)
        if(plot_prop==1 or plot_prop==2):
            file = Datadir + '/MANGA_gradients/goddard2016_LT_new_MAD.txt'
            obs = Table.read(file, format='ascii')
            if(plot_prop==1):
                #hard-coded MaNGA mass-weighted age profiles per mass bin (log10 Gyr)
                obs_radius = np.array([0.121, 0.407, 0.663, 0.836, 1.069, 1.240, 1.435])
                obs_MW_age = np.array([0.765, 0.733, 0.717, 0.702, 0.693, 0.693, 0.691])
                subplot.plot(obs_radius, 10**obs_MW_age, linewidth=2, linestyle='--', color=plot_color[0])
                obs_radius = np.array([0.109, 0.245, 0.345, 0.523, 0.703, 0.894, 1.100, 1.281, 1.439])
                obs_MW_age = np.array([0.736, 0.719, 0.720, 0.674, 0.676, 0.684, 0.689, 0.684, 0.681])
                subplot.plot(obs_radius, 10**obs_MW_age, linewidth=2, linestyle='--', color=plot_color[1])
                obs_radius = np.array([0.113, 0.225, 0.320, 0.472, 0.632, 0.765, 0.961, 1.109, 1.246, 1.389, 1.440])
                obs_MW_age = np.array([0.684, 0.708, 0.732, 0.721, 0.699, 0.693, 0.692, 0.710, 0.724, 0.726, 0.731])
                subplot.plot(obs_radius, 10**obs_MW_age, linewidth=2, linestyle='--', color=plot_color[2])
            elif(plot_prop==2):
                subplot.plot(obs['Radius'],obs['MW_Metallicity_M1'], linewidth=2, linestyle='--', color=plot_color[0])
                subplot.plot(obs['Radius'],obs['MW_Metallicity_M2'], linewidth=2, linestyle='--', color=plot_color[1])
                subplot.plot(obs['Radius'],obs['MW_Metallicity_M3'], linewidth=2, linestyle='--', color=plot_color[2])

        #For comparison with MaNGA metallicity gradients select only galaxies with non-zero metallicity
        if(plot_prop==2):
            if(opt_detailed_enrichment==1):
                G0_MR_unsel=G0_MR_unsel[((G0_MR_unsel['MetalsDiskMass'][:,0] +
                                          G0_MR_unsel['MetalsDiskMass'][:,1] +
                                          G0_MR_unsel['MetalsDiskMass'][:,2])>.0)]
            else:
                G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['MetalsDiskMass']>.0)]
        #For MaNGA comparison with metallicity and age gradients select only late-type galaxies
        #(note: this progressively narrows G0_MR_unsel for the later panels)
        if(plot_prop==1 or plot_prop==2):
            G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['DiskMass']/G0_MR_unsel['StellarMass']>0.8)]

        #loop on mass bins
        for k_mass in range(0,len(mass_low)):
            G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
            NGals=len(G0_MR)
            x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)

            new_x_var = np.arange(xlim[0],xlim[1],0.1)
            interpol_x_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)
            interpol_y_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)

            #loop on radial bins: entries [NGals*jj:NGals*(jj+1)] hold ring jj for all galaxies
            for jj in range(0,RNUM):
                x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfLightRadius']*1000./Hubble_h)

                #***************************
                #* stellar surface density *
                #***************************
                if(plot_prop==0):
                    StellarMass_this_ring=(G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h +
                                           G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h)
                    #1e6 -> from kpc^2 to pc^2
                    if(jj==0):
                        y_variable[NGals*jj:NGals*(jj+1)]=StellarMass_this_ring/(3.14*RingRadius[0]**2*1e6)
                    else:
                        y_variable[NGals*jj:NGals*(jj+1)]=StellarMass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)

                #***************************
                #* Mass-weighted AGE       *
                #***************************
                if(plot_prop==1):
                    #disk age in Gyr, weighting SFH bins by lookback time;
                    #(1.-0.43) is the stellar mass-loss correction applied elsewhere too
                    age=np.zeros(NGals)
                    for ii in range(0,len(SFH)):
                        age+=SFH['Lookbacktime'][ii]*(G0_MR['sfh_DiskMassRings'][:,jj,ii]*(1.-0.43))
                    y_variable[NGals*jj:NGals*(jj+1)] = age / G0_MR['DiskMassRings'][:,jj] / 1.e9

                #***************************
                #* stellar metallicity     *
                #***************************
                if(plot_prop==2):
                    #disk metallicity in solar units (Z_sun=0.02)
                    if(opt_detailed_enrichment==1):
                        MetalsDiskMass_this_ring=(G0_MR['MetalsDiskMassRings'][:,jj,0] +
                                                  G0_MR['MetalsDiskMassRings'][:,jj,1] +
                                                  G0_MR['MetalsDiskMassRings'][:,jj,2])
                    else:
                        MetalsDiskMass_this_ring=G0_MR['MetalsDiskMassRings'][:,jj]
                    y_variable[NGals*jj:NGals*(jj+1)]= MetalsDiskMass_this_ring/G0_MR['DiskMassRings'][:,jj]/0.02
            #endfor RNUM

            #interpolate each galaxy's profile onto the common grid.
            #NOTE(review): unlike gas_gradients, no random.seed is set here, so the
            #subsample (and the plotted medians) differ run-to-run - verify intent
            N_random_gals = 5000
            if(N_random_gals>NGals):
                N_random_gals=NGals
            random_list = random.sample(range(0, NGals), N_random_gals)
            for ii in random_list:
                #all RNUM rings of galaxy ii (was hard-coded 12; use RNUM for consistency
                #with how x_variable/y_variable are filled above)
                slice_ii = [x*NGals+ii for x in range(0,RNUM)]
                xx = x_variable[slice_ii]
                if(plot_prop==1):
                    yy = y_variable[slice_ii]
                else:
                    yy = np.log10(y_variable[slice_ii])
                #ignore galaxies without halflightradius or with nan on the y_variable
                sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                #default-cubic UnivariateSpline needs at least 4 points
                #(was >0, which raises for 1-3 valid points; now matches gas_gradients)
                if(len(xx[sel])>3):
                    f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                    interpol_y_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = f(new_x_var)
                    interpol_x_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = new_x_var

            sel = interpol_x_variable>0.
            (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                     interpol_y_variable[sel])
            subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2)
        #end loop on mass bins

        #labels (mass-bin legend, top panel only)
        if(plot_prop==0):
            plot_label (subplot, 'label',xlim,ylim,x_percentage=0.07,y_percentage=0.9,
                        color='black',xlog=0,ylog=0,label='$M_{\odot}=$', fontsize=12,fontweight='normal')
            for k_mass in range(0,MassBins):
                char_mass_low="%0.1f" % mass_low[k_mass]
                char_mass_high="%0.1f" % mass_high[k_mass]
                x_values=[0.2,0.44,0.71]
                y_values=[0.9,0.9,0.9]
                label='['+char_mass_low+','+char_mass_high+']'
                plot_label (subplot, 'label',xlim,ylim,x_percentage=x_values[k_mass],y_percentage=y_values[k_mass],
                            color=plot_color[k_mass],xlog=0,ylog=0,label=label,fontsize=12,fontweight='normal')
        #model/observations legend on every panel
        if(plot_prop==0):
            yy = 0.8
            obs_label = 'CALIFA'
        else:
            yy = 0.9
            obs_label = 'MaNGA'
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.69, y_percentage=yy,
                    color='black', xlog=0, ylog=0,label=prefix_this_model, fontsize=13, fontweight='normal')
        plot_label (subplot,'line',xlim,ylim,x_percentage=0.6,y_percentage=yy+0.02,
                    color='red',x2_percentage=0.67,xlog=0,ylog=0,linestyle='-',linewidth=2)
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.69, y_percentage=yy-0.1,
                    color='black', xlog=0, ylog=0,label=obs_label, fontsize=13, fontweight='normal')
        plot_label (subplot,'line',xlim,ylim,x_percentage=0.6,y_percentage=yy-0.08,
                    color='red',x2_percentage=0.67,xlog=0,ylog=0,linestyle='--',linewidth=2)
    #end loop on properties to plot

    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_MANGA_CALIFA_gradients.pdf')
    plt.close()
    return
#end MANGA_CALIFA_gradients
def CALIFA_gradients_morph_types(ThisRedshiftList):
    """Plot radial gradients per morphological type (E..Sd) against CALIFA:
      panel 0 - stellar metallicity log10(Z*/Zsun)
      panel 1 - stellar surface density
      panel 2 - light-weighted stellar age

    Morphology is proxied by the disk-to-total stellar mass fraction
    (disk_fraction bin edges) combined with a stellar-mass cut per type;
    CALIFA profiles are overplotted as dashed lines.

    Writes ./fig/plots_CALIFA_gradients_morph_types.pdf and
    ./fig/HYJ18_CALIFA_gradients_morph_types.pdf.
    """
    ii=0
    plot_color=['brown','red','orange','green','lightblue','blue','darkblue']

    fig = plt.figure(figsize=(three_one_size_small[0],three_one_size_small[1]))
    grid = gridspec.GridSpec(3, 1)
    grid.update(wspace=0.0, hspace=0.0)

    #disk-fraction bin edges defining the morphology proxy, plus a mass cut per type
    disk_fraction=[0.0,0.05,0.1,0.2,0.4,0.6,0.8,1.0]
    morph_types=['E','S0','Sa','Sb','Sbc','Sc','Sd']
    obs_morph_types=['0_E','1_S0','2_Sa','3_Sb','4_Sbc','5_Sc','6_Sd']
    mass_low=[11.2,10.9,10.9,10.9,10.6,10.1,9.1]
    mass_high=[11.5,11.2,11.2,11.2,10.9,10.6,9.6]

    Nprops=3
    for plot_prop in range (0,Nprops):
        subplot=plt.subplot(grid[plot_prop])

        xlim=[0.0,3.]
        xlab='$r/r_{d}$'
        subplot.set_xlabel(xlab,fontsize=14)
        subplot.xaxis.set_major_locator(MultipleLocator(1.0))
        subplot.xaxis.set_minor_locator(MultipleLocator(0.1))

        if(plot_prop==0):
            #***************************
            #* stellar metallicity     *
            #***************************
            ylim=[-1., 0.5]
            ylab='$\mathrm{log_{10}}(Z_*/Z_{\odot})$'
            subplot.yaxis.set_major_locator(MultipleLocator(0.5))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        if(plot_prop==1):
            #***************************
            #* stellar surface density *
            #***************************
            ylim=[0.5, 4.5]
            ylab='$\log_{10}(\Sigma_*[M_{\odot}/\mathrm{pc^2}])$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        if(plot_prop==2):
            #***************************
            #* light-weighted age      *
            #***************************
            ylim=[8.0, 10.5]
            ylab='$\log_{10}(Age_{LW}[yr])$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))

        subplot.set_ylim(ylim),subplot.set_xlim(xlim)
        subplot.set_ylabel(ylab,fontsize=14)
        subplot.xaxis.set_major_formatter(FormatStrFormatter('%.1f'))
        subplot.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
        if((plot_prop==0) or (plot_prop==1)):
            #boolean args: the string forms 'on'/'off' were removed in matplotlib 3.x;
            #this also matches the other gradient plots in this file
            plt.tick_params(axis='x', which='both', bottom=True, labelbottom=False)

        #OBSERVATIONS: CALIFA radial profiles per morphological type
        for k_type in range(0,len(morph_types)):
            obs_rings=np.arange(0.05,2.8,0.1)
            if(plot_prop==0):
                file = Datadir + '/CALIFA_rosa_metals_'+obs_morph_types[k_type]+'.txt'
                obs = Table.read(file, format='ascii')
                sel=obs['metallicity']!=0.0
                subplot.plot(obs_rings[sel],obs['metallicity'][sel], color=plot_color[k_type],linestyle='--',linewidth=2)
            if(plot_prop==1):
                file = Datadir + '/CALIFA_rosa_stellar_density_'+obs_morph_types[k_type]+'.txt'
                obs = Table.read(file, format='ascii')
                sel=obs['stellar_dens']!=0.0
                subplot.plot(obs_rings[sel],obs['stellar_dens'][sel], color=plot_color[k_type],linestyle='--',linewidth=2)
            if(plot_prop==2):
                file = Datadir + '/CALIFA_rosa_ageL_'+obs_morph_types[k_type]+'.txt'
                obs = Table.read(file, format='ascii')
                sel=obs['age_L']!=0.0
                subplot.plot(obs_rings[sel],obs['age_L'][sel], color=plot_color[k_type],linestyle='--',linewidth=2)

        #Model: keep only galaxies with non-zero stellar (disk) metal content
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR_unsel=G_MR[sel]
        if(opt_detailed_enrichment==1):
            G0_MR_unsel=G0_MR_unsel[((G0_MR_unsel['MetalsDiskMass'][:,0] +
                                      G0_MR_unsel['MetalsDiskMass'][:,1] +
                                      G0_MR_unsel['MetalsDiskMass'][:,2])>.0)]
        else:
            G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['MetalsDiskMass']>.0)]

        for k_type in range(0,len(morph_types)):
            G0_MR=G0_MR_unsel[(G0_MR_unsel['DiskMass']/G0_MR_unsel['StellarMass']>disk_fraction[k_type]) &
                              (G0_MR_unsel['DiskMass']/G0_MR_unsel['StellarMass']<disk_fraction[k_type+1]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_type]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_type])]
            NGals=len(G0_MR)
            if(NGals>0):
                x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
                y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
                r_bulge=G0_MR['BulgeSize']*1000./Hubble_h #From Mpc/h to kpc

                #SFH structure needed for age gradients (re-read per type so the
                #SnapNum filtering below always starts from the full table)
                fa = open(DirName_MR+"SFH_Bins","rb")
                nbins = np.fromfile(fa,np.int32,1)
                template = np.dtype([('SnapNum',np.int32,1),
                                     ('Bin',np.int32,1),
                                     ('Lookbacktime',np.float64,1),
                                     ('dt',np.float64,1),
                                     ('nbins',np.int32,1)
                                     ])
                SFH = np.fromfile(fa,template,int(nbins))
                fa.close()

                for jj in range(0,RNUM):
                    x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfMassRadius']*1000./Hubble_h)

                    #bulge mass in this ring: explicit rings if available, otherwise
                    #the enclosed-mass difference of the analytic profile
                    #m(<R) = R/(R+r_bulge)
                    if(opt_rings_in_bulges==1):
                        BulgeMass_this_ring=G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h
                    else:
                        if(jj==0):
                            r_bulge_m=1.-1./(1.+RingRadius[0]/r_bulge)
                        else:
                            r_bulge_m=(1/(1+RingRadius[jj-1]/r_bulge)-1/(1+RingRadius[jj]/r_bulge))
                        BulgeMass_this_ring=G0_MR['BulgeMass']*r_bulge_m*1e10/Hubble_h
                        BulgeMass_this_ring[r_bulge==0.]=0.
                    StellarMass_this_ring=G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h+BulgeMass_this_ring

                    #***************************
                    #* stellar metallicity     *
                    #***************************
                    if(plot_prop==0):
                        #METALS (disk + bulge) in this ring, in solar units (Z_sun=0.02)
                        if(opt_rings_in_bulges==1):
                            if(opt_detailed_enrichment==1):
                                MetalsBulgeMass_this_ring=(G0_MR['MetalsBulgeMassRings'][:,jj,0] +
                                                           G0_MR['MetalsBulgeMassRings'][:,jj,1] +
                                                           G0_MR['MetalsBulgeMassRings'][:,jj,2])
                            else:
                                MetalsBulgeMass_this_ring=G0_MR['MetalsBulgeMassRings'][:,jj]
                        else:
                            if(opt_detailed_enrichment==1):
                                MetalsBulgeMass_this_ring=(G0_MR['MetalsBulgeMass'][:,0]+
                                                           G0_MR['MetalsBulgeMass'][:,1]+
                                                           G0_MR['MetalsBulgeMass'][:,2])*r_bulge_m
                            else:
                                MetalsBulgeMass_this_ring=G0_MR['MetalsBulgeMass']*r_bulge_m
                            MetalsBulgeMass_this_ring[r_bulge==0]=0.
                        if(opt_detailed_enrichment==1):
                            MetalsDiskMass_this_ring=(G0_MR['MetalsDiskMassRings'][:,jj,0] +
                                                      G0_MR['MetalsDiskMassRings'][:,jj,1] +
                                                      G0_MR['MetalsDiskMassRings'][:,jj,2])
                        else:
                            MetalsDiskMass_this_ring=G0_MR['MetalsDiskMassRings'][:,jj]
                        MetalsStellarMass_this_ring=(MetalsDiskMass_this_ring+MetalsBulgeMass_this_ring)*1e10/Hubble_h
                        y_variable[NGals*jj:NGals*(jj+1)]= MetalsStellarMass_this_ring/StellarMass_this_ring/0.02

                    #***************************
                    #* stellar surface density *
                    #***************************
                    if(plot_prop==1):
                        #1e6 -> from kpc^2 to pc^2
                        if(jj==0):
                            y_variable[NGals*jj:NGals*(jj+1)]=StellarMass_this_ring/(3.14*RingRadius[0]**2*1e6)
                        else:
                            y_variable[NGals*jj:NGals*(jj+1)]=StellarMass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)

                    #***************************
                    #* mass-weighted AGE       *
                    #***************************
                    if(plot_prop==2):
                        #we only need the SFH structure from the current snap
                        SFH=SFH[SFH['SnapNum']==G0_MR['SnapNum'][0]]
                        #(1.-0.43) is the stellar mass-loss correction
                        age=np.zeros(NGals)
                        for ii in range(0,len(SFH)):
                            sel=G0_MR['sfh_DiskMassRings'][:,jj,ii]>0.
                            age[sel]+=SFH['Lookbacktime'][ii]*(G0_MR['sfh_DiskMassRings'][sel,jj,ii]*
                                                               (1.-0.43))*1e10/Hubble_h
                            if(opt_rings_in_bulges==1):
                                sel=G0_MR['sfh_BulgeMassRings'][:,jj,ii]>0.
                                age[sel]+=SFH['Lookbacktime'][ii]*(G0_MR['sfh_BulgeMassRings'][sel,jj,ii]*
                                                                   (1.-0.43))*1e10/Hubble_h
                            else:
                                #NOTE(review): this accumulates the bulge term once per
                                #SFH bin (len(SFH) times), not once - looks unintended;
                                #verify against the original indentation
                                age+=G0_MR['MassWeightAge']*1e9*BulgeMass_this_ring
                        #scale the mass-weighted ages by the global light-weighted age
                        y_variable[NGals*jj:NGals*(jj+1)] = (age/StellarMass_this_ring *
                                                             G0_MR['rBandWeightAge']/G0_MR['MassWeightAge'])
                #endfor RNUM

                bin=0.1
                sel=y_variable>0.
                if(len(y_variable[sel])>0.):
                    (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles (bin, xlim[0], xlim[1],
                                                                                     x_variable[sel], y_variable[sel])
                    subplot.plot(x_binned, np.log10(median), color=plot_color[k_type], linewidth=2)

            #labels
            x_values=[0.1,0.16,0.25,0.34,0.43,0.56,0.65]
            if(plot_prop==0):
                label=morph_types[k_type]
                plot_label (subplot, 'label',xlim,ylim,x_percentage=x_values[k_type],y_percentage=0.85,
                            color=plot_color[k_type],xlog=0,ylog=0,label=label,fontsize=15,fontweight='normal')
            if(plot_prop==1):
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.69, y_percentage=0.9, color='black',
                            xlog=0, ylog=0,label=prefix_this_model, fontsize=13, fontweight='normal')
                plot_label (subplot,'line',xlim,ylim,x_percentage=0.6,y_percentage=0.92,
                            color='brown',x2_percentage=0.67,xlog=0,ylog=0,linestyle='-',linewidth=2)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.69, y_percentage=0.8,
                            color='black', xlog=0, ylog=0,label='CALIFA', fontsize=13, fontweight='normal')
                plot_label (subplot,'line',xlim,ylim,x_percentage=0.6,y_percentage=0.82,
                            color='brown',x2_percentage=0.67,xlog=0,ylog=0,linestyle='--',linewidth=2)
        #endfor -> morph_types
    #endfor -> plot_prop

    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_CALIFA_gradients_morph_types.pdf')
    plt.close()
    return
#end CALIFA_gradients_morph_types
def CALIFA_gradients_mass_bins(ThisRedshiftList):
    """Plot radial gradients in three stellar-mass bins against CALIFA data.

    Draws three vertically stacked panels, one per property (stellar surface
    density, light-weighted age, stellar metallicity).  In each panel the
    model median profile (solid line) and the CALIFA observations (dashed
    line) are shown for each of the three mass bins.  Individual model
    profiles are spline-interpolated onto a common grid of r/r_e (stellar
    half-light radius) before the median is taken.

    Output: ./fig/plots_CALIFA_gradients_mass_bins.pdf and
    ./fig/HYJ18_CALIFA_gradients_mass_bins.pdf.

    Relies on module-level state (G_MR, FullSnapshotList_MR, Datadir,
    DirName_MR, Hubble_h, RNUM, RingRadius, opt_detailed_enrichment, ...).
    Only the first redshift in ThisRedshiftList (ii=0) is used.
    """
    #plot_color=['darkblue','blue','lightblue','green','orange','red','brown']
    plot_color=['darkblue','green','red']
    fig = plt.figure(figsize=(three_one_size_small[0],three_one_size_small[1]))
    grid = gridspec.GridSpec(3, 1)
    grid.update(wspace=0.0, hspace=0.0)
    #mass_low=[9.1,9.6,10.1,10.6,10.9,11.2,11.5]
    #mass_high=[9.6,10.1,10.6,10.9,11.2,11.5,11.8]
    # log10(M*/Msun) bin edges; must match the CALIFA file-name suffixes below
    mass_low=[9.1,9.6,10.1]
    mass_high=[9.6,10.1,10.6]
    # plot_prop: 0 = stellar surface density, 1 = light-weighted age, 2 = metallicity
    Nprops=3
    for plot_prop in range (0,Nprops):
        subplot=plt.subplot(grid[plot_prop])
        xlim=[0.0,3.]
        xlab='$r/r_{e}$'
        subplot.set_xlabel(xlab,fontsize=14)
        subplot.xaxis.set_major_locator(MultipleLocator(1.0))
        subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
        if(plot_prop==0):
            #***************************
            #* stellar surface density *
            #***************************
            ylim=[0.5, 4.5]
            ylab='$\log_{10}(\Sigma_*[M_{\odot}/\mathrm{pc^2}])$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        if(plot_prop==1):
            #***************************
            #* mass-weighted age *
            #***************************
            # NOTE(review): the next assignment is dead code -- it is
            # immediately overridden by the linear-Gyr range below.
            ylim=[8.0, 10.5]
            ylim=[0.0, 6.0]
            ylab='$Age_{LW}(Gyr)$'
            subplot.yaxis.set_major_locator(MultipleLocator(1.0))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        if(plot_prop==2):
            #***************************
            #* stellar metallicity *
            #***************************
            ylim=[-1., 0.2]
            ylab='$\mathrm{log_{10}}(Z_*/Z_{\odot})$'
            subplot.yaxis.set_major_locator(MultipleLocator(0.5))
            subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        subplot.set_ylim(ylim),subplot.set_xlim(xlim)
        subplot.set_ylabel(ylab,fontsize=14)
        subplot.xaxis.set_major_formatter(FormatStrFormatter('%.1f'))
        subplot.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
        # hide the x tick labels on all but the bottom panel
        if((plot_prop==0) or (plot_prop==1)):
            plt.tick_params(axis='x', which='both', bottom='on', labelbottom='off')
        #OBSERVATIONS
        # radial grid of the CALIFA profiles (r/r_e), one value per table row
        obs_rings=np.arange(0.05,2.8,0.1)
        k_mass_index=[0,1,2]
        for k_mass in range(0,len(mass_low)):
            char_mass_low="%0.1f" % mass_low[k_mass]
            char_mass_high="%0.1f" % mass_high[k_mass]
            char_k_mass="%d" % k_mass_index[k_mass]
            if(plot_prop==0):
                file = Datadir+'/CALIFA_rosa_stellar_density_'+char_k_mass+'_'+char_mass_low+'_'+char_mass_high+'.txt'
                obs = Table.read(file, format='ascii')
                sel=obs['stellar_dens']!=0.0
                subplot.plot(obs_rings[sel],obs['stellar_dens'][sel], color=plot_color[k_mass],linestyle='--',linewidth=2)
            if(plot_prop==1):
                file = Datadir + '/CALIFA_rosa_ageL_'+char_k_mass+'_'+char_mass_low+'_'+char_mass_high+'.txt'
                obs = Table.read(file, format='ascii')
                sel=obs['age_L']!=0.0
                # observed ages are stored as log10(yr); convert to linear Gyr
                subplot.plot(obs_rings[sel],10**obs['age_L'][sel]/1.e9, color=plot_color[k_mass],linestyle='--',linewidth=2)
            if(plot_prop==2):
                file = Datadir + '/CALIFA_rosa_metals_'+char_k_mass+'_'+char_mass_low+'_'+char_mass_high+'.txt'
                obs = Table.read(file, format='ascii')
                sel=obs['metallicity']!=0.0
                subplot.plot(obs_rings[sel],obs['metallicity'][sel], color=plot_color[k_mass],linestyle='--',linewidth=2)
        median_metallicity=np.zeros(RNUM,dtype=np.float32)
        #Model
        ii=0
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR_unsel = G_MR[sel]
        G0_MR_unsel = G0_MR_unsel[np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>9.0]
        #SFH structure needed for age gradients
        fa = open(DirName_MR+"SFH_Bins","rb")
        nbins = np.fromfile(fa,np.int32,1)
        template = np.dtype([('SnapNum',np.int32,1), ('Bin',np.int32,1), ('Lookbacktime',np.float64,1),
                             ('dt',np.float64,1), ('nbins',np.int32,1)])
        SFH = np.fromfile(fa,template,int(nbins))
        fa.close()
        for k_mass in range(0,len(mass_low)):
            # for the metallicity panel, drop galaxies with non-positive disk metals
            # (this progressively narrows G0_MR_unsel across mass bins, but the
            # cut is identical each time so the effect is idempotent)
            if(plot_prop==2):
                if(opt_detailed_enrichment==1):
                    G0_MR_unsel=G0_MR_unsel[((G0_MR_unsel['MetalsDiskMass'][:,0] +
                                              G0_MR_unsel['MetalsDiskMass'][:,1] +
                                              G0_MR_unsel['MetalsDiskMass'][:,2])>.0)]
                else:
                    G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['MetalsDiskMass']>.0)]
            G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[k_mass]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[k_mass])]
            NGals=len(G0_MR)
            # flattened (ring, galaxy) arrays: element NGals*jj+i is ring jj of galaxy i
            x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            new_x_var = np.arange(xlim[0],xlim[1],0.1)
            interpol_x_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)
            interpol_y_variable=np.zeros(int(len(new_x_var)*NGals),dtype=np.float32)
            for jj in range(0,RNUM):
                # radius in units of the stellar half-light radius (Mpc/h -> kpc)
                x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfLightRadius']*1000./Hubble_h)
                StellarMass_this_ring=G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h+G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h
                #***************************
                #* stellar surface density *
                #***************************
                if(plot_prop==0):
                    #1e6 -> from kpc^2 to pc^2
                    if(jj==0):
                        y_variable[NGals*jj:NGals*(jj+1)]=StellarMass_this_ring/(3.14*RingRadius[0]**2*1e6)
                    else:
                        y_variable[NGals*jj:NGals*(jj+1)]=StellarMass_this_ring/(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2)*1e6)
                #***************************
                #* mass-weighted AGE *
                #***************************
                if(plot_prop==1):
                    #we only need the SFH strucutre from the current snap
                    # (filtering SFH in place; idempotent after the first jj pass)
                    SFH=SFH[SFH['SnapNum']==G0_MR['SnapNum'][0]]
                    #if(jj==0):
                    # print(np.log10(SFH['Lookbacktime']))
                    age=np.zeros(NGals)
                    for ii in range(0,len(SFH)):
                        # 0.43 is presumably the recycled mass fraction -- TODO confirm
                        sel=G0_MR['sfh_DiskMassRings'][:,jj,ii]>0.
                        age[sel]+=SFH['Lookbacktime'][ii]*(G0_MR['sfh_DiskMassRings'][sel,jj,ii]*(1.-0.43))*1e10/Hubble_h
                        sel=G0_MR['sfh_BulgeMassRings'][:,jj,ii]>0.
                        age[sel]+=SFH['Lookbacktime'][ii]*(G0_MR['sfh_BulgeMassRings'][sel,jj,ii]*(1.-0.43))*1e10/Hubble_h
                    #to select only star forming rings
                    #sel_SFRrings = G0_MR['SfrRings'][:,jj]==0.
                    #age[sel_SFRrings] = 0.
                    #scale the massweighted ages by the global light weighted age
                    y_variable[NGals*jj:NGals*(jj+1)] = (age/StellarMass_this_ring *
                                                         G0_MR['rBandWeightAge']/G0_MR['MassWeightAge'])/1.e9
                    #y_variable[NGals*jj:NGals*(jj+1)]=age/(G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h)/1.e9
                #***************************
                #* stellar metallicity *
                #***************************
                if(plot_prop==2):
                    #METALS
                    if(opt_detailed_enrichment==1):
                        MetalsBulgeMass_this_ring=(G0_MR['MetalsBulgeMassRings'][:,jj,0] +
                                                   G0_MR['MetalsBulgeMassRings'][:,jj,1] +
                                                   G0_MR['MetalsBulgeMassRings'][:,jj,2])
                    else:
                        MetalsBulgeMass_this_ring=G0_MR['MetalsBulgeMassRings'][:,jj]
                    if(opt_detailed_enrichment==1):
                        MetalsDiskMass_this_ring=(G0_MR['MetalsDiskMassRings'][:,jj,0] +
                                                  G0_MR['MetalsDiskMassRings'][:,jj,1] +
                                                  G0_MR['MetalsDiskMassRings'][:,jj,2])
                    else:
                        MetalsDiskMass_this_ring=G0_MR['MetalsDiskMassRings'][:,jj]
                    MetalsStellarMass_this_ring=(MetalsDiskMass_this_ring+MetalsBulgeMass_this_ring)*1e10/Hubble_h
                    # 0.02 = assumed solar metallicity, giving Z in solar units
                    y_variable[NGals*jj:NGals*(jj+1)]= MetalsStellarMass_this_ring/StellarMass_this_ring/0.02
            #endfor RNUM
            #plot individual gradients
            '''if(plot_prop==2 and k_mass==0):
                for ii in range(0,10):
                    slice_ii = [x*NGals+ii for x in range(0,12)]
                    xx = x_variable[slice_ii]
                    yy = np.log10(y_variable[slice_ii])
                    subplot.plot(xx, yy, color='b', linewidth=1, linestyle=':')
                    sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                    print(xx[sel])
                    print(yy[sel])
                    f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                    subplot.plot(new_x_var, f(new_x_var), color='orange', linewidth=1, linestyle='-')'''
            # spline-interpolate a random subsample of individual profiles onto
            # the common r/r_e grid before computing the median
            N_random_gals = 5000
            if(N_random_gals>NGals):
                N_random_gals=NGals
            random_list = random.sample(range(0, NGals), N_random_gals)
            for ii in random_list:
                # NOTE(review): range(0,12) hard-codes 12 rings instead of
                # using RNUM -- confirm this is intended.
                slice_ii = [x*NGals+ii for x in range(0,12)]
                xx = x_variable[slice_ii]
                if(plot_prop==1):
                    yy = y_variable[slice_ii]
                else:
                    yy = np.log10(y_variable[slice_ii])
                #ignore galaxies without halflightradius or with nan on the y_variable
                sel = (~np.isnan(xx)) & (~np.isinf(xx)) & (~np.isnan(yy)) & (~np.isinf(yy))
                if(len(xx[sel])>0):
                    f = interpolate.UnivariateSpline(xx[sel], yy[sel], s=0)
                    interpol_y_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = f(new_x_var)
                    interpol_x_variable[ii*len(new_x_var):(ii+1)*len(new_x_var)] = new_x_var
            '''
            #without interpolation
            bin=0.25
            sel=y_variable>0.
            if(len(y_variable[sel])>0.):
                (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles (bin, xlim[0]+0.1, xlim[1],
                                                                                 x_variable[sel], y_variable[sel])'''
            sel = interpol_x_variable>0.
            (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles_fixed_xx(interpol_x_variable[sel],
                                                                                     interpol_y_variable[sel])
            subplot.plot(x_binned, median, color=plot_color[k_mass], linewidth=2)
            #labels
            #char_mass_low="%0.1f" % mass_low[k_mass]
            #char_mass_high="%0.1f" % mass_high[k_mass]
            #x_values=[0.02,0.22,0.455,0.715,0.02,0.285,0.55]
            #y_values=[0.9,0.9,0.9,0.9,0.82,0.82,0.82]
            #if(plot_prop==0):
            # label='['+char_mass_low+','+char_mass_high+']'
            # plot_label (subplot, 'label',xlim,ylim,x_percentage=x_values[k_mass],y_percentage=y_values[k_mass],
            # color=plot_color[k_mass],xlog=0,ylog=0,label=label,fontsize=12,fontweight='normal')
            #labels
            # legend is drawn on the top panel only (redundantly redrawn for
            # every mass bin, which is harmless)
            if(plot_prop==0):
                plot_label (subplot, 'label',xlim,ylim,x_percentage=0.12,y_percentage=0.9,
                            color='black',xlog=0,ylog=0,label='$M_{\odot}=$', fontsize=12,fontweight='normal')
                for k_mass in range(0, len(mass_low)):
                    char_mass_low="%0.1f" % mass_low[k_mass]
                    char_mass_high="%0.1f" % mass_high[k_mass]
                    x_values=[0.26,0.47,0.71]
                    y_values=[0.9,0.9,0.9]
                    label='['+char_mass_low+','+char_mass_high+']'
                    plot_label (subplot, 'label',xlim,ylim,x_percentage=x_values[k_mass],y_percentage=y_values[k_mass],
                                color=plot_color[k_mass],xlog=0,ylog=0,label=label,fontsize=12,fontweight='normal')
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.69, y_percentage=0.8,
                            color='black', xlog=0, ylog=0,label=prefix_this_model, fontsize=13, fontweight='normal')
                plot_label (subplot,'line',xlim,ylim,x_percentage=0.6,y_percentage=0.82,
                            color='red',x2_percentage=0.67,xlog=0,ylog=0,linestyle='-',linewidth=2)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.69, y_percentage=0.7,
                            color='black', xlog=0, ylog=0,label='CALIFA', fontsize=13, fontweight='normal')
                plot_label (subplot,'line',xlim,ylim,x_percentage=0.6,y_percentage=0.72,
                            color='red',x2_percentage=0.67,xlog=0,ylog=0,linestyle='--',linewidth=2)
        #endfor -> massbins
    #endfor -> plot_prop
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_CALIFA_gradients_mass_bins.pdf')
    plt.close()
    return
#end gradients_mass_bins
def MANGA_gradients_late_types(ThisRedshiftList):
    """Plot age and metallicity gradients of late-type galaxies vs MaNGA data.

    Draws a 2x4 grid: the top row shows mass-weighted age gradients and the
    bottom row mass-weighted stellar-metallicity gradients, one column per
    stellar-mass bin.  Model medians (red) are compared to the Goddard et
    al. (2016) MaNGA late-type profiles (blue with shaded errors).

    Output: ./fig/plots_MANGA_gradients_late_types.pdf and
    ./fig/HYJ18_MANGA_gradients_late_types.pdf.

    Relies on module-level state (G_MR, FullSnapshotList_MR, Datadir,
    DirName_MR, Hubble_h, RNUM, RingRadius, opt_detailed_enrichment,
    opt_rings_in_bulges, ...).  Only the first redshift (ii=0) is used.
    """
    ii=0
    plot_color=['brown','red','orange','green','lightblue','blue','darkblue']
    fig = plt.figure(figsize=(two_four_size_large[0],two_four_size_large[1]))
    grid = gridspec.GridSpec(2, 4)
    grid.update(wspace=0.0, hspace=0.0)
    # log10(M*/Msun) bin edges matching the Goddard+2016 MaNGA bins
    MassBins=4
    mass_low=[9.0,9.935,10.552,11.054]
    mass_high=[9.935,10.552,11.054,11.5]
    (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    G0_MR_unsel=G_MR[sel]
    # drop galaxies with non-positive disk metals
    if(opt_detailed_enrichment==1):
        G0_MR_unsel=G0_MR_unsel[((G0_MR_unsel['MetalsDiskMass'][:,0] +
                                  G0_MR_unsel['MetalsDiskMass'][:,1] +
                                  G0_MR_unsel['MetalsDiskMass'][:,2])>.0)]
    else:
        G0_MR_unsel=G0_MR_unsel[(G0_MR_unsel['MetalsDiskMass']>.0)]
    #SFH structure needed for age gradients
    fa = open(DirName_MR+"SFH_Bins","rb")
    nbins = np.fromfile(fa,np.int32,1)
    template = np.dtype([('SnapNum',np.int32,1),
                         ('Bin',np.int32,1),
                         ('Lookbacktime',np.float64,1),
                         ('dt',np.float64,1),
                         ('nbins',np.int32,1)
                        ])
    SFH = np.fromfile(fa,template,int(nbins))
    fa.close()
    # keep only the SFH bins belonging to the current snapshot
    SFH=SFH[SFH['SnapNum']==G0_MR_unsel['SnapNum'][0]]
    i_grid=0
    Nprops=2
    #loop on metallicity and age
    for plot_prop in range (0,Nprops):
        #loop on mass bins
        for i_mass in range(0,MassBins):
            # late types: disk fraction > 0.8, within the current mass bin
            G0_MR=G0_MR_unsel[(G0_MR_unsel['DiskMass']/G0_MR_unsel['StellarMass']>0.8) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)>mass_low[i_mass]) &
                              (np.log10(G0_MR_unsel['StellarMass']*1e10/Hubble_h)<mass_high[i_mass])]
            subplot=plt.subplot(grid[i_grid])
            i_grid+=1
            xlim=[0.0,1.7]
            xlab='$r/r_{d}$'
            subplot.set_xlabel(xlab,fontsize=14)
            subplot.xaxis.set_major_locator(MultipleLocator(1.0))
            subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
            if(plot_prop==0):
                #***************************
                #* mass-weighted age *
                #***************************
                ylim=[0.5, 1.2]
                ylab='$\log_{10}(Age_{MW}[Gyr])$'
                subplot.yaxis.set_major_locator(MultipleLocator(1.0))
                subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
            if(plot_prop==1):
                #***************************
                #* stellar metallicity *
                #***************************
                ylim=[-2., 0.2]
                ylab='$\mathrm{log_{10}}(Z_*/Z_{\odot})$'
                subplot.yaxis.set_major_locator(MultipleLocator(0.5))
                subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
            subplot.set_ylim(ylim),subplot.set_xlim(xlim)
            if(i_mass==0):
                subplot.set_ylabel(ylab,fontsize=14)
            subplot.xaxis.set_major_formatter(FormatStrFormatter('%.1f'))
            subplot.yaxis.set_major_formatter(FormatStrFormatter('%.1f'))
            # hide redundant tick labels on inner panels of the shared grid
            if(plot_prop==0):
                plt.tick_params(axis='x', which='both', bottom='on', labelbottom='off')
            if(i_mass>0):
                plt.tick_params(axis='y', which='both', left='on', labelleft='off')
            NGals=len(G0_MR)
            # flattened (ring, galaxy) arrays: element NGals*jj+i is ring jj of galaxy i
            x_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            y_variable=np.zeros(int(RNUM*NGals),dtype=np.float32)
            r_bulge=G0_MR['BulgeSize']*1000./Hubble_h #From Mpc/h to kpc
            for jj in range(0,RNUM):
                x_variable[NGals*jj:NGals*(jj+1)]=RingRadius[jj]/(G0_MR['StellarHalfMassRadius']*1000./Hubble_h)
                if(opt_rings_in_bulges==1):
                    BulgeMass_this_ring=G0_MR['BulgeMassRings'][:,jj]*1e10/Hubble_h
                else:
                    # no bulge rings in the output: distribute the bulge mass
                    # analytically using the cumulative fraction of an
                    # r/(r+r_bulge) profile evaluated at the ring edges
                    # (presumably a projected Jaffe-like profile -- TODO confirm)
                    if(jj==0):
                        r_bulge_m=1.-1./(1.+RingRadius[0]/r_bulge)
                    else:
                        r_bulge_m=(1/(1+RingRadius[jj-1]/r_bulge)-1/(1+RingRadius[jj]/r_bulge))
                    BulgeMass_this_ring=G0_MR['BulgeMass']*r_bulge_m*1e10/Hubble_h
                    BulgeMass_this_ring[r_bulge==0.]=0.
                StellarMass_this_ring=G0_MR['DiskMassRings'][:,jj]*1e10/Hubble_h+BulgeMass_this_ring
                #***************************
                #* mass-weighted AGE *
                #***************************
                if(plot_prop==0):
                    #we only need the SFH strucutre from the current snap
                    # (this inner loop shadows the outer ii, which is no
                    # longer used afterwards)
                    age=np.zeros(NGals)
                    for ii in range(0,len(SFH)):
                        # 0.43 is presumably the recycled mass fraction -- TODO confirm
                        sel=G0_MR['sfh_DiskMassRings'][:,jj,ii]>0.
                        age[sel]+=SFH['Lookbacktime'][ii]/1e9*(G0_MR['sfh_DiskMassRings'][sel,jj,ii]
                                                               *(1.-0.43))*1e10/Hubble_h
                    age+=G0_MR['MassWeightAge']*BulgeMass_this_ring
                    y_variable[NGals*jj:NGals*(jj+1)] = age/StellarMass_this_ring
                #***************************
                #* stellar metallicity *
                #***************************
                if(plot_prop==1):
                    #METALS
                    if(opt_rings_in_bulges==1):
                        if(opt_detailed_enrichment==1):
                            MetalsBulgeMass_this_ring=(G0_MR['MetalsBulgeMassRings'][:,jj,0] +
                                                       G0_MR['MetalsBulgeMassRings'][:,jj,1] +
                                                       G0_MR['MetalsBulgeMassRings'][:,jj,2])
                        else:
                            MetalsBulgeMass_this_ring=G0_MR['MetalsBulgeMassRings'][:,jj]
                    else:
                        if(opt_detailed_enrichment==1):
                            MetalsBulgeMass_this_ring=(G0_MR['MetalsBulgeMass'][:,0]+
                                                       G0_MR['MetalsBulgeMass'][:,1]+
                                                       G0_MR['MetalsBulgeMass'][:,2])*r_bulge_m
                        else:
                            MetalsBulgeMass_this_ring=G0_MR['MetalsBulgeMass']*r_bulge_m
                        MetalsBulgeMass_this_ring[r_bulge==0]=0.
                    if(opt_detailed_enrichment==1):
                        MetalsDiskMass_this_ring=(G0_MR['MetalsDiskMassRings'][:,jj,0] +
                                                  G0_MR['MetalsDiskMassRings'][:,jj,1] +
                                                  G0_MR['MetalsDiskMassRings'][:,jj,2])
                    else:
                        MetalsDiskMass_this_ring=G0_MR['MetalsDiskMassRings'][:,jj]
                    MetalsStellarMass_this_ring=(MetalsDiskMass_this_ring+MetalsBulgeMass_this_ring)*1e10/Hubble_h
                    # 0.02 = assumed solar metallicity, giving Z in solar units
                    y_variable[NGals*jj:NGals*(jj+1)]= MetalsStellarMass_this_ring/StellarMass_this_ring/0.02
            #endfor RNUM
            bin=0.1
            sel=y_variable>0.
            if(len(y_variable[sel])>0.):
                (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles (bin, xlim[0], xlim[1],
                                                                                  x_variable[sel], y_variable[sel])
                subplot.plot(x_binned, np.log10(median), color='red', linewidth=2)
            #labels
            '''x_values=[0.1,0.16,0.25,0.34,0.43,0.56,0.65]
            if(plot_prop==0):
                label=morph_types[k_type]
                plot_label (subplot, 'label',xlim,ylim,x_percentage=x_values[k_type],y_percentage=0.85,
                            color=plot_color[k_type],xlog=0,ylog=0,label=label,fontsize=15,fontweight='normal') '''
            #OBSERVATIONS
            # NOTE(review): the observation Table is read only at i_mass==0 and
            # reused (same `obs` variable) by the later mass bins -- this relies
            # on the loop visiting i_mass==0 first.
            if(plot_prop==0):
                if(i_mass==0):
                    file = Datadir + '/MANGA_gradients/goddard2016_LT_new_MAD.txt'
                    obs = Table.read(file, format='ascii')
                    subplot.fill_between(obs['Radius'],obs['MW_Age_M1']+obs['MW_Age_Error_M1'],
                                         obs['MW_Age_M1']-obs['MW_Age_Error_M1'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Age_M1'],color='blue', linewidth=2)
                if(i_mass==1):
                    subplot.fill_between(obs['Radius'],obs['MW_Age_M2']+obs['MW_Age_Error_M2'],
                                         obs['MW_Age_M2']-obs['MW_Age_Error_M2'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Age_M2'],color='blue', linewidth=2)
                if(i_mass==2):
                    subplot.fill_between(obs['Radius'],obs['MW_Age_M3']+obs['MW_Age_Error_M3'],
                                         obs['MW_Age_M3']-obs['MW_Age_Error_M3'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Age_M3'],color='blue', linewidth=2)
                if(i_mass==3):
                    subplot.fill_between(obs['Radius'],obs['MW_Age_M4']+obs['MW_Age_Error_M4'],
                                         obs['MW_Age_M4']-obs['MW_Age_Error_M4'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Age_M4'],color='blue', linewidth=2)
            if(plot_prop==1):
                if(i_mass==0):
                    file = Datadir + '/MANGA_gradients/goddard2016_LT_new_MAD.txt'
                    obs = Table.read(file, format='ascii')
                    subplot.fill_between(obs['Radius'],obs['MW_Metallicity_M1']+obs['MW_Metallicity_Error_M1'],
                                         obs['MW_Metallicity_M1']-obs['MW_Metallicity_Error_M1'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Metallicity_M1'],color='blue', linewidth=2)
                if(i_mass==1):
                    subplot.fill_between(obs['Radius'],obs['MW_Metallicity_M2']+obs['MW_Metallicity_Error_M2'],
                                         obs['MW_Metallicity_M2']-obs['MW_Metallicity_Error_M2'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Metallicity_M2'],color='blue', linewidth=2)
                if(i_mass==2):
                    subplot.fill_between(obs['Radius'],obs['MW_Metallicity_M3']+obs['MW_Metallicity_Error_M3'],
                                         obs['MW_Metallicity_M3']-obs['MW_Metallicity_Error_M3'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Metallicity_M3'],color='blue', linewidth=2)
                if(i_mass==3):
                    subplot.fill_between(obs['Radius'],obs['MW_Metallicity_M4']+obs['MW_Metallicity_Error_M4'],
                                         obs['MW_Metallicity_M4']-obs['MW_Metallicity_Error_M4'], facecolor='lightblue',
                                         interpolate=True, alpha=0.4, edgecolor='steelblue')
                    subplot.plot(obs['Radius'],obs['MW_Metallicity_M4'],color='blue', linewidth=2)
            #if(plot_prop==0):
            label="%0.2f" % mass_low[i_mass] + r'$<\mathrm{log_{10}}(M_*[M_{\odot}])<$' + "%0.2f" % mass_high[i_mass]
            plot_label (subplot, 'label',xlim,ylim,x_percentage=0.05,y_percentage=0.05,
                        color='black',xlog=0,ylog=0,label=label,fontsize=11,fontweight='normal')
        #endfor -> morph_types
    #endfor -> plot_prop
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYJ18_MANGA_gradients_late_types.pdf')
    plt.close()
    return
#end gradients_morph_types
def SFR_gradients(ThisRedshiftList):
    """Plot mean SFR surface-density profiles in four stellar-mass bins.

    For disc-dominated model galaxies (bulge-to-total < 0.8) at the first
    redshift in ThisRedshiftList, the mean SFR surface density of each
    radial ring is plotted against radius in units of the median disc scale
    length (StellarDiskRadius/3).  One panel per mass bin of width
    `massbin` dex starting at each entry of `low_mass_limits`.

    Output: ./fig/plots_SFR_gradients.pdf.

    Relies on module-level state (G_MR, FullSnapshotList_MR, Hubble_h,
    RNUM, RingRadius, plot_label, ...).
    """
    fig = plt.figure(figsize=(10,10))
    grid = gridspec.GridSpec(2, 2)
    xlim=[0.0,6.0]
    ylim=[-5.0, 0.0]
    xlab='$r/r_d$[kpc]'
    ylab='$\Sigma_{\mathrm{SFR}}$'
    low_mass_limits=[9.0,9.5,10.0,10.5]
    massbin=0.5
    #Model
    ii=0
    (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    G0_MR_unsel=G_MR[sel]
    #keep disc-dominated galaxies only
    G0_MR_unsel=G0_MR_unsel[G0_MR_unsel['BulgeMass']/G0_MR_unsel['StellarMass']<0.8]
    #SFR surface density versus radius, one panel per stellar-mass bin
    for kk in range(0,len(low_mass_limits)):
        mean_SFR=np.zeros(RNUM,dtype=np.float32)
        G0_MR=G0_MR_unsel[(np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h) > low_mass_limits[kk]) &
                          (np.log10(G0_MR_unsel['StellarMass']*1.e10/Hubble_h) < low_mass_limits[kk]+massbin)]
        for jj in range(0,RNUM):
            #SfrRings holds the SFR inside ring jj, so divide by the annulus
            #area between RingRadius[jj-1] and RingRadius[jj] (the innermost
            #ring is a full circle), consistent with the surface-density
            #computation in the CALIFA gradient plots
            if(jj==0):
                area=(3.14*RingRadius[0]*RingRadius[0])
            else:
                area=(3.14*(RingRadius[jj]**2-RingRadius[jj-1]**2))
            SFR=G0_MR['SfrRings'][:,jj]/area
            mean_SFR[jj]=np.log10(np.mean(SFR))
        #radius in units of the median disc scale length (StellarDiskRadius/3,
        #converted from Mpc/h to kpc)
        G=G0_MR[G0_MR['StellarDiskRadius']>0.]
        radius=RingRadius/np.median(G['StellarDiskRadius']/3.*1000./Hubble_h)
        subplot=plt.subplot(grid[kk])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        subplot.plot(radius, mean_SFR,color='red', linewidth=2)
        label="%0.1f" % low_mass_limits[kk] + "$<M_{\star}[M_{\odot}]<$" + "%0.1f" % (low_mass_limits[kk]+massbin)
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.075, y_percentage=0.88,
                    color='black', xlog=0, ylog=0, label=label,
                    fontsize=13, fontweight='normal')
    #endfor
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.close()
    return
#end SFR_gradients
def gasfractions_Saintonge17(ThisRedshiftList):
    """Plot HI and H2 gas fractions versus stellar mass against Saintonge+17.

    For each redshift in ThisRedshiftList, the median and 16th/84th
    percentiles of log10(M_HI/M*) (red) and log10(M_H2/M*) (blue) are
    plotted for star-forming model galaxies and compared to the
    xGASS/xCOLD GASS measurements of Saintonge et al. (2017).
    Binned medians are optionally written to csv when write_to_file==1.

    Output: ./fig/plots_gasfractions_Saintonge17.pdf and
    ./fig/HYF19_gasfractions_Saintonge17.pdf.

    Relies on module-level state (G_MR, FullSnapshotList_MR, Datadir,
    Hubble_h, write_to_file, file_to_write, prefix_this_model, ...).
    """
    for ii in range(0,len(ThisRedshiftList)):
        xlim=[9.5,11.5]
        ylim=[-2.0,0.5]
        bin=0.25
        fig = plt.figure(figsize=(one_one_size_small[0],one_one_size_small[1]))
        subplot=plt.subplot()
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        ylab='$\log_{10}(M_{\mathrm{gas}}/M_*)$'
        subplot.set_ylabel(ylab, fontsize=14)
        xlab='$\log_{10}(M_*/M_{\odot})$'
        subplot.set_xlabel(xlab, fontsize=14)
        #format axis
        majorFormatter = FormatStrFormatter('%d')
        subplot.xaxis.set_major_locator(MultipleLocator(0.5))
        subplot.xaxis.set_minor_locator(MultipleLocator(0.1))
        subplot.yaxis.set_minor_locator(MultipleLocator(0.1))
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR=G_MR[sel]
        log_StellarMass=np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)
        log_SFR=np.log10(G0_MR['Sfr'])
        # select star-forming galaxies: sSFR above an evolving main-sequence
        # threshold ~ (1+z)^2 / (1.37e10/2) shifted down by 1 dex
        # NOTE(review): ColdGas>-1e-30 is effectively no cut (the sibling
        # function uses ColdGas>0.) -- confirm whether zero-gas galaxies
        # should be included here.
        G0_MR=G0_MR[(log_StellarMass>7.) & (G0_MR['ColdGas']>-1e-30) & ~np.isnan(G0_MR['H2fraction']) &
                    #(np.log10(G0_MR['Sfr']/(G0_MR['StellarMass']*1.e10/Hubble_h))>-11.)]
                    ((log_SFR-log_StellarMass)>np.log10((1+ThisRedshiftList[ii])**2/(1.37e10/2.)) -1.0)]
        #G0_MR=G0_MR[(np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)>7.) & (G0_MR['ColdGas']>0.)]
        StellarMass=stellar_mass_with_err(G0_MR, Hubble_h, ThisRedshiftList[ii])
        #HI
        #sel=(1.-G0_MR['H2fraction'])>0.
        Fraction_HI=np.log10(G0_MR['ColdGas']*(1.-G0_MR['H2fraction'])*1.e10/Hubble_h)-StellarMass
        (x_binned, median,mean,pc16,pc84,rms)=median_and_percentiles(bin,xlim[0],xlim[1],StellarMass,Fraction_HI)
        subplot.plot(x_binned,median,color='red',linewidth=2)
        subplot.plot(x_binned,pc16,color='red',linewidth=2,linestyle='--')
        subplot.plot(x_binned,pc84,color='red',linewidth=2,linestyle='--')
        #WRITE OUTPUT
        if(write_to_file==1):
            df = pd.DataFrame({'Log10_M':x_binned, 'median':median, 'pc16':pc16, 'pc84':pc84})
            df.to_csv(Datadir + file_to_write + 'GasFractions_Saintonge2017_HI' +
                      str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv', index=False)
            #df = pd.read_csv(Datadir + file_to_write + 'GasFractions_Saintonge2017_HI' +
            # str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv')
            #subplot.plot(df['Log10_M'], df['median'],color='black', linestyle='--')
        #H2
        #sel=G0_MR['H2fraction']>0.
        Fraction_H2=np.log10(G0_MR['ColdGas']*(G0_MR['H2fraction'])*1.e10/Hubble_h)-StellarMass
        (x_binned, median,mean,pc16,pc84,rms)=median_and_percentiles(bin,xlim[0],xlim[1],StellarMass,Fraction_H2)
        subplot.plot(x_binned,median,color='blue',linewidth=2)
        subplot.plot(x_binned,pc16,color='blue',linewidth=2,linestyle='--')
        subplot.plot(x_binned,pc84,color='blue',linewidth=2,linestyle='--')
        #WRITE OUTPUT
        if(write_to_file==1):
            df = pd.DataFrame({'Log10_M':x_binned, 'median':median, 'pc16':pc16, 'pc84':pc84})
            df.to_csv(Datadir + file_to_write + 'GasFractions_Saintonge2017_H2' +
                      str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv', index=False)
            #df = pd.read_csv(Datadir + file_to_write + 'GasFractions_Saintonge2017_H2' +
            # str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv')
            #subplot.plot(df['Log10_M'], df['median'],color='black', linestyle='--')
        # legend
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.19, y_percentage=0.9,
                    color='black', xlog=0, ylog=0, label='xGass/xCOLD GASS: ', fontsize=13, fontweight='normal')
        plot_label (subplot, 'symbol', xlim, ylim, x_percentage=0.75, y_percentage=0.925,
                    color='red', xlog=0, ylog=0, sym='o', sym_size=5, err_size=0.05)
        plot_label (subplot, 'symbol', xlim, ylim, x_percentage=0.855, y_percentage=0.925,
                    color='blue', xlog=0, ylog=0, sym='o', sym_size=5, err_size=0.05)
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.77, y_percentage=0.9,
                    color='black', xlog=0, ylog=0, label='HI H$_2$', fontsize=13, fontweight='normal')
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.435, y_percentage=0.8,
                    color='black', xlog=0, ylog=0, label=prefix_this_model+': ', fontsize=13, fontweight='normal')
        plot_label (subplot, 'line', xlim, ylim,x_percentage=0.74, y_percentage=0.825,
                    color='red', x2_percentage=0.76, xlog=0, ylog=0, linestyle='-', linewidth=2)
        plot_label (subplot, 'line', xlim, ylim,x_percentage=0.84, y_percentage=0.825,
                    color='blue', x2_percentage=0.86, xlog=0, ylog=0, linestyle='-', linewidth=2)
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.77, y_percentage=0.8,
                    color='black', xlog=0, ylog=0, label='HI H$_2$', fontsize=13, fontweight='normal')
        #plot_label (subplot, 'label', xlim, ylim, x_percentage=0.6, y_percentage=0.85,
        # color='black', xlog=0, ylog=0, label='$M_{\mathrm{cold}}/M_*$',
        # fontsize=15, fontweight='normal')
        #OBSERVATIONS
        #HI
        file = Datadir+"/Saintonge2017_HI.csv"
        df = pd.read_csv(file)
        subplot.errorbar(df['x'], df['y'],yerr=[df['err_down'], df['err_up']],
                         fmt='o', markersize=5, ecolor='red', color='red',zorder=+3)
        #HI2
        file = Datadir+"/Saintonge2017_H2.csv"
        df = pd.read_csv(file)
        subplot.errorbar(df['x'], df['y'],yerr=[df['err_down'], df['err_up']],
                         fmt='o', markersize=5, ecolor='blue', color='blue',zorder=+3)
        '''file = Datadir+"/Saintonge2016_gasfraction.txt"
        Saint16 = Table.read(file, format='ascii')
        Saint16_mass=(Saint16['mass_bin_low']+Saint16['mass_bin_high'])/2.
        #H2
        #OBSERVATIONS PLOT
        y_err=np.zeros(len(Saint16['fH2']),dtype=np.float32)
        y_err=[np.log10(Saint16['fH2']/(Saint16['fH2']-Saint16['fH2_err'])),
               np.log10((Saint16['fH2']+Saint16['fH2_err'])/Saint16['fH2'])]
        subplot.errorbar(Saint16_mass, np.log10(Saint16['fH2']),xerr=0.12,yerr=y_err,
                         fmt='o', markersize=5, ecolor='blue', color='blue',zorder=+3)
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.6, y_percentage=0.85,
                    color='black', xlog=0, ylog=0, label='$M_{\mathrm{H_2}}/M_*$',
                    fontsize=15, fontweight='normal')'''
        '''xx = [9.407, 9.638, 9.848, 10.04, 10.24,
              10.46, 10.67, 10.87, 11.07, 11.27]
        yy = [-1.01,-1.08,-0.97,-0.90,-1.08,
              -1.05,-1.34,-1.41,-1.66,-2.02]
        xx = [9.388,9.669, 9.915, 10.10, 10.25,
              10.41,10.58, 10.75,10.90,11.08]
        yy = [-1.11,-1.28,-1.23,-1.16,-1.39,
              -1.40,-1.62,-1.75,-1.83,-2.01]
        xx = [9.406, 9.643,9.854,10.07,10.25,
              10.46,10.68,10.82,11.10,11.31]
        yy = [-0.93,-0.89,-0.96,-0.89,-1.05,
              -0.99,-1.10,-1.26,-1.41,-1.68]
        xx = [9.366,9.465,9.467,9.882,10.01,
              10.14,10.37,10.56,10.77,11.12]
        yy=[-0.99,-1.05,-1.12,-1.10,-1.11,
            -1.20,-1.27,-1.39,-1.44,-1.70]
        subplot.scatter(xx,yy, color='black')'''
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYF19_gasfractions_Saintonge17.pdf')
    plt.close()
    return
#end
def gasfractions_vs_stellarmass(ThisRedshiftList):
    """Plot cold-gas, HI, H2 fractions and the H2/HI ratio vs stellar mass.

    For each redshift in ThisRedshiftList a 1x4 panel figure is drawn:
    log10(M_cold/M*), log10(M_HI/M*), log10(M_H2/M*) and log10(H2/HI)
    as a function of log10(M*), with the model median and 16th/84th
    percentiles compared to the Saintonge et al. (2016) binned data.
    Binned medians are optionally written to csv when write_to_file==1.

    Output: ./fig/plots_gasfractions_vs_stellarmass.pdf and
    ./fig/HYF19_gasfractions_vs_stellarmass.pdf.

    Relies on module-level state (G_MR, FullSnapshotList_MR, Datadir,
    Hubble_h, write_to_file, file_to_write, prefix_this_model, ...).
    """
    # suffixes for the optional csv output, one per panel
    labels_to_write=['ColdGas', 'HI', 'H2', 'H2_HI']
    plot_color=['red','purple']
    fig = plt.figure(figsize=(one_four_size_large[0],one_four_size_large[1]))
    grid = gridspec.GridSpec(1, 4)
    grid.update(wspace=0.0, hspace=0.0)
    #OBSERVATIONS READ
    file = Datadir+"/Saintonge2016_gasfraction.txt"
    Saint16 = Table.read(file, format='ascii')
    Saint16_mass=(Saint16['mass_bin_low']+Saint16['mass_bin_high'])/2.
    for ii in range(0,len(ThisRedshiftList)):
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR=G_MR[sel]
        #G0_MR=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['ColdGas']>0.) &
        # (G0_MR['Vvir']>120.) & (G0_MR['BulgeMass']/G0_MR['StellarMass']<0.15)]
        # star-forming selection: sSFR > 1e-11 yr^-1
        G0_MR=G0_MR[(np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)>7.) & (G0_MR['ColdGas']>0.) &
                    (np.log10(G0_MR['Sfr']/(G0_MR['StellarMass']*1.e10/Hubble_h))>-11.)]
        #G0_MR=G0_MR[(np.log10(G0_MR['StellarMass']*1.e10/Hubble_h)>10.) & (G0_MR['ColdGas']>0.)]
        StellarMass=stellar_mass_with_err(G0_MR, Hubble_h, ThisRedshiftList[ii])
        xlim=[9.5,11.5]
        ylim=[-2.0,0.5]
        bin=0.25
        # i_gas: 0 = total cold gas, 1 = HI, 2 = H2, 3 = H2/HI ratio
        for i_gas in range(0,4):
            subplot=plt.subplot(grid[i_gas])
            subplot.set_ylim(ylim), subplot.set_xlim(xlim)
            if i_gas>0:
                plt.tick_params(axis='y', which='both', left='on', labelleft='off')
            if i_gas==0:
                ylab='$\log_{10}(M_{\mathrm{gas}}/M_*)$'
                subplot.set_ylabel(ylab, fontsize=14)
            xlab='$\log_{10}(M_*/M_{\odot})$'
            subplot.set_xlabel(xlab, fontsize=14)
            #format axis
            majorFormatter = FormatStrFormatter('%d')
            subplot.xaxis.set_major_locator(MultipleLocator(1))
            subplot.xaxis.set_minor_locator(MultipleLocator(0.25))
            #MODEL
            if(i_gas==0):
                sel=G0_MR['ColdGas']>0.
                Fraction=np.log10(G0_MR['ColdGas']*1.e10/Hubble_h)-StellarMass
            if(i_gas==1):
                sel=(1.-G0_MR['H2fraction'])>0.
                Fraction=np.log10(G0_MR['ColdGas']*(1.-G0_MR['H2fraction'])*1.e10/Hubble_h)-StellarMass
            if(i_gas==2):
                sel=G0_MR['H2fraction']>0.
                Fraction=np.log10(G0_MR['ColdGas']*(G0_MR['H2fraction'])*1.e10/Hubble_h)-StellarMass
            if(i_gas==3):
                sel=G0_MR['H2fraction']>0.
                Fraction=np.log10(G0_MR['H2fraction']/(1.-G0_MR['H2fraction']))
            (x_binned, median,mean,pc16,pc84,rms)=median_and_percentiles(bin,xlim[0],xlim[1],StellarMass[sel],Fraction[sel])
            sel=(median!=0)
            subplot.plot(x_binned[sel],median[sel],color=plot_color[ii],linewidth=2)
            subplot.plot(x_binned[sel],pc16[sel],color=plot_color[ii],linewidth=2,linestyle='--')
            subplot.plot(x_binned[sel],pc84[sel],color=plot_color[ii],linewidth=2,linestyle='--')
            #WRITE OUTPUT
            if(write_to_file==1):
                df = pd.DataFrame({'Log10_M':x_binned[sel], 'median':median[sel], 'pc16':pc16[sel], 'pc84':pc84[sel]})
                df.to_csv(Datadir + file_to_write + 'GasFractions_' + labels_to_write[i_gas] +
                          str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv', index=False)
                #df = pd.read_csv(Datadir + file_to_write + 'GasFractions_' + labels_to_write[i_gas] +
                # str(f'_z{ThisRedshiftList[ii]:0.2f}')+'.csv')
                #subplot.plot(df['Log10_M'], df['median'],color='black', linestyle='--')
            #OBSERVATIONS PLOT
            # linear fractions with symmetric errors are converted to
            # asymmetric errors in log space
            if(i_gas==0):
                #Cold
                y_err=np.zeros(len(Saint16['fHI']),dtype=np.float32)
                Saint16_H2plusHI=Saint16['fH2']+Saint16['fHI']
                Saint16_H2plusHI_err=Saint16['fH2_err']+Saint16['fHI_err']
                y_err=[np.log10(Saint16_H2plusHI/(Saint16_H2plusHI-Saint16_H2plusHI_err)),
                       np.log10((Saint16_H2plusHI+Saint16_H2plusHI_err)/Saint16_H2plusHI)]
                subplot.errorbar(Saint16_mass, np.log10(Saint16_H2plusHI),xerr=0.12,yerr=y_err,
                                 fmt='o', markersize=5, ecolor='blue', color='blue',zorder=+3,capsize=2)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.15, y_percentage=0.2,
                            color='black', xlog=0, ylog=0, label=prefix_this_model,
                            fontsize=13, fontweight='normal')
                plot_label (subplot, 'line', xlim, ylim,x_percentage=0.05, y_percentage=0.2175,
                            color='red', x2_percentage=0.12, xlog=0, ylog=0, linestyle='-', linewidth=2)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.15, y_percentage=0.12,
                            color='black', xlog=0, ylog=0, label='Saintonge2016',
                            fontsize=13, fontweight='normal')
                plot_label (subplot, 'symbol', xlim, ylim, x_percentage=0.12, y_percentage=0.14,
                            color='blue', xlog=0, ylog=0, sym='o', sym_size=5, err_size=0.075)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.6, y_percentage=0.85,
                            color='black', xlog=0, ylog=0, label='$M_{\mathrm{cold}}/M_*$',
                            fontsize=15, fontweight='normal')
            if(i_gas==1):
                #HI
                #OBSERVATIONS PLOT
                y_err=np.zeros(len(Saint16['fHI']),dtype=np.float32)
                y_err=[np.log10(Saint16['fHI']/(Saint16['fHI']-Saint16['fHI_err'])),
                       np.log10((Saint16['fHI']+Saint16['fHI_err'])/Saint16['fHI'])]
                subplot.errorbar(Saint16_mass, np.log10(Saint16['fHI']),xerr=0.12,yerr=y_err,
                                 fmt='o', markersize=5, ecolor='blue', color='blue',zorder=+3)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.6, y_percentage=0.85,
                            color='black', xlog=0, ylog=0, label='$M_{\mathrm{HI}}/M_*$',
                            fontsize=15, fontweight='normal')
            if(i_gas==2):
                #H2
                #OBSERVATIONS PLOT
                y_err=np.zeros(len(Saint16['fH2']),dtype=np.float32)
                y_err=[np.log10(Saint16['fH2']/(Saint16['fH2']-Saint16['fH2_err'])),
                       np.log10((Saint16['fH2']+Saint16['fH2_err'])/Saint16['fH2'])]
                subplot.errorbar(Saint16_mass, np.log10(Saint16['fH2']),xerr=0.12,yerr=y_err,
                                 fmt='o', markersize=5, ecolor='blue', color='blue',zorder=+3)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.6, y_percentage=0.85,
                            color='black', xlog=0, ylog=0, label='$M_{\mathrm{H_2}}/M_*$',
                            fontsize=15, fontweight='normal')
            if(i_gas==3):
                #H2/HI
                #OBSERVATIONS PLOT
                y_err=np.zeros(len(Saint16['fHI']),dtype=np.float32)
                Saint16_H2overHI=Saint16['fH2']/Saint16['fHI']
                Saint16_H2overHI_err=Saint16['fH2_err']/Saint16['fH2']+Saint16['fHI_err']/Saint16['fHI']
                # clamp errors so the lower error bar stays positive in log space
                sel=Saint16_H2overHI_err>Saint16_H2overHI
                Saint16_H2overHI_err[sel]=Saint16_H2overHI[sel]-0.01
                y_err=[np.log10(Saint16_H2overHI/(Saint16_H2overHI-Saint16_H2overHI_err)),
                       np.log10((Saint16_H2overHI+Saint16_H2overHI_err)/Saint16_H2overHI)]
                subplot.errorbar(Saint16_mass, np.log10(Saint16_H2overHI),xerr=0.12,yerr=y_err,
                                 fmt='o', markersize=5, ecolor='blue', color='blue',zorder=+3)
                plot_label (subplot, 'label', xlim, ylim, x_percentage=0.6, y_percentage=0.85,
                            color='black', xlog=0, ylog=0, label='$\mathrm{H_2}/\mathrm{HI}$',
                            fontsize=15, fontweight='normal')
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.savefig('./fig/HYF19_gasfractions_vs_stellarmass.pdf')
    plt.close()
    return
#end
def H2fraction_vs_stellarmass(ThisRedshiftList):
    """Plot the median molecular-to-non-molecular cold-gas ratio versus
    stellar mass.

    One panel; for each requested redshift a solid median line plus
    dashed 16th/84th-percentile lines (colour per redshift).  Model
    galaxies are taken from the module-level catalogue G_MR.
    Output: ./fig/plots_H2fraction_vs_stellarmass.pdf

    Parameters
    ----------
    ThisRedshiftList : list of float
        Redshifts to plot; each entry is matched to a snapshot via
        select_current_redshift().
    """
    plot_color=['red','purple']
    fig = plt.figure(figsize=(7,7))
    for ii in range(0,len(ThisRedshiftList)):
        #HII
        xlim=[9.5,11.5]
        ylim=[-2.5,1.0]
        bin=0.25    # stellar-mass bin width (dex)
        subplot=plt.subplot()
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        #format axis
        majorFormatter = FormatStrFormatter('%d')
        subplot.xaxis.set_major_locator(MultipleLocator(1))
        subplot.xaxis.set_minor_locator(MultipleLocator(0.25))
        xlab='$log_{10}(M_*[M_{\odot}])$'
        ylab='$f_{\mathrm{H_2}}$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        # select model galaxies at this redshift with positive stellar,
        # cold-gas and H2 content (also guards the division below)
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR=G_MR[sel]
        G0_MR=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['ColdGas']>0.) & (G0_MR['H2fraction']>0.)]
        StellarMass=stellar_mass_with_err(G0_MR, Hubble_h, ThisRedshiftList[ii])
        #Fraction=np.log10(G0_MR['H2fraction'])
        # ratio of molecular to non-molecular cold gas: f/(1-f)
        Fraction=G0_MR['H2fraction']/(1.-G0_MR['H2fraction'])
        (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles (bin, xlim[0], xlim[1], StellarMass, Fraction)
        # drop empty mass bins to avoid log10(0)
        sel=(median!=0)
        subplot.plot(x_binned[sel], np.log10(median[sel]),color=plot_color[ii], linewidth=2)
        subplot.plot(x_binned[sel], np.log10(pc16[sel]),color=plot_color[ii], linewidth=2, linestyle='--')
        subplot.plot(x_binned[sel], np.log10(pc84[sel]),color=plot_color[ii], linewidth=2, linestyle='--')
    plt.tight_layout()
    # output file is named after this function
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.close()
    return
#end H2fraction_vs_stellarmass
def evo_milkyway_gas_profile(ThisRedshiftList):
    """Follow one Milky-Way-like disc galaxy along its main progenitor
    branch and plot the evolution of its radial surface-density profiles.

    Three panels (one row): total cold gas, molecular gas (H2) and
    stellar disc surface density [Msun/pc^2] versus ring radius [kpc],
    one line per snapshot, coloured through the gist_ncar_r colormap so
    that the time ordering is visible.

    The galaxy is the first central (Type==0) found at each requested
    redshift with 200 < Vvir/Hubble_h < 235 and bulge-to-total stellar
    mass < 0.15.
    Output: ./fig/plots_evo_milkyway_gas_profile.pdf
    """
    plot_color=['red','purple']
    fig = plt.figure(figsize=(15,4))
    grid = gridspec.GridSpec(1, 3)
    #grid.update(wspace=0.0, hspace=0.0)
    for ii in range(0,len(ThisRedshiftList)):
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR=G_MR[sel]
        # Milky-Way-like selection: disc-dominated centrals in a narrow
        # virial-velocity window
        selected_Gal=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['DiskMass']>0.) &
                           (G0_MR['Vvir']/Hubble_h>200.) & (G0_MR['Vvir']/Hubble_h<235.) & (G0_MR['Type']==0)
                           & (G0_MR['BulgeMass']/G0_MR['StellarMass']<0.15)]
        print(len(selected_Gal))
        selected_Gal=selected_Gal[0]
        # main branch = all galaxies with IDs between GalID and MainLeafId
        # (depth-first ordering of the merger tree)
        MainBranch=G_MR[(G_MR['GalID']>=selected_Gal['GalID']) & (G_MR['GalID']<=selected_Gal['MainLeafId'])]
        print(selected_Gal['GalID'],selected_Gal['MainLeafId'])
        print('diskmass=', np.log10(selected_Gal['DiskMass']*1.e10),
              'disk fraction=', selected_Gal['DiskMass']/selected_Gal['StellarMass'],
              'stellarmass=', np.log10(selected_Gal['StellarMass']*1.e10))
        # Have a look at the colormaps here and decide which one you'd like:
        # http://matplotlib.org/1.2.1/examples/pylab_examples/show_colormaps.html
        num_plots=len(MainBranch)
        colormap = plt.cm.gist_ncar_r
        # NOTE(review): Axes.set_color_cycle() was removed in matplotlib 2.x;
        # recent versions require set_prop_cycle(color=...) instead.
        plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
        #colormap = plt.cm.gist_ncar
        #plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0., 0.8, num_plots)])
        xlim=[0.0,30.0]
        ylim=[0.,4.]
        #SigmaGas
        bin=2.
        subplot=plt.subplot(grid[0])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='$r[\mathrm{kpc}]$'
        ylab='$\Sigma_{\mathrm{gas}}[M_{\odot}/\mathrm{pc^2}]$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        colormap = plt.cm.gist_ncar_r
        plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
        # one profile per snapshot along the main branch
        for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
            #for jj in range (0,35):
            Gal=MainBranch[MainBranch['SnapNum']==jj]
            #print(Gal['SnapNum'],Gal['ColdGasRings'])
            if(len(Gal)>0):
                #print(Gal['SnapNum'])
                Sigma=np.zeros(RNUM,dtype=np.float32)
                # NOTE(review): this 'ii' shadows the outer redshift index;
                # harmless here because the outer index is not used again
                # in this loop iteration.
                for ii in range(0,RNUM):
                    Mass=Gal['ColdGasRings'][0][ii]*1e10/Hubble_h
                    # surface density = ring mass / (pi r^2), radius in pc
                    Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
                subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
        #H2
        bin=2.
        subplot=plt.subplot(grid[1])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='$r[\mathrm{kpc}]$'
        ylab='$\Sigma_{\mathrm{H_2}}[M_{\odot}/\mathrm{pc^2}]$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        colormap = plt.cm.gist_ncar_r
        plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
        for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
            Gal=MainBranch[MainBranch['SnapNum']==jj]
            if(len(Gal)>0):
                #print(Gal['SnapNum'])
                Sigma=np.zeros(RNUM,dtype=np.float32)
                for ii in range(0,RNUM):
                    # molecular mass in the ring; the /1.3 presumably removes
                    # the helium contribution -- TODO confirm
                    Mass=Gal['ColdGasRings'][0][ii]*1e10/Hubble_h*Gal['H2fractionRings'][0][ii]/1.3
                    Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
                subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
        #Stars
        bin=2.
        subplot=plt.subplot(grid[2])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='$r[\mathrm{kpc}]$'
        ylab='$\Sigma_{\mathrm{stars}}[M_{\odot}/\mathrm{pc^2}]$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        colormap = plt.cm.gist_ncar_r
        plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
        for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
            Gal=MainBranch[MainBranch['SnapNum']==jj]
            if(len(Gal)>0):
                #print(Gal['SnapNum'])
                Sigma=np.zeros(RNUM,dtype=np.float32)
                for ii in range(0,RNUM):
                    Mass=Gal['DiskMassRings'][0][ii]*1e10/Hubble_h
                    Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
                subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
        #option1
        #color = (0, jj*1./num_plots , 0, 1)
        #option2
        #color=iter(cm.rainbow(np.linspace(0,1,n)))
        #for i in range(n):
        #c=next(color)
        #ax1.plot(x, y,c=c)
        #LABELS
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.075, y_percentage=0.2,
                    color='black', xlog=0, ylog=0, label='Peeples 2015',
                    fontsize=13, fontweight='normal')
        plot_label (subplot, 'symbol', xlim, ylim, x_percentage=0.05, y_percentage=0.225,
                    color='blue', xlog=0, ylog=0, sym='o', sym_size=5, err_size=0.15)
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.close()
    return
#end evo_milkyway_gas_profile
def evo_milkyway_stellar_profiles(ThisRedshiftList):
    """Plot the build-up of stellar surface-density profiles along the
    main branch of two example central galaxies: one disc-dominated and
    one bulge-dominated.

    For each galaxy a 1x3 figure shows the disc, bulge and total
    (disc+bulge) stellar surface density [Msun/pc^2] versus ring radius
    [kpc], one line per snapshot, coloured through gist_ncar_r.

    Both galaxies are taken from the snapshot matching
    ThisRedshiftList[0] within 200 < Vvir/Hubble_h < 235; the disc case
    requires bulge-to-total < 0.15, the bulge case > 0.95.
    The disc figure goes to
    ./fig/plots_evo_milkyway_stellar_profiles_disc.pdf and the bulge
    figure to ./fig/plots_evo_milkyway_stellar_profiles.pdf.
    """
    #select galaxies at z=0 and then follow progenitors
    ii=0
    (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    G0_MR=G_MR[sel]
    #**********************
    #* SELECT DISC GALAXY *
    #**********************
    fig = plt.figure(figsize=(15,4))
    grid = gridspec.GridSpec(1, 3)
    #grid.update(wspace=0.0, hspace=0.0)
    # disc-dominated central in a narrow virial-velocity window
    selected_Gal=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['DiskMass']>0.) &
                       (G0_MR['Vvir']/Hubble_h>200.) & (G0_MR['Vvir']/Hubble_h<235.) & (G0_MR['Type']==0)
                       & (G0_MR['BulgeMass']/G0_MR['StellarMass']<0.15)]
    print('Number of Galaxies Selected at z=0: ',len(selected_Gal))
    selected_Gal=selected_Gal[0]
    # main branch = IDs between GalID and MainLeafId (depth-first ordering)
    MainBranch=G_MR[(G_MR['GalID']>=selected_Gal['GalID']) & (G_MR['GalID']<=selected_Gal['MainLeafId'])]
    print('First Galaxy selected ID:',selected_Gal['GalID'],'MainLeafID:',selected_Gal['MainLeafId'])
    print('')
    print('diskmass=%0.4f stellarmass=%0.4f disk fraction=%0.4f' %
          (np.log10(selected_Gal['DiskMass']*1.e10), np.log10(selected_Gal['StellarMass']*1.e10),
           selected_Gal['DiskMass']/selected_Gal['StellarMass'] ))
    # Have a look at the colormaps here and decide which one you'd like:
    # http://matplotlib.org/1.2.1/examples/pylab_examples/show_colormaps.html
    num_plots=len(MainBranch)
    colormap = plt.cm.gist_ncar_r
    # NOTE(review): Axes.set_color_cycle() was removed in matplotlib 2.x;
    # recent versions require set_prop_cycle(color=...) instead.
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    xlim=[0.0,20.0]
    ylim=[0.,3.]
    #Stars Disk
    bin=2.
    subplot=plt.subplot(grid[0])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$r[\mathrm{kpc}]$'
    ylab='$\Sigma_{\mathrm{DiskMass}}[M_{\odot}/\mathrm{pc^2}]$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    colormap = plt.cm.gist_ncar_r
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    # one profile per snapshot along the main branch
    for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
        Gal=MainBranch[MainBranch['SnapNum']==jj]
        if(len(Gal)>0):
            #print(Gal['SnapNum'])
            Sigma=np.zeros(RNUM,dtype=np.float32)
            for ii in range(0,RNUM):
                Mass=Gal['DiskMassRings'][0][ii]*1e10/Hubble_h
                # ring mass over pi r^2 (radius converted from kpc to pc)
                Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
            subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
    #Stars Bulge
    bin=2.
    subplot=plt.subplot(grid[1])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$r[\mathrm{kpc}]$'
    ylab='$\Sigma_{\mathrm{BulgeMass}}[M_{\odot}/\mathrm{pc^2}]$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    colormap = plt.cm.gist_ncar_r
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
        Gal=MainBranch[MainBranch['SnapNum']==jj]
        if(len(Gal)>0):
            #print(Gal['SnapNum'])
            Sigma=np.zeros(RNUM,dtype=np.float32)
            for ii in range(0,RNUM):
                Mass=Gal['BulgeMassRings'][0][ii]*1e10/Hubble_h
                Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
            subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
    #Stars Combined
    bin=2.
    subplot=plt.subplot(grid[2])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$r[\mathrm{kpc}]$'
    ylab='$\Sigma_{\mathrm{stars}}[M_{\odot}/\mathrm{pc^2}]$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    colormap = plt.cm.gist_ncar_r
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
        Gal=MainBranch[MainBranch['SnapNum']==jj]
        if(len(Gal)>0):
            #print(Gal['SnapNum'])
            Sigma=np.zeros(RNUM,dtype=np.float32)
            for ii in range(0,RNUM):
                Mass=(Gal['DiskMassRings'][0][ii]+Gal['BulgeMassRings'][0][ii])*1e10/Hubble_h
                Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
            subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
    plt.tight_layout()
    plt.savefig('./fig/plots_evo_milkyway_stellar_profiles_disc.pdf')
    #pdf.savefig()
    #plt.close()
    # NOTE(review): 'output' is not defined in this function -- presumably a
    # module-level list collecting figures; verify it exists at call time.
    output.append(fig)
    #***********************
    #* SELECT BULGE GALAXY *
    #***********************
    fig = plt.figure(figsize=(15,4))
    grid = gridspec.GridSpec(1, 3)
    #grid.update(wspace=0.0, hspace=0.0)
    # same velocity window, but bulge-dominated (B/T > 0.95)
    selected_Gal=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['DiskMass']>0.) &
                       (G0_MR['Vvir']/Hubble_h>200.) & (G0_MR['Vvir']/Hubble_h<235.) & (G0_MR['Type']==0)
                       & (G0_MR['BulgeMass']/G0_MR['StellarMass']>0.95)]
    print('Number of Galaxies Selected at z=0: ',len(selected_Gal))
    selected_Gal=selected_Gal[0]
    MainBranch=G_MR[(G_MR['GalID']>=selected_Gal['GalID']) & (G_MR['GalID']<=selected_Gal['MainLeafId'])]
    print('First Galaxy selected ID:',selected_Gal['GalID'],'MainLeafID:',selected_Gal['MainLeafId'])
    print('')
    print('diskmass=%0.4f stellarmass=%0.4f disk fraction=%0.4f' %
          (np.log10(selected_Gal['DiskMass']*1.e10), np.log10(selected_Gal['StellarMass']*1.e10),
           selected_Gal['DiskMass']/selected_Gal['StellarMass'] ))
    # Have a look at the colormaps here and decide which one you'd like:
    # http://matplotlib.org/1.2.1/examples/pylab_examples/show_colormaps.html
    num_plots=len(MainBranch)
    colormap = plt.cm.gist_ncar_r
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    xlim=[0.0,20.0]
    ylim=[0.,3.]
    #Stars Disk
    bin=2.
    subplot=plt.subplot(grid[0])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$r[\mathrm{kpc}]$'
    ylab='$\Sigma_{\mathrm{DiskMass}}[M_{\odot}/\mathrm{pc^2}]$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    colormap = plt.cm.gist_ncar_r
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
        Gal=MainBranch[MainBranch['SnapNum']==jj]
        if(len(Gal)>0):
            #print(Gal['SnapNum'])
            Sigma=np.zeros(RNUM,dtype=np.float32)
            for ii in range(0,RNUM):
                Mass=Gal['DiskMassRings'][0][ii]*1e10/Hubble_h
                Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
            subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
    #Stars Bulge
    bin=2.
    subplot=plt.subplot(grid[1])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$r[\mathrm{kpc}]$'
    ylab='$\Sigma_{\mathrm{BulgeMass}}[M_{\odot}/\mathrm{pc^2}]$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    colormap = plt.cm.gist_ncar_r
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
        Gal=MainBranch[MainBranch['SnapNum']==jj]
        if(len(Gal)>0):
            #print(Gal['SnapNum'])
            Sigma=np.zeros(RNUM,dtype=np.float32)
            for ii in range(0,RNUM):
                Mass=Gal['BulgeMassRings'][0][ii]*1e10/Hubble_h
                Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
            subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
    #Stars Combined
    bin=2.
    subplot=plt.subplot(grid[2])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$r[\mathrm{kpc}]$'
    ylab='$\Sigma_{\mathrm{stars}}[M_{\odot}/\mathrm{pc^2}]$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    colormap = plt.cm.gist_ncar_r
    plt.gca().set_color_cycle([colormap(i) for i in np.linspace(0.2, 1.0, num_plots)])
    for jj in range (0,np.amax(MainBranch['SnapNum'])+1):
        Gal=MainBranch[MainBranch['SnapNum']==jj]
        if(len(Gal)>0):
            #print(Gal['SnapNum'])
            Sigma=np.zeros(RNUM,dtype=np.float32)
            for ii in range(0,RNUM):
                Mass=(Gal['DiskMassRings'][0][ii]+Gal['BulgeMassRings'][0][ii])*1e10/Hubble_h
                Sigma[ii]= Mass/(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
            subplot.plot(RingRadius, np.log10(Sigma), linewidth=1)
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.close()
    return
#end evo_milkyway_stellar_profiles
def test_H2_prescriptions(ThisRedshiftList):
    """Diagnostic plot of the ring-level H2 fraction versus gas surface
    density, compared with the analytic KMT10 prescription.

    Left panel: model f_H2(Sigma_gas) medians in bins of gas-phase
    metallicity, over-plotted on Krumholz, McKee & Tumlinson (2010)
    analytic curves for log10(Z/Zsun) = -2 ... +1.
    Right panel: the same medians in bins of stellar surface density.
    Output: ./fig/plots_test_H2_prescriptions.pdf

    NOTE(review): the Metallicity array is never filled (its computation
    is commented out), so the left-panel binning selects on an all-zero
    array -- only the bin whose window contains 0 receives data.
    NOTE(review): 'bin' is reused both as the selection half-width (0.5)
    and as the histogram bin width (0.1); after the first populated bin
    the selection window shrinks to +/-0.1, which looks unintended.
    NOTE(review): the right-panel bin centres [1,10,100,1000] look like
    linear surface densities, while StellarDensity holds log10 values --
    verify which is intended.
    """
    for ii in range(0,len(ThisRedshiftList)):
        #HII
        xlim=[0.0,4.0]
        ylim=[-2.5,0.2]
        plot_color=['red','purple']
        fig = plt.figure(figsize=(15,5))
        (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
        G0_MR=G_MR[sel]
        G0_MR=G0_MR[(G0_MR['StellarMass']>0.) & (G0_MR['ColdGas']>0.) & (G0_MR['H2fraction']>0.)]
        # flatten the per-ring quantities: element [ring*Ngal + gal]
        SigmaGas=np.zeros(len(G0_MR)*RNUM,dtype=np.float64)
        Fraction=np.zeros(len(G0_MR)*RNUM,dtype=np.float64)
        Metallicity=np.zeros(len(G0_MR)*RNUM,dtype=np.float64)
        StellarDensity=np.zeros(len(G0_MR)*RNUM,dtype=np.float64)
        # NOTE(review): this 'ii' shadows the outer redshift index
        for ii in range(0,RNUM):
            area=(3.14*RingRadius[ii]*RingRadius[ii]*1e6)
            SigmaGas[ii*len(G0_MR):(ii+1)*len(G0_MR)]=np.log10((G0_MR['ColdGasRings'][:,ii]*1e10/Hubble_h)/area)
            Fraction[ii*len(G0_MR):(ii+1)*len(G0_MR)]=G0_MR['H2fractionRings'][:,ii]
            #Metallicity[ii*len(G0_MR):(ii+1)*len(G0_MR)]=np.log10(G0_MR['MetalsColdGasRings'][:,ii]/
            #                                                      G0_MR['ColdGasRings'][:,ii]/0.02)
            StellarDensity[ii*len(G0_MR):(ii+1)*len(G0_MR)]=np.log10((G0_MR['DiskMassRings'][:,ii]*1e10/Hubble_h)/area)
        #SigmaGas=np.log10((G0_MR['ColdGas']*1e10/Hubble_h)/(3.14*G0_MR['GasDiskRadius']*G0_MR['GasDiskRadius']*1e6*1e6))
        #Fraction=np.log10(G0_MR['H2fraction'])
        #Bin by metallicity
        grid = gridspec.GridSpec(1, 2)
        subplot=plt.subplot(grid[0])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='$log_{10}(\Sigma_{\mathrm{gas}}[M_{\odot}\mathrm{pc}^{-2}])$'
        ylab='$log_{10}(f_{\mathrm{H_2}})$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        #KMT10 model
        # analytic curves, one per metallicity (colours paired per metal bin)
        plot_color=['red','red','green','green','blue','blue','black']
        metallicityr=[-2.0,-1.5,-1.0,-0.5,0.0,0.5,1.0]
        metallicityr=np.power(10,metallicityr)
        for ii in range(0,7):
            SigmaH=np.arange(xlim[0],xlim[1]+0.1,0.001)
            # khi, tau, s follow the KMT10 analytic formula for the
            # molecular fraction -- see the paper for the coefficients
            khi=3.1*1.0*(1+3.1*metallicityr[ii]**0.365)/4.1
            tau=0.066*pow(10,SigmaH)*metallicityr[ii];
            s=np.log(1+0.6*khi+0.01*khi*khi)/(0.6*tau);
            fraction=np.log10(1-0.75*s/(1+0.25*s));
            subplot.plot(SigmaH, fraction,color=plot_color[ii], linewidth=2, linestyle='--')
        # model medians in metallicity bins
        plot_color=['red','green','blue','black']
        x_bins=[-1.25,-0.5,0.25,1.0]
        bin=0.5
        sel_property=Metallicity
        for ii in range(0,len(x_bins)):
            sel=((sel_property>x_bins[ii]-bin) & (sel_property<x_bins[ii]+bin))
            if(len(SigmaGas[sel])>0):
                # see NOTE(review) in the docstring: this also shrinks the
                # selection window used in the following iterations
                bin=0.1
                (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles (bin, xlim[0], xlim[1],
                                                                                 SigmaGas[sel], Fraction[sel])
                subplot.plot(x_binned, np.log10(median),color=plot_color[ii], linewidth=2)
        #Bin by stellar surface density
        subplot=plt.subplot(grid[1])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        subplot.set_xlabel(xlab, fontsize=16), subplot.set_ylabel(ylab, fontsize=16)
        plot_color=['red','green','blue','black']
        # the next two assignments are immediately overridden below
        x_bins=[-1.25,-0.5,0.25,1.0]
        bin=0.5
        sel_property=Metallicity
        x_bins=[1.,10.,100.,1000.]
        sel_property=StellarDensity
        for ii in range(0,len(x_bins)):
            # +/-10% window around each stellar-density bin centre
            sel=((sel_property>x_bins[ii]-x_bins[ii]*0.1) & (sel_property<x_bins[ii]+x_bins[ii]*0.1))
            #sel=((sel_property>x_bins[ii]-bin) & (sel_property<x_bins[ii]+bin))
            if(len(SigmaGas[sel])>0):
                bin=0.1
                (x_binned, median, mean, pc16, pc84, rms)=median_and_percentiles (bin, xlim[0], xlim[1],
                                                                                 SigmaGas[sel], Fraction[sel])
                subplot.plot(x_binned, np.log10(median),color=plot_color[ii], linewidth=2)
        #LABELS
        plot_label (subplot, 'label', xlim, ylim,
                    x_percentage=0.15, y_percentage=0.1, color='black', xlog=0, ylog=0,
                    label=prefix_this_model, fontsize=13, fontweight='normal')
        plot_label (subplot, 'line', xlim, ylim,
                    x_percentage=0.04, y_percentage=0.12, color='red', x2_percentage=0.13,
                    xlog=0, ylog=0, linestyle='-', linewidth=2)
        plot_label (subplot, 'label', xlim, ylim, x_percentage=0.55, y_percentage=0.8,
                    color='black', xlog=0, ylog=0, label='Gas Fraction',
                    fontsize=13, fontweight='normal')
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.close()
    return
#end test_H2_prescriptions
def test_rings(ThisRedshiftList):
    """Consistency checks of the ring- and SFH-resolved galaxy arrays.

    For up to 1000 randomly sampled galaxies at ThisRedshiftList[0],
    scatter-plots integrated quantities (x) against the sum of their
    resolved counterparts (y), which should lie on the 1:1 line:

      figure 1 -- total vs. ring-summed masses (ColdGas, DiskMass,
                  BulgeMass) and metal masses;
      figure 2 -- summed chemical elements vs. ring-summed masses, and
                  element #6 total vs. its ring sum;
      figure 3 -- masses vs. sums over the star-formation-history bins
                  (the SFH sums are scaled by (1-0.43), presumably the
                  stellar mass-return fraction -- TODO confirm);
      figure 4 -- metal (and element) masses vs. their SFH sums.

    Only the last figure is written to ./fig/plots_test_rings.pdf; the
    earlier pdf.savefig() calls are commented out, so figures 1-3 are
    currently discarded.

    Bug fixed here: the original code rebound the module-level catalogue
    with 'G_MR = G_MR[sel]', which makes G_MR function-local for the
    whole function and raises UnboundLocalError on the preceding
    select_current_redshift() call.  The selection is now kept in the
    local 'G0'.
    """
    ii=0
    plot_color=['blue','green','red']
    (sel)=select_current_redshift(G_MR, ThisRedshiftList, ii, FullSnapshotList_MR)
    # keep the redshift selection in a local; do NOT assign to G_MR
    # (that would shadow the module-level catalogue and break the call above)
    G0=G_MR[sel]
    # random subsample of at most 1000 galaxies so the scatter plots stay readable
    N=min(len(G0),1000)
    G0_MR=np.random.choice(G0, size=N, replace=False)
    fig = plt.figure(figsize=(15,10))
    grid = gridspec.GridSpec(2, 3)
    #********************
    #* MASS
    #********************
    xlim=[6.0,11.0]
    ylim=[6.0,11.0]
    #Gas Mass (Total vs Rings)
    subplot=plt.subplot(grid[0])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{Cold}}$'
    ylab='$\mathrm{Rings Sum} - M_{\mathrm{Cold}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    MassRings=np.sum(G0_MR['ColdGasRings'],axis=1)
    subplot.scatter(np.log10(G0_MR['ColdGas']*1e10),np.log10(MassRings*1e10),s=1, color='black')
    #Disk Mass (Total vs Rings)
    subplot=plt.subplot(grid[1])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{DiskMass}}$'
    ylab='$\mathrm{Rings Sum} - M_{\mathrm{DiskMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    MassRings=np.sum(G0_MR['DiskMassRings'],axis=1)
    subplot.scatter(np.log10(G0_MR['DiskMass']*1e10),np.log10(MassRings*1e10),s=5, color='black')
    #Bulge Mass (Total vs Rings)
    subplot=plt.subplot(grid[2])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{BulgeMass}}$'
    ylab='$\mathrm{Rings Sum} - M_{\mathrm{BulgeMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    MassRings=np.sum(G0_MR['BulgeMassRings'],axis=1)
    subplot.scatter(np.log10(G0_MR['BulgeMass']*1e10),np.log10(MassRings*1e10),s=5, color='black')
    #********************
    #* METALS
    #********************
    xlim=[5.0,9.0]
    ylim=[5.0,9.0]
    #Mass of Metals in Gas (Total vs Rings)
    subplot=plt.subplot(grid[3])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{MetalsCold}}$'
    ylab='$\mathrm{Rings Sum} - M_{\mathrm{MetalsCold}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    # with detailed enrichment the metals come in three channels that
    # must be summed first (channel meaning defined by the model -- TODO confirm)
    if(opt_detailed_enrichment==1):
        Metals=G0_MR['MetalsColdGas'][:,0]+G0_MR['MetalsColdGas'][:,1]+G0_MR['MetalsColdGas'][:,2]
        MetalsRings=G0_MR['MetalsColdGasRings'][:,:,0] + G0_MR['MetalsColdGasRings'][:,:,1] + \
                    G0_MR['MetalsColdGasRings'][:,:,2]
        MetalsRings=np.sum(MetalsRings,axis=1)
    else:
        Metals=G0_MR['MetalsColdGas']
        MetalsRings=np.sum(G0_MR['MetalsColdGasRings'],axis=1)
    subplot.scatter(np.log10(Metals*1e10), np.log10(MetalsRings*1e10),s=5, color='black')
    #DiskMass Metallicity (Total vs Rings)
    subplot=plt.subplot(grid[4])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{MetalsDiskMass}}$'
    ylab='$\mathrm{Rings Sum} - M_{\mathrm{MetalsDiskMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    if(opt_detailed_enrichment==1):
        Metals=G0_MR['MetalsDiskMass'][:,0]+G0_MR['MetalsDiskMass'][:,1]+G0_MR['MetalsDiskMass'][:,2]
        MetalsRings=G0_MR['MetalsDiskMassRings'][:,:,0] + G0_MR['MetalsDiskMassRings'][:,:,1] + \
                    G0_MR['MetalsDiskMassRings'][:,:,2]
        MetalsRings=np.sum(MetalsRings,axis=1)
    else:
        Metals=G0_MR['MetalsDiskMass']
        MetalsRings=np.sum(G0_MR['MetalsDiskMassRings'],axis=1)
    subplot.scatter(np.log10(Metals*1e10), np.log10(MetalsRings*1e10),s=5, color='black')
    #BulgeMass Metallicity (Total vs Rings)
    subplot=plt.subplot(grid[5])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{MetalsBulgeMass}}$'
    ylab='$\mathrm{Rings Sum} - M_{\mathrm{MetalsBulgeMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    if(opt_detailed_enrichment==1):
        Metals=G0_MR['MetalsBulgeMass'][:,0]+G0_MR['MetalsBulgeMass'][:,1]+G0_MR['MetalsBulgeMass'][:,2]
        MetalsRings=G0_MR['MetalsBulgeMassRings'][:,:,0] + G0_MR['MetalsBulgeMassRings'][:,:,1] + \
                    G0_MR['MetalsBulgeMassRings'][:,:,2]
        MetalsRings=np.sum(MetalsRings,axis=1)
    else:
        Metals=G0_MR['MetalsBulgeMass']
        MetalsRings=np.sum(G0_MR['MetalsBulgeMassRings'],axis=1)
    subplot.scatter(np.log10(Metals*1e10), np.log10(MetalsRings*1e10),s=5, color='black')
    #pdf.savefig()
    #plt.close()
    #**************************************************
    #* Sum Mass in Elements vs Sum Mass in Rings
    #**************************************************
    fig = plt.figure(figsize=(15,10))
    grid = gridspec.GridSpec(2, 3)
    xlim=[7.0,10.0]
    ylim=[7.0, 10.0]
    #Cold Gas elements vs cold gas(Total vs Rings)
    if(opt_detailed_enrichment==1):
        subplot=plt.subplot(grid[0])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='Total - Sum Elements in ColdGas'
        ylab='Rings Sum - ColdGas'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        ElementsSum=np.sum(G0_MR['ColdGas_elements'],axis=1)
        MassRings=np.sum(G0_MR['ColdGasRings']*1e10/Hubble_h ,axis=1)
        subplot.scatter(np.log10(ElementsSum), np.log10(MassRings),s=5, color='red')
    #Disk elements vs DiskMass(Total vs Rings)
    if(opt_detailed_enrichment==1):
        subplot=plt.subplot(grid[1])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='Total - Sum Elements in DiskMass'
        ylab='Rings Sum - DiskMass'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        ElementsSum=np.sum(G0_MR['DiskMass_elements'],axis=1)
        MassRings=np.sum(G0_MR['DiskMassRings']*1e10/Hubble_h ,axis=1)
        subplot.scatter(np.log10(ElementsSum), np.log10(MassRings),s=5, color='red')
    #Bulge elements vs BulgeMass(Total vs Rings)
    if(opt_detailed_enrichment==1):
        subplot=plt.subplot(grid[2])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='Total - Sum Elements in BulgeMass'
        ylab='Rings Sum - BulgeMass'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        ElementsSum=np.sum(G0_MR['BulgeMass_elements'],axis=1)
        MassRings=np.sum(G0_MR['BulgeMassRings']*1e10/Hubble_h ,axis=1)
        subplot.scatter(np.log10(ElementsSum), np.log10(MassRings),s=5, color='red')
    #**************************************************
    #* Mass in Element vs Element in Rings
    #**************************************************
    xlim=[4.0,8.0]
    ylim=[4.0, 8.0]
    #Cold Gas Elements(Total vs Rings)
    if(opt_detailed_enrichment==1):
        subplot=plt.subplot(grid[3])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='Total - ColdGas in Element Nr6'
        ylab='Rings Sum - ColdGas in Element Nr6'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        Elements=G0_MR['ColdGas_elements'][:,6]
        ElementsRings=np.sum(G0_MR['ColdGasRings_elements'][:,:,6],axis=1)
        subplot.scatter(np.log10(Elements), np.log10(ElementsRings),s=5, color='red')
    #DiskMass Elements(Total vs Rings)
    if(opt_detailed_enrichment==1):
        subplot=plt.subplot(grid[4])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='Total - DiskMass in Element Nr6'
        ylab='Rings Sum - DiskMass in Element Nr6'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        Elements=G0_MR['DiskMass_elements'][:,6]
        ElementsRings=np.sum(G0_MR['DiskMassRings_elements'][:,:,6],axis=1)
        subplot.scatter(np.log10(Elements), np.log10(ElementsRings),s=5, color='red')
    #BulgeMass Elements(Total vs Rings)
    if(opt_detailed_enrichment==1):
        subplot=plt.subplot(grid[5])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='Total - BulgeMass in Element Nr6'
        ylab='Rings Sum - BulgeMass in Element Nr6'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        Elements=G0_MR['BulgeMass_elements'][:,6]
        ElementsRings=np.sum(G0_MR['BulgeMassRings_elements'][:,:,6],axis=1)
        subplot.scatter(np.log10(Elements), np.log10(ElementsRings),s=5, color='red')
    plt.tight_layout()
    #pdf.savefig()
    #plt.close()
    #***************************
    #*    TEST SFH ARRAYS      *
    #***************************
    fig = plt.figure(figsize=(15,10))
    grid = gridspec.GridSpec(2, 3)
    xlim=[7.0,11.0]
    ylim=[7.0,11.0]
    # 1:1 reference line
    x=np.arange(xlim[0],xlim[1]+1.0,0.1)
    #******************
    #*      DISC      *
    #******************
    #Disk Mass (Total vs SFH)
    subplot=plt.subplot(grid[0])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{DiskMass}}$'
    ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{DiskMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    sfh_Mass=np.sum(G0_MR['sfh_DiskMass'],axis=1)
    subplot.scatter(np.log10(G0_MR['DiskMass']*1e10),np.log10(sfh_Mass*1e10*(1-0.43)),s=5, color='black')
    #subplot.scatter(np.log10(G0_MR['DiskMass']*1e10),np.log10(sfh_Mass*1e10),s=5, color='black')
    subplot.plot(x,x)
    #Disk Mass RING Nr 2 (Total vs SFH)
    subplot=plt.subplot(grid[1])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{DiskMass}} \mathrm{Ring[2]}$'
    ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{DiskMass}} \mathrm{Ring[2]}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    sfh_MassRing=np.sum(G0_MR['sfh_DiskMassRings'][:,2,:],axis=1)
    subplot.scatter(np.log10(G0_MR['DiskMassRings'][:,2]*1e10),np.log10(sfh_MassRing*1e10*(1-0.43)),s=5, color='black')
    #subplot.scatter(np.log10(G0_MR['DiskMassRings'][:,2]*1e10),np.log10(sfh_MassRing*1e10),s=5, color='black')
    subplot.plot(x,x)
    #Disk Mass (Total SFH vs SFHRings)
    subplot=plt.subplot(grid[2])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    # the x-axis is the summed SFH (not the instantaneous DiskMass), so
    # label it as such (the original label said just 'Total')
    xlab='$\mathrm{Total SFH} - M_{\mathrm{DiskMass}}$'
    ylab='$\mathrm{SFH\;Sum\;All Rings} - M_{\mathrm{DiskMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    sfh_MassRings=np.zeros(len(G0_MR))
    for jj in range(0,RNUM):
        sfh_MassThisRing=G0_MR['sfh_DiskMassRings'][:,jj,:]
        sfh_MassRings+=np.sum(sfh_MassThisRing,axis=1)
    subplot.scatter(np.log10(np.sum(G0_MR['sfh_DiskMass'],axis=1)*1e10),np.log10(sfh_MassRings*1e10),s=5, color='black')
    subplot.plot(x,x)
    #******************
    #*     BULGE      *
    #******************
    #BulgeMass (Total vs SFH)
    subplot=plt.subplot(grid[3])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{BulgeMass}}$'
    ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{BulgeMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    sfh_Mass=np.sum(G0_MR['sfh_BulgeMass'],axis=1)
    subplot.scatter(np.log10(G0_MR['BulgeMass']*1e10),np.log10(sfh_Mass*1e10*(1-0.43)),s=5, color='black')
    #subplot.scatter(np.log10(G0_MR['BulgeMass']*1e10),np.log10(sfh_Mass*1e10),s=5, color='black')
    subplot.plot(x,x)
    #Bulge Mass RING Nr 0 (Total vs SFH)
    # NOTE: the data below use ring index 0; the original axis labels
    # wrongly claimed Ring[2] -- labels corrected to match the data
    subplot=plt.subplot(grid[4])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{BulgeMass}} \mathrm{Ring[0]}$'
    ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{BulgeMass}} \mathrm{Ring[0]}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    sfh_MassRing=np.sum(G0_MR['sfh_BulgeMassRings'][:,0,:],axis=1)
    subplot.scatter(np.log10(G0_MR['BulgeMassRings'][:,0]*1e10),np.log10(sfh_MassRing*1e10*(1-0.43)),s=5, color='black')
    #subplot.scatter(np.log10(G0_MR['BulgeMassRings'][:,0]*1e10),np.log10(sfh_MassRing*1e10),s=5, color='black')
    subplot.plot(x,x)
    #Bulge Mass (Total SFH vs SFHRings)
    subplot=plt.subplot(grid[5])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total SFH} - M_{\mathrm{BulgeMass}}$'
    ylab='$\mathrm{SFH\;Sum\;All Rings} - M_{\mathrm{BulgeMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    sfh_MassRings=np.zeros(len(G0_MR))
    for jj in range(0,RNUM):
        sfh_MassThisRing=G0_MR['sfh_BulgeMassRings'][:,jj,:]
        sfh_MassRings+=np.sum(sfh_MassThisRing,axis=1)
    subplot.scatter(np.log10(np.sum(G0_MR['sfh_BulgeMass'],axis=1)*1e10),np.log10(sfh_MassRings*1e10),s=5, color='black')
    subplot.plot(x,x)
    plt.tight_layout()
    #pdf.savefig()
    #plt.close()
    #***************************
    #*  TEST SFH METAL ARRAYS  *
    #***************************
    fig = plt.figure(figsize=(15,10))
    grid = gridspec.GridSpec(2, 3)
    xlim=[0.0,11.0]
    ylim=[0.0,11.0]
    x=np.arange(xlim[0],xlim[1]+1.0,0.1)
    #******************
    #*      DISC      *
    #******************
    #Metals Disk Mass (Total vs SFH)
    subplot=plt.subplot(grid[0])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{MetalsDiskMass}}$'
    ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{MetalsDiskMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    if(opt_detailed_enrichment==1):
        sfh_metals=np.sum(G0_MR['sfh_MetalsDiskMass'],axis=2)
        metals=np.sum(G0_MR['MetalsDiskMass'],axis=1)
        #sfh_metals=np.sum(G0_MR['sfh_ElementsDiskMass'],axis=2)
        #metals=np.sum(G0_MR['DiskMass_elements'],axis=1)
    else:
        sfh_metals=G0_MR['sfh_MetalsDiskMass']
        metals=G0_MR['MetalsDiskMass']
    sfh_metals=np.sum(sfh_metals,axis=1)
    subplot.scatter(np.log10(metals*1e10),np.log10(sfh_metals*1e10*(1-0.43)),s=5, color='black')
    #subplot.scatter(np.log10(metals),np.log10(sfh_metals),s=5, color='black')
    subplot.plot(x,x)
    #******************
    #*     BULGE      *
    #******************
    #Metals BulgeMass (Total vs SFH)
    subplot=plt.subplot(grid[3])
    subplot.set_ylim(ylim), subplot.set_xlim(xlim)
    xlab='$\mathrm{Total} - M_{\mathrm{MetalsBulgeMass}}$'
    ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{MetalsBulgeMass}}$'
    subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
    if(opt_detailed_enrichment==1):
        sfh_metals=np.sum(G0_MR['sfh_MetalsBulgeMass'],axis=2)
        metals=np.sum(G0_MR['MetalsBulgeMass'],axis=1)
    else:
        sfh_metals=G0_MR['sfh_MetalsBulgeMass']
        metals=G0_MR['MetalsBulgeMass']
    sfh_metals=np.sum(sfh_metals,axis=1)
    subplot.scatter(np.log10(metals*1e10),np.log10(sfh_metals*1e10*(1-0.43)),s=5, color='black')
    subplot.plot(x,x)
    if(opt_detailed_enrichment==1):
        #******************
        #*      DISC      *
        #******************
        #Elements Disk Mass (Total vs SFH)
        subplot=plt.subplot(grid[1])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='$\mathrm{Total} - M_{\mathrm{ElementsDiskMass}}$'
        ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{ElementsDiskMass}}$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        sfh_metals=np.sum(G0_MR['sfh_ElementsDiskMass'],axis=2)
        metals=np.sum(G0_MR['DiskMass_elements'],axis=1)
        sfh_metals=np.sum(sfh_metals,axis=1)
        subplot.scatter(np.log10(metals),np.log10(sfh_metals*(1-0.43)),s=5, color='black')
        subplot.plot(x,x)
        #******************
        #*     BULGE      *
        #******************
        #Elements BulgeMass (Total vs SFH)
        subplot=plt.subplot(grid[4])
        subplot.set_ylim(ylim), subplot.set_xlim(xlim)
        xlab='$\mathrm{Total} - M_{\mathrm{ElementsBulgeMass}}$'
        ylab='$\mathrm{SFH\;Sum} - M_{\mathrm{ElementsBulgeMass}}$'
        subplot.set_xlabel(xlab, fontsize=14), subplot.set_ylabel(ylab, fontsize=14)
        sfh_metals=np.sum(G0_MR['sfh_ElementsBulgeMass'],axis=2)
        metals=np.sum(G0_MR['BulgeMass_elements'],axis=1)
        sfh_metals=np.sum(sfh_metals,axis=1)
        subplot.scatter(np.log10(metals),np.log10(sfh_metals*(1-0.43)),s=5, color='black')
        subplot.plot(x,x)
    #print(sfh_Mass)
    plt.tight_layout()
    current_function = inspect.getframeinfo(inspect.currentframe()).function
    plt.savefig('./fig/plots_'+current_function+'.pdf')
    plt.close()
    return
#end test_rings
brycexu/SAA | 876,173,350,225 | 090e453b27dd02738c23acc8549592e6abcce2b0 | 61593f33cad05b398734aae53856fbf245bf4b0d | /Zero-Shot Classification/SUN/Dataloader.py | 62e57a3dc93eb62a2d299ad415e3abf436adc045 | []
| no_license | https://github.com/brycexu/SAA | 101ed18810da8a19b7669eb8ef188f111848b8da | 1df8702b7741b7c21fdc9b9b24fb01c040ad7c4a | refs/heads/master | 2022-07-04T14:10:41.495093 | 2020-05-14T08:31:39 | 2020-05-14T08:31:39 | 245,337,457 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Author : Bryce Xu
# Time : 2020/2/29
# Function:
from Dataset import SUNDataset
from Datasampler import SUNBatchSampler
import torch
import numpy as np
def dataloader(parser, mode):
    """Build a torch DataLoader over the SUN dataset for the given mode.

    :param parser: parsed argument namespace carrying dataset paths and the
        episode configuration (classes_per_tr/ts, num_per_tr/ts, iterations)
    :param mode: dataset split name; any mode containing 'train' uses the
        training episode settings, everything else uses the test settings
    :return: torch.utils.data.DataLoader driven by a SUNBatchSampler
    """
    dataset = SUNDataset(mode=mode, data_root=parser.image_dir, attribute_root=parser.attributes_file,
                         wmodel_root=parser.word_dir)
    print(mode)
    print(len(np.unique(dataset.label)))
    # The two branches previously duplicated the sampler/loader construction;
    # they differ only in the episode parameters, so select those first.
    if 'train' in mode:
        classes_per_it, num_samples = parser.classes_per_tr, parser.num_per_tr
    else:
        classes_per_it, num_samples = parser.classes_per_ts, parser.num_per_ts
    sampler = SUNBatchSampler(labels=dataset.label, classes_per_it=classes_per_it,
                              num_samples=num_samples, iterations=parser.iterations)
    # Use a name distinct from the function itself to avoid shadowing.
    loader = torch.utils.data.DataLoader(dataset, batch_sampler=sampler)
    return loader
| UTF-8 | Python | false | false | 997 | py | 18 | Dataloader.py | 15 | 0.679037 | 0.672016 | 0 | 23 | 42.347826 | 102 |
MatsKBrun/utils | 13,331,578,496,196 | 825972768786802dd2cec8269444d1d4de7c14c3 | 2fbf230c38185d30b9070436b3e37691f700c459 | /tests/test_permutations.py | 67359d5f46aa8193d9cd37fda31211045ae92797 | []
| no_license | https://github.com/MatsKBrun/utils | df4044e4b1056ab31978dc6a77c26c83637aa540 | edc06637c21f73379387940758f713294dec9b06 | refs/heads/master | 2021-01-19T10:53:55.978391 | 2017-04-05T11:09:14 | 2017-04-05T11:09:14 | 87,912,584 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import unittest
import numpy as np
from utils import permutations
class TestPermutations(unittest.TestCase):
    """Tests for permutations.multinary_permutations."""

    def compare_lists(self, base, length, lst):
        # Every value the generator yields must be present in the expected
        # list, and the total number of yielded values must match exactly.
        seen = 0
        for produced in permutations.multinary_permutations(base, length):
            produced_arr = np.array(produced)
            assert any(np.array_equal(produced_arr, np.array(expected)) for expected in lst)
            seen += 1
        assert seen == len(lst)

    def test_length_2(self):
        # All base-3 combinations of length 2, constructed explicitly.
        base, length = 3, 2
        expected = [[i, j] for i in range(base) for j in range(base)]
        self.compare_lists(base, length, expected)

    def test_base_4(self):
        # All base-4 combinations of length 3, constructed explicitly.
        base, length = 4, 3
        expected = [[i, j, k]
                    for i in range(base)
                    for j in range(base)
                    for k in range(base)]
        self.compare_lists(base, length, expected)
if __name__ == '__main__':
unittest.main()
| UTF-8 | Python | false | false | 1,399 | py | 11 | test_permutations.py | 11 | 0.551108 | 0.543245 | 0 | 46 | 29.413043 | 85 |
QiliWu/web-scraping-with-python | 3,358,664,468,560 | 5ef3f72e0f6bd5b2b38c424b8de9ff60887952b7 | 81147b475e1d475e61d557d6fa680e82f69b2bb4 | /chapter9-04.py | 9381fa00a339674aca6c1dc75cda69ac968646f0 | []
| no_license | https://github.com/QiliWu/web-scraping-with-python | 4dc7b97a045febf37d59583fb43783c87306585f | 2370f5bd1191e87158a68efec51693d40ecbceba | refs/heads/master | 2021-09-21T16:49:17.465744 | 2018-08-29T05:19:53 | 2018-08-29T05:19:53 | 103,244,149 | 0 | 1 | null | false | 2017-09-26T11:36:04 | 2017-09-12T08:32:53 | 2017-09-12T08:33:02 | 2017-09-26T11:34:49 | 521 | 0 | 1 | 3 | Python | null | null | import requests
# Multipart file upload: requests sends 'uploadFile' as form-data.
# NOTE(review): hard-coded local Windows path — only works on the author's machine.
files = {'uploadFile': r'D:\03-CS\web scraping with python\logo.png'}
r = requests.post('http://pythonscraping.com/pages/processing2.php',files =files)
print(r.text) | UTF-8 | Python | false | false | 182 | py | 40 | chapter9-04.py | 35 | 0.747253 | 0.730769 | 0 | 5 | 35.6 | 81 |
schwallie/CarND-Advanced-Lane-Lines | 25,769,813,622 | e73e93501c66df9be24e001d6a1e4587c8284eb9 | 00ae13e0469b8406dc97d7d0c1b5d0f063130004 | /thresholds.py | f084615192962b804ac6b0a6f457c9500fcc147d | []
| no_license | https://github.com/schwallie/CarND-Advanced-Lane-Lines | 794dccc36cbe2644bf57b3be3858c9599c654472 | c09e10e47b6b54a9e77419b4ad51374de1073dcc | refs/heads/master | 2021-05-01T01:59:14.140977 | 2017-02-06T14:10:47 | 2017-02-06T14:10:47 | 79,867,522 | 0 | 0 | null | true | 2017-01-24T01:36:41 | 2017-01-24T01:36:41 | 2017-01-21T20:18:58 | 2017-01-20T00:55:42 | 62,940 | 0 | 0 | 0 | null | null | null | import cv2
import numpy as np
import config
# Use color transforms, gradients, etc., to create a thresholded binary image.
def pipeline(img):
    """Combine gradient and colour thresholds into one binary lane mask.

    Converts *img* (RGB) to HLS and greyscale, thresholds the Sobel-x
    gradient of both the greyscale image and the HLS saturation channel,
    thresholds the S and R channels directly, and ORs all four masks.
    S-channel limits come from config.threshold_configs.

    :param img: RGB image array
    :return: uint8 mask, 255 where any threshold fires, else 0
    """
    saturation = cv2.cvtColor(img, cv2.COLOR_RGB2HLS)[:, :, 2]
    grey = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
    s_min = config.threshold_configs['s_img_min']
    s_max = config.threshold_configs['s_img_max']
    return combined(thresholded_sobel_x(grey),
                    thresholded_sobel_x(saturation),
                    threshold(saturation, s_min, s_max),
                    rgb_binary_threshold_r(img))
def rgb_binary_threshold_r(src_img, thresh=(200, 255)):
    """Return a binary threshold image of the R channel in RGB color space.

    Pixels with thresh[0] < R <= thresh[1] become 1, everything else 0;
    the output has the same dtype/shape as the R channel.
    """
    lo, hi = thresh
    red = src_img[:, :, 0]
    mask = (red > lo) & (red <= hi)
    out = np.zeros_like(red)
    out[mask] = 1
    return out
def combined(sobel_x, sobelx_simg, s_thresh, r_thresh):
    """Combine four binary masks into a single uint8 mask.

    :param sobel_x: thresholded Sobel-x gradient of the greyscale image
    :param sobelx_simg: thresholded Sobel-x gradient of the S channel
    :param s_thresh: thresholded S channel
    :param r_thresh: thresholded R channel (previously missing from the docs)
    :return: uint8 array, 255 where any input mask is positive, else 0
    """
    # Renamed the local from 'combined' so it no longer shadows the function.
    out = np.zeros_like(sobel_x, dtype=np.uint8)
    out[(sobel_x > 0) | (sobelx_simg > 0) | (s_thresh > 0) | (r_thresh > 0)] = 255
    return out
def thresholded_sobel_x(img):
    """
    Return a thresholded sobel

    Computes the absolute x-gradient with cv2.Sobel, rescales it to 0-255,
    and binarises it with the limits from config.threshold_configs.
    NOTE(review): np.max(abs_sobelx) == 0 on a perfectly flat image would
    divide by zero — confirm inputs always have some gradient.

    :param img: single-channel image array
    :return: float32 binary mask (1 where the scaled gradient is in range)
    """
    abs_sobelx = np.absolute(cv2.Sobel(img, cv2.CV_64F, 1, 0)) # Set kernal size?
    scaled_sobel = np.uint8(255 * abs_sobelx / np.max(abs_sobelx))
    return threshold(scaled_sobel, config.threshold_configs['sobel_min'], config.threshold_configs['sobel_max'])
def threshold(image, thresh_min, thresh_max):
    """
    Binarise *image*: 1.0 where thresh_min <= value <= thresh_max, else 0.0.

    :param image: numeric array
    :param thresh_min: inclusive lower bound
    :param thresh_max: inclusive upper bound
    :return: float32 array of 0s and 1s with the same shape as *image*
    """
    in_range = (image >= thresh_min) & (image <= thresh_max)
    return np.where(in_range, np.float32(1), np.float32(0))
| UTF-8 | Python | false | false | 2,042 | py | 8 | thresholds.py | 7 | 0.643976 | 0.624878 | 0 | 69 | 28.594203 | 112 |
Dannyzen/karmapolice | 6,923,487,311,642 | 08279bd2e255b41ad977cf5c64ebeebabf7607fb | 26019d74b05efb248df5e7e3046bc781de61722e | /db.py | 900e1f9e575c10756e6e88e0560028875a2844e6 | [
"MIT"
]
| permissive | https://github.com/Dannyzen/karmapolice | 7dd9db819e261abfc6a325bbdd634ce44422ab57 | f422ab8f800f273af7529536eca4c8dd99e6e87e | refs/heads/master | 2016-08-04T06:19:43.250731 | 2013-09-29T15:44:05 | 2013-09-29T15:44:05 | null | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import karmaconfig
from pymongo import MongoClient
from passlib.hash import sha256_crypt # http://pythonhosted.org/passlib/
from time import gmtime, strftime
client = MongoClient(karmaconfig.URI)
db = client[karmaconfig.DBNAME]
def getNow():
    # Current UTC time formatted as 'YYYY-MM-DD HH:MM:SS'.
    fmt = "%Y-%m-%d %H:%M:%S"
    return strftime(fmt, gmtime())
#for troubleshooting
def makeObject(cursor):
    # NOTE(review): the dict is keyed on the *cursor object itself*, so each
    # record overwrites the previous one and the result is a single-entry
    # dict {cursor: last_record}. Probably meant to key on a per-record
    # field — confirm intent before fixing.
    theObject = dict((cursor, record) for record in cursor)
    return theObject
def insertUser(user):
    # Persist a new user document. Note that setHash() both hashes and
    # *mutates* user.password before the insert.
    # db['user'].ensure_index("email",unique=1,background=1)
    # db['user'].ensure_index("username",unique=1,background=1)
    db['user'].insert({"email":user.email, "hash": setHash(user), "karma":user.karma, "username": user.username})
#May want to consider changing $set to '$inc'
def addKarma(email, karma):
    # Despite the name this *replaces* the stored karma with the given value
    # ($set); callers must pass the already-summed total.
    db['user'].update({"email":email},{ "$set": {"karma": karma}})
def updateThanker(email, thanker):
    # Append a {timestamp: thanker} entry to the user's 'thankers' array.
    db['user'].update({"email":email},{"$push":{"thankers":{getNow():thanker}}})
def validateUser(password,email):
    # True when the supplied password matches the hash stored for *email*.
    return verifyPassword(password,getHash(email))
def verifyPassword(password,pass_hash):
    # Check a plaintext password against a passlib sha256_crypt hash.
    return sha256_crypt.verify(password,pass_hash)
def getHash(email):
    # Look up the stored password hash for an email address.
    # NOTE(review): raises TypeError if the email is unknown (find_one -> None).
    cursor = db['user'].find_one({"email":email}, fields ={"hash":1})
    return cursor['hash']
def getEmail(email):
    # Fetch the user's email and karma fields (None if no such user).
    return(db['user'].find_one({"email":email}, fields={"email": 1, "karma": 1}))
def getUserName(username):
    # Fetch just the username field (None if no such user).
    return(db['user'].find_one({"username":username}, fields={"username": 1}))
#use a salt
def setHash(user):
    # Hash the user's plaintext password *in place* and return the hash;
    # user.password is overwritten (sha256_crypt generates a salt internally).
    user.password = sha256_crypt.encrypt(user.password)
    return user.password
| UTF-8 | Python | false | false | 1,565 | py | 5 | db.py | 4 | 0.692013 | 0.68115 | 0 | 48 | 31.604167 | 113 |
ginakirkish/henrylab_scripts | 19,524,921,339,093 | 765fdad01e6f9bc71f0d7544228362e51097e574 | 45e7fc0c586729a9fbff0cd5ec036db292264bad | /jim/nii_to_jim_roi.py | 2980859d5474c5a3f8557adf6097e3581276890c | []
| no_license | https://github.com/ginakirkish/henrylab_scripts | 5a11dc96ed03f85e00b7330d26ee776b6f93ac63 | 5e3009180a731ccd10c851668b00234e6e6728a5 | refs/heads/master | 2020-12-12T17:48:48.280077 | 2020-02-10T19:22:42 | 2020-02-10T19:22:42 | 234,189,521 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import nibabel as nib
import numpy as np
import scipy.ndimage.interpolation as wezoom
import glob
import os
import sys
import argparse
import copy
def load(image):
    # Load a NIfTI file and return (affine, data array, header).
    # NOTE(review): get_header/get_affine/get_data are deprecated nibabel
    # accessors — newer code uses the .header/.affine attributes and
    # get_fdata(); confirm the installed nibabel version still allows them.
    img = nib.load(image)
    img_header = img.get_header()
    img_affine = img.get_affine()
    img_data = np.array(img.get_data())
    return img_affine, img_data, img_header
def getpts(nii):
    """Return one [x, y, z] coordinate per slice that contains a voxel == 1.

    Only the first matching voxel (in np.where order) of each axial slice
    is kept; slices without a 1-valued voxel are skipped.
    """
    _aff, volume, _hdr = load(nii)
    points = []
    for z in range(volume.shape[2]):
        xs, ys = np.where(volume[:, :, z] == 1)
        if xs.size:
            point = [xs[0], ys[0], z]
            print('point found at', point)
            points.append(point)
    return points
def nii2jim(nii_coords, img_shape, voxdims):
    """Convert [x, y, z] voxel coordinates into Jim's centred mm coordinates.

    x is offset from the image centre and scaled by the voxel width; y is
    flipped (centre minus y) and scaled by the voxel height; z is untouched.
    The input list is not modified.
    """
    jim_coords = copy.deepcopy(nii_coords)
    half_x = img_shape[0] / 2.0
    half_y = img_shape[1] / 2.0
    for i, (x, y, _z) in enumerate(nii_coords):
        jim_coords[i][0] = (x - half_x) * voxdims[0]
        jim_coords[i][1] = (half_y - y) * voxdims[1]
    return jim_coords
def make_roi(coords, roi, img, out_roi):
    """Rewrite a Jim .roi template with new marker coordinates.

    For each marker in the template, the Slice= line receives the (1-based)
    z index and the X=/Y= line receives the converted x/y values; the Image
    source line is pointed at *img*. The template is truncated after the
    'End Marker' that follows the last coordinate, and the result is written
    to *out_roi*.
    """
    roifile = open(roi, 'r')
    lines = roifile.readlines()
    shape_num = 0
    for idx,line in enumerate(lines):
        if 'Slice=' in line:
            print('idx', idx)
            print('lines idx', lines[idx])
            print('len of coords', len(coords), 'shape num', shape_num)
            # Jim slice numbers are 1-based, nii z indices are 0-based.
            lines[idx] = 'Slice={}\n'.format(str(coords[shape_num][2]+1)) #Makes this line now the z coord from nii
        if 'Begin Shape' in line:
            # 'Begin Shape' precedes the X=/Y= line, so shape_num-1 below
            # indexes the coordinate belonging to the current shape.
            shape_num += 1
        if 'X=' in line:
            lines[idx] = ' X={}; Y={}\n'.format(str(coords[shape_num-1][0]), str(coords[shape_num-1][1]))
        if 'Image source' in line:
            lines[idx] = 'Image source="{}"'.format(img)
        if 'End Marker' in line and shape_num == len(coords):
            # Drop any template content after the last needed marker.
            lines = lines[:idx+1]
            break
    roifile.close()
    print('saving roi file at', out_roi)
    with open(out_roi, 'w') as file:
        file.writelines(lines)
def nii_to_jim(nii, img, out, roi):
    # End-to-end conversion: extract marker voxels from *nii*, convert them
    # to Jim coordinates using *img*'s shape and voxel sizes, then write a
    # Jim .roi file at *out* based on the template *roi*.
    nii_coords = getpts(nii)
    img_aff, img_data, img_hdr = load(img)
    vox_dims = img_hdr.get_zooms()
    jim_coords = nii2jim(nii_coords, img_data.shape, vox_dims)
    make_roi(jim_coords, roi, img, out)
if __name__ == '__main__':
    # Command-line entry point: all four flags are required for a useful run.
    parser = argparse.ArgumentParser('This script converts from nifti file to jim.roi file, however it requires a template roi file to exist. It essentially overwrites the original roi file with the new data that is passed via the nifti file')
    parser.add_argument('-roi', help='input roi file')
    parser.add_argument('-nii', help='nii file to convert')
    parser.add_argument('-img', help='image nii file used to create roi')
    parser.add_argument('-out', help='location of output roi file')
    args = parser.parse_args()
    nii_to_jim(args.nii, args.img, args.out, args.roi)
| UTF-8 | Python | false | false | 2,879 | py | 130 | nii_to_jim_roi.py | 127 | 0.608892 | 0.598124 | 0 | 75 | 37.386667 | 243 |
jrsavage29/NetApps_Assignment1_42 | 9,225,589,797,332 | f4c722f3344a8b9fa154e91d90ed8fda202a25fe | a0957feb6f5ffd285be806872d468747e6d90fe1 | /server.py | dc5ded7db0114831ba825ea93833580f8e882a5d | []
| no_license | https://github.com/jrsavage29/NetApps_Assignment1_42 | 83d69642bb1fb2244485d0805fff934d61ed44f1 | ffe4031faeaa91d2f86f6798ab653c13d08326bd | refs/heads/main | 2023-03-04T01:02:08.893489 | 2021-02-14T14:23:58 | 2021-02-14T14:23:58 | 337,454,150 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
import sys
import socket
import pickle
import wolframalpha
import hashlib
import os
from cryptography.fernet import Fernet
from playsound import playsound
from ServerKeys import ibmTextToSpeech_key
from ServerKeys import ibmTextToSpeech_url
from ServerKeys import wolframAPI_key
from ibm_watson import TextToSpeechV1
from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
def main(argv):
    """Run the question-answering server loop.

    Parses -sp/-z from *argv*, then accepts client connections forever.
    Each client sends a pickled (key, encrypted_question, md5_checksum)
    tuple; the question is decrypted, spoken via IBM TTS, answered through
    Wolfram Alpha, and the answer is encrypted and sent back in the same
    tuple format.
    """
    server_port = None
    socket_size = None
    # Take the list of arguments from the command line and parse them
    if len(argv) != 4:
        # if we are missing arguments then we get an error output the correct usage
        print("usage: server.py -sp <SERVER_PORT> -z <SOCKET_SIZE>")
        sys.exit(1)
    # parsing the arguements and putting them into their respective variables
    for argpos in range(len(argv)):
        if argv[argpos] == '-sp':
            server_port = argv[argpos + 1]
        elif argv[argpos] == '-z':
            socket_size = argv[argpos + 1]
    # Display what was parsed
    print("Server Port is " + str(server_port) + "\nSocket Size is " + str(socket_size))
    # setup of IBM authenticators
    api = IAMAuthenticator(ibmTextToSpeech_key)
    text_to_speech = TextToSpeechV1(authenticator=api)
    text_to_speech.set_service_url(ibmTextToSpeech_url)
    # setup wolfram alpha api key
    clientWolf = wolframalpha.Client(wolframAPI_key)
    # Setup connection to client
    host = ''
    port = int(server_port)
    size = int(socket_size)
    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    s.bind((host, port))
    print("[Server 01] - Created socket at " + str(s.getsockname()[0]) + " on port " + str(server_port))
    s.listen()
    while True:
        print("[Server 02] - Listening for client connections")
        client, address = s.accept()
        client_ip, client_port = address
        print("[Server 03 ] - Accepted client connection from " + str(client_ip) + " on port " + str(client_port))
        # receiving pickled data from client
        pickled_data = client.recv(size)
        # print(pickled_data)
        # unpickling data
        # NOTE(review): pickle.loads on raw network bytes executes arbitrary
        # code from a malicious peer — consider JSON or another safe format.
        data = pickle.loads(pickled_data)
        print("[Server 04] - Received data: ", data)
        if data:
            # getting data from the tuple payload
            question_key, encrypted_question, question_check_sum = data
            print("[Server 05] - Decrypt Key: ", question_key)
            # print("Question:")
            # print("key: ", question_key, "\nEncrypted question: ", encrypted_question, "\nCheck sum", question_check_sum)
            # check sum to verify with the received one
            verify_check_sum = hashlib.md5(encrypted_question)
            # print(verify_check_sum.hexdigest())
            if verify_check_sum.hexdigest() == question_check_sum:
                # fernet instance for encoding and decoding
                fernet = Fernet(question_key)
                question = fernet.decrypt(encrypted_question).decode()
                print("[Server 06] - Plain Text: ", question)
                # create query audio file and play it
                with open("Question.mp3", "wb") as audiofile:
                    audiofile.write(
                        text_to_speech.synthesize(question,
                                                  voice='en-US_AllisonV3Voice',
                                                  accept='audio/mp3'
                                                  ).get_result().content)
                print("[Server 07] - Speaking Question: ", question)
                playsound('Question.mp3')
                os.remove('Question.mp3')
                # generate response to question through wolfram and display it to the console
                print("[Server 08] - Sending question to Wolframalpha")
                response = clientWolf.query(question)
                answer = next(response.results).text
                print("[Server 09] - Received answer from Wolframalpha: ", answer)
                # encrypt the answer
                # NOTE(review): answer_key is a freshly generated key, but the
                # answer below is encrypted with the *question* key held by
                # `fernet` — the key sent in the payload cannot decrypt the
                # answer. Confirm which key the client actually uses.
                answer_key = fernet.generate_key()
                print("[Server 10] - Encryption Key: ", answer_key)
                encrypted_answer = fernet.encrypt(answer.encode())
                print("[Server 11] - Cipher Text: ", encrypted_answer)
                answer_check_sum = hashlib.md5(encrypted_answer)
                print("[Server 12] - Generated MD5 Checksum: ", answer_check_sum)
                # tuple payload to send to server
                pickle_tuple = (answer_key, encrypted_answer, answer_check_sum.hexdigest())
                print("[Server 13] - Answer payload: ", pickle_tuple)
                # print("key: ", answer_key, "\nEncrypted Answer: ", encrypted_answer, "\nCheck sum: ", answer_check_sum.hexdigest())
                # pickling the data
                pickle_string = pickle.dumps(pickle_tuple)
                print("[Server 14] - Sending answer: ", pickle_string)
                # sending the pickle to the server
                client.send(pickle_string)
        client.close()
if __name__ == "__main__":
main(sys.argv[1:]) | UTF-8 | Python | false | false | 5,259 | py | 6 | server.py | 4 | 0.577676 | 0.56912 | 0 | 119 | 42.210084 | 133 |
huesoluis/insercionfp | 19,250,043,421,932 | 7cfae7ccf3d0a421c2b32873c3de4dffa822cbd2 | 1d955ee87f83bd4541ce81b426b4bd209b8d3290 | /scripts/carga_junio19/databaseconfig.py | 9ed23908f4c2d4cfd1b656ff3f8f5f47502f19ba | []
| no_license | https://github.com/huesoluis/insercionfp | 868272c36b873ff6bf0894e93c09e5d1d4afe521 | de4c061cf08496ecd77a82c75d9e623324fde184 | refs/heads/master | 2021-07-03T02:31:52.697258 | 2020-02-12T12:04:49 | 2020-02-12T12:04:49 | 240,000,091 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | usuario='root'
# NOTE(review): real credentials committed in plain text — load these from
# environment variables or an untracked local config file instead.
password='Suricato1.fp'
database='INSERCIONFP'
| UTF-8 | Python | false | false | 62 | py | 1,690 | databaseconfig.py | 67 | 0.790323 | 0.774194 | 0 | 3 | 19.666667 | 23 |
cnsunfocus/subprice | 18,494,129,183,888 | 8fc706d0143d9ecb2d033a5ba22e2139f60cf8ea | b6feb3bf2b006a815cea6e56c092f44ff8d2f557 | /weistock-lme/calc_price_sub_result.py | ea80fdcd01ac41c7dfbb7c4918b73127cc51fa9e | []
| no_license | https://github.com/cnsunfocus/subprice | 1db6810d6af1161d7ad990c51a03986464ae84b3 | 17e6952839759410a074a2eb0738a6e2d3ae25eb | refs/heads/master | 2021-08-31T01:49:35.277685 | 2017-12-20T04:49:10 | 2017-12-20T04:49:10 | 114,843,950 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import MySQLdb
import time
import datetime
import models
import db_util
import utils
dbutil = db_util.DbUtil()
def parse_data(rs):
    # Map a (date, open, close) DB row onto a FutureData record.
    data = models.FutureData()
    data.date, data.open, data.close = rs[0], rs[1], rs[2]
    return data
def calc_sub_price(latest_datetime, end_time = None):
    # Recompute the CTP-minus-LME close spread for every ctp_cu_1min row at
    # or after latest_datetime (skipping 09:00:00 rows and rows whose close
    # is 5000), and insert or refresh the result in sub_price_1min.
    # NOTE(review): all SQL is built with %-string interpolation; this is
    # only safe while the interpolated values come from trusted internal
    # sources.
    sql = "select date, open,close from %s where date >= '%s' and date not like '%%09:00:00' and close != 5000 order by date asc"
    ctp_sql = sql % ('ctp_cu_1min', latest_datetime)
    sub_price_sql = "insert into sub_price_1min (date, close) values ('%s',%s)"
    query_sql = "select count(1) from sub_price_1min where date = '%s'"
    # Spread = CTP close minus the latest LME close at or before that minute.
    calc_sub_sql = "select c.close - l.close from lme_cu_1min l, ctp_cu_1min c where l.date <='%s' and c.date = '%s' order by l.date desc limit 1"
    # calc_sub_sql = "select (c.close - l.close*cx.currency/100.0) ,c.close, l.close, cx.currency as sub_price from lme_cu_1min l, ctp_cu_1min c, (select currency from t_currency cc where cc.date <= '%s' order by date desc limit 1) as cx where l.date <='%s' and c.date = '%s' order by l.date desc limit 1;"
    del_sql = "delete from ctp_cu_1min where date='%s'"
    cur = dbutil.conn.cursor()
    cur.execute(ctp_sql)
    rows = cur.fetchall()
    for row in rows:
        data = parse_data(row)
        if end_time and data.date > end_time:
            break
        cur.execute(query_sql % data.date)
        count = cur.fetchone()[0]
        if count == 0:
            cur.execute(calc_sub_sql % (data.date, data.date))
            result = cur.fetchone()
            if result is None:
                # No matching LME row: drop the orphan CTP row and move on.
                cur.execute(del_sql % data.date)
                continue
            sub_price = result[0]
            cur.execute(sub_price_sql % (data.date, sub_price))
        else:
            # Row already present: refresh its value.
            # NOTE(review): unlike the insert path, a None result here would
            # raise on result[0] — confirm this branch cannot miss.
            cur.execute(calc_sub_sql % (data.date, data.date))
            result = cur.fetchone()
            sub_price = result[0]
            cur.execute("update sub_price_1min set close=%s, date='%s' where date = '%s'" % (sub_price, data.date, data.date))
    cur.close()
if __name__ == '__main__':
    # Poll loop (Python 2): every 6 seconds recompute spreads from the
    # latest recorded point, recycling the DB connection on any failure.
    latest_datetime = None
    while True:
        try:
            #if latest_datetime is None:
            latest_datetime = dbutil.get_price_sub_latestdate('', '')
            if latest_datetime is None:
                latest_datetime = datetime.datetime.fromtimestamp(0)
            open_time, close_time = utils.get_open_close_time()
            if latest_datetime >= open_time: # If the latest spread timestamp is after the open, recompute from the open so no intermediate rows are missed.
                latest_datetime = open_time
            calc_sub_price(latest_datetime)
            time.sleep(6)
        except Exception as e:
            # Recycle the connection and keep polling.
            dbutil.close()
            dbutil = db_util.DbUtil()
            print e
| UTF-8 | Python | false | false | 2,857 | py | 25 | calc_price_sub_result.py | 18 | 0.583184 | 0.569889 | 0 | 82 | 32.939024 | 306 |
quanted/hms_app | 4,458,176,075,952 | 6ed18ac27ff051df3b563280e3448d8cc622e404 | 150315b4be1e22c7c04f2d55a1e08d4c38b130c3 | /views/submodule_pages.py | d864c474b4ff5f2aa4ba04939f9cebe9ac3ba4d7 | []
| no_license | https://github.com/quanted/hms_app | d15b3d2761215abca2b53be2af66578acaa3b908 | 7a8aec3b2eccadcfccc77b6eb7ba0f203327df7a | refs/heads/dev | 2023-09-02T17:47:34.412377 | 2023-08-16T14:02:00 | 2023-08-16T14:02:00 | 78,229,704 | 1 | 0 | null | false | 2021-08-13T13:33:14 | 2017-01-06T18:40:22 | 2021-08-10T20:26:39 | 2021-08-13T13:33:13 | 494 | 0 | 0 | 0 | Python | false | false | """
Router to direct requests for a specified model/submodule overview
"""
from . import meteorology_submodels as met_submodels
from . import hydrology_submodels as hydro_submodels
from . import hydrology_submodels_algorithms as hydro_submodel_algor
import hms_app.models.precip_workflow.precip_compare_overview as precip_compare
import hms_app.models.precip_workflow.precip_extraction_overview as precip_extract
import hms_app.models.precip_workflow.precip_compare_parameters as pcp
import hms_app.models.workflow.workflow_overview as workflow
import hms_app.models.workflow.streamflow_overview as streamflow
import hms_app.models.workflow.time_of_travel_overview as tot
import hms_app.models.workflow.workflow_parameters as wk_parameters
from . import precip_compare_setup
from django.http import HttpResponse
from django.template.loader import render_to_string
from .default_pages import error_404_page, build_overview_page, build_input_page, build_algorithms_page, build_output_page
import logging
# Valid submodule slugs per model; 'overview' is the landing page for each.
hydrology_submodules = ['overview', "evapotranspiration", "soilmoisture", "surfacerunoff", "subsurfaceflow"]
hydrodynamic_modules = ['overview', "constant_volume", "changing_volume", "kinematic_wave"]
meteorology_submodules = ['overview', "precipitation", "radiation", "solarcalculator", "temperature", "wind", "humidity"]
def get_overview(request, model=None, submodule=None):
    """
    Dynamically build the submodule overview page.

    Routes on (model, submodule): meteorology/hydrology delegate to their
    submodel description helpers; workflow submodules each render their own
    import/description blocks; any other model yields a 404.

    :param request: Django HttpRequest
    :param model: model slug from the URL (lower-cased here)
    :param submodule: submodule slug from the URL (lower-cased here)
    :return: HttpResponse with the rendered overview page
    """
    model = str(model).lower()
    submodule = str(submodule).lower()
    logging.info("hms page request, model: " + model + "; submodule: " + submodule)
    # NOTE(review): duplicates the log line on stdout — likely leftover debug.
    print("hms page request, model: " + model + "; submodule: " + submodule)
    title = "{} - {}".format(model.capitalize(), submodule.replace("_", " ").capitalize())
    # Base URL of this deployment, passed to the description builders.
    p = request.scheme + "://" + request.get_host()
    import_block = None
    top = False
    if submodule == "overview":
        top = True
    if model == "meteorology":
        if submodule != "overview":
            import_block = render_to_string("{}/{}_imports.html".format(model, submodule))
        description = met_submodels.get_submodel_description(p, submodule)
    elif model == "hydrology":
        if submodule != "overview":
            import_block = render_to_string("{}/{}_imports.html".format(model, submodule))
        description = hydro_submodels.get_submodel_description(p, submodule)
    elif model == "workflow":
        if submodule == "streamflow":
            import_block = render_to_string('workflow/hms_workflow_imports.html')
            description = streamflow.Streamflow.description
        elif submodule == "time_of_travel":
            import_block = render_to_string("workflow/time_of_travel_imports.html")
            description = tot.TimeOfTravel.description
        elif submodule == "overview":
            description = workflow.Workflow.description
        else:
            import_block = render_to_string("workflow/precip_workflow_imports.html", {'SUBMODEL': submodule})
            description = precip_compare_setup.build_overview_page(p, submodule)
    else:
        return error_404_page(request)
    html = build_overview_page(request=request, model=model, submodule=submodule, title=title, import_block=import_block, description=description, top=top)
    response = HttpResponse()
    response.write(html)
    return response
def get_data_request(request, model=None, submodule=None):
    """
    Dynamically build the submodule data request (input form) page.

    Routes on (model, submodule) and renders the matching input form block;
    unknown models/submodules yield a 404.

    :param request: Django HttpRequest
    :param model: model slug from the URL (lower-cased here)
    :param submodule: submodule slug from the URL (lower-cased here)
    :return: HttpResponse with the rendered input page
    """
    model = str(model).lower()
    submodule = str(submodule).lower()
    logging.info("hms page request, model: " + model + "; submodule: " + submodule)
    # NOTE(review): duplicates the log line on stdout — likely leftover debug.
    print("hms page request, model: " + model + "; submodule: " + submodule)
    title = "{} - {}".format(model.capitalize(), submodule.replace("_", " ").capitalize())
    if model == "meteorology":
        import_block = render_to_string("{}/{}_imports.html".format(model, submodule))
        input_model = met_submodels.get_model_input_module(model)
        input_page_func = getattr(input_model, 'get_submodel_form_input')
        input_form = input_page_func(submodule, None)
        input_block = render_to_string('04hms_input_form.html', {'FORM': input_form})
    elif model == "hydrology":
        import_block = render_to_string("{}/{}_imports.html".format(model, submodule))
        input_model = hydro_submodels.get_model_input_module(model)
        input_page_func = getattr(input_model, 'get_submodel_form_input')
        input_form = input_page_func(submodule, None)
        input_block = render_to_string('04hms_input_form.html', {'FORM': input_form})
    elif model == "workflow":
        import_block = render_to_string("workflow/precip_workflow_imports.html", {'SUBMODEL': submodule})
        if submodule == "precip_compare":
            # NOTE(review): 'input' shadows the builtin of the same name.
            input = render_to_string('04hms_input_form_v2.html')
            input_block = render_to_string('04hms_precipcompare_input.html', {'INPUT': input})
        elif submodule == "precip_data_extraction":
            input_form = pcp.PrecipitationCompareFormInput()
            input = render_to_string('04hms_input_form.html', {'FORM': input_form})
            input_block = render_to_string('04hms_precipcompare_input.html', {'INPUT': input})
        elif submodule == "streamflow":
            import_block = render_to_string('workflow/hms_workflow_imports.html')
            input_block = render_to_string('workflow/hms_workflow_input_body.html')
        elif submodule == "time_of_travel":
            import_block = render_to_string("workflow/time_of_travel_imports.html")
            input_form = wk_parameters.TimeOfTravelFormInput()
            # NOTE(review): 'input' is rendered but never used — the form is
            # re-rendered directly into input_block below.
            input = render_to_string('04hms_input_form.html', {'FORM': input_form})
            # input_block = render_to_string('04hms_precipcompare_input.html', {'INPUT': input})
            input_block = render_to_string('04hms_input_form.html', {'FORM': input_form})
        else:
            return error_404_page(request)
    else:
        return error_404_page(request)
    html = build_input_page(request=request, model=model, submodule=submodule, title=title, import_block=import_block, input_block=input_block)
    response = HttpResponse()
    response.write(html)
    return response
def get_algorithms(request, model=None, submodule=None):
    """
    Dynamically build the submodule algorithms page.

    Routes on (model, submodule) and renders the matching algorithms block;
    unknown models/submodules yield a 404.

    :param request: Django HttpRequest
    :param model: model slug from the URL (lower-cased here)
    :param submodule: submodule slug from the URL (lower-cased here)
    :return: HttpResponse with the rendered algorithms page
    """
    model = str(model).lower()
    submodule = str(submodule).lower()
    logging.info("hms page request, model: " + model + "; submodule: " + submodule)
    # NOTE(review): duplicates the log line on stdout — likely leftover debug.
    print("hms page request, model: " + model + "; submodule: " + submodule)
    title = "{} - {}".format(model.capitalize(), submodule.replace("_", " ").capitalize())
    if model == "meteorology":
        import_block = render_to_string("{}/{}_imports.html".format(model, submodule))
        algorithms = render_to_string('hms_submodel_algorithms.html',
                                      {'ALGORITHMS': met_submodels.get_submodel_algorithm(submodule)})
    elif model == "hydrology":
        import_block = render_to_string("{}/{}_imports.html".format(model, submodule))
        algorithms = render_to_string('hms_submodel_algorithms.html',
                                      {'ALGORITHMS': hydro_submodel_algor.get_submodel_description(submodule)})
    elif model == "workflow":
        import_block = render_to_string("workflow/precip_workflow_imports.html", {'SUBMODEL': submodule})
        if submodule == "precip_compare":
            algorithms = render_to_string("hms_submodel_algorithms.html",
                                          {'ALGORITHMS': precip_compare.PrecipCompare.algorithms})
        elif submodule == "precip_data_extraction":
            algorithms = render_to_string('hms_submodel_algorithms.html',
                                          {"ALGORITHMS": precip_extract.PrecipExtract.algorithms})
        elif submodule == "streamflow":
            import_block = render_to_string('workflow/hms_workflow_imports.html')
            algorithms = render_to_string('hms_submodel_algorithms.html',
                                          {"ALGORITHMS": streamflow.Streamflow.algorithms})
        elif submodule == "time_of_travel":
            import_block = render_to_string("workflow/time_of_travel_imports.html")
            algorithms = render_to_string('hms_submodel_algorithms.html',
                                          {"ALGORITHMS": tot.TimeOfTravel.algorithms})
        else:
            return error_404_page(request)
    else:
        return error_404_page(request)
    html = build_algorithms_page(request=request, model=model, submodule=submodule, title=title, import_block=import_block, algorithms=algorithms)
    response = HttpResponse()
    response.write(html)
    return response
def get_output_request(request, model=None, submodule=None, task_id=None):
    """
    Dynamically build the submodule output page.

    Only workflow submodules get special import/output blocks; every other
    model falls back to the generic "<model>/<submodule>_imports.html"
    template (no 404 branch here, unlike the other views).

    :param request: Django HttpRequest
    :param model: model slug from the URL (lower-cased here)
    :param submodule: submodule slug from the URL (lower-cased here)
    :param task_id: id of the task whose output is displayed
    :return: HttpResponse with the rendered output page
    """
    model = str(model).lower()
    submodule = str(submodule).lower()
    logging.info("hms page request, model: " + model + "; submodule: " + submodule)
    # NOTE(review): duplicates the log line on stdout — likely leftover debug.
    print("hms page request, model: " + model + "; submodule: " + submodule)
    title = "{} - {}".format(model.capitalize(), submodule.replace("_", " ").capitalize())
    output_block = None
    advanced = False
    if model == "workflow":
        import_block = render_to_string("workflow/precip_workflow_imports.html", {'SUBMODEL': submodule})
        if submodule == "streamflow":
            import_block = render_to_string('workflow/hms_workflow_imports.html')
            output_block = render_to_string("workflow/hms_workflow_output_body.html")
            advanced = True
        elif submodule == "time_of_travel":
            import_block = render_to_string("workflow/time_of_travel_imports.html")
    else:
        import_block = render_to_string("{}/{}_imports.html".format(model, submodule))
    html = build_output_page(request=request, model=model, submodule=submodule, title=title, import_block=import_block, task_id=task_id, output_block=output_block, advanced=advanced)
    response = HttpResponse()
    response.write(html)
    return response
| UTF-8 | Python | false | false | 10,401 | py | 65 | submodule_pages.py | 61 | 0.656379 | 0.652822 | 0 | 204 | 49.985294 | 182 |
Anu1996rag/Python_Basics | 10,127,532,917,817 | 791d5cd4e6e79443d1c6c5c653f7e4637e66e3f8 | 5225d23ddf7b2e2b18387a4ba656a34c543aae33 | /regular_expressions.py | e871a55d9fa7a36e5f57521b719bfafd2542987a | []
| no_license | https://github.com/Anu1996rag/Python_Basics | 2800f6f4a0cf30533595f0326968da6976e3273d | 215e3c5b7cdfd6f055de1caa287ee0f594b6ac63 | refs/heads/master | 2021-07-22T21:42:22.521692 | 2021-04-10T13:40:10 | 2021-04-10T13:40:10 | 230,445,219 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 24 08:38:53 2020
@author: Gurudas
Regular Expressions
"""
##### Optional matching with the question mark
import re


def show(match):
    """Print the matched text of a search, or 'No match!' when it failed.

    re.search() returns None when the pattern is not found, and calling
    .group() on None raises AttributeError -- this guard prevents that.
    """
    print(match.group() if match else 'No match!')


batRegEx = re.compile(r'bat(wo)?man')   # "(wo)?" makes the "wo" part optional
show(batRegEx.search('batman'))
show(batRegEx.search('batwoman'))

############ GREEDY AND NON-GREEDY MATCHING #################
greedyRegEx = re.compile(r'(ha){3,5}')      # greedy: grabs as many "ha" as possible
show(greedyRegEx.search('hahahahaha'))
nonGreedyRegEx = re.compile(r'(ha){3,5}?')  # trailing "?" makes the repetition lazy
show(nonGreedyRegEx.search('hahahahaha'))

################# CHARACTER CLASSES ########################
xmasRegEx = re.compile(r'\d+\s\w+')
# BUG fix: the findall() result was computed but never printed.
print(xmasRegEx.findall('12 drummers ,10 pipers ,9 ladies'))
# finding out vowels from a string
vowels = re.compile(r'[aeiouAEIOU]')
string = input("Enter a string :")
print(vowels.findall(string))

################ Caret and dollar sign ####################
beginsWith = re.compile(r'^Hello')
show(beginsWith.search('Hello world !'))
endsWith = re.compile(r'good$')
show(endsWith.search('If it ends good , it feels good'))
exactMatch = re.compile(r'^exact$')     # only the exact string "exact" matches
# BUG fix: the next two searches return None (the strings are longer than
# "exact"), and the original called .group() on None, crashing the script.
show(exactMatch.search('Exactly , I need the exact data points which we had earlier.'))
show(exactMatch.search('exact , I need the exact data points which we had earlier.'))
show(exactMatch.search('exact'))

################## THE WILDCARD CHARACTER #############################
atRegEx = re.compile(r'.at')            # "." matches exactly one character
# BUG fix: the original line was missing its closing parenthesis (SyntaxError).
print(atRegEx.findall('there\'s a cat on the mat inside a flat.'))

############# MATCHING EVERYTHING WITH DOT STAR #######################
matchRegEx = re.compile(r'First Name :(.*) , Last Name :(.*)')
show(matchRegEx.search('First Name :Anurag , Last Name :Patil'))
nonGreedyRegEx = re.compile(r'<.*?>')
mo = nonGreedyRegEx.search('<TO serve a man> for dinner >')
print("non greedy search :", mo.group())
greedyRegEx = re.compile(r'<.*>')
mo1 = greedyRegEx.search('<TO serve a man> for dinner >')
print("greedy search :", mo1.group())

################### MATCHING NEWLINES WITH DOT CHARACTER ####################
newLineRegEx = re.compile(r'.*')        # "." stops at newlines by default
print(newLineRegEx.search('this is an example.\nof regex without matching new lines.').group())
newLineRegEx = re.compile(r'.*', re.DOTALL)   # re.DOTALL lets "." cross newlines
print(newLineRegEx.search('this is an example.\nof regex with matching new lines.').group())

############### CASE INSENSITIVE MATCHING ####################
caseInsenstiveRegEx = re.compile(r'robo', re.I)
print(caseInsenstiveRegEx.findall('Robo or robo does not matter.It gives all RoBO ,RoBo ,rObO results'))

############### SUBSTITUTING STRINGS WITH THE sub() METHOD ###########
nameRegEx = re.compile(r'Agent \w+')
# BUG fix: the substitution result was discarded; print it instead.
print(nameRegEx.sub('Censored', 'Agent Bob has been informed by Agent Alice'))
####
agentNameRegEx = re.compile(r'Agent (\w)\w*')
# BUG fix: the replacement must be a raw string; in '\1*******' the '\1'
# is the control character chr(1), not a backreference to group 1.
print(agentNameRegEx.sub(r'\1*******', 'Agent Bob has been informed by Agent Alice'))
| UTF-8 | Python | false | false | 3,171 | py | 61 | regular_expressions.py | 48 | 0.6386 | 0.624409 | 0 | 133 | 22.834586 | 148 |
Blesfia/ai | 2,422,361,596,720 | ac75af002af1ba06b506f68fd5881c7a0bbec47a | 70365931c4d26b54ebe976c3d04c90e87f4f9b4e | /gym/CartPole-v1/model.py | e8a0d6f2e684ae1ec41ef8ba7a79fd88d045668a | []
| no_license | https://github.com/Blesfia/ai | 693b89ce4126f093f10e23037dd8f17b0bc55b52 | 5fc136c229c64c63a6be4a9f3ef7ee04bb93b1fd | refs/heads/master | 2020-05-17T11:31:06.919812 | 2019-05-26T15:13:14 | 2019-05-26T15:13:14 | 183,686,931 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import tensorflow as tf
import numpy as np
class Model:
    """Small fully-connected Keras network: 4 inputs -> 50 -> 50 -> 2 classes."""

    def __init__(self):
        # Two hidden layers of 50 units each, softmax over the 2 output classes.
        network = tf.keras.Sequential()
        network.add(tf.keras.layers.Dense(50, input_shape=(4,)))
        network.add(tf.keras.layers.Dense(50))
        network.add(tf.keras.layers.Dense(2, activation='softmax'))
        network.compile(
            loss='mean_squared_error',
            optimizer='adam',
            metrics=['accuracy', 'mean_squared_error'],
        )
        self.model = network

    def train(self, x, y):
        """Fit the network on (x, y), stopping early once the MSE plateaus."""
        stopper = tf.keras.callbacks.EarlyStopping(monitor='mean_squared_error', patience=3)
        self.model.fit(x, y, verbose=True, epochs=5, batch_size=10, callbacks=[stopper])

    def predict(self, x):
        """Return the index of the most probable class for one input sample."""
        batch = np.array([x])   # the network expects a batch dimension
        return np.argmax(self.model.predict(batch))
| UTF-8 | Python | false | false | 761 | py | 13 | model.py | 9 | 0.587385 | 0.574244 | 0 | 22 | 33.545455 | 95 |
ningyumo/untitled1 | 10,084,583,235,436 | 6d1e1ac1336848d2813aff9ead38e605fd01cd63 | d8ae098f8ef4d24eceed51570c9f7824a36436fa | /myuser/migrations/0009_auto_20190717_1154.py | 76dec66229b5a9f8243b824894dd508b1c396fc9 | []
| no_license | https://github.com/ningyumo/untitled1 | 8a1aeecbb9e0366eb7e1fe4c357c345a14073a8b | 88fd6e7c99f1278b6720ecd11bd8ce3f40359407 | refs/heads/master | 2020-06-15T05:14:17.323767 | 2019-07-24T09:09:46 | 2019-07-24T09:09:46 | 195,212,189 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Generated by Django 2.2.3 on 2019-07-17 11:54
from django.db import migrations
class Migration(migrations.Migration):
    """Remove the ``collect_blog`` field from ``Profile`` and delete the
    ``ProfileCollectBlog`` model (presumably the relation's intermediate
    model -- confirm against migration 0008)."""
    dependencies = [
        ('myuser', '0008_auto_20190716_1703'),
    ]
    operations = [
        # Remove the field first so nothing still references the model
        # deleted below.
        migrations.RemoveField(
            model_name='profile',
            name='collect_blog',
        ),
        migrations.DeleteModel(
            name='ProfileCollectBlog',
        ),
    ]
| UTF-8 | Python | false | false | 416 | py | 55 | 0009_auto_20190717_1154.py | 41 | 0.572115 | 0.497596 | 0 | 20 | 19.8 | 47 |
vanSultan/anal_dev_algo | 14,164,802,162,342 | c4a55c6a23113e4ca5c472bd52c286e068cfc91c | 1f7c1add061cd5b4c50270e941fbc4b52cddd378 | /lab_04/__init__.py | b0eaceccf3654f49e155f6c0a6176702c4e24ed2 | [
"MIT"
]
| permissive | https://github.com/vanSultan/anal_dev_algo | 787b2bf9936652856d5fd67e9c8581ec5eb0a4c6 | e9d6382103080e6f885b1456cc0a3ce64fbe1863 | refs/heads/master | 2023-01-08T19:45:57.630291 | 2020-11-06T22:20:46 | 2020-11-06T22:20:46 | 296,299,748 | 0 | 0 | MIT | false | 2020-11-06T22:20:47 | 2020-09-17T10:56:11 | 2020-10-28T11:58:22 | 2020-11-06T22:20:46 | 3,814 | 0 | 0 | 0 | TeX | false | false | from scipy.optimize import minimize, least_squares, dual_annealing, differential_evolution
from .funcs import get_signal, f_least_squares, rational_approx
# Public API of the package: the scipy.optimize solvers used by the lab
# plus the helper functions re-exported from .funcs.
__all__ = [
    "minimize", "least_squares", "dual_annealing", "differential_evolution",
    "get_signal", "f_least_squares", "rational_approx"
]
| UTF-8 | Python | false | false | 303 | py | 32 | __init__.py | 15 | 0.729373 | 0.729373 | 0 | 8 | 36.875 | 90 |
youngchow/interface_auto_test | 8,710,193,687,100 | 4db45c1ce32071439ed93e389507d28e9af2a1e8 | b151f194c570baaa840f4b208934823f8a8017ed | /practice/suite_case.py | 6fb1a8dbf53a25d98e6088bd9c35aaf3efb48809 | []
| no_license | https://github.com/youngchow/interface_auto_test | 67327893145e61074dcda9aa7cf90cbac1990837 | 39cca1472519cb9d89a86161ec54df82e9477ad2 | refs/heads/master | 2020-06-25T17:01:09.959924 | 2019-07-29T11:06:51 | 2019-07-29T11:06:51 | 199,372,272 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: UTF-8 -*-
import unittest
from libs.HTMLTestRunnerNew import HTMLTestRunner
from scripts.handle_config import do_config
from cases import test_01_register
from datetime import datetime
from scripts.contants import REPORTS_DIR
import os
# Collect every test case from the registration test module into one suite.
suite = unittest.TestSuite()
loader = unittest.TestLoader()
case = loader.loadTestsFromModule(test_01_register)
suite.addTests(case)

# Report file name: configured prefix + timestamp, e.g. "report20200101120000.html".
# (datetime.now().strftime(...) is the idiomatic form of the original
# datetime.strftime(datetime.now(), ...).)
timestamp = datetime.now().strftime('%Y%m%d%H%M%S')
report_name = do_config.get_value('report', 'report_html_name') + timestamp + '.html'
html_full_path = os.path.join(REPORTS_DIR, report_name)

# HTMLTestRunner writes encoded bytes, hence the binary mode.  The handle
# is named report_file instead of the original "file", which shadowed a
# builtin name.
with open(html_full_path, mode='wb') as report_file:
    runner = HTMLTestRunner(stream=report_file, title='验证注册接口', tester='gordon', verbosity=2)
    runner.run(suite)
sfermigier/solace | 12,953,621,404,835 | 987fa49f816f09a03adf90ced543b6ef2700d8c9 | 39cf047af55632c876f559c843fe8ef6575ee77f | /solace/utils/ini.py | 9f53964782d7d079db9d977f9866d911d9366525 | [
"BSD-3-Clause"
]
| permissive | https://github.com/sfermigier/solace | f3806be47e5e47e1853295fa8c6c7bcd36c2910d | 32d0b398643344c797a40a094689300c364de3b0 | refs/heads/master | 2016-09-06T13:05:45.865187 | 2012-06-25T13:53:18 | 2012-06-25T13:53:18 | 4,780,566 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # -*- coding: utf-8 -*-
"""
solace.utils.ini
~~~~~~~~~~~~~~~~
Parses an ini file into a dict.
:copyright: (c) 2009 by Plurk Inc., see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
# PEP 263 encoding cookie, e.g. "# -*- coding: utf-8 -*-"; group 1 captures
# the encoding name itself.
_coding_re = re.compile(r'coding(?:\s*[:=]\s*|\s+)(\S+)')

# The encoding cookie is only honoured within the first two lines.
COOKIE_LIMIT = 2
DEFAULT_ENCODING = 'utf-8'

try:
    _string_types = basestring          # Python 2
except NameError:
    _string_types = str                 # Python 3


def parse_ini(filename_or_fileobj):
    """Parses a config file in ini format into a dict.

    Accepts either a filename or an open file object.  Keys are joined
    with their section name as "section.key"; byte-string values are
    decoded with the encoding cookie found in the leading comments
    (utf-8 otherwise).
    """
    if isinstance(filename_or_fileobj, _string_types):
        f = open(filename_or_fileobj)
        close_later = True
    else:
        f = filename_or_fileobj
        close_later = False
    try:
        result = {}
        encoding = None
        section = ''
        for idx, line in enumerate(f):
            line = line.strip()
            if not line:
                continue
            if line[0] in '#;':
                if encoding is None and idx < COOKIE_LIMIT:
                    # BUG fix: the stripped line still starts with the
                    # comment character, so the anchored match() could
                    # never find the cookie -- use search() instead.
                    match = _coding_re.search(line)
                    if match is not None:
                        # BUG fix: group() returned the whole match
                        # (including the "coding:" prefix); the encoding
                        # name is in group(1).
                        encoding = match.group(1)
                continue
            if line[0] == '[' and line[-1] == ']':
                section = line[1:-1]
                continue
            if '=' in line:
                key, value = line.split('=', 1)
                key = key.rstrip()
                # if we haven't seen an encoding cookie so far, we
                # use the default encoding
                if encoding is None:
                    encoding = DEFAULT_ENCODING
                value = value.lstrip()
                if isinstance(value, bytes):
                    # Python 2 byte strings are decoded to unicode here;
                    # Python 3 text-mode lines are already str.
                    value = value.decode(encoding, 'replace')
            else:
                key = line
                value = u''
            if section:
                key = '%s.%s' % (section, key)
            result[key] = value
    finally:
        if close_later:
            f.close()
    return result
| UTF-8 | Python | false | false | 1,825 | py | 109 | ini.py | 51 | 0.470685 | 0.463562 | 0 | 66 | 26.651515 | 69 |
POOJA-P/sorting | 18,433,999,635,674 | 3f435a7537634413222945ab293482ee6ade926e | 47273e03fdf2c55db47b2df23eeba48df62c2adc | /add.py | e3bc052caa07a5f12a35cba8de3552bdae09b019 | []
| no_license | https://github.com/POOJA-P/sorting | 6ba1a1a8eb1e41d0a118cf8fa81ecece650b85a1 | c79476f08095b2cc4269427402aafe384ca6dc23 | refs/heads/master | 2021-01-25T06:44:35.169426 | 2017-06-07T06:02:35 | 2017-06-07T06:02:35 | 93,598,291 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
print()
print('changing of variables in an array')
a = [1, 2, 3]
print('the given array is')
print(a)
print('the array changed : ')
a[0] = 5
a[2] = 8
print(a)
print()
print()
print('addition of two numbers')
num1 = input('enter the first number : ')
num2 = input('enter the second number : ')
# BUG fix: the result used to be stored in a variable named "sum",
# shadowing the built-in sum() function.
total = float(num1) + float(num2)
print('sum of {0} and {1} is {2}'.format(num1, num2, total))
print()
print()
print('prime numbers in an interval')
lower = 1
upper = 100
print('prime numbers between', lower, 'and', upper, 'are : ')
for num in range(lower, upper + 1):
    if num > 1:
        # Trial division only needs to test divisors up to sqrt(num).
        for i in range(2, int(num ** 0.5) + 1):
            if num % i == 0:
                break
        else:
            # No divisor found -> num is prime (for/else idiom).
            print(num)
| UTF-8 | Python | false | false | 708 | py | 2 | add.py | 2 | 0.601695 | 0.566384 | 0 | 36 | 18.638889 | 62 |
fp-computer-programming/cycle-2-labs-p22crsullivan | 8,186,207,693,540 | 491b2b73c68d748e1d41362fff346b1821e209dd | 1b6addee13a1dd99fd2d1c688b46182e7cc0537e | /lab_2-2.py | 4175e3009338b53ffb34c3817edbb6a835925d1e | []
| no_license | https://github.com/fp-computer-programming/cycle-2-labs-p22crsullivan | 1c1dace719b4dace4c3aacdb1011bd8436137464 | 07d4d7d479ecc635aa0e86ff4ed2b76d567996c9 | refs/heads/master | 2023-07-18T18:45:32.203354 | 2021-09-21T14:05:02 | 2021-09-21T14:05:02 | 406,016,234 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Author: CRS 9/15/21
# Demonstrates that assigning one int variable to another copies the value:
# rebinding the first name afterwards does not affect the second.
first = 3
second = first            # "second" now holds its own copy of 3
print(first, second)      # -> 3 3
first = 7                 # rebind "first" only
print(first, second)      # -> 7 3
| UTF-8 | Python | false | false | 64 | py | 6 | lab_2-2.py | 5 | 0.53125 | 0.421875 | 0 | 6 | 9.666667 | 21 |
namanhnatuli/django_hotel_management_system | 11,295,764,003,457 | b817ae23fb42d5ee5bae03732e8b68e6ad46eb95 | f30aae19c621efc0cb704671b94a6550290fbf7c | /blog/forms.py | 054b5946c6313fbe095569aa844c807acef90f45 | []
| no_license | https://github.com/namanhnatuli/django_hotel_management_system | a95b56bc8bce2fb143dbab219f19b1707c995009 | 9c2133e76e04c55fd085b6e2fd2eb9b195af7a20 | refs/heads/main | 2023-07-20T05:51:53.764670 | 2021-09-04T04:44:26 | 2021-09-04T04:44:26 | 393,999,135 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # Django imports
from django import forms
from django.forms import TextInput, Select, FileInput
# Third-party app imports
from ckeditor.fields import RichTextFormField
from taggit.forms import TagWidget, TagField
# Blog app imports
from blog.models import Article, Category, Comment
class CategoryForm(forms.ModelForm):
class Meta:
model = Category
fields = ["name", "approved"]
widgets = {
'name': forms.TextInput(attrs={'class': 'form-control'}),
}
class ArticleCreateForm(forms.ModelForm):
category = forms.ModelChoiceField(queryset=Category.objects.filter(approved=True), widget=forms.Select())
body = RichTextFormField()
class Meta:
model = Article
fields = ["title", "category", "image", "body", "tags", "status"]
widgets = {
'title': TextInput(attrs={'name': "article-title", 'class': "form-control", 'id': "articleTitle"}),
'image': FileInput(attrs={"class": "clearablefileinput", "type": "file", "id": "articleImage", "name": "article-image"}),
'tags': TextInput(attrs={'name': "tags", 'data-role': "tagsinput", 'placeholder': "Example: sports, game"}),
'status': Select(),
}
class ArticleUpdateForm(forms.ModelForm):
category = forms.ModelChoiceField(queryset=Category.objects.filter(approved=True), widget=forms.Select())
tags = TagField(widget=TagWidget(attrs={'name': "tags", 'placeholder': "Example: sports, game", 'id': "tags", 'data-role': "tagsinput"}))
body = RichTextFormField()
class Meta:
model = Article
fields = ["title", "category", "image", "body", "tags", "status"]
widgets = {
'title': TextInput(attrs={'name': "article-title", 'class': "form-control", 'id': "articleTitle"}),
'image': FileInput(attrs={"class": "clearablefileinput", "type": "file", "id": "articleImage","name": "article-image"}),
'status': Select(),
}
class CommentUpdateForm(forms.ModelForm):
class Meta:
model = Comment
fields = ['approved']
| UTF-8 | Python | false | false | 2,095 | py | 100 | forms.py | 56 | 0.629117 | 0.629117 | 0 | 55 | 37.090909 | 141 |
brunogfranca/redisconf | 18,605,798,344,876 | 440e9a445f870729695a8018261631265a9ca52e | b0657be19d78370d3c74fc86154b0a47dffaf7e0 | /redisconf/config.py | 568bc1eb91c2053c2b34b76aebddc609a5bb08ef | []
| no_license | https://github.com/brunogfranca/redisconf | 05176b8ed304edefa18ea5a7174403c37ecee739 | 9326522b3de56be14f5521f8a192bf9dfbae2a39 | refs/heads/master | 2021-01-17T13:15:37.611483 | 2016-06-22T20:09:43 | 2016-06-22T20:09:43 | 10,104,011 | 1 | 1 | null | false | 2016-06-22T20:09:43 | 2013-05-16T15:15:29 | 2015-09-08T06:49:19 | 2016-06-22T20:09:43 | 172 | 2 | 1 | 0 | Python | null | null | import getpass
import redis
class Config(object):
'''
Basic Usage:
conf = Config('namespace')
conf.setConf('keyname', 'value')
conf.getConf('keyname')
questions = []
questions.append({'key':'mongodb_host','question':"mongodb's host"})
questions.append({'key':'mongodb_port','question':"mongodb's port", 'default':27017})
questions.append({'key':'mongodb_pass','question':"mongodb's password", 'is_password':True})
conf.configureEnvironment(questions)
conf.getEnvironmentConfig()
'''
def __init__(self, namespace, **redisconfs):
self.namespace = namespace
self.conn = redis.StrictRedis(**redisconfs)
def setConf(self, key, value):
return self.conn.set(self.namespace+'.'+key, value)
def getConf(self, key):
return self.conn.get(self.namespace+'.'+key)
def configureEnvironment(self, setup_info):
'''
Usage:
questions = []
questions.append({'key':'mongodb_host','question':"mongodb's host"})
questions.append({'key':'mongodb_port','question':"mongodb's port", 'default':27017})
questions.append({'key':'mongodb_pass','question':"mongodb's password", 'is_password':True})
conf.configureEnvironment(questions)
'''
for question_data in setup_info:
# Set vars
key = question_data['key']
is_password = False
if question_data.has_key('is_password'):
is_password = question_data['is_password']
default = False
if question_data.has_key('default'):
default = question_data['default']
question = question_data['question']
# Check if the user wants to change the current value
change = 'Y'
old_value = self.getConf(key)
if old_value:
change_question = 'Do you want to change the current value for '+question+'?'
if is_password:
old_value = 'OLD PASSWORD'
change_question += ' ('+old_value+')'
change_question += ' [y/N]'
change = raw_input(change_question)
if change == '':
change = 'N'
while change.lower() not in ['y', 'n']:
change = raw_input(change_question)
if change == '':
change = 'N'
value = ''
if change.lower() == 'y':
# Is Password
if is_password:
value = getpass.getpass(question+": ")
else:
if default:
question += ' ('+str(default)+')'
value = raw_input(question+": ")
# Set Default Value
if not value:
if default:
value = default
self.setConf(key, value)
return
def getEnvironmentConfig(self):
keys = self.conn.keys(self.namespace+'.*')
conf = {}
for key in keys:
conf[key.replace(self.namespace+'.', '')] = self.conn.get(key)
return conf
| UTF-8 | Python | false | false | 3,262 | py | 3 | config.py | 2 | 0.509197 | 0.506131 | 0 | 90 | 35.244444 | 104 |
saloot/NeuralNetworkTomography | 8,134,668,104,063 | dfeb0f1da5adb3f0dbfa80125addc6e70503b81b | 31459c357b18a2fadff245d6c9564a07f8da20ca | /Python Codes/Old Versions/Master.py | f9a78ae043e735fe2dad37dd4fe35e6e991bffd4 | []
| no_license | https://github.com/saloot/NeuralNetworkTomography | 0046b5ad9eb0dcd045e9d04e6a898d62786d302c | 8e77cd1854277112a1a6216dc1e7dbce66e88bf3 | refs/heads/master | 2021-04-24T18:03:40.044515 | 2019-12-04T07:00:12 | 2019-12-04T07:00:12 | 20,876,869 | 4 | 4 | null | null | null | null | null | null | null | null | null | null | null | null | null | #=======================DEFAULT VALUES FOR THE VARIABLES=======================
n_exc_default = 160                     # number of excitatory neurons
n_inh_default = 40                      # number of inhibitory neurons
connection_prob_default = 0.2           # probability of a connection between two neurons
frac_input_neurons_default = 0.3        # fraction of stimulated input neurons
no_cascades_default = 8000              # number of stimulation cascades
ensemble_size_default = 10              # number of random graphs to generate
delay_max_default = 1.0                 # maximum synaptic delay (ms)
ensemble_count_init_default = 0         # ensemble index to start the simulation from
network_type_default = 'F'              # 'F' = feed-forward; 'R' = delayed-cascades pipeline
OS_default = 'W'                        # operating-system flag
folder_codes_default = './' #'C:\Python27'
#==============================================================================

#================================INSTRUCTIONS==================================
# Command-line help text shown for the -h flag.  (The original was missing
# a space between "and" and "the quantity", and misspelled "probability".)
help_message = "\n"
help_message = help_message + "\n"
help_message = help_message + "###################################INSTRUCTIONS################################\n"
help_message = help_message + "Here is how to use the code: you have to specify the option flag and "
help_message = help_message + "the quantity right afterwards.\nExample: -E 100 for setting a network with 100 excitatory neurons. "
help_message = help_message + "The full list of options are as follows:\n"
help_message = help_message + "-E xxx: To specify the number of excitatory neurons. Default value = %s.\n" %str(n_exc_default)
help_message = help_message + "-I xxx: To specify the number of inhibitory neurons. Default value = %s.\n" %str(n_inh_default)
help_message = help_message + "-P xxx: To specify the probability of having a connection between two neurons. Default value = %s.\n" %str(connection_prob_default)
help_message = help_message + "-Q xxx: To specify the fraction of stimulated input neurons. Default value = %s.\n" %str(frac_input_neurons_default)
help_message = help_message + "-T xxx: To specify the number of considered cascades. Default value = %s.\n" %str(no_cascades_default)
help_message = help_message + "-D xxx: To specify the maximum delay for the neural connections in milliseconds. Default value = %s.\n" %str(delay_max_default)
help_message = help_message + "-S xxx: To specify the number of generated random graphs. Default value = %s.\n" %str(ensemble_size_default)
help_message = help_message + "-A xxx: To specify the folder that stores the generated data. Default value = %s. \n" %str(folder_codes_default)
help_message = help_message + "-F xxx: To specify the ensemble index to start simulation. Default value = %s. \n" %str(ensemble_count_init_default)
help_message = help_message + "-N xxx: To specify the network type to simulate. Default value = %s. \n" %str(network_type_default)
help_message = help_message + "-O xxx: The operating system. Default value = %s. \n" %str(OS_default)
help_message = help_message + "#################################################################################"
help_message = help_message + "\n"
#==============================================================================
#=======================IMPORT THE NECESSARY LIBRARIES=========================
from brian import *
import time
import numpy as np
import os
import sys,getopt,os
from auxiliary_functions import *
import subprocess
#==============================================================================
#==========================PARSE COMMAND LINE ARGUMENTS========================
# BUG fix: the original hard-coded os.system('clear'), which fails on
# Windows (and OS_default is 'W'); pick the right command per platform.
os.system('cls' if os.name == 'nt' else 'clear')                # Clear the commandline window
t0 = time.time()                                                # Initialize the timer
# Parse the command-line options; see help_message for their meaning.
input_opts, args = getopt.getopt(sys.argv[1:],"hE:I:P:Q:T:S:B:D:A:F:N:O:")
if (input_opts):
    for opt, arg in input_opts:
        if opt == '-E':
            n_exc = int(arg)                            # number of excitatory neurons
        elif opt == '-I':
            n_inh = int(arg)                            # number of inhibitory neurons
        elif opt == '-P':
            connection_prob = float(arg)                # connection probability
        elif opt == '-Q':
            frac_input_neurons = float(arg)             # fraction of stimulated input neurons
        elif opt == '-T':
            no_cascades = int(arg)                      # number of stimulation cascades
        elif opt == '-S':
            ensemble_size = int(arg)                    # number of generated random networks
        elif opt == '-B':
            binary_mode = int(arg)                      # graph-to-binary method (1: threshold, 2: sparsity)
        elif opt == '-D':
            delay_max = float(arg)                      # maximum synaptic delay (ms)
        elif opt == '-A':
            folder_codes = str(arg)                     # folder that stores the results
        elif opt == '-F':
            ensemble_count_init = int(arg)              # ensemble index to start from
        elif opt == '-N':
            network_type = str(arg)                     # network type to simulate
        elif opt == '-O':
            OS_type = str(arg)                          # operating system
        elif opt == '-h':
            print(help_message)
            sys.exit()
else:
    print('Code will be executed using default values')
#==============================================================================
#================================INITIALIZATIONS===============================
#------------Set the Default Values if Variables are not Defined---------------
# Any option not supplied on the command line falls back to its module
# default; the locals() check works because this code runs at module level.
if 'n_exc' not in locals():
    n_exc = n_exc_default
    print('ATTENTION: The default value of %s for n_exc is considered.\n' %str(n_exc))
if 'n_inh' not in locals():
    n_inh = n_inh_default
    print('ATTENTION: The default value of %s for n_inh is considered.\n' %str(n_inh))
if 'connection_prob' not in locals():
    connection_prob = connection_prob_default
    print('ATTENTION: The default value of %s for connection_prob is considered.\n' %str(connection_prob))
if 'frac_input_neurons' not in locals():
    frac_input_neurons = frac_input_neurons_default
    print('ATTENTION: The default value of %s for frac_input_neurons is considered.\n' %str(frac_input_neurons))
if 'no_cascades' not in locals():
    no_cascades = no_cascades_default
    print('ATTENTION: The default value of %s for no_cascades is considered.\n' %str(no_cascades))
if 'ensemble_size' not in locals():
    ensemble_size = ensemble_size_default
    print('ATTENTION: The default value of %s for ensemble_size is considered.\n' %str(ensemble_size))
if 'folder_codes' not in locals():
    folder_codes = folder_codes_default
    print('ATTENTION: The default value of %s for folder_codes is considered.\n' %str(folder_codes))
if 'delay_max' not in locals():
    delay_max = delay_max_default
    print('ATTENTION: The default value of %s for delay_max is considered.\n' %str(delay_max))
if 'ensemble_count_init' not in locals():
    ensemble_count_init = ensemble_count_init_default
    # BUG fix: the message used to print the literal text
    # "str(ensemble_count_init)" instead of the variable name.
    print('ATTENTION: The default value of %s for ensemble_count_init is considered.\n' %str(ensemble_count_init))
if 'network_type' not in locals():
    network_type = network_type_default
    print('ATTENTION: The default value of %s for network_type is considered.\n' %str(network_type))
#------------------------------------------------------------------------------
#==============================================================================
#==========================EXECUTE THE COMMANDS================================
# Run the pipeline for the selected network type: data generation, then
# inference, then the (shared) binary-transformation step.  The three
# subprocess calls were previously duplicated verbatim per branch.
if network_type == 'F':
    generation_script = 'Generate_Neural_Data_Feed_Forward_N_to_1_Cascades.py'
    inference_script = 'Inference_FF_n_to_1_Cascades.py'
elif network_type == 'R':
    generation_script = 'Generate_Neural_Data_Delayed_Cascades.py'
    inference_script = 'Inference_Cascades_Delayed.py'
else:
    generation_script = None
    print('Invalid network type')

if generation_script is not None:
    # Arguments common to every stage, in the same order as before.
    common_args = ['-E', str(n_exc), '-I', str(n_inh),
                   '-P', str(connection_prob), '-Q', str(frac_input_neurons),
                   '-T', str(no_cascades), '-S', str(ensemble_size),
                   '-D', str(delay_max), '-F', str(ensemble_count_init)]
    subprocess.call(['python', folder_codes + generation_script] + common_args)
    subprocess.call(['python', folder_codes + inference_script] + common_args)
    # The binary-transformation step additionally needs the network type.
    subprocess.call(['python', folder_codes + 'Transform_to_Binary.py']
                    + common_args + ['-N', str(network_type)])
#==============================================================================
abhinaypodugu/vercelDeploy | 5,720,896,466,649 | 10c84ce4ac2fd9a55ce6e40a65a96187ffd33aa2 | 48f220cffd294263fb4264daa7799870438397a9 | /vercelDeploy/template.py | fffeeff963a56fac85baee2167891953152f267e | []
| no_license | https://github.com/abhinaypodugu/vercelDeploy | 3372eb3cea7008ea7e6c15a51dc0054610dad8c7 | 52e060ffb9ac26a0aa677772765e671bb15720b7 | refs/heads/master | 2023-08-22T11:54:11.807070 | 2021-10-16T14:39:14 | 2021-10-16T14:39:14 | 417,465,750 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | # FastAPI file creation
template1 = """
from fastapi import FastAPI
from {path} import {class_name}
import uvicorn
app = FastAPI()
{func_name}={class_name}()
@app.get("/")
def hello_world():
msg="Welcome to my FastAPI project!\
Please visit the /docs to see the API documentation."
return msg\n
"""
template2 = """
app.add_api_route(
path="/{route_path}",
endpoint={endpoint},
methods={http_methods},
)\n
"""
template3 = """
if __name__ == "__main__":
uvicorn.run(
app=app,
host='localhost',
port=5000
)\n"""
| UTF-8 | Python | false | false | 618 | py | 9 | template.py | 8 | 0.553398 | 0.542071 | 0 | 32 | 18.3125 | 61 |
MzoneCL/MentalHealth | 9,285,719,336,460 | 3d2940bc362b34dafed9861bfe2a3e9509b1ed12 | 7acdb08fcb10e46f62d6c43f3b327e838dfecba2 | /data_preprocess.py | 8f3af435b41717a6b657bb44095b7042c5ce9491 | []
| no_license | https://github.com/MzoneCL/MentalHealth | 1b6381aca5ef484a452bb58383dfc5e092ae512d | 14aeafd4d03a7ecc8aafe200122f5ff6ab66cb29 | refs/heads/main | 2023-04-28T18:43:31.744856 | 2021-05-11T09:04:32 | 2021-05-11T09:04:32 | 365,923,245 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pandas as pd
import numpy as np
import os
raw_data_path = 'data_raw/data_faker.csv'
# Replace every space character (' ') in string cells with '' (in place).
def remove_blank(csv_path=None):
    """Strip all spaces from every string cell of a CSV and rewrite it.

    csv_path: path of the CSV to clean in place; defaults to the
    module-level ``raw_data_path`` for backward compatibility.
    """
    if csv_path is None:
        csv_path = raw_data_path
    data_raw = pd.read_csv(csv_path)
    # Vectorised per-column cleanup instead of the original O(rows*cols)
    # scalar .iloc loop; isinstance() replaces the type(x) == str check,
    # and non-string cells (numbers, NaN) pass through untouched.
    for col in data_raw.columns:
        data_raw[col] = data_raw[col].map(
            lambda v: v.replace(' ', '') if isinstance(v, str) else v)
    # NOTE(review): like the original, this writes the index as an extra
    # column on every run (no index=False) -- confirm that is intended.
    data_raw.to_csv(csv_path, encoding='utf_8_sig')
# Mode imputation: fill missing values of each column with its mode.
def fill_with_mode(csv_path=None, out_path=None):
    """Fill NaN cells of each column with that column's mode and save.

    csv_path: input CSV; defaults to the module-level ``raw_data_path``.
    out_path: output CSV; defaults to 'data_processed/fill_with_mode.csv'
    (both defaults preserve the original behaviour).
    """
    if csv_path is None:
        csv_path = raw_data_path
    if out_path is None:
        out_path = 'data_processed/fill_with_mode.csv'
    data_raw = pd.read_csv(csv_path)
    for col in data_raw.columns:
        # .any() is the idiomatic (and short-circuiting) form of
        # the original ".isnull().sum() > 0" check.
        if data_raw[col].isnull().any():
            print('填充:' + col)
            data_raw[col].fillna(data_raw[col].mode()[0], inplace=True)
    data_raw.to_csv(out_path, encoding='utf_8_sig')
# Rescale the data into the range min_ ~ max_.
def scale_data(df, min_, max_):
    """Linearly rescale ``df`` column-wise into the range [min_, max_].

    Implements the behaviour promised by the original (empty) stub:
    min-max normalisation to [0, 1], then an affine map to [min_, max_].
    """
    normalized_df = (df - df.min()) / (df.max() - df.min())
    return normalized_df * (max_ - min_) + min_
def normalized(df):
    """Min-max normalise ``df`` column-wise into [0, 1]."""
    lowest = df.min()
    spread = df.max() - lowest
    return (df - lowest) / spread
# Script entry point: run the mode-imputation step on the raw data file.
if __name__ == '__main__':
    # remove_blank()  # optional whitespace cleanup; disabled by default
    fill_with_mode()
| UTF-8 | Python | false | false | 1,088 | py | 5 | data_preprocess.py | 2 | 0.574757 | 0.570874 | 0 | 43 | 22.930233 | 78 |
BMW-InnovationLab/BMW-TensorFlow-Training-GUI | 19,430,432,073,184 | 41d93adb6a96249fdcf710f2451f1abae5d488ee | 8e90a7759ec7143427823547e0fbff58e0343aaa | /docker_sdk_api/api/controllers/infrastructure_controller.py | 7ab18ae9d76ccddb59a024a2bca861fb8decc457 | [
"LicenseRef-scancode-free-unknown",
"Apache-2.0"
]
| permissive | https://github.com/BMW-InnovationLab/BMW-TensorFlow-Training-GUI | 646a6f86f26887e94351b4c572b7fe7f0842f75c | 06531dae14365986c86baf735fd149317f4bb67a | refs/heads/master | 2023-07-20T01:48:27.299962 | 2023-07-12T15:22:22 | 2023-07-12T15:22:22 | 227,429,492 | 1,030 | 198 | Apache-2.0 | false | 2023-05-22T17:40:23 | 2019-12-11T18:06:11 | 2023-05-13T22:25:45 | 2023-05-22T17:40:22 | 272,193 | 937 | 158 | 0 | Python | false | false | from fastapi import APIRouter, HTTPException
from application.infrastructure.services.gpu_service import GpuService
from application.infrastructure.services.port_scanner_service import PortScannerService
from containers import Services
from domain.exceptions.application_error import ApplicationError
from domain.models.container_info import ContainerInfo
tensorboard_port_service = Services.tensorboard_port_scanner_service()
router = APIRouter()
"""
Gets the available gpus.
Returns
-------
list of str
a list of gpu names wuth less than 25% memory consumption
"""
@router.get("/gpu/info")
async def get_gpu_info():
try:
return GpuService().get_gpu_info()
except ApplicationError as e:
raise HTTPException(status_code=400, detail=e.__str__())
except Exception as e:
raise HTTPException(status_code=500, detail=e.__str__())
"""
Get all the used ports on the system
Returns
-------
list of str
used ports
"""
@router.get("/used/ports")
async def get_used_ports():
try:
return PortScannerService().get_used_ports()
except ApplicationError as e:
raise HTTPException(status_code=400, detail=e.__str__())
except Exception as e:
raise HTTPException(status_code=500, detail=e.__str__())
"""
Gets the tensorboard port for a specific job
Parameters
----------
containerInfo: ContainerInfo
object of type ContainerInfo containing the container name
Returns
-------
str
the port on which tensorboard for that specific container is running
"""
@router.post("/tensorboard/port")
async def get_tensorboard_ports(container_info: ContainerInfo):
try:
return tensorboard_port_service.get_tensorboard_port(container_info=container_info)
except ApplicationError as e:
raise HTTPException(status_code=400, detail=e.__str__())
except Exception as e:
raise HTTPException(status_code=500, detail=e.__str__())
"""
Gets the tensorboard port for a specific archived job
Parameters
----------
containerInfo: ContainerInfo
object of type ContainerInfo containing the container name
Returns
-------
int
the port on which tensorboard for that specific container is running
"""
@router.post("/archived/tensorboard/port")
async def get_tensorboard_ports(container_info: ContainerInfo):
try:
return tensorboard_port_service.get_archived_tensorboard_port(container_info=container_info)
except ApplicationError as e:
raise HTTPException(status_code=400, detail=e.__str__())
except Exception as e:
raise HTTPException(status_code=500, detail=e.__str__())
| UTF-8 | Python | false | false | 2,638 | py | 468 | infrastructure_controller.py | 281 | 0.712282 | 0.702426 | 0 | 102 | 24.862745 | 100 |
Drew-E-Wilson/Project3 | 18,073,222,399,341 | c4b45f8f5746ad027aea8cb7fe7e8916e07e8279 | 1c5095f90e10b40f8b259518ee3ba155ce1374cc | /codrs/serializers.py | d15410bd348362a6679c7fa8a338162edeb9819b | []
| no_license | https://github.com/Drew-E-Wilson/Project3 | d4e4dba04924535789b370c316d3cbe562f20df5 | 06e857e293f7a96e0e7a7d513ad3ab4482d11cae | refs/heads/main | 2023-05-21T06:56:06.051611 | 2021-04-27T12:35:43 | 2021-04-27T12:35:43 | 361,229,785 | 0 | 0 | null | true | 2021-04-24T17:52:21 | 2021-04-24T17:52:20 | 2021-04-24T01:54:35 | 2021-04-24T01:54:33 | 54 | 0 | 0 | 0 | null | false | false | from .models import Array, Comment, Profile
from django.contrib.auth.models import User, Group
from rest_framework import serializers
class CommentSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
# The model it will serialize
model = Comment
# the fields that should be included in the serialized output
fields = ['id', 'user', 'comment']
class ArraySerializer(serializers.ModelSerializer):
# push = serializers.StringRelatedField(many=True)
comment = CommentSerializer(many=True)
class Meta:
model = Array
fields = ['id', 'user', 'body', 'comment']
def create(self, validated_data):
comment_data = validated_data.pop('comment')
array = Array.objects.create(**validated_data)
for comment_data in comment_data:
Comment.objects.create(array=array, **comment_data)
Comment.objects.create(comment=comment, **comment_data)
return array
def update(self, instance, validated_data):
comment_data = validated_data.pop('comment')
comments = (instance.comment).all()
comments = list(comments)
instance.user = validated_data.get('user', instance.user)
instance.body = validated_data.get('body', instance.body)
instance.save()
for comment_data in comment_data:
comment = comments.pop(0)
comment.user = comment_data.get('user', comment.user)
comment.body = comment_data.get('comment', comment.comment)
comment.save()
return instance
class ProfileSerializer(serializers.HyperlinkedModelSerializer):
class Meta:
# The model it will serialize
model = Profile
# the fields that should be included in the serialized output
fields = ['id', 'firstname', 'lastname', 'genderpronouns', 'location', 'aboutme', 'linkedin']
| UTF-8 | Python | false | false | 1,898 | py | 17 | serializers.py | 16 | 0.658588 | 0.658061 | 0 | 49 | 37.714286 | 101 |
amnox/cppinstitute_questions | 16,552,803,965,782 | 97df9067d9500c7c2ab6b0b2c73e40148a90f6d3 | f49536d4676e0e8fe1ad4d0529a84680f81a27bf | /hackerrank_solutions/max_sum_alternate.py | 4173cd3170efb14a6d4f866298e704acefdcd475 | []
| no_license | https://github.com/amnox/cppinstitute_questions | e2a398a1fd28884ac27bd98ec8ee5f305f1f215c | 79a9d9c3fa01584ec1820c504dcb111e0c504c3e | refs/heads/master | 2021-06-20T02:40:28.945177 | 2021-02-22T02:16:38 | 2021-02-22T02:16:38 | 189,868,151 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | def find_max_sum(arr):
incl = 0
excl = 0
for i in arr:
# Current max excluding i
new_excl = max(excl,incl)
# Current max including i
incl = excl + i
excl = new_excl
# return max of incl and excl
return max(excl,incl)
| UTF-8 | Python | false | false | 248 | py | 60 | max_sum_alternate.py | 60 | 0.612903 | 0.604839 | 0 | 15 | 15.533333 | 31 |
khyasir/odoo-10 | 6,957,847,022,580 | f3eb015b4626d65ef2deca35e4268dbc4c073114 | 85147eec46810841bd49918dd3df8d423e9f1a04 | /ufc/orient_sales_invoice/model.py | f49e70f6d594b4c0e3a8b44cca19784fd7a39e3e | []
| no_license | https://github.com/khyasir/odoo-10 | 999cade56501d96030399ecc3d525d5647ea5228 | f745b6fe70748444b0a70d854d2273d8d24337cf | refs/heads/master | 2021-08-16T22:45:01.532494 | 2017-11-20T12:37:12 | 2017-11-20T12:37:12 | 106,803,831 | 0 | 3 | null | null | null | null | null | null | null | null | null | null | null | null | null | #-*- coding:utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2011 OpenERP SA (<http://openerp.com>). All Rights Reserved
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
###################################################
from openerp import models, fields, api
from num2words import num2words
class SampleDevelopmentReport(models.AbstractModel):
_name = 'report.orient_sales_invoice.module_report'
@api.model
def render_html(self,docids, data=None):
report_obj = self.env['report']
report = report_obj._get_report_from_name('orient_sales_invoice.module_report')
print "------------------------------------------0"
records = self.env['orient.summ'].browse(docids)
enteries = []
for data in records.sum_ids2:
if data.quantity not in enteries:
enteries.append(data.quantity)
active_carton = []
def values(cart_id,attr):
del active_carton [:]
cart_id = cart_id
for x in records.sum_ids2:
if x.quantity == cart_id:
active_carton.append(x)
if attr == 'quan':
amount = 0
for x in active_carton:
amount = amount + x.sale_price
return amount
if attr == 'count':
value = 0
for x in active_carton:
value = value + 1
return value
def number_to_word(attrb):
word = num2words((attrb))
word = word.title() + " " + "Only"
return word
docargs = {
'doc_ids': docids,
'doc_model': 'orient.summ',
'docs': records,
'data': data,
'enteries': enteries,
'values':values,
'number_to_word': number_to_word
}
return report_obj.render('orient_sales_invoice.module_report', docargs)
| UTF-8 | Python | false | false | 2,739 | py | 192 | model.py | 81 | 0.536692 | 0.531216 | 0 | 78 | 34.051282 | 87 |
greenforse/AntoneoTD | 12,034,498,389,733 | 5f1734b438215dd13ef31b7031bf2c5a33b508d6 | 510fee9a2220e8d5bcb786b2dd010ba1351237a8 | /StartMenuState.py | f7641375238ac47e7796ad5548e78db4ea5f3f15 | []
| no_license | https://github.com/greenforse/AntoneoTD | 7b3cebe49e94df132edfaab3c755ce1d1dced4ab | 0472d25c4aac0d5cb79fe21d0ad32f9e4fc4769a | refs/heads/master | 2023-07-13T17:53:45.485865 | 2021-08-23T10:52:12 | 2021-08-23T10:52:12 | 375,608,992 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from GameState import GameState
from State import State
class StartMenuState(State):
def __init__(self,game):
self.game=game
def init(self):
self.game.buildStartMenu()
self.game.initAndStopStartMenu()
def processEvents(self):
self.game.inputMouseButtonInBigMenu()
if self.game.Anton.play:
self.game.initAndStopStartMenu()
self.game.changeState(GameState)
def draw(self,time):
self.game.drawStartMenu(time)
| UTF-8 | Python | false | false | 516 | py | 17 | StartMenuState.py | 17 | 0.635659 | 0.635659 | 0 | 19 | 26.052632 | 45 |
MishokkohsiM/TP-WEb | 7,533,372,648,446 | b00cfce095b0d1a29c2847f1b22fa34a11aff66e | be520c3232e08e80b3ca7638d0e915e1e1978afc | /questions/views.py | 01dd88f3690290eab4dc876adc83219b8e382522 | []
| no_license | https://github.com/MishokkohsiM/TP-WEb | 98354072323f5eed6719f9569520a84c21a41988 | cdc2b26e69170236a0f3644f2652906fe52004bb | refs/heads/master | 2020-05-04T01:47:17.498101 | 2019-04-15T21:30:39 | 2019-04-15T21:30:39 | 178,913,559 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | from django.shortcuts import render
# Create your views here.
from django.shortcuts import render
def ask(request):
return render(request, 'ask.html')
def index(request):
return render(request, 'index.html')
def login(request):
return render(request, 'login.html')
def registration(request):
return render(request, 'register.html')
def base(request):
return render(request, 'base.html')
| UTF-8 | Python | false | false | 419 | py | 23 | views.py | 11 | 0.71599 | 0.71599 | 0 | 25 | 15.76 | 43 |
open-mmlab/mmagic | 9,174,050,191,204 | b0118e5b9b78c78be87e9fc3bb2842a3261b9299 | af101b467134e10270bb72d02f41f07daa7f57d8 | /tests/test_models/test_editors/test_mspie/test_mspie_stylegan2_discriminator.py | 5290d8ee1077ff35e91609cac86a1071b2acc6d3 | [
"Apache-2.0"
]
| permissive | https://github.com/open-mmlab/mmagic | 4d864853417db300de4dfe7e83ce380fd1557a23 | a382f143c0fd20d227e1e5524831ba26a568190d | refs/heads/main | 2023-08-31T14:40:24.936423 | 2023-08-30T05:05:56 | 2023-08-30T05:05:56 | 203,999,962 | 1,370 | 192 | Apache-2.0 | false | 2023-09-14T11:39:18 | 2019-08-23T13:04:29 | 2023-09-14T11:22:31 | 2023-09-14T11:39:17 | 26,511 | 5,729 | 962 | 22 | Jupyter Notebook | false | false | # Copyright (c) OpenMMLab. All rights reserved.
import pytest
import torch
from mmagic.models.editors.mspie import MSStyleGAN2Discriminator
class TestMSStyleGANv2Disc:
@classmethod
def setup_class(cls):
cls.default_cfg = dict(in_size=64, channel_multiplier=1)
def test_msstylegan2_disc_cpu(self):
d = MSStyleGAN2Discriminator(**self.default_cfg)
img = torch.randn((2, 3, 64, 64))
score = d(img)
assert score.shape == (2, 1)
d = MSStyleGAN2Discriminator(
with_adaptive_pool=True, **self.default_cfg)
img = torch.randn((2, 3, 64, 64))
score = d(img)
assert score.shape == (2, 1)
@pytest.mark.skipif(not torch.cuda.is_available(), reason='requires cuda')
def test_msstylegan2_disc_cuda(self):
d = MSStyleGAN2Discriminator(**self.default_cfg).cuda()
img = torch.randn((2, 3, 64, 64)).cuda()
score = d(img)
assert score.shape == (2, 1)
d = MSStyleGAN2Discriminator(
with_adaptive_pool=True, **self.default_cfg).cuda()
img = torch.randn((2, 3, 64, 64)).cuda()
score = d(img)
assert score.shape == (2, 1)
| UTF-8 | Python | false | false | 1,186 | py | 1,249 | test_mspie_stylegan2_discriminator.py | 952 | 0.610455 | 0.574199 | 0 | 37 | 31.054054 | 78 |
JuanjoUrrutia/zoo | 14,448,270,017,428 | 2297266f0ea69a63d666c753037b4fcfde879472 | 50295df6dc72f8f6b2675c76b082aa1be8f53a7c | /zoo.py | ec3bdd82fbccfcde8423377d780436597139ce0d | []
| no_license | https://github.com/JuanjoUrrutia/zoo | ab202a695886d45a890bb8a55bfc342075e19b7f | b478cf665ae3f7595c390350c982e3c870dbe0cf | refs/heads/master | 2023-05-25T22:56:42.118040 | 2021-05-31T10:23:49 | 2021-05-31T10:23:49 | 371,332,991 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import random
class Animal:
def __init__(self, name):
self.health = 50
self.happiness = int(random.random()*100)
self.sexvalue = random.random()
if self.sexvalue <= 0.5 :
self.sex = 'Macho'
else:
self.sex = 'Hembra'
self.age = int(random.random()*10)
while self.age == 0:
self.age = int(random.random()*10)
self.name = name
self.species = "placeholderspecies"
def rename(self, name):
self.name = name
def death(self):
if self.health < 0:
print("¡"+ self.name + " ha muerto!")
if self.happiness < 0:
print("¡"+ self.name + " se escapó!")
def display_info(self):
print('Nombre: ' + self.name)
print('Especie:' + self.species)
print('Edad: ' + str(self.age))
print('Sexo: ' + self.sex)
print('Felicidad:' + str(self.happiness) + '/100')
print('Salud:' + str(self.health) + '/100')
print("\n")
class Lion(Animal):
def __init__(self, name):
super().__init__(name)
self.species = 'Lion'
self.name = name
def eat(food):
if food == "Carne":
self.health += 30
if food == "Verduras":
self.health -= 30
class Tiger(Animal):
def __init__(self, name):
super().__init__(name)
self.species = 'Tiger'
self.name = name
def eat(self, food):
if food == "Carne":
self.health += 30
if food == "Verduras":
self.health -= 30
class Bear(Animal):
def __init__(self, name):
super().__init__(name)
self.species = 'Bear'
self.name = name
def eat(self, food):
if food == "Carne":
self.health += 20
if food == "Verduras":
self.health += 20
class Zoo(Animal):
def __init__(self, zoo_name):
super().__init__(zoo_name)
self.animals = []
self.name = zoo_name
def day_end(self):
#if time_counter = 8
for animal in self.animals:
animal.health -= 10
animal.happiness -= random.random()*40
#time_counter = 0
def add_lion(self, name):
self.animals.append( Lion(name) )
def add_tiger(self, name):
self.animals.append( Tiger(name) )
def add_bear(self, name):
self.animals.append( Bear(name) )
def print_all_info(self):
print("-"*30, self.name, "-"*30)
for item in self.animals:
item.display_info()
zoo1 = Zoo("Zoo")
zoo1.add_lion("Nala")
zoo1.add_lion("Simba")
zoo1.add_tiger("Rajah")
zoo1.add_tiger("Shere Khan")
zoo1.print_all_info()
zoo1.animals[3].eat("Carne")
zoo1.print_all_info()
#FALTA AGREGAR MODIFICADORES DE FELICIDAD
#FALTA AGREGAR MODIFICADORES ADICIONALES DE SALUD
#FALTA CREAR MENU INTERACTIVO
#FALTA CREAR SISTEMA DE PASO DE TIEMPO
#FALTA CREAR SISTEMA DE MUERTE/HUIDA DE ANIMALES
#FALTA IMPLEMENTAR BIBLIOTECA DE IMAGENES ASCII PARA ANIMALES
#FALTA AGREGAR MAS ANIMALES
#FALTA AGREGAR INCIDENCIAS POR PROBABILIDADES DURANTE PASO DE TIEMPO
#FALTA AGREGAR INSTANCIA DE FIN DE DIA | UTF-8 | Python | false | false | 3,165 | py | 1 | zoo.py | 1 | 0.560721 | 0.544592 | 0 | 124 | 24.508065 | 68 |
mukris4/GCPT | 8,375,186,265,310 | 7068b36bc48bd48099b3152a6ebbd772774016a4 | bbd70f2054a78199596bf3832b5d82380f23ab6a | /test_case/test_orderManage.py | 97edda4948ef37c5c823945c7077564b9b14098d | []
| no_license | https://github.com/mukris4/GCPT | 51d0a9e4296a0397120128be6e556ff929f63f01 | d84b0eec79c616802034d57339724608ba82140d | refs/heads/master | 2020-06-27T18:29:34.427115 | 2019-08-06T09:48:00 | 2019-08-06T09:48:14 | 200,019,053 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import time
import unittest
import random,os
from selenium import webdriver
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.common.by import By
from operate.Common_Operations import common_browser
from selenium.webdriver.support.select import Select
from BeautifulReport import BeautifulReport
# 登录
class OrderM(unittest.TestCase):
def save_img(self, img_name):
"""
传入一个img_name, 并存储到默认的文件路径下
:param img_name:
:return:
"""
#pycharm执行
self.driver.get_screenshot_as_file('{}/{}.png'.format(os.path.abspath(r"H:\GCPT\img"), img_name))
#Jenkins构建脚本 未启动浏览器截图出错
# self.driver.get_screenshot_as_file('{}/{}.png'.format(os.path.abspath(r"H:\Jenkins\workspace\gcpt_test\img"), img_name))
@classmethod
def setUpClass(cls) :
cls.driver = webdriver.Ie()
cls.driver.get('http://192.168.1.24:8031/(S(q0bjadi3zomcgh3afy4snowk))/Login.aspx')
cls.driver.maximize_window()
@classmethod
def tearDownClass(cls):
time.sleep(3)
cls.driver.quit()
# @unittest.skip('暂不执行')
def test_a1_login(self):
'''管理员登录'''
common_browser.login(self,"admin","abc123")
self.driver.switch_to.frame("topFrame")
result=self.driver.find_element_by_id("userName1").text
assert "管理员" in result
#订单管理
# @unittest.skip('暂不执行')
def test_a2_enterorderF(self):
'''进入订单通知复核页面'''
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('leftFrame')
self.driver.find_element_by_link_text("订单通知复核").click()
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('main')
# 显式等待
element = WebDriverWait(self.driver, 10,3).until(EC.presence_of_element_located((By.CLASS_NAME, 'titlebt'))).text
self.assertEqual(element, "订单通知复核")
@unittest.skip('暂不执行')
def test_a3_ordercheck_seach(self):
'''订单复核通知查询'''
WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'WebTool_btQuery'))).click()
windows = self.driver.window_handles
self.driver.switch_to.window(windows[1])
element1 = WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'btQuery'))).get_attribute('value')
self.assertEqual(element1, "查 询")
@unittest.skip('暂不执行')
def test_a4_closesearch(self):
'''关闭订单复核查询'''
WebDriverWait(self.driver, 10,3).until(
EC.presence_of_element_located((By.ID, 'btCance'))).click()
windows = self.driver.window_handles
self.driver.switch_to.window(windows[0]) #切换至原来窗口
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('main')#切换至main frame
element = WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.CLASS_NAME, 'titlebt'))).text
self.assertEqual(element, "订单通知复核")
#@unittest.skip('暂不执行')
@BeautifulReport.add_test_img('test_a5_orderSave')
def test_a5_orderSave(self):
'''点击选择第一条数据保存'''
WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'conCon_gridListHdr_selected_1'))).click()
global orderNO # 订单号
orderNO = self.driver.find_element_by_xpath('//*[@id="conCon_gridListHdr"]/tbody/tr[2]/td[4]').text
# print(orderNO)
global gys # 供应商账号
gys = self.driver.find_element_by_xpath(' // *[ @ id = "conCon_gridListHdr_txtLinker1_0"]').text
WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'WebTool_btSave'))).click()
time.sleep(1)
# 获取alert对话框
dig_alert = self.driver.switch_to.alert
self.assertIn("保存成功!", dig_alert.text)
time.sleep(3)
dig_alert.accept()
#@unittest.skip('暂不执行')
# @unittest.expectedFailure
@BeautifulReport.add_test_img('test_a6_orderSubmit')
def test_a6_orderSubmit(self):
'''点击选择第一条数据提交'''
# 将返回该匹配行的第一列(暂未实现)
# By.xpath("//td[./span[text()=orderNO]]/../td[1]")
self.driver.switch_to.alert.accept()
#层级定位
# table_div = self.driver.find_element_by_id('conCon_UPgridList')
# table=table_div.find_element_by_tag_name("table")
# row = table.find_elements_by_tag_name('td')
#
# for item in row:
# item1=item.text
# list.append(item1)
# print(list)
# trOrder= list.index(orderNO)
# trNO=trOrder//13
# print(trNO)
WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'conCon_gridListHdr_selected_0'))).click()
# WebDriverWait(self.driver, 10,3).until(
# EC.presence_of_element_located((By.ID, '"conCon_gridListHdr_selected_"+trNO'))).click()
WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'WebTool_btTj'))).click()
time.sleep(1)
# 获取alert对话框
dig_alert = self.driver.switch_to.alert
time.sleep(1)
print(dig_alert.text)
# 获取文本值为空
self.assertIn("提交成功!", dig_alert.text,"IE浏览器当弹窗出现不允许此页创建更多的消息时获取不到弹窗的文本值")
time.sleep(3)
self.driver.switch_to.alert.accept()
'''后期优化未保存,就提交的case'''
#切换供应商账号
@unittest.skip('暂不执行')
def test_a7_switch_Supplier(self):
'''注销'''
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame("topFrame")
self.driver.find_element_by_link_text("注销").click()
username=self.driver.find_element_by_xpath('//*[@id="loginBg"]/table/tbody/tr[1]/td[1]/span').text
self.assertIn(username,"账 号:")
@unittest.skip('暂不执行')
@BeautifulReport.add_test_img('test_a8_supplier_login')
def test_a8_supplier_login(self):
'''供应商登录'''
common_browser.login(self, gys, "abc123")
# common_browser.login(self,gys,"abc123")
self.driver.switch_to.frame("topFrame")
result = self.driver.find_element_by_id("userName1").text
assert gys in result
# assert "gkzy" in result
@unittest.skip('暂不执行')
def test_a9_order_receive_confirm(self):
'''进入订单接收确认页面'''
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('leftFrame')
self.driver.find_element_by_link_text("订单接收确认").click()
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('main')
# 显式等待
element = WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.CLASS_NAME, 'titlebt'))).text
self.assertEqual(element, "订单接收确认")
@unittest.skip('暂不执行')
def test_b1_order_receive_save(self):
'''订单接收确认—点击确认按钮'''
global orderNO1 #配送单订单号
orderNO1=self.driver.find_element_by_id("conCon_txtBillNo").get_attribute('value')
self.driver.find_element_by_id("WebTool_btSave").click()
time.sleep(1)
dig_alert = self.driver.switch_to.alert
time.sleep(1)
self.assertIn("确认成功!", dig_alert.text)
self.driver.switch_to.alert.accept()
@unittest.skip('暂不执行')
def test_b2_Distribution_list_submit(self):
'''进入配送单提交页面'''
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('leftFrame')
self.driver.find_element_by_link_text("配送单提交").click()
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('main')
element = WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.CLASS_NAME, 'titlebt'))).text
self.assertEqual(element, "配送单提交")
@unittest.skip('暂不执行')
def test_b3_Distribution_add(self):
'''配送单提交-新增订单明细数据-获取订单号后,循环获取仓库,查询订单是否在该仓库中'''
self.driver.find_element_by_id("WebTool_btAdd").click()
# 选择仓库
select = self.driver.find_element_by_id("conCon_ddlCKName")
# 获取select里面的option标签,注意使用find_elements
options_list = select.find_elements_by_tag_name('option')
# 遍历option
Warehouse = []
for option in options_list:
Warehouse.append(option.get_attribute("value"))
for selectWarehouse in Warehouse:
Select(self.driver.find_element_by_id("conCon_ddlCKName")).select_by_value(selectWarehouse)
time.sleep(3)
self.driver.find_element_by_id("conCon_btnAddDtl").click()
windows = self.driver.window_handles
self.driver.switch_to.window(windows[1])
WebDriverWait(self.driver, 15, 5).until(
EC.presence_of_element_located((By.ID, 'txtBillNo'))).send_keys(orderNO1)
# time.sleep(3)
# self.driver.find_element_by_id("txtBillNo").send_keys(orderNO1)
self.driver.find_element_by_id("btQuery").click()
billNo=self.driver.find_element_by_xpath('//*[@id="gridLogList"]/tbody/tr[2]/td[2]').text
if billNo=="1":
self.driver.find_element_by_id("gridLogList_selected_0").click()
self.driver.find_element_by_id("btOK").click()
break
else:
self.driver.find_element_by_id("btCance").click()
# 判断当前窗口
all_handles = self.driver.window_handles
self.driver.switch_to.window(all_handles[0])
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('main')
self.assertEqual(billNo, "1")
@unittest.skip('暂不执行')
def test_b4_Distribution_save(self):
'''配送单保存'''
windows = self.driver.window_handles
self.driver.switch_to.window(windows[0]) # 切换至原来窗口
self.driver.switch_to.parent_frame()
self.driver.switch_to.frame('main') # 切换至main frame
#销售清单号
WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'conCon_txtSplOutBillNo'))).send_keys(orderNO1+"10086")
#批号
WebDriverWait(self.driver, 10, 3).until(
EC.presence_of_element_located((By.ID, 'conCon_gridList_txtLotNo_0'))).send_keys(random.randint(1,10))
#生产日期 有效期
time.sleep(2)
prodate=self.driver.find_element_by_id("conCon_gridList_txtProDate_0")
# ActionChains(self.driver).double_click(prodate).perform()
prodate.click()
self.driver.find_element_by_id("conCon_gridList_txtProDate_0").send_keys("20140101")
self.driver.find_element_by_id("conCon_gridList_txtEndDate_0").send_keys("21000101")
self.driver.find_element_by_id("WebTool_btSave").click()
time.sleep(1)
dig_alert = self.driver.switch_to.alert
time.sleep(1)
self.assertIn("保存成功!", dig_alert.text)
self.driver.switch_to.alert.accept()
@unittest.skip('暂不执行')
def test_b5_Distribution_submit(self):
'''配送单提交'''
self.driver.find_element_by_id("WebTool_btTj").click()
time.sleep(1)
time.sleep(1)
self.assertEqual("", self.driver.switch_to.alert.text,"IE浏览器当弹窗出现不允许此页创建更多的消息时获取不到弹窗的文本值,如果提交不成功则报错")
self.driver.switch_to.alert.accept() | UTF-8 | Python | false | false | 12,346 | py | 10 | test_orderManage.py | 7 | 0.616717 | 0.602315 | 0 | 279 | 39.569892 | 130 |
Tokimaro/Anonim_tg_bot | 1,778,116,505,202 | 70ac805dd6a9aa7f7bac413ae6b1993a1b10a9b6 | b65414de25ed91906639a21645f5b04334e58ea8 | /main.py | 034af3693412216fbb8330fae7b09282c35c51c6 | []
| no_license | https://github.com/Tokimaro/Anonim_tg_bot | 760f206adc62bb3df574354854229db31fca8e8a | 638f6b7470cbf181e44c3d193d13d6219ce9646f | refs/heads/main | 2023-01-21T14:17:55.625530 | 2020-11-25T16:27:03 | 2020-11-25T16:27:03 | 315,952,527 | 1 | 2 | null | null | null | null | null | null | null | null | null | null | null | null | null | import telebot
from telebot import types
from db import check_user
from db import reg_db
from db import delete_user
from db import get_info
from db import select_free
from db import add_user
from db import check_status
from db import add_second_user
from db import check_companion
from db import check_open
from db import close_chat
from db import edit_db
import time
bot = telebot.TeleBot('1458248555:AAF_onnTChilq1tMKeq1OtuvLRzwvqT7z9c')
class User: # Класс для собирания данных и добавления в бд, пользователей
def __init__(self, user_id):
self.user_id = user_id
self.name = None
self.age = None
self.sex = None
self.change = None
user_dict = {} # Словарь из пользователей
@bot.message_handler(commands=['start'])
def welcome(
message): # Стартовое меня, если вы не зарегистрированы, нгачнётся регистрация, иначе у вас будет выбор между действиями
if check_user(user_id=message.from_user.id)[0]:
mark = types.ReplyKeyboardMarkup(one_time_keyboard=True)
mark.add('Начать поиск', 'Посмотреть профиль', 'Удалить профиль')
bot.send_message(message.from_user.id, "Что дальше? ", reply_markup=mark)
bot.register_next_step_handler(message, search_prof)
else:
bot.send_message(message.from_user.id, "Пользоватерь не зарегестрирован, начало регистрации")
bot.send_message(message.from_user.id, "Введите ваше имя:")
bot.register_next_step_handler(message, reg_name)
@bot.message_handler(content_types=['text'])
def text_reac(message): # реакция на любое сообщение, которое не является командой
bot.send_message(message.chat.id, 'Неизвестная команда')
def reg_name(message): # Регистрация имени
if message.text != '':
user = User(message.from_user.id)
user_dict[message.from_user.id] = user
user.name = message.text
bot.send_message(message.from_user.id, "Каков твой возраст?:")
bot.register_next_step_handler(message, reg_age)
else:
bot.send_message(message.from_user.id, "Введите ваше имя:")
bot.register_next_step_handler(message, reg_name)
def reg_age(message): # Регистрация возраста
age = message.text
if not age.isdigit():
msg = bot.reply_to(message, 'Это должно быть число')
bot.register_next_step_handler(msg, reg_age)
return
user = user_dict[message.from_user.id]
user.age = age
markup = types.ReplyKeyboardMarkup(one_time_keyboard=True)
markup.add('Мужчина', 'Женщина')
bot.send_message(message.from_user.id, 'Каков ваш пол?', reply_markup=markup)
bot.register_next_step_handler(message, reg_sex)
def reg_sex(message): # Регистрация Пола
sex = message.text
user = user_dict[message.from_user.id]
if (sex == u'Мужчина') or (sex == u'Женщина'):
user.sex = sex
mark = types.ReplyKeyboardMarkup(one_time_keyboard=True)
mark.add('Мужчин', 'Женщин', 'Всех')
bot.send_message(message.from_user.id, 'C кем хотите пообщаться?', reply_markup=mark)
bot.register_next_step_handler(message, reg_change)
else:
bot.send_message(message.from_user.id, 'Что-то пошло не так')
bot.register_next_step_handler(message, reg_sex)
def reg_change(message): # Регистрация выбора людей, которых они ищут, по половому признаку
if (message.text == u'Мужчин') or (message.text == u'Женщин') or (message.text == u'Всех'):
user = user_dict[message.from_user.id]
user.change = message.text
bot.send_message(message.from_user.id,
"Проверьте введйнные данные:\n Ваше имя: " + str(user.name) + "\n Ваш возраст: " + str(
user.age) + "\n Ваш пол: " + str(user.sex) + "\nВы ищите: " + str(user.change))
markup = types.ReplyKeyboardMarkup(one_time_keyboard=True)
markup.add('Да', 'Нет')
bot.send_message(message.from_user.id, "Заполнить заново: ", reply_markup=markup)
bot.register_next_step_handler(message, reg_accept)
else:
bot.send_message(message.from_user.id, 'Что-то пошло не так')
bot.register_next_step_handler(message, reg_change)
def reg_accept(message): # Потверждение регистрации или замена старых данных на новых в бд
if (message.text == u'Да') or (message.text == u'Нет'):
if message.text == u'Да':
bot.send_message(message.from_user.id, "Введите ваше имя:")
bot.register_next_step_handler(message, reg_name)
else:
if not check_user(user_id=message.from_user.id)[0]:
user = user_dict[message.from_user.id]
reg_db(user_id=user.user_id, name=user.name, old=user.age, gender=user.sex, change=user.change)
bot.send_message(message.from_user.id, "Вы зарегестрированы:")
else:
if message.from_user.id in user_dict.keys():
user = user_dict[message.from_user.id]
edit_db(user_id=user.user_id, name=user.name, old=user.age, gender=user.sex, change=user.change)
welcome(message)
def search_prof(message): # Отображение профиля, с возможностью пересоздать профиль и инициализация поиска партнёра
if (message.text == u'Начать поиск') or (message.text == u'Посмотреть профиль') or (
message.text == u'Удалить профиль'):
if message.text == u'Начать поиск':
bot.send_message(message.from_user.id, 'Поиск начался')
search_partner(message)
elif message.text == u'Посмотреть профиль':
user_info = get_info(user_id=message.from_user.id)
bot.send_message(message.from_user.id,
"Проверьте введённые данные:\nВаше имя: " + str(user_info[2]) + "\nВаш возраст: " + str(
user_info[3]) + "\nВаш пол: " + str(user_info[4]) + "\nВы ищите: " + str(user_info[5]))
mark = types.ReplyKeyboardMarkup(one_time_keyboard=True)
mark.add('Да', 'Нет')
bot.send_message(message.from_user.id, 'Заполнить заного', reply_markup=mark)
bot.register_next_step_handler(message, reg_accept)
else:
delete_user(user_id=message.from_user.id)
bot.send_message(message.from_user.id, 'Профиль удалён')
welcome(message)
else:
bot.send_message(message.from_user.id, 'Что-то пошло не так')
bot.register_next_step_handler(message, search_prof)
def search_partner(message): # Поиск партнёра, если парнёр найден, отоюражает данные о нём и начинается чатинг
is_open = check_open(first_id=message.from_user.id)
if is_open[0][0]: # если уже имеется открытый чат, сразу переходит в чаттинг
bot.register_next_step_handler(message, chat)
else:
select = select_free()
success = False
if not select:
add_user(first_id=message.from_user.id)
else:
for sel in select:
if check_status(first_id=message.from_user.id, second_id=sel[0]) or message.from_user.id == sel[0]:
print('da sel')
continue
else:
print(sel[0])
print(message.from_user.id)
add_second_user(first_id=sel[0], second_id=message.from_user.id)
bot.send_message(message.from_user.id, 'Мы нашли вам собеседника')
user_info = get_info(user_id=sel[0])
bot.send_message(message.from_user.id,
"Имя собеседника: " + str(user_info[2]) + "\nВозраст собеседника: " + str(
user_info[3]) + "\nПол собеседника: " + str(user_info[4]))
bot.send_message(sel[0], 'Мы нашли вам собеседника')
user_info = get_info(user_id=message.from_user.id)
bot.send_message(sel[0],
"Имя собеседника: " + str(user_info[2]) + "\nВозраст собеседника: " + str(
user_info[3]) + "\nПол собеседника: " + str(user_info[4]))
success = True
break
if not success:
time.sleep(2)
search_partner(message)
else:
bot.register_next_step_handler(message, chat)
def chat(message): # реализация чата, если полльзователь напишет "/exit" и разрывает соединение
if message.text == "/exit":
companion = check_companion(first_id=message.from_user.id)
bot.send_message(companion, "Ваш собеседник вышел. Напишите что-либо для продолжения работы")
close_chat(first_id=message.from_user.id)
welcome(message)
return
elif not check_open(first_id=message.from_user.id)[0][0]:
welcome(message)
return
companion = check_companion(first_id=message.from_user.id)
bot.send_message(companion, message.text)
bot.register_next_step_handler(message, chat)
bot.polling()
| UTF-8 | Python | false | false | 10,442 | py | 3 | main.py | 2 | 0.619195 | 0.614993 | 0 | 202 | 43.772277 | 129 |
bagnine/Clothing_Recommender | 12,524,124,635,246 | c0fb0b45d6026575e1a423cfd2a8be7354fc82b3 | f9695c07bc6967928c89c595a9f45c2e942ef483 | /src/modules/data.py | d3a783fcea94ef131242995becb9b12d500bdf49 | []
| no_license | https://github.com/bagnine/Clothing_Recommender | b90d3f2596885cccecc535dbcef91d0a290494d2 | ef93384603ad2ecde238840c1759f3cf2447a92b | refs/heads/main | 2023-02-28T14:46:33.999557 | 2021-02-03T02:38:41 | 2021-02-03T02:38:41 | 319,463,225 | 0 | 1 | null | null | null | null | null | null | null | null | null | null | null | null | null | import pandas as pd
from selenium import webdriver
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import TimeoutException
from selenium.common.exceptions import NoSuchElementException
from webdriver_manager.chrome import ChromeDriverManager
import time
import urllib
import numpy as np
def scroll_page(number_of_scrolls):
'''Scrolls down to refresh the page a specified number of times'''
# Skip the header and go straight into the main feed
results = driver.find_elements_by_xpath('//div[@class="FiltersInstantSearch"]//div[@class="feed-item"]')
# Scroll the specified number of times with a 2 second break in between
for i in range(0,number_of_scrolls):
driver.execute_script("window.scrollTo(0,document.body.scrollHeight)")
if i%10 == 0:
print(f'Completed scroll {i+1} out of {number_of_scrolls)}')
time.sleep(2)
# Creates an object containing indices for every item on the page after scrolling
results = driver.find_elements_by_xpath('//div[@class="FiltersInstantSearch"]//div[@class="feed-item"]')
return results
def make_dataframe(results):
'''Takes in the results from the scroll function, iterates through them
and pulls data from each of the following categories: Name, Designer,
Price, (New Price and Old Price if an item has changed prices), Size,
Time (when an item was posted), Last Bump (when the price dropped most
recently) and Link.
The output is a DataFrame with each of the features as a column'''
Name = []
Designer = []
Price = []
NewPrice = []
OldPrice = []
Size = []
Time = []
LastBump = []
Link = []
for i, result in enumerate(results):
Designer.append(result.find_element_by_class_name("listing-designer").text)
Name.append(result.find_element_by_class_name("listing-title").text)
try:
Price.append(result.find_element_by_xpath('.//p[@class="sub-title original-price"]').text)
NewPrice.append(np.nan)
OldPrice.append(np.nan)
except NoSuchElementException:
NewPrice.append(result.find_element_by_xpath('.//p[@class="sub-title new-price"]').text)
OldPrice.append(result.find_element_by_xpath('.//p[@class="sub-title original-price strike-through"]').text)
Price.append(np.nan)
Size.append(result.find_element_by_xpath('.//p[@class="listing-size sub-title"]').text)
Time.append(result.find_element_by_xpath(".//span[@class='date-ago']").text)
try:
LastBump.append(result.find_element_by_xpath(".//span[@class='strike-through']").text)
except NoSuchElementException:
LastBump.append(np.nan)
Link.append(result.find_element_by_xpath('./a').get_attribute("href"))
grailed_dict = {'Name': Name,
'Designer': Designer,
'Price': Price,
'NewPrice': NewPrice,
'OldPrice': OldPrice,
'Size': Size,
'Time': Time,
'LastBump': LastBump,
'Link': Link}
if i%100 == 0:
print(f'Completed row {i} out of {len(results)}')
return pd.DataFrame(grailed_dict)
def page_dataframe(list_of_links, file_path):
'''Iterates through a list of links generated by the make_dataframe function, loads individual
pages, pulls additional features and saves the first image to the specified filepath. Images
are named based on the index number of the link.
A second DataFrame is output with the new features'''
# Lists to append everything to
UserName=[]
Sold=[]
Feedback=[]
CurrentListings=[]
Description=[]
ProfileLink=[]
FeedBack=[]
FeedbackLink=[]
FollowerCount=[]
FullSize=[]
PostedTime=[]
BumpedTime=[]
Location=[]
Transactions=[]
### Iterating through the list of links, scraping additional data from each page and saving the first image
for i, link in enumerate(df.Link):
# navigating to the specified page
driver.get(link)
# saving the image by number of the Link index
try:
image = driver.find_element_by_xpath('//div[@class="-image-wrapper"]')
src = image.find_element_by_tag_name('img').get_attribute('src')
urllib.request.urlretrieve(src, f'{file_path}{i}.jpg')
except NoSuchElementException:
continue
# saving seller's Username
try:
UserName.append(driver.find_element_by_xpath('//span[@class="-username"]').text)
except NoSuchElementException:
UserName.append(np.nan)
# number of items seller has sold
try:
Sold.append(driver.find_element_by_xpath('//a[@class="-link"]/span[2]').text)
except NoSuchElementException:
Sold.append(np.nan)
# Feedback rating
try:
FeedBack.append(driver.find_element_by_xpath('//span[@class="-feedback-count"]').text)
except NoSuchElementException:
FeedBack.append(np.nan)
try:
CurrentListings.append(driver.find_element_by_xpath('//a[@class="-for-sale-link"]').text)
except NoSuchElementException:
CurrentListings.append(np.nan)
try:
Description.append(driver.find_element_by_xpath('//div[@class="listing-description"]').text)
except NoSuchElementException:
Description.append(np.nan)
try:
ProfileLink.append(driver.find_element_by_xpath('//span[@class="Username"]/a').get_attribute("href"))
except NoSuchElementException:
ProfileLink.append(np.nan)
try:
FeedbackLink.append(driver.find_element_by_xpath('//div[@class="-details"]/a').get_attribute("href"))
except NoSuchElementException:
FeedbackLink.append(np.nan)
try:
FollowerCount.append(driver.find_element_by_xpath('//p[@class="-follower-count"]').text)
except NoSuchElementException:
FollowerCount.append(np.nan)
try:
FullSize.append(driver.find_element_by_xpath('//h2[@class="listing-size sub-title"]').text)
except NoSuchElementException:
FullSize.append(np.nan)
try:
PostedTime.append(driver.find_element_by_xpath('//div[@class="-metadata"]/span[2]').text)
except NoSuchElementException:
PostedTime.append(np.nan)
try:
BumpedTime.append(driver.find_element_by_xpath('//div[@class="-metadata"]/span[4]').text)
except NoSuchElementException:
BumpedTime.append(np.nan)
try:
Location.append(driver.find_element_by_xpath('//label[@class="--label"]').text)
except NoSuchElementException:
Location.append(np.nan)
if i%100 == 0:
print(f'Completed Page {i} out of {len(list_of_links)}.')
page_dict = {'Username': UserName,
'Sold': Sold,
'Feedback': FeedBack,
'CurrentListings': CurrentListings,
'Description': Description,
'ProfileLink': ProfileLink,
'FeedbackLink': FeedbackLink,
'FollowerCount': FollowerCount,
'FullSize': FullSize,
'PostedTime': PostedTime,
'BumpedTime': BumpedTime,
'Location': Location}
return pd.DataFrame(page_dict) | UTF-8 | Python | false | false | 7,821 | py | 13 | data.py | 5 | 0.610536 | 0.607979 | 0 | 201 | 37.915423 | 120 |
tnemelck/machine_learning | 1,855,425,907,193 | 666079a95443564fa8736bcd5dd3141e7ed6287f | bce5fc068a0be5f5c8022d7b2b65002d8f3365e1 | /titanic_data_clean.py | 213afb5c38d9c2d678903e13912f9c54a6ada630 | []
| no_license | https://github.com/tnemelck/machine_learning | fcca9a4e1c4caeb3a535e84e116f2ae658733650 | 53f3a5fb4aaaef9e93d01e5bc65a8c26dd6953ed | refs/heads/master | 2020-03-16T16:00:10.691991 | 2018-05-09T14:26:55 | 2018-05-09T14:26:55 | 132,767,620 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Apr 19 15:25:51 2018
@author: elvex
"""
import pandas as pd
import re
import numpy as np
import matplotlib.pyplot as plt
import sklearn.preprocessing as skp
import sklearn.model_selection as skms
def get_BDD(contenu = "train"):
path = "/home/elvex/.kaggle/competitions/titanic/"
path_train = path + "train.csv"
path_test = path + "test.csv"
dico_path = {"train" : path_train, "test": path_test}
adr = dico_path.get(contenu, path_test)
bdd = pd.read_csv(adr, sep=',', header=0, index_col=0)
return bdd
def plot_importance(bdd, feature, bins=20, range=None):
survived = bdd[bdd.Survived == 1]
dead = bdd[bdd.Survived == 0]
x2 = np.array(dead[feature].dropna())
x1 = np.array(survived[feature].dropna())
plt.hist([x1, x2], label = ["survived", "dead"], bins = bins, color = ['b', 'k'], normed=[True, True])
plt.legend(loc="upper_left")
plt.title("Distribution de la feature {}".format(feature))
plt.show()
def plot_importance_continue(bdd, feature):
survived = bdd[bdd.Survived == 1]
dead = bdd[bdd.Survived == 0]
x1 = np.array(dead[feature].dropna())
x2 = np.array(survived[feature].dropna())
h_D = list(np.histogram(x1, bins=100, range = (0, 100)))
h_D[0] = h_D[0] / x1.size
h_V = list(np.histogram(x2, bins = 100, range = (0, 100)))
h_V[0] = h_V[0] / x2.size
plt.figure()
plt.subplot(211)
plt.plot(h_V[1][:-1], h_V[0], color = 'b', label = "Vivant")
plt.plot(h_D[1][:-1], h_D[0], color = 'r', label = "Mort")
plt.title("Distribution de la feature {}".format(feature))
plt.legend(loc="upper_left")
plt.subplot(212)
DH = h_V[0] - h_D[0]
plt.plot(h_V[1][:-1], DH, color = 'k', label = "Vivant-Mort")
plt.legend(loc="upper_left")
plt.show()
return h_V, h_D, DH
def plot_importance_all(bdd, bins=20):
for c in bdd.columns: plot_importance(bdd, c, bins)
def correlation(bdd, seuil = 0.05):
mtx_cor = np.corrcoef(bdd.as_matrix(), rowvar=0)[0]
for i in range(1, mtx_cor.size):
deb = "!!! " if abs(mtx_cor[i]) < seuil else ""
print("{}La corrélation entre la survie et la variable '{}' vaut {}".format(deb, bdd.columns[i], mtx_cor[i]))
return mtx_cor
def formate_Pclass(bdd):
df = bdd["Pclass"]
df.fillna(df.median(), inplace = True)
df = pd.get_dummies(df, prefix = "Pclass")
bdd.drop('Pclass', axis = 1, inplace = True)
bdd = pd.concat([bdd, df], axis=1, join_axes=[bdd.index])
return bdd
def formate_Name(bdd):
df = bdd["Name"]
df = df.map(lambda x: re.split('[,.]', x)[1])
df = pd.get_dummies(df, prefix = "Title")
bdd.drop('Name', axis = 1, inplace = True)
bdd = pd.concat([bdd, df], axis=1, join_axes=[bdd.index])
return bdd
def formate_Sex(bdd):
df = bdd["Sex"]
#df.fillna(df.median(), inplace = True)
df = pd.get_dummies(df, prefix = "Sexe")
bdd.drop('Sex', axis = 1, inplace = True)
bdd = pd.concat([bdd, df], axis=1, join_axes=[bdd.index])
return bdd
def formate_Age(bdd):
df = bdd["Age"]
df.fillna(df.median(), inplace = True)
df_young_child = pd.DataFrame({"Young child" : (df <= 5) * 1})
df_old_teen = pd.DataFrame({"Old Teen" : ((df >= 18) * (df <= 21)) * 1})
df_young_adult = pd.DataFrame({"Young adult" : ((df > 21) * (df <= 30)) * 1})
df_elder = pd.DataFrame({"Elder" : (df >= 64) *1})
bdd.drop('Age', axis = 1, inplace = True)
bdd = pd.concat([bdd, df, df_young_child, df_old_teen, df_young_adult, df_elder], axis=1, join_axes=[bdd.index])
return bdd
def formate_SibSp_Parch(bdd):
df1 = bdd["SibSp"]
df2 = bdd["Parch"]
df1.fillna(df1.median())
df2.fillna(df2.median())
dfS = pd.DataFrame({"Family": df1+df2})
dfA = pd.DataFrame({"Alone" : (dfS.Family == 0) * 1})
bdd.drop('SibSp', axis = 1, inplace = True)
bdd.drop('Parch', axis = 1, inplace = True)
bdd = pd.concat([bdd, df1, df2, dfS, dfA], axis=1, join_axes=[bdd.index])
return bdd
def map_ticket(s):
s = " ".join(s.split(" ")[:-1])
s = s.replace(".", "")
s = s.replace("/", "")
s = s.replace(" ", "")
return s
def formate_Ticket(bdd):
bdd.drop('Ticket', axis = 1, inplace = True)
return bdd
def formate_Fare(bdd):
df = bdd["Fare"]
df.fillna(df.median(), inplace = True)
bdd.drop('Fare', axis = 1, inplace = True)
bdd = pd.concat([bdd, df], axis=1, join_axes=[bdd.index])
return bdd
def formate_Cabin(bdd):
bdd.drop('Cabin', axis = 1, inplace = True)
return bdd
def formate_Embarked(bdd):
df = bdd["Embarked"]
#df.fillna(df.median(), inplace = True)
df = pd.get_dummies(df, prefix = "Embarked")
bdd.drop('Embarked', axis = 1, inplace = True)
bdd = pd.concat([bdd, df], axis=1, join_axes=[bdd.index])
return bdd
def drop(bdd, feature):
try:
bdd = bdd.drop(feature, axis = 1, inplace=False)
except ValueError:
pass
return bdd
def formate_drop(bdd):
liste_drop = ['Embarked_Q', 'Family', 'SibSp', 'Title_ the Countess', 'Title_ the Countess',
'Title_ Sir', 'Title_ Ms', 'Title_ Major', 'Title_ Lady', 'Title_ Jonkheer',
'Title_ Dr', 'Title_ Don', 'Title_ Col', 'Title_ Capt']
for d in liste_drop:
bdd = drop(bdd, d)
return bdd
def formate_bdd(BDD):
bdd = BDD.copy(deep = True)
bdd = formate_Pclass(bdd)
bdd = formate_Name(bdd)
bdd = formate_Sex(bdd)
bdd = formate_Age(bdd)
bdd = formate_SibSp_Parch(bdd)
bdd = formate_Ticket(bdd)
bdd = formate_Fare(bdd)
bdd = formate_Cabin(bdd)
bdd = formate_Embarked(bdd)
bdd = formate_drop(bdd)
bdd = bdd.fillna(bdd.median())
return bdd
def bdd2ary(bdd, scaler = None):
bdd = bdd.fillna(value = np.nan)
Y = bdd["Survived"].as_matrix()
bddX = bdd.drop("Survived", axis=1)
X = bddX.as_matrix()
imp = skp.Imputer(missing_values=np.nan, strategy='median', axis=0)
X = imp.fit_transform(X)
if not scaler: scaler = skp.Normalizer().fit(X)
Xr = scaler.transform(X)
return (Xr, Y, scaler) | UTF-8 | Python | false | false | 6,196 | py | 3 | titanic_data_clean.py | 3 | 0.586279 | 0.566263 | 0 | 203 | 29.522167 | 117 |
mattssll/warehouse_gzipped_json | 14,783,277,440,152 | adcd722783ee38f92c9b38329898a48d9b690706 | 660de55953ed910d4f2903e65e6350b10ca02374 | /app_spark/schemas/dataschemas.py | add36c9341f6a853d08575da152797d527d6ef8c | []
| no_license | https://github.com/mattssll/warehouse_gzipped_json | 9d19d96f34083afd996b9e542005173b21ecba08 | 2ab9707b48443e2d1d20c20979a9afb45303f49d | refs/heads/master | 2023-06-13T08:08:20.995583 | 2021-07-11T21:01:30 | 2021-07-11T21:01:30 | 376,405,017 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null |
from pyspark.sql.types import StructType,StructField, StringType, IntegerType, LongType, DoubleType, ArrayType
def getMetadataSchemas():
metadataSchema = StructType([ \
StructField("asin",StringType(),False), \
StructField("categories", ArrayType(ArrayType(StringType())),True), \
StructField("title",StringType(),True), \
StructField("price",DoubleType(),True), \
StructField("imUrl", StringType(), True), \
StructField("description", StringType(), True), \
StructField('related', StructType([
StructField('also_bought', ArrayType(StringType()), True),
StructField('also_viewed', ArrayType(StringType()), True),
StructField('bought_together', ArrayType(StringType()), True),
StructField('buy_after_viewing', ArrayType(StringType()), True),
])), \
StructField("brand", StringType(), True), \
StructField("salesRank", StringType(), True)])
reviewSchema=""
return [metadataSchema, reviewSchema]
| UTF-8 | Python | false | false | 1,064 | py | 19 | dataschemas.py | 16 | 0.628759 | 0.628759 | 0 | 20 | 52.15 | 110 |
zoeyangyy/algo | 8,667,244,037,183 | fd066deaa7f53c451dc312d5e5fcc14c88556b30 | 9f342ee429df0beed2fbb7610d5afb8c32186017 | /end_start_list.py | 2b09432d0ea28153e7a4aff8828656a27453d609 | []
| no_license | https://github.com/zoeyangyy/algo | 55025ea549a66580423ffee47eaa5004b0260cb1 | 1671b30c07ef0e495c8b5863b5e0433dcd62dc9b | refs/heads/master | 2020-03-26T23:25:03.187739 | 2018-08-23T16:14:23 | 2018-08-23T16:14:23 | 145,540,636 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Time : 2018/8/16 上午10:03
# @Author : Zoe
# @File : end_start_list.py
# @Description :
class ListNode:
def __init__(self, x):
self.val = x
self.next = None
class Solution:
# 返回从尾部到头部的列表值序列,例如[1,2,3]
def printListFromTailToHead(self, listNode):
# write code here
return_list = []
if listNode:
while listNode.next:
return_list.append(listNode.val)
listNode = listNode.next
return_list.append(listNode.val)
return return_list[::-1]
a = ListNode(4)
b = ListNode(5)
a.next = b
c = Solution()
print(c.printListFromTailToHead({})) | UTF-8 | Python | false | false | 752 | py | 28 | end_start_list.py | 28 | 0.560224 | 0.535014 | 0 | 33 | 20.666667 | 48 |
schreven/PyPokerEngine | 7,335,804,158,408 | f41be932519284bb7cc2227cceb9cca5d61bf485 | 041bd8e7ede7c63ff9eaaaec3136a29bfd37abe5 | /pypokerengine/engine/action_checker.py | 7a50d8f2b0f97e11b458c3f22528d5edab41fdb6 | [
"MIT"
]
| permissive | https://github.com/schreven/PyPokerEngine | 0211658c33bde2da9002de165a8b148d03735c93 | 1fd5990a561296634dae5ad18b49f6d1319d1089 | refs/heads/master | 2020-05-25T00:25:43.688985 | 2019-12-04T15:39:37 | 2019-12-04T15:39:37 | 187,533,953 | 3 | 0 | MIT | true | 2019-05-19T22:20:46 | 2019-05-19T22:20:46 | 2019-05-18T06:04:50 | 2019-03-13T03:33:10 | 677 | 0 | 0 | 0 | null | false | false | from functools import reduce
import sys
class ActionChecker:
@classmethod
def correct_action(self, players, player_pos, sb_amount, action, amount=None):
if self.is_allin(players[player_pos], action, amount):
amount = players[player_pos].stack + players[player_pos].paid_sum()
elif self.__is_illegal(players, player_pos, sb_amount, action, amount):
print('[ERROR] From action_checker, correct_action: illegal action. Folding')
action, amount = "fold", 0
return action, amount
@classmethod
def is_allin(self, player, action, bet_amount):
if action == 'call':
return bet_amount >= player.stack + player.paid_sum()
elif action == 'raise':
return bet_amount == player.stack + player.paid_sum()
else:
return False
@classmethod
def need_amount_for_action(self, player, amount):
return amount - player.paid_sum()
@classmethod
def agree_amount(self, players):
last_raise = self.__fetch_last_raise(players)
return last_raise["amount"] if last_raise else 0
@classmethod
def legal_actions(self, players, player_pos, sb_amount):
min_raise = self.__min_raise_amount(players, sb_amount)
max_raise = players[player_pos].stack + players[player_pos].paid_sum()
if max_raise < min_raise:
if self.agree_amount(players)>=max_raise:
min_raise = max_raise = -1
else:
min_raise = max_raise = players[player_pos].stack + players[player_pos].paid_sum()
return [
{ "action" : "fold" , "amount" : 0 },
{ "action" : "call" , "amount" : self.agree_amount(players) },
{ "action" : "raise", "amount" : { "min": min_raise, "max": max_raise } }
]
@classmethod
def _is_legal(self, players, player_pos, sb_amount, action, amount=None):
return not self.__is_illegal(players, player_pos, sb_amount, action, amount)
@classmethod
def __is_illegal(self, players, player_pos, sb_amount, action, amount=None):
if action == 'fold':
illegal= False
elif action == 'call':
illegal= self.__is_short_of_money(players[player_pos], amount)\
or self.__is_illegal_call(players, amount)
elif action == 'raise':
illegal= self.__is_short_of_money(players[player_pos], amount) \
or self.__is_illegal_raise(players, amount, sb_amount)
if illegal:
print('[ERROR] From action_checker, is_illegal: Attempting illegal action')
#sys.exit(1)
return illegal
@classmethod
def __is_illegal_call(self, players, amount):
return amount != self.agree_amount(players)
@classmethod
def __is_illegal_raise(self, players, amount, sb_amount):
return self.__min_raise_amount(players, sb_amount) > amount
@classmethod
def __min_raise_amount(self, players, sb_amount):
raise_ = self.__fetch_last_raise(players)
is_BB = self.__last_action_is_BB(players)
if is_BB:
ret = 4*sb_amount
elif raise_:
ret = raise_["amount"] + raise_["add_amount"]
else:
ret = sb_amount*2
return ret
@classmethod
def __is_short_of_money(self, player, amount):
return player.stack < amount - player.paid_sum()
@classmethod
def __fetch_last_raise(self, players):
all_histories = [p.action_histories for p in players]
all_histories = reduce(lambda acc, e: acc + e, all_histories) # flatten
raise_histories = [h for h in all_histories if h["action"] in ["RAISE", "SMALLBLIND", "BIGBLIND"]]
if len(raise_histories) == 0:
return None
else:
return max(raise_histories, key=lambda h: h["amount"]) # maxby
@classmethod
def __last_action_is_BB(self, players):
all_histories = [p.action_histories for p in players]
all_histories = reduce(lambda acc, e: acc + e, all_histories) # flatten
last_actions = [h for h in all_histories if h["action"] in ["RAISE", "BIGBLIND"]]
if len(last_actions) == 0:
return False
elif last_actions[-1]["action"] == 'BIGBLIND':
return True
else:
return False
| UTF-8 | Python | false | false | 4,024 | py | 54 | action_checker.py | 45 | 0.639414 | 0.636928 | 0 | 114 | 34.289474 | 102 |
jdsteinman/femCell | 14,405,320,327,962 | 0372da4bafffed3764c9faa1ecbbd15778369176 | 55f3db748a1c465fc2f437146f1c0df9df61b6f8 | /cell_simulation/post_tools.py | 3ae82720b63212a413f46cb05242be3ed5c1d3eb | []
| no_license | https://github.com/jdsteinman/femCell | d7536ea2ca1bbf5b8f4fa08d613a4c112b42bf7e | d3b88942a9ebe58455d2d41a0beafe979ac8873e | refs/heads/master | 2023-04-16T18:52:16.335374 | 2021-12-15T22:51:24 | 2021-12-15T22:51:24 | 302,151,270 | 1 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import numpy as np
import pandas as pd
import meshio
import pyvtk
from numpy.linalg import eig, norm
from scipy.linalg import polar
"""
Useful functions
"""
## Normalize an array of 3-component vectors
def normalize(a):
arr = np.asarray(a)
assert(np.ndim(arr) <= 2)
if arr.ndim == 1:
ss = np.sum(arr**2)**0.5
ss[ss==0] = 1e10
arr = arr/ss
elif arr.ndim == 2:
ss = np.sum(arr**2, axis=1)**0.5
ss[ss==0] = 1e10
arr = arr / ss[:, np.newaxis]
return arr
def arr_to_tensor(arr):
return tuple([tuple(map(tuple, A)) for A in arr])
## Surface Normals
def get_surface_normals(vert, conn):
# Check type
vert = np.asarray(vert, dtype="float64")
conn = np.asarray(conn, dtype="int64")
# Face coordinates
tris = vert[conn]
# Face normals
fn = np.cross(tris[:,1,:] - tris[:,0,:], tris[:,2,:] - tris[:,0,:] )
# Normalize face normals
fn = normalize(fn)
# Vertex normal = sum of adjacent face normals
n = np.zeros(vert.shape, dtype = vert.dtype)
n[ conn[:,0] ] += fn
n[ conn[:,1] ] += fn
n[ conn[:,2] ] += fn
# Normalize vertex normals
# Mult by -1 to make them point outward??
n = normalize(n) * -1
return n
## Returns dot product of displacement vectors with surface outward normal
def dots(disp, norms):
# normalize row-wise
disp = normalize(disp)
# dot products
dp = np.sum(disp*norms, axis=1)
return dp
"""
Point Data
"""
class UnstructuredData:
def __init__(self, surface_points, points, conn, u, u_data, F, mu=None):
"""
Class to export point data to vtk.
Usage:
point_data = PointData(points, conn, u, F, mu)
point_data.save_to_vtk("out.vtk")
Parameters
----------
points : array-like, float
conn : array-like, int
u : array-like, float
F : array-like, float
mu : array-like, float
"""
# Set inputs
self.spoints = np.array(surface_points, dtype=float)
self.points = np.array(points, dtype=float)
self.conn = np.array(conn, dtype=int)
npoints = self.points.shape[0]
ncells = self.conn.shape[0]
self.u = np.asfarray(u)
self.u_data = np.asfarray(u_data)
self.F = np.asfarray(F)
self.mu = np.asfarray(mu)
# Preallocate
self.r = np.zeros((npoints))
self.discrepancy = np.zeros((npoints))
self.dots = np.zeros((npoints))
self.Ndots = np.zeros((npoints))
self.C = np.zeros((npoints, 3, 3))
self.R = np.zeros((npoints, 3, 3))
self.U = np.zeros((npoints, 3, 3))
self.eigval = np.zeros((npoints, 3))
self.eigvec = np.zeros((npoints, 3, 3))
self.theta = np.zeros((npoints))
self.df = pd.DataFrame()
# Top level functions
def save_to_csv(self, fname, sep=","):
# Deformation
self._calculate()
# Store Data in df
self._assemble_df()
# Save to csv
self.df.to_csv(fname, sep=sep)
def save_to_vtk(self, fname):
# Deformation
self._calculate()
point_data = pyvtk.PointData(\
pyvtk.Vectors(self.u, name="u"),
pyvtk.Vectors(self.u, name="u_sim"),
pyvtk.Vectors(self.u_data, name="u_data"),
pyvtk.Scalars(self.discrepancy, name="discrepancy"),
pyvtk.Scalars(self.dots, name="Dot Product"),
pyvtk.Scalars(self.Ndots, name="Normalized Dot Product"),
pyvtk.Scalars(self.r, name="normal distance"),
pyvtk.Tensors(arr_to_tensor(self.F), name="F"),
pyvtk.Tensors(arr_to_tensor(self.C), name="C"),
pyvtk.Tensors(arr_to_tensor(self.R), name="R"),
pyvtk.Tensors(arr_to_tensor(self.U), name="U"),
pyvtk.Vectors(self.eigval, name="lambda"),
pyvtk.Vectors(self.eigvec[:,:,0], name="e1"),
pyvtk.Vectors(self.eigvec[:,:,1], name="e2"),
pyvtk.Vectors(self.eigvec[:,:,2], name="e3"),
pyvtk.Scalars(self.theta, name="theta"),
pyvtk.Scalars(self.mu, name="mu")
)
# cell_data = pyvtk.CellData(\
# pyvtk.Scalars(self.mu, name="mu")
# )
vtk = pyvtk.VtkData(\
pyvtk.UnstructuredGrid(self.points,
tetra=self.conn), # pyvtk.Tensors(arr_to_tensor(self.eigvec), name="v"),
point_data)
vtk.tofile(fname)
# Low Level Functions
def _calculate(self):
# Normal Distance
for i, point in enumerate(self.points):
r = point - self.spoints
r = np.sum(np.abs(r)**2, axis=-1)**(0.5)
self.r[i] = np.amin(r)
# Residuals
self.discrepancy = norm(self.u_data, axis=1) - norm(self.u, axis=1)
# Dot products
self.dots = np.sum(self.u*self.u_data, axis=1)
self.Ndots = np.sum(normalize(self.u)*normalize(self.u_data), axis=1)
# Polar Decomposition
R, U = [], []
for i, F in enumerate(self.F):
R, U = polar(F)
self.R[i] = R
self.U[i] = U
# Rotation angle
tr_R = np.trace(self.R, axis1=1, axis2=2)
self.theta = np.arccos((tr_R-1)/2) * 180 / np.pi
# Right Cauchy-Green Tensor
self.C = np.matmul(self.F.transpose(0,2,1), self.F)
# Eigenvalues/eigenvectors
self.eigval, self.eigvec = eig(self.U)
# Order by decreasing eigenvalue
sort_ind = np.argsort(self.eigval, axis=-1)
sort_ind = np.flip(sort_ind, axis=-1)
self.eigval = np.take_along_axis(self.eigval, sort_ind, -1)
self.eigval = np.sqrt(self.eigval)
for i, v in enumerate(self.eigvec):
v = v[:, sort_ind[i]]
v = normalize(v)
def _assemble_df(self):
# Clear df
self.df = pd.DataFrame()
# Points
x, y, z = np.hsplit(self.points, 3)
r = np.sum((self.points-self.points[0])**2, axis=1)**0.5
self.df["x"] = x.flatten()
self.df["y"] = y.flatten()
self.df["z"] = z.flatten()
self.df["r"] = r.flatten()
# Shear Modulus
self.df["mu"] = self.mu.flatten()
# Displacement
ux, uy, uz = np.hsplit(self.u, 3)
umag = (ux**2 + uy**2 + uz**2)**.5
self.df["Ux"] = ux.flatten()
self.df["Uy"] = uy.flatten()
self.df["Uz"] = uz.flatten()
self.df["Umag"] = umag.flatten()
# Deformation Tensor
columns = ['F11','F12','F13','F21','F22','F23','F31','F32','F33']
for col, dat in zip(columns, self.F.reshape((-1,9)).T):
self.df[col] = dat
# Rotation Tensor
columns = ['R11','R12','R13','R21','R22','R23','R31','R32','R33']
for col, dat in zip(columns, self.R.reshape((-1,9)).T):
self.df[col] = dat
# Stretch Tensor
columns = ['U11','U12','U13','U21','U22','U23','U31','U32','U33']
for col, dat in zip(columns, self.U.reshape((-1,9)).T):
self.df[col] = dat
# Right Cauchy-Green Tensor
columns = ['C11','C12','C13','C21','C22','C23','C31','C32','C33']
for col, dat in zip(columns, self.C.reshape((-1,9)).T):
self.df[col] = dat
# Eigenvalues, Eigenvectors
columns = ['w1', 'w2', 'w3']
for col, dat in zip(columns, self.eigval.transpose()):
self.df[col] = dat
columns = ["v11","v12", "v13", "v21", "v22", "v23", "v31", "v32", "v33"]
for col, dat in zip(columns, self.eigvec.reshape((-1,9), order="F").T):
self.df[col] = dat
# Rotation angle
self.df["theta"]=self.theta
class PointData:
def __init__(self, vert, beads, u_sim, u_data, F):
"""
Class to export point data to vtk.
Usage:
Parameters
----------
points : array-like, float
u : array-like, float
F : array-like, float
"""
# Set inputs
self.vert = np.array(vert, dtype=float)
self.points = np.array(beads, dtype=float)
npoints = self.points.shape[0]
self.u = np.asfarray(u_sim)
self.u_data = np.asfarray(u_data)
self.F = np.asfarray(F)
# Preallocate
self.r = np.zeros((npoints))
self.res = np.zeros((npoints))
self.perr = np.zeros((npoints))
self.dots = np.zeros((npoints))
self.Ndots = np.zeros((npoints))
self.C = np.zeros((npoints, 3, 3))
self.R = np.zeros((npoints, 3, 3))
self.U = np.zeros((npoints, 3, 3))
self.eigval = np.zeros((npoints, 3))
self.eigvec = np.zeros((npoints, 3, 3))
self.theta = np.zeros((npoints))
self.df = pd.DataFrame()
# Top level functions
def save_to_vtk(self, fname):
# Deformation
self._calculate()
point_data = pyvtk.PointData(\
pyvtk.Vectors(self.u, name="u"),
pyvtk.Vectors(self.u, name="u_sim"),
pyvtk.Scalars(self.r, name="normal distance"),
pyvtk.Vectors(self.u_data, name="u_data"),
pyvtk.Vectors(self.res, name="residuals"),
pyvtk.Vectors(self.perr, name="Percent Error"),
pyvtk.Scalars(self.dots, name="Dot Product"),
pyvtk.Scalars(self.Ndots, name="Normalized Dot Product"),
pyvtk.Tensors(arr_to_tensor(self.F), name="F"),
pyvtk.Tensors(arr_to_tensor(self.C), name="C"),
pyvtk.Tensors(arr_to_tensor(self.R), name="R"),
pyvtk.Tensors(arr_to_tensor(self.U), name="U"),
pyvtk.Vectors(self.eigval, name="lambda"),
pyvtk.Vectors(self.eigvec[:,:,0], name="e1"),
pyvtk.Vectors(self.eigvec[:,:,1], name="e2"),
pyvtk.Vectors(self.eigvec[:,:,2], name="e3"),
pyvtk.Scalars(self.theta, name="theta")
)
vtk = pyvtk.VtkData(\
pyvtk.PolyData(self.points),
point_data)
vtk.tofile(fname)
# Low Level Functions
def _calculate(self):
# Normal Distance
for i, point in enumerate(self.points):
r = point - self.vert
r = np.sum(np.abs(r)**2, axis=-1)**(0.5)
self.r[i] = np.amin(r)
# Residuals
self.res = self.u_data - self.u
self.perr = np.abs(self.res/self.u_data)*100
# Dot products
self.dots = np.sum(self.u*self.u_data, axis=1)
self.Ndots = np.sum(normalize(self.u)*normalize(self.u_data), axis=1)
# Polar Decomposition
R, U = [], []
for i, F in enumerate(self.F):
R, U = polar(F)
self.R[i] = R
self.U[i] = U
# Rotation angle
tr_R = np.trace(self.R, axis1=1, axis2=2)
self.theta = np.arccos((tr_R-1)/2) * 180 / np.pi
# Right Cauchy-Green Tensor
self.C = np.matmul(self.F.transpose(0,2,1), self.F)
# Eigenvalues/eigenvectors
self.eigval, self.eigvec = eig(self.U)
# Order by decreasing eigenvalue
sort_ind = np.argsort(self.eigval, axis=-1)
sort_ind = np.flip(sort_ind, axis=-1)
self.eigval = np.take_along_axis(self.eigval, sort_ind, -1)
self.eigval = np.sqrt(self.eigval)
for i, v in enumerate(self.eigvec):
v = v[:, sort_ind[i]]
v = normalize(v) | UTF-8 | Python | false | false | 11,516 | py | 103 | post_tools.py | 41 | 0.531 | 0.511289 | 0 | 370 | 30.127027 | 99 |
Orionisxoxo/microservices_bookings | 9,629,316,693,287 | 537af92bd30eac84375eb4d57cfba0d3ecd07cdf | e0439be388dba6554a10ec16486c0cf153e69551 | /tests/bookings.py | 49ec2f9015c243eeac6c512db27ea58870b0dc0f | []
| no_license | https://github.com/Orionisxoxo/microservices_bookings | 9ba9a906f1933154a22f8b8f538489d5748f6d4e | 70be20de023b09cf43c5e53a16236826f0109b3b | refs/heads/feature_templates | 2020-12-09T19:56:04.168738 | 2020-01-27T23:11:15 | 2020-01-27T23:11:15 | 233,404,794 | 0 | 0 | null | false | 2020-01-12T14:30:29 | 2020-01-12T14:25:24 | 2020-01-12T14:30:14 | 2020-01-12T14:30:28 | 0 | 0 | 0 | 1 | Tcl | false | false | import unittest
import requests
class TestBookingService(unittest.TestCase):
def setUp(self):
self.url = "http://127.0.0.1:5003/bookings"
def test_booking_records(self):
for date, expected in GOOD_RESPONSES.iteritems():
reply = requests.get("{}/{}".format(self.url, date))
actual_reply = reply.json()
self.assertEqual(len(actual_reply), len(expected),
"Got {} booking but expected {}".format(
len(actual_reply), len(expected)
))
# Use set because the order doesn't matter
self.assertEqual(set(actual_reply), set(expected),
"Got {} but expected {}".format(
actual_reply, expected))
def test_not_found(self):
invalid_user = "jim_the_duck_guy"
actual_reply = requests.get("{}/{}".format(self.url, invalid_user))
self.assertEqual(actual_reply.status_code, 404,
"Got {} but expected 404".format(
actual_reply.status_code))
GOOD_RESPONSES = {
"piotr_szyszka": {
"20191201": [
"sleepy_hollow"
]
},
"dominik_nowak": {
"20191201": [
"sleepy_hollow"
],
"20191202": [
"iron_man"
]
},
"karolina_popielik": {
"20191201": [
"victor_frankenstein",
"sleepy_hollow"
],
"20191205": [
"titanic",
"iron_man"
]
}
}
if __name__ == "__main__":
unittest.main()
| UTF-8 | Python | false | false | 1,563 | py | 3 | bookings.py | 1 | 0.506078 | 0.47025 | 0 | 59 | 25.491525 | 75 |
BoxBoxter/SPSGEcodeBreaker | 8,907,762,180,512 | 18491d93c44e362981ad19de81f6d28a715d1a77 | f6b30099c98b3352307fd7464fa48ab66e5bd0b2 | /codeBreakerLetras.py | 39a68f0a996424f626055c08dff52dd804085d08 | []
| no_license | https://github.com/BoxBoxter/SPSGEcodeBreaker | 27882b75354b06c89d0035080cf09094d984a4b4 | 9f277bd93d2bb59821ffd877548642d8a59eead8 | refs/heads/master | 2020-12-15T00:15:32.859318 | 2020-01-19T15:50:46 | 2020-01-19T15:50:46 | 234,924,682 | 0 | 0 | null | null | null | null | null | null | null | null | null | null | null | null | null | import random
def obtenerNumeroAleatorio():
"""
Devolvemos una palabra de la lista apartir de un random de la longitud
:return:
"""
arrayPalabras =["marco", "pablo", "lucas", "miguel"]
#numero = random.randint(0, len(arrayPalabras)-1)
#print ("Numero Generado " + str(numero))
#palabra = arrayPalabras[numero]
#print ("Palabra escogida " + palabra)
#return palabra
return arrayPalabras[random.randint(0, len(arrayPalabras)-1)]
def comparar(palabraUsuario, palabraGenerada):
palabraUsuarioN = list(palabraUsuario)
palabraGeneradaN = list(palabraGenerada)
if (palabraUsuarioN == palabraGeneradaN):
return "Acertaste"
else:
for i in range(0,len(palabraUsuario)):
if (palabraUsuario[i] == palabraGenerada[i]):
return "Acercate una letra en una posicion"
if (palabraUsuario[i] in palabraGenerada):
return "Acertaste una letra pero no es la posicion que toca"
return "Fallaste"
def game():
acertado = False
palabraGenerada = obtenerNumeroAleatorio()
while not (acertado):
#try:
palabraUsuario = raw_input("Di un palabra de cinco letras \n")
respuesta = comparar(palabraUsuario, palabraGenerada)
if respuesta == "Acertaste":
acertado = True
print respuesta
#except:
#print "No se puede introducir un valor que sea distinto a un numero"
game() | UTF-8 | Python | false | false | 1,483 | py | 1 | codeBreakerLetras.py | 1 | 0.639245 | 0.635873 | 0 | 46 | 31.26087 | 81 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.