|
import os
from datetime import datetime, time, timedelta

import pandas as pd
import pytz
import yfinance as yf
from pymongo import MongoClient
|
|
|
mongo_url = os.environ['MongoURL'] |
|
df_logo = pd.read_csv('https://raw.githubusercontent.com/jarvisx17/nifty500/main/Stocks.csv') |
|
df_logo = df_logo[['Symbol','Industry', "logo", "FNO"]] |
|
tz = pytz.timezone('Asia/Kolkata') |
|
def UpdatedCollectionName(): |
|
current_time = datetime.now(tz) |
|
collection_name = current_time.strftime('%Y-%m-%d') |
|
if current_time.time() >= datetime.strptime('15:30', '%H:%M').time(): |
|
collection_name = (current_time + timedelta(days=1)).strftime('%Y-%m-%d') |
|
return collection_name |
|
else: |
|
return collection_name |
|
|
|
def get_rsi(close, lookback=14): |
|
ret = close.diff() |
|
up = [] |
|
down = [] |
|
for i in range(len(ret)): |
|
if ret.iloc[i] < 0: |
|
up.append(0) |
|
down.append(ret.iloc[i]) |
|
else: |
|
up.append(ret.iloc[i]) |
|
down.append(0) |
|
up_series = pd.Series(up, index=close.index) |
|
down_series = pd.Series(down, index=close.index).abs() |
|
up_ewm = up_series.ewm(com=lookback - 1, adjust=False).mean() |
|
down_ewm = down_series.ewm(com=lookback - 1, adjust=False).mean() |
|
rs = up_ewm / down_ewm |
|
rsi = 100 - (100 / (1 + rs)) |
|
rsi_df = pd.DataFrame(rsi, columns=['RSI']) |
|
return rsi_df |
|
|
|
def Stocks(): |
|
|
|
utc_now = datetime.utcnow() |
|
indian_timezone = pytz.timezone('Asia/Kolkata') |
|
indian_now = utc_now.replace(tzinfo=pytz.utc).astimezone(indian_timezone) |
|
end_date = utc_now |
|
end_date = end_date.replace(tzinfo=pytz.utc).astimezone(indian_timezone) |
|
end_date = end_date.replace(tzinfo=pytz.utc).astimezone(indian_timezone) |
|
|
|
start_date = end_date - timedelta(days=365) |
|
nifty500_symbols = [] |
|
nifty500 = pd.read_csv('https://archives.nseindia.com/content/indices/ind_nifty500list.csv') |
|
for symbol in nifty500.Symbol: |
|
symbol = f'{symbol}.NS' |
|
nifty500_symbols.append(symbol) |
|
|
|
nifty500_data = pd.DataFrame() |
|
print("Downloading data...") |
|
for symbol in nifty500_symbols: |
|
try: |
|
stock_data = yf.download(symbol, start=start_date, end=end_date, progress=False) |
|
stock_data['Symbol'] = symbol |
|
nifty500_data = pd.concat([nifty500_data, stock_data], axis=0) |
|
except Exception as e: |
|
print(f"Error fetching data for {symbol}: {e}") |
|
|
|
nifty500_data.reset_index(inplace=True) |
|
nifty500_data['RSI'] = nifty500_data.groupby('Symbol')['Close'].apply(lambda x: get_rsi(x, lookback=14)) |
|
nifty500_data['SMA20'] = nifty500_data.groupby('Symbol')['Close'].transform(lambda x: x.rolling(window=20).mean()) |
|
nifty500_data['PercentageChange'] = nifty500_data.groupby('Symbol')['Close'].pct_change() * 100 |
|
nifty500_data_last_2_rows = nifty500_data.groupby('Symbol').tail(2) |
|
nifty500_data_last_2_rows.reset_index(drop=True, inplace=True) |
|
nifty500_data_last_2_rows['Prev_RSI'] = nifty500_data_last_2_rows.groupby('Symbol')['RSI'].shift(1) |
|
|
|
filtered_data_by_stock = [] |
|
for symbol, group in nifty500_data_last_2_rows.groupby('Symbol'): |
|
filtered_stock_data = group[(group['RSI'] >= 60) & (group['Prev_RSI'] < 60)] |
|
if not filtered_stock_data.empty: |
|
filtered_data_by_stock.append(filtered_stock_data) |
|
|
|
filtered_data = pd.concat(filtered_data_by_stock) |
|
filtered_data.reset_index(drop=True, inplace=True) |
|
filtered_data[['Open', 'High','Low', 'Close', 'RSI', 'Prev_RSI','SMA20', 'PercentageChange']] = filtered_data[['Open', 'High','Low', 'Close', 'RSI', 'Prev_RSI', 'SMA20', 'PercentageChange']].round(2) |
|
filtered_data = filtered_data.sort_values(by='PercentageChange', ascending=False) |
|
filtered_data.reset_index(drop=True, inplace=True) |
|
filtered_data = pd.merge(filtered_data, df_logo, on='Symbol', how='inner') |
|
filtered_data = filtered_data[['Symbol', 'Date', 'Open', 'High', 'Low', 'Close', 'RSI', 'Prev_RSI','PercentageChange','Industry','FNO', "logo"]] |
|
client = MongoClient(mongo_url) |
|
db = client['mydatabase'] |
|
collection_name = UpdatedCollectionName() |
|
collection = db[collection_name] |
|
data_dict = filtered_data.to_dict(orient='records') |
|
collection.insert_many(data_dict) |
|
return filtered_data.to_dict(orient="records") |