Update daily.py
daily.py
CHANGED
@@ -6,110 +6,6 @@ import requests
 from datetime import datetime, timedelta
 from ta_indi_pat import talib_df # use the combined talib_df function
 from common import html_card, wrap_html
-
-# -----------------------------
-# Global Variables
-# -----------------------------
-nse_del_key_map = {
-    'Symbol': "Symbol", 'Series': "Series",
-    'Date': 'Date', 'Prev Close': 'Preclose',
-    'Open Price': 'Open', 'High Price': 'High',
-    'Low Price': 'Low', 'Last Price': 'Last',
-    'Close Price': 'Close', 'Average Price': 'AvgPrice',
-    'Total Traded Quantity': 'Volume',
-    'Turnover ₹': 'Turnover', 'No. of Trades': "Trades",
-    'Deliverable Qty': "Delivery", '% Dly Qt to Traded Qty': "Del%"
-}
-
-# -----------------------------
-# Data Fetching Functions (NSE)
-# -----------------------------
-def url_nse_del(symbol, start_date, end_date):
-    base_url = "https://www.nseindia.com/api/historicalOR/generateSecurityWiseHistoricalData"
-    start_date_str = start_date.strftime("%d-%m-%Y")
-    end_date_str = end_date.strftime("%d-%m-%Y")
-    url = f"{base_url}?from={start_date_str}&to={end_date_str}&symbol={symbol.split('.')[0]}&type=priceVolumeDeliverable&series=ALL&csv=true"
-    return url
-
-def to_numeric_safe(series):
-    series = series.replace('-', 0)
-    series = series.fillna(0)
-    series = series.astype(str).str.replace(',', '')
-    return pd.to_numeric(series, errors='coerce').fillna(0)
-
-
-def nse_del(symbol, start_date_str=None, end_date_str=None):
-    # Default end date is today
-    end_date = datetime.now()
-    if end_date_str:
-        try:
-            end_date = datetime.strptime(end_date_str, "%Y-%m-%d")
-        except ValueError:
-            print(f"Warning: Invalid end date format '{end_date_str}'. Using today's date.")
-            end_date = datetime.now()
-
-    # Default start date is one year prior to end_date
-    start_date = end_date - timedelta(days=365)
-    if start_date_str:
-        try:
-            start_date = datetime.strptime(start_date_str, "%Y-%m-%d")
-        except ValueError:
-            print(f"Warning: Invalid start date format '{start_date_str}'. Using default start date.")
-            start_date = end_date - timedelta(days=365)
-
-    # Ensure start_date is not after end_date
-    if start_date > end_date:
-        print("Warning: Start date is after end date. Swapping dates.")
-        start_date, end_date = end_date, start_date
-
-    url = url_nse_del(symbol, start_date, end_date)
-    headers = {
-        'User-Agent': 'Mozilla/5.0'
-    }
-    try:
-        response = requests.get(url, headers=headers)
-        response.raise_for_status()
-        if response.content:
-            df = pd.read_csv(io.StringIO(response.content.decode('utf-8'))).round(2)
-            df.columns = df.columns.str.strip()
-            df.rename(columns=nse_del_key_map, inplace=True)
-
-            # Capitalize the first letter of ALL column names after renaming
-            df.columns = [col.capitalize() for col in df.columns]
-
-            # Remove 'Symbol', 'Series', 'Avgprice', and 'Last' columns (now capitalized)
-            df.drop(columns=['Symbol','Series','Avgprice','Last'], errors='ignore', inplace=True)
-
-            # Convert 'Date' column to datetime objects
-            df['Date'] = pd.to_datetime(df['Date'], format='%d-%b-%Y').dt.strftime('%Y-%m-%d')
-
-            numeric_cols = ['Close', 'Preclose', 'Open', 'High', 'Low', 'Volume', 'Delivery', 'Turnover', 'Trades']
-            # Ensure numeric_cols are capitalized before checking and conversion
-            numeric_cols_capitalized = [col.capitalize() for col in numeric_cols]
-            for col in numeric_cols_capitalized:
-                if col in df.columns:
-                    df[col] = to_numeric_safe(df[col])
-                else:
-                    df[col] = 0
-            return df
-    except Exception as e:
-        print(f"Error fetching data from NSE for {symbol}: {e}")
-        return None
-
-def daily(symbol,source="yfinace"):
-    if source=="yfinance":
-        df = yf.download(symbol + ".NS", period="1y", interval="1d").round(2)
-        if df.empty:
-            return html_card("Error", f"No daily data found for {symbol}")
-
-        # --- Standardize columns ---
-        df.columns = ["Close", "High", "Low", "Open", "Volume"]
-        df.reset_index(inplace=True) # make Date a column
-
-    if source=="NSE":
-        df=nse_del(symbol)
-        print("df from nse data")
-    return df
 def fetch_daily(symbol, source,max_rows=200):
     """
     Fetch daily OHLCV data, calculate TA-Lib indicators + patterns,
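For reference, the cleanup that the removed to_numeric_safe helper applied to NSE CSV columns can be exercised on its own. The snippet below is a minimal standalone sketch of that same coercion using made-up sample values; it is an illustration, not part of the updated daily.py.

```python
import pandas as pd

def to_numeric_safe(series: pd.Series) -> pd.Series:
    # Replace NSE's '-' placeholder and missing values with 0,
    # strip thousands separators, then coerce everything to numbers.
    series = series.replace('-', 0)
    series = series.fillna(0)
    series = series.astype(str).str.replace(',', '')
    return pd.to_numeric(series, errors='coerce').fillna(0)

# Hypothetical values in the style of the NSE price/volume/deliverable CSV
raw = pd.Series(['1,234.50', '-', None, '98,76,543'])
print(to_numeric_safe(raw).tolist())  # [1234.5, 0.0, 0.0, 9876543.0]
```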
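Likewise, the request URL that the removed url_nse_del helper assembled for the priceVolumeDeliverable CSV endpoint can be reproduced in isolation. This sketch simply mirrors the deleted string formatting rather than documenting the NSE API; the symbol and dates are hypothetical.

```python
from datetime import datetime

def url_nse_del(symbol, start_date, end_date):
    # Same formatting as the removed helper: dd-mm-YYYY dates, CSV output requested.
    base_url = "https://www.nseindia.com/api/historicalOR/generateSecurityWiseHistoricalData"
    start_str = start_date.strftime("%d-%m-%Y")
    end_str = end_date.strftime("%d-%m-%Y")
    return (f"{base_url}?from={start_str}&to={end_str}"
            f"&symbol={symbol.split('.')[0]}"
            f"&type=priceVolumeDeliverable&series=ALL&csv=true")

# Example with made-up dates; a ".NS" suffix is dropped by split('.')
print(url_nse_del("TCS.NS", datetime(2024, 1, 1), datetime(2024, 12, 31)))
```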