Oviya commited on
Commit
66dc1bf
·
1 Parent(s): 1ca8ce0

Track binaries via Git LFS (analysedata.xlsx, TA_Lib wheel)

Browse files
.dockerignore ADDED
@@ -0,0 +1,69 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -----------------------
2
+ # VCS / metadata
3
+ # -----------------------
4
+ .git
5
+ .gitignore
6
+
7
+ # -----------------------
8
+ # Python build artefacts
9
+ # -----------------------
10
+ __pycache__/
11
+ *.pyc
12
+ *.pyo
13
+ *.pyd
14
+ *.log
15
+ *.coverage
16
+ .pytest_cache/
17
+ pytest_cache/
18
+ htmlcov/
19
+ .tox/
20
+
21
+ # -----------------------
22
+ # Virtual environments
23
+ # -----------------------
24
+ .venv/
25
+ venv/
26
+ env/
27
+ ENV/
28
+
29
+ # -----------------------
30
+ # OS / Editor files
31
+ # -----------------------
32
+ .DS_Store
33
+ Thumbs.db
34
+ .vscode/
35
+ .idea/
36
+ *.iml
37
+
38
+ # -----------------------
39
+ # App caches / local data
40
+ # -----------------------
41
+ cache/
42
+ wheels/
43
+ logs/
44
+ tmp/
45
+ temp/
46
+
47
+ # Optional large/local data (uncomment if not needed in image)
48
+ # data/
49
+ # datasets/
50
+ # models/
51
+ # checkpoints/
52
+ # nltk_data/
53
+ # *.pt
54
+ # *.pth
55
+ # *.pkl
56
+ # *.joblib
57
+ # *.sqlite
58
+ # *.db
59
+
60
+ # -----------------------
61
+ # Node (if present)
62
+ # -----------------------
63
+ node_modules/
64
+
65
+ # -----------------------
66
+ # Env files / secrets
67
+ # -----------------------
68
+ .env
69
+ .env.*
.gitattributes CHANGED
@@ -33,3 +33,5 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
 
 
 
33
  *.zip filter=lfs diff=lfs merge=lfs -text
34
  *.zst filter=lfs diff=lfs merge=lfs -text
35
  *tfevents* filter=lfs diff=lfs merge=lfs -text
36
+ *.xlsx filter=lfs diff=lfs merge=lfs -text
37
+ TA_Lib-*.whl filter=lfs diff=lfs merge=lfs -text
.gitignore ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -----------------------
2
+ # Python / Flask basics
3
+ # -----------------------
4
+ __pycache__/
5
+ *.py[cod]
6
+ *$py.class
7
+
8
+ # Virtual envs
9
+ .venv/
10
+ venv/
11
+ env/
12
+ ENV/
13
+
14
+ # Build / packaging
15
+ build/
16
+ dist/
17
+ *.egg-info/
18
+ .eggs/
19
+
20
+ # Testing / coverage
21
+ .pytest_cache/
22
+ pytest_cache/
23
+ .coverage
24
+ htmlcov/
25
+ .tox/
26
+
27
+ # Jupyter
28
+ .ipynb_checkpoints/
29
+
30
+ # Logs & PIDs
31
+ logs/
32
+ *.log
33
+ *.pid
34
+
35
+ # OS-specific
36
+ .DS_Store
37
+ Thumbs.db
38
+
39
+ # IDE / Editor
40
+ .vscode/
41
+ .idea/
42
+ *.iml
43
+ *.code-workspace
44
+
45
+ # -----------------------
46
+ # App-specific
47
+ # -----------------------
48
+
49
+ # Local environment files (keep examples if you want)
50
+ .env
51
+ .env.*
52
+ !.env.example
53
+
54
+ # Credentials / keys (VERY IMPORTANT)
55
+ *.pem
56
+ *.p12
57
+ *.key
58
+ *.crt
59
+ *.cer
60
+ *.der
61
+ *.pfx
62
+ *.enc
63
+ *service-account*.json
64
+ *credentials*.json
65
+ *credential*.json
66
+ *-sa.json
67
+ *secret*.json
68
+ learnenglish-ai-*.json
69
+ gcloud*.json
70
+
71
+ # Media / generated assets
72
+ static/videos/
73
+ static/audio/
74
+ static/transcripts/
75
+ uploads/
76
+ tmp/
77
+ temp/
78
+ *.tmp
79
+
80
+ # MoviePy / temp renders (keep source code, ignore big artifacts)
81
+ # (If you actually want to commit sample media, remove these)
82
+ *.moviepy_temp*
83
+
84
+
85
+
86
+ # Build caches / wheels
87
+ cache/
88
+ wheels/
89
+
90
+ # Data / models you don’t want in git
91
+ data/
92
+ datasets/
93
+ models/
94
+ checkpoints/
95
+ *.pt
96
+ *.pth
97
+ *.pkl
98
+ *.joblib
99
+ *.sqlite
100
+ *.db
101
+
102
+ # NLTK and similar local data
103
+ nltk_data/
104
+
105
+ # Node stuff (in case)
106
+ node_modules/
107
+
Dockerfile ADDED
@@ -0,0 +1,40 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
FROM python:3.11-slim

ENV PYTHONDONTWRITEBYTECODE=1 \
    PYTHONUNBUFFERED=1

# System deps for builds (lxml, numpy/scipy), plus tools
RUN apt-get update && apt-get install -y --no-install-recommends \
    build-essential curl wget git \
    libxml2-dev libxslt1-dev zlib1g-dev \
    libjpeg-dev libpng-dev \
    libopenblas-dev liblapack-dev gfortran \
    ca-certificates && \
    rm -rf /var/lib/apt/lists/*

# Build and install TA-Lib (C library) so python TA-Lib will work
RUN curl -L -o ta-lib-0.4.0-src.tar.gz http://prdownloads.sourceforge.net/ta-lib/ta-lib-0.4.0-src.tar.gz && \
    tar -xzf ta-lib-0.4.0-src.tar.gz && cd ta-lib-0.4.0 && \
    ./configure --prefix=/usr && make && make install && \
    cd .. && rm -rf ta-lib-0.4.0 ta-lib-0.4.0-src.tar.gz

WORKDIR /app

# Python deps first (better layer caching)
COPY requirements.txt /app/requirements.txt
RUN python -m pip install --upgrade pip && \
    pip install --no-cache-dir -r /app/requirements.txt

# PyTorch (CPU wheels from official index)
RUN pip install --no-cache-dir --index-url https://download.pytorch.org/whl/cpu \
    torch torchvision torchaudio

# Pre-fetch NLTK data used by the app (VADER sentiment lexicon).
# Fix: the original used `RUN python - <<'PY'\n...\nPY` with literal "\n"
# characters on one line — the shell receives them verbatim (no here-doc is
# formed) and the build fails. A plain -c one-liner needs no here-doc.
RUN python -c "import nltk; nltk.download('vader_lexicon')"

# Copy the application code
COPY . /app

# Start with gunicorn; HF provides $PORT
# If your app object is named "app" in "pytrade.py", this is correct.
CMD bash -lc "gunicorn -w 1 -k gthread -b 0.0.0.0:${PORT:-7860} pytrade:app --timeout 180"
README.md DELETED
@@ -1,10 +0,0 @@
1
- ---
2
- title: Pytrade Backend
3
- emoji: 👁
4
- colorFrom: indigo
5
- colorTo: green
6
- sdk: docker
7
- pinned: false
8
- ---
9
-
10
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
TA_Lib-0.4.29-cp312-cp312-win_amd64.whl ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:234d0aa150039b2263de0cc2828882db52e759bd9d785f46be52ddc9e9299a39
3
+ size 531318
adxstrategies.py ADDED
@@ -0,0 +1,143 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import talib
6
+ import datetime
7
+
8
# Compute the trend-strength indicator (ADX) and its directional components.
def calculate_adx(data, period=14):
    """Return (ADX, +DI, -DI) computed over the 'high'/'low'/'close' columns."""
    highs = data['high']
    lows = data['low']
    closes = data['close']

    adx = talib.ADX(highs, lows, closes, timeperiod=period)
    plus_di = talib.PLUS_DI(highs, lows, closes, timeperiod=period)
    minus_di = talib.MINUS_DI(highs, lows, closes, timeperiod=period)
    return adx, plus_di, minus_di
19
+
20
# Signal a +DI / -DI crossover within the recent lookback window, gated on ADX strength.
def adx_di_crossover_strategy(plus_di, minus_di, adx, threshold=20, lookback_days=5):
    """Return "Bullish"/"Bearish" on the first DI crossover found in the last
    `lookback_days` bars where ADX exceeds `threshold`, else "Neutral"."""
    for offset in range(-lookback_days, 0):
        # Skip bars where the trend is too weak for a crossover to matter.
        if adx[offset] <= threshold:
            continue
        # Bullish: +DI just crossed above -DI on this bar.
        if plus_di[offset] > minus_di[offset] and plus_di[offset - 1] <= minus_di[offset - 1]:
            return "Bullish"
        # Bearish: -DI just crossed above +DI on this bar.
        if minus_di[offset] > plus_di[offset] and minus_di[offset - 1] <= plus_di[offset - 1]:
            return "Bearish"

    return "Neutral"
34
+
35
#ADX Breakout strategy
def adx_breakout_strategy(data, adx, threshold=25):
    """Detect a price breakout confirmed by trend strength.

    Bullish when the last close breaks above the prior bar's high while
    ADX exceeds `threshold`; bearish when it breaks below the prior bar's low.

    Fix: the original indexed the pandas columns with `data['close'][-1]`,
    which is a label-based lookup and raises KeyError on default integer
    indexes in pandas >= 2.0; positional `.iloc` is used instead.
    """
    # `adx` is assumed to be a plain sequence/ndarray where negative
    # indexing is positional — TODO confirm against calculate_adx's return.
    if adx[-1] > threshold:
        if data['close'].iloc[-1] > data['high'].iloc[-2]:  # Bullish breakout
            return "Bullish"
        elif data['close'].iloc[-1] < data['low'].iloc[-2]:  # Bearish breakout
            return "Bearish"

    return "Neutral"
46
+
47
# ADX Slope Strategy: classify the recent ADX trajectory by its average slope.
def get_adx_slope_signal(adx, days=5, threshold=0.1):
    """Average the bar-to-bar ADX change over the last `days` bars and map it
    to "Bullish" (rising), "Bearish" (falling) or "Neutral" (flat)."""
    deltas = [adx[i + 1] - adx[i] for i in range(-days, -1)]
    avg_slope = sum(deltas) / (days - 1)

    if avg_slope > threshold:
        return "Bullish"
    if avg_slope < -threshold:
        return "Bearish"
    return "Neutral"
65
+
66
# ADX Divergence Strategy
def adx_divergence_strategy(data, adx, threshold=25):
    """
    Detects divergence between price and ADX.
    A divergence occurs when price makes a new high/low, but ADX does not follow the same direction.

    Fix: the price columns are now indexed positionally with `.iloc`; the
    original `price_high[-1]` is a label lookup that raises KeyError on
    default integer indexes in pandas >= 2.0. (`threshold` is accepted but
    unused, exactly as in the original.)
    """
    price_high = data['high']
    price_low = data['low']

    # Checking for divergence; `adx` is assumed to support positional
    # negative indexing (list/ndarray) — TODO confirm caller's type.
    price_divergence_bullish = price_high.iloc[-1] > price_high.iloc[-2] and adx[-1] < adx[-2]
    price_divergence_bearish = price_low.iloc[-1] < price_low.iloc[-2] and adx[-1] > adx[-2]

    if price_divergence_bullish:
        return "Bullish"
    elif price_divergence_bearish:
        return "Bearish"
    return "Neutral"
84
+
85
+
86
# Main ADX strategy function
def adx_strategies(data):
    # Run every ADX sub-strategy on `data` (OHLC frame with lowercase
    # 'high'/'low'/'close' columns) and combine them into a weighted score
    # plus a final Buy / DBuy / Neutral verdict.

    # Calculate ADX, +DI, and -DI
    adx, plus_di, minus_di = calculate_adx(data)

    # NOTE(review): `.iloc[-1]` below implies `adx` is a pandas Series, while
    # the helper calls index it positionally (`adx[-5:]`, `adx[-1]`) — confirm
    # what the talib build in use actually returns; one of the two forms will
    # break otherwise.
    signals = {
        "ADX": round(adx.iloc[-1], 2),
        "ADX + DI Crossover": adx_di_crossover_strategy(plus_di, minus_di, adx),
        "ADX Breakout": adx_breakout_strategy(data, adx),
        "ADX Slope": get_adx_slope_signal(adx[-5:]),
        "ADX Divergence": adx_divergence_strategy(data, adx)
    }

    # Relative importance of each sub-strategy (sums to 100).
    weights = {
        "ADX + DI Crossover": 35,
        "ADX Breakout": 30,
        "ADX Slope": 20,
        "ADX Divergence": 15
    }

    # Bullish earns the full weight, Neutral half, Bearish nothing.
    total_score = 0
    for strategy, weight in weights.items():
        signal = signals[strategy]
        if signal == "Bullish":
            total_score += weight
        elif signal == "Neutral":
            total_score += weight * 0.5

    overall_percentage = round((total_score / sum(weights.values())) * 100, 2)

    # Bucket the percentage into the final recommendation ("DBuy" appears to
    # mean "don't buy", consistent with the other strategy modules).
    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal,adx, plus_di, minus_di
125
+
126
+
127
def extract_series(data, series, days=100):
    """Return the last `days` non-NaN values of `series` as a
    {YYYY-MM-DD: value rounded to 2 dp} dict, dated from the tail of
    `data`'s DatetimeIndex."""
    cleaned = pd.Series(series).dropna().tail(days)
    cleaned.index = data.index[-len(cleaned):].strftime('%Y-%m-%d')
    return cleaned.round(2).to_dict()
132
+
133
# API-style function
def get_adx_trade_signal(data):
    """Run all ADX strategies on `data` and package the results for the API."""
    signals, score, verdict, adx, plus_di, minus_di = adx_strategies(data)
    payload = {
        "adx_signals": signals,
        "adx_score": score,
        "adx_final_signal": verdict,
        "ADX_Indicator": extract_series(data, adx),
        "PLUS_DI": extract_series(data, plus_di),
        "MINUS_DI": extract_series(data, minus_di),
    }
    return payload
analysedata.xlsx ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:22de367d4c7376a90da9e3d4a11bf2aab4de907541695b1042427f76f526b272
3
+ size 5787301
analysestock.py ADDED
@@ -0,0 +1,316 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import yfinance as yf
2
+ import pandas as pd
3
+ import numpy as np
4
+ import talib
5
+ import math
6
+ import requests
7
+ import time
8
+ import datetime
9
+ from datetime import timedelta
10
+ from collections import OrderedDict
11
+
12
+ from rsistrategies import get_rsi_trade_signal
13
+ from macdstrategies import get_macd_trade_signal
14
+ from emastrategies import get_ema_trade_signal
15
+ from atrstrategies import get_atr_trade_signal
16
+ from adxstrategies import get_adx_trade_signal
17
+ from fibostrategies import get_fibonacci_trade_signal
18
+ from priceactionstrategies import get_priceaction_trade_signal
19
+ from srstrategies import get_support_resistance_signal
20
+ from bbstrategies import get_bollinger_trade_signal
21
+ from fundamental import get_fundamental_details
22
+ from news import get_latest_news_with_sentiment
23
+
24
+ from prediction import (
25
+ load_or_train_highlow_model,
26
+ build_current_features_row_23k,
27
+ predict_high_low_for_current_row,
28
+ )
29
+ import os, numpy as np, pandas as pd
30
+
31
# Paths are overridable via environment variables so deployments (e.g. the
# Docker image for the HF Space) are not tied to the author's local Windows
# drive layout — the hard-coded D:\ / C:\ defaults cannot exist inside a
# Linux container. Defaults preserve the original values.
TRAIN_XLSX_PATH = os.environ.get("TRAIN_XLSX_PATH", r"D:\PY-Trade\backend alone\analysedata.xlsx")
MODEL_BUNDLE_PATH = os.environ.get("MODEL_BUNDLE_PATH", r"C:\VIJI\pytrade-app\backend\models\gps_highlow_extratrees.pkl")
33
+
34
+
35
+ from predictedchart import run_stock_prediction
36
+
37
# ===================== TA scoring =====================
def calculate_technical_analysis_score(indicator_scores):
    """Collapse the per-indicator scores (each 0-100, ordered RSI, MACD, ATR,
    ADX, EMA, PriceAction, Bollinger, Fibonacci, SR) into a single technical
    score scaled to a maximum of 85 points."""
    # Weights in caller order: RSI, MACD, ATR, ADX, EMA, PriceAction,
    # Bollinger, Fibonacci, SR (sums to 85).
    weights = (13, 13, 5, 4, 13, 14, 10, 4, 9)

    weighted_total = 0
    for score, weight in zip(indicator_scores, weights):
        weighted_total += score * weight

    # Normalise by the best possible weighted score, then scale to 85.
    return (weighted_total / (sum(weights) * 100)) * 85
55
+
56
# ================== Pivot levels & trade ==================
def calculate_pivot_points(ticker, score, live_price, atr_period=14):
    """Build weekly pivot / support / resistance levels and an ATR-based
    entry / stop-loss / target plan for `ticker`.

    `score` (0-100 confidence) selects the stop-loss multiplier and
    risk:reward ratio; below 50 no trade is recommended.

    Fix: the original downloaded both the weekly and daily datasets *before*
    checking the score, wasting two network round-trips whenever the score
    was below 50. The guard now runs first; everything else is unchanged.
    """
    if score < 50:
        return {
            "remarks": "Score is below 50%, avoid trading. No trade recommendation",
            "pivot_point": "N/A", "resistance1": "N/A", "support1": "N/A",
            "resistance2": "N/A", "support2": "N/A",
            "resistance3": "N/A", "support3": "N/A",
            "entry_point": "N/A", "stop_loss": "N/A", "target_price": "N/A",
            "s1_pect": "N/A", "s2_pect": "N/A", "s3_pect": "N/A",
            "r1_pect": "N/A", "r2_pect": "N/A", "r3_pect": "N/A", "p1_pect": "N/A"
        }

    # Weekly candles feed the pivot levels; daily candles feed the ATR.
    data = yf.download(ticker, period="2mo", interval="1wk")
    df = yf.download(ticker, period="2mo", interval="1d")

    # Confidence tiers: higher score -> wider stop and more ambitious target.
    if 50 <= score < 65:
        stoploss_multiplier, risk_reward_ratio = 1.2, 1.5
        remarks = "Neutral confidence - Monitor the price for further confirmation."
    elif 65 <= score < 70:
        stoploss_multiplier, risk_reward_ratio = 1.5, 2.0
        remarks = "Moderate confidence - Conservative stop loss and reward."
    elif 70 <= score < 80:
        stoploss_multiplier, risk_reward_ratio = 1.8, 2.5
        remarks = "Good confidence - Balanced approach."
    else:
        stoploss_multiplier, risk_reward_ratio = 2.0, 3.0
        remarks = "High confidence - Aggressive approach."

    # ATR over the daily data for stop-loss sizing.
    close_prices = df['Close'].to_numpy().flatten()
    high_prices = df['High'].to_numpy().flatten()
    low_prices = df['Low'].to_numpy().flatten()
    df['ATR'] = talib.ATR(high_prices, low_prices, close_prices, timeperiod=atr_period)

    latest_atr = df['ATR'].iloc[-1]
    entry_point = live_price
    stop_loss = entry_point - (stoploss_multiplier * latest_atr)
    target_price = entry_point + ((entry_point - stop_loss) * risk_reward_ratio)

    # Classic floor-trader pivots from the previous completed week.
    previous_week = data.iloc[-2]
    high, low, close = previous_week["High"], previous_week["Low"], previous_week["Close"]

    P = (high + low + close) / 3
    R1 = (2 * P) - low
    S1 = (2 * P) - high
    R2 = P + (high - low)
    S2 = P - (high - low)
    R3 = high + 2 * (P - low)
    S3 = low - 2 * (high - P)

    # Distance of each level from the live price, as a % of the level itself.
    p1_pect = ((P - live_price) / P) * 100
    s1_pect = ((S1 - live_price) / S1) * 100
    s2_pect = ((S2 - live_price) / S2) * 100
    s3_pect = ((S3 - live_price) / S3) * 100
    r1_pect = ((R1 - live_price) / R1) * 100
    r2_pect = ((R2 - live_price) / R2) * 100
    r3_pect = ((R3 - live_price) / R3) * 100

    return {
        "pivot_point": round(float(P), 2),
        "resistance1": round(float(R1), 2),
        "support1": round(float(S1), 2),
        "resistance2": round(float(R2), 2),
        "support2": round(float(S2), 2),
        "resistance3": round(float(R3), 2),
        "support3": round(float(S3), 2),
        "entry_point": round(float(entry_point), 2),
        "stop_loss": round(float(stop_loss), 2),
        "target_price": round(float(target_price), 2),
        "s1_pect": round(float(s1_pect), 2),
        "s2_pect": round(float(s2_pect), 2),
        "s3_pect": round(float(s3_pect), 2),
        "r1_pect": round(float(r1_pect), 2),
        "r2_pect": round(float(r2_pect), 2),
        "r3_pect": round(float(r3_pect), 2),
        "p1_pect": round(float(p1_pect), 2),
        "remarks": remarks
    }
134
+
135
+
136
+
137
# =================== Main: short-term swing ===================
def analysestock(ticker):
    # End-to-end analysis pipeline for one ticker: price history, every TA
    # strategy module, fundamentals, news sentiment, pivot/trade levels and
    # the ML predictions, merged into a single JSON-able response dict.

    # Daily candles; columns are lowercased ('open'/'high'/'low'/'close')
    # because the strategy modules expect lowercase names.
    # NOTE(review): the end date is hard-coded to 2025-09-01 — confirm this
    # should not simply be "today".
    stock_data = yf.download(ticker, start="2023-01-01", end="2025-09-01", interval="1d")
    stock_data.columns = [col.lower() if isinstance(col, str) else col[0].lower() for col in stock_data.columns]
    stockdetail = yf.Ticker(ticker)
    company_name = stockdetail.info.get("longName", "Company name not found")
    live_price = stockdetail.info["regularMarketPrice"]
    price_change = stockdetail.info['regularMarketChange']
    percentage_change = stockdetail.info['regularMarketChangePercent']

    # Last 20 daily candles shaped for the frontend candlestick chart.
    recentdays = stock_data.tail(20)
    ohlc_data = []
    for index, row in recentdays.iterrows():
        ohlc_data.append({
            "x": index.strftime('%Y-%m-%d'),
            "y": [round(row['open'], 2), round(row['high'], 2), round(row['low'], 2), round(row['close'], 2)]
        })

    # TA Strategy signals
    rsi_trade_signal = get_rsi_trade_signal(stock_data)
    macd_trade_signal = get_macd_trade_signal(stock_data)
    ema_trade_signal = get_ema_trade_signal(stock_data)
    atr_trade_signal = get_atr_trade_signal(stock_data)
    adx_trade_signal = get_adx_trade_signal(stock_data)
    fibo_trade_signal = get_fibonacci_trade_signal(stock_data)
    priceaction_trade_signal = get_priceaction_trade_signal(stock_data)
    bb_trade_signal = get_bollinger_trade_signal(stock_data)
    sr_trade_signal = get_support_resistance_signal(stock_data)

    # Per-indicator verdicts, in the display order the frontend expects.
    final_trade_signal = OrderedDict([
        ("RSI", rsi_trade_signal['rsi_final_signal']),
        ("MACD", macd_trade_signal['macd_final_signal']),
        ("ATR", atr_trade_signal['atr_final_signal']),
        ("EMA", ema_trade_signal['ema_final_signal']),
        ("ADX", adx_trade_signal['adx_final_signal']),
        ("Fibo", fibo_trade_signal['fib_final_signal']),
        ("BB", bb_trade_signal['bollinger_final_signal']),
        ("SR", sr_trade_signal['sr_final_signal']),
        ("PA_MS", priceaction_trade_signal['priceaction_final_signal']),
    ])

    # Order must match the weight order inside
    # calculate_technical_analysis_score: RSI, MACD, ATR, ADX, EMA,
    # PriceAction, Bollinger, Fibonacci, SR.
    indicator_score = [
        rsi_trade_signal["rsi_score"],
        macd_trade_signal['macd_score'],
        atr_trade_signal['atr_score'],
        adx_trade_signal['adx_score'],
        ema_trade_signal['ema_score'],
        priceaction_trade_signal['priceaction_score'],
        bb_trade_signal['bollinger_score'],
        fibo_trade_signal['fib_score'],
        sr_trade_signal['sr_score']
    ]

    overall_ta_score = calculate_technical_analysis_score(indicator_score)

    # FA signals
    fundamental_analysis = get_fundamental_details(ticker)

    # News sentiment for the last day (max 10 English/US articles).
    news_payload = get_latest_news_with_sentiment(
        company_name,
        period="1d",
        max_results=10,
        language="en",
        country="US"
    )

    # Overall score: TA (max 85) + FA + news, then bucketed into a signal.
    overall_fa_score = fundamental_analysis["overall_fa_score"]
    overall_news_score = news_payload['overall_news_score']
    combined_overall_score = overall_ta_score + overall_fa_score + overall_news_score
    # NOTE(review): np.where on scalars yields a 0-d numpy array; it is
    # stringified below via str(...) — a plain if/elif would give a cleaner value.
    combined_overall_signal = np.where(combined_overall_score > 65, 'Buy',
                                       np.where(combined_overall_score > 50, 'Neutral', 'DBuy'))

    # Trade recommendation (pivot levels, entry/stop/target).
    pivot_levels = calculate_pivot_points(ticker, combined_overall_score, live_price)

    # Prediction: forecast close-price series for the chart.
    predictions = run_stock_prediction(ticker)
    predictions_float = [float(pred) for pred in predictions['Predicted Close']]
    prediction_dates = pd.to_datetime(predictions['Date']).dt.strftime('%d-%m-%Y').tolist()
    model_error = None
    pred_high, pred_low = np.nan, np.nan
    # The high/low model is best-effort: any failure is captured and reported
    # in the response instead of failing the whole analysis.
    try:
        bundle = load_or_train_highlow_model(TRAIN_XLSX_PATH, MODEL_BUNDLE_PATH)

        current_feat_row = build_current_features_row_23k(
            ticker=ticker,
            stock_data=stock_data,
            rsi_trade_signal=rsi_trade_signal,
            macd_trade_signal=macd_trade_signal,
            ema_trade_signal=ema_trade_signal,
            atr_trade_signal=atr_trade_signal,
            adx_trade_signal=adx_trade_signal,
            bb_trade_signal=bb_trade_signal,
            sr_trade_signal=sr_trade_signal,
            priceaction_trade_signal=priceaction_trade_signal,
            fibo_trade_signal=fibo_trade_signal,
            overall_ta_score=overall_ta_score,
        )

        pred_high, pred_low = predict_high_low_for_current_row(
            bundle=bundle,
            current_row_df=current_feat_row,
            live_close=stock_data['close'].iloc[-1]
        )
    except Exception as ex:
        model_error = f"{type(ex).__name__}: {ex}"
        print(f"[WARN] High/Low prediction failed: {model_error}")

    # Assemble the API response.
    response = {
        "ticker": ticker,
        "company_name": company_name,
        "live_price": round(live_price, 2),
        "price_change": round(price_change, 2),
        "percentage_change": round(percentage_change, 2),
        "ohlc_data":ohlc_data,
        "RSI": rsi_trade_signal['rsi_signals'],
        "MACD": macd_trade_signal['macd_signals'],
        "EMA": ema_trade_signal['ema_signals'],
        "ATR": atr_trade_signal['atr_signals'],
        "ADX": adx_trade_signal['adx_signals'],
        "Fibo": fibo_trade_signal['fib_signals'],
        "SR": sr_trade_signal['support_resistance_signals'],
        "BB": bb_trade_signal['bollinger_signals'],
        "PA_MS": priceaction_trade_signal['priceaction_signals'],
        "final_trade_signal": final_trade_signal,
        "overall_ta_score": round(overall_ta_score, 2),
        "fundamental_analysis": fundamental_analysis,
        "overall_fa_score": overall_fa_score,
        "news_overall_score": overall_news_score,
        "news": news_payload["items"],
        "combined_overall_score": round(combined_overall_score, 2),
        "combined_overall_signal": str(combined_overall_signal),
        "tradingInfo": pivot_levels,

        # Indicator history series for the frontend charts.
        "RSI 14": rsi_trade_signal['rsi_14_last_2_years'],
        "RSI 5": rsi_trade_signal['rsi_5_last_2_years'],
        "MA_20": rsi_trade_signal['ma'],
        "Close": rsi_trade_signal['close'],
        "LowerBB": rsi_trade_signal['lowerbb'],
        "UpperBB": rsi_trade_signal['upperbb'],
        "MACDLine": macd_trade_signal['macd_line'],
        "MACDSignalLine": macd_trade_signal['macd_signal_line'],
        "MACDHistogram": macd_trade_signal['macd_histogram'],
        "ATRValue": atr_trade_signal['atr_values'],
        "EMA 5": ema_trade_signal['EMA_5'],
        "EMA 20": ema_trade_signal['EMA_20'],
        "EMA 50": ema_trade_signal['EMA_50'],
        "ADX_Indicator": adx_trade_signal['ADX_Indicator'],
        "PLUS_DI": adx_trade_signal['PLUS_DI'],
        "MINUS_DI": adx_trade_signal['MINUS_DI'],
        "prediction_prices": predictions_float,
        "prediction_dates": prediction_dates,
    }

    # High/low prediction results (NaN + error string when the model failed).
    response.update({
        "ai_predicted_highest_price": pred_high,
        "ai_predicted_lowest_price": pred_low,
        "ai_model_meta": {
            "model": "ExtraTreesRegressor (multi-output capable, native)",
            "bundle_path": MODEL_BUNDLE_PATH,
            "trained_rows": (bundle.get("trained_rows") if 'bundle' in locals() else None),
            "sklearn_version": (bundle.get("sklearn_version") if 'bundle' in locals() else None)
        },
        "ai_model_error": model_error
    })

    return response
atrstrategies.py ADDED
@@ -0,0 +1,157 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import datetime
6
+ import talib
7
+
8
+
9
# Calculate ATR values
def calculate_atr(data):
    """Return the 14-period Average True Range of an OHLC frame."""
    return talib.ATR(data['high'], data['low'], data['close'], timeperiod=14)
13
+
14
# ATR Breakout Strategy (Price crossing ATR threshold)
def atr_breakout_strategy(data, atr, multiplier=2):
    """Classify the latest close against an ATR band built around the
    previous bar's close.

    Fix: the original bound `data['close'].iloc[-2]` to a variable named
    `latest_close` and `.iloc[-1]` to `previous_close` — the names were
    swapped relative to the values they hold. The computation is unchanged;
    only the misleading names are corrected.
    """
    prev_close = data['close'].iloc[-2]   # reference bar
    last_close = data['close'].iloc[-1]   # bar being classified
    prev_atr = atr.iloc[-2]               # ATR as of the reference bar

    upper_band = prev_close + (multiplier * prev_atr)
    lower_band = prev_close - (multiplier * prev_atr)

    # Bullish breakout (price moves above the ATR band)
    if last_close > upper_band:
        return "Bullish"
    # Bearish breakout (price moves below the ATR band)
    elif last_close < lower_band:
        return "Bearish"
    # No breakout, neutral
    else:
        return "Neutral"
31
+
32
def calculate_dynamic_threshold(atr, period=14):
    """Two standard deviations of the ATR's `period`-bar percentage change —
    an adaptive band used by the expansion/compression strategies."""
    return 2 * atr.pct_change(periods=period).std()
38
+
39
# ATR Expansion Strategy (Confirming trend continuation)
def atr_expansion_strategy(data, atr, period=14, days_to_check=5):
    """Flag trend continuation when the latest ATR sits well above its recent
    mean (volatility expanding); direction follows the last close-to-close move.

    Fix: `atr_last[-1]` was a label lookup on a Series that keeps its
    original index after `.iloc[-days_to_check:]`; on a default integer
    index it raises KeyError in pandas >= 2.0. Positional `.iloc[-1]` is
    used instead.
    """
    dynamic_threshold = calculate_dynamic_threshold(atr, period)

    recent_atr = atr.iloc[-days_to_check:]
    expanding = recent_atr.iloc[-1] > recent_atr.mean() + dynamic_threshold

    if expanding:
        # Volatility expansion confirms whichever way price last moved.
        if data['close'].iloc[-1] > data['close'].iloc[-2]:
            return "Bullish"
        elif data['close'].iloc[-1] < data['close'].iloc[-2]:
            return "Bearish"
        else:
            return "Neutral"

    return "Neutral"
60
+
61
# ATR Squeeze/Compression Strategy (Confirming trend continuation)
def atr_squeeze_strategy(data, atr, period=14, days_to_check=5):
    """Look for a volatility squeeze (latest ATR well below its recent mean)
    resolving via a breakout of the recent high/low range.

    Fix: `atr_last[-1]` was a label lookup (KeyError on default integer
    indexes in pandas >= 2.0); positional `.iloc[-1]` is used instead.
    """
    dynamic_threshold = calculate_dynamic_threshold(atr, period)

    recent_atr = atr.iloc[-days_to_check:]
    compressed = recent_atr.iloc[-1] < recent_atr.mean() - dynamic_threshold
    # NOTE(review): `resistance` includes the current bar's high, so
    # `close > resistance` can never be strictly true on clean data; consider
    # excluding the current bar from the range — confirm the intent first.
    resistance = data['high'].iloc[-days_to_check:].max()
    support = data['low'].iloc[-days_to_check:].min()

    if compressed:
        # A squeeze resolving through the recent range signals the new direction.
        if data['close'].iloc[-1] > resistance:
            return "Bullish"
        elif data['close'].iloc[-1] < support:
            return "Bearish"
        else:
            return "Neutral"

    return "Neutral"
84
+
85
# ATR Trend Reversal Strategy (ATR rising during price reversal)
def atr_trend_reversal_strategy(atr, price, days=5):
    """Report the direction of the `days`-bar price move, but only while ATR
    rose over the same window; otherwise "Neutral"."""
    price_move = price.iloc[-1] - price.iloc[-days]
    atr_move = atr.iloc[-1] - atr.iloc[-days]

    # Rising ATR lends conviction to whichever way price moved.
    if atr_move > 0:
        if price_move > 0:
            return "Bullish"
        if price_move < 0:
            return "Bearish"
    return "Neutral"
97
+
98
# Main strategy function using ATR strategy
def atr_strategies(data):
    # Run every ATR sub-strategy on `data` (OHLC frame with lowercase
    # columns) and combine them into a weighted score plus a final
    # Buy / DBuy / Neutral verdict.

    atr = calculate_atr(data)

    atr_breakout = atr_breakout_strategy(data, atr)

    atr_expansion = atr_expansion_strategy(data,atr)

    atr_squeeze = atr_squeeze_strategy(data,atr)

    atr_trend_reversal = atr_trend_reversal_strategy(atr, data['close'])

    # Collect signals
    signals = {
        "ATR": round(atr.iloc[-1], 2),
        "ATR Breakout": atr_breakout,
        "ATR Expansion": atr_expansion,
        "ATR Squeeze": atr_squeeze,
        "ATR Trend Reversal": atr_trend_reversal
    }

    # Relative importance of each sub-strategy (sums to 100).
    weights = {
        "ATR Breakout": 45,
        "ATR Expansion": 15,
        "ATR Squeeze": 15,
        "ATR Trend Reversal": 25
    }

    # Bullish earns the full weight, Neutral half, Bearish nothing.
    total_score = 0
    for strategy, weight in weights.items():
        signal = signals[strategy]
        if signal == "Bullish":
            total_score += weight
        elif signal == "Neutral":
            total_score += weight * 0.5

    overall_percentage = round((total_score / sum(weights.values())) * 100, 2)

    # Bucket the percentage into the final recommendation ("DBuy" appears to
    # mean "don't buy", consistent with the other strategy modules).
    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal,atr
145
+
146
# API-style function to fetch ATR signals
def get_atr_trade_signal(data):
    """Run the ATR strategies on `data` and package the results for the API,
    including the last 100 ATR values keyed by date string."""
    signals, score, verdict, atr = atr_strategies(data)

    recent = pd.Series(atr, index=data.index).dropna().tail(100)
    recent.index = recent.index.strftime('%Y-%m-%d')

    return {
        "atr_signals": signals,
        "atr_score": score,
        "atr_final_signal": verdict,
        "atr_values": recent.round(2).to_dict()
    }
bbstrategies.py ADDED
@@ -0,0 +1,129 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import talib
6
+ import datetime
7
+
8
# Bollinger Band Calculation
def calculate_bollinger(data, period=20, stddev=2):
    """Return the (upper, middle, lower) Bollinger Bands of the close series."""
    upper, middle, lower = talib.BBANDS(
        data['close'], timeperiod=period, nbdevup=stddev, nbdevdn=stddev, matype=0
    )
    return upper, middle, lower
13
+
14
+
15
#BB Squeeze breakout/fade after low volatility
def detect_bb_squeeze(close, upper, lower, middle, lookback=20, perc=20):
    """While bandwidth sits in its bottom `perc` percentile over `lookback`
    bars (a squeeze) report "Neutral"; otherwise classify a band breakout of
    the latest close."""
    bandwidth = (upper - lower) / middle
    squeeze_level = np.percentile(bandwidth.iloc[-lookback:], perc)

    # Still compressed: no tradeable signal yet.
    if bandwidth.iloc[-1] < squeeze_level:
        return "Neutral"

    last = close.iloc[-1]
    if last > upper.iloc[-1]:
        return "Bullish"
    if last < lower.iloc[-1]:
        return "Bearish"
    return "Neutral"
28
+
29
+
30
# BB Breakout Detection
def detect_bb_breakout(close, upper, lower):
    """Classify the latest close relative to the outer Bollinger Bands."""
    last = close.iloc[-1]
    if last > upper.iloc[-1]:
        return "Bullish"
    if last < lower.iloc[-1]:
        return "Bearish"
    return "Neutral"
37
+
38
+
39
# BB Breakout Reversal
def detect_bb_breakout_reversal(data, upper, lower, middle, lookahead=3):
    """Check whether a band breakout `lookahead` bars ago has since pulled
    back inside the bands (a reversal toward the middle band)."""
    anchor = len(data) - lookahead - 1

    if anchor < 0:
        return "Neutral"  # not enough history to anchor the breakout bar

    anchor_close = data.iloc[anchor]['close']

    # Bullish Reversal: broke above the upper band, then settled back between
    # the middle and upper bands within the lookahead window.
    if anchor_close > upper.iloc[anchor]:
        for step in range(1, lookahead + 1):
            later_close = data.iloc[anchor + step]['close']
            if middle.iloc[anchor + step] < later_close < upper.iloc[anchor + step]:
                return "Bullish"

    # Bearish Reversal: broke below the lower band, then settled back between
    # the lower and middle bands.
    elif anchor_close < lower.iloc[anchor]:
        for step in range(1, lookahead + 1):
            later_close = data.iloc[anchor + step]['close']
            if lower.iloc[anchor + step] < later_close < middle.iloc[anchor + step]:
                return "Bearish"

    return "Neutral"
63
+
64
+
65
+
66
# Middle Band Pullback
def detect_middle_band_pullback(close, middle, upper, lower, threshold=0.10, trend_lookback=3):
    """When price has pulled back to within `threshold` band-widths of the
    middle band, report the direction of the preceding trend."""
    width = upper.iloc[-1] - lower.iloc[-1]
    near_middle = abs(close.iloc[-1] - middle.iloc[-1]) < width * threshold

    if near_middle:
        # Bars just before the pullback: positions -2 .. -(1 + trend_lookback).
        prior = [(close.iloc[-k], middle.iloc[-k]) for k in range(2, 2 + trend_lookback)]
        if all(c > m for c, m in prior):
            return "Bullish"
        if all(c < m for c, m in prior):
            return "Bearish"
    return "Neutral"
77
+
78
+
79
+
80
# Master strategy function
def bollinger_strategies(data):
    # Run every Bollinger sub-strategy on `data` and combine them into a
    # weighted score plus a final Buy / DBuy / Neutral verdict.

    upper, middle, lower = calculate_bollinger(data)

    signals = {
        "UpperBand": round(upper.iloc[-1], 2),
        "MiddleBand": round(middle.iloc[-1], 2),
        "LowerBand": round(lower.iloc[-1], 2),
        "BB Squeeze": detect_bb_squeeze(data['close'], upper, lower, middle),
        "BB Breakout": detect_bb_breakout(data['close'], upper, lower),
        "BB Breakout Reversal": detect_bb_breakout_reversal(data, upper, lower, middle),
        "Middle Band Pullback": detect_middle_band_pullback(data['close'], middle, upper, lower)

    }

    # Relative importance of each sub-strategy (sums to 100).
    weights = {
        "BB Squeeze": 30,
        "BB Breakout": 25,
        "BB Breakout Reversal": 25,
        "Middle Band Pullback": 20
    }

    total_score = 0
    for strategy, weight in weights.items():
        signal = signals[strategy]
        # NOTE(review): every sub-strategy only ever returns "Bullish",
        # "Bearish" or "Neutral", so the extra substrings tested here
        # ("Breakout Up", "Squeeze", "Pullback", "No Breakout") can never
        # match — effectively Bullish scores the full weight and Neutral
        # half, like the other strategy modules.
        if "Bullish" in signal or "Breakout Up" in signal or "Squeeze" in signal or "Pullback" in signal:
            total_score += weight
        elif "Neutral" in signal or "No Breakout" in signal:
            total_score += weight * 0.5

    overall_percentage = round((total_score / sum(weights.values())) * 100, 2)

    # Bucket the percentage into the final recommendation.
    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal
121
+
122
# API-style function
def get_bollinger_trade_signal(data):
    """Run the Bollinger strategies on `data` and package the results for the API."""
    signals, score, verdict = bollinger_strategies(data)
    payload = {
        "bollinger_signals": signals,
        "bollinger_score": score,
        "bollinger_final_signal": verdict
    }
    return payload
companies.py ADDED
@@ -0,0 +1,50 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # utils.py
2
+ import csv
3
+ import io
4
+ import requests
5
+ from typing import List, Dict
6
+ from requests.exceptions import RequestException
7
+ import time
8
+
9
# Official NSE index-constituent CSV downloads, keyed by index code.
# Consumed by fetch_nifty_companies() below.
NIFTY_URLS = {
    "NIFTY50": "https://www.niftyindices.com/IndexConstituent/ind_nifty50list.csv",
    "NIFTY100": "https://www.niftyindices.com/IndexConstituent/ind_nifty100list.csv"
}
14
+
15
def _parse_nifty_csv(text: str) -> List[Dict[str, str]]:
    """Parse an NSE index-constituent CSV into [{"symbol", "company"}, ...].

    Symbols get the ".NS" suffix (Yahoo Finance convention for NSE listings).
    Rows missing either the Symbol or Company Name column are skipped.
    """
    companies: List[Dict[str, str]] = []
    for row in csv.DictReader(io.StringIO(text)):
        symbol = (row.get("Symbol") or "").strip()
        name = (row.get("Company Name") or "").strip()
        if symbol and name:
            companies.append({"symbol": f"{symbol}.NS", "company": name})
    return companies


def fetch_nifty_companies(index_code: str, retries: int = 3, delay: int = 5) -> List[Dict[str, str]]:
    """Download and parse the constituent list for one NIFTY index.

    Parameters:
    - index_code: key into NIFTY_URLS (e.g. "NIFTY50").
    - retries: number of download attempts before giving up.
    - delay: seconds to sleep between attempts.

    Raises:
    - ValueError for an unknown index_code.
    - Exception (chained from the last RequestException) after all retries fail.

    BUGFIX: this module previously called parse_nifty_csv(), which is not
    defined or imported anywhere in this file (NameError at runtime); the
    parsing now lives in the private _parse_nifty_csv helper above.
    """
    url = NIFTY_URLS.get(index_code)
    if not url:
        raise ValueError(f"Unknown index code: {index_code}")

    for attempt in range(retries):
        try:
            # Timeout added so a hung connection cannot block the caller forever.
            response = requests.get(url, timeout=30)
            response.raise_for_status()
            return _parse_nifty_csv(response.text)
        except RequestException as e:
            print(f"Attempt {attempt + 1} failed: {e}")
            if attempt < retries - 1:
                time.sleep(delay)  # Wait before retrying
            else:
                raise Exception(f"Failed to fetch data after {retries} attempts.") from e
38
+
39
# Convenience wrapper: pull the constituents of both supported indices at once.
def get_companies_from_indices() -> Dict[str, List[Dict[str, str]]]:
    """Fetch NIFTY50 and NIFTY100 constituents, keyed by index code."""
    # Dict literal values evaluate in order, so NIFTY50 is fetched first,
    # exactly as before.
    return {
        "NIFTY50": fetch_nifty_companies("NIFTY50"),
        "NIFTY100": fetch_nifty_companies("NIFTY100"),
    }
emastrategies.py ADDED
@@ -0,0 +1,147 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import talib
6
+ import datetime
7
+
8
+
9
+
10
# Calculate EMA values
def calculate_ema(data, short_period=5, medium_period=20, long_period=50):
    """Return (short, medium, long) EMAs of data['close'] computed via TA-Lib."""
    closes = data['close']
    return (
        talib.EMA(closes, timeperiod=short_period),
        talib.EMA(closes, timeperiod=medium_period),
        talib.EMA(closes, timeperiod=long_period),
    )
17
+
18
def detect_ema_crossover(ema20, ema50):
    """Scan consecutive pairs for an EMA20/EMA50 crossover, oldest first.

    Returns "Bullish" when the fast EMA crosses above the slow one,
    "Bearish" when it crosses below, "Neutral" if no crossover occurs.
    """
    for idx in range(1, len(ema20)):
        prev_fast, curr_fast = ema20[idx - 1], ema20[idx]
        prev_slow, curr_slow = ema50[idx - 1], ema50[idx]

        # Fast EMA crossing up through the slow EMA.
        if prev_fast <= prev_slow and curr_fast > curr_slow:
            return "Bullish"
        # Fast EMA crossing down through the slow EMA.
        if prev_fast >= prev_slow and curr_fast < curr_slow:
            return "Bearish"

    return "Neutral"
34
+
35
+
36
def detect_ema_price_crossover(ema20, price, days=5):
    """Check whether price held one side of EMA20 for each of the last `days` bars.

    "Bullish" if every bar closed above the EMA, "Bearish" if every bar closed
    below it, otherwise "Neutral".
    """
    above = below = 0
    for offset in range(-days, 0):
        if price[offset] > ema20[offset]:
            above += 1
        elif price[offset] < ema20[offset]:
            below += 1

    if above == days:
        return "Bullish"
    if below == days:
        return "Bearish"
    return "Neutral"
54
+
55
+
56
def get_ema_average_slope_signal(ema_series, days=5, threshold=0.1):
    """Classify the average one-step slope of the EMA over the last `days` points.

    Average slope above +threshold -> "Bullish"; below -threshold -> "Bearish";
    otherwise "Neutral".
    """
    # Sum the days-1 consecutive differences inside the window.
    slope_sum = sum(ema_series[k + 1] - ema_series[k] for k in range(-days, -1))
    avg_slope = slope_sum / (days - 1)

    if avg_slope > threshold:
        return "Bullish"
    if avg_slope < -threshold:
        return "Bearish"
    return "Neutral"
74
+
75
def triple_ema_strategy(ema_short, ema_medium, ema_long):
    """Align three EMAs into one signal.

    Short EMA above both others -> "Bullish"; below both -> "Bearish";
    any other alignment -> "Neutral".
    """
    above_both = ema_short > ema_medium and ema_short > ema_long
    below_both = ema_short < ema_medium and ema_short < ema_long

    if above_both:
        return "Bullish"
    if below_both:
        return "Bearish"
    return "Neutral"
87
+
88
+
89
# Main strategy function using EMA crossover
def ema_strategies(data):
    """Run all EMA-based sub-strategies on `data` and score them.

    Returns (signals, overall_percentage, final_signal, ema5, ema_20, ema_50).
    final_signal is "Buy" (score >= 60), "DBuy" (score <= 40) or "Neutral".
    """
    ema5, ema_20, ema_50 = calculate_ema(data, short_period=5, medium_period=20, long_period=50)

    signals = {
        "EMA 20": round(ema_20.iloc[-1], 2),
        "EMA 50": round(ema_50.iloc[-1], 2),
        "EMA Crossover": detect_ema_crossover(ema_20[-5:], ema_50[-5:]),
        "EMA Price Crossover": detect_ema_price_crossover(ema_20[-5:], data['close'][-5:]),
        "EMA Slope": get_ema_average_slope_signal(ema_20[-5:]),
        "Triple EMA": triple_ema_strategy(ema5.iloc[-1], ema_20.iloc[-1], ema_50.iloc[-1])
    }

    weights = {
        "EMA Crossover": 30,
        "EMA Price Crossover": 25,
        "EMA Slope": 20,
        "Triple EMA": 25
    }

    # Full credit for a Bullish verdict, half for Neutral, none for Bearish.
    credit = {"Bullish": 1.0, "Neutral": 0.5}
    total_score = sum(
        weight * credit.get(signals[name], 0.0) for name, weight in weights.items()
    )

    overall_percentage = round((total_score / sum(weights.values())) * 100, 2)

    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal, ema5, ema_20, ema_50
128
+
129
# API-style function
def get_ema_trade_signal(data):
    """Return EMA signals plus the last 100 points of each EMA curve.

    Each curve is keyed by its 'YYYY-MM-DD' date string with values rounded
    to two decimals.
    """
    signals, score, verdict, ema5, ema_20, ema_50 = ema_strategies(data)

    def last_points(values):
        # Align to the price index, drop the EMA warm-up NaNs, keep the most
        # recent 100 points, and key each value by its date string.
        series = pd.Series(values, index=data.index).dropna().tail(100)
        series.index = series.index.strftime('%Y-%m-%d')
        return series.round(2).to_dict()

    return {
        "ema_signals": signals,
        "ema_score": score,
        "ema_final_signal": verdict,
        "EMA_5": last_points(ema5),
        "EMA_20": last_points(ema_20),
        "EMA_50": last_points(ema_50)
    }
fibostrategies.py ADDED
@@ -0,0 +1,233 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import talib
6
+ import datetime
7
+
8
+
9
# Detect recent trend (uptrend or downtrend)
def detect_trend(data, lookback_days=7):
    """Compare the newest close to the oldest close in the lookback window.

    Returns "Uptrend", "Downtrend" or "Sideways".
    """
    window = data.iloc[-lookback_days:]['close'].values
    first, last = window[0], window[-1]

    if last > first:
        return "Uptrend"
    if last < first:
        return "Downtrend"
    return "Sideways"
20
+
21
# Fibonacci Retracement Pullback Strategy
def fibonacci_retracement_bounce(data, fib_levels=(0.382, 0.5, 0.618), tolerance=0.005, lookback_days=7):
    """Signal a bounce off the nearest Fibonacci retracement level.

    Builds retracement levels from the last `lookback_days` bars' swing
    high/low; when the last close is within `tolerance` (relative) of the
    closest level, the detected trend decides the direction.

    Parameters:
    - data: DataFrame with 'high', 'low', 'close' columns.
    - fib_levels: retracement ratios to test. NOTE: default changed from a
      list to a tuple — mutable defaults are shared across calls; callers may
      still pass a list, behavior is otherwise unchanged.
    - tolerance: maximum relative distance from a level to count as a touch.
    - lookback_days: size of the swing window.

    Returns "Bullish", "Bearish" or "Neutral".
    """
    last_close = data['close'].iloc[-1]

    # Direction of the recent move decides which bounce we accept.
    trend = detect_trend(data, lookback_days)

    recent = data.iloc[-lookback_days:]
    swing_high = recent['high'].max()
    swing_low = recent['low'].min()
    span = swing_high - swing_low

    # Price level for each ratio, measured down from the swing high.
    retracement_levels = {level: swing_high - span * level for level in fib_levels}

    # Find the level closest (relative terms) to the last close.
    nearest_level = None
    min_deviation = float('inf')
    for level, price_level in retracement_levels.items():
        deviation = abs(last_close - price_level) / price_level
        if deviation < min_deviation:
            min_deviation = deviation
            nearest_level = (level, price_level)

    if nearest_level and min_deviation <= tolerance:
        _, level_price = nearest_level
        # A touch only counts when it agrees with the prevailing trend.
        if trend == "Uptrend" and last_close > level_price:
            return "Bullish"
        if trend == "Downtrend" and last_close < level_price:
            return "Bearish"

    return "Neutral"
53
+
54
# 2. Fibonacci Breakout (Retracement Break) Strategy

def fibonacci_breakout(data, fib_threshold=0.618, lookback_days=7, check_candles=3, tolerance=0.005):
    """Signal a sustained break of the `fib_threshold` retracement level.

    "Bullish" when all of the last `check_candles` closes sit above the level
    (plus tolerance); "Bearish" when all sit below it (minus tolerance);
    "Neutral" otherwise.
    """
    window = data.iloc[-lookback_days:]
    swing_high = window['high'].max()
    swing_low = window['low'].min()
    level = swing_high - (swing_high - swing_low) * fib_threshold

    # Tolerance band around the level filters out marginal closes.
    upper_bound = level * (1 + tolerance)
    lower_bound = level * (1 - tolerance)

    recent_closes = data.iloc[-check_candles:]['close']
    above = sum(1 for price in recent_closes if price > upper_bound)
    below = sum(1 for price in recent_closes if price < lower_bound)

    if above == check_candles:
        return "Bullish"
    if below == check_candles:
        return "Bearish"
    return "Neutral"
83
+
84
+
85
# 3. Golden Pocket Reversal Strategy

def calculate_fib_levels(high, low):
    """Map the standard Fibonacci retracement ratios to price levels below `high`."""
    span = high - low
    ratios = {
        '23.6%': 0.236,
        '38.2%': 0.382,
        '50%': 0.5,
        '61.8%': 0.618,
        '65%': 0.65,
        '78.6%': 0.786,
    }
    # Each level is measured down from the swing high.
    return {label: high - ratio * span for label, ratio in ratios.items()}
98
+
99
def golden_pocket_reversal_strategy(data, lookback_period=7):
    """
    Golden Pocket Reversal Strategy (61.8% - 65%).

    Looks for a reversal while price sits inside the "golden pocket"
    retracement zone between the 61.8% and 65% Fibonacci levels.

    Parameters:
    - data: DataFrame with columns ['high', 'low', 'close']
    - lookback_period: Number of periods for the rolling swing high/low

    Returns:
    - "Bullish" if a bullish reversal signal is detected
    - "Bearish" if a bearish reversal signal is detected
    - "Neutral" if no clear signal
    """
    # Not enough bars for the rolling swing window plus one prior candle.
    # BUGFIX: this guard previously returned lowercase "neutral", which the
    # aggregator's `signal == "Neutral"` half-weight check never matched, so
    # short histories silently scored as Bearish.
    if len(data) < lookback_period + 1:
        return "Neutral"

    # Most recent swing extremes over the rolling window.
    recent_high = data['high'].rolling(lookback_period).max().iloc[-1]
    recent_low = data['low'].rolling(lookback_period).min().iloc[-1]

    # Golden pocket bounds from the retracement levels.
    fib_levels = calculate_fib_levels(recent_high, recent_low)
    golden_zone_low = fib_levels['65%']     # deeper retracement bound
    golden_zone_high = fib_levels['61.8%']  # shallower retracement bound

    recent_close = data['close'].iloc[-1]
    prev_close = data['close'].iloc[-2]

    # Price must currently sit inside the golden pocket (61.8% - 65%).
    if not (golden_zone_low <= recent_close <= golden_zone_high):
        return "Neutral"

    # Bullish reversal: price rose into the zone from below, confirmed by a
    # close above the previous candle's high.
    if recent_close > prev_close and prev_close < golden_zone_low:
        if recent_close > data['high'].iloc[-2]:
            return "Bullish"

    # Bearish reversal: price fell into the zone from above, confirmed by a
    # close below the previous candle's low.
    elif recent_close < prev_close and prev_close > golden_zone_high:
        if recent_close < data['low'].iloc[-2]:
            return "Bearish"

    return "Neutral"
148
+
149
# 4. Fibonacci Confluence Strategy

def fibonacci_confluence_signal(data, fib_level=0.618, lookback_days=5, ema_period=9, tolerance=0.005):
    """Confluence of a Fibonacci level touch with an EMA trend filter.

    When the last close is within `tolerance` (relative) of the `fib_level`
    retracement built from the last `lookback_days` bars, the side of the EMA
    decides: above -> 'Bullish', below -> 'Bearish'. Otherwise 'Neutral'.

    BUGFIX: the EMA is now kept in a local variable instead of being written
    into the caller's DataFrame as an 'EMA9' column — the old code mutated
    its input as a hidden side effect.
    """
    # EMA trend filter (local — no side effect on `data`).
    ema_values = talib.EMA(data['close'], timeperiod=ema_period)

    # Swing window for the retracement level.
    recent = data.iloc[-lookback_days:]
    swing_high = recent['high'].max()
    swing_low = recent['low'].min()
    fib_price = swing_high - (swing_high - swing_low) * fib_level

    current_close = data['close'].iloc[-1]
    current_ema = ema_values.iloc[-1]

    # Close must be within `tolerance` of the Fibonacci level to count.
    if abs(current_close - fib_price) / fib_price <= tolerance:
        if current_close > current_ema:
            return 'Bullish'
        elif current_close < current_ema:
            return 'Bearish'

    return 'Neutral'
176
+
177
+
178
+
179
+
180
+
181
+
182
+ # ======================
183
+ # Main Fibonacci Strategy Aggregator
184
+ # ======================
185
+
186
+ def fibonacci_strategies(data):
187
+
188
+ signals = {
189
+ "Fibonacci Retracement Bounce": fibonacci_retracement_bounce(data),
190
+ "Fibonacci Breakout": fibonacci_breakout(data),
191
+ "Golden Pocket Reversal": golden_pocket_reversal_strategy(data),
192
+ "Fibonacci Confluence": fibonacci_confluence_signal(data)
193
+ }
194
+
195
+ weights = {
196
+ "Fibonacci Retracement Bounce": 30,
197
+ "Fibonacci Breakout": 25,
198
+ "Golden Pocket Reversal": 30,
199
+ "Fibonacci Confluence": 15
200
+ }
201
+
202
+ total_score = 0
203
+ for strategy, weight in weights.items():
204
+ signal = signals[strategy]
205
+ if signal == "Bullish":
206
+ total_score += weight
207
+ elif signal == "Neutral":
208
+ total_score += weight * 0.5
209
+
210
+ overall_percentage = round((total_score / sum(weights.values())) * 100, 2)
211
+
212
+ if overall_percentage >= 60:
213
+ final_signal = "Buy"
214
+ elif overall_percentage <= 40:
215
+ final_signal = "DBuy"
216
+ else:
217
+ final_signal = "Neutral"
218
+
219
+ return signals, overall_percentage, final_signal
220
+
221
+
222
+
223
+ # ======================
224
+ # API-style Function
225
+ # ======================
226
+
227
+ def get_fibonacci_trade_signal(data):
228
+ fib_signals, overallscore, final_signal = fibonacci_strategies(data)
229
+ return {
230
+ "fib_signals": fib_signals,
231
+ "fib_score": overallscore,
232
+ "fib_final_signal": final_signal
233
+ }
fundamental.py ADDED
@@ -0,0 +1,292 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, jsonify
2
+ import pandas as pd
3
+ import yfinance as yf
4
+ import numpy as np
5
+
6
def get_fundamental_details(ticker):
    """Build a fundamental-analysis report for `ticker` from yfinance data.

    Scores five headline metrics (EPS, P/E, revenue growth, D/E, YoY earnings
    growth) against fixed thresholds, then assembles a nested dict of
    statement data with NaN/None values replaced by the string 'N/A'.
    """
    # Fetch the stock data using yfinance
    stock = yf.Ticker(ticker)

    # Fetch fundamental data and financial statements
    fundamental_data = stock.info
    income_statement = stock.financials
    balance_sheet = stock.balance_sheet
    cash_flow_statement = stock.cashflow

    # Latest and prior-year net income (columns are ordered newest-first).
    current_net_income = income_statement.loc['Net Income'].iloc[0]
    previous_net_income = income_statement.loc['Net Income'].iloc[1]

    earnings_growth_yoy = 'N/A'
    # Calculate Earnings Growth (YoY), expressed in percent.
    if current_net_income is not None and previous_net_income is not None:
        earnings_growth_yoy = ((current_net_income - previous_net_income) / previous_net_income) * 100

    current_assets = balance_sheet.loc['Current Assets'].iloc[0] if 'Current Assets' in balance_sheet.index else None
    current_liabilities = balance_sheet.loc['Current Liabilities'].iloc[0] if 'Current Liabilities' in balance_sheet.index else None

    current_ratio = 'N/A'
    # Calculate the Current Ratio (assets / liabilities).
    # NOTE(review): current_ratio is computed but never included in the
    # returned payload — confirm whether it was meant to be reported.
    if current_assets is not None and current_liabilities is not None:
        current_ratio = current_assets/current_liabilities

    # Transpose so balance-sheet line items become columns (dates become rows).
    # NOTE(review): after this transpose, the later `'X' in balance_sheet.index`
    # checks test the DATE index, not line items — they contradict the
    # pre-transpose `.loc['Current Assets']` access pattern above and will
    # likely always be False; verify against the actual yfinance frame shape.
    balance_sheet = balance_sheet.T

    # Get the latest non-null Debt-to-Equity Ratio
    debt_to_equity = "N/A"
    total_liabilities_series = total_equity_series = None

    # NOTE(review): "Ordinary Shares Number" is a share COUNT, not equity
    # value — presumably "Stockholders Equity" was intended; confirm.
    if "Total Liabilities Net Minority Interest" in balance_sheet.columns and "Ordinary Shares Number" in balance_sheet.columns:
        total_liabilities_series = balance_sheet["Total Liabilities Net Minority Interest"].dropna()
        total_equity_series = balance_sheet["Ordinary Shares Number"].dropna()

        if not total_liabilities_series.empty and not total_equity_series.empty:
            total_liabilities = total_liabilities_series.iloc[0]  # Get the latest non-null value
            total_equity = total_equity_series.iloc[0]  # Get the latest non-null value

            if total_liabilities is not None and total_equity is not None and total_equity != 0:
                debt_to_equity = total_liabilities / total_equity

    # Headline metrics used for the status checks below.
    fundamental_metrics = {
        'EPS': fundamental_data.get('epsTrailingTwelveMonths', None),
        'P/E Ratio': fundamental_data.get('trailingPE', None),
        'Revenue Growth': fundamental_data.get('revenueGrowth', None),
        'Debt-to-Equity Ratio': debt_to_equity,
        'Earnings Growth(YoY)': earnings_growth_yoy
    }

    eps = fundamental_metrics['EPS']
    pe_ratio = fundamental_metrics['P/E Ratio']
    revenue_growth = fundamental_metrics['Revenue Growth']
    debt_to_equity = fundamental_metrics['Debt-to-Equity Ratio']
    earnings_growth_yoy = fundamental_metrics['Earnings Growth(YoY)']

    # Initialize status variables ("none" = metric unavailable)
    earnings_growth_status = "none"
    debt_to_equity_status = "none"
    pe_ratio_status = "none"
    revenue_growth_status = "none"
    eps_status = "none"

    # Thresholds for classifying each metric as "good".
    # NOTE(review): earnings_growth_yoy above is in PERCENT (e.g. 15.0) but is
    # compared to 0.12, a fractional threshold — almost any positive growth
    # passes; confirm whether 12 (percent) was intended.
    earnings_growth_threshold = 0.12  # Earnings Growth > 12% (good)
    debt_to_equity_threshold = 1.5  # D/E < 1.5 (good)
    pe_ratio_threshold = 25  # P/E < 25 (good)
    revenue_growth_threshold = 0.12  # Revenue Growth > 12% (good)
    eps_threshold = 0  # EPS > 0 (good)

    # Score weights per metric (summed into overall_fa_score below; max 10).
    earnings_growth_weight = 2  # weight for earnings growth
    debt_to_equity_weight = 1.5  # weight for debt-to-equity ratio
    pe_ratio_weight = 1.5  # weight for P/E ratio
    revenue_growth_weight = 2  # weight for revenue growth
    eps_weight = 3  # weight for EPS

    # Check and assign status for EPS, if available
    if eps is not None:
        if eps > eps_threshold:
            eps_status = "good"
        else:
            eps_status = "bad"

    # Check and assign status for earnings growth, if available
    if earnings_growth_yoy is not None:
        if earnings_growth_yoy >= earnings_growth_threshold:
            earnings_growth_status = "good"
        else:
            earnings_growth_status = "bad"

    # Check and assign status for debt-to-equity, if available
    if debt_to_equity is not None:
        if debt_to_equity <= debt_to_equity_threshold:
            debt_to_equity_status = "good"
        else:
            debt_to_equity_status = "bad"

    # Check and assign status for P/E ratio, if available
    if pe_ratio is not None:
        if pe_ratio <= pe_ratio_threshold:
            pe_ratio_status = "good"
        else:
            pe_ratio_status = "bad"

    # Check and assign status for revenue growth, if available
    if revenue_growth is not None:
        if revenue_growth >= revenue_growth_threshold:
            revenue_growth_status = "good"
        else:
            revenue_growth_status = "bad"

    # Calculate overall score: sum of weights for every "good" metric.
    overall_fa_score = 0
    overall_fa_score += eps_weight if eps_status == "good" else 0
    overall_fa_score += earnings_growth_weight if earnings_growth_status == "good" else 0
    overall_fa_score += debt_to_equity_weight if debt_to_equity_status == "good" else 0
    overall_fa_score += pe_ratio_weight if pe_ratio_status == "good" else 0
    overall_fa_score += revenue_growth_weight if revenue_growth_status == "good" else 0

    earnings_growth = stock.info.get('earningsGrowth', None)

    # PEG = P/E divided by earnings growth (guard against missing/zero growth).
    peg_ratio = None
    if pe_ratio is not None and earnings_growth is not None and earnings_growth != 0:
        peg_ratio = pe_ratio / earnings_growth

    # Financial Metrics (Valuation, Profitability, etc.)
    financialMetrics = {
        'Market Cap': fundamental_data.get('marketCap', None),
        'Price-to-Book (P/B) Ratio': fundamental_data.get('priceToBook', None),
        'Price-to-Sales (P/S) Ratio': fundamental_data.get('priceToSalesTrailing12Months', None),
        'PEG Ratio': peg_ratio,
        'EV/EBITDA': fundamental_data.get('enterpriseToEbitda', None)
    }

    # Balance Sheet Data
    # NOTE(review): balance_sheet was transposed above, so these `.index`
    # membership checks look at dates, not line items — see the note at the
    # transpose; most entries likely resolve to None.
    balanceSheetInformation = {
        'Total Assets': balance_sheet.loc['Total Assets'].iloc[0] if 'Total Assets' in balance_sheet.index else None,
        'Total Liabilities': balance_sheet.loc['Total Liabilities Net Minority Interest'].iloc[0] if 'Total Liabilities Net Minority Interest' in balance_sheet.index else None,
        'Total Stockholder Equity': (balance_sheet.loc['Total Assets'].iloc[0] - balance_sheet.loc['Total Liabilities Net Minority Interest'].iloc[0])
        if 'Total Assets' in balance_sheet.index and 'Total Liabilities Net Minority Interest' in balance_sheet.index else None,
        'Long Term Debt': balance_sheet.loc['Long Term Debt'].iloc[0] if 'Long Term Debt' in balance_sheet.index else None,
        'Current Assets': balance_sheet.loc['Current Assets'].iloc[0] if 'Current Assets' in balance_sheet.index else None,
        'Current Liabilities': balance_sheet.loc['Current Liabilities'].iloc[0] if 'Current Liabilities' in balance_sheet.index else None,
        'Inventory': balance_sheet.loc['Inventory'].iloc[0] if 'Inventory' in balance_sheet.index else None
    }

    # Income Statement Data
    incomeStatement = {
        'Total Revenue': income_statement.loc['Total Revenue'].iloc[0] if 'Total Revenue' in income_statement.index else None,
        'Operating Income': income_statement.loc['Operating Income'].iloc[0] if 'Operating Income' in income_statement.index else None,
        'Net Income': income_statement.loc['Net Income'].iloc[0] if 'Net Income' in income_statement.index else None,
        'Gross Profit': income_statement.loc['Gross Profit'].iloc[0] if 'Gross Profit' in income_statement.index else None
    }

    # Growth Indicators
    # NOTE(review): the ROE/ROA keys are read from balanceSheetInformation,
    # which never defines them — these always resolve to None.
    growthIndicators = {
        'Revenue Growth (YoY)': income_statement.loc['Revenue Growth'].iloc[0] if 'Revenue Growth' in income_statement.index else None,
        'Profit Margins': (incomeStatement['Net Income'] / incomeStatement['Total Revenue']) * 100 if incomeStatement['Total Revenue'] else None,
        'ROE (Return on Equity)': balanceSheetInformation.get('Return on Equity (ROE)', None),
        'ROA (Return on Assets)': balanceSheetInformation.get('Return on Assets (ROA)', None)
    }

    # Cash-flow figures plus a derived coverage ratio.
    cashFlowStatement = {
        'Operating Cash Flow': cash_flow_statement.loc['Operating Cash Flow'].iloc[0] if 'Operating Cash Flow' in cash_flow_statement.index else None,
        'Investing Cash Flow': cash_flow_statement.loc['Investing Cash Flow'].iloc[0] if 'Investing Cash Flow' in cash_flow_statement.index else None,
        'Financing Cash Flow': cash_flow_statement.loc['Financing Cash Flow'].iloc[0] if 'Financing Cash Flow' in cash_flow_statement.index else None,
        'Cash Flow to Debt Ratio': (
            cash_flow_statement.loc['Operating Cash Flow'].iloc[0] / balance_sheet.loc['Long Term Debt'].iloc[0]
            if 'Operating Cash Flow' in cash_flow_statement.index and 'Long Term Debt' in balance_sheet.index
            else None
        )
    }

    # Replace NaN or None values with 'N/A'
    cashFlowStatement = {k: ('N/A' if pd.isna(v) else v) for k, v in cashFlowStatement.items()}

    # Company Overview Data
    companyOverview = {
        'Company Name': fundamental_data.get('longName', None),
        'Sector': fundamental_data.get('sector', None),
        'Industry': fundamental_data.get('industry', None)
    }

    # Risk Indicators
    # NOTE(review): the Quick Ratio guard uses truthiness, so a legitimate
    # zero Inventory yields None instead of a ratio.
    riskIndicators = {
        'Debt-to-Equity Ratio(Risk)': balanceSheetInformation.get('Long Term Debt', None) / balanceSheetInformation.get('Total Stockholder Equity', None)
        if balanceSheetInformation.get('Long Term Debt') and balanceSheetInformation.get('Total Stockholder Equity') else None,
        'Interest Coverage Ratio': income_statement.loc['Interest Coverage'].iloc[0] if 'Interest Coverage' in income_statement.index else None,
        'Beta (Stock Volatility)': fundamental_data.get('beta', None),
        'Quick Ratio': (balanceSheetInformation.get('Current Assets', None) - balanceSheetInformation.get('Inventory', None)) / balanceSheetInformation.get('Current Liabilities', None)
        if balanceSheetInformation.get('Current Assets') and balanceSheetInformation.get('Inventory') and balanceSheetInformation.get('Current Liabilities') else None
    }

    # Dividends
    dividends = {
        'Payout Ratio': fundamental_data.get('payoutRatio', None),
        'Dividend Growth Rate': fundamental_data.get('dividendGrowthRate', None)
    }

    # Profitability Indicators (margins in percent)
    profitabilityIndicators = {
        'Gross Margin': (income_statement.loc['Gross Profit'].iloc[0] / income_statement.loc['Total Revenue'].iloc[0]) * 100
        if 'Gross Profit' in income_statement.index and 'Total Revenue' in income_statement.index else None,
        'Operating Margin': (income_statement.loc['Operating Income'].iloc[0] / income_statement.loc['Total Revenue'].iloc[0]) * 100
        if 'Operating Income' in income_statement.index and 'Total Revenue' in income_statement.index else None,
        'Net Margin': (income_statement.loc['Net Income'].iloc[0] / income_statement.loc['Total Revenue'].iloc[0]) * 100
        if 'Net Income' in income_statement.index and 'Total Revenue' in income_statement.index else None
    }

    # Liquidity Indicators
    liquidityIndicators = {
        'Cash Ratio': balance_sheet.loc['Cash And Cash Equivalents'].iloc[0] / balance_sheet.loc['Current Liabilities'].iloc[0]
        if 'Cash And Cash Equivalents' in balance_sheet.index and 'Current Liabilities' in balance_sheet.index else None,
        'Working Capital': balance_sheet.loc['Current Assets'].iloc[0] - balance_sheet.loc['Current Liabilities'].iloc[0]
        if 'Current Assets' in balance_sheet.index and 'Current Liabilities' in balance_sheet.index else None
    }

    # The five headline metric values, echoed for the caller.
    investorInsightMetrics = {
        'EPS': eps,
        'P/E Ratio': pe_ratio,
        'Revenue Growth': revenue_growth,
        'Debt-to-Equity Ratio': debt_to_equity,
        'Earnings Growth(YoY)': earnings_growth_yoy
    }

    # good/bad/none status per headline metric.
    result = {
        'EPS': eps_status,
        'P/E Ratio': pe_ratio_status,
        'Revenue Growth': revenue_growth_status,
        'Debt-to-Equity Ratio': debt_to_equity_status,
        'Earnings Growth(YoY)': earnings_growth_status
    }

    def replace_nan_with_na(data):
        # Recursively replace NaN/None leaves with the string 'N/A' so the
        # payload serialises cleanly to JSON.
        if isinstance(data, dict):
            return {k: replace_nan_with_na(v) for k, v in data.items()}
        elif isinstance(data, list):
            return [replace_nan_with_na(v) for v in data]
        elif pd.isna(data):
            return 'N/A'
        else:
            return data

    return replace_nan_with_na({
        "fa_strategy": result,
        "overall_fa_score": round(overall_fa_score, 2),
        "Investor Insight Metrics": investorInsightMetrics,
        "Company Overview": companyOverview,
        "Growth Indicators": growthIndicators,
        "Risk Indicators": riskIndicators,
        "Dividends": dividends,
        "Cash Flow Statement": cashFlowStatement,
        "Financial Metrics": financialMetrics,
        "Income Statement": incomeStatement,
        "BalanceSheet Information": balanceSheetInformation,
        "Profitability Indicators": profitabilityIndicators,
        "Liquidity Indicators": liquidityIndicators,
    })
284
+
285
+
286
+
287
+
288
+
289
+
290
+
291
+
292
+
list.py ADDED
@@ -0,0 +1,110 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # -*- coding: utf-8 -*-
2
+ """
3
+ Minimal API for PY-Trade filters & companies
4
+ - /getfilters -> countries -> exchanges -> indices
5
+ - /getcompanies?code=NIFTY50 -> { code, asOf, count, constituents[] }
6
+ """
7
+
8
+ from __future__ import annotations
9
+ import csv, io, json, time
10
+ from typing import Dict, List, Any
11
+ from pathlib import Path
12
+
13
+ import requests
14
+ from flask import Flask, request, jsonify
15
+ from flask_cors import CORS
16
+
17
# ---------- configuration ----------
# Browser-like headers sent by http_get_text() below.
UA = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/127 Safari/537.36"
REFERER = "https://www.niftyindices.com/indices/equity/broad-based-indices"
TTL_SECONDS = 60 * 60 * 12  # 12h cache
# Cache lives next to this module; created eagerly at import time
# (module-level side effect).
CACHE_DIR = Path(__file__).with_name("cache")
CACHE_DIR.mkdir(exist_ok=True)

# Official CSV endpoints for NSE indices
NIFTY_URLS: Dict[str, str] = {
    "NIFTY50": "https://www.niftyindices.com/IndexConstituent/ind_nifty50list.csv",
    "NIFTY100": "https://www.niftyindices.com/IndexConstituent/ind_nifty100list.csv",
    "NIFTY200": "https://www.niftyindices.com/IndexConstituent/ind_nifty200list.csv",
    "NIFTYMID100": "https://www.niftyindices.com/IndexConstituent/ind_niftymidcap100list.csv",
    "NIFTY500": "https://www.niftyindices.com/IndexConstituent/ind_nifty500list.csv",
}

# Filters payload for the UI (add more countries/exchanges here later)
# Shape: country -> exchange -> [{code, name}, ...]; codes key into NIFTY_URLS.
MARKETS: Dict[str, Dict[str, List[Dict[str, str]]]] = {
    "India": {
        "NSE (National Stock Exchange)": [
            {"code": "NIFTY50", "name": "NIFTY 50"},
            {"code": "NIFTY100", "name": "NIFTY 100"},
            {"code": "NIFTY200", "name": "NIFTY 200"},
            {"code": "NIFTYMID100", "name": "NIFTY Midcap 100"},
            {"code": "NIFTY500", "name": "NIFTY 500"},
        ]
    }
}
+
46
# ---------- utilities ----------
def http_get_text(url: str) -> str:
    """GET `url` with browser-like headers and return the body as text."""
    session = requests.Session()
    session.headers.update({
        "User-Agent": UA,
        "Referer": REFERER,
        "Accept": "text/csv,*/*",
    })
    resp = session.get(url, timeout=25)
    resp.raise_for_status()
    # Fall back to UTF-8 when the server does not declare an encoding.
    resp.encoding = resp.encoding or "utf-8"
    return resp.text
54
+
55
def parse_nifty_csv(text: str) -> List[Dict[str, str]]:
    """Parse an index-constituent CSV into [{"symbol", "company"}, ...].

    Expected columns: Company Name, Industry, Symbol, Series, ISIN Code.
    Symbols get the ".NS" suffix (NSE listings on Yahoo Finance); rows
    missing either field are skipped.
    """
    constituents: List[Dict[str, str]] = []
    reader = csv.DictReader(io.StringIO(text))
    for record in reader:
        symbol = (record.get("Symbol") or "").strip()
        company = (record.get("Company Name") or "").strip()
        if symbol and company:
            constituents.append({"symbol": f"{symbol}.NS", "company": company})
    return constituents
65
+
66
def cache_path(code: str) -> Path:
    """Location of the JSON cache file for an index code (lower-cased name)."""
    return CACHE_DIR.joinpath(f"{code.lower()}.json")
68
+
69
def load_cache(code: str) -> Any | None:
    """Return the cached payload for `code`, or None if absent or stale."""
    fp = cache_path(code)
    if not fp.exists():
        return None
    # Entries older than the TTL are treated as missing; caller re-fetches.
    if time.time() - fp.stat().st_mtime > TTL_SECONDS:
        return None
    with fp.open("r", encoding="utf-8") as f:
        return json.load(f)
78
+
79
def save_cache(code: str, payload: Any) -> None:
    """Write `payload` to the cache file for `code` as pretty-printed UTF-8 JSON."""
    with cache_path(code).open("w", encoding="utf-8") as f:
        json.dump(payload, f, ensure_ascii=False, indent=2)
83
+
84
def build_companies_payload(code: str) -> Dict[str, Any]:
    """Return the constituents payload for an index, serving from cache when fresh.

    Raises ValueError for an unrecognised index code.
    """
    code = code.upper()

    # 1) serve a fresh-enough cached copy if one exists
    cached = load_cache(code)
    if cached:
        return cached

    # 2) otherwise download and parse the official CSV
    url = NIFTY_URLS.get(code)
    if not url:
        raise ValueError(f"Unknown index code: {code}")
    rows = parse_nifty_csv(http_get_text(url))

    payload = {
        "code": code,
        "exchange": "NSE",
        "country": "IN",
        "currency": "INR",
        "asOf": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()),
        "count": len(rows),
        "constituents": rows,
        "source": url,
    }
    save_cache(code, payload)
    return payload
110
+
macdstrategies.py ADDED
@@ -0,0 +1,239 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import talib
6
+ from collections import OrderedDict
7
+ import datetime
8
+
9
+
10
# Calculate MACD, Signal and Histogram
def calculate_macdvalue(data, fast=12, slow=26, signal=9):
    """Return (macd_line, signal_line, histogram) for data['close'] via TA-Lib.

    Periods default to the classic 12/26/9 MACD configuration.
    """
    return talib.MACD(
        data['close'],
        fastperiod=fast,
        slowperiod=slow,
        signalperiod=signal,
    )
+
25
# MACD Line Crossover - completed
def get_macd_line_crossover_signal(macd, signal):
    """Scan consecutive bars for a MACD/signal-line cross.

    Returns "Bullish" on the first cross above, "Bearish" on the first
    cross below, or "Neutral" when no cross occurs in the window.
    """
    for idx in range(1, len(macd)):
        prev_macd, curr_macd = macd[idx - 1], macd[idx]
        prev_sig, curr_sig = signal[idx - 1], signal[idx]

        # Cross above the signal line -> bullish.
        if prev_macd <= prev_sig and curr_macd > curr_sig:
            return "Bullish"
        # Cross below the signal line -> bearish.
        if prev_macd >= prev_sig and curr_macd < curr_sig:
            return "Bearish"

    return "Neutral"
+
43
# Zero Line Crossover - completed
def get_macd_zero_line_crossover_signal(macd):
    """Detect the first MACD zero-line cross in the window.

    "Bullish" when MACD crosses above zero, "Bearish" when it crosses
    below, "Neutral" otherwise.
    """
    for idx in range(1, len(macd)):
        prev, curr = macd[idx - 1], macd[idx]
        if prev <= 0 < curr:
            return "Bullish"
        if prev >= 0 > curr:
            return "Bearish"
    return "Neutral"
+
57
# MACD Momentum Signal - completed
def get_macd_momentum_signal(macd, signal, hist):
    """MACD/signal cross confirmed by the histogram at the crossing bar.

    Bullish cross requires a positive histogram, bearish a negative one;
    returns "Neutral" when no confirmed cross is found.
    """
    for idx in range(1, len(macd)):
        prev_macd, curr_macd = macd[idx - 1], macd[idx]
        prev_sig, curr_sig = signal[idx - 1], signal[idx]
        bar = hist[idx]  # histogram at the newer point

        if prev_macd <= prev_sig and curr_macd > curr_sig and bar > 0:
            return "Bullish"
        if prev_macd >= prev_sig and curr_macd < curr_sig and bar < 0:
            return "Bearish"

    return "Neutral"
+
77
# MACD Volume Signal - completed
def get_macd_volume_signal(data, macd, signal):
    """MACD/signal cross confirmed by above-average volume.

    A crossover only counts when the latest bar's volume exceeds its
    10-bar rolling average. Fix: the original compared two 1-element
    numpy arrays and used the resulting array as a bool, which works
    only by accident; scalars are compared instead. A NaN average
    (fewer than 10 bars) can never confirm, matching the original
    NaN-comparison semantics.

    Returns "Bullish", "Bearish" or "Neutral".
    """
    avg_volume = data['volume'].rolling(window=10).mean()
    latest_volume = float(data['volume'].iloc[-1])
    latest_avg = avg_volume.iloc[-1]
    volume_confirm = bool(latest_volume > latest_avg) if pd.notna(latest_avg) else False

    if not volume_confirm:
        # Without volume confirmation neither branch can fire.
        return "Neutral"

    for idx in range(1, len(macd)):
        prev_macd, curr_macd = macd[idx - 1], macd[idx]
        prev_sig, curr_sig = signal[idx - 1], signal[idx]

        if prev_macd <= prev_sig and curr_macd > curr_sig:
            return "Bullish"
        if prev_macd >= prev_sig and curr_macd < curr_sig:
            return "Bearish"

    return "Neutral"
+
103
# MACD Multi-Timeframe - completed
def get_macd_multi_timeframe_confirmation(macd, signal, macd_hr, signal_hr):
    """MACD cross that appears on both timeframes at the same bar index.

    Bullish (or bearish) only when the daily and hourly series cross in
    the same direction at the same position; otherwise "Neutral".
    """
    for idx in range(1, len(macd)):
        up_daily = macd[idx - 1] <= signal[idx - 1] and macd[idx] > signal[idx]
        down_daily = macd[idx - 1] >= signal[idx - 1] and macd[idx] < signal[idx]
        up_hourly = macd_hr[idx - 1] <= signal_hr[idx - 1] and macd_hr[idx] > signal_hr[idx]
        down_hourly = macd_hr[idx - 1] >= signal_hr[idx - 1] and macd_hr[idx] < signal_hr[idx]

        if up_daily and up_hourly:
            return "Bullish"
        if down_daily and down_hourly:
            return "Bearish"

    return "Neutral"
+
124
# Price and MACD Divergence - completed
def get_macd_divergence_signal(macd, price):
    """Classic price/MACD divergence over the supplied window.

    Bullish: price falls bar-to-bar while MACD rises.
    Bearish: price rises bar-to-bar while MACD falls.

    Fix: the original looped `range(10, len(price))`, which never ran for
    the 10-bar slices the caller passes (always "Neutral"), and indexed a
    datetime-indexed Series positionally. Inputs are coerced to arrays
    and consecutive bars compared; bullish is checked over the whole
    window first, preserving the original precedence.
    """
    macd_vals = np.asarray(macd, dtype=float)
    price_vals = np.asarray(price, dtype=float)
    n = min(len(macd_vals), len(price_vals))

    # Bullish Divergence: price makes a lower low, MACD a higher low.
    for i in range(1, n):
        if price_vals[i] < price_vals[i - 1] and macd_vals[i] > macd_vals[i - 1]:
            return "Bullish"

    # Bearish Divergence: price makes a higher high, MACD a lower high.
    for i in range(1, n):
        if price_vals[i] > price_vals[i - 1] and macd_vals[i] < macd_vals[i - 1]:
            return "Bearish"

    return "Neutral"
+
148
# MACD Hidden Divergence - completed
def get_macd_hidden_divergence_signal(macd, price):
    """Hidden price/MACD divergence over the supplied window.

    Bullish: price is higher than *span* bars earlier while MACD is lower.
    Bearish: price is lower while MACD is higher.

    Fix: the original compared bar i with bar i-10 inside
    `range(10, len(price))`, which never ran for the 10-element slices
    the caller passes (always "Neutral"). The lookback span is now capped
    at the window length, and inputs are coerced to arrays so positional
    indexing is safe on Series input.
    """
    macd_vals = np.asarray(macd, dtype=float)
    price_vals = np.asarray(price, dtype=float)
    n = min(len(macd_vals), len(price_vals))
    if n < 2:
        return "Neutral"
    span = min(10, n - 1)

    # Bullish Hidden Divergence (per original condition): price up, MACD down.
    for i in range(span, n):
        if price_vals[i] > price_vals[i - span] and macd_vals[i] < macd_vals[i - span]:
            return "Bullish"

    # Bearish Hidden Divergence: price down, MACD up.
    for i in range(span, n):
        if price_vals[i] < price_vals[i - span] and macd_vals[i] > macd_vals[i - span]:
            return "Bearish"

    return "Neutral"
+
172
+
173
# macd_strategies and get_macd_trade_signal functions
def macd_strategies(data):
    """Run every MACD sub-strategy on *data* and combine them into one score.

    Returns (signals, overall_percentage, final_signal) where signals maps
    strategy name -> "Bullish"/"Bearish"/"Neutral" (plus the raw rounded
    MACD value), overall_percentage is a 0-100 weighted score, and
    final_signal is "Buy", "DBuy" or "Neutral".
    NOTE(review): "DBuy" is kept verbatim — prediction code maps the lower
    cased string explicitly, so do not rename it here.
    """

    macd, signal, hist = calculate_macdvalue(data)

    latest_macd = macd[-1]
    # Each sub-strategy inspects only a recent slice of the series
    # (5 bars for crossovers, 10 for divergences).
    signals = {
        "MACD": round(latest_macd,2),
        "MACD Line Crossover": get_macd_line_crossover_signal(macd[-5:],signal[-5:]),
        "MACD Zero-Line Crossover": get_macd_zero_line_crossover_signal(macd[-5:]),
        "MACD Divergence": get_macd_divergence_signal(macd[-10:], data['close'][-10:]),
        "Hidden Divergence": get_macd_hidden_divergence_signal(macd[-10:], data['close'][-10:]),
        "MACD Volume": get_macd_volume_signal(data, macd[-5:],signal[-5:]),
        "MACD Momentum": get_macd_momentum_signal(macd[-5:],signal[-5:],hist[-5:]),

    }

    # Relative importance of each sub-strategy; the weights sum to 100.
    macd_signal_weights = {
        "MACD Line Crossover": 25,
        "MACD Zero-Line Crossover": 15,
        "MACD Divergence": 20,
        "Hidden Divergence": 10,
        "MACD Volume": 15,
        "MACD Momentum": 15,

    }

    # Bullish earns the full weight, Neutral half, Bearish nothing.
    total_score = 0
    for strategy, weight in macd_signal_weights.items():
        signal = signals[strategy]
        if signal == "Bullish":
            total_score += weight
        elif signal == "Neutral":
            total_score += weight * 0.5

    overall_percentage = round((total_score / sum(macd_signal_weights.values())) * 100, 2)

    # >= 60 -> Buy, <= 40 -> DBuy, otherwise Neutral.
    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal
218
+
219
+
220
def get_macd_trade_signal(data):
    """Full MACD report for the API layer.

    Combines the sub-strategy verdicts with the last 100 days of MACD,
    signal-line and histogram values, re-keyed by ISO date string so the
    payload is JSON-friendly.
    """
    macd_signals, overallscore, final_signal = macd_strategies(data)
    macd_line, signal_line, hist = calculate_macdvalue(data)
    # Format and convert MACD and Signal Line for last 100 days
    macd_series = pd.Series(macd_line, index=data.index).dropna().tail(100)
    signal_series = pd.Series(signal_line, index=data.index).dropna().tail(100)

    # Re-key the series by 'YYYY-MM-DD' strings for serialization.
    macd_series.index = macd_series.index.strftime('%Y-%m-%d')
    signal_series.index = signal_series.index.strftime('%Y-%m-%d')
    hist_series = pd.Series(hist, index=data.index).dropna().tail(100)
    hist_series.index = hist_series.index.strftime('%Y-%m-%d')

    return {
        "macd_signals": macd_signals,
        "macd_score": overallscore,
        "macd_final_signal": final_signal,
        "macd_line": macd_series.round(2).to_dict(),
        "macd_signal_line": signal_series.round(2).to_dict(),
        "macd_histogram": hist_series.round(2).to_dict()
    }
news.py ADDED
@@ -0,0 +1,107 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # news_sentiment.py
2
+ # pip install gnews nltk rapidfuzz
3
+
4
+ from __future__ import annotations
5
+ from datetime import datetime, timezone
6
+ import time
7
+ from typing import List, Dict, Any
8
+
9
+ from gnews import GNews
10
+ from rapidfuzz import fuzz
11
+ from nltk.sentiment import SentimentIntensityAnalyzer
12
+ import nltk
13
+
14
# Ensure VADER is available (safe to call multiple times)
try:
    nltk.data.find("sentiment/vader_lexicon.zip")
except LookupError:
    # First run on a fresh machine: download the lexicon from NLTK's servers.
    nltk.download("vader_lexicon")

# Keep one analyzer instance
# (module-level singleton so the lexicon is loaded once, not per request).
_SIA = SentimentIntensityAnalyzer()
22
+
23
+
24
+ def _sentiment_label(compound: float) -> str:
25
+ if compound > 0.05:
26
+ return "Positive"
27
+ elif compound < -0.05:
28
+ return "Negative"
29
+ return "Neutral"
30
+
31
+
32
def _is_similar(title: str, seen_titles: List[str], threshold: int = 60) -> bool:
    """True when *title* fuzzy-matches any already-seen title above *threshold*."""
    return any(fuzz.ratio(title, seen) > threshold for seen in seen_titles)
37
+
38
+
39
def get_latest_news_with_sentiment(
    query: str,
    *,
    period: str = "1d",
    max_results: int = 20,
    language: str = "en",
    country: str = "US",
    retries: int = 3,
    backoff_seconds: int = 3
) -> Dict[str, Any]:
    """Fetch Google News headlines for *query* and score them with VADER.

    Retries the fetch with linearly growing backoff, drops near-duplicate
    headlines (fuzzy ratio > 60), and returns
    {"overall_news_score": 0..5, "count": n, "items": [...]} where each
    item carries title, url, published timestamp, sentiment label and the
    raw compound score.
    """


    seen_titles: List[str] = []
    results = []

    # Fetch with retry: GNews occasionally raises or returns nothing.
    for attempt in range(retries):
        try:
            g = GNews(language=language, country=country, period=period, max_results=max_results)
            results = g.get_news(query) or []
            if results:
                break
        except Exception as e:
            print(f"[Attempt {attempt+1}] GNews error: {e}")
            time.sleep(backoff_seconds * (attempt + 1))

    if not results:
        # Nothing usable after all retries — empty, zero-score payload.
        return {"overall_news_score": 0.0, "count": 0, "items": []}

    items: List[Dict[str, Any]] = []
    total_compound = 0.0

    for art in results:
        title = (art.get("title") or "").strip()
        if not title:
            continue
        # Skip near-duplicate headlines syndicated by multiple outlets.
        if _is_similar(title, seen_titles, threshold=60):
            continue
        seen_titles.append(title)

        # GNews result keys vary by version; try the known spellings.
        url = (art.get("url")
               or art.get("link")
               or art.get("source", {}).get("url")
               or "")

        published_raw = (art.get("published date")
                         or art.get("publishedDate")
                         or art.get("datetime")
                         or "")
        if isinstance(published_raw, datetime):
            # Treat naive timestamps as UTC before formatting.
            if published_raw.tzinfo is None:
                published_raw = published_raw.replace(tzinfo=timezone.utc)
            published = published_raw.strftime("%Y-%m-%d %H:%M")
        else:
            published = str(published_raw)

        # Sentiment is computed on the headline only; articles are not fetched.
        compound = _SIA.polarity_scores(title)["compound"]
        items.append({
            "title": title,
            "url": url,
            "published": published,
            "sentiment": _sentiment_label(compound),
            "compound": round(compound, 3),
        })
        total_compound += compound

    n = len(items)
    # Rescale the mean compound from [-1, 1] onto a 0..5 score.
    overall = round(((total_compound / n) + 1) * 2.5, 2) if n else 0.0

    return {"overall_news_score": overall, "count": n, "items": items}
predictedchart.py ADDED
@@ -0,0 +1,126 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import yfinance as yf
2
+ import pandas as pd
3
+ import numpy as np
4
+ import talib
5
+ from sklearn.preprocessing import MinMaxScaler
6
+ import torch
7
+ import torch.nn as nn
8
+ from torch.utils.data import Dataset, DataLoader
9
+
10
# Step 1: Download data with TA indicators
def fetch_stock_data_with_indicators(ticker, start="2020-01-01", end="2025-09-03"):
    """Download OHLCV for *ticker* and append EMA-20 and ATR-14 columns.

    Fix: the original issued a second, unused `yf.download` call
    (`actualdata`) on every invocation — a wasted network round trip; it
    has been removed. Rows with indicator NaNs (the warm-up period) are
    dropped before returning.
    """
    df = yf.download(ticker, start=start, end=end)
    df = df[["Open", "High", "Low", "Close", "Volume"]]
    close_prices = df['Close'].to_numpy().flatten()
    low_prices = df['Low'].to_numpy().flatten()
    high_prices = df['High'].to_numpy().flatten()

    # Add indicators (RSI/MACD were intentionally disabled in the original).
    #df["RSI"] = talib.RSI(close_prices, timeperiod=14)
    #df["MACD"], df["MACD_signal"], _ = talib.MACD(close_prices)
    df["EMA_20"] = talib.EMA(close_prices, timeperiod=20)
    df["ATR"] = talib.ATR(high_prices, low_prices, close_prices, timeperiod=14)

    df.dropna(inplace=True)
    return df
26
+
27
def fetch_originaldata(ticker, start="2020-01-01", end="2025-01-24"):
    """Download raw OHLCV history for *ticker*.

    Fix: the *end* parameter was previously accepted but silently ignored
    in favour of a hard-coded "2025-01-24". It is now honoured; the new
    default preserves the old default-call behaviour.
    """
    return yf.download(ticker, start=start, end=end)
30
+
31
# Step 2: Custom Dataset
class StockDataset(Dataset):
    """Sliding-window dataset over a 2-D feature matrix.

    Each sample is (window of `window_size` rows, next row's column 3),
    column 3 being the scaled Close price.
    """

    def __init__(self, series, window_size):
        self.data = [
            (series[start:start + window_size], series[start + window_size][3])
            for start in range(len(series) - window_size)
        ]

    def __len__(self):
        return len(self.data)

    def __getitem__(self, idx):
        window, target = self.data[idx]
        return (torch.tensor(window, dtype=torch.float32),
                torch.tensor(target, dtype=torch.float32))
44
+
45
# Step 3: Transformer model
class TransformerPredictor(nn.Module):
    """Transformer encoder over a feature sequence, predicting one scalar
    per batch element from the final time step."""

    def __init__(self, input_size, d_model=64, nhead=4, num_layers=2, dropout=0.1):
        super().__init__()
        self.linear_in = nn.Linear(input_size, d_model)
        layer = nn.TransformerEncoderLayer(d_model=d_model, nhead=nhead, dropout=dropout)
        self.transformer = nn.TransformerEncoder(layer, num_layers=num_layers)
        self.linear_out = nn.Linear(d_model, 1)

    def forward(self, src):
        # src: [seq, batch, input_size] -> hidden: [seq, batch, d_model]
        hidden = self.transformer(self.linear_in(src))
        # Project the last time step; squeeze [batch, 1] -> [batch]
        # (a 0-d tensor when batch == 1).
        return self.linear_out(hidden[-1]).squeeze()
59
+
60
# Step 4: Training function
def train_model(model, dataloader, epochs, lr=0.001):
    """Simple Adam/MSE training loop; prints the last batch loss each epoch."""
    optimizer = torch.optim.Adam(model.parameters(), lr=lr)
    criterion = nn.MSELoss()
    for epoch in range(epochs):
        for batch_x, batch_y in dataloader:
            # [batch, seq, features] -> [seq, batch, features]
            batch_x = batch_x.permute(1, 0, 2)
            loss = criterion(model(batch_x), batch_y)
            optimizer.zero_grad()
            loss.backward()
            optimizer.step()
        print("Epoch {}/{} - Loss: {:.4f}".format(epoch + 1, epochs, loss.item()))
73
+
74
# Step 5: Run pipeline
def run_stock_prediction(ticker):
    """Train the transformer on *ticker*'s history and forecast 15 closes.

    Returns a DataFrame with 'Date' (next business days) and
    'Predicted Close'. NOTE(review): the model is retrained from scratch
    on every call (epochs=2) with no fixed seed, so results vary between
    runs.
    """
    df = fetch_stock_data_with_indicators(ticker)
    # Scale all features to [0, 1]; the same scaler inverts predictions later.
    scaler = MinMaxScaler()
    scaled_data = scaler.fit_transform(df.values)

    window_size = 20
    dataset = StockDataset(scaled_data, window_size)
    dataloader = DataLoader(dataset, batch_size=32, shuffle=True)

    input_size = scaled_data.shape[1]
    model = TransformerPredictor(input_size=input_size)
    train_model(model, dataloader, epochs=2)


    # Predict next 15 days, autoregressively feeding predictions back in.
    predictions = []
    input_seq = scaled_data[-window_size:].copy()  # shape: [20, features]

    for i in range(15):
        seq_tensor = torch.tensor(input_seq, dtype=torch.float32).unsqueeze(1)  # [seq_len, 1, features]

        with torch.no_grad():
            predicted_scaled = model(seq_tensor).item()

        # Create new row based on last row, replace only Close price (index 3)
        new_row = input_seq[-1].copy()
        new_row[3] = predicted_scaled

        # Inverse scale to get actual Close price
        predicted_row = scaler.inverse_transform([new_row])[0]
        predicted_close = predicted_row[3]
        predictions.append(predicted_close)

        # Slide window: remove first row, append new row
        input_seq = np.vstack([input_seq[1:], [new_row]])

    # Get the last date from the dataset
    last_date = df.index[-1]
    predicted_dates = pd.date_range(start=last_date + pd.Timedelta(days=1), periods=15, freq='B')  # Business days


    prediction_results = pd.DataFrame({
        'Date': predicted_dates,
        'Predicted Close': predictions,


    })

    return prediction_results
124
+
125
+
126
+
prediction.py ADDED
@@ -0,0 +1,257 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os, re, joblib, numpy as np, pandas as pd, sklearn
2
+ from sklearn.ensemble import ExtraTreesRegressor
3
+
4
+ PRICE_COLS = ["Close Price", "Highest Price", "Lowest Price"]
5
+
6
+ def _drop_unnamed(df: pd.DataFrame) -> pd.DataFrame:
7
+ to_drop = [c for c in df.columns if str(c).startswith("Unnamed")]
8
+ return df.drop(columns=to_drop) if to_drop else df
9
+
10
def _read_excel_loose_header(xlsx_path: str) -> pd.DataFrame:
    """Read the training workbook, tolerating one junk row above the header.

    If the first row doesn't contain 'Close Price' anywhere, the real
    header is assumed to be the second row.
    """
    probe = pd.read_excel(xlsx_path, engine='openpyxl', header=None)
    top_cells = [str(cell) for cell in probe.iloc[0].tolist()]
    header_row = 0 if any("Close Price" in cell for cell in top_cells) else 1
    return pd.read_excel(xlsx_path, engine='openpyxl', header=header_row)
16
+
17
def _map_training_indicators(df: pd.DataFrame) -> pd.DataFrame:
    """Encode Red/Yellow/Green indicator strings as 0/1/2 in all non-price columns.

    Object columns are stripped, 'nan'-like strings become NaN, and
    anything outside the colour set maps to NaN. Numeric columns and the
    PRICE_COLS are left untouched.
    """
    encoded = df.copy()
    for col in encoded.columns:
        if col in PRICE_COLS:
            continue
        series = encoded[col]
        if series.dtype == 'O':
            text = series.astype(str).str.strip()
            text = text.replace({'nan': np.nan, 'NaN': np.nan, 'None': np.nan, '': np.nan})
            encoded[col] = text.map({'Red': 0, 'Yellow': 1, 'Green': 2})
    return encoded
30
+
31
def _map_testing_indicators(df: pd.DataFrame) -> pd.DataFrame:
    """Normalise live-signal values to the training encoding.

    Object columns: Red/Yellow/Green -> 0/1/2 (unknowns -> NaN).
    Numeric columns: legacy 10/5/0 scores -> 2/1/0. PRICE_COLS untouched.
    """
    encoded = df.copy()
    for col in encoded.columns:
        if col in PRICE_COLS:
            continue
        series = encoded[col]
        if series.dtype == 'O':
            text = series.astype(str).str.strip()
            text = text.replace({'nan': np.nan, 'NaN': np.nan, 'None': np.nan, '': np.nan})
            encoded[col] = text.map({'Red': 0, 'Yellow': 1, 'Green': 2})
        else:
            encoded[col] = series.replace({10: 2, 5: 1, 0: 0})
    return encoded
45
+
46
+ def _find_target_cols(df: pd.DataFrame):
47
+ if "Highest Price" not in df.columns or "Lowest Price" not in df.columns:
48
+ raise ValueError("Excel must contain 'Highest Price' and 'Lowest Price' columns.")
49
+ return "Highest Price", "Lowest Price"
50
+
51
def load_or_train_highlow_model(xlsx_path: str, model_path: str):
    """Load the cached high/low regressor, or (re)train it from the workbook.

    The joblib cache is reused only when it is at least as new as the
    Excel file and has the expected bundle layout. Returns a dict with
    keys 'model', 'features', 'medians', 'sklearn_version', 'trained_rows'.
    Raises FileNotFoundError when neither a fresh cache nor the workbook
    exists.
    """

    def _is_cache_fresh():
        # Cache is valid only if written after the last workbook edit.
        return os.path.exists(model_path) and os.path.getmtime(model_path) >= os.path.getmtime(xlsx_path)

    if os.path.exists(model_path) and _is_cache_fresh():
        obj = joblib.load(model_path)
        # Guard against stale or foreign pickle layouts.
        if isinstance(obj, dict) and {'model','features','medians'} <= set(obj.keys()):
            return obj

    if not os.path.exists(xlsx_path):
        raise FileNotFoundError(f"Training Excel not found at: {xlsx_path}")

    df = _read_excel_loose_header(xlsx_path)
    df = _drop_unnamed(df)

    y_high, y_low = _find_target_cols(df)
    df_mapped = _map_training_indicators(df)

    # Features: everything except the two targets, coerced to numeric.
    X = df_mapped.drop(columns=[y_high, y_low]).apply(pd.to_numeric, errors='coerce')
    y = df_mapped[[y_high, y_low]].apply(pd.to_numeric, errors='coerce')

    # Median-impute; the same medians are reused at prediction time.
    med = X.median(numeric_only=True)
    X = X.fillna(med)
    y = y.fillna(y.median(numeric_only=True))

    # Multi-output regressor predicting (high, low) jointly.
    model = ExtraTreesRegressor(
        n_estimators=300,
        random_state=42,
        n_jobs=-1,
        max_depth=None,
        min_samples_leaf=2,
    )
    model.fit(X.values, y.values)

    bundle = {
        'model': model,
        'features': X.columns.tolist(),
        'medians': med.to_dict(),
        'sklearn_version': sklearn.__version__,
        'trained_rows': int(X.shape[0]),
    }
    os.makedirs(os.path.dirname(model_path), exist_ok=True)
    joblib.dump(bundle, model_path)
    return bundle
96
+
97
+ def _to_num(v):
98
+ import pandas as pd
99
+ if isinstance(v, (list, tuple, pd.Series, np.ndarray)):
100
+ if len(v) == 0:
101
+ return 0.0
102
+ return _to_num(v[-1])
103
+ if isinstance(v, dict):
104
+ numeric_vals = [vv for vv in v.values() if isinstance(vv, (int, float, np.number))]
105
+ if numeric_vals:
106
+ best = max(numeric_vals)
107
+ return 1.0 if float(best) > 0 else 0.0
108
+ return 1.0 if any(bool(vv) for vv in v.values()) else 0.0
109
+ if isinstance(v, (bool, int, float, np.number)):
110
+ try:
111
+ return float(v)
112
+ except Exception:
113
+ return 0.0
114
+ if isinstance(v, str):
115
+ s = v.strip().lower()
116
+ if s in {"buy", "bullish", "long", "breakout", "yes", "true", "dbuy"}:
117
+ return 1.0
118
+ if s in {"sell", "bearish", "short", "no", "false"}:
119
+ return 0.0
120
+ try:
121
+ return float(v)
122
+ except Exception:
123
+ return 0.0
124
+ try:
125
+ return float(v)
126
+ except Exception:
127
+ return 0.0
128
+
129
def build_current_features_row_23k(
    ticker: str,
    stock_data: pd.DataFrame,
    rsi_trade_signal: dict,
    macd_trade_signal: dict,
    ema_trade_signal: dict,
    atr_trade_signal: dict,
    adx_trade_signal: dict,
    bb_trade_signal: dict,
    sr_trade_signal: dict,
    priceaction_trade_signal: dict,
    fibo_trade_signal: dict,
    overall_ta_score: float,
) -> pd.DataFrame:
    """Flatten all live indicator reports into one numeric feature row.

    Column names must match the training workbook exactly; every value is
    coerced to float via _to_num. Returns a single-row DataFrame with
    +/-inf replaced by NaN (imputed later from the training medians).
    """

    last_close = _to_num(stock_data['close'].iloc[-1])

    # Pull each strategy's sub-signal dict, defaulting to empty.
    rsi_sig = rsi_trade_signal.get('rsi_signals', {}) or {}
    macd_sig = macd_trade_signal.get('macd_signals', {}) or {}
    atr_sig = atr_trade_signal.get('atr_signals', {}) or {}
    ema_sig = ema_trade_signal.get('ema_signals', {}) or {}
    adx_sig = adx_trade_signal.get('adx_signals', {}) or {}
    bb_sig = bb_trade_signal.get('bollinger_signals', {}) or {}
    sr_sig = sr_trade_signal.get('support_resistance_signals', {}) or {}
    pa_sig = priceaction_trade_signal.get('priceaction_signals', {}) or {}
    # NOTE(review): fib signals are looked up on the price-action payload
    # first — presumably a historical payload quirk; confirm against the
    # producer before changing.
    fib_sig = priceaction_trade_signal.get('fib_signals') or fibo_trade_signal.get('fib_signals', {})

    # Helper: numeric value of one named sub-signal (0 when absent).
    def sig_num(d, key): return _to_num(d.get(key, 0))

    row = {
        "TA Score": _to_num(overall_ta_score),
        "Close Price": last_close,

        # RSI
        "RSI": _to_num(rsi_trade_signal.get('rsi_score', 0)),
        "Overbought/Oversold": sig_num(rsi_sig, "Overbought/Oversold"),
        "RSI Swing Rejection": sig_num(rsi_sig, "RSI Swing Rejection"),
        "RSI Divergence": sig_num(rsi_sig, "RSI Divergence"),
        "RSI_Bollinger Band": sig_num(rsi_sig, "RSI_Bollinger Band"),
        "RSI 5/14 Crossover": sig_num(rsi_sig, "RSI 5/14 Crossover"),
        "RSI Trend 50 Confirmation": sig_num(rsi_sig, "RSI Trend 50 Confirmation"),
        "RSI_MA": _to_num(rsi_sig.get("RSI_MA", rsi_trade_signal.get("ma", 0))),
        "Mean Reversion": sig_num(rsi_sig, "Mean Reversion"),

        # MACD
        "MACD": _to_num(macd_trade_signal.get('macd_score', 0)),
        "MACD Line Crossover": sig_num(macd_sig, "MACD Line Crossover"),
        "MACD Zero-Line Crossover": sig_num(macd_sig, "MACD Zero-Line Crossover"),
        "MACD Divergence": sig_num(macd_sig, "MACD Divergence"),
        "Hidden Divergence": sig_num(macd_sig, "Hidden Divergence"),
        "MACD Volume": sig_num(macd_sig, "MACD Volume"),
        "MACD Momentum": sig_num(macd_sig, "MACD Momentum"),

        # ATR
        "ATR": _to_num(atr_trade_signal.get('atr_score', 0)),
        "ATR Breakout": sig_num(atr_sig, "ATR Breakout"),
        "ATR Expansion": sig_num(atr_sig, "ATR Expansion"),
        "ATR Squeeze": sig_num(atr_sig, "ATR Squeeze"),
        "ATR Trend Reversal": sig_num(atr_sig, "ATR Trend Reversal"),

        # EMA
        "EMA": _to_num(ema_trade_signal.get('ema_score', 0)),
        "EMA Crossover": sig_num(ema_sig, "EMA Crossover"),
        "EMA Price Crossover": sig_num(ema_sig, "EMA Price Crossover"),
        "EMA Slope": sig_num(ema_sig, "EMA Slope"),
        "Triple EMA": sig_num(ema_sig, "Triple EMA"),

        # ADX
        "ADX": _to_num(adx_trade_signal.get('adx_score', 0)),
        "ADX + DI Crossover": sig_num(adx_sig, "ADX + DI Crossover"),
        "ADX Breakout": sig_num(adx_sig, "ADX Breakout"),
        "ADX Slope": sig_num(adx_sig, "ADX Slope"),
        "ADX Divergence": sig_num(adx_sig, "ADX Divergence"),

        # Fibonacci
        "Fibo": _to_num(fibo_trade_signal.get('fib_score', 0)),
        "Fibonacci Retracement Bounce": sig_num(fib_sig, "Fibonacci Retracement Bounce"),
        "Fibonacci Breakout": sig_num(fib_sig, "Fibonacci Breakout"),
        "Golden Pocket Reversal": sig_num(fib_sig, "Golden Pocket Reversal"),
        "Fibonacci Confluence": sig_num(fib_sig, "Fibonacci Confluence"),

        # Bollinger
        "BB": _to_num(bb_trade_signal.get('bollinger_score', 0)),
        "BB Squeeze": sig_num(bb_sig, "BB Squeeze"),
        "BB Breakout": sig_num(bb_sig, "BB Breakout"),
        "BB Breakout Reversal": sig_num(bb_sig, "BB Breakout Reversal"),
        "Middle Band Pullback": sig_num(bb_sig, "Middle Band Pullback"),

        # Support/Resistance
        "SR": _to_num(sr_trade_signal.get('sr_score', 0)),
        "Breakout": sig_num(sr_sig, "Breakout"),
        "Reversal": sig_num(sr_sig, "Reversal"),
        "Flip": sig_num(sr_sig, "Flip"),
        "SR_Retest": sig_num(sr_sig, "SR_Retest"),

        # Price action / market structure
        "PA_MS": _to_num(priceaction_trade_signal.get('priceaction_score', 0)),
        "Candlestick Pattern": sig_num(pa_sig, "Candlestick Pattern"),
        "HH_HL_LL_LH": sig_num(pa_sig, "HH_HL_LL_LH"),
        "Triangle Breakout": sig_num(pa_sig, "Triangle Breakout"),
        "Fair Value Gap": sig_num(pa_sig, "Fair Value Gap"),
        "BOS": sig_num(pa_sig, "BOS"),
        "CHoCH": sig_num(pa_sig, "CHoCH"),
        "Order_Block": sig_num(pa_sig, "Order_Block"),
    }

    return pd.DataFrame([row]).replace([np.inf, -np.inf], np.nan)
236
+
237
def _prepare_test_currentrow(current_row_df: pd.DataFrame, feature_cols, train_medians: dict):
    """Align a live feature row with the training matrix.

    Applies the test-time encoding, reorders to the training columns,
    forces numeric dtype, and fills gaps with the training medians.
    """
    encoded = _map_testing_indicators(current_row_df.copy())
    X = encoded.reindex(columns=feature_cols).apply(pd.to_numeric, errors='coerce')
    return X.fillna(pd.Series(train_medians))
242
+
243
def predict_high_low_for_current_row(bundle: dict, current_row_df: pd.DataFrame, live_close: float):
    """Predict (high, low) for the current feature row using the trained bundle.

    Predictions are clamped so the live close always lies inside the
    [low, high] band, then rounded to 2 decimals.
    NOTE(review): a non-float live_close (e.g. None) would raise inside
    np.isnan — callers appear to always pass a float; confirm.
    """
    model: ExtraTreesRegressor = bundle['model']
    X = _prepare_test_currentrow(current_row_df, bundle['features'], bundle['medians'])
    raw = model.predict(X.values)
    high_pred = float(raw[0, 0])
    low_pred = float(raw[0, 1])

    if not np.isnan(live_close):
        live = float(live_close)
        high_pred = max(high_pred, live)
        low_pred = min(low_pred, live)

    return round(high_pred, 2), round(low_pred, 2)
priceactionstrategies.py ADDED
@@ -0,0 +1,271 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import datetime
6
+ import talib
7
+
8
+
9
# Candlestick Pattern Detection Strategy
def candlestick_pattern_strategy(data):
    """Classify the most recent candle via TA-Lib candlestick patterns.

    Bullish patterns are checked first, then bearish, then doji-style
    neutral ones; the first hit on the latest bar wins. Falls back to
    "Neutral" when nothing fires. (Assumes pattern outputs support
    .iloc like the original — TODO confirm against the installed TA-Lib.)
    """
    open_ = data['open']
    high = data['high']
    low = data['low']
    close = data['close']

    bullish_fns = [
        talib.CDLENGULFING, talib.CDLHAMMER, talib.CDLMORNINGSTAR,
        talib.CDLPIERCING, talib.CDLINVERTEDHAMMER, talib.CDL3WHITESOLDIERS,
    ]
    bearish_fns = [
        talib.CDLENGULFING, talib.CDLSHOOTINGSTAR, talib.CDLEVENINGSTAR,
        talib.CDLDARKCLOUDCOVER, talib.CDLHANGINGMAN, talib.CDL3BLACKCROWS,
    ]
    neutral_fns = [
        talib.CDLDOJI, talib.CDLSPINNINGTOP, talib.CDLLONGLEGGEDDOJI,
        talib.CDLHIGHWAVE,
    ]

    # Priority order matches the original: bullish, then bearish, then neutral.
    for pattern_fn in bullish_fns:
        if pattern_fn(open_, high, low, close).iloc[-1] > 0:
            return "Bullish"

    for pattern_fn in bearish_fns:
        if pattern_fn(open_, high, low, close).iloc[-1] < 0:
            return "Bearish"

    for pattern_fn in neutral_fns:
        if pattern_fn(open_, high, low, close).iloc[-1] != 0:
            return "Neutral"

    return "Neutral"
61
+
62
# Three-bar triangle: bars -2/-3 contract inside bar -4's range, then the
# latest bar breaks out of the contraction.
def three_bar_triangle_breakout(data):
    """Detect a breakout from a three-bar contracting triangle.

    "Bullish" when the latest bar closes up and above the prior bar's
    high while the two prior bars sit inside the fourth-last bar's range;
    "Bearish" for the mirrored case; otherwise "Neutral".
    """
    high, low = data['high'], data['low']
    close, open_ = data['close'], data['open']

    # Bars -2 and -3 must be inside bar -4's range (the contraction).
    inside_triangle = (
        low.iloc[-2] > low.iloc[-4] and
        low.iloc[-3] > low.iloc[-4] and
        high.iloc[-2] < high.iloc[-4] and
        high.iloc[-3] < high.iloc[-4]
    )

    breakout_up = (
        close.iloc[-1] > open_.iloc[-1] and
        close.iloc[-1] > close.iloc[-2] and
        close.iloc[-1] > high.iloc[-2]
    )
    breakout_down = (
        close.iloc[-1] < open_.iloc[-1] and
        close.iloc[-1] < close.iloc[-2] and
        close.iloc[-1] < low.iloc[-2]
    )

    if inside_triangle and breakout_up:
        return "Bullish"
    if inside_triangle and breakout_down:
        return "Bearish"
    return "Neutral"
98
+
99
def hh_ll_price_action_strategy(data, lookback_days=5):
    """Trend check over the last *lookback_days* bars.

    "Bullish" when every bar makes a strictly higher high AND higher low,
    "Bearish" when every bar makes a strictly lower high AND lower low,
    otherwise "Neutral". (With a single bar the vacuous case yields
    "Bullish", matching the original.)
    """
    window = data.tail(lookback_days)
    highs = window['high'].tolist()
    lows = window['low'].tolist()

    higher_highs = all(curr > prev for prev, curr in zip(highs, highs[1:]))
    higher_lows = all(curr > prev for prev, curr in zip(lows, lows[1:]))
    lower_highs = all(curr < prev for prev, curr in zip(highs, highs[1:]))
    lower_lows = all(curr < prev for prev, curr in zip(lows, lows[1:]))

    if higher_highs and higher_lows:
        return "Bullish"
    if lower_highs and lower_lows:
        return "Bearish"
    return "Neutral"
124
+
125
+
126
def fvg_strategy(data, lookback_days=5):
    """Fair Value Gap scan over the last *lookback_days* bars.

    A bullish FVG exists when a candle's low gaps above the high of the
    candle two bars earlier; bearish when its high gaps below that
    candle's low. First gap found wins; otherwise "Neutral".
    """
    window = data.tail(lookback_days)
    highs = window['high'].tolist()
    lows = window['low'].tolist()

    for i in range(2, len(window)):
        # Bullish FVG: candle 3's low clears candle 1's high.
        if lows[i] > highs[i - 2]:
            return "Bullish"
        # Bearish FVG: candle 3's high sits under candle 1's low.
        if highs[i] < lows[i - 2]:
            return "Bearish"

    return "Neutral"
145
+
146
def bos_strategy(data, lookback_days=10):
    """Break of structure: does the latest candle print a fresh extreme?

    New high vs the prior window -> "Bullish"; new low -> "Bearish";
    otherwise "Neutral".
    """
    window = data.tail(lookback_days)
    highs = window['high'].tolist()
    lows = window['low'].tolist()

    # Bullish BOS: the latest high exceeds every earlier high in the window.
    if highs[-1] > max(highs[:-1]):
        return "Bullish"
    # Bearish BOS: the latest low undercuts every earlier low in the window.
    if lows[-1] < min(lows[:-1]):
        return "Bearish"
    return "Neutral"
169
+
170
def choch_strategy(data, lookback_period=14):
    """Change of Character against rolling extremes of the last window.

    Bearish: lower high plus a break below the prior rolling low.
    Bullish: higher low plus a break above the prior rolling high.
    With fewer than `lookback_period` rows the rolling values are NaN and
    both comparisons fail, yielding "Neutral".
    """
    frame = data.copy()
    frame['recent_high'] = frame['high'].rolling(window=lookback_period).max()
    frame['recent_low'] = frame['low'].rolling(window=lookback_period).min()

    last_high, prev_high = frame['high'].iloc[-1], frame['high'].iloc[-2]
    last_low, prev_low = frame['low'].iloc[-1], frame['low'].iloc[-2]

    if last_high < prev_high and last_low < frame['recent_low'].iloc[-2]:
        return "Bearish"
    if last_low > prev_low and last_high > frame['recent_high'].iloc[-2]:
        return "Bullish"
    return "Neutral"
186
+
187
+
188
def order_block_strategy(data, lookback_days=2):
    """Detect a simple order block on the last two candles.

    Bullish OB: a bearish candle followed by a bullish candle closing
    above the bearish candle's high. Bearish OB is the mirror image.

    Fix: returns "Neutral" when fewer than two candles are available;
    the original indexed .iloc[-2] unconditionally and raised IndexError.
    """
    window = data.tail(lookback_days)
    if len(window) < 2:
        return "Neutral"

    previous = window.iloc[-2]
    current = window.iloc[-1]

    # Bullish Order Block: last bearish candle followed by a strong bullish move.
    if previous['close'] < previous['open'] and current['close'] > current['open'] and current['close'] > previous['high']:
        return "Bullish"
    # Bearish Order Block: last bullish candle followed by a strong bearish move.
    if previous['close'] > previous['open'] and current['close'] < current['open'] and current['close'] < previous['low']:
        return "Bearish"
    return "Neutral"
204
+
205
+
206
# Main strategy function using price-action sub-strategies
def priceaction_strategies(data):
    """Run every price-action sub-strategy and combine them.

    Returns (signals dict, weighted percentage score, final signal),
    where the final signal is "Buy" (>= 60), "DBuy" (<= 40) or "Neutral".
    """
    signals = {
        "Candlestick Pattern": candlestick_pattern_strategy(data),
        "HH_HL_LL_LH": hh_ll_price_action_strategy(data),
        "Triangle Breakout": three_bar_triangle_breakout(data),
        "Fair Value Gap": fvg_strategy(data),
        "BOS": bos_strategy(data),
        "CHoCH": choch_strategy(data),
        "Order_Block": order_block_strategy(data),
    }

    weights = {
        "Candlestick Pattern": 15,
        "HH_HL_LL_LH": 15,
        "Triangle Breakout": 15,
        "Fair Value Gap": 10,
        "BOS": 20,
        "CHoCH": 15,
        "Order_Block": 10,
    }

    # Bullish earns the full weight, Neutral half, Bearish nothing.
    total_score = 0
    for name, weight in weights.items():
        verdict = signals[name]
        if verdict == "Bullish":
            total_score += weight
        elif verdict == "Neutral":
            total_score += weight * 0.5

    overall_percentage = round((total_score / sum(weights.values())) * 100, 2)

    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal
262
+
263
# API-style wrapper for the price-action strategy bundle
def get_priceaction_trade_signal(data):
    """Shape the combined price-action result as a JSON-friendly dict."""
    signals, score, verdict = priceaction_strategies(data)
    return {
        "priceaction_signals": signals,
        "priceaction_score": score,
        "priceaction_final_signal": verdict,
    }
271
+
pytrade.py ADDED
@@ -0,0 +1,71 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import json
import os
import time

import requests
import yfinance as yf
from flask import Flask, Response, jsonify, request
from flask_cors import CORS

from analysestock import analysestock
from list import build_companies_payload
9
+
10
app = Flask(__name__)

# Browser origins allowed to call this API (Angular Space + local dev).
# Overridable at deploy time via the FRONTEND_ORIGIN env var
# (comma-separated list).
FRONTEND_ORIGIN = os.environ.get(
    "FRONTEND_ORIGIN",
    "https://pykara-pytrade.static.hf.space,https://localhost:4200"
)
allowed = [origin.strip() for origin in FRONTEND_ORIGIN.split(",") if origin.strip()]
CORS(app, resources={r"/*": {"origins": allowed}}, supports_credentials=True)
20
+
21
@app.get("/health")
def health():
    """Liveness probe used by the hosting platform."""
    return {"status": "ok"}, 200
24
+
25
+ # ---------- API ----------
26
+
27
@app.get("/getfilters")
def get_filters():
    """Return the market filter catalogue with a UTC 'asOf' timestamp."""
    import time  # fix: `time` is used here but was never imported at module top
    # NOTE(review): MARKETS is not defined or imported anywhere in this file,
    # so this endpoint raises NameError as-is — confirm where it should come from.
    return jsonify({"asOf": time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime()), "markets": MARKETS})
30
+
31
@app.get("/getcompanies")
def get_companies():
    """Return the companies payload for an index code (?code= or ?index=)."""
    import requests  # fix: requests.HTTPError is caught below but `requests` was never imported
    code = (request.args.get("code") or request.args.get("index") or "").upper()
    if not code:
        return jsonify({"error": "Missing ?code=<INDEXCODE>"}), 400
    try:
        payload = build_companies_payload(code)
        return jsonify(payload)
    except requests.HTTPError as e:
        # Upstream data provider failed — surface as a 502.
        return jsonify({"error": f"Upstream error: {e}"}), 502
    except Exception as e:
        return jsonify({"error": str(e)}), 500
43
+
44
+
45
@app.route('/analysestock', methods=['POST'])
def analyze_all():
    """Analyse every ticker in the POSTed JSON list.

    Per-ticker failures become {"ticker", "error"} entries instead of
    aborting the whole request.
    """
    try:
        payload = request.get_json()
        results = []
        for ticker in payload['ticker']:
            try:
                results.append(analysestock(ticker))
            except Exception as e:
                results.append({"ticker": ticker, "error": str(e)})
        # json.dumps (rather than jsonify) preserves OrderedDict key order.
        return Response(json.dumps(results, indent=2), mimetype='application/json')
    except Exception as e:
        return jsonify({"error": str(e)}), 500
62
+
63
if __name__ == "__main__":
    # Hosting platforms (e.g. HF Spaces) inject PORT; default to 5000 locally.
    port = int(os.environ.get("PORT", "5000"))
    # Local default port -> loopback + debug; platform port -> bind all interfaces.
    local = port == 5000
    app.run(host="127.0.0.1" if local else "0.0.0.0", port=port, debug=local)
requirements.txt ADDED
@@ -0,0 +1,17 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ flask
2
+ flask-cors
3
+ yfinance==0.2.65
4
+ pandas==2.3.2
5
+ numpy==2.3.2
6
+ TA-Lib # we build the C lib in Dockerfile
7
+ scikit-learn
8
+ scipy
9
+ joblib
10
+ threadpoolctl
11
+ openpyxl
12
+ gnews
13
+ newspaper3k
14
+ lxml_html_clean
15
+ nltk
16
+ rapidfuzz
17
+ gunicorn
rsistrategies.py ADDED
@@ -0,0 +1,243 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ import yfinance as yf
3
+ import pandas as pd
4
+ import numpy as np
5
+ import talib
6
+ from collections import OrderedDict
7
+ import datetime
8
+
9
+ # --- Strategy Functions ---
10
+
11
def get_overbought_oversold_signal(recent):
    """Oversold RSI(14) (< 30) anywhere in the window -> Bullish;
    overbought (> 70) -> Bearish; otherwise Neutral."""
    rsi = recent['RSI_14']
    if (rsi < 30).any():
        return "Bullish"
    if (rsi > 70).any():
        return "Bearish"
    return "Neutral"
18
+
19
def get_rsi_crossover_signal(rsi5, rsi14):
    """Return the first RSI(5)/RSI(14) crossover found in the window.

    Scans consecutive pairs (oldest first) of the fast and slow RSI series.

    Fixes: the original indexed the Series with plain integers (rsi5[i]),
    which is label-based on the date-indexed Series passed by the caller
    and breaks in modern pandas — positional access must use .iloc.
    Comments also wrongly referred to MACD.
    """
    for i in range(len(rsi5) - 1):
        older_fast, newer_fast = rsi5.iloc[i], rsi5.iloc[i + 1]
        older_slow, newer_slow = rsi14.iloc[i], rsi14.iloc[i + 1]

        # Bullish crossover: fast RSI crosses above the slow RSI.
        if older_fast <= older_slow and newer_fast > newer_slow:
            return "Bullish"
        # Bearish crossover: fast RSI crosses below the slow RSI.
        if older_fast >= older_slow and newer_fast < newer_slow:
            return "Bearish"

    return "Neutral"
35
+
36
+
37
+
38
def get_mean_reversion_signal(df):
    """RSI(5) mean reversion on the last five bars.

    A cross below 20 -> Bullish; a cross above 80 -> Bearish; fewer than
    six observations -> Neutral (a crossover needs a previous value).
    """
    rsi = df['RSI_5']
    if len(rsi) < 6:
        return "Neutral"

    crossed_down = ((rsi < 20) & (rsi.shift(1) >= 20)).tail(5)
    crossed_up = ((rsi > 80) & (rsi.shift(1) <= 80)).tail(5)

    if crossed_down.any():
        return "Bullish"
    if crossed_up.any():
        return "Bearish"
    return "Neutral"
53
+
54
+
55
def get_bollinger_rsi_signal(recent):
    """Bollinger Band + RSI confluence over the recent window.

    Close below the lower band with RSI(14) < 30 -> Bullish; close above
    the upper band with RSI(14) > 70 -> Bearish.
    """
    closes = recent['close'].to_numpy().flatten()  # flatten guards an (n, 1) column
    oversold = (closes < recent['Lower_BB']) & (recent['RSI_14'] < 30)
    overbought = (closes > recent['Upper_BB']) & (recent['RSI_14'] > 70)

    if oversold.any():
        return "Bullish"
    if overbought.any():
        return "Bearish"
    return "Neutral"
64
+
65
+
66
def get_rsi_with_ma_signal(recent):
    """Trend filter: close above MA(20) with RSI(14) > 50 -> Bullish;
    close below MA(20) with RSI(14) < 50 -> Bearish."""
    closes = recent['close'].to_numpy().flatten()  # flatten guards an (n, 1) column
    bullish = (closes > recent['MA_20']) & (recent['RSI_14'] > 50)
    bearish = (closes < recent['MA_20']) & (recent['RSI_14'] < 50)

    if bullish.any():
        return "Bullish"
    if bearish.any():
        return "Bearish"
    return "Neutral"
75
+
76
def get_rsi_50_trend_signal(recent):
    """Every RSI(14) in the window above 50 -> Bullish; every one below
    50 -> Bearish; mixed -> Neutral."""
    rsi = recent['RSI_14']
    if (rsi > 50).all():
        return "Bullish"
    if (rsi < 50).all():
        return "Bearish"
    return "Neutral"
83
+
84
+
85
def get_swing_rejection_signal(rsi14):
    """Wilder-style swing rejection over the last six RSI(14) readings.

    Bullish: dip under 30, bounce, higher retest, then a break back above
    the bounce with the final value over 30. Bearish is the mirror image.
    """
    r1, r2, r3, r4, r5, r6 = rsi14

    bullish = (r1 < 30
               and r2 > r1
               and r1 < r3 < r2
               and r4 > r3
               and (r5 > r2 or r6 > r2)
               and r6 > 30)

    bearish = (r1 > 70
               and r2 < r1
               and r2 < r3 < r1
               and r4 < r3
               and (r5 < r2 or r6 < r2)
               and r6 < 70)

    if bullish:
        return "Bullish"
    if bearish:
        return "Bearish"
    return "Neutral"
110
+
111
+
112
def is_pivot_low(prices, idx, left=5, right=5):
    """Check if prices[idx] is a pivot low: strictly below its `left`
    neighbours and its `right` neighbours.

    Fix: the original compared both sides with range(1, left + 1), so the
    `right` parameter only affected the bounds check and right-side bars
    beyond `left` were never examined.
    """
    if idx < left or idx + right >= len(prices):
        return False
    return (all(prices[idx] < prices[idx - i] for i in range(1, left + 1))
            and all(prices[idx] < prices[idx + i] for i in range(1, right + 1)))
117
+
118
def is_pivot_high(prices, idx, left=5, right=5):
    """Check if prices[idx] is a pivot high: strictly above its `left`
    neighbours and its `right` neighbours.

    Fix: the original compared both sides with range(1, left + 1), so the
    `right` parameter only affected the bounds check and right-side bars
    beyond `left` were never examined.
    """
    if idx < left or idx + right >= len(prices):
        return False
    return (all(prices[idx] > prices[idx - i] for i in range(1, left + 1))
            and all(prices[idx] > prices[idx + i] for i in range(1, right + 1)))
123
+
124
def get_rsi_divergence_signal(df):
    """Look back up to 20 candles for a regular RSI/price divergence.

    Bullish: price makes a lower low while RSI makes a higher low at a
    shared pivot. Bearish is the mirror with pivot highs.
    """
    frame = df.dropna().reset_index(drop=True)
    prices = frame['close'].values
    rsi = frame['RSI_14'].values

    left = 5
    right = 5
    max_range = 20

    latest = len(prices) - 1  # latest candle
    start = max(latest - max_range, left)

    # Walk backwards from the candle just before the latest one.
    for i in range(latest - 1, start - 1, -1):
        # Regular bullish divergence at a shared pivot low.
        if is_pivot_low(prices, i, left, right) and is_pivot_low(rsi, i, left, right):
            if prices[latest] < prices[i] and rsi[latest] > rsi[i]:
                return "Bullish"
        # Regular bearish divergence at a shared pivot high.
        if is_pivot_high(prices, i, left, right) and is_pivot_high(rsi, i, left, right):
            if prices[latest] > prices[i] and rsi[latest] < rsi[i]:
                return "Bearish"

    return "Neutral"
148
+
149
+
150
+ # --- Master RSI Strategy Function ---
151
+
152
def rsi_strategies(df):
    """Compute RSI-family indicators, evaluate every RSI sub-strategy and
    combine them into a weighted score plus a final Buy/DBuy/Neutral call.

    Mutates `df` in place by adding the indicator columns (RSI_14, RSI_5,
    MA_20, Upper/Middle/Lower_BB) that downstream serialisation relies on.
    """
    close_prices = df['close']

    # Indicator columns used by the sub-strategies below.
    df['RSI_14'] = talib.RSI(close_prices, timeperiod=14)
    df['RSI_5'] = talib.RSI(close_prices, timeperiod=5)
    df['MA_20'] = talib.SMA(close_prices, timeperiod=20)
    df['Upper_BB'], df['Middle_BB'], df['Lower_BB'] = talib.BBANDS(close_prices, timeperiod=20)

    # Slice only after all indicator columns exist.
    recent = df.tail(5)

    signals = OrderedDict([
        # Fix: scalar .iloc on the column; the original
        # `df[['RSI_14']].iloc[-1][0]` used positional Series indexing with
        # `[0]`, which is deprecated in modern pandas.
        ("RSI 14", round(df['RSI_14'].iloc[-1], 2)),
        ("Overbought/Oversold", get_overbought_oversold_signal(recent)),
        ("RSI Swing Rejection", get_swing_rejection_signal(df['RSI_14'].tail(6))),
        ("RSI Divergence", get_rsi_divergence_signal(df)),
        ("RSI_Bollinger Band", get_bollinger_rsi_signal(recent)),
        ("RSI 5/14 Crossover", get_rsi_crossover_signal(df['RSI_5'].tail(5), df['RSI_14'].tail(5))),
        ("RSI Trend 50 Confirmation", get_rsi_50_trend_signal(recent)),
        ("RSI_MA", get_rsi_with_ma_signal(recent)),
        ("Mean Reversion", get_mean_reversion_signal(df[['RSI_5']].tail(6)))
    ])

    # Weightage for each signal (the "RSI 14" entry is informational only).
    rsi_signal_weights = {
        "Overbought/Oversold": 15,
        "RSI Swing Rejection": 15,
        "RSI Divergence": 15,
        "RSI_Bollinger Band": 15,
        "RSI 5/14 Crossover": 10,
        "RSI Trend 50 Confirmation": 10,
        "RSI_MA": 10,
        "Mean Reversion": 10
    }

    # Bullish earns the full weight, Neutral half, Bearish nothing.
    total_score = 0
    for strategy, weight in rsi_signal_weights.items():
        signal = signals[strategy]
        if signal == "Bullish":
            total_score += weight
        elif signal == "Neutral":
            total_score += weight * 0.5

    overall_percentage = round((total_score / sum(rsi_signal_weights.values())) * 100, 2)

    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal
217
+
218
def extract_series(data, column_name, days=100):
    """Return the last `days` non-NaN values of a column as a mapping of
    'YYYY-MM-DD' date strings to values rounded to 2 decimals.

    Assumes `data` has a DatetimeIndex (strftime is called on the index).
    """
    tail = data[[column_name]].dropna().tail(days)
    tail.index = tail.index.strftime('%Y-%m-%d')
    return tail[column_name].round(2).to_dict()
222
+
223
def get_rsi_trade_signal(data):
    """API-shaped wrapper: run the RSI strategy bundle and attach the
    recent indicator/price series for charting."""
    rsi_signals, score, verdict = rsi_strategies(data)

    payload = {
        "rsi_signals": rsi_signals,
        "rsi_score": score,
        "rsi_final_signal": verdict,
        "rsi_14_last_2_years": extract_series(data, 'RSI_14'),
        "rsi_5_last_2_years": extract_series(data, 'RSI_5'),
        "ma": extract_series(data, 'MA_20'),
        "close": extract_series(data, 'close'),
        "open": extract_series(data, 'open'),
        "high": extract_series(data, 'high'),
        "low": extract_series(data, 'low'),
        "lowerbb": extract_series(data, 'Lower_BB'),
        "upperbb": extract_series(data, 'Upper_BB'),
    }
    return payload
242
+
243
+
srstrategies.py ADDED
@@ -0,0 +1,175 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import yfinance as yf
2
+ import pandas as pd
3
+ import numpy as np
4
+ import datetime
5
+ import talib
6
+
7
+
8
# Classic pivot-point support/resistance levels
def calculate_support_resistance(data):
    """Add Pivot, Support1 and Resistance1 columns in place and return data."""
    pivot = (data['high'] + data['low'] + data['close']) / 3
    data['Pivot'] = pivot
    data['Support1'] = 2 * pivot - data['high']
    data['Resistance1'] = 2 * pivot - data['low']
    return data
17
+
18
# Strategy 1: Reversal — when the close sits within an ATR-based tolerance of
# support/resistance, confirm with candlestick reversal patterns.
def detect_reversal(df, support, resistance):
    """Tag each row's 'Signal' column as Bullish/Bearish/Neutral based on
    candlestick reversal patterns near the given support/resistance arrays."""
    df['Signal'] = 'Neutral'  # default for every row

    opens = df['open'].to_numpy().flatten()
    highs = df['high'].to_numpy().flatten()
    lows = df['low'].to_numpy().flatten()
    closes = df['close'].to_numpy().flatten()
    ohlc = (opens, highs, lows, closes)

    # Common reversal candlestick patterns (TA-Lib: open, high, low, close).
    hammer = talib.CDLHAMMER(*ohlc)
    engulfing = talib.CDLENGULFING(*ohlc)
    shooting_star = talib.CDLSHOOTINGSTAR(*ohlc)
    doji = talib.CDLDOJI(*ohlc)
    morning_star = talib.CDLMORNINGSTAR(*ohlc)
    evening_star = talib.CDLEVENINGSTAR(*ohlc)
    piercing_line = talib.CDLPIERCING(*ohlc)
    harami = talib.CDLHARAMI(*ohlc)

    # Tolerance band around each level, scaled by the latest ATR.
    df['ATR'] = talib.ATR(highs, lows, closes, timeperiod=14)
    tolerance = df['ATR'].iloc[-1] * 2

    bullish_patterns = (hammer, engulfing, doji, morning_star, piercing_line, harami)
    bearish_patterns = (shooting_star, engulfing, doji, evening_star, piercing_line, harami)

    for i in range(1, len(df)):
        price = closes[i]

        # Bullish reversal: close near support plus a positive pattern value.
        if abs(price - support[i]) <= tolerance and any(p[i] > 0 for p in bullish_patterns):
            df.loc[df.index[i], 'Signal'] = 'Bullish'

        # Bearish reversal: close near resistance plus a negative pattern value
        # (overwrites a bullish tag on the same row, as in the original logic).
        if abs(price - resistance[i]) <= tolerance and any(p[i] < 0 for p in bearish_patterns):
            df.loc[df.index[i], 'Signal'] = 'Bearish'

    return df
52
+
53
+
54
# Strategy 2: Breakout — latest close crosses the current S1/R1 level
# relative to the previous close.
def detect_breakouts(df):
    """Return Bullish/Bearish when the close crossed a level between the
    previous and latest candle, else Neutral."""
    support = df['Support1'].iloc[-1]
    resistance = df['Resistance1'].iloc[-1]
    prev_close = df['close'].iloc[-2]
    last_close = df['close'].iloc[-1]

    if last_close > resistance and prev_close <= resistance:
        return "Bullish"
    if last_close < resistance and prev_close >= resistance:
        return "Bearish"
    if last_close > support and prev_close <= support:
        return "Bullish"
    if last_close < support and prev_close >= support:
        return "Bearish"
    return "Neutral"
72
+
73
# Strategy 3: Flip Zone — a three-candle close sequence through a level.
def detect_flip_zone(df):
    """Detect the last three closes flipping through S1 or R1.

    Upward flip through a level -> Bullish; downward flip -> Bearish.
    Support is checked before resistance, as in the original ordering.
    """
    support = df['Support1'].iloc[-1]
    resistance = df['Resistance1'].iloc[-1]
    c3, c2, c1 = df['close'].iloc[-3], df['close'].iloc[-2], df['close'].iloc[-1]

    for level in (support, resistance):
        # Upward flip: below -> at/above -> above the level.
        if c3 < level and c2 >= level and c1 > level:
            return "Bullish"
        # Downward flip: above -> at/below -> below the level.
        if c3 > level and c2 <= level and c1 < level:
            return "Bearish"

    return "Neutral"
93
+
94
# Strategy 4: S/R retest — break of a level followed by a pullback/pop
# on the candle before last.
def detect_sr_retest(df):
    """Classify the last four closes as a break-and-retest of S1/R1.

    Labels are kept exactly as in the original implementation
    (e.g. a resistance break followed by a dip returns "Bearish").
    """
    support = df['Support1'].iloc[-1]
    resistance = df['Resistance1'].iloc[-1]
    c4, c3, c2, c1 = (df['close'].iloc[-4], df['close'].iloc[-3],
                      df['close'].iloc[-2], df['close'].iloc[-1])

    dipped = c2 < c3 and c2 < c1  # local trough between break and continuation
    popped = c2 > c3 and c2 > c1  # local peak between break and continuation

    if c4 < resistance and c3 >= resistance and dipped:
        return "Bearish"
    if c4 > resistance and c3 <= resistance and popped:
        return "Bullish"
    if c4 < support and c3 >= support and dipped:
        return "Bullish"
    if c4 > support and c3 <= support and popped:
        return "Bearish"

    return "Neutral"
115
+
116
+
117
# Final signal calculation for the support/resistance bundle
def support_resistance_strategy(data):
    """Run every S/R sub-strategy and combine them.

    Returns (signals dict, weighted percentage score, final signal),
    where the final signal is "Buy" (>= 60), "DBuy" (<= 40) or "Neutral".
    """
    # Pivot-point levels are prerequisites for all sub-strategies.
    data = calculate_support_resistance(data)

    breakout = detect_breakouts(data)
    reversal = detect_reversal(data, data['Support1'].to_numpy(), data['Resistance1'].to_numpy())
    flip = detect_flip_zone(data)
    sr_retest = detect_sr_retest(data)

    signals = {
        "Support1": round(data['Support1'].iloc[-1], 2),
        "Resistance1": round(data['Resistance1'].iloc[-1], 2),
        "Breakout": breakout,
        "Reversal": reversal['Signal'].iloc[-1],
        "Flip": flip,
        "SR_Retest": sr_retest,
    }

    weights = {
        "Breakout": 35,
        "Reversal": 25,
        "Flip": 20,
        "SR_Retest": 20,
    }

    # Bullish earns the full weight, Neutral half, Bearish nothing.
    total_score = 0
    for name, weight in weights.items():
        verdict = signals[name]
        if verdict == "Bullish":
            total_score += weight
        elif verdict == "Neutral":
            total_score += weight * 0.5

    overall_percentage = round((total_score / sum(weights.values())) * 100, 2)

    if overall_percentage >= 60:
        final_signal = "Buy"
    elif overall_percentage <= 40:
        final_signal = "DBuy"
    else:
        final_signal = "Neutral"

    return signals, overall_percentage, final_signal
167
+
168
# API-style wrapper for the support/resistance bundle
def get_support_resistance_signal(data):
    """Shape the combined S/R result as a JSON-friendly dict."""
    signals, score, verdict = support_resistance_strategy(data)
    return {
        "support_resistance_signals": signals,
        "sr_score": score,
        "sr_final_signal": verdict,
    }
testing.py ADDED
@@ -0,0 +1,28 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from flask import Flask, request, jsonify
2
+ from transformers import T5Tokenizer, T5ForConditionalGeneration
3
+ import torch
4
+
5
app = Flask(__name__)

# Run inference on GPU when available, otherwise fall back to CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"

# Stock-market QA model (T5 seq2seq) and its matching tokenizer,
# loaded once at startup and reused by every request.
model_name = "AventIQ-AI/t5-stockmarket-qa-chatbot"
tokenizer = T5Tokenizer.from_pretrained(model_name)
model = T5ForConditionalGeneration.from_pretrained(model_name).to(device)
12
+
13
@app.route('/ask', methods=['POST'])
def ask():
    """Answer the posted stock-market question with the T5 QA model."""
    question = request.json.get('question', '')
    prompt = "question: " + question
    encoded = tokenizer.encode(prompt, return_tensors="pt").to(device)

    # Inference only — no gradients needed.
    with torch.no_grad():
        generated = model.generate(encoded, max_length=50)
    answer = tokenizer.decode(generated[0], skip_special_tokens=True)

    return jsonify({'answer': answer})
26
+
27
if __name__ == '__main__':
    # Development server only; use a WSGI server in production.
    app.run(debug=True)