L CANSLIM
This commit is contained in:
parent 9cb4f62a48
commit 9b6871d575

44  industry_sic_codes.csv  (new file)
@@ -0,0 +1,44 @@
+SIC Code,Office,Industry Title
+100,Industrial Applications and Services,AGRICULTURAL PRODUCTION-CROPS
+200,Industrial Applications and Services,AGRICULTURAL PROD-LIVESTOCK & ANIMAL SPECIALTIES
+700,Industrial Applications and Services,AGRICULTURAL SERVICES
+800,Industrial Applications and Services,FORESTRY
+900,Industrial Applications and Services,"FISHING, HUNTING AND TRAPPING"
+1000,Office of Energy & Transportation,METAL MINING
+1040,Office of Energy & Transportation,GOLD AND SILVER ORES
+1090,Office of Energy & Transportation,MISCELLANEOUS METAL ORES
+1220,Office of Energy & Transportation,BITUMINOUS COAL & LIGNITE MINING
+1221,Office of Energy & Transportation,BITUMINOUS COAL & LIGNITE SURFACE MINING
+1311,Office of Energy & Transportation,CRUDE PETROLEUM & NATURAL GAS
+1381,Office of Energy & Transportation,DRILLING OIL & GAS WELLS
+1382,Office of Energy & Transportation,OIL & GAS FIELD EXPLORATION SERVICES
+1389,Office of Energy & Transportation,"OIL & GAS FIELD SERVICES, NEC"
+1400,Office of Energy & Transportation,MINING & QUARRYING OF NONMETALLIC MINERALS (NO FUELS)
+1520,Office of Real Estate & Construction,GENERAL BLDG CONTRACTORS - RESIDENTIAL BLDGS
+1531,Office of Real Estate & Construction,OPERATIVE BUILDERS
+1540,Office of Real Estate & Construction,GENERAL BLDG CONTRACTORS - NONRESIDENTIAL BLDGS
+1600,Office of Real Estate & Construction,HEAVY CONSTRUCTION OTHER THAN BLDG CONST - CONTRACTORS
+1623,Office of Real Estate & Construction,"WATER, SEWER, PIPELINE, COMM & POWER LINE CONSTRUCTION"
+1700,Office of Real Estate & Construction,CONSTRUCTION - SPECIAL TRADE CONTRACTORS
+1731,Office of Real Estate & Construction,ELECTRICAL WORK
+2000,Office of Manufacturing,FOOD AND KINDRED PRODUCTS
+2011,Office of Manufacturing,MEAT PACKING PLANTS
+2013,Office of Manufacturing,SAUSAGES & OTHER PREPARED MEAT PRODUCTS
+2015,Office of Manufacturing,POULTRY SLAUGHTERING AND PROCESSING
+2020,Office of Manufacturing,DAIRY PRODUCTS
+2024,Office of Manufacturing,ICE CREAM & FROZEN DESSERTS
+2030,Office of Manufacturing,"CANNED, FROZEN & PRESERVD FRUIT, VEG & FOOD SPECIALTIES"
+2033,Office of Manufacturing,"CANNED, FRUITS, VEG, PRESERVES, JAMS & JELLIES"
+2040,Office of Manufacturing,GRAIN MILL PRODUCTS
+2050,Office of Manufacturing,BAKERY PRODUCTS
+2052,Office of Manufacturing,COOKIES & CRACKERS
+2060,Office of Manufacturing,SUGAR & CONFECTIONERY PRODUCTS
+2070,Office of Manufacturing,FATS & OILS
+2080,Office of Manufacturing,BEVERAGES
+2082,Office of Manufacturing,MALT BEVERAGES
+2086,Office of Manufacturing,BOTTLED & CANNED SOFT DRINKS & CARBONATED WATERS
+2090,Office of Manufacturing,MISCELLANEOUS FOOD PREPARATIONS & KINDRED PRODUCTS
+2092,Office of Manufacturing,PREPARED FRESH OR FROZEN FISH & SEAFOODS
+2100,Office of Manufacturing,TOBACCO PRODUCTS
+2111,Office of Manufacturing,CIGARETTES
+2200,Office of Manufacturing,TEXTILE MILL PRODUCTS
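This CSV is the lookup table behind the new L score: each row maps a numeric SIC code to its SEC office and industry title. A minimal sketch of the lookup it supports, run from the project root where the file lives (the ticker-to-SIC mapping itself comes from the database, not this file):

import csv

# Build a "SIC code" -> "industry title" map from industry_sic_codes.csv.
with open("industry_sic_codes.csv", newline="", encoding="utf-8") as f:
    sic_lookup = {row["SIC Code"].strip(): row["Industry Title"].strip()
                  for row in csv.DictReader(f)}

print(sic_lookup.get("1311"))  # -> CRUDE PETROLEUM & NATURAL GAS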
18  src/main.py

@@ -2,6 +2,7 @@ import datetime
 from screener.data_fetcher import validate_date_range, fetch_financial_data, get_stocks_in_time_range
 from screener.c_canslim import check_quarterly_earnings, check_return_on_equity, check_sales_growth
 from screener.a_canslim import check_annual_eps_growth
+from screener.l_canslim import check_industry_leadership  # ✅ NEW: Import L Score function
 from screener.csv_appender import append_scores_to_csv
 from screener.screeners import SCREENERS  # Import categories
 from screener.user_input import get_user_screener_selection  # Import function
@@ -15,7 +16,8 @@ def main():
     start_date, end_date = validate_date_range(user_start_date, user_end_date, required_quarters=4)

     # 3️⃣ Get selected screeners & customization preferences
-    selected_screeners = get_user_screener_selection()  # ✅ Now imported from `user_input.py`
+    selected_screeners = get_user_screener_selection()
+    print(f"\n✅ Selected Screeners: {selected_screeners}\n")  # ✅ DEBUG LOG

     # 4️⃣ Get all stock symbols dynamically
     symbol_list = get_stocks_in_time_range(start_date, end_date)
@@ -24,14 +26,14 @@ def main():
         print("No stocks found within the given date range.")
         return

-    print(f"Processing {len(symbol_list)} stocks within the given date range...")
+    print(f"Processing {len(symbol_list)} stocks within the given date range...\n")

     # 5️⃣ Process each stock symbol
     for symbol in symbol_list:
         data = fetch_financial_data(symbol, start_date, end_date)

         if not data:
-            print(f"Warning: No data returned for {symbol}. Assigning default score.")
+            print(f"⚠️ Warning: No data returned for {symbol}. Assigning default score.\n")
             scores = {screener: 0.25 for category in selected_screeners for screener in selected_screeners[category]}
         else:
             scores = {}
@@ -47,15 +49,21 @@ def main():
                     scores[screener] = check_sales_growth(data.get("sales_growth", []))
                 elif screener == "ROE_Score":
                     scores[screener] = check_return_on_equity(data.get("roe", []))
+                elif screener == "L_Score":
+                    scores[screener] = check_industry_leadership(symbol)  # ✅ NEW: Industry Leadership Calculation
+                    print(f"🟢 {symbol} - L_Score: {scores[screener]}")  # ✅ DEBUG LOG

                 # Apply user-defined threshold if applicable
                 if isinstance(threshold, (int, float)):
                     scores[screener] = scores[screener] >= threshold

-        # 7️⃣ Append results to CSV
+        # 7️⃣ Calculate Total Score
+        scores["Total_Score"] = sum(scores.values())  # ✅ NEW: Total Score Calculation
+
+        # 8️⃣ Append results to CSV
         append_scores_to_csv(symbol, scores)

-    print("Scores saved in data/metrics/stock_scores.csv")
+    print("✅ Scores saved in data/metrics/stock_scores.csv\n")

 if __name__ == "__main__":
     main()
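Note how the per-screener values mix types: the check functions return 0, 0.25, or 1, while a user-supplied numeric threshold turns the value into a boolean via the `>=` comparison. Python sums booleans as 0 and 1, so Total_Score stays a plain number. A small illustration with made-up values:

# Hypothetical per-stock scores: three raw check results plus one thresholded boolean.
scores = {"EPS_Score": 1, "Sales_Score": 0.25, "ROE_Score": True, "L_Score": 0}
scores["Total_Score"] = sum(scores.values())
print(scores["Total_Score"])  # 2.25 (True counts as 1)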
src/screener/csv_appender.py

@@ -1,57 +1,50 @@
 import csv
 import os

-# Define the directory and generic CSV file path
-BASE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "../.."))  # Go two levels up
-METRICS_DIR = os.path.join(BASE_DIR, "data", "metrics")
-CSV_FILE = os.path.join(METRICS_DIR, "stock_scores.csv")
-
-# Track all unique screeners used in any stock
-ALL_HEADERS = set()
+CSV_FILE_PATH = os.path.abspath(os.path.join(os.path.dirname(__file__), "../../data/metrics/stock_scores.csv"))

 def append_scores_to_csv(symbol, scores):
     """
-    Append stock analysis scores to a generic CSV file in `data/metrics` directory.
-
-    Ensures all rows have the same headers, adding new screeners dynamically.
+    Appends stock scores to the CSV file dynamically and ensures output order matches CSV.

     Args:
         symbol (str): Stock ticker symbol.
-        scores (dict): Dictionary of metric scores (e.g., EPS_Score, Sales_Score, etc.).
+        scores (dict): Dictionary of scores for each screener.
     """
-    # Ensure the directory exists
-    os.makedirs(METRICS_DIR, exist_ok=True)
+    file_exists = os.path.exists(CSV_FILE_PATH)

-    # Compute Total Score
-    total_score = sum(scores.values())
-    scores["Total_Score"] = total_score
+    # Ensure Total_Score is always included
+    scores["Total_Score"] = sum(scores.values())

-    # Update tracked headers
-    ALL_HEADERS.update(scores.keys())
+    # Read existing headers if the file exists
+    existing_headers = []
+    if file_exists:
+        with open(CSV_FILE_PATH, mode="r", encoding="utf-8") as file:
+            reader = csv.reader(file)
+            existing_headers = next(reader, [])

-    # Preferred order: "Symbol" first, "Total_Score" last
-    preferred_order = ["Symbol"]
-    remaining_headers = sorted([h for h in ALL_HEADERS if h not in preferred_order + ["Total_Score"]])
-    sorted_headers = preferred_order + remaining_headers + ["Total_Score"]
+    # Ensure headers dynamically include all possible screeners, keeping order
+    new_headers = ["Symbol"] + list(scores.keys())

-    # Check if file exists
-    file_exists = os.path.isfile(CSV_FILE)
+    # Merge existing headers with new ones, keeping Symbol first and Total_Score last
+    headers = existing_headers if existing_headers else new_headers
+    if "L_Score" not in headers:
+        headers.insert(-1, "L_Score")  # Ensure L_Score is before Total_Score

-    with open(CSV_FILE, 'a', newline='') as csvfile:
-        writer = csv.DictWriter(csvfile, fieldnames=sorted_headers)
+    # Ensure order consistency for output
+    row_data = {header: scores.get(header, 0) for header in headers}
+    row_data["Symbol"] = symbol  # Ensure Symbol is set

-        # If it's a new file or headers changed, write a new header row
-        if not file_exists or set(sorted_headers) != set(writer.fieldnames):
-            csvfile.seek(0)  # Move to start of the file
-            csvfile.truncate()  # Clear previous file to rewrite headers
+    # ✅ Print scores in the exact order they will appear in the CSV
+    print("\n📊 Score Output (Matching CSV Order):")
+    print(f"{symbol}: " + ", ".join([f"{key}: {row_data[key]}" for key in headers]))
+
+    # Open CSV and update headers if needed
+    with open(CSV_FILE_PATH, mode="a", newline="") as file:
+        writer = csv.DictWriter(file, fieldnames=headers)
+
+        # Write header only if file is new or headers have changed
+        if not file_exists or set(headers) != set(existing_headers):
             writer.writeheader()

-        # Fill missing values with 0.25 (default)
-        row = {"Symbol": symbol}
-        for header in sorted_headers:
-            if header == "Symbol":
-                row[header] = symbol
-            else:
-                row[header] = scores.get(header, 0.25)  # Use default if missing
-
-        writer.writerow(row)
+        writer.writerow(row_data)
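For orientation, a usage sketch of the rewritten appender against a fresh stock_scores.csv (the score values are made up; column order follows the insertion order of the scores dict, with Symbol first, and the row is also echoed to stdout):

from screener.csv_appender import append_scores_to_csv

append_scores_to_csv("AAPL", {"EPS_Score": 1, "Sales_Score": 0.25, "ROE_Score": 1, "L_Score": 1})
# Appends to data/metrics/stock_scores.csv something like:
#   Symbol,EPS_Score,Sales_Score,ROE_Score,L_Score,Total_Score
#   AAPL,1,0.25,1,1,3.25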
77  src/screener/l_canslim.py  (new file)

@@ -0,0 +1,77 @@
+import csv
+import os
+from db.db_connection import create_client
+
+# Load SIC Industry Data
+SIC_LOOKUP = {}
+
+def load_sic_data():
+    """Loads SIC Code data into a dictionary from the new CSV location above `src/`."""
+    global SIC_LOOKUP
+    script_dir = os.path.dirname(os.path.abspath(__file__))  # Get the directory of l_canslim.py
+    project_root = os.path.abspath(os.path.join(script_dir, "../../"))  # Go up to project root
+    sic_file = os.path.join(project_root, "industry_sic_codes.csv")  # Adjust path
+
+    if not os.path.exists(sic_file):
+        raise FileNotFoundError(f"Error: SIC Code CSV file not found at {sic_file}")
+
+    with open(sic_file, mode="r", encoding="utf-8") as file:
+        reader = csv.DictReader(file)
+        for row in reader:
+            sic_code = row["SIC Code"].strip()  # Match CSV header exactly
+            industry = row["Industry Title"].strip()  # Match CSV header exactly
+            SIC_LOOKUP[sic_code] = industry
+
+# Ensure SIC data is loaded at module import
+load_sic_data()
+
+def check_industry_leadership(symbol):
+    """
+    Determines if a stock is a leader in its industry group.
+
+    Criteria:
+    - Relative Strength (RS) Rating should be **80 or higher**.
+    - Stock should be in the top-performing industry groups (top 40-50).
+    - Stock should be trading near its **52-week high**.
+
+    Args:
+        symbol (str): Stock ticker symbol.
+
+    Returns:
+        float: 1 (Pass), 0 (Fail), 0.25 (Insufficient Data).
+    """
+    client = create_client()
+
+    query = f"""
+        SELECT
+            ticker,
+            sic,
+            relative_strength,
+            high_52_week,
+            current_price
+        FROM stock_db.stock_indicators
+        WHERE ticker = '{symbol}'
+    """
+
+    result = client.query(query)
+
+    if not result.result_rows:
+        return 0.25  # Not enough data
+
+    _, sic, rs_rating, high_52_week, current_price = result.result_rows[0]
+
+    if not sic or str(sic) not in SIC_LOOKUP:
+        return 0.25  # No SIC industry data available
+
+    # Ensure RS rating is valid
+    if rs_rating is None or high_52_week is None or current_price is None:
+        return 0.25  # Missing necessary data
+
+    # Industry Leader Criteria
+    passes_rs = rs_rating >= 80
+    near_high = current_price >= (high_52_week * 0.90)  # Within 10% of 52-week high
+
+    if passes_rs and near_high:
+        return 1  # Stock is an industry leader
+    else:
+        return 0  # Stock is lagging
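The pass/fail decision reduces to two comparisons on the row returned from stock_indicators. A worked example with invented numbers:

# RS rating of 85 clears the >= 80 bar; a price of 93 against a 52-week high of 100
# is within 10% of the high, so the stock would score 1.
rs_rating, high_52_week, current_price = 85, 100.0, 93.0
passes_rs = rs_rating >= 80                        # True
near_high = current_price >= high_52_week * 0.90   # 93.0 >= 90.0 -> True
print(1 if passes_rs and near_high else 0)         # 1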
src/screener/screeners.py

@@ -4,7 +4,8 @@ SCREENERS = {
         "EPS_Score": "Checks quarterly EPS growth",
         "Annual_EPS_Score": "Checks 3-year annual EPS growth",
         "Sales_Score": "Checks quarterly sales growth",
-        "ROE_Score": "Checks return on equity"
+        "ROE_Score": "Checks return on equity",
+        "L_Score": "Checks if the stock is a leader in its industry"  # ✅ NEW: Added L_Score
     },
     "Volume-Based": {
         "Volume_Oscillator_Score": "Checks for unusual volume surges",
src/screener/user_input.py

@@ -21,9 +21,9 @@ def get_user_screener_selection():
     for category in selected_categories:
         print(f"\nCategory: {category}")
         use_defaults = input(f"Use default settings for {category}? (y/n): ").strip().lower()

         selected_screeners[category] = {}

         for screener, description in SCREENERS[category].items():
             if use_defaults == "y":
                 selected_screeners[category][screener] = "default"
@@ -31,4 +31,10 @@ def get_user_screener_selection():
                 custom_value = input(f"{screener} ({description}) - Enter custom threshold or press Enter to use default: ").strip()
                 selected_screeners[category][screener] = float(custom_value) if custom_value else "default"

+    # ✅ Ensure L_Score is added if Fundamentals is selected
+    if "Fundamentals" in selected_screeners and "L_Score" not in selected_screeners["Fundamentals"]:
+        selected_screeners["Fundamentals"]["L_Score"] = "default"  # ✅ Ensure L_Score is included
+
+    print(f"\n✅ Selected Screeners: {selected_screeners}\n")  # ✅ DEBUG LOG
+
     return selected_screeners
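For reference, an illustrative shape of the dictionary this function now returns when the user picks the Fundamentals category (a float means the user typed a custom threshold; everything else stays "default", and L_Score is injected by the new check if the prompt loop missed it):

# Hypothetical return value of get_user_screener_selection()
selected_screeners = {
    "Fundamentals": {
        "EPS_Score": "default",
        "Annual_EPS_Score": "default",
        "Sales_Score": "default",
        "ROE_Score": 0.15,      # hypothetical user-supplied threshold
        "L_Score": "default",   # added automatically if missing
    }
}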