diff --git a/.env.example b/.env.example new file mode 100644 index 0000000..d52d743 --- /dev/null +++ b/.env.example @@ -0,0 +1,6 @@ +BOT_TOKEN='your-bot-token' +DB_HOST= "" +DB_NAME= "" +DB_USER="" +DB_PASS="" +DB_PORT="" \ No newline at end of file diff --git a/.gitignore b/.gitignore index 1c0a637..f7b14c2 100644 --- a/.gitignore +++ b/.gitignore @@ -33,3 +33,4 @@ Thumbs.db +.vercel diff --git a/README.md b/README.md index 3bc81b7..53e3638 100644 --- a/README.md +++ b/README.md @@ -56,10 +56,26 @@ So I created **OppTickBot** — a personal tool that turned into something I now - Python 3.10+ - [python-telegram-bot](https://github.com/python-telegram-bot/python-telegram-bot) v22+ (with job-queue extra) -- SQLite for storing opportunities +- PostgreSQL for storing opportunities - dateutil + regex for date parsing - Pillow + pytesseract (optional) for OCR on images +### Database Structure + +The project uses PostgreSQL. The main table is `opportunities` which maps to the following schema: +- `opp_id` (TEXT PRIMARY KEY): Unique identifier. +- `user_id` (BIGINT): Telegram user ID. +- `title` (TEXT): Opportunity title. +- `opp_type` (TEXT): Category (Internship, Scholarship, Event, etc.). +- `deadline` (TEXT): Datetime for the deadline. +- `priority` (TEXT): Priority level. +- `description` (TEXT): Extracted or user-provided description. +- `message_text` (TEXT): Original raw text. +- `link` (TEXT): Related URL. +- `archived` (INTEGER DEFAULT 0): Whether it is archived. +- `done` (INTEGER DEFAULT 0): Whether it is completed. +- `missed_notified` (INTEGER DEFAULT 0): Has user been notified of missing deadline. + ### Setup (Local Development) ```bash @@ -74,8 +90,53 @@ source .venv/bin/activate # Windows: .venv\Scripts\activate # 3. Install dependencies pip install -r requirements.txt -# 4. Create .env file with your token -echo "BOT_TOKEN=your_bot_token_here" > .env +# 4. 
Create .env file with your credentials +cp .env.example .env + +# Open the .env file and populate it with your actual values: +# BOT_TOKEN="your-bot-token" +# DB_HOST="localhost" +# DB_NAME="opptick_db" +# DB_USER="postgres" +# DB_PASS="password" +# DB_PORT="5432" + # 5. Run the bot -python bot.py \ No newline at end of file +python bot.py +``` + +### Deployment (Vercel) + +You can deploy this bot as a serverless function on Vercel. + +1. **Install Vercel CLI**: `npm i -g vercel` +2. **Deploy**: + ```bash + vercel --prod + ``` + (You will be asked to authenticate if it's your first time.) +3. **Set Environment Variables**: + If you have a local `.env` file, Vercel can automatically import it during setup. + Alternatively, go to your **Vercel Project Settings > Environment Variables** and add: + `BOT_TOKEN`, `DB_HOST`, `DB_NAME`, `DB_USER`, `DB_PASS`, `DB_PORT`. + +4. **Set Telegram Webhook**: + After deployment, get your Vercel URL (e.g., `https://your-project.vercel.app`) and set the webhook: + ``` + https://api.telegram.org/bot<YOUR_BOT_TOKEN>/setWebhook?url=https://your-project.vercel.app/webhook/ + ``` + +5. **Cron Jobs (Reminders)**: + - A `vercel.json` file is configured to run a daily cron job that triggers the reminder logic. + ```json + "schedule": "30 20 * * *" + ``` + - **Note on Free Tier**: Vercel's free tier supports cron jobs but with limitations (e.g., once a day). + - Alternatively, you can use an external service like [cron-job.org](https://cron-job.org) to hit `https://your-project.vercel.app/cron` at your preferred frequency. + +**TODOs**: +- `db.py`: In `Database.init_db()` there is a TODO to "find a better way to not re-init db on every call". +- `web_app.py`: Secure the `/cron` endpoint with a secret token/API key to prevent unauthorized triggering. 
+ + diff --git a/bot.py b/bot.py index 2969d59..0d5743a 100644 --- a/bot.py +++ b/bot.py @@ -1,20 +1,21 @@ import os import logging -import sqlite3 import uuid import re import io from datetime import datetime, timedelta -from dotenv import load_dotenv from dateutil.parser import parse as date_parse +from db import Database +from config import BOT_TOKEN,logger, db + from telegram import ( Update, InlineKeyboardButton, InlineKeyboardMarkup, ReplyKeyboardMarkup ) from telegram.ext import ( Application, CommandHandler, MessageHandler, ConversationHandler, - ContextTypes, filters, CallbackQueryHandler, JobQueue, ChatMemberHandler + ContextTypes, filters, CallbackQueryHandler, ChatMemberHandler, JobQueue ) # OCR dependencies @@ -25,50 +26,6 @@ except ImportError: OCR_AVAILABLE = False -# Load environment -if os.path.exists(".env"): - load_dotenv() -BOT_TOKEN = os.getenv("BOT_TOKEN") -if not BOT_TOKEN: - raise ValueError("BOT_TOKEN missing! Set in .env or Railway Variables.") - -# Logging -logging.basicConfig( - format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', - level=logging.INFO -) -logger = logging.getLogger(__name__) - -# DB setup -DB_FILE = 'opportunities.db' -def init_db(): - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute(''' - CREATE TABLE IF NOT EXISTS opportunities ( - opp_id TEXT PRIMARY KEY, - user_id INTEGER, - title TEXT, - opp_type TEXT, - deadline TEXT, - priority TEXT, - description TEXT, - message_text TEXT, - link TEXT, - archived INTEGER DEFAULT 0, - done INTEGER DEFAULT 0, - missed_notified INTEGER DEFAULT 0 - ) - ''') - # Safe migration for existing databases - for col, defn in [("link", "TEXT"), ("missed_notified", "INTEGER DEFAULT 0")]: - try: - c.execute(f"ALTER TABLE opportunities ADD COLUMN {col} {defn}") - except sqlite3.OperationalError: - pass - conn.commit() - conn.close() -init_db() # Conversation states DEADLINE, TYPE, PRIORITY, TITLE, DESCRIPTION, LINK, CONFIRM = range(7) @@ -355,15 +312,7 @@ async def 
confirm_callback(update: Update, context: ContextTypes.DEFAULT_TYPE) - link = context.user_data.get('link', '') try: - conn = sqlite3.connect(DB_FILE) - conn.execute( - 'INSERT INTO opportunities ' - '(opp_id, user_id, title, opp_type, deadline, priority, description, message_text, link) ' - 'VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?)', - (opp_id, user_id, title, opp_type, deadline.isoformat(), priority, desc, message_text, link) - ) - conn.commit() - conn.close() + db.add_opportunity(opp_id, user_id, title, opp_type, deadline.isoformat(), priority, desc, message_text, link) except Exception as e: logger.error('DB error: %s', e) await query.edit_message_text('⚠️ Error saving. Please try again.') @@ -433,15 +382,10 @@ def schedule_reminders(job_queue, user_id, opp_id, deadline, priority, title, de async def check_missed(context: ContextTypes.DEFAULT_TYPE) -> None: """Fires once daily; notifies each overdue opportunity ONCE only.""" now = datetime.now() - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute( - 'SELECT user_id, opp_id, title, description, opp_type, link, deadline ' - 'FROM opportunities ' - 'WHERE deadline < ? 
AND archived = 0 AND done = 0 AND missed_notified = 0', - (now.isoformat(),) - ) - for uid, opp_id, title, desc, opp_type, link, dl_str in c.fetchall(): + missed = db.get_missed_opportunities(now.isoformat()) + + for row in missed: + uid, opp_id, title, desc, opp_type, link, dl_str = row try: dl = datetime.fromisoformat(str(dl_str)) desc_s = str(desc) if desc else '' @@ -461,11 +405,9 @@ async def check_missed(context: ContextTypes.DEFAULT_TYPE) -> None: InlineKeyboardButton('✅ Mark as Done', callback_data=f'done_{opp_id}') ]]) await context.bot.send_message(chat_id=uid, text=msg, reply_markup=keyboard, parse_mode='Markdown') - conn.execute('UPDATE opportunities SET missed_notified = 1 WHERE opp_id = ?', (opp_id,)) - conn.commit() + db.mark_missed_notified(opp_id) except Exception as exc: logger.error('Missed-notify failed for %s: %s', opp_id, exc) - conn.close() async def mark_done_callback(update: Update, context: ContextTypes.DEFAULT_TYPE) -> int: query = update.callback_query @@ -473,12 +415,7 @@ async def mark_done_callback(update: Update, context: ContextTypes.DEFAULT_TYPE) if query.data.startswith('done_'): opp_id = query.data.split('_')[1] user_id = query.from_user.id - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute('UPDATE opportunities SET done=1, archived=1 WHERE opp_id = ? AND user_id = ?', (opp_id, user_id)) - updated = c.rowcount - conn.commit() - conn.close() + updated = db.mark_done(opp_id, user_id) if updated > 0: for job in context.job_queue.jobs(): if job.name and opp_id in job.name: @@ -490,11 +427,7 @@ async def mark_done_callback(update: Update, context: ContextTypes.DEFAULT_TYPE) async def list_opps(update: Update, context: ContextTypes.DEFAULT_TYPE): user_id = update.message.from_user.id - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute('SELECT opp_id, title, opp_type, deadline, priority, description FROM opportunities WHERE user_id = ? 
AND archived = 0 AND done = 0 ORDER BY deadline', (user_id,)) - opps = c.fetchall() - conn.close() + opps = db.get_active_opportunities(user_id) if not opps: await update.message.reply_text("No active opportunities.") return @@ -513,12 +446,7 @@ async def delete(update: Update, context: ContextTypes.DEFAULT_TYPE): return opp_id = context.args[0] user_id = update.message.from_user.id - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute('DELETE FROM opportunities WHERE opp_id = ? AND user_id = ?', (opp_id, user_id)) - deleted = c.rowcount - conn.commit() - conn.close() + deleted = db.delete_opportunity(opp_id, user_id) if deleted > 0: for job in context.job_queue.jobs(): if job.name and opp_id in job.name: @@ -533,12 +461,7 @@ async def archive(update: Update, context: ContextTypes.DEFAULT_TYPE): return opp_id = context.args[0] user_id = update.message.from_user.id - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute('UPDATE opportunities SET archived=1 WHERE opp_id = ? AND user_id = ?', (opp_id, user_id)) - updated = c.rowcount - conn.commit() - conn.close() + updated = db.archive_opportunity(opp_id, user_id) if updated > 0: for job in context.job_queue.jobs(): if job.name and opp_id in job.name: @@ -553,12 +476,7 @@ async def done(update: Update, context: ContextTypes.DEFAULT_TYPE): return opp_id = context.args[0] user_id = update.message.from_user.id - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute('UPDATE opportunities SET done=1, archived=1 WHERE opp_id = ? 
AND user_id = ?', (opp_id, user_id)) - updated = c.rowcount - conn.commit() - conn.close() + updated = db.mark_done(opp_id, user_id) if updated > 0: for job in context.job_queue.jobs(): if job.name and opp_id in job.name: @@ -571,15 +489,7 @@ async def summary(update: Update, context: ContextTypes.DEFAULT_TYPE): user_id = update.message.from_user.id now = datetime.now() week_end = now + timedelta(days=7) - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute( - 'SELECT COUNT(*), opp_type FROM opportunities ' - 'WHERE user_id = ? AND deadline >= ? AND deadline <= ? AND archived=0 AND done=0 GROUP BY opp_type', - (user_id, now.isoformat(), week_end.isoformat()) - ) - sums = c.fetchall() - conn.close() + sums = db.get_weekly_summary(user_id, now.isoformat(), week_end.isoformat()) if not sums: await update.message.reply_text("No upcoming this week.") return @@ -591,41 +501,10 @@ async def summary(update: Update, context: ContextTypes.DEFAULT_TYPE): async def error_handler(update: object, context: ContextTypes.DEFAULT_TYPE): logger.warning('Update caused error: %s', context.error) -# --- Reschedule reminders on startup --- -def reschedule_all_reminders(job_queue: JobQueue): - """Re-registers all pending reminders after a bot restart.""" - conn = sqlite3.connect(DB_FILE) - c = conn.cursor() - c.execute( - 'SELECT user_id, opp_id, title, deadline, priority, description, opp_type, link ' - 'FROM opportunities WHERE archived = 0 AND done = 0' - ) - rows = c.fetchall() - conn.close() - now = datetime.now() - for user_id, opp_id, title, dl_str, priority, desc, opp_type, link in rows: - try: - deadline = datetime.fromisoformat(dl_str) - if deadline > now: - schedule_reminders( - job_queue, user_id, opp_id, deadline, - priority or '', title or '', desc or '', opp_type or 'Other', link or '' - ) - except Exception as exc: - logger.error('Startup reschedule failed for %s: %s', opp_id, exc) # --- Main --- -def main(): - application = 
Application.builder().token(BOT_TOKEN).job_queue(JobQueue()).build() - reschedule_all_reminders(application.job_queue) - if 'missed_job' not in application.bot_data: - application.job_queue.run_repeating( - check_missed, - interval=timedelta(days=1), - first=datetime.now() + timedelta(minutes=2) - ) - application.bot_data['missed_job'] = True - +def build_application(token): + application = Application.builder().token(token).build() conv_handler = ConversationHandler( entry_points=[ MessageHandler(filters.UpdateType.MESSAGE & ~filters.COMMAND, handle_forward) @@ -654,7 +533,13 @@ def main(): application.add_error_handler(error_handler) logger.info('OppTick started.') - application.run_polling(allowed_updates=Update.ALL_TYPES) + # application.run_polling(allowed_updates=Update.ALL_TYPES) + return application if __name__ == '__main__': - main() + application = build_application(BOT_TOKEN) + application.run_polling(allowed_updates=Update.ALL_TYPES) + + + + diff --git a/config.py b/config.py new file mode 100644 index 0000000..bc9dba6 --- /dev/null +++ b/config.py @@ -0,0 +1,29 @@ +import os +from dotenv import load_dotenv +import logging + +from db import Database + +# Load environment variables once +load_dotenv() + +# Configuration Variables +BOT_TOKEN = os.getenv("BOT_TOKEN") +DB_HOST = os.getenv("DB_HOST", "localhost") +DB_NAME = os.getenv("DB_NAME") +DB_USER = os.getenv("DB_USER") +DB_PASS = os.getenv("DB_PASS") +DB_PORT = os.getenv("DB_PORT", "5432") + +# Validation +if not BOT_TOKEN: + raise ValueError("BOT_TOKEN is missing! 
Please set it in .env") + +# Logging Configuration +logging.basicConfig( + format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', + level=logging.INFO +) +logger = logging.getLogger("OppTick") + +db = Database(DB_HOST,DB_NAME,DB_USER,DB_PASS,DB_PORT) diff --git a/db.py b/db.py new file mode 100644 index 0000000..c689e6f --- /dev/null +++ b/db.py @@ -0,0 +1,105 @@ +import os +import psycopg2 + + +class Database: + _initialized = False + + def __init__(self, host, database, user, password, port): + self.conn = psycopg2.connect( + host=host, + database=database, + user=user, + password=password, + port=port + ) + self.conn.autocommit = True + + self.init_db() # TODO: find better way to not re init db on every call + + + def init_db(self): + """Create table and ensure missing columns exist""" + with self.conn.cursor() as c: + c.execute(''' + CREATE TABLE IF NOT EXISTS opportunities ( + opp_id TEXT PRIMARY KEY, + user_id BIGINT, + title TEXT, + opp_type TEXT, + deadline TEXT, + priority TEXT, + description TEXT, + message_text TEXT, + archived INTEGER DEFAULT 0, + done INTEGER DEFAULT 0 + ) + ''') + + c.execute("ALTER TABLE opportunities ADD COLUMN IF NOT EXISTS link TEXT") + c.execute("ALTER TABLE opportunities ADD COLUMN IF NOT EXISTS missed_notified INTEGER DEFAULT 0") + + # ---------------- CRUD Methods ---------------- # + + def add_opportunity(self, opp_id, user_id, title, opp_type, deadline, priority, desc, message_text, link=None): + with self.conn.cursor() as c: + c.execute(''' + INSERT INTO opportunities + (opp_id, user_id, title, opp_type, deadline, priority, description, message_text, link) + VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s) + ''', (opp_id, user_id, title, opp_type, deadline, priority, desc, message_text, link)) + + def get_missed_opportunities(self, now_iso): + with self.conn.cursor() as c: + c.execute(''' + SELECT user_id, opp_id, title, description, opp_type, link, deadline + FROM opportunities + WHERE deadline < %s AND archived = 0 AND 
done = 0 AND missed_notified = 0 + ''', (now_iso,)) + return c.fetchall() + + def mark_missed_notified(self, opp_id): + with self.conn.cursor() as c: + c.execute('UPDATE opportunities SET missed_notified = 1 WHERE opp_id = %s', (opp_id,)) + + def mark_done(self, opp_id, user_id): + with self.conn.cursor() as c: + c.execute('UPDATE opportunities SET done=1, archived=1 WHERE opp_id = %s AND user_id = %s', (opp_id, user_id)) + return c.rowcount + + def get_active_opportunities(self, user_id): + with self.conn.cursor() as c: + c.execute(''' + SELECT opp_id, title, opp_type, deadline, priority, description + FROM opportunities + WHERE user_id = %s AND archived = 0 AND done = 0 ORDER BY deadline + ''', (user_id,)) + return c.fetchall() + + def delete_opportunity(self, opp_id, user_id): + with self.conn.cursor() as c: + c.execute('DELETE FROM opportunities WHERE opp_id = %s AND user_id = %s', (opp_id, user_id)) + return c.rowcount + + def archive_opportunity(self, opp_id, user_id): + with self.conn.cursor() as c: + c.execute('UPDATE opportunities SET archived=1 WHERE opp_id = %s AND user_id = %s', (opp_id, user_id)) + return c.rowcount + + def get_weekly_summary(self, user_id, now_iso, week_end_iso): + with self.conn.cursor() as c: + c.execute(''' + SELECT COUNT(*) as count, opp_type FROM opportunities + WHERE user_id = %s AND deadline >= %s AND deadline <= %s AND archived=0 AND done=0 + GROUP BY opp_type + ''', (user_id, now_iso, week_end_iso)) + return c.fetchall() + + def get_all_active_reminders(self): + with self.conn.cursor() as c: + c.execute(''' + SELECT user_id, opp_id, title, deadline, priority, description, opp_type, link + FROM opportunities + WHERE archived = 0 AND done = 0 + ''') + return c.fetchall() \ No newline at end of file diff --git a/requirements.txt b/requirements.txt index 95eff61..c021426 100644 --- a/requirements.txt +++ b/requirements.txt @@ -13,4 +13,6 @@ python-telegram-bot[job-queue]>=22.0 python-dateutil>=2.8 python-dotenv>=1.0 pillow>=10.0 
-pytesseract>=0.3 \ No newline at end of file +pytesseract>=0.3 +psycopg2-binary +flask \ No newline at end of file diff --git a/vercel.json b/vercel.json new file mode 100644 index 0000000..ab68c9b --- /dev/null +++ b/vercel.json @@ -0,0 +1,21 @@ +{ + "version": 2, + "builds": [ + { + "src": "web_app.py", + "use": "@vercel/python" + } + ], + "routes": [ + { + "src": "/(.*)", + "dest": "web_app.py" + } + ], + "crons": [ + { + "path": "/cron", + "schedule": "30 20 * * *" + } + ] +} \ No newline at end of file diff --git a/web_app.py b/web_app.py new file mode 100644 index 0000000..3b70f7f --- /dev/null +++ b/web_app.py @@ -0,0 +1,94 @@ +import asyncio +import os +from flask import Flask, request, jsonify +from datetime import datetime +from telegram import Update, Bot +from bot import build_application +from config import BOT_TOKEN,logger, db + +app = Flask(__name__) + +# Initialize Telegram App +if not BOT_TOKEN: + raise ValueError("BOT_TOKEN is missing!") +telegram_app = build_application(BOT_TOKEN) + +# Event Loop Management +try: + loop = asyncio.get_running_loop() +except RuntimeError: + loop = asyncio.new_event_loop() + asyncio.set_event_loop(loop) + +async def vercel_cron_reminders(): + """Checks upcoming deadlines and sends reminders.""" + bot = Bot(token=BOT_TOKEN) + now = datetime.now() + try: + rows = db.get_all_active_reminders() + except Exception as e: + logger.error(f"Failed to fetch active reminders: {e}") + return + + for user_id, opp_id, title, dl_str, priority, desc, opp_type, link in rows: + try: + deadline = datetime.fromisoformat(dl_str) + days_left = (deadline - now).days + + # Reminder Logic + remind_days = [14, 7, 3, 2, 1, 0] if 'High' in priority else [7, 3, 1, 0] + + if days_left in remind_days: + short_desc = (desc[:120] + '…') if len(desc) > 120 else desc + + header_msg = f"⏰ *{days_left} day(s) left!*" if days_left > 0 else "🚨 *TODAY is the deadline!*" + link_msg = f"\n🔗 {link}" if link else "" + + msg = ( + f"{header_msg}\n\n" + f"📌 
*ID:* `{opp_id}`\n" + f"🏷️ *Title:* {title}\n" + f"🗂️ *Type:* {opp_type}\n" + f"📄 *Description:* {short_desc}" + f"{link_msg}" + ) + await bot.send_message(chat_id=user_id, text=msg, parse_mode='Markdown') + except Exception as e: + logger.error(f"Failed to send reminder for {opp_id}: {e}") + +async def _safe_process_update(data): + """Wait for bot initialization then process raw update data.""" + if not telegram_app._initialized: + await telegram_app.initialize() + + update = Update.de_json(data, telegram_app.bot) + await telegram_app.process_update(update) + +@app.route("/webhook/", methods=["POST"], strict_slashes=False) +def bot_endpoint(): + """Receives updates from Telegram.""" + if request.method == "POST": + loop.run_until_complete(_safe_process_update(request.json)) + return "OK", 200 + return "OK", 200 + +@app.get("/cron") +def cron_trigger(): #TODO: add token verification to now allow anyone to make this call + """Triggered by external cron service (like Vercel Cron).""" + try: + loop.run_until_complete(vercel_cron_reminders()) + return jsonify({"status": "reminders sent"}), 200 + except Exception as e: + logger.error(f"Cron job failed: {e}") + return jsonify({"status": "failed", "error": str(e)}), 500 + +@app.route("/webhook/health") +def health(): + return jsonify({"status": "ok"}), 200 + +if __name__ == "__main__": + port = int(os.environ.get("PORT", 8000)) + app.run(host="0.0.0.0", port=port) + + +