now sync to central server
parent
0122caa714
commit
3952101712
BIN
link_tracker.db
BIN
link_tracker.db
Binary file not shown.
213
main.py
213
main.py
|
|
@ -7,9 +7,28 @@ from datetime import datetime
|
|||
import sqlite3
|
||||
import json
|
||||
from pathlib import Path
|
||||
import requests
|
||||
import os
|
||||
|
||||
app = FastAPI(title="Link Tracker API", version="1.0.0")

# Location of the JSON file holding sync settings (relative to the working directory).
CONFIG_PATH = "sync_config.json"

# Settings used when no config file exists yet; syncing is disabled by default
# until the user points at a central server via POST /sync/config.
DEFAULT_CONFIG = {
    "central_server_url": "",
    "username": "",
    "password": "",
    "enabled": False
}
|
||||
def load_sync_config():
    """Load the sync configuration from CONFIG_PATH.

    Values read from the file are merged over DEFAULT_CONFIG so the
    returned dict always contains every expected key — a config file
    written by an older version (missing newer keys) can no longer cause
    KeyErrors in callers that index config["password"] etc.

    Returns:
        dict: a complete configuration; a copy of DEFAULT_CONFIG when the
        file is absent, unreadable, or not valid JSON.
    """
    if os.path.exists(CONFIG_PATH):
        try:
            with open(CONFIG_PATH, 'r') as f:
                stored = json.load(f)
        except (OSError, json.JSONDecodeError):
            # A corrupt or unreadable file should behave like "not yet
            # configured" rather than crash every endpoint that loads config.
            return DEFAULT_CONFIG.copy()
        return {**DEFAULT_CONFIG, **stored}
    return DEFAULT_CONFIG.copy()
|
||||
|
||||
def save_sync_config(config):
    """Persist *config* to CONFIG_PATH as pretty-printed (indent=2) JSON."""
    serialized = json.dumps(config, indent=2)
    with open(CONFIG_PATH, 'w') as out:
        out.write(serialized)
|
||||
# Enable CORS for your Chrome extension
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
|
|
@ -41,10 +60,17 @@ def init_db():
|
|||
colinkiri BOOLEAN DEFAULT FALSE,
|
||||
indexer BOOLEAN DEFAULT FALSE,
|
||||
t2 BOOLEAN DEFAULT FALSE,
|
||||
synced_at DATETIME NULL, -- NEW: when this was last synced
|
||||
created_at DATETIME DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
|
||||
# Add sync column to existing databases
|
||||
try:
|
||||
cursor.execute("ALTER TABLE pages ADD COLUMN synced_at DATETIME NULL")
|
||||
except sqlite3.OperationalError:
|
||||
pass # Column already exists
|
||||
|
||||
# For existing databases, add the new columns if they don't exist
|
||||
try:
|
||||
cursor.execute("ALTER TABLE pages ADD COLUMN colinkiri BOOLEAN DEFAULT FALSE")
|
||||
|
|
@ -85,6 +111,7 @@ def init_db():
|
|||
|
||||
# Create indexes for better query performance
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_pages_url ON pages (url)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_pages_synced ON pages (synced_at)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_links_domain ON links (domain)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_links_client_domain ON links (client_domain)")
|
||||
cursor.execute("CREATE INDEX IF NOT EXISTS idx_links_page_id ON links (page_id)")
|
||||
|
|
@ -92,6 +119,7 @@ def init_db():
|
|||
conn.commit()
|
||||
conn.close()
|
||||
|
||||
|
||||
# Pydantic models for API requests
|
||||
class LinkData(BaseModel):
|
||||
href: str
|
||||
|
|
@ -128,6 +156,12 @@ class LinkSummary(BaseModel):
|
|||
is_client_link: bool
|
||||
client_name: Optional[str] = None
|
||||
|
||||
class SyncConfig(BaseModel):
    """Request body for POST /sync/config: the full sync configuration."""
    # Base URL of the central server pages are pushed to (trailing "/" is
    # stripped before "/sync" is appended when syncing).
    central_server_url: str
    # HTTP basic-auth credentials sent with each sync request.
    username: str
    password: str
    # Master switch: when False, /sync/to-central refuses to run.
    enabled: bool
|
||||
|
||||
@app.on_event("startup")
|
||||
async def startup_event():
|
||||
"""Initialize database on startup"""
|
||||
|
|
@ -158,12 +192,12 @@ async def capture_page(data: PageCaptureRequest):
|
|||
client_urls = []
|
||||
|
||||
if existing_page:
|
||||
# Update existing page
|
||||
# Update existing page (reset synced_at since data changed)
|
||||
page_id = existing_page[0]
|
||||
cursor.execute("""
|
||||
UPDATE pages
|
||||
SET title = ?, keywords = ?, timestamp = ?,
|
||||
detected_clients = ?, total_links = ?, linked_to = ?
|
||||
detected_clients = ?, total_links = ?, linked_to = ?, synced_at = NULL
|
||||
WHERE id = ?
|
||||
""", (
|
||||
data.title,
|
||||
|
|
@ -474,6 +508,10 @@ async def get_stats():
|
|||
cursor.execute("SELECT COUNT(*) FROM links WHERE is_client_link = 1")
|
||||
client_links = cursor.fetchone()[0]
|
||||
|
||||
# Sync stats
|
||||
cursor.execute("SELECT COUNT(*) FROM pages WHERE synced_at IS NOT NULL")
|
||||
synced_pages = cursor.fetchone()[0]
|
||||
|
||||
# Links by client
|
||||
cursor.execute("""
|
||||
SELECT client_name, COUNT(*) as link_count
|
||||
|
|
@ -491,11 +529,182 @@ async def get_stats():
|
|||
"total_links": total_links,
|
||||
"client_links": client_links,
|
||||
"other_links": total_links - client_links,
|
||||
"synced_pages": synced_pages,
|
||||
"unsynced_pages": total_pages - synced_pages,
|
||||
"client_breakdown": client_stats
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(status_code=500, detail=f"Error fetching stats: {str(e)}")
|
||||
|
||||
@app.get("/sync/config")
async def get_sync_config():
    """Get current sync configuration (without password).

    Returns the stored configuration dict with a non-empty password
    replaced by "***" so the real credential never leaves this API.
    """
    config = load_sync_config()
    # Don't return password in response; .get() keeps this from raising
    # KeyError when an older/partial config file lacks the key.
    config_safe = config.copy()
    config_safe["password"] = "***" if config.get("password") else ""
    return config_safe
|
||||
|
||||
@app.post("/sync/config")
async def update_sync_config(config: SyncConfig):
    """Replace the stored sync configuration with the submitted one."""
    try:
        save_sync_config(config.dict())
    except Exception as exc:
        raise HTTPException(status_code=500, detail=f"Error updating config: {str(exc)}")
    # Only reached when the write succeeded.
    return {"success": True, "message": "Sync configuration updated"}
|
||||
|
||||
@app.post("/sync/to-central")
async def sync_to_central():
    """Sync unsynced data to central server.

    Collects every page with synced_at IS NULL (plus its links), POSTs the
    batch to "<central_server_url>/sync" with HTTP basic auth, and on a
    200 response stamps those rows with synced_at = CURRENT_TIMESTAMP.

    Returns a summary dict on success; network failures are reported as
    {"success": False, ...} rather than raised (best-effort behaviour).

    Raises:
        HTTPException: 400 when sync is disabled/unconfigured, the central
        server's status code when it rejects the batch, 500 otherwise.

    Fixes vs. the previous revision: deliberate HTTPExceptions are no
    longer swallowed by the broad `except Exception` (which rewrapped the
    400 and the central-server error as 500s), and the sqlite connection
    is closed on every path (it leaked on the early "No pages to sync"
    return and on any error).
    """
    config = load_sync_config()
    print(f"Config loaded: {config['central_server_url']}")
    # Checked before any DB work; raised outside the handlers below so it
    # reaches the client as a 400.
    if not config["enabled"] or not config["central_server_url"]:
        raise HTTPException(status_code=400, detail="Sync not configured or disabled")

    conn = sqlite3.connect(DB_PATH)
    try:
        cursor = conn.cursor()

        # Get unsynced pages, oldest first.
        cursor.execute("""
            SELECT id, url, title, keywords, timestamp, detected_clients, total_links
            FROM pages
            WHERE synced_at IS NULL
            ORDER BY created_at ASC
        """)
        unsynced_pages = cursor.fetchall()

        if not unsynced_pages:
            return {"success": True, "message": "No pages to sync", "synced_count": 0}

        # Build the payload: one entry per page, each carrying its links.
        pages_data = []
        for page_row in unsynced_pages:
            page_id, url, title, keywords, timestamp, detected_clients, total_links = page_row

            # Get links for this page.
            cursor.execute("""
                SELECT href, anchor_text, title_attr
                FROM links
                WHERE page_id = ?
            """, (page_id,))
            links_data = [
                {"href": row[0], "text": row[1] or "", "title": row[2] or ""}
                for row in cursor.fetchall()
            ]

            # keywords / detected_clients are stored as JSON text columns.
            keywords_list = json.loads(keywords) if keywords else []
            clients_list = json.loads(detected_clients) if detected_clients else []
            clients = [{"domain": c["domain"], "name": c["name"]} for c in clients_list]

            pages_data.append({
                "url": url,
                "title": title,
                "timestamp": timestamp,
                "keywords": keywords_list,
                "detectedClients": clients,
                "totalLinks": total_links,
                "links": links_data
            })

        # Send to central server with HTTP basic auth.
        sync_data = {"pages": pages_data}
        print(f"Sending {len(pages_data)} pages to central server")
        response = requests.post(
            f"{config['central_server_url'].rstrip('/')}/sync",
            json=sync_data,
            auth=(config["username"], config["password"]),
            timeout=30
        )
        print(f"Response status: {response.status_code}")
        print(f"Response text: {response.text}")

        if response.status_code != 200:
            raise HTTPException(
                status_code=response.status_code,
                detail=f"Central server error: {response.text}"
            )

        result = response.json()

        # Mark pages as synced only after the server accepted the batch.
        page_ids = [row[0] for row in unsynced_pages]
        placeholders = ','.join(['?'] * len(page_ids))
        cursor.execute(f"""
            UPDATE pages
            SET synced_at = CURRENT_TIMESTAMP
            WHERE id IN ({placeholders})
        """, page_ids)
        conn.commit()

        return {
            "success": True,
            "message": f"Successfully synced {len(pages_data)} pages",
            "synced_count": result.get("synced_pages", len(pages_data)),
            "skipped_count": result.get("skipped_pages", 0),
            "server_response": result.get("message", "")
        }

    except HTTPException:
        # Let deliberate HTTP errors pass through untouched instead of
        # being rewrapped as 500s by the handler below.
        raise
    except requests.exceptions.RequestException as e:
        print(f"Network error details: {e}")
        return {"success": False, "message": f"Network error: {str(e)}"}
    except Exception as e:
        print(f"Full error: {e}")
        import traceback
        traceback.print_exc()
        raise HTTPException(status_code=500, detail=f"Sync error: {str(e)}")
    finally:
        # Close on every path (early return, network error, server error).
        conn.close()
|
||||
|
||||
@app.get("/sync/status")
async def get_sync_status():
    """Get sync status information.

    Reports counts of synced vs. unsynced pages, the timestamp of the most
    recent sync, the creation time of the oldest still-unsynced page, and
    whether sync is enabled/configured.

    Raises:
        HTTPException: 500 on any database or config failure.
    """
    try:
        conn = sqlite3.connect(DB_PATH)
        try:
            cursor = conn.cursor()

            # Count synced vs unsynced pages.
            cursor.execute("SELECT COUNT(*) FROM pages WHERE synced_at IS NOT NULL")
            synced_count = cursor.fetchone()[0]

            cursor.execute("SELECT COUNT(*) FROM pages WHERE synced_at IS NULL")
            unsynced_count = cursor.fetchone()[0]

            # Most recent sync time (None when nothing has synced yet).
            cursor.execute("SELECT MAX(synced_at) FROM pages WHERE synced_at IS NOT NULL")
            last_sync = cursor.fetchone()[0]

            # Oldest page still waiting to be synced (None when fully synced).
            cursor.execute("SELECT MIN(created_at) FROM pages WHERE synced_at IS NULL")
            oldest_unsynced = cursor.fetchone()[0]
        finally:
            # Close even when a query raises (the previous revision leaked
            # the connection on errors).
            conn.close()

        config = load_sync_config()

        return {
            # .get() guards against an older/partial config file missing keys.
            "sync_enabled": config.get("enabled", False),
            "server_configured": bool(config.get("central_server_url")),
            "synced_pages": synced_count,
            "unsynced_pages": unsynced_count,
            "last_sync": last_sync,
            "oldest_unsynced": oldest_unsynced
        }

    except Exception as e:
        raise HTTPException(status_code=500, detail=f"Error getting sync status: {str(e)}")
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
|
|
|
|||
|
|
@ -0,0 +1,6 @@
|
|||
{
|
||||
"central_server_url": "http://192.168.174.35:8001",
|
||||
"username": "bryan",
|
||||
"password": "admin123",
|
||||
"enabled": true
|
||||
}
|
||||
Loading…
Reference in New Issue