What Does "Lala" Mean in Slang?

Murat

New member
import json
import logging
import os
import random
import time

import requests
from bs4 import BeautifulSoup

# === Settings ===
SITE_FILE = r"C:\Users\Administrator\Desktop\api\site.txt"
API_FILE = r"C:\Users\Administrator\Desktop\api\api.txt"
YANIT_FILE = r"C:\Users\Administrator\Desktop\api\yanit.txt"
TAMAM_DIR = r"C:\Users\Administrator\Desktop\api\tamam"
PROGRESS_FILE = r"C:\Users\Administrator\Desktop\api\progress.json"

REPLY_RANGE = (1, 3)  # min/max number of replies to post per thread
HEADERS = {"User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64)"}
ID_ARTIS = 20  # thread-ID step used while scanning for the next live thread
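
# The loaders in main() imply the following input formats (hypothetical
# sample values, inferred from the parsing code rather than stated in the post):
#   site.txt:  one forum domain per line, e.g. forum.example.com
#   api.txt:   one "url | user | key" triple per line, split on the literal " | ",
#              e.g. https://forum.example.com/api/posts | botuser | ABC123
#   yanit.txt: one reply template per line; the "[nick]" placeholder is
#              replaced with the thread author's name before posting.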

os.makedirs(TAMAM_DIR, exist_ok=True)

logging.basicConfig(
    format="%(asctime)s [%(levelname)s] %(message)s",
    level=logging.INFO,
    datefmt="%H:%M:%S",
)
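
# With this config, log lines look like:
#   12:34:56 [INFO] Reply sent → botuser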

def normalize_url(url):
    # Drop the scheme, "www." prefix, and surrounding slashes so URLs compare equal.
    return url.replace("https://", "").replace("http://", "").replace("www.", "").strip("/")

def temizle_url(s):
    # Thin alias ("temizle" = "clean"); kept for readability at the call site.
    return normalize_url(s)
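
# Illustrative behavior of the normalizer:
#   normalize_url("https://www.example.com/forum/")  ->  "example.com/forum"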

def load_progress():
    if os.path.exists(PROGRESS_FILE):
        with open(PROGRESS_FILE, "r", encoding="utf-8") as f:
            return json.load(f)
    return {}

def save_progress(progress):
    with open(PROGRESS_FILE, "w", encoding="utf-8") as f:
        json.dump(progress, f, ensure_ascii=False, indent=2)
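
# progress.json maps each normalized site to the next thread ID to try,
# e.g. (illustrative contents):
#   {
#     "forum.example.com": 16240
#   }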

def fetch_thread_info(base_url, thread_id):
    url = f"https://{base_url}/threads/.{thread_id}"
    try:
        r = requests.get(url, headers=HEADERS, timeout=10)
        if r.status_code != 200 or "template-error" in r.text:
            return None
        soup = BeautifulSoup(r.text, "html.parser")
        title_el = soup.select_one("div.p-title h1.p-title-value")
        author_el = soup.select_one("[data-author]")
        post_el = soup.select_one("[data-content^='post-']")
        member_el = soup.select_one("a.username[data-user-id]")
        if not title_el or not author_el or not post_el or not member_el:
            return None
        title = title_el.text.strip()
        author = author_el["data-author"]
        post_id = post_el["data-content"].split("-")[1]
        member_id = member_el["data-user-id"]
        meta_descriptions = []
        for tag in soup.find_all("meta"):
            if tag.get("name") == "description" or tag.get("property") in ("og:description", "twitter:description"):
                content = tag.get("content", "").strip()
                if content and content not in meta_descriptions:
                    meta_descriptions.append(content)
        return title, author, post_id, member_id, meta_descriptions
    except Exception as e:
        logging.warning(f"{base_url} → Error: {e}")
        return None
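
# The CSS selectors above target XenForo 2.x thread markup, and the
# "/threads/.{id}" URL relies on XenForo resolving a thread from the numeric
# suffix alone. Illustrative return value for a hypothetical thread:
#   ("Some thread title", "SomeUser", "12345", "678", ["Meta description ..."])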

def select_random_api(api_list, exclude_user, site, used_users):
    norm_site = normalize_url(site)
    available = [api for api in api_list
                 if len(api) == 3
                 and norm_site in normalize_url(api[0])
                 and api[1] != exclude_user
                 and api[1] not in used_users]
    return random.choice(available) if available else None
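
# Sketch of the selection logic with hypothetical data:
#   apis = [["https://forum.example.com/api/posts", "bot1", "KEY1"],
#           ["https://forum.example.com/api/posts", "bot2", "KEY2"]]
#   select_random_api(apis, exclude_user="SomeUser",
#                     site="forum.example.com", used_users={"bot1"})
#   -> the "bot2" entry (bot1 is already used; the thread author is excluded)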

def send_reply(api_url, api_user, api_key, thread_id, reply_text):
    headers = {
        "Content-Type": "application/x-www-form-urlencoded",
        "XF-Api-User": api_user,
        "XF-Api-Key": api_key,
    }
    data = {
        "thread_id": thread_id,
        "message": reply_text,
    }
    try:
        response = requests.post(api_url, headers=headers, data=data, timeout=30)
        if response.status_code == 200:
            # Archive each successfully sent reply under TAMAM_DIR.
            yanit_path = os.path.join(TAMAM_DIR, f"yanit_{thread_id}_{api_user}.txt")
            with open(yanit_path, "w", encoding="utf-8") as f:
                f.write(reply_text)
            logging.info(f"Reply sent → {api_user}")
        else:
            logging.warning(f"{api_user} → API response {response.status_code} - {response.text}")
    except Exception as e:
        logging.error(f"{api_user} → send error: {e}")
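
# send_reply assumes api_url points at XenForo's REST endpoint for creating
# posts (POST /api/posts takes thread_id and message); XF-Api-User and
# XF-Api-Key are XenForo's standard API headers. A minimal standalone
# equivalent with a hypothetical URL and key:
#   requests.post("https://forum.example.com/api/posts",
#                 headers={"XF-Api-User": "botuser", "XF-Api-Key": "SECRET"},
#                 data={"thread_id": 16000, "message": "Hello"},
#                 timeout=30)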

def main():
    sites = [temizle_url(l.strip()) for l in open(SITE_FILE, encoding="utf-8") if l.strip()]
    apis = [line.strip().split(" | ") for line in open(API_FILE, encoding="utf-8") if " | " in line]
    replies = [line.strip() for line in open(YANIT_FILE, encoding="utf-8") if line.strip()]
    progress = load_progress()
    logging.info("Bot running (logging + ID stepping)")

    while True:
        for site in sites:
            site_key = normalize_url(site)
            current_id = progress.get(site_key, 16000)
            used_users = set()
            found_topic = False
            logging.info(f"[{site}] Search starting at: {current_id}")

            for _ in range(50):
                info = fetch_thread_info(site, current_id)
                if not info:
                    logging.debug(f"{site} → no thread at {current_id}. Stepping by {ID_ARTIS}.")
                    current_id += ID_ARTIS
                    continue

                title, author, post_id, member_id, meta_contents = info
                logging.info(f'[{site}#{current_id}] "{title}" by {author}')
                found_topic = True
                count = random.randint(*REPLY_RANGE)

                for _ in range(count):
                    selected = select_random_api(apis, author, site, used_users)
                    if not selected:
                        logging.warning(f"No suitable API left for {site}.")
                        break
                    api_url, user, key = selected
                    yanit = random.choice(replies)
                    meta_parca = meta_contents[0] if meta_contents else title
                    # Quote the meta description as the thread author, then
                    # append the reply template with "[nick]" filled in.
                    quote = (
                        f'[QUOTE="{author}, post: {post_id}, member: {member_id}"]\n'
                        f'{meta_parca}\n'
                        f'[/QUOTE]\n\n{yanit.replace("[nick]", author)}'
                    )
                    # The posts endpoint expects the thread ID, not the post ID.
                    send_reply(api_url, user, key, current_id, quote)
                    used_users.add(user)
                    time.sleep(random.uniform(2, 5))

                current_id += 1
                break  # thread found, move on to the next site

            if not found_topic:
                logging.info(f"{site} → No new thread found. Stepping by {ID_ARTIS}.")
                current_id += ID_ARTIS

            progress[site_key] = current_id
            save_progress(progress)
            logging.info(f"{site} done, waiting 95 seconds.")
            time.sleep(95)

        logging.info("All sites done. Taking a 200-second break.")
        time.sleep(200)

if __name__ == "__main__":
    try:
        main()
    except KeyboardInterrupt:
        logging.info("Stopped manually.")
    except Exception as e:
        logging.critical(f"Unexpected error: {e}")
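
# Example of a fully assembled reply as posted (hypothetical values; the
# [QUOTE=...] wrapper is the reconstructed XenForo BBCode from main()):
#   [QUOTE="SomeUser, post: 12345, member: 678"]
#   Meta description of the thread...
#   [/QUOTE]
#
#   Nice topic, SomeUser!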