# -*- coding: utf-8 -*-
import os
import io
import re

INPUT_FILE = "streams.txt"   # source playlist dump to read
OUTPUT_FILE = "analog.m3u"   # generated M3U playlist to write

# Tokens that mark the end of a channel's base name (quality/codec suffixes).
QUALITY_TOKENS = {"HD", "FHD", "SD", "UHD", "4K", "8K", "HEVC", "H.265", "H265"}


def compute_group_from_name(name, stop_tokens=None):
    """Derive a group title from a raw channel name.

    Example: 'EN| CHRISTMAS 1 4K' -> 'EN CHRISTMAS'.

    An optional 'LANG|' prefix is preserved; the remainder is truncated
    at the first token that is purely numeric or is a quality marker.

    Parameters:
        name: raw channel name, optionally prefixed with 'LANG|'.
        stop_tokens: set of upper-case tokens that terminate the base
            name; defaults to the module-level QUALITY_TOKENS.

    Returns:
        The derived group title string.
    """
    if stop_tokens is None:
        stop_tokens = QUALITY_TOKENS

    name = name.strip()

    lang = ""
    rest = name

    if "|" in name:
        parts = name.split("|", 1)
        lang = parts[0].strip()
        rest = parts[1].strip()

    # Keep leading tokens until a number or quality/codec marker appears.
    tokens = rest.split()
    kept = []
    for t in tokens:
        if t.isdigit() or t.upper() in stop_tokens:
            break
        kept.append(t)

    # If the name *starts* with a number/marker, fall back to the full rest.
    base = " ".join(kept) if kept else rest

    return (lang + " " + base).strip() if lang else base


def build_url_map(lines):
    """Build a mapping ``url -> (channel name, category)`` from M3U lines.

    Expects the usual structure of streams.txt:
        #EXTM3U
        #EXTINF:-1 tvg-name="EN| CHRISTMAS 1 4K" ... ,EN| CHRISTMAS 1 4K
        http://...

    The channel name comes from the ``tvg-name`` attribute when present,
    otherwise from the display name after the attribute list; the
    category is derived via compute_group_from_name().

    Parameters:
        lines: list of raw playlist lines (no trailing newlines).

    Returns:
        dict mapping stream URL to a (name, category) tuple.
    """
    url_map = {}

    # Quoted attribute values may themselves contain commas
    # (e.g. tvg-name="A, B"); only a comma OUTSIDE quotes separates the
    # attribute list from the display name. Durations may be fractional.
    extinf_re = re.compile(r'#EXTINF:\s*-?\d+(?:\.\d+)?\s*((?:[^",]|"[^"]*")*),(.*)')
    name_re = re.compile(r'tvg-name="([^"]*)"')

    i = 0
    n = len(lines)

    # Skip the #EXTM3U header if present.
    if n > 0 and lines[0].strip().upper().startswith("#EXTM3U"):
        i = 1

    while i < n - 1:
        line = lines[i].strip()
        next_line = lines[i + 1].strip()

        if line.startswith("#EXTINF") and \
           next_line.lower().startswith(("http://", "https://")):

            url = next_line

            m = extinf_re.match(line)
            if m:
                display_name = m.group(2).strip()

                # Prefer tvg-name; search the FULL line so the match
                # cannot be lost to attribute-list truncation.
                m_name = name_re.search(line)
                if m_name:
                    ch_name = m_name.group(1).strip()
                else:
                    ch_name = display_name

                if not ch_name:
                    ch_name = url

                category = compute_group_from_name(ch_name)
                url_map[url] = (ch_name, category)

            i += 2
        else:
            i += 1

    return url_map


def parse_line(line):
    """Parse a fallback ``name url`` line into a ``(name, url)`` tuple.

    In this script the function mostly receives bare URL lines; the
    separator handling exists for hand-written "name;url" style input.

    Parameters:
        line: one raw input line.

    Returns:
        (name, url) tuple, or None for blank lines and '#' comments.
    """
    line = line.strip()
    if not line or line.startswith("#"):
        return None

    # A bare URL may legally contain ',' ';' or '|' (query strings,
    # playlist tokens); splitting it on those would truncate the URL.
    if line.lower().startswith(("http://", "https://")) and len(line.split()) == 1:
        return line, line

    for sep in (";", ",", "|", "\t"):
        if sep in line:
            left, right = [x.strip() for x in line.split(sep, 1)]
            name, url = left, right
            # Accept 'url<sep>name' ordering as well.
            if name.lower().startswith(("http://", "https://")):
                name, url = url, name
            return name, url

    parts = line.split()
    if len(parts) == 1:
        url = parts[0]
        name = url
    elif parts[0].lower().startswith(("http://", "https://")):
        # 'url name name...' ordering.
        url = parts[0]
        name = " ".join(parts[1:])
    else:
        # 'name name... url' ordering.
        url = parts[-1]
        name = " ".join(parts[:-1])
    return name.strip(), url.strip()


def main():
    """Read INPUT_FILE, rebuild group metadata and write OUTPUT_FILE."""
    if not os.path.exists(INPUT_FILE):
        print("Eingabedatei '{}' nicht gefunden.".format(INPUT_FILE))
        return

    with io.open(INPUT_FILE, "r", encoding="utf-8", errors="ignore") as src:
        lines = src.read().splitlines()

    # Step 1: URL -> (name, category) extracted from the #EXTINF lines.
    url_map = build_url_map(lines)

    # Step 2: build (name, url, group) entries from all non-comment lines.
    entries = []
    for raw in lines:
        parsed = parse_line(raw)
        if not parsed:
            continue
        name, url = parsed
        if not url.lower().startswith(("http://", "https://")):
            continue

        group = None
        if url in url_map:
            # Metadata from the EXTINF block wins over the parsed name.
            name, group = url_map[url]

        entries.append((name, url, group))

    if not entries:
        print("Keine gültigen Streams gefunden.")
        return

    # Step 3: emit the rebuilt playlist.
    with io.open(OUTPUT_FILE, "w", encoding="utf-8", newline="\n") as dst:
        dst.write(u"#EXTM3U\n")
        for name, url, group in entries:
            if group:
                dst.write(u'#EXTINF:-1 group-title="{0}",{1}\n'.format(group, name))
            else:
                dst.write(u"#EXTINF:-1,{0}\n".format(name))
            dst.write(u"{0}\n".format(url))

    print("Fertige Playlist geschrieben nach '{}' mit {} Einträgen.".format(
        OUTPUT_FILE, len(entries))
    )


if __name__ == "__main__":
    main()
