# xtream2m3u/run.py
import ipaddress
import json
import logging
import os
import re
import socket
import urllib.parse
from functools import lru_cache

import dns.resolver
import requests
from fake_useragent import UserAgent
from flask import Flask, Response, request
# Configure logging; the module-level logger is shared by all handlers below.
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)

app = Flask(__name__)

# Get default proxy URL from environment variable.  Used as the base URL for
# /image-proxy and /stream-proxy rewrites; when unset, get_required_params
# falls back to request.host_url at request time.
DEFAULT_PROXY_URL = os.environ.get('PROXY_URL')
# Set up custom DNS resolver
def setup_custom_dns():
    """Configure a custom DNS resolver using reliable public DNS services.

    Monkey-patches ``socket.getaddrinfo`` process-wide so hostname lookups
    go through Cloudflare/Google/Quad9 resolvers first, falling back to the
    system resolver on any failure.  Literal IP addresses are passed through
    untouched.
    """
    dns_servers = ['1.1.1.1', '1.0.0.1', '8.8.8.8', '8.8.4.4', '9.9.9.9']
    custom_resolver = dns.resolver.Resolver()
    custom_resolver.nameservers = dns_servers
    original_getaddrinfo = socket.getaddrinfo

    # `type` shadows the builtin but mirrors socket.getaddrinfo's signature.
    def new_getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
        if host:
            try:
                try:
                    # Skip DNS resolution for hosts that are already IPs.
                    ipaddress.ip_address(host)
                    logger.debug(f"Host is already an IP address: {host}, skipping DNS resolution")
                except ValueError:
                    # Not an IP address, so use the custom resolver.
                    answers = custom_resolver.resolve(host)
                    resolved = str(answers[0])
                    # Bug fix: log both the original hostname and the result;
                    # previously `host` was overwritten before logging, so the
                    # message only ever showed the resolved IP.
                    logger.debug(f"Custom DNS resolved {host} -> {resolved}")
                    host = resolved
            except Exception as e:
                logger.info(f"Custom DNS resolution failed for {host}: {e}, falling back to system DNS")
        return original_getaddrinfo(host, port, family, type, proto, flags)

    socket.getaddrinfo = new_getaddrinfo
    logger.info("Custom DNS resolver set up")


# Initialize DNS resolver
setup_custom_dns()
# Common request function with caching for API endpoints
@lru_cache(maxsize=128)
def _fetch_api_data_cached(url, timeout):
    """Fetch *url* and return parsed JSON (or raw text for non-JSON bodies).

    Raises requests exceptions on failure so that errors are NOT cached.
    """
    ua = UserAgent()
    headers = {
        'User-Agent': ua.chrome,
        'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8',
        'Accept-Language': 'en-US,en;q=0.5',
        'Connection': 'keep-alive',
    }

    hostname = urllib.parse.urlparse(url).netloc.split(':')[0]
    logger.info(f"Making request to host: {hostname}")

    response = requests.get(url, headers=headers, timeout=timeout)
    response.raise_for_status()

    # Try to parse as JSON; return text if not valid JSON.
    try:
        return json.loads(response.text)
    except json.JSONDecodeError:
        return response.text


def fetch_api_data(url, timeout=10):
    """Make a request to an API endpoint with caching.

    Returns the parsed JSON (or raw text), or a ``(error_dict, status)``
    tuple on failure.

    Bug fix: previously ``@lru_cache`` sat directly on this function, so a
    transient SSL/network failure was cached and replayed for the lifetime
    of the process.  Only successful responses are cached now.
    """
    try:
        return _fetch_api_data_cached(url, timeout)
    except requests.exceptions.SSLError:
        return {'error': 'SSL Error', 'details': 'Failed to verify SSL certificate'}, 503
    except requests.exceptions.RequestException as e:
        logger.error(f"RequestException: {e}")
        return {'error': 'Request Exception', 'details': str(e)}, 503
def stream_request(url, headers=None, timeout=10):
    """Open a streaming GET request so the body is not buffered in memory."""
    default_headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
    }
    return requests.get(url, stream=True, headers=headers or default_headers, timeout=timeout)
def encode_url(url):
    """Percent-encode *url* so it can travel as a single proxy path segment.

    Returns '' for empty/None input.
    """
    if not url:
        return ''
    return urllib.parse.quote(url, safe='')
def generate_streaming_response(response, content_type=None):
    """Wrap a requests streaming response in a Flask streaming ``Response``.

    :param response: a ``requests`` response opened with ``stream=True``.
    :param content_type: override for the Content-Type; defaults to the
        upstream header or ``application/octet-stream``.
    """
    if not content_type:
        content_type = response.headers.get('Content-Type', 'application/octet-stream')

    def generate():
        try:
            bytes_sent = 0
            for chunk in response.iter_content(chunk_size=8192):
                if chunk:
                    bytes_sent += len(chunk)
                    yield chunk
            logger.info(f"Stream completed, sent {bytes_sent} bytes")
        except Exception as e:
            logger.error(f"Streaming error: {str(e)}")
            raise

    headers = {
        'Access-Control-Allow-Origin': '*',
        'Content-Type': content_type,
    }
    # Forward Content-Length only when upstream gave one and is not chunked.
    if 'Content-Length' in response.headers and 'Transfer-Encoding' not in response.headers:
        headers['Content-Length'] = response.headers['Content-Length']
    # Bug fix: do NOT set 'Transfer-Encoding' ourselves.  It is a hop-by-hop
    # header that WSGI applications must not emit (PEP 3333); the server
    # applies chunked encoding automatically when Content-Length is absent.
    return Response(
        generate(),
        mimetype=content_type,
        headers=headers,
        direct_passthrough=True
    )
@app.route('/image-proxy/<path:image_url>')
def proxy_image(image_url):
    """Proxy endpoint for images to avoid CORS issues.

    SECURITY NOTE(review): this fetches any caller-supplied URL (open SSRF
    proxy); consider restricting targets to the configured Xtream host.
    """
    try:
        original_url = urllib.parse.unquote(image_url)
        logger.info(f"Image proxy request for: {original_url}")

        response = requests.get(original_url, stream=True, timeout=10)
        response.raise_for_status()

        content_type = response.headers.get('Content-Type', '')
        if not content_type.startswith('image/'):
            logger.error(f"Invalid content type for image: {content_type}")
            # Bug fix: release the upstream connection before rejecting;
            # the streaming response was previously leaked on this path.
            response.close()
            return Response('Invalid image type', status=415)

        return generate_streaming_response(response, content_type)
    except requests.Timeout:
        return Response('Image fetch timeout', status=504)
    except requests.HTTPError as e:
        # raise_for_status guarantees e.response is set here.
        return Response(f'Failed to fetch image: {str(e)}', status=e.response.status_code)
    except Exception as e:
        logger.error(f"Image proxy error: {str(e)}")
        return Response('Failed to process image', status=500)
@app.route('/stream-proxy/<path:stream_url>')
def proxy_stream(stream_url):
    """Proxy endpoint for streams"""
    try:
        original_url = urllib.parse.unquote(stream_url)
        logger.info(f"Stream proxy request for: {original_url}")

        response = stream_request(original_url)
        response.raise_for_status()

        # Pick a content type: trust upstream, else guess from the extension.
        content_type = response.headers.get('Content-Type')
        if not content_type:
            suffix_types = {
                '.ts': 'video/MP2T',
                '.m3u8': 'application/vnd.apple.mpegurl',
            }
            content_type = next(
                (ctype for suffix, ctype in suffix_types.items()
                 if original_url.endswith(suffix)),
                'application/octet-stream',
            )
        logger.info(f"Using content type: {content_type}")

        return generate_streaming_response(response, content_type)
    except requests.Timeout:
        return Response('Stream timeout', status=504)
    except requests.HTTPError as e:
        return Response(f'Failed to fetch stream: {str(e)}', status=e.response.status_code)
    except Exception as e:
        logger.error(f"Stream proxy error: {str(e)}")
        return Response('Failed to process stream', status=500)
def parse_group_list(group_string):
    """Parse a comma-separated string into a list of trimmed, non-empty names.

    Bug fix: blank entries (e.g. produced by a trailing or doubled comma) are
    now dropped.  Previously a stray ``''`` entry substring-matched every
    group title in the filtering code, excluding/including all channels.
    """
    if not group_string:
        return []
    return [group.strip() for group in group_string.split(',') if group.strip()]
def get_required_params():
    """Read and validate the required query parameters for an endpoint.

    Returns ``(url, username, password, proxy_url, error)`` where *error* is
    ``None`` on success or a Flask-style ``(body, status, headers)`` tuple
    the caller can return directly.
    """
    url = request.args.get('url')
    username = request.args.get('username')
    password = request.args.get('password')
    if not url or not username or not password:
        # Bug fix: the error must occupy the 5th slot as a complete response
        # tuple.  Previously the JSON body landed in the proxy_url slot and
        # callers returned the bare int 400, which Flask cannot serve.
        error = (
            json.dumps({
                'error': 'Missing Parameters',
                'details': 'Required parameters: url, username, and password'
            }),
            400,
            {'Content-Type': 'application/json'},
        )
        return None, None, None, None, error
    # Fall back to this server's own base URL when no proxy is configured.
    proxy_url = request.args.get('proxy_url', DEFAULT_PROXY_URL) or request.host_url.rstrip('/')
    return url, username, password, proxy_url, None
def validate_xtream_credentials(url, username, password):
    """Validate the Xtream API credentials via ``player_api.php``.

    Returns ``(data, error_json, error_code)``; the error fields are ``None``
    on success.
    """
    api_url = f'{url}/player_api.php?username={username}&password={password}'
    data = fetch_api_data(api_url)

    if isinstance(data, tuple):  # Error response from fetch_api_data
        return None, data[0], data[1]

    # Bug fix: fetch_api_data may return raw text when the body is not JSON;
    # `'user_info' not in <str>` would then perform a substring test.  Require
    # an actual dict carrying both keys.
    if not isinstance(data, dict) or 'user_info' not in data or 'server_info' not in data:
        return None, json.dumps({
            'error': 'Invalid Response',
            'details': 'Server response missing required data (user_info or server_info)'
        }), 400
    return data, None, None
def fetch_categories_and_channels(url, username, password):
    """Fetch live categories and live streams from the Xtream API.

    Returns ``(categories, channels, error_json, error_code)``; the error
    fields are ``None`` on success.
    """
    base = f'{url}/player_api.php?username={username}&password={password}'

    # Fetch categories
    categories = fetch_api_data(f'{base}&action=get_live_categories')
    if isinstance(categories, tuple):  # Error response
        return None, None, categories[0], categories[1]

    # Fetch live channels
    channels = fetch_api_data(f'{base}&action=get_live_streams')
    if isinstance(channels, tuple):  # Error response
        return None, None, channels[0], channels[1]

    if not (isinstance(categories, list) and isinstance(channels, list)):
        return None, None, json.dumps({
            'error': 'Invalid Data Format',
            'details': 'Categories or channels data is not in the expected format'
        }), 500

    return categories, channels, None, None
@app.route('/xmltv', methods=['GET'])
def generate_xmltv():
    """Generate a filtered XMLTV guide from the Xtream API.

    Query params: url, username, password (required); proxy_url,
    unwanted_groups, wanted_groups (optional).  Icon URLs are rewritten to
    go through /image-proxy; channels/programmes whose group fails the
    filter are stripped from the XML.

    Fix: the previously mid-function ``import re`` is hoisted to the
    module-level imports.
    """
    # Get and validate parameters
    url, username, password, proxy_url, error = get_required_params()
    if error:
        return error

    # Parse filter parameters
    unwanted_groups = parse_group_list(request.args.get('unwanted_groups', ''))
    wanted_groups = parse_group_list(request.args.get('wanted_groups', ''))

    # Validate credentials
    user_data, error_json, error_code = validate_xtream_credentials(url, username, password)
    if error_json:
        return error_json, error_code, {'Content-Type': 'application/json'}

    # Fetch XMLTV data (longer timeout: guides can be large)
    base_url = url.rstrip('/')
    xmltv_url = f'{base_url}/xmltv.php?username={username}&password={password}'
    xmltv_data = fetch_api_data(xmltv_url, timeout=20)
    if isinstance(xmltv_data, tuple):  # Error response
        return json.dumps(xmltv_data[0]), xmltv_data[1], {'Content-Type': 'application/json'}

    # NOTE(review): proxy_url always falls back to request.host_url, so this
    # early return is effectively dead code; kept for safety.
    if not (unwanted_groups or wanted_groups) and not proxy_url:
        return Response(
            xmltv_data,
            mimetype='application/xml',
            headers={"Content-Disposition": "attachment; filename=guide.xml"}
        )

    # Replace image URLs in the XMLTV content with proxied ones.
    if proxy_url:
        def replace_icon_url(match):
            original_url = match.group(1)
            proxied_url = f"{proxy_url}/image-proxy/{encode_url(original_url)}"
            return f'<icon src="{proxied_url}"'

        xmltv_data = re.sub(r'<icon src="([^"]+)"', replace_icon_url, xmltv_data)

    # If filtering is enabled, filter the XML.
    if unwanted_groups or wanted_groups:
        try:
            # Fetch categories and channels for filtering.
            categories, channels, error_json, error_code = fetch_categories_and_channels(url, username, password)
            if error_json:
                # If we can't get filtering data, return the unfiltered XMLTV.
                logger.warning("Could not fetch filtering data, returning unfiltered XMLTV")
            else:
                # Create category mapping
                category_names = {cat['category_id']: cat['category_name'] for cat in categories}

                # Build the set of channel IDs to exclude.
                excluded_channels = set()
                for channel in channels:
                    if channel['stream_type'] == 'live':
                        group_title = category_names.get(channel['category_id'], '')
                        if wanted_groups:
                            # wanted_groups wins: exclude channels NOT in it.
                            if not any(wanted_group.lower() in group_title.lower() for wanted_group in wanted_groups):
                                excluded_channels.add(str(channel['stream_id']))
                        elif unwanted_groups:
                            # Otherwise exclude channels in unwanted groups.
                            if any(unwanted_group.lower() in group_title.lower() for unwanted_group in unwanted_groups):
                                excluded_channels.add(str(channel['stream_id']))

                if excluded_channels:
                    # Simple line-based XML filtering (assumes one element
                    # per line -- TODO confirm against real guide output).
                    filtered_lines = []
                    skip_current = False
                    for line in xmltv_data.split('\n'):
                        if '<channel id="' in line:
                            current_channel = line.split('"')[1]
                            skip_current = current_channel in excluded_channels
                        if not skip_current:
                            if '<programme ' in line:
                                channel_id = line.split('channel="')[1].split('"')[0]
                                skip_current = channel_id in excluded_channels
                            if not skip_current:
                                filtered_lines.append(line)
                        if '</channel>' in line or '</programme>' in line:
                            skip_current = False
                    xmltv_data = '\n'.join(filtered_lines)
        except Exception as e:
            # If filtering fails, return the unfiltered XMLTV.
            logger.error(f"Failed to filter XMLTV: {e}")

    # Return the XMLTV data
    return Response(
        xmltv_data,
        mimetype='application/xml',
        headers={"Content-Disposition": "attachment; filename=guide.xml"}
    )
@app.route('/m3u', methods=['GET'])
def generate_m3u():
    """Generate a filtered M3U playlist from the Xtream API.

    Query params: url, username, password (required); proxy_url,
    unwanted_groups, wanted_groups, nostreamproxy (optional).  Logos are
    routed through /image-proxy and streams through /stream-proxy unless
    nostreamproxy=true.
    """
    # Get and validate parameters
    url, username, password, proxy_url, error = get_required_params()
    if error:
        return error

    # Parse filter parameters
    unwanted_groups = parse_group_list(request.args.get('unwanted_groups', ''))
    wanted_groups = parse_group_list(request.args.get('wanted_groups', ''))
    no_stream_proxy = request.args.get('nostreamproxy', '').lower() == 'true'

    # Validate credentials
    user_data, error_json, error_code = validate_xtream_credentials(url, username, password)
    if error_json:
        return error_json, error_code, {'Content-Type': 'application/json'}

    # Fetch categories and channels
    categories, channels, error_json, error_code = fetch_categories_and_channels(url, username, password)
    if error_json:
        return error_json, error_code, {'Content-Type': 'application/json'}

    # Use the canonical credentials/server reported by the API.
    username = user_data['user_info']['username']
    password = user_data['user_info']['password']
    # NOTE(review): scheme is hard-coded to http -- confirm whether
    # server_info also reports the protocol for https providers.
    server_url = f"http://{user_data['server_info']['url']}:{user_data['server_info']['port']}"
    stream_base_url = f"{server_url}/live/{username}/{password}/"

    # Create category name lookup
    category_names = {cat['category_id']: cat['category_name'] for cat in categories}

    # Generate M3U playlist
    m3u_playlist = "#EXTM3U\n"
    for channel in channels:
        if channel['stream_type'] != 'live':
            continue
        group_title = category_names.get(channel["category_id"], "Uncategorized")

        # Handle filtering logic: wanted_groups takes precedence.
        include_channel = True
        if wanted_groups:
            include_channel = any(wanted_group.lower() in group_title.lower() for wanted_group in wanted_groups)
        elif unwanted_groups:
            include_channel = not any(unwanted_group.lower() in group_title.lower() for unwanted_group in unwanted_groups)
        if not include_channel:
            continue

        # Proxy the logo URL if available
        original_logo = channel.get('stream_icon', '')
        logo_url = f"{proxy_url}/image-proxy/{encode_url(original_logo)}" if original_logo else ''

        # Create the stream URL with or without proxying
        stream_url = f'{stream_base_url}{channel["stream_id"]}.ts'
        if not no_stream_proxy:
            stream_url = f"{proxy_url}/stream-proxy/{encode_url(stream_url)}"

        # Add channel to playlist
        m3u_playlist += f'#EXTINF:0 tvg-name="{channel["name"]}" group-title="{group_title}" tvg-logo="{logo_url}",{channel["name"]}\n'
        m3u_playlist += f'{stream_url}\n'

    # Bug fix: serve the M3U MIME type (audio/x-mpegurl); audio/x-scpls is
    # the MIME type of the unrelated PLS playlist format.
    return Response(
        m3u_playlist,
        mimetype='audio/x-mpegurl',
        headers={"Content-Disposition": "attachment; filename=LiveStream.m3u"}
    )
if __name__ == '__main__':
    # NOTE(review): debug=True combined with host='0.0.0.0' exposes the
    # Werkzeug interactive debugger to the whole network (arbitrary code
    # execution) -- disable debug for any non-local deployment.
    app.run(debug=True, host='0.0.0.0')