mirror of https://github.com/ovosimpatico/xtream2m3u.git
synced 2026-01-15 16:32:55 -03:00
Add DNS resolvers to the API
Some checks failed
Build and Push Docker Image / build-and-push (push) Failing after 1m29s
@@ -11,4 +11,10 @@ services:
       - FLASK_ENV=production
       - GUNICORN_CMD_ARGS="--workers=3"
       # - PROXY_URL=https://your-domain.com
+    dns:
+      - 1.1.1.1   # Cloudflare
+      - 1.0.0.1
+      - 8.8.8.8   # Google
+      - 8.8.4.4
+      - 9.9.9.9   # Quad9
     restart: unless-stopped
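Note: the dns: entries above hand the container a fixed set of public resolvers (Cloudflare, Google, Quad9). Below is a minimal sketch, assuming dnspython is installed, of querying those same servers from Python with the dns.resolver module that run.py now imports; example.com is only a placeholder hostname.

import dns.resolver

# Point a resolver at the same public DNS servers the compose file configures.
resolver = dns.resolver.Resolver(configure=False)
resolver.nameservers = ['1.1.1.1', '1.0.0.1', '8.8.8.8', '8.8.4.4', '9.9.9.9']

# Resolve a placeholder hostname and print the first A record.
answers = resolver.resolve('example.com')
print(str(answers[0]))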
run.py (472 changed lines)
@@ -1,27 +1,62 @@
+import ipaddress
 import json
 import logging
 import os
+import socket
 import urllib.parse
+from functools import lru_cache
 
+import dns.resolver
 import requests
 from fake_useragent import UserAgent
 from flask import Flask, Response, request
-from requests.exceptions import SSLError
 
+# Configure logging
 logging.basicConfig(level=logging.INFO)
 logger = logging.getLogger(__name__)
 
 app = Flask(__name__)
 
-# Get default proxy URL from environment variable or use the request host
-DEFAULT_PROXY_URL = os.environ.get('PROXY_URL', None)
+# Get default proxy URL from environment variable
+DEFAULT_PROXY_URL = os.environ.get('PROXY_URL')
 
-def curl_request(url, binary=False):
-    """
-    Make a request with custom headers
-    binary: If True, return raw bytes instead of text (for images)
-    """
-    try:
+# Set up custom DNS resolver
+def setup_custom_dns():
+    """Configure a custom DNS resolver using reliable DNS services"""
+    dns_servers = ['1.1.1.1', '1.0.0.1', '8.8.8.8', '8.8.4.4', '9.9.9.9']
+
+    custom_resolver = dns.resolver.Resolver()
+    custom_resolver.nameservers = dns_servers
+
+    original_getaddrinfo = socket.getaddrinfo
+
+    def new_getaddrinfo(host, port, family=0, type=0, proto=0, flags=0):
+        if host:
+            try:
+                # Skip DNS resolution for IP addresses
+                try:
+                    ipaddress.ip_address(host)
+                    # If we get here, the host is already an IP address
+                    logger.debug(f"Host is already an IP address: {host}, skipping DNS resolution")
+                except ValueError:
+                    # Not an IP address, so use DNS resolution
+                    answers = custom_resolver.resolve(host)
+                    host = str(answers[0])
+                    logger.debug(f"Custom DNS resolved {host}")
+            except Exception as e:
+                logger.info(f"Custom DNS resolution failed for {host}: {e}, falling back to system DNS")
+        return original_getaddrinfo(host, port, family, type, proto, flags)
+
+    socket.getaddrinfo = new_getaddrinfo
+    logger.info("Custom DNS resolver set up")
+
+# Initialize DNS resolver
+setup_custom_dns()
+
+# Common request function with caching for API endpoints
+@lru_cache(maxsize=128)
+def fetch_api_data(url, timeout=10):
+    """Make a request to an API endpoint with caching"""
     ua = UserAgent()
     headers = {
         'User-Agent': ua.chrome,
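Note: the @lru_cache on fetch_api_data keys its cache on the exact (url, timeout) arguments, so a repeated call with the same URL is served from memory for the life of the process. A small self-contained sketch of that behaviour; fetch below is a stand-in, not the project's function.

from functools import lru_cache

@lru_cache(maxsize=128)
def fetch(url, timeout=10):
    # Stand-in for the real HTTP request; prints so cache misses are visible.
    print(f"fetching {url}")
    return f"payload for {url}"

fetch("http://example.com/player_api.php?action=get_live_categories")  # miss: prints
fetch("http://example.com/player_api.php?action=get_live_categories")  # hit: silent
fetch("http://example.com/player_api.php?action=get_live_streams")     # miss: prints
print(fetch.cache_info())  # CacheInfo(hits=1, misses=2, maxsize=128, currsize=2)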
@@ -30,41 +65,43 @@ def curl_request(url, binary=False):
         'Connection': 'keep-alive',
     }
 
-        response = requests.get(url, headers=headers)
-        response.raise_for_status()
-        return response.content if binary else response.text
-
-    except SSLError:
+    try:
+        hostname = urllib.parse.urlparse(url).netloc.split(':')[0]
+        logger.info(f"Making request to host: {hostname}")
+
+        response = requests.get(url, headers=headers, timeout=timeout)
+        response.raise_for_status()
+
+        # Try to parse as JSON
+        try:
+            return json.loads(response.text)
+        except json.JSONDecodeError:
+            # Return text if not valid JSON
+            return response.text
+
+    except requests.exceptions.SSLError:
         return {'error': 'SSL Error', 'details': 'Failed to verify SSL certificate'}, 503
-    except requests.RequestException as e:
-        print(f"RequestException: {e}")
+    except requests.exceptions.RequestException as e:
+        logger.error(f"RequestException: {e}")
         return {'error': 'Request Exception', 'details': str(e)}, 503
 
-def encode_image_url(url):
-    """Encode the image URL to be used in the proxy endpoint"""
-    if not url:
-        return ''
-    return urllib.parse.quote(url, safe='')
-
-@app.route('/image-proxy/<path:image_url>')
-def proxy_image(image_url):
-    """Proxy endpoint for images to avoid CORS issues"""
-    try:
-        # Decode the URL
-        original_url = urllib.parse.unquote(image_url)
-        logger.info(f"Image proxy request for: {original_url}")
-
-        # Make request with stream=True and timeout
-        response = requests.get(original_url, stream=True, timeout=10)
-        response.raise_for_status()
-
-        # Get content type from response
-        content_type = response.headers.get('Content-Type', '')
-        logger.info(f"Image response headers: {dict(response.headers)}")
-        if not content_type.startswith('image/'):
-            logger.error(f"Invalid content type for image: {content_type}")
-            return Response('Invalid image type', status=415)
+def stream_request(url, headers=None, timeout=10):
+    """Make a streaming request that doesn't buffer the full response"""
+    if not headers:
+        headers = {
+            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
+        }
+
+    return requests.get(url, stream=True, headers=headers, timeout=timeout)
+
+def encode_url(url):
+    """Safely encode a URL for use in proxy endpoints"""
+    return urllib.parse.quote(url, safe='') if url else ''
+
+def generate_streaming_response(response, content_type=None):
+    """Generate a streaming response with appropriate headers"""
+    if not content_type:
+        content_type = response.headers.get('Content-Type', 'application/octet-stream')
 
     def generate():
         try:
@@ -73,35 +110,49 @@ def proxy_image(image_url):
                 if chunk:
                     bytes_sent += len(chunk)
                     yield chunk
-            logger.info(f"Image completed, sent {bytes_sent} bytes")
+            logger.info(f"Stream completed, sent {bytes_sent} bytes")
         except Exception as e:
-            logger.error(f"Image streaming error in generator: {str(e)}")
+            logger.error(f"Streaming error: {str(e)}")
             raise
 
     headers = {
-        'Cache-Control': 'public, max-age=31536000',
         'Access-Control-Allow-Origin': '*',
+        'Content-Type': content_type,
     }
 
-    # Only add Content-Length if we have it and it's not chunked transfer
-    if ('Content-Length' in response.headers and
-            'Transfer-Encoding' not in response.headers):
+    # Add content length if available and not using chunked transfer
+    if 'Content-Length' in response.headers and 'Transfer-Encoding' not in response.headers:
        headers['Content-Length'] = response.headers['Content-Length']
    else:
        headers['Transfer-Encoding'] = 'chunked'
 
-    logger.info(f"Sending image response with headers: {headers}")
 
     return Response(
         generate(),
         mimetype=content_type,
-        headers=headers
+        headers=headers,
+        direct_passthrough=True
     )
 
+@app.route('/image-proxy/<path:image_url>')
+def proxy_image(image_url):
+    """Proxy endpoint for images to avoid CORS issues"""
+    try:
+        original_url = urllib.parse.unquote(image_url)
+        logger.info(f"Image proxy request for: {original_url}")
+
+        response = requests.get(original_url, stream=True, timeout=10)
+        response.raise_for_status()
+
+        content_type = response.headers.get('Content-Type', '')
+
+        if not content_type.startswith('image/'):
+            logger.error(f"Invalid content type for image: {content_type}")
+            return Response('Invalid image type', status=415)
+
+        return generate_streaming_response(response, content_type)
     except requests.Timeout:
-        logger.error(f"Timeout fetching image: {original_url}")
         return Response('Image fetch timeout', status=504)
     except requests.HTTPError as e:
-        logger.error(f"HTTP error fetching image: {str(e)}")
         return Response(f'Failed to fetch image: {str(e)}', status=e.response.status_code)
     except Exception as e:
         logger.error(f"Image proxy error: {str(e)}")
@@ -111,24 +162,15 @@ def proxy_image(image_url):
 def proxy_stream(stream_url):
     """Proxy endpoint for streams"""
     try:
-        # Decode the URL
         original_url = urllib.parse.unquote(stream_url)
         logger.info(f"Stream proxy request for: {original_url}")
 
-        headers = {
-            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36'
-        }
-
-        # Add timeout to prevent hanging
-        response = requests.get(original_url, stream=True, headers=headers, timeout=10)
+        response = stream_request(original_url)
         response.raise_for_status()
 
-        logger.info(f"Stream response headers: {dict(response.headers)}")
-
-        # Get content type from response
+        # Determine content type
         content_type = response.headers.get('Content-Type')
         if not content_type:
-            # Try to determine content type from URL
             if original_url.endswith('.ts'):
                 content_type = 'video/MP2T'
             elif original_url.endswith('.m3u8'):
@@ -137,117 +179,136 @@ def proxy_stream(stream_url):
                 content_type = 'application/octet-stream'
 
         logger.info(f"Using content type: {content_type}")
+        return generate_streaming_response(response, content_type)
 
-        def generate():
-            try:
-                bytes_sent = 0
-                for chunk in response.iter_content(chunk_size=64*1024):
-                    if chunk:
-                        bytes_sent += len(chunk)
-                        yield chunk
-                logger.info(f"Stream completed, sent {bytes_sent} bytes")
-            except Exception as e:
-                logger.error(f"Streaming error in generator: {str(e)}")
-                raise
-
-        response_headers = {
-            'Access-Control-Allow-Origin': '*',
-            'Content-Type': content_type,
-            'Accept-Ranges': 'bytes',
-            'Cache-Control': 'no-cache',
-            'Connection': 'keep-alive'
-        }
-
-        # Only add Content-Length if we have it and it's not chunked transfer
-        if ('Content-Length' in response.headers and
-                'Transfer-Encoding' not in response.headers):
-            response_headers['Content-Length'] = response.headers['Content-Length']
-        else:
-            response_headers['Transfer-Encoding'] = 'chunked'
-
-        logger.info(f"Sending response with headers: {response_headers}")
-
-        return Response(
-            generate(),
-            headers=response_headers,
-            direct_passthrough=True
-        )
     except requests.Timeout:
-        logger.error(f"Timeout fetching stream: {original_url}")
         return Response('Stream timeout', status=504)
     except requests.HTTPError as e:
-        logger.error(f"HTTP error fetching stream: {str(e)}")
         return Response(f'Failed to fetch stream: {str(e)}', status=e.response.status_code)
     except Exception as e:
         logger.error(f"Stream proxy error: {str(e)}")
         return Response('Failed to process stream', status=500)
 
-@app.route('/xmltv', methods=['GET'])
-def generate_xmltv():
-    # Get parameters from the URL
+def parse_group_list(group_string):
+    """Parse a comma-separated string into a list of trimmed strings"""
+    return [group.strip() for group in group_string.split(',')] if group_string else []
 
+def get_required_params():
+    """Get and validate the required parameters from the request"""
     url = request.args.get('url')
     username = request.args.get('username')
     password = request.args.get('password')
-    unwanted_groups = request.args.get('unwanted_groups', '')
-    wanted_groups = request.args.get('wanted_groups', '')
-    proxy_url = request.args.get('proxy_url', DEFAULT_PROXY_URL)
 
     if not url or not username or not password:
-        return json.dumps({
+        return None, None, None, json.dumps({
             'error': 'Missing Parameters',
             'details': 'Required parameters: url, username, and password'
-        }), 400, {'Content-Type': 'application/json'}
+        }), 400
 
-    # Convert groups into lists
-    unwanted_groups = [group.strip() for group in unwanted_groups.split(',')] if unwanted_groups else []
-    wanted_groups = [group.strip() for group in wanted_groups.split(',')] if wanted_groups else []
+    proxy_url = request.args.get('proxy_url', DEFAULT_PROXY_URL) or request.host_url.rstrip('/')
 
-    # Verify credentials first
-    mainurl_response = curl_request(f'{url}/player_api.php?username={username}&password={password}')
-    if isinstance(mainurl_response, tuple):
-        return json.dumps(mainurl_response[0]), mainurl_response[1], {'Content-Type': 'application/json'}
+    return url, username, password, proxy_url, None
 
-    # If credentials are valid, fetch the XMLTV data directly
-    base_url = url.rstrip('/') # Remove trailing slash if present
-    xmltv_response = curl_request(f'{base_url}/xmltv.php?username={username}&password={password}')
-
-    # Get the current host URL for the proxy
-    host_url = proxy_url or request.host_url.rstrip('/')
-
-    if isinstance(xmltv_response, tuple): # Check if it's an error response
-        return json.dumps(xmltv_response[0]), xmltv_response[1], {'Content-Type': 'application/json'}
+def validate_xtream_credentials(url, username, password):
+    """Validate the Xtream API credentials"""
+    api_url = f'{url}/player_api.php?username={username}&password={password}'
+    data = fetch_api_data(api_url)
+
+    if isinstance(data, tuple):  # Error response
+        return None, data[0], data[1]
+
+    if 'user_info' not in data or 'server_info' not in data:
+        return None, json.dumps({
+            'error': 'Invalid Response',
+            'details': 'Server response missing required data (user_info or server_info)'
+        }), 400
+
+    return data, None, None
+
+def fetch_categories_and_channels(url, username, password):
+    """Fetch categories and channels from the Xtream API"""
+    # Fetch categories
+    category_url = f'{url}/player_api.php?username={username}&password={password}&action=get_live_categories'
+    categories = fetch_api_data(category_url)
+
+    if isinstance(categories, tuple):  # Error response
+        return None, None, categories[0], categories[1]
+
+    # Fetch live channels
+    channel_url = f'{url}/player_api.php?username={username}&password={password}&action=get_live_streams'
+    channels = fetch_api_data(channel_url)
+
+    if isinstance(channels, tuple):  # Error response
+        return None, None, channels[0], channels[1]
+
+    if not isinstance(categories, list) or not isinstance(channels, list):
+        return None, None, json.dumps({
+            'error': 'Invalid Data Format',
+            'details': 'Categories or channels data is not in the expected format'
+        }), 500
+
+    return categories, channels, None, None
+
+@app.route('/xmltv', methods=['GET'])
+def generate_xmltv():
+    """Generate a filtered XMLTV file from the Xtream API"""
+    # Get and validate parameters
+    url, username, password, proxy_url, error = get_required_params()
+    if error:
+        return error
+
+    # Parse filter parameters
+    unwanted_groups = parse_group_list(request.args.get('unwanted_groups', ''))
+    wanted_groups = parse_group_list(request.args.get('wanted_groups', ''))
+
+    # Validate credentials
+    user_data, error_json, error_code = validate_xtream_credentials(url, username, password)
+    if error_json:
+        return error_json, error_code, {'Content-Type': 'application/json'}
+
+    # Fetch XMLTV data
+    base_url = url.rstrip('/')
+    xmltv_url = f'{base_url}/xmltv.php?username={username}&password={password}'
+    xmltv_data = fetch_api_data(xmltv_url, timeout=20)  # Longer timeout for XMLTV
+
+    if isinstance(xmltv_data, tuple):  # Error response
+        return json.dumps(xmltv_data[0]), xmltv_data[1], {'Content-Type': 'application/json'}
+
+    # If not filtering or proxying, return the original XMLTV
+    if not (unwanted_groups or wanted_groups) and not proxy_url:
+        return Response(
+            xmltv_data,
+            mimetype='application/xml',
+            headers={"Content-Disposition": "attachment; filename=guide.xml"}
+        )
 
     # Replace image URLs in the XMLTV content
-    if not isinstance(xmltv_response, tuple):
+    if proxy_url:
         import re
 
         def replace_icon_url(match):
             original_url = match.group(1)
-            proxied_url = f"{host_url}/image-proxy/{encode_image_url(original_url)}"
+            proxied_url = f"{proxy_url}/image-proxy/{encode_url(original_url)}"
             return f'<icon src="{proxied_url}"'
 
-        # Replace icon URLs in the XML
-        xmltv_response = re.sub(
+        xmltv_data = re.sub(
             r'<icon src="([^"]+)"',
             replace_icon_url,
-            xmltv_response
+            xmltv_data
         )
 
-    # If unwanted_groups or wanted_groups is specified, we need to filter the XML
+    # If filtering is enabled, filter the XML
     if unwanted_groups or wanted_groups:
         try:
-            # Fetch categories and channels to get the mapping
-            category_response = curl_request(f'{url}/player_api.php?username={username}&password={password}&action=get_live_categories')
-            livechannel_response = curl_request(f'{url}/player_api.php?username={username}&password={password}&action=get_live_streams')
-
-            if not isinstance(category_response, tuple) and not isinstance(livechannel_response, tuple):
-                categories = json.loads(category_response)
-                channels = json.loads(livechannel_response)
+            # Fetch categories and channels for filtering
+            categories, channels, error_json, error_code = fetch_categories_and_channels(url, username, password)
+            if error_json:
+                # If we can't get filtering data, just return the unfiltered XMLTV
+                logger.warning("Could not fetch filtering data, returning unfiltered XMLTV")
+            else:
 
                 # Create category mapping
                 category_names = {cat['category_id']: cat['category_name'] for cat in categories}
 
-                # Create set of channel IDs to exclude or include
+                # Create set of channel IDs to exclude
                 excluded_channels = set()
 
                 for channel in channels:
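Note: the helpers added above share one convention: fetch_api_data returns parsed data on success and a (payload, status) tuple on failure, and callers branch on isinstance(..., tuple). A minimal sketch of that pattern; fake_fetch is illustrative only, not part of the commit.

def fake_fetch(ok):
    # Stand-in for fetch_api_data: data on success, (payload, status) on error.
    if ok:
        return [{"category_id": "1", "category_name": "News"}]
    return {"error": "Request Exception", "details": "connection refused"}, 503

result = fake_fetch(ok=False)
if isinstance(result, tuple):      # error path
    payload, status = result
    print(status, payload["error"])
else:                              # success path
    print([cat["category_name"] for cat in result])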
@@ -269,7 +330,7 @@ def generate_xmltv():
                 current_channel = None
                 skip_current = False
 
-                for line in xmltv_response.split('\n'):
+                for line in xmltv_data.split('\n'):
                     if '<channel id="' in line:
                         current_channel = line.split('"')[1]
                         skip_current = current_channel in excluded_channels
@@ -285,118 +346,59 @@ def generate_xmltv():
                     if '</channel>' in line or '</programme>' in line:
                         skip_current = False
 
-                xmltv_response = '\n'.join(filtered_lines)
+                xmltv_data = '\n'.join(filtered_lines)
 
-        except (json.JSONDecodeError, IndexError, KeyError):
+        except Exception as e:
+            logger.error(f"Failed to filter XMLTV: {e}")
             # If filtering fails, return unfiltered XMLTV
-            pass
 
-    # Return the modified XMLTV data
+    # Return the XMLTV data
     return Response(
-        xmltv_response,
+        xmltv_data,
         mimetype='application/xml',
         headers={"Content-Disposition": "attachment; filename=guide.xml"}
     )
 
 @app.route('/m3u', methods=['GET'])
 def generate_m3u():
-    # Get parameters from the URL
-    url = request.args.get('url')
-    username = request.args.get('username')
-    password = request.args.get('password')
-    unwanted_groups = request.args.get('unwanted_groups', '')
-    wanted_groups = request.args.get('wanted_groups', '')
+    """Generate a filtered M3U playlist from the Xtream API"""
+    # Get and validate parameters
+    url, username, password, proxy_url, error = get_required_params()
+    if error:
+        return error
+
+    # Parse filter parameters
+    unwanted_groups = parse_group_list(request.args.get('unwanted_groups', ''))
+    wanted_groups = parse_group_list(request.args.get('wanted_groups', ''))
     no_stream_proxy = request.args.get('nostreamproxy', '').lower() == 'true'
-    proxy_url = request.args.get('proxy_url', DEFAULT_PROXY_URL)
 
-    if not url or not username or not password:
-        return json.dumps({
-            'error': 'Missing Parameters',
-            'details': 'Required parameters: url, username, and password'
-        }), 400, {'Content-Type': 'application/json'}
-
-    # Convert groups into lists
-    unwanted_groups = [group.strip() for group in unwanted_groups.split(',')] if unwanted_groups else []
-    wanted_groups = [group.strip() for group in wanted_groups.split(',')] if wanted_groups else []
-
-    # Verify the credentials and the provided URL
-    mainurl_response = curl_request(f'{url}/player_api.php?username={username}&password={password}')
-    if isinstance(mainurl_response, tuple): # Check if it's an error response
-        return json.dumps(mainurl_response[0]), mainurl_response[1], {'Content-Type': 'application/json'}
-    mainurl_json = mainurl_response
-
-    try:
-        mainurlraw = json.loads(mainurl_json)
-    except json.JSONDecodeError as e:
-        return json.dumps({
-            'error': 'Invalid JSON',
-            'details': f'Failed to parse server response: {str(e)}'
-        }), 500, {'Content-Type': 'application/json'}
-
-    if 'user_info' not in mainurlraw or 'server_info' not in mainurlraw:
-        return json.dumps({
-            'error': 'Invalid Response',
-            'details': 'Server response missing required data (user_info or server_info)'
-        }), 400, {'Content-Type': 'application/json'}
-
-    # Fetch live streams
-    livechannel_response = curl_request(f'{url}/player_api.php?username={username}&password={password}&action=get_live_streams')
-    if isinstance(livechannel_response, tuple): # Check if it's an error response
-        return json.dumps(livechannel_response[0]), livechannel_response[1], {'Content-Type': 'application/json'}
-    livechannel_json = livechannel_response
-
-    try:
-        livechannelraw = json.loads(livechannel_json)
-    except json.JSONDecodeError as e:
-        return json.dumps({
-            'error': 'Invalid JSON',
-            'details': f'Failed to parse live streams data: {str(e)}'
-        }), 500, {'Content-Type': 'application/json'}
-
-    if not isinstance(livechannelraw, list):
-        return json.dumps({
-            'error': 'Invalid Data Format',
-            'details': 'Live streams data is not in the expected format'
-        }), 500, {'Content-Type': 'application/json'}
-
-    # Fetch live categories
-    category_response = curl_request(f'{url}/player_api.php?username={username}&password={password}&action=get_live_categories')
-    if isinstance(category_response, tuple): # Check if it's an error response
-        return json.dumps(category_response[0]), category_response[1], {'Content-Type': 'application/json'}
-    category_json = category_response
-
-    try:
-        categoryraw = json.loads(category_json)
-    except json.JSONDecodeError as e:
-        return json.dumps({
-            'error': 'Invalid JSON',
-            'details': f'Failed to parse categories data: {str(e)}'
-        }), 500, {'Content-Type': 'application/json'}
-
-    if not isinstance(categoryraw, list):
-        return json.dumps({
-            'error': 'Invalid Data Format',
-            'details': 'Categories data is not in the expected format'
-        }), 500, {'Content-Type': 'application/json'}
-
-    username = mainurlraw['user_info']['username']
-    password = mainurlraw['user_info']['password']
-
-    server_url = f"http://{mainurlraw['server_info']['url']}:{mainurlraw['server_info']['port']}"
-    fullurl = f"{server_url}/live/{username}/{password}/"
-
-    categoryname = {cat['category_id']: cat['category_name'] for cat in categoryraw}
-
-    # Get the current host URL for the proxy
-    host_url = proxy_url or request.host_url.rstrip('/')
+    # Validate credentials
+    user_data, error_json, error_code = validate_xtream_credentials(url, username, password)
+    if error_json:
+        return error_json, error_code, {'Content-Type': 'application/json'}
+
+    # Fetch categories and channels
+    categories, channels, error_json, error_code = fetch_categories_and_channels(url, username, password)
+    if error_json:
+        return error_json, error_code, {'Content-Type': 'application/json'}
+
+    # Extract user info and server URL
+    username = user_data['user_info']['username']
+    password = user_data['user_info']['password']
+
+    server_url = f"http://{user_data['server_info']['url']}:{user_data['server_info']['port']}"
+    stream_base_url = f"{server_url}/live/{username}/{password}/"
+
+    # Create category name lookup
+    category_names = {cat['category_id']: cat['category_name'] for cat in categories}
 
     # Generate M3U playlist
     m3u_playlist = "#EXTM3U\n"
-    for channel in livechannelraw:
-        if channel['stream_type'] == 'live':
-            group_title = categoryname.get(channel["category_id"], "Uncategorized")
-
-            # Handle filtering - if wanted_groups is provided, it takes precedence over unwanted_groups
+    for channel in channels:
+        if channel['stream_type'] == 'live':
+            group_title = category_names.get(channel["category_id"], "Uncategorized")
+
+            # Handle filtering logic
             include_channel = True
 
             if wanted_groups:
@@ -407,19 +409,25 @@ def generate_m3u():
                 include_channel = not any(unwanted_group.lower() in group_title.lower() for unwanted_group in unwanted_groups)
 
             if include_channel:
-                # Proxy the logo URL
+                # Proxy the logo URL if available
                 original_logo = channel.get('stream_icon', '')
-                logo_url = f"{host_url}/image-proxy/{encode_image_url(original_logo)}" if original_logo else ''
+                logo_url = f"{proxy_url}/image-proxy/{encode_url(original_logo)}" if original_logo else ''
 
-                stream_url = f'{fullurl}{channel["stream_id"]}.ts'
+                # Create the stream URL with or without proxying
+                stream_url = f'{stream_base_url}{channel["stream_id"]}.ts'
                 if not no_stream_proxy:
-                    stream_url = f"{host_url}/stream-proxy/{encode_image_url(stream_url)}"
+                    stream_url = f"{proxy_url}/stream-proxy/{encode_url(stream_url)}"
 
+                # Add channel to playlist
                 m3u_playlist += f'#EXTINF:0 tvg-name="{channel["name"]}" group-title="{group_title}" tvg-logo="{logo_url}",{channel["name"]}\n'
                 m3u_playlist += f'{stream_url}\n'
 
-    # Return the M3U playlist as a downloadable file
-    return Response(m3u_playlist, mimetype='audio/x-scpls', headers={"Content-Disposition": "attachment; filename=LiveStream.m3u"})
+    # Return the M3U playlist
+    return Response(
+        m3u_playlist,
+        mimetype='audio/x-scpls',
+        headers={"Content-Disposition": "attachment; filename=LiveStream.m3u"}
+    )
 
 if __name__ == '__main__':
     app.run(debug=True, host='0.0.0.0')
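Note: a hedged usage sketch of the /m3u endpoint once the service is up. The host and port assume Flask's default development server on localhost:5000, and the Xtream URL and credentials are placeholders; the query parameter names match the ones read by get_required_params and generate_m3u.

import requests

# Assumes the app is reachable on localhost:5000 (Flask's default dev port).
params = {
    'url': 'http://xtream.example.com:8080',   # placeholder Xtream server
    'username': 'user',                        # placeholder credentials
    'password': 'pass',
    'wanted_groups': 'News,Sports',            # optional group filter
    'nostreamproxy': 'true',                   # keep original stream URLs
}
resp = requests.get('http://localhost:5000/m3u', params=params, timeout=30)
resp.raise_for_status()
with open('LiveStream.m3u', 'wb') as f:
    f.write(resp.content)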