Refresh all metadata for imported strm files

Is there a way to schedule a refresh of all metadata for imported strm files?

Not out of the box, but you could use this Python code to accomplish what you want:

strm_reprocess.py

import datetime
import requests
import aiohttp
import asyncio

# Global Variables
# Base URL of the Channels DVR server; replace the placeholder with your host.
channels_url = "http://[YOUR_CHANNELS_URL_HERE]:8089" # You must use http in order to avoid issues
# Browser-like User-Agent sent with every request to the server.
url_headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36'}

# Current date/time for logging
# Build the "YYYY-MM-DD HH:MM:SS.ffffff: " prefix used on every log line.
def current_time():
    now = datetime.datetime.now()
    return now.strftime("%Y-%m-%d %H:%M:%S.%f") + ": "

# Check if Channels URL is correct
# Verify the Channels DVR server answers at channels_url before doing real work.
# Returns True on an HTTP success/redirect response, False otherwise.
def check_channels_url():
    channels_url_okay = False  # explicit bool instead of None for clarity

    try:
        # timeout keeps a dead or firewalled host from hanging the script forever
        response = requests.get(channels_url, headers=url_headers, timeout=30)
        if response:  # Response.__bool__ is True for status codes < 400
            channels_url_okay = True
    except requests.RequestException:
        print(f"\n{current_time()} WARNING: Channels URL not found at {channels_url}")
        print(f"{current_time()} WARNING: Please change Channels URL in settings")

    return channels_url_okay

# Gets a list of the IDs for .strm files
# Query the Channels DVR API for all imported .strm files and return the list
# of their numeric file IDs. Returns [] on any connectivity or API error.
def get_strm_ids():
    api_url = f"{channels_url}/api/v1/all?source=stream-files"
    strm_ids = []

    if check_channels_url():
        try:
            # timeout prevents an indefinite hang on an unresponsive server
            response = requests.get(api_url, headers=url_headers, timeout=60)
            response.raise_for_status()  # Raise an exception if the response status code is not 200 (OK)
            data = response.json()
            # Skip malformed entries rather than crashing on a missing "id" key.
            strm_ids = [item["id"] for item in data if "id" in item]
        except (requests.RequestException, ValueError) as e:
            # ValueError covers a response body that is not valid JSON
            print(f"\n{current_time()} ERROR: From Channels API... {e}")
    else:
        print(f"\n{current_time()} INFO: Cannot check for changes from last run due to Channels URL error.\n")

    return strm_ids

# Asynchronous request of strm file reprocessing
# Fan out the reprocess PUT requests concurrently over one shared session.
async def get_reprocess_requests(strm_ids):
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=600)) as reprocess_session:
        tasks = [
            send_reprocess_requests(reprocess_session, f"{channels_url}/dvr/files/{strm_id}/reprocess")
            for strm_id in strm_ids
        ]
        # return_exceptions=True keeps one failed request from aborting the rest.
        results = await asyncio.gather(*tasks, return_exceptions=True)
        for result in results:
            if isinstance(result, asyncio.TimeoutError):
                print(f"\n{current_time()} ERROR: Reprocessing requests timed out.")
            elif isinstance(result, Exception):
                print(f"\n{current_time()} ERROR: Reprocess request failed... {result}")

# Build list of strm reprocess requests
# Issue a single PUT to a /dvr/files/<id>/reprocess endpoint.
async def send_reprocess_requests(reprocess_session, reprocess_url):
    try:
        # async with releases the connection back to the pool; a bare
        # `await session.put(...)` leaks the response and can exhaust the pool.
        async with reprocess_session.put(reprocess_url):
            pass
    except asyncio.TimeoutError:
        print(f"\n{current_time()} ERROR: Request for {reprocess_url} timed out.")

# Main execution: collect the .strm file IDs, then send all reprocess requests.
strm_ids = get_strm_ids()

print(f"\n{current_time()} Beginning Reprocess requests...")
asyncio.run(get_reprocess_requests(strm_ids))
print(f"\n{current_time()} Finished Reprocess requests")

Just change [YOUR_CHANNELS_URL_HERE] to your Channels URL. If successful, in the logs you will see something like this for every .strm file:

2024/08/03 11:38:56.117568 [HTTP] | 200 |      7.0005ms | fe80::705c:79c8:fbef:2bdb%vEthernet (WSL) | PUT      "/dvr/files/100037/reprocess"

I pulled this out of code I'm using for Stream Link Manager for Channels (still in beta development). If I'm feeling froggy at some point in the distant future, I might add .strm file support.

2 Likes

Awesome. Thank you so much, I had to make some changes (via ChatGPT) to get it to work as it kept timing out but it worked like a charm. Cheers.

import datetime
import requests
import aiohttp
import asyncio

# Global Variables

# Base URL of the Channels DVR server; replace IP:Port with your host.
channels_url = "http://IP:Port" # You must use http in order to avoid issues
# Browser-like User-Agent sent with every request to the server.
url_headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36'}

# Current date/time for logging

def current_time():
    # Restored indentation lost in the forum paste (unindented body was a SyntaxError).
    return datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f") + ": "

# Check if Channels URL is correct

def check_channels_url():
    """Return a truthy value when the Channels DVR server answers at channels_url."""
    # Indentation restored: the pasted version left the body at column 0,
    # which is invalid Python.
    channels_url_okay = None

    try:
        response = requests.get(channels_url, headers=url_headers)
        if response:
            channels_url_okay = True
    except requests.RequestException:
        print(f"\n{current_time()} WARNING: Channels URL not found at {channels_url}")
        print(f"{current_time()} WARNING: Please change Channels URL in settings")

    return channels_url_okay

# Gets a list of the IDs for .strm files

def get_strm_ids():
    """Return the Channels DVR file IDs of all imported .strm files ([] on error)."""
    # Indentation restored: the pasted version left the body at column 0,
    # which is invalid Python.
    api_url = f"{channels_url}/api/v1/all?source=stream-files"
    strm_ids = []

    channels_url_okay = check_channels_url()

    if channels_url_okay:
        try:
            response = requests.get(api_url, headers=url_headers)
            response.raise_for_status()  # Raise an exception if the response status code is not 200 (OK)
            data = response.json()
            for item in data:
                strm_ids.append(item["id"])
        except requests.RequestException as e:
            print(f"\n{current_time()} ERROR: From Channels API... {e}")
    else:
        print(f"\n{current_time()} INFO: Cannot check for changes from last run due to Channels URL error.\n")

    return strm_ids

# Asynchronous request of strm file reprocessing

async def get_reprocess_requests(strm_ids):
    """Send all reprocess requests concurrently over one shared session."""
    # Indentation restored: the pasted version left the body at column 0,
    # which is invalid Python.
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=1800)) as reprocess_session:
        tasks = [send_reprocess_requests(reprocess_session, f"{channels_url}/dvr/files/{strm_id}/reprocess") for strm_id in strm_ids]
        try:
            await asyncio.gather(*tasks)
        except asyncio.TimeoutError:
            print(f"\n{current_time()} ERROR: Reprocessing requests timed out.")

# Build list of strm reprocess requests

async def send_reprocess_requests(reprocess_session, reprocess_url):
    """PUT one reprocess request and log the response status."""
    # Indentation restored: the pasted version left the body at column 0,
    # which is invalid Python.
    print(f"\n{current_time()} Requesting: {reprocess_url}") # Log the URL being requested
    try:
        async with reprocess_session.put(reprocess_url) as response:
            print(f"{current_time()} Response status: {response.status}")
            if response.status == 404:
                print(f"{current_time()} ERROR: 404 Not Found for {reprocess_url}")
    except asyncio.TimeoutError:
        print(f"\n{current_time()} ERROR: Request for {reprocess_url} timed out.")

# Main execution: collect the .strm file IDs, then send all reprocess requests.
strm_ids = get_strm_ids()

print(f"\n{current_time()} Beginning Reprocess requests...")
asyncio.run(get_reprocess_requests(strm_ids))
print(f"\n{current_time()} Finished Reprocess requests")

Pretty neat. Maybe this should be implemented in Olivetin? @bnhf

1 Like

Stream Link Manager for Channels was released, and now includes STRM support!

1 Like

Sweet! Unfortunately your updated code lost the proper formatting when pasted here. I tried to create a standalone Python script for this but it didn't work. Could you please repost the code that works for you, but with proper formatting? Thanks! Here's the button:

Hear hear! :clap: I second this motion! @bnhf

1 Like

I believe this should correct the formatting issues, but hopefully @Naonak will respond to confirm:

import datetime
import requests
import aiohttp
import asyncio

# Global Variables
# Base URL of the Channels DVR server; replace IP:Port with your host.
channels_url = "http://IP:Port"  # You must use http in order to avoid issues
# Browser-like User-Agent sent with every request to the server.
url_headers = {'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/96.0.4664.45 Safari/537.36'}

# Current date/time for logging
# Produce the "YYYY-MM-DD HH:MM:SS.ffffff: " prefix used on every log line.
def current_time():
    timestamp = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
    return timestamp + ": "

# Check if Channels URL is correct
# Confirm the Channels DVR server is reachable before doing any real work.
# Returns True on an HTTP success/redirect status, False otherwise.
def check_channels_url():
    channels_url_okay = False  # explicit bool instead of None for clarity

    try:
        # timeout keeps a dead or firewalled host from hanging the script forever
        response = requests.get(channels_url, headers=url_headers, timeout=30)
        if response:  # Response.__bool__ is True for status codes < 400
            channels_url_okay = True
    except requests.RequestException:
        print(f"\n{current_time()} WARNING: Channels URL not found at {channels_url}")
        print(f"{current_time()} WARNING: Please change Channels URL in settings")

    return channels_url_okay

# Gets a list of the IDs for .strm files
def get_strm_ids():
    api_url = f"{channels_url}/api/v1/all?source=stream-files"
    strm_ids = []

    channels_url_okay = check_channels_url()

    if channels_url_okay:
        try:
            response = requests.get(api_url, headers=url_headers)
            response.raise_for_status()  # Raise an exception if the response status code is not 200 (OK)
            data = response.json()
            for item in data:
                strm_ids.append(item["id"])
        except requests.RequestException as e:
            print(f"\n{current_time()} ERROR: From Channels API... {e}")
    else:
        print(f"\n{current_time()} INFO: Cannot check for changes from last run due to Channels URL error.\n")

    return strm_ids

# Asynchronous request of strm file reprocessing
async def get_reprocess_requests(strm_ids):
    async with aiohttp.ClientSession(timeout=aiohttp.ClientTimeout(total=1800)) as reprocess_session:
        tasks = [send_reprocess_requests(reprocess_session, f"{channels_url}/dvr/files/{strm_id}/reprocess") for strm_id in strm_ids]
        try:
            await asyncio.gather(*tasks)
        except asyncio.TimeoutError:
            print(f"\n{current_time()} ERROR: Reprocessing requests timed out.")

# Build list of strm reprocess requests
async def send_reprocess_requests(reprocess_session, reprocess_url):
    print(f"\n{current_time()} Requesting: {reprocess_url}")  # Log the URL being requested
    try:
        async with reprocess_session.put(reprocess_url) as response:
            print(f"{current_time()} Response status: {response.status}")
            if response.status == 404:
                print(f"{current_time()} ERROR: 404 Not Found for {reprocess_url}")
    except asyncio.TimeoutError:
        print(f"\n{current_time()} ERROR: Request for {reprocess_url} timed out.")

# Main execution: collect the .strm file IDs, then send all reprocess requests.
strm_ids = get_strm_ids()

print(f"\n{current_time()} Beginning Reprocess requests...")
asyncio.run(get_reprocess_requests(strm_ids))
print(f"\n{current_time()} Finished Reprocess requests")
2 Likes

Thanks! I tried it. Any idea what Response status: 200 means here? I got a long list of those. And the .stmlnks whose metadata I'd hoped to refresh didn't refresh. I'm not sure why, or what else to look for.

That's the equivalent of an "OK" response for an HTTP request. In other words, it's a good thing. :slight_smile:

Probably a problem in the script?