diff --git a/.github/stale.yml b/.github/stale.yml
new file mode 100644
index 0000000..619cd6a
--- /dev/null
+++ b/.github/stale.yml
@@ -0,0 +1,20 @@
+# Number of days of inactivity before an issue becomes stale
+daysUntilStale: 21
+# Number of days of inactivity before a stale issue is closed
+daysUntilClose: 7
+# Issues with these labels will never be considered stale
+exemptLabels:
+ - bug
+ - documentation
+ - enhancement
+ - pinned
+ - security
+# Label to use when marking an issue as stale
+staleLabel: inactive
+# Comment to post when marking an issue as stale. Set to `false` to disable
+markComment: >
+ This issue has been automatically marked as stale because it has not had
+ recent activity. It will be closed if no further activity occurs. Thank you
+ for your contributions.
+# Comment to post when closing a stale issue. Set to `false` to disable
+closeComment: false
diff --git a/.gitignore b/.gitignore
index 11551c0..4665dd0 100644
--- a/.gitignore
+++ b/.gitignore
@@ -15,11 +15,11 @@ config_future.ini
dev/
data/
cache/
+plugins_ext/
*.ts
lib/tvheadend/development/
lib/web/htdocs/temp/*
!lib/web/htdocs/temp/__init__.py
-plugins_ext/provider*
build/*/cabernet*.exe
ffmpeg/
misc/
diff --git a/Dockerfile_tvh_crypt.alpine b/Dockerfile_tvh_crypt.alpine
index 2b5ac20..31bacad 100644
--- a/Dockerfile_tvh_crypt.alpine
+++ b/Dockerfile_tvh_crypt.alpine
@@ -2,13 +2,15 @@ FROM python:3.8-alpine
#RUN apk add --no-cache --update bash tzdata ffmpeg py3-cryptography py-requests && \
RUN apk add --no-cache --update bash tzdata ffmpeg curl && \
apk add --no-cache --virtual builddeps gcc musl-dev python3-dev libffi-dev openssl-dev cargo && \
- pip3 install requests && \
- pip install cryptography --no-binary=cryptography && \
+ pip3 install httpx[http2] && \
+ pip3 install streamlink && \
+ pip3 install cryptography --no-binary=cryptography && \
apk del builddeps
+COPY requirements.txt /app/requirements.txt
+
COPY *.py /app/
COPY lib/ /app/lib/
COPY plugins /app/plugins
-COPY plugins_ext /app/plugins_ext
RUN touch /app/is_container
ENTRYPOINT ["python3", "/app/tvh_main.py"]
diff --git a/Dockerfile_tvh_crypt.slim-buster b/Dockerfile_tvh_crypt.slim-buster
index e7b9069..16eb75b 100644
--- a/Dockerfile_tvh_crypt.slim-buster
+++ b/Dockerfile_tvh_crypt.slim-buster
@@ -28,6 +28,5 @@ RUN apt-get update \
COPY *.py /app/
COPY lib/ /app/lib/
COPY plugins /app/plugins
-COPY plugins_ext /app/plugins_ext
RUN touch /app/is_container
ENTRYPOINT ["python3", "/app/tvh_main.py"]
diff --git a/LICENSE b/LICENSE
index 1e82844..e28453c 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,6 +1,6 @@
MIT License
-Copyright (c) 2021 ROCKY4546 (https://github.com/rocky4546)
+Copyright (c) 2023 ROCKY4546 (https://github.com/rocky4546)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
diff --git a/README.md b/README.md
index 87a0bca..5b0a8ef 100644
--- a/README.md
+++ b/README.md
@@ -1,41 +1,76 @@
+## NOTICE:
+By default this app does not provide any video sources; the plugins access the providers' streams for personal use.
## Installation
### 1. Requirements
-- Python 3.7+
+- Python 3.8+
- python cryptography module
-- ffmpeg
+- python httpx[http2] module
+- (optional) streamlink module
+- ffmpeg and ffprobe
### 2. Installation
- Download source
- Unzip source in the installation folder
-- Create a data folder inside the installation folder and create a config.ini file inside the data folder
-- Edit the config.ini and add the following lines
+- Launch the app by running the command "python3 tvh_main.py". This should create a data folder and a config.ini inside that folder
+- Bring up browser and go to http://ip address:6077/
+- From Plugins, install PlutoTV plugin
+- Stop the app
+- Edit the data/config.ini and add the following lines (Update: as of 0.9.14 this is supposed to happen automatically)
[plutotv_default]
label = PlutoTV Instance
- Launch the app by running the command "python3 tvh_main.py"
-- Bring up browser and go to http://ipaddress:6077/
+- Bring up browser and go to http://ip address:6077/
- Go to settings and make changes you want.
- Logging: Change log level from warning to info if needed
- - Under Providers > PlutoTV enable
- - URL Filtering
- - PTS/DTS Resync
+- Enable the PlutoTV instance in the Settings page
+- Restart the app (from the Scheduler/Applications) to have the plugin fully activate
- From XML/JSON Links try some of the links
-### 3. Docker
+### 3. Services
+- MS Windows
+  - Services for MS Windows are auto-created by the installer provided with each release.
+- Unix/Linux
+  - Service files for CoreELEC and Debian/Ubuntu are found at the link below; follow the instructions in each file.
+ - https://github.com/cabernetwork/cabernet/tree/master/lib/tvheadend/service
+
+### 4. Docker
See http://ghcr.io/cabernetwork/cabernet:latest
+- Review the ports and volume mount points in docker-compose.yml
+- Note: the Cabernet source must be unzipped into ./docker/cabernet/config/app before the container will run
+- Recommended Docker file: Dockerfile_tvh_crypt.alpine
+- Bring up browser and go to http://ip address:6077/
+- From Plugins, install PlutoTV plugin
+- Stop the app
+- Edit the data/config.ini and add the following lines
+
+[plutotv_default]
+label = PlutoTV Instance
+
+- Restart the app (from the Scheduler/Applications) to have the plugin fully activate
+- From XML/JSON Links try some of the links
-### 4. Notes
+### 5. Default Ports
+- 6077 Web UI
+- 5004 Stream port
+- 1900 SSDP (if enabled)
+- 65001 HDHomeRun (if enabled)
+
+### 6. Notes
- URL used can include plugin and instance levels to filter down to a specific set of data
- - http://ipaddress:6077/channels.m3u
- - http://ipaddress:6077/pLuToTv/channels.m3u
- - http://ipaddress:6077/PlutoTV/Default/channels.m3u
-- config.ini group tag requirements
+ - http://ip address:6077/channels.m3u
+ - http://ip address:6077/pLuToTv/channels.m3u
+ - http://ip address:6077/PlutoTV/Default/channels.m3u
+- config.ini group tag requirements when creating an instance
- All lower case
- - Underscore separates the plugin name from the instance name
- - Use a single word if possible for the instance name or use underlines between words
- - Do not change the instance name. It is used throughout the system and is difficult to change.
+ - Underscore is a key character in section tags and separates the plugin name from the instance name
+ - Use a single word if possible for the instance name
+ - Do not change the instance name unless you go into data management and remove the instance first.
- [plutotv_mychannels]
+### 7. Forum
+https://tvheadend.org/boards/5/topics/43052
+
Enjoy
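
As a quick preflight for the requirements listed in section 1 of the README above, the following sketch (not part of the diff; the script name and structure are illustrative only) checks the Python version, the cryptography and httpx[http2] modules, the optional streamlink module, and the ffmpeg/ffprobe binaries:

```python
# check_requirements.py -- hypothetical preflight check for the README's section 1 requirements.
import importlib.util
import shutil
import sys

def check() -> bool:
    ok = True
    if sys.version_info < (3, 8):
        print('Python 3.8+ is required; found %d.%d' % sys.version_info[:2])
        ok = False
    # (module, required) pairs; h2 is the extra pulled in by httpx[http2]
    for module, required in (('cryptography', True), ('httpx', True),
                             ('h2', True), ('streamlink', False)):
        if importlib.util.find_spec(module) is None:
            if required:
                print('missing required module: ' + module)
                ok = False
            else:
                print('optional module not installed: ' + module)
    # ffmpeg and ffprobe must both be reachable on PATH
    for binary in ('ffmpeg', 'ffprobe'):
        if shutil.which(binary) is None:
            print('missing binary on PATH: ' + binary)
            ok = False
    return ok

if __name__ == '__main__':
    sys.exit(0 if check() else 1)
```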
diff --git a/build/WINDOWS/Plugins/ZipDLL/ZipDLL.cpp b/build/WINDOWS/Plugins/ZipDLL/ZipDLL.cpp
deleted file mode 100644
index 23bdfe6..0000000
--- a/build/WINDOWS/Plugins/ZipDLL/ZipDLL.cpp
+++ /dev/null
@@ -1,240 +0,0 @@
-// ZipDLL.cpp : Definiert den Einsprungpunkt f�r die DLL-Anwendung.
-//
-
-#include "ZipArchive\ZipArchive.h"
-#include "exdll.h"
-#include
-
-BOOL APIENTRY DllMain( HANDLE hModule,
- DWORD ul_reason_for_call,
- LPVOID lpReserved
- )
-{
- return TRUE;
-}
-
-HWND g_hwndDlg, g_hwndList;
-
-void LogMessage(const char *pStr);
-void SetStatus(const char *pStr);
-
-extern "C" void __declspec(dllexport) extractall(HWND hwndParent, int string_size,
- char *variables, stack_t **stacktop)
-{
- EXDLL_INIT();
-
- g_hwndDlg = g_hwndList = 0;
-
- // do your stuff here
- g_hwndDlg=FindWindowEx(hwndParent,NULL,"#32770",NULL);
- if (g_hwndDlg)
- g_hwndList=FindWindowEx(g_hwndDlg,NULL,"SysListView32",NULL);
-
- //Extract file to destination
- char destination[MAX_PATH+1];
- char source[MAX_PATH+1];
- char buffer[4096];
-
- char szExtracting[MAX_PATH * 2 + 100] = "Extracting contents of %s to %s";
- char szExtractingPrintCount[200] = " Extracting %d files and directories";
- char szExtractFile[MAX_PATH + 50] = " Extract : %s";
- char szErrorCouldNotExtract[MAX_PATH + 100] = " Error: Could not extract %s";
- char szCouldNotExtract[MAX_PATH + 100] = "Could not extract %s";
- char szErrorCouldNotGetFileAttributes[100] = "Error: Could not get file attributes.";
- char szCouldNotGetFileAttributes[100] = "Could not get file attributes.";
- char szError[1000] = " Error: %s";
-
- popstring(source);
-
- if (!lstrcmpi(source, "/TRANSLATE")) {
- //Use localized strings
- popstring(szExtracting);
- popstring(szExtractingPrintCount);
- popstring(szExtractFile);
- popstring(szErrorCouldNotExtract);
- popstring(szCouldNotExtract);
- popstring(szErrorCouldNotGetFileAttributes);
- popstring(szCouldNotGetFileAttributes);
- popstring(szError);
- popstring(source);
- }
-
- popstring(destination);
-
- sprintf(buffer, szExtracting, source, destination);
- LogMessage(buffer);
- try
- {
- // Open archive
- CZipArchive archive;
- archive.Open(source, CZipArchive::zipOpenReadOnly);
-
- // Get number of entries in archive
- int nCount=archive.GetCount();
- sprintf(buffer, szExtractingPrintCount, nCount);
- LogMessage(buffer);
-
- //Process each file in archive
- for (int i=0;i
-# Microsoft Developer Studio Generated Build File, Format Version 6.00
-# ** NICHT BEARBEITEN **
-
-# TARGTYPE "Win32 (x86) Dynamic-Link Library" 0x0102
-
-CFG=ZipDLL - Win32 Release
-!MESSAGE Dies ist kein g�ltiges Makefile. Zum Erstellen dieses Projekts mit NMAKE
-!MESSAGE verwenden Sie den Befehl "Makefile exportieren" und f�hren Sie den Befehl
-!MESSAGE
-!MESSAGE NMAKE /f "ZipDLL.mak".
-!MESSAGE
-!MESSAGE Sie k�nnen beim Ausf�hren von NMAKE eine Konfiguration angeben
-!MESSAGE durch Definieren des Makros CFG in der Befehlszeile. Zum Beispiel:
-!MESSAGE
-!MESSAGE NMAKE /f "ZipDLL.mak" CFG="ZipDLL - Win32 Release"
-!MESSAGE
-!MESSAGE F�r die Konfiguration stehen zur Auswahl:
-!MESSAGE
-!MESSAGE "ZipDLL - Win32 Release" (basierend auf "Win32 (x86) Dynamic-Link Library")
-!MESSAGE
-
-# Begin Project
-# PROP AllowPerConfigDependencies 0
-# PROP Scc_ProjName ""
-# PROP Scc_LocalPath ""
-CPP=cl.exe
-MTL=midl.exe
-RSC=rc.exe
-# PROP BASE Use_MFC 0
-# PROP BASE Use_Debug_Libraries 0
-# PROP BASE Output_Dir "Release"
-# PROP BASE Intermediate_Dir "Release"
-# PROP BASE Target_Dir ""
-# PROP Use_MFC 0
-# PROP Use_Debug_Libraries 0
-# PROP Output_Dir "Release"
-# PROP Intermediate_Dir "Release"
-# PROP Ignore_Export_Lib 1
-# PROP Target_Dir ""
-# ADD BASE CPP /nologo /MT /W3 /GX /O2 /D "WIN32" /D "NDEBUG" /D "_WINDOWS" /D "_MBCS" /D "_USRDLL" /D "ZIPDLL_EXPORTS" /Yu"stdafx.h" /FD /c
-# ADD CPP /nologo /MT /W3 /GX /O1 /D "WIN32" /D "NDEBUG" /D "_WINDOWS" /D "_MBCS" /D "_USRDLL" /D "ZIPDLL_EXPORTS" /FD /c
-# SUBTRACT CPP /YX /Yc /Yu
-# ADD BASE MTL /nologo /D "NDEBUG" /mktyplib203 /win32
-# ADD MTL /nologo /D "NDEBUG" /mktyplib203 /win32
-# ADD BASE RSC /l 0x407 /d "NDEBUG"
-# ADD RSC /l 0x407 /d "NDEBUG"
-BSC32=bscmake.exe
-# ADD BASE BSC32 /nologo
-# ADD BSC32 /nologo
-LINK32=link.exe
-# ADD BASE LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib /nologo /dll /machine:I386
-# ADD LINK32 kernel32.lib user32.lib gdi32.lib winspool.lib comdlg32.lib advapi32.lib shell32.lib ole32.lib oleaut32.lib uuid.lib odbc32.lib odbccp32.lib ZipArchive\Release\ZipArchive_STL.lib /nologo /dll /machine:I386 /out:"ZipDLL.dll" /opt:ref /opt:nowin98
-# SUBTRACT LINK32 /pdb:none /nodefaultlib
-# Begin Target
-
-# Name "ZipDLL - Win32 Release"
-# Begin Group "Quellcodedateien"
-
-# PROP Default_Filter "cpp;c;cxx;rc;def;r;odl;idl;hpj;bat"
-# Begin Source File
-
-SOURCE=.\ZipDLL.cpp
-# End Source File
-# End Group
-# Begin Group "Header-Dateien"
-
-# PROP Default_Filter "h;hpp;hxx;hm;inl"
-# Begin Source File
-
-SOURCE=.\exdll.h
-# End Source File
-# End Group
-# Begin Group "Ressourcendateien"
-
-# PROP Default_Filter "ico;cur;bmp;dlg;rc2;rct;bin;rgs;gif;jpg;jpeg;jpe"
-# End Group
-# Begin Source File
-
-SOURCE=.\ReadMe.txt
-# End Source File
-# End Target
-# End Project
diff --git a/build/WINDOWS/Plugins/ZipDLL/ZipDLL.dsw b/build/WINDOWS/Plugins/ZipDLL/ZipDLL.dsw
deleted file mode 100644
index 2c37cb4..0000000
--- a/build/WINDOWS/Plugins/ZipDLL/ZipDLL.dsw
+++ /dev/null
@@ -1,44 +0,0 @@
-Microsoft Developer Studio Workspace File, Format Version 6.00
-# WARNUNG: DIESE ARBEITSBEREICHSDATEI DARF NICHT BEARBEITET ODER GEL�SCHT WERDEN!
-
-###############################################################################
-
-Project: "ZipArchive"=.\ZipArchive\ZipArchive_STL.dsp - Package Owner=<4>
-
-Package=<5>
-{{{
-}}}
-
-Package=<4>
-{{{
-}}}
-
-###############################################################################
-
-Project: "ZipDLL"=.\ZipDLL.dsp - Package Owner=<4>
-
-Package=<5>
-{{{
-}}}
-
-Package=<4>
-{{{
- Begin Project Dependency
- Project_Dep_Name ZipArchive
- End Project Dependency
-}}}
-
-###############################################################################
-
-Global:
-
-Package=<5>
-{{{
-}}}
-
-Package=<3>
-{{{
-}}}
-
-###############################################################################
-
diff --git a/build/WINDOWS/Plugins/ZipDLL/exdll.h b/build/WINDOWS/Plugins/ZipDLL/exdll.h
deleted file mode 100644
index 6b04c56..0000000
--- a/build/WINDOWS/Plugins/ZipDLL/exdll.h
+++ /dev/null
@@ -1,97 +0,0 @@
-#ifndef _EXDLL_H_
-#define _EXDLL_H_
-
-// only include this file from one place in your DLL.
-// (it is all static, if you use it in two places it will fail)
-
-#define EXDLL_INIT() { \
- g_stringsize=string_size; \
- g_stacktop=stacktop; \
- g_variables=variables; }
-
-// For page showing plug-ins
-#define WM_NOTIFY_OUTER_NEXT (WM_USER+0x8)
-#define WM_NOTIFY_CUSTOM_READY (WM_USER+0xd)
-#define NOTIFY_BYE_BYE 'x'
-
-typedef struct _stack_t {
- struct _stack_t *next;
- char text[1]; // this should be the length of string_size
-} stack_t;
-
-
-static unsigned int g_stringsize;
-static stack_t **g_stacktop;
-static char *g_variables;
-
-static int popstring(char *str); // 0 on success, 1 on empty stack
-static void pushstring(const char *str);
-
-enum
-{
-INST_0, // $0
-INST_1, // $1
-INST_2, // $2
-INST_3, // $3
-INST_4, // $4
-INST_5, // $5
-INST_6, // $6
-INST_7, // $7
-INST_8, // $8
-INST_9, // $9
-INST_R0, // $R0
-INST_R1, // $R1
-INST_R2, // $R2
-INST_R3, // $R3
-INST_R4, // $R4
-INST_R5, // $R5
-INST_R6, // $R6
-INST_R7, // $R7
-INST_R8, // $R8
-INST_R9, // $R9
-INST_CMDLINE, // $CMDLINE
-INST_INSTDIR, // $INSTDIR
-INST_OUTDIR, // $OUTDIR
-INST_EXEDIR, // $EXEDIR
-INST_LANG, // $LANGUAGE
-__INST_LAST
-};
-
-
-// utility functions (not required but often useful)
-static int popstring(char *str)
-{
- stack_t *th;
- if (!g_stacktop || !*g_stacktop) return 1;
- th=(*g_stacktop);
- lstrcpy(str,th->text);
- *g_stacktop = th->next;
- GlobalFree((HGLOBAL)th);
- return 0;
-}
-
-static void pushstring(const char *str)
-{
- stack_t *th;
- if (!g_stacktop) return;
- th=(stack_t*)GlobalAlloc(GPTR,sizeof(stack_t)+g_stringsize);
- lstrcpyn(th->text,str,g_stringsize);
- th->next=*g_stacktop;
- *g_stacktop=th;
-}
-
-static char *getuservariable(int varnum)
-{
- if (varnum < 0 || varnum >= __INST_LAST) return NULL;
- return g_variables+varnum*g_stringsize;
-}
-
-static void setuservariable(int varnum, char *var)
-{
- if (var != NULL && varnum >= 0 && varnum < __INST_LAST)
- lstrcpy(g_variables + varnum*g_stringsize, var);
-}
-
-
-
-#endif//_EXDLL_H_
\ No newline at end of file
diff --git a/build/WINDOWS/Plugins/ZipDLL/license.txt b/build/WINDOWS/Plugins/ZipDLL/license.txt
deleted file mode 100644
index a396e01..0000000
--- a/build/WINDOWS/Plugins/ZipDLL/license.txt
+++ /dev/null
@@ -1,31 +0,0 @@
-This NSIS plugin is licensed under the GPL, please read the file ZipArchive\glp.txt
-for details.
-
-This program is free software; you can redistribute it and/or
-modify it under the terms of the GNU General Public License
-as published by the Free Software Foundation; either version 2
-of the License, or (at your option) any later version.
-
-This library is distributed in the hope that it will be useful,
-but WITHOUT ANY WARRANTY; without even the implied warranty of
-MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
-See the GNU General Public License for more details.
-
-You should have received a copy of the GNU General Public
-License along with this library; if not, write to the Free Software
-Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
-
-
-
-ZipDLL uses the ZipArchive library from http://www.artpol-software.com/index_zip.html
-Please read the file ZipArchive\license.txt for details
-
-Alternative license for use with proprietary software:
-------------------------------------------------------
-
-Since ZipArchive is licensed under the GPL, it may only be used with programs with a
-GPL compatible license, the same applies to this DLL.
-You can, however obtain a commercial license (free of charge for freeware and most
-shareware programs) for ZipArchive. Please read ZipArchive\license.txt for details.
-Permission is granted to use ZipDLL together with prorietary software when you've
-obtained a license for ZipArchive.
\ No newline at end of file
diff --git a/build/WINDOWS/Plugins/ZipDLL/readme.txt b/build/WINDOWS/Plugins/ZipDLL/readme.txt
deleted file mode 100644
index 6bbb508..0000000
--- a/build/WINDOWS/Plugins/ZipDLL/readme.txt
+++ /dev/null
@@ -1,122 +0,0 @@
- ZipDLL v1.2.2a
- --------------
- Copyright 2002-2004
- by Tim Kosse
- tim.kosse@gmx.de
-
-What is this?
--------------
-
- ZipDLL is a extension DLL for NSIS. It can unzip files from
- zip files. It is especially useful in combination with NSISdl so
- that you don't have to download large files uncompressed.
-
-Usage
------
-
- To extract files from a zip file, use the following macro:
-
- !insertmacro ZIPDLL_EXTRACT SOURCE DESTINATION FILE
-
- Parameters: Zip file, destination directory, file to extract
- Description: Extract the specified file in the archive to the
- destination directory.
- If file is <ALL>, all files in the archive
- will be extracted.
-
- Example:
- !insertmacro MUI_ZIPDLL_EXTRACTALL "c:\test.zip" "c:\output"
-
-
- Exported Functions:
- - extractall
- Parameters: Zip file, destination directory
- Description: Extracts all files in the archive to the destination
- directory.
-
- - extractfile
- Parameters: Zip file, destination directory, file to extract
- Description: Extracts the specified file in the archive to the destination
- directory.
-
- Example:
- ZipDLL::extractall "c:\test.zip" "c:\output"
-
-Supported languages
--------------------
-
-ZipDLL.nsh contains the following additional languages:
-- Arabic
-- Brazilian
-- Chinese Simplified
-- Chinese Traditional
-- Croatian
-- Danish
-- French
-- German
-- Hungarian
-- Korean
-- Lithuanian
-- Polish
-- Russian
-- Spanish
-
-To add your language, simply modify ZipDLL.nsh, should be really easy.
-Please send the modified ZipDLL.nsh to tim.kosse@gmx.de so that other people can
-benfit from it, too.
-
-Legal Stuff
------------
-
- This NSIS plugin is licensed under the GPL, please read the file ZipArchive\glp.txt
- for details.
-
- ZipDLL uses the ZipArchive library from http://www.artpol-software.com/index_zip.html
- Please read the file ZipArchive\license.txt for details
-
- Alternative license for use with proprietary software:
- ------------------------------------------------------
-
- Since ZipArchive is licensed under the GPL, it may only be used with programs with a
- GPL compatible license, the same applies to this DLL.
- You can, however obtain a commercial license (free of charge for freeware and most
- shareware programs) for ZipArchive. Please read ZipArchive\license.txt for details.
- Permission is granted to use ZipDLL together with prorietary software when you've
- obtained a license for ZipArchive.
-
-Version History
----------------
-
-1.2.2a
-------
-
-- added Croatian and Hungarian language
-
-1.2.2
------
-
-- Added a lot of languages
-- Some improvements for ZipDll.nsh made by deguix
-
-1.2.1
------
-
-- Made compatible with NSIS 2b3
-
-1.2
----
-
-- Added macros for automatic language selection
-- Translation possible, works like /translate switch for NsisDL plugin
-
-1.1
----
-
-- made compatible with latest NSIS (parameters on stack swapped)
-- cleaned up code
-
-1.0
----
-
-- initial release
-
\ No newline at end of file
diff --git a/build/WINDOWS/Plugins/ZipDLL/zipdll.nsh b/build/WINDOWS/Plugins/ZipDLL/zipdll.nsh
deleted file mode 100644
index bf50ee7..0000000
--- a/build/WINDOWS/Plugins/ZipDLL/zipdll.nsh
+++ /dev/null
@@ -1,417 +0,0 @@
-;ZipDLL include file for NSIS
-;Written by Tim Kosse (mailto:tim.kosse@gmx.de)
-;some improvements by deguix
-
-;Supported languages with their translators in alphabetical order:
-
-;Arabic translation by asdfuae
-;Brazilian Portuguese translation by "deguix"
-;Chinese, Simplified translation by Kii Ali
-;Chinese, Traditional traslation by "matini" and Kii Ali
-;Croatian translation by "iostriz"
-;Danish translation by Claus Futtrup
-;French translation by "veekee"
-;German translation by Tim Kosse
-;Hungarian translation by Toth Laszlo
-;Korean translation by Seongab Kim
-;Lithuanian translation by Vytautas Krivickas
-;Polish translation by Krzysztof Galuszka
-;Russion translation by Sergey
-;Spanish translation by "dark_boy"
-
-!ifndef ZIPDLL_USED
-
-!define ZIPDLL_USED
-
-!macro ZIPDLL_EXTRACT SOURCE DESTINATION FILE
-
- !define "FILE_${FILE}"
-
- !ifndef FILE_
- Push "${FILE}"
- !endif
-
- IfFileExists "${DESTINATION}" +2
- CreateDirectory "${DESTINATION}"
-
- Push "${DESTINATION}"
-
- IfFileExists "${SOURCE}" +2
- SetErrors
-
- Push "${SOURCE}"
-
- ;The strings that will be translated are (ready to copy,
- ;remove leading semicolons in your language block):
-
- !ifdef LANG_ENGLISH
-
- ;English is default language of ZipDLL, no need to push the untranslated strings
-
- ;StrCmp $LANGUAGE ${LANG_ENGLISH} 0 +1
-
- ;Push " Error: %s"
- ;Push "Could not get file attributes."
- ;Push "Error: Could not get file attributes."
- ;Push "Could not extract %s"
- ;Push " Error: Could not extract %s"
-
- ;!ifdef FILE_
- ;Push " Extract: %s"
- ;Push " Extracting %d files and directories"
- ;Push "Extracting contents of %s to %s"
- ;!else
- ;Push "Specified file does not exist in archive."
- ;Push "Error: Specified file does not exist in archive."
- ;Push "Extracting the file %s from %s to %s"
- ;!endif
-
- ;Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_HUNGARIAN
-
- StrCmp $LANGUAGE ${LANG_HUNGARIAN} 0 +10
-
- Push " Hiba: %s"
- Push "Nem olvashat� a f�jl attrib�tumai."
- Push "Hiba: Nem olvashat� a f�jl attrib�tumai."
- Push "Nem siker�lt kicsomagolni a(z) %s"
- Push " Hiba: Nem siker�lt kicsomagolni a(z) %s"
-
- !ifdef FILE_
- Push " Kicsomagol�s: %s"
- Push " %d f�jl �s mappa kicsomagol�sa"
- Push "%s tartalom kicsomagol�sa a %s helyre"
- !else
- Push "A megadott f�jl nem tal�lhat� az arh�vumban."
- Push "Hiba: A megadott f�jl nem tal�lhat� az arh�vumban."
- Push "%s f�jl kcsomagol�sa a(z) %s f�jlb�l a %s helyre"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_FRENCH
-
- StrCmp $LANGUAGE ${LANG_FRENCH} 0 +10
-
- Push " Erreur : %s"
- Push "Impossible de r�cup�rer les informations sur le fichier."
- Push "Erreur : Impossible de r�cup�rer les informations sur le fichier."
- Push "Impossible de d�compresser %s."
- Push " Erreur : Impossible de d�compresser %s."
-
- !ifdef FILE_
- Push " D�compression : %s"
- Push " D�compression de %d fichiers et r�pertoires"
- Push "D�compression des donn�es de %s vers %s"
- !else
- Push "Le fichier sp�cifi� n'existe pas dans l'archive"
- Push "Erreur : Le fichier sp�cifi� n'existe pas dans l'archive"
- Push "D�compression du fichier %s depuis %s vers %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_GERMAN
-
- StrCmp $LANGUAGE ${LANG_GERMAN} 0 +10
-
- Push " Fehler: %s"
- Push "Dateiattribute konnten nicht ermittelt werden."
- Push "Fehler: Dateiattribute konnten nicht ermittelt werden."
- Push "%s konnte nicht dekomprimiert werden."
- Push " Fehler: %s konnte nicht dekomprimiert werden."
-
- !ifdef FILE_
- Push " Dekomprimiere: %s"
- Push " Dekomprimiere %d Dateien und Verzeichnisse"
- Push "Dekomprimiere Inhalt von %s nach %s"
- !else
- Push "Die angegebene Datei existiert nicht im Archiv"
- Push "Fehler: Die angegebene Datei existiert nicht im Archiv"
- Push "Dekomprimiere Datei %s von %s nach %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_SPANISH
-
- StrCmp $LANGUAGE ${LANG_SPANISH} 0 +10
-
- Push " Error: %s"
- Push "No se obtuvieron atributos del archivo"
- Push "Error: No se obtuvieron atributos del archivo"
- Push "No se pudo extraer %s"
- Push " Error: No se pudo extraer %s"
-
- !ifdef FILE_
- Push " Extraer: %s"
- Push " Extrayendo %d archivos y directorios"
- Push "Extraer archivos de %s a %s"
- !else
- Push "Archivo especificado no existe en el ZIP"
- Push "Error: El archivo especificado no existe en el ZIP"
- Push "Extrayendo el archivo %s de %s a %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_PORTUGUESEBR
-
- StrCmp $LANGUAGE ${LANG_PORTUGUESEBR} 0 +10
-
- Push " Erro: %s"
- Push "N�o se pode ler os atributos do arquivo"
- Push "Error: N�o se pode ler os atributos do arquivo"
- Push "N�o se pode extrair %s"
- Push " Erro: N�o se pode extrair %s"
-
- !ifdef FILE_
- Push " Extraindo: %s"
- Push " Extraindo %d arquivos e diret�rios"
- Push "Extraindo arquivos de %s a %s"
- !else
- Push "O arquivo especificado n�o existe no ZIP"
- Push "Erro: O arquivo especificado n�o existe no ZIP"
- Push "Extraindo o arquivo %s de %s a %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_TRADCHINESE
-
- StrCmp $LANGUAGE ${LANG_TRADCHINESE} 0 +11
-
- Push " ���~: %s"
- Push "�L�k���o�ɮ��ݩʡC"
- Push "���~: �L�k���o�ɮ��ݩʡC"
- Push "�L�k�����Y %s"
- Push " ���~�G�L�k�����Y %s"
-
- !ifdef FILE_
- Push " �����Y�G%s"
- Push " ���b�����Y %d �ɮP�ؿ�"
- Push "���b�����Y %s �����e�� %s"
- !else
- Push "���w���ɮרä��s�b�����Y�]�C"
- Push "���~�G���w���ɮרä��s�b�����Y�]�C"
- Push "���b�����Y�ɮ� %s �A�q %s �� %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_SIMPCHINESE
-
- StrCmp $LANGUAGE ${LANG_SIMPCHINESE} 0 +11
-
- Push " ����: %s"
- Push "��ȡ���ļ����ԡ�"
- Push "����: ��ȡ���ļ����ԡ�"
- Push "����ѹ�� %s"
- Push " ��������ѹ�� %s"
-
- !ifdef FILE_
- Push " ��ѹ����%s"
- Push " ���ڽ�ѹ�� %d �ļ���Ŀ¼"
- Push "���ڽ�ѹ�� %s �����ݵ� %s"
- !else
- Push "ָ�����ļ�����������ѹ������"
- Push "����ָ�����ļ�����������ѹ������"
- Push "���ڽ�ѹ���ļ� %s ���� %s �� %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_LITHUANIAN
-
- StrCmp $LANGUAGE ${LANG_LITHUANIAN} 0 +10
-
- Push " Klaida: %s"
- Push "Negaleta gauti bylos nuorodu."
- Push "Klaida: Negaleta gauti bylos nuorodu."
- Push "Negaleta i�traukti %s"
- Push " Klaida: Negaleta i�traukti %s"
-
- !ifdef FILE_
- Push " I�traukiam : %s"
- Push " I�traukiame %d bylas ir katalogus"
- Push "I�traukiame viska is %s i %s"
- !else
- Push "Parinkta byla nesurasta �iame archyve."
- Push "Klaida: Parinkta byla nesurasta �iame archyve."
- Push "I�traukiame byla %s i� %s i %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef "LANG_POLISH"
-
- strcmp $LANGUAGE ${LANG_POLISH} 0 +10
-
- Push " B��d: %s"
- Push "Nie mo�e pobra� atrybutu pliku."
- Push "B��d: Nie mo�e pobra� atrybutu pliku."
- Push "Nie mo�e rozpakowa� %s."
- Push " B��d: Nie mo�e rozpakowa� %s."
-
- !ifdef FILE_
- Push " Rozpakuj: %s"
- Push " Rozpakowywanie %d plik�w i katalog�w"
- Push "Rozpakowywanie zawarto�ci %s do %s"
- !else
- Push "Plik nie istnieje w archiwum"
- Push "B��d: Plik nie istnieje w archiwum"
- Push "Rozpakowywanie pliku %s z %s do %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef "LANG_KOREAN"
- strcmp $LANGUAGE ${LANG_KOREAN} 0 +10
- Push " ���� : %s"
- Push "ȭ�� �Ӽ��� ���� �� �����ϴ�."
- Push "����: ȭ�� �Ӽ��� ���� �� �����ϴ�."
- Push "%s��(��) Ǯ �� �����ϴ�."
- Push " ����: %s��(��) Ǯ �� �����ϴ�."
-
- !ifdef FILE_
- Push " Ǯ�� : %s"
- Push " %d���� ���ϰ� ������ Ǫ�� ��"
- Push "%s�� ������ %s�� Ǫ�� ��"
- !else
- Push "������ ������ ���� ���� �ȿ� �����ϴ�."
- Push "����: ������ ������ ���� ���� �ȿ� �����ϴ�."
- Push "%s ������ %s���� %s�� Ǫ�� ��"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef "LANG_RUSSIAN"
-
- strcmp $LANGUAGE ${LANG_RUSSIAN} 0 +10
-
- Push " ������: %s"
- Push "�� ���� �������� �������� �����."
- Push "������: �� ���� �������� �������� �����."
- Push "�� ���� ������� %s"
- Push " ������: �� ���� ������� %s"
-
- !ifdef LANG_
- Push " �������� : %s"
- Push " ���������� %d ������ � �����"
- Push "������ ����������� ������ �� %s � %s"
- !else
- Push "����������� ���� �� ��������� � ������."
- Push "������: S����������� ���� �� ��������� � ������."
- Push "���������� ����� %s �� %s � %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_ARABIC
-
- StrCmp $LANGUAGE ${LANG_ARABIC} 0 +10
-
- Push " ����: %s"
- Push "�� ���� ��� ����� �����."
- Push "����: �� ���� ��� ����� �����."
- Push "�� ���� ������� %s"
- Push " ����: �� ���� ������� %s"
-
- !ifdef FILE_
- Push " ������� : %s"
- Push " ������� ������ � ����� %d"
- Push "������� ������� %s ��� %s"
- !else
- Push "����� ��� ����� �� �����."
- Push "����: ����� ��� ����� �� �����."
- Push "������� ����� %s �� %s ��� %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_DANISH
-
- StrCmp $LANGUAGE ${LANG_DANISH} 0 +10
-
- Push " Fejl: %s"
- Push "Kunne ikke l�se fil attributter."
- Push "Fejl: Kunne ikke l�se fil attributter."
- Push "Kunne ikke udpakke %s"
- Push " Fejl: Kunne ikke udpakke %s"
-
- !ifdef FILE_
- Push " Udpakker: %s"
- Push " Udpakker %d filer og mapper"
- Push "Udpakker indhold fra %s til %s"
- !else
- Push "Specificeret fil eksisterer ikke i filarkivet"
- Push "Fejl: Specificeret fil eksisterer ikke i filarkivet"
- Push "Udpakker fil %s fra %s til %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_CROATIAN
-
- StrCmp $LANGUAGE ${LANG_CROATIAN} 0 +10
-
- Push " Gre�ka: %s"
- Push "Ne mogu dohvatiti atribute datoteke."
- Push "Gre�ka: Ne mogu dohvatiti atribute datoteke."
- Push "Ne mogu ekstrahirati %s"
- Push " Gre�ka: Ne mogu ekstrahirati %s"
-
- !ifdef FILE_
- Push " Ekstrakcija: %s"
- Push " Ekstrakcija %d datoteka i mapa"
- Push "Ekstrakcija sadr�aja %s u %s"
- !else
- Push "Tra�ena datoteka ne postoji u arhivi."
- Push "Gre�ka: Tra�ena datoteka ne postoji u arhivi."
- Push "Ekstrakcija datoteke %s iz %s u %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef FILE_
- ZipDLL::extractall
- !else
- ZipDLL::extractfile
- !endif
-
- !undef "FILE_${FILE}"
-
-!macroend
-
-!endif
diff --git a/build/WINDOWS/TvhLib.nsh b/build/WINDOWS/TvhLib.nsh
index 2b71402..afc0d5d 100644
--- a/build/WINDOWS/TvhLib.nsh
+++ b/build/WINDOWS/TvhLib.nsh
@@ -28,7 +28,9 @@ Function DataFolderPage
Abort
${EndIf}
CreateDirectory "$DataFolder"
- ${NSD_CreateLabel} 0 0 100% 24u "Please specify the Cabernet data folder. Writeable by the user: System"
+ ${NSD_CreateLabel} 0 0 100% 24u "Please specify the Cabernet data folder. \
+ Writeable by the user: System$\r$\nIt is highly recommended to have \
+ this folder be easy to access."
${NSD_CreateGroupBox} 0 40u 100% 34u "Data Folder"
${NSD_CreateText} 3% 54u 77% 12u "$DataFolder"
Pop $txtDataFolder
@@ -183,10 +185,12 @@ Function AddFiles
File "${SOURCEPATH}\TVHEADEND.md"
File "${SOURCEPATH}\requirements.txt"
Rename "$INSTDIR\TVHEADEND.md" "$INSTDIR\README.txt"
- SetOutPath "$INSTDIR"
- File /r /x __pycache__ /x development "${SOURCEPATH}\lib"
- SetOutPath "$INSTDIR"
- File /r /x __pycache__ "${SOURCEPATH}\plugins"
+
+ SetOutPath "$INSTDIR\lib"
+ File /r /x __pycache__ /x development "${SOURCEPATH}\lib\*.*"
+ SetOutPath "$INSTDIR\plugins"
+ File /r /x __pycache__ "${SOURCEPATH}\plugins\*.*"
+
SetOutPath "$INSTDIR\build\WINDOWS"
File "${SOURCEPATH}\build\WINDOWS\UpdateConfig.pyw"
FunctionEnd
diff --git a/build/WINDOWS/buildwin.nsi b/build/WINDOWS/buildwin.nsi
index ede0743..ab22280 100644
--- a/build/WINDOWS/buildwin.nsi
+++ b/build/WINDOWS/buildwin.nsi
@@ -6,7 +6,7 @@
!define PRODUCT_NAME "cabernet"
!define PRODUCT_VERSION ${VERSION}
!define PRODUCT_PUBLISHER "rocky4546"
-!define PRODUCT_WEB_SITE "http://www.mycompany.com"
+!define PRODUCT_WEB_SITE "https://github.com/cabernetwork/cabernet"
!define PRODUCT_DIR_REGKEY "Software\Microsoft\Windows\CurrentVersion\App Paths\tvh_main.py"
!define PRODUCT_UNINST_KEY "Software\Microsoft\Windows\CurrentVersion\Uninstall\${PRODUCT_NAME}"
!define PRODUCT_UNINST_ROOT_KEY "HKLM"
@@ -21,14 +21,12 @@
; MUI 1.67 compatible ------
!addplugindir '.\Plugins\inetc\Plugins\x86-unicode'
-!addplugindir '.\Plugins\ZipDLL'
!include "MUI.nsh"
!include "MultiUser.nsh"
!include "MUI2.nsh"
!include nsDialogs.nsh
!include "TvhLib.nsh"
-!include "ZipDLL.nsh"
; MUI Settings
!define MUI_ABORTWARNING
@@ -152,7 +150,12 @@ Section "Install FFMPEG" SEC04
Click OK to abort installation" /SD IDOK
Abort
dlok:
- ZipDLL::extractall "$TEMP\ffmpeg.zip" "$TEMP\ffmpeg"
+ StrCpy $cmd 'powershell expand-archive \"$TEMP\ffmpeg.zip\" \"$TEMP\ffmpeg\"'
+ nsExec::ExecToStack '$cmd'
+ Pop $0 ;return value
+ Pop $1 ; status text
+ ;MessageBox MB_OK "FFMPEG Extract Status:$0 $1"
+
StrCpy $subfolder "$TEMP\ffmpeg\ffmpeg*.*"
Call GetSubfolder
StrCmp $subfolder "" empty
@@ -164,6 +167,7 @@ Section "Install FFMPEG" SEC04
DELETE "$TEMP\ffmpeg.zip"
RMDIR /r "$TEMP\ffmpeg\*.*"
RMDIR "$TEMP\ffmpeg"
+ SetOutPath "$INSTDIR"
SectionEnd
@@ -220,9 +224,16 @@ FunctionEnd
Function un.onInit
+ Var /GLOBAL remove_all
!insertmacro MULTIUSER_UNINIT
- MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 "Are you sure you want to completely remove $(^Name) and all of its components?" IDYES +2
+ MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 "Are you sure you want to completely remove $(^Name)?" IDYES +2
Abort
+ MessageBox MB_ICONQUESTION|MB_YESNO|MB_DEFBUTTON2 "Do you want to remove all data and plugins?" IDYES true2
+ StrCpy $remove_all "0"
+ Goto end2
+ true2:
+ StrCpy $remove_all "1"
+ end2:
FunctionEnd
@@ -238,9 +249,24 @@ Section Uninstall
Call un.installService
${EndIf}
- #Delete "$INSTDIR\${PRODUCT_NAME}.url"
- #Delete "$INSTDIR\uninst.exe"
- RMDIR /r "$INSTDIR\*.*"
+ ${If} $remove_all == "1"
+ RMDIR /r "$INSTDIR\*.*"
+ ${Else}
+ #Delete Cabernet folders
+ RMDIR /r "$INSTDIR\build"
+ RMDIR /r "$INSTDIR\lib"
+ RMDIR /r "$INSTDIR\plugins"
+
+ #Delete Cabernet files
+ Delete "$INSTDIR\CHANGE*.*"
+ Delete "$INSTDIR\Dock*"
+ Delete "$INSTDIR\LIC*"
+ Delete "$INSTDIR\READ*.*"
+ Delete "$INSTDIR\req*.*"
+ Delete "$INSTDIR\tvh*.*"
+ Delete "$INSTDIR\uninst.exe"
+ Delete "$INSTDIR\${PRODUCT_NAME}.url"
+ ${EndIf}
Delete "$SMPROGRAMS\$ICONS_GROUP\Uninstall.lnk"
Delete "$SMPROGRAMS\$ICONS_GROUP\Website.lnk"
@@ -248,7 +274,10 @@ Section Uninstall
Delete "$SMPROGRAMS\$ICONS_GROUP\cabernet.lnk"
RMDir "$SMPROGRAMS\$ICONS_GROUP"
- RMDir "$INSTDIR"
+
+ ${If} $remove_all == "1"
+ RMDir "$INSTDIR"
+ ${EndIf}
DeleteRegKey ${PRODUCT_UNINST_ROOT_KEY} "${PRODUCT_UNINST_KEY}"
DeleteRegKey HKLM "${PRODUCT_DIR_REGKEY}"
diff --git a/build/WINDOWS/zipdll.nsh b/build/WINDOWS/zipdll.nsh
deleted file mode 100644
index 6237df0..0000000
--- a/build/WINDOWS/zipdll.nsh
+++ /dev/null
@@ -1,132 +0,0 @@
-;ZipDLL include file for NSIS
-;Written by Tim Kosse (mailto:tim.kosse@gmx.de)
-;some improvements by deguix
-
-;Supported languages with their translators in alphabetical order:
-
-;Arabic translation by asdfuae
-;Brazilian Portuguese translation by "deguix"
-;Chinese, Simplified translation by Kii Ali
-;Chinese, Traditional traslation by "matini" and Kii Ali
-;Croatian translation by "iostriz"
-;Danish translation by Claus Futtrup
-;French translation by "veekee"
-;German translation by Tim Kosse
-;Hungarian translation by Toth Laszlo
-;Korean translation by Seongab Kim
-;Lithuanian translation by Vytautas Krivickas
-;Polish translation by Krzysztof Galuszka
-;Russion translation by Sergey
-;Spanish translation by "dark_boy"
-
-!ifndef ZIPDLL_USED
-
-!define ZIPDLL_USED
-
-!macro ZIPDLL_EXTRACT SOURCE DESTINATION FILE
-
- !define "FILE_${FILE}"
-
- !ifndef FILE_
- Push "${FILE}"
- !endif
-
- IfFileExists "${DESTINATION}" +2
- CreateDirectory "${DESTINATION}"
-
- Push "${DESTINATION}"
-
- IfFileExists "${SOURCE}" +2
- SetErrors
-
- Push "${SOURCE}"
-
- ;The strings that will be translated are (ready to copy,
- ;remove leading semicolons in your language block):
-
- !ifdef LANG_ENGLISH
-
- ;English is default language of ZipDLL, no need to push the untranslated strings
-
- ;StrCmp $LANGUAGE ${LANG_ENGLISH} 0 +1
-
- ;Push " Error: %s"
- ;Push "Could not get file attributes."
- ;Push "Error: Could not get file attributes."
- ;Push "Could not extract %s"
- ;Push " Error: Could not extract %s"
-
- ;!ifdef FILE_
- ;Push " Extract: %s"
- ;Push " Extracting %d files and directories"
- ;Push "Extracting contents of %s to %s"
- ;!else
- ;Push "Specified file does not exist in archive."
- ;Push "Error: Specified file does not exist in archive."
- ;Push "Extracting the file %s from %s to %s"
- ;!endif
-
- ;Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_GERMAN
-
- StrCmp $LANGUAGE ${LANG_GERMAN} 0 +10
-
- Push " Fehler: %s"
- Push "Dateiattribute konnten nicht ermittelt werden."
- Push "Fehler: Dateiattribute konnten nicht ermittelt werden."
- Push "%s konnte nicht dekomprimiert werden."
- Push " Fehler: %s konnte nicht dekomprimiert werden."
-
- !ifdef FILE_
- Push " Dekomprimiere: %s"
- Push " Dekomprimiere %d Dateien und Verzeichnisse"
- Push "Dekomprimiere Inhalt von %s nach %s"
- !else
- Push "Die angegebene Datei existiert nicht im Archiv"
- Push "Fehler: Die angegebene Datei existiert nicht im Archiv"
- Push "Dekomprimiere Datei %s von %s nach %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
- !ifdef LANG_SPANISH
-
- StrCmp $LANGUAGE ${LANG_SPANISH} 0 +10
-
- Push " Error: %s"
- Push "No se obtuvieron atributos del archivo"
- Push "Error: No se obtuvieron atributos del archivo"
- Push "No se pudo extraer %s"
- Push " Error: No se pudo extraer %s"
-
- !ifdef FILE_
- Push " Extraer: %s"
- Push " Extrayendo %d archivos y directorios"
- Push "Extraer archivos de %s a %s"
- !else
- Push "Archivo especificado no existe en el ZIP"
- Push "Error: El archivo especificado no existe en el ZIP"
- Push "Extrayendo el archivo %s de %s a %s"
- !endif
-
- Push "/TRANSLATE"
-
- !endif
-
-
- !ifdef FILE_
- ZipDLL::extractall
- !else
- ZipDLL::extractfile
- !endif
-
- !undef "FILE_${FILE}"
-
-!macroend
-
-!endif
diff --git a/docker-compose.yml b/docker-compose.yml
index 9ece127..c70a82c 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,8 +1,22 @@
-locast2plex:
- image: tgorg/locast2plex
- ports:
- - "6077:6077"
- - "1900:1900/udp"
- restart: unless-stopped
- volumes:
- - ./config.ini:/app/config/config.ini
+# NOTE: Cabernet maintains its own versions and updates, which will conflict with Docker's
+#       image versioning.
+# The recommendation is to keep the entire Cabernet source folder in its own volume:
+# whether you mount /app or /app/cabernet, the whole Cabernet folder should be a volume.
+# Then run the python script tvh_main.py at the top level of the Cabernet folder to start Cabernet.
+
+version: '2.4'
+services:
+ cabernet:
+ container_name: cabernet
+ image: ghcr.io/cabernetwork/cabernet:latest
+ environment:
+ - TZ="America/New_York"
+ - PUID=1000
+ - PGID=1000
+ ports:
+ - "5004:5004" # Port used to stream
+ - "6077:6077" # Web Interface Port
+ restart: unless-stopped
+ volumes:
+ - ./docker/cabernet/config/app:/app
+ - ./.cabernet/key.txt:/root/.cabernet/key.txt
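
Given the web UI port 6077 and stream port 5004 published by the compose service above, and the httpx[http2] dependency added in the Dockerfiles, a running container can be smoke-tested by pulling one of the README's XML/JSON links. The snippet below is a sketch, not part of the diff; the host, port, and script name are assumptions for a default local install:

```python
# poll_cabernet.py -- hypothetical smoke test against a local Cabernet container.
import httpx

BASE_URL = 'http://127.0.0.1:6077'  # assumed default web UI port from docker-compose.yml

def fetch_playlist(path: str = '/channels.m3u') -> str:
    # http2=True needs the httpx[http2] extra installed by the Dockerfile;
    # over plain HTTP the client simply falls back to HTTP/1.1.
    with httpx.Client(http2=True, timeout=10.0) as client:
        response = client.get(BASE_URL + path)
        response.raise_for_status()
        return response.text

if __name__ == '__main__':
    # The first line of a healthy playlist is the '#EXTM3U' format descriptor.
    print(fetch_playlist().splitlines()[0])
```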
diff --git a/lib/clients/channels/channels.py b/lib/clients/channels/channels.py
index 44ec53c..8a796a5 100644
--- a/lib/clients/channels/channels.py
+++ b/lib/clients/channels/channels.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -40,68 +40,67 @@ def playlist(_webserver):
@getrequest.route('/channels.m3u')
def channels_m3u(_webserver):
_webserver.do_mime_response(200, 'audio/x-mpegurl', get_channels_m3u(
- _webserver.config, _webserver.stream_url,
- _webserver.query_data['name'],
+ _webserver.config, _webserver.stream_url,
+ _webserver.query_data['name'],
_webserver.query_data['instance'],
_webserver.plugins.plugins
- ))
+ ))
@getrequest.route('/lineup.xml')
def lineup_xml(_webserver):
_webserver.do_mime_response(200, 'application/xml', get_channels_xml(
_webserver.config, _webserver.stream_url,
- _webserver.query_data['name'],
+ _webserver.query_data['name'],
_webserver.query_data['instance'],
_webserver.plugins.plugins
- ))
+ ))
@getrequest.route('/lineup.json')
def lineup_json(_webserver):
_webserver.do_mime_response(200, 'application/json', get_channels_json(
- _webserver.config, _webserver.stream_url,
- _webserver.query_data['name'],
+ _webserver.config, _webserver.stream_url,
+ _webserver.query_data['name'],
_webserver.query_data['instance'],
_webserver.plugins.plugins
- ))
+ ))
def get_channels_m3u(_config, _base_url, _namespace, _instance, _plugins):
-
format_descriptor = '#EXTM3U'
record_marker = '#EXTINF'
+ ch_obj = ChannelsURL(_config, _base_url)
db = DBChannels(_config)
ch_data = db.get_channels(_namespace, _instance)
fakefile = StringIO()
fakefile.write(
- '%s\n' % format_descriptor
- )
+ '%s\n' % format_descriptor
+ )
sids_processed = []
for sid, sid_data_list in ch_data.items():
for sid_data in sid_data_list:
if sid in sids_processed:
continue
- sids_processed.append(sid)
- if not sid_data['enabled']:
- continue
- if not _plugins[sid_data['namespace']].enabled:
+ if not sid_data['enabled'] \
+ or not _plugins.get(sid_data['namespace']) \
+ or not _plugins[sid_data['namespace']].enabled:
continue
if not _plugins[sid_data['namespace']] \
- .plugin_obj.instances[sid_data['instance']].enabled:
+ .plugin_obj.instances[sid_data['instance']].enabled:
continue
config_section = utils.instance_config_section(sid_data['namespace'], sid_data['instance'])
if not _config[config_section]['enabled']:
continue
+ sids_processed.append(sid)
stream = _config[config_section]['player-stream_type']
- if stream == 'm3u8redirect':
+ if stream == 'm3u8redirect' and sid_data['json'].get('stream_url'):
uri = sid_data['json']['stream_url']
else:
- uri = '{}{}/{}/watch/{}'.format(
- 'http://', _base_url, sid_data['namespace'], str(sid))
-
+ uri = ch_obj.set_uri(sid_data)
+
# NOTE tvheadend supports '|' separated names in two attributes
# either 'group-title' or 'tvh-tags'
# if a ';' is used in group-title, tvheadend will use the
@@ -119,21 +118,20 @@ def get_channels_m3u(_config, _base_url, _namespace, _instance, _plugins):
groups += '|' + sid_data['json']['group_sdtv']
updated_chnum = utils.wrap_chnum(
- str(sid_data['display_number']), sid_data['namespace'],
+ str(sid_data['display_number']), sid_data['namespace'],
sid_data['instance'], _config)
- ch_obj = ChannelsURL(_config)
service_name = ch_obj.set_service_name(sid_data)
fakefile.write(
'%s\n' % (
- record_marker + ':-1' + ' ' +
- 'channelID=\'' + sid + '\' ' +
- 'tvg-num=\'' + updated_chnum + '\' ' +
- 'tvg-chno=\'' + updated_chnum + '\' ' +
- 'tvg-name=\'' + sid_data['display_name'] + '\' ' +
- 'tvg-id=\'' + sid + '\' ' +
- (('tvg-logo=\'' + sid_data['thumbnail'] + '\' ')
- if sid_data['thumbnail'] else '') +
- 'group-title=\''+groups+'\',' + service_name
+ record_marker + ':-1' + ' ' +
+ 'channelID=\'' + sid + '\' ' +
+ 'tvg-num=\'' + updated_chnum + '\' ' +
+ 'tvg-chno=\'' + updated_chnum + '\' ' +
+ 'tvg-name=\'' + sid_data['display_name'] + '\' ' +
+ 'tvg-id=\'' + sid + '\' ' +
+ (('tvg-logo=\'' + sid_data['thumbnail'] + '\' ')
+ if sid_data['thumbnail'] else '') +
+ 'group-title=\'' + groups + '\',' + service_name
)
)
fakefile.write(
@@ -145,9 +143,10 @@ def get_channels_m3u(_config, _base_url, _namespace, _instance, _plugins):
)
return fakefile.getvalue()
-
+
def get_channels_json(_config, _base_url, _namespace, _instance, _plugins):
db = DBChannels(_config)
+ ch_obj = ChannelsURL(_config, _base_url)
ch_data = db.get_channels(_namespace, _instance)
return_json = ''
sids_processed = []
@@ -158,36 +157,38 @@ def get_channels_json(_config, _base_url, _namespace, _instance, _plugins):
sids_processed.append(sid)
if not sid_data['enabled']:
continue
+ if not _plugins.get(sid_data['namespace']):
+ continue
if not _plugins[sid_data['namespace']].enabled:
continue
if not _plugins[sid_data['namespace']] \
- .plugin_obj.instances[sid_data['instance']].enabled:
+ .plugin_obj.instances[sid_data['instance']].enabled:
continue
config_section = utils.instance_config_section(sid_data['namespace'], sid_data['instance'])
if not _config[config_section]['enabled']:
continue
+ sids_processed.append(sid)
stream = _config[config_section]['player-stream_type']
if stream == 'm3u8redirect':
uri = sid_data['json']['stream_url']
else:
- uri = _base_url + '/' + sid_data['namespace'] + '/watch/' + sid
+ uri = ch_obj.set_uri(sid_data)
updated_chnum = utils.wrap_chnum(
- str(sid_data['display_number']), sid_data['namespace'],
+ str(sid_data['display_number']), sid_data['namespace'],
sid_data['instance'], _config)
- return_json = return_json + \
- ch_templates['jsonLineup'].format(
- sid,
- sid_data['json']['callsign'],
- updated_chnum,
- sid_data['display_name'],
- uri,
- sid_data['json']['HD'])
+ return_json = return_json + ch_templates['jsonLineup'].format(
+ sid_data['json']['callsign'],
+ updated_chnum,
+ sid_data['display_name'],
+ uri,
+ sid_data['json']['HD'])
return_json = return_json + ','
return "[" + return_json[:-1] + "]"
def get_channels_xml(_config, _base_url, _namespace, _instance, _plugins):
db = DBChannels(_config)
+ ch_obj = ChannelsURL(_config, _base_url)
ch_data = db.get_channels(_namespace, _instance)
return_xml = ''
sids_processed = []
@@ -195,39 +196,43 @@ def get_channels_xml(_config, _base_url, _namespace, _instance, _plugins):
for sid_data in sid_data_list:
if sid in sids_processed:
continue
- sids_processed.append(sid)
if not sid_data['enabled']:
continue
+ if not _plugins.get(sid_data['namespace']):
+ continue
if not _plugins[sid_data['namespace']].enabled:
continue
if not _plugins[sid_data['namespace']] \
- .plugin_obj.instances[sid_data['instance']].enabled:
+ .plugin_obj.instances[sid_data['instance']].enabled:
continue
+
config_section = utils.instance_config_section(sid_data['namespace'], sid_data['instance'])
if not _config[config_section]['enabled']:
continue
+ sids_processed.append(sid)
stream = _config[config_section]['player-stream_type']
if stream == 'm3u8redirect':
uri = sid_data['json']['stream_url']
+ uri = escape(uri)
else:
- uri = _base_url + '/' + sid_data['namespace'] + '/watch/' + sid
+ uri = escape(ch_obj.set_uri(sid_data))
updated_chnum = utils.wrap_chnum(
- str(sid_data['display_number']), sid_data['namespace'],
+ str(sid_data['display_number']), sid_data['namespace'],
sid_data['instance'], _config)
- return_xml = return_xml + \
- ch_templates['xmlLineup'].format(
- updated_chnum,
- escape(sid_data['display_name']),
- uri,
- sid_data['json']['HD'])
+ return_xml = return_xml + ch_templates['xmlLineup'].format(
+ updated_chnum,
+ escape(sid_data['display_name']),
+ uri,
+ sid_data['json']['HD'])
return "" + return_xml + " "
class ChannelsURL:
- def __init__(self, _config):
+ def __init__(self, _config, _base_url):
self.logger = logging.getLogger(__name__)
self.config = _config
+ self.base_url = _base_url
def update_channels(self, _namespace, _query_data):
db = DBChannels(self.config)
@@ -241,9 +246,10 @@ def update_channels(self, _namespace, _query_data):
value = values[0]
if name == 'enabled':
value = int(value)
-
+
db_value = None
- for ch_db in ch_data[uid]:
+ ch_db = None
+ for ch_db in ch_data[uid]:
if ch_db['instance'] == instance:
db_value = ch_db[name]
break
@@ -252,6 +258,12 @@ def update_channels(self, _namespace, _query_data):
lookup_name = self.translate_main2json(name)
if lookup_name is not None:
value = ch_db['json'][lookup_name]
+ if name == 'display_number':
+ config_section = utils.instance_config_section(ch_db['namespace'], instance)
+ start_ch = self.config[config_section].get('channel-start_ch_num')
+ if start_ch > -1:
+ results += ''.join(['ERROR: Starting Ch Number setting is not default (-1) [', uid, '][', instance, '][', name, '] not changed', ' '])
+ continue
results += ''.join(['Updated [', uid, '][', instance, '][', name, '] to ', str(value), ' '])
ch_db[name] = value
if name == 'thumbnail':
@@ -277,10 +289,10 @@ def get_thumbnail_size(self, _thumbnail):
if _thumbnail is None or _thumbnail == '':
return thumbnail_size
h = {'User-Agent': utils.DEFAULT_USER_AGENT,
- 'Accept': '*/*',
- 'Accept-Encoding': 'identity',
- 'Connection': 'Keep-Alive'
- }
+ 'Accept': '*/*',
+ 'Accept-Encoding': 'identity',
+ 'Connection': 'Keep-Alive'
+ }
req = urllib.request.Request(_thumbnail, headers=h)
with urllib.request.urlopen(req) as resp:
img_blob = resp.read()
@@ -294,10 +306,22 @@ def set_service_name(self, _sid_data):
Returns the service name used to sync with the EPG channel name
"""
updated_chnum = utils.wrap_chnum(
- str(_sid_data['display_number']), _sid_data['namespace'],
+ str(_sid_data['display_number']), _sid_data['namespace'],
_sid_data['instance'], self.config)
if self.config['epg']['epg_channel_number']:
return updated_chnum + \
' ' + _sid_data['display_name']
else:
return _sid_data['display_name']
+
+ def set_uri(self, _sid_data):
+ if self.config['epg']['epg_use_channel_number']:
+ updated_chnum = utils.wrap_chnum(
+ str(_sid_data['display_number']), _sid_data['namespace'],
+ _sid_data['instance'], self.config)
+ uri = '{}{}/{}/auto/v{}'.format(
+ 'http://', self.base_url, _sid_data['namespace'], updated_chnum)
+ else:
+ uri = '{}{}/{}/watch/{}'.format(
+ 'http://', self.base_url, _sid_data['namespace'], str(_sid_data['uid']))
+ return uri
\ No newline at end of file
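
The new ChannelsURL.set_uri() above picks between a channel-number URL (/auto/v<num>) and a uid URL (/watch/<uid>) based on the epg_use_channel_number setting. The standalone sketch below (not part of the diff) mirrors that decision; wrap_chnum is a simplified stand-in for utils.wrap_chnum, which in the real code applies per-instance channel-number formatting:

```python
# Editorial sketch of the URI selection logic added in ChannelsURL.set_uri() above.

def wrap_chnum(display_number: str) -> str:
    # Stand-in: the real utils.wrap_chnum applies per-instance prefix/padding settings.
    return display_number

def set_uri(base_url: str, sid_data: dict, use_channel_number: bool) -> str:
    if use_channel_number:
        # e.g. http://192.168.1.10:5004/PlutoTV/auto/v5.1
        chnum = wrap_chnum(str(sid_data['display_number']))
        return 'http://{}/{}/auto/v{}'.format(base_url, sid_data['namespace'], chnum)
    # e.g. http://192.168.1.10:5004/PlutoTV/watch/x123
    return 'http://{}/{}/watch/{}'.format(base_url, sid_data['namespace'], str(sid_data['uid']))

if __name__ == '__main__':
    channel = {'display_number': '5.1', 'namespace': 'PlutoTV', 'uid': 'x123'}
    print(set_uri('192.168.1.10:5004', channel, use_channel_number=True))
    print(set_uri('192.168.1.10:5004', channel, use_channel_number=False))
```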
diff --git a/lib/clients/channels/channels_form_html.py b/lib/clients/channels/channels_form_html.py
index 2cc091c..291ccbf 100644
--- a/lib/clients/channels/channels_form_html.py
+++ b/lib/clients/channels/channels_form_html.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -16,17 +16,13 @@
substantial portions of the Software.
"""
-import io
-import urllib
-
-import lib.image_size.get_image_size as get_image_size
import lib.common.utils as utils
from lib.common.decorators import getrequest
from lib.common.decorators import postrequest
from lib.clients.channels.channels import ChannelsURL
-@getrequest.route('/api/channels_form')
+@getrequest.route('/api/channelsform')
def get_channels_form_html(_webserver, _namespace=None, _sort_col=None, _sort_dir=None, filter_dict=None):
channels_form = ChannelsFormHTML(_webserver.channels_db, _webserver.config)
if _namespace is None:
@@ -37,10 +33,8 @@ def get_channels_form_html(_webserver, _namespace=None, _sort_col=None, _sort_di
_webserver.do_mime_response(200, 'text/html', form)
-@postrequest.route('/api/channels_form')
+@postrequest.route('/api/channelsform')
def post_channels_html(_webserver):
- # Take each key and make a [section][key] to store the value
- channel_changes = {}
namespace = _webserver.query_data['name'][0]
sort_col = _webserver.query_data['sort_col'][0]
sort_dir = _webserver.query_data['sort_dir'][0]
@@ -49,9 +43,9 @@ def post_channels_html(_webserver):
del _webserver.query_data['sort_dir']
del _webserver.query_data['sort_col']
filter_dict = get_filter_data(_webserver.query_data)
-
+
if sort_col is None:
- cu = ChannelsURL(_webserver.config)
+ cu = ChannelsURL(_webserver.config, _webserver.stream_url)
results = cu.update_channels(namespace, _webserver.query_data)
_webserver.do_mime_response(200, 'text/html', results)
else:
@@ -69,6 +63,8 @@ def __init__(self, _channels_db, _config):
self.num_enabled = 0
self.sort_column = None
self.sort_direction = None
+ self.ch_data = None
+ self.filter_dict = None
def get(self, _namespace, _sort_col, _sort_dir, _filter_dict):
self.sort_column = _sort_col
@@ -77,7 +73,7 @@ def get(self, _namespace, _sort_col, _sort_dir, _filter_dict):
self.filter_dict = _filter_dict
sort_data = self.get_db_sort_data(_sort_col, _sort_dir)
self.ch_data = self.db.get_sorted_channels(self.namespace, None, sort_data[0], sort_data[1])
- return ''.join([self.header,self.body])
+ return ''.join([self.header, self.body])
def get_db_sort_data(self, _sort_col, _sort_dir):
if _sort_dir == 'sortdesc':
@@ -116,29 +112,29 @@ def header(self):
'',
' ',
''
- ])
+ ])
@property
def form_header(self):
header_dir = {
- 'enabled':'sortnone',
- 'instance':'sortnone',
- 'num':'sortnone',
- 'name':'sortnone',
- 'group':'sortnone',
- 'thumbnail':'sortnone',
- 'metadata':'sortnone'
+ 'enabled': 'sortnone',
+ 'instance': 'sortnone',
+ 'num': 'sortnone',
+ 'name': 'sortnone',
+ 'group': 'sortnone',
+ 'thumbnail': 'sortnone',
+ 'metadata': 'sortnone'
}
header_dir[self.sort_column] = self.sort_direction
-
+
return ''.join([
' ',
' ',
' ',
'Total Unique Channels = ', str(self.num_of_channels), ' ',
- 'Total Enabled Unique Channels = ', str(self.num_enabled), ' '
+ 'Total Enabled Unique Channels = ', str(self.num_enabled), ' ',
'',
- 'Save changes ',
'
',
'',
@@ -147,11 +143,13 @@ def form_header(self):
'',
'',
' ',
- '
', self.form, ''
+ ])
@property
def db_updates(self):
html = ''.join([
'',
- '
',
- '
',
+ '
',
- '
',
+ '
',
- '
',
+ '
',
- ])
+ ''
+ '
',
+ ' ',
+ ])
return html
-
@property
def backups(self):
html = ''.join([
@@ -307,37 +305,37 @@ def backups(self):
])
backups_location = self.config['datamgmt']['backups-location']
folderlist = sorted(glob.glob(os.path.join(
- backups_location, BACKUP_FOLDER_NAME+'*')), reverse=True)
+ backups_location, BACKUP_FOLDER_NAME + '*')), reverse=True)
for folder in folderlist:
filename = os.path.basename(folder)
datetime_str = self.get_backup_date(filename)
if datetime_str is None:
continue
html = ''.join([html,
- '
',
- '',
- '',
- 'folder ',
- '',
- '',
- '', datetime_str, '
',
- '', folder, '
',
- '',
- '',
- 'delete_forever ',
- ' '
- ])
- html = ''.join([html,
- '',
- ''
- ])
+ '
',
+ '',
+ '',
+ 'folder ',
+ '',
+ '',
+ '', datetime_str, '
',
+ '', folder, '
',
+ '',
+ '',
+ 'delete_forever ',
+ ' '
+ ])
+ html = ''.join([html,
+ '',
+ ''
+ ])
return html
def del_backup(self, _folder):
- valid_regex = re.compile('^([a-zA-Z0-9_]+$)')
+ valid_regex = re.compile('^([a-zA-Z0-9_.]+$)')
if not valid_regex.match(_folder):
self.logger.info('Invalid backup folder to delete: {}'.format(_folder))
return
@@ -356,15 +354,15 @@ def restore_form(self, _folder):
html = ''.join([
''
- '
'
- ])
+ html = ''.join([html,
+ '
',
+ '',
+ ' ',
+ ' ',
+ ' ',
+ '',
+ bkup_defn[key]['label'],
+ ' ',
+ ' '
+ ])
+ html = ''.join([html,
+ '',
+ ' ',
+ ''
+ ])
return html
def get_backup_date(self, _filename):
try:
- datetime_obj = datetime.datetime.strptime(_filename,
- BACKUP_FOLDER_NAME + '_%Y%m%d_%H%M')
+ m = re.match(self.search_date, _filename)
+ if m and len(m.groups()) == 3:
+ ver = m.group(2)
+ if ver is None:
+ ver = ''
+ date_str = m.group(3)
+ datetime_obj = datetime.datetime.strptime(
+ date_str, '%Y%m%d_%H%M')
+ else:
+ raise ValueError('Filename incorrect format')
except ValueError as e:
- self.logger.info('Bad backup folder name {}: {}'.format(filename, e))
+ self.logger.info('Bad backup folder name {}: {}'.format(_filename, e))
return None
opersystem = platform.system()
if opersystem in ['Windows']:
- return datetime_obj.strftime('%m/%d/%Y, %#I:%M %p')
+ return datetime_obj.strftime('%m/%d/%Y, %#I:%M %p ' + str(ver))
else:
- return datetime_obj.strftime('%m/%d/%Y, %-I:%M %p')
+ return datetime_obj.strftime('%m/%d/%Y, %-I:%M %p ' + str(ver))
@property
def select_reset_channel(self):
@@ -418,12 +424,12 @@ def select_reset_channel(self):
'No Yes ',
'Plugin: ',
'ALL '
- ])
+ ])
for name in plugins_channel:
html_option = ''.join([html_option,
- '', name['namespace'], ' ',
- ])
- return ''.join([html_option, ' ' ])
+ '', name['namespace'], ' ',
+ ])
+ return ''.join([html_option, ''])
@property
def select_reset_epg(self):
@@ -432,19 +438,19 @@ def select_reset_epg(self):
plugin_epg = db_epg.get_epg_names()
plugin_programs = db_epg_programs.get_program_names()
- plugin_epg_names = [ s['namespace'] for s in plugin_epg ]
- plugin_programs_names = [ s['namespace'] for s in plugin_programs ]
- plugin_list = list(set(plugin_epg_names+plugin_programs_names))
-
+ plugin_epg_names = [s['namespace'] for s in plugin_epg]
+ plugin_programs_names = [s['namespace'] for s in plugin_programs]
+ plugin_list = list(set(plugin_epg_names + plugin_programs_names))
+
html_option = ''.join([
'
Plugin: ',
'ALL ',
- ])
+ ])
for name in plugin_list:
html_option = ''.join([html_option,
- '', name, ' ',
- ])
- return ''.join([html_option, ' ' ])
+ '', name, ' ',
+ ])
+ return ''.join([html_option, ''])
@property
def select_reset_sched(self):
@@ -453,12 +459,12 @@ def select_reset_sched(self):
html_option = ''.join([
'
Plugin: ',
'ALL ',
- ])
+ ])
for name in plugins_sched:
html_option = ''.join([html_option,
- '', name['namespace'], ' ',
- ])
- return ''.join([html_option, ' ' ])
+ '', name['namespace'], ' ',
+ ])
+ return ''.join([html_option, ''])
@property
def select_del_instance(self):
@@ -467,8 +473,11 @@ def select_del_instance(self):
name_inst_dict = db_plugins.get_instances()
for ns, inst_list in name_inst_dict.items():
for inst in inst_list:
- name_inst.append(''.join([
- ns, ':', inst]))
+ section = utils.instance_config_section(ns, inst)
+ if self.config.get(section) \
+ and self.config[section].get('enabled'):
+ name_inst.append(''.join([
+ ns, ':', inst]))
db_channels = DBChannels(self.config)
name_inst_list = db_channels.get_channel_instances()
self.update_ns_inst(name_inst, name_inst_list)
@@ -482,12 +491,12 @@ def select_del_instance(self):
html_option = ''.join([
'
Instance: ',
'None ',
- ])
+ ])
for name in name_inst:
html_option = ''.join([html_option,
- '', name, ' ',
- ])
- return ''.join([html_option, ' ' ])
+ '', name, ' ',
+ ])
+ return ''.join([html_option, ''])
def update_ns_inst(self, _name_inst, _name_inst_list):
for name_inst_dict in _name_inst_list:
@@ -496,7 +505,6 @@ def update_ns_inst(self, _name_inst, _name_inst_list):
name_inst_dict['namespace'],
':',
name_inst_dict['instance'],
- ])
+ ])
if ns_in not in _name_inst:
_name_inst.append(ns_in)
-
\ No newline at end of file
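Note on the get_backup_date() rewrite above: the strftime flags stay platform specific. `%#I` (Windows/MSVC) and `%-I` (POSIX/glibc) both drop the leading zero from the 12-hour field, and the optional version group captured by the regex is simply appended to the result. A minimal sketch of that split; `format_backup_time` is an illustrative helper, not part of the codebase:

```python
import datetime
import platform

def format_backup_time(dt, ver=''):
    # '%#I' (Windows) and '%-I' (POSIX) both strip the leading zero
    # from the 12-hour field; the flag differs per platform.
    if platform.system() == 'Windows':
        return dt.strftime('%m/%d/%Y, %#I:%M %p ' + ver)
    return dt.strftime('%m/%d/%Y, %-I:%M %p ' + ver)

# format_backup_time(datetime.datetime(2023, 5, 7, 9, 30), 'v0.9.4')
# -> '05/07/2023, 9:30 AM v0.9.4'
```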
diff --git a/lib/db/db.py b/lib/db/db.py
index 5ce80b6..4273e2d 100644
--- a/lib/db/db.py
+++ b/lib/db/db.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -20,10 +20,12 @@
import os
import pathlib
import random
+import shutil
import sqlite3
import threading
import time
+LOCK = threading.Lock()
DB_EXT = '.db'
BACKUP_EXT = '.sql'
@@ -34,13 +36,13 @@
SQL_UPDATE = '_update'
SQL_GET = '_get'
SQL_DELETE = '_del'
-
+FILE_LINK_ZIP = '_filelinks'
class DB:
conn = {}
def __init__(self, _config, _db_name, _sqlcmds):
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
self.config = _config
self.db_name = _db_name
self.sqlcmds = _sqlcmds
@@ -48,7 +50,6 @@ def __init__(self, _config, _db_name, _sqlcmds):
self.offset = -1
self.where = None
self.sqlcmd = None
-
self.db_fullpath = pathlib.Path(self.config['paths']['db_dir']) \
.joinpath(_db_name + DB_EXT)
if not os.path.exists(self.db_fullpath):
@@ -57,84 +58,108 @@ def __init__(self, _config, _db_name, _sqlcmds):
self.check_connection()
DB.conn[self.db_name][threading.get_ident()].commit()
- def sql_exec(self, _sqlcmd, _bindings=None):
+ def sql_exec(self, _sqlcmd, _bindings=None, _cursor=None):
try:
self.check_connection()
if _bindings:
- return DB.conn[self.db_name][threading.get_ident()].execute(_sqlcmd, _bindings)
+ if _cursor:
+ return _cursor.execute(_sqlcmd, _bindings)
+ else:
+ return DB.conn[self.db_name][threading.get_ident()].execute(_sqlcmd, _bindings)
else:
- return DB.conn[self.db_name][threading.get_ident()].execute(_sqlcmd)
+ if _cursor:
+ return _cursor.execute(_sqlcmd)
+ else:
+ return DB.conn[self.db_name][threading.get_ident()].execute(_sqlcmd)
except sqlite3.IntegrityError as e:
DB.conn[self.db_name][threading.get_ident()].close()
del DB.conn[self.db_name][threading.get_ident()]
raise e
def rnd_sleep(self, _sec):
- r = random.randrange(0,50)
+ r = random.randrange(0, 50)
sec = _sec + r / 100
time.sleep(sec)
def add(self, _table, _values):
+ self.logger.trace('DB add() called {}'.format(threading.get_ident()))
cur = None
sqlcmd = self.sqlcmds[''.join([_table, SQL_ADD_ROW])]
- i = 5
+ i = 10
while i > 0:
i -= 1
try:
- cur = self.sql_exec(sqlcmd, _values)
+ self.check_connection()
+ cur = DB.conn[self.db_name][threading.get_ident()].cursor()
+ self.sql_exec(sqlcmd, _values, cur)
DB.conn[self.db_name][threading.get_ident()].commit()
lastrow = cur.lastrowid
cur.close()
+ self.logger.trace('DB add() exit {}'.format(threading.get_ident()))
return lastrow
except sqlite3.OperationalError as e:
self.logger.warning('{} Add request ignored, retrying {}, {}'
- .format(self.db_name, i, e))
+ .format(self.db_name, i, e))
DB.conn[self.db_name][threading.get_ident()].rollback()
if cur is not None:
cur.close()
self.rnd_sleep(0.3)
+ self.logger.trace('DB add() exit {}'.format(threading.get_ident()))
return None
def delete(self, _table, _values):
+ self.logger.trace('DB delete() called {}'.format(threading.get_ident()))
cur = None
sqlcmd = self.sqlcmds[''.join([_table, SQL_DELETE])]
- i = 5
+ i = 10
while i > 0:
i -= 1
try:
- cur = self.sql_exec(sqlcmd, _values)
+ self.check_connection()
+ cur = DB.conn[self.db_name][threading.get_ident()].cursor()
+ self.sql_exec(sqlcmd, _values, cur)
num_deleted = cur.rowcount
DB.conn[self.db_name][threading.get_ident()].commit()
cur.close()
+ self.logger.trace('DB delete() exit {}'.format(threading.get_ident()))
return num_deleted
except sqlite3.OperationalError as e:
self.logger.warning('{} Delete request ignored, retrying {}, {}'
- .format(self.db_name, i, e))
+ .format(self.db_name, i, e))
DB.conn[self.db_name][threading.get_ident()].rollback()
if cur is not None:
cur.close()
self.rnd_sleep(0.3)
+ self.logger.trace('DB delete() exit {}'.format(threading.get_ident()))
return 0
def update(self, _table, _values=None):
+ self.logger.trace('DB update() called {}'.format(threading.get_ident()))
cur = None
sqlcmd = self.sqlcmds[''.join([_table, SQL_UPDATE])]
- i = 5
+ i = 10
while i > 0:
i -= 1
try:
- cur = self.sql_exec(sqlcmd, _values)
+ LOCK.acquire(True)
+ self.check_connection()
+ cur = DB.conn[self.db_name][threading.get_ident()].cursor()
+ self.sql_exec(sqlcmd, _values, cur)
DB.conn[self.db_name][threading.get_ident()].commit()
lastrow = cur.lastrowid
cur.close()
+ LOCK.release()
+ self.logger.trace('DB update() exit {}'.format(threading.get_ident()))
return lastrow
except sqlite3.OperationalError as e:
- self.logger.warning('{} Update request ignored, retrying {}, {}'
- .format(self.db_name, i, e))
+ self.logger.notice('{} Update request ignored, retrying {}, {}'
+ .format(self.db_name, i, e))
DB.conn[self.db_name][threading.get_ident()].rollback()
if cur is not None:
cur.close()
+ LOCK.release()
self.rnd_sleep(0.3)
+ self.logger.trace('DB update() exit {}'.format(threading.get_ident()))
return None
def commit(self):
@@ -143,17 +168,19 @@ def commit(self):
def get(self, _table, _where=None):
cur = None
sqlcmd = self.sqlcmds[''.join([_table, SQL_GET])]
- i = 5
+ i = 10
while i > 0:
i -= 1
try:
- cur = self.sql_exec(sqlcmd, _where)
+ self.check_connection()
+ cur = DB.conn[self.db_name][threading.get_ident()].cursor()
+ self.sql_exec(sqlcmd, _where, cur)
result = cur.fetchall()
cur.close()
return result
except sqlite3.OperationalError as e:
self.logger.warning('{} GET request ignored retrying {}, {}'
- .format(self.db_name, i, e))
+ .format(self.db_name, i, e))
DB.conn[self.db_name][threading.get_ident()].rollback()
if cur is not None:
cur.close()
@@ -166,23 +193,28 @@ def get_dict(self, _table, _where=None, sql=None):
sqlcmd = self.sqlcmds[''.join([_table, SQL_GET])]
else:
sqlcmd = sql
- i = 5
+ i = 10
while i > 0:
i -= 1
try:
- cur = self.sql_exec(sqlcmd, _where)
+ LOCK.acquire(True)
+ self.check_connection()
+ cur = DB.conn[self.db_name][threading.get_ident()].cursor()
+ self.sql_exec(sqlcmd, _where, cur)
records = cur.fetchall()
rows = []
for row in records:
rows.append(dict(zip([c[0] for c in cur.description], row)))
cur.close()
+ LOCK.release()
return rows
except sqlite3.OperationalError as e:
self.logger.warning('{} GET request ignored retrying {}, {}'
- .format(self.db_name, i, e))
+ .format(self.db_name, i, e))
DB.conn[self.db_name][threading.get_ident()].rollback()
if cur is not None:
cur.close()
+ LOCK.release()
self.rnd_sleep(0.3)
return None
@@ -205,6 +237,77 @@ def get_dict_next(self):
row = records[0]
return dict(zip([c[0] for c in self.cur.description], row))
+ def save_file(self, _keys, _blob):
+ """
+ Stores the blob in a folder named after the db, using a
+ filename built by concatenating _keys
+ _keys is the list of unique keys for the table
+ Returns the filepath (relative to the db directory) of the generated file
+ """
+ folder_path = pathlib.Path(self.config['paths']['db_dir']) \
+ .joinpath(self.db_name)
+ os.makedirs(folder_path, exist_ok=True)
+ filename = '_'.join(str(x) for x in _keys) + '.txt'
+ file_rel_path = pathlib.Path(self.db_name).joinpath(filename)
+ filepath = folder_path.joinpath(filename)
+ try:
+ with open(filepath, mode='wb') as f:
+ if isinstance(_blob, str):
+ f.write(_blob.encode())
+ else:
+ f.write(_blob)
+ f.flush()
+ f.close()
+ except PermissionError as ex:
+ self.logger.warning('Unable to create linked database file {}'
+ .format(file_rel_path))
+ return None
+ return file_rel_path
+
+ def delete_file(self, _filepath):
+ """
+ _filepath is relative to the database path
+ """
+ fullpath = pathlib.Path(self.config['paths']['db_dir']) \
+ .joinpath(_filepath)
+ try:
+ os.remove(fullpath)
+ return True
+ except PermissionError as ex:
+ self.logger.warning('Unable to delete linked database file {}'
+ .format(_filepath))
+ return False
+ except FileNotFoundError as ex:
+ self.logger.warning('File missing, unable to delete linked database file {}'
+ .format(_filepath))
+ return False
+
+ def get_file(self, _filepath):
+ """
+ _filepath is relative to the database path
+ Returns the blob read from the file
+ """
+ fullpath = pathlib.Path(self.config['paths']['db_dir']) \
+ .joinpath(_filepath)
+
+ if not fullpath.exists():
+ self.logger.warning('Linked database file Missing {}'.format(_filepath))
+ return None
+ try:
+ with open(fullpath, mode='rb') as f:
+ blob = f.read()
+ f.close()
+ return blob
+ except PermissionError as ex:
+ self.logger.warning('Unable to read linked database file {}'
+ .format(_filepath))
+ return None
+
+ def get_file_by_key(self, _keys):
+ filename = '_'.join(str(x) for x in _keys) + '.txt'
+ file_rel_path = pathlib.Path(self.db_name).joinpath(filename)
+ return self.get_file(file_rel_path)
+
def reinitialize_tables(self):
self.drop_tables()
self.create_tables()
@@ -225,20 +328,38 @@ def export_sql(self, backup_folder):
if not os.path.isdir(backup_folder):
os.mkdir(backup_folder)
self.check_connection()
+
+ # Check for linked file folder and zip up if present
+ db_linkfilepath = pathlib.Path(self.config['paths']['db_dir']) \
+ .joinpath(self.db_name)
+ if db_linkfilepath.exists():
+ self.logger.debug('Linked file folder exists, backing up folder for db {}'.format(self.db_name))
+ backup_filelink = pathlib.Path(backup_folder, self.db_name + FILE_LINK_ZIP)
+ shutil.make_archive(backup_filelink, 'zip', db_linkfilepath)
+
backup_file = pathlib.Path(backup_folder, self.db_name + BACKUP_EXT)
- with open(backup_file, 'w') as export_f:
+ with open(backup_file, 'w', encoding='utf-8') as export_f:
for line in DB.conn[self.db_name][threading.get_ident()].iterdump():
export_f.write('%s\n' % line)
except PermissionError as e:
self.logger.warning(e)
self.logger.warning('Unable to make backups')
-
+
def import_sql(self, backup_folder):
self.logger.debug('Running restore for {} database'.format(self.db_name))
if not os.path.isdir(backup_folder):
msg = 'Backup folder does not exist: {}'.format(backup_folder)
self.logger.warning(msg)
return msg
+
+ # Check for linked file folder and zip up if present
+ backup_filelink = pathlib.Path(backup_folder, self.db_name + FILE_LINK_ZIP + '.zip')
+ db_linkfilepath = pathlib.Path(self.config['paths']['db_dir']) \
+ .joinpath(self.db_name)
+ if backup_filelink.exists():
+ self.logger.debug('Linked file folder exists, restoring folder for db {}'.format(self.db_name))
+ shutil.unpack_archive(backup_filelink, db_linkfilepath)
+
backup_file = pathlib.Path(backup_folder, self.db_name + BACKUP_EXT)
if not os.path.isfile(backup_file):
msg = 'Backup file does not exist, skipping: {}'.format(backup_file)
@@ -265,7 +386,7 @@ def check_connection(self):
if self.db_name not in DB.conn:
DB.conn[self.db_name] = {}
db_conn_dbname = DB.conn[self.db_name]
-
+
if threading.get_ident() not in db_conn_dbname:
db_conn_dbname[threading.get_ident()] = sqlite3.connect(
self.db_fullpath, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
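Most of the db.py changes follow one shape: take an explicit cursor on the per-thread connection, retry up to ten times on sqlite3.OperationalError (a locked database), roll back, and sleep 0.3s plus a little jitter before the next attempt, as rnd_sleep() does. A standalone sketch of that pattern; `with_retry` is a hypothetical helper, not something the diff adds:

```python
import random
import sqlite3
import time

def with_retry(conn, sqlcmd, bindings=(), attempts=10):
    # Mirror the add()/update() loops: retry while sqlite reports the
    # database as locked, rolling back and backing off between attempts.
    while attempts > 0:
        attempts -= 1
        cur = None
        try:
            cur = conn.cursor()
            cur.execute(sqlcmd, bindings)
            conn.commit()
            rowid = cur.lastrowid
            cur.close()
            return rowid
        except sqlite3.OperationalError:
            conn.rollback()
            if cur is not None:
                cur.close()
            # 0.3s plus up to ~0.5s of jitter, like rnd_sleep(0.3)
            time.sleep(0.3 + random.randrange(0, 50) / 100)
    return None
```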
diff --git a/lib/db/db_channels.py b/lib/db/db_channels.py
index df16d2c..02a7c7b 100644
--- a/lib/db/db_channels.py
+++ b/lib/db/db_channels.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -79,7 +79,6 @@
)
"""
-
],
'dt': [
"""
@@ -90,12 +89,12 @@
""",
"""
DROP TABLE IF EXISTS categories
- """
+ """,
"""
DROP TABLE IF EXISTS zones
"""
- ],
-
+ ],
+
'channels_add':
"""
INSERT INTO channels (
@@ -114,7 +113,7 @@
UPDATE channels SET
enabled=?, display_number=?, display_name=?, group_tag=?, thumbnail=?, thumbnail_size=?
WHERE namespace=? AND instance=? AND uid=?
- """,
+ """,
'channels_updated_update':
"""
UPDATE channels SET updated = False WHERE namespace=? AND instance=?
@@ -137,6 +136,12 @@
display_number=?
WHERE namespace=? AND instance=? AND uid=?
""",
+ 'channels_num_update':
+ """
+ UPDATE channels SET
+ number=?
+ WHERE namespace=? AND instance=? AND uid=?
+ """,
'channels_del':
"""
DELETE FROM channels WHERE updated LIKE ?
@@ -145,7 +150,8 @@
'channels_get':
"""
SELECT * FROM channels WHERE namespace LIKE ?
- AND instance LIKE ? ORDER BY CAST(number as FLOAT), namespace, instance
+ AND instance LIKE ? AND enabled LIKE ?
+ ORDER BY CAST(number as FLOAT), namespace, instance
""",
'channels_one_get':
"""
@@ -160,7 +166,7 @@
"""
SELECT DISTINCT namespace,instance FROM channels
""",
-
+
'status_add':
"""
INSERT OR REPLACE INTO status (
@@ -202,9 +208,10 @@ def save_channel_list(self, _namespace, _instance, _ch_dict, save_edit_groups=Tr
Assume the list is complete and will remove any old channels not updated
"""
if _instance is None or _namespace is None:
- self.logger.warning('Saving Channel List: Namespace or Instance is None {}:{}'
+ self.logger.warning(
+ 'Saving Channel List: Namespace or Instance is None {}:{}'
.format(_namespace, _instance))
- self.update(DB_CHANNELS_TABLE + '_updated', (_namespace, _instance,))
+ self.update(DB_CHANNELS_TABLE + '_updated', (_namespace, _instance,))
for ch in _ch_dict:
if save_edit_groups:
edit_groups = ch['groups_other']
@@ -225,6 +232,12 @@ def save_channel_list(self, _namespace, _instance, _ch_dict, save_edit_groups=Tr
True,
json.dumps(ch)))
except sqlite3.IntegrityError as ex:
+ # record already present. Check the thumbnail and update as needed
+ ch_stored = self.get_channel(ch['id'], _namespace, _instance)
+ if ch_stored['thumbnail'] is None and ch['thumbnail'] is not None:
+ ch_stored['thumbnail'] = ch['thumbnail']
+ ch_stored['thumbnail_size'] = ch['thumbnail_size']
+ self.update_channel(ch_stored)
self.update(DB_CHANNELS_TABLE, (
ch['number'],
True,
@@ -242,7 +255,7 @@ def update_channel(self, _ch):
"""
Updates the editable fields for one channel
"""
- self.update(DB_CHANNELS_TABLE+'_editable', (
+ self.update(DB_CHANNELS_TABLE + '_editable', (
_ch['enabled'],
_ch['display_number'],
_ch['display_name'],
@@ -260,7 +273,7 @@ def del_channels(self, _namespace, _instance):
if not _instance:
_instance = '%'
return self.delete(DB_CHANNELS_TABLE, ('%', _namespace, _instance,))
-
+
def del_status(self, _namespace=None, _instance=None):
if not _namespace:
_namespace = '%'
@@ -269,8 +282,7 @@ def del_status(self, _namespace=None, _instance=None):
return self.delete(DB_STATUS_TABLE, (_namespace, _instance,))
def get_status(self, _namespace, _instance):
- result = self.get(DB_STATUS_TABLE,
- (_namespace, _instance))
+ result = self.get(DB_STATUS_TABLE, (_namespace, _instance))
if result:
last_update = result[0][0]
if last_update is not None:
@@ -280,14 +292,16 @@ def get_status(self, _namespace, _instance):
else:
return None
- def get_channels(self, _namespace, _instance):
+ def get_channels(self, _namespace, _instance, _enabled=None):
if not _namespace:
_namespace = '%'
if not _instance:
_instance = '%'
+ if _enabled is None:
+ _enabled = '%'
rows_dict = {}
- rows = self.get_dict(DB_CHANNELS_TABLE, (_namespace, _instance,))
+ rows = self.get_dict(DB_CHANNELS_TABLE, (_namespace, _instance, _enabled))
if rows is None:
return None
for row in rows:
@@ -302,7 +316,7 @@ def get_channels(self, _namespace, _instance):
else:
rows_dict[row['uid']] = []
rows_dict[row['uid']].append(row)
-
+
return rows_dict
def get_channel_names(self):
@@ -318,12 +332,13 @@ def get_channel(self, _uid, _namespace, _instance):
_instance = '%'
rows = self.get_dict(DB_CHANNELS_TABLE + '_one', (_uid, _namespace, _instance,))
- for row in rows:
- ch = json.loads(row['json'])
- row['json'] = ch
- if row['atsc'] is not None:
- row['atsc'] = ast.literal_eval(row['atsc'])
- return row
+ if rows:
+ for row in rows:
+ ch = json.loads(row['json'])
+ row['json'] = ch
+ if row['atsc'] is not None:
+ row['atsc'] = ast.literal_eval(row['atsc'])
+ return row
return None
def update_channel_atsc(self, _ch):
@@ -331,7 +346,7 @@ def update_channel_atsc(self, _ch):
Updates the atsc field for one channel
"""
atsc_str = str(_ch['atsc'])
- self.update(DB_CHANNELS_TABLE+'_atsc', (
+ self.update(DB_CHANNELS_TABLE + '_atsc', (
atsc_str,
_ch['namespace'],
_ch['instance'],
@@ -343,7 +358,7 @@ def update_channel_json(self, _ch, _namespace, _instance):
Updates the json field for one channel
"""
json_str = json.dumps(_ch)
- self.update(DB_CHANNELS_TABLE+'_json', (
+ self.update(DB_CHANNELS_TABLE + '_json', (
json_str,
_namespace,
_instance,
@@ -355,15 +370,27 @@ def update_channel_number(self, _ch):
Updates the display_number field for one channel
"""
display_number = str(_ch['display_number'])
- self.update(DB_CHANNELS_TABLE+'_chnum', (
+ self.update(DB_CHANNELS_TABLE + '_chnum', (
display_number,
_ch['namespace'],
_ch['instance'],
_ch['uid']
))
+
+ def update_number(self, _ch):
+ """
+ Updates the display_number field for one channel
+ """
+ number = str(_ch['number'])
+ self.update(DB_CHANNELS_TABLE + '_num', (
+ number,
+ _ch['namespace'],
+ _ch['instance'],
+ _ch['uid']
+ ))
-
- def get_sorted_channels(self, _namespace, _instance, _first_sort_key=[None, True], _second_sort_key=[None, True]):
+ def get_sorted_channels(self, _namespace, _instance,
+ _first_sort_key=[None, True], _second_sort_key=[None, True]):
"""
Using dynamic SQl to create a SELECT statement and send to the DB
keys are [name_of_column, direction_asc=True]
@@ -379,31 +406,29 @@ def get_sorted_channels(self, _namespace, _instance, _first_sort_key=[None, True
_namespace = '%'
if not _instance:
_instance = '%'
-
- rows_dict = {}
rows = self.get_dict(None, (_namespace, _instance,), sql=sqlcmd)
for row in rows:
ch = json.loads(row['json'])
row['json'] = ch
row['thumbnail_size'] = ast.literal_eval(row['thumbnail_size'])
return rows
-
+
def get_channels_orderby(self, _column, _ascending):
str_types = ['namespace', 'instance', 'enabled', 'display_name', 'group_tag', 'thumbnail']
float_types = ['uid', 'display_number']
json_types = ['HD', 'callsign']
if _ascending:
- dir = 'ASC'
+ dir_ = 'ASC'
else:
- dir = 'DESC'
+ dir_ = 'DESC'
if _column is None:
return ''
elif _column in str_types:
- return ''.join([_column, ' ', dir, ', '])
+ return ''.join([_column, ' ', dir_, ', '])
elif _column in float_types:
- return ''.join(['CAST(', _column, ' as FLOAT) ', dir, ', '])
+ return ''.join(['CAST(', _column, ' as FLOAT) ', dir_, ', '])
elif _column in json_types:
- return ''.join(['JSON_EXTRACT(json, "$.', _column, '") ', dir, ', '])
+ return ''.join(['JSON_EXTRACT(json, "$.', _column, '") ', dir_, ', '])
def add_zone(self, _namespace, _instance, _uid, _name):
self.add(DB_ZONE_TABLE, (_namespace, _instance, _uid, _name))
@@ -411,8 +436,6 @@ def add_zone(self, _namespace, _instance, _uid, _name):
def get_zones(self, _namespace, _instance):
return self.get_dict(DB_ZONE_TABLE, (_namespace, _instance,))
-
-
@Backup(DB_CONFIG_NAME)
def backup(self, backup_folder):
self.export_sql(backup_folder)
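get_channels() gains an `_enabled` filter folded into the same LIKE-wildcard convention as the other arguments: None becomes '%'. A hedged usage sketch; 'PlutoTV' is only an example namespace and `config` is assumed to be the parsed configuration dict:

```python
from lib.db.db_channels import DBChannels

db_channels = DBChannels(config)

# Only channels flagged enabled, across every instance of the namespace.
enabled_only = db_channels.get_channels('PlutoTV', None, _enabled=True)

# Default behaviour is unchanged: _enabled=None maps to the '%' wildcard
# and returns enabled and disabled channels alike.
all_channels = db_channels.get_channels('PlutoTV', None)
```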
diff --git a/lib/db/db_config_defn.py b/lib/db/db_config_defn.py
index 64237e7..5f2d6d6 100644
--- a/lib/db/db_config_defn.py
+++ b/lib/db/db_config_defn.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -17,7 +17,6 @@
"""
import json
-import threading
from lib.db.db import DB
from lib.common.decorators import Backup
diff --git a/lib/db/db_epg.py b/lib/db/db_epg.py
index 7110f55..c536766 100644
--- a/lib/db/db_epg.py
+++ b/lib/db/db_epg.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -18,13 +18,11 @@
import json
import datetime
-import threading
from lib.db.db import DB
from lib.common.decorators import Backup
from lib.common.decorators import Restore
-
DB_EPG_TABLE = 'epg'
DB_CONFIG_NAME = 'db_files-epg_db'
@@ -36,7 +34,7 @@
instance VARCHAR(255) NOT NULL,
day DATE NOT NULL,
last_update TIMESTAMP,
- json TEXT NOT NULL,
+ file VARCHAR(255) NOT NULL,
UNIQUE(namespace, instance, day)
)
"""
@@ -44,13 +42,18 @@
'dt': [
"""
DROP TABLE IF EXISTS epg
+ """
+ ],
+
+ 'epg_column_names_get':
+ """
+ SELECT name FROM pragma_table_info('epg')
""",
- ],
'epg_add':
"""
INSERT OR REPLACE INTO epg (
- namespace, instance, day, last_update, json
+ namespace, instance, day, last_update, file
) VALUES ( ?, ?, ?, ?, ? )
""",
@@ -58,11 +61,21 @@
"""
DELETE FROM epg WHERE namespace LIKE ? AND instance LIKE ? AND day < DATE('now',?)
""",
- 'epg_del':
+ 'epg_by_day_get':
+ """
+ SELECT file FROM epg WHERE namespace LIKE ? AND instance LIKE ? AND day < DATE('now',?)
+ """,
+
+ 'epg_instance_del':
"""
DELETE FROM epg WHERE namespace=? AND instance LIKE ?
""",
+ 'epg_instance_get':
+ """
+ SELECT file FROM epg WHERE namespace=? AND instance LIKE ?
+ """,
+
'epg_last_update_get':
"""
SELECT datetime(last_update, 'localtime') FROM epg WHERE
@@ -89,7 +102,7 @@
"""
SELECT DISTINCT namespace FROM epg
""",
- 'epg_instance_get':
+ 'epg_instances_get':
"""
SELECT DISTINCT namespace, instance FROM epg
"""
@@ -101,23 +114,32 @@ class DBepg(DB):
def __init__(self, _config):
super().__init__(_config, _config['datamgmt'][DB_CONFIG_NAME], sqlcmds)
+ def get_col_names(self):
+ return self.get(DB_EPG_TABLE + '_column_names')
+
def save_program_list(self, _namespace, _instance, _day, _prog_list):
- self.add(DB_EPG_TABLE, (
- _namespace,
- _instance,
- _day,
- datetime.datetime.utcnow(),
- json.dumps(_prog_list),))
+ filepath = self.save_file((DB_EPG_TABLE, _namespace, _instance, _day), json.dumps(_prog_list))
+ if filepath:
+ self.add(DB_EPG_TABLE, (
+ _namespace,
+ _instance,
+ _day,
+ datetime.datetime.utcnow(),
+ str(filepath),))
- def del_old_programs(self, _namespace, _instance, _days='-1 day'):
+ def del_old_programs(self, _namespace, _instance, _days='-2 day'):
"""
- Removes all records for this namespace/instance that are over 1 day old
+ Removes all records for this namespace/instance that are over 2 days old
"""
if not _namespace:
_namespace = '%'
if not _instance:
_instance = '%'
- self.delete(DB_EPG_TABLE +'_by_day', (_namespace, _instance, _days,))
+ files = self.get(DB_EPG_TABLE + '_by_day', (_namespace, _instance, _days,))
+ files = [x[0] for x in files]
+ for f in files:
+ self.delete_file(f)
+ self.delete(DB_EPG_TABLE + '_by_day', (_namespace, _instance, _days,))
def del_instance(self, _namespace, _instance):
"""
@@ -125,14 +147,18 @@ def del_instance(self, _namespace, _instance):
"""
if not _instance:
_instance = '%'
- return self.delete(DB_EPG_TABLE, (_namespace, _instance,))
+ files = self.get(DB_EPG_TABLE + '_instance', (_namespace, _instance,))
+ files = [x[0] for x in files]
+ for f in files:
+ self.delete_file(f)
+ return self.delete(DB_EPG_TABLE + '_instance', (_namespace, _instance,))
def set_last_update(self, _namespace=None, _instance=None, _day=None):
if not _namespace:
_namespace = '%'
if not _instance:
_instance = '%'
- self.update(DB_EPG_TABLE+'_last_update', (
+ self.update(DB_EPG_TABLE + '_last_update', (
_day,
_namespace,
_instance,
@@ -155,10 +181,16 @@ def get_epg_names(self):
return self.get_dict(DB_EPG_TABLE + '_name')
def get_epg_instances(self):
- return self.get_dict(DB_EPG_TABLE + '_instance')
+ return self.get_dict(DB_EPG_TABLE + '_instances')
def get_epg_one(self, _namespace, _instance, _day):
- return self.get_dict(DB_EPG_TABLE + '_one', (_namespace, _instance, _day))
+ row = self.get_dict(DB_EPG_TABLE + '_one', (_namespace, _instance, _day))
+ if len(row):
+ blob = self.get_file_by_key((_namespace, _instance, _day,))
+ if blob:
+ row[0]['json'] = json.loads(blob)
+ return row
+ return []
def init_get_query(self, _namespace, _instance):
if not _namespace:
@@ -176,13 +208,18 @@ def get_next_row(self):
namespace = row['namespace']
instance = row['instance']
day = row['day']
- json_data = json.loads(row['json'])
+ file = row['file']
+ blob = self.get_file(file)
+ if blob:
+ json_data = json.loads(blob)
+ else:
+ json_data = []
row = json_data
return row, namespace, instance, day
def close_query(self):
self.cur.close()
-
+
@Backup(DB_CONFIG_NAME)
def backup(self, backup_folder):
self.export_sql(backup_folder)
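The epg table no longer keeps a day's program list in a json column; save_program_list() writes the JSON through DB.save_file() and records only the relative file path, and the readers (get_epg_one(), get_next_row()) load the blob back and re-attach it. A rough round-trip sketch, assuming `config` is the parsed configuration dict and 'Example'/'Default' are a hypothetical namespace and instance:

```python
from lib.db.db_epg import DBepg

db_epg = DBepg(config)

# The schedule lands on disk under <db_dir>/<epg_db_name>/epg_Example_Default_<day>.txt
# and the row's 'file' column only stores that relative path.
db_epg.save_program_list('Example', 'Default', '2023-05-07', [{'id': 'prog1'}])

# Reading it back re-attaches the decoded JSON under the 'json' key.
rows = db_epg.get_epg_one('Example', 'Default', '2023-05-07')
programs = rows[0]['json'] if rows else []
```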
diff --git a/lib/db/db_epg_programs.py b/lib/db/db_epg_programs.py
index 381d59e..98af69c 100644
--- a/lib/db/db_epg_programs.py
+++ b/lib/db/db_epg_programs.py
@@ -18,13 +18,11 @@
import json
import datetime
-import threading
from lib.db.db import DB
from lib.common.decorators import Backup
from lib.common.decorators import Restore
-
DB_PROGRAMS_TABLE = 'programs'
DB_CONFIG_NAME = 'db_files-epg_programs_db'
@@ -44,7 +42,7 @@
"""
DROP TABLE IF EXISTS programs
""",
- ],
+ ],
'programs_add':
"""
@@ -89,7 +87,7 @@ def del_old_programs(self, _namespace, _instance, _days='-30 day'):
"""
Removes all records for this namespace/instance that are over xx days old
"""
- self.delete(DB_PROGRAMS_TABLE +'_by_day', (_namespace, _days))
+ self.delete(DB_PROGRAMS_TABLE + '_by_day', (_namespace, _days))
def del_namespace(self, _namespace):
"""
@@ -102,7 +100,7 @@ def get_program_names(self):
def get_program(self, _namespace, _id):
return self.get_dict(DB_PROGRAMS_TABLE, (_namespace, _id))
-
+
@Backup(DB_CONFIG_NAME)
def backup(self, backup_folder):
self.export_sql(backup_folder)
diff --git a/lib/db/db_plugins.py b/lib/db/db_plugins.py
index a78f403..4de8291 100644
--- a/lib/db/db_plugins.py
+++ b/lib/db/db_plugins.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -17,74 +17,117 @@
"""
import json
-import threading
from lib.db.db import DB
from lib.common.decorators import Backup
from lib.common.decorators import Restore
-
+DB_REPOS_TABLE = 'repos'
DB_PLUGINS_TABLE = 'plugins'
DB_INSTANCE_TABLE = 'instance'
DB_CONFIG_NAME = 'db_files-plugins_db'
-
sqlcmds = {
'ct': [
+ """
+ CREATE TABLE IF NOT EXISTS repos (
+ id VARCHAR(255) NOT NULL,
+ name VARCHAR(255) NOT NULL,
+ url VARCHAR(255) NOT NULL,
+ json TEXT NOT NULL,
+ UNIQUE(id)
+ )
+ """,
"""
CREATE TABLE IF NOT EXISTS plugins (
id VARCHAR(255) NOT NULL,
+ repo VARCHAR(255) NOT NULL,
namespace VARCHAR(255) NOT NULL,
+ installed BOOLEAN NOT NULL,
json TEXT NOT NULL,
- UNIQUE(namespace, id)
+ UNIQUE(repo, namespace, id)
)
""",
"""
CREATE TABLE IF NOT EXISTS instance (
+ repo VARCHAR(255) NOT NULL,
namespace VARCHAR(255) NOT NULL,
instance VARCHAR(255) NOT NULL,
description TEXT,
- UNIQUE(namespace, instance)
+ UNIQUE(repo, namespace, instance)
)
"""
],
'dt': [
+ """
+ DROP TABLE IF EXISTS instance
+ """,
"""
DROP TABLE IF EXISTS plugins
""",
"""
- DROP TABLE IF EXISTS instance
+ DROP TABLE IF EXISTS repos
+ """
+ ],
+ 'repos_add':
"""
- ],
+ INSERT OR REPLACE INTO repos (
+ id, name, url, json
+ ) VALUES ( ?, ?, ?, ? )
+ """,
+ 'repos_get':
+ """
+ SELECT * FROM repos WHERE id LIKE ?
+ """,
+ 'repos_del':
+ """
+ DELETE FROM repos WHERE id=?
+ """,
'plugins_add':
"""
INSERT OR REPLACE INTO plugins (
- id, namespace, json
- ) VALUES ( ?, ?, ? )
+ id, repo, namespace, installed, json
+ ) VALUES ( ?, ?, ?, ?, ? )
""",
'plugins_get':
"""
- SELECT * FROM plugins WHERE namespace LIKE ?
+ SELECT * FROM plugins WHERE repo LIKE ? AND id LIKE ?
+ AND installed=?
+ """,
+ 'plugins_name_get':
+ """
+ SELECT * FROM plugins WHERE repo LIKE ? AND namespace LIKE ?
+ AND installed=?
+ """,
+ 'plugins_all_get':
+ """
+ SELECT * FROM plugins WHERE repo LIKE ? AND id LIKE ?
+ """,
+ 'plugins_all_name_get':
+ """
+ SELECT * FROM plugins WHERE repo LIKE ? AND namespace LIKE ?
""",
'plugins_del':
"""
- DELETE FROM plugins WHERE namespace=?
+ DELETE FROM plugins WHERE repo=? AND id=?
""",
'instance_add':
"""
INSERT OR REPLACE INTO instance (
- namespace, instance, description
- ) VALUES ( ?, ?, ? )
+ repo, namespace, instance, description
+ ) VALUES ( ?, ?, ?, ? )
""",
'instance_get':
"""
- SELECT * FROM instance ORDER BY namespace, instance
+ SELECT * FROM instance WHERE repo LIKE ?
+ AND namespace LIKE ? ORDER BY namespace, instance
""",
'instance_del':
"""
- DELETE FROM instance WHERE namespace LIKE ? AND instance LIKE ?
+ DELETE FROM instance WHERE repo LIKE ? AND
+ namespace LIKE ? AND instance LIKE ?
"""
}
@@ -94,22 +137,62 @@ class DBPlugins(DB):
def __init__(self, _config):
super().__init__(_config, _config['datamgmt'][DB_CONFIG_NAME], sqlcmds)
+ def save_repo(self, _repo_dict):
+ self.add(DB_REPOS_TABLE, (
+ _repo_dict['id'],
+ _repo_dict['name'],
+ _repo_dict['repo_url'],
+ json.dumps(_repo_dict)))
+
def save_plugin(self, _plugin_dict):
self.add(DB_PLUGINS_TABLE, (
_plugin_dict['id'],
+ _plugin_dict['repoid'],
_plugin_dict['name'],
+ _plugin_dict['version']['installed'],
json.dumps(_plugin_dict)))
- def save_instance(self, namespace, instance, descr):
+ def save_instance(self, _repo_id, _namespace, _instance, _descr):
self.add(DB_INSTANCE_TABLE, (
- namespace,
- instance,
- descr))
+ _repo_id,
+ _namespace,
+ _instance,
+ _descr))
- def get_plugins(self, _namespace = None):
- if not _namespace:
- _namespace = '%'
- rows = self.get_dict(DB_PLUGINS_TABLE, (_namespace,))
+ def get_repos(self, _id):
+ if not _id:
+ _id = '%'
+ rows = self.get_dict(DB_REPOS_TABLE, (_id,))
+ plugin_list = []
+ for row in rows:
+ plugin_list.append(json.loads(row['json']))
+ if len(plugin_list) == 0:
+ plugin_list = None
+ return plugin_list
+
+ def del_repo(self, _id):
+ """
+ If a plugin is installed, it must be removed before the
+ repo can be deleted. Once none of the repo's plugins are installed,
+ the repo, its plugins and all instances will be removed
+ """
+ plugins_installed = self.get_plugins(True, _id)
+ self.logger.warning('################## TBD, aborting delete {}'.format(len(plugins_installed)))
+
+
+ #self.delete(DB_INSTANCE_TABLE, (_id, '%', '%',))
+ #self.delete(DB_PLUGINS_TABLE, (_id, '%',))
+ #self.delete(DB_REPOS_TABLE, (_id,))
+
+ def get_plugins(self, _installed, _repo_id=None, _plugin_id=None):
+ if not _repo_id:
+ _repo_id = '%'
+ if not _plugin_id:
+ _plugin_id = '%'
+ if _installed is None:
+ rows = self.get_dict(DB_PLUGINS_TABLE+'_all', (_repo_id, _plugin_id,))
+ else:
+ rows = self.get_dict(DB_PLUGINS_TABLE, (_repo_id, _plugin_id, _installed,))
plugin_list = []
for row in rows:
plugin_list.append(json.loads(row['json']))
@@ -117,24 +200,45 @@ def get_plugins(self, _namespace = None):
plugin_list = None
return plugin_list
- def del_plugin(self, _namespace):
+ def get_plugins_by_name(self, _installed, _repo_id=None, _plugin_name=None):
+ if not _repo_id:
+ _repo_id = '%'
+ if not _plugin_name:
+ _plugin_name = '%'
+ if _installed is None:
+ rows = self.get_dict(DB_PLUGINS_TABLE+'_all_name', (_repo_id, _plugin_name,))
+ else:
+ rows = self.get_dict(DB_PLUGINS_TABLE+'_name', (_repo_id, _plugin_name, _installed,))
+ plugin_list = []
+ for row in rows:
+ plugin_list.append(json.loads(row['json']))
+ if len(plugin_list) == 0:
+ plugin_list = None
+ return plugin_list
+
+ def del_plugin(self, _repo_id, _plugin_id):
"""
Deletes the instance rows first due to constraints, then
deletes the plugin
"""
+ self.delete(DB_INSTANCE_TABLE, (_repo_id, _plugin_id, '%',))
+ self.delete(DB_PLUGINS_TABLE, (_repo_id, _plugin_id,))
- self.delete(DB_INSTANCE_TABLE, (_namespace, '%', ))
- self.delete(DB_PLUGINS_TABLE, (_namespace,))
-
- def del_instance(self, _namespace, _instance):
- return self.delete(DB_INSTANCE_TABLE, (_namespace, _instance))
+ def del_instance(self, _repo, _namespace, _instance):
+ if not _repo:
+ _repo = '%'
+ return self.delete(DB_INSTANCE_TABLE, (_repo, _namespace, _instance,))
- def get_instances(self):
+ def get_instances(self, _repo=None, _namespace=None):
"""
Creates a dict of namespaces, each containing an array of instances
"""
+ if not _repo:
+ _repo = '%'
+ if not _namespace:
+ _namespace = '%'
rows_dict = {}
- rows = self.get_dict(DB_INSTANCE_TABLE)
+ rows = self.get_dict(DB_INSTANCE_TABLE, (_repo, _namespace,))
for row in rows:
if row['namespace'] not in rows_dict:
rows_dict[row['namespace']] = []
@@ -144,9 +248,13 @@ def get_instances(self):
return rows_dict
- def get_instances_full(self):
+ def get_instances_full(self, _repo=None, _namespace=None):
+ if not _repo:
+ _repo = '%'
+ if not _namespace:
+ _namespace = '%'
rows_dict = {}
- rows = self.get_dict(DB_INSTANCE_TABLE)
+ rows = self.get_dict(DB_INSTANCE_TABLE, (_repo, _namespace,))
for row in rows:
rows_dict[row['namespace']] = row
return rows_dict
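The plugins database now tracks repositories plus an installed flag, so saves and lookups are keyed by repo. A trimmed sketch of the new shape; the dicts below carry only the keys the INSERT statements read, and the ids/URL are hypothetical:

```python
from lib.db.db_plugins import DBPlugins

db_plugins = DBPlugins(config)  # config: parsed configuration dict

db_plugins.save_repo({
    'id': 'cabernet-repo', 'name': 'Cabernet Add-on Repository',
    'repo_url': 'https://example.com/manifest.json'})

db_plugins.save_plugin({
    'id': 'provider.video.example', 'repoid': 'cabernet-repo',
    'name': 'Example', 'version': {'current': '1.0.0', 'installed': True}})

installed = db_plugins.get_plugins(True)                     # installed, any repo/plugin
everything = db_plugins.get_plugins(None, 'cabernet-repo')   # installed or not
```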
diff --git a/lib/db/db_scheduler.py b/lib/db/db_scheduler.py
index 177d3ac..72a8913 100644
--- a/lib/db/db_scheduler.py
+++ b/lib/db/db_scheduler.py
@@ -16,23 +16,18 @@
substantial portions of the Software.
"""
-import ast
-import json
import datetime
import sqlite3
-import threading
import uuid
from lib.db.db import DB
from lib.common.decorators import Backup
from lib.common.decorators import Restore
-
DB_TASK_TABLE = 'task'
DB_TRIGGER_TABLE = 'trigger'
DB_CONFIG_NAME = 'db_files-scheduler_db'
-
sqlcmds = {
'ct': [
"""
@@ -79,8 +74,8 @@
"""
DROP TABLE IF EXISTS task
"""
- ],
-
+ ],
+
'task_add':
"""
INSERT INTO task (
@@ -154,7 +149,7 @@
FROM task
WHERE active='1'
""",
-
+
'task_del':
"""
DELETE FROM task WHERE
@@ -208,18 +203,18 @@
class DBScheduler(DB):
def __init__(self, _config):
- super().__init__(_config, _config['datamgmt'][DB_CONFIG_NAME], sqlcmds)
+ super().__init__(_config, _config['datamgmt'][DB_CONFIG_NAME], sqlcmds)
- def save_task(self, _area, _title, _namespace, _instance, _funccall,
- _priority, _threadtype, _description):
+ def save_task(self, _area, _title, _namespace, _instance, _funccall,
+ _priority, _threadtype, _description):
"""
Returns true if the record was saved. If the record already exists,
it will return false.
"""
try:
- id = str(uuid.uuid1()).upper()
+ id_ = str(uuid.uuid1()).upper()
self.add(DB_TASK_TABLE, (
- id,
+ id_,
_area,
_title,
_namespace,
@@ -280,7 +275,7 @@ def get_task(self, _id):
if len(task) == 1:
return task[0]
else:
- return None
+ return None
def get_task_names(self):
return self.get_dict(DB_TASK_TABLE + '_name')
@@ -315,13 +310,17 @@ def reset_activity(self, _activity=False, _area=None, _title=None):
))
def get_active_status(self, _taskid):
- return self.get_dict(DB_TASK_TABLE + '_active', (_taskid,))[0]['active']
+ res = self.get_dict(DB_TASK_TABLE + '_active', (_taskid,))
+ if res:
+ return res[0]['active']
+ else:
+ return None
def get_num_active(self):
return self.get(DB_TASK_TABLE + '_num_active')[0][0]
- def save_trigger(self, _area, _title, _timetype, timeofday=None,
- dayofweek=None, interval=-1, timelimit=-1, randdur=-1):
+ def save_trigger(self, _area, _title, _timetype, timeofday=None,
+ dayofweek=None, interval=-1, timelimit=-1, randdur=-1):
"""
timetype: daily, weekly, interval, startup
timelimit: maximum time it can run before terminating. -1 is not used
@@ -331,9 +330,9 @@ def save_trigger(self, _area, _title, _timetype, timeofday=None,
randdur: maximum in minutes. Interval only. Will add a random amount
to the event start time up to the maximum minutes. -1 is not used.
"""
- id = str(uuid.uuid1()).upper()
+ id_ = str(uuid.uuid1()).upper()
self.add(DB_TRIGGER_TABLE, (
- id,
+ id_,
_area,
_title,
_timetype,
@@ -343,7 +342,7 @@ def save_trigger(self, _area, _title, _timetype, timeofday=None,
interval,
randdur,
))
- return id
+ return id_
def get_triggers_by_type(self, _timetype):
"""
@@ -359,7 +358,7 @@ def get_trigger(self, _uuid):
if len(trigger) == 1:
return trigger[0]
else:
- return None
+ return None
def get_triggers(self, _taskid=None):
"""
diff --git a/lib/db/db_temp.py b/lib/db/db_temp.py
old mode 100755
new mode 100644
index 44eb7b3..7d401d8
--- a/lib/db/db_temp.py
+++ b/lib/db/db_temp.py
@@ -18,13 +18,11 @@
import json
import datetime
-import threading
from lib.db.db import DB
from lib.common.decorators import Backup
from lib.common.decorators import Restore
-
DB_TEMP_TABLE = 'temp'
DB_CONFIG_NAME = 'db_files-temp_db'
@@ -45,7 +43,7 @@
"""
DROP TABLE IF EXISTS temp
""",
- ],
+ ],
'temp_add':
"""
@@ -88,13 +86,14 @@ def save_json(self, _namespace, _instance, _value, _json):
def cleanup_temp(self, _namespace, _instance, _hours='-6 hours'):
"""
- Removes all records for this namespace/instance that are over 1 hour old
+ Removes all records for this namespace/instance that are over 6 hours old
"""
if not _namespace:
_namespace = '%'
if not _instance:
_instance = '%'
- deleted = self.delete(DB_TEMP_TABLE +'_by_day', (_namespace, _instance, _hours,))
+ deleted = self.delete(DB_TEMP_TABLE + '_by_day', (_namespace, _instance, _hours,))
+ self.sql_exec('VACUUM')
def del_instance(self, _namespace, _instance):
"""
@@ -107,7 +106,6 @@ def del_instance(self, _namespace, _instance):
def get_record(self, _namespace, _instance, _value):
return self.get_dict(DB_TEMP_TABLE, (_namespace, _instance, _value))
-
@Backup(DB_CONFIG_NAME)
def backup(self, backup_folder):
self.export_sql(backup_folder)
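cleanup_temp() now follows the bulk delete with a VACUUM: deleting rows only marks pages as free inside the sqlite file, so without the rebuild the temp database never shrinks on disk. The effect in plain sqlite3, with a hypothetical table and column name:

```python
import sqlite3

conn = sqlite3.connect('temp.db')
conn.execute("DELETE FROM temp WHERE last_update < DATETIME('now', '-6 hours')")
conn.commit()
# VACUUM rewrites the database file, returning the freed pages to the OS.
conn.execute('VACUUM')
conn.close()
```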
diff --git a/lib/m3u8/__init__.py b/lib/m3u8/__init__.py
index 36150b6..2d07fea 100644
--- a/lib/m3u8/__init__.py
+++ b/lib/m3u8/__init__.py
@@ -3,8 +3,9 @@
# Use of this source code is governed by a MIT License
# license that can be found in the LICENSE file.
-import sys
+import logging
import os
+import sys
from .httpclient import DefaultHTTPClient, _parsed_url
from .model import (M3U8, Segment, SegmentList, PartialSegment,
@@ -23,13 +24,20 @@
'PreloadHint' 'DateRange', 'DateRangeList', 'loads', 'load',
'parse', 'ParseError')
+LOGGER = None
+
def loads(content, uri=None, custom_tags_parser=None):
'''
Given a string with a m3u8 content, returns a M3U8 object.
Optionally parses a uri to set a correct base_uri on the M3U8 object.
Raises ValueError if invalid content
'''
-
+ global LOGGER
+ if LOGGER is None:
+ LOGGER = logging.getLogger(__name__)
+ if not content.startswith('#EXTM3U'):
+ LOGGER.warning('INVALID m3u format: #EXTM3U missing {}'.format(uri))
+ return None
if uri is None:
return M3U8(content, custom_tags_parser=custom_tags_parser)
else:
@@ -37,20 +45,35 @@ def loads(content, uri=None, custom_tags_parser=None):
return M3U8(content, base_uri=base_uri, custom_tags_parser=custom_tags_parser)
-def load(uri, timeout=None, headers={}, custom_tags_parser=None, http_client=DefaultHTTPClient(), verify_ssl=True):
+def load(uri, timeout=9, headers={}, custom_tags_parser=None, http_client=DefaultHTTPClient(), verify_ssl=True, http_session=None):
'''
Retrieves the content from a given URI and returns a M3U8 object.
Raises ValueError if invalid content or IOError if request fails.
'''
+ global LOGGER
+ if LOGGER is None:
+ LOGGER = logging.getLogger(__name__)
if is_url(uri):
- content, base_uri = http_client.download(uri, timeout, headers, verify_ssl)
+ content, base_uri = http_client.download(uri, timeout, headers, verify_ssl, http_session)
+ if content is None:
+ LOGGER.warning('Unable to obtain m3u file {}'.format(uri))
+ return None
+ if not content.startswith('#EXTM3U'):
+ LOGGER.warning('INVALID m3u format: #EXTM3U missing {}'.format(uri))
+ return None
return M3U8(content, base_uri=base_uri, custom_tags_parser=custom_tags_parser)
else:
return _load_from_file(uri, custom_tags_parser)
def _load_from_file(uri, custom_tags_parser=None):
+ global LOGGER
+ if LOGGER is None:
+ LOGGER = logging.getLogger(__name__)
with open(uri, encoding='utf8') as fileobj:
raw_content = fileobj.read().strip()
base_uri = os.path.dirname(uri)
+ if not raw_content.startswith('#EXTM3U'):
+ LOGGER.warning('INVALID m3u format: #EXTM3U missing {}'.format(uri))
+ return None
return M3U8(raw_content, base_uri=base_uri, custom_tags_parser=custom_tags_parser)
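Both loads() and load() now return None instead of raising when the download fails or the body does not start with '#EXTM3U', so callers must check for None before using the playlist. A hedged caller sketch with a placeholder URL:

```python
import lib.m3u8 as m3u8

playlist = m3u8.load('https://example.com/stream/master.m3u8', timeout=9)
if playlist is None:
    # unreachable URI, or a body that did not start with '#EXTM3U'
    pass  # skip this stream or retry later, as the plugins do
else:
    for variant in playlist.playlists:
        print(variant.absolute_uri)
```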
diff --git a/lib/m3u8/httpclient.py b/lib/m3u8/httpclient.py
index d03bbab..6e75135 100644
--- a/lib/m3u8/httpclient.py
+++ b/lib/m3u8/httpclient.py
@@ -1,3 +1,4 @@
+import logging
import posixpath
import ssl
import sys
@@ -18,18 +19,35 @@ class DefaultHTTPClient:
def __init__(self, proxies=None):
self.proxies = proxies
-
- def download(self, uri, timeout=None, headers={}, verify_ssl=True):
- proxy_handler = urllib.request.ProxyHandler(self.proxies)
- https_handler = HTTPSHandler(verify_ssl=verify_ssl)
- opener = urllib.request.build_opener(proxy_handler, https_handler)
- opener.addheaders = headers.items()
- resource = opener.open(uri, timeout=timeout)
- base_uri = _parsed_url(resource.geturl())
- content = resource.read().decode(
- resource.headers.get_content_charset(failobj="utf-8")
- )
- return content, base_uri
+ self.base_uri = None
+ self.logger = None
+
+ def download(self, uri, timeout=9, headers={}, verify_ssl=True, http_session=None):
+ content = self.get_uri(uri, timeout, headers, verify_ssl, http_session)
+ return content, self.base_uri
+
+ def get_uri(self, _uri, _timeout, _headers, _verify_ssl, _http_session):
+ if self.logger is None:
+ self.logger = logging.getLogger(__name__)
+
+ if _http_session:
+ resp = _http_session.get(_uri, headers=_headers, timeout=_timeout)
+ x = resp.text
+ self.base_uri = _parsed_url(str(resp.url))
+ resp.raise_for_status()
+ return x
+
+ else:
+ proxy_handler = urllib.request.ProxyHandler(self.proxies)
+ https_handler = HTTPSHandler(verify_ssl=_verify_ssl)
+ opener = urllib.request.build_opener(proxy_handler, https_handler)
+ opener.addheaders = _headers.items()
+ resource = opener.open(_uri, timeout=_timeout)
+ self.base_uri = _parsed_url(resource.geturl())
+ content = resource.read().decode(
+ resource.headers.get_content_charset(failobj="utf-8")
+ )
+ return content
class HTTPSHandler:
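DefaultHTTPClient.download() now prefers an injected session over urllib; the only interface it relies on is .get(uri, headers=, timeout=), .text, .url and .raise_for_status(), which matches httpx. A sketch of how a caller might hand in an HTTP/2 session (the URL and User-agent value are placeholders):

```python
import httpx
import lib.m3u8 as m3u8

# One shared HTTP/2-capable client, reused across playlist reloads.
http_session = httpx.Client(http2=True, timeout=9)

playlist = m3u8.load('https://example.com/stream/variant.m3u8',
                     headers={'User-agent': 'Mozilla/5.0'},
                     http_session=http_session)
```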
diff --git a/lib/main.py b/lib/main.py
index 7689335..880ad73 100644
--- a/lib/main.py
+++ b/lib/main.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -34,8 +34,15 @@
pip(['install', 'cryptography'])
except ModuleNotFoundError:
print('Unable to install required cryptography module')
+ try:
+ import httpx
+ except ImportError:
+ pip(['install', 'httpx[http2]'])
+ except ModuleNotFoundError:
+ print('Unable to install required httpx[http2] module')
except (ImportError, ModuleNotFoundError):
- print('Unable to load pip module to install cryptography module')
+ print('Unable to load pip module to install required modules')
+
import lib.clients.hdhr.hdhr_server as hdhr_server
@@ -48,6 +55,7 @@
import lib.updater.updater as updater
import lib.config.user_config as user_config
from lib.db.db_scheduler import DBScheduler
+from lib.db.db_temp import DBTemp
from lib.common.utils import clean_exit
from lib.common.pickling import Pickling
from lib.schedule.scheduler import Scheduler
@@ -106,28 +114,42 @@ def main(script_dir):
# Get Operating system
opersystem = platform.system()
config_obj = None
+ scheduler = None
+ terminate_queue = None
try:
RESTART_REQUESTED = False
config_obj = user_config.get_config(script_dir, opersystem, args)
config = config_obj.data
LOGGER = logging.getLogger(__name__)
+ # reduce logging for httpx modules
+ logging.getLogger("hpack").setLevel(logging.WARNING)
+ logging.getLogger("httpx").setLevel(logging.WARNING)
+ logging.getLogger("httpcore").setLevel(logging.WARNING)
LOGGER.warning('#########################################')
LOGGER.warning('MIT License, Copyright (C) 2021 ROCKY4546')
LOGGER.notice('Cabernet v{}'.format(utils.get_version_str()))
+ except KeyboardInterrupt:
+ if LOGGER:
+ LOGGER.warning('^C received, shutting down the server')
+ return
+ try:
# use this until 0.9.3 due to maintenance mode not being enabled in 0.9.1
- if args.restart and config['main']['maintenance_mode']:
+ if config['main']['maintenance_mode']:
LOGGER.info('In maintenance mode, applying patches')
patcher.patch_upgrade(config_obj, utils.VERSION)
- config_obj.write('main', 'maintenance_mode', False)
time.sleep(0.01)
+ config_obj.write('main', 'maintenance_mode', False)
utils.cleanup_web_temp(config)
+ dbtemp = DBTemp(config)
+ dbtemp.cleanup_temp(None, None)
plugins = init_plugins(config_obj)
config_obj.defn_json = None
init_versions(plugins)
+
if opersystem in ['Windows']:
pickle_it = Pickling(config)
pickle_it.to_pickle(plugins)
@@ -158,7 +180,8 @@ def main(script_dir):
terminate_processes(config, hdhr_serverx, ssdp_serverx, webadmin, tuner, scheduler, config_obj)
except KeyboardInterrupt:
- LOGGER.warning('^C received, shutting down the server')
+ if LOGGER:
+ LOGGER.warning('^C received, shutting down the server')
shutdown(config, hdhr_serverx, ssdp_serverx, webadmin, tuner, scheduler, config_obj, terminate_queue)
@@ -234,9 +257,11 @@ def init_hdhr(_config, _hdhr_queue):
def shutdown(_config, _hdhr_serverx, _ssdp_serverx, _webadmin, _tuner, _scheduler, _config_obj, _terminate_queue):
- _terminate_queue.put('shutdown')
- time.sleep(2)
- terminate_processes(_config, _hdhr_serverx, _ssdp_serverx, _webadmin, _tuner, _scheduler, _config_obj)
+ if _terminate_queue:
+ _terminate_queue.put('shutdown')
+ time.sleep(0.01)
+ terminate_processes(_config, _hdhr_serverx, _ssdp_serverx, _webadmin, _tuner, _scheduler, _config_obj)
+ LOGGER.debug('main process terminated')
clean_exit()
@@ -263,3 +288,4 @@ def terminate_processes(_config, _hdhr_serverx, _ssdp_serverx, _webadmin, _tuner
if _config_obj and _config_obj.defn_json:
_config_obj.defn_json.terminate()
del _config_obj
+ time.sleep(0.5)
\ No newline at end of file
diff --git a/lib/plugins/__init__.py b/lib/plugins/__init__.py
index 3740447..87d95d7 100644
--- a/lib/plugins/__init__.py
+++ b/lib/plugins/__init__.py
@@ -1 +1 @@
-# location to install plugins
+import lib.plugins.plugin_manager
\ No newline at end of file
diff --git a/lib/plugins/plugin.py b/lib/plugins/plugin.py
index 47b0b07..c91d97d 100644
--- a/lib/plugins/plugin.py
+++ b/lib/plugins/plugin.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -27,7 +27,6 @@
from lib.db.db_plugins import DBPlugins
from lib.db.db_config_defn import DBConfigDefn
-
PLUGIN_CONFIG_DEFN_FILE = 'config_defn.json'
PLUGIN_INSTANCE_DEFN_FILE = 'instance_defn.json'
PLUGIN_MANIFEST_FILE = 'plugin.json'
@@ -40,19 +39,17 @@ def register(func):
class Plugin:
-
# Temporarily used to register the plugin setup() function
_plugin_func = None
logger = None
- def __init__(self, _config_obj, _plugin_defn, _plugin_path):
-
+ def __init__(self, _config_obj, _plugin_defn, _plugins_pkg, _plugin_id, _is_external):
if Plugin.logger is None:
Plugin.logger = logging.getLogger(__name__)
self.enabled = True
- self.plugin_path = _plugin_path
+ self.plugin_path = '.'.join([_plugins_pkg, _plugin_id])
+ self.plugin_id = _plugin_id
self.config_obj = _config_obj
-
self.db_configdefn = DBConfigDefn(_config_obj.data)
self.load_config_defn()
@@ -60,23 +57,53 @@ def __init__(self, _config_obj, _plugin_defn, _plugin_path):
self.init_func = Plugin._plugin_func
self.plugin_settings = {}
self.plugin_db = DBPlugins(_config_obj.data)
- self.namespace = None
+ self.namespace = ''
self.instances = []
- self.load_plugin_manifest(_plugin_defn)
+ self.repo_id = None
+ self.load_plugin_manifest(_plugin_defn, _is_external)
+ if not self.namespace:
+ self.enabled = False
+ self.logger.debug('1 Plugin disabled in config.ini for {}'.format(self.namespace))
+ return
self.plugin_obj = None
- if not self.config_obj.data[self.namespace.lower()]['enabled']:
+ self.config_obj.data[self.namespace.lower()]['version'] = self.plugin_settings['version']['current']
+ if not self.config_obj.data[self.namespace.lower()].get('enabled'):
self.enabled = False
- self.logger.debug('Plugin disabled in config.ini for {}'.format(self.namespace))
+ self.logger.debug('2 Plugin disabled in config.ini for {}'.format(self.namespace))
+ self.db_configdefn.add_config(self.config_obj.data)
return
self.load_instances()
self.logger.notice('Plugin created for {}'.format(self.namespace))
+ def terminate(self):
+ """
+ Removes all has-a objects from this object and calls terminate on any contained objects
+ Not calling the inherited class at this time
+ """
+ self.enabled = False
+ self.config_obj.write(
+ self.namespace.lower(), 'enabled', False)
+
+ if self.plugin_obj:
+ self.plugin_obj.terminate()
+ self.plugin_path = None
+ self.plugin_id = None
+ self.config_obj = None
+ self.db_configdefn = None
+ self.init_func = None
+ self.plugin_settings = None
+ self.plugin_db = None
+ self.namespace = None
+ self.instances = None
+ self.repo_id = None
+ self.plugin_obj = None
+
+
def load_config_defn(self):
try:
self.logger.debug(
'Plugin Config Defn file loaded at {}'.format(self.plugin_path))
defn_obj = ConfigDefn(self.plugin_path, PLUGIN_CONFIG_DEFN_FILE, self.config_obj.data)
-
default_config = defn_obj.get_default_config()
self.config_obj.merge_config(default_config)
defn_obj.call_oninit(self.config_obj)
@@ -95,15 +122,16 @@ def load_config_defn(self):
def load_instances(self):
inst_defn_obj = ConfigDefn(self.plugin_path, PLUGIN_INSTANCE_DEFN_FILE, self.config_obj.data, True)
- # determine in the config data whether the instance of this name exists. It would have a section name = 'name-instance'
+ # determine in the config data whether the instance of this name exists.
+ # It would have a section name = 'name-instance'
self.instances = self.find_instances()
if len(self.instances) == 0:
- self.enabled = False
- self.config_obj.data[self.namespace.lower()]['enabled'] = False
- self.logger.info('No instances found, disabling plugin {}'.format(self.namespace))
+ self.enabled = True
+ self.config_obj.data[self.namespace.lower()]['enabled'] = True
+ self.logger.info('No instances found, {}'.format(self.namespace))
return
for inst in self.instances:
- self.plugin_db.save_instance(self.namespace, inst, '')
+ self.plugin_db.save_instance(self.repo_id, self.namespace, inst, '')
# create a defn with the instance name as the section name. then process it.
inst_defn_obj.is_instance_defn = False
for area, area_data in inst_defn_obj.config_defn.items():
@@ -114,16 +142,17 @@ def load_instances(self):
base_section = section.split('_', 1)[0]
area_data['sections'][base_section + '_' + inst] = area_data['sections'].pop(section)
if 'label' in self.config_obj.data[base_section + '_' + inst] \
- and self.config_obj.data[base_section + '_' + inst]['label'] is not None:
- area_data['sections'][base_section + '_' + inst]['label'] = self.config_obj.data[base_section + '_' + inst]['label']
+ and self.config_obj.data[base_section + '_' + inst]['label'] is not None:
+ area_data['sections'][base_section + '_' + inst]['label'] = \
+ self.config_obj.data[base_section + '_' + inst]['label']
inst_defn_obj.save_defn_to_db()
-
+
default_config = inst_defn_obj.get_default_config()
self.config_obj.merge_config(default_config)
inst_defn_obj.call_oninit(self.config_obj)
self.config_obj.defn_json.merge_defn_obj(inst_defn_obj)
- for area, area_data in inst_defn_obj.config_defn.items():
- for section, section_data in area_data['sections'].items():
+ for area2, area_data2 in inst_defn_obj.config_defn.items():
+ for section, section_data in area_data2['sections'].items():
for setting in section_data['settings'].keys():
new_value = self.config_obj.fix_value_type(
section, setting, self.config_obj.data[section][setting])
@@ -138,23 +167,37 @@ def find_instances(self):
instances.append(section.split(inst_sec, 1)[1])
return instances
- def load_plugin_manifest(self, _plugin_defn):
+ def load_plugin_manifest(self, _plugin_defn, _is_external):
self.load_default_settings(_plugin_defn)
- self.import_manifest()
+ self.import_manifest(_is_external)
def load_default_settings(self, _plugin_defn):
for name, attr in _plugin_defn.items():
self.plugin_settings[name] = attr['default']
- def import_manifest(self):
+ def import_manifest(self, _is_external):
try:
- json_settings = importlib.resources.read_text(self.plugin_path, PLUGIN_MANIFEST_FILE)
- settings = json.loads(json_settings)
- self.namespace = settings['name']
- self.plugin_db.save_plugin(settings)
+ json_settings = self.plugin_db.get_plugins(_installed=None, _repo_id=None, _plugin_id=self.plugin_id)
+
+ local_settings = importlib.resources.read_text(self.plugin_path, PLUGIN_MANIFEST_FILE)
+ local_settings = json.loads(local_settings)
+ local_settings = local_settings['plugin']
+
+ if not json_settings:
+ json_settings = local_settings
+ json_settings['repoid'] = None
+ else:
+ json_settings = json_settings[0]
+ self.repo_id = json_settings['repoid']
+ if local_settings['version']['current']:
+ json_settings['version']['current'] = local_settings['version']['current']
+ json_settings['external'] = _is_external
+ json_settings['version']['installed'] = True
+ self.namespace = json_settings['name']
+ self.plugin_db.save_plugin(json_settings)
self.logger.debug(
'Plugin Manifest file loaded at {}'.format(self.plugin_path))
- self.plugin_settings = utils.merge_dict(self.plugin_settings, settings, True)
+ self.plugin_settings = utils.merge_dict(self.plugin_settings, json_settings, True)
except FileNotFoundError:
self.logger.warning(
'PLUGIN MANIFEST FILE NOT FOUND AT {}'.format(self.plugin_path))
@@ -162,4 +205,3 @@ def import_manifest(self):
@property
def name(self):
return self.plugin_settings['name']
-
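plugin_path is now a dotted package path ('<plugins_pkg>.<plugin_id>'), which is why the manifest is read with importlib.resources rather than from a filesystem path, and why the new manifests nest their data under a 'plugin' key. A small sketch with a made-up plugin id:

```python
import importlib.resources
import json

plugin_pkg = 'plugins.provider_video_example'   # hypothetical plugin package
manifest = json.loads(importlib.resources.read_text(plugin_pkg, 'plugin.json'))
settings = manifest['plugin']                    # new manifests nest under 'plugin'
print(settings['name'], settings['version']['current'])
```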
diff --git a/lib/plugins/plugin_channels.py b/lib/plugins/plugin_channels.py
index 7e6c77f..5e23077 100644
--- a/lib/plugins/plugin_channels.py
+++ b/lib/plugins/plugin_channels.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -21,22 +21,18 @@
import json
import logging
import io
-import os
-import pathlib
import re
-import shutil
import threading
import time
-import urllib.request
import lib.m3u8 as m3u8
+import lib.config.config_callbacks as config_callbacks
import lib.common.utils as utils
import lib.image_size.get_image_size as get_image_size
from lib.db.db_channels import DBChannels
from lib.common.decorators import handle_url_except
from lib.common.decorators import handle_json_except
-
class PluginChannels:
def __init__(self, _instance_obj):
@@ -47,11 +43,25 @@ def __init__(self, _instance_obj):
self.instance_key = _instance_obj.instance_key
self.db = DBChannels(self.config_obj.data)
self.config_section = self.instance_obj.config_section
- if self.config_obj.data[self.config_section].get('channel-start_ch_num') is not None:
- self.ch_num_enum = self.config_obj.data[self.config_section]['channel-start_ch_num']
- else:
+
+ self.ch_num_enum = self.config_obj.data[self.config_section].get('channel-start_ch_num')
+ if self.ch_num_enum is None or self.ch_num_enum < 0:
self.ch_num_enum = 0
+ def terminate(self):
+ """
+ Removes all has-a objects from this object and calls terminate on any contained objects
+ Not calling the inherited class at this time
+ """
+ self.logger = None
+ self.instance_obj = None
+ self.config_obj = None
+ self.plugin_obj = None
+ self.instance_key = None
+ self.db = None
+ self.config_section = None
+ self.ch_num_enum = None
+
def set_channel_num(self, _number):
"""
if _number is None then will set the channel number based
@@ -64,7 +74,6 @@ def set_channel_num(self, _number):
else:
return _number
-
def get_channels(self):
"""
Interface method to override
@@ -73,40 +82,43 @@ def get_channels(self):
@handle_url_except()
@handle_json_except
- def get_uri_json_data(self, _uri):
+ def get_uri_json_data(self, _uri, _retries):
header = {
'Content-Type': 'application/json',
'User-agent': utils.DEFAULT_USER_AGENT}
- req = urllib.request.Request(_uri, headers=header)
- with urllib.request.urlopen(req, timeout=10.0) as resp:
- return json.load(resp)
+ resp = self.plugin_obj.http_session.get(_uri, headers=header, timeout=8)
+ x = resp.json()
+ resp.raise_for_status()
+ return x
@handle_url_except()
- def get_uri_data(self, _uri, _header=None, _data=None):
+ def get_uri_data(self, _uri, _retries, _header=None, _data=None):
if _header is None:
header = {
'User-agent': utils.DEFAULT_USER_AGENT}
else:
header = _header
- req = urllib.request.Request(_uri, data=_data, headers=header)
- with urllib.request.urlopen(req, timeout=10.0) as resp:
- return resp.read()
+ if _data:
+ resp = self.plugin_obj.http_session.post(_uri, headers=header, data=_data, timeout=8)
+ else:
+ resp = self.plugin_obj.http_session.get(_uri, headers=header, timeout=8)
+ x = resp.content
+ return x
- @handle_url_except(timeout=10.0)
- @handle_json_except
- def get_m3u8_data(self, _uri, _header=None):
+ @handle_url_except()
+ def get_m3u8_data(self, _uri, _retries, _header=None):
if _header is None:
return m3u8.load(_uri,
- headers={'User-agent': utils.DEFAULT_USER_AGENT})
+ headers={'User-agent': utils.DEFAULT_USER_AGENT},
+ http_session=self.plugin_obj.http_session)
else:
return m3u8.load(_uri,
- headers=_header)
+ headers=_header,
+ http_session=self.plugin_obj.http_session)
-
def refresh_channels(self, force=False):
- if self.config_obj.data[self.config_section].get('channel-start_ch_num') is not None:
- self.ch_num_enum = self.config_obj.data[self.config_section]['channel-start_ch_num']
- else:
+ self.ch_num_enum = self.config_obj.data[self.config_section].get('channel-start_ch_num')
+ if self.ch_num_enum is None or self.ch_num_enum < 0:
self.ch_num_enum = 0
last_update = self.db.get_status(self.plugin_obj.name, self.instance_key)
update_needed = False
@@ -114,8 +126,8 @@ def refresh_channels(self, force=False):
update_needed = True
else:
delta = datetime.datetime.now() - last_update
- if delta.total_seconds() / 3600 >= self.config_obj.data[
- self.config_section]['channel-update_timeout']:
+ if delta.total_seconds() / 3600 >= \
+ self.config_obj.data[self.config_section]['channel-update_timeout']:
update_needed = True
if update_needed or force:
i = 0
@@ -124,19 +136,25 @@ def refresh_channels(self, force=False):
i += 1
time.sleep(0.5)
ch_dict = self.get_channels()
- if ch_dict == None:
- self.logger.warning('Unable to retrieve channel data from {}:{}, aborting refresh' \
+ if ch_dict is None:
+ self.logger.warning(
+ 'Unable to retrieve channel data from {}:{}, aborting refresh'
.format(self.plugin_obj.name, self.instance_key))
return False
if 'channel-import_groups' in self.config_obj.data[self.config_section]:
- self.db.save_channel_list(self.plugin_obj.name, self.instance_key, ch_dict, \
+ self.db.save_channel_list(
+ self.plugin_obj.name, self.instance_key, ch_dict,
self.config_obj.data[self.config_section]['channel-import_groups'])
else:
self.db.save_channel_list(self.plugin_obj.name, self.instance_key, ch_dict)
- self.logger.debug('{}:{} Channel update complete' \
+ start_ch_num = self.config_obj.data[self.config_section].get('channel-start_ch_num')
+ if start_ch_num is not None and start_ch_num > -1:
+ config_callbacks.update_channel_num(self.config_obj, self.config_section, 'channel-start_ch_num')
+ self.logger.debug(
+ '{}:{} Channel update complete'
.format(self.plugin_obj.name, self.instance_key))
else:
- self.logger.debug('Channel data still new for {} {}, not refreshing' \
+ self.logger.debug(
+ 'Channel data still new for {} {}, not refreshing'
.format(self.plugin_obj.name, self.instance_key))
return False
@@ -146,7 +164,7 @@ def clean_group_name(self, group_name):
return re.sub('[ +&*%$#@!:;,<>?]', '', group_name)
@handle_url_except()
- def get_thumbnail_size(self, _thumbnail, _ch_uid, ):
+ def get_thumbnail_size(self, _thumbnail, _retries, _ch_uid, ):
thumbnail_size = (0, 0)
if _thumbnail is None or _thumbnail == '':
return thumbnail_size
@@ -154,86 +172,135 @@ def get_thumbnail_size(self, _thumbnail, _ch_uid, ):
if _ch_uid is not None:
ch_row = self.db.get_channel(_ch_uid, self.plugin_obj.name, self.instance_key)
if ch_row is not None:
- ch_dict = ch_row['json']
if ch_row['json']['thumbnail'] == _thumbnail:
return ch_row['json']['thumbnail_size']
h = {'User-Agent': utils.DEFAULT_USER_AGENT,
- 'Accept': '*/*',
- 'Accept-Encoding': 'identity',
- 'Connection': 'Keep-Alive'
- }
- req = urllib.request.Request(_thumbnail, headers=h)
- with urllib.request.urlopen(req) as resp:
- img_blob = resp.read()
- fp = io.BytesIO(img_blob)
- sz = len(img_blob)
- try:
- thumbnail_size = get_image_size.get_image_size_from_bytesio(fp, sz)
- except get_image_size.UnknownImageFormat as e:
- self.logger.warning('{}: Thumbnail unknown format. {}'
- .format(self.plugin_obj.name, str(e)))
- pass
+ 'Accept': '*/*',
+ 'Accept-Encoding': 'identity',
+ 'Connection': 'Keep-Alive'
+ }
+ resp = self.plugin_obj.http_session.get(_thumbnail, headers=h, timeout=8)
+ resp.raise_for_status()
+ img_blob = resp.content
+ fp = io.BytesIO(img_blob)
+ sz = len(img_blob)
+ try:
+ thumbnail_size = get_image_size.get_image_size_from_bytesio(fp, sz)
+ except get_image_size.UnknownImageFormat as e:
+ self.logger.warning('{}: Thumbnail unknown format. {}'
+ .format(self.plugin_obj.name, str(e)))
+ pass
return thumbnail_size
@handle_url_except
- def get_best_stream(self, _url, _channel_id):
- self.logger.debug('{}: Getting best video stream info for {} {}' \
+ def get_best_stream(self, _url, _retries, _channel_id, _referer=None):
+ if self.config_obj.data[self.config_section]['player-stream_type'] == 'm3u8redirect':
+ return _url
+
+
+ self.logger.debug(
+ '{}: Getting best video stream info for {} {}'
.format(self.plugin_obj.name, _channel_id, _url))
- bestStream = None
- videoUrlM3u = m3u8.load(_url,
- headers={'User-agent': utils.DEFAULT_USER_AGENT})
- self.logger.debug("Found " + str(len(videoUrlM3u.playlists)) + " Playlists")
-
- max_bitrate = 570000
-
- # obtain lowest value that exceeds max_bitrate
- if len(videoUrlM3u.playlists) > 0:
- bandwidth_list = {}
- for videoStream in videoUrlM3u.playlists:
- bandwidth_list[videoStream.stream_info.bandwidth] = videoStream
- bandwidth_list = collections.OrderedDict(sorted(bandwidth_list.items(), reverse=True))
- self.logger.warning(bandwidth_list.keys())
-
- for videoStream in videoUrlM3u.playlists:
- if bestStream is None:
- bestStream = videoStream
- elif videoStream.stream_info.resolution is None:
- for bps, seg in bandwidth_list.items():
- if bps < max_bitrate:
- bestStream = seg
- break
- else:
- bestStream = seg
-
- self.logger.warning('Using bandwidth {} for {}'.format(bestStream.stream_info.bandwidth, _channel_id))
+ best_stream = None
+ if _referer:
+ header = {
+ 'User-agent': utils.DEFAULT_USER_AGENT,
+ 'Referer': _referer}
+ else:
+ header = {'User-agent': utils.DEFAULT_USER_AGENT}
+
+ ch_dict = self.db.get_channel(_channel_id, self.plugin_obj.name, self.instance_key)
+ ch_json = ch_dict['json']
+ best_resolution = -1
+ video_url_m3u = m3u8.load(
+ _url, headers=header,
+ http_session=self.plugin_obj.http_session)
+
+ if not video_url_m3u:
+ self.logger.notice('{}:{} Unable to obtain m3u file, aborting stream {}'
+ .format(self.plugin_obj.name, self.instance_key, _channel_id))
+ return
+ self.logger.debug("Found " + str(len(video_url_m3u.playlists)) + " Playlists")
+
+ if len(video_url_m3u.playlists) > 0:
+ max_bitrate = self.config_obj.data[self.config_section]['player-stream_quality']
+ bitrate_list = {}
+ for video_stream in video_url_m3u.playlists:
+ bitrate_list[video_stream.stream_info.bandwidth] = video_stream
+ bitrate_list = collections.OrderedDict(sorted(bitrate_list.items(), reverse=True))
+ # bitrate is sorted from highest to lowest
+ if list(bitrate_list.keys())[0] > max_bitrate:
+ is_set_by_bitrate = True
+ else:
+ is_set_by_bitrate = False
+ for bps, seg in bitrate_list.items():
+ if bps < max_bitrate:
+ best_stream = seg
+ if seg.stream_info.resolution:
+ best_resolution = seg.stream_info.resolution[1]
break
-
- if videoStream.stream_info.bandwidth > bestStream.stream_info.bandwidth:
- # current is higher bandwidth
- bestStream = videoStream
- elif ((videoStream.stream_info.resolution[0] > bestStream.stream_info.resolution[0]) and
- (videoStream.stream_info.resolution[1] > bestStream.stream_info.resolution[1])):
- bestStream = videoStream
- elif ((videoStream.stream_info.resolution[0] == bestStream.stream_info.resolution[0]) and
- (videoStream.stream_info.resolution[1] == bestStream.stream_info.resolution[1]) and
- (videoStream.stream_info.bandwidth > bestStream.stream_info.bandwidth)):
- bestStream = videoStream
- if bestStream is not None:
- if bestStream.stream_info.resolution is None:
- self.logger.debug('{} will use bandwidth at {} bps' \
- .format(_channel_id, str(bestStream.stream_info.bandwidth)))
else:
- self.logger.notice(self.plugin_obj.name + ': ' + _channel_id + " will use " +
- str(bestStream.stream_info.resolution[0]) + "x" +
- str(bestStream.stream_info.resolution[1]) +
- " resolution at " + str(bestStream.stream_info.bandwidth) + "bps")
- return bestStream.absolute_uri
+ best_stream = seg
+ if seg.stream_info.resolution:
+ best_resolution = seg.stream_info.resolution[1]
+
+ for video_stream in video_url_m3u.playlists:
+ if best_stream is None:
+ best_stream = video_stream
+ if video_stream.stream_info.resolution:
+ best_resolution = video_stream.stream_info.resolution[1]
+ elif not video_stream.stream_info.resolution:
+ # already set earlier
+ continue
+ elif ((video_stream.stream_info.resolution[0] > best_stream.stream_info.resolution[0]) and
+ (video_stream.stream_info.resolution[1] > best_stream.stream_info.resolution[1]) and
+ not is_set_by_bitrate):
+ best_stream = video_stream
+ best_resolution = video_stream.stream_info.resolution[1]
+ elif ((video_stream.stream_info.resolution[0] == best_stream.stream_info.resolution[0]) and
+ (video_stream.stream_info.resolution[1] == best_stream.stream_info.resolution[1]) and
+ (video_stream.stream_info.bandwidth > best_stream.stream_info.bandwidth) and
+ not is_set_by_bitrate):
+ best_stream = video_stream
+ best_resolution = video_stream.stream_info.resolution[1]
+
+ json_needs_updating = False
+ if best_stream is not None:
+ # use resolution over 720 as HD or
+ # bandwidth over 3mil
+ if best_resolution >= 720 and ch_json['HD'] == 0:
+ ch_json['HD'] = 1
+ json_needs_updating = True
+ elif 0 < best_resolution < 720 and ch_json['HD'] == 1:
+ ch_json['HD'] = 0
+ json_needs_updating = True
+ elif best_stream.stream_info.bandwidth > 3000000 and ch_json['HD'] == 0:
+ ch_json['HD'] = 1
+ json_needs_updating = True
+ elif best_stream.stream_info.bandwidth <= 3000000 and ch_json['HD'] == 1:
+ ch_json['HD'] = 0
+ json_needs_updating = True
+
+ if best_stream.stream_info.resolution is None:
+ self.logger.debug(
+ '{} will use bandwidth at {} bps'
+ .format(_channel_id, str(best_stream.stream_info.bandwidth)))
+ else:
+ self.logger.notice(
+ self.plugin_obj.name + ': ' + _channel_id + " will use " +
+ str(best_stream.stream_info.resolution[0]) + "x" +
+ str(best_stream.stream_info.resolution[1]) +
+ " resolution at " + str(best_stream.stream_info.bandwidth) + "bps")
+
+ if json_needs_updating:
+ self.db.update_channel_json(ch_json, self.plugin_obj.name, self.instance_key)
+ return best_stream.absolute_uri
else:
self.logger.debug('{}: {} No variant streams found for this station. Assuming single stream only.'
- .format(self.plugin_obj.name, _channel_id))
+ .format(self.plugin_obj.name, _channel_id))
return _url
def check_logger_refresh(self):
if not self.logger.isEnabledFor(40):
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
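Note on the revised get_best_stream() above: when a player-stream_quality cap is configured, the loop walks the variant playlists from highest to lowest bandwidth and keeps the first one that drops below the cap, falling back to the lowest-bandwidth variant when every playlist exceeds it; the resolution/bandwidth comparison only takes over when no playlist was capped. A minimal stand-alone sketch of that capping rule, assuming plain (bandwidth, resolution) tuples rather than m3u8 playlist objects (pick_variant and its inputs are illustrative, not part of Cabernet):

def pick_variant(variants, max_bitrate):
    # variants: list of (bandwidth_bps, resolution) tuples; resolution may be None
    best = None
    for bandwidth, resolution in sorted(variants, key=lambda v: v[0], reverse=True):
        best = (bandwidth, resolution)
        if bandwidth < max_bitrate:
            # first variant under the cap while walking from highest to lowest bandwidth
            break
    return best

# Example: with a 3,000,000 bps cap the 2.5 Mbps / 720p variant is chosen.
print(pick_variant([(5_000_000, (1920, 1080)),
                    (2_500_000, (1280, 720)),
                    (800_000, (640, 360))],
                   3_000_000))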
diff --git a/lib/plugins/plugin_epg.py b/lib/plugins/plugin_epg.py
index 25f2188..e463c6b 100644
--- a/lib/plugins/plugin_epg.py
+++ b/lib/plugins/plugin_epg.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -20,7 +20,6 @@
import json
import logging
import threading
-import urllib.request
import lib.common.utils as utils
from lib.db.db_epg import DBepg
@@ -28,7 +27,6 @@
from lib.common.decorators import handle_json_except
-
class PluginEPG:
def __init__(self, _instance_obj):
@@ -39,24 +37,37 @@ def __init__(self, _instance_obj):
self.plugin_obj = _instance_obj.plugin_obj
self.db = DBepg(self.config_obj.data)
self.config_section = self.instance_obj.config_section
- self.episode_adj = self.config_obj.data \
- [self.instance_obj.config_section].get('epg-episode_adjustment')
+ self.episode_adj = self.config_obj.data[self.instance_obj.config_section]\
+ .get('epg-episode_adjustment')
if self.episode_adj is None:
self.episode_adj = 0
else:
self.episode_adj = int(self.episode_adj)
-
+
+ def terminate(self):
+ """
+ Removes all has-a objects from this object and calls terminate on them where applicable
+ Not calling the inherited class at this time
+ """
+ self.logger = None
+ self.instance_obj = None
+ self.config_obj = None
+ self.instance_key = None
+ self.plugin_obj = None
+ self.db = None
+ self.config_section = None
+ self.episode_adj = None
@handle_url_except(timeout=10.0)
@handle_json_except
- def get_uri_data(self, _uri, _header=None):
+ def get_uri_data(self, _uri, _retries, _header=None):
if _header is None:
header = {'User-agent': utils.DEFAULT_USER_AGENT}
else:
header = _header
- req = urllib.request.Request(_uri, headers=header)
- with urllib.request.urlopen(req, timeout=10.0) as resp:
- x = json.load(resp)
+ resp = self.plugin_obj.http_session.get(_uri, headers=header, timeout=8)
+ x = resp.json()
+ resp.raise_for_status()
return x
def refresh_epg(self):
@@ -65,7 +76,7 @@ def refresh_epg(self):
return False
if not self.config_obj.data[self.instance_obj.config_section]['epg-enabled']:
self.logger.info('EPG Collection not enabled for {} {}'
- .format(self.plugin_obj.name, self.instance_key))
+ .format(self.plugin_obj.name, self.instance_key))
return False
forced_dates, aging_dates = self.dates_to_pull()
self.db.del_old_programs(self.plugin_obj.name, self.instance_key)
@@ -91,9 +102,6 @@ def get_channel_days(self, _zone, _uid, _days):
"""
pass
-
-
-
def dates_to_pull(self):
"""
Returns the days to pull, if EPG is less than a day, then
@@ -127,4 +135,4 @@ def is_refresh_expired(self):
def check_logger_refresh(self):
if not self.logger.isEnabledFor(40):
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
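The _retries argument added to get_uri_data() above (and to the other decorated fetchers in this change) is consumed by the handle_url_except decorator rather than by the method body. As a rough idea of what such a decorator could look like — this is a hypothetical sketch, not the actual code in lib/common/decorators.py, and the retry-count handling and one-second back-off are assumptions:

import functools
import time

def handle_url_except_sketch(timeout=10.0):
    def wrapper(fn):
        @functools.wraps(fn)
        def inner(self, _uri, _retries=2, *args, **kwargs):
            last_err = None
            for _ in range(max(1, _retries)):
                try:
                    return fn(self, _uri, _retries, *args, **kwargs)
                except Exception as ex:  # the real decorator catches specific URL/JSON errors
                    last_err = ex
                    time.sleep(1)
            self.logger.warning('URL request failed after {} tries: {}'.format(_retries, last_err))
            return None
        return inner
    return wrapper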
diff --git a/lib/plugins/plugin_handler.py b/lib/plugins/plugin_handler.py
index 8531bfa..0fe8225 100644
--- a/lib/plugins/plugin_handler.py
+++ b/lib/plugins/plugin_handler.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -16,22 +16,27 @@
substantial portions of the Software.
"""
+import configparser
import logging
import json
import importlib
import importlib.resources
+import os
+import pathlib
import lib.common.exceptions as exceptions
import lib.common.utils as utils
from .plugin import Plugin
+from .repo_handler import RepoHandler
from lib.db.db_plugins import DBPlugins
+from lib.db.db_channels import DBChannels
+from lib.db.db_config_defn import DBConfigDefn
PLUGIN_DEFN_FILE = 'plugin_defn.json'
class PluginHandler:
-
logger = None
cls_plugins = None
@@ -41,37 +46,119 @@ def __init__(self, _config_obj):
if PluginHandler.logger is None:
PluginHandler.logger = logging.getLogger(__name__)
self.plugin_defn = self.load_plugin_defn()
- self.collect_plugins(self.config_obj.data['paths']['internal_plugins_pkg'])
- self.collect_plugins(self.config_obj.data['paths']['external_plugins_pkg'])
+ self.check_external_plugin_folder()
+ self.repos = RepoHandler(self.config_obj)
+
+ self.repos.load_cabernet_repo()
+ self.collect_plugins(self.config_obj.data['paths']['internal_plugins_pkg'], False)
+ self.collect_plugins(self.config_obj.data['paths']['external_plugins_pkg'], True)
+ self.cleanup_config_missing_plugins()
if PluginHandler.cls_plugins is not None:
del PluginHandler.cls_plugins
PluginHandler.cls_plugins = self.plugins
- def collect_plugins(self, _plugins_pkg):
- plugin_db = DBPlugins(self.config_obj.data)
- #plugin_db.reinitialize_tables()
- for folder in importlib.resources.contents(_plugins_pkg):
- if folder.startswith('__'):
- continue
+ def terminate(self, _plugin_name):
+ """
+ Calls terminate on the requested plugin and removes it from the plugin list
+ """
+ self.plugins[_plugin_name].terminate()
+ del self.plugins[_plugin_name]
+
+ def check_external_plugin_folder(self):
+ """
+ If the folder does not exist, create it and place an
+ empty __init__.py file in it.
+ """
+ ext_folder = pathlib.Path(self.config_obj.data['paths']['main_dir']) \
+ .joinpath(self.config_obj.data['paths']['external_plugins_pkg'])
+ init_file = ext_folder.joinpath('__init__.py')
+ if not init_file.exists():
+ self.logger.notice('Creating external plugin folder for use by Cabernet')
try:
- importlib.resources.read_text(_plugins_pkg, folder)
- except (IsADirectoryError, PermissionError) as e:
- try:
- plugin = Plugin(self.config_obj, self.plugin_defn, '.'.join([_plugins_pkg, folder]))
- self.plugins[plugin.name] = plugin
- except (exceptions.CabernetException, AttributeError):
- pass
+ if not ext_folder.exists():
+ os.makedirs(ext_folder)
+ f = open(init_file, 'wb')
+ f.close()
+ except PermissionError as e:
+ self.logger.warning('ERROR: {} unable to create {}'.format(str(e), init_file))
+
+ def collect_plugins(self, _plugins_pkg, _is_external):
+ pkg = importlib.util.find_spec(_plugins_pkg)
+ if not pkg:
+ # module folder does not exist, do nothing
+ self.logger.notice(
+ 'Plugin folder {} does not exist or is missing an empty __init__.py file.'
+ .format(_plugins_pkg))
+ return
+
+ for folder in importlib.resources.contents(_plugins_pkg):
+ self.collect_plugin(_plugins_pkg, _is_external, folder)
self.del_missing_plugins()
+ def cleanup_config_missing_plugins(self):
+ """
+ Handles the case where a plugin has been deleted from its folder, but the
+ database and config still contain its data.
+ """
+ ch_db = DBChannels(self.config_obj.data)
+ ns_inst_list = ch_db.get_channel_instances()
+ ns_list = ch_db.get_channel_names()
+ for ns in ns_list:
+ ns = ns['namespace']
+ if not self.plugins.get(ns) and self.config_obj.data.get(ns.lower()):
+ for nv in self.config_obj.data.get(ns.lower()).items():
+ new_value = self.set_value_type(nv[1])
+ self.config_obj.data[ns.lower()][nv[0]] = new_value
+ for ns_inst in ns_inst_list:
+ if not self.plugins.get(ns_inst['namespace']):
+ inst_name = utils.instance_config_section(ns_inst['namespace'], ns_inst['instance'])
+ if self.config_obj.data.get(inst_name):
+ for nv in self.config_obj.data.get(inst_name).items():
+ new_value = self.set_value_type(nv[1])
+ self.config_obj.data[inst_name][nv[0]] = new_value
+ db_configdefn = DBConfigDefn(self.config_obj.data)
+ db_configdefn.add_config(self.config_obj.data)
+
+ def set_value_type(self, _value):
+ if not isinstance(_value, str):
+ return _value
+ if _value == 'True':
+ return True
+ elif _value == 'False':
+ return False
+ elif _value.isdigit():
+ return int(_value)
+ else:
+ return _value
+
+ def collect_plugin(self, _plugins_pkg, _is_external, _folder):
+ if _folder.startswith('__'):
+ return
+ try:
+ importlib.resources.read_text(_plugins_pkg, _folder)
+ except (IsADirectoryError, PermissionError):
+ try:
+ plugin = Plugin(self.config_obj, self.plugin_defn, _plugins_pkg, _folder, _is_external)
+ self.plugins[plugin.name] = plugin
+ except (exceptions.CabernetException, AttributeError):
+ pass
+ except UnicodeDecodeError:
+ pass
+ except Exception:
+ pass
+ return
+
def del_missing_plugins(self):
"""
- deletes the plugins from the db that are no longer present
+ marks plugins in the db as uninstalled when they are no longer present
"""
plugin_db = DBPlugins(self.config_obj.data)
- plugin_dblist = plugin_db.get_plugins()
- for p_dict in plugin_dblist:
- if (p_dict['name'] not in self.plugins) and (p_dict['name'] != utils.CABERNET_NAMESPACE):
- plugin_db.del_plugin(p_dict['name'])
+ plugin_dblist = plugin_db.get_plugins(_installed=True)
+ if plugin_dblist:
+ for p_dict in plugin_dblist:
+ if (p_dict['name'] not in self.plugins) and (p_dict['name'] != utils.CABERNET_ID):
+ p_dict['version']['installed'] = False
+ plugin_db.save_plugin(p_dict)
def load_plugin_defn(self):
try:
@@ -86,7 +173,7 @@ def load_plugin_defn(self):
def initialize_plugins(self):
for name, plugin in self.plugins.items():
- if not self.config_obj.data[plugin.name.lower()]['enabled']:
+ if not plugin.enabled or not self.config_obj.data[plugin.name.lower()]['enabled']:
self.logger.info('Plugin {} is disabled in config.ini'.format(plugin.name))
plugin.enabled = False
else:
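A side note on cleanup_config_missing_plugins() above: the orphaned section values read back from config.ini arrive as strings, which is presumably why set_value_type() re-coerces them. A stand-alone mirror of that coercion, for illustration only (the real method is defined on PluginHandler above):

def set_value_type(value):
    if not isinstance(value, str):
        return value
    if value == 'True':
        return True
    if value == 'False':
        return False
    if value.isdigit():
        return int(value)
    return value

assert set_value_type('True') is True
assert set_value_type('3600') == 3600
assert set_value_type('-5') == '-5'    # isdigit() is False for negatives, so it stays a string
assert set_value_type(8080) == 8080    # non-strings pass through unchanged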
diff --git a/lib/plugins/plugin_instance_obj.py b/lib/plugins/plugin_instance_obj.py
index 1962a9c..cf86f1f 100644
--- a/lib/plugins/plugin_instance_obj.py
+++ b/lib/plugins/plugin_instance_obj.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -41,8 +41,31 @@ def __init__(self, _plugin_obj, _instance_key):
else:
self.enabled = True
+ def terminate(self):
+ """
+ Removes all has-a objects from this object and calls terminate on them where applicable
+ Not calling the inherited class at this time
+ """
+ self.enabled = False
+ if self.channels:
+ self.channels.terminate()
+ if self.epg:
+ self.epg.terminate()
+ if self.programs:
+ self.programs.terminate()
+ self.logger = None
+ self.config_obj = None
+ self.plugin_obj = None
+ self.instance_key = None
+ self.scheduler_db = None
+ self.enabled = None
+ self.channels = None
+ self.programs = None
+ self.epg = None
+
+
##############################
- ### EXTERNAL STREAM METHODS
+ # ## EXTERNAL STREAM METHODS
##############################
def is_time_to_refresh(self, _last_refresh):
@@ -61,12 +84,13 @@ def get_channel_uri(self, sid):
if self.enabled and self.config_obj.data[self.config_section]['enabled']:
return self.channels.get_channel_uri(sid)
else:
- self.logger.debug('{}:{} Plugin instance disabled, not getting Channel uri' \
+ self.logger.debug(
+ '{}:{} Plugin instance disabled, not getting Channel uri'
.format(self.plugin_obj.name, self.instance_key))
return None
##############################
- ### EXTERNAL EPG METHODS
+ # ## EXTERNAL EPG METHODS
##############################
def get_channel_day(self, _zone, _uid, _day):
@@ -77,11 +101,11 @@ def get_channel_day(self, _zone, _uid, _day):
if self.enabled and self.config_obj.data[self.config_section]['enabled']:
return self.epg.get_channel_day(_zone, _uid, _day)
else:
- self.logger.debug('{}:{} Plugin instance disabled, not getting EPG channel data' \
+ self.logger.debug(
+ '{}:{} Plugin instance disabled, not getting EPG channel data'
.format(self.plugin_obj.name, self.instance_key))
return None
-
def get_program_info(self, _prog_id):
"""
External request to return the program details
@@ -91,11 +115,11 @@ def get_program_info(self, _prog_id):
if self.enabled and self.config_obj.data[self.config_section]['enabled']:
return self.programs.get_program_info(_prog_id)
else:
- self.logger.debug('{}:{} Plugin instance disabled, not getting EPG program data' \
+ self.logger.debug(
+ '{}:{} Plugin instance disabled, not getting EPG program data'
.format(self.plugin_obj.name, self.instance_key))
return None
-
def get_channel_list(self, _zone_id, _ch_ids=None):
"""
External request to return the channel list for a zone.
@@ -104,14 +128,13 @@ def get_channel_list(self, _zone_id, _ch_ids=None):
if self.enabled and self.config_obj.data[self.config_section]['enabled']:
return self.channels.get_channel_list(_zone_id, _ch_ids)
else:
- self.logger.debug('{}:{} Plugin instance disabled, not getting EPG zone data' \
+ self.logger.debug(
+ '{}:{} Plugin instance disabled, not getting EPG zone data'
.format(self.plugin_obj.name, self.instance_key))
return None
-
##############################
-
def scheduler_tasks(self):
"""
dummy routine that will be overridden by subclass,
@@ -119,7 +142,6 @@ def scheduler_tasks(self):
"""
pass
-
def refresh_channels(self):
"""
Called from the scheduler
@@ -129,11 +151,11 @@ def refresh_channels(self):
self.config_obj.data[self.config_section]['enabled']:
return self.channels.refresh_channels()
else:
- self.logger.notice('{}:{} Plugin instance disabled, not refreshing Channels' \
+ self.logger.notice(
+ '{}:{} Plugin instance disabled, not refreshing Channels'
.format(self.plugin_obj.name, self.instance_key))
return False
-
def refresh_epg(self):
"""
Called from the scheduler
@@ -143,13 +165,14 @@ def refresh_epg(self):
self.config_obj.data[self.config_section]['enabled']:
return self.epg.refresh_epg()
else:
- self.logger.info('{}:{} Plugin instance disabled, not refreshing EPG' \
+ self.logger.info(
+ '{}:{} Plugin instance disabled, not refreshing EPG'
.format(self.plugin_obj.name, self.instance_key))
return False
def check_logger_refresh(self):
if not self.logger.isEnabledFor(40):
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
if self.channels is not None:
self.channels.check_logger_refresh()
if self.epg is not None:
diff --git a/lib/plugins/plugin_manager/__init__.py b/lib/plugins/plugin_manager/__init__.py
new file mode 100644
index 0000000..d05eb0a
--- /dev/null
+++ b/lib/plugins/plugin_manager/__init__.py
@@ -0,0 +1,2 @@
+import lib.plugins.plugin_manager.plugins_html
+import lib.plugins.plugin_manager.plugins_form_html
diff --git a/lib/plugins/plugin_manager/plugin_manager.py b/lib/plugins/plugin_manager/plugin_manager.py
new file mode 100644
index 0000000..ce922ea
--- /dev/null
+++ b/lib/plugins/plugin_manager/plugin_manager.py
@@ -0,0 +1,323 @@
+"""
+MIT License
+
+Copyright (C) 2023 ROCKY4546
+https://github.com/rocky4546
+
+This file is part of Cabernet
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+"""
+
+import logging
+import os
+import pathlib
+import plugins
+import shutil
+import sys
+import time
+import urllib.request
+import zipfile
+
+import lib.common.utils as utils
+from lib.db.db_plugins import DBPlugins
+from lib.db.db_scheduler import DBScheduler
+from lib.common.decorators import handle_url_except
+
+
+class PluginManager:
+ logger = None
+
+ def __init__(self, _plugins, _config_obj=None):
+ """
+ If called during a patch update, the plugin handler is unknown,
+ so pass None for _plugins and supply the config object instead.
+ Otherwise, pass in the plugin handler and set the config object to None
+ """
+ if PluginManager.logger is None:
+ PluginManager.logger = logging.getLogger(__name__)
+ self.plugin_handler = _plugins
+ if self.plugin_handler:
+ self.config = _plugins.config_obj.data
+ self.config_obj = _plugins.config_obj
+ else:
+ self.config = _config_obj.data
+ self.config_obj = _config_obj
+
+ self.plugin_db = DBPlugins(self.config)
+ self.db_scheduler = DBScheduler(self.config)
+ self.plugin_rec = None
+ self.repo_rec = None
+
+ def check_plugin_status(self, _repo_id, _plugin_id):
+ """
+ Returns None if successful; otherwise returns a string
+ describing the error
+ """
+ self.plugin_rec = self.plugin_db.get_plugins(None, _repo_id, _plugin_id)
+ if not self.plugin_rec:
+ self.logger.notice('No plugin found, aborting')
+ return 'Error: No plugin found, aborting request'
+
+ self.repo_rec = self.plugin_db.get_repos(_repo_id)
+ if not self.repo_rec:
+ self.logger.notice('No repo {} associated with plugin {}, aborting install'
+ .format(_repo_id, _plugin_id))
+ return 'Error: No repo found {}, associated with plugin {}, aborting install' \
+ .format(_repo_id, _plugin_id)
+ self.plugin_rec = self.plugin_rec[0]
+ self.repo_rec = self.repo_rec[0]
+
+ # if plugin exists, make sure we can delete it
+ if self.plugin_rec['external']:
+ plugin_path = self.config['paths']['external_plugins_pkg']
+ else:
+ plugin_path = self.config['paths']['internal_plugins_pkg']
+ plugin_path = pathlib.Path(
+ self.config['paths']['main_dir'],
+ plugin_path,
+ _plugin_id
+ )
+ if plugin_path.exists() and not os.access(plugin_path, os.W_OK):
+ self.logger.warning('Unable to update folder: OS Permission issue on plugin {}, aborting'.format(plugin_path))
+ return 'Error: Unable to update folder: OS Permission issue on plugin {}, aborting'.format(plugin_path)
+
+ return None
+
+ def check_version_requirements(self):
+ # check Cabernet required version
+ req = self.plugin_rec.get('requires')
+ if req:
+ cabernet = req[0].get(utils.CABERNET_ID)
+ if cabernet:
+ ver = cabernet.get('version')
+ if ver:
+ v_req = utils.get_version_index(ver)
+ v_cur = utils.get_version_index(utils.VERSION)
+ if v_req > v_cur:
+ self.logger.notice('Cabernet version too low, aborting install')
+ return 'Error: Cabernet version {} too low for plugin. Requires {}, aborting install' \
+ .format(utils.VERSION, ver)
+ return None
+
+ def get_plugin_zipfile(self):
+ # starting install process
+ zip_file = ''.join([
+ self.plugin_rec['id'], '-',
+ self.plugin_rec['version']['latest'],
+ '.zip'
+ ])
+ zippath = '/'.join([
+ self.repo_rec['dir']['datadir']['url'],
+ self.plugin_rec['id'], zip_file
+ ])
+ tmp_zip_path = self.download_zip(zippath, 2, zip_file)
+ if not tmp_zip_path:
+ self.logger.notice('Unable to obtain zip file from repo, aborting')
+ results = 'Error: Unable to obtain zip file {} from repo, aborting' \
+ .format(zip_file)
+ return (False, results)
+ results = 'Downloaded plugin {} from repo'.format(zip_file)
+ try:
+ with zipfile.ZipFile(tmp_zip_path, 'r') as z:
+ file_list = z.namelist()
+ res = [i for i in file_list if i.endswith(self.plugin_rec['id']+'/')]
+ if not res:
+ results += ' Error: Zip file does not contain plugin folder {}, aborting' \
+ .format(self.plugin_rec['id'])
+ return (False, results)
+ if len(res) != 1:
+ results += ' Error: Zip file contains multiple plugin folders {}, aborting' \
+ .format(self.plugin_rec['id'])
+ return (False, results)
+
+ z.extractall(os.path.dirname(tmp_zip_path))
+
+ except FileNotFoundError as ex:
+ self.logger.notice('File {} missing from tmp area, aborting'
+ .format(zip_file))
+ results += ' Error: File {} missing from tmp area, aborting' \
+ .format(zip_file)
+ return (False, results)
+
+ tmp_plugin_path = pathlib.Path(os.path.dirname(tmp_zip_path), res[0])
+ plugin_folder = pathlib.Path(
+ self.config['paths']['main_dir'],
+ self.config['paths']['external_plugins_pkg'])
+
+ plugin_id_folder = plugin_folder.joinpath(self.plugin_rec['id'])
+
+ if plugin_id_folder.exists():
+ try:
+ shutil.rmtree(plugin_id_folder)
+ except OSError as ex:
+ self.logger.warning('Unable to upgrade, {}'.format(str(ex)))
+ results += ' Error: Unable to delete folder for upgrade, {}'.format(str(ex))
+ return (False, results)
+
+ shutil.move(str(tmp_plugin_path), str(plugin_folder))
+ results += ' Installed plugin {} from repo, version {}' \
+ .format(self.plugin_rec['id'], self.plugin_rec['version']['latest'])
+
+ # remove the leftovers in the tmp folder
+ try:
+ p = pathlib.Path(tmp_plugin_path)
+ shutil.rmtree(p.parents[0])
+ os.remove(tmp_zip_path)
+ except OSError as ex:
+ self.logger.notice('Unable to delete plugin from tmp area: {}'.format(str(ex)))
+ results += ' Error: Unable to delete plugin folder from tmp area {}'.format(str(ex))
+ return (False, results)
+ return (True, results)
+
+ def upgrade_plugin(self, _repo_id, _plugin_id, _sched_queue):
+ results = self.check_plugin_status(_repo_id, _plugin_id)
+ if results:
+ return results
+
+ results = self.check_version_requirements()
+ if results:
+ return results
+
+ is_successful, results = self.get_plugin_zipfile()
+ if not is_successful:
+ return results
+
+ # update the plugin database entry with the new version...
+ self.plugin_rec['version']['current'] = self.plugin_rec['version']['latest']
+ self.plugin_db.save_plugin(self.plugin_rec)
+
+ results += ' A restart is required to finish cleaning up plugin'
+ return results
+
+ def install_plugin(self, _repo_id, _plugin_id, _sched_queue=None):
+ results = self.check_plugin_status(_repo_id, _plugin_id)
+ if results:
+ return results
+
+ if self.plugin_rec['version']['installed']:
+ self.logger.notice('Error: Plugin already installed, aborting')
+ return 'Error: Plugin already installed, aborting install'
+
+ results = self.check_version_requirements()
+ if results:
+ return results
+
+ is_successful, results = self.get_plugin_zipfile()
+ if not is_successful:
+ return results
+
+ # next inform cabernet that there is a new plugin
+ if self.plugin_handler:
+ try:
+ self.plugin_handler.collect_plugin(self.config['paths']['external_plugins_pkg'], True, self.plugin_rec['id'])
+ except FileNotFoundError as ex:
+ self.logger.notice('Plugin folder not in external plugin folder: {}'.format(str(ex)))
+ results += ' Error: Plugin folder not in external plugin folder {}'.format(str(ex))
+ return results
+
+ # update the database to say plugin is installed and what version
+ # Enable plugin?
+ self.config_obj.write(
+ self.plugin_rec['name'].lower(), 'enabled', True)
+
+ results += ' A restart is suggested to finish cleaning up plugin'
+ return results
+
+ def delete_plugin(self, _repo_id, _plugin_id, _sched_queue=None):
+ plugin_rec = self.plugin_db.get_plugins(None, _repo_id, _plugin_id)
+ if not plugin_rec:
+ self.logger.notice('No plugin found, aborting')
+ return 'Error: No plugin found, aborting delete request'
+ elif not plugin_rec[0]['version']['installed']:
+ self.logger.notice('Plugin not installed, aborting')
+ return 'Error: Plugin not installed, aborting delete request'
+
+ plugin_rec = plugin_rec[0]
+ namespace = plugin_rec['name']
+ if plugin_rec['external']:
+ plugin_path = self.config['paths']['external_plugins_pkg']
+ else:
+ plugin_path = self.config['paths']['internal_plugins_pkg']
+
+ plugin_path = pathlib.Path(
+ self.config['paths']['main_dir'],
+ plugin_path,
+ _plugin_id
+ )
+ if not plugin_path.exists():
+ self.logger.notice('Missing plugin {}, aborting'.format(plugin_path))
+ return 'Error: Missing plugin {}, aborting'.format(plugin_path)
+ elif not os.access(plugin_path, os.W_OK):
+ self.logger.warning('Unable to delete folder: OS Permission issue on plugin {}, aborting'.format(plugin_path))
+ return 'Error: Unable to delete folder: OS Permission issue on plugin {}, aborting'.format(plugin_path)
+
+ results = 'Deleting all {} scheduled tasks'.format(namespace)
+ tasks = self.db_scheduler.get_tasks_by_name(plugin_rec['name'], None)
+ if _sched_queue:
+ for task in tasks:
+ _sched_queue.put({'cmd': 'delinstance', 'name': plugin_rec['name'], 'instance': None})
+
+ results += ' Deleting plugin objects'
+ if self.plugin_handler:
+ self.plugin_handler.terminate(namespace)
+
+ results += ' Deleting plugin folder {}'.format(plugin_path)
+ try:
+ shutil.rmtree(plugin_path)
+ except OSError as ex:
+ self.logger.notice('Unable to delete plugin: {}'.format(str(ex)))
+ results += ' Error: Unable to delete plugin folder {}'.format(str(ex))
+ return results
+
+ plugin_rec['version']['installed'] = False
+ plugin_rec['version']['current'] = None
+ plugin_rec = self.plugin_db.save_plugin(plugin_rec)
+
+ results += ' A restart is suggested to finish cleaning up plugin'
+ return results
+
+ def add_instance(self, _repo_id, _plugin_id, _sched_queue=None):
+ plugin_rec = self.plugin_db.get_plugins(None, _repo_id, _plugin_id)
+ if not plugin_rec:
+ self.logger.notice('No plugin found, aborting')
+ return 'Error: No plugin found, aborting add instance request'
+ elif not plugin_rec[0]['version']['installed']:
+ self.logger.notice('Plugin not installed, aborting')
+ return 'Error: Plugin not installed, aborting add instance request'
+
+ plugin_rec = plugin_rec[0]
+ namespace = plugin_rec['name']
+
+ results = 'Adding Instance {}'.format(_plugin_id)
+
+ results += ' A restart is suggested to finish adding the instance'
+ return results
+
+
+
+ @handle_url_except
+ def download_zip(self, _zip_url, _retries, _zip_filename):
+ """
+ Returns the location of the zip file
+ """
+ buf_size = 2 * 16 * 16 * 1024
+ save_path = pathlib.Path(self.config['paths']['tmp_dir']).joinpath(_zip_filename)
+
+ h = {'Content-Type': 'application/zip', 'User-agent': utils.DEFAULT_USER_AGENT}
+ req = urllib.request.Request(_zip_url, headers=h)
+ with urllib.request.urlopen(req) as resp:
+ with open(save_path, 'wb') as out_file:
+ while True:
+ chunk = resp.read(buf_size)
+ if not chunk:
+ break
+ out_file.write(chunk)
+ return save_path
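download_zip() above streams the archive into the tmp directory in 512 KiB chunks (2 * 16 * 16 * 1024 bytes). The same pattern with the Cabernet config and decorator stripped away, so it can be tried in isolation — the URL below is a placeholder, not a real repo location:

import urllib.request

def download_file(url, save_path, buf_size=512 * 1024):
    req = urllib.request.Request(url, headers={'User-agent': 'Mozilla/5.0'})
    with urllib.request.urlopen(req) as resp, open(save_path, 'wb') as out_file:
        while True:
            chunk = resp.read(buf_size)
            if not chunk:
                break
            out_file.write(chunk)
    return save_path

# download_file('https://example.com/provider_xyz-1.0.0.zip', '/tmp/provider_xyz-1.0.0.zip')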
diff --git a/lib/plugins/plugin_manager/plugins_form_html.py b/lib/plugins/plugin_manager/plugins_form_html.py
new file mode 100644
index 0000000..dbb749b
--- /dev/null
+++ b/lib/plugins/plugin_manager/plugins_form_html.py
@@ -0,0 +1,467 @@
+"""
+MIT License
+
+Copyright (C) 2023 ROCKY4546
+https://github.com/rocky4546
+
+This file is part of Cabernet
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+"""
+
+import logging
+
+import lib.common.utils as utils
+import lib.common.exceptions as exceptions
+
+from lib.common.decorators import getrequest
+from lib.common.decorators import postrequest
+from lib.web.pages.templates import web_templates
+from lib.db.db_plugins import DBPlugins
+from lib.plugins.plugin_manager.plugin_manager import PluginManager
+
+@getrequest.route('/api/pluginsform')
+def get_plugins_form_html(_webserver, _namespace=None, _sort_col=None, _sort_dir=None, filter_dict=None):
+ plugins_form = PluginsFormHTML(_webserver.config)
+
+ _area = _webserver.query_data.get('area')
+ _plugin = _webserver.query_data.get('plugin')
+ _repo = _webserver.query_data.get('repo')
+
+ if _area is None and _plugin is None and _repo is None:
+ _webserver.do_mime_response(
+ 404, 'text/html', web_templates['htmlError']
+ .format('404 - Badly formed request'))
+ elif _area:
+ try:
+ form = plugins_form.get(_area)
+ _webserver.do_mime_response(200, 'text/html', form)
+ except exceptions.CabernetException as ex:
+ _webserver.do_mime_response(
+ 404, 'text/html', web_templates['htmlError']
+ .format('404 - Badly formed area request'))
+ elif _plugin and _repo:
+ try:
+ form = plugins_form.get_plugin(_repo, _plugin)
+ _webserver.do_mime_response(200, 'text/html', form)
+ except exceptions.CabernetException as ex:
+ _webserver.do_mime_response(
+ 404, 'text/html', web_templates['htmlError']
+ .format('404 - Badly formed plugin request'))
+ else:
+ # case where plugin and repo are not provided together
+ _webserver.do_mime_response(
+ 404, 'text/html', web_templates['htmlError']
+ .format('404 - Badly formed plugin/repo request'))
+
+
+@postrequest.route('/api/pluginsform')
+def post_plugins_html(_webserver):
+ action = _webserver.query_data.get('action')
+ pluginid = _webserver.query_data.get('pluginId')
+ repoid = _webserver.query_data.get('repoId')
+ if action and pluginid and repoid:
+ action = action[0]
+ pluginid = pluginid[0]
+ repoid = repoid[0]
+ if action == "deletePlugin":
+ pm = PluginManager(_webserver.plugins)
+ results = pm.delete_plugin(repoid, pluginid, _webserver.sched_queue)
+ _webserver.do_mime_response(200, 'text/html', 'STATUS: Deleting plugin: {}:{} '.format(repoid, pluginid) + str(results))
+ elif action == "addInstance":
+ pm = PluginManager(_webserver.plugins)
+ results = pm.add_instance(repoid, pluginid, _webserver.sched_queue)
+ _webserver.do_mime_response(200, 'text/html', 'STATUS: Adding Instance plugin: {}:{} '.format(repoid, pluginid) + str(results))
+ elif action == "installPlugin":
+ pm = PluginManager(_webserver.plugins)
+ results = pm.install_plugin(repoid, pluginid, _webserver.sched_queue)
+ _webserver.do_mime_response(200, 'text/html', 'STATUS: Installing plugin: {}:{} '.format(repoid, pluginid) + str(results))
+ elif action == "upgradePlugin":
+ pm = PluginManager(_webserver.plugins)
+ results = pm.upgrade_plugin(repoid, pluginid, _webserver.sched_queue)
+ _webserver.do_mime_response(200, 'text/html', 'STATUS: Upgrading plugin: {}:{} '.format(repoid, pluginid) + str(results))
+ else:
+ _webserver.do_mime_response(200, 'text/html', "doing something else " + str(action))
+ else:
+ _webserver.do_mime_response(
+ 404, 'text/html', web_templates['htmlError']
+ .format('404 - Badly formed request'))
+
+
+class PluginsFormHTML:
+
+ def __init__(self, _config):
+ self.logger = logging.getLogger(__name__)
+ self.config = _config
+ self.plugin_db = DBPlugins(self.config)
+ self.active_tab_name = None
+ self.num_of_plugins = 0
+ self.plugin_data = None
+ self.area = None
+
+ def get(self, _area):
+ self.area = _area
+ return ''.join([self.header, self.body])
+
+ def get_plugin(self, _repo_id, _plugin_id):
+ plugin_defn = self.plugin_db.get_plugins(
+ _installed=None,
+ _repo_id=_repo_id,
+ _plugin_id=_plugin_id)
+ if not plugin_defn:
+ self.logger.warning(
+ 'HTTP request: Unknown plugin: {}'
+ .format(_plugin_id))
+ raise exceptions.CabernetException(
+ 'Unknown Plugin: {}'
+ .format(_plugin_id))
+ plugin_defn = plugin_defn[0]
+ return ''.join([self.get_plugin_header(plugin_defn), self.get_menu_section(plugin_defn), self.get_plugin_section(plugin_defn)])
+
+ def get_menu_top_section(self, _plugin_defn):
+ return ''.join([
+ '
'
+ ])
+
+ def get_menu_items(self, _plugin_defn):
+ if not _plugin_defn['external']:
+ menu_list=''
+ elif _plugin_defn['version']['installed']:
+ # delete and possible upgrade...
+ menu_list = ''
+ if _plugin_defn['version']['latest'] != _plugin_defn['version']['current']:
+ menu_list = ''.join([
+ '
'
+ ])
+ menu_list += ''.join([
+ '
'
+ ])
+ else:
+ # install
+ menu_list = ''.join([
+ '
'
+ ])
+ return menu_list
+
+ def get_menu_section(self, _plugin_defn):
+ pluginid = _plugin_defn['id']
+ repoid = _plugin_defn['repoid']
+
+ return ''.join([
+ '
',
+ '
'])
+
+ def get_plugin_header(self, _plugin_defn):
+ instances = self.plugin_db.get_instances(_namespace=_plugin_defn['name'])
+ if instances:
+ # array of instance names
+ instances = instances[_plugin_defn['name']]
+ else:
+ instances = None
+
+ if not _plugin_defn['version'].get('latest'):
+ _plugin_defn['version']['latest'] = None
+
+ html = ''.join([
+ '
',
+ '
',
+ str(_plugin_defn['name']), '
',
+
+ '
', str(_plugin_defn['summary']), '
',
+
+ '
',
+ '
',
+ '
',
+
+ '
',
+ str(_plugin_defn['description']),
+ '
',
+ '
'
+ ])
+ return html
+
+ def get_plugin_section(self, _plugin_defn):
+ pluginid = _plugin_defn['id']
+ repoid = _plugin_defn['repoid']
+ instances = self.plugin_db.get_instances(_namespace=_plugin_defn['name'])
+ if instances:
+ # array of instance names
+ instances = instances[_plugin_defn['name']]
+ else:
+ instances = None
+
+ if not _plugin_defn['version'].get('latest'):
+ _plugin_defn['version']['latest'] = None
+
+ latest_version = _plugin_defn['version']['latest']
+ upgrade_available = ''
+ if latest_version != _plugin_defn['version']['current'] and _plugin_defn['external']:
+ upgrade_available = ''.join([
+
+ '
'
+ ])
+
+ if _plugin_defn['version']['installed']:
+ version_installed_div = ''.join([
+ '
',
+ '
Version Installed:
',
+ '
',
+ str(_plugin_defn['version']['current']), '
',
+ upgrade_available,
+ '
',
+ ])
+ else:
+ version_installed_div = ''
+
+ if _plugin_defn.get('changelog'):
+ changelog_div = ''.join([
+ '
',
+ '
Change Log:
',
+ '
',
+ '',
+ str(_plugin_defn['changelog']), '
',
+ '
',
+ ])
+ else:
+ changelog_div = ''
+
+
+ html = ''.join([
+ '',
+ version_installed_div,
+ '
',
+ '
Latest Version:
',
+ '
',
+ str(_plugin_defn['version']['latest']),
+ '
',
+ '
',
+
+ changelog_div,
+
+ '
',
+ '
Dependencies:
',
+ '
',
+ str(_plugin_defn['dependencies']),
+ '
',
+ '
',
+
+ '
',
+ '
Source:
',
+ '
',
+ str(_plugin_defn['source']),
+ '
',
+ '
',
+
+ '
',
+ '
License:
',
+ '
',
+ str(_plugin_defn['license']),
+ '
',
+ '
',
+
+ '
',
+ '
Author:
',
+ '
',
+ str(_plugin_defn['provider-name']),
+ '
',
+ '
',
+
+ '
',
+ '
Origin:
',
+ '
',
+ 'Cabernet Plugin Repository',
+ '
',
+ '
',
+
+ '
',
+ '
Category:
',
+ '
',
+ str(_plugin_defn['category']),
+ '
',
+ '
',
+
+ '
',
+ '
Related Website:
',
+ '
',
+ str(_plugin_defn['website']),
+ '
',
+ '
',
+
+ '
',
+ '
Instances:
',
+ '
',
+ str(instances),
+ '
',
+ '
',
+
+ '
',
+ '
'
+ ])
+ return html
+
+ def form_plugins(self, _is_installed):
+ plugin_defns = self.plugin_db.get_plugins(
+ _is_installed, None, None)
+
+ if not plugin_defns:
+ if self.area == 'My_Plugins':
+ return ''.join([
+ 'No plugins are installed. Go to Catalog and select a plugin to install.'
+ ])
+ elif self.area == 'Catalog':
+ return ''.join([
+ 'All available plugins are installed'
+ ])
+
+ plugins_list = ''
+ for plugin_defn in sorted(plugin_defns, key=lambda p: p['id']):
+ repo_id = plugin_defn['repoid']
+ plugin_id = plugin_defn['id']
+ plugin_name = plugin_defn['name']
+
+ img_size = self.lookup_config_size()
+
+ latest_version = plugin_defn['version']['latest']
+ upgrade_available = ''
+ if _is_installed and plugin_defn['external']:
+ if latest_version != plugin_defn['version']['current']:
+ upgrade_available = '
Upgrade to {}
' \
+ .format(latest_version)
+ current_version = plugin_defn['version']['current']
+ elif not _is_installed:
+ current_version = plugin_defn['version']['latest']
+ else:
+ current_version = 'Internal'
+
+ plugins_list += ''.join([
+ '',
+ '',
+ '
',
+ '
',
+ upgrade_available,
+
+ '
', plugin_name,
+ '
',
+ str(current_version), '
',
+ ' '
+ ])
+ return plugins_list
+
+ @property
+ def header(self):
+ return ''.join([
+ '
',
+ ''
+ ])
+
+ @property
+ def form(self):
+ if self.area == 'My_Plugins':
+ forms_html = ''.join([
+ '
',
+ self.form_plugins(True), '
'])
+ elif self.area == 'Catalog':
+ forms_html = ''.join([
+ 'Plugins Available To Install:'
+ '
',
+ self.form_plugins(_is_installed=False),
+ '
'])
+ else:
+ self.logger.warning('HTTP request: unknown area: {}'.format(self.area))
+ raise exceptions.CabernetException('Unknown Tab: {}'.format(self.area))
+ return forms_html
+
+ @property
+ def body(self):
+ return ''.join([
+ '
'])
+
+ def lookup_config_size(self):
+ size_text = self.config['channels']['thumbnail_size']
+ if size_text == 'None':
+ return 0
+ elif size_text == 'Tiny(16)':
+ return 16
+ elif size_text == 'Small(48)':
+ return 48
+ elif size_text == 'Medium(128)':
+ return 128
+ elif size_text == 'Large(180)':
+ return 180
+ elif size_text == 'X-Large(270)':
+ return 270
+ elif size_text == 'Full-Size':
+ return None
+ else:
+ return None
+
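lookup_config_size() above maps the channels/thumbnail_size label to a pixel size. The same mapping in table form, shown here only for reference (the elif chain above remains the actual implementation):

THUMBNAIL_SIZES = {
    'None': 0, 'Tiny(16)': 16, 'Small(48)': 48, 'Medium(128)': 128,
    'Large(180)': 180, 'X-Large(270)': 270, 'Full-Size': None,
}

def lookup_config_size(size_text):
    # unknown labels fall back to None (full size), matching the final else branch
    return THUMBNAIL_SIZES.get(size_text, None)

assert lookup_config_size('Medium(128)') == 128
assert lookup_config_size('Full-Size') is None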
diff --git a/lib/plugins/plugin_manager/plugins_html.py b/lib/plugins/plugin_manager/plugins_html.py
new file mode 100644
index 0000000..81d862e
--- /dev/null
+++ b/lib/plugins/plugin_manager/plugins_html.py
@@ -0,0 +1,110 @@
+"""
+MIT License
+
+Copyright (C) 2023 ROCKY4546
+https://github.com/rocky4546
+
+This file is part of Cabernet
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+"""
+
+from lib.common.decorators import getrequest
+
+
+@getrequest.route('/api/plugins')
+def get_plugins_html(_webserver):
+ plugins_html = PluginsHTML()
+ html = plugins_html.get()
+ _webserver.do_mime_response(200, 'text/html', html)
+
+
+class PluginsHTML:
+
+ def __init__(self):
+ self.config = None
+ self.active_tab_name = None
+ self.tab_names = None
+
+ def get(self):
+ self.tab_names = self.get_tabs()
+ return ''.join([self.header, self.body])
+
+ @property
+ def header(self):
+ return ''.join([
+ '
',
+ ' ',
+ ' ',
+ '
Plugins ',
+ ' ',
+ '',
+ ' ',
+ ' ',
+ '',
+ '',
+ ''
+ ''
+ ])
+
+ @property
+ def title(self):
+ return ''.join([
+ '
',
+ '
Plugins '
+ ])
+
+ @property
+ def tabs(self):
+ activeTab = 'activeTab'
+ tabs_html = ''.join([
+ '
'])
+ for name, icon in self.tab_names.items():
+ key = name.replace(' ', '_')
+ tabs_html = ''.join([
+ tabs_html,
+ '',
+ '',
+ icon,
+ ' ',
+ name, ' '
+ ])
+ activeTab = ''
+ self.active_tab_name = name
+ tabs_html = ''.join([tabs_html, ' '])
+ return tabs_html
+
+ @property
+ def body(self):
+ return ''.join([
+ '
',
+ self.title,
+ self.tabs,
+ '',
+ '
',
+ self.plugin_page
+ ])
+
+ @property
+ def plugin_page(self):
+ return ''.join([
+ '
'
+ ])
+
+
+
+ def get_tabs(self):
+ return {'My Plugins': 'extension', 'Catalog': 'add_shopping_cart'}
+
diff --git a/lib/plugins/plugin_obj.py b/lib/plugins/plugin_obj.py
index 5be238b..1c32deb 100644
--- a/lib/plugins/plugin_obj.py
+++ b/lib/plugins/plugin_obj.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -19,6 +19,7 @@
import base64
import binascii
import datetime
+import httpx
import logging
import string
import threading
@@ -34,6 +35,8 @@ class PluginObj:
def __init__(self, _plugin):
self.logger = logging.getLogger(__name__)
self.plugin = _plugin
+ self.plugins = None
+ self.http_session = PluginObj.HttpSession()
self.config_obj = _plugin.config_obj
self.namespace = _plugin.namespace
self.def_trans = ''.join([
@@ -41,21 +44,41 @@ def __init__(self, _plugin):
string.ascii_lowercase,
string.digits,
'+/'
- ]).encode()
+ ]).encode()
self.instances = {}
self.scheduler_db = DBScheduler(self.config_obj.data)
self.scheduler_tasks()
self.enabled = True
self.logger.debug('Initializing plugin {}'.format(self.namespace))
+ def terminate(self):
+ """
+ Removes all has-a objects from this object and calls terminate on them where applicable
+ Not calling the inherited class at this time
+ """
+ self.enabled = False
+ for key, instance in self.instances.items():
+ instance.terminate()
+ self.logger = None
+ self.plugin = None
+ self.plugins = None
+ self.http_session = None
+ self.config_obj = None
+ self.namespace = None
+ self.def_trans = None
+ self.instances = None
+ self.scheduler_db = None
+
+
+
# INTERFACE METHODS
# Plugin may have the following methods
# used to interface to the app.
##############################
- ### EXTERNAL STREAM METHODS
+ # ## EXTERNAL STREAM METHODS
##############################
-
+
def is_time_to_refresh_ext(self, _last_refresh, _instance):
"""
External request to determine if the m3u8 stream uri needs to
@@ -74,7 +97,7 @@ def get_channel_uri_ext(self, _sid, _instance=None):
return self.instances[_instance].get_channel_uri(_sid)
##############################
- ### EXTERNAL EPG METHODS
+ # ## EXTERNAL EPG METHODS
##############################
def get_channel_day_ext(self, _zone, _uid, _day, _instance='default'):
@@ -104,59 +127,80 @@ def get_channel_list_ext(self, _zone_id, _ch_ids=None, _instance='default'):
# END OF INTERFACE METHODS
-
def scheduler_tasks(self):
"""
dummy routine that will be overridden by subclass
"""
pass
- def enable_instance(self, _namespace, _instance):
+ def enable_instance(self, _namespace, _instance, _instance_name='Instance'):
"""
When one plugin is tied to another and requires it to be enabled,
this method will enable the other instance and set this plugin to disabled until
everything is up
+ Also used to create a new instance if missing. When _instance is None,
+ looks for any existing instance and, if none is found, creates a default one.
"""
name_config = _namespace.lower()
- instance_config = name_config + '_' + _instance
+ # if _instance is None and config has no instance for namespace, add one
+ if _instance is None:
+ x = [ k for k in self.config_obj.data.keys() if k.startswith(name_config+'_')]
+ if len(x):
+ return
+ else:
+ _instance = 'Default'
+ instance_config = name_config + '_' + _instance.lower()
+
if self.config_obj.data.get(name_config):
if self.config_obj.data.get(instance_config):
if not self.config_obj.data[instance_config]['enabled']:
- self.logger.notice('1. Enabling {}:{} plugin instance. Required by {}. Restart Required'
- .format(_namespace, _instance, self.namespace))
+ self.logger.warning('1. Enabling {}:{} plugin instance. Required by {}. Restart Required'
+ .format(_namespace, _instance, self.namespace))
self.config_obj.write(
- instance_config, 'enabled', True)
+ instance_config, 'enabled', True)
raise exceptions.CabernetException('{} plugin requested by {}. Restart Required'
- .format(_namespace, self.namespace))
+ .format(_namespace, self.namespace))
else:
- self.logger.notice('2. Enabling {}:{} plugin instance. Required by {}. Restart Required'
- .format(_namespace, _instance, self.namespace))
+ if _namespace != self.namespace:
+ self.logger.warning('2. Enabling {}:{} plugin instance. Required by {}. Restart Required'
+ .format(_namespace, _instance, self.namespace))
+ else:
+ self.logger.warning('3. Enabling {}:{} plugin instance. Restart Required'
+ .format(_namespace, _instance, self.namespace))
+
self.config_obj.write(
- instance_config, 'Label', _namespace + ' Instance')
+ instance_config, 'Label', _namespace + ' ' + _instance_name)
self.config_obj.write(
- instance_config, 'enabled', True)
+ instance_config, 'enabled', True)
raise exceptions.CabernetException('{} plugin requested by {}. Restart Required'
- .format(_namespace, self.namespace))
+ .format(_namespace, self.namespace))
else:
self.logger.error('Requested Plugin {} by {} Missing'
- .format(_namespace, self.namespace))
+ .format(_namespace, self.namespace))
raise exceptions.CabernetException('Requested Plugin {} by {} Missing'
- .format(_namespace, self.namespace))
+ .format(_namespace, self.namespace))
+ if _namespace not in self.plugins.keys():
+ self.logger.warning('{}:{} not installed and requested by {} settings. Restart Required'
+ .format(_namespace, _instance, self.namespace))
+ raise exceptions.CabernetException('{}:{} not enabled and requested by {} settings. Restart Required'
+ .format(_namespace, _instance, self.namespace))
+
if not self.plugins[_namespace].enabled:
- self.logger.notice('{}:{} not enabled and requested by {}. Restart Required'
- .format(_namespace, _instance, self.namespace))
- raise exceptions.CabernetException('{}:{} not enabled and requested by {}. Restart Required'
- .format(_namespace, _instance, self.namespace))
+ self.logger.warning('{}:{} not enabled and requested by {} settings. Restart Required'
+ .format(_namespace, _instance, self.namespace))
+ raise exceptions.CabernetException('{}:{} not enabled and requested by {} settings. Restart Required'
+ .format(_namespace, _instance, self.namespace))
def refresh_obj(self, _topic, _task_name):
if not self.enabled:
- self.logger.debug('{} Plugin disabled, not refreshing {}' \
+ self.logger.debug(
+ '{} Plugin disabled, not refreshing {}'
.format(self.plugin.name, _topic))
return
- self.web_admin_url = 'http://localhost:' + \
- str(self.config_obj.data['web']['web_admin_port'])
+ web_admin_url = 'http://localhost:' + \
+ str(self.config_obj.data['web']['web_admin_port'])
task = self.scheduler_db.get_tasks(_topic, _task_name)[0]
- url = ( self.web_admin_url + '/api/scheduler?action=runtask&taskid={}'
+ url = (web_admin_url + '/api/scheduler?action=runtask&taskid={}'
.format(task['taskid']))
req = urllib.request.Request(url)
with urllib.request.urlopen(req) as resp:
@@ -177,7 +221,6 @@ def refresh_channels(self, _instance=None):
Called from the scheduler
"""
return self.refresh_it('Channels', _instance)
-
def refresh_epg(self, _instance=None):
"""
@@ -191,20 +234,22 @@ def refresh_it(self, _what_to_refresh, _instance=None):
"""
try:
if not self.enabled:
- self.logger.debug('{} Plugin disabled, not refreshing {}' \
+ self.logger.debug(
+ '{} Plugin disabled, not refreshing {}'
.format(self.plugin.name, _what_to_refresh))
return False
if _instance is None:
for key, instance in self.instances.items():
if _what_to_refresh == 'EPG':
- return instance.refresh_epg()
+ instance.refresh_epg()
elif _what_to_refresh == 'Channels':
- return instance.refresh_channels()
+ instance.refresh_channels()
else:
if _what_to_refresh == 'EPG':
- return self.instances[_instance].refresh_epg()
+ self.instances[_instance].refresh_epg()
elif _what_to_refresh == 'Channels':
- return self.instances[_instance].refresh_channels()
+ self.instances[_instance].refresh_channels()
+ return True
except exceptions.CabernetException:
self.logger.debug('Setting plugin {} to disabled'.format(self.plugin.name))
self.enabled = False
@@ -216,7 +261,7 @@ def utc_to_local_time(self, _hours):
Used for scheduler on events
"""
tz_delta = datetime.datetime.now() - datetime.datetime.utcnow()
- tz_hours = round(tz_delta.total_seconds()/3610)
+ tz_hours = round(tz_delta.total_seconds() / 3610)
local_hours = tz_hours + _hours
if local_hours < 0:
local_hours += 24
@@ -229,25 +274,24 @@ def compress(self, _data):
_data = _data.encode()
return base64.b64encode(_data).translate(
_data.maketrans(self.def_trans,
- self.config_obj.data['main']['plugin_data'].encode()))
+ self.config_obj.data['main']['plugin_data'].encode()))
def uncompress(self, _data):
if type(_data) is str:
_data = _data.encode()
- a = self.config_obj.data['main']['plugin_data'].encode()
+ self.config_obj.data['main']['plugin_data'].encode()
try:
return base64.b64decode(_data.translate(_data.maketrans(
self.config_obj.data['main']['plugin_data']
.encode(), self.def_trans))) \
.decode()
- except (binascii.Error, UnicodeDecodeError) as ex:
- self.logger.error('Uncompression Error, invalid string {}' \
- .format(_data))
+ except (binascii.Error, UnicodeDecodeError):
+ self.logger.error('Uncompression Error, invalid string {}'.format(_data))
return None
def check_logger_refresh(self):
if not self.logger.isEnabledFor(40):
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
for inst, inst_obj in self.instances.items():
inst_obj.check_logger_refresh()
@@ -255,3 +299,21 @@ def check_logger_refresh(self):
def name(self):
return self.namespace
+ class HttpSession:
+ """
+ This class handles the management of the httpx session since
+ pickling of the httpx Client throws an exception.
+ """
+ def __init__(self):
+ self.http_session = None
+
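+    # The httpx Client is created lazily on the first request so this object can be
+    # pickled (for example, when handed to another process) before any HTTP call is made.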
+ def get(self, uri, headers=None, timeout=8):
+ if self.http_session is None:
+ self.http_session = httpx.Client(http2=True, verify=False, follow_redirects=True)
+ return self.http_session.get(uri, headers=headers, timeout=timeout)
+
+ def post(self, uri, headers=None, data=None, timeout=8):
+ if self.http_session is None:
+ self.http_session = httpx.Client(http2=True, verify=False, follow_redirects=True)
+ return self.http_session.post(uri, headers=headers, data=data, timeout=timeout)
+
diff --git a/lib/plugins/plugin_programs.py b/lib/plugins/plugin_programs.py
old mode 100755
new mode 100644
index b27cb23..4d607e7
--- a/lib/plugins/plugin_programs.py
+++ b/lib/plugins/plugin_programs.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -16,19 +16,15 @@
substantial portions of the Software.
"""
-import datetime
import json
import logging
import threading
-import urllib.request
import lib.common.utils as utils
-from lib.db.db_epg import DBepg
from lib.common.decorators import handle_url_except
from lib.common.decorators import handle_json_except
-
class PluginPrograms:
def __init__(self, _instance_obj):
@@ -45,22 +41,30 @@ def get_program_info(self, _prog_id):
"""
pass
- @handle_url_except(timeout=10.0)
+ def terminate(self):
+ """
+        Clears all object references held by this instance and calls any subclasses to also terminate.
+        The inherited class is not called at this time.
+ """
+ self.logger = None
+ self.instance_obj = None
+ self.config_obj = None
+ self.instance_key = None
+ self.plugin_obj = None
+ self.config_section = None
+
+ @handle_url_except()
@handle_json_except
- def get_uri_data(self, _uri, _header=None):
+ def get_uri_data(self, _uri, _retries, _header=None):
if _header is None:
header = {'User-agent': utils.DEFAULT_USER_AGENT}
else:
header = _header
- req = urllib.request.Request(_uri, headers=header)
- with urllib.request.urlopen(req, timeout=10.0) as resp:
- x = json.load(resp)
+ resp = self.plugin_obj.http_session.get(_uri, headers=header, timeout=8)
+        resp.raise_for_status()
+        x = resp.json()
return x
-
-
-
-
def check_logger_refresh(self):
if not self.logger.isEnabledFor(40):
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
diff --git a/lib/plugins/repo_handler.py b/lib/plugins/repo_handler.py
new file mode 100644
index 0000000..9539838
--- /dev/null
+++ b/lib/plugins/repo_handler.py
@@ -0,0 +1,175 @@
+"""
+MIT License
+
+Copyright (C) 2023 ROCKY4546
+https://github.com/rocky4546
+
+This file is part of Cabernet
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+"""
+
+import httpx
+import logging
+import json
+import importlib
+import importlib.resources
+import os
+import pathlib
+import urllib.request
+
+import lib.common.exceptions as exceptions
+import lib.common.utils as utils
+from lib.db.db_plugins import DBPlugins
+from lib.common.decorators import handle_url_except
+from lib.common.decorators import handle_json_except
+
+
+class RepoHandler:
+
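+    # Single shared HTTP/2 client reused by all RepoHandler instances; certificate
+    # verification is disabled and redirects are followed automatically.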
+ http_session = httpx.Client(http2=True, verify=False, follow_redirects=True)
+ logger = None
+
+ def __init__(self, _config_obj):
+ self.config_obj = _config_obj
+ if RepoHandler.logger is None:
+ RepoHandler.logger = logging.getLogger(__name__)
+ self.plugin_db = DBPlugins(_config_obj.data)
+
+
+
+ def load_cabernet_repo(self):
+ """
+ Loads the manifest which points to the plugin.json list of plugins
+ Will update the database on the manifest and plugin list
+ If there is a plugin that is no longer in the list, will tag for
+ deletion. (don't know at this point if it is installed.)
+ """
+ repo_settings = self.import_cabernet_manifest()
+ self.save_repo(repo_settings)
+ self.update_plugins(repo_settings)
+
+ def import_cabernet_manifest(self):
+ """
+ Loads the manifest for cabernet repo
+ """
+ json_settings = importlib.resources.read_text(self.config_obj.data['paths']['resources_pkg'], utils.CABERNET_REPO)
+ settings = json.loads(json_settings)
+ if settings:
+ settings = settings['plugin']
+ settings['repo_url'] = utils.CABERNET_REPO
+ self.plugin_db.get_repos(utils.CABERNET_ID)
+ return settings
+
+ def save_repo(self, _repo):
+ """
+ Saves to DB the repo json settings
+ """
+ self.plugin_db.save_repo(_repo)
+
+
+ def cache_thumbnails(self, _plugin_defn):
+ """
+ Determine if the cache area has the thumbnail, if not
+ will download and store the thumbnail
+ """
+ # path = thumbnail cache path + plugin_id + icon or fanart path
+ thumbnail_path = self.config_obj.data['paths']['thumbnails_dir']
+ plugin_id = _plugin_defn['id']
+ icon_path = _plugin_defn['icon']
+ fanart_path = _plugin_defn['fanart']
+
+ repoid = _plugin_defn['repoid']
+ repo_defn = self.plugin_db.get_repos(repoid)
+ if not repo_defn:
+ self.logger.notice('Repo not defined for plugin {}, unable to cache thumbnails'
+ .format(plugin_id))
+ return
+ datadir = repo_defn[0]['dir']['datadir']['url']
+ self.cache_thumbnail(datadir, plugin_id, icon_path, thumbnail_path)
+ self.cache_thumbnail(datadir, plugin_id, fanart_path, thumbnail_path)
+
+ def cache_thumbnail(self, _datadir, _plugin_id, _image_relpath, _thumbnail_path):
+ """
+ _datadir: datadir url from the repo definition
+ _plugin_id: plugin id which is also the folder name
+        _image_relpath: relative path found in the plugin definition
+ _thumbnail_path: config setting to the thumbnail path area
+ """
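+        # The repo copy lives at <datadir>/<plugin_id>/<relative path>; the cached copy
+        # mirrors that layout under the local thumbnail cache directory.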
+ full_repo = '/'.join([
+ _datadir, _plugin_id, _image_relpath])
+ full_cache = pathlib.Path(
+ _thumbnail_path, _plugin_id, _image_relpath)
+ if not full_cache.exists():
+ image = self.get_uri_data(full_repo, 2)
+ self.save_file(image, full_cache)
+
+ def update_plugins(self, _repo_settings):
+ """
+ Gets the list of plugins for this repo from [dir][info] and updates the db
+ """
+ uri = _repo_settings['dir']['info']
+ plugin_json = self.get_uri_json_data(uri)
+ if plugin_json:
+ plugin_json = plugin_json['plugins']
+ for plugin in plugin_json:
+ plugin = plugin['plugin']
+ if 'repository' in plugin['category']:
+ continue
+ # pull the db item. merge them and then update the db with new data.
+ plugin_data = self.plugin_db.get_plugins(_installed=None, _repo_id=_repo_settings['id'], _plugin_id=plugin['id'])
+ if plugin_data:
+ plugin_data = plugin_data[0]
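+                    # Merge repo data with the local record: the repo's "current" version
+                    # becomes "latest", while "installed" and "current" keep the locally stored values.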
+ plugin['repoid'] = _repo_settings['id']
+ plugin['version']['installed'] = plugin_data['version']['installed']
+ plugin['version']['latest'] = plugin['version']['current']
+ plugin['version']['current'] = plugin_data['version']['current']
+ plugin['changelog'] = plugin.get('changelog')
+ if plugin_data.get('external'):
+ plugin['external'] = plugin_data['external']
+ else:
+ plugin['external'] = True
+ else:
+ plugin['repoid'] = _repo_settings['id']
+ plugin['version']['installed'] = False
+ plugin['version']['latest'] = plugin['version']['current']
+ plugin['version']['current'] = None
+ plugin['external'] = True
+ self.cache_thumbnails(plugin)
+ self.plugin_db.save_plugin(plugin)
+
+ @handle_url_except()
+ def get_uri_data(self, _uri, _retries):
+ header = {
+ 'User-agent': utils.DEFAULT_USER_AGENT}
+ resp = RepoHandler.http_session.get(_uri, headers=header, timeout=8)
+        resp.raise_for_status()
+        x = resp.content
+ return x
+
+ @handle_url_except()
+ @handle_json_except
+ def get_uri_json_data(self, _uri):
+ header = {
+ 'Content-Type': 'application/json',
+ 'User-agent': utils.DEFAULT_USER_AGENT}
+ req = urllib.request.Request(_uri, headers=header)
+ with urllib.request.urlopen(req, timeout=10.0) as resp:
+ return json.load(resp)
+
+
+ def save_file(self, _data, _file):
+ try:
+ os.makedirs(os.path.dirname(_file), exist_ok=True)
+
+            with open(_file, 'wb') as file_h:
+                file_h.write(_data)
+ except Exception as e:
+ self.logger.warning("An error occurred saving %s file\n%s" % (_file, e))
+ raise
diff --git a/lib/resources/config_defn/1-base.json b/lib/resources/config_defn/1-base.json
index e73c17c..ba042e7 100644
--- a/lib/resources/config_defn/1-base.json
+++ b/lib/resources/config_defn/1-base.json
@@ -19,7 +19,7 @@
"2-Expert", "3-Advanced"],
"level": 0,
"onDefnLoad": "lib.config.config_callbacks.set_theme_folders",
- "help": "Displays settings based on complexity"
+ "help": "Default: 1-Standard. Displays settings based on complexity"
},
"theme":{
"label": "Theme",
@@ -30,14 +30,14 @@
"light", "light-blue", "light-pink",
"light-purple", "light-red", "spring", "wmc"],
"level": 0,
- "help": "Changes the way the page appears"
+ "help": "Default: spring. Changes the way the page appears"
},
"backgrounds":{
"label": "Background Folder",
"type": "path",
"default": null,
"level": 0,
- "help": "Uses the images in the folder for the background"
+ "help": "Default: None. Uses the images in the folder for the background"
}
}
},
@@ -62,21 +62,21 @@
"default": "stable",
"values": ["stable", "unstable"],
"level": 1,
- "help": "Version upgrade quality"
+ "help": "Default: stable. Version upgrade quality"
},
"maintenance_mode":{
"label": "Maintenance Mode",
"type": "boolean",
"default": false,
"level": 1,
- "help": "Used during upgrades. When enabled, causes the patch upgrades to be re-applied on a scheduler restart"
+ "help": "Default: false. Used during upgrades. When enabled, causes the patch upgrades to be re-applied on a scheduler restart"
},
"memory_usage":{
"label": "Memory Usage",
"type": "boolean",
"default": false,
"level": 2,
- "help": "Turn on and set logging to DEBUG. This will generate a memory profile after each web request or scheduler trigger."
+ "help": "Default: false. Turn on and set logging to DEBUG. This will generate a memory profile after each web request or scheduler trigger."
},
"ostype":{
"label": "OS Type",
@@ -136,7 +136,7 @@
"default": false,
"level": 1,
"onChange": "lib.config.config_callbacks.check_encryption",
- "help": "Will encrypt the password using a self-generated key. Use with only one user running service."
+        "help": "Default: False. Will encrypt the password using a self-generated key. Use only when a single user is running the service."
},
"plugin_data":{
"label": "plugin data",
@@ -161,7 +161,7 @@
"values": ["None", "Tiny(16)", "Small(48)", "Medium(128)",
"Large(180)", "X-Large(270)", "Full-Size"],
"level": 1,
- "help": "The default size of the thumbnails displayed in the Channel Editor"
+ "help": "Default: Medium(128). The default size used throughout Cabernet (Channel Editor, Plugins, Config Plugin Icons)"
}
}
}
diff --git a/lib/resources/config_defn/2-paths.json b/lib/resources/config_defn/2-paths.json
index 8bcfb87..dd919d0 100644
--- a/lib/resources/config_defn/2-paths.json
+++ b/lib/resources/config_defn/2-paths.json
@@ -50,6 +50,15 @@
"onInit": "lib.config.config_callbacks.set_logs_path",
"help": "Location of log files when set to be used"
},
+ "thumbnails_dir":{
+ "label": "Thumbnails Cache Path",
+ "type": "path",
+ "default": null,
+ "level": 1,
+ "writable": false,
+ "onInit": "lib.config.config_callbacks.set_thumbnails_path",
+ "help": "Location of where cached thumbnails are stored"
+ },
"tmp_dir":{
"label": "TEMP Path",
"type": "path",
@@ -113,7 +122,7 @@
"default": null,
"level": 2,
"onInit": "lib.config.config_callbacks.set_ffmpeg_path",
- "help": "Only used with stream_type=ffmpegproxy"
+ "help": "Used with stream_type=ffmpegproxy or when PTS Filtering or PTS/DTS Resync are enabled"
},
"ffprobe_path":{
"label": "ffprobe_path",
@@ -121,7 +130,15 @@
"default": null,
"level": 2,
"onInit": "lib.config.config_callbacks.set_ffprobe_path",
- "help": "Only used with stream_type=ffmpegproxy"
+ "help": "Used when PTS Filter is enabled"
+ },
+ "streamlink_path":{
+ "label": "streamlink_path",
+ "type": "path",
+ "default": null,
+ "level": 2,
+ "onInit": "lib.config.config_callbacks.set_streamlink_path",
+ "help": "Used with stream_type=streamlinkproxy"
}
}
diff --git a/lib/resources/config_defn/3-logs.json b/lib/resources/config_defn/3-logs.json
index 7918c4e..d15619f 100644
--- a/lib/resources/config_defn/3-logs.json
+++ b/lib/resources/config_defn/3-logs.json
@@ -17,16 +17,16 @@
"default": true,
"level": 1,
"onChange": "lib.config.config_callbacks.logging_enable",
- "help": "Used to enable logging to the system logger"
+ "help": "Default: Enabled. Used to enable logging to the system logger"
},
"level":{
"label": "Level",
"type": "list",
- "values": ["DEBUG", "INFO", "NOTICE", "WARNING", "ERROR", "CRITICAL"],
+ "values": ["TRACE", "DEBUG", "INFO", "NOTICE", "WARNING", "ERROR", "CRITICAL"],
"default": "WARNING",
"level": 1,
"onChange": "lib.config.config_callbacks.logging_refresh",
- "help": "Log level for system logs. Default is WARNING"
+        "help": "Default: WARNING. Log level for system logs"
},
"class":{
"label": "class",
@@ -59,7 +59,7 @@
"label": "File Log Handler",
"sort": "2",
"icon": "comment",
- "description": "Python debug log settings",
+        "description": "Settings for the local Python log file stored under data/logs/",
"settings":{
"enabled":{
"label": "Enabled",
@@ -72,7 +72,7 @@
"level":{
"label": "Level",
"type": "list",
- "values": ["DEBUG", "INFO", "NOTICE", "WARNING", "ERROR", "CRITICAL"],
+ "values": ["TRACE", "DEBUG", "INFO", "NOTICE", "WARNING", "ERROR", "CRITICAL"],
"default": "INFO",
"level": 1,
"onChange": "lib.config.config_callbacks.logging_refresh",
@@ -163,12 +163,12 @@
"level":{
"label": "level",
"type": "list",
- "values": ["DEBUG", "INFO", "NOTICE", "WARNING", "ERROR", "CRITICAL"],
- "default": "DEBUG",
+ "values": ["TRACE", "DEBUG", "INFO", "NOTICE", "WARNING", "ERROR", "CRITICAL"],
+ "default": "TRACE",
"level": 4,
"writable": false,
"onChange": "lib.config.config_callbacks.logging_refresh",
- "help": "Default is WARNING"
+ "help": "Default is TRACE"
},
"handlers":{
"label": "handlers",
diff --git a/lib/resources/config_defn/clients.json b/lib/resources/config_defn/clients.json
index e2cb783..1a140bc 100644
--- a/lib/resources/config_defn/clients.json
+++ b/lib/resources/config_defn/clients.json
@@ -17,7 +17,7 @@
"default": "0.0.0.0",
"level": 1,
"onInit": "lib.config.config_callbacks.set_ip",
- "help": "Use instead of plex_accessible_ip. 0.0.0.0 means bind to all IPs and use the main IP address for json data"
+ "help": "Default: 0.0.0.0. Use instead of plex_accessible_ip. 0.0.0.0 means bind to all IPs and use the main IP address for json data"
},
"plex_accessible_ip":{
"label": "plex_accessible_ip",
@@ -39,28 +39,28 @@
"type": "integer",
"default": 5004,
"level": 1,
- "help": "Port used to stream. Default is 5004"
+        "help": "Default: 5004. Port used to stream"
},
"web_admin_port":{
"label": "web_admin_port",
"type": "integer",
"default": 6077,
"level": 1,
- "help": "Port for main web-site. TVHeadend can use any port; however, others such as Plex and Emby need it on port 80 for full HDHR compatilibity"
+        "help": "Default: 6077. Port for the main website. TVHeadend can use any port; however, others such as Plex and Emby need it on port 80 for full HDHR compatibility"
},
"disable_web_config":{
"label": "disable_web_config",
"type": "boolean",
"default": false,
"level": 3,
- "help": "Security setting to disable the ability to edit the configuration remotely"
+ "help": "Default: False. Security setting to disable the ability to edit the configuration remotely"
},
"concurrent_listeners":{
"label": "concurrent_listeners",
"type": "integer",
"default": 8,
- "level": 2,
- "help": "GUI Webadmin site only. Number of simultaneous HTTP requests at one time. If requests are exceeded, the request will hang until a listener becomes available."
+ "level": 3,
+ "help": "Default: 8. GUI Webadmin site only. Number of simultaneous HTTP requests at one time. If requests are exceeded, the request will hang until a listener becomes available."
}
}
},
@@ -75,7 +75,21 @@
"type": "boolean",
"default": true,
"level": 2,
- "help": "Identifies the service name as the channel name to client. Recommend disabling unless running a Scan in the client to reduce overhead."
+        "help": "Default: True. Identifies the service name as the channel name to the client. Recommend disabling unless running a Scan in the client to reduce overhead."
+ },
+ "vod_retries":{
+ "label": "VOD Retries",
+ "type": "integer",
+ "default": 2,
+ "level": 3,
+        "help": "Default: 2. Recommend leaving this as is. For VOD (where all streaming packets are provided at once), changing the number of retries may reduce timeout errors. Minimum Total Timeout = num * 12sec/timeout * 2 retries. Only applies to internalproxy."
+ },
+ "switch_channel_timeout":{
+ "label": "Switch Channel Timeout",
+ "type": "integer",
+ "default": 2,
+ "level": 3,
+        "help": "Default: 2 seconds. Clients tend to time out streams and request a reset. This value is the time in seconds allowed between the stop request and re-subscribing to the channel. If it is too short, Cabernet will drop the current tuner and use a new one instead of reusing it."
}
}
},
@@ -99,35 +113,49 @@
"default": "tvheadend",
"values": ["normal", "tvheadend"],
"level": 1,
- "help": "TVHeadend uses specific genre to get colors on tv guide"
+        "help": "Default: tvheadend. TVHeadend uses specific genres to get colors on the TV guide"
},
"epg_channel_number":{
"label": "Channel # in Name",
"type": "boolean",
"default": false,
"level": 2,
- "help": "When true will include the channel number in the channel name for the channel list and EPG"
+ "help": "Default: False. When true will include the channel number in the channel name for the channel list and EPG"
+ },
+ "epg_use_channel_number":{
+ "label": "Use Channel # for Channel ID",
+ "type": "boolean",
+ "default": false,
+ "level": 2,
+        "help": "Default: False. Clients such as Plex and JellyFin use the channel id field in the xmltv.xml as the channel number"
+ },
+ "epg_add_plugin_to_channel_id":{
+ "label": "Add Plugin name to Channel ID",
+ "type": "boolean",
+ "default": false,
+ "level": 2,
+        "help": "Default: False. For cases where different providers have the same UID for channels"
},
"epg_channel_icon":{
"label": "EPG Channel Icon",
"type": "boolean",
"default": true,
"level": 2,
- "help": "When true will include the icon for each channel inside the xmltv.xml file"
+ "help": "Default: True. When true will include the icon for each channel inside the xmltv.xml file"
},
"epg_program_icon":{
"label": "EPG Program Icon",
"type": "boolean",
"default": true,
"level": 2,
- "help": "When true will include the icon for each program inside the xmltv.xml file"
+ "help": "Default: True. When true will include the icon for each program inside the xmltv.xml file"
},
"epg_prettyprint":{
"label": "EPG Pretty Print",
"type": "boolean",
- "default": true,
+ "default": false,
"level": 1,
- "help": "If you are having memory issues, try turning this to false"
+ "help": "Default: False. If you are having memory issues, try turning this to false"
}
}
},
@@ -142,7 +170,7 @@
"type": "boolean",
"default": true,
"level": 2,
- "help": "Enables SSDP protocol on port 1900. Recommend keeping this disabled and use manual setup"
+        "help": "Default: True. Enables the SSDP protocol on port 1900. Recommend keeping this disabled and using manual setup"
},
"udp_netmask":{
"label": "udp_netmask",
@@ -166,7 +194,7 @@
"type": "boolean",
"default": true,
"level": 2,
- "help": "Enables HDHR protocol on port 65001. Recommend keeping this disabled and use manual setup"
+        "help": "Default: True. Enables the HDHR UDP discovery protocol on port 65001. Recommend keeping this disabled and using manual setup"
},
"hdhr_id":{
"label": "hdhr_id",
diff --git a/lib/resources/manifest.json b/lib/resources/manifest.json
index 00a702c..e4e7ad5 100644
--- a/lib/resources/manifest.json
+++ b/lib/resources/manifest.json
@@ -1,13 +1,44 @@
{
- "id": "cabernet",
- "name": "Cabernet",
- "github_repo_stable": "https://api.github.com/repos/cabernetwork/cabernet",
- "github_repo_unstable": "https://api.github.com/repos/cabernetwork/cabernet",
- "provider-name": "rocky4546",
- "summary": "Cabernet allows control of IPTV streams",
- "description": "Connect streams to your favorite media server. Cabernet is a modular-based appliance/platform that cleans, organizes and repackages IPTV streams to be compatible and digested by media clients.",
- "license": "MIT License, Copyright (C) 2021 ROCKY4546",
- "source": "https://github.com/cabernetwork/cabernet",
- "forum": "https://tvheadend.org/boards/5/topics/43052",
- "website": "https://cabernetwork.github.io/"
+ "plugin": {
+ "id": "cabernet",
+ "name": "Cabernet",
+ "version": {
+ "current": "0.1.0"
+ },
+        "requires": [{
+            "python": {
+                "name": "python",
+                "version": "3.7.0"
+            }
+        }, {
+            "python-lib": {
+                "name": "cryptography",
+                "version": "2.8"
+            }
+        }, {
+            "python-lib": {
+                "name": "streamlink",
+                "version": "5.3.1"
+            }
+        }, {
+            "python-lib": {
+                "name": "requests",
+                "version": "2.26.0"
+            }
+        }],
+ "category": ["repository"],
+ "provider-name": "rocky4546",
+ "summary": "Cabernet allows control of IPTV streams",
+ "description": "Connect streams to your favorite media server. Cabernet is a modular-based appliance/platform that cleans, organizes and repackages IPTV streams to be compatible and digested by media clients.",
+ "license": "MIT License, Copyright (C) 2021 ROCKY4546",
+ "source": "https://github.com/cabernetwork/cabernet",
+ "forum": "https://tvheadend.org/boards/5/topics/43052",
+ "website": "https://cabernetwork.github.io/",
+ "dir": {
+ "github_repo_stable": "https://api.github.com/repos/cabernetwork/cabernet",
+ "github_repo_unstable": "https://api.github.com/repos/cabernetwork/cabernet",
+ "info": "https://raw.githubusercontent.com/cabernetwork/Cabernet-Repository/main/plugin.json",
+ "checksum": "https://raw.githubusercontent.com/cabernetwork/Cabernet-Repository/main/plugin.json.sha2",
+ "datadir": {
+ "url": "https://raw.githubusercontent.com/cabernetwork/Cabernet-Repository/main/repo"
+ }
+ }
+ }
}
diff --git a/lib/resources/plugins/__init__.py b/lib/resources/plugins/__init__.py
old mode 100755
new mode 100644
diff --git a/lib/resources/plugins/config_defn.json b/lib/resources/plugins/config_defn.json
old mode 100755
new mode 100644
diff --git a/lib/resources/plugins/instance_defn.json b/lib/resources/plugins/instance_defn.json
old mode 100755
new mode 100644
diff --git a/lib/resources/plugins/instance_defn_channel.json b/lib/resources/plugins/instance_defn_channel.json
old mode 100755
new mode 100644
diff --git a/lib/resources/plugins/instance_defn_epg.json b/lib/resources/plugins/instance_defn_epg.json
old mode 100755
new mode 100644
diff --git a/lib/resources/plugins/plugin_repo.json b/lib/resources/plugins/plugin_repo.json
old mode 100755
new mode 100644
diff --git a/lib/schedule/schedule_html.py b/lib/schedule/schedule_html.py
index 3ae1640..1e4ae80 100644
--- a/lib/schedule/schedule_html.py
+++ b/lib/schedule/schedule_html.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -17,7 +17,6 @@
"""
import datetime
-import json
import logging
import time
@@ -36,7 +35,7 @@ def get_schedule_html(_webserver):
elif 'deltask' in _webserver.query_data:
schedule_html.del_task(_webserver.query_data['task'])
time.sleep(0.05)
- html = schedule_html.get(_webserver.query_data)
+ html = schedule_html.get(_webserver.query_data)
elif 'delete' in _webserver.query_data:
schedule_html.del_trigger(_webserver.query_data['trigger'])
time.sleep(0.05)
@@ -86,10 +85,9 @@ def header(self):
@property
def body(self):
- return ''.join(['', self.title, self.schedule_tasks, self.task,
- ''
- ])
-
+ return ''.join(['', self.title, self.schedule_tasks,
+ self.task, ''])
+
@property
def title(self):
return ''.join([
@@ -108,22 +106,24 @@ def schedule_tasks(self):
])
i = 0
for task_dict in tasks:
- i +=1
+ i += 1
if task_dict['area'] != current_area:
if i > 1:
html = ''.join([html,
- ''
- ])
+ ''
+ ])
current_area = task_dict['area']
if current_area in self.query_data:
checked = "checked"
else:
checked = ""
- html = ''.join([html,
+ html = ''.join([
+ html,
'',
'',
- '
',
- '
',
+ ' ',
+ '',
current_area, ' ',
'',
@@ -160,53 +160,53 @@ def schedule_tasks(self):
else:
dur_delta = str(task_dict['duration']) + ' seconds'
html = ''.join([html,
- '
',
- '
',
- '
',
- '
',
- '', task_dict['title'], '
',
- 'Plugin: ', task_dict['namespace']
- ])
+ '
',
+ '
',
+ '
'
+ ])
play_name = ''
play_icon = ''
delete_icon = ''
delete_name = ''
else:
html = ''.join([html,
- ' -- Last ran ', lastran_delta, ' ago, taking ',
- dur_delta, '
'
- ])
+ ' -- Last ran ', lastran_delta, ' ago, taking ',
+ dur_delta, '
'
+ ])
play_name = '&run=1'
play_icon = 'play_arrow'
delete_icon = 'delete_forever'
delete_name = '&deltask=1'
html = ''.join([html,
- ' ',
- '
',
- '', play_icon, ' ',
- '
',
- '
',
- '', delete_icon, ' ',
- '
'
- ])
+ '
',
+ '
',
+ '', play_icon, ' ',
+ '
',
+ '
',
+ '', delete_icon, ' ',
+ '
'
+ ])
html = ''.join([html,
- ' '
- ])
+ ''
+ ])
return html
-
- @property
+
+ @property
def task(self):
return ''.join([
'
'
@@ -230,12 +230,12 @@ def get_task(self, _id):
'',
' ',
'', str(task_dict['description']), ' '
- ' ',
+ '',
' ',
- 'Namespace: ', str(task_dict['namespace']),
+ 'Namespace: ', str(task_dict['namespace']),
' Instance: ', str(task_dict['instance']),
- ' Priority: ', str(task_dict['priority']),
- ' Thread Type: ', str(task_dict['threadtype']),
+ ' Priority: ', str(task_dict['priority']),
+ ' Thread Type: ', str(task_dict['threadtype']),
' ',
'',
' ',
@@ -245,7 +245,7 @@ def get_task(self, _id):
'add ',
'',
'',
- ])
+ ])
trigger_array = self.scheduler_db.get_triggers(_id)
for trigger_dict in trigger_array:
@@ -257,7 +257,7 @@ def get_task(self, _id):
trigger_str = ''.join([
'Every ', trigger_dict['dayofweek'],
' at ', trigger_dict['timeofday']
- ])
+ ])
elif trigger_dict['timetype'] == 'interval':
interval_mins = trigger_dict['interval']
remainder_hrs = interval_mins % 60
@@ -269,10 +269,10 @@ def get_task(self, _id):
trigger_str = 'Every ' + interval_str
if trigger_dict['randdur'] != -1:
trigger_str += ' with random maximum added time of ' + str(trigger_dict['randdur']) + ' minutes'
-
+
else:
trigger_str = 'UNKNOWN'
-
+
html = ''.join([
html,
'',
@@ -282,16 +282,16 @@ def get_task(self, _id):
trigger_str,
'',
'',
- '',
'delete_forever ',
' '
- ])
+ ])
return ''.join([
html,
''
- ])
+ ])
def get_trigger(self, _id):
task_dict = self.scheduler_db.get_task(_id)
@@ -306,7 +306,7 @@ def get_trigger(self, _id):
instance = ""
else:
instance = task_dict['instance']
-
+
return "".join([
'',
'',
''
@@ -467,7 +468,7 @@ def post_add_trigger(self, query_data):
'area': query_data['area'][0],
'title': query_data['title'][0],
'timetype': query_data['timetype'][0],
- 'timeofday': query_data['timeofdayhr'][0]+':'+query_data['timeofdaymin'][0]
+ 'timeofday': query_data['timeofdayhr'][0] + ':' + query_data['timeofdaymin'][0]
}})
time.sleep(0.05)
return 'Daily Trigger added'
@@ -481,7 +482,7 @@ def post_add_trigger(self, query_data):
'area': query_data['area'][0],
'title': query_data['title'][0],
'timetype': query_data['timetype'][0],
- 'timeofday': query_data['timeofdayhr'][0]+':'+query_data['timeofdaymin'][0],
+ 'timeofday': query_data['timeofdayhr'][0] + ':' + query_data['timeofdaymin'][0],
'dayofweek': query_data['dayofweek'][0]
}})
time.sleep(0.05)
@@ -500,20 +501,18 @@ def post_add_trigger(self, query_data):
time.sleep(0.05)
return 'Interval Trigger added'
return 'UNKNOWN'
-
+
def del_trigger(self, _uuid):
if self.scheduler_db.get_trigger(_uuid) is None:
return None
- self.queue.put({'cmd': 'del', 'uuid': _uuid })
+ self.queue.put({'cmd': 'del', 'uuid': _uuid})
time.sleep(0.05)
return 'Interval Trigger deleted'
def run_task(self, _taskid):
- self.queue.put({'cmd': 'runtask', 'taskid': _taskid })
+ self.queue.put({'cmd': 'runtask', 'taskid': _taskid})
return None
-
def del_task(self, _taskid):
- self.queue.put({'cmd': 'deltask', 'taskid': _taskid })
+ self.queue.put({'cmd': 'deltask', 'taskid': _taskid})
return None
-
diff --git a/lib/schedule/scheduler.py b/lib/schedule/scheduler.py
index 6b8d1fd..b119efc 100644
--- a/lib/schedule/scheduler.py
+++ b/lib/schedule/scheduler.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -18,12 +18,10 @@
import importlib
import logging
-import urllib.request
import time
from multiprocessing import Process
from threading import Thread
-import lib.main as main
import lib.schedule.schedule
import lib.common.exceptions as exceptions
from lib.common.decorators import getrequest
@@ -34,18 +32,20 @@
@getrequest.route('/api/scheduler')
def get_scheduler(_webserver):
try:
- if _webserver.query_data['action'] == 'runtask':
- _webserver.sched_queue.put({'cmd': 'runtask', 'taskid': _webserver.query_data['taskid'] })
+ if _webserver.query_data.get('action') == 'runtask':
+ _webserver.sched_queue.put({'cmd': 'runtask', 'taskid': _webserver.query_data.get('taskid')})
time.sleep(0.1)
- _webserver.do_mime_response(200, 'text/html', 'action is ' + _webserver.query_data['action'])
+ _webserver.do_mime_response(200, 'text/html', 'action executed: ' + _webserver.query_data['action'])
return
else:
- _webserver.do_mime_response(501, 'text/html',
+ _webserver.do_mime_response(
+ 501, 'text/html',
web_templates['htmlError'].format('501 - Unknown action'))
except KeyError:
- _webserver.do_mime_response(501, 'text/html',
+ _webserver.do_mime_response(
+ 501, 'text/html',
web_templates['htmlError'].format('501 - Badly formed request'))
-
+
class Scheduler(Thread):
"""
@@ -59,7 +59,6 @@ class Scheduler(Thread):
"""
scheduler_obj = None
-
def __init__(self, _plugins, _queue):
Thread.__init__(self)
self.logger = logging.getLogger(__name__)
@@ -77,6 +76,7 @@ def _queue_thread():
while not self.stop_thread:
queue_item = self.queue.get(True)
self.process_queue(queue_item)
+
_q_thread = Thread(target=_queue_thread, args=())
_q_thread.start()
self.start()
@@ -132,7 +132,6 @@ def call_trigger(self, _trigger):
"""
Calls the trigger function and times the result
"""
- results = None
start = time.time()
try:
if _trigger['namespace'] == 'internal':
@@ -142,22 +141,30 @@ def call_trigger(self, _trigger):
results = call_f(self.plugins)
else:
if _trigger['namespace'] not in self.plugins.plugins:
- self.logger.debug('{} scheduled tasks ignored. plugin missing' \
+ self.logger.debug(
+ '{} scheduled tasks ignored. plugin missing'
.format(_trigger['namespace']))
results = False
else:
plugin_obj = self.plugins.plugins[_trigger['namespace']].plugin_obj
if plugin_obj is None:
- self.logger.debug('{} scheduled tasks ignored. plugin disabled' \
+ self.logger.debug(
+ '{} scheduled tasks ignored. plugin disabled'
.format(_trigger['namespace']))
results = False
elif _trigger['instance'] is None:
call_f = getattr(plugin_obj, _trigger['funccall'])
results = call_f()
- else:
- call_f = getattr(plugin_obj.instances[_trigger['instance']],
- _trigger['funccall'])
+ elif plugin_obj.instances.get(_trigger['instance']):
+ call_f = getattr(plugin_obj.instances[_trigger['instance']],
+ _trigger['funccall'])
results = call_f()
+ else:
+ self.logger.debug(
+ '{}:{} scheduled tasks ignored. instance missing'
+ .format(_trigger['namespace'], _trigger['instance']))
+ results = False
+
except exceptions.CabernetException as ex:
self.logger.warning('{}'.format(str(ex)))
results = False
@@ -166,7 +173,7 @@ def call_trigger(self, _trigger):
'UNEXPECTED EXCEPTION on GET=', ex))
results = False
if results is None:
- results == True
+ results = True
end = time.time()
duration = int(end - start)
if results:
@@ -196,7 +203,7 @@ def add_job(self, _trigger):
"""
Adds a job to the schedule object using the trigger dict from the database
"""
- if _trigger['timetype'] == 'daily':
+ if _trigger['timetype'] == 'daily':
self.schedule.every().day.at(_trigger['timeofday']).do(
self.exec_trigger, _trigger) \
.tag(_trigger['uuid'])
@@ -206,7 +213,7 @@ def add_job(self, _trigger):
self.exec_trigger, _trigger) \
.tag(_trigger['uuid'])
elif _trigger['timetype'] == 'interval':
- if _trigger['randdur'] < 0:
+ if _trigger['randdur'] < 0:
self.schedule.every(_trigger['interval']).minutes.do(
self.exec_trigger, _trigger) \
.tag(_trigger['uuid'])
@@ -239,6 +246,8 @@ def process_queue(self, _queue_item):
self.run_task(_queue_item['taskid'])
elif _queue_item['cmd'] == 'deltask':
self.delete_task(_queue_item['taskid'])
+ elif _queue_item['cmd'] == 'delinstance':
+ self.delete_instance(_queue_item['name'], _queue_item['instance'])
elif _queue_item['cmd'] == 'del':
self.delete_trigger(_queue_item['uuid'])
elif _queue_item['cmd'] == 'add':
@@ -249,9 +258,9 @@ def process_queue(self, _queue_item):
self.logger.warning('UNKNOWN Scheduler cmd from queue: {}'.format(_queue_item))
except KeyError as e:
self.logger.warning('Badly formed scheduled request {} {}'.format(_queue_item, repr(e)))
-
+
def delete_trigger(self, _uuid):
- self.logger.notice('Deleting trigger {}'.format(_uuid))
+ self.logger.debug('Deleting trigger {}'.format(_uuid))
jobs = self.schedule.get_jobs(_uuid)
for job in jobs:
self.schedule.cancel_job(job)
@@ -268,42 +277,55 @@ def run_trigger(self, _uuid):
def add_trigger(self, trigger):
if trigger['timetype'] == 'startup':
self.create_trigger(trigger['area'], trigger['title'],
- trigger['timetype'])
+ trigger['timetype'])
elif trigger['timetype'] == 'daily':
self.create_trigger(trigger['area'], trigger['title'],
- trigger['timetype'],
- timeofday=trigger['timeofday']
- )
+ trigger['timetype'],
+ timeofday=trigger['timeofday']
+ )
elif trigger['timetype'] == 'daily':
self.create_trigger(trigger['area'], trigger['title'],
- trigger['timetype'],
- timeofday=trigger['timeofday']
- )
+ trigger['timetype'],
+ timeofday=trigger['timeofday']
+ )
elif trigger['timetype'] == 'weekly':
self.create_trigger(trigger['area'], trigger['title'],
- trigger['timetype'],
- timeofday=trigger['timeofday'],
- dayofweek=trigger['dayofweek']
- )
+ trigger['timetype'],
+ timeofday=trigger['timeofday'],
+ dayofweek=trigger['dayofweek']
+ )
elif trigger['timetype'] == 'interval':
self.create_trigger(trigger['area'], trigger['title'],
- trigger['timetype'],
- interval=trigger['interval'],
- randdur=trigger['randdur']
- )
+ trigger['timetype'],
+ interval=trigger['interval'],
+ randdur=trigger['randdur']
+ )
- def create_trigger(self, _area, _title, _timetype, timeofday=None,
- dayofweek=None, interval=-1, timelimit=-1, randdur=-1):
+ def create_trigger(self, _area, _title, _timetype, timeofday=None,
+ dayofweek=None, interval=-1, timelimit=-1, randdur=-1):
self.logger.notice('Creating trigger {}:{}:{}'.format(_area, _title, _timetype))
- uuid = self.scheduler_db.save_trigger(_area, _title, _timetype, timeofday,
- dayofweek, interval, timelimit, randdur)
+ uuid = self.scheduler_db.save_trigger(_area, _title, _timetype, timeofday,
+ dayofweek, interval, timelimit, randdur)
trigger = self.scheduler_db.get_trigger(uuid)
self.add_job(trigger)
+ def delete_instance(self, _name, _instance):
+ tasks = self.scheduler_db.get_tasks_by_name(_name, _instance)
+ for task in tasks:
+ self.logger.warning('deleting task {}'.format(task['taskid']))
+ self.delete_task(task['taskid'])
+
def delete_task(self, _taskid):
task = self.scheduler_db.get_task(_taskid)
- if task is not None:
- self.scheduler_db.del_task(task['area'], task['title'])
+ if task is None:
+ self.logger.notice('Task to delete missing: {}'.format(_taskid))
+ return
+
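+        # Cancel every scheduled trigger tied to this task before removing the task record itself.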
+ triggers = self.scheduler_db.get_triggers(_taskid)
+ for trigger in triggers:
+ self.delete_trigger(trigger['uuid'])
+ self.logger.debug('Deleting schedule task: {}'.format(_taskid))
+ self.scheduler_db.del_task(task['area'], task['title'])
def run_task(self, _taskid):
triggers = self.scheduler_db.get_triggers(_taskid)
@@ -318,18 +340,19 @@ def run_task(self, _taskid):
is_run = False
default_trigger = None
+ trigger = None
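+        # Prefer an interval trigger when one exists; otherwise fall back to the last
+        # non-startup trigger seen while scanning the task's triggers.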
for trigger in triggers:
if trigger['timetype'] == 'startup':
continue
elif trigger['timetype'] == 'interval':
- self.queue.put({'cmd': 'run', 'uuid': trigger['uuid'] })
+ self.queue.put({'cmd': 'run', 'uuid': trigger['uuid']})
is_run = True
break
else:
default_trigger = trigger
if not is_run:
if default_trigger is not None:
- self.queue.put({'cmd': 'run', 'uuid': trigger['uuid'] })
+                self.queue.put({'cmd': 'run', 'uuid': default_trigger['uuid']})
else:
task = self.scheduler_db.get_task(_taskid)
if task is not None:
diff --git a/lib/streams/atsc.py b/lib/streams/atsc.py
index 97df308..fad3dc8 100644
--- a/lib/streams/atsc.py
+++ b/lib/streams/atsc.py
@@ -42,7 +42,6 @@
class ATSCMsg:
# class that generates most of the ATSC UDP protocol messages
- msg_counter = {}
# UDP msgs for ATSC
# https://www.atsc.org/wp-content/uploads/2015/03/Program-System-Information-Protocol-for-Terrestrial-Broadcast-and-Cable-1.pdf
@@ -60,6 +59,7 @@ def __init__(self):
self.crc_table_idx_width = 8
self.atsc_blank_section = b'\x47\x1f\xff\x10\x00'.ljust(ATSC_MSG_LEN, b'\xff')
self.type_strings = []
+ self.msg_counter = {}
def gen_crc_mpeg(self, _msg):
alg = Crc(
@@ -144,7 +144,7 @@ def gen_pid(self, _prog_number):
# Video Stream Element = Base_PID + 1 with the 12th bit set
# Audio Stream Elements = Vide Stream PID + 4 with the 12th bit set, then +1 for each additional lang
pid_lookup = [0x00, 0x30, 0x40, 0x50, 0x60, 0x70, 0x80, 0x90,
- 0x130, 0x140, 0x150, 0x160, 0x170, 0x180, 0x190, 0x230, 0x240]
+ 0x130, 0x140, 0x150, 0x160, 0x170, 0x180, 0x190, 0x230, 0x240]
return pid_lookup[_prog_number]
def gen_lang(self, _name):
@@ -157,9 +157,9 @@ def update_sdt_names(self, _video, _service_provider, _service_name):
video_len = len(_video.data)
msg = None
while True:
- if i+ATSC_MSG_LEN > video_len:
+ if i + ATSC_MSG_LEN > video_len:
break
- packet = _video.data[i:i+ATSC_MSG_LEN]
+ packet = _video.data[i:i + ATSC_MSG_LEN]
program_fields = self.decode_ts_packet(packet)
if program_fields is None:
i += ATSC_MSG_LEN
@@ -181,14 +181,14 @@ def update_sdt_names(self, _video, _service_provider, _service_name):
_video.data = b''.join([
_video.data[:i],
msg,
- _video.data[i+ATSC_MSG_LEN:]
- ])
+ _video.data[i + ATSC_MSG_LEN:]
+ ])
i += ATSC_MSG_LEN
if msg is None:
self.logger.debug('Missing ATSC SDT Msg in stream, unable to update provider and service name')
else:
self.logger.debug('Updating ATSC SDT with service info {} {}' \
- .format(_service_provider, _service_name))
+ .format(_service_provider, _service_name))
def gen_sld(self, _base_pid, _elements):
# Table 6.29 Service Location Descriptor
@@ -222,7 +222,7 @@ def gen_sld(self, _base_pid, _elements):
audio_pid_int += 1
audio_pid = utils.set_u16(audio_pid_int)
lang_msg = struct.pack('%ds' % (len(lang)),
- lang.encode())
+ lang.encode())
msg += stream_type + audio_pid + lang_msg
msg = video_pid + elem_len + msg
length = utils.set_u8(len(msg))
@@ -380,14 +380,14 @@ def gen_stt(self):
ver_sect_proto = b'\xc1\x00\x00\x00'
time_gps = datetime.datetime.utcnow() - datetime.datetime(1980, 1, 6) \
- - datetime.timedelta(seconds=LEAP_SECONDS_2021 - LEAP_SECONDS_1980)
+ - datetime.timedelta(seconds=LEAP_SECONDS_2021 - LEAP_SECONDS_1980)
time_gps_sec = int(time_gps.total_seconds())
system_time = utils.set_u32(time_gps_sec)
delta_time = utils.set_u8(LEAP_SECONDS_2021 - LEAP_SECONDS_1980)
daylight_savings = b'\x60'
msg = table_id_ext + ver_sect_proto + system_time + \
- delta_time + daylight_savings + b'\x00'
+ delta_time + daylight_savings + b'\x00'
length = utils.set_u16(len(msg) + 4 + 0xF000)
msg = MPEG2_PROGRAM_SYSTEM_TIME_TABLE_TAG + length + msg
crc = self.gen_crc_mpeg(msg)
@@ -482,6 +482,29 @@ def gen_cat(self):
# search 0x0020.*0001 ...
return b'\x00\x01\xb0\x09\xff\xff\xc3\x00\x00\xd5\xdc\xfb\x4c'
+ def update_continuity_counter(self, section):
+ pid = self.get_pid(section)
+ if pid is None:
+ return section
+
+ if pid not in self.msg_counter.keys():
+ self.msg_counter[pid] = 0
+
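+        # The MPEG-TS continuity counter is the low nibble of the 4th header byte; keep
+        # the upper nibble and substitute this PID's counter, which wraps after 15.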
+ s_int = section[3]
+ s_top = s_int & 0xf0
+
+ s_int = s_top + self.msg_counter[pid]
+ sect_ba = bytearray(section)
+ sect_ba[3] = s_int
+ sect_bytes = bytes(sect_ba)
+
+ self.msg_counter[pid] += 1
+ if self.msg_counter[pid] > 15:
+ self.msg_counter[pid] = 0
+
+ return sect_bytes
+
+
def format_video_packets(self, _msgs=None):
# atsc packets are 1316 in length with 7 188 sections
# each section has a 471f ff10 00 when no data is present
@@ -497,18 +520,18 @@ def format_video_packets(self, _msgs=None):
# CAT 1
# 7 sections per packet
sections = [
- self.atsc_blank_section,
- self.atsc_blank_section,
- self.atsc_blank_section,
- self.atsc_blank_section,
- self.atsc_blank_section,
- self.atsc_blank_section,
- self.atsc_blank_section,
+ self.update_continuity_counter(self.atsc_blank_section),
+ self.update_continuity_counter(self.atsc_blank_section),
+ self.update_continuity_counter(self.atsc_blank_section),
+ self.update_continuity_counter(self.atsc_blank_section),
+ self.update_continuity_counter(self.atsc_blank_section),
+ self.update_continuity_counter(self.atsc_blank_section),
+ self.update_continuity_counter(self.atsc_blank_section),
]
if _msgs is None:
return b''.join(sections)
-
+
# for now assume the msgs are less than 1316
if len(_msgs) > 7:
self.logger.error('ATSC: TOO MANY MESSAGES={}'.format(len(_msgs)))
@@ -518,7 +541,7 @@ def format_video_packets(self, _msgs=None):
self.logger.error('ATSC: MESSAGE LENGTH TOO LONG={}'.format(len(_msgs[i])))
return None
else:
- sections[i] = _msgs[i].ljust(ATSC_MSG_LEN, b'\xff')
+ sections[i] = self.update_continuity_counter(_msgs[i].ljust(ATSC_MSG_LEN, b'\xff'))
return b''.join(sections)
# TBD need to handle large msg and more than 7 msgs
@@ -534,57 +557,62 @@ def extract_psip(self, _video_data):
pat_found = False
pmt_found = False
seg_counter = 0
-
- #print('writing out segment')
- #f = open('/tmp/data/segment.ts', 'wb')
- #f.write(_video_data)
- #f.close()
-
+
while True:
- if i+ATSC_MSG_LEN > video_len:
+ if i + ATSC_MSG_LEN > video_len:
break
- packet = _video_data[i:i+ATSC_MSG_LEN]
+ packet = _video_data[i:i + ATSC_MSG_LEN]
i += ATSC_MSG_LEN
program_fields = self.decode_ts_packet(packet)
seg_counter += 1
if seg_counter > 7:
- #self.logger.debug('###### SENDING BACK {} PACKETS'.format(len(packet_list)))
+ # self.logger.debug('###### SENDING BACK {} PACKETS'.format(len(packet_list)))
break
- else:
- packet_list.append(packet)
- continue
if program_fields is None:
continue
if program_fields['transport_error_indicator']:
continue
+ # SDT: 17, PAT: 0, Private data: 4096 (audio/video meta)
+ if program_fields['pid'] == 0 \
+ or program_fields['pid'] == 4096:
+ packet_list.append(packet)
+
+ seg_counter += 1
+ if seg_counter > 7:
+ # self.logger.debug('###### SENDING BACK {} PACKETS'.format(len(packet_list)))
+ break
+
+ continue
+
+
if program_fields['pid'] == 0x0000:
pmt_pids = self.decode_pat(program_fields['payload'])
- #self.logger.debug('###### EXPECTED PMT PIDS: {}'.format(pmt_pids))
+ # self.logger.debug('###### EXPECTED PMT PIDS: {}'.format(pmt_pids))
if not pat_found:
packet_list.append(packet)
pat_found = True
- if pmt_pids and program_fields['pid'] in pmt_pids.keys():
- program = pmt_pids[program_fields['pid']]
- self.decode_pmt(program_fields['pid'], program, program_fields['payload'])
- if not pmt_found:
- #self.logger.debug('###### FOUND PMT PID: {}'.format(program_fields['pid']))
- packet_list.append(packet)
- pmt_found = True
- continue
- elif program_fields['pid'] == 0x1ffb:
- self.logger.info('Packet Table indicator 0x1ffb, not implemented {}'.format(i))
- continue
- #elif program_fields['pid'] == 0x0011:
+ #if pmt_pids and program_fields['pid'] in pmt_pids.keys():
+ # program = pmt_pids[program_fields['pid']]
+ # self.decode_pmt(program_fields['pid'], program, program_fields['payload'])
+ # if not pmt_found:
+ # # self.logger.debug('###### FOUND PMT PID: {}'.format(program_fields['pid']))
+ # packet_list.append(packet)
+ # pmt_found = True
+ # continue
+ #elif program_fields['pid'] == 0x1ffb:
+ # self.logger.info('Packet Table indicator 0x1ffb, not implemented {}'.format(i))
+ # continue
+ # elif program_fields['pid'] == 0x0011:
# self.logger.info('Service Description Table (SDT) 0x0011, not implemented {}'.format(i))
# continue
- #elif program_fields['pid'] == 0x0000 or \
+ # elif program_fields['pid'] == 0x0000 or \
# program_fields['pid'] == 0x0100 or \
# program_fields['pid'] == 0x0101:
# continue
- #else:
+ # else:
# self.logger.info('Unknown PID {}'.format(program_fields['pid']))
prev_pid = program_fields['pid']
return packet_list
@@ -604,22 +632,22 @@ def sync_audio_video(self, _video_data):
pat_found = False
pmt_found = False
seg_counter = 0
-
- #print('writing out segment')
- #f = open('/tmp/data/segment.ts', 'wb')
- #f.write(_video_data)
- #f.close()
-
+
+ # print('writing out segment')
+ # f = open('/tmp/data/segment.ts', 'wb')
+ # f.write(_video_data)
+ # f.close()
+
while True:
- if i+ATSC_MSG_LEN > video_len:
+ if i + ATSC_MSG_LEN > video_len:
break
- packet = _video_data[i:i+ATSC_MSG_LEN]
+ packet = _video_data[i:i + ATSC_MSG_LEN]
i += ATSC_MSG_LEN
program_fields = self.decode_ts_packet(packet)
seg_counter += 1
if seg_counter > 7:
- #self.logger.debug('###### SENDING BACK {} PACKETS'.format(len(packet_list)))
+ # self.logger.debug('###### SENDING BACK {} PACKETS'.format(len(packet_list)))
break
else:
packet_list.append(packet)
@@ -632,7 +660,7 @@ def sync_audio_video(self, _video_data):
if program_fields['pid'] == 0x0000:
pmt_pids = self.decode_pat(program_fields['payload'])
- #self.logger.debug('###### EXPECTED PMT PIDS: {}'.format(pmt_pids))
+ # self.logger.debug('###### EXPECTED PMT PIDS: {}'.format(pmt_pids))
if not pat_found:
packet_list.append(packet)
pat_found = True
@@ -640,25 +668,36 @@ def sync_audio_video(self, _video_data):
program = pmt_pids[program_fields['pid']]
self.decode_pmt(program_fields['pid'], program, program_fields['payload'])
if not pmt_found:
- #self.logger.debug('###### FOUND PMT PID: {}'.format(program_fields['pid']))
+ # self.logger.debug('###### FOUND PMT PID: {}'.format(program_fields['pid']))
packet_list.append(packet)
pmt_found = True
continue
elif program_fields['pid'] == 0x1ffb:
self.logger.info('Packet Table indicator 0x1ffb, not implemented {}'.format(i))
continue
- #elif program_fields['pid'] == 0x0011:
+ # elif program_fields['pid'] == 0x0011:
# self.logger.info('Service Description Table (SDT) 0x0011, not implemented {}'.format(i))
# continue
- #elif program_fields['pid'] == 0x0000 or \
+ # elif program_fields['pid'] == 0x0000 or \
# program_fields['pid'] == 0x0100 or \
# program_fields['pid'] == 0x0101:
# continue
- #else:
+ # else:
# self.logger.info('Unknown PID {}'.format(program_fields['pid']))
prev_pid = program_fields['pid']
return packet_list
-
+
+
+ def get_pid(self, _packet_188):
+ word = struct.unpack('!I', _packet_188[0:4])[0]
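+        # The first header byte of a TS packet must be the 0x47 sync byte; anything else is not a valid packet.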
+ sync = (word & 0xff000000) >> 24
+ if sync != 0x47:
+ return None
+
+ # Packet Identifier, describing the payload data.
+ pid = (word & 0x1fff00) >> 8
+ return pid
+
def decode_ts_packet(self, _packet_188):
fields = {}
word = struct.unpack('!I', _packet_188[0:4])[0]
@@ -741,9 +780,8 @@ def decode_pmt(self, pid, program, payload):
reserved = (pcr_pid & 0xe000) >> 13
pcr_pid &= 0x1fff
desc1 = payload[12:]
- #self.logger.debug('###### PMT DESCR {} {}'.format(pcr_pid, desc1))
- #descriptors = decode_descriptors(desc1)
-
+ # self.logger.debug('###### PMT DESCR {} {}'.format(pcr_pid, desc1))
+ # descriptors = decode_descriptors(desc1)
def decode_pat(self, payload):
t = binascii.b2a_hex(payload)
@@ -759,18 +797,13 @@ def decode_pat(self, payload):
program_count = (section_length - 5) / 4 - 1
if section_length > 20:
- #print(section_length, program_count, len(payload))
- #self.logger.warning('{} {} {}'.format(section_length, program_count, len(payload)))
- # log for corrupted atsc msg
return program_map_pids
-
for i in range(0, int(program_count)):
at = 8 + (i * 4) # skip headers, just get to the program numbers table
program_number = struct.unpack("!H", payload[at:at + 2])[0]
if at + 2 > len(payload):
break
- #print(len(payload), at)
program_map_pid = struct.unpack("!H", payload[at + 2:at + 2 + 2])[0]
# the pid is only 13 bits, upper 3 bits of this field are 'reserved' (I see 0b111)
@@ -780,4 +813,3 @@ def decode_pat(self, payload):
program_map_pids[program_map_pid] = program_number
i += 1
return program_map_pids
-
diff --git a/lib/streams/ffmpeg_proxy.py b/lib/streams/ffmpeg_proxy.py
index 95ad098..2fc3741 100644
--- a/lib/streams/ffmpeg_proxy.py
+++ b/lib/streams/ffmpeg_proxy.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -27,6 +27,8 @@
from .stream_queue import StreamQueue
from .pts_validation import PTSValidation
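+# Seconds of missing video data tolerated before the ffmpeg stream is refreshed;
+# reset at stream start from the plugin's stream-g_stream_timeout setting.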
+MAX_IDLE_TIMER = 59
+
class FFMpegProxy(Stream):
@@ -45,68 +47,79 @@ def __init__(self, _plugins, _hdhr_queue):
self.write_buffer = None
self.stream_queue = None
self.pts_validation = None
+ self.tuner_no = -1
super().__init__(_plugins, _hdhr_queue)
- self.config = self.plugins.config_obj.data
self.db_configdefn = DBConfigDefn(self.config)
self.video = Video(self.config)
def update_tuner_status(self, _status):
- ch_num = self.channel_dict['number']
+ ch_num = self.channel_dict['display_number']
namespace = self.channel_dict['namespace']
scan_list = WebHTTPHandler.rmg_station_scans[namespace]
- for i, tuner in enumerate(scan_list):
- if type(tuner) == dict and tuner['ch'] == ch_num:
- WebHTTPHandler.rmg_station_scans[namespace][i]['status'] = _status
-
- def stream(self, _channel_dict, _write_buffer):
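+        # The tuner slot index is supplied by the caller; update that slot's status directly
+        # instead of searching the scan list for a matching channel number.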
+ tuner = scan_list[self.tuner_no]
+ if type(tuner) == dict and tuner['ch'] == ch_num:
+ WebHTTPHandler.rmg_station_scans[namespace][self.tuner_no]['status'] = _status
+
+ def stream(self, _channel_dict, _write_buffer, _tuner_no):
+ global MAX_IDLE_TIMER
+ self.logger.info('Using ffmpeg_proxy for channel {}'.format(_channel_dict['uid']))
+ self.tuner_no = _tuner_no
self.channel_dict = _channel_dict
self.write_buffer = _write_buffer
self.config = self.db_configdefn.get_config()
+ MAX_IDLE_TIMER = self.config[self.namespace.lower()]['stream-g_stream_timeout']
+
self.pts_validation = PTSValidation(self.config, self.channel_dict)
channel_uri = self.get_stream_uri(self.channel_dict)
if not channel_uri:
- self.logger.warning('Unknown Channel')
+ self.logger.warning('Unknown Channel {}'.format(_channel_dict['uid']))
return
self.ffmpeg_proc = self.open_ffmpeg_proc(channel_uri)
time.sleep(0.01)
self.last_refresh = time.time()
self.block_prev_time = self.last_refresh
self.buffer_prev_time = self.last_refresh
- self.video.data = self.read_buffer()
+ self.read_buffer()
while True:
if not self.video.data:
- self.logger.debug('No Video Data, refreshing stream')
+ self.logger.info(
+ 'No Video Data, refreshing stream {} {}'
+ .format(_channel_dict['uid'], self.ffmpeg_proc.pid))
self.ffmpeg_proc = self.refresh_stream()
else:
try:
- self.validate_stream(self.video)
+ self.validate_stream()
self.update_tuner_status('Streaming')
+ start_ttw = time.time()
self.write_buffer.write(self.video.data)
+ delta_ttw = time.time() - start_ttw
+ self.logger.info(
+ 'Serving {} {} ({}B) ttw:{:.2f}s'
+ .format(self.ffmpeg_proc.pid, _channel_dict['uid'],
+ len(self.video.data), delta_ttw))
except IOError as e:
if e.errno in [errno.EPIPE, errno.ECONNABORTED, errno.ECONNRESET, errno.ECONNREFUSED]:
- self.logger.info('1. Connection dropped by end device')
+ self.logger.info('1. Connection dropped by end device {}'.format(self.ffmpeg_proc.pid))
break
else:
self.logger.error('{}{}'.format(
- '1 ################ UNEXPECTED EXCEPTION=', e))
+ '1 UNEXPECTED EXCEPTION=', e))
raise
try:
- self.video.data = self.read_buffer()
+ self.read_buffer()
+ except exceptions.CabernetException as ex:
+ self.logger.info('{} {}'.format(ex, self.ffmpeg_proc.pid))
+ break
except Exception as e:
self.logger.error('{}{}'.format(
- '2 ################ UNEXPECTED EXCEPTION=', e))
- raise
- self.logger.debug('Terminating ffmpeg stream')
- self.ffmpeg_proc.terminate()
- try:
- self.ffmpeg_proc.communicate()
- except ValueError:
- pass
+ '2 UNEXPECTED EXCEPTION=', e))
+ break
+ self.terminate_stream()
def validate_stream(self):
- if not self.config[self.channel_dict['namespace'].lower()]['player-enable_pts_filter']:
+ if not self.config[self.config_section]['player-enable_pts_filter']:
return
-
+
has_changed = True
while has_changed:
has_changed = False
@@ -119,42 +132,51 @@ def validate_stream(self):
has_changed = True
if results['refresh_stream']:
self.ffmpeg_proc = self.refresh_stream()
- self.video.data = self.read_buffer()
+ self.read_buffer()
has_changed = True
if results['reread_buffer']:
- self.video.data = self.read_buffer()
+ self.read_buffer()
has_changed = True
- return
+ return
def read_buffer(self):
+ global MAX_IDLE_TIMER
data_found = False
self.video.data = None
- idle_timer = 2
+ idle_timer = MAX_IDLE_TIMER # time slice segments are less than 10 seconds
while not data_found:
self.video.data = self.stream_queue.read()
if self.video.data:
data_found = True
else:
- time.sleep(0.2)
+ time.sleep(1)
idle_timer -= 1
- if idle_timer == 0:
- if self.plugins.plugins[self.channel_dict['namespace']].plugin_obj \
- .is_time_to_refresh_ext(self.last_refresh, self.channel_dict['instance']):
- self.ffmpeg_proc = self.refresh_stream()
+ if idle_timer < 1:
+ idle_timer = MAX_IDLE_TIMER # time slice segments are less than 10 seconds
+ self.logger.info(
+ 'No Video Data, refreshing stream {}'
+ .format(self.ffmpeg_proc.pid))
+ self.ffmpeg_proc = self.refresh_stream()
+ elif int(MAX_IDLE_TIMER / 2) == idle_timer:
+ self.update_tuner_status('No Reply')
return
+ def terminate_stream(self):
+ self.logger.debug('Terminating ffmpeg stream {}'.format(self.ffmpeg_proc.pid))
+ while True:
+ try:
+ self.ffmpeg_proc.terminate()
+ self.ffmpeg_proc.wait(timeout=1.5)
+ break
+ except ValueError:
+ pass
+ except subprocess.TimeoutExpired:
+ time.sleep(0.01)
+
def refresh_stream(self):
self.last_refresh = time.time()
channel_uri = self.get_stream_uri(self.channel_dict)
- try:
- self.ffmpeg_proc.terminate()
- self.ffmpeg_proc.wait(timeout=0.1)
- self.logger.debug('Previous ffmpeg terminated')
- except ValueError:
- pass
- except subprocess.TimeoutExpired:
- self.ffmpeg_proc.terminate()
- time.sleep(0.01)
+ self.terminate_stream()
self.logger.debug('{}{}'.format(
'Refresh Stream channelUri=', channel_uri))
@@ -170,7 +192,8 @@ def open_ffmpeg_proc_locast(self, _channel_uri):
visible by looking at the video packets for a 6 second window being 171
instead of 180. Following the first read, the packets increase to 180.
"""
- ffmpeg_command = [self.config['paths']['ffmpeg_path'],
+ ffmpeg_command = [
+ self.config['paths']['ffmpeg_path'],
'-i', str(_channel_uri),
'-f', 'mpegts',
'-nostats',
@@ -178,7 +201,8 @@ def open_ffmpeg_proc_locast(self, _channel_uri):
'-loglevel', 'warning',
'-copyts',
'pipe:1']
- ffmpeg_process = subprocess.Popen(ffmpeg_command,
+ ffmpeg_process = subprocess.Popen(
+ ffmpeg_command,
stdout=subprocess.PIPE,
bufsize=-1)
self.stream_queue = StreamQueue(188, ffmpeg_process, self.channel_dict['uid'])
@@ -192,18 +216,38 @@ def open_ffmpeg_proc(self, _channel_uri):
visible by looking at the video packets for a 6 second window being 171
instead of 180. Following the first read, the packets increase to 180.
"""
- ffmpeg_command = [self.config['paths']['ffmpeg_path'],
+ header = self.channel_dict['json'].get('Header')
+ str_array = []
+ if header:
+ str_array.append('-headers')
+ header_value = ''
+ for key, value in header.items():
+ header_value += key+': '+value+'\r\n'
+ if key == 'Referer':
+ self.logger.debug('Using HTTP Referer: {} Channel: {}'.format(value, self.channel_dict['uid']))
+ str_array.append(header_value)
+
+ ffmpeg_options = [
'-i', str(_channel_uri),
'-nostats',
'-hide_banner',
'-fflags', '+genpts',
'-threads', '2',
- '-loglevel', 'fatal',
+ '-loglevel', 'quiet',
'-c', 'copy',
'-f', 'mpegts',
'-c', 'copy',
'pipe:1']
- ffmpeg_process = subprocess.Popen(ffmpeg_command,
+
+ ffmpeg_command = [
+ self.config['paths']['ffmpeg_path']
+ ]
+ # Header option must come first in the options list
+ if str_array:
+ ffmpeg_command.extend(str_array)
+ ffmpeg_command.extend(ffmpeg_options)
+ ffmpeg_process = subprocess.Popen(
+ ffmpeg_command,
stdout=subprocess.PIPE,
bufsize=-1)
self.stream_queue = StreamQueue(188, ffmpeg_process, self.channel_dict['uid'])
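# A minimal sketch of the '-headers' handling shown above, assuming a plain
# header dict and an ffmpeg binary on PATH; the real code reads the header
# from the channel json and the ffmpeg path from config.
import subprocess

def build_ffmpeg_command(uri, header, ffmpeg_path='ffmpeg'):
    # ffmpeg takes one '-headers' argument of CRLF-separated 'Key: Value'
    # pairs, and it must appear before the '-i' input it applies to
    cmd = [ffmpeg_path]
    if header:
        cmd.extend(['-headers',
                    ''.join('{}: {}\r\n'.format(k, v) for k, v in header.items())])
    cmd.extend(['-i', uri, '-c', 'copy', '-f', 'mpegts', 'pipe:1'])
    return cmd

# example (hypothetical URI and Referer):
# proc = subprocess.Popen(
#     build_ffmpeg_command('http://example.com/stream.m3u8',
#                          {'Referer': 'http://example.com/'}),
#     stdout=subprocess.PIPE, bufsize=-1)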
diff --git a/lib/streams/internal_proxy.py b/lib/streams/internal_proxy.py
index 8795e1a..9be2738 100644
--- a/lib/streams/internal_proxy.py
+++ b/lib/streams/internal_proxy.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -18,37 +18,31 @@
import datetime
import errno
-import http
import os
+import queue
import re
-import signal
import socket
import threading
import time
-import urllib.request
-from collections import OrderedDict
-from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
-from cryptography.hazmat.backends import default_backend
+import urllib.parse
from multiprocessing import Queue, Process
-from queue import Empty
-from threading import Thread
import lib.common.exceptions as exceptions
import lib.common.utils as utils
import lib.m3u8 as m3u8
import lib.streams.m3u8_queue as m3u8_queue
-from lib.common.decorators import handle_url_except
-from lib.common.decorators import handle_json_except
from lib.streams.video import Video
from lib.streams.atsc import ATSCMsg
+from lib.streams.thread_queue import ThreadQueue
from lib.db.db_config_defn import DBConfigDefn
from lib.db.db_channels import DBChannels
from lib.clients.web_handler import WebHTTPHandler
from .stream import Stream
-MAX_OUT_QUEUE_SIZE = 6
-IDLE_COUNTER_MAX = 120
-
+MAX_OUT_QUEUE_SIZE = 30
+IDLE_COUNTER_MAX = 110  # seconds before terminating the stream; roughly four times the (timeout * retries) set in config
+STARTUP_IDLE_COUNTER = 40  # seconds to wait for an initial stream
+# code assumes a timeout response in TVH of 15 or higher.
class InternalProxy(Stream):
@@ -61,79 +55,91 @@ def __init__(self, _plugins, _hdhr_queue):
self.wfile = None
self.file_filter = None
self.t_m3u8 = None
+ self.t_m3u8_pid = None
self.duration = 6
self.last_ts_filename = ''
super().__init__(_plugins, _hdhr_queue)
- self.config = self.plugins.config_obj.data
self.db_configdefn = DBConfigDefn(self.config)
self.db_channels = DBChannels(self.config)
self.video = Video(self.config)
self.atsc = ATSCMsg()
self.initialized_psi = False
self.in_queue = Queue()
- self.out_queue = Queue(maxsize=MAX_OUT_QUEUE_SIZE)
+ self.t_queue = None
+ self.out_queue = queue.Queue(maxsize=MAX_OUT_QUEUE_SIZE)
self.terminate_queue = None
- self.tc_match = re.compile( r'^.+[^\d]+(\d*)\.ts' )
+ self.tc_match = re.compile(r'^.+\D+(\d*)\.ts')
self.idle_counter = 0
+ self.tuner_no = -1
+ # last time the idle counter was reset
+ self.last_reset_time = datetime.datetime.now()
+ self.last_atsc_msg = 0
+ self.filter_counter = 0
self.is_starting = True
self.cue = False
-
+
def terminate(self, *args):
- try:
- while not self.in_queue.empty():
- self.in_queue.get()
- except (Empty, EOFError):
- pass
- self.in_queue.put({'uri': 'terminate'})
- time.sleep(0.2)
- # since t_m3u8 has been told to terminate, clear the out queue and then wait for t_m3u8 so it can cleanup ffmpeg
- self.t_m3u8.join(timeout=15)
- if self.t_m3u8.is_alive():
- # this is not likely but if t_m3u8 does not self terminate then force it to terminate
- self.logger.debug('t_m3u8 failed to self terminate. Forcing it to terminate {}' \
- .format(self.t_m3u8.pid))
- self.t_m3u8.terminate()
- time.sleep(0.5)
+ self.t_queue.del_thread(threading.get_ident())
+ time.sleep(0.01)
+ self.in_queue.put({'thread_id': threading.get_ident(), 'uri': 'terminate'})
+ time.sleep(0.01)
+
+ # since t_m3u8 has been told to terminate, clear the
+ # out queue and then wait for t_m3u8, so it can clean up ffmpeg
+
+ # the queue is not guaranteed to contain the 'terminate' item, so let t_queue know this thread is ending
+ count = 10
+ while str(self.t_queue) == '0' and self.t_queue.is_alive() and count > 0:
+ time.sleep(1.0)
+ count -= 1
+
+ if not self.t_queue.is_alive():
+ self.t_m3u8.join(timeout=15)
+ if self.t_m3u8.is_alive():
+ # this is not likely but if t_m3u8 does not self terminate then force it to terminate
+ self.logger.debug(
+ 'm3u8 queue failed to self terminate. Forcing it to terminate {}'
+ .format(self.t_m3u8_pid))
+ self.clear_queues()
+ self.t_m3u8.terminate()
self.t_m3u8 = None
self.clear_queues()
- @handle_url_except(timeout=None)
- @handle_json_except
- def get_m3u8_data(self, _uri):
- # it sticks here. Need to find a work around for the socket.timeout per process
- return m3u8.load(_uri,
- headers={'User-agent': utils.DEFAULT_USER_AGENT})
-
- def stream(self, _channel_dict, _wfile, _terminate_queue):
+ def stream(self, _channel_dict, _wfile, _terminate_queue, _tuner_no):
"""
Processes m3u8 interface without using ffmpeg
"""
+ global IDLE_COUNTER_MAX
+ self.tuner_no = _tuner_no
self.config = self.db_configdefn.get_config()
+ IDLE_COUNTER_MAX = self.config[self.namespace.lower()]['stream-g_stream_timeout']
+
self.channel_dict = _channel_dict
if not self.start_m3u8_queue_process():
self.terminate()
- return
+ return
self.wfile = _wfile
self.terminate_queue = _terminate_queue
while True:
try:
self.check_termination()
self.play_queue()
- if not self.t_m3u8.is_alive():
+ if self.t_m3u8 and not self.t_m3u8.is_alive():
break
except IOError as ex:
# Check we hit a broken pipe when trying to write back to the client
if ex.errno in [errno.EPIPE, errno.ECONNABORTED, errno.ECONNRESET, errno.ECONNREFUSED]:
# Normal process. Client request end of stream
- self.logger.info('Connection dropped by end device {} {}' \
- .format(ex, self.t_m3u8.pid))
+ self.logger.info(
+ 'Connection dropped by end device {} {}'
+ .format(ex, self.t_m3u8_pid))
break
else:
self.logger.error('{}{} {} {}'.format(
- 'UNEXPECTED EXCEPTION=', ex, self.t_m3u8.pid, socket.getdefaulttimeout()))
+ 'UNEXPECTED EXCEPTION=', ex, self.t_m3u8_pid, socket.getdefaulttimeout()))
raise
except exceptions.CabernetException as ex:
- self.logger.info('{} {}'.format(ex, self.t_m3u8.pid))
+ self.logger.info('{} {}'.format(ex, self.t_m3u8_pid))
break
self.terminate()
@@ -142,34 +148,74 @@ def check_termination(self):
raise exceptions.CabernetException("Termination Requested")
def clear_queues(self):
- self.in_queue.close()
- self.out_queue.close()
+ """
+ out_queue cannot be closed since it is a normal queue.
+ The others are handled elsewhere
+ """
+ pass
def play_queue(self):
global MAX_OUT_QUEUE_SIZE
global IDLE_COUNTER_MAX
+
if not self.cue:
- self.idle_counter += 1
- if self.idle_counter > IDLE_COUNTER_MAX:
+ self.update_idle_counter()
+ if self.is_starting and self.idle_counter > STARTUP_IDLE_COUNTER:
+ # we need to terminate this feed. Some providers require a
+ # retry in order to make it work.
+ self.idle_counter = 0
+ self.last_reset_time = datetime.datetime.now()
+ self.last_atsc_msg = 0
+ self.logger.info(
+ '1 Provider has not started playing the stream. Terminating the connection {}'
+ .format(self.t_m3u8_pid))
+ raise exceptions.CabernetException(
+ '2 Provider has not started playing the stream. Terminating the connection {}'
+ .format(self.t_m3u8_pid))
+ elif self.idle_counter > self.filter_counter + IDLE_COUNTER_MAX:
self.idle_counter = 0
- self.logger.info('Provider has stop playing the stream. Terminating the connection {}' \
- .format(self.t_m3u8.pid))
- raise exceptions.CabernetException('Provider has stop playing the stream. Terminating the connection {}' \
- .format(self.t_m3u8.pid))
- elif self.idle_counter % 6 == 0 and self.is_starting:
- self.write_atsc_msg()
- while not self.out_queue.empty():
- out_queue_item = self.out_queue.get()
+ self.last_atsc_msg = 0
+ self.last_reset_time = datetime.datetime.now()
+ self.filter_counter = 0
+ self.logger.info(
+ '1 Provider has stopped playing the stream. Terminating the connection {}'
+ .format(self.t_m3u8_pid))
+ raise exceptions.CabernetException(
+ '2 Provider has stopped playing the stream. Terminating the connection {}'
+ .format(self.t_m3u8_pid))
+ elif self.idle_counter > self.last_atsc_msg+6 \
+ and self.is_starting:
+ self.last_atsc_msg = self.idle_counter
+ self.write_atsc_msg()
+ elif self.idle_counter > self.last_atsc_msg+14:
+ self.last_atsc_msg = self.idle_counter
+ self.update_tuner_status('No Reply')
+ self.logger.debug('1 Requesting status from m3u8_queue {}'.format(self.t_m3u8_pid))
+ self.in_queue.put({'thread_id': threading.get_ident(), 'uri': 'status'})
+ if not self.is_starting \
+ and self.config[self.channel_dict['namespace'].lower()] \
+ ['player-send_atsc_keepalive']:
+ self.write_atsc_msg()
+ while True:
+ try:
+ out_queue_item = self.out_queue.get(timeout=1)
+ except queue.Empty:
+ break
if out_queue_item['atsc'] is not None:
self.channel_dict['atsc'] = out_queue_item['atsc']
- #self.logger.debug('SAVING TO DB {}'.format(len(out_queue_item['atsc'])))
self.db_channels.update_channel_atsc(
self.channel_dict)
uri = out_queue_item['uri']
if uri == 'terminate':
- raise exceptions.CabernetException('m3u8 queue termination requested, aborting stream {}' \
- .format(self.t_m3u8.pid))
+ raise exceptions.CabernetException(
+ 'm3u8 queue termination requested, aborting stream {} {}'
+ .format(self.t_m3u8_pid, threading.get_ident()))
elif uri == 'running':
+ self.logger.debug('1 Status of Running returned from m3u8_queue {}'.format(self.t_m3u8_pid))
+ continue
+ elif uri == 'extend':
+ self.logger.debug('Extending the idle timeout to {} seconds'.format(self.idle_counter+IDLE_COUNTER_MAX))
+ self.filter_counter = self.idle_counter
continue
data = out_queue_item['data']
if data['cue'] == 'in':
@@ -179,70 +225,113 @@ def play_queue(self):
self.cue = True
self.logger.debug('Turning M3U8 cue to True')
if data['filtered']:
- self.idle_counter = 0
- self.logger.info('Filtered Msg {} {}'.format(self.t_m3u8.pid, urllib.parse.unquote(uri)))
+ self.last_atsc_msg = self.idle_counter
+ self.filter_counter = self.idle_counter
+ self.logger.info('Filtered Msg {} {}'.format(self.t_m3u8_pid, urllib.parse.unquote(uri)))
self.update_tuner_status('Filtered')
- #self.write_buffer(out_queue_item['stream'])
+ # self.write_buffer(out_queue_item['stream'])
if self.is_starting:
self.is_starting = False
self.write_atsc_msg()
+ self.logger.debug('2 Requesting Status from m3u8_queue {}'.format(self.t_m3u8_pid))
+ self.in_queue.put({'thread_id': threading.get_ident(), 'uri': 'status'})
time.sleep(0.5)
else:
self.video.data = out_queue_item['stream']
if self.video.data is not None:
self.idle_counter = 0
+ self.last_atsc_msg = 0
+ self.last_reset_time = datetime.datetime.now()
+ self.filter_counter = 0
if self.config['stream']['update_sdt']:
self.atsc.update_sdt_names(self.video,
- self.channel_dict['namespace'].encode(),
- self.set_service_name(self.channel_dict).encode())
+ self.channel_dict['namespace'].encode(),
+ self.set_service_name(self.channel_dict).encode())
self.duration = data['duration']
uri_decoded = urllib.parse.unquote(uri)
if self.check_ts_counter(uri_decoded):
- start_ttw = time.time()
- self.write_buffer(self.video.data)
- delta_ttw = time.time() - start_ttw
- self.logger.info('Serving {} {} ({})s ({}B) ttw:{:.2f}s' \
- .format(self.t_m3u8.pid, uri_decoded, self.duration, len(self.video.data), delta_ttw))
- self.is_starting = False
- self.update_tuner_status('Streaming')
- time.sleep(0.1)
+ # if the data is tiny and not TS-aligned, or looks like HTML, log it as a string
+ if len(self.video.data) < 2000 and len(self.video.data) % 188 != 0 or self.video.data.startswith(b'<'):
+ self.logger.info('{} {} Not a Video packet, restarting HTTP Session, data: {} {}'
+ .format(self.t_m3u8_pid, uri_decoded, len(self.video.data), self.video.data))
+ self.update_tuner_status('Bad Data')
+ self.in_queue.put({'thread_id': threading.get_ident(), 'uri': 'restart_http'})
+ else:
+ start_ttw = time.time()
+ self.write_buffer(self.video.data)
+ delta_ttw = time.time() - start_ttw
+ self.update_tuner_status('Streaming')
+ self.logger.info(
+ 'Serving {} {} ({})s ({}B) ttw:{:.2f}s {}'
+ .format(self.t_m3u8_pid, uri_decoded, self.duration,
+ len(self.video.data), delta_ttw, threading.get_ident()))
+ self.is_starting = False
+ time.sleep(0.1)
else:
if not self.is_starting:
self.update_tuner_status('No Reply')
uri_decoded = urllib.parse.unquote(uri)
- self.logger.debug('No Video Stream from Provider {} {}' \
- .format(self.t_m3u8.pid, uri_decoded))
+ self.logger.debug(
+ 'No Video Stream from Provider {} {}'
+ .format(self.t_m3u8_pid, uri_decoded))
self.check_termination()
time.sleep(0.01)
- time.sleep(1)
self.video.terminate()
def write_buffer(self, _data):
+ """
+ Slowly push out bytes until something new is added to the queue
+ to process. This should stop clients from terminating the data
+ stream due to a short gap in data. It currently spreads at least
+ 20 seconds of data before it stops transmitting.
+ """
try:
- self.wfile.flush()
- # Do not use chunk writes! Just send data.
- #x = self.wfile.write('{}\r\n'.format(len(_data)).encode())
- x = self.wfile.write(_data)
- #x = self.wfile.write('\r\n'.encode())
- self.wfile.flush()
- except socket.timeout as ex:
+ bytes_written = 0
+ count = 0
+ bytes_per_write = int(len(_data)/20) # spread the data over ~20 one-second writes
+ while self.out_queue.qsize() == 0:
+ self.wfile.flush()
+ # Do not use chunk writes! Just send data.
+ # x = self.wfile.write('{}\r\n'.format(len(_data)).encode())
+ next_buffer_write = bytes_written + bytes_per_write
+ if next_buffer_write >= len(_data):
+ x = self.wfile.write(_data[bytes_written:])
+ bytes_written = len(_data)
+ self.wfile.flush()
+ break
+ else:
+ count += 1
+ if count > 13:
+ self.update_tuner_status('No Reply')
+ x = self.wfile.write(_data[bytes_written:next_buffer_write])
+ bytes_written = next_buffer_write
+ # x = self.wfile.write('\r\n'.encode())
+ self.wfile.flush()
+ time.sleep(1.0)
+ if bytes_written != len(_data):
+ x = self.wfile.write(_data[bytes_written:])
+ self.wfile.flush()
+ except socket.timeout:
raise
- except IOError as e:
+ except IOError:
raise
return x
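# A minimal sketch of the pacing idea in write_buffer above: trickle the
# segment out in roughly one-second slices and flush the remainder as soon
# as new data is queued. wfile and out_queue are stand-ins for the handler's
# socket file object and output queue.
import time

def paced_write(wfile, out_queue, data, slices=20):
    bytes_per_write = max(1, len(data) // slices)
    written = 0
    while written < len(data):
        if not out_queue.empty():
            # something new is ready; push the remainder out and stop pacing
            wfile.write(data[written:])
            wfile.flush()
            return
        chunk = data[written:written + bytes_per_write]
        wfile.write(chunk)
        wfile.flush()
        written += len(chunk)
        if written < len(data):
            time.sleep(1.0)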
def write_atsc_msg(self):
- if self.channel_dict['atsc'] is None:
- self.logger.debug('No video data, Sending Empty ATSC Msg {}' \
- .format(self.t_m3u8.pid))
+ if not self.channel_dict['atsc']:
+ self.logger.debug(
+ 'No video data, Sending Empty ATSC Msg {}'
+ .format(self.t_m3u8_pid))
self.write_buffer(
self.atsc.format_video_packets())
else:
- self.logger.debug('No video data, Sending Default ATSC Msg for channel {}' \
- .format(self.t_m3u8.pid))
+ self.logger.debug(
+ 'No video data, Sending Default ATSC Msg for channel {}'
+ .format(self.t_m3u8_pid))
self.write_buffer(
self.atsc.format_video_packets(
- self.channel_dict['atsc']))
+ self.channel_dict['atsc']))
def get_ts_counter(self, _uri):
m = self.tc_match.findall(_uri)
@@ -250,7 +339,7 @@ def get_ts_counter(self, _uri):
return '', 0
else:
self.logger.debug('ts_counter {} {}'.format(m, _uri))
- x_tuple = m[len(m)-1]
+ x_tuple = m[len(m) - 1]
if len(x_tuple) == 0:
x_tuple = (_uri, '0')
else:
@@ -258,28 +347,37 @@ def get_ts_counter(self, _uri):
return x_tuple
def update_tuner_status(self, _status):
- ch_num = self.channel_dict['number']
+ ch_num = self.channel_dict['display_number']
namespace = self.channel_dict['namespace']
scan_list = WebHTTPHandler.rmg_station_scans[namespace]
- for i, tuner in enumerate(scan_list):
- if type(tuner) == dict and tuner['ch'] == ch_num:
- WebHTTPHandler.rmg_station_scans[namespace][i]['status'] = _status
+ tuner = scan_list[self.tuner_no]
+ if type(tuner) == dict and tuner['ch'] == ch_num:
+ WebHTTPHandler.rmg_station_scans[namespace][self.tuner_no]['status'] = _status
+ def update_idle_counter(self):
+ """
+ Updates the idle_counter to the nearest int in seconds
+ based on when it was last reset
+ """
+ current_time = datetime.datetime.now()
+ delta_time = current_time - self.last_reset_time
+ self.idle_counter = int(delta_time.total_seconds())
def check_ts_counter(self, _uri):
"""
Providers sometimes add the same stream section back into the list.
This method catches that and informs the caller that it should be ignored.
- """
+ """
# counter = self.tc_match.findall(uri_decoded)
# if len(counter) != 0:
- # counter = counter[0]
+ # counter = counter[0]
# else:
- # counter = -1
+ # counter = -1
# self.logger.debug('ts counter={}'.format(counter))
if _uri == self.last_ts_filename:
- self.logger.warning('TC Counter Same section being transmitted, ignoring uri: {} m3u8pid:{} proxypid:{}' \
- .format(_uri, self.t_m3u8.pid, os.getpid()))
+ self.logger.notice(
+ 'TC Counter Same section being transmitted, ignoring uri: {} m3u8pid:{} proxypid:{}'
+ .format(_uri, self.t_m3u8_pid, os.getpid()))
return False
self.last_ts_filename = _uri
return True
@@ -300,38 +398,74 @@ def start_m3u8_queue_process(self):
time.sleep(0.01)
if InternalProxy.is_m3u8_starting == threading.get_ident():
break
+ ch_num = self.channel_dict['display_number']
+ namespace = self.channel_dict['namespace']
+ scan_list = WebHTTPHandler.rmg_station_scans[namespace]
+ tuner = scan_list[self.tuner_no]
+ m3u8_out_queue = None
+
+ if isinstance(tuner, dict) \
+ and tuner['ch'] == ch_num \
+ and tuner['instance'] == self.instance:
+
+ if not tuner['mux']:
+ # new tuner case
+ m3u8_out_queue = Queue(maxsize=MAX_OUT_QUEUE_SIZE)
+ self.t_queue = ThreadQueue(m3u8_out_queue, self.config)
+ self.t_queue.add_thread(threading.get_ident(), self.out_queue)
+ self.t_queue.status_queue = self.in_queue
+ WebHTTPHandler.rmg_station_scans[namespace][self.tuner_no]['mux'] = self.t_queue
+ else:
+ # reuse tuner case
+ self.t_queue = tuner['mux']
+ self.t_queue.add_thread(threading.get_ident(), self.out_queue)
+ self.t_m3u8 = self.t_queue.remote_proc
+ self.t_m3u8_pid = self.t_queue.remote_proc.pid
+ self.in_queue = self.t_queue.status_queue
+
while not is_running and restarts > 0:
restarts -= 1
# Process is not thread safe. Must do the same target, one at a time.
- self.t_m3u8 = Process(target=m3u8_queue.start, args=(
- self.config, self.plugins, self.in_queue, self.out_queue, self.channel_dict,))
- self.t_m3u8.start()
- self.in_queue.put({'uri': 'status'})
- time.sleep(0.1)
- tries = 0
- while self.out_queue.empty() and tries < max_tries:
- tries += 1
- time.sleep(0.2)
- if tries >= max_tries:
- self.m3u8_terminate()
+ self.in_queue.put({'thread_id': threading.get_ident(), 'uri': 'status'})
+ self.logger.debug('3 Requesting status from m3u8_queue {}'.format(self.t_m3u8_pid))
+
+ if m3u8_out_queue:
+ self.logger.debug('Starting m3u8 queue process')
+ self.t_m3u8 = Process(target=m3u8_queue.start, args=(
+ self.config, self.plugins, self.in_queue, m3u8_out_queue, self.channel_dict,))
+ self.t_m3u8.start()
+ self.t_queue.remote_proc = self.t_m3u8
+ self.t_m3u8_pid = self.t_m3u8.pid
+
+ time.sleep(0.1)
tries = 0
- else:
- try:
- # queue is not empty, but it sticks here anyway...
- status = self.out_queue.get(False, 3)
- except Empty:
+ while self.out_queue.empty() and tries < max_tries:
+ tries += 1
+ time.sleep(0.2)
+ if tries >= max_tries:
self.m3u8_terminate()
- tries = 0
- continue
-
- if status['uri'] == 'terminate':
- InternalProxy.is_m3u8_starting = False
- return False
- elif status['uri'] == 'running':
- is_running = True
else:
- self.logger.warning('Unknown response from m3u8queue: {}' \
- .format(status['uri']))
+ try:
+ # queue is not empty, but it sticks here anyway...
+ status = self.out_queue.get(False, 3)
+ except queue.Empty:
+ self.m3u8_terminate()
+ continue
+
+ if status['uri'] == 'terminate':
+ self.logger.debug('Receive request to terminate from m3u8_queue {}'.format(self.t_m3u8_pid))
+ InternalProxy.is_m3u8_starting = False
+ return False
+ elif status['uri'] == 'running':
+ self.logger.debug('2 Status of Running returned from m3u8_queue {}'.format(self.t_m3u8_pid))
+ is_running = True
+ else:
+ self.logger.warning(
+ 'Unknown response from m3u8queue: {}'
+ .format(status['uri']))
+ else:
+ is_running = True
+
InternalProxy.is_m3u8_starting = False
return restarts > 0
@@ -340,18 +474,22 @@ def m3u8_terminate(self):
try:
self.in_queue.get()
time.sleep(0.1)
- except (Empty, EOFError) as e:
+ except (queue.Empty, EOFError):
pass
- self.t_m3u8.terminate()
- self.t_m3u8.join()
- self.logger.debug('m3u8_queue did not start correctly, restarting {}' \
+ if self.t_m3u8:
+ self.t_m3u8.terminate()
+ self.t_m3u8.join()
+ self.logger.debug(
+ 'm3u8_queue did not start correctly, restarting {}'
.format(self.channel_dict['uid']))
try:
while not self.out_queue.empty():
self.out_queue.get()
- except (Empty, EOFError):
+ except (queue.Empty, EOFError):
pass
self.clear_queues()
- time.sleep(0.3)
+ time.sleep(0.1)
self.in_queue = Queue()
- self.out_queue = Queue(maxsize=MAX_OUT_QUEUE_SIZE)
+ self.out_queue = queue.Queue(maxsize=MAX_OUT_QUEUE_SIZE)
+ self.t_queue.add_thread(threading.get_ident(), self.out_queue)
+ self.t_queue.status_queue = self.in_queue
diff --git a/lib/streams/m3u8_queue.py b/lib/streams/m3u8_queue.py
index cabf44b..a13ff3a 100644
--- a/lib/streams/m3u8_queue.py
+++ b/lib/streams/m3u8_queue.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -16,8 +16,7 @@
substantial portions of the Software.
"""
-import datetime
-import http
+import httpx
import logging
import os
import re
@@ -25,7 +24,7 @@
import sys
import threading
import time
-import urllib.request
+import urllib.parse
from collections import OrderedDict
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from cryptography.hazmat.backends import default_backend
@@ -44,109 +43,79 @@
PLAY_LIST = OrderedDict()
-IN_QUEUE = None
-STREAM_QUEUE = None
-OUT_QUEUE = None
+PROCESSED_URLS = {}
+IN_QUEUE = Queue()
+OUT_QUEUE = Queue()
TERMINATE_REQUESTED = False
-MAX_STREAM_QUEUE_SIZE = 100
-
-
-class M3U8Queue(Thread):
- """
- This runs as an independent process (one per stream) to get and process the
- data stream as fast as possible and return it to the tuner web server for
- output to the client.
- """
- is_stuck = None
-
- def __init__(self, _config, _channel_dict):
+MAX_STREAM_QUEUE_SIZE = 20
+STREAM_QUEUE = Queue()
+OUT_QUEUE_LIST = []
+HTTP_TIMEOUT = 8
+HTTP_RETRIES = 3
+PARALLEL_DOWNLOADS = 3
+IS_VOD = False
+UID_COUNTER = 1
+UID_PROCESSED = 1
+
+class M3U8GetUriData(Thread):
+ def __init__(self, _queue_item, _uid_counter, _config):
+ global TERMINATE_REQUESTED
Thread.__init__(self)
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
+ self.queue_item = _queue_item
+ self.uid_counter = _uid_counter
+ self.video = Video(_config)
self.config = _config
- self.namespace = _channel_dict['namespace'].lower()
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
self.pts_validation = None
- self.initialized_psi = False
- self.first_segment = True
- self.config_section = utils.instance_config_section(_channel_dict['namespace'], _channel_dict['instance'])
- self.atsc_msg = ATSCMsg()
- self.channel_dict = _channel_dict
- if self.config[self.config_section]['player-enable_pts_filter']:
- self.pts_validation = PTSValidation(_config, _channel_dict)
- self.video = Video(self.config)
- self.atsc = _channel_dict['atsc']
- if _channel_dict['json'].get('Header') is None:
- self.header = {'User-agent': utils.DEFAULT_USER_AGENT}
- else:
- self.header = _channel_dict['json']['Header']
- if _channel_dict['json'].get('use_date_on_m3u8_key') is None:
- self.use_date_on_key = True
- else:
- self.use_date_on_key = _channel_dict['json']['use_date_on_m3u8_key']
-
- self.pts_resync = PTSResync(_config, self.config_section, _channel_dict['uid'])
- self.key_list = {}
- self.start()
+ if _config[M3U8Queue.config_section]['player-enable_pts_filter']:
+ self.pts_validation = PTSValidation(_config, M3U8Queue.channel_dict)
+ if not TERMINATE_REQUESTED:
+ self.start()
- @handle_url_except()
- def get_uri_data(self, _uri):
- req = urllib.request.Request(_uri, headers=self.header)
- with urllib.request.urlopen(req, timeout=10) as resp:
- x = resp.read()
- return x
-
def run(self):
- global OUT_QUEUE
+ global UID_COUNTER
+ global UID_PROCESSED
global STREAM_QUEUE
global TERMINATE_REQUESTED
- try:
- while not TERMINATE_REQUESTED:
- queue_item = STREAM_QUEUE.get()
- if queue_item['uri_dt'] == 'terminate':
- time.sleep(0.01)
- break
- elif queue_item['uri_dt'] == 'status':
- OUT_QUEUE.put({'uri': 'running',
- 'data': None,
- 'stream': None,
- 'atsc': None})
- time.sleep(0.01)
- continue
- self.process_m3u8_item(queue_item)
- except (KeyboardInterrupt, EOFError):
- TERMINATE_REQUESTED = True
- self.pts_resync.terminate()
- self.clear_queues()
- sys.exit()
- except Exception as ex:
- TERMINATE_REQUESTED = True
- STREAM_QUEUE.put({'uri_dt': 'terminate'})
- IN_QUEUE.put({'uri': 'terminate'})
- if self.pts_resync is not None:
- self.pts_resync.terminate()
- self.clear_queues()
- time.sleep(0.01)
- self.logger.exception('{}{}'.format(
- 'UNEXPECTED EXCEPTION M3U8Queue=', ex))
- sys.exit()
- # we are terminating so cleanup ffmpeg
- if self.pts_resync is not None:
- self.pts_resync.terminate()
- self.clear_queues()
-
+ self.logger.trace('M3U8GetUriData started {} {} {}'.format(self.queue_item['data']['uri'], os.getpid(), threading.get_ident()))
+ m3u8_data = self.process_m3u8_item(self.queue_item)
+ if not TERMINATE_REQUESTED:
+ PROCESSED_URLS[self.uid_counter] = m3u8_data
+ STREAM_QUEUE.put({'uri_dt': 'check_processed_list'})
+ self.logger.trace('M3U8GetUriData terminated COUNTER {} {} {}'.format(self.uid_counter, os.getpid(), threading.get_ident()))
+ m3u8_data = None
+ self.queue_item = None
+ self.uid_counter = None
+ self.video = None
+ self.pts_validation = None
+ self.logger = None
+
+
+ @handle_url_except()
+ def get_uri_data(self, _uri, _retries):
+ """
+ _retries is used by the decorator when an HTTP failure occurs
+ """
+ global HTTP_TIMEOUT
+ resp = M3U8Queue.http_session.get(_uri, headers=M3U8Queue.http_header, timeout=HTTP_TIMEOUT)
+ x = resp.content
+ resp.raise_for_status()
+ return x
def decrypt_stream(self, _data):
+ global HTTP_RETRIES
if _data['key'] and _data['key']['uri']:
- if _data['key']['uri'] in self.key_list.keys():
- key_data = self.key_list[_data['key']['uri']]
+ if _data['key']['uri'] in M3U8Queue.key_list.keys():
+ key_data = M3U8Queue.key_list[_data['key']['uri']]
self.logger.debug('Reusing key {} {}'.format(os.getpid(), _data['key']['uri']))
elif not _data['key']['uri'].startswith('http'):
self.logger.warning('Unknown protocol, aborting {} {}'.format(os.getpid(), _data['key']['uri']))
return False
else:
- key_data = self.get_uri_data(_data['key']['uri'])
+ key_data = self.get_uri_data(_data['key']['uri'], HTTP_RETRIES)
if key_data is not None:
- self.key_list[_data['key']['uri']] = key_data
+ M3U8Queue.key_list[_data['key']['uri']] = key_data
if _data['key']['iv'] is None:
# if iv is none, use a fixed default value
iv = bytearray.fromhex('000000000000000000000000000000F6')
@@ -157,139 +126,272 @@ def decrypt_stream(self, _data):
cipher = Cipher(algorithms.AES(key_data), modes.CBC(iv), default_backend())
decryptor = cipher.decryptor()
self.video.data = decryptor.update(self.video.data)
- if len(self.key_list.keys()) > 20:
- del self.key_list[list(self.key_list)[0]]
+ if len(M3U8Queue.key_list.keys()) > 20:
+ del M3U8Queue.key_list[list(M3U8Queue.key_list)[0]]
return True
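# A self-contained sketch of the AES-128-CBC decryption performed by
# decrypt_stream above, using the same cryptography primitives already
# imported by this module. Key, IV, and segment bytes are placeholders.
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes

def decrypt_segment(segment_bytes, key_bytes, iv_bytes):
    # HLS AES-128 uses a 16-byte key and a 16-byte IV in CBC mode;
    # mirror decrypt_stream and return the raw update() output
    cipher = Cipher(algorithms.AES(key_bytes), modes.CBC(iv_bytes), default_backend())
    return cipher.decryptor().update(segment_bytes)

# example with placeholder values (a real key is fetched from the EXT-X-KEY uri):
# clear = decrypt_segment(encrypted, bytes(16),
#                         bytes.fromhex('000000000000000000000000000000F6'))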
def atsc_processing(self):
- if self.atsc is None:
- p_list = self.atsc_msg.extract_psip(self.video.data)
+ if not M3U8Queue.atsc:
+ p_list = M3U8Queue.atsc_msg.extract_psip(self.video.data)
if len(p_list) != 0:
- self.atsc = p_list
- self.channel_dict['atsc'] = p_list
- self.initialized_psi = True
+ M3U8Queue.atsc = p_list
+ M3U8Queue.channel_dict['atsc'] = p_list
+ M3U8Queue.initialized_psi = True
return p_list
- elif not self.initialized_psi:
- p_list = self.atsc_msg.extract_psip(self.video.data)
- if len(self.atsc) != len(p_list):
- self.atsc = p_list
- self.channel_dict['atsc'] = p_list
- self.initialized_psi = True
+ elif not M3U8Queue.initialized_psi:
+ p_list = M3U8Queue.atsc_msg.extract_psip(self.video.data)
+ if len(M3U8Queue.atsc) < len(p_list):
+ M3U8Queue.atsc = p_list
+ M3U8Queue.channel_dict['atsc'] = p_list
+ M3U8Queue.initialized_psi = True
return p_list
- for i in range(len(p_list)):
- if p_list[i][4:] != self.atsc[i][4:]:
- self.atsc = p_list
- self.channel_dict['atsc'] = p_list
- self.initialized_psi = True
- return p_list
+ if len(M3U8Queue.atsc) == len(p_list):
+ for i in range(len(p_list)):
+ if p_list[i][4:] != M3U8Queue.atsc[i][4:]:
+ M3U8Queue.atsc = p_list
+ M3U8Queue.channel_dict['atsc'] = p_list
+ M3U8Queue.initialized_psi = True
+ is_changed = True
+ return p_list
return None
+ def is_pts_valid(self):
+ if self.pts_validation is None:
+ return True
+ results = self.pts_validation.check_pts(self.video)
+ if results['byteoffset'] != 0:
+ return False
+ if results['refresh_stream']:
+ return False
+ if results['reread_buffer']:
+ return False
+ return True
+
+ def get_stream_from_atsc(self):
+ if M3U8Queue.atsc is not None:
+ return M3U8Queue.atsc_msg.format_video_packets(M3U8Queue.atsc)
+ else:
+ self.logger.info(''.join([
+ 'No ATSC msg available during filtered content, ',
+ 'recommend running this channel again to catch the ATSC msg.']))
+ return M3U8Queue.atsc_msg.format_video_packets()
+
def process_m3u8_item(self, _queue_item):
+ global IS_VOD
global TERMINATE_REQUESTED
global PLAY_LIST
global OUT_QUEUE
+ global UID_PROCESSED
+ global HTTP_RETRIES
uri_dt = _queue_item['uri_dt']
data = _queue_item['data']
if data['filtered']:
- OUT_QUEUE.put({'uri': uri_dt[0],
- 'data': data,
- 'stream': self.get_stream_from_atsc(),
- 'atsc': None})
PLAY_LIST[uri_dt]['played'] = True
- time.sleep(0.01)
+ return {'uri': data['uri'],
+ 'data': data,
+ 'stream': self.get_stream_from_atsc(),
+ 'atsc': None}
else:
- count = 1
- while True:
- self.video.data = self.get_uri_data(uri_dt[0])
- break
+ if IS_VOD:
+ count = self.config['stream']['vod_retries']
+ else:
+ count = 1
+ while count > 0:
+ self.video.data = self.get_uri_data(data['uri'], HTTP_RETRIES)
+ if self.video.data:
+ break
+
+ # TBD WHAT TO DO WITH THIS?
+ if count > 1:
+ out_queue_put({'uri': 'extend',
+ 'data': data,
+ 'stream': None,
+ 'atsc': None})
+ count -= 1
if uri_dt not in PLAY_LIST.keys():
+ self.logger.debug('{} uri_dt not in PLAY_LIST keys {}'.format(os.getpid(), uri_dt))
return
if self.video.data is None:
PLAY_LIST[uri_dt]['played'] = True
- OUT_QUEUE.put({'uri': uri_dt[0],
- 'data': data,
- 'stream': None,
- 'atsc': None
- })
- return
+ return {'uri': data['uri'],
+ 'data': data,
+ 'stream': None,
+ 'atsc': None
+ }
if not self.decrypt_stream(data):
# terminate if stream is not decryptable
- OUT_QUEUE.put({'uri': 'terminate',
- 'data': data,
- 'stream': None,
- 'atsc': None})
TERMINATE_REQUESTED = True
- self.pts_resync.terminate()
- self.clear_queues()
+ M3U8Queue.pts_resync.terminate()
+ M3U8Queue.pts_resync = None
+ clear_queues()
PLAY_LIST[uri_dt]['played'] = True
- time.sleep(0.01)
- return
+ return {'uri': 'terminate',
+ 'data': data,
+ 'stream': None,
+ 'atsc': None}
if not self.is_pts_valid():
PLAY_LIST[uri_dt]['played'] = True
- OUT_QUEUE.put({'uri': uri_dt[0],
- 'data': data,
- 'stream': None,
- 'atsc': None
- })
- return
-
- if self.first_segment:
- #print('writing out FIRST segment')
- self.first_segment = False
- #print('writing out FIRST segment')
-
- self.pts_resync.resequence_pts(self.video)
- if self.video.data is None:
- OUT_QUEUE.put({'uri': uri_dt[0],
- 'data': data,
- 'stream': self.video.data,
- 'atsc': None})
- PLAY_LIST[uri_dt]['played'] = True
- time.sleep(0.01)
- return
+ return {'uri': data['uri'],
+ 'data': data,
+ 'stream': None,
+ 'atsc': None
+ }
+
atsc_default_msg = self.atsc_processing()
- OUT_QUEUE.put({'uri': uri_dt[0],
- 'data': data,
- 'stream': self.video.data,
- 'atsc': atsc_default_msg
- })
PLAY_LIST[uri_dt]['played'] = True
- time.sleep(0.01)
+ if self.uid_counter > UID_PROCESSED+1:
+ out_queue_put({'uri': 'extend',
+ 'data': data,
+ 'stream': None,
+ 'atsc': None})
+ return {'uri': data['uri'],
+ 'data': data,
+ 'stream': self.video.data,
+ 'atsc': atsc_default_msg
+ }
- def is_pts_valid(self):
- if self.pts_validation is None:
- return True
- before = len(self.video.data)
- results = self.pts_validation.check_pts(self.video)
- if results['byteoffset'] != 0:
- return False
- if results['refresh_stream']:
- return False
- if results['reread_buffer']:
- return False
- return True
- def get_stream_from_atsc(self):
- if self.atsc is not None:
- return self.atsc_msg.format_video_packets(self.atsc)
+class M3U8Queue(Thread):
+ """
+ This runs as an independent process (one per stream) to get and process the
+ data stream as fast as possible and return it to the tuner web server for
+ output to the client.
+ """
+ is_stuck = None
+ http_session = httpx.Client(http2=True, verify=False, follow_redirects=True)
+ http_header = None
+ key_list = {}
+ config_section = None
+ channel_dict = None
+ pts_resync = None
+ atsc = None
+ atsc_msg = None
+ initialized_psi = False
+
+
+ def __init__(self, _config, _channel_dict):
+ Thread.__init__(self)
+ self.video = Video(_config)
+ self.q_action = None
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
+ self.config = _config
+ self.namespace = _channel_dict['namespace'].lower()
+ M3U8Queue.config_section = utils.instance_config_section(_channel_dict['namespace'], _channel_dict['instance'])
+ M3U8Queue.channel_dict = _channel_dict
+ M3U8Queue.atsc_msg = ATSCMsg()
+ self.channel_dict = _channel_dict
+ M3U8Queue.atsc = _channel_dict['atsc']
+ if _channel_dict['json'].get('Header') is None:
+ M3U8Queue.http_header = {'User-agent': utils.DEFAULT_USER_AGENT}
else:
- self.logger.info(''.join([
- 'No ATSC msg available during filtered content, ',
- 'recommend running this channel again to catch the ATSC msg.']))
- return self.atsc_msg.format_video_packets()
+ M3U8Queue.http_header = _channel_dict['json']['Header']
+ if _channel_dict['json'].get('use_date_on_m3u8_key') is None:
+ self.use_date_on_key = True
+ else:
+ self.use_date_on_key = _channel_dict['json']['use_date_on_m3u8_key']
- def clear_queues(self):
- global STREAM_QUEUE
+ M3U8Queue.pts_resync = PTSResync(_config, self.config_section, _channel_dict['uid'])
+ self.start()
+
+
+ def run(self):
global OUT_QUEUE
- global IN_QUEUE
- # closing a multiporcessing queue with 'close' without emptying it will prevent a process dependant on that queue
- # from terminating and fulfilling a 'join' if there was an entry in the queue
- # so we need to proactivley clear all queue entries instead of closing the queues
- clear_q(STREAM_QUEUE)
- clear_q(OUT_QUEUE)
- clear_q(IN_QUEUE)
+ global STREAM_QUEUE
+ global TERMINATE_REQUESTED
+ global UID_COUNTER
+ global UID_PROCESSED
+ global PARALLEL_DOWNLOADS
+ global PROCESSED_URLS
+ global IS_VOD
+ try:
+ while not TERMINATE_REQUESTED:
+ queue_item = STREAM_QUEUE.get()
+ self.q_action = queue_item['uri_dt']
+ if queue_item['uri_dt'] == 'terminate':
+ self.logger.debug('Received terminate from internalproxy {}'.format(os.getpid()))
+ TERMINATE_REQUESTED = True
+
+ break
+ elif queue_item['uri_dt'] == 'status':
+ out_queue_put({'uri': 'running',
+ 'data': None,
+ 'stream': None,
+ 'atsc': None})
+ continue
+ elif queue_item['uri_dt'] == 'check_processed_list':
+ self.logger.debug('#### Received check_processed_list request {} Received: {} Processed: {} Processed_Queue: {} Incoming_Queue: {}'
+ .format(os.getpid(), UID_COUNTER, UID_PROCESSED, len(PROCESSED_URLS), STREAM_QUEUE.qsize()))
+ self.check_processed_list()
+ continue
+
+ self.logger.debug('**** Running check_processed_list {} Received: {} Processed: {} Processed_Queue: {} Incoming_Queue: {}'
+ .format(os.getpid(), UID_COUNTER, UID_PROCESSED, len(PROCESSED_URLS), STREAM_QUEUE.qsize()))
+ self.check_processed_list()
+ while UID_COUNTER - UID_PROCESSED - len(PROCESSED_URLS) > PARALLEL_DOWNLOADS+1:
+ self.logger.debug('Slowed Processing: {} Received: {} Processed: {} Processed_Queue: {} Incoming_Queue: {}'
+ .format(os.getpid(), UID_COUNTER, UID_PROCESSED, len(PROCESSED_URLS), STREAM_QUEUE.qsize()))
+ time.sleep(.5)
+ self.check_processed_list()
+ if TERMINATE_REQUESTED:
+ break
+ self.process_queue = M3U8GetUriData(queue_item, UID_COUNTER, self.config)
+ if IS_VOD:
+ time.sleep(0.1)
+ else:
+ time.sleep(1.0)
+ UID_COUNTER += 1
+ except (KeyboardInterrupt, EOFError) as ex:
+ TERMINATE_REQUESTED = True
+ clear_queues()
+ if self.pts_resync is not None:
+ self.pts_resync.terminate()
+ self.pts_resync = None
+ time.sleep(0.01)
+ sys.exit()
+ except Exception as ex:
+ TERMINATE_REQUESTED = True
+ STREAM_QUEUE.put({'uri_dt': 'terminate'})
+ IN_QUEUE.put({'uri': 'terminate'})
+ if self.pts_resync is not None:
+ self.pts_resync.terminate()
+ self.pts_resync = None
+ clear_queues()
+ time.sleep(0.01)
+ self.logger.exception('{}'.format(
+ 'UNEXPECTED EXCEPTION M3U8Queue='))
+ sys.exit()
+ # we are terminating so cleanup ffmpeg
+ if self.pts_resync is not None:
+ self.pts_resync.terminate()
+ self.pts_resync = None
+ time.sleep(0.01)
+ out_queue_put({'uri': 'terminate',
+ 'data': None,
+ 'stream': None,
+ 'atsc': None})
+ PROCESSED_URLS.clear()
+ time.sleep(0.01)
+ TERMINATE_REQUESTED = True
+ self.logger.debug('M3U8Queue terminated {}'.format(os.getpid()))
+
+
+ def check_processed_list(self):
+ global UID_PROCESSED
+ global PROCESSED_URLS
+ if len(PROCESSED_URLS) > 0:
+ first_key = sorted(PROCESSED_URLS.keys())[0]
+ if first_key == UID_PROCESSED:
+ self.video.data = PROCESSED_URLS[first_key]['stream']
+ M3U8Queue.pts_resync.resequence_pts(self.video)
+ if self.video.data is None and self.q_action != 'check_processed_list':
+ PLAY_LIST[self.q_action]['played'] = True
+ PROCESSED_URLS[first_key]['stream'] = self.video.data
+
+ out_queue_put(PROCESSED_URLS[first_key])
+ del PROCESSED_URLS[first_key]
+ UID_PROCESSED += 1
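# A simplified sketch of the ordering scheme used above: parallel workers
# store finished segments in a dict keyed by a monotonically increasing
# counter, and results are emitted strictly in counter order. Names are
# illustrative stand-ins for UID_COUNTER/UID_PROCESSED and PROCESSED_URLS.
import threading

class OrderedResults:
    def __init__(self):
        self.lock = threading.Lock()
        self.results = {}      # uid -> downloaded segment data
        self.next_uid = 1
    def add(self, uid, data):
        with self.lock:
            self.results[uid] = data
    def drain(self, emit):
        # emit only while the next expected uid has been downloaded
        with self.lock:
            while self.next_uid in self.results:
                emit(self.results.pop(self.next_uid))
                self.next_uid += 1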
class M3U8Process(Thread):
@@ -298,12 +400,13 @@ class M3U8Process(Thread):
Includes managing the processing queue and providing
the M3U8Queue with what to process.
"""
+
def __init__(self, _config, _plugins, _channel_dict):
+ global HTTP_TIMEOUT
+ global HTTP_RETRIES
+ global PARALLEL_DOWNLOADS
Thread.__init__(self)
- self.logger = logging.getLogger(__name__+str(threading.get_ident()))
- global IN_QUEUE
- global OUT_QUEUE
- global TERMINATE_REQUESTED
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
self.config = _config
self.channel_dict = _channel_dict
if _channel_dict['json'].get('Header') is None:
@@ -314,102 +417,112 @@ def __init__(self, _config, _plugins, _channel_dict):
self.use_date_on_key = True
else:
self.use_date_on_key = _channel_dict['json']['use_date_on_m3u8_key']
-
+
+ self.ch_uid = _channel_dict['uid']
self.is_starting = True
self.last_refresh = time.time()
self.plugins = _plugins
+ HTTP_TIMEOUT = self.config[_channel_dict['namespace'].lower()]['stream-g_http_timeout']
+ HTTP_RETRIES = self.config[_channel_dict['namespace'].lower()]['stream-g_http_retries']
+ PARALLEL_DOWNLOADS = self.config[_channel_dict['namespace'].lower()]['stream-g_concurrent_downloads']
self.config_section = utils.instance_config_section(_channel_dict['namespace'], _channel_dict['instance'])
- i = 5
- while i > 0 and IN_QUEUE.empty():
- i -= 1
- time.sleep(0.02)
- if IN_QUEUE.empty():
- self.logger.warning('1Corrupted queue, restarting process {} {}'.format(_channel_dict['uid'], os.getpid()))
- TERMINATE_REQUESTED = True
- time.sleep(0.01)
- return
- time.sleep(0.01)
- try:
- queue_item = IN_QUEUE.get(False, 1)
- except Empty:
- self.logger.debug('2Corrupted queue, restarting process {} {}'.format(_channel_dict['uid'], os.getpid()))
- TERMINATE_REQUESTED = True
- time.sleep(0.01)
- return
+ self.use_full_duplicate_checking = self.config[self.config_section]['player-enable_full_duplicate_checking']
- self.stream_uri = self.get_stream_uri()
- if not self.stream_uri:
- self.logger.warning('Unknown Channel {}'.format(_channel_dict['uid']))
- OUT_QUEUE.put({'uri': 'terminate',
- 'data': None,
- 'stream': None,
- 'atsc': None})
- TERMINATE_REQUESTED = True
- time.sleep(0.01)
- return
- else:
- OUT_QUEUE.put({'uri': 'running',
- 'data': None,
- 'stream': None,
- 'atsc': None})
- time.sleep(0.01)
self.is_running = True
self.duration = 6
self.m3u8_q = M3U8Queue(_config, _channel_dict)
+ time.sleep(0.1)
+ self.file_filter = None
self.start()
def run(self):
+ global IS_VOD
+ global IN_QUEUE
+ global OUT_QUEUE
global TERMINATE_REQUESTED
+
+ self.stream_uri = self.get_stream_uri()
+ if not self.stream_uri:
+ self.logger.warning('Unknown Channel {}'.format(self.ch_uid))
+ out_queue_put({'uri': 'terminate',
+ 'data': None,
+ 'stream': None,
+ 'atsc': None})
+ time.sleep(0.01)
+ self.terminate()
+ self.m3u8_q.join()
+ TERMINATE_REQUESTED = True
+ self.logger.debug('1 M3U8Process terminated {}'.format(os.getpid()))
+ return
+ else:
+ out_queue_put({'uri': 'running',
+ 'data': None,
+ 'stream': None,
+ 'atsc': None})
+ time.sleep(0.01)
+
try:
self.logger.debug('M3U8: {} {}'.format(self.stream_uri, os.getpid()))
- self.file_filter = None
if self.config[self.config_section]['player-enable_url_filter']:
stream_filter = self.config[self.config_section]['player-url_filter']
if stream_filter is not None:
self.file_filter = re.compile(stream_filter)
else:
self.logger.warning('[{}]][player-enable_url_filter]'
- ' enabled but [player-url_filter] not set'
- .format(self.config_section))
+ ' enabled but [player-url_filter] not set'
+ .format(self.config_section))
while not TERMINATE_REQUESTED:
added = 0
removed = 0
self.logger.debug('Reloading m3u8 stream queue {}'.format(os.getpid()))
- playlist = self.get_m3u8_data(self.stream_uri)
+ playlist = self.get_m3u8_data(self.stream_uri, 2)
if playlist is None:
- self.logger.debug('Playlist is none, terminating stream')
- break
+ self.logger.debug('M3U Playlist is None, retrying')
+ self.sleep(self.duration+0.5)
+ continue
+ if playlist.playlist_type == 'vod' or self.config[self.config_section]['player-play_all_segments']:
+ if not IS_VOD:
+ self.logger.debug('Setting stream type to VOD {}'.format(os.getpid()))
+ IS_VOD = True
+ elif IS_VOD:
+ self.logger.debug('Setting stream type to non-VOD {}'.format(os.getpid()))
+ IS_VOD = False
removed += self.remove_from_stream_queue(playlist)
added += self.add_to_stream_queue(playlist)
if self.plugins.plugins[self.channel_dict['namespace']].plugin_obj \
.is_time_to_refresh_ext(self.last_refresh, self.channel_dict['instance']):
self.stream_uri = self.get_stream_uri()
- self.logger.debug('M3U8: {} {}' \
- .format(self.stream_uri, os.getpid()))
+ self.logger.debug('M3U8: {} {}'
+ .format(self.stream_uri, os.getpid()))
self.last_refresh = time.time()
- self.sleep(0.3)
- elif added == 0 and self.duration > 0:
- self.sleep(0.8)
- else:
- self.sleep(0.8)
+ time.sleep(0.3)
+ elif self.duration > 0.5:
+ self.sleep(self.duration+0.5)
except Exception as ex:
- self.logger.exception('{}{}'.format(
- 'UNEXPECTED EXCEPTION M3U8Process=', ex))
+ self.logger.exception('{}'.format(
+ 'UNEXPECTED EXCEPTION M3U8Process='))
self.terminate()
# wait for m3u8_q to finish so it can cleanup ffmpeg
self.m3u8_q.join()
+ TERMINATE_REQUESTED = True
+ self.logger.debug('M3U8Process terminated {}'.format(os.getpid()))
def sleep(self, _time):
- for i in range(round(_time*10)):
+ global TERMINATE_REQUESTED
+ start_ttw = time.time()
+ for i in range(round(_time * 5)):
if not TERMINATE_REQUESTED:
- time.sleep(self.duration * 0.1)
+ time.sleep(self.duration * 0.2)
+ delta_ttw = time.time() - start_ttw
+ if delta_ttw > _time:
+ break
def terminate(self):
global STREAM_QUEUE
try:
STREAM_QUEUE.put({'uri_dt': 'terminate'})
time.sleep(0.01)
- except ValueError:
+ except ValueError as ex:
pass
def get_stream_uri(self):
@@ -417,10 +530,9 @@ def get_stream_uri(self):
.plugin_obj.get_channel_uri_ext(self.channel_dict['uid'], self.channel_dict['instance'])
@handle_url_except()
- @handle_json_except
- def get_m3u8_data(self, _uri):
+ def get_m3u8_data(self, _uri, _retries):
# it sticks here. Need to find a work around for the socket.timeout per process
- return m3u8.load(_uri, headers=self.header)
+ return m3u8.load(_uri, headers=self.header, http_session=M3U8Queue.http_session)
def segment_date_time(self, _segment):
if _segment:
@@ -429,18 +541,17 @@ def segment_date_time(self, _segment):
return None
return _segment.current_program_date_time.replace(microsecond=0)
-
def add_to_stream_queue(self, _playlist):
global PLAY_LIST
global STREAM_QUEUE
global TERMINATE_REQUESTED
total_added = 0
if _playlist.keys != [None]:
- keys = [{"uri": key.absolute_uri, "method": key.method, "iv": key.iv} \
- for key in _playlist.keys if key]
+ keys = [{"uri": key.absolute_uri, "method": key.method, "iv": key.iv}
+ for key in _playlist.keys if key]
if len(keys) != len(_playlist.segments):
- keys = [{"uri": keys[0]['uri'], "method": keys[0]['method'], "iv": keys[0]['iv']} \
- for i in range(0, len(_playlist.segments))]
+ keys = [{"uri": keys[0]['uri'], "method": keys[0]['method'], "iv": keys[0]['iv']}
+ for i in range(0, len(_playlist.segments))]
else:
keys = [None for i in range(0, len(_playlist.segments))]
num_segments = len(_playlist.segments)
@@ -450,10 +561,10 @@ def add_to_stream_queue(self, _playlist):
seg_to_play = num_segments
elif seg_to_play > num_segments:
seg_to_play = num_segments
-
- skipped_seg = num_segments-seg_to_play
- #total_added += self.add_segment(_playlist.segments[0], keys[0])
-
+
+ skipped_seg = num_segments - seg_to_play
+ # total_added += self.add_segment(_playlist.segments[0], keys[0])
+
for m3u8_segment, key in zip(_playlist.segments[0:skipped_seg], keys[0:skipped_seg]):
total_added += self.add_segment(m3u8_segment, key, _default_played=True)
for i in range(skipped_seg, num_segments):
@@ -468,7 +579,10 @@ def add_to_stream_queue(self, _playlist):
last_key = list(PLAY_LIST.keys())[-1]
i = 0
for index, segment in enumerate(reversed(_playlist.segments)):
- uri = segment.absolute_uri
+ if self.use_full_duplicate_checking:
+ uri = segment.absolute_uri
+ else:
+ uri = segment.get_path_from_uri()
dt = self.segment_date_time(segment)
if self.use_date_on_key:
uri_dt = (uri, dt)
@@ -476,13 +590,8 @@ def add_to_stream_queue(self, _playlist):
uri_dt = (uri, 0)
if last_key == uri_dt:
i = num_segments - index
- remaining_segs = num_segments - i
- for m3u8_segment, key in zip(_playlist \
- .segments[i:num_segments], keys[i:num_segments]):
- remaining_segs -= 1
- if remaining_segs < 1:
- # delay is this is the last segment to add from the provider
- time.sleep(2)
+ for m3u8_segment, key in zip(
+ _playlist.segments[i:num_segments], keys[i:num_segments]):
added = self.add_segment(m3u8_segment, key)
total_added += added
if added == 0 or TERMINATE_REQUESTED:
@@ -493,7 +602,11 @@ def add_to_stream_queue(self, _playlist):
def add_segment(self, _segment, _key, _default_played=False):
global TERMINATE_REQUESTED
self.set_cue_status(_segment)
- uri = _segment.absolute_uri
+ if self.use_full_duplicate_checking:
+ uri = _segment.absolute_uri
+ else:
+ uri = _segment.get_path_from_uri()
+ uri_full = _segment.absolute_uri
dt = self.segment_date_time(_segment)
if self.use_date_on_key:
uri_dt = (uri, dt)
@@ -504,11 +617,12 @@ def add_segment(self, _segment, _key, _default_played=False):
filtered = False
cue_status = self.set_cue_status(_segment)
if self.file_filter is not None:
- m = self.file_filter.match(urllib.parse.unquote(uri))
+ m = self.file_filter.match(urllib.parse.unquote(uri_full))
if m:
filtered = True
PLAY_LIST[uri_dt] = {
'uid': self.channel_dict['uid'],
+ 'uri': uri_full,
'played': played,
'filtered': filtered,
'duration': _segment.duration,
@@ -516,20 +630,23 @@ def add_segment(self, _segment, _key, _default_played=False):
'key': _key
}
if _segment.duration > 0:
- self.duration = _segment.duration
+ # smooth with a weighted moving average (3 parts old, 1 part new)
+ self.duration = (self.duration*3 + _segment.duration)/4
try:
if not played and not TERMINATE_REQUESTED:
- self.logger.debug('Added {} to play queue {}' \
- .format(uri, os.getpid()))
+ self.logger.debug('Added {} to play queue {}'
+ .format(uri_full, os.getpid()))
STREAM_QUEUE.put({'uri_dt': uri_dt,
- 'data': PLAY_LIST[uri_dt]})
+ 'data': PLAY_LIST[uri_dt]})
return 1
if _default_played:
- self.logger.debug('Skipping {} {} {}' \
- .format(uri, os.getpid(), _segment.program_date_time))
- except ValueError:
+ self.logger.debug('Skipping {} {} {}'
+ .format(uri_full, os.getpid(), _segment.program_date_time))
+ except ValueError as ex:
# queue is closed, terminating
pass
+ else:
+ self.logger.warning('DUPLICATE FOUND {}'.format(uri_dt))
return 0
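# A worked illustration of the duration smoothing above: a weighted moving
# average with a 3:1 old-to-new ratio, so one unusually long or short segment
# only nudges the estimate. The numbers below are made up for the example.
def smooth(prev_duration, new_duration):
    return (prev_duration * 3 + new_duration) / 4

# smooth(6.0, 10.0) -> 7.0; feeding 10.0 again -> 7.75, converging toward 10.0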
@@ -545,27 +662,33 @@ def remove_from_stream_queue(self, _playlist):
disc_index = total_index - i
break
for segment in _playlist.segments[disc_index:total_index]:
- s_uri = segment.absolute_uri
+ if self.use_full_duplicate_checking:
+ s_uri = segment.absolute_uri
+ else:
+ s_uri = segment.get_path_from_uri()
s_dt = self.segment_date_time(segment)
if self.use_date_on_key:
s_key = (s_uri, s_dt)
else:
s_key = (s_uri, 0)
-
+
if s_key in PLAY_LIST.keys():
continue
else:
try:
i = url_list.index(s_uri)
PLAY_LIST = utils.rename_dict_key(list(PLAY_LIST.keys())[i], s_key, PLAY_LIST)
- except ValueError:
+ except ValueError as ex:
# not in list
pass
-
+
for segment_key in list(PLAY_LIST.keys()):
is_found = False
for segment_m3u8 in _playlist.segments:
- s_uri = segment_m3u8.absolute_uri
+ if self.use_full_duplicate_checking:
+ s_uri = segment_m3u8.absolute_uri
+ else:
+ s_uri = segment_m3u8.get_path_from_uri()
s_dt = self.segment_date_time(segment_m3u8)
if self.use_date_on_key:
s_key = (s_uri, s_dt)
@@ -578,8 +701,8 @@ def remove_from_stream_queue(self, _playlist):
if PLAY_LIST[segment_key]['played']:
del PLAY_LIST[segment_key]
total_removed += 1
- self.logger.debug('Removed {} from play queue {}' \
- .format(segment_key[0], os.getpid()))
+ self.logger.debug('Removed {} from play queue {}'
+ .format(segment_key[0], os.getpid()))
continue
else:
break
@@ -593,18 +716,36 @@ def set_cue_status(self, _segment):
else:
return None
+
def clear_q(q):
try:
while True:
q.get_nowait()
- except (Empty, ValueError, EOFError):
+ except (Empty, ValueError, EOFError) as ex:
pass
+
def clear_queues():
+ # closing a multiprocessing queue with 'close' without emptying
+ # it will prevent a process dependent on that queue
+ # from terminating and fulfilling a 'join' if there was an entry in the queue
+ # so we need to proactively clear all queue entries instead of closing the queues
+ global STREAM_QUEUE
+ global OUT_QUEUE
+ global IN_QUEUE
clear_q(OUT_QUEUE)
clear_q(STREAM_QUEUE)
clear_q(IN_QUEUE)
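# A minimal sketch of why these queues are drained instead of closed: a child
# process that still has items buffered on a multiprocessing.Queue cannot be
# joined until those items are flushed, so the parent empties the queue first.
# The worker and queue below are illustrative only.
import queue
from multiprocessing import Process, Queue

def _worker(q):
    for i in range(100):
        q.put(i)

if __name__ == '__main__':
    q = Queue()
    p = Process(target=_worker, args=(q,))
    p.start()
    try:
        # drain so the child's feeder thread can flush and the join completes
        while True:
            q.get(timeout=1)
    except queue.Empty:
        pass
    p.join()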
+def out_queue_put(data_dict):
+ global OUT_QUEUE
+ logger = logging.getLogger(__name__)
+ for t in OUT_QUEUE_LIST:
+ data_dict['thread_id'] = t
+ OUT_QUEUE.put(data_dict)
+ time.sleep(0.01)
+
+
def start(_config, _plugins, _m3u8_queue, _data_queue, _channel_dict, extra=None):
"""
All items in this process must handle a socket timeout of 5.0
@@ -613,6 +754,7 @@ def start(_config, _plugins, _m3u8_queue, _data_queue, _channel_dict, extra=None
global STREAM_QUEUE
global OUT_QUEUE
global TERMINATE_REQUESTED
+ logger = None
try:
utils.logging_setup(_plugins.config_obj.data)
logger = logging.getLogger(__name__)
@@ -625,32 +767,67 @@ def start(_config, _plugins, _m3u8_queue, _data_queue, _channel_dict, extra=None
try:
q_item = IN_QUEUE.get()
if q_item['uri'] == 'terminate':
- TERMINATE_REQUESTED = True
+ OUT_QUEUE_LIST.remove(q_item['thread_id'])
+ if not len(OUT_QUEUE_LIST):
+ TERMINATE_REQUESTED = True
+ clear_queues()
+ else:
+ clear_q(OUT_QUEUE)
+ time.sleep(0.01)
+
# clear queues in case queues are full (eg VOD) with queue.put stmts blocked
# p_m3u8 & m3u8_q then see TERMINATE_REQUESTED and exit including stopping ffmpeg
- clear_queues()
+ OUT_QUEUE.put({
+ 'thread_id': q_item['thread_id'],
+ 'uri': 'terminate',
+ 'data': None,
+ 'stream': None,
+ 'atsc': None})
+ time.sleep(0.01)
+ if not len(OUT_QUEUE_LIST):
+ p_m3u8.join()
+ elif q_item['uri'] == 'status':
+ if q_item['thread_id'] not in OUT_QUEUE_LIST:
+ OUT_QUEUE_LIST.append(q_item['thread_id'])
+ logger.debug('Adding client thread {} to m3u8 queue list'.format(q_item['thread_id']))
+ STREAM_QUEUE.put({'uri_dt': 'status'})
+ logger.debug('Sending Status request to stream queue {}'.format(os.getpid()))
+ time.sleep(0.01)
+ elif q_item['uri'] == 'restart_http':
+ logger.debug('HTTP Session restarted {}'.format(os.getpid()))
+ temp_session = M3U8Queue.http_session
+ M3U8Queue.http_session = httpx.Client(http2=True, verify=False, follow_redirects=True)
+ temp_session.close()
+ temp_session = None
time.sleep(0.01)
- p_m3u8.join()
- # finally make sure all queues are clear so that this process can be joined
- clear_queues()
else:
logger.debug('UNKNOWN m3u8 queue request {}'.format(q_item['uri']))
- except (KeyboardInterrupt, EOFError, TypeError, ValueError):
+ except (KeyboardInterrupt, EOFError, TypeError, ValueError) as ex:
TERMINATE_REQUESTED = True
try:
+ clear_queues()
+ out_queue_put({
+ 'uri': 'terminate',
+ 'data': None,
+ 'stream': None,
+ 'atsc': None})
+ time.sleep(0.01)
STREAM_QUEUE.put({'uri_dt': 'terminate'})
- except (EOFError, TypeError, ValueError):
+ time.sleep(0.1)
+ except (EOFError, TypeError, ValueError) as ex:
pass
- time.sleep(0.01)
+ logger.debug('4 m3u8_queue process terminated {}'.format(os.getpid()))
sys.exit()
+ clear_queues()
+ logger.debug('1 m3u8_queue process terminated {}'.format(os.getpid()))
sys.exit()
-
-
except Exception as ex:
- logger.exception('{}{}'.format(
- 'UNEXPECTED EXCEPTION startup=', str(ex)))
+ logger.exception('{}'.format(
+ 'UNEXPECTED EXCEPTION startup'))
TERMINATE_REQUESTED = True
+ logger.debug('3 m3u8_queue process terminated {}'.format(os.getpid()))
sys.exit()
- except KeyboardInterrupt:
+ except KeyboardInterrupt as ex:
TERMINATE_REQUESTED = True
+ logger.debug('2 m3u8_queue process terminated {}'.format(os.getpid()))
sys.exit()
diff --git a/lib/streams/pts_resync.py b/lib/streams/pts_resync.py
index 8a04cdd..6a8375b 100644
--- a/lib/streams/pts_resync.py
+++ b/lib/streams/pts_resync.py
@@ -16,13 +16,13 @@
substantial portions of the Software.
"""
+import copy
import logging
import os
import subprocess
import time
from threading import Thread
-import lib.common.utils as utils
from .stream_queue import StreamQueue
@@ -32,37 +32,71 @@ def __init__(self, _config, _config_section, _id):
self.logger = logging.getLogger(__name__)
self.config = _config
self.config_section = _config_section
+ self.empty_packet_count = 0
+ self.is_restart_requested = False
+ self.is_looping = False
self.id = _id
- if self.config[self.config_section]['player-pts_resync_type'] == 'ffmpeg':
- self.ffmpeg_proc = self.open_ffmpeg_proc()
- else:
- self.ffmpeg_proc = None
- self.stream_queue = StreamQueue(188, self.ffmpeg_proc, _id)
+ self.ffmpeg_proc = None
if self.config[self.config_section]['player-enable_pts_resync']:
if self.config[self.config_section]['player-pts_resync_type'] == 'ffmpeg':
- self.logger.info('PTS Resync running ffmpeg')
+ self.ffmpeg_proc = self.open_ffmpeg_proc()
+ self.stream_queue = StreamQueue(188, self.ffmpeg_proc, _id)
+ if self.config[self.config_section]['player-pts_resync_type'] == 'ffmpeg':
+ self.logger.debug('PTS Resync running ffmpeg')
def video_to_stdin(self, _video):
- i = 2
+ video_copy = copy.copy(_video.data)
+ i = 3
+ self.is_looping = False
while i > 0:
i -= 1
try:
- self.ffmpeg_proc.stdin.write(_video.data)
+ if video_copy:
+ self.ffmpeg_proc.stdin.write(video_copy)
break
except (BrokenPipeError, TypeError) as ex:
# This occurs when the process does not start correctly
- self.stream_queue.terminate()
+ self.logger.debug('BROKENPIPE {} {}'.format(self.ffmpeg_proc.pid, str(ex)))
+ if not self.is_restart_requested:
+ errcode = self.restart_ffmpeg()
+ self.is_looping = True
+ else:
+ time.sleep(0.7)
+
+ except ValueError:
+ # during termination, writing to a closed port, ignore
+ break
+ self.is_looping = False
+ video_copy = None
+
+ def restart_ffmpeg(self):
+ self.logger.debug('Restarting PTSResync ffmpeg because ffmpeg has stopped processing {}'.format(self.ffmpeg_proc.pid))
+ errcode = 0
+ self.empty_packet_count = 0
+ self.stream_queue.terminate()
+ while True:
+ try:
self.ffmpeg_proc.terminate()
- try:
- self.ffmpeg_proc.communicate()
- except ValueError:
- pass
- while self.ffmpeg_proc.poll() is None:
- time.sleep(0.1)
- self.ffmpeg_proc = self.open_ffmpeg_proc()
+ #self.ffmpeg_proc.wait(timeout=1.5)
+ break
+ except ValueError:
+ pass
+ except subprocess.TimeoutExpired:
time.sleep(0.01)
- self.logger.info('Restarting PTSResync ffmpeg due to corrupted process start {}'.format(os.getpid()))
- self.stream_queue = StreamQueue(188, self.ffmpeg_proc, self.id)
+ try:
+ sout, serr = self.ffmpeg_proc.communicate()
+ errcode = self.ffmpeg_proc.returncode
+ # an errcode of 1 means ffmpeg could not run
+ if errcode == 1:
+ self.logger.debug('FFMPEG ERRCODE: {}, pts_resync was unable to process the segment in ffmpeg'.format(self.ffmpeg_proc.returncode))
+ except ValueError:
+ pass
+ while self.ffmpeg_proc.poll() is None:
+ time.sleep(0.1)
+ self.ffmpeg_proc = self.open_ffmpeg_proc()
+ self.stream_queue = StreamQueue(188, self.ffmpeg_proc, self.id)
+ time.sleep(0.5)
+ return errcode
def resequence_pts(self, _video):
@@ -71,9 +105,22 @@ def resequence_pts(self, _video):
if _video.data is None:
return
if self.config[self.config_section]['player-pts_resync_type'] == 'ffmpeg':
+ while self.is_looping:
+ time.sleep(0.5)
t_in = Thread(target=self.video_to_stdin, args=(_video,))
t_in.start()
+ time.sleep(0.1)
new_video = self.stream_queue.read()
+ if not new_video:
+ self.empty_packet_count += 1
+ if self.empty_packet_count > 2:
+ if not self.is_restart_requested:
+ self.is_restart_requested = True
+ self.restart_ffmpeg()
+ self.is_restart_requested = False
+ else:
+ self.empty_packet_count = 0
+
_video.data = new_video
elif self.config[self.config_section]['player-pts_resync_type'] == 'internal':
self.logger.warning('player-pts_resync_type internal NOT IMPLEMENTED')
@@ -88,7 +135,10 @@ def terminate(self):
self.ffmpeg_proc.stdout.flush()
self.ffmpeg_proc.terminate()
try:
- self.ffmpeg_proc.communicate()
+ sout, serr = self.ffmpeg_proc.communicate()
+ errcode = self.ffmpeg_proc.returncode
+ if errcode == 1:
+ self.logger.debug('FFMPEG errcode on exit: {}, pts_resync was unable to process the segment in ffmpeg'.format(self.ffmpeg_proc.returncode))
except ValueError:
pass
@@ -99,7 +149,8 @@ def open_ffmpeg_proc(self):
visible by looking at the video packets for a 6 second window being 171
instead of 180. Following the first read, the packets increase to 180.
"""
- ffmpeg_command = [self.config['paths']['ffmpeg_path'],
+ ffmpeg_command = [
+ self.config['paths']['ffmpeg_path'],
'-nostats',
'-hide_banner',
'-loglevel', 'fatal',
@@ -109,9 +160,9 @@ def open_ffmpeg_proc(self):
'-f', 'mpegts',
'-c', 'copy',
'pipe:1']
- ffmpeg_process = subprocess.Popen(ffmpeg_command,
- stdin=subprocess.PIPE,
+ ffmpeg_process = subprocess.Popen(
+ ffmpeg_command,
+ stdin=subprocess.PIPE,
stdout=subprocess.PIPE,
bufsize=-1)
return ffmpeg_process
-
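For orientation, open_ffmpeg_proc above launches ffmpeg as a copy-only remuxer fed through pipes. A standalone sketch under two assumptions: the binary is reachable as plain 'ffmpeg' rather than config['paths']['ffmpeg_path'], and the input options hidden by the hunk are '-i pipe:0':

    import subprocess

    ffmpeg_command = [
        'ffmpeg',                  # assumption: binary on PATH
        '-nostats', '-hide_banner',
        '-loglevel', 'fatal',
        '-i', 'pipe:0',            # assumption: MPEG-TS segments arrive on stdin
        '-f', 'mpegts',
        '-c', 'copy',              # remux only, no re-encode
        'pipe:1']
    ffmpeg_proc = subprocess.Popen(
        ffmpeg_command,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        bufsize=-1)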
diff --git a/lib/streams/pts_validation.py b/lib/streams/pts_validation.py
index e56b351..b9c966f 100644
--- a/lib/streams/pts_validation.py
+++ b/lib/streams/pts_validation.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -19,11 +19,10 @@
import json
import logging
import subprocess
-import time
-
import lib.common.utils as utils
+
class PTSValidation:
logger = None
@@ -47,7 +46,6 @@ def __init__(self, _config, _channel_dict):
self.config_section = utils.instance_config_section(
self.channel_dict['namespace'], self.channel_dict['instance'])
-
def check_pts(self, _video):
"""
Checks the PTS in the video stream. If a bad PTS packet is found,
@@ -75,7 +73,7 @@ def check_pts(self, _video):
return {'refresh_stream': False, 'byteoffset': 0, 'reread_buffer': True}
elif pts_data['last_pts'] <= self.prev_last_pts:
self.logger.debug('Small PTS to Large PTS with entire PTS in the past. last_pts={} vs prev={}'
- .format(pts_data['last_pts'], self.prev_last_pts))
+ .format(pts_data['last_pts'], self.prev_last_pts))
return {'refresh_stream': False, 'byteoffset': 0, 'reread_buffer': True}
else:
byte_offset = self.find_bad_pkt_offset(from_front=False)
@@ -113,7 +111,7 @@ def check_pts(self, _video):
elif pts_data['first_pts'] < self.prev_last_pts:
if pts_data['last_pts'] <= self.prev_last_pts:
self.logger.debug('Entire PTS buffer in the past last_pts={} vs prev={}'.format(pts_data['last_pts'],
- self.prev_last_pts))
+ self.prev_last_pts))
return {'refresh_stream': False, 'byteoffset': 0, 'reread_buffer': True}
else:
byte_offset = self.find_past_pkt_offset(self.prev_last_pts)
@@ -163,7 +161,7 @@ def get_pts_values(self, _pts_json):
'DELTA PTS=', delta_from_prev,
'Pkts Rcvd=', len(_pts_json['packets'])))
return {'first_pts': first_pts, 'last_pts': last_pts,
- 'pts_size': pts_size, 'delta_from_prev': delta_from_prev}
+ 'pts_size': pts_size, 'delta_from_prev': delta_from_prev}
def find_bad_pkt_offset(self, from_front):
"""
@@ -212,25 +210,24 @@ def find_past_pkt_offset(self, prev_last_pts):
byte_offset = int(int(self.pts_json['packets'][i]['pos']) / 188) * 188
self.logger.debug(
'{}{} {}{} {}{}'.format('Future PTS at byte_offset=', byte_offset, 'pkt_pts=', next_pkt_pts,
- 'prev_pkt=', prev_pkt_dts))
+ 'prev_pkt=', prev_pkt_dts))
break
i += 1
return byte_offset
-
def get_probe_results(self, _video):
ffprobe_command = [self.config['paths']['ffprobe_path'],
- '-print_format', 'json',
- '-v', 'quiet', '-show_packets',
- '-select_streams', 'v:0',
- '-show_entries', 'side_data=:packet=pts,pos,duration,size',
- '-']
+ '-print_format', 'json',
+ '-v', 'quiet', '-show_packets',
+ '-select_streams', 'v:0',
+ '-show_entries', 'side_data=:packet=pts,pos,duration,size',
+ '-']
cmdpts = subprocess.Popen(ffprobe_command,
- stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ stdin=subprocess.PIPE, stdout=subprocess.PIPE)
ptsout = cmdpts.communicate(_video.data)[0]
exit_code = cmdpts.wait()
if exit_code != 0:
self.logger.warning('FFPROBE failed to execute with error code: {}'
- .format(exit_code))
+ .format(exit_code))
return None
return json.loads(ptsout)
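get_probe_results pipes the buffered segment into ffprobe and reads packet-level PTS data back as JSON. A hedged sketch of that round trip; the helper name and the plain 'ffprobe' path are illustrative, while the command arguments are the ones shown above:

    import json
    import subprocess

    def probe_pts(ts_bytes, ffprobe_path='ffprobe'):
        cmd = [ffprobe_path,
               '-print_format', 'json',
               '-v', 'quiet', '-show_packets',
               '-select_streams', 'v:0',
               '-show_entries', 'side_data=:packet=pts,pos,duration,size',
               '-']
        proc = subprocess.Popen(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
        out = proc.communicate(ts_bytes)[0]
        if proc.wait() != 0:
            return None
        packets = json.loads(out).get('packets', [])
        if not packets:
            return None
        # first/last PTS of the buffer, the values get_pts_values compares
        return packets[0]['pts'], packets[-1]['pts']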
diff --git a/lib/streams/stream.py b/lib/streams/stream.py
index 392b4a6..cdfd829 100644
--- a/lib/streams/stream.py
+++ b/lib/streams/stream.py
@@ -22,46 +22,63 @@
from lib.clients.web_handler import WebHTTPHandler
import lib.common.utils as utils
-class Stream:
+class Stream:
logger = None
def __init__(self, _plugins, _hdhr_queue):
self.plugins = _plugins
+ self.namespace = ''
+ self.instance = ''
+ self.config = self.plugins.config_obj.data
self.hdhr_queue = _hdhr_queue
if Stream.logger is None:
Stream.logger = logging.getLogger(__name__)
def put_hdhr_queue(self, _namespace, _index, _channel, _status):
- if not self.plugins.config_obj.data['hdhomerun']['disable_hdhr']:
+ if not self.config['hdhomerun']['disable_hdhr']:
self.hdhr_queue.put(
- {'namespace': _namespace, 'tuner': _index,
- 'channel': _channel, 'status': _status})
+ {'namespace': _namespace, 'tuner': _index,
+ 'channel': _channel, 'status': _status})
- def find_tuner(self, _namespace, _instance, _ch_num, _tuner):
+ def find_tuner(self, _namespace, _instance, _ch_num, _isvod):
# keep track of how many tuners we can use at a time
found = -1
scan_list = WebHTTPHandler.rmg_station_scans[_namespace]
for index, scan_status in enumerate(scan_list):
# the first idle tuner gets it
- if scan_status == 'Idle':
- WebHTTPHandler.rmg_station_scans[_namespace][index] = {'instance': _instance, 'ch': _ch_num, 'status': 'Starting'}
- self.put_hdhr_queue(_namespace, index, _ch_num, 'Stream')
+ if scan_status == 'Idle' and found == -1:
found = index
- break
+ elif isinstance(scan_status, dict):
+ if scan_status['instance'] == _instance \
+ and scan_status['ch'] == _ch_num \
+ and not _isvod \
+ and scan_status['mux'] \
+ and not scan_status['mux'].terminate_requested:
+ found = index
+ break
+ if found == -1:
+ return found
+ if WebHTTPHandler.rmg_station_scans[_namespace][found] != 'Idle':
+ self.logger.debug('Reusing tuner {} {}:{} ch:{}'.format(found, _namespace, _instance, _ch_num))
+ else:
+ self.logger.debug('Adding new tuner {} for stream {}:{} ch:{}'.format(found, _namespace, _instance, _ch_num))
+ WebHTTPHandler.rmg_station_scans[_namespace][found] = {
+ 'instance': _instance,
+ 'ch': _ch_num,
+ 'mux': None,
+ 'status': 'Starting'}
+ self.put_hdhr_queue(_namespace, found, _ch_num, 'Stream')
return found
-
-
-
def set_service_name(self, _channel_dict):
updated_chnum = utils.wrap_chnum(
- str(_channel_dict['number']), _channel_dict['namespace'],
- _channel_dict['instance'], self.plugins.config_obj.data)
+ str(_channel_dict['display_number']), _channel_dict['namespace'],
+ _channel_dict['instance'], self.config)
if self.config['epg']['epg_channel_number']:
service_name = updated_chnum + \
- ' ' + _channel_dict['display_name']
+ ' ' + _channel_dict['display_name']
else:
service_name = _channel_dict['display_name']
return service_name
@@ -70,14 +87,16 @@ def get_stream_uri(self, _channel_dict):
return self.plugins.plugins[_channel_dict['namespace']] \
.plugin_obj.get_channel_uri_ext(_channel_dict['uid'], _channel_dict['instance'])
- def gen_response(self, _namespace, _instance, _ch_num, _tuner):
+ def gen_response(self, _namespace, _instance, _ch_num, _isvod):
"""
Returns dict where the dict is consistent with
the method do_dict_response requires as an argument
A code other than 200 means do not tune
dict also include a "tuner_index" that informs caller what tuner is allocated
"""
- i = self.find_tuner(_namespace, _instance, _ch_num, _tuner)
+ self.namespace = _namespace
+ self.instance = _instance
+ i = self.find_tuner(_namespace, _instance, _ch_num, _isvod)
if i >= 0:
return {
'tuner': i,
@@ -85,10 +104,15 @@ def gen_response(self, _namespace, _instance, _ch_num, _tuner):
'headers': {'Content-type': 'video/MP2T;'},
'text': None}
else:
- self.logger.warning('All tuners already in use [{}][{}] max tuners: {}' \
+ self.logger.warning(
+ 'All tuners already in use [{}][{}] max tuners: {}'
.format(_namespace, _instance, len(WebHTTPHandler.rmg_station_scans[_namespace])))
return {
'tuner': i,
'code': 400,
'headers': {'Content-type': 'text/html'},
'text': web_templates['htmlError'].format('400 - All tuners already in use.')}
+
+ @property
+ def config_section(self):
+ return utils.instance_config_section(self.namespace, self.instance)
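The reworked find_tuner above treats each entry in WebHTTPHandler.rmg_station_scans as either the string 'Idle' or a per-tuner dict. An illustrative layout and the reuse test it applies; the namespace and channel values below are made up:

    # one list per namespace, one entry per tuner
    rmg_station_scans = {
        'ExampleProvider': [
            'Idle',
            {'instance': 'default',     # example values only
             'ch': '2.1',
             'mux': None,               # set later by the caller; the terminate_requested check suggests a ThreadQueue
             'status': 'Starting'},
        ]
    }

    def is_reusable(slot, instance, ch_num, isvod):
        # mirrors the reuse branch: same instance and channel, not VOD,
        # and the existing queue object has not asked to terminate
        return (isinstance(slot, dict)
                and slot['instance'] == instance
                and slot['ch'] == ch_num
                and not isvod
                and slot['mux'] is not None
                and not slot['mux'].terminate_requested)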
diff --git a/lib/streams/stream_queue.py b/lib/streams/stream_queue.py
index 2b7ed68..24920ab 100644
--- a/lib/streams/stream_queue.py
+++ b/lib/streams/stream_queue.py
@@ -35,13 +35,13 @@ def __init__(self, _bytes_per_read, _proc, _stream_id):
self.queue = []
self.proc = _proc
self.stream_id = _stream_id
- self.not_terminated = True
+ self.is_terminated = False
def _populate_queue():
"""
Collect lines from 'stream' and put them in 'queue'.
"""
- while self.not_terminated:
+ while not self.is_terminated:
try:
self.sout.flush()
video_data = self.sout.read(self.bytes_per_read)
@@ -49,9 +49,11 @@ def _populate_queue():
self.queue.append(video_data)
else:
self.logger.debug('Stream ended for this process, exiting queue thread')
+ self.is_terminated = True
break
except ValueError:
# occurs on termination with buffer must not be NULL
+ self.is_terminated = True
break
self._t = Thread(target=_populate_queue, args=())
self._t.daemon = True
@@ -74,4 +76,4 @@ def read(self):
return None
def terminate(self):
- self.not_terminated = False
+ self.is_terminated = True
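StreamQueue's constructor (only partly shown here) starts a daemon thread that drains the subprocess pipe into a list, and the rename above latches is_terminated at EOF. A condensed sketch of that producer/consumer pattern; it is illustrative, not the module's exact read() implementation:

    from threading import Thread

    class MiniStreamQueue:
        def __init__(self, bytes_per_read, proc):
            self.bytes_per_read = bytes_per_read
            self.sout = proc.stdout
            self.queue = []
            self.is_terminated = False
            t = Thread(target=self._populate, daemon=True)
            t.start()

        def _populate(self):
            # background reader: append fixed-size chunks until EOF or terminate()
            while not self.is_terminated:
                data = self.sout.read(self.bytes_per_read)
                if data:
                    self.queue.append(data)
                else:
                    self.is_terminated = True

        def read(self):
            if not self.queue:
                return None
            chunks, self.queue = self.queue, []
            return b''.join(chunks)

        def terminate(self):
            self.is_terminated = True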
diff --git a/lib/streams/streamlink_proxy.py b/lib/streams/streamlink_proxy.py
index 4c84de8..7775028 100644
--- a/lib/streams/streamlink_proxy.py
+++ b/lib/streams/streamlink_proxy.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -20,6 +20,7 @@
import subprocess
import time
+import lib.common.exceptions as exceptions
from lib.clients.web_handler import WebHTTPHandler
from lib.streams.video import Video
from lib.db.db_config_defn import DBConfigDefn
@@ -27,6 +28,8 @@
from .stream_queue import StreamQueue
from .pts_validation import PTSValidation
+IDLE_TIMER = 20 # seconds without video before a stream refresh is attempted
+MAX_IDLE_TIMER = 59 # seconds without video before the stream is terminated
class StreamlinkProxy(Stream):
@@ -45,70 +48,85 @@ def __init__(self, _plugins, _hdhr_queue):
self.write_buffer = None
self.stream_queue = None
self.pts_validation = None
+ self.tuner_no = -1
super().__init__(_plugins, _hdhr_queue)
- self.config = self.plugins.config_obj.data
self.db_configdefn = DBConfigDefn(self.config)
self.video = Video(self.config)
def update_tuner_status(self, _status):
- ch_num = self.channel_dict['number']
+ ch_num = self.channel_dict['display_number']
namespace = self.channel_dict['namespace']
scan_list = WebHTTPHandler.rmg_station_scans[namespace]
- for i, tuner in enumerate(scan_list):
- if type(tuner) == dict and tuner['ch'] == ch_num:
- WebHTTPHandler.rmg_station_scans[namespace][i]['status'] = _status
+ tuner = scan_list[self.tuner_no]
+ if type(tuner) == dict and tuner['ch'] == ch_num:
+ WebHTTPHandler.rmg_station_scans[namespace][self.tuner_no]['status'] = _status
-
- def stream(self, _channel_dict, _write_buffer):
+ def stream(self, _channel_dict, _write_buffer, _tuner_no):
+ global MAX_IDLE_TIMER
+ self.logger.info('Using streamlink_proxy for channel {}'.format(_channel_dict['uid']))
+ self.tuner_no = _tuner_no
self.channel_dict = _channel_dict
self.write_buffer = _write_buffer
self.config = self.db_configdefn.get_config()
+ MAX_IDLE_TIMER = self.config[self.namespace.lower()]['stream-g_stream_timeout']
+
self.pts_validation = PTSValidation(self.config, self.channel_dict)
channel_uri = self.get_stream_uri(self.channel_dict)
if not channel_uri:
- self.logger.warning('Unknown Channel')
+ self.logger.warning('Unknown Channel {}'.format(_channel_dict['uid']))
return
self.streamlink_proc = self.open_streamlink_proc(channel_uri)
+ if not self.streamlink_proc:
+ return
time.sleep(0.01)
self.last_refresh = time.time()
self.block_prev_time = self.last_refresh
self.buffer_prev_time = self.last_refresh
- self.read_buffer()
+ try:
+ self.read_buffer()
+ except exceptions.CabernetException as ex:
+ self.logger.info(str(ex))
+ return
while True:
- if self.video.data is None:
- self.logger.debug('No Video Data, waiting')
- break
- #self.streamlink_proc = self.refresh_stream()
+ if not self.video.data:
+ self.logger.info(
+ '1 No Video Data, refreshing stream {} {}'
+ .format(_channel_dict['uid'], self.streamlink_proc.pid))
+ self.streamlink_proc = self.refresh_stream()
else:
try:
self.validate_stream()
self.update_tuner_status('Streaming')
+ start_ttw = time.time()
self.write_buffer.write(self.video.data)
+ delta_ttw = time.time() - start_ttw
+ self.logger.info(
+ 'Serving {} {} ({}B) ttw:{:.2f}s'
+ .format(self.streamlink_proc.pid, _channel_dict['uid'],
+ len(self.video.data), delta_ttw))
except IOError as e:
if e.errno in [errno.EPIPE, errno.ECONNABORTED, errno.ECONNRESET, errno.ECONNREFUSED]:
- self.logger.info('1. Connection dropped by end device')
+ self.logger.info('1. Connection dropped by end device {}'.format(self.streamlink_proc.pid))
break
else:
self.logger.error('{}{}'.format(
- '1 ################ UNEXPECTED EXCEPTION=', e))
+ '1 UNEXPECTED EXCEPTION=', e))
raise
try:
self.read_buffer()
+ except exceptions.CabernetException as ex:
+ self.logger.info('{} {}'.format(ex, self.streamlink_proc.pid))
+ break
except Exception as e:
self.logger.error('{}{}'.format(
- '2 ################ UNEXPECTED EXCEPTION=', e))
- raise
- self.logger.debug('Terminating streamlink stream')
- self.streamlink_proc.terminate()
- try:
- self.streamlink_proc.communicate()
- except ValueError:
- pass
+ '2 UNEXPECTED EXCEPTION=', e))
+ break
+ self.terminate_stream()
def validate_stream(self):
- if not self.config[self.channel_dict['namespace'].lower()]['player-enable_pts_filter']:
+ if not self.config[self.config_section]['player-enable_pts_filter']:
return
-
+
has_changed = True
while has_changed:
has_changed = False
@@ -126,37 +144,58 @@ def validate_stream(self):
if results['reread_buffer']:
self.read_buffer()
has_changed = True
- return
+ return
def read_buffer(self):
+ global MAX_IDLE_TIMER
+ global IDLE_TIMER
data_found = False
self.video.data = None
- idle_timer = 5
+ idle_timer = MAX_IDLE_TIMER # time slice segments are less than 10 seconds
while not data_found:
self.video.data = self.stream_queue.read()
if self.video.data:
data_found = True
else:
- time.sleep(0.5)
+ if self.stream_queue.is_terminated:
+ raise exceptions.CabernetException('Streamlink Terminated, exiting stream {}'.format(self.streamlink_proc.pid))
+
+ time.sleep(1)
idle_timer -= 1
- if idle_timer == 0:
- if self.plugins.plugins[self.channel_dict['namespace']].plugin_obj \
- .is_time_to_refresh_ext(self.last_refresh, self.channel_dict['instance']):
- self.streamlink_proc = self.refresh_stream()
- idle_timer = 2
+ if idle_timer % IDLE_TIMER == 0:
+ self.logger.info(
+ '2 No Video Data, refreshing stream {}'
+ .format(self.streamlink_proc.pid))
+ self.streamlink_proc = self.refresh_stream()
+
+ if idle_timer < 1:
+ idle_timer = MAX_IDLE_TIMER # time slice segments are less than 10 seconds
+ self.logger.info(
+ 'No Video Data, terminating stream {}'
+ .format(self.streamlink_proc.pid))
+ time.sleep(15)
+ self.streamlink_proc = self.terminate_stream()
+ raise exceptions.CabernetException('Unable to get video stream, terminating')
+ elif int(MAX_IDLE_TIMER / 2) == idle_timer:
+ self.update_tuner_status('No Reply')
+ return
+
+ def terminate_stream(self):
+ self.logger.debug('Terminating streamlink stream {}'.format(self.streamlink_proc.pid))
+ while True:
+ try:
+ self.streamlink_proc.terminate()
+ self.streamlink_proc.wait(timeout=1.5)
+ break
+ except ValueError:
+ pass
+ except subprocess.TimeoutExpired:
+ time.sleep(0.01)
def refresh_stream(self):
self.last_refresh = time.time()
channel_uri = self.get_stream_uri(self.channel_dict)
- try:
- self.streamlink_proc.terminate()
- self.streamlink_proc.wait(timeout=0.1)
- self.logger.debug('Previous streamlink terminated')
- except ValueError:
- pass
- except subprocess.TimeoutExpired:
- self.streamlink_proc.terminate()
- time.sleep(0.01)
+ self.terminate_stream()
self.logger.debug('{}{}'.format(
'Refresh Stream channelUri=', channel_uri))
@@ -172,20 +211,46 @@ def open_streamlink_proc(self, _channel_uri):
visible by looking at the video packets for a 6 second window being 171
instead of 180. Following the first read, the packets increase to 180.
"""
+ header = self.channel_dict['json'].get('Header')
+ str_array = []
+ llevel = self.config['handler_loghandler']['level']
+ if llevel == 'DEBUG':
+ sl_llevel = 'trace'
+ elif llevel == 'INFO':
+ sl_llevel = 'info'
+ elif llevel == 'NOTICE':
+ sl_llevel = 'warning'
+ elif llevel == 'WARNING':
+ sl_llevel = 'error'
+ else:
+ sl_llevel = 'none'
+
+ if header:
+ for key, value in header.items():
+ str_array.append('--http-header')
+ str_array.append(key + '=' + value)
+ if key == 'Referer':
+ self.logger.debug('Using HTTP Referer: {} Channel: {}'.format(value, self.channel_dict['uid']))
uri = '{}'.format(_channel_uri)
- streamlink_command = ['streamlink',
+ streamlink_command = [
+ self.config['paths']['streamlink_path'],
'--stdout',
- '--quiet',
- '--hds-segment-threads', '2',
+ '--loglevel', sl_llevel,
'--ffmpeg-fout', 'mpegts',
'--hls-segment-attempts', '2',
'--hls-segment-timeout', '5',
uri,
'720,best'
- ]
- streamlink_process = subprocess.Popen(streamlink_command,
- stdout=subprocess.PIPE,
- bufsize=-1)
+ ]
+ streamlink_command.extend(str_array)
+ try:
+ streamlink_process = subprocess.Popen(
+ streamlink_command,
+ stdout=subprocess.PIPE,
+ bufsize=-1)
+ except Exception:
+ self.logger.error('Streamlink Binary Not Found: {}'.format(self.config['paths']['streamlink_path']))
+ return
self.stream_queue = StreamQueue(188, streamlink_process, self.channel_dict['uid'])
- time.sleep(1)
+ time.sleep(0.1)
return streamlink_process
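The reworked read_buffer above turns IDLE_TIMER and MAX_IDLE_TIMER into a countdown: refresh the stream every IDLE_TIMER seconds of silence, flag 'No Reply' at the halfway point, and give up when the counter runs out. A condensed sketch of that loop; the three callables stand in for stream_queue.read(), refresh_stream() and update_tuner_status('No Reply'):

    import time

    IDLE_TIMER = 20      # seconds of silence between refresh attempts
    MAX_IDLE_TIMER = 59  # overridden from stream-g_stream_timeout in the diff

    def wait_for_video(read_chunk, refresh, mark_no_reply):
        idle_timer = MAX_IDLE_TIMER
        while True:
            data = read_chunk()
            if data:
                return data
            time.sleep(1)
            idle_timer -= 1
            if idle_timer % IDLE_TIMER == 0:
                refresh()          # periodic re-tune while still waiting
            if idle_timer < 1:
                raise RuntimeError('Unable to get video stream, terminating')
            elif idle_timer == int(MAX_IDLE_TIMER / 2):
                mark_no_reply()    # surfaces 'No Reply' on the tuner status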
diff --git a/lib/streams/thread_queue.py b/lib/streams/thread_queue.py
new file mode 100644
index 0000000..1bf2e7f
--- /dev/null
+++ b/lib/streams/thread_queue.py
@@ -0,0 +1,200 @@
+"""
+MIT License
+
+Copyright (C) 2023 ROCKY4546
+https://github.com/rocky4546
+
+This file is part of Cabernet
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this software
+and associated documentation files (the "Software"), to deal in the Software without restriction,
+including without limitation the rights to use, copy, modify, merge, publish, distribute,
+sublicense, and/or sell copies of the Software, and to permit persons to whom the Software
+is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+"""
+
+import logging
+import threading
+import time
+from queue import Empty
+
+from multiprocessing import Queue, Process
+from threading import Thread
+
+
+class ThreadQueue(Thread):
+ """
+ Takes a queue containing thread ids and pushes them
+ into other queues associated with those threads
+ Assumes queue item is a dict containing a name/value of "thread_id"
+ 'terminate' can be sent via name 'uri' to terminate a specific thread id
+ """
+ # list of [threadid, queue] items
+
+ def __init__(self, _queue, _config):
+ Thread.__init__(self)
+ self.logger = logging.getLogger(__name__ + str(threading.get_ident()))
+ # incoming queue containing the thread id of which outgoing queue to send it to.
+ self.queue = _queue
+ # outgoing queues
+ self.queue_list = {}
+ self.config = _config
+ self.terminate_requested = False
+ # The process using the incoming queue to send data
+ self._remote_proc = None
+ # incoming queue to the process, stored locally
+ self._status_queue = None
+ self.start()
+
+ def __str__(self):
+ """
+ Used to display the number of queues in the outgoing queue list
+ """
+ return str(len(self.queue_list))
+
+ def run(self):
+ thread_id = None
+ try:
+ while not self.terminate_requested:
+ queue_item = self.queue.get()
+ thread_id = queue_item.get('thread_id')
+ if not thread_id:
+ self.logger.warning('Badly formatted queue. thread_id required and missing thread_id:{} uri:{}'
+ .format(queue_item.get('thread_id'), queue_item.get('uri')))
+ continue
+ if not queue_item.get('uri'):
+ self.logger.warning('Badly formatted queue. uri required and missing thread_id:{} uri:{}'
+ .format(queue_item.get('thread_id'), queue_item.get('uri')))
+ continue
+ if queue_item.get('uri') == 'terminate':
+ time.sleep(self.config['stream']['switch_channel_timeout'])
+ self.del_thread(thread_id, True)
+ out_queue = self.queue_list.get(thread_id)
+ if out_queue:
+ # Define the sleep length so the outgoing queues do not become full
+ # or consume all available memory (this occurs with VOD streams).
+ # The sleep time auto-adjusts to keep the outgoing queue
+ # at a little over 10 items.
+ if out_queue.qsize() > 10:
+ s = out_queue.qsize()/2
+ else:
+ s = 0.0
+ out_queue.put(queue_item)
+ self.sleep(s)
+
+ except (KeyboardInterrupt, EOFError) as ex:
+ self.terminate_requested = True
+ self.clear_queues()
+ self.logger.exception('{}{}'.format(
+ 'UNEXPECTED EXCEPTION ThreadQueue=', ex))
+ except Exception as ex:
+ # tell everyone we are terminating badly
+ self.logger.exception('{}'.format(
+ 'UNEXPECTED EXCEPTION ThreadQueue'))
+ for qdict in self.queue_list.items():
+ qdict[1].put({'thread_id': qdict[0], 'uri': 'terminate'})
+ self.terminate_requested = True
+ self.clear_queues()
+ time.sleep(0.01)
+
+ self.clear_queues()
+ self.terminate_requested = True
+ self.logger.debug('ThreadQueue terminated')
+
+ def clear_queues(self):
+ self.clear_q(self.queue)
+
+ def clear_q(self, _q):
+ try:
+ while True:
+ item = _q.get_nowait()
+ except (Empty, ValueError, EOFError, OSError) as ex:
+ pass
+
+ def add_thread(self, _thread_id, _queue):
+ """
+ Adds the thread id to the list of queues this class is sending data to
+ """
+ out_queue = self.queue_list.get(_thread_id)
+ self.queue_list[_thread_id] = _queue
+ if not out_queue:
+ self.logger.debug('Adding thread id queue to thread queue: {}'.format(_thread_id))
+
+ def del_thread(self, _thread_id, _is_inrun=False):
+ """
+ Removes the thread id from the list of queues this class is sending data to.
+ If the queue list becomes empty, also sets terminate_requested to True
+ and returns True.
+ _is_inrun is set to True when the call comes from the thread's run method;
+ in that case there is no need to wait for termination, since the run loop is no longer blocked on a queue get.
+ """
+ out_queue = self.queue_list.get(_thread_id)
+ if out_queue:
+ del self.queue_list[_thread_id]
+ self.logger.debug('Removing thread id queue from thread queue: {}'.format(_thread_id))
+ if not len(self.queue_list):
+ # sleep to deal with boomerang effects on termination
+ # when the channel does a quick reset by the client
+ time.sleep(1.0)
+ if not len(self.queue_list):
+ self.logger.debug('Terminating thread queue')
+ self.terminate_requested = True
+ time.sleep(0.01)
+ self.clear_queues()
+ if _is_inrun:
+ return True
+ else:
+ self.queue.put({'thread_id': _thread_id, 'uri': 'terminate'})
+ time.sleep(0.01)
+ self.wait_for_termination()
+ return True
+ else:
+ return False
+ else:
+ return True
+
+ def wait_for_termination(self):
+ count = 50
+ while self.is_alive() and count > 0:
+ time.sleep(0.1)
+ count -= 1
+ self.clear_queues()
+
+ def sleep(self, _time):
+ """
+ Creates a sleep function that will exit quickly if the termination flag is set
+ """
+ start_ttw = time.time()
+ for i in range(round(_time * 5)):
+ if not self.terminate_requested:
+ time.sleep(_time * 0.2)
+ else:
+ break
+ delta_ttw = time.time() - start_ttw
+ if delta_ttw > _time:
+ break
+
+ @property
+ def remote_proc(self):
+ """
+ process using the status_queue and sending to the incoming queue
+ """
+ return self._remote_proc
+
+ @remote_proc.setter
+ def remote_proc(self, _proc):
+ self._remote_proc = _proc
+
+ @property
+ def status_queue(self):
+ """
+ queue used by the remote process as its incoming queue
+ """
+ return self._status_queue
+
+ @status_queue.setter
+ def status_queue(self, _queue):
+ self._status_queue = _queue
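A usage sketch for the new ThreadQueue: the incoming queue carries dicts keyed by thread_id, and ThreadQueue fans each item out to the per-client queue registered for that id. The id, URL and config values below are made up; the config dict only carries the one key run() reads:

    from multiprocessing import Queue
    from lib.streams.thread_queue import ThreadQueue

    incoming = Queue()
    config = {'stream': {'switch_channel_timeout': 0}}
    tq = ThreadQueue(incoming, config)

    client_queue = Queue()
    tq.add_thread(12345, client_queue)        # register a client thread id

    incoming.put({'thread_id': 12345, 'uri': 'http://example.invalid/seg1.ts'})
    print(client_queue.get()['uri'])          # routed to the client's queue

    tq.del_thread(12345)                      # last client gone: the queue terminates itself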
diff --git a/lib/tvheadend/epg_category.py b/lib/tvheadend/epg_category.py
index e19d09d..4343ed2 100644
--- a/lib/tvheadend/epg_category.py
+++ b/lib/tvheadend/epg_category.py
@@ -71,8 +71,7 @@
'COMEDY': 'Comedy',
'SOAP': 'Soap / Melodrama / Folkloric',
'ROMANCE': 'Romance',
- 'HISTORICAL': 'Serious / Classical / Religious ' \
- '/ Historical movie / Drama',
+ 'HISTORICAL': 'Serious / Classical / Religious / Historical movie / Drama',
'XXX': 'Adult movie / Drama',
'NEWS': 'News / Current affairs',
@@ -155,77 +154,77 @@
# Normal GENRES to TVHEADEND translation
TVHEADEND = {
- 'Action' : tvh_genres['THRILLER'],
- 'Action sports' : tvh_genres['SPORT'],
- 'Adventure' : tvh_genres['ADVENTURE'],
- 'Agriculture' : tvh_genres['NATURE'],
- 'Animals' : tvh_genres['NATURE'],
- 'Anthology' : tvh_genres['FILM'],
- 'Art' : tvh_genres['CULTURE'],
- 'Baseball' : tvh_genres['SPORT_TEAM'],
- 'Basketball' : tvh_genres['SPORT_TEAM'],
- 'Biography' : tvh_genres['VIP'],
- 'Boxing' : tvh_genres['SPORT'],
- 'Cartoon' : tvh_genres['CARTOON'],
- 'Children' : tvh_genres['KIDS'],
- 'Classic Sport Event' : tvh_genres['SPORT_SPECIAL'],
- 'Comedy' : tvh_genres['COMEDY'],
- 'Comedy drama' : tvh_genres['COMEDY'],
- 'Community' : tvh_genres['SOCIAL'],
- 'Consumer' : tvh_genres['SHOPPING'],
- 'Cooking' : tvh_genres['COOKING'],
- 'Crime' : tvh_genres['THRILLER'],
- 'Crime drama' : tvh_genres['THRILLER'],
- 'Docudrama' : tvh_genres['DOCUMENTARY'],
- 'Documentary' : tvh_genres['DOCUMENTARY'],
- 'Drama' : tvh_genres['MOVIE'],
- 'Educational' : tvh_genres['EDUCATIONAL'],
- 'Entertainment' : tvh_genres['GAME'],
- 'Exercise' : tvh_genres['FITNESS'],
- # 'Fantasy' :
- 'financial' : tvh_genres['ECONOMIC'],
- 'Football' : tvh_genres['FOOTBALL'],
- 'Game show' : tvh_genres['GAME'],
- 'Golf' : tvh_genres['SPORT_TEAM'],
- 'Health' : tvh_genres['MEDICINE'],
- 'Historical drama' : tvh_genres['HISTORICAL'],
- 'Hockey' : tvh_genres['SPORT_TEAM'],
- 'Home improvement' : tvh_genres['HANDICRAFT'],
- 'Horror' : tvh_genres['SF'],
- 'House/garden' : tvh_genres['GARDENING'],
- 'How-to' : tvh_genres['SCIENCE'],
- 'Interview' : tvh_genres['DEBATE'],
- 'Law' : tvh_genres['SOCIAL'],
- 'Medical' : tvh_genres['MEDICINE'],
- 'Mixed martial arts' : tvh_genres['MARTIAL_ARTS'],
- 'Music' : tvh_genres['MUSIC'],
- 'Musical' : tvh_genres['MUSIC'],
- 'Musical comedy' : tvh_genres['COMEDY'],
- 'Mystery' : tvh_genres['THRILLER'],
- 'News' : tvh_genres['NEWS'],
- 'Newsmagazine' : tvh_genres['NEWS_MAGAZINE'],
- 'Olympics' : tvh_genres['SPORT'],
- 'Outdoors' : tvh_genres['SPORT'],
- 'Poker' : tvh_genres['GAME'],
- 'Pro wrestling' : tvh_genres['MARTIAL_ARTS'],
- 'Public affairs' : tvh_genres['BROADCASTING'],
- 'Reality' : tvh_genres['GAME'],
- 'Religious' : tvh_genres['RELIGION'],
- 'Romance' : tvh_genres['ROMANCE'],
- 'Romantic comedy' : tvh_genres['ROMANCE'],
- 'Science' : tvh_genres['SCIENCE'],
- 'Science fiction' : tvh_genres['SF'],
- 'Self improvement' : tvh_genres['FURTHER_EDUCATION'],
- 'Shopping' : tvh_genres['SHOPPING'],
- 'Sitcom' : tvh_genres['COMEDY'],
- 'Soap' : tvh_genres['SOAP'],
- 'Soccer' : tvh_genres['FOOTBALL'],
- # 'Special' :
- 'Sports talk' : tvh_genres['SPORT'],
- 'Talk' : tvh_genres['TALK_SHOW'],
- 'Thriller' : tvh_genres['THRILLER'],
- 'Travel' : tvh_genres['TRAVEL'],
- 'Variety' : tvh_genres['VARIETY'],
- 'Weightlifting' : tvh_genres['ATHLETICS'],
- 'Western' : tvh_genres['ADVENTURE']
+ 'Action': tvh_genres['THRILLER'],
+ 'Action sports': tvh_genres['SPORT'],
+ 'Adventure': tvh_genres['ADVENTURE'],
+ 'Agriculture': tvh_genres['NATURE'],
+ 'Animals': tvh_genres['NATURE'],
+ 'Anthology': tvh_genres['FILM'],
+ 'Art': tvh_genres['CULTURE'],
+ 'Baseball': tvh_genres['SPORT_TEAM'],
+ 'Basketball': tvh_genres['SPORT_TEAM'],
+ 'Biography': tvh_genres['VIP'],
+ 'Boxing': tvh_genres['SPORT'],
+ 'Cartoon': tvh_genres['CARTOON'],
+ 'Children': tvh_genres['KIDS'],
+ 'Classic Sport Event': tvh_genres['SPORT_SPECIAL'],
+ 'Comedy': tvh_genres['COMEDY'],
+ 'Comedy drama': tvh_genres['COMEDY'],
+ 'Community': tvh_genres['SOCIAL'],
+ 'Consumer': tvh_genres['SHOPPING'],
+ 'Cooking': tvh_genres['COOKING'],
+ 'Crime': tvh_genres['THRILLER'],
+ 'Crime drama': tvh_genres['THRILLER'],
+ 'Docudrama': tvh_genres['DOCUMENTARY'],
+ 'Documentary': tvh_genres['DOCUMENTARY'],
+ 'Drama': tvh_genres['MOVIE'],
+ 'Educational': tvh_genres['EDUCATIONAL'],
+ 'Entertainment': tvh_genres['GAME'],
+ 'Exercise': tvh_genres['FITNESS'],
+ 'Fantasy': tvh_genres['SF'],
+ 'financial': tvh_genres['ECONOMIC'],
+ 'Football': tvh_genres['FOOTBALL'],
+ 'Game show': tvh_genres['GAME'],
+ 'Golf': tvh_genres['SPORT_TEAM'],
+ 'Health': tvh_genres['MEDICINE'],
+ 'Historical drama': tvh_genres['HISTORICAL'],
+ 'Hockey': tvh_genres['SPORT_TEAM'],
+ 'Home improvement': tvh_genres['HANDICRAFT'],
+ 'Horror': tvh_genres['SF'],
+ 'House/garden': tvh_genres['GARDENING'],
+ 'How-to': tvh_genres['SCIENCE'],
+ 'Interview': tvh_genres['DEBATE'],
+ 'Law': tvh_genres['SOCIAL'],
+ 'Medical': tvh_genres['MEDICINE'],
+ 'Mixed martial arts': tvh_genres['MARTIAL_ARTS'],
+ 'Music': tvh_genres['MUSIC'],
+ 'Musical': tvh_genres['MUSIC'],
+ 'Musical comedy': tvh_genres['COMEDY'],
+ 'Mystery': tvh_genres['THRILLER'],
+ 'News': tvh_genres['NEWS'],
+ 'Newsmagazine': tvh_genres['NEWS_MAGAZINE'],
+ 'Olympics': tvh_genres['SPORT'],
+ 'Outdoors': tvh_genres['SPORT'],
+ 'Poker': tvh_genres['GAME'],
+ 'Pro wrestling': tvh_genres['MARTIAL_ARTS'],
+ 'Public affairs': tvh_genres['BROADCASTING'],
+ 'Reality': tvh_genres['GAME'],
+ 'Religious': tvh_genres['RELIGION'],
+ 'Romance': tvh_genres['ROMANCE'],
+ 'Romantic comedy': tvh_genres['ROMANCE'],
+ 'Science': tvh_genres['SCIENCE'],
+ 'Science fiction': tvh_genres['SF'],
+ 'Self improvement': tvh_genres['FURTHER_EDUCATION'],
+ 'Shopping': tvh_genres['SHOPPING'],
+ 'Sitcom': tvh_genres['COMEDY'],
+ 'Soap': tvh_genres['SOAP'],
+ 'Soccer': tvh_genres['FOOTBALL'],
+ 'Special': tvh_genres['NEW_MEDIA'],
+ 'Sports talk': tvh_genres['SPORT'],
+ 'Talk': tvh_genres['TALK_SHOW'],
+ 'Thriller': tvh_genres['THRILLER'],
+ 'Travel': tvh_genres['TRAVEL'],
+ 'Variety': tvh_genres['VARIETY'],
+ 'Weightlifting': tvh_genres['ATHLETICS'],
+ 'Western': tvh_genres['ADVENTURE']
}
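With the table normalized, provider genre strings translate to TVHeadend categories by a plain dictionary lookup, for example (module path taken from the diff header above):

    from lib.tvheadend.epg_category import TVHEADEND, tvh_genres

    assert TVHEADEND['Comedy drama'] == tvh_genres['COMEDY']
    assert TVHEADEND['Fantasy'] == tvh_genres['SF']          # newly mapped here
    assert TVHEADEND['Special'] == tvh_genres['NEW_MEDIA']   # newly mapped here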
diff --git a/lib/tvheadend/service/Unix/tv_grab_file b/lib/tvheadend/service/Unix/tv_grab_file
old mode 100755
new mode 100644
diff --git a/lib/updater/cabernet.py b/lib/updater/cabernet.py
index 3f5faa3..0c4b085 100644
--- a/lib/updater/cabernet.py
+++ b/lib/updater/cabernet.py
@@ -18,6 +18,7 @@
import glob
import importlib
+import importlib.resources
import json
import logging
import os
@@ -35,8 +36,7 @@
from lib.common.decorators import handle_json_except
from lib.common.tmp_mgmt import TMPMgmt
-MANIFEST_FILE = 'manifest.json'
-TMP_ZIPFILE = utils.CABERNET_NAMESPACE + '.zip'
+TMP_ZIPFILE = utils.CABERNET_ID + '.zip'
class CabernetUpgrade:
@@ -48,7 +48,7 @@ def __init__(self, _plugins):
self.config = _plugins.config_obj.data
self.plugin_db = DBPlugins(self.config)
self.tmp_mgmt = TMPMgmt(self.config)
-
+
def update_version_info(self):
"""
Updates the database with the latest version release data
@@ -59,27 +59,30 @@ def update_version_info(self):
if release_data_list is not None:
current_version = utils.VERSION
last_version = release_data_list[0]['tag_name']
+ last_stable_version = release_data_list[0]['tag_name']
next_version = self.get_next_release(release_data_list)
- manifest['version'] = current_version
- manifest['next_version'] = next_version
- manifest['latest_version'] = last_version
+ manifest['version']['current'] = current_version
+ manifest['version']['next'] = next_version
+ manifest['version']['latest'] = last_version
+ manifest['version']['installed'] = True
self.save_manifest(manifest)
# need to have the task take at least 1 second to register the time
time.sleep(1)
def import_manifest(self):
"""
- Loads the manifest for cabernet from a file
+ Loads the manifest for cabernet
"""
- json_settings = importlib.resources.read_text(self.config['paths']['resources_pkg'], MANIFEST_FILE)
- settings = json.loads(json_settings)
- return settings
-
+ json_settings = self.plugin_db.get_repos(utils.CABERNET_ID)
+ if json_settings:
+ json_settings = json_settings[0]
+ return json_settings
+
def load_manifest(self):
"""
Loads the cabernet manifest from DB
"""
- manifest_list = self.plugin_db.get_plugins(utils.CABERNET_NAMESPACE)
+ manifest_list = self.plugin_db.get_repos(utils.CABERNET_ID)
if manifest_list is None:
return None
else:
@@ -89,20 +92,20 @@ def save_manifest(self, _manifest):
"""
Saves to DB the manifest for cabernet
"""
- self.plugin_db.save_plugin(_manifest)
-
+ self.plugin_db.save_repo(_manifest)
+
def github_releases(self, _manifest):
url = ''.join([
- _manifest['github_repo_' + self.config['main']['upgrade_quality'] ],
+ _manifest['dir']['github_repo_' + self.config['main']['upgrade_quality']],
'/releases'
- ])
- return self.get_uri_data(url)
-
- @handle_json_except
- @handle_url_except
- def get_uri_data(self, _uri):
- header = {'Content-Type': 'application/json',
- 'User-agent': utils.DEFAULT_USER_AGENT}
+ ])
+ return self.get_uri_data(url, 2)
+
+ @handle_json_except
+ @handle_url_except
+ def get_uri_data(self, _uri, _retries):
+ header = {'Content-Type': 'application/json',
+ 'User-agent': utils.DEFAULT_USER_AGENT}
req = urllib.request.Request(_uri, headers=header)
with urllib.request.urlopen(req, timeout=10.0) as resp:
x = json.load(resp)
@@ -110,59 +113,71 @@ def get_uri_data(self, _uri):
def get_next_release(self, release_data_list):
current_version = self.config['main']['version']
- x = self.version_re.match(current_version)
- c_version_float = float(re.findall(r'\d+\.(\d+\.\d+).\d+', current_version)[0])
+ cur_version_float = utils.get_version_index(current_version)
+ next_version_int = (int(cur_version_float/100)+2)*100
prev_version = release_data_list[0]['tag_name']
+ data = None
for data in release_data_list:
- numbers = re.findall(r'\d+\.(\d+)\.(\d+).\d+', data['tag_name'])[0]
- version_float = float('{:01d}.{:02d}'.format(int(numbers[0]), int(numbers[1])))
- if version_float-0.101 < c_version_float:
+ version_float = utils.get_version_index(data['tag_name'])
+ if version_float < next_version_int:
break
prev_version = data['tag_name']
return prev_version
+ def get_stable_release(self, release_data_list):
+ """
+ Get the latest stable release with the format z.y.x.w without additional text...
+
+ """
+ pass
+
def upgrade_app(self, _web_status):
"""
Initial request to perform an upgrade
"""
c_manifest = self.load_manifest()
if c_manifest is None:
+ self.logger.info('Cabernet manifest not found, aborting')
+ _web_status.data += 'Cabernet manifest not found, aborting \r\n'
return False
- if c_manifest['next_version'] == c_manifest['version']:
+ if not c_manifest['version'].get('next'):
+ return False
+ if c_manifest['version'].get('next') == c_manifest['version'].get('current'):
self.logger.info('Cabernet is on the current version, not upgrading')
_web_status.data += 'Cabernet is on the current version, not upgrading \r\n'
return False
-
-
+
# This checks to see if additional files or folders are in the
# basedir area. if so, abort upgrade.
# It is basically for the case where we have the wrong directory
_web_status.data += 'Checking current install area for expected files... \r\n'
if not self.check_expected_files(_web_status):
return False
-
+
b = backups.Backups(self.plugins)
-
+
# recursively check all folders from the basedir to see if they are writable
_web_status.data += 'Checking write permissions... \r\n'
resp = b.check_code_write_permissions()
if resp is not None:
_web_status.data += resp
return False
-
-
+
# simple call to run a backup of the data and source
# use a direct call to the backup methods instead of calling the scheduler
_web_status.data += 'Creating backup of code and data... \r\n'
if not b.backup_all():
_web_status.data += 'Backup failed, aborting upgrade \r\n'
return False
-
+
_web_status.data += 'Downloading new version from website... \r\n'
if not self.download_zip('/'.join([
- c_manifest['github_repo_' + self.config['main']['upgrade_quality'] ],
- 'zipball', c_manifest['next_version']
- ])):
+ c_manifest['dir']['github_repo_' + self.config['main']['upgrade_quality']],
+ 'zipball', c_manifest['version']['next']
+ ]), 2):
_web_status.data += 'Download of the new version failed, aborting upgrade \r\n'
return False
@@ -198,27 +213,30 @@ def check_expected_files(self, _web_status):
"""
Check the base directory files to see if all are expected.
"""
- files_present = ['build', 'lib', 'misc', 'plugins', 'plugins_ext',
- '.dockerignore', '.gitignore', 'CHANGELOG.md', 'CONTRIBUTING.md',
- 'Dockerfile', 'Dockerfile_l2p', 'Dockerfile_tvh', 'Dockerfile_tvh_crypt.alpine',
- 'Dockerfile_tvh_crypt.slim-buster', 'LICENSE', 'README.md',
- 'TVHEADEND.md', 'docker-compose.yml', 'requirements.txt', 'tvh_main.py',
- 'data', 'config.ini', 'is_container', '.git', 'cabernet.url', 'ffmpeg',
- 'README.txt', 'uninst.exe' ]
-
- filelist = [os.path.basename(x) for x in
- glob.glob(os.path.join(self.config['paths']['main_dir'], '*'))]
+ files_present = ['build', 'lib', 'misc',
+ '.dockerignore', '.gitignore', 'CHANGELOG.md', 'CONTRIBUTING.md',
+ 'Dockerfile', 'Dockerfile_l2p', 'Dockerfile_tvh', 'Dockerfile_tvh_crypt.alpine',
+ 'Dockerfile_tvh_crypt.slim-buster', 'LICENSE', 'README.md',
+ 'TVHEADEND.md', 'docker-compose.yml', 'requirements.txt', 'tvh_main.py',
+ 'data', 'config.ini', 'is_container', '.git', 'cabernet.url', 'ffmpeg',
+ 'README.txt', 'uninst.exe']
+
+ files_present.extend([self.config['paths']['internal_plugins_pkg'], self.config['paths']['external_plugins_pkg']])
+
+ filelist = [os.path.basename(x) for x in
+ glob.glob(os.path.join(self.config['paths']['main_dir'], '*'))]
response = True
for file in filelist:
if file not in files_present:
- _web_status.data += '#### Extra file(s) found in install directory, aborting upgrade. FILE: {} \r\n'.format(file)
+ _web_status.data += '#### Extra file(s) found in install directory, aborting upgrade. FILE: {} \r\n'\
+ .format(file)
response = False
return response
@handle_json_except
@handle_url_except
- def download_zip(self, _zip_url):
-
+ def download_zip(self, _zip_url, _retries):
+
buf_size = 2 * 16 * 16 * 1024
save_path = pathlib.Path(self.config['paths']['tmp_dir']).joinpath(TMP_ZIPFILE)
h = {'Content-Type': 'application/zip', 'User-agent': utils.DEFAULT_USER_AGENT}
@@ -245,9 +263,9 @@ def extract_code(self):
return None
def cleanup_tmp(self):
- dir = self.config['paths']['tmp_dir']
- for files in os.listdir(dir):
- path = os.path.join(dir, files)
+ dir_ = self.config['paths']['tmp_dir']
+ for files in os.listdir(dir_):
+ path = os.path.join(dir_, files)
try:
shutil.rmtree(path)
except OSError:
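get_next_release above walks the newest-first release list and keeps stepping back until a tag's index falls below (current minor + 2) * 100, using utils.get_version_index to compare tags. A sketch of that selection with a stand-in index function; the real encoding lives in lib.common.utils and the tag values below are made up:

    def get_version_index(tag):
        # stand-in for utils.get_version_index: 0.X.Y.Z -> X*10000 + Y*100 + Z,
        # so dividing by 100 strips the patch level (an assumption for illustration)
        _, major, minor, patch = tag.split('.')
        return int(major) * 10000 + int(minor) * 100 + int(patch)

    def get_next_release(release_tags, current_version):
        # mirrors the selection loop above
        next_version_int = (int(get_version_index(current_version) / 100) + 2) * 100
        prev_version = release_tags[0]
        for tag in release_tags:
            if get_version_index(tag) < next_version_int:
                break
            prev_version = tag
        return prev_version

    # e.g. current 0.9.12.0 with ['0.9.15.1', '0.9.14.3', '0.9.13.0'] returns '0.9.14.3':
    # the oldest listed tag whose index is still >= (current + 2 blocks)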
diff --git a/lib/updater/patcher.py b/lib/updater/patcher.py
index ff737c1..048ae82 100644
--- a/lib/updater/patcher.py
+++ b/lib/updater/patcher.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -16,21 +16,21 @@
substantial portions of the Software.
"""
-import importlib
+
+import configparser
import logging
-import os
-import shutil
-import sqlite3
-import threading
import time
+import traceback
-import lib.common.utils as utils
-from lib.db.db_channels import DBChannels
+from lib.plugins.plugin_manager.plugin_manager import PluginManager
+from lib.db.db_plugins import DBPlugins
from lib.db.db_scheduler import DBScheduler
-REQUIRED_VERSION = '0.9.9'
+
+REQUIRED_VERSION = '0.9.14'
LOGGER = None
+
def patch_upgrade(_config_obj, _new_version):
"""
This method is called when a cabernet upgrade is requested. Versions are
@@ -41,15 +41,25 @@ def patch_upgrade(_config_obj, _new_version):
it is associated is tested with this new version.
"""
global LOGGER
- LOGGER = logging.getLogger(__name__)
+ if not LOGGER:
+ LOGGER = logging.getLogger(__name__)
+
results = ''
if _new_version.startswith(REQUIRED_VERSION):
- LOGGER.info('Applying the patch to version: {}'.format(REQUIRED_VERSION))
- results = 'Patch: Updating Channels database...'
+ LOGGER.info('Applying patches to version: {}'.format(REQUIRED_VERSION))
+
+ try:
+ try:
+ _config_obj.config_handler.remove_option('streams', 'stream_timeout')
+ except configparser.NoSectionError:
+ pass
+ _config_obj.config_handler.remove_option('logger_root', 'level')
+ _config_obj.config_handler.set('logger_root', 'level', 'TRACE')
- dbchannels = DBChannels(_config_obj.data)
- dbchannels.create_tables()
+ except Exception:
+ # Make sure that the patcher exits normally so the maintenance flag is removed
+ LOGGER.warning(traceback.format_exc())
return results
@@ -60,6 +70,8 @@ def move_key(_config_obj, _key):
def find_key_by_section(_config_obj, _key, _section):
global LOGGER
+ if not LOGGER:
+ LOGGER = logging.getLogger(__name__)
if _section in _config_obj.data:
if _key in _config_obj.data[_section]:
LOGGER.info('Moving setting {}:{} to instance'.format(_section, _key))
@@ -68,11 +80,11 @@ def find_key_by_section(_config_obj, _key, _section):
for section in sections:
_config_obj.write(section, _key, value)
_config_obj.write(_section, _key, None)
-
+
def find_instance(_config, _plugin_name):
sections = []
for section in _config.keys():
- if section.startswith(_plugin_name+'_'):
+ if section.startswith(_plugin_name + '_'):
sections.append(section)
return sections
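patch_upgrade above now edits the live config through configparser, dropping the obsolete streams/stream_timeout key and forcing the logger_root level to TRACE while tolerating a missing section. A self-contained sketch of that idiom with an in-memory config:

    import configparser

    def apply_patch(config_handler):
        try:
            config_handler.remove_option('streams', 'stream_timeout')
        except configparser.NoSectionError:
            pass                               # section may not exist on older installs
        config_handler.remove_option('logger_root', 'level')
        config_handler.set('logger_root', 'level', 'TRACE')

    cfg = configparser.ConfigParser()
    cfg.read_string('[logger_root]\nlevel = WARNING\n')
    apply_patch(cfg)
    print(cfg.get('logger_root', 'level'))     # -> TRACE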
diff --git a/lib/updater/plugins.py b/lib/updater/plugins.py
index b6ccf7e..76440a1 100644
--- a/lib/updater/plugins.py
+++ b/lib/updater/plugins.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -16,7 +16,38 @@
substantial portions of the Software.
"""
+
+import logging
+
+from lib.db.db_plugins import DBPlugins
+from lib.plugins.plugin_manager.plugin_manager import PluginManager
+
+
class PluginsUpgrade:
- def __init__(self):
- pass
\ No newline at end of file
+ def __init__(self, _plugins):
+ self.logger = logging.getLogger(__name__)
+ self.config_obj = _plugins.config_obj
+ self.config = _plugins.config_obj.data
+ self.plugin_db = DBPlugins(self.config)
+ self.pm = PluginManager(None, self.config_obj)
+
+
+ def upgrade_plugins(self, _web_status):
+ _web_status.data += '#### Checking Plugins #### \r\n'
+ plugin_defns = self.plugin_db.get_plugins(True)
+ if not plugin_defns:
+ return True
+
+ for p_defn in plugin_defns:
+ if not p_defn.get('external'):
+ continue
+ if p_defn['version']['current'] == p_defn['version']['latest']:
+ continue
+ # upgrade available
+ _web_status.data += self.pm.delete_plugin(p_defn['repoid'], p_defn['id'])
+ _web_status.data += self.pm.install_plugin(p_defn['repoid'], p_defn['id'])
+ _web_status.data += ' \r\n#### Plugin Upgrades Finished #### \r\n'
+
+ return True
+
diff --git a/lib/updater/updater.py b/lib/updater/updater.py
index 2f1fdb0..9fdc4dc 100644
--- a/lib/updater/updater.py
+++ b/lib/updater/updater.py
@@ -1,7 +1,7 @@
"""
MIT License
-Copyright (C) 2021 ROCKY4546
+Copyright (C) 2023 ROCKY4546
https://github.com/rocky4546
This file is part of Cabernet
@@ -17,27 +17,29 @@
"""
import importlib
+import importlib.resources
import json
import logging
-import pathlib
import re
import time
-import urllib.request
from threading import Thread
import lib.common.utils as utils
-import lib.updater.cabernet as cabernet
from lib.db.db_scheduler import DBScheduler
from lib.db.db_plugins import DBPlugins
from lib.common.decorators import getrequest
from lib.web.pages.templates import web_templates
from lib.updater.cabernet import CabernetUpgrade
+from lib.updater.plugins import PluginsUpgrade
from lib.common.string_obj import StringObj
from lib.common.tmp_mgmt import TMPMgmt
+from lib.updater import cabernet
+from lib.plugins.repo_handler import RepoHandler
STATUS = StringObj()
IS_UPGRADING = False
+
@getrequest.route('/api/upgrade')
def upgrade(_webserver):
global STATUS
@@ -45,9 +47,9 @@ def upgrade(_webserver):
v = Updater(_webserver.plugins)
try:
if 'id' in _webserver.query_data:
- if _webserver.query_data['id'] != utils.CABERNET_NAMESPACE:
- _webserver.do_mime_response(501, 'text/html',
- web_templates['htmlError'].format('501 - Invalid ID'))
+ if _webserver.query_data['id'] != utils.CABERNET_ID:
+ _webserver.do_mime_response(501, 'text/html',
+ web_templates['htmlError'].format('501 - Invalid ID'))
return
if not IS_UPGRADING:
IS_UPGRADING = True
@@ -59,10 +61,10 @@ def upgrade(_webserver):
return
else:
_webserver.do_mime_response(501, 'text/html',
- web_templates['htmlError'].format('404 - Unknown action'))
+ web_templates['htmlError'].format('404 - Unknown action'))
except KeyError:
- _webserver.do_mime_response(501, 'text/html',
- web_templates['htmlError'].format('501 - Badly formed request'))
+ _webserver.do_mime_response(501, 'text/html',
+ web_templates['htmlError'].format('501 - Badly formed request'))
def check_for_updates(plugins):
@@ -91,23 +93,27 @@ def scheduler_tasks(self):
'internal',
None,
'lib.updater.updater.check_for_updates',
- 20,
- 'thread',
+ 99,
+ 'inline',
'Checks cabernet and all plugins for updated versions'
- ):
+ ):
scheduler_db.save_trigger(
'Applications',
'Check for Updates',
'interval',
interval=2850,
randdur=60
- )
+ )
scheduler_db.save_trigger(
'Applications',
'Check for Updates',
'startup')
def update_version_info(self):
+ self.logger.info('Updating Repo Cabernet-Repository versions')
+ self.repos = RepoHandler(self.config_obj)
+ self.repos.load_cabernet_repo()
+ self.logger.info('Updating Cabernet versions')
c = CabernetUpgrade(self.plugins)
c.update_version_info()
@@ -115,7 +121,7 @@ def import_manifest(self):
"""
Loads the manifest for cabernet from a file
"""
- json_settings = importlib.resources.read_text(self.config['paths']['resources_pkg'], MANIFEST_FILE)
+ json_settings = importlib.resources.read_text(self.config['paths']['resources_pkg'], utils.CABERNET_REPO)
settings = json.loads(json_settings)
return settings
@@ -123,14 +129,14 @@ def load_manifest(self, _manifest):
"""
Loads the cabernet manifest from DB
"""
- return self.plugin_db.get_plugins(_manifest)[0]
+ return self.plugin_db.get_plugins(_installed=True, _namespace=_manifest)[0]
def save_manifest(self, _manifest):
"""
Saves to DB the manifest for cabernet
"""
self.plugin_db.save_plugin(_manifest)
-
+
def upgrade_app(self, _id):
"""
Initial request to perform an upgrade
@@ -140,6 +146,7 @@ def upgrade_app(self, _id):
STATUS.data = 'Starting upgrade... \r\n'
+ # upgrade the main cabernet app
app = CabernetUpgrade(self.plugins)
if not app.upgrade_app(STATUS):
STATUS.data += ''
@@ -147,15 +154,21 @@ def upgrade_app(self, _id):
IS_UPGRADING = False
return
- # what do we do with plugins? They go here if necessary
- STATUS.data += '(TBD) Upgrading plugins... \r\n'
+ # upgrade the installed external plugins
+ p = PluginsUpgrade(self.plugins)
+ if not p.upgrade_plugins(STATUS):
+ STATUS.data += ''
+ time.sleep(1)
+ IS_UPGRADING = False
+ return
STATUS.data += 'Entering Maintenance Mode... \r\n'
+ # make sure the config_handler really has the config data uploaded
+ self.config_obj.config_handler.read(self.config_obj.data['paths']['config_file'])
self.config_obj.write('main', 'maintenance_mode', True)
STATUS.data += 'Restarting app in 3... \r\n'
self.tmp_mgmt.cleanup_tmp()
- IS_UPGRADING = False
time.sleep(0.8)
STATUS.data += '2... \r\n'
time.sleep(0.8)
@@ -163,9 +176,10 @@ def upgrade_app(self, _id):
STATUS.data += ''
time.sleep(1)
self.restart_app()
-
+ IS_UPGRADING = False
+
def restart_app(self):
# get schedDB and find restart taskid.
scheduler_db = DBScheduler(self.config)
task = scheduler_db.get_tasks('Applications', 'Restart')[0]
- self.sched_queue.put({'cmd': 'runtask', 'taskid': task['taskid'] })
+ self.sched_queue.put({'cmd': 'runtask', 'taskid': task['taskid']})
diff --git a/lib/web/htdocs/html/index.html b/lib/web/htdocs/html/index.html
index f6b7665..03dbee8 100644
--- a/lib/web/htdocs/html/index.html
+++ b/lib/web/htdocs/html/index.html
@@ -5,7 +5,6 @@
-
@@ -41,6 +40,7 @@
+
cabernet