diff --git a/.gitignore b/.gitignore
index 4427ab9b..f97a8aad 100644
--- a/.gitignore
+++ b/.gitignore
@@ -5,7 +5,10 @@
.Trashes
ehthumbs.db
Thumbs.db
-configuration.py
__pycache__
GeoLite2-City.mmdb
GeoLite2-City.tar.gz
+data/varken.ini
+.idea/
+Legacy/configuration.py
+varken-venv/
diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 00000000..d017bd15
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,73 @@
+# Change Log
+
+## [v1.0](https://github.com/Boerderij/Varken/tree/v1.0) (2018-12-09)
+[Full Changelog](https://github.com/Boerderij/Varken/compare/v0.3-nightly...v1.0)
+
+**Implemented enhancements:**
+
+- Add cisco asa from legacy [\#44](https://github.com/Boerderij/Varken/issues/44)
+- Add server ID to ombi to differentiate [\#43](https://github.com/Boerderij/Varken/issues/43)
+
+## [v0.3-nightly](https://github.com/Boerderij/Varken/tree/v0.3-nightly) (2018-12-07)
+[Full Changelog](https://github.com/Boerderij/Varken/compare/v0.2-nightly...v0.3-nightly)
+
+**Implemented enhancements:**
+
+- Create Changelog for nightly release [\#39](https://github.com/Boerderij/Varken/issues/39)
+- Create proper logging [\#34](https://github.com/Boerderij/Varken/issues/34)
+
+**Closed issues:**
+
+- Remove "dashboard" folder and subfolders [\#42](https://github.com/Boerderij/Varken/issues/42)
+- Remove "Legacy" folder [\#41](https://github.com/Boerderij/Varken/issues/41)
+
+## [v0.2-nightly](https://github.com/Boerderij/Varken/tree/v0.2-nightly) (2018-12-06)
+[Full Changelog](https://github.com/Boerderij/Varken/compare/v0.1...v0.2-nightly)
+
+**Implemented enhancements:**
+
+- Tautulli - multiple server support? [\#25](https://github.com/Boerderij/Varken/issues/25)
+
+**Closed issues:**
+
+- Create the DB if it does not exist. [\#38](https://github.com/Boerderij/Varken/issues/38)
+- create systemd examples [\#37](https://github.com/Boerderij/Varken/issues/37)
+- Create a GeoIP db downloader and refresher [\#36](https://github.com/Boerderij/Varken/issues/36)
+- Create unique IDs for all scripts to prevent duplicate data [\#35](https://github.com/Boerderij/Varken/issues/35)
+- use a config.ini instead of command-line flags [\#33](https://github.com/Boerderij/Varken/issues/33)
+- Migrate crontab to python schedule package [\#31](https://github.com/Boerderij/Varken/issues/31)
+- Consolidate missing and missing\_days in sonarr.py [\#30](https://github.com/Boerderij/Varken/issues/30)
+- Ombi something new \[Request\] [\#26](https://github.com/Boerderij/Varken/issues/26)
+- Support for Linux without ASA [\#21](https://github.com/Boerderij/Varken/issues/21)
+
+**Merged pull requests:**
+
+- varken to nightly [\#40](https://github.com/Boerderij/Varken/pull/40) ([DirtyCajunRice](https://github.com/DirtyCajunRice))
+
+## [v0.1](https://github.com/Boerderij/Varken/tree/v0.1) (2018-10-20)
+**Implemented enhancements:**
+
+- The address 172.17.0.1 is not in the database. [\#17](https://github.com/Boerderij/Varken/issues/17)
+- Local streams aren't showing with Tautulli [\#16](https://github.com/Boerderij/Varken/issues/16)
+- Worldmap panel [\#15](https://github.com/Boerderij/Varken/issues/15)
+
+**Closed issues:**
+
+- Tautulli.py not working. [\#18](https://github.com/Boerderij/Varken/issues/18)
+- Issues with scripts [\#12](https://github.com/Boerderij/Varken/issues/12)
+- issue with new tautulli.py [\#10](https://github.com/Boerderij/Varken/issues/10)
+- ombi.py fails when attempting to update influxdb [\#9](https://github.com/Boerderij/Varken/issues/9)
+- GeoIP Going to Break July 1st [\#8](https://github.com/Boerderij/Varken/issues/8)
+- \[Request\] Documentation / How-to Guide [\#1](https://github.com/Boerderij/Varken/issues/1)
+
+**Merged pull requests:**
+
+- v0.1 [\#20](https://github.com/Boerderij/Varken/pull/20) ([samwiseg0](https://github.com/samwiseg0))
+- Added selfplug [\#19](https://github.com/Boerderij/Varken/pull/19) ([si0972](https://github.com/si0972))
+- Major rework of the scripts [\#14](https://github.com/Boerderij/Varken/pull/14) ([samwiseg0](https://github.com/samwiseg0))
+- fix worldmap after change to maxmind local db [\#11](https://github.com/Boerderij/Varken/pull/11) ([madbuda](https://github.com/madbuda))
+- Update sonarr.py [\#7](https://github.com/Boerderij/Varken/pull/7) ([ghost](https://github.com/ghost))
+- Create crontabs [\#6](https://github.com/Boerderij/Varken/pull/6) ([ghost](https://github.com/ghost))
+- update plex\_dashboard.json [\#5](https://github.com/Boerderij/Varken/pull/5) ([ghost](https://github.com/ghost))
+- Update README.md [\#4](https://github.com/Boerderij/Varken/pull/4) ([ghost](https://github.com/ghost))
+- added sickrage portion [\#3](https://github.com/Boerderij/Varken/pull/3) ([ghost](https://github.com/ghost))
diff --git a/README.md b/README.md
index 94aebb89..eb16bc37 100644
--- a/README.md
+++ b/README.md
@@ -1,115 +1,48 @@
-# Grafana Scripts
-Repo for api scripts written (both pushing and pulling) to aggregate data into influxdb for grafana
+# Varken
+[![Discord](https://img.shields.io/badge/Discord-Varken-7289DA.svg?logo=discord&style=flat-square)](https://discord.gg/AGTG44H)
+[![BuyMeACoffee](https://img.shields.io/badge/BuyMeACoffee-Donate-ff813f.svg?logo=CoffeeScript&style=flat-square)](https://www.buymeacoffee.com/varken)
+[![Docker Pulls](https://img.shields.io/docker/pulls/boerderij/varken.svg?style=flat-square)](https://hub.docker.com/r/boerderij/varken/)
-Requirements /w install links: [Grafana](http://docs.grafana.org/installation/), [Python3](https://www.python.org/downloads/), [InfluxDB](https://docs.influxdata.com/influxdb/v1.5/introduction/installation/)
+Dutch for PIG. PIG is an acronym for Plex/InfluxDB/Grafana.
-
+Varken is a standalone command-line utility that aggregates data
+from the Plex ecosystem into InfluxDB. The examples use Grafana as a
+frontend.
-## Quick Setup
-1. Install requirements `pip3 install -r requirements.txt`
-1. Make a copy of `configuration.example.py` to `configuration.py`
-2. Make the appropriate changes to `configuration.py`
-1. Create your plex database in influx
- ```sh
- user@server: ~$ influx
- > CREATE DATABASE plex
- > quit
- ```
-1. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb. At a minimum, you will need the plex database.
-1. Install `grafana-cli plugins install grafana-worldmap-panel`
-1. Click the + on your menu and click import. Using the .json provided in this repo, paste it in and customize as you like.
+Requirements:
+* Python3.6+
+* Python3-pip
+
+
+
+## Quick Setup
+1. Clone the repository `sudo git clone https://github.com/Boerderij/Varken.git /opt/Varken`
+1. Follow the systemd install instructions located in `varken.systemd`
+1. Create venv in project `cd /opt/Varken && /usr/bin/python3 -m venv varken-venv`
+1. Install requirements `/opt/Varken/varken-venv/bin/python -m pip install -r requirements.txt`
+1. Make a copy of `varken.example.ini` to `varken.ini` in the `data` folder
+ `cp /opt/Varken/data/varken.example.ini /opt/Varken/data/varken.ini`
+1. Make the appropriate changes to `varken.ini`
+ ie.`nano /opt/Varken/data/varken.ini`
+1. Make sure all the files have the appropriate permissions `sudo chown varken:varken -R /opt/Varken`
+1. After completing the [getting started](http://docs.grafana.org/guides/getting_started/) portion of grafana, create your datasource for influxdb.
+1. Install `grafana-cli plugins install grafana-worldmap-panel`
### Docker
-Repo is included in [si0972/grafana-scripts](https://github.com/si0972/grafana-scripts-docker)
+Repo is included in [Boerderij/docker-Varken](https://github.com/Boerderij/docker-Varken)
Example
```
-docker create \
- --name=grafana-scripts \
- -v :/Scripts \
- -e plex=true \
+docker run -d \
+ --name=varken \
+ -v :/config \
-e PGID= -e PUID= \
- si0972/grafana-scripts:latest
+ boerderij/varken:nightly
```
-
-
-
-
-## Scripts
-### `sonarr.py`
-Gathers data from Sonarr and pushes it to influxdb.
-
-```
-Script to aid in data gathering from Sonarr
-
-optional arguments:
- -h, --help show this help message and exit
- --missing Get all missing TV shows
- --missing_days MISSING_DAYS
- Get missing TV shows in past X days
- --upcoming Get upcoming TV shows
- --future FUTURE Get TV shows on X days into the future. Includes today.
- i.e. --future 2 is Today and Tomorrow
- --queue Get TV shows in queue
-```
-- Notes:
- - You cannot stack the arguments. ie. `sonarr.py --missing --queue`
- - One argument must be supplied
-
-### `radarr.py`
-Gathers data from Radarr and pushes it to influxdb
-
-```
-Script to aid in data gathering from Radarr
-
-optional arguments:
- -h, --help show this help message and exit
- --missing Get missing movies
- --missing_avl Get missing available movies
- --queue Get movies in queue
-```
-- Notes:
- - You cannot stack the arguments. ie. `radarr.py --missing --queue`
- - One argument must be supplied
- - `--missing_avl` Refers to how Radarr has determined if the movie should be available to download. The easy way to determine if the movie will appear on this list is if the movie has a RED "Missing" tag associated with that movie. BLUE "Missing" tag refers to a movie that is missing but is not available for download yet. These tags are determined by your "Minimum Availability" settings for that movie.
-
-### `ombi.py`
-Gathers data from Ombi and pushes it to influxdb
-
-```
-Script to aid in data gathering from Ombi
-
-optional arguments:
- -h, --help show this help message and exit
- --total Get the total count of all requests
- --counts Get the count of pending, approved, and available requests
-```
-- Notes:
- - You cannot stack the arguments. ie. `ombi.py --total --counts`
- - One argument must be supplied
-
-### `tautulli.py`
-Gathers data from Tautulli and pushes it to influxdb. On initial run it will download the geoip2 DB and use it for locations.
-
-## Notes
-To run the python scripts crontab is currently leveraged. Examples:
-```sh
-### Modify paths as appropriate. python3 is located in different places for different users. (`which python3` will give you the path)
-### to edit your crontab entry, do not modify /var/spool/cron/crontabs/ directly, use `crontab -e`
-### Crontabs require an empty line at the end or they WILL not run. Make sure to have 2 lines to be safe
-### It is bad practice to run any cronjob more than once a minute. For timing help: https://crontab.guru/
-* * * * * /usr/bin/python3 /path-to-grafana-scripts/ombi.py --total
-* * * * * /usr/bin/python3 /path-to-grafana-scripts/tautulli.py
-* * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py --queue
-* * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py --queue
-*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py --missing
-*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py --missing
-*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sickrage.py
-```
diff --git a/Varken.py b/Varken.py
new file mode 100644
index 00000000..6fe4ec37
--- /dev/null
+++ b/Varken.py
@@ -0,0 +1,116 @@
+import sys
+
+# Check for python3.6 or newer to resolve erroneous typing.NamedTuple issues
+if sys.version_info < (3, 6):
+ exit('Varken requires python3.6 or newer')
+
+import schedule
+import threading
+import platform
+import distro
+
+from sys import exit
+from time import sleep
+from os import access, R_OK
+from os.path import isdir, abspath, dirname, join
+from argparse import ArgumentParser, RawTextHelpFormatter
+
+from varken.iniparser import INIParser
+from varken.sonarr import SonarrAPI
+from varken.tautulli import TautulliAPI
+from varken.radarr import RadarrAPI
+from varken.ombi import OmbiAPI
+from varken.cisco import CiscoAPI
+from varken.dbmanager import DBManager
+from varken.varkenlogger import VarkenLogger
+
+PLATFORM_LINUX_DISTRO = ' '.join(x for x in distro.linux_distribution() if x)
+
+
+def threaded(job):
+ thread = threading.Thread(target=job)
+ thread.start()
+
+
+if __name__ == "__main__":
+ parser = ArgumentParser(prog='varken',
+ description='Command-line utility to aggregate data from the plex ecosystem into InfluxDB',
+ formatter_class=RawTextHelpFormatter)
+
+ parser.add_argument("-d", "--data-folder", help='Define an alternate data folder location')
+ parser.add_argument("-D", "--debug", action='store_true', help='Use to enable DEBUG logging')
+
+ opts = parser.parse_args()
+
+ DATA_FOLDER = abspath(join(dirname(__file__), 'data'))
+
+ if opts.data_folder:
+ ARG_FOLDER = opts.data_folder
+
+ if isdir(ARG_FOLDER):
+ DATA_FOLDER = ARG_FOLDER
+ if not access(ARG_FOLDER, R_OK):
+ exit("Read permission error for {}".format(ARG_FOLDER))
+ else:
+ exit("{} does not exist".format(ARG_FOLDER))
+
+ # Initiate the logger
+ vl = VarkenLogger(data_folder=DATA_FOLDER, debug=opts.debug)
+ vl.logger.info('Starting Varken...')
+
+ vl.logger.info(u"{} {} ({}{})".format(
+ platform.system(), platform.release(), platform.version(),
+ ' - {}'.format(PLATFORM_LINUX_DISTRO) if PLATFORM_LINUX_DISTRO else ''
+ ))
+ vl.logger.info(u"Python {}".format(sys.version))
+
+ CONFIG = INIParser(DATA_FOLDER)
+ DBMANAGER = DBManager(CONFIG.influx_server)
+
+ if CONFIG.sonarr_enabled:
+ for server in CONFIG.sonarr_servers:
+ SONARR = SonarrAPI(server, DBMANAGER)
+ if server.queue:
+ schedule.every(server.queue_run_seconds).seconds.do(threaded, SONARR.get_queue)
+ if server.missing_days > 0:
+ schedule.every(server.missing_days_run_seconds).seconds.do(threaded, SONARR.get_missing)
+ if server.future_days > 0:
+ schedule.every(server.future_days_run_seconds).seconds.do(threaded, SONARR.get_future)
+
+ if CONFIG.tautulli_enabled:
+ for server in CONFIG.tautulli_servers:
+ TAUTULLI = TautulliAPI(server, DBMANAGER)
+ if server.get_activity:
+ schedule.every(server.get_activity_run_seconds).seconds.do(threaded, TAUTULLI.get_activity)
+
+ if CONFIG.radarr_enabled:
+ for server in CONFIG.radarr_servers:
+ RADARR = RadarrAPI(server, DBMANAGER)
+ if server.get_missing:
+ schedule.every(server.get_missing_run_seconds).seconds.do(threaded, RADARR.get_missing)
+ if server.queue:
+ schedule.every(server.queue_run_seconds).seconds.do(threaded, RADARR.get_queue)
+
+ if CONFIG.ombi_enabled:
+ for server in CONFIG.ombi_servers:
+ OMBI = OmbiAPI(server, DBMANAGER)
+ if server.request_type_counts:
+ schedule.every(server.request_type_run_seconds).seconds.do(threaded, OMBI.get_request_counts)
+ if server.request_total_counts:
+ schedule.every(server.request_total_run_seconds).seconds.do(threaded, OMBI.get_total_requests)
+
+ if CONFIG.ciscoasa_enabled:
+ for firewall in CONFIG.ciscoasa_firewalls:
+ ASA = CiscoAPI(firewall, DBMANAGER)
+ schedule.every(firewall.get_bandwidth_run_seconds).seconds.do(threaded, ASA.get_bandwidth)
+
+ # Run all on startup
+ SERVICES_ENABLED = [CONFIG.ombi_enabled, CONFIG.radarr_enabled, CONFIG.tautulli_enabled,
+ CONFIG.sonarr_enabled, CONFIG.ciscoasa_enabled]
+ if not [enabled for enabled in SERVICES_ENABLED if enabled]:
+ exit("All services disabled. Exiting")
+ schedule.run_all()
+
+ while True:
+ schedule.run_pending()
+ sleep(1)
diff --git a/cisco_asa.py b/cisco_asa.py
deleted file mode 100644
index 26f439a9..00000000
--- a/cisco_asa.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Do not edit this script. Edit configuration.py
-import requests
-from datetime import datetime, timezone
-from influxdb import InfluxDBClient
-
-import configuration
-
-current_time = datetime.now(timezone.utc).astimezone().isoformat()
-
-stats = {
- 'token': requests.post('{}/api/tokenservices'.format(configuration.asa_url),
- auth=(configuration.asa_username, configuration.asa_password), verify=False)
-}
-stats['headers'] = {'X-Auth-Token': stats['token'].headers['X-Auth-Token']}
-stats['outside_interface'] = requests.get('{}/api/monitoring/device/interfaces/Outside'.format(configuration.asa_url),
- headers=stats['headers'], verify=False).json()
-
-influx_payload = [
- {
- "measurement": "bandwidth",
- "tags": {
- "interface": "outside"
- },
- "time": current_time,
- "fields": {
- "upload_bitrate": stats['outside_interface']['outputBitRate'],
- "download_bitrate": stats['outside_interface']['inputBitRate']
- }
- }
-]
-
-influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
- configuration.influxdb_password, configuration.asa_influxdb_db_name)
-influx.write_points(influx_payload)
diff --git a/configuration.example.py b/configuration.example.py
deleted file mode 100644
index a0df50a2..00000000
--- a/configuration.example.py
+++ /dev/null
@@ -1,49 +0,0 @@
-'''
-Notes:
- - Domains should be either http(s)://subdomain.domain.com or http(s)://domain.com/url_suffix
-
- - Sonarr + Radarr scripts support multiple servers. You can remove the second
- server by putting a # in front of the line.
-
- - tautulli_failback_ip, This is used when there is no IP listed in tautulli.
- This can happen when you are streaming locally. This is usually your public IP.
-'''
-
-########################### INFLUXDB CONFIG ###########################
-influxdb_url = 'influxdb.domain.tld'
-influxdb_port = 8086
-influxdb_username = ''
-influxdb_password = ''
-
-############################ SONARR CONFIG ############################
-sonarr_server_list = [
- ('https://sonarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'),
- ('https://sonarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'),
- #('https://sonarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3')
- ]
-sonarr_influxdb_db_name = 'plex'
-
-############################ RADARR CONFIG ############################
-radarr_server_list = [
- ('https://radarr1.domain.tld', 'xxxxxxxxxxxxxxx', '1'),
- ('https://radarr2.domain.tld', 'xxxxxxxxxxxxxxx', '2'),
- #('https://radarr3.domain.tld', 'xxxxxxxxxxxxxxx', '3')
- ]
-radarr_influxdb_db_name = 'plex'
-
-############################ OMBI CONFIG ##############################
-ombi_url = 'https://ombi.domain.tld'
-ombi_api_key = 'xxxxxxxxxxxxxxx'
-ombi_influxdb_db_name = 'plex'
-
-########################## TAUTULLI CONFIG ############################
-tautulli_url = 'https://tautulli.domain.tld'
-tautulli_api_key = 'xxxxxxxxxxxxxxx'
-tautulli_failback_ip = ''
-tautulli_influxdb_db_name = 'plex'
-
-########################## FIREWALL CONFIG ############################
-asa_url = 'https://firewall.domain.tld'
-asa_username = 'cisco'
-asa_password = 'cisco'
-asa_influxdb_db_name = 'asa'
diff --git a/crontabs b/crontabs
deleted file mode 100644
index 413be4ec..00000000
--- a/crontabs
+++ /dev/null
@@ -1,11 +0,0 @@
-### Modify paths as appropriate. python3 is located in different places for different users. (`which python3` will give you the path)
-### to edit your crontab entry, do not modify /var/spool/cron/crontabs/ directly, use `crontab -e`
-### Crontabs require an empty line at the end or they WILL not run. Make sure to have 2 lines to be safe
-###
-* * * * * /usr/bin/python3 /path-to-grafana-scripts/ombi.py
-* * * * * ( sleep 30 ; /usr/bin/python3 /path-to-grafana-scripts/ombi.py )
-* * * * * /usr/bin/python3 /path-to-grafana-scripts/taurulli.py
-* * * * * ( sleep 30 ; /usr/bin/python3 /path-to-grafana-scripts/tautulli.py )
-*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/radarr.py
-*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sonarr.py
-#*/30 * * * * /usr/bin/python3 /path-to-grafana-scripts/sickrage.py
diff --git a/dashboard/beta_online_users_table.json b/dashboard/beta_online_users_table.json
deleted file mode 100644
index 09bc070c..00000000
--- a/dashboard/beta_online_users_table.json
+++ /dev/null
@@ -1,316 +0,0 @@
-{
- "columns": [],
- "datasource": "plex",
- "fontSize": "100%",
- "gridPos": {
- "h": 13,
- "w": 16,
- "x": 0,
- "y": 16
- },
- "hideTimeOverride": true,
- "id": 9,
- "interval": "",
- "links": [
- {
- "targetBlank": true,
- "title": "Tautulli",
- "type": "absolute",
- "url": "https://tautulli.domain.tld"
- }
- ],
- "minSpan": 12,
- "pageSize": 14,
- "scroll": true,
- "showHeader": true,
- "sort": {
- "col": 9,
- "desc": true
- },
- "styles": [
- {
- "alias": "",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "MM/DD/YY h:mm:ss a",
- "decimals": 2,
- "link": false,
- "pattern": "Time",
- "thresholds": [],
- "type": "hidden",
- "unit": "short"
- },
- {
- "alias": "User",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "name",
- "preserveFormat": false,
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Media",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "title",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Decision",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "video_decision",
- "preserveFormat": false,
- "sanitize": false,
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Quality",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "quality",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Limits",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "quality_profile",
- "preserveFormat": true,
- "sanitize": false,
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Version",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 0,
- "pattern": "product_version",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Device",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "platform",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "mappingType": 1,
- "pattern": "distinct",
- "thresholds": [],
- "type": "hidden",
- "unit": "short"
- },
- {
- "alias": "Location",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "mappingType": 1,
- "pattern": "location",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Player State",
- "colorMode": "row",
- "colors": [
- "rgba(50, 172, 45, 0.3)",
- "rgba(14, 221, 229, 0.56)",
- "rgba(214, 103, 28, 0.8)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 0,
- "link": false,
- "linkTargetBlank": false,
- "linkTooltip": "",
- "linkUrl": "",
- "mappingType": 1,
- "pattern": "player_state",
- "thresholds": [
- "1",
- "3"
- ],
- "type": "string",
- "unit": "none",
- "valueMaps": [
- {
- "text": "Playing",
- "value": "0"
- },
- {
- "text": "Paused",
- "value": "1"
- },
- {
- "text": "Buffering",
- "value": "3"
- }
- ]
- }
- ],
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "name"
- ],
- "type": "tag"
- },
- {
- "params": [
- "title"
- ],
- "type": "tag"
- },
- {
- "params": [
- "quality"
- ],
- "type": "tag"
- },
- {
- "params": [
- "video_decision"
- ],
- "type": "tag"
- },
- {
- "params": [
- "quality_profile"
- ],
- "type": "tag"
- },
- {
- "params": [
- "platform"
- ],
- "type": "tag"
- },
- {
- "params": [
- "product_version"
- ],
- "type": "tag"
- },
- {
- "params": [
- "location"
- ],
- "type": "tag"
- }
- ],
- "hide": false,
- "limit": "",
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "query": "SELECT distinct(\"session_key\") FROM \"Tautulli\" WHERE (\"type\" = 'Session') AND $timeFilter GROUP BY \"name\", \"title\", \"quality\", \"video_decision\", \"quality_profile\", \"platform\", \"product_version\", \"location\", \"player_state\"",
- "rawQuery": true,
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "session_key"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "distinct"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Session"
- }
- ]
- }
- ],
- "timeFrom": "1m",
- "title": "Users Online",
- "transform": "table",
- "type": "table"
-}
diff --git a/dashboard/panel_row_worldmap.json b/dashboard/panel_row_worldmap.json
deleted file mode 100644
index 63b381d4..00000000
--- a/dashboard/panel_row_worldmap.json
+++ /dev/null
@@ -1,121 +0,0 @@
-{
- "circleMaxSize": "2",
- "circleMinSize": "2",
- "colors": [
- "#e67817",
- "#6d3c97",
- "#890f02"
- ],
- "datasource": "plex",
- "decimals": 0,
- "esGeoPoint": "geohash",
- "esLocationName": "location",
- "esMetric": "metric",
- "gridPos": {
- "h": 10,
- "w": 10,
- "x": 10,
- "y": 6
- },
- "hideEmpty": false,
- "hideTimeOverride": true,
- "hideZero": false,
- "id": 4,
- "initialZoom": "4",
- "interval": "",
- "links": [],
- "locationData": "table",
- "mapCenter": "custom",
- "mapCenterLatitude": "37.9",
- "mapCenterLongitude": "-94.9",
- "maxDataPoints": 1,
- "minSpan": 8,
- "mouseWheelZoom": false,
- "showLegend": false,
- "stickyLabels": false,
- "tableQueryOptions": {
- "geohashField": "geohash",
- "labelField": "full_location",
- "latitudeField": "latitude",
- "longitudeField": "longitude",
- "metricField": "metric",
- "queryType": "coordinates"
- },
- "targets": [
- {
- "alias": "$tag_region_code",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "latitude"
- ],
- "type": "tag"
- },
- {
- "params": [
- "longitude"
- ],
- "type": "tag"
- },
- {
- "params": [
- "full_location"
- ],
- "type": "tag"
- },
- {
- "params": [
- "name"
- ],
- "type": "tag"
- }
- ],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "session_key"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "distinct"
- },
- {
- "params": [],
- "type": "count"
- },
- {
- "params": [
- "metric"
- ],
- "type": "alias"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Session"
- }
- ]
- }
- ],
- "thresholds": "2,3",
- "timeFrom": "1m",
- "timeShift": null,
- "title": "",
- "type": "grafana-worldmap-panel",
- "unitPlural": "Streams",
- "unitSingle": "",
- "unitSingular": "Stream",
- "valueName": "current"
-}
diff --git a/dashboard/panel_us_only_worldmap.json b/dashboard/panel_us_only_worldmap.json
deleted file mode 100644
index e2984c4b..00000000
--- a/dashboard/panel_us_only_worldmap.json
+++ /dev/null
@@ -1,95 +0,0 @@
-{
- "circleMaxSize": 30,
- "circleMinSize": "1",
- "colors": [
- "#cca300",
- "#c15c17",
- "#890f02"
- ],
- "datasource": "plex",
- "decimals": 0,
- "esLocationName": "",
- "esMetric": "$tag_counter",
- "hideEmpty": false,
- "hideZero": false,
- "id": 4,
- "initialZoom": "4",
- "links": [],
- "locationData": "states",
- "mapCenter": "custom",
- "mapCenterLatitude": "39.8283",
- "mapCenterLongitude": "-98.5795",
- "maxDataPoints": 1,
- "minSpan": 8,
- "showLegend": false,
- "stickyLabels": false,
- "targets": [
- {
- "alias": "$tag_region_code",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "region_code"
- ],
- "type": "tag"
- }
- ],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "location"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "count"
- },
- {
- "params": [
- "metric"
- ],
- "type": "alias"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Session"
- }
- ]
- }
- ],
- "thresholds": "5,10",
- "timeFrom": "1m",
- "title": "",
- "type": "grafana-worldmap-panel",
- "unitPlural": "",
- "unitSingle": "",
- "unitSingular": "",
- "valueName": "current",
- "gridPos": {
- "x": 16,
- "y": 0,
- "w": 8,
- "h": 8
- },
- "mouseWheelZoom": false,
- "tableQueryOptions": {
- "queryType": "coordinates",
- "geohashField": "geohash",
- "latitudeField": "latitude",
- "longitudeField": "longitude",
- "metricField": "metric",
- "labelField": "location"
- }
-}
diff --git a/dashboard/plex_row_dashboard.json b/dashboard/plex_row_dashboard.json
deleted file mode 100644
index 00588cb2..00000000
--- a/dashboard/plex_row_dashboard.json
+++ /dev/null
@@ -1,1363 +0,0 @@
-{
- "__inputs": [
- {
- "name": "DS_FIREWALL",
- "label": "firewall",
- "description": "",
- "type": "datasource",
- "pluginId": "influxdb",
- "pluginName": "InfluxDB"
- },
- {
- "name": "DS_PLEX",
- "label": "plex",
- "description": "",
- "type": "datasource",
- "pluginId": "influxdb",
- "pluginName": "InfluxDB"
- },
- {
- "name": "DS_STORAGE_SERVER",
- "label": "storage_server",
- "description": "",
- "type": "datasource",
- "pluginId": "influxdb",
- "pluginName": "InfluxDB"
- }
- ],
- "__requires": [
- {
- "type": "grafana",
- "id": "grafana",
- "name": "Grafana",
- "version": "5.2.2"
- },
- {
- "type": "panel",
- "id": "grafana-worldmap-panel",
- "name": "Worldmap Panel",
- "version": "0.1.2"
- },
- {
- "type": "panel",
- "id": "graph",
- "name": "Graph",
- "version": "5.0.0"
- },
- {
- "type": "datasource",
- "id": "influxdb",
- "name": "InfluxDB",
- "version": "5.0.0"
- },
- {
- "type": "panel",
- "id": "singlestat",
- "name": "Singlestat",
- "version": "5.0.0"
- },
- {
- "type": "panel",
- "id": "table",
- "name": "Table",
- "version": "5.0.0"
- }
- ],
- "annotations": {
- "list": [
- {
- "builtIn": 1,
- "datasource": "-- Grafana --",
- "enable": true,
- "hide": true,
- "iconColor": "rgba(0, 211, 255, 1)",
- "name": "Annotations & Alerts",
- "type": "dashboard"
- }
- ]
- },
- "editable": true,
- "gnetId": null,
- "graphTooltip": 0,
- "id": null,
- "links": [],
- "panels": [
- {
- "aliasColors": {},
- "bars": false,
- "dashLength": 10,
- "dashes": false,
- "datasource": "${DS_FIREWALL}",
- "fill": 1,
- "gridPos": {
- "h": 8,
- "w": 16,
- "x": 0,
- "y": 0
- },
- "id": 1,
- "legend": {
- "avg": false,
- "current": true,
- "max": false,
- "min": false,
- "rightSide": false,
- "show": false,
- "total": false,
- "values": true
- },
- "lines": true,
- "linewidth": 1,
- "links": [],
- "minSpan": 16,
- "nullPointMode": "null",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [
- {
- "alias": "Download",
- "transform": "negative-Y"
- }
- ],
- "spaceLength": 10,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "alias": "Download",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "previous"
- ],
- "type": "fill"
- }
- ],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "download_bitrate"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "last"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- },
- {
- "alias": "Upload",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "interface"
- ],
- "type": "tag"
- },
- {
- "params": [
- "previous"
- ],
- "type": "fill"
- }
- ],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "B",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "upload_bitrate"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "last"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- }
- ],
- "thresholds": [],
- "timeFrom": null,
- "timeShift": null,
- "title": "Bandwidth",
- "tooltip": {
- "shared": true,
- "sort": 0,
- "value_type": "individual"
- },
- "type": "graph",
- "xaxis": {
- "buckets": null,
- "mode": "time",
- "name": null,
- "show": true,
- "values": []
- },
- "yaxes": [
- {
- "format": "bps",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ],
- "yaxis": {
- "align": false,
- "alignLevel": null
- }
- },
- {
- "circleMaxSize": "5",
- "circleMinSize": "1",
- "colors": [
- "#cca300",
- "#c15c17",
- "#890f02"
- ],
- "datasource": "${DS_PLEX}",
- "decimals": 0,
- "esLocationName": "",
- "esMetric": "$tag_counter",
- "gridPos": {
- "h": 8,
- "w": 8,
- "x": 16,
- "y": 0
- },
- "hideEmpty": false,
- "hideZero": false,
- "id": 4,
- "initialZoom": "4",
- "links": [],
- "locationData": "table",
- "mapCenter": "custom",
- "mapCenterLatitude": "39.8283",
- "mapCenterLongitude": "-98.5795",
- "maxDataPoints": 1,
- "minSpan": 8,
- "mouseWheelZoom": false,
- "showLegend": false,
- "stickyLabels": false,
- "tableQueryOptions": {
- "geohashField": "geohash",
- "labelField": "location",
- "latitudeField": "latitude",
- "longitudeField": "longitude",
- "metricField": "metric",
- "queryType": "coordinates"
- },
- "targets": [
- {
- "alias": "$tag_region_code",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "location"
- ],
- "type": "tag"
- },
- {
- "params": [
- "latitude"
- ],
- "type": "tag"
- },
- {
- "params": [
- "longitude"
- ],
- "type": "tag"
- }
- ],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "location"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "count"
- },
- {
- "params": [
- "metric"
- ],
- "type": "alias"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Session"
- }
- ]
- }
- ],
- "thresholds": "5,10",
- "timeFrom": "1m",
- "title": "",
- "type": "grafana-worldmap-panel",
- "unitPlural": "",
- "unitSingle": "",
- "unitSingular": "",
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_FIREWALL}",
- "decimals": 0,
- "format": "bps",
- "gauge": {
- "maxValue": 800000000,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 0,
- "y": 8
- },
- "id": 2,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "upload_bitrate"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- }
- ],
- "thresholds": "300000000,700000000",
- "title": "Upload",
- "type": "singlestat",
- "valueFontSize": "50%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_FIREWALL}",
- "decimals": 0,
- "format": "bps",
- "gauge": {
- "maxValue": 800000000,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 4,
- "y": 8
- },
- "id": 3,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "download_bitrate"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- }
- ],
- "thresholds": "300000000,700000000",
- "title": "Download",
- "type": "singlestat",
- "valueFontSize": "50%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_PLEX}",
- "format": "percent",
- "gauge": {
- "maxValue": 100,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 8,
- "y": 8
- },
- "id": 5,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "plex",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "CPU Utilization"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "server",
- "operator": "=",
- "value": "Plex"
- }
- ]
- }
- ],
- "thresholds": "50,80",
- "title": "Plex CPU Load",
- "type": "singlestat",
- "valueFontSize": "100%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "avg"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_PLEX}",
- "format": "none",
- "gauge": {
- "maxValue": 30,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 12,
- "y": 8
- },
- "id": 6,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "current_streams"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "stream_count"
- }
- ]
- }
- ],
- "thresholds": "10,20",
- "title": "Plex Current Streams",
- "type": "singlestat",
- "valueFontSize": "120%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_STORAGE_SERVER}",
- "format": "percent",
- "gauge": {
- "maxValue": 30,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 16,
- "y": 8
- },
- "id": 7,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Storage Servers",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "IO_Wait"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "server",
- "operator": "=",
- "value": "SAN3"
- }
- ]
- }
- ],
- "thresholds": "5,15",
- "title": "SAN IO_Wait",
- "type": "singlestat",
- "valueFontSize": "80%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_PLEX}",
- "format": "none",
- "gauge": {
- "maxValue": 20,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 20,
- "y": 8
- },
- "id": 8,
- "interval": null,
- "links": [
- {
- "targetBlank": true,
- "title": "Ombi",
- "type": "absolute",
- "url": "https://ombi.domain.tld/requests"
- }
- ],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Ombi",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "total"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Requests"
- }
- ]
- }
- ],
- "thresholds": "1,10",
- "title": "TV / Movie Requests in Queue",
- "type": "singlestat",
- "valueFontSize": "150%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "columns": [],
- "datasource": "${DS_PLEX}",
- "fontSize": "100%",
- "gridPos": {
- "h": 10,
- "w": 12,
- "x": 0,
- "y": 13
- },
- "hideTimeOverride": true,
- "id": 9,
- "links": [
- {
- "targetBlank": true,
- "title": "Tautulli",
- "type": "absolute",
- "url": "https://tautulli.domain.tld/home"
- }
- ],
- "minSpan": 12,
- "pageSize": 8,
- "scroll": true,
- "showHeader": true,
- "sort": {
- "col": 0,
- "desc": true
- },
- "styles": [
- {
- "alias": "",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "MM/DD/YY h:mm:ss a",
- "decimals": 2,
- "link": false,
- "pattern": "Time",
- "thresholds": [],
- "type": "hidden",
- "unit": "short"
- },
- {
- "alias": "User",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "name",
- "preserveFormat": false,
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Movie / TV Show",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "title",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Transcode Decision",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "transcode_decision",
- "preserveFormat": false,
- "sanitize": false,
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Quality",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "quality",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- }
- ],
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "name"
- ],
- "type": "field"
- }
- ],
- [
- {
- "params": [
- "title"
- ],
- "type": "field"
- }
- ],
- [
- {
- "params": [
- "quality"
- ],
- "type": "field"
- }
- ],
- [
- {
- "params": [
- "transcode_decision"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Session"
- }
- ]
- }
- ],
- "timeFrom": "1m",
- "title": "Users Online",
- "transform": "table",
- "type": "table"
- },
- {
- "columns": [],
- "datasource": "${DS_PLEX}",
- "fontSize": "100%",
- "gridPos": {
- "h": 10,
- "w": 6,
- "x": 12,
- "y": 13
- },
- "hideTimeOverride": true,
- "id": 10,
- "links": [
- {
- "targetBlank": true,
- "title": "Sonarr",
- "type": "absolute",
- "url": "https://sonarr.domain.tld/wanted/missing"
- }
- ],
- "minSpan": 8,
- "pageSize": 8,
- "scroll": true,
- "showHeader": true,
- "sort": {
- "col": 0,
- "desc": true
- },
- "styles": [
- {
- "alias": "Time",
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "pattern": "Time",
- "type": "hidden"
- },
- {
- "alias": "Name",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "name",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- }
- ],
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Sonarr",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "name"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Missing"
- }
- ]
- }
- ],
- "timeFrom": "30m",
- "title": "Missing TV Shows",
- "transform": "table",
- "type": "table"
- },
- {
- "columns": [],
- "datasource": "${DS_PLEX}",
- "fontSize": "100%",
- "gridPos": {
- "h": 10,
- "w": 6,
- "x": 18,
- "y": 13
- },
- "hideTimeOverride": true,
- "id": 11,
- "links": [
- {
- "targetBlank": true,
- "title": "Radarr",
- "type": "absolute",
- "url": "https://radarr.domain.tld/wanted/missing"
- }
- ],
- "minSpan": 6,
- "pageSize": 8,
- "scroll": true,
- "showHeader": true,
- "sort": {
- "col": 0,
- "desc": true
- },
- "styles": [
- {
- "alias": "",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "Time",
- "thresholds": [],
- "type": "hidden",
- "unit": "short"
- },
- {
- "alias": "Name",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "name",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- }
- ],
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Radarr",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "name"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Missing"
- }
- ]
- }
- ],
- "timeFrom": "31m",
- "title": "Missing Movies",
- "transform": "table",
- "type": "table"
- }
- ],
- "refresh": "30s",
- "schemaVersion": 16,
- "style": "dark",
- "tags": [],
- "templating": {
- "list": []
- },
- "time": {
- "from": "now-6h",
- "to": "now"
- },
- "timepicker": {
- "refresh_intervals": [
- "5s",
- "10s",
- "30s",
- "1m",
- "5m",
- "15m",
- "30m",
- "1h",
- "2h",
- "1d"
- ],
- "time_options": [
- "5m",
- "15m",
- "1h",
- "6h",
- "12h",
- "24h",
- "2d",
- "7d",
- "30d"
- ]
- },
- "timezone": "browser",
- "title": "Plex",
- "uid": "iTbnha5mk",
- "version": 1
-}
diff --git a/dashboard/plex_us_only_dashboard.json b/dashboard/plex_us_only_dashboard.json
deleted file mode 100644
index 9da3175f..00000000
--- a/dashboard/plex_us_only_dashboard.json
+++ /dev/null
@@ -1,1351 +0,0 @@
-{
- "__inputs": [
- {
- "name": "DS_FIREWALL",
- "label": "firewall",
- "description": "",
- "type": "datasource",
- "pluginId": "influxdb",
- "pluginName": "InfluxDB"
- },
- {
- "name": "DS_PLEX",
- "label": "plex",
- "description": "",
- "type": "datasource",
- "pluginId": "influxdb",
- "pluginName": "InfluxDB"
- },
- {
- "name": "DS_STORAGE_SERVER",
- "label": "storage_server",
- "description": "",
- "type": "datasource",
- "pluginId": "influxdb",
- "pluginName": "InfluxDB"
- }
- ],
- "__requires": [
- {
- "type": "grafana",
- "id": "grafana",
- "name": "Grafana",
- "version": "5.2.2"
- },
- {
- "type": "panel",
- "id": "grafana-worldmap-panel",
- "name": "Worldmap Panel",
- "version": "0.1.2"
- },
- {
- "type": "panel",
- "id": "graph",
- "name": "Graph",
- "version": "5.0.0"
- },
- {
- "type": "datasource",
- "id": "influxdb",
- "name": "InfluxDB",
- "version": "5.0.0"
- },
- {
- "type": "panel",
- "id": "singlestat",
- "name": "Singlestat",
- "version": "5.0.0"
- },
- {
- "type": "panel",
- "id": "table",
- "name": "Table",
- "version": "5.0.0"
- }
- ],
- "annotations": {
- "list": [
- {
- "builtIn": 1,
- "datasource": "-- Grafana --",
- "enable": true,
- "hide": true,
- "iconColor": "rgba(0, 211, 255, 1)",
- "name": "Annotations & Alerts",
- "type": "dashboard"
- }
- ]
- },
- "editable": true,
- "gnetId": null,
- "graphTooltip": 0,
- "id": null,
- "links": [],
- "panels": [
- {
- "aliasColors": {},
- "bars": false,
- "dashLength": 10,
- "dashes": false,
- "datasource": "${DS_FIREWALL}",
- "fill": 1,
- "gridPos": {
- "h": 8,
- "w": 16,
- "x": 0,
- "y": 0
- },
- "id": 1,
- "legend": {
- "avg": false,
- "current": true,
- "max": false,
- "min": false,
- "rightSide": false,
- "show": false,
- "total": false,
- "values": true
- },
- "lines": true,
- "linewidth": 1,
- "links": [],
- "minSpan": 16,
- "nullPointMode": "null",
- "percentage": false,
- "pointradius": 5,
- "points": false,
- "renderer": "flot",
- "seriesOverrides": [
- {
- "alias": "Download",
- "transform": "negative-Y"
- }
- ],
- "spaceLength": 10,
- "stack": false,
- "steppedLine": false,
- "targets": [
- {
- "alias": "Download",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "previous"
- ],
- "type": "fill"
- }
- ],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "download_bitrate"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "last"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- },
- {
- "alias": "Upload",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "$__interval"
- ],
- "type": "time"
- },
- {
- "params": [
- "interface"
- ],
- "type": "tag"
- },
- {
- "params": [
- "previous"
- ],
- "type": "fill"
- }
- ],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "B",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "upload_bitrate"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "last"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- }
- ],
- "thresholds": [],
- "timeFrom": null,
- "timeShift": null,
- "title": "Bandwidth",
- "tooltip": {
- "shared": true,
- "sort": 0,
- "value_type": "individual"
- },
- "type": "graph",
- "xaxis": {
- "buckets": null,
- "mode": "time",
- "name": null,
- "show": true,
- "values": []
- },
- "yaxes": [
- {
- "format": "bps",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- },
- {
- "format": "short",
- "label": null,
- "logBase": 1,
- "max": null,
- "min": null,
- "show": true
- }
- ],
- "yaxis": {
- "align": false,
- "alignLevel": null
- }
- },
- {
- "circleMaxSize": 30,
- "circleMinSize": "1",
- "colors": [
- "#cca300",
- "#c15c17",
- "#890f02"
- ],
- "datasource": "${DS_PLEX}",
- "decimals": 0,
- "esLocationName": "",
- "esMetric": "$tag_counter",
- "gridPos": {
- "h": 8,
- "w": 8,
- "x": 16,
- "y": 0
- },
- "hideEmpty": false,
- "hideZero": false,
- "id": 4,
- "initialZoom": "4",
- "links": [],
- "locationData": "states",
- "mapCenter": "custom",
- "mapCenterLatitude": "39.8283",
- "mapCenterLongitude": "-98.5795",
- "maxDataPoints": 1,
- "minSpan": 8,
- "mouseWheelZoom": false,
- "showLegend": false,
- "stickyLabels": false,
- "tableQueryOptions": {
- "geohashField": "geohash",
- "labelField": "location",
- "latitudeField": "latitude",
- "longitudeField": "longitude",
- "metricField": "metric",
- "queryType": "coordinates"
- },
- "targets": [
- {
- "alias": "$tag_region_code",
- "dsType": "influxdb",
- "groupBy": [
- {
- "params": [
- "region_code"
- ],
- "type": "tag"
- }
- ],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "location"
- ],
- "type": "field"
- },
- {
- "params": [],
- "type": "count"
- },
- {
- "params": [
- "metric"
- ],
- "type": "alias"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Session"
- }
- ]
- }
- ],
- "thresholds": "5,10",
- "timeFrom": "1m",
- "title": "",
- "type": "grafana-worldmap-panel",
- "unitPlural": "",
- "unitSingle": "",
- "unitSingular": "",
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_FIREWALL}",
- "decimals": 0,
- "format": "bps",
- "gauge": {
- "maxValue": 800000000,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 0,
- "y": 8
- },
- "id": 2,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "upload_bitrate"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- }
- ],
- "thresholds": "300000000,700000000",
- "title": "Upload",
- "type": "singlestat",
- "valueFontSize": "50%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_FIREWALL}",
- "decimals": 0,
- "format": "bps",
- "gauge": {
- "maxValue": 800000000,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 4,
- "y": 8
- },
- "id": 3,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "bandwidth",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "download_bitrate"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "interface",
- "operator": "=",
- "value": "outside"
- }
- ]
- }
- ],
- "thresholds": "300000000,700000000",
- "title": "Download",
- "type": "singlestat",
- "valueFontSize": "50%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_PLEX}",
- "format": "percent",
- "gauge": {
- "maxValue": 100,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 8,
- "y": 8
- },
- "id": 5,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "plex",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "CPU Utilization"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "server",
- "operator": "=",
- "value": "Plex"
- }
- ]
- }
- ],
- "thresholds": "50,80",
- "title": "Plex CPU Load",
- "type": "singlestat",
- "valueFontSize": "100%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "avg"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_PLEX}",
- "format": "none",
- "gauge": {
- "maxValue": 30,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 12,
- "y": 8
- },
- "id": 6,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "current_streams"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "stream_count"
- }
- ]
- }
- ],
- "thresholds": "10,20",
- "title": "Plex Current Streams",
- "type": "singlestat",
- "valueFontSize": "120%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_STORAGE_SERVER}",
- "format": "percent",
- "gauge": {
- "maxValue": 30,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 16,
- "y": 8
- },
- "id": 7,
- "interval": null,
- "links": [],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Storage Servers",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "IO_Wait"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "server",
- "operator": "=",
- "value": "SAN3"
- }
- ]
- }
- ],
- "thresholds": "5,15",
- "title": "SAN IO_Wait",
- "type": "singlestat",
- "valueFontSize": "80%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "cacheTimeout": null,
- "colorBackground": false,
- "colorValue": true,
- "colors": [
- "#299c46",
- "rgba(237, 129, 40, 0.89)",
- "#d44a3a"
- ],
- "datasource": "${DS_PLEX}",
- "format": "none",
- "gauge": {
- "maxValue": 20,
- "minValue": 0,
- "show": true,
- "thresholdLabels": false,
- "thresholdMarkers": true
- },
- "gridPos": {
- "h": 5,
- "w": 4,
- "x": 20,
- "y": 8
- },
- "id": 8,
- "interval": null,
- "links": [
- {
- "targetBlank": true,
- "title": "Ombi",
- "type": "absolute",
- "url": "https://ombi.domain.tld/requests"
- }
- ],
- "mappingType": 1,
- "mappingTypes": [
- {
- "name": "value to text",
- "value": 1
- },
- {
- "name": "range to text",
- "value": 2
- }
- ],
- "maxDataPoints": 100,
- "nullPointMode": "connected",
- "nullText": null,
- "postfix": "",
- "postfixFontSize": "50%",
- "prefix": "",
- "prefixFontSize": "50%",
- "rangeMaps": [
- {
- "from": "null",
- "text": "N/A",
- "to": "null"
- }
- ],
- "sparkline": {
- "fillColor": "rgba(31, 118, 189, 0.18)",
- "full": false,
- "lineColor": "rgb(31, 120, 193)",
- "show": true
- },
- "tableColumn": "",
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Ombi",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "time_series",
- "select": [
- [
- {
- "params": [
- "total"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Requests"
- }
- ]
- }
- ],
- "thresholds": "1,10",
- "title": "TV / Movie Requests in Queue",
- "type": "singlestat",
- "valueFontSize": "150%",
- "valueMaps": [
- {
- "op": "=",
- "text": "N/A",
- "value": "null"
- }
- ],
- "valueName": "current"
- },
- {
- "columns": [],
- "datasource": "${DS_PLEX}",
- "fontSize": "100%",
- "gridPos": {
- "h": 10,
- "w": 12,
- "x": 0,
- "y": 13
- },
- "hideTimeOverride": true,
- "id": 9,
- "links": [
- {
- "targetBlank": true,
- "title": "Tautulli",
- "type": "absolute",
- "url": "https://tautulli.domain.tld/home"
- }
- ],
- "minSpan": 12,
- "pageSize": 8,
- "scroll": true,
- "showHeader": true,
- "sort": {
- "col": 0,
- "desc": true
- },
- "styles": [
- {
- "alias": "",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "MM/DD/YY h:mm:ss a",
- "decimals": 2,
- "link": false,
- "pattern": "Time",
- "thresholds": [],
- "type": "hidden",
- "unit": "short"
- },
- {
- "alias": "User",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "name",
- "preserveFormat": false,
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Movie / TV Show",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "title",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Transcode Decision",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "transcode_decision",
- "preserveFormat": false,
- "sanitize": false,
- "thresholds": [],
- "type": "string",
- "unit": "short"
- },
- {
- "alias": "Quality",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "quality",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- }
- ],
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Tautulli",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "name"
- ],
- "type": "field"
- }
- ],
- [
- {
- "params": [
- "title"
- ],
- "type": "field"
- }
- ],
- [
- {
- "params": [
- "quality"
- ],
- "type": "field"
- }
- ],
- [
- {
- "params": [
- "transcode_decision"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Session"
- }
- ]
- }
- ],
- "timeFrom": "1m",
- "title": "Users Online",
- "transform": "table",
- "type": "table"
- },
- {
- "columns": [],
- "datasource": "${DS_PLEX}",
- "fontSize": "100%",
- "gridPos": {
- "h": 10,
- "w": 6,
- "x": 12,
- "y": 13
- },
- "hideTimeOverride": true,
- "id": 10,
- "links": [
- {
- "targetBlank": true,
- "title": "Sonarr",
- "type": "absolute",
- "url": "https://sonarr.domain.tld/wanted/missing"
- }
- ],
- "minSpan": 8,
- "pageSize": 8,
- "scroll": true,
- "showHeader": true,
- "sort": {
- "col": 0,
- "desc": true
- },
- "styles": [
- {
- "alias": "Time",
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "pattern": "Time",
- "type": "hidden"
- },
- {
- "alias": "Name",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "name",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- }
- ],
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Sonarr",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "name"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Missing"
- }
- ]
- }
- ],
- "timeFrom": "30m",
- "title": "Missing TV Shows",
- "transform": "table",
- "type": "table"
- },
- {
- "columns": [],
- "datasource": "${DS_PLEX}",
- "fontSize": "100%",
- "gridPos": {
- "h": 10,
- "w": 6,
- "x": 18,
- "y": 13
- },
- "hideTimeOverride": true,
- "id": 11,
- "links": [
- {
- "targetBlank": true,
- "title": "Radarr",
- "type": "absolute",
- "url": "https://radarr.domain.tld/wanted/missing"
- }
- ],
- "minSpan": 6,
- "pageSize": 8,
- "scroll": true,
- "showHeader": true,
- "sort": {
- "col": 0,
- "desc": true
- },
- "styles": [
- {
- "alias": "",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "Time",
- "thresholds": [],
- "type": "hidden",
- "unit": "short"
- },
- {
- "alias": "Name",
- "colorMode": null,
- "colors": [
- "rgba(245, 54, 54, 0.9)",
- "rgba(237, 129, 40, 0.89)",
- "rgba(50, 172, 45, 0.97)"
- ],
- "dateFormat": "YYYY-MM-DD HH:mm:ss",
- "decimals": 2,
- "pattern": "name",
- "thresholds": [],
- "type": "string",
- "unit": "short"
- }
- ],
- "targets": [
- {
- "dsType": "influxdb",
- "groupBy": [],
- "measurement": "Radarr",
- "orderByTime": "ASC",
- "policy": "default",
- "refId": "A",
- "resultFormat": "table",
- "select": [
- [
- {
- "params": [
- "name"
- ],
- "type": "field"
- }
- ]
- ],
- "tags": [
- {
- "key": "type",
- "operator": "=",
- "value": "Missing"
- }
- ]
- }
- ],
- "timeFrom": "31m",
- "title": "Missing Movies",
- "transform": "table",
- "type": "table"
- }
- ],
- "refresh": "30s",
- "schemaVersion": 16,
- "style": "dark",
- "tags": [],
- "templating": {
- "list": []
- },
- "time": {
- "from": "now-6h",
- "to": "now"
- },
- "timepicker": {
- "refresh_intervals": [
- "5s",
- "10s",
- "30s",
- "1m",
- "5m",
- "15m",
- "30m",
- "1h",
- "2h",
- "1d"
- ],
- "time_options": [
- "5m",
- "15m",
- "1h",
- "6h",
- "12h",
- "24h",
- "2d",
- "7d",
- "30d"
- ]
- },
- "timezone": "browser",
- "title": "Plex",
- "uid": "iTbnha5mk",
- "version": 1
-}
diff --git a/data/varken.example.ini b/data/varken.example.ini
new file mode 100644
index 00000000..392ed80f
--- /dev/null
+++ b/data/varken.example.ini
@@ -0,0 +1,91 @@
+# Notes:
+# - Sonarr + Radarr scripts support multiple servers. You can remove the second
+# server by putting a # in front of the lines and section name, and removing
+# that number from your server_ids list
+# - fallback_ip, This is used when there is no IP listed in tautulli.
+# This can happen when you are streaming locally. This is usually your public IP.
+
+[global]
+sonarr_server_ids = 1,2
+radarr_server_ids = 1,2
+tautulli_server_ids = 1
+ombi_server_ids = 1
+ciscoasa_firewall_ids = false
+
+[influxdb]
+url = influxdb.domain.tld
+port = 8086
+username =
+password =
+
+[tautulli-1]
+url = tautulli.domain.tld
+fallback_ip = 0.0.0.0
+apikey = xxxxxxxxxxxxxxxx
+ssl = false
+verify_ssl = true
+get_activity = true
+get_activity_run_seconds = 30
+
+[sonarr-1]
+url = sonarr1.domain.tld
+apikey = xxxxxxxxxxxxxxxx
+ssl = false
+verify_ssl = true
+missing_days = 7
+missing_days_run_seconds = 300
+future_days = 1
+future_days_run_seconds = 300
+queue = true
+queue_run_seconds = 300
+
+[sonarr-2]
+url = sonarr2.domain.tld
+apikey = yyyyyyyyyyyyyyyy
+ssl = false
+verify_ssl = true
+missing_days = 7
+missing_days_run_seconds = 300
+future_days = 1
+future_days_run_seconds = 300
+queue = true
+queue_run_seconds = 300
+
+[radarr-1]
+url = radarr1.domain.tld
+apikey = xxxxxxxxxxxxxxxx
+ssl = false
+verify_ssl = true
+queue = true
+queue_run_seconds = 300
+get_missing = true
+get_missing_run_seconds = 300
+
+[radarr-2]
+url = radarr2.domain.tld
+apikey = yyyyyyyyyyyyyyyy
+ssl = false
+verify_ssl = true
+queue = true
+queue_run_seconds = 300
+get_missing = true
+get_missing_run_seconds = 300
+
+[ombi-1]
+url = ombi.domain.tld
+apikey = xxxxxxxxxxxxxxxx
+ssl = false
+verify_ssl = true
+get_request_type_counts = true
+request_type_run_seconds = 300
+get_request_total_counts = true
+request_total_run_seconds = 300
+
+[ciscoasa-1]
+url = firewall.domain.tld
+username = cisco
+password = cisco
+outside_interface = WAN
+ssl = false
+verify_ssl = true
+get_bandwidth_run_seconds = 300
diff --git a/ombi.py b/ombi.py
deleted file mode 100644
index 5aa18124..00000000
--- a/ombi.py
+++ /dev/null
@@ -1,87 +0,0 @@
-# Do not edit this script. Edit configuration.py
-import sys
-import requests
-from datetime import datetime, timezone
-from influxdb import InfluxDBClient
-import argparse
-from argparse import RawTextHelpFormatter
-import configuration
-
-headers = {'Apikey': configuration.ombi_api_key}
-
-def now_iso():
- now_iso = datetime.now(timezone.utc).astimezone().isoformat()
- return now_iso
-
-def influx_sender(influx_payload):
- influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
- configuration.influxdb_password, configuration.ombi_influxdb_db_name)
- influx.write_points(influx_payload)
-
-def get_total_requests():
- get_tv_requests = requests.get('{}/api/v1/Request/tv'.format(configuration.ombi_url), headers=headers).json()
- get_movie_requests = requests.get('{}/api/v1/Request/movie'.format(configuration.ombi_url), headers=headers).json()
-
- count_movie_requests = 0
- count_tv_requests = 0
-
- for show in get_tv_requests:
- count_tv_requests += 1
-
- for movie in get_movie_requests:
- count_movie_requests += 1
-
- influx_payload = [
- {
- "measurement": "Ombi",
- "tags": {
- "type": "Request_Total"
- },
- "time": now_iso(),
- "fields": {
- "total": count_movie_requests + count_tv_requests
- }
- }
- ]
- return influx_payload
-
-def get_request_counts():
- get_request_counts = requests.get('{}/api/v1/Request/count'.format(configuration.ombi_url), headers=headers).json()
-
- influx_payload = [
- {
- "measurement": "Ombi",
- "tags": {
- "type": "Request_Counts"
- },
- "time": now_iso(),
- "fields": {
- "pending": int(get_request_counts['pending']),
- "approved": int(get_request_counts['approved']),
- "available": int(get_request_counts['available'])
- }
- }
- ]
- return influx_payload
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(prog='Ombi stats operations',
- description='Script to aid in data gathering from Ombi', formatter_class=RawTextHelpFormatter)
-
- parser.add_argument("--total", action='store_true',
- help='Get the total count of all requests')
-
- parser.add_argument("--counts", action='store_true',
- help='Get the count of pending, approved, and available requests')
-
- opts = parser.parse_args()
-
- if opts.total:
- influx_sender(get_total_requests())
-
- elif opts.counts:
- influx_sender(get_request_counts())
-
- elif len(sys.argv) == 1:
- parser.print_help(sys.stderr)
- sys.exit(1)
diff --git a/radarr.py b/radarr.py
deleted file mode 100644
index 2358a733..00000000
--- a/radarr.py
+++ /dev/null
@@ -1,171 +0,0 @@
-# Do not edit this script. Edit configuration.py
-import sys
-import requests
-from datetime import datetime, timezone
-from influxdb import InfluxDBClient
-import argparse
-from argparse import RawTextHelpFormatter
-import configuration
-
-
-def now_iso():
- now_iso = datetime.now(timezone.utc).astimezone().isoformat()
- return now_iso
-
-
-def influx_sender(influx_payload):
- influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
- configuration.influxdb_password, configuration.radarr_influxdb_db_name)
- influx.write_points(influx_payload)
-
-
-def get_missing_movies():
- # Set the time here so we have one timestamp to work with
- now = now_iso()
- missing = []
- influx_payload = []
-
- for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
- headers = {'X-Api-Key': radarr_api_key}
- get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json()
- movies = {d['tmdbId']: d for d in get_movies}
-
- for movie in movies.keys():
- if not movies[movie]['downloaded']:
- movie_name = ('{} ({})'.format(movies[movie]['title'], movies[movie]['year']))
- missing.append((movie_name, movies[movie]['tmdbId']))
-
- for movie, id in missing:
- influx_payload.append(
- {
- "measurement": "Radarr",
- "tags": {
- "type": "Missing",
- "tmdbId": id,
- "server": server_id
- },
- "time": now,
- "fields": {
- "name": movie
- }
- }
- )
- # Empty missing or else things get foo bared
- missing = []
-
- return influx_payload
-
-
-def get_missing_avl():
- # Set the time here so we have one timestamp to work with
- now = now_iso()
- missing = []
- influx_payload = []
-
- for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
- headers = {'X-Api-Key': radarr_api_key}
- get_movies = requests.get('{}/api/movie'.format(radarr_url), headers=headers).json()
- movies = {d['tmdbId']: d for d in get_movies}
-
- for movie in movies.keys():
- if not movies[movie]['downloaded']:
- if movies[movie]['isAvailable'] is True:
- movie_name = ('{} ({})'.format(movies[movie]['title'], movies[movie]['year']))
- missing.append((movie_name, movies[movie]['tmdbId']))
-
-
- for movie, id in missing:
- influx_payload.append(
- {
- "measurement": "Radarr",
- "tags": {
- "type": "Missing_Available",
- "tmdbId": id,
- "server": server_id
- },
- "time": now,
- "fields": {
- "name": movie,
- }
- }
- )
- # Empty missing or else things get foo bared
- missing = []
-
- return influx_payload
-
-
-def get_queue_movies():
- # Set the time here so we have one timestamp to work with
- now = now_iso()
- influx_payload = []
- queue = []
-
- for radarr_url, radarr_api_key, server_id in configuration.radarr_server_list:
- headers = {'X-Api-Key': radarr_api_key}
- get_movies = requests.get('{}/api/queue'.format(radarr_url), headers=headers).json()
- queue_movies = {d['id']: d for d in get_movies}
-
- for movie in queue_movies.keys():
- name = '{} ({})'.format(queue_movies[movie]['movie']['title'], queue_movies[movie]['movie']['year'])
- quality = (queue_movies[movie]['quality']['quality']['name'])
- protocol = (queue_movies[movie]['protocol'].upper())
-
- if protocol == 'USENET':
- protocol_id = 1
- else:
- protocol_id = 0
-
- queue.append((name, queue_movies[movie]['id']))
-
- for movie, id in queue:
- influx_payload.append(
- {
- "measurement": "Radarr",
- "tags": {
- "type": "Queue",
- "tmdbId": id,
- "server": server_id
- },
- "time": now,
- "fields": {
- "name": movie,
- "quality": quality,
- "protocol": protocol,
- "protocol_id": protocol_id
- }
- }
- )
- # Empty queue or else things get foo bared
- queue = []
-
- return influx_payload
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(prog='Radarr stats operations',
- description='Script to aid in data gathering from Radarr', formatter_class=RawTextHelpFormatter)
-
- parser.add_argument("--missing", action='store_true',
- help='Get missing movies')
-
- parser.add_argument("--missing_avl", action='store_true',
- help='Get missing yet available movies')
-
- parser.add_argument("--queue", action='store_true',
- help='Get movies in queue')
-
- opts = parser.parse_args()
-
- if opts.missing:
- influx_sender(get_missing_movies())
-
- elif opts.missing_avl:
- influx_sender(get_missing_avl())
-
- elif opts.queue:
- influx_sender(get_queue_movies())
-
- elif len(sys.argv) == 1:
- parser.print_help(sys.stderr)
- sys.exit(1)
diff --git a/raid_init.py b/raid_init.py
deleted file mode 100644
index 4145bdd5..00000000
--- a/raid_init.py
+++ /dev/null
@@ -1,35 +0,0 @@
-import psutil
-import mdstat
-import platform
-from datetime import datetime, timezone, timedelta
-from influxdb import InfluxDBClient
-
-# Do not edit below this line #
-influx_payload = []
-devices = {
- 'md': mdstat.parse()['devices'],
-}
-
-for array in devices['md']:
- influx_payload.append(
- {
- "measurement": "Storage Servers",
- "tags": {
- "server": platform.uname()[1],
- "mount_point": array,
- "type": 'rebuild'
- },
- "time": datetime.now(timezone.utc).astimezone().isoformat(),
- "fields": {
- "resync_progress": float(devices['md'][array]['resync']['progress'].replace('%', '')),
- "resync_eta_mins": float(devices['md'][array]['resync']['finish'].replace('min', '')),
- "resync_eta_date": '{:%A, %b %d %I:%M %p}'.format(
- datetime.now() + timedelta(minutes=float(devices['md'][array]['resync']['finish']
- .replace('min', '')))),
- "resync_speed_KiB/s": int(devices['md'][array]['resync']['speed'].replace('K/sec', '')),
- }
- }
- )
-
-influx = InfluxDBClient('grafana.domain.tld', 8086, 'root', 'root', 'storage_server')
-influx.write_points(influx_payload)
diff --git a/requirements.txt b/requirements.txt
index ceb78c6d..bab1e82f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -2,6 +2,9 @@
# Potential requirements.
# pip3 install -r requirements.txt
#---------------------------------------------------------
-requests
-geoip2
-influxdb
+requests>=2.20.1
+geoip2>=2.9.0
+influxdb>=5.2.0
+schedule>=0.5.0
+distro>=1.3.0
+urllib3>=1.22
\ No newline at end of file
diff --git a/san.py b/san.py
deleted file mode 100644
index bb7de3d0..00000000
--- a/san.py
+++ /dev/null
@@ -1,36 +0,0 @@
-import platform
-import psutil
-from datetime import datetime, timezone
-from influxdb import InfluxDBClient
-
-mount_points = ['/mnt/raid6-a', '/mnt/raid6-b']
-
-# Do not edit below this line #
-influx_payload = []
-devices = {
- 'mount_points': {}
-}
-
-for mount in mount_points:
- devices['mount_points'][mount] = {
- 'usage': psutil.disk_usage(mount)
- }
- influx_payload.append(
- {
- "measurement": "Storage Servers",
- "tags": {
- "server": platform.uname()[1],
- "mount_point": mount
- },
- "time": datetime.now(timezone.utc).astimezone().isoformat(),
- "fields": {
- "bytes Used": devices['mount_points'][mount]['usage'].used,
- "bytes Free": devices['mount_points'][mount]['usage'].free,
- "bytes Total": devices['mount_points'][mount]['usage'].total,
- "Utilization": devices['mount_points'][mount]['usage'].percent
- }
- }
- )
-
-influx = InfluxDBClient('grafana.domain.tld', 8086, 'root', 'root', 'storage_server')
-influx.write_points(influx_payload)
diff --git a/sonarr.py b/sonarr.py
deleted file mode 100644
index 8be5f6c3..00000000
--- a/sonarr.py
+++ /dev/null
@@ -1,317 +0,0 @@
-# Do not edit this script. Edit configuration.py
-import sys
-import requests
-from datetime import datetime, timezone, date, timedelta
-from influxdb import InfluxDBClient
-import argparse
-from argparse import RawTextHelpFormatter
-import configuration
-
-
-def now_iso():
- now_iso = datetime.now(timezone.utc).astimezone().isoformat()
- return now_iso
-
-
-def influx_sender(influx_payload):
- influx = InfluxDBClient(configuration.influxdb_url, configuration.influxdb_port, configuration.influxdb_username,
- configuration.influxdb_password, configuration.sonarr_influxdb_db_name)
- influx.write_points(influx_payload)
-
-
-def get_all_missing_shows():
- # Set the time here so we have one timestamp to work with
- now = now_iso()
-
- missing = []
-
- influx_payload = []
-
- for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
-
- headers = {'X-Api-Key': sonarr_api_key}
-
- get_tv_shows = requests.get('{}/api/wanted/missing/?pageSize=1000'.format(sonarr_url),
- headers=headers).json()['records']
-
- tv_shows = {d['id']: d for d in get_tv_shows}
-
-
- for show in tv_shows.keys():
- series_title = '{}'.format(tv_shows[show]['series']['title'])
- sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'],tv_shows[show]['episodeNumber'])
- missing.append((series_title, sxe, tv_shows[show]['id'], tv_shows[show]['title']))
-
- for series_title, sxe, id, episode_title in missing:
- influx_payload.append(
- {
- "measurement": "Sonarr",
- "tags": {
- "type": "Missing",
- "sonarrId": id,
- "server": server_id
- },
- "time": now,
- "fields": {
- "name": series_title,
- "epname": episode_title,
- "sxe": sxe
- }
- }
- )
- # Empty missing or else things get foo bared
- missing = []
-
- return influx_payload
-
-
-def get_missing_shows(days_past):
- # Set the time here so we have one timestamp to work with
- now = now_iso()
-
- last_days = str(date.today()+timedelta(days=-days_past))
-
- today = str(date.today())
-
- missing = []
-
- influx_payload = []
-
- for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
-
- headers = {'X-Api-Key': sonarr_api_key}
-
- get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=1000'.format(sonarr_url, last_days, today),
- headers=headers).json()
-
- tv_shows = {d['id']: d for d in get_tv_shows}
-
- for show in tv_shows.keys():
- if not (tv_shows[show]['hasFile']):
- series_title = '{}'.format(tv_shows[show]['series']['title'])
- sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber'])
- air_date = (tv_shows[show]['airDate'])
- missing.append((series_title, sxe, air_date, tv_shows[show]['id']))
-
- for series_title, sxe, air_date, id in missing:
- influx_payload.append(
- {
- "measurement": "Sonarr",
- "tags": {
- "type": "Missing_Days",
- "sonarrId": id,
- "server": server_id
- },
- "time": now,
- "fields": {
- "name": series_title,
- "sxe": sxe,
- "airs": air_date
- }
- }
- )
-
- # Empty missing or else things get foo bared
- missing = []
-
- return influx_payload
-
-
-def get_upcoming_shows():
- # Set the time here so we have one timestamp to work with
- now = now_iso()
-
- upcoming = []
-
- influx_payload = []
-
- for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
-
- headers = {'X-Api-Key': sonarr_api_key}
-
- get_upcoming_shows = requests.get('{}/api/calendar/'.format(sonarr_url),
- headers=headers).json()
-
- upcoming_shows = {d['id']: d for d in get_upcoming_shows}
-
- for show in upcoming_shows.keys():
- series_title = '{}'.format(upcoming_shows[show]['series']['title'])
- sxe = 'S{:0>2}E{:0>2}'.format(upcoming_shows[show]['seasonNumber'],upcoming_shows[show]['episodeNumber'])
- upcoming.append((series_title, sxe, upcoming_shows[show]['id'], upcoming_shows[show]['title'], upcoming_shows[show]['airDate']))
-
- for series_title, sxe, id, episode_title, air_date in upcoming:
- influx_payload.append(
- {
- "measurement": "Sonarr",
- "tags": {
- "type": "Soon",
- "sonarrId": id,
- "server": server_id
- },
- "time": now,
- "fields": {
- "name": series_title,
- "epname": episode_title,
- "sxe": sxe,
- "airs": air_date
- }
- }
- )
- # Empty upcoming or else things get foo bared
- upcoming = []
-
- return influx_payload
-
-
-def get_future_shows(future_days):
- # Set the time here so we have one timestamp to work with
- now = now_iso()
-
- today = str(date.today())
-
- future = str(date.today()+timedelta(days=future_days))
-
- air_days = []
-
- downloaded = []
-
- influx_payload = []
-
- for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
-
- headers = {'X-Api-Key': sonarr_api_key}
-
- get_tv_shows = requests.get('{}/api/calendar/?start={}&end={}&pageSize=200'.format(sonarr_url, today, future),
- headers=headers).json()
-
- tv_shows = {d['id']: d for d in get_tv_shows}
-
- for show in tv_shows.keys():
- series_title = '{}'.format(tv_shows[show]['series']['title'])
- dl_status = int(tv_shows[show]['hasFile'])
- sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['seasonNumber'], tv_shows[show]['episodeNumber'])
- air_days.append((series_title, dl_status, sxe, tv_shows[show]['title'], tv_shows[show]['airDate'], tv_shows[show]['id']))
-
- for series_title, dl_status, sxe, episode_title, air_date, id in air_days:
- influx_payload.append(
- {
- "measurement": "Sonarr",
- "tags": {
- "type": "Future",
- "sonarrId": id,
- "server": server_id
- },
- "time": now,
- "fields": {
- "name": series_title,
- "epname": episode_title,
- "sxe": sxe,
- "airs": air_date,
- "downloaded": dl_status
- }
- }
- )
- # Empty air_days or else things get foo bared
- air_days = []
-
- return influx_payload
-
-
-def get_queue_shows():
- # Set the time here so we have one timestamp to work with
- now = now_iso()
-
- queue = []
-
- downloaded = []
-
- influx_payload = []
-
- for sonarr_url, sonarr_api_key, server_id in configuration.sonarr_server_list:
-
- headers = {'X-Api-Key': sonarr_api_key}
-
- get_tv_shows = requests.get('{}/api/queue'.format(sonarr_url),
- headers=headers).json()
-
- tv_shows = {d['id']: d for d in get_tv_shows}
-
- for show in tv_shows.keys():
- series_title = '{}'.format(tv_shows[show]['series']['title'])
- episode_title = '{}'.format(tv_shows[show]['episode']['title'])
- protocol = (tv_shows[show]['protocol'].upper())
- sxe = 'S{:0>2}E{:0>2}'.format(tv_shows[show]['episode']['seasonNumber'], tv_shows[show]['episode']['episodeNumber'])
- if protocol == 'USENET':
- protocol_id = 1
- else:
- protocol_id = 0
-
- queue.append((series_title, episode_title, protocol, protocol_id, sxe, tv_shows[show]['id']))
-
- for series_title, episode_title, protocol, protocol_id, sxe, id in queue:
- influx_payload.append(
- {
- "measurement": "Sonarr",
- "tags": {
- "type": "Queue",
- "sonarrId": id,
- "server": server_id
-
- },
- "time": now,
- "fields": {
- "name": series_title,
- "epname": episode_title,
- "sxe": sxe,
- "protocol": protocol,
- "protocol_id": protocol_id
- }
- }
- )
-
- # Empty queue or else things get foo bared
- queue = []
-
- return influx_payload
-
-
-if __name__ == "__main__":
- parser = argparse.ArgumentParser(prog='Sonarr stats operations',
- description='Script to aid in data gathering from Sonarr', formatter_class=RawTextHelpFormatter)
-
- parser.add_argument("--missing", action='store_true',
- help='Get all missing TV shows')
-
- parser.add_argument("--missing_days", type=int,
- help='Get missing TV shows in past X days')
-
- parser.add_argument("--upcoming", action='store_true',
- help='Get upcoming TV shows')
-
- parser.add_argument("--future", type=int,
- help='Get TV shows on X days into the future. Includes today.'
- '\ni.e. --future 2 is Today and Tomorrow')
-
- parser.add_argument("--queue", action='store_true',
- help='Get TV shows in queue')
-
- opts = parser.parse_args()
-
- if opts.missing:
- influx_sender(get_all_missing_shows())
-
- elif opts.missing_days:
- influx_sender(get_missing_shows(opts.missing_days))
-
- elif opts.upcoming:
- influx_sender(get_upcoming_shows())
-
- elif opts.future:
- influx_sender(get_future_shows(opts.future))
-
- elif opts.queue:
- influx_sender(get_queue_shows())
-
- elif len(sys.argv) == 1:
- parser.print_help(sys.stderr)
- sys.exit(1)
diff --git a/tautulli.py b/tautulli.py
deleted file mode 100644
index 96f61eda..00000000
--- a/tautulli.py
+++ /dev/null
@@ -1,179 +0,0 @@
-import os
-import tarfile
-import urllib.request
-import time
-from datetime import datetime, timezone
-import geoip2.database
-from influxdb import InfluxDBClient
-import requests
-import configuration
-
-CURRENT_TIME = datetime.now(timezone.utc).astimezone().isoformat()
-
-PAYLOAD = {'apikey': configuration.tautulli_api_key, 'cmd': 'get_activity'}
-
-ACTIVITY = requests.get('{}/api/v2'.format(configuration.tautulli_url),
- params=PAYLOAD).json()['response']['data']
-
-SESSIONS = {d['session_id']: d for d in ACTIVITY['sessions']}
-
-TAR_DBFILE = '{}/GeoLite2-City.tar.gz'.format(os.path.dirname(os.path.realpath(__file__)))
-
-DBFILE = '{}/GeoLite2-City.mmdb'.format(os.path.dirname(os.path.realpath(__file__)))
-
-NOW = time.time()
-
-DB_AGE = NOW - (86400 * 35)
-
-#remove the running db file if it is older than 35 days
-try:
- t = os.stat(DBFILE)
- c = t.st_ctime
- if c < DB_AGE:
- os.remove(DBFILE)
-except FileNotFoundError:
- pass
-
-
-def geo_lookup(ipaddress):
- """Lookup an IP using the local GeoLite2 DB"""
- if not os.path.isfile(DBFILE):
- urllib.request.urlretrieve(
- 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz',
- TAR_DBFILE)
-
- tar = tarfile.open(TAR_DBFILE, "r:gz")
- for files in tar.getmembers():
- if 'GeoLite2-City.mmdb' in files.name:
- files.name = os.path.basename(files.name)
- tar.extract(files, '{}/'.format(os.path.dirname(os.path.realpath(__file__))))
-
- reader = geoip2.database.Reader(DBFILE)
-
- return reader.city(ipaddress)
-
-
-INFLUX_PAYLOAD = [
- {
- "measurement": "Tautulli",
- "tags": {
- "type": "stream_count"
- },
- "time": CURRENT_TIME,
- "fields": {
- "current_streams": int(ACTIVITY['stream_count']),
- "transcode_streams": int(ACTIVITY['stream_count_transcode']),
- "direct_play_streams": int(ACTIVITY['stream_count_direct_play']),
- "direct_streams": int(ACTIVITY['stream_count_direct_stream'])
- }
- }
-]
-
-for session in SESSIONS.keys():
- try:
- geodata = geo_lookup(SESSIONS[session]['ip_address_public'])
- except (ValueError, geoip2.errors.AddressNotFoundError):
- if configuration.tautulli_failback_ip:
- geodata = geo_lookup(configuration.tautulli_failback_ip)
- else:
- geodata = geo_lookup(requests.get('http://ip.42.pl/raw').text)
-
- latitude = geodata.location.latitude
-
- if not geodata.location.latitude:
- latitude = 37.234332396
- else:
- latitude = geodata.location.latitude
-
- if not geodata.location.longitude:
- longitude = -115.80666344
- else:
- longitude = geodata.location.longitude
-
- decision = SESSIONS[session]['transcode_decision']
-
- if decision == 'copy':
- decision = 'direct stream'
-
- video_decision = SESSIONS[session]['stream_video_decision']
-
- if video_decision == 'copy':
- video_decision = 'direct stream'
-
- elif video_decision == '':
- video_decision = 'Music'
-
- quality = SESSIONS[session]['stream_video_resolution']
-
-
- # If the video resolution is empty. Asssume it's an audio stream
- # and use the container for music
- if not quality:
- quality = SESSIONS[session]['container'].upper()
-
- elif quality in ('SD', 'sd'):
- quality = SESSIONS[session]['stream_video_resolution'].upper()
-
- elif quality in '4k':
- quality = SESSIONS[session]['stream_video_resolution'].upper()
-
- else:
- quality = '{}p'.format(SESSIONS[session]['stream_video_resolution'])
-
-
- # Translate player_state to integers so we can colorize the table
- player_state = SESSIONS[session]['state'].lower()
-
- if player_state == 'playing':
- player_state = 0
-
- elif player_state == 'paused':
- player_state = 1
-
- elif player_state == 'buffering':
- player_state = 3
-
-
- INFLUX_PAYLOAD.append(
- {
- "measurement": "Tautulli",
- "tags": {
- "type": "Session",
- "session_id": SESSIONS[session]['session_id'],
- "name": SESSIONS[session]['friendly_name'],
- "title": SESSIONS[session]['full_title'],
- "platform": SESSIONS[session]['platform'],
- "product_version": SESSIONS[session]['product_version'],
- "quality": quality,
- "video_decision": video_decision.title(),
- "transcode_decision": decision.title(),
- "media_type": SESSIONS[session]['media_type'].title(),
- "audio_codec": SESSIONS[session]['audio_codec'].upper(),
- "audio_profile": SESSIONS[session]['audio_profile'].upper(),
- "stream_audio_codec": SESSIONS[session]['stream_audio_codec'].upper(),
- "quality_profile": SESSIONS[session]['quality_profile'],
- "progress_percent": SESSIONS[session]['progress_percent'],
- "region_code": geodata.subdivisions.most_specific.iso_code,
- "location": geodata.city.name,
- "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name,
- geodata.city.name),
- "latitude": latitude,
- "longitude": longitude,
- "player_state": player_state,
- "device_type": SESSIONS[session]['platform']
- },
- "time": CURRENT_TIME,
- "fields": {
- "session_id": SESSIONS[session]['session_id'],
- "session_key": SESSIONS[session]['session_key']
- }
- }
- )
-
-INFLUX_SENDER = InfluxDBClient(configuration.influxdb_url,
- configuration.influxdb_port,
- configuration.influxdb_username,
- configuration.influxdb_password,
- configuration.tautulli_influxdb_db_name)
-
-INFLUX_SENDER.write_points(INFLUX_PAYLOAD)
diff --git a/varken.systemd b/varken.systemd
new file mode 100644
index 00000000..f8aefe6f
--- /dev/null
+++ b/varken.systemd
@@ -0,0 +1,53 @@
+# Varken - Command-line utility to aggregate data from the Plex ecosystem into InfluxDB.
+#
+# Service Unit file for systemd system manager
+#
+# INSTALLATION NOTES
+#
+# 1. Copy this file into your systemd service unit directory (often '/lib/systemd/system')
+# and name it 'varken.service' with the following command:
+# cp /opt/Varken/varken.systemd /lib/systemd/system/varken.service
+#
+# 2. Edit the new varken.service file with configuration settings as required.
+# More details in the "CONFIGURATION NOTES" section shown below.
+#
+# 3. Enable boot-time autostart with the following commands:
+# systemctl daemon-reload
+# systemctl enable varken.service
+#
+# 4. Start now with the following command:
+# systemctl start varken.service
+#
+# CONFIGURATION NOTES
+#
+# - The example settings in this file assume that you will run varken as user: varken
+# - The example settings in this file assume that varken is installed to: /opt/Varken
+#
+# - To create this user and give it ownership of the Varken directory:
+# Ubuntu/Debian: sudo addgroup varken && sudo adduser --system --no-create-home varken --ingroup varken
+# CentOS/Fedora: sudo adduser --system --no-create-home varken
+# sudo chown varken:varken -R /opt/Varken
+#
+# - Adjust User= and Group= to the user/group you want Varken to run as.
+#
+# - WantedBy= specifies which target (i.e. runlevel) to start Varken for.
+# multi-user.target equates to runlevel 3 (multi-user text mode)
+# graphical.target equates to runlevel 5 (multi-user X11 graphical mode)
+
+[Unit]
+Description=Varken - Command-line utility to aggregate data from the Plex ecosystem into InfluxDB.
+After=network-online.target
+StartLimitInterval=200
+StartLimitBurst=3
+
+[Service]
+Type=simple
+User=varken
+Group=varken
+WorkingDirectory=/opt/Varken
+ExecStart=/opt/Varken/varken-venv/bin/python /opt/Varken/Varken.py
+Restart=always
+RestartSec=30
+
+[Install]
+WantedBy=multi-user.target
diff --git a/varken/__init__.py b/varken/__init__.py
new file mode 100644
index 00000000..341988ca
--- /dev/null
+++ b/varken/__init__.py
@@ -0,0 +1 @@
VERSION = 1.0  # Varken release version (matches the v1.0 CHANGELOG entry)
diff --git a/varken/cisco.py b/varken/cisco.py
new file mode 100644
index 00000000..6ce3392b
--- /dev/null
+++ b/varken/cisco.py
@@ -0,0 +1,62 @@
+import logging
+from requests import Session, Request
+from datetime import datetime, timezone
+
+from varken.helpers import connection_handler
+
+
class CiscoAPI(object):
    """Collects bandwidth metrics from a Cisco ASA firewall's REST API
    and writes them to InfluxDB via the supplied dbmanager.
    """

    def __init__(self, firewall, dbmanager):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.firewall = firewall
        # Create session to reduce server web thread load; auth is sent once
        # and the token obtained below is reused for subsequent calls.
        self.session = Session()
        self.session.auth = (self.firewall.username, self.firewall.password)
        self.logger = logging.getLogger()

        self.get_token()

    def __repr__(self):
        # BUG FIX: the original returned "".format(...), i.e. always ''.
        return "<ciscoasa-{}>".format(self.firewall.id)

    def get_token(self):
        """Request an X-Auth-Token and install it on the session headers."""
        endpoint = '/api/tokenservices'

        req = self.session.prepare_request(Request('POST', self.firewall.url + endpoint))
        post = connection_handler(self.session, req, self.firewall.verify_ssl)

        # connection_handler returns False on failure; leave headers unset so
        # get_bandwidth() knows authentication did not succeed.
        if not post:
            return

        self.session.headers = {'X-Auth-Token': post}

    def get_bandwidth(self):
        """Write the outside interface's current bitrates to InfluxDB."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/monitoring/device/interfaces/' + self.firewall.outside_interface

        # No token means authentication failed; nothing useful to query.
        if not self.session.headers:
            return

        req = self.session.prepare_request(Request('GET', self.firewall.url + endpoint))
        # (removed leftover debug print of request headers)
        get = connection_handler(self.session, req, self.firewall.verify_ssl)

        if not get:
            return

        influx_payload = [
            {
                "measurement": "Cisco ASA",
                "tags": {
                    "interface": self.firewall.outside_interface
                },
                "time": self.now,
                "fields": {
                    "upload_bitrate": get['outputBitRate'],
                    "download_bitrate": get['inputBitRate']
                }
            }
        ]

        self.dbmanager.write_points(influx_payload)
diff --git a/varken/dbmanager.py b/varken/dbmanager.py
new file mode 100644
index 00000000..4eee8037
--- /dev/null
+++ b/varken/dbmanager.py
@@ -0,0 +1,21 @@
+import logging
+
+from influxdb import InfluxDBClient
+
logger = logging.getLogger('varken')


class DBManager(object):
    """Thin wrapper around InfluxDBClient that ensures the 'varken'
    database (and its retention policy) exists before any writes."""

    def __init__(self, server):
        self.server = server
        self.influx = InfluxDBClient(self.server.url, self.server.port, self.server.username, self.server.password,
                                     'varken')
        databases = [db['name'] for db in self.influx.get_list_database()]

        # First run against this InfluxDB instance: create our database and
        # retention policy.
        # NOTE(review): positional args map to (name, duration, replication,
        # database, default, shard_duration) in influxdb-python — confirm
        # against the pinned client version.
        if 'varken' not in databases:
            self.influx.create_database('varken')
            self.influx.create_retention_policy('varken 30d/1h', '30d', '1', 'varken', False, '1h')

    def write_points(self, data):
        """Write a list of InfluxDB point dicts, logging the payload."""
        # Lazy %-formatting: payload is only rendered when DEBUG is enabled.
        logger.debug('Writing Data to InfluxDB %s', data)
        self.influx.write_points(data)
diff --git a/varken/helpers.py b/varken/helpers.py
new file mode 100644
index 00000000..25f99d9a
--- /dev/null
+++ b/varken/helpers.py
@@ -0,0 +1,94 @@
+import os
+import time
+import tarfile
+import hashlib
+import urllib3
+import geoip2.database
+import logging
+
+from json.decoder import JSONDecodeError
+from os.path import abspath, join
+from requests.exceptions import InvalidSchema, SSLError
+from urllib.request import urlretrieve
+
logger = logging.getLogger('varken')


def geoip_download():
    """Download MaxMind's GeoLite2-City database into ./data.

    Extracts only the .mmdb (flattened out of the archive's dated folder)
    and removes the tarball afterwards.
    """
    tar_dbfile = abspath(join('.', 'data', 'GeoLite2-City.tar.gz'))
    url = 'http://geolite.maxmind.com/download/geoip/database/GeoLite2-City.tar.gz'
    urlretrieve(url, tar_dbfile)
    # BUG FIX: close the archive deterministically (the original leaked the
    # open tarfile handle).
    with tarfile.open(tar_dbfile, 'r:gz') as tar:
        for files in tar.getmembers():
            if 'GeoLite2-City.mmdb' in files.name:
                # Strip the leading directory so the .mmdb lands in ./data.
                files.name = os.path.basename(files.name)
                tar.extract(files, abspath(join('.', 'data')))
    os.remove(tar_dbfile)
+
+
def geo_lookup(ipaddress):
    """Resolve *ipaddress* to a GeoLite2 city record.

    The on-disk database is (re)downloaded when it is missing or older
    than 35 days.
    """
    dbfile = abspath(join('.', 'data', 'GeoLite2-City.mmdb'))
    now = time.time()

    try:
        age_seconds = now - os.stat(dbfile).st_ctime
        if age_seconds > (35 * 86400):
            # Stale database: drop it and fetch a fresh copy.
            os.remove(dbfile)
            geoip_download()
    except FileNotFoundError:
        geoip_download()

    return geoip2.database.Reader(dbfile).city(ipaddress)
+
+
def hashit(string):
    """Return the hex MD5 digest of *string* (used as a stable point hash)."""
    return hashlib.md5(string.encode()).hexdigest()
+
+
def connection_handler(session, request, verify):
    """Send a prepared request and normalize the outcome.

    Returns parsed JSON on HTTP 200, the X-Auth-Token string on an ASA
    204 response, or False on any failure (bad key, 404, SSL problem,
    non-JSON body).
    """
    s = session
    r = request
    v = verify
    return_json = False

    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)

    try:
        get = s.send(r, verify=v)
        if get.status_code == 401:
            logger.info('Your api key is incorrect for %s', r.url)
        elif get.status_code == 404:
            logger.info('This url doesnt even resolve: %s', r.url)
        elif get.status_code == 200:
            try:
                return_json = get.json()
            except JSONDecodeError:
                logger.error('No JSON response... BORKED! Let us know in discord')
        # 204 No Content is for ASA only
        elif get.status_code == 204:
            # BUG FIX: .get() avoids a KeyError when the header is absent.
            token = get.headers.get('X-Auth-Token')
            if token:
                return token

    except InvalidSchema:
        logger.error('You added http(s):// in the config file. Don\'t do that.')

    except SSLError as e:
        logger.error('Either your host is unreachable or you have an SSL issue. : %s', e)

    return return_json
+
+
def mkdir_p(path):
    """Create *path* and any missing parents; no-op if it already exists.

    Failures are logged rather than raised (best-effort creation).
    http://stackoverflow.com/a/600612/190597 (tzot)
    """
    try:
        logger.info('Creating folder %s ', path)
        os.makedirs(path, exist_ok=True)
    except Exception as e:
        logger.error('Could not create folder %s : %s ', path, e)
diff --git a/varken/iniparser.py b/varken/iniparser.py
new file mode 100644
index 00000000..30629a0c
--- /dev/null
+++ b/varken/iniparser.py
@@ -0,0 +1,193 @@
+import configparser
+import logging
+from sys import exit
+from os.path import join, exists
+from varken.structures import SonarrServer, RadarrServer, OmbiServer, TautulliServer, InfluxServer, CiscoASAFirewall
+
logger = logging.getLogger()


class INIParser(object):
    """Parses <data_folder>/varken.ini into typed server structures.

    Each ``*_enabled`` attribute holds either False or the list of
    validated integer ids taken from the [global] section; the matching
    ``*_servers`` / ``*_firewalls`` list holds one structure per id.
    """

    def __init__(self, data_folder):
        # interpolation=None so '%' in passwords/keys is taken literally.
        self.config = configparser.ConfigParser(interpolation=None)
        self.data_folder = data_folder

        self.influx_server = InfluxServer()

        self.sonarr_enabled = False
        self.sonarr_servers = []

        self.radarr_enabled = False
        self.radarr_servers = []

        self.ombi_enabled = False
        self.ombi_servers = []

        self.tautulli_enabled = False
        self.tautulli_servers = []

        self.ciscoasa_enabled = False
        self.ciscoasa_firewalls = []

        self.parse_opts()

    def enable_check(self, server_type=None):
        """Return False when the [global] id list disables this service,
        otherwise the list of validated integer ids."""
        t = server_type
        global_server_ids = self.config.get('global', t)
        if global_server_ids.lower() in ['false', 'no', '0']:
            logger.info('%s disabled.', t.upper())
            return False
        else:
            sids = self.clean_check(global_server_ids, t)
            return sids

    @staticmethod
    def clean_check(server_id_list, server_type=None):
        """Split a comma-separated id list, dropping non-numeric entries.

        Returns the surviving ids as ints, or False if none are valid."""
        t = server_type
        cleaned_list = server_id_list.replace(' ', '').split(',')
        valid_sids = []
        for sid in cleaned_list:
            try:
                valid_sids.append(int(sid))
            except ValueError:
                logger.error("%s is not a valid server id number", sid)

        if valid_sids:
            logger.info('%s : %s', t.upper(), valid_sids)
            return valid_sids
        else:
            logger.error('No valid %s', t.upper())
            return False

    def read_file(self):
        """Load varken.ini into self.config; exit if the file is missing."""
        file_path = join(self.data_folder, 'varken.ini')
        if exists(file_path):
            with open(file_path) as config_ini:
                self.config.read_file(config_ini)
        else:
            exit('Config file missing (varken.ini) in {}'.format(self.data_folder))

    def parse_opts(self):
        """Read the ini file and populate all server/firewall structures.

        BUG FIX: each service now iterates the validated int ids returned
        by enable_check() instead of re-splitting the raw [global] string.
        The old code only stripped edge whitespace, so "1, 2" produced a
        bogus 'sonarr- 2' section lookup, and ids rejected by clean_check
        were still parsed.
        """
        self.read_file()
        # Parse InfluxDB options
        url = self.config.get('influxdb', 'url')
        port = self.config.getint('influxdb', 'port')
        username = self.config.get('influxdb', 'username')
        password = self.config.get('influxdb', 'password')

        self.influx_server = InfluxServer(url, port, username, password)

        # Parse Sonarr options
        self.sonarr_enabled = self.enable_check('sonarr_server_ids')

        if self.sonarr_enabled:
            for server_id in self.sonarr_enabled:
                section = 'sonarr-' + str(server_id)
                url = self.config.get(section, 'url')
                apikey = self.config.get(section, 'apikey')
                scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(section, 'verify_ssl')
                # Verification is meaningless over plain http.
                if scheme != 'https://':
                    verify_ssl = False
                queue = self.config.getboolean(section, 'queue')
                missing_days = self.config.getint(section, 'missing_days')
                future_days = self.config.getint(section, 'future_days')
                missing_days_run_seconds = self.config.getint(section, 'missing_days_run_seconds')
                future_days_run_seconds = self.config.getint(section, 'future_days_run_seconds')
                queue_run_seconds = self.config.getint(section, 'queue_run_seconds')

                server = SonarrServer(server_id, scheme + url, apikey, verify_ssl, missing_days,
                                      missing_days_run_seconds, future_days, future_days_run_seconds,
                                      queue, queue_run_seconds)
                self.sonarr_servers.append(server)

        # Parse Radarr options
        self.radarr_enabled = self.enable_check('radarr_server_ids')

        if self.radarr_enabled:
            for server_id in self.radarr_enabled:
                section = 'radarr-' + str(server_id)
                url = self.config.get(section, 'url')
                apikey = self.config.get(section, 'apikey')
                scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                queue = self.config.getboolean(section, 'queue')
                queue_run_seconds = self.config.getint(section, 'queue_run_seconds')
                get_missing = self.config.getboolean(section, 'get_missing')
                get_missing_run_seconds = self.config.getint(section, 'get_missing_run_seconds')

                server = RadarrServer(server_id, scheme + url, apikey, verify_ssl, queue, queue_run_seconds,
                                      get_missing, get_missing_run_seconds)
                self.radarr_servers.append(server)

        # Parse Tautulli options
        self.tautulli_enabled = self.enable_check('tautulli_server_ids')

        if self.tautulli_enabled:
            for server_id in self.tautulli_enabled:
                section = 'tautulli-' + str(server_id)
                url = self.config.get(section, 'url')
                fallback_ip = self.config.get(section, 'fallback_ip')
                apikey = self.config.get(section, 'apikey')
                scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                get_activity = self.config.getboolean(section, 'get_activity')
                get_activity_run_seconds = self.config.getint(section, 'get_activity_run_seconds')

                server = TautulliServer(server_id, scheme + url, fallback_ip, apikey, verify_ssl, get_activity,
                                        get_activity_run_seconds)
                self.tautulli_servers.append(server)

        # Parse Ombi options
        self.ombi_enabled = self.enable_check('ombi_server_ids')

        if self.ombi_enabled:
            for server_id in self.ombi_enabled:
                section = 'ombi-' + str(server_id)
                url = self.config.get(section, 'url')
                apikey = self.config.get(section, 'apikey')
                scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                request_type_counts = self.config.getboolean(section, 'get_request_type_counts')
                request_type_run_seconds = self.config.getint(section, 'request_type_run_seconds')
                request_total_counts = self.config.getboolean(section, 'get_request_total_counts')
                request_total_run_seconds = self.config.getint(section, 'request_total_run_seconds')

                server = OmbiServer(server_id, scheme + url, apikey, verify_ssl, request_type_counts,
                                    request_type_run_seconds, request_total_counts, request_total_run_seconds)
                self.ombi_servers.append(server)

        # Parse ASA opts
        self.ciscoasa_enabled = self.enable_check('ciscoasa_firewall_ids')

        if self.ciscoasa_enabled:
            for firewall_id in self.ciscoasa_enabled:
                section = 'ciscoasa-' + str(firewall_id)
                url = self.config.get(section, 'url')
                username = self.config.get(section, 'username')
                password = self.config.get(section, 'password')
                scheme = 'https://' if self.config.getboolean(section, 'ssl') else 'http://'
                verify_ssl = self.config.getboolean(section, 'verify_ssl')
                if scheme != 'https://':
                    verify_ssl = False
                outside_interface = self.config.get(section, 'outside_interface')
                get_bandwidth_run_seconds = self.config.getint(section, 'get_bandwidth_run_seconds')

                firewall = CiscoASAFirewall(firewall_id, scheme + url, username, password, outside_interface,
                                            verify_ssl, get_bandwidth_run_seconds)
                self.ciscoasa_firewalls.append(firewall)
diff --git a/varken/ombi.py b/varken/ombi.py
new file mode 100644
index 00000000..f38ca49a
--- /dev/null
+++ b/varken/ombi.py
@@ -0,0 +1,82 @@
+import logging
+from requests import Session, Request
+from datetime import datetime, timezone
+
+from varken.helpers import connection_handler
+from varken.structures import OmbiRequestCounts
+
+
class OmbiAPI(object):
    """Pulls request totals and counts from one Ombi server into InfluxDB."""

    def __init__(self, server, dbmanager):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load
        self.session = Session()
        self.session.headers = {'Apikey': self.server.api_key}
        self.logger = logging.getLogger()

    def __repr__(self):
        # BUG FIX: the original returned "".format(...), i.e. always ''.
        return "<ombi-{}>".format(self.server.id)

    def get_total_requests(self):
        """Write total/movie/tv request counts as a single point."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        tv_endpoint = '/api/v1/Request/tv'
        movie_endpoint = "/api/v1/Request/movie"

        tv_req = self.session.prepare_request(Request('GET', self.server.url + tv_endpoint))
        movie_req = self.session.prepare_request(Request('GET', self.server.url + movie_endpoint))
        get_tv = connection_handler(self.session, tv_req, self.server.verify_ssl)
        get_movie = connection_handler(self.session, movie_req, self.server.verify_ssl)

        # BUG FIX: connection_handler returns False on failure, but a server
        # with zero requests legitimately returns an empty list, which the
        # old `not all([...])` check wrongly treated as a failure.
        if get_tv is False or get_movie is False:
            return

        movie_requests = len(get_movie)
        tv_requests = len(get_tv)

        influx_payload = [
            {
                "measurement": "Ombi",
                "tags": {
                    "type": "Request_Total",
                    "server": self.server.id
                },
                "time": self.now,
                "fields": {
                    "total": movie_requests + tv_requests,
                    "movies": movie_requests,
                    "tv_shows": tv_requests
                }
            }
        ]

        self.dbmanager.write_points(influx_payload)

    def get_request_counts(self):
        """Write pending/approved/available request counts."""
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        endpoint = '/api/v1/Request/count'

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        requests = OmbiRequestCounts(**get)
        influx_payload = [
            {
                "measurement": "Ombi",
                "tags": {
                    "type": "Request_Counts"
                },
                "time": self.now,
                "fields": {
                    "pending": requests.pending,
                    "approved": requests.approved,
                    "available": requests.available
                }
            }
        ]

        self.dbmanager.write_points(influx_payload)
diff --git a/varken/radarr.py b/varken/radarr.py
new file mode 100644
index 00000000..db4dd2e7
--- /dev/null
+++ b/varken/radarr.py
@@ -0,0 +1,128 @@
+import logging
+from requests import Session, Request
+from datetime import datetime, timezone
+
+from varken.helpers import hashit, connection_handler
+from varken.structures import Movie, Queue
+
+
class RadarrAPI(object):
    """Collects missing-movie and download-queue data from one Radarr server."""

    def __init__(self, server, dbmanager):
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.server = server
        # Create session to reduce server web thread load
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.logger = logging.getLogger()

    def __repr__(self):
        # BUG FIX: the original returned "".format(...), i.e. always ''.
        return "<radarr-{}>".format(self.server.id)

    def get_missing(self):
        """Write one point per undownloaded movie, tagged with availability."""
        endpoint = '/api/movie'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        missing = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        try:
            movies = [Movie(**movie) for movie in get]
        except TypeError as e:
            self.logger.error('TypeError has occurred : %s while creating Movie structure', e)
            return

        for movie in movies:
            if not movie.downloaded:
                # Normalize to a plain bool for the Missing_Available tag.
                ma = bool(movie.isAvailable)
                movie_name = '{} ({})'.format(movie.title, movie.year)
                missing.append((movie_name, ma, movie.tmdbId))

        for title, ma, mid in missing:
            hash_id = hashit('{}{}{}'.format(self.server.id, title, mid))
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "Missing": True,
                        "Missing_Available": ma,
                        "tmdbId": mid,
                        "server": self.server.id,
                        "name": title
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    def get_queue(self):
        """Write one point per queued download, tagged with protocol/quality."""
        endpoint = '/api/queue'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []
        queue = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        # Replace each raw movie dict with a Movie structure before building
        # the Queue structures.
        for movie in get:
            try:
                movie['movie'] = Movie(**movie['movie'])
            except TypeError as e:
                self.logger.error('TypeError has occurred : %s while creating Movie structure', e)
                return

        try:
            download_queue = [Queue(**movie) for movie in get]
        except TypeError as e:
            self.logger.error('TypeError has occurred : %s while creating Queue structure', e)
            return

        for queue_item in download_queue:
            movie = queue_item.movie
            name = '{} ({})'.format(movie.title, movie.year)

            # protocol_id: 1 for usenet, 0 for torrent (colorizable in Grafana).
            if queue_item.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((name, queue_item.quality['quality']['name'], queue_item.protocol.upper(),
                          protocol_id, queue_item.id))

        for name, quality, protocol, protocol_id, qid in queue:
            hash_id = hashit('{}{}{}'.format(self.server.id, name, quality))
            influx_payload.append(
                {
                    "measurement": "Radarr",
                    "tags": {
                        "type": "Queue",
                        "tmdbId": qid,
                        "server": self.server.id,
                        "name": name,
                        "quality": quality,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
diff --git a/varken/sonarr.py b/varken/sonarr.py
new file mode 100644
index 00000000..8f718178
--- /dev/null
+++ b/varken/sonarr.py
@@ -0,0 +1,178 @@
+import logging
+from requests import Session, Request
+from datetime import datetime, timezone, date, timedelta
+
+from varken.helpers import hashit, connection_handler
+from varken.structures import Queue, TVShow
+
+
class SonarrAPI(object):
    """Collects missing, upcoming and queued episode data from one Sonarr
    server and writes it to InfluxDB via the supplied dbmanager."""

    def __init__(self, server, dbmanager):
        # Set Time of initialization
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.today = str(date.today())
        self.server = server
        # Create session to reduce server web thread load, and globally define pageSize for all requests
        self.session = Session()
        self.session.headers = {'X-Api-Key': self.server.api_key}
        self.session.params = {'pageSize': 1000}
        self.logger = logging.getLogger()

    def __repr__(self):
        # BUG FIX: the original returned "".format(...), i.e. always ''.
        return "<sonarr-{}>".format(self.server.id)

    def get_missing(self):
        """Write one point per aired-but-missing episode within the
        configured look-back window (server.missing_days)."""
        endpoint = '/api/calendar'
        last_days = str(date.today() + timedelta(days=-self.server.missing_days))
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        params = {'start': last_days, 'end': self.today}
        influx_payload = []
        missing = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        # Iteratively create a list of TVShow Objects from response json
        try:
            tv_shows = [TVShow(**show) for show in get]
        except TypeError as e:
            self.logger.error('TypeError has occurred : %s while creating TVShow structure', e)
            return

        # Add show to missing list if file does not exist
        for show in tv_shows:
            if not show.hasFile:
                sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
                missing.append((show.series['title'], sxe, show.airDate, show.title, show.id))

        for series_title, sxe, air_date, episode_title, sonarr_id in missing:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Missing",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "airs": air_date
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    def get_future(self):
        """Write one point per episode airing within the configured
        look-ahead window (server.future_days), tagged downloaded 0/1."""
        endpoint = '/api/calendar/'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        future = str(date.today() + timedelta(days=self.server.future_days))
        influx_payload = []
        air_days = []
        params = {'start': self.today, 'end': future}

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint, params=params))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        try:
            tv_shows = [TVShow(**show) for show in get]
        except TypeError as e:
            self.logger.error('TypeError has occurred : %s while creating TVShow structure', e)
            return

        for show in tv_shows:
            sxe = 'S{:0>2}E{:0>2}'.format(show.seasonNumber, show.episodeNumber)
            if show.hasFile:
                downloaded = 1
            else:
                downloaded = 0
            air_days.append((show.series['title'], downloaded, sxe, show.title, show.airDate, show.id))

        for series_title, dl_status, sxe, episode_title, air_date, sonarr_id in air_days:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Future",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "airs": air_date,
                        "downloaded": dl_status
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)

    def get_queue(self):
        """Write one point per queued episode download, tagged with
        protocol (protocol_id: 1 usenet, 0 torrent)."""
        influx_payload = []
        endpoint = '/api/queue'
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        queue = []

        req = self.session.prepare_request(Request('GET', self.server.url + endpoint))
        get = connection_handler(self.session, req, self.server.verify_ssl)

        if not get:
            return

        try:
            download_queue = [Queue(**show) for show in get]
        except TypeError as e:
            self.logger.error('TypeError has occurred : %s while creating Queue structure', e)
            return

        for show in download_queue:
            sxe = 'S{:0>2}E{:0>2}'.format(show.episode['seasonNumber'], show.episode['episodeNumber'])
            if show.protocol.upper() == 'USENET':
                protocol_id = 1
            else:
                protocol_id = 0

            queue.append((show.series['title'], show.episode['title'], show.protocol.upper(),
                          protocol_id, sxe, show.id))

        for series_title, episode_title, protocol, protocol_id, sxe, sonarr_id in queue:
            hash_id = hashit('{}{}{}'.format(self.server.id, series_title, sxe))
            influx_payload.append(
                {
                    "measurement": "Sonarr",
                    "tags": {
                        "type": "Queue",
                        "sonarrId": sonarr_id,
                        "server": self.server.id,
                        "name": series_title,
                        "epname": episode_title,
                        "sxe": sxe,
                        "protocol": protocol,
                        "protocol_id": protocol_id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        self.dbmanager.write_points(influx_payload)
diff --git a/varken/structures.py b/varken/structures.py
new file mode 100644
index 00000000..44c202a5
--- /dev/null
+++ b/varken/structures.py
@@ -0,0 +1,338 @@
+from typing import NamedTuple
+
+
class Queue(NamedTuple):
    """One item from a Sonarr/Radarr ``/api/queue`` response.

    Field names mirror the API payload keys so instances can be built with
    ``Queue(**item)``. Everything defaults to None, which tolerates missing
    keys; an *unexpected* key still raises TypeError (callers catch it).
    NOTE(review): ``movie`` presumably comes from Radarr and
    ``series``/``episode`` from Sonarr — only the Sonarr usage is visible here.
    """
    movie: dict = None
    series: dict = None
    episode: dict = None
    quality: dict = None
    size: float = None
    title: str = None
    sizeleft: float = None
    timeleft: str = None
    estimatedCompletionTime: str = None
    status: str = None
    trackedDownloadStatus: str = None
    statusMessages: list = None
    downloadId: str = None
    protocol: str = None
    id: int = None
+
+
class SonarrServer(NamedTuple):
    """Per-instance Sonarr settings.

    # NOTE(review): presumably populated from the ini config — confirm in the
    # config parser. The *_run_seconds fields look like scheduler intervals.
    """
    id: int = None
    url: str = None
    api_key: str = None
    verify_ssl: bool = False
    missing_days: int = 0
    missing_days_run_seconds: int = 30
    future_days: int = 0
    future_days_run_seconds: int = 30
    queue: bool = False
    queue_run_seconds: int = 30
+
+
class RadarrServer(NamedTuple):
    """Per-instance Radarr settings.

    # NOTE(review): presumably populated from the ini config — confirm in the
    # config parser. The *_run_seconds fields look like scheduler intervals.
    """
    id: int = None
    url: str = None
    api_key: str = None
    verify_ssl: bool = False
    queue: bool = False
    queue_run_seconds: int = 30
    get_missing: bool = False
    get_missing_run_seconds: int = 30
+
+
class OmbiServer(NamedTuple):
    """Per-instance Ombi settings.

    # NOTE(review): presumably populated from the ini config — confirm in the
    # config parser. The *_run_seconds fields look like scheduler intervals.
    """
    id: int = None
    url: str = None
    api_key: str = None
    verify_ssl: bool = False
    request_type_counts: bool = False
    request_type_run_seconds: int = 30
    request_total_counts: bool = False
    request_total_run_seconds: int = 30
+
+
class TautulliServer(NamedTuple):
    """Per-instance Tautulli settings.

    ``fallback_ip`` is used by TautulliAPI when a session's public IP cannot
    be geo-located.
    """
    id: int = None
    url: str = None
    fallback_ip: str = None
    api_key: str = None
    verify_ssl: bool = None  # NOTE(review): other server tuples default to False — confirm intended
    get_activity: bool = False
    get_activity_run_seconds: int = 30
+
+
class InfluxServer(NamedTuple):
    """InfluxDB connection settings; defaults match InfluxDB's own
    out-of-the-box root/root on localhost:8086.
    """
    url: str = 'localhost'
    port: int = 8086
    username: str = 'root'
    password: str = 'root'
+
class CiscoASAFirewall(NamedTuple):
    """Cisco ASA firewall settings for bandwidth polling.

    # NOTE(review): defaults (cisco/cisco @ 192.168.1.1) look like
    # placeholders for the example config — confirm they are never used live.
    """
    id: int = None
    url: str = '192.168.1.1'
    username: str = 'cisco'
    password: str = 'cisco'
    outside_interface: str = None
    verify_ssl: bool = False
    get_bandwidth_run_seconds: int = 30
+
class OmbiRequestCounts(NamedTuple):
    """Counts of Ombi requests by state (pending/approved/available)."""
    pending: int = 0
    approved: int = 0
    available: int = 0
+
+
class TautulliStream(NamedTuple):
    """One active session from Tautulli's ``get_activity`` API response.

    Field names mirror the (very large) session payload one-to-one so that
    instances can be built with ``TautulliStream(**session)``; every field
    defaults to None to tolerate keys missing from a given payload. Most
    values arrive as strings even when numeric — that is how Tautulli
    serialises them.
    # NOTE(review): a payload key NOT listed here raises TypeError in the
    # consumer (caught and logged there); new Tautulli versions may add keys.
    """
    rating: str = None
    transcode_width: str = None
    labels: list = None
    stream_bitrate: str = None
    bandwidth: str = None
    optimized_version: int = None
    video_language: str = None
    parent_rating_key: str = None
    rating_key: str = None
    platform_version: str = None
    transcode_hw_decoding: int = None
    thumb: str = None
    title: str = None
    video_codec_level: str = None
    tagline: str = None
    last_viewed_at: str = None
    audio_sample_rate: str = None
    user_rating: str = None
    platform: str = None
    collections: list = None
    location: str = None
    transcode_container: str = None
    audio_channel_layout: str = None
    local: str = None
    stream_subtitle_format: str = None
    stream_video_ref_frames: str = None
    transcode_hw_encode_title: str = None
    stream_container_decision: str = None
    audience_rating: str = None
    full_title: str = None
    ip_address: str = None
    subtitles: int = None
    stream_subtitle_language: str = None
    channel_stream: int = None
    video_bitrate: str = None
    is_allow_sync: int = None
    stream_video_bitrate: str = None
    summary: str = None
    stream_audio_decision: str = None
    aspect_ratio: str = None
    audio_bitrate_mode: str = None
    transcode_hw_decode_title: str = None
    stream_audio_channel_layout: str = None
    deleted_user: int = None
    library_name: str = None
    art: str = None
    stream_video_resolution: str = None
    video_profile: str = None
    sort_title: str = None
    stream_video_codec_level: str = None
    stream_video_height: str = None
    year: str = None
    stream_duration: str = None
    stream_audio_channels: str = None
    video_language_code: str = None
    transcode_key: str = None
    transcode_throttled: int = None
    container: str = None
    stream_audio_bitrate: str = None
    user: str = None
    selected: int = None
    product_version: str = None
    subtitle_location: str = None
    transcode_hw_requested: int = None
    video_height: str = None
    state: str = None
    is_restricted: int = None
    email: str = None
    stream_container: str = None
    transcode_speed: str = None
    video_bit_depth: str = None
    stream_audio_sample_rate: str = None
    grandparent_title: str = None
    studio: str = None
    transcode_decision: str = None
    video_width: str = None
    bitrate: str = None
    machine_id: str = None
    originally_available_at: str = None
    video_frame_rate: str = None
    synced_version_profile: str = None
    friendly_name: str = None
    audio_profile: str = None
    optimized_version_title: str = None
    platform_name: str = None
    stream_video_language: str = None
    keep_history: int = None
    stream_audio_codec: str = None
    stream_video_codec: str = None
    grandparent_thumb: str = None
    synced_version: int = None
    transcode_hw_decode: str = None
    user_thumb: str = None
    stream_video_width: str = None
    height: str = None
    stream_subtitle_decision: str = None
    audio_codec: str = None
    parent_title: str = None
    guid: str = None
    audio_language_code: str = None
    transcode_video_codec: str = None
    transcode_audio_codec: str = None
    stream_video_decision: str = None
    user_id: int = None
    transcode_height: str = None
    transcode_hw_full_pipeline: int = None
    throttled: str = None
    quality_profile: str = None
    width: str = None
    live: int = None
    stream_subtitle_forced: int = None
    media_type: str = None
    video_resolution: str = None
    stream_subtitle_location: str = None
    do_notify: int = None
    video_ref_frames: str = None
    stream_subtitle_language_code: str = None
    audio_channels: str = None
    stream_audio_language_code: str = None
    optimized_version_profile: str = None
    relay: int = None
    duration: str = None
    rating_image: str = None
    is_home_user: int = None
    is_admin: int = None
    ip_address_public: str = None
    allow_guest: int = None
    transcode_audio_channels: str = None
    stream_audio_channel_layout_: str = None
    media_index: str = None
    stream_video_framerate: str = None
    transcode_hw_encode: str = None
    grandparent_rating_key: str = None
    original_title: str = None
    added_at: str = None
    banner: str = None
    bif_thumb: str = None
    parent_media_index: str = None
    live_uuid: str = None
    audio_language: str = None
    stream_audio_bitrate_mode: str = None
    username: str = None
    subtitle_decision: str = None
    children_count: str = None
    updated_at: str = None
    player: str = None
    subtitle_format: str = None
    file: str = None
    file_size: str = None
    session_key: str = None
    id: str = None
    subtitle_container: str = None
    genres: list = None
    stream_video_language_code: str = None
    indexes: int = None
    video_decision: str = None
    stream_audio_language: str = None
    writers: list = None
    actors: list = None
    progress_percent: str = None
    audio_decision: str = None
    subtitle_forced: int = None
    profile: str = None
    product: str = None
    view_offset: str = None
    type: str = None
    audience_rating_image: str = None
    audio_bitrate: str = None
    section_id: str = None
    stream_subtitle_codec: str = None
    subtitle_codec: str = None
    video_codec: str = None
    device: str = None
    stream_video_bit_depth: str = None
    video_framerate: str = None
    transcode_hw_encoding: int = None
    transcode_protocol: str = None
    shared_libraries: list = None
    stream_aspect_ratio: str = None
    content_rating: str = None
    session_id: str = None
    directors: list = None
    parent_thumb: str = None
    subtitle_language_code: str = None
    transcode_progress: int = None
    subtitle_language: str = None
    stream_subtitle_container: str = None
    sub_type: str = None
+
+
class TVShow(NamedTuple):
    """A Sonarr calendar/episode record; field names mirror Sonarr's API
    payload so instances can be built with ``TVShow(**episode)``.
    """
    seriesId: int = None
    episodeFileId: int = None
    seasonNumber: int = None
    episodeNumber: int = None
    title: str = None
    airDate: str = None
    airDateUtc: str = None
    overview: str = None
    episodeFile: dict = None
    hasFile: bool = None
    monitored: bool = None
    unverifiedSceneNumbering: bool = None
    absoluteEpisodeNumber: int = None
    sceneAbsoluteEpisodeNumber: int = None
    sceneEpisodeNumber: int = None
    sceneSeasonNumber: int = None
    series: dict = None
    id: int = None
+
+
class Movie(NamedTuple):
    """A Radarr movie record; field names mirror Radarr's API payload so
    instances can be built with ``Movie(**movie)``.
    """
    title: str = None
    alternativeTitles: list = None
    secondaryYearSourceId: int = None
    sortTitle: str = None
    sizeOnDisk: int = None
    status: str = None
    overview: str = None
    inCinemas: str = None
    images: list = None
    downloaded: bool = None
    year: int = None
    secondaryYear: str = None
    hasFile: bool = None
    youTubeTrailerId: str = None
    studio: str = None
    path: str = None
    profileId: int = None
    pathState: str = None
    monitored: bool = None
    minimumAvailability: str = None
    isAvailable: bool = None
    folderName: str = None
    runtime: int = None
    lastInfoSync: str = None
    cleanTitle: str = None
    imdbId: str = None
    tmdbId: int = None
    titleSlug: str = None
    genres: list = None
    tags: list = None
    added: str = None
    ratings: dict = None
    movieFile: dict = None
    qualityProfileId: int = None
    physicalRelease: str = None
    physicalReleaseNote: str = None
    website: str = None
    id: int = None
diff --git a/varken/tautulli.py b/varken/tautulli.py
new file mode 100644
index 00000000..5ce67735
--- /dev/null
+++ b/varken/tautulli.py
@@ -0,0 +1,148 @@
+import logging
+from requests import Session, Request
+from datetime import datetime, timezone
+from geoip2.errors import AddressNotFoundError
+
+from varken.helpers import geo_lookup, hashit, connection_handler
+from varken.structures import TautulliStream
+
+
class TautulliAPI(object):
    """Client for one Tautulli server.

    Polls the v2 API's ``get_activity`` command and writes two kinds of
    InfluxDB points: one "Session" point per active stream (tagged with
    GeoIP-derived location data) and one "current_stream_stats" summary
    point with the bandwidth/stream counters.
    """

    def __init__(self, server, dbmanager):
        """
        :param server: TautulliServer tuple (url, api_key, verify_ssl,
            fallback_ip, id) from the parsed configuration.
        :param dbmanager: DBManager used to write generated points.
        """
        # Set Time of initialization
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        self.dbmanager = dbmanager
        self.server = server
        self.session = Session()
        # Every request to /api/v2 carries the API key and the command
        self.session.params = {'apikey': self.server.api_key, 'cmd': 'get_activity'}
        self.endpoint = '/api/v2'
        self.logger = logging.getLogger()

    def __repr__(self):
        # BUG FIX: previously `return "".format(self.server.id)` — the format
        # string was empty, so the id was discarded and repr() was always "".
        # Include the server id so instances are identifiable in logs.
        return "<tautulli-{}>".format(self.server.id)

    def get_activity(self):
        """Fetch current activity from Tautulli and write it to InfluxDB.

        Returns early (writing nothing) when the request fails or the
        response cannot be mapped onto TautulliStream.
        """
        self.now = datetime.now(timezone.utc).astimezone().isoformat()
        influx_payload = []

        req = self.session.prepare_request(Request('GET', self.server.url + self.endpoint))
        g = connection_handler(self.session, req, self.server.verify_ssl)

        if not g:
            return

        get = g['response']['data']

        try:
            sessions = [TautulliStream(**session) for session in get['sessions']]
        except TypeError as e:
            self.logger.error('TypeError has occurred : %s while creating TautulliStream structure', e)
            return

        for session in sessions:
            try:
                geodata = geo_lookup(session.ip_address_public)
            except (ValueError, AddressNotFoundError):
                # LAN / unresolvable address: use the configured fallback IP,
                # otherwise look up this host's own public IP instead.
                if self.server.fallback_ip:
                    geodata = geo_lookup(self.server.fallback_ip)
                else:
                    my_ip = self.session.get('http://ip.42.pl/raw').text
                    geodata = geo_lookup(my_ip)

            if not all([geodata.location.latitude, geodata.location.longitude]):
                # Placeholder coordinates when the GeoIP db has no location
                latitude = 37.234332396
                longitude = -115.80666344
            else:
                latitude = geodata.location.latitude
                longitude = geodata.location.longitude

            # "copy" means the stream is remuxed, shown as "direct stream"
            decision = session.transcode_decision
            if decision == 'copy':
                decision = 'direct stream'

            video_decision = session.stream_video_decision
            if video_decision == 'copy':
                video_decision = 'direct stream'
            elif video_decision == '':
                # No video stream at all -> music session
                video_decision = 'Music'

            quality = session.stream_video_resolution
            if not quality:
                # Audio-only: show the container (e.g. MP3) as "quality"
                quality = session.container.upper()
            elif quality in ('SD', 'sd', '4k'):
                quality = session.stream_video_resolution.upper()
            else:
                quality = session.stream_video_resolution + 'p'

            # Numeric player-state tag. NOTE(review): 2 is skipped —
            # presumably matches a dashboard value mapping; confirm there.
            player_state = session.state.lower()
            if player_state == 'playing':
                player_state = 0
            elif player_state == 'paused':
                player_state = 1
            elif player_state == 'buffering':
                player_state = 3

            product_version = session.product_version
            if session.platform == 'Roku':
                # Roku reports "version-build"; keep only the version part
                product_version = session.product_version.split('-')[0]

            hash_id = hashit('{}{}{}{}'.format(session.session_id, session.session_key, session.username,
                                               session.full_title))
            influx_payload.append(
                {
                    "measurement": "Tautulli",
                    "tags": {
                        "type": "Session",
                        "session_id": session.session_id,
                        "friendly_name": session.friendly_name,
                        "username": session.username,
                        "title": session.full_title,
                        "platform": session.platform,
                        "product_version": product_version,
                        "quality": quality,
                        "video_decision": video_decision.title(),
                        "transcode_decision": decision.title(),
                        "media_type": session.media_type.title(),
                        "audio_codec": session.audio_codec.upper(),
                        "audio_profile": session.audio_profile.upper(),
                        "stream_audio_codec": session.stream_audio_codec.upper(),
                        "quality_profile": session.quality_profile,
                        "progress_percent": session.progress_percent,
                        "region_code": geodata.subdivisions.most_specific.iso_code,
                        "location": geodata.city.name,
                        "full_location": '{} - {}'.format(geodata.subdivisions.most_specific.name,
                                                          geodata.city.name),
                        "latitude": latitude,
                        "longitude": longitude,
                        "player_state": player_state,
                        "device_type": session.platform,
                        "server": self.server.id
                    },
                    "time": self.now,
                    "fields": {
                        "hash": hash_id
                    }
                }
            )

        # One summary point per poll with the aggregate counters
        influx_payload.append(
            {
                "measurement": "Tautulli",
                "tags": {
                    "type": "current_stream_stats",
                    "server": self.server.id
                },
                "time": self.now,
                "fields": {
                    "stream_count": int(get['stream_count']),
                    "total_bandwidth": int(get['total_bandwidth']),
                    "wan_bandwidth": int(get['wan_bandwidth']),
                    "lan_bandwidth": int(get['lan_bandwidth']),
                    "transcode_streams": int(get['stream_count_transcode']),
                    "direct_play_streams": int(get['stream_count_direct_play']),
                    "direct_streams": int(get['stream_count_direct_stream'])
                }
            }
        )

        self.dbmanager.write_points(influx_payload)
diff --git a/varken/varkenlogger.py b/varken/varkenlogger.py
new file mode 100644
index 00000000..92d097b8
--- /dev/null
+++ b/varken/varkenlogger.py
@@ -0,0 +1,55 @@
+import logging
+
+from logging.handlers import RotatingFileHandler
+from varken.helpers import mkdir_p
+
# Rotating-log configuration used by VarkenLogger below.
FILENAME = "varken.log"
MAX_SIZE = 5000000 # 5 MB
MAX_FILES = 5  # number of rotated backups kept
LOG_FOLDER = 'logs'  # created under the data folder
+
+
class VarkenLogger(object):
    """Configures root logging for Varken.

    Installs two handlers on the root logger: a rotating file handler
    writing to ``<data_folder>/logs/varken.log`` and a console (stream)
    handler. Both use the same formatter and log at DEBUG when *debug*
    is truthy, INFO otherwise.
    """

    def __init__(self, log_path=None, debug=None, data_folder=None):
        """
        :param log_path: currently unused — kept for interface compatibility.
        :param debug: truthy to log at DEBUG level instead of INFO.
        :param data_folder: base folder under which the logs directory lives.
        """
        self.data_folder = data_folder
        # Was: `self.log_level = debug` immediately overwritten below —
        # collapse the dead assignment into a single conditional.
        self.log_level = logging.DEBUG if debug else logging.INFO

        # Make the log directory if it does not exist
        mkdir_p('{}/{}'.format(self.data_folder, LOG_FOLDER))

        # Root logger stays at DEBUG; the handlers filter to self.log_level
        self.logger = logging.getLogger()
        self.logger.setLevel(logging.DEBUG)

        # Shared formatter for both handlers
        logger_formatter = logging.Formatter(
            '%(asctime)s : %(levelname)s : %(module)s : %(message)s',
            '%Y-%m-%d %H:%M:%S')

        # Handler for logging data to a rotating file
        file_logger = RotatingFileHandler(
            '{}/{}/{}'.format(self.data_folder, LOG_FOLDER, FILENAME),
            mode='a', maxBytes=MAX_SIZE, backupCount=MAX_FILES,
            encoding=None, delay=0)
        file_logger.setLevel(self.log_level)
        file_logger.setFormatter(logger_formatter)

        # Handler for logging to the console
        console_logger = logging.StreamHandler()
        console_logger.setFormatter(logger_formatter)
        console_logger.setLevel(self.log_level)

        self.logger.addHandler(file_logger)
        self.logger.addHandler(console_logger)