recent changes #59

Merged
ben merged 1 commit from b/infra:main into main 2023-05-15 13:31:55 +00:00
27 changed files with 1149 additions and 242 deletions

View File

@@ -10,6 +10,7 @@ airconnect_group:
# for flac: 'flc' # for flac: 'flc'
airconnect_codec: mp3:320 airconnect_codec: mp3:320
airconnect_latency: 1000:2000
airconnect_max_volume: "100" airconnect_max_volume: "100"
airconnect_upnp: [] airconnect_upnp: []

View File

@@ -15,7 +15,7 @@
<metadata>1</metadata> <metadata>1</metadata>
<flush>1</flush> <flush>1</flush>
<artwork></artwork> <artwork></artwork>
<latency>0:500</latency> <latency>{{ airconnect_latency }}</latency>
<drift>0</drift> <drift>0</drift>
</common> </common>
<main_log>info</main_log> <main_log>info</main_log>

View File

@@ -5,13 +5,15 @@ from datetime import datetime
import subprocess import subprocess
import json import json
import sys import sys
import shutil
import os import os
import time import time
import eyed3
from loguru import logger from loguru import logger
import dateutil.parser import dateutil.parser
from dateutil.parser._parser import ParserError from dateutil.parser._parser import ParserError
from mutagen.id3 import ID3
def delete_file(file_path): def delete_file(file_path):
try: try:
@@ -24,8 +26,7 @@ def delete_file(file_path):
def replace_file(src, dest): def replace_file(src, dest):
try: try:
logger.debug(f"src: {src}, dest: {dest}") logger.debug(f"src: {src}, dest: {dest}")
delete_file(dest) os.replace(src, dest)
shutil.move(src, dest)
logger.debug(f"replaced '{dest}'") logger.debug(f"replaced '{dest}'")
except (PermissionError, OSError, FileNotFoundError) as e: except (PermissionError, OSError, FileNotFoundError) as e:
logger.error(e) logger.error(e)
@@ -61,9 +62,8 @@ def ffmpeg_write_date_tag(podcast_file, out_file, iso_date_str):
"-nostdin", "-nostdin",
"-y", # overwrite output file "-y", # overwrite output file
"-i", podcast_file, "-i", podcast_file,
#"-c", "copy", "-c", "copy",
"-metadata", f"date={iso_date_str}", "-metadata", f"date={iso_date_str}",
#"-metadata", f"releasedate={iso_date_str}",
out_file out_file
] ]
@@ -80,24 +80,16 @@ def ffmpeg_write_date_tag(podcast_file, out_file, iso_date_str):
def eyeD3_write_date_tag(podcast_file, iso_date_str): def eyeD3_write_date_tag(podcast_file, iso_date_str):
# import eyeD3 ?
podcast_dir = os.path.basename(podcast_file)
cover_path = os.path.join(podcast_dir, "cover.jpg")
cmd = [ cmd = [
"eyeD3", "eyeD3",
"--release-date", iso_date_str, "--release-date", iso_date_str,
"--orig-release-date", iso_date_str, "--to-v2.4",
"--recording-date", iso_date_str, "--user-text-frame", f"releasedate:{iso_date_str}",
# this overwrites 'release date' i think: # "--preserve-file-times",
#"--release-year", iso_date_str.split("-")[0], # "--recording-date", iso_date_str,
#"--preserve-file-times" # "--orig-release-date", iso_date_str,
podcast_file
] ]
# if os.path.exists(cover_path):
# cmd.extend(["--add-image", f"{cover_path}:FRONT_COVER"])
cmd.append(podcast_file)
logger.debug(" ".join(cmd)) logger.debug(" ".join(cmd))
@@ -122,7 +114,6 @@ def get_podcast_name_from_dir(podcast_file):
def eyeD3_write_album_tag(podcast_file, podcast_name): def eyeD3_write_album_tag(podcast_file, podcast_name):
# "album" is the name of the podcast # "album" is the name of the podcast
cmd = ["eyeD3", "--album", podcast_name, podcast_file] cmd = ["eyeD3", "--album", podcast_name, podcast_file]
try: try:
subprocess.run(cmd, capture_output=True, check=True, stdin=None) subprocess.run(cmd, capture_output=True, check=True, stdin=None)
@@ -132,27 +123,101 @@ def eyeD3_write_album_tag(podcast_file, podcast_name):
def parse_iso_date(date_str): def parse_iso_date(date_str):
if date_str is None:
return None
try: try:
dt = dateutil.parser.parse(date_str) dt = dateutil.parser.parse(date_str)
return dt.date().isoformat() return dt.date().isoformat()
except (ParserError, TypeError) as e: except (ParserError, TypeError) as e:
logger.warning(f"invalid date string: '{date_str}'") logger.warning(f"invalid date string: '{date_str}'")
return None
def parse_TDAT_tag(tag_tdat): def parse_TDAT_tag(tag_tdat, tag_tyer):
if tag_tdat is None:
return None
if tag_tyer is None:
return None
if not isinstance(tag_tdat, str) or len(tag_tdat) != 4:
return None
if not isinstance(tag_tyer, str) or len(tag_tyer) != 4:
return None
# TDAT is id3 v2.3: DDMM
# TYER is id3 v2.3: YYYY
try: try:
iso_date_str = tag_tdat.split(' ')[0] #iso_date_str = tag_tdat.split(' ')[0]
return parse_iso_date(iso_date_str) #return parse_iso_date(iso_date_str)
DD = tag_tdat[0:2]
MM = tag_tdat[2:4]
YYYY = tag_tyer[0:4]
isofmt = f"{YYYY}-{MM}-{DD}"
if is_iso_string(isofmt):
return isofmt
else:
logger.warning(f"invalid TDAT: {tag_tdat} and TYER: {tag_tyer}")
return None
except (AttributeError, IndexError) as e: except (AttributeError, IndexError) as e:
logger.debug(f"invalid 'TDAT' tag: '{tag_tdat}'") logger.debug(f"invalid 'TDAT' tag: '{tag_tdat}'")
return None return None
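# worked example (tag values assumed): parse_TDAT_tag("0106", "2023") -> "2023-06-01"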
def is_iso_string(iso_string):
if iso_string is None:
return False
try:
datetime.fromisoformat(iso_string)
return True
except ValueError:
return False
def get_iso_date_in_file(file_path): def get_iso_date_in_file(file_path):
tags = ffprobe_get_tags(file_path) tags = ffprobe_get_tags(file_path)
l = []
# print(tag_tdat)
# print(tag_tyer)
for item in ["releasedate", "date"]:
parsed = parse_iso_date(tags.get(item))
if is_iso_string(parsed):
l.append(parsed)
tag_tdat = tags.get("TDAT")
tag_tyer = tags.get("TYER")
if len(l) == 0 and (tag_tdat is not None and tag_tyer is not None):
logger.info(f"TDAT: {tag_tdat}")
logger.info(f"TYER: {tag_tyer}")
tdat = parse_TDAT_tag(tag_tdat, tag_tyer)
if is_iso_string(tdat):
l.append(tdat)
dates = set(l)
if len(dates) == 0:
logger.error(f"no valid date found for '{file_path}'")
raise SystemExit(3)
elif len(dates) == 1:
d = list(dates)[0]
logger.info(f"date found: {d}")
return d
else:
logger.info(f"multiple dates found: {dates}, picking earliest")
earliest = min([datetime.fromisoformat(a) for a in dates])
return earliest.date().isoformat()  # keep the date-only format used by the other branches
def get_iso_date_in_file2(file_path):
tags = ffprobe_get_tags(file_path)
tag_TDAT = tags.get("TDAT") tag_TDAT = tags.get("TDAT")
tag_date = tags.get("date") tag_date = tags.get("date")
#tag_releasedate = tags.get("releasedate")
parsed_TDAT = parse_TDAT_tag(tag_TDAT) parsed_TDAT = parse_TDAT_tag(tag_TDAT)
parsed_date = parse_iso_date(tag_date) parsed_date = parse_iso_date(tag_date)
@@ -173,22 +238,24 @@ def get_iso_date_in_file(file_path):
else: else:
return parsed_TDAT return parsed_TDAT
def st_time_to_iso(st_time):
return datetime.fromtimestamp(st_time).isoformat()
def show_info(file_path):
statinfo = os.stat(file_path)
atime = st_time_to_iso(statinfo.st_atime)
mtime = st_time_to_iso(statinfo.st_mtime)
ctime = st_time_to_iso(statinfo.st_ctime)
logger.info(f"atime: {atime}")
logger.info(f"mtime: {mtime}")
logger.info(f"ctime: {ctime}")
def file_dates_are_ok(file_path):
tags = ffprobe_get_tags(file_path)
tag_date = tags.get("date")
try:
dt = datetime.fromisoformat(tag_date)
ts = time.mktime(dt.timetuple())
os.stat(file_path).st_mtime == ts
except ValueError:
return False
def set_utime(file_path, iso_date_str): def set_utime(file_path, iso_date_str):
# sets access and modified times
dt = dateutil.parser.parse(iso_date_str) dt = dateutil.parser.parse(iso_date_str)
ts = time.mktime(dt.timetuple()) ts = time.mktime(dt.timetuple())
# shutil.move(file_path, f"{file_path}.new")
# shutil.move(f"{file_path}.new", file_path)
os.utime(file_path, (ts, ts)) os.utime(file_path, (ts, ts))
try: try:
os.utime(os.path.dirname(file_path), (ts, ts)) os.utime(os.path.dirname(file_path), (ts, ts))
@@ -196,16 +263,24 @@ def set_utime(file_path, iso_date_str):
pass pass
return dt return dt
def eyed3_dates(podcast_file, date):
a = eyed3.load(podcast_file)
def mutagen_dates(podcast_file, date):
id3 = ID3(podcast_file)
print(type(id3))
def parse_args(): def parse_args():
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument("podcast_file") parser.add_argument("action", choices=["show", "utime-only", "fix-dates", "fix-album-tag"])
parser.add_argument("--out-file", default="/tmp/out-{os.getpid()}.mp3") parser.add_argument("--podcast-file")
parser.add_argument("--debug", action="store_true") parser.add_argument("--debug", action="store_true")
parser.add_argument("--quiet", action="store_true") parser.add_argument("--quiet", action="store_true")
parser.add_argument("--ffmpeg", action="store_true") parser.add_argument("--ffmpeg-out-file", default=f"/tmp/out-{os.getpid()}.mp3")
parser.add_argument("--mtime", action="store_true", help="only set mtime, no mp3 metadata") parser.add_argument("--metadata-util", default="eyed3", choices=["mutagen", "eyeD3", "ffmpeg"], type=str.lower)
parser.add_argument("--fix-album-tag", action="store_true", help="write album tag (podcast name)") parser.add_argument("--utime-only", action="store_true", help="only set utime, no mp3 metadata")
parser.add_argument("--podcast-name") parser.add_argument("--podcast-name")
args = parser.parse_args() args = parser.parse_args()
@@ -216,7 +291,6 @@ def parse_args():
logger.add(sys.stderr, level="ERROR") logger.add(sys.stderr, level="ERROR")
else: else:
logger.add(sys.stderr, level="INFO") logger.add(sys.stderr, level="INFO")
return args return args
@@ -224,31 +298,33 @@ def main():
args = parse_args() args = parse_args()
logger.debug(f"checking: '{os.path.basename(args.podcast_file)}'") logger.debug(f"checking: '{os.path.basename(args.podcast_file)}'")
if args.fix_album_tag: date = get_iso_date_in_file(args.podcast_file)
if args.action == "show":
show_info(args.podcast_file)
if args.action == "utime-only":
dt = set_utime(args.podcast_file, date)
logger.info(f"set mtime for '{os.path.basename(args.podcast_file)}' to '{dt.isoformat()}' according to mp3 metadata")
if args.action == "fix-dates":
if args.metadata_util == "ffmpeg":
ffmpeg_write_date_tag(args.podcast_file, args.ffmpeg_out_file, date)
if args.metadata_util == "eyed3":
eyeD3_write_date_tag(args.podcast_file, date)
eyed3_dates(args.podcast_file, date)
if args.metadata_util == "mutagen":
mutagen_dates(args.podcast_file, date)
set_utime(args.podcast_file, date)
logger.success(f"updated dates (metadata and file attributes) for '{args.podcast_file}' as {date}")
if args.action == "fix-album-tag":
podcast_name = get_podcast_name_from_dir(args.podcast_file) podcast_name = get_podcast_name_from_dir(args.podcast_file)
if podcast_name is not None: if podcast_name is not None:
eyeD3_write_album_tag(args.podcast_file, podcast_name) eyeD3_write_album_tag(args.podcast_file, podcast_name)
logger.info(f"set album tag to '{podcast_name}' for '{args.podcast_file}'") logger.info(f"set album tag to '{podcast_name}' for '{args.podcast_file}'")
date = get_iso_date_in_file(args.podcast_file)
if args.mtime:
dt = set_utime(args.podcast_filen, date)
logger.info(f"set mtime for '{os.path.basename(args.podcast_file)}' to '{dt.isoformat()}' according to mp3 metadata")
elif file_dates_are_ok(args.podcast_file):
logger.info(f"metadata date and filesystem utimes ar ok for {args.podcast_file}', did not modify file")
else:
if args.ffmpeg:
ffmpeg_write_date_tag(args.podcast_file, args.out_file, date)
else:
eyeD3_write_date_tag(args.podcast_file, date)
set_utime(args.podcast_file, date)
logger.success(f"updated date in '{args.podcast_file}' as {date}")
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -0,0 +1,328 @@
#!/usr/bin/env python3
import argparse
from datetime import datetime
import subprocess
import json
import sys
import shutil
import os
import time
import eyed3
from loguru import logger
import dateutil.parser
from dateutil.parser._parser import ParserError
from mutagen.id3 import ID3
def delete_file(file_path):
try:
os.remove(file_path)
logger.debug(f"deleted: '{file_path}'")
except FileNotFoundError:
pass
def replace_file(src, dest):
try:
logger.debug(f"src: {src}, dest: {dest}")
#delete_file(dest)
#shutil.move(src, dest)
os.replace(src, dest)
logger.debug(f"replaced '{dest}'")
except (PermissionError, OSError, FileNotFoundError) as e:
logger.error(e)
raise SystemExit(2)
def ffprobe_get_tags(file_path):
cmd = [
"ffprobe",
"-v", "quiet",
file_path,
"-print_format", "json",
"-show_entries",
"stream_tags:format_tags"
]
try:
p = subprocess.run(cmd, capture_output=True, check=True)
j = json.loads(p.stdout)
return j['format']['tags']
except subprocess.CalledProcessError as e:
logger.error(f"{cmd[0]} exited with returncode {e.returncode} \n{e.stderr.decode()}")
raise SystemExit(e.returncode)
except KeyError as e:
logger.error(f"key {e} for file '{file_path}' not found in ffprobe stdout: {p.stdout.decode()}")
raise SystemExit(2)
def ffmpeg_write_date_tag(podcast_file, out_file, iso_date_str):
delete_file(out_file)
cmd = [
"ffmpeg",
"-nostdin",
"-y", # overwrite output file
"-i", podcast_file,
"-c", "copy",
"-metadata", f"date={iso_date_str}",
#"-metadata", f"releasedate={iso_date_str}",
out_file
]
try:
p = subprocess.run(cmd, capture_output=True, check=True, stdin=None)
p.check_returncode()
logger.debug(f"output: '{out_file}'")
replace_file(out_file, podcast_file)
except subprocess.CalledProcessError as e:
logger.error(f"{cmd[0]} exited with returncode {e.returncode} \n{e.stderr.decode()}")
raise SystemExit(e.returncode)
finally:
delete_file(out_file)
def eyeD3_write_date_tag(podcast_file, iso_date_str):
# import eyeD3 ?
podcast_dir = os.path.dirname(podcast_file)
cover_path = os.path.join(podcast_dir, "cover.jpg")
cmd = [
"eyeD3",
"--release-date", iso_date_str,
"--to-v2.4",
"--user-text-frame", f"releasedate:{iso_date_str}",
#"--preserve-file-times",
# "--recording-date", iso_date_str,
# "--force-update",
podcast_file
#"--orig-release-date", iso_date_str,
# this overwrites 'release date' i think:
#"--release-year", iso_date_str.split("-")[0],
]
# if os.path.exists(cover_path):
# cmd.extend(["--add-image", f"{cover_path}:FRONT_COVER"])
#cmd.append(podcast_file)
logger.debug(" ".join(cmd))
try:
subprocess.run(cmd, capture_output=True, check=True, stdin=None)
logger.debug(f"updated: '{podcast_file}'")
#logger.info(f"would write date: {iso_date_str}")
except subprocess.CalledProcessError as e:
logger.error(f"{cmd[0]} exited with returncode {e.returncode} \n{e.stderr.decode()}")
raise SystemExit(e.returncode)
def get_podcast_name_from_dir(podcast_file):
podcast_dir = os.path.dirname(podcast_file)
if podcast_dir.startswith("/"):
# for now let's just use absolute dirs for names
podcast_name = os.path.basename(podcast_dir)
logger.debug(f"podcast name: {podcast_name}")
return podcast_name
else:
return None
def eyeD3_write_album_tag(podcast_file, podcast_name):
# "album" is the name of the podcast
cmd = ["eyeD3", "--album", podcast_name, podcast_file]
try:
subprocess.run(cmd, capture_output=True, check=True, stdin=None)
except subprocess.CalledProcessError as e:
logger.error(f"{cmd[0]} exited with returncode {e.returncode} \n{e.stderr.decode()}")
raise SystemExit(e.returncode)
def parse_iso_date(date_str):
try:
dt = dateutil.parser.parse(date_str)
return dt.date().isoformat()
except (ParserError, TypeError) as e:
logger.warning(f"invalid date string: '{date_str}'")
return None
def parse_TDAT_tag(tag_tdat):
try:
iso_date_str = tag_tdat.split(' ')[0]
return parse_iso_date(iso_date_str)
except (AttributeError, IndexError) as e:
logger.debug(f"invalid 'TDAT' tag: '{tag_tdat}'")
return None
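# e.g. (assumed tag value): parse_TDAT_tag("2023-05-01 12:00") -> "2023-05-01"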
def is_iso_string(iso_string):
if iso_string is None:
return False
try:
datetime.fromisoformat(iso_string)
return True
except ValueError:
return False
# def get_iso_date_in_file(file_path):
# tags = ffprobe_get_tags(file_path):
# search_tag_names = ["date", "releasedate", "TDAT"]
# tags_data = [tags.get(a) for a in search_tag_names]
# for item in tags_data:
# tags_parsed = [parse_iso_date(a) for a in tags_data]
# for item in ["date", "TDAT", "releasedate"]:
# tag_data = tags.get(item)
# parsed_data = parse_iso_date(tag_data)
# if is_iso_string(parsed_data)
# return parsed_data
def get_iso_date_in_file(file_path):
tags = ffprobe_get_tags(file_path)
tag_TDAT = tags.get("TDAT")
tag_date = tags.get("date")
parsed_TDAT = parse_TDAT_tag(tag_TDAT)
parsed_date = parse_iso_date(tag_date)
if parsed_TDAT is None and parsed_date is None:
logger.error(f"no valid date found in '{file_path}' - TDAT: '{tag_TDAT}', date: '{tag_date}'")
raise SystemExit(3)
else:
logger.debug(f"TDAT: '{parsed_TDAT}' ('{tag_TDAT}'), date: '{parsed_date}' ('{tag_date}')")
logger.debug(f"date: {parsed_date}")
if parsed_TDAT != parsed_date:
logger.debug(f"dates in 'TDAT' ({parsed_TDAT}) and 'date' ({parsed_date}) differ!")
if parsed_date is not None:
return parsed_date
else:
return parsed_TDAT
# def file_dates_are_ok(file_path):
# tags = ffprobe_get_tags(file_path)
# tag_date = tags.get("date")
# try:
# dt = datetime.fromisoformat(tag_date)
# ts = time.mktime(dt.timetuple())
# os.stat(file_path).st_mtime == ts
# except ValueError:
# return False
def st_time_to_iso(st_time):
return datetime.fromtimestamp(st_time).isoformat()
def show_info(file_path):
statinfo = os.stat(file_path)
atime = st_time_to_iso(statinfo.st_atime)
mtime = st_time_to_iso(statinfo.st_mtime)
ctime = st_time_to_iso(statinfo.st_ctime)
logger.info(f"atime: {atime}")
logger.info(f"mtime: {mtime}")
logger.info(f"ctime: {ctime}")
def set_utime(file_path, iso_date_str):
# sets access and modified times
dt = dateutil.parser.parse(iso_date_str)
ts = time.mktime(dt.timetuple())
# shutil.move(file_path, f"{file_path}.new")
# shutil.move(f"{file_path}.new", file_path)
os.utime(file_path, (ts, ts))
try:
os.utime(os.path.dirname(file_path), (ts, ts))
except FileNotFoundError:
pass
return dt
def eyed3_dates(podcast_file, date):
a = eyed3.load(podcast_file)
def mutagen_dates(podcast_file, date):
id3 = ID3(podcast_file)
print(type(id3))
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument("action", choices=["show", "utime-only", "fix-dates", "fix-album-tag"])
parser.add_argument("--podcast-file")
parser.add_argument("--debug", action="store_true")
parser.add_argument("--quiet", action="store_true")
parser.add_argument("--ffmpeg-out-file", default=f"/tmp/out-{os.getpid()}.mp3")
parser.add_argument("--metadata-util", default="eyed3", choices=["mutagen", "eyeD3", "ffmpeg"], type=str.lower)
parser.add_argument("--utime-only", action="store_true", help="only set utime, no mp3 metadata")
parser.add_argument("--podcast-name")
args = parser.parse_args()
if not args.debug:
logger.remove()
if args.quiet:
logger.add(sys.stderr, level="ERROR")
else:
logger.add(sys.stderr, level="INFO")
return args
def main():
args = parse_args()
logger.debug(f"checking: '{os.path.basename(args.podcast_file)}'")
date = get_iso_date_in_file(args.podcast_file)
if args.action == "show":
show_info(args.podcast_file)
if args.action == "utime-only":
dt = set_utime(args.podcast_file, date)
logger.info(f"set mtime for '{os.path.basename(args.podcast_file)}' to '{dt.isoformat()}' according to mp3 metadata")
if args.action == "fix-dates":
# if file_dates_are_ok(args.podcast_file):
# logger.info(f"metadata date and filesystem utimes are ok for {args.podcast_file}', did not modify file")
# else:
if args.metadata_util == "ffmpeg":
ffmpeg_write_date_tag(args.podcast_file, args.ffmpeg_out_file, date)
if args.metadata_util == "eyed3":
eyeD3_write_date_tag(args.podcast_file, date)
eyed3_dates(args.podcast_file, date)
if args.metadata_util == "mutagen":
mutagen_dates(args.podcast_file, date)
set_utime(args.podcast_file, date)
logger.success(f"updated dates (metadata and file attributes) for '{args.podcast_file}' as {date}")
if args.action == "fix-album-tag":
podcast_name = get_podcast_name_from_dir(args.podcast_file)
if podcast_name is not None:
eyeD3_write_album_tag(args.podcast_file, podcast_name)
logger.info(f"set album tag to '{podcast_name}' for '{args.podcast_file}'")
if __name__ == "__main__":
main()

View File

@@ -0,0 +1,219 @@
#!/usr/bin/env python3
import urllib.parse
import requests
import json
import sys
import os
import dateutil.parser
from loguru import logger
abs = "https://url"
authheaders = {
"Authorization": "Bearer $token"}
try:
OUTPUT_DIR = sys.argv[1]
except IndexError:
OUTPUT_DIR = "/tmp/abs_metadata"
logger.info(f"saving metadata json files to '{OUTPUT_DIR}'")
s = requests.Session()
s.headers.update(authheaders)
# idea: what is /s/? socket? tracks progress?
# check with browser dev tools
# could add nginx conf/proxy to inject auth header
# or something if i get progress tracking for free
# or custom proxy
#
# rss feed needs to be open for /feed/ urls to
# work. rss feed can be opened with api
#
# find if episode has been played
# https://api.audiobookshelf.org/#items-listened-to
#
# check with browser dev tools what happens when
# the button is clicked to mark an episode as played
# ANSWER:
# curl \
# 'https://{url}/api/me/progress/li_eadrvpei17yz1unifv/ep_8zz0zme6qtzq9rio8y' \
# -X PATCH \
# -H 'Authorization: Bearer $token' \
# -H 'Accept: application/json, text/plain, */*' \
# -H 'Accept-Language: en-US,en;q=0.5' \
# -H 'Accept-Encoding: gzip, deflate, br' \
# -H 'Content-Type: application/json' \
# -H 'Referer: https://{url}/item/li_eadrvpei17yz1unifv' \
# -H 'Origin: https://{url}' \
# -H 'DNT: 1' \
# -H 'Sec-Fetch-Dest: empty' \
# -H 'Sec-Fetch-Mode: cors' \
# -H 'Sec-Fetch-Site: same-origin' \
# -H 'Sec-GPC: 1' \
# -H 'Connection: keep-alive' \
# -H 'Pragma: no-cache' -H 'Cache-Control: no-cache' \
# -H 'TE: trailers' \
# --data-raw '{"isFinished":true}'
#
# $ curl -sX GET 'https://{url}/api/me/progress/li_eadrvpei17yz1unifv/ep_8zz0zme6qtzq9rio8y' -H 'Authorization: Bearer ${token}' | jq .
# {
# "duration": 0,
# "progress": 1,
# "currentTime": 0,
# "isFinished": true,
# "hideFromContinueListening": false,
# "finishedAt": 1679184864387
# }
# ^ removed less interesting fields from the response
#
# listening sessions can be streamed?
#
# use hass to remove from playlist when playback
# is finished?
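# sketch (untested, helper name is mine): mark an episode as played via the
# progress endpoint discovered with the browser dev tools above
def mark_episode_finished(li_id, ep_id):
    r = s.patch(f"{abs}/api/me/progress/{li_id}/{ep_id}", json={"isFinished": True})
    r.raise_for_status()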
def playlist():
playlists = s.get(f"{abs}/api/playlists").json()["playlists"]
for playlist in playlists:
for episode in playlist["items"]:
li_id = episode["libraryItemId"]
ep_id = episode["episodeId"]
file_name = episode["episode"]["audioTrack"]["metadata"]["filename"]
encoded_file_name = urllib.parse.quote_plus(file_name)
file_url = f"{abs}/feed/{li_id}/item/{ep_id}/{encoded_file_name}"
print(file_url)
def item_embed_metadata(li_id):
embed_url = f"{abs}/api/tools/item/{li_id}/embed-metadata"
logger.info(embed_url)
try:
r = s.post(embed_url, data={"forceEmbedChapters": False})
r.raise_for_status()
return r.json()
except requests.exceptions.HTTPError as e:
logger.error(e)
logger.error(r.text)
return None
def metadata_main():
r = s.get(f"{abs}/api/libraries")
j = r.json()
podcasts = [a for a in j['libraries'] if a['mediaType'] == "podcast"]
logger.info(f"podcast libraries: {len(podcasts)}")
# metadata_library() iterates over the list itself, so call it once
metadata_library(podcasts)
def metadata_library(podcasts):
for item in podcasts:
r = s.get(f"{abs}/api/libraries/{item['id']}/items")
j = r.json()
lib_items = j['results']
metadata_library_item(lib_items)
def metadata_library_item(lib_items):
logger.info(f"podcasts: {len(lib_items)}")
for item in lib_items:
name = item['relPath']
li_id = item['id']
#episodes = item['media']['episodes']
#for episode in episodes:
# episode_id = episode['id']
# #lib_item = s.get(f"{abs}/api/items/{episode_id}").json()
# #logger.info(f"{name}: {lib_item}")
# #metadata = item_embed_metadata(episode_id)
lib_item = s.get(f"{abs}/api/items/{li_id}").json()
item_name = lib_item['media']['metadata']['title']
item_id = lib_item['id']
media = lib_item['media']
metadata = lib_item['media']['metadata']
podcast_path = lib_item['path']
podcast_rel_path = lib_item['relPath']
save_dir = f"{OUTPUT_DIR}/{podcast_rel_path}"
logger.info(f"{name} ({item_id}): {save_dir} ")
os.makedirs(save_dir, exist_ok=True)
podcast = {
'podcast_name': item_name,
'podcast_metadata': metadata,
'feed_url': metadata['feedUrl'],
'itunes_id': metadata['itunesId'],
'path': podcast_path,
'rel_path': podcast_rel_path,
'abs_ids': {
'library_id': lib_item['libraryId'],
'item_id': item_id,
'folder_id': lib_item['folderId'],
}
}
episodes = []
logger.info(f"latest epsiode: {media['episodes'][0]['title']}")
for ep in media['episodes']:
ep_file = ep['audioFile']
ep_metadata = ep_file['metadata']
ep_filename = ep_metadata['filename']
ep_path = ep_metadata['path']
ep_rel_path = f"{podcast_rel_path}/{ep_filename}"
published_date = dateutil.parser.parse(ep['pubDate']).isoformat()
published_date_ts = ep['publishedAt']
episode = {
'library_item_id': ep['libraryItemId'],
'id': ep['id'],
'path': ep_path,
'rel_path': ep_rel_path,
'index': ep.get('index'),
'title': ep['title'],
'subtitle': ep.get('subtitle'),
'description': ep.get('description'),
'published_date': published_date,
'published_date_ts': published_date_ts,
'filename': ep_metadata['filename'],
'ext': ep_metadata['ext'],
'mtime_ms': int(ep_metadata['mtimeMs']),
'ctime_ms': int(ep_metadata['ctimeMs']),
'birthtime_ms': int(ep_metadata['birthtimeMs']),
'added_at': int(ep_file['addedAt']),
'updated_at': int(ep_file['updatedAt']),
'duration': ep_file['duration'],
}
episodes.append(episode)
with open(f'{save_dir}/{ep_filename}.json', 'w') as f:
f.write(json.dumps(episode, indent=4))
with open(f'{save_dir}/metadata_podcast.json', 'w') as f:
f.write(json.dumps(podcast, indent=4))
full_metadata = podcast.copy()
full_metadata['episodes'] = episodes.copy()
with open(f'{save_dir}/metadata.json', 'w') as f:
f.write(json.dumps(full_metadata, indent=4))
def metadata_embed(item_id):
r = s.post(f"{abs}/api/tools/item/{tem_id}/embed-metadata")
print(r.text)
print(r.status_code)
if __name__ == "__main__":
metadata_main()

View File

@@ -84,6 +84,28 @@
- abs-container - abs-container
- docker-containers - docker-containers
- name: install python utilities for mp3 metadata
apt:
name:
- eyed3
- python3-mutagen
state: present
tags:
- packages
- audiobookshelf-scripts
- name: config file for podcast tools
copy:
dest: /usr/local/bin/podcasts.json
owner: root
group: "{{ audiobookshelf_group.gid }}"
mode: 0750
content: "{{ podcast_tools_config | to_nice_json }}"
tags:
- abs-scripts
- audiobookshelf-scripts
- podcast-tools
- name: copy abs scripts - name: copy abs scripts
copy: copy:
src: "{{ item }}" src: "{{ item }}"
@@ -96,3 +118,19 @@
tags: tags:
- abs-scripts - abs-scripts
- audiobookshelf-scripts - audiobookshelf-scripts
- podcast-tools
- name: cron file
template:
src: audiobookshelf-cron.j2
dest: /etc/cron.d/audiobookshelf
owner: root
group: root
mode: 0600
tags:
- cron
- abs-cron
- audiobookshelf-cron
- abs-scripts
- audiobookshelf-scripts
- podcast-tools

View File

@@ -1,3 +1,7 @@
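# flag nuxt JS bundle requests so "location /" below can redirect them with a cache-busting query id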
map $request_uri $kill_stupid_serviceworker_cache {
~*^/_nuxt/(.*)\.js$ 1;
}
server { server {
listen 443 ssl http2; listen 443 ssl http2;
include listen-proxy-protocol.conf; include listen-proxy-protocol.conf;
@@ -22,8 +26,6 @@ server {
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host; proxy_set_header Host $host;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_http_version 1.1; proxy_http_version 1.1;
@@ -34,8 +36,25 @@ server {
add_header "Content-Type" "application/rss+xml"; add_header "Content-Type" "application/rss+xml";
} }
location /socket.io/ {
include /etc/nginx/require_auth.conf;
proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade;
proxy_set_header Connection "upgrade";
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header X-Forwarded-Protocol $scheme;
proxy_set_header X-Forwarded-Host $http_host;
proxy_pass http://{{ bridgewithdns.audiobookshelf }}:80;
}
location / { location / {
include /etc/nginx/require_auth.conf; include /etc/nginx/require_auth.conf;
if ($kill_stupid_serviceworker_cache) {
rewrite "^(.*)$" "$1?id=$request_id" redirect;
}
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for; proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Proto $scheme; proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host; proxy_set_header Host $host;
@@ -44,6 +63,16 @@ server {
proxy_http_version 1.1; proxy_http_version 1.1;
# nuke cache
add_header Last-Modified $date_gmt always;
add_header Cache-Control 'no-store' always;
if_modified_since off;
expires off;
etag off;
# nuke the service worker cache
sub_filter '.js' '.js?id=$request_id';
proxy_pass http://{{ bridgewithdns.audiobookshelf }}:80; proxy_pass http://{{ bridgewithdns.audiobookshelf }}:80;
proxy_redirect http:// https://; proxy_redirect http:// https://;
} }

View File

@@ -0,0 +1,6 @@
# m h dom mon dow command
*/1 * * * * {{ audiobookshelf_user.username }} find {{ audiobookshelf_path }}/podcasts -mmin -20 -name "*.mp3" -exec /usr/local/bin/fix-podcast-date.py fix-dates --podcast-file {} {# --quiet #}\;
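# every minute: fix date tags and file times for podcast mp3s modified within the last 20 minutes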
#

View File

@@ -1,2 +1 @@
--- ---
blink1_enabled: false

View File

@@ -0,0 +1,141 @@
---
input_number:
sun_adaptive_max_brightness_pct:
name: sun_adaptive_max_brightness_pct
min: 0
max: 100
step: 1.0
mode: box
unit_of_measurement: "%"
template:
- sensor:
- name: sun_zenith
unit_of_measurement: "°"
state_class: measurement
icon: >-
{% if states("sun.sun") == "above_horizon" %}
{% if state_attr("sun.sun", "rising") %}
mdi:weather-sunset-up
{% else %}
mdi:white-balance-sunny
{% endif %}
{% else %}
mdi:moon-waning-crescent
{% endif %}
state: >-
{{ state_attr("sun.sun", "elevation") | default(0.0) | float }}
attributes:
friendly_name: Solar zenith angle
below_horizon: >-
{{ states("sun.sun") == "below_horizon" }}
setting: >-
{{ state_attr("sun.sun", "rising") == false }}
# - name: sun_position
# state_class: measurement
# icon: mdi:white-balance-sunny
# state: >-
# {{ state_attr("switch.adaptive_lighting_adapt_brightness_home", "sun_position") }}
- name: sun_adaptive_brightness
unit_of_measurement: "%"
state_class: measurement
icon: >-
{{ state_attr("sensor.sun_zenith", "icon") }}
state: >-
{% set sun_zenith_pct = states("sensor.sun_zenith_pct") | float %}
{% set max_brightness_pct = states("input_number.sun_adaptive_max_brightness_pct") | default(100.0) | float %}
{% set brightness_pct = 100.0 - sun_zenith_pct %}
{{ brightness_pct | int }}
attributes:
friendly_name: Adaptive light brightness
- name: sun_zenith_pct
unit_of_measurement: "%"
state_class: measurement
icon: >-
{{ state_attr("sensor.sun_zenith", "icon") }}
state: >-
{% set sun_highest = states("sensor.sun_zenith_highest") | default(1.0) | float %}
{% set sun_zenith = states("sensor.sun_zenith") | default(0.0) | float %}
{% set daylight_pct = max(sun_zenith, 0) / sun_highest * 100.0 %}
{{ daylight_pct | round(1) }}
# - name: sun_time_until_dawn
# unit_of_measurement: minutes
# device_class: timestamp
# state_class: measurement
# state: >-
# {% set sun_dawn = state_attr("sun.sun", "next_dawn") | as_datetime %}
# {{ sun_dawn - now() }}
# attributes:
# friendly_name: "Time until dawn"
# - name: sun_time_until_dusk
# unit_of_measurement: minutes
# device_class: timestamp
# state_class: measurement
# state: >-
# {% set sun_dusk = state_attr("sun.sun", "next_dusk") | as_datetime %}
# {{ sun_dusk - now() }}
# attributes:
# friendly_name: "Time until dusk"
# - name: sun_time_until_midnight
# state_class: measurement
# device_class: timestamp
# unit_of_measurement: minutes
# state: >-
# {% set sun_midnight = state_attr("sun.sun", "next_midnight") | as_datetime %}
# {{ sun_midnight - now() }}
# attributes:
# friendly_name: "Time until midnight"
# - name: sun_time_until_noon
# unit_of_measurement: minutes
# state_class: measurement
# device_class: timestamp
# state: >-
# {% set sun_noon = state_attr("sun.sun", "next_noon") | as_datetime %}
# {{ now() - sun_noon - now() }}
# attributes:
# friendly_name: "Time until noon"
# - name: sun_time_until_rising
# unit_of_measurement: minutes
# device_class: timestamp
# state_class: measurement
# state: >-
# {% set sun_rising = state_attr("sun.sun", "next_rising") | as_datetime %}
# {{ sun_rising - now() }}
# attributes:
# friendly_name: "Time until rising"
# - name: sun_time_until_setting
# unit_of_measurement: minutes
# device_class: timestamp
# state_class: measurement
# state: >-
# {% set sun_setting = state_attr("sun.sun", "next_setting") | as_datetime %}
# {{ sun_setting - now() }}
# attributes:
# friendly_name: "Time until setting"
sensor:
- name: sun_zenith_highest
platform: statistics
entity_id: sensor.sun_zenith
state_characteristic: value_max
max_age:
hours: 24
- name: sun_zenith_lowest
platform: statistics
entity_id: sensor.sun_zenith
state_characteristic: value_min
max_age:
hours: 24

View File

@@ -124,28 +124,6 @@ script:
value: "{{ brightness_steps | int }}" value: "{{ brightness_steps | int }}"
automation: automation:
- alias: grow_lights_1_state
trigger:
- platform: time_pattern
minutes: /15
condition: []
action:
- service: >-
script.grow_lights_1_{{ states('binary_sensor.grow_lights_1_schedule') }}
data: {}
- alias: grow_lights_1_hass_start
trigger:
- platform: homeassistant
event: start
condition: []
action:
- service: >-
light.turn_{{ states('binary_sensor.grow_lights_1_schedule') }}
target:
entity_id: light.grow_lights_1
data: {}
- alias: grow_lights_1_schedule - alias: grow_lights_1_schedule
trigger: trigger:
- platform: state - platform: state
@@ -161,6 +139,27 @@
target: target:
entity_id: light.grow_lights_1 entity_id: light.grow_lights_1
data: {} data: {}
- alias: grow_lights_1_cron_send_state
trigger:
- platform: time_pattern
minutes: /10
condition: []
action:
- if:
- condition: state
entity_id: binary_sensor.grow_lights_1_schedule
state: "on"
then:
- service: script.grow_lights_1_on
data: {}
- if:
- condition: state
entity_id: binary_sensor.grow_lights_1_schedule
state: "off"
then:
- service: script.grow_lights_1_off
data: {}
mode: single
binary_sensor: binary_sensor:

View File

@@ -0,0 +1,8 @@
---
binary_sensor:
- platform: tod
unique_id: moosetv_schedule
name: moosetv_schedule
after: "06:00"
before: "01:00"

View File

@@ -18,6 +18,7 @@
when: when:
- hass_container is not defined or not hass_container.changed - hass_container is not defined or not hass_container.changed
- hass_container_state|default("stopped") == "started" - hass_container_state|default("stopped") == "started"
- hass_restart_handler|default(true)
- name: restart zwavejs container - name: restart zwavejs container
docker_container: docker_container:
@@ -30,3 +31,7 @@
- name: udevadm reload rules - name: udevadm reload rules
command: udevadm control --reload-rules command: udevadm control --reload-rules
- name: git-hass-config.sh
command: /usr/local/bin/git-hass-config.sh
become_user: "{{ systemuserlist.hass.username }}"

View File

@@ -69,7 +69,7 @@
- hass-git - hass-git
- hass-git-clone - hass-git-clone
- name: home assistant config files - name: home assistant config files and config packages
template: template:
src: "{{ item }}.j2" src: "{{ item }}.j2"
dest: "{{ systemuserlist.hass.home }}/home-assistant/config/{{ item }}" dest: "{{ systemuserlist.hass.home }}/home-assistant/config/{{ item }}"
@@ -83,13 +83,14 @@
- templates.yaml - templates.yaml
- automations-ansible-managed.yaml - automations-ansible-managed.yaml
- scripts-ansible-managed.yaml - scripts-ansible-managed.yaml
- blink1.yaml
# - packages/climate.yaml # - packages/climate.yaml
- packages/blink1.yaml
- packages/toothbrush.yaml - packages/toothbrush.yaml
- packages/glados_tts.yaml
tags: tags:
- hass-config - hass-config
- name: copy config files - name: copy config packages
copy: copy:
src: "{{ item }}" src: "{{ item }}"
dest: "{{ systemuserlist.hass.home }}/home-assistant/config/{{ item }}" dest: "{{ systemuserlist.hass.home }}/home-assistant/config/{{ item }}"
@@ -101,8 +102,11 @@
- packages/usb_led_strings.yaml - packages/usb_led_strings.yaml
- packages/grow_lights.yaml - packages/grow_lights.yaml
- packages/fans.yaml - packages/fans.yaml
- packages/moosetv.yaml
- packages/adaptive_lighting.yaml
tags: tags:
- hass-config - hass-config
- hass-packages
- name: copy dashboard config files - name: copy dashboard config files
copy: copy:
@@ -126,6 +130,7 @@
mode: 0775 mode: 0775
owner: hass owner: hass
group: hass group: hass
notify: git-hass-config.sh
tags: tags:
- hass-git - hass-git

View File

@@ -16,6 +16,14 @@ server {
server_name {{ hass_url }}; server_name {{ hass_url }};
location /glados/ {
# TEMP: while glados hass integration is WIP
proxy_set_header Host $host;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_pass {{ hass_glados_tts_url }};
}
location / { location / {
proxy_http_version 1.1; proxy_http_version 1.1;
proxy_set_header Upgrade $http_upgrade; proxy_set_header Upgrade $http_upgrade;
@@ -33,7 +41,7 @@ server {
# autoindex_exact_size off; # autoindex_exact_size off;
# } # }
{% if blink1_enabled -%} {% if blink1_server_port is defined -%}
location /blink1/ { location /blink1/ {
{% for cidr in my_local_cidrs -%} {% for cidr in my_local_cidrs -%}
allow {{ cidr }}; allow {{ cidr }};
@@ -139,6 +147,8 @@ server {
ssl_session_timeout 5m; ssl_session_timeout 5m;
ssl_certificate /usr/local/etc/certs/{{ domain }}/fullchain.pem; ssl_certificate /usr/local/etc/certs/{{ domain }}/fullchain.pem;
ssl_certificate_key /usr/local/etc/certs/{{ domain }}/privkey.pem; ssl_certificate_key /usr/local/etc/certs/{{ domain }}/privkey.pem;
ssl_protocols TLSv1.2 TLSv1.3;
ssl_prefer_server_ciphers off;
fastcgi_hide_header X-Powered-By; fastcgi_hide_header X-Powered-By;
} }

View File

@@ -1,3 +1,8 @@
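# standard websocket upgrade map, presumably for use with "proxy_set_header Connection $connection_upgrade"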
map $http_upgrade $connection_upgrade {
default upgrade;
#default $http_connection;
'' close;
}
server { server {
listen 443 ssl http2; listen 443 ssl http2;
{% if inventory_hostname in wg_clients -%} {% if inventory_hostname in wg_clients -%}

View File

@@ -1,71 +0,0 @@
{% if blink1_enabled -%}
friendly_name: blink1
value_template: >-
{% raw -%}
{{ state_attr('sensor.blink1', 'rgb') != "#000000" }}
{% endraw %}
# color_template: >-
# {% raw -%}
# {{ state_attr('sensor.blink1', 'rgb') }}
# {% endraw %}
turn_on:
- service: rest_command.blink1_turn_on
- delay:
milliseconds: 500
- service: homeassistant.update_entity
target:
entity_id: sensor.blink1
turn_off:
- service: rest_command.blink1_turn_off
- delay:
milliseconds: 500
- service: homeassistant.update_entity
target:
entity_id: sensor.blink1
set_color:
- service: rest_command.blink1_turn_off
- service: rest_command.blink1_set_color
data:
# https://github.com/velijv/home-assistant-color-helpers#rgb-to-hex
# https://community.home-assistant.io/t/advanced-light-template-help/175654
# https://community.home-assistant.io/t/using-hsv-hsb-to-set-colored-lights/15472
rgb: >-
{%raw%}
{%- set h2 = h / 360 -%}
{%- set s2 = s / 100 -%}
{%- set v = 100 -%}
{%- set i = (h2 * 6 ) | round(2,'floor') | int-%}
{%- set f = h2 * 6 - i -%}
{%- set p = v * (1 - s2) -%}
{%- set q = v * (1 - f * s2) -%}
{%- set t = v * (1 - (1 - f) * s2) -%}
{%- if i % 6 == 0 -%}
{%- set r = v | int -%}
{%- set g = t | int -%}
{%- set b = p | int -%}
{%- elif i % 6 == 1 -%}
{%- set r = q | int -%}
{%- set g = v | int -%}
{%- set b = p | int -%}
{%- elif i % 6 == 2 -%}
{%- set r = p | int -%}
{%- set g = v | int -%}
{%- set b = t | int -%}
{%- elif i % 6 == 3 -%}
{%- set r = p | int -%}
{%- set g = q | int -%}
{%- set b = v | int -%}
{%- elif i % 6 == 4 -%}
{%- set r = t | int -%}
{%- set g = p | int -%}
{%- set b = v | int -%}
{%- elif i % 6 == 5 -%}
{%- set r = v | int -%}
{%- set g = p | int -%}
{%- set b = q | int -%}
{%- endif -%}
{{ '%02x%02x%02x' | format(r, g, b) }}
{%endraw%}
{% endif %}

View File

@@ -64,6 +64,17 @@ scene: !include scenes.yaml
template: !include templates.yaml template: !include templates.yaml
#climate: !include climate.yaml #climate: !include climate.yaml
http:
server_host: 127.0.0.1
server_port: 8123
trusted_proxies:
- 127.0.0.1
use_x_forwarded_for: true
api:
websocket_api:
# Text to speech # Text to speech
tts: tts:
- platform: voicerss - platform: voicerss
@@ -82,15 +93,6 @@ calendar:
url: {{ item.url | trim }} url: {{ item.url | trim }}
{% endfor %} {% endfor %}
http:
server_host: 127.0.0.1
server_port: 8123
trusted_proxies:
- 127.0.0.1
use_x_forwarded_for: true
api:
frontend: frontend:
themes: !include_dir_merge_named themes themes: !include_dir_merge_named themes
@@ -261,16 +263,6 @@ sensor:
#{% endfor %} #{% endfor %}
#} #}
{% if blink1_enabled -%}
- platform: rest
resource: http://localhost:{{ blink1_server_port }}/blink1
name: blink1
json_attributes:
- rgb
- bright
value_template: "{%raw%}{{ value_json.rgb }}{%endraw%}"
{% endif %}
binary_sensor: binary_sensor:
- platform: workday - platform: workday
country: DE country: DE
@@ -335,13 +327,13 @@ device_tracker:
# to known_devices.yaml when they are discovered, but they wont be # to known_devices.yaml when they are discovered, but they wont be
# tracked unless 'track' is set to 'true' for a device there (edit # tracked unless 'track' is set to 'true' for a device there (edit
# the file to track a discovered device). # the file to track a discovered device).
track_new_devices: true track_new_devices: false
#new_device_defaults: #new_device_defaults:
# track_new_devices: false # track_new_devices: false
- platform: bluetooth_tracker - platform: bluetooth_tracker
request_rssi: true request_rssi: true
track_new_devices: true track_new_devices: false
- platform: ping - platform: ping
# these are probably not used, because they are "global settings" # these are probably not used, because they are "global settings"
# and only the first values from the first platform (bluetooth_le_tracker) # and only the first values from the first platform (bluetooth_le_tracker)
@@ -405,32 +397,6 @@ notify:
shell_command: shell_command:
matrixmsg: /usr/local/bin/matrixmsg.py matrixmsg: /usr/local/bin/matrixmsg.py
rest_command:
{% if blink1_enabled -%}
blink1_turn_on:
url: {{ hass_blink1_url }}/blink1/on?bright=250
#url: http://localhost:{{ blink1_server_port }}/blink1/fadeToRGB?rgb=ff0ff
method: GET
content_type: "application/json"
blink1_turn_off:
url: {{ hass_blink1_url }}/blink1/off
method: GET
content_type: "application/json"
blink1_turn_magenta:
url: {{ hass_blink1_url }}/blink1/fadeToRGB?rgb=ff00ff
method: GET
content_type: "application/json"
blink1_set_color:
url: "{{ hass_blink1_url }}/blink1/fadeToRGB?rgb={%raw%}{{ rgb }}{%endraw%}"
method: GET
{% endif %}
light:
{% if blink1_enabled -%}
- platform: template
lights:
blink1: !include blink1.yaml
{% endif %}
{# {#
# feedreader: # feedreader:

View File

@@ -25,32 +25,34 @@ fi
mkdir -p ${PATH_REPO}/config/ mkdir -p ${PATH_REPO}/config/
mkdir -p ${PATH_REPO}/config/.storage mkdir -p ${PATH_REPO}/config/.storage
{% for item in hass_config_repo_cp -%} {% for item in hass_config_repo_cp_files -%}
{% if item.dir|default(false) %}{% set cp_r = "r" -%} cp -a ${PATH_HASS}/config/{{ item }} ${PATH_REPO}/config/{{ item }}
{% else %}{% set cp_r = "" -%}
{% endif -%}
{% if item.src.endswith("*") -%}
{% if item.src.count("/") > 0 -%}
{% set dest = item.src.split("/")[:-1] | join("/") + "/" -%}
{% else -%}
{% set dest = "" -%}
{% endif -%}
{% else -%}
{% set dest = item.src %}
{% endif %}
cp -a{{ cp_r }} ${PATH_HASS}/config/{{ item.src }} ${PATH_REPO}/config/{{ dest }}
{% endfor %} {% endfor %}
{% for item in hass_config_repo_cp_dirs -%}
cp -ra ${PATH_HASS}/config/{{ item }} ${PATH_REPO}/config/
{% endfor %}
{% for item in hass_config_repo_cp_globs -%}
{% set dest_dir = item.split('/')[:-1] | join("/") %}
cp -ra ${PATH_HASS}/config/{{ item }} ${PATH_REPO}/config/{{ dest_dir }}
{% endfor %}
set +e
{% for item in hass_config_repo_rm -%}
git rm -rf ${PATH_REPO}/config/{{ item }} &> /dev/null
{% endfor %}
set -e
if test -n "$(git status --porcelain)" ; then if test -n "$(git status --porcelain)" ; then
{% for item in hass_config_repo_cp -%} git add config/ > /dev/null
git add config/{{ item.src }} > /dev/null
{% endfor %}
git commit -m "config updated" > /dev/null git commit -m "config updated" > /dev/null
fi fi
git pull --quiet git pull --quiet
git push --quiet 2> /dev/null git push origin main --quiet 2> /dev/null
#git push test main --force
# TODO: copy changes from git # TODO: copy changes from git

View File

@@ -0,0 +1,111 @@
---
template:
- binary_sensor:
- name: "blink1_on"
device_class: light
state: >-
{% raw -%}
{{ state_attr('sensor.blink1', 'rgb') != "#000000" }}
{% endraw %}
sensor:
- platform: rest
resource: http://localhost:{{ blink1_server_port }}/blink1
name: blink1
json_attributes:
- rgb
- bright
value_template: "{%raw%}{{ value_json.rgb }}{%endraw%}"
rest_command:
blink1_turn_on:
url: {{ hass_blink1_url }}/blink1/on?bright=250
#url: http://localhost:{{ blink1_server_port }}/blink1/fadeToRGB?rgb=ff0ff
method: GET
content_type: "application/json"
blink1_turn_off:
url: {{ hass_blink1_url }}/blink1/off
method: GET
content_type: "application/json"
blink1_turn_magenta:
url: {{ hass_blink1_url }}/blink1/fadeToRGB?rgb=ff00ff
method: GET
content_type: "application/json"
blink1_set_color:
url: "{{ hass_blink1_url }}/blink1/fadeToRGB?rgb={%raw%}{{ rgb }}{%endraw%}"
method: GET
light:
- platform: template
lights:
blink1:
friendly_name: blink1
value_template: >-
{% raw -%}
{{ state_attr('sensor.blink1', 'rgb') != "#000000" }}
{% endraw %}
# color_template: >-
# {% raw -%}
# {{ state_attr('sensor.blink1', 'rgb') }}
# {% endraw %}
turn_on:
- service: rest_command.blink1_turn_on
- delay:
milliseconds: 500
- service: homeassistant.update_entity
target:
entity_id: sensor.blink1
turn_off:
- service: rest_command.blink1_turn_off
- delay:
milliseconds: 500
- service: homeassistant.update_entity
target:
entity_id: sensor.blink1
set_color:
- service: rest_command.blink1_turn_off
- service: rest_command.blink1_set_color
data:
# https://github.com/velijv/home-assistant-color-helpers#rgb-to-hex
# https://community.home-assistant.io/t/advanced-light-template-help/175654
# https://community.home-assistant.io/t/using-hsv-hsb-to-set-colored-lights/15472
rgb: >-
{%raw%}
{%- set h2 = h / 360 -%}
{%- set s2 = s / 100 -%}
{%- set v = 100 -%}
{%- set i = (h2 * 6 ) | round(2,'floor') | int-%}
{%- set f = h2 * 6 - i -%}
{%- set p = v * (1 - s2) -%}
{%- set q = v * (1 - f * s2) -%}
{%- set t = v * (1 - (1 - f) * s2) -%}
{%- if i % 6 == 0 -%}
{%- set r = v | int -%}
{%- set g = t | int -%}
{%- set b = p | int -%}
{%- elif i % 6 == 1 -%}
{%- set r = q | int -%}
{%- set g = v | int -%}
{%- set b = p | int -%}
{%- elif i % 6 == 2 -%}
{%- set r = p | int -%}
{%- set g = v | int -%}
{%- set b = t | int -%}
{%- elif i % 6 == 3 -%}
{%- set r = p | int -%}
{%- set g = q | int -%}
{%- set b = v | int -%}
{%- elif i % 6 == 4 -%}
{%- set r = t | int -%}
{%- set g = p | int -%}
{%- set b = v | int -%}
{%- elif i % 6 == 5 -%}
{%- set r = v | int -%}
{%- set g = p | int -%}
{%- set b = q | int -%}
{%- endif -%}
{{ '%02x%02x%02x' | format(r, g, b) }}
{%endraw%}

View File

@@ -0,0 +1,23 @@
---
# example of simple tts integration:
# https://github.com/nagyrobi/home-assistant-custom-components-marytts/blob/main/custom_components/marytts/tts.py
template:
- binary_sensor:
- name: glados_tts_is_in_hass
state: false
notify:
- name: glados
platform: rest
resource: https://{{ hass_url }}/glados/say.mp3
method: POST_JSON
headers:
#Authorization:
content-type: application/json
data_template:
message: >-
{% raw -%}
{{ title ~ '\n' ~ message }}
{% endraw %}

View File

@@ -66,15 +66,6 @@
#} #}
- binary_sensor: - binary_sensor:
{% if blink1_enabled -%}
- name: "blink1_on"
device_class: light
state: >-
{% raw -%}
{{ state_attr('sensor.blink1', 'rgb') != "#000000" }}
{% endraw %}
{% endif %}
- name: "heating_on" - name: "heating_on"
icon: "mdi:home-thermometer" icon: "mdi:home-thermometer"
device_class: heat device_class: heat

View File

@@ -4,11 +4,20 @@
apt: apt:
name: name:
- avahi-utils - avahi-utils
- eyed3
- id3v2 - id3v2
- ffmpeg - ffmpeg
- pulseaudio-dlna
- pulseaudio-module-bluetooth
#- pulseaudio-module-raop
#- pulseaudio-module-zeroconf
#- pulseaudio-module-lirc
#- pulseaudio-module-jack
state: present state: present
tags: tags:
- homeaudio-packages - homeaudio-packages
- packages
- name: install yt-dlp - name: install yt-dlp
pip: pip:

View File

@@ -208,7 +208,7 @@ server {
# External Javascript (such as cast_sender.js for Chromecast) must # External Javascript (such as cast_sender.js for Chromecast) must
# be allowlisted. # be allowlisted.
# 'self' https://*.{{ domain }} https://{{ domain }} # 'self' https://*.{{ domain }} https://{{ domain }}
add_header Content-Security-Policy "default-src https: data: blob: http://image.tmdb.org; style-src 'self' 'unsafe-inline'; script-src 'self' 'unsafe-inline' https://www.gstatic.com/cv/js/sender/v1/cast_sender.js https://www.gstatic.com/eureka/clank/cast_sender.js https://www.gstatic.com/eureka/clank/94/cast_sender.js https://www.gstatic.com/eureka/clank/95/cast_sender.js https://www.gstatic.com/eureka/clank/96/cast_sender.js https://www.gstatic.com/eureka/clank/97/cast_sender.js https://www.gstatic.com/eureka/clank/105/cast_sender.js https://www.gstatic.com/eureka/clank/106/cast_sender.js https://www.youtube.com blob:; worker-src 'self' blob:; connect-src 'self'; object-src 'none'; frame-ancestors 'self'" always; add_header Content-Security-Policy "default-src https: data: blob: http://image.tmdb.org; style-src 'self' 'unsafe-inline'; script-src 'self' 'unsafe-inline' https://www.gstatic.com/cv/js/sender/v1/cast_sender.js https://www.gstatic.com/eureka/clank/cast_sender.js https://www.gstatic.com/eureka/clank/94/cast_sender.js https://www.gstatic.com/eureka/clank/95/cast_sender.js https://www.gstatic.com/eureka/clank/96/cast_sender.js https://www.gstatic.com/eureka/clank/97/cast_sender.js https://www.gstatic.com/eureka/clank/98/cast_sender.js https://www.gstatic.com/eureka/clank/105/cast_sender.js https://www.gstatic.com/eureka/clank/106/cast_sender.js https://www.gstatic.com/eureka/clank/111/cast_sender.js https://www.youtube.com blob:; worker-src 'self' blob:; connect-src 'self'; object-src 'none'; frame-ancestors 'self'" always;
# kill cache # kill cache
#add_header date $date_gmt always; #add_header date $date_gmt always;

View File

@@ -3,5 +3,6 @@
# m h dom mon dow # m h dom mon dow
*/60 * * * * {{ owntone_user.username }} touch {{ owntone_path }}/audio/trigger.init-rescan */60 * * * * {{ owntone_user.username }} touch {{ owntone_path }}/audio/trigger.init-rescan
08 08 * * * {{ owntone_user.username }} touch {{ owntone_path }}/audio/trigger.meta-rescan
# #

View File

@@ -4,7 +4,7 @@ general {
# below, and full access to the databases, log and local audio # below, and full access to the databases, log and local audio
uid = "owntone" uid = "owntone"
db_path = "/config/dbase_and_logs/songs3.db" db_path = "/config/{{ owntone_db }}"
# Database backup location # Database backup location
# Uncomment and specify a full path to enable abilty to use REST endpoint # Uncomment and specify a full path to enable abilty to use REST endpoint
@@ -32,8 +32,14 @@ general {
# client types like Remotes, DAAP clients (iTunes) and to the web # client types like Remotes, DAAP clients (iTunes) and to the web
# interface. Options are "any", "localhost" or the prefix to one or # interface. Options are "any", "localhost" or the prefix to one or
# more ipv4/6 networks. The default is { "localhost", "192.168", "fd" } # more ipv4/6 networks. The default is { "localhost", "192.168", "fd" }
{% set s21 = s21_cidr.split(".")[:3] | join(".") -%}
trusted_networks = { "localhost", "{{ s21 }}", "fd" } trusted_networks = {
{% for item in owntone_networks -%}
"{{ item.split(".")[:3] | join(".") | trim }}",
{% endfor -%}
"localhost",
"fd"
}
# Enable/disable IPv6 # Enable/disable IPv6
ipv6 = no ipv6 = no
@@ -112,7 +118,7 @@ library {
# albums and artists with only one track. The tracks will still be # albums and artists with only one track. The tracks will still be
# visible in other lists, e.g. songs and playlists. This setting # visible in other lists, e.g. songs and playlists. This setting
# currently only works in some remotes. # currently only works in some remotes.
#hide_singles = false hide_singles = true
# Internet streams in your playlists will by default be shown in the # Internet streams in your playlists will by default be shown in the
# "Radio" library, like iTunes does. However, some clients (like # "Radio" library, like iTunes does. However, some clients (like

View File

@@ -3,7 +3,7 @@
## NEW ## NEW
[Interface] [Interface]
Address = {{ wg.ip }}/32 Address = {{ wg.ip }}/24
{% if wg.listen|default(false) %} {% if wg.listen|default(false) %}
ListenPort = {{ wireguard_port }} ListenPort = {{ wireguard_port }}
{% endif %} {% endif %}