# Synology NAS connection settings and the Office sheet holding the schedule.
NAS_USER = 'aescript'
NAS_PASS = '@5j15SduIhP7'
NAS_IP = 'edit.tvstart.ru'
NAS_PORT = '443'
NAS_FILE = '/team-folders/nexrender/Anons.osheet'  # XXX
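# Base URL of the nexrender-server job API (this address was previously
# repeated inline at every call site below).
NEXRENDER_JOBS_URL = 'http://10.10.2.20:3000/api/v1/jobs'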
import logging
import logging.config
from pprint import pprint
from synology_drive_api.drive import SynologyDrive
import pandas as pd
from transliterate import translit
import requests
from time import sleep
import datetime
import sys
PLACEHOLDER = sys.platform == 'win32'  # dev mode: stub out nexrender calls on Windows
#XXX
if PLACEHOLDER:
    from random import random, choices

    def send_job_dumb(data):
        # Stub for send_job(): ~80% of calls "succeed", the rest return None
        # the way a failed POST would.
        if random() < 0.8:
            uid = ''.join(choices('abcdefghijklmnopqrstuvwxyz_', k=8))
            return {'uid': uid, 'outname': data['outfile_name']}

    class fake_resp:
        def __init__(self, state='queued', *args, **kwargs):
            self.state = state
            self.status_code = 200

        def json(self):
            return {'state': self.state}

    def fake_get():
        # Stub for polling: roughly 80% queued, 16% finished, 4% error.
        if random() < 0.8:
            return fake_resp()
        elif random() < 0.8:
            return fake_resp('finished')
        else:
            return fake_resp('error')
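# Logging: DEBUG to stdout and to AF_script.log, INFO and above to a Telegram chat.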
LOG_CONFIG={
'version': 1,
'handlers': {
'telegram': {
'class': 'telegram_handler.TelegramHandler',
'level':'INFO',
'token': '7830267871:AAHHDEGWxa2ZjGoCCBhIk0skWR6u3ISVRtg',
'chat_id': '-4576902221',
'formatter': 'telegram'
},
'console':{
'class':'logging.StreamHandler',
'level':'DEBUG',
'formatter': 'simple',
'stream': 'ext://sys.stdout'
},
'file':{
'class':'logging.FileHandler',
'level':'DEBUG',
'formatter': 'simple',
'encoding':'utf-8',
'filename':'AF_script.log'
},
},
'loggers': {
__name__: {
'handlers': ['console','file','telegram'],
'level': 'DEBUG'
}
},
'formatters': {
'telegram': {
'class': 'telegram_handler.HtmlFormatter',
'format': '%(levelname)s %(message)s',
            'use_emoji': True
},
'simple':{
'class': 'logging.Formatter',
'format': '%(asctime)s %(levelname)-8s %(funcName)12s() - %(message)s',
'datefmt': '%d.%m.%Y %H:%M:%S'
}
}
}
logging.config.dictConfig(LOG_CONFIG)
logger = logging.getLogger(__name__)
def load_osheet():
    logger.debug('Get data')
    synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP, NAS_PORT, https=True, dsm_version='7')
    try:
        logger.debug(synd.login())  # verify the session is established
        try:
            logger.debug('Try to download sheet')
            bio = synd.download_synology_office_file(NAS_FILE)
            # logger.debug(bio)
            logger.debug('Download Success')
            return bio
        except Exception:
            logger.exception('Download failed')
    except Exception:
        logger.exception('Login error')
def get_start(osheet):
    logger.debug('Read Start page')
    try:
        sheet = pd.read_excel(osheet, sheet_name='Start', header=1)
        sheet = sheet[sheet['STATE'] == False]  # first check: keep only rows not yet processed
        logger.debug('Check 1')
        # logger.debug(sheet)
        logger.debug("Dropping rows with missing 'DATA','TIME','SPORT','LEAGUE'")
        # sheet.dropna(subset=['DATA','TIME','SPORT','LEAGUE'], inplace=True)
        # logger.debug(sheet)
        logger.debug('Parsing OK')
        return sheet
    except Exception:
        logger.exception('error while reading excel sheet')
def get_packs(osheet):
    logger.debug('Read SPORT page')
    try:
        sheet = pd.read_excel(osheet, sheet_name='SPORT', header=0, index_col='SPORT')
        # logger.debug(sheet)
        logger.debug('Parsing OK')
        return sheet[sheet.index.notna()]  # drop rows without a SPORT value
    except Exception:
        logger.exception('error while reading excel sheet')
        raise
def get_logos(osheet):
    logger.debug('Read TEAMS page')
    try:
        sheet = pd.read_excel(osheet, sheet_name='TEAMS', header=0, index_col=[0, 1])
        logger.debug('First check')
        # logger.debug(sheet)
        logger.debug("Dropping rows with missing 'TEAM','LINK'")
        # sheet.dropna(subset=['LINK'], inplace=True)
        # logger.debug(sheet)
        logger.debug('Parsing OK')
        return sheet
    except Exception:
        logger.exception('error while reading excel sheet')
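# The Anons.osheet workbook is assumed (from how it is read above) to hold three sheets:
#   Start - one row per announce (DATA, TIME, SPORT, LEAGUE, TEAM A/B, TRIPPLE, STATE)
#   SPORT - a pack video LINK per sport, with the first row meaning "no pack"
#   TEAMS - a logo LINK indexed by (TEAM, SPORT)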
def get_sport_logo(sport, pack):
    logger.info(f'Get {sport} pack')
    try:
        d = pack.loc[sport]['LINK']
        logger.debug(d)
        if pd.isna(d):
            logger.warning(f'There is no LINK for sport "{sport}"')
            return ''
        return d
    except Exception:
        logger.exception(f"Couldn't get {sport} pack")
        return ''
def get_team_logo(team, sport, logos):
    # TEAMS sheet is indexed by the (TEAM, SPORT) pair.
    logger.info(f'Get {team}/{sport} logo')
    try:
        d = logos.loc[team, sport]['LINK']
        logger.debug(d)
        return d
    except KeyError:
        logger.exception(f'There is no LINK for team/sport {team}/{sport}')
        return ''
def make_name(ds, pack, logos):
    logger.debug('Start make name')
    fn = ''
    data = {}
    empty_sport = pack.iloc[0].name  # the first SPORT row stands for "no pack"
    if isinstance(ds['DATA'], str):
        fn += f"{ds['DATA'][6:]}{ds['DATA'][3:5]}{ds['DATA'][0:2]}"
    elif isinstance(ds['DATA'], datetime.date):
        fn += f"{ds['DATA'].year}{ds['DATA'].month:02}{ds['DATA'].day:02}"
    # If there is no pack (branding)
    if ds['SPORT'] != empty_sport:
        fn += f"_{ds['SPORT']}"
        data['sport'] = ds['SPORT']
        data['pack'] = unc2uri(get_sport_logo(ds['SPORT'], pack))
    else:
        data['sport'] = ''
        data['pack'] = ''
    fn += f'_{ds["LEAGUE"]}'
    # If there are no teams
    if pd.isna(ds['TEAM A']):
        logger.info('No Team A present')
        data['team_a'] = ''
        data['team_a_logo'] = ''
        data['team_a_logo_res'] = ''
    else:
        # The TEAM cell may carry extra '#'-separated fields; anything after
        # the second '#' is used as a logo size override (see send_job).
        name = ds['TEAM A'].split('#')
        fn += f"_{name[0]}"
        data['team_a_logo_res'] = name[2:]
        data['team_a'] = name[0]
        data['team_a_logo'] = unc2uri(get_team_logo(ds['TEAM A'], ds['SPORT'], logos))
    if pd.isna(ds['TEAM B']):
        logger.info('No Team B present')
        data['team_b'] = ''
        data['team_b_logo'] = ''
        data['team_b_logo_res'] = ''
    else:
        name = ds['TEAM B'].split('#')
        fn += f"_{name[0]}"
        data['team_b_logo_res'] = name[2:]
        data['team_b'] = name[0]
        data['team_b_logo'] = unc2uri(get_team_logo(ds['TEAM B'], ds['SPORT'], logos))
    fn = translit(fn, reversed=True)  # transliterate Cyrillic to Latin for the file name
    fn = fn.replace(' ', '-')
    fn = fn.replace("'", '')
    data['outfile_name'] = fn
    data['league'] = ds['LEAGUE']
    if isinstance(ds['TIME'], str):
        t = ds['TIME'].split(':')
        # data['time'] = ':'.join(t[0:2])
        data['time_h'] = t[0]
        data['time_m'] = t[1]
    elif isinstance(ds['TIME'], datetime.time):
        data['time_h'] = str(ds['TIME'].hour)
        data['time_m'] = str(ds['TIME'].minute)
    months = ['', 'января', 'февраля', 'марта', 'апреля', 'мая', 'июня',
              'июля', 'августа', 'сентября', 'октября', 'ноября', 'декабря']
    if isinstance(ds['DATA'], str):
        day, month = ds['DATA'].split('.')[:2]
        d = f"{int(day)} {months[int(month)]}"
    elif isinstance(ds['DATA'], datetime.date):
        d = f"{ds['DATA'].day} {months[ds['DATA'].month]}"
    data['data'] = d
    logger.debug('End make name')
    watch_list = []
    watch_list.append(send_job(data))
    if ds['TRIPPLE']:  # column name as spelled in the sheet
        data['data'] = 'сегодня'
        data['outfile_name'] = fn + '_Today'
        watch_list.append(send_job(data))
        data['data'] = 'завтра'
        data['outfile_name'] = fn + '_Tomorrow'
        watch_list.append(send_job(data))
    pprint(watch_list)
    return list(filter(None, watch_list))  # drop failed submissions
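# Illustrative example (assumed sheet values, not taken from the real sheet):
# a row with DATA='01.02.2025', TIME='19:30', SPORT='Футбол', LEAGUE='РПЛ',
# TEAM A='Спартак', TEAM B='Зенит' yields outfile_name
# '20250201_Futbol_RPL_Spartak_Zenit' and queues one job, or three when
# TRIPPLE is set (plus '_Today' and '_Tomorrow' variants).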
def send_job(data):
payload={}
payload["template"]={"src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
"composition": "pack",
"outputModule": "Start_h264",
"outputExt": "mp4"}
payload['actions']={
"postrender": [
{
"module": "@nexrender/action-encode",
"preset": "mp4",
"output": "encoded.mp4"
},
{
"module": "@nexrender/action-copy",
"input": "encoded.mp4",
"output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
}
]
}
payload['assets']=[]
    # Date from the sheet, or "сегодня"/"завтра" for TRIPPLE variants
    # Font size
if data['data'] == 'сегодня':
payload['assets'].append({
"layerName": "DATA",
"property": "Source Text.fontSize",
"type": "data",
"value": "95"
})
logger.info('For "'+data['data']+'" font set to 95')
    # Font size
elif data['data'] == 'завтра':
payload['assets'].append({
"layerName": "DATA",
"property": "Source Text.fontSize",
"type": "data",
"value": "109"
})
logger.info('For "'+data['data']+'" font set to 109')
payload['assets'].append({
"type": "data",
"layerName": "DATA",
"property": "Source Text",
"value": data['data']
})
    # Time
    # Single-digit hour: shift the anchor points so the clock stays aligned
    if len(data['time_h']) < 2:
payload['assets'].append({
"layerName": "TIME_H",
"property": "transform.anchorPoint",
"type": "data",
"value": [37,0]
})
payload['assets'].append({
"layerName": "TIME_M",
"property": "transform.anchorPoint",
"type": "data",
"value": [37,0]
})
payload['assets'].append({
"layerName": "TIME",
"property": "transform.anchorPoint",
"type": "data",
"value": [37,0]
})
logger.info('Shifting the "Time" by 37 pixels')
payload['assets'].append({
"type": "data",
"layerName": "TIME_H",
"property": "Source Text",
"value": data['time_h']
})
payload['assets'].append({
"type": "data",
"layerName": "TIME_M",
"property": "Source Text",
"value": data['time_m']
})
    # League
payload['assets'].append({
"type": "data",
"layerName": "LEAGUE",
"property": "Source Text",
"value": data['league']
})
    # Font size
if len(data['league'])>16:
payload['assets'].append({
"layerName": "LEAGUE",
"property": "Source Text.fontSize",
"type": "data",
"value": "73"
})
logger.info('For "'+data['league']+'" font set to 73')
    # Sport
if data['sport']:
payload['assets'].append({
"type": "data",
"layerName": "SPORT",
"property": "Source Text",
"value": data['sport']
})
    # Team A
if data['team_a']:
payload['assets'].append({
"type": "data",
"layerName": "TEAM_A",
"property": "Source Text",
"value": data['team_a']
})
    # Team B
if data['team_b']:
payload['assets'].append({
"type": "data",
"layerName": "TEAM_B",
"property": "Source Text",
"value": data['team_b']
})
    # Team A logo
if data['team_a_logo']:
payload['assets'].append({
"src": data['team_a_logo'],
"type": "image",
"layerName": "TEAM_A_LOGO"
})
    if data['team_a_logo_res']:
        payload['assets'].append({
            "property": "scale",
            "type": "data",
            # AE expression: scale the layer so its longest side ends up at the
            # requested pixel size
            "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_a_logo_res'][0]+"/max_size*100;[real_size,real_size]",
            "layerName": "TEAM_A_LOGO"
        })
        logger.info('Team A logo was resized to '+data['team_a_logo_res'][0])
    # Team B logo
if data['team_b_logo']:
payload['assets'].append({
"src": data['team_b_logo'],
"type": "image",
"layerName": "TEAM_B_LOGO"
})
    if data['team_b_logo_res']:
        payload['assets'].append({
            "property": "scale",
            "type": "data",
            # Same longest-side scaling expression as for Team A
            "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_b_logo_res'][0]+"/max_size*100;[real_size,real_size]",
            "layerName": "TEAM_B_LOGO"
        })
        logger.info('Team B logo was resized to '+data['team_b_logo_res'][0])
    # Top pack (branding)
if data['pack']:
payload['assets'].append({
"src": data['pack'],
"type": "video",
"layerName": "TOP"
})
    r = requests.post(NEXRENDER_JOBS_URL, json=payload)
    if r.status_code == 200:
        res = r.json()
        uid = res['uid']
        return {'uid': uid, 'outname': data['outfile_name']}
    logger.error(f"Job submission failed for {data['outfile_name']}: HTTP {r.status_code}")
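# Note: nexrender-server answers POST /api/v1/jobs with the created job as
# JSON; only its 'uid' is used here, for the GET /api/v1/jobs/<uid> polling
# below. The assumed shape is roughly {'uid': '...', 'state': 'queued', ...},
# inferred from how the responses are consumed, not from the nexrender docs.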
def unc2uri(unc):
    # Convert a Windows/UNC path to a file:// URI; pass through anything that
    # already has a URI scheme.
    from urllib.parse import urlparse
    from pathlib import PureWindowsPath
    p = urlparse(unc)
    if len(p.scheme) > 2:
        # 'file', 'http', 'smb', ... is already a URI; a bare drive letter
        # ('c:\...') parses as a one-character scheme and falls through.
        return unc
    else:
        p = PureWindowsPath(unc)
        return p.as_uri()
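# Illustrative conversions (hypothetical paths):
#   unc2uri(r'\\10.10.35.3\edit\logo.png') -> 'file://10.10.35.3/edit/logo.png'
#   unc2uri('c:/temp/logo.png')            -> 'file:///c:/temp/logo.png'
#   unc2uri('http://host/logo.png')        -> 'http://host/logo.png'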
# XXX
if PLACEHOLDER:
    send_job = send_job_dumb  # dev mode: swap in the stub submitter
logger.info('Start!')
osheet = load_osheet()
if osheet is None:
    logger.critical('Could not load the sheet, aborting')
    sys.exit(1)
start = get_start(osheet)
pack=get_packs(osheet)
logos=get_logos(osheet)
# Delete previous jobs that finished or errored
r = requests.get(NEXRENDER_JOBS_URL)
if r.status_code == 200:
    jobs = r.json()
    s = [{'uid': i['uid'], 'state': i['state']} for i in jobs]
    for job in s:
        if job['state'] in ('finished', 'error'):
            requests.delete(f"{NEXRENDER_JOBS_URL}/{job['uid']}")
watch_list = []
for _, row in start.iterrows():
    watch_list += make_name(row, pack, logos)
logger.info(f"Queued {len(watch_list)} jobs")
while watch_list:
    sleep(60)
    # Iterate over a copy: removing items from the list being looped over
    # would otherwise skip the following element.
    for job in watch_list[:]:
        # XXX
        if PLACEHOLDER:
            r = fake_get()
        else:
            r = requests.get(f"{NEXRENDER_JOBS_URL}/{job['uid']}")
        if r.status_code == 200:
            state = r.json()['state']
            if state == 'finished':
                watch_list.remove(job)
                logger.info(f"{job['outname']}, {state}, {len(watch_list)} to go")
                logger.debug(f"{job['uid']} - {state}")
            elif state == 'error':
                watch_list.remove(job)
                logger.warning(f"{job}, {state}, {len(watch_list)} to go")
    print('.', end='', flush=True)  # heartbeat between polls
logger.info('End!')