diff --git a/AF_script_test_1.py b/AF_script_test_1.py
new file mode 100644
index 0000000..da92e0e
--- /dev/null
+++ b/AF_script_test_1.py
@@ -0,0 +1,400 @@
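+# AF_script_test_1.py — downloads the Anons.osheet spreadsheet from Synology Drive, builds one
+# nexrender job per announcement row (After Effects template PackShot_Sborka_eng.aepx), posts
+# the jobs to the nexrender API at 10.10.2.20:3000 and polls them until they finish.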
+# NAS_IP='walle.barabanov.tv'
+# NAS_PORT='443'
+# NAS_FILE='/mydrive/Drive/Anons.osheet'
+
+NAS_USER='aescript'
+NAS_PASS='@5j15SduIhP7'
+NAS_IP='edit.tvstart.ru'
+NAS_PORT='443'
+NAS_FILE='/team-folders/nexrender/Anons.osheet'
+
+import logging
+from pprint import pprint
+from synology_drive_api.drive import SynologyDrive
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+
+logger = logging.getLogger(__name__)
+
+logging.basicConfig(filename='AF_script.log', level=logging.INFO,format='%(asctime)s %(levelname)s %(message)s')
+
+def load_osheet():
+ logger.info('Get data')
+ synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP,NAS_PORT,https=True,dsm_version='7')
+
+ try:
+        logger.info(synd.login())  # check that the session has been established
+ try:
+ logger.debug('Try to download sheet')
+ bio = synd.download_synology_office_file(NAS_FILE)
+ logger.debug(bio)
+ logger.info('Download Success')
+ return bio
+ except:
+ logger.warning('Download fails')
+ except:
+ logger.warning('Login error')
+
+def get_start(osheet):
+ logger.info('Read Start page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='Start',header=1)
+        sheet=sheet[sheet['STATE']==False] # first filter: keep rows whose STATE is False (not processed yet)
+        logger.debug('Filter 1: STATE == False')
+        logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'DATA','TIME','SPORT','LEAGUE'")
+ sheet.dropna(subset=['DATA','TIME','SPORT','LEAGUE'], inplace=True)
+ logger.debug(sheet)
+ logger.info('Parsing OK')
+ return sheet
+ except:
+ logger.warning('error while read excel sheet')
+
+def get_packs(osheet):
+ logger.info('Read SPORT page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='SPORT',header=0,index_col='SPORT')
+ logger.debug(sheet)
+ logger.info('Parsing OK')
+ return sheet[sheet.index.notna()]
+ except:
+ logger.warning('error while read excel sheet')
+ raise
+
+def get_logos(osheet):
+ logger.info('Read TEAMS page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='TEAMS',header=0,index_col=[0,1])
+        logger.debug('First check')
+        logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'TEAM','LINK'")
+ sheet.dropna(subset=['LINK'], inplace=True)
+ logger.debug(sheet)
+ logger.info('Parsing OK')
+ return sheet
+ except:
+ logger.warning('error while read excel sheet')
+
+def get_sport_logo(sport,pack):
+ logger.info('Get '+sport+' pack')
+ try:
+ d=pack.loc[sport]['LINK']
+ logger.debug(d)
+ if pd.isna(d):
+ logger.warning(f'There is no LINK for sport "{sport}"')
+ return ''
+ return d
+ except Exception as inst:
+ logger.warning("Couldn't get "+sport+" pack")
+ logger.warning(inst)
+ return ''
+
+def get_team_logo(team,sport,logos):
+ logger.info(f'Get {team}/{sport} logo')
+ try:
+ d=logos.loc[team,sport]['LINK']
+ logger.debug(d)
+ return d
+ except KeyError as inst:
+        logger.warning(f"There is no LINK for {team}/{sport}")
+ return ''
+
+def make_name(ds,pack,logos):
+ logger.info('Start make name')
+
+ fn=''
+ data={}
+ empty_sport=pack.iloc[0].name
+ if isinstance(ds['DATA'],str):
+ fn+=f"{ds['DATA'][6:]}{ds['DATA'][3:5]}{ds['DATA'][0:2]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ fn+=f"{ds['DATA'].year}{ds['DATA'].month:02}{ds['DATA'].day:02}"
+
+    # If there is no pack (sport design)
+ if ds['SPORT']!=empty_sport:
+ fn+=f"_{ds['SPORT']}"
+ data['sport']=ds['SPORT']
+ data['pack']=unc2uri(get_sport_logo(ds['SPORT'],pack))
+ else:
+ data['sport']=''
+ data['pack']=''
+ fn+=f'_{ds["LEAGUE"]}'
+
+    # If there are no teams
+ if pd.isna(ds['TEAM A']):
+ logger.info('No Team A present')
+ data['team_a']=''
+ data['team_a_logo']=''
+ else:
+ fn+=f"_{ds['TEAM A']}"
+ data['team_a']=ds['TEAM A']
+ data['team_a_logo']=unc2uri(get_team_logo(ds['TEAM A'],ds['SPORT'],logos))
+
+ if pd.isna(ds['TEAM B']):
+ logger.info('No Team B present')
+ data['team_b']=''
+ data['team_b_logo']=''
+ else:
+ fn+=f"_{ds['TEAM B']}"
+ data['team_b']=ds['TEAM B']
+ data['team_b_logo']=unc2uri(get_team_logo(ds['TEAM B'],ds['SPORT'],logos))
+
+ fn=translit(fn,reversed=True)
+ fn=fn.replace(' ','-')
+ fn=fn.replace("'",'')
+
+ data['outfile_name']=fn
+ data['league']=ds['LEAGUE']
+ if isinstance(ds['TIME'],str):
+ t=ds['TIME'].split(':')
+ # data['time']=':'.join(t[0:2])
+ data['time_h']= t[0]
+ data['time_m']= t[1]
+ elif isinstance(ds['TIME'],datetime.time):
+ data['time_h']= str(ds['TIME'].hour)
+ data['time_m']= str(ds['TIME'].minute)
+
+ if isinstance(ds['DATA'],str):
+ d=ds['DATA'].split('.')
+ d=f"{int(d[0])} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][int(d[1])]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ d=f"{ds['DATA'].day} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][ds['DATA'].month]}"
+ data['data']=d
+
+ logger.debug(data)
+ logger.debug(fn)
+ logger.info("End make name")
+
+ watch_list=[]
+ watch_list.append(send_job(data))
+ if ds['TRIPPLE']:
+ data['data']='сегодня'
+ data['outfile_name']=fn+'_Today'
+ watch_list.append(send_job(data))
+ data['data']='завтра'
+ data['outfile_name']=fn+'_Tomorrow'
+ watch_list.append(send_job(data))
+ pprint(watch_list)
+ return list(filter(None,watch_list))
+
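+# send_job() turns the dict produced by make_name() into a nexrender job payload; it expects
+# the keys outfile_name, data, time_h, time_m, league, sport, pack, team_a, team_a_logo,
+# team_b and team_b_logo.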
+def send_job(data):
+ payload={}
+ payload["template"]={"src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
+ "composition": "pack",
+ "outputModule": "Start_h264",
+ "outputExt": "mp4"}
+ payload['actions']={
+ "postrender": [
+ {
+ "module": "@nexrender/action-encode",
+ "preset": "mp4",
+ "output": "encoded.mp4"
+ },
+ {
+ "module": "@nexrender/action-copy",
+ "input": "encoded.mp4",
+ "output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
+ }
+ ]
+ }
+
+ payload['assets']=[]
+
+    # Date from the sheet, or "сегодня"/"завтра" ("today"/"tomorrow")
+    # Font size
+ if data['data'] == 'сегодня':
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": "95"
+ })
+ logger.info('For "'+data['data']+'" font set to 95')
+
+    # Font size
+ elif data['data'] == 'завтра':
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": "109"
+ })
+ logger.info('For "'+data['data']+'" font set to 109')
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "DATA",
+ "property": "Source Text",
+ "value": data['data']
+ })
+
+    # Time
+ if len(data['time_h'])<2:
+ payload['assets'].append({
+ "layerName": "TIME_H",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": [37,0]
+ })
+ payload['assets'].append({
+ "layerName": "TIME_M",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": [37,0]
+ })
+ payload['assets'].append({
+ "layerName": "TIME",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": [37,0]
+ })
+ logger.info('Shifting the "Time" by 37 pixels')
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_H",
+ "property": "Source Text",
+ "value": data['time_h']
+ })
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_M",
+ "property": "Source Text",
+ "value": data['time_m']
+ })
+
+    # League
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "LEAGUE",
+ "property": "Source Text",
+ "value": data['league']
+ })
+
+    # Font size
+ if len(data['league'])>16:
+ payload['assets'].append({
+ "layerName": "LEAGUE",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": "73"
+ })
+ logger.info('For "'+data['league']+'" font set to 73')
+
+    # Sport
+ if data['sport']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "SPORT",
+ "property": "Source Text",
+ "value": data['sport']
+ })
+
+    # Team A
+ if data['team_a']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_A",
+ "property": "Source Text",
+ "value": data['team_a']
+ })
+
+    # Team B
+ if data['team_b']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_B",
+ "property": "Source Text",
+ "value": data['team_b']
+ })
+
+    # Team A logo
+ if data['team_a_logo']:
+ payload['assets'].append({
+ "src": data['team_a_logo'],
+ "type": "image",
+ "layerName": "TEAM_A_LOGO"
+ })
+
+    # Team B logo
+ if data['team_b_logo']:
+ payload['assets'].append({
+ "src": data['team_b_logo'],
+ "type": "image",
+ "layerName": "TEAM_B_LOGO"
+ })
+
+    # Top pack (sport design)
+ if data['pack']:
+ payload['assets'].append({
+ "src": data['pack'],
+ "type": "video",
+ "layerName": "TOP"
+ })
+
+ url='http://10.10.2.20:3000/api/v1/jobs'
+ r=requests.post(url,json=payload)
+ if r.status_code==200:
+ res=r.json()
+ # pprint(res)
+ uid=res['uid']
+ return {'uid':uid,'outname':data['outfile_name']}
+
+def unc2uri(unc):
+ if unc[:2]=='\\\\':
+        uri='file:'+unc.replace('\\','/')  # no backslashes inside an f-string expression (SyntaxError before Python 3.12)
+ else:
+ uri=unc
+ return uri
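+# e.g. unc2uri(r'\\server\share\logo.png') -> 'file://server/share/logo.png'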
+
+logger.info('Start!')  # Starting
+
+osheet=load_osheet()
+start=get_start(osheet)
+pack=get_packs(osheet)
+logos=get_logos(osheet)
+
+# Remove previous jobs that have finished or failed
+r=requests.get('http://10.10.2.20:3000/api/v1/jobs')
+if r.status_code==200:
+ jobs=r.json()
+ s=[{'uid':i['uid'],'state':i['state']} for i in jobs]
+ for job in s:
+ if job['state'] in ('finished', 'error'):
+ requests.delete(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+watch_list=[]
+for row in start.iterrows():
+ row=row[1]
+ watch_list+=make_name(row,pack,logos)
+logger.info(f"Queued {len(watch_list)} jobs")
+
+while watch_list:
+ sleep(60)
+    for job in list(watch_list):  # iterate over a copy, items are removed inside the loop
+ r=requests.get(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+ if r.status_code==200 and r.json()['state'] in ('finished', 'error'):
+ watch_list.remove(job)
+ logger.info(f"{job}, {r.json()['state']}, {len(watch_list)} to go")
+ print('.',end="")
+
+logger.info('End!')  # Done
+
+
+# with open('myjob.json') as myjob:
+# headers={'content-type':'application/json'}
+# print('start request')
+# r=requests.post('http://10.10.2.20:3000/api/v1/jobs',
+# headers=headers, data=myjob.read())
+# print('end request')
+# print(r.status_code)
+
+#curl http://10.10.2.20:3000/api/v1/jobs >jobs.json
+# import json
+# with open('jobs.json') as f:
+# jobs=json.load(f)
+# s=[(i['uid'],i['state']) for i in jobs]
+# pprint(s)
\ No newline at end of file
diff --git a/AF_script_test_2.py b/AF_script_test_2.py
new file mode 100644
index 0000000..abccbd5
--- /dev/null
+++ b/AF_script_test_2.py
@@ -0,0 +1,506 @@
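+# AF_script_test_2.py — same pipeline as AF_script_test_1.py, with dictConfig logging
+# (console, file and Telegram handlers), optional logo scaling driven by '#'-separated
+# values in the team cell, and fake nexrender stand-ins (PLACEHOLDER) for dry runs on win32.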
+NAS_USER='aescript'
+NAS_PASS='@5j15SduIhP7'
+NAS_IP='edit.tvstart.ru'
+NAS_PORT='443'
+NAS_FILE='/team-folders/nexrender/Anons.osheet' #XXX
+
+import logging
+import logging.config
+from pprint import pprint
+from synology_drive_api.drive import SynologyDrive
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+import sys
+PLACEHOLDER = sys.platform=='win32'
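+# PLACEHOLDER is True on the win32 dev machine: send_job and the job-state requests are then
+# replaced with the fake stand-ins below, so the script can be exercised without nexrender.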
+
+#XXX
+if PLACEHOLDER:
+ from random import random, choices
+
+ def send_job_dumb(data):
+ if random()<0.8:
+ uid=''.join(choices('abcdefghijklmnopqrstuvwxyz_',k=8))
+ return {'uid':uid,'outname':data['outfile_name']}
+
+ class fake_resp:
+        def __init__(self,state='queued',*kargs,**kwargs):
+            self.state=state
+            self.status_code=200  # assignment (was a no-op comparison)
+
+ def json(self):
+ return {'state':self.state}
+
+
+ def fake_get():
+        if random()<0.8:
+            return fake_resp()
+        elif random()<0.8:
+ return fake_resp('finished')
+ else:
+ return fake_resp('error')
+
+LOG_CONFIG={
+ 'version': 1,
+ 'handlers': {
+ 'telegram': {
+ 'class': 'telegram_handler.TelegramHandler',
+ 'level':'INFO',
+ 'token': '7830267871:AAHHDEGWxa2ZjGoCCBhIk0skWR6u3ISVRtg',
+ 'chat_id': '-4576902221',
+ 'formatter': 'telegram'
+ },
+ 'console':{
+ 'class':'logging.StreamHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'stream': 'ext://sys.stdout'
+ },
+ 'file':{
+ 'class':'logging.FileHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'encoding':'utf-8',
+ 'filename':'AF_script.log'
+ },
+ },
+ 'loggers': {
+ __name__: {
+ 'handlers': ['console','file','telegram'],
+ 'level': 'DEBUG'
+ }
+ },
+ 'formatters': {
+ 'telegram': {
+ 'class': 'telegram_handler.HtmlFormatter',
+ 'format': '%(levelname)s %(message)s',
+ 'use_emoji': 'True'
+ },
+ 'simple':{
+ 'class': 'logging.Formatter',
+ 'format': '%(asctime)s %(levelname)-8s %(funcName)12s() - %(message)s',
+ 'datefmt': '%d.%m.%Y %H:%M:%S'
+
+ }
+ }
+
+}
+logging.config.dictConfig(LOG_CONFIG)
+logger = logging.getLogger(__name__)
+
+def load_osheet():
+ logger.debug('Get data')
+ synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP,NAS_PORT,https=True,dsm_version='7')
+
+ try:
+        logger.debug(synd.login())  # check that the session has been established
+ try:
+ logger.debug('Try to download sheet')
+ bio = synd.download_synology_office_file(NAS_FILE)
+ # logger.debug(bio)
+ logger.debug('Download Success')
+ return bio
+ except:
+ logger.exception('Download fails')
+ except:
+ logger.exception('Login error')
+
+def get_start(osheet):
+ logger.debug('Read Start page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='Start',header=1)
+        sheet=sheet[sheet['STATE']==False] # first filter: keep rows whose STATE is False (not processed yet)
+        logger.debug('Filter 1: STATE == False')
+        # logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'DATA','TIME','SPORT','LEAGUE'")
+ # sheet.dropna(subset=['DATA','TIME','SPORT','LEAGUE'], inplace=True)
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_packs(osheet):
+ logger.debug('Read SPORT page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='SPORT',header=0,index_col='SPORT')
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet[sheet.index.notna()]
+ except:
+ logger.exception('error while read excel sheet')
+ raise
+
+def get_logos(osheet):
+ logger.debug('Read TEAMS page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='TEAMS',header=0,index_col=[0,1])
+        logger.debug('First check')
+        # logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'TEAM','LINK'")
+ # sheet.dropna(subset=['LINK'], inplace=True)
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_sport_logo(sport,pack):
+ logger.info('Get '+sport+' pack')
+ try:
+ d=pack.loc[sport]['LINK']
+ logger.debug(d)
+ if pd.isna(d):
+ logger.warning(f'There is no LINK for sport "{sport}"')
+ return ''
+ return d
+ except Exception as inst:
+ logger.exception("Couldn't get "+sport+" pack")
+ # logger.exception(inst)
+ return ''
+
+def get_team_logo(team,sport,logos):
+ logger.info(f'Get {team}/{sport} logo')
+ try:
+ d=logos.loc[team,sport]['LINK']
+ logger.debug(d)
+ return d
+ except KeyError as inst:
+        logger.exception(f"There is no LINK for {team}/{sport}")
+ return ''
+
+def make_name(ds,pack,logos):
+ logger.debug('Start make name')
+
+ fn=''
+ data={}
+ empty_sport=pack.iloc[0].name
+ if isinstance(ds['DATA'],str):
+ fn+=f"{ds['DATA'][6:]}{ds['DATA'][3:5]}{ds['DATA'][0:2]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ fn+=f"{ds['DATA'].year}{ds['DATA'].month:02}{ds['DATA'].day:02}"
+
+    # If there is no pack (sport design)
+ if ds['SPORT']!=empty_sport:
+ fn+=f"_{ds['SPORT']}"
+ data['sport']=ds['SPORT']
+ data['pack']=unc2uri(get_sport_logo(ds['SPORT'],pack))
+ else:
+ data['sport']=''
+ data['pack']=''
+ fn+=f'_{ds["LEAGUE"]}'
+
+    # If there are no teams
+ if pd.isna(ds['TEAM A']):
+ logger.info('No Team A present')
+ data['team_a']=''
+ data['team_a_logo']=''
+ data['team_a_logo_res']=''
+ else:
+ name = ds['TEAM A'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_a_logo_res']=name[2:]
+ data['team_a']=name[0]
+        data['team_a_logo']=unc2uri(get_team_logo(ds['TEAM A'],ds['SPORT'],logos))
+
+ if pd.isna(ds['TEAM B']):
+ logger.info('No Team B present')
+ data['team_b']=''
+ data['team_b_logo']=''
+ data['team_b_logo_res']=''
+ else:
+ name = ds['TEAM B'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_b_logo_res']=name[2:]
+ data['team_b']=name[0]
+ data['team_b_logo']=unc2uri(get_team_logo(ds['TEAM B'],ds['SPORT'],logos))
+
+ fn=translit(fn,reversed=True)
+ fn=fn.replace(' ','-')
+ fn=fn.replace("'",'')
+
+ data['outfile_name']=fn
+ data['league']=ds['LEAGUE']
+ if isinstance(ds['TIME'],str):
+ t=ds['TIME'].split(':')
+ # data['time']=':'.join(t[0:2])
+ data['time_h']= t[0]
+ data['time_m']= t[1]
+ elif isinstance(ds['TIME'],datetime.time):
+ data['time_h']= str(ds['TIME'].hour)
+ data['time_m']= str(ds['TIME'].minute)
+
+ if isinstance(ds['DATA'],str):
+ d=ds['DATA'].split('.')
+ d=f"{int(d[0])} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][int(d[1])]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ d=f"{ds['DATA'].day} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][ds['DATA'].month]}"
+ data['data']=d
+
+ # logger.debug(data)
+ # logger.debug(fn)
+ logger.debug("End make name")
+
+ watch_list=[]
+ watch_list.append(send_job(data))
+ if ds['TRIPPLE']:
+ data['data']='сегодня'
+ data['outfile_name']=fn+'_Today'
+ watch_list.append(send_job(data))
+ data['data']='завтра'
+ data['outfile_name']=fn+'_Tomorrow'
+ watch_list.append(send_job(data))
+ pprint(watch_list)
+ return list(filter(None,watch_list))
+
+def send_job(data):
+ payload={}
+ payload["template"]={"src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
+ "composition": "pack",
+ "outputModule": "Start_h264",
+ "outputExt": "mp4"}
+ payload['actions']={
+ "postrender": [
+ {
+ "module": "@nexrender/action-encode",
+ "preset": "mp4",
+ "output": "encoded.mp4"
+ },
+ {
+ "module": "@nexrender/action-copy",
+ "input": "encoded.mp4",
+ "output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
+ }
+ ]
+ }
+
+ payload['assets']=[]
+
+    # Date from the sheet, or "сегодня"/"завтра" ("today"/"tomorrow")
+    # Font size
+ if data['data'] == 'сегодня':
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": "95"
+ })
+ logger.info('For "'+data['data']+'" font set to 95')
+
+    # Font size
+ elif data['data'] == 'завтра':
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": "109"
+ })
+ logger.info('For "'+data['data']+'" font set to 109')
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "DATA",
+ "property": "Source Text",
+ "value": data['data']
+ })
+
+    # Time
+ if len(data['time_h'])<2:
+ payload['assets'].append({
+ "layerName": "TIME_H",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": [37,0]
+ })
+ payload['assets'].append({
+ "layerName": "TIME_M",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": [37,0]
+ })
+ payload['assets'].append({
+ "layerName": "TIME",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": [37,0]
+ })
+ logger.info('Shifting the "Time" by 37 pixels')
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_H",
+ "property": "Source Text",
+ "value": data['time_h']
+ })
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_M",
+ "property": "Source Text",
+ "value": data['time_m']
+ })
+
+    # League
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "LEAGUE",
+ "property": "Source Text",
+ "value": data['league']
+ })
+
+    # Font size
+ if len(data['league'])>16:
+ payload['assets'].append({
+ "layerName": "LEAGUE",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": "73"
+ })
+ logger.info('For "'+data['league']+'" font set to 73')
+
+    # Sport
+ if data['sport']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "SPORT",
+ "property": "Source Text",
+ "value": data['sport']
+ })
+
+    # Team A
+ if data['team_a']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_A",
+ "property": "Source Text",
+ "value": data['team_a']
+ })
+
+    # Team B
+ if data['team_b']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_B",
+ "property": "Source Text",
+ "value": data['team_b']
+ })
+
+    # Team A logo
+ if data['team_a_logo']:
+ payload['assets'].append({
+ "src": data['team_a_logo'],
+ "type": "image",
+ "layerName": "TEAM_A_LOGO"
+ })
+
+ if data['team_a_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_a_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_A_LOGO"
+ })
+ logger.info('Team A logo was resized to '+data['team_a_logo_res'][0])
+
+
+    # Team B logo
+ if data['team_b_logo']:
+ payload['assets'].append({
+ "src": data['team_b_logo'],
+ "type": "image",
+ "layerName": "TEAM_B_LOGO"
+ })
+
+ if data['team_b_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_b_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_B_LOGO"
+ })
+ logger.info('Team B logo was resized to '+data['team_b_logo_res'][0])
+
+    # Top pack (sport design)
+ if data['pack']:
+ payload['assets'].append({
+ "src": data['pack'],
+ "type": "video",
+ "layerName": "TOP"
+ })
+
+ url='http://10.10.2.20:3000/api/v1/jobs'
+
+ r=requests.post(url,json=payload)
+ if r.status_code==200:
+ res=r.json()
+ # pprint(res)
+ uid=res['uid']
+ return {'uid':uid,'outname':data['outfile_name']}
+
+def unc2uri(unc):
+
+ from urllib.parse import urlparse
+ from pathlib import PureWindowsPath
+
+ p= urlparse(unc)
+ if len(p.scheme)>2:
+ return unc
+ else:
+ p=PureWindowsPath(unc)
+ return p.as_uri()
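+    # e.g. unc2uri(r'\\server\share\logo.png') -> 'file://server/share/logo.png';
+    # inputs that already carry a URI scheme (http://, file://) are returned unchanged.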
+
+ # if unc[:2]=='\\\\':
+ # uri=f"file:{unc.replace('\\','/')}"
+ # else:
+ # uri=unc
+ # return uri
+
+
+#XXX
+if PLACEHOLDER:
+ send_job=send_job_dumb
+
+logger.info('Start!')  # Starting
+
+osheet=load_osheet()
+start=get_start(osheet)
+pack=get_packs(osheet)
+logos=get_logos(osheet)
+
+# Remove previous jobs that have finished or failed
+r=requests.get('http://10.10.2.20:3000/api/v1/jobs')
+if r.status_code==200:
+ jobs=r.json()
+ s=[{'uid':i['uid'],'state':i['state']} for i in jobs]
+ for job in s:
+ if job['state'] in ('finished', 'error'):
+ requests.delete(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+
+watch_list=[]
+for row in start.iterrows():
+ row=row[1]
+ watch_list+=make_name(row,pack,logos)
+logger.info(f"Queued {len(watch_list)} jobs")
+
+while watch_list:
+ sleep(60)
+    for job in list(watch_list):  # iterate over a copy, items are removed inside the loop
+ #XXX
+ if PLACEHOLDER:
+ r=fake_get()
+ else:
+ r=requests.get(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+
+ if r.status_code==200 and r.json()['state'] =='finished':
+ watch_list.remove(job)
+ logger.info(f"{job['outname']}, {r.json()['state']}, {len(watch_list)} to go")
+ logger.debug(f"{job['uid']} - {r.json()['state']}")
+ elif r.status_code==200 and r.json()['state'] == 'error':
+ watch_list.remove(job)
+ logger.warning(f"{job}, {r.json()['state']}, {len(watch_list)} to go")
+ print('.',end="")
+
+logger.info('End!')  # Done
diff --git a/AF_script_test_3.5.py b/AF_script_test_3.5.py
new file mode 100644
index 0000000..3639401
--- /dev/null
+++ b/AF_script_test_3.5.py
@@ -0,0 +1,584 @@
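+# AF_script_test_3.5.py — adds a Telegram bot front end (pyTelegramBotAPI): /start and /help
+# reply with the command list, and /ибаш runs the whole download/parse/render pipeline from
+# chat; keeps the DATA font-size/anchor presets and the CHANEL suffix in the output file name.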
+NAS_USER='aescript'
+NAS_PASS='@5j15SduIhP7'
+NAS_IP='edit.tvstart.ru'
+NAS_PORT='443'
+NAS_FILE='/team-folders/nexrender/Anons.osheet' #XXX
+TOKEN='7830267871:AAHHDEGWxa2ZjGoCCBhIk0skWR6u3ISVRtg'
+GROUP_CHAT='-4576902221'
+import telebot
+import logging
+import logging.config
+from pprint import pprint
+from synology_drive_api.drive import SynologyDrive
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+import sys
+PLACEHOLDER = sys.platform=='win32'
+
+#XXX
+if PLACEHOLDER:
+ from random import random, choices
+
+ def send_job_dumb(data):
+ if random()<0.8:
+ uid=''.join(choices('abcdefghijklmnopqrstuvwxyz_',k=8))
+ return {'uid':uid,'outname':data['outfile_name']}
+
+ class fake_resp:
+        def __init__(self,state='queued',*kargs,**kwargs):
+            self.state=state
+            self.status_code=200  # assignment (was a no-op comparison)
+
+ def json(self):
+ return {'state':self.state}
+
+
+ def fake_get():
+        if random()<0.8:
+            return fake_resp()
+        elif random()<0.8:
+ return fake_resp('finished')
+ else:
+ return fake_resp('error')
+
+LOG_CONFIG={
+ 'version': 1,
+ 'handlers': {
+ 'telegram': {
+ 'class': 'telegram_handler.TelegramHandler',
+ 'level':'INFO',
+ 'token': '7830267871:AAHHDEGWxa2ZjGoCCBhIk0skWR6u3ISVRtg',
+ 'chat_id': '-4576902221',
+ 'formatter': 'telegram'
+ },
+ 'console':{
+ 'class':'logging.StreamHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'stream': 'ext://sys.stdout'
+ },
+ 'file':{
+ 'class':'logging.FileHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'encoding':'utf-8',
+ 'filename':'AF_script.log'
+ },
+ },
+ 'loggers': {
+ __name__: {
+ 'handlers': ['console','file','telegram'],
+ 'level': 'DEBUG'
+ }
+ },
+ 'formatters': {
+ 'telegram': {
+ 'class': 'telegram_handler.HtmlFormatter',
+ 'format': '%(levelname)s %(message)s',
+ 'use_emoji': "True"
+ },
+ 'simple':{
+ 'class': 'logging.Formatter',
+ 'format': '%(asctime)s %(levelname)-8s %(funcName)12s() - %(message)s',
+ 'datefmt': '%d.%m.%Y %H:%M:%S'
+
+ }
+ }
+
+}
+logging.config.dictConfig(LOG_CONFIG)
+logger = logging.getLogger(__name__)
+logger.handlers[2].formatter.use_emoji=True
+
+bot = telebot.TeleBot(TOKEN)
+@bot.message_handler(commands=['help', 'start'])
+def send_welcome(message):
+ bot.send_chat_action(message.chat.id,'typing')
+ if message.from_user.username:
+ user=message.from_user.username
+ else:
+ user='!'
+ sleep(1)
+    bot.reply_to(message, "Привет "+user+"\n Я помогу тебе сделать Анонсы!\n Вот список команд, которые я могу выполнить:\n /ибаш - наибашу обработку и рендер!\n")
+
+def load_osheet():
+ logger.debug('Get data')
+ synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP,NAS_PORT,https=True,dsm_version='7')
+
+ try:
+        logger.debug(synd.login())  # check that the session has been established
+ try:
+ logger.debug('Try to download sheet')
+ bio = synd.download_synology_office_file(NAS_FILE)
+ # logger.debug(bio)
+ logger.debug('Download Success')
+ return bio
+ except:
+ logger.exception('Download fails')
+ except:
+ logger.exception('Login error')
+
+def get_start(osheet):
+ logger.debug('Read Start page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='Start',header=1)
+        sheet=sheet[sheet['STATE']==False] # first filter: keep rows whose STATE is False (not processed yet)
+        logger.debug('Filter 1: STATE == False')
+        # logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'DATA','TIME','SPORT','LEAGUE'")
+ # sheet.dropna(subset=['DATA','TIME','SPORT','LEAGUE'], inplace=True)
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_packs(osheet):
+ logger.debug('Read SPORT page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='SPORT',header=0,index_col='SPORT')
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet[sheet.index.notna()]
+ except:
+ logger.exception('error while read excel sheet')
+ raise
+
+def get_logos(osheet):
+ logger.debug('Read TEAMS page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='TEAMS',header=0,index_col=[0,1])
+        logger.debug('First check')
+        # logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'TEAM','LINK'")
+ # sheet.dropna(subset=['LINK'], inplace=True)
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_sport_logo(sport,pack):
+ logger.info('Get '+sport+' pack')
+ try:
+ d=pack.loc[sport]['LINK']
+ logger.debug(d)
+ if pd.isna(d):
+ logger.warning(f'There is no LINK for sport "{sport}"')
+ return ''
+ return d
+ except Exception as inst:
+ logger.exception("Couldn't get "+sport+" pack")
+ # logger.exception(inst)
+ return ''
+
+def get_team_logo(team,sport,logos):
+ logger.info(f'Get {team}/{sport} logo')
+ try:
+ d=logos.loc[team,sport]['LINK']
+ logger.debug(d)
+ return d
+ except KeyError as inst:
+ logger.warning(f"There is no LINK for {team}/{sport}")
+ return ''
+
+def make_name(ds,pack,logos):
+ logger.debug('Start make name')
+
+ fn=''
+ data={}
+ empty_sport=pack.iloc[0].name
+ if isinstance(ds['DATA'],str):
+ fn+=f"{ds['DATA'][6:]}{ds['DATA'][3:5]}{ds['DATA'][0:2]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ fn+=f"{ds['DATA'].year}{ds['DATA'].month:02}{ds['DATA'].day:02}"
+
+    # If there is no pack (sport design)
+ if ds['SPORT']!=empty_sport:
+ fn+=f"_{ds['SPORT']}"
+ data['sport']=ds['SPORT']
+ data['pack']=unc2uri(get_sport_logo(ds['SPORT'],pack))
+ else:
+ data['sport']=''
+ data['pack']=''
+ fn+=f'_{ds["LEAGUE"]}'
+
+    # If there are no teams
+ if pd.isna(ds['TEAM A']):
+ logger.info('No Team A present')
+ data['team_a']=''
+ data['team_a_logo']=''
+ data['team_a_logo_res']=''
+ else:
+ name = ds['TEAM A'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_a_logo_res']=name[2:]
+ data['team_a']=name[0]
+ data['team_a_logo']=unc2uri(get_team_logo(ds['TEAM A'],ds['SPORT'],logos))
+
+
+ if pd.isna(ds['TEAM B']):
+ logger.info('No Team B present')
+ data['team_b']=''
+ data['team_b_logo']=''
+ data['team_b_logo_res']=''
+ else:
+ name = ds['TEAM B'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_b_logo_res']=name[2:]
+ data['team_b']=name[0]
+ data['team_b_logo']=unc2uri(get_team_logo(ds['TEAM B'],ds['SPORT'],logos))
+
+ #CHANEL -> START/TRIUMPH
+ if pd.isna(ds['CHANEL']):
+ logger.debug('No Chanel is set')
+ pass
+ else:
+ logger.debug('Chanel is set '+ds['CHANEL'])
+ fn+=f"_{ds['CHANEL']}"
+
+
+ fn=translit(fn,reversed=True)
+ fn=fn.replace(' ','-')
+ fn=fn.replace("'",'')
+
+ data['outfile_name']=fn
+ data['league']=ds['LEAGUE']
+ if isinstance(ds['TIME'],str):
+ t=ds['TIME'].split(':')
+ # data['time']=':'.join(t[0:2])
+ data['time_h']= t[0]
+ data['time_m']= t[1]
+ elif isinstance(ds['TIME'],datetime.time):
+ data['time_h']= str(ds['TIME'].hour)
+ data['time_m']= str(ds['TIME'].minute)
+ logger.debug('time '+data['time_h']+':'+data['time_m'])
+ if isinstance(ds['DATA'],str):
+ d=ds['DATA'].split('.')
+ d=f"{int(d[0])} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][int(d[1])]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ d=f"{ds['DATA'].day} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][ds['DATA'].month]}"
+ data['data']=d
+
+ # logger.debug(data)
+ # logger.debug(fn)
+ logger.debug("End make name")
+
+ watch_list=[]
+ watch_list.append(send_job(data))
+ if ds['TRIPPLE']:
+ data['data']='сегодня'
+ data['outfile_name']=fn+'_Today'
+ watch_list.append(send_job(data))
+ data['data']='завтра'
+ data['outfile_name']=fn+'_Tomorrow'
+ watch_list.append(send_job(data))
+ pprint(watch_list)
+ return list(filter(None,watch_list))
+
+def send_job(data):
+ payload={}
+ payload["template"]={"src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
+ "composition": "pack",
+ "outputModule": "Start_h264",
+ "outputExt": "mp4"}
+ payload['actions']={
+ "postrender": [
+ {
+ "module": "@nexrender/action-encode",
+ "preset": "mp4",
+ "output": "encoded.mp4"
+ },
+ {
+ "module": "@nexrender/action-copy",
+ "input": "encoded.mp4",
+ "output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
+ }
+ ]
+ }
+
+ payload['assets']=[]
+
+    # DATE from the sheet, or "сегодня"/"завтра" ("today"/"tomorrow")
+    # Font size and position for the "сегодня" (today) text
+ if data['data'] == 'сегодня':
+ fontSize="105"
+ anchorPoint=[0,5]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['data']+'" font set to '+fontSize+'')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['data']+'" by '+str(anchorPoint)+' pixels')
+
+    # Font size and position for the "завтра" (tomorrow) text
+ elif data['data'] == 'завтра':
+ fontSize="115"
+ anchorPoint=[0,25]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['data']+'" font set to '+fontSize+'')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['data']+'" by '+str(anchorPoint)+' pixels')
+
+    # Font size and position for the date text
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "DATA",
+ "property": "Source Text",
+ "value": data['data']
+ })
+
+ if len(data['data'])<6:
+ fontSize="120"
+ anchorPoint=[0,20]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['data']+'" font set to '+fontSize+'')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['data']+'" by '+str(anchorPoint)+' pixels')
+
+    # Time
+ if len(data['time_h'])<2:
+ anchorPoint=[40,0]
+ payload['assets'].append({
+ "layerName": "TIME_H",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ payload['assets'].append({
+ "layerName": "TIME_M",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ payload['assets'].append({
+ "layerName": "TIME",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['time_h']+':'+data['time_m']+'" by '+str(anchorPoint)+' pixels')
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_H",
+ "property": "Source Text",
+ "value": data['time_h']
+ })
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_M",
+ "property": "Source Text",
+ "value": data['time_m']
+ })
+
+    # League
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "LEAGUE",
+ "property": "Source Text",
+ "value": data['league']
+ })
+
+    # League text font size
+ if len(data['league'])>16:
+ fontSize="73"
+ payload['assets'].append({
+ "layerName": "LEAGUE",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['league']+'" font set to '+fontSize+'')
+
+    # Sport
+ if data['sport']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "SPORT",
+ "property": "Source Text",
+ "value": data['sport']
+ })
+
+    # Team A
+ if data['team_a']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_A",
+ "property": "Source Text",
+ "value": data['team_a']
+ })
+
+    # Team B
+ if data['team_b']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_B",
+ "property": "Source Text",
+ "value": data['team_b']
+ })
+
+    # Team A logo
+ if data['team_a_logo']:
+ payload['assets'].append({
+ "src": data['team_a_logo'],
+ "type": "image",
+ "layerName": "TEAM_A_LOGO"
+ })
+
+ if data['team_a_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_a_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_A_LOGO"
+ })
+ logger.info(''+data['team_a']+' logo was resized to '+data['team_a_logo_res'][0]+'')
+
+
+    # Team B logo
+ if data['team_b_logo']:
+ payload['assets'].append({
+ "src": data['team_b_logo'],
+ "type": "image",
+ "layerName": "TEAM_B_LOGO"
+ })
+
+ if data['team_b_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_b_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_B_LOGO"
+ })
+ logger.info(''+data['team_b']+' logo was resized to '+data['team_b_logo_res'][0]+'')
+
+    # Top pack (sport design)
+ if data['pack']:
+ payload['assets'].append({
+ "src": data['pack'],
+ "type": "video",
+ "layerName": "TOP"
+ })
+
+ url='http://10.10.2.20:3000/api/v1/jobs'
+
+ r=requests.post(url,json=payload)
+ if r.status_code==200:
+ res=r.json()
+ # pprint(res)
+ uid=res['uid']
+ return {'uid':uid,'outname':data['outfile_name']}
+
+def unc2uri(unc):
+
+ from urllib.parse import urlparse
+ from pathlib import PureWindowsPath
+
+ p= urlparse(unc)
+ if len(p.scheme)>2 or not unc:
+ return unc
+ else:
+ p=PureWindowsPath(unc)
+ return p.as_uri()
+
+ # if unc[:2]=='\\\\':
+ # uri=f"file:{unc.replace('\\','/')}"
+ # else:
+ # uri=unc
+ # return uri
+
+
+#XXX
+if PLACEHOLDER:
+ send_job=send_job_dumb
+
+logger.info('Start!')  # Starting
+
+@bot.message_handler(commands=['ибаш','ibash'])
+def ibash(message):
+ bot.send_chat_action(message.chat.id,'typing')
+ if message.from_user.username:
+ user=message.from_user.username
+ else:
+ user='!'
+ bot.reply_to(message, "Ну что ж "+user+", давай попробуем НАИБАШИТЬ!")
+
+ bot.send_chat_action(message.chat.id,'upload_document')
+
+ osheet=load_osheet()
+ start=get_start(osheet)
+ pack=get_packs(osheet)
+ logos=get_logos(osheet)
+
+    # Remove previous jobs that have finished or failed
+ r=requests.get('http://10.10.2.20:3000/api/v1/jobs')
+ if r.status_code==200:
+ jobs=r.json()
+ s=[{'uid':i['uid'],'state':i['state']} for i in jobs]
+ for job in s:
+ if job['state'] in ('finished', 'error'):
+ requests.delete(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+
+ bot.send_chat_action(message.chat.id,'record_video')
+
+ watch_list=[]
+ for row in start.iterrows():
+ row=row[1]
+ watch_list+=make_name(row,pack,logos)
+ logger.info(f"Queued {len(watch_list)} jobs")
+
+ while watch_list:
+ bot.send_chat_action(message.chat.id,'record_video')
+ sleep(25)
+        for job in list(watch_list):  # iterate over a copy, items are removed inside the loop
+ #XXX
+ if PLACEHOLDER:
+ r=fake_get()
+ else:
+ r=requests.get(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+
+ if r.status_code==200 and r.json()['state'] =='finished':
+ watch_list.remove(job)
+ logger.info(f"{job['outname']}, {r.json()['state']}, {len(watch_list)} to go")
+ logger.debug(f"{job['uid']} - {r.json()['state']}")
+ elif r.status_code==200 and r.json()['state'] == 'error':
+ watch_list.remove(job)
+ logger.warning(f"{job}, {r.json()['state']}, {len(watch_list)} to go")
+ print('.',end="")
+
+bot.infinity_polling()
+
+logger.info('End!')  # Done
diff --git a/AF_script_test_3.py b/AF_script_test_3.py
new file mode 100644
index 0000000..fa48141
--- /dev/null
+++ b/AF_script_test_3.py
@@ -0,0 +1,555 @@
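+# AF_script_test_3.py — like AF_script_test_2.py plus per-case font-size and anchor-point
+# presets for the DATA layer, an optional CHANEL suffix in the output file name, and a
+# 25-second poll interval for the nexrender job states.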
+NAS_USER='aescript'
+NAS_PASS='@5j15SduIhP7'
+NAS_IP='edit.tvstart.ru'
+NAS_PORT='443'
+NAS_FILE='/team-folders/nexrender/Anons.osheet' #XXX
+
+import logging
+import logging.config
+from pprint import pprint
+from synology_drive_api.drive import SynologyDrive
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+import sys
+PLACEHOLDER = sys.platform=='win32'
+
+#XXX
+if PLACEHOLDER:
+ from random import random, choices
+
+ def send_job_dumb(data):
+ if random()<0.8:
+ uid=''.join(choices('abcdefghijklmnopqrstuvwxyz_',k=8))
+ return {'uid':uid,'outname':data['outfile_name']}
+
+ class fake_resp:
+        def __init__(self,state='queued',*kargs,**kwargs):
+            self.state=state
+            self.status_code=200  # assignment (was a no-op comparison)
+
+ def json(self):
+ return {'state':self.state}
+
+
+ def fake_get():
+        if random()<0.8:
+            return fake_resp()
+        elif random()<0.8:
+ return fake_resp('finished')
+ else:
+ return fake_resp('error')
+
+LOG_CONFIG={
+ 'version': 1,
+ 'handlers': {
+ 'telegram': {
+ 'class': 'telegram_handler.TelegramHandler',
+ 'level':'INFO',
+ 'token': '7830267871:AAHHDEGWxa2ZjGoCCBhIk0skWR6u3ISVRtg',
+ 'chat_id': '-4576902221',
+ 'formatter': 'telegram'
+ },
+ 'console':{
+ 'class':'logging.StreamHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'stream': 'ext://sys.stdout'
+ },
+ 'file':{
+ 'class':'logging.FileHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'encoding':'utf-8',
+ 'filename':'AF_script.log'
+ },
+ },
+ 'loggers': {
+ __name__: {
+ 'handlers': ['console','file','telegram'],
+ 'level': 'DEBUG'
+ }
+ },
+ 'formatters': {
+ 'telegram': {
+ 'class': 'telegram_handler.HtmlFormatter',
+ 'format': '%(levelname)s %(message)s',
+ 'use_emoji': "True"
+ },
+ 'simple':{
+ 'class': 'logging.Formatter',
+ 'format': '%(asctime)s %(levelname)-8s %(funcName)12s() - %(message)s',
+ 'datefmt': '%d.%m.%Y %H:%M:%S'
+
+ }
+ }
+
+}
+logging.config.dictConfig(LOG_CONFIG)
+logger = logging.getLogger(__name__)
+logger.handlers[2].formatter.use_emoji=True
+
+def load_osheet():
+ logger.debug('Get data')
+ synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP,NAS_PORT,https=True,dsm_version='7')
+
+ try:
+        logger.debug(synd.login())  # check that the session has been established
+ try:
+ logger.debug('Try to download sheet')
+ bio = synd.download_synology_office_file(NAS_FILE)
+ # logger.debug(bio)
+ logger.debug('Download Success')
+ return bio
+ except:
+ logger.exception('Download fails')
+ except:
+ logger.exception('Login error')
+
+def get_start(osheet):
+ logger.debug('Read Start page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='Start',header=1)
+        sheet=sheet[sheet['STATE']==False] # first filter: keep rows whose STATE is False (not processed yet)
+        logger.debug('Filter 1: STATE == False')
+        # logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'DATA','TIME','SPORT','LEAGUE'")
+ # sheet.dropna(subset=['DATA','TIME','SPORT','LEAGUE'], inplace=True)
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_packs(osheet):
+ logger.debug('Read SPORT page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='SPORT',header=0,index_col='SPORT')
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet[sheet.index.notna()]
+ except:
+ logger.exception('error while read excel sheet')
+ raise
+
+def get_logos(osheet):
+ logger.debug('Read TEAMS page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='TEAMS',header=0,index_col=[0,1])
+        logger.debug('First check')
+        # logger.debug(sheet)
+        logger.debug("Dropping rows with missing 'TEAM','LINK'")
+ # sheet.dropna(subset=['LINK'], inplace=True)
+ # logger.debug(sheet)
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_sport_logo(sport,pack):
+ logger.info('Get '+sport+' pack')
+ try:
+ d=pack.loc[sport]['LINK']
+ logger.debug(d)
+ if pd.isna(d):
+ logger.warning(f'There is no LINK for sport "{sport}"')
+ return ''
+ return d
+ except Exception as inst:
+ logger.exception("Couldn't get "+sport+" pack")
+ # logger.exception(inst)
+ return ''
+
+def get_team_logo(team,sport,logos):
+ logger.info(f'Get {team}/{sport} logo')
+ try:
+ d=logos.loc[team,sport]['LINK']
+ logger.debug(d)
+ return d
+ except KeyError as inst:
+ logger.warning(f"There is no LINK for {team}/{sport}")
+ return ''
+
+def make_name(ds,pack,logos):
+ logger.debug('Start make name')
+
+ fn=''
+ data={}
+ empty_sport=pack.iloc[0].name
+ if isinstance(ds['DATA'],str):
+ fn+=f"{ds['DATA'][6:]}{ds['DATA'][3:5]}{ds['DATA'][0:2]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ fn+=f"{ds['DATA'].year}{ds['DATA'].month:02}{ds['DATA'].day:02}"
+
+    # If there is no pack (sport design)
+ if ds['SPORT']!=empty_sport:
+ fn+=f"_{ds['SPORT']}"
+ data['sport']=ds['SPORT']
+ data['pack']=unc2uri(get_sport_logo(ds['SPORT'],pack))
+ else:
+ data['sport']=''
+ data['pack']=''
+ fn+=f'_{ds["LEAGUE"]}'
+
+    # If there are no teams
+ if pd.isna(ds['TEAM A']):
+ logger.info('No Team A present')
+ data['team_a']=''
+ data['team_a_logo']=''
+ data['team_a_logo_res']=''
+ else:
+ name = ds['TEAM A'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_a_logo_res']=name[2:]
+ data['team_a']=name[0]
+ data['team_a_logo']=unc2uri(get_team_logo(ds['TEAM A'],ds['SPORT'],logos))
+
+
+ if pd.isna(ds['TEAM B']):
+ logger.info('No Team B present')
+ data['team_b']=''
+ data['team_b_logo']=''
+ data['team_b_logo_res']=''
+ else:
+ name = ds['TEAM B'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_b_logo_res']=name[2:]
+ data['team_b']=name[0]
+ data['team_b_logo']=unc2uri(get_team_logo(ds['TEAM B'],ds['SPORT'],logos))
+
+ #CHANEL -> START/TRIUMPH
+ if pd.isna(ds['CHANEL']):
+ logger.debug('No Chanel is set')
+ pass
+ else:
+ logger.debug('Chanel is set '+ds['CHANEL'])
+ fn+=f"_{ds['CHANEL']}"
+
+
+ fn=translit(fn,reversed=True)
+ fn=fn.replace(' ','-')
+ fn=fn.replace("'",'')
+
+ data['outfile_name']=fn
+ data['league']=ds['LEAGUE']
+ if isinstance(ds['TIME'],str):
+ t=ds['TIME'].split(':')
+ # data['time']=':'.join(t[0:2])
+ data['time_h']= t[0]
+ data['time_m']= t[1]
+ elif isinstance(ds['TIME'],datetime.time):
+ data['time_h']= str(ds['TIME'].hour)
+ data['time_m']= str(ds['TIME'].minute)
+ logger.debug('time '+data['time_h']+':'+data['time_m'])
+ if isinstance(ds['DATA'],str):
+ d=ds['DATA'].split('.')
+ d=f"{int(d[0])} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][int(d[1])]}"
+ elif isinstance(ds['DATA'],datetime.date):
+ d=f"{ds['DATA'].day} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][ds['DATA'].month]}"
+ data['data']=d
+
+ # logger.debug(data)
+ # logger.debug(fn)
+ logger.debug("End make name")
+
+ watch_list=[]
+ watch_list.append(send_job(data))
+ if ds['TRIPPLE']:
+ data['data']='сегодня'
+ data['outfile_name']=fn+'_Today'
+ watch_list.append(send_job(data))
+ data['data']='завтра'
+ data['outfile_name']=fn+'_Tomorrow'
+ watch_list.append(send_job(data))
+ pprint(watch_list)
+ return list(filter(None,watch_list))
+
+def send_job(data):
+ payload={}
+ payload["template"]={"src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
+ "composition": "pack",
+ "outputModule": "Start_h264",
+ "outputExt": "mp4"}
+ payload['actions']={
+ "postrender": [
+ {
+ "module": "@nexrender/action-encode",
+ "preset": "mp4",
+ "output": "encoded.mp4"
+ },
+ {
+ "module": "@nexrender/action-copy",
+ "input": "encoded.mp4",
+ "output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
+ }
+ ]
+ }
+
+ payload['assets']=[]
+
+    # DATE from the sheet, or "сегодня"/"завтра" ("today"/"tomorrow")
+    # Font size and position for the "сегодня" (today) text
+ if data['data'] == 'сегодня':
+ fontSize="105"
+ anchorPoint=[0,5]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['data']+'" font set to '+fontSize+'')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['data']+'" by '+str(anchorPoint)+' pixels')
+
+    # Font size and position for the "завтра" (tomorrow) text
+ elif data['data'] == 'завтра':
+ fontSize="115"
+ anchorPoint=[0,25]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['data']+'" font set to '+fontSize+'')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['data']+'" by '+str(anchorPoint)+' pixels')
+
+    # Font size and position for the date text
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "DATA",
+ "property": "Source Text",
+ "value": data['data']
+ })
+
+ if len(data['data'])<6:
+ fontSize="120"
+ anchorPoint=[0,20]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['data']+'" font set to '+fontSize+'')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['data']+'" by '+str(anchorPoint)+' pixels')
+
+    # Time
+ if len(data['time_h'])<2:
+ anchorPoint=[40,0]
+ payload['assets'].append({
+ "layerName": "TIME_H",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ payload['assets'].append({
+ "layerName": "TIME_M",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ payload['assets'].append({
+ "layerName": "TIME",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ logger.info('Shifting the "'+data['time_h']+':'+data['time_m']+'" by '+str(anchorPoint)+' pixels')
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_H",
+ "property": "Source Text",
+ "value": data['time_h']
+ })
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_M",
+ "property": "Source Text",
+ "value": data['time_m']
+ })
+
+    # League
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "LEAGUE",
+ "property": "Source Text",
+ "value": data['league']
+ })
+
+    # League text font size
+ if len(data['league'])>16:
+ fontSize="73"
+ payload['assets'].append({
+ "layerName": "LEAGUE",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+ logger.info('For "'+data['league']+'" font set to '+fontSize+'')
+
+    # Sport
+ if data['sport']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "SPORT",
+ "property": "Source Text",
+ "value": data['sport']
+ })
+
+    # Team A
+ if data['team_a']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_A",
+ "property": "Source Text",
+ "value": data['team_a']
+ })
+
+    # Team B
+ if data['team_b']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_B",
+ "property": "Source Text",
+ "value": data['team_b']
+ })
+
+    # Team A logo
+ if data['team_a_logo']:
+ payload['assets'].append({
+ "src": data['team_a_logo'],
+ "type": "image",
+ "layerName": "TEAM_A_LOGO"
+ })
+
+ if data['team_a_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_a_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_A_LOGO"
+ })
+ logger.info(''+data['team_a']+' logo was resized to '+data['team_a_logo_res'][0]+'')
+
+
+    # Team B logo
+ if data['team_b_logo']:
+ payload['assets'].append({
+ "src": data['team_b_logo'],
+ "type": "image",
+ "layerName": "TEAM_B_LOGO"
+ })
+
+ if data['team_b_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_b_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_B_LOGO"
+ })
+ logger.info(''+data['team_b']+' logo was resized to '+data['team_b_logo_res'][0]+'')
+
+    # Top pack (sport design)
+ if data['pack']:
+ payload['assets'].append({
+ "src": data['pack'],
+ "type": "video",
+ "layerName": "TOP"
+ })
+
+ url='http://10.10.2.20:3000/api/v1/jobs'
+
+ r=requests.post(url,json=payload)
+ if r.status_code==200:
+ res=r.json()
+ # pprint(res)
+ uid=res['uid']
+ return {'uid':uid,'outname':data['outfile_name']}
+
+def unc2uri(unc):
+
+ from urllib.parse import urlparse
+ from pathlib import PureWindowsPath
+
+ p= urlparse(unc)
+ if len(p.scheme)>2 or not unc:
+ return unc
+ else:
+ p=PureWindowsPath(unc)
+ return p.as_uri()
+
+ # if unc[:2]=='\\\\':
+ # uri=f"file:{unc.replace('\\','/')}"
+ # else:
+ # uri=unc
+ # return uri
+
+
+#XXX
+if PLACEHOLDER:
+ send_job=send_job_dumb
+
+logger.info('Start!')  # Starting
+
+osheet=load_osheet()
+start=get_start(osheet)
+pack=get_packs(osheet)
+logos=get_logos(osheet)
+
+# Remove previous jobs that have finished or failed
+r=requests.get('http://10.10.2.20:3000/api/v1/jobs')
+if r.status_code==200:
+ jobs=r.json()
+ s=[{'uid':i['uid'],'state':i['state']} for i in jobs]
+ for job in s:
+ if job['state'] in ('finished', 'error'):
+ requests.delete(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+
+watch_list=[]
+for row in start.iterrows():
+ row=row[1]
+ watch_list+=make_name(row,pack,logos)
+logger.info(f"Queued {len(watch_list)} jobs")
+
+while watch_list:
+ sleep(25)
+    for job in list(watch_list):  # iterate over a copy, items are removed inside the loop
+ #XXX
+ if PLACEHOLDER:
+ r=fake_get()
+ else:
+ r=requests.get(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+
+ if r.status_code==200 and r.json()['state'] =='finished':
+ watch_list.remove(job)
+ logger.info(f"{job['outname']}, {r.json()['state']}, {len(watch_list)} to go")
+ logger.debug(f"{job['uid']} - {r.json()['state']}")
+ elif r.status_code==200 and r.json()['state'] == 'error':
+ watch_list.remove(job)
+ logger.warning(f"{job}, {r.json()['state']}, {len(watch_list)} to go")
+ print('.',end="")
+
+logger.info('End!')  # Done
diff --git a/AF_script_test_4.py b/AF_script_test_4.py
new file mode 100644
index 0000000..de35df0
--- /dev/null
+++ b/AF_script_test_4.py
@@ -0,0 +1,659 @@
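+# AF_script_test_4.py — Telegram-bot driven variant: the NAS is reached by IP (10.10.35.7:5001),
+# each row is flattened through make_data_dict() before make_job_dicts(), progress is reported
+# back to the chat via bot.send_message(), and the Telegram log handler is configured but not
+# attached to the logger.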
+NAS_USER='aescript'
+NAS_PASS='@5j15SduIhP7'
+NAS_IP='10.10.35.7'
+NAS_PORT='5001'
+NAS_FILE='/team-folders/nexrender/Anons.osheet' #XXX
+TOKEN='7830267871:AAHHDEGWxa2ZjGoCCBhIk0skWR6u3ISVRtg'
+GROUP_CHAT='-4576902221'
+import telebot
+import logging
+import logging.config
+from pprint import pprint
+from synology_drive_api.drive import SynologyDrive
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+import sys
+PLACEHOLDER = sys.platform=='win32'
+
+##test
+
+#XXX
+if PLACEHOLDER:
+ from random import random, choices
+
+ def send_job_dumb(data):
+ if random()<0.8:
+ uid=''.join(choices('abcdefghijklmnopqrstuvwxyz_',k=8))
+ return {'uid':uid,'outname':data['outfile_name']}
+
+ class fake_resp:
+        def __init__(self,state='queued',*kargs,**kwargs):
+            self.state=state
+            self.status_code=200  # assignment (was a no-op comparison)
+
+ def json(self):
+ return {'state':self.state}
+
+
+ def fake_get():
+        if random()<0.8:
+            return fake_resp()
+        elif random()<0.8:
+ return fake_resp('finished')
+ else:
+ return fake_resp('error')
+
+LOG_CONFIG={
+ 'version': 1,
+ 'handlers': {
+ 'telegram': {
+ 'class': 'telegram_handler.TelegramHandler',
+ 'level':'INFO',
+ 'token': TOKEN,
+ 'chat_id': GROUP_CHAT,
+ 'formatter': 'telegram'
+ },
+ 'console':{
+ 'class':'logging.StreamHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'stream': 'ext://sys.stdout'
+ },
+ 'file':{
+ 'class':'logging.FileHandler',
+ 'level':'DEBUG',
+ 'formatter': 'simple',
+ 'encoding':'utf-8',
+ 'filename':'AF_script.log'
+ },
+ },
+ #,'telegram'
+ 'loggers': {
+ __name__: {
+ 'handlers': ['console','file'],
+ 'level': 'DEBUG'
+ }
+ },
+ 'formatters': {
+ 'telegram': {
+ 'class': 'telegram_handler.HtmlFormatter',
+ 'format': '%(levelname)s %(message)s',
+ 'use_emoji': "True"
+ },
+ 'simple':{
+ 'class': 'logging.Formatter',
+ 'format': '%(asctime)s %(levelname)-8s %(funcName)12s() - %(message)s',
+ 'datefmt': '%d.%m.%Y %H:%M:%S'
+
+ }
+ }
+
+}
+logging.config.dictConfig(LOG_CONFIG)
+logger = logging.getLogger(__name__)
+#logger.handlers[2].formatter.use_emoji=True
+
+bot = telebot.TeleBot(TOKEN)
+telebot.logger.addHandler(logger.handlers[1])
+@bot.message_handler(commands=['help', 'start'])
+def send_welcome(message):
+ bot.send_chat_action(message.chat.id,'typing')
+ if message.from_user.username:
+ user=f" {message.from_user.username}"
+ else:
+ user='!'
+ sleep(1)
+    bot.reply_to(message, f"Привет{user}\n Я помогу тебе сделать Анонсы!\n Вот список команд, которые я могу выполнить:\n /ибаш - наибашу обработку и рендер!\n /харе - останавливает нах!")
+
+def load_osheet(message):
+ logger.debug('Get data')
+ synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP,NAS_PORT,https=True,dsm_version='7')
+
+ try:
+        logger.debug(synd.login())  # check that the session has been established
+ try:
+ logger.debug('Try to download sheet')
+ bio = synd.download_synology_office_file(NAS_FILE)
+ logger.debug('Download Success')
+ return bio
+ except:
+ logger.exception('Download fails')
+ bot.send_message(message.chat.id,f'Не удалось скачать таблицу',parse_mode=['html'])
+ except:
+ logger.exception('Login error')
+        bot.send_message(message.chat.id,'Не удалось авторизоваться',parse_mode=['html'])
+
+def get_start(osheet):
+ logger.debug('Read Start page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='Start',header=1)
+        sheet=sheet[sheet['STATE']==False] # keep rows whose STATE is False (not processed yet)
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_packs(osheet):
+ logger.debug('Read SPORT page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='SPORT',header=0,index_col='SPORT')
+ logger.debug('Parsing OK')
+ return sheet[sheet.index.notna()]
+ except:
+ logger.exception('error while read excel sheet')
+ raise
+
+def get_logos(osheet):
+ logger.debug('Read TEAMS page')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='TEAMS',header=0,index_col=[0,1])
+ logger.debug('Parsing OK')
+ return sheet
+ except:
+ logger.exception('error while read excel sheet')
+
+def get_sport_logo(sport,pack,message):
+ logger.info(f'Get {sport} pack')
+    bot.send_message(message.chat.id,f'Ищем оформления для {sport}',parse_mode='html')
+ try:
+ d=pack.loc[sport]['LINK']
+ logger.debug(d)
+ if pd.isna(d):
+ logger.warning(f'There is no LINK for sport "{sport}"')
+ return ''
+ return d
+ except:
+ logger.exception("Couldn't get "+sport+" pack")
+ return ''
+
+def get_team_logo(team,sport,logos,message):
+ logger.info(f'Get {team}/{sport} logo')
+    bot.send_message(message.chat.id,f'Поиск логотипа {sport}-{team}',parse_mode='html')
+ try:
+ d=logos.loc[team,sport]['LINK']
+ logger.debug(d)
+ return d
+ except KeyError as inst:
+ logger.warning(f"There is no LINK for {team}/{sport}")
+ return ''
+ except Exception:
+ logger.exception(f"Error while get {sport} pack")
+ return ''
+
+def make_data_dict(ds):
+ dd={}
+ dd['date']=ds['DATA']
+ dd['time']=ds['TIME']
+ dd['channel']=ds['CHANEL']
+ dd['sport']=ds['SPORT']
+ dd['league']=ds['LEAGUE']
+ dd['team_a']=ds['TEAM A']
+ dd['team_b']=ds['TEAM B']
+ dd['index']=ds.name
+ return dd
+
+def make_job_dicts(dd,pack,logos,message):
+# def make_name(ds,pack,logos):
+ logger.debug('Start make name')
+
+ fn=''
+ data={}
+ empty_sport=pack.iloc[0].name
+
+ #Дата и Время
+ if isinstance(dd['date'],str):
+ fn+=f"{dd['date'][6:]}{dd['date'][3:5]}{dd['date'][0:2]}"
+ elif isinstance(dd['date'],datetime.date):
+ fn+=f"{dd['date'].year}{dd['date'].month:02}{dd['date'].day:02}"
+
+ #Вид спорта и оформление
+ if dd['sport']!=empty_sport:
+ fn+=f"_{dd['sport']}"
+ data['sport']=dd['sport']
+ data['pack']=unc2uri(get_sport_logo(dd['sport'],pack,message))
+ else:
+ data['sport']=''
+ data['pack']=''
+
+ #Лига
+ if dd["league"][-1]=='.':
+ logger.debug('dot in league name!')
+ fn+=f'_{dd["league"][:-1]}'
+ data['league']=dd['league'][:-1]
+ else:
+ data['league']=dd['league']
+ fn+=f'_{dd["league"]}'
+
+ #Команды А и Б
+ if pd.isna(dd['team_a']):
+ logger.info('No Team A present')
+        bot.send_message(message.chat.id,'Нет команды А',parse_mode='html')
+ data['team_a']=''
+ data['team_a_logo']=''
+ data['team_a_logo_res']=''
+ else:
+ name = dd['team_a'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_a_logo_res']=name[2:]
+ data['team_a']=name[0]
+ data['team_a_logo']=unc2uri(get_team_logo(dd['team_a'],dd['sport'],logos,message))
+
+
+ if pd.isna(dd['team_b']):
+ logger.info('No Team B present')
+        bot.send_message(message.chat.id,'Нет команды Б',parse_mode='html')
+ data['team_b']=''
+ data['team_b_logo']=''
+ data['team_b_logo_res']=''
+ else:
+ name = dd['team_b'].split('#')
+ fn+=f"_{name[0]}"
+ data['team_b_logo_res']=name[2:]
+ data['team_b']=name[0]
+ data['team_b_logo']=unc2uri(get_team_logo(dd['team_b'],dd['sport'],logos,message))
+
+ #CHANEL -> START/TRIUMPH
+ if pd.isna(dd['channel']):
+ logger.debug('No Channel is set')
+ pass
+ else:
+ logger.debug('Channel is set '+dd['channel'])
+ fn+=f"_{dd['channel']}"
+
+
+ fn=translit(fn,reversed=True)
+ fn=fn.replace(' ','-')
+ fn=fn.replace("'",'')
+ data['outfile_name']=fn
+
+ if isinstance(dd['time'],str):
+ t=dd['time'].split(':')
+ # data['time']=':'.join(t[0:2])
+ data['time_h']= t[0]
+ data['time_m']= t[1]
+ elif isinstance(dd['time'],datetime.time):
+ data['time_h']= str(dd['time'].hour)
+ data['time_m']= str(dd['time'].minute)
+ logger.debug('time '+data['time_h']+':'+data['time_m'])
+ if isinstance(dd['date'],str):
+ d=dd['date'].split('.')
+ d=f"{int(d[0])} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][int(d[1])]}"
+ elif isinstance(dd['date'],datetime.date):
+ d=f"{dd['date'].day} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][dd['date'].month]}"
+ data['data']=d
+
+ # logger.debug(data)
+ # logger.debug(fn)
+ logger.debug("End make name")
+
+ watch_list=[]
+ watch_list.append(send_job(data,message))
+ if True:
+ data['data']='сегодня'
+ data['outfile_name']=fn+'_Today'
+ watch_list.append(send_job(data,message))
+ data['data']='завтра'
+ data['outfile_name']=fn+'_Tomorrow'
+ watch_list.append(send_job(data,message))
+ pprint(watch_list)
+ return list(filter(None,watch_list))
+
+def send_job(data,message):
+ payload={}
+ payload["template"]={"src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
+ "composition": "pack",
+ "outputModule": "Start_h264",
+ "outputExt": "mp4"}
+ payload['actions']={
+ "postrender": [
+ {
+ "module": "@nexrender/action-encode",
+ "preset": "mp4",
+ "output": "encoded.mp4"
+ },
+ {
+ "module": "@nexrender/action-copy",
+ "input": "encoded.mp4",
+ "output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
+ }
+ ]
+ }
+
+ payload['assets']=[]
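+    # Explanatory note: this is a regular nexrender job body — 'template' points at the AE project
+    # and composition to render, 'assets' (filled in below) overrides layer text/images, and the
+    # postrender actions re-encode the result and copy it onto the network share.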
+
+ #ДАТА из файла и "сегодня"/"завтра"
+ #Размер и положение текста "Сегодня"
+ if data['data'] == 'сегодня':
+ fontSize="105"
+ anchorPoint=[0,5]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+        logger.info(f'For "{data["data"]}" font set to {fontSize}')
+        bot.send_message(message.chat.id,f'Для "{data["data"]}" размер шрифта установлен {fontSize}',parse_mode='html')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+        logger.info(f'Shifting the "{data["data"]}" by {anchorPoint} pixels')
+        bot.send_message(message.chat.id,f'Сдвигаем "{data["data"]}" на {anchorPoint} пикселей',parse_mode='html')
+
+ # Размер и положение текста "Завтра"
+ elif data['data'] == 'завтра':
+ fontSize="115"
+ anchorPoint=[0,25]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+        logger.info(f'For "{data["data"]}" font set to {fontSize}')
+        bot.send_message(message.chat.id,f'Для "{data["data"]}" размер шрифта установлен {fontSize}',parse_mode='html')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+        logger.info(f'Shifting the "{data["data"]}" by {anchorPoint} pixels')
+        bot.send_message(message.chat.id,f'Сдвигаем "{data["data"]}" на {anchorPoint} пикселей',parse_mode='html')
+
+ # Размер и положение текста "Даты"
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "DATA",
+ "property": "Source Text",
+ "value": data['data']
+ })
+
+ if len(data['data'])<6:
+ fontSize="120"
+ anchorPoint=[0,20]
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+        logger.info(f'For "{data["data"]}" font set to {fontSize}')
+        bot.send_message(message.chat.id,f'Для "{data["data"]}" размер шрифта установлен {fontSize}',parse_mode='html')
+ payload['assets'].append({
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+        logger.info(f'Shifting the "{data["data"]}" by {anchorPoint} pixels')
+        bot.send_message(message.chat.id,f'Сдвигаем "{data["data"]}" на {anchorPoint} пикселей',parse_mode='html')
+
+ #Время
+ if len(data['time_h'])<2:
+ anchorPoint=[40,0]
+ payload['assets'].append({
+ "layerName": "TIME_H",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ payload['assets'].append({
+ "layerName": "TIME_M",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+ payload['assets'].append({
+ "layerName": "TIME",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchorPoint
+ })
+        logger.info(f'Shifting the "{data["time_h"]}:{data["time_m"]}" by {anchorPoint} pixels')
+        bot.send_message(message.chat.id,f'Сдвигаем "{data["time_h"]}:{data["time_m"]}" на {anchorPoint} пикселей',parse_mode='html')
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_H",
+ "property": "Source Text",
+ "value": data['time_h']
+ })
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TIME_M",
+ "property": "Source Text",
+ "value": data['time_m']
+ })
+
+ #Лига
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "LEAGUE",
+ "property": "Source Text",
+ "value": data['league']
+ })
+
+ #Размер текста "Лиги"
+ if len(data['league'])>16:
+ fontSize="73"
+ payload['assets'].append({
+ "layerName": "LEAGUE",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": fontSize
+ })
+        logger.info(f'For "{data["league"]}" font set to {fontSize}')
+        bot.send_message(message.chat.id,f'Для "{data["league"]}" размер шрифта установлен {fontSize}',parse_mode='html')
+
+ #Спорт
+ if data['sport']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "SPORT",
+ "property": "Source Text",
+ "value": data['sport']
+ })
+
+ #Команда А
+ if data['team_a']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_A",
+ "property": "Source Text",
+ "value": data['team_a']
+ })
+
+ #Команда Б
+ if data['team_b']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "TEAM_B",
+ "property": "Source Text",
+ "value": data['team_b']
+ })
+
+ #Логотип А
+ if data['team_a_logo']:
+ payload['assets'].append({
+ "src": data['team_a_logo'],
+ "type": "image",
+ "layerName": "TEAM_A_LOGO"
+ })
+
+ if data['team_a_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_a_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_A_LOGO"
+ })
+        logger.info(f'{data["team_a"]} logo was resized to {data["team_a_logo_res"][0]}')
+        bot.send_message(message.chat.id,f'{data["team_a"]} масштабирован под {data["team_a_logo_res"][0]} пикселей',parse_mode='html')
+
+
+ #Логотип Б
+ if data['team_b_logo']:
+ payload['assets'].append({
+ "src": data['team_b_logo'],
+ "type": "image",
+ "layerName": "TEAM_B_LOGO"
+ })
+
+ if data['team_b_logo_res']:
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": "if (width > height) {max_size = width;} else {max_size = height;} var real_size = "+data['team_b_logo_res'][0]+"/max_size*100;[real_size,real_size]",
+ "layerName": "TEAM_B_LOGO"
+ })
+        logger.info(f'{data["team_b"]} logo was resized to {data["team_b_logo_res"][0]}')
+        bot.send_message(message.chat.id,f'{data["team_b"]} масштабирован под {data["team_b_logo_res"][0]} пикселей',parse_mode='html')
+
+    #Верхнее оформление
+ if data['pack']:
+ payload['assets'].append({
+ "src": data['pack'],
+ "type": "video",
+ "layerName": "TOP"
+ })
+
+ url='http://10.10.2.20:3000/api/v1/jobs'
+
+ r=requests.post(url,json=payload)
+ if r.status_code==200:
+ res=r.json()
+ # pprint(res)
+ uid=res['uid']
+ return {'uid':uid,'outname':data['outfile_name']}
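+    # Any non-200 response falls through here, so the function returns None and the caller
+    # (make_job_dicts) drops it via filter(None, ...).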
+
+def unc2uri(unc):
+ logger.debug('Start')
+ from urllib.parse import urlparse
+ from pathlib import PureWindowsPath
+ try:
+ p= urlparse(unc)
+ if len(p.scheme)>2 or not unc:
+ return unc
+ else:
+ p=PureWindowsPath(unc)
+ return p.as_uri()
+ except:
+        logger.exception('error while parsing url')
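+# Illustrative behaviour of unc2uri(): a UNC path like r'\\10.10.35.3\edit\logo.png' becomes
+# 'file://10.10.35.3/edit/logo.png' via PureWindowsPath.as_uri(), while values that already carry
+# a scheme (http://, file://) or an empty string are returned unchanged.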
+
+#XXX
+if PLACEHOLDER:
+ send_job=send_job_dumb
+
+logger.info('Start!') # Начинаем
+
+@bot.message_handler(commands=['чёкак','status'])
+def status(message):
+    logger.info(f'Status requested by {message.from_user.username}')
+ r=requests.get('http://10.10.2.20:3000/api/v1/jobs')
+ if r.status_code==200:
+ jobs=r.json()
+ s=[{'uid':i['uid'],'state':i['state']} for i in jobs]
+ queued=0
+ if s :
+ for job in s:
+                if job['state'] in ('queued',):
+ queued+=1
+ t=requests.get(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+ #bot.send_message(message.chat.id,f'{job['uid']} - {t.json()['state']}')
+ pprint(t)
+            logger.info(f"{queued} queued")
+ bot.send_message(message.chat.id,f"В очереди {queued}")
+ else:
+ logger.info(f"no queued jobs")
+ bot.send_message(message.chat.id,"Нет задач в очереди")
+
+@bot.message_handler(commands=['харе','stop'])
+def stop(message):
+ r=requests.get('http://10.10.2.20:3000/api/v1/jobs')
+ if r.status_code==200:
+ jobs=r.json()
+ s=[{'uid':i['uid'],'state':i['state']} for i in jobs]
+ queued=0
+ if s :
+ for job in s:
+ if job['state'] in ('queued', 'picked'):
+ requests.delete(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+ queued+=1
+ else:
+ requests.delete(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+ logger.info(f"Cancelled {queued} jobs by {message.from_user.username}")
+            bot.send_message(message.chat.id,f"Отменено {queued}")
+ else:
+ logger.info(f"{message.from_user.username} requested job cancel but No jobs to cancel")
+ bot.send_message(message.chat.id,"Нет задач для отмены")
+
+@bot.message_handler(commands=['ибаш','ibash'])
+def ibash(message):
+ logger.info(f'Starting jobs for {message.from_user.username}')
+ bot.send_chat_action(message.chat.id,'typing')
+ if message.from_user.username:
+ user=message.from_user.username
+ else:
+ user='!'
+    bot.send_message(message.chat.id, f"Ну что ж {user}, давай попробуем \nНАИБАШИТЬ!!!",parse_mode='html')
+
+ bot.send_chat_action(message.chat.id,'upload_document')
+
+ osheet=load_osheet(message)
+ start=get_start(osheet)
+ pack=get_packs(osheet)
+ logos=get_logos(osheet)
+
+    #Удаляем прошлые задания, которые закончились или завершились с ошибкой
+ r=requests.get('http://10.10.2.20:3000/api/v1/jobs')
+ if r.status_code==200:
+ jobs=r.json()
+ s=[{'uid':i['uid'],'state':i['state']} for i in jobs]
+ for job in s:
+ if job['state'] in ('finished', 'error'):
+ requests.delete(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+
+ bot.send_chat_action(message.chat.id,'record_video')
+
+ watch_list=[]
+ for i,row in start.iterrows():
+ dd=make_data_dict(row)
+ watch_list+=make_job_dicts(dd,pack,logos,message)
+ logger.info(f"Queued {len(watch_list)} jobs")
+ bot.send_message(message.chat.id,f"В очереди {len(watch_list)} задач")
+
+ while watch_list:
+ bot.send_chat_action(message.chat.id,'record_video')
+ sleep(25)
+        for job in watch_list[:]: # копия списка для итерации, т.к. элементы удаляются внутри цикла
+ #XXX
+ if PLACEHOLDER:
+ r=fake_get()
+ else:
+ r=requests.get(f"http://10.10.2.20:3000/api/v1/jobs/{job['uid']}")
+ logger.debug(r.text)
+ if r.status_code==200 and r.json()['state'] =='finished':
+ watch_list.remove(job)
+ logger.info(f"{job['outname']}, {r.json()['state']}, {len(watch_list)} to go")
+                bot.send_message(message.chat.id,f"{job['outname']}, готов, {len(watch_list)} осталось выполнить",parse_mode='html')
+ logger.debug(f"{job['uid']} - {r.json()['state']}")
+ elif r.status_code==200 and r.json()['state'] == 'error':
+ watch_list.remove(job)
+ logger.warning(f"{job}, {r.json()['state']}, {len(watch_list)} to go")
+                bot.send_message(message.chat.id,f"!!!{job}, {r.json()['state']}, {len(watch_list)} осталось выполнить",parse_mode='html')
+ #print('.',end="")
+    bot.send_message(message.chat.id,'Пойду спать :)')
+
+bot.infinity_polling()
+
+logger.info('End!') # Заканчиваем
+
+
diff --git a/AF_script_test_5.1.py b/AF_script_test_5.1.py
new file mode 100644
index 0000000..66eeaf3
--- /dev/null
+++ b/AF_script_test_5.1.py
@@ -0,0 +1,927 @@
+import os
+from dotenv import load_dotenv
+import logging
+import logging.config
+from pprint import pprint
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+import sys
+from urllib.parse import urlparse
+from pathlib import PureWindowsPath
+import telebot
+from telebot import types
+from synology_drive_api.drive import SynologyDrive
+from flask import Flask, jsonify
+import threading
+from functools import wraps
+
+# Загрузка переменных окружения (до создания Flask-приложения, чтобы FLASK_SECRET был доступен)
+load_dotenv('AF_environment.env')
+
+# Инициализация Flask приложения для панели мониторинга
+flask_app = Flask(__name__)
+flask_app.config['SECRET_KEY'] = os.getenv('FLASK_SECRET', 'default-secret-key')
+
+class Monitoring:
+ """Класс для сбора статистики и мониторинга"""
+ def __init__(self):
+ self.jobs_history = []
+ self.system_stats = {
+ 'total_jobs': 0,
+ 'successful_jobs': 0,
+ 'failed_jobs': 0,
+ 'active_jobs': 0,
+ 'users': {}
+ }
+
+ def add_job(self, job_data):
+ """Добавление информации о новой задаче"""
+ self.jobs_history.append(job_data)
+ self.system_stats['total_jobs'] += 1
+ self.system_stats['active_jobs'] += 1
+
+ user_id = job_data.get('user_id')
+ if user_id:
+ if user_id not in self.system_stats['users']:
+ self.system_stats['users'][user_id] = {
+ 'total_jobs': 0,
+ 'successful_jobs': 0,
+ 'failed_jobs': 0
+ }
+ self.system_stats['users'][user_id]['total_jobs'] += 1
+
+ def job_completed(self, job_id, success=True, user_id=None):
+ """Обновление статуса завершенной задачи"""
+ self.system_stats['active_jobs'] -= 1
+
+ if success:
+ self.system_stats['successful_jobs'] += 1
+ else:
+ self.system_stats['failed_jobs'] += 1
+
+ if user_id and user_id in self.system_stats['users']:
+ if success:
+ self.system_stats['users'][user_id]['successful_jobs'] += 1
+ else:
+ self.system_stats['users'][user_id]['failed_jobs'] += 1
+
+ # Обновляем статус в истории
+ for job in self.jobs_history:
+ if job.get('job_id') == job_id:
+ job['status'] = 'completed' if success else 'failed'
+ job['completed_at'] = datetime.datetime.now()
+ break
+
+ def get_stats(self):
+ """Получение текущей статистики"""
+ return self.system_stats
+
+ def get_recent_jobs(self, limit=10):
+ """Получение последних задач"""
+ return self.jobs_history[-limit:] if self.jobs_history else []
+
+ def get_user_stats(self, user_id):
+ """Получение статистики по конкретному пользователю"""
+ return self.system_stats['users'].get(user_id, {
+ 'total_jobs': 0,
+ 'successful_jobs': 0,
+ 'failed_jobs': 0
+ })
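+# Explanatory note: a single Monitoring instance (created in the __main__ block) is shared between
+# the Telegram bot (JobManager) and the Flask /admin endpoints; the stats live in memory only and
+# reset when the process restarts.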
+
+class Config:
+ """Класс для работы с конфигурацией приложения"""
+ def __init__(self):
+ self.nas_user = os.getenv('NAS_USER')
+ self.nas_pass = os.getenv('NAS_PASS')
+ self.nas_ip = os.getenv('NAS_IP')
+ self.nas_port = os.getenv('NAS_PORT')
+ self.nas_file = os.getenv('NAS_FILE')
+ self.token = os.getenv('TELEGRAM_TOKEN')
+ self.group_chat = os.getenv('TELEGRAM_GROUP_CHAT')
+ self.nexrender_url = os.getenv('NEXRENDER_URL')
+ self.admin_password = os.getenv('ADMIN_PASSWORD', 'admin123')
+ self._validate_config()
+
+ def _validate_config(self):
+ """Проверка наличия обязательных переменных окружения"""
+ required_vars = {
+ 'NAS_USER': self.nas_user,
+ 'NAS_PASS': self.nas_pass,
+ 'NAS_IP': self.nas_ip,
+ 'TELEGRAM_TOKEN': self.token,
+ 'NEXRENDER_URL': self.nexrender_url
+ }
+ missing = [k for k, v in required_vars.items() if not v]
+ if missing:
+ raise ValueError(f"Отсутствуют обязательные переменные окружения: {', '.join(missing)}")
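+# Illustrative AF_environment.env layout for the variables read above (placeholder values only):
+#   NAS_USER=drive_user
+#   NAS_PASS=********
+#   NAS_IP=nas.example.local
+#   NAS_PORT=443
+#   NAS_FILE=/path/to/Anons.osheet
+#   TELEGRAM_TOKEN=123456:ABC...
+#   TELEGRAM_GROUP_CHAT=-1001234567890
+#   NEXRENDER_URL=http://<nexrender-host>:3000/api/v1/jobs
+#   FLASK_SECRET=change-me
+#   ADMIN_PASSWORD=change-me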
+
+class JobManager:
+ """Основной класс для управления задачами рендеринга"""
+ def __init__(self, config, monitoring):
+ self.config = config
+ self.monitoring = monitoring
+ self.bot = telebot.TeleBot(config.token)
+ self.PLACEHOLDER = sys.platform == 'win32'
+ self.setup_logging()
+ self.setup_handlers()
+
+ if self.PLACEHOLDER:
+ self._init_placeholders()
+
+ def _init_placeholders(self):
+ """Инициализация заглушек для тестирования"""
+ from random import random, choices
+
+        def send_job_dumb(data, message=None):
+ if random() < 0.8:
+ uid = ''.join(choices('abcdefghijklmnopqrstuvwxyz_', k=8))
+ return {'uid': uid, 'outname': data['outfile_name']}
+ return None
+
+ class FakeResp:
+ def __init__(self, state='queued', *args, **kwargs):
+ self.state = state
+ self.status_code = 200
+
+ def json(self):
+ return {'state': self.state}
+
+ def fake_get(*args, **kwargs):
+ rand = random()
+ if rand < 0.8:
+ return FakeResp()
+ elif rand < 0.95:
+ return FakeResp('finished')
+ else:
+ return FakeResp('error')
+
+        self._send_job_real = self.send_job
+        self._send_job_dumb = send_job_dumb  # отдельная ссылка для ранней проверки в send_job()
+        self.send_job = send_job_dumb
+        self._fake_get = fake_get
+
+ def setup_logging(self):
+ """Настройка системы логирования"""
+ LOG_CONFIG = {
+ 'version': 1,
+ 'formatters': {
+ 'detailed': {
+ 'format': '%(asctime)s %(levelname)-8s %(name)-15s %(message)s',
+ 'datefmt': '%Y-%m-%d %H:%M:%S'
+ }
+ },
+ 'handlers': {
+ 'console': {
+ 'class': 'logging.StreamHandler',
+ 'level': 'INFO',
+ 'formatter': 'detailed'
+ },
+ 'file': {
+ 'class': 'logging.FileHandler',
+ 'filename': 'af_bot.log',
+ 'mode': 'a',
+ 'level': 'DEBUG',
+ 'formatter': 'detailed'
+ }
+ },
+ 'loggers': {
+ '': {
+ 'handlers': ['console', 'file'],
+ 'level': 'DEBUG',
+ 'propagate': True
+ }
+ }
+ }
+ logging.config.dictConfig(LOG_CONFIG)
+ self.logger = logging.getLogger(__name__)
+
+ def setup_handlers(self):
+ """Настройка обработчиков команд Telegram"""
+ @self.bot.message_handler(commands=['start', 'help', 'menu'])
+ def send_welcome(message):
+ self.show_main_menu(message)
+
+ @self.bot.message_handler(func=lambda message: True)
+ def handle_text(message):
+ if message.text == '📊 Статистика':
+ self.show_stats(message)
+ elif message.text == '🔄 Создать анонс':
+ self.start_ibash(message)
+ elif message.text == '❌ Отменить задачи':
+ self.cancel_jobs(message)
+ elif message.text == '👤 Моя статистика':
+ self.show_user_stats(message)
+ else:
+ self.bot.reply_to(message, "Используйте меню для навигации")
+
+ @self.bot.callback_query_handler(func=lambda call: True)
+ def handle_callback(call):
+ if call.data == 'ibash_all':
+ self.process_ibash(call.message, all_announcements=True)
+ elif call.data == 'ibash_new':
+ self.process_ibash(call.message, all_announcements=False)
+ elif call.data == 'cancel':
+ self.bot.edit_message_text(
+ "Действие отменено",
+ call.message.chat.id,
+ call.message.message_id
+ )
+ self.show_main_menu(call.message)
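+        # Caveat: for inline-button callbacks, call.message.from_user is the bot account rather
+        # than the person who pressed the button, so the per-user stats recorded in process_ibash()
+        # are attributed to the bot for actions started this way.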
+
+ def show_main_menu(self, message):
+ """Отображение главного меню с кнопками"""
+ markup = types.ReplyKeyboardMarkup(
+ row_width=2,
+ resize_keyboard=True,
+ one_time_keyboard=False
+ )
+
+ buttons = [
+ types.KeyboardButton('🔄 Создать анонс'),
+ types.KeyboardButton('📊 Статистика'),
+ types.KeyboardButton('👤 Моя статистика'),
+ types.KeyboardButton('❌ Отменить задачи')
+ ]
+
+ markup.add(*buttons)
+
+ self.bot.send_message(
+ message.chat.id,
+ "📱 *Главное меню*:\nВыберите действие:",
+ reply_markup=markup,
+ parse_mode='Markdown'
+ )
+
+ def show_stats(self, message):
+ """Отображение статистики системы"""
+ stats = self.monitoring.get_stats()
+ recent_jobs = self.monitoring.get_recent_jobs(5)
+
+ stats_text = (
+ "📈 *Статистика системы*\n\n"
+ f"• Всего задач: {stats['total_jobs']}\n"
+ f"• Успешных: {stats['successful_jobs']}\n"
+ f"• Неудачных: {stats['failed_jobs']}\n"
+ f"• Активных: {stats['active_jobs']}\n\n"
+ "⏱ *Последние задачи*:\n"
+ )
+
+ for job in recent_jobs:
+ status_icon = '✅' if job.get('status') == 'completed' else '❌' if job.get('status') == 'failed' else '🔄'
+ stats_text += f"{status_icon} {job.get('name', 'N/A')} ({job.get('user', 'system')})\n"
+
+ self.bot.send_message(
+ message.chat.id,
+ stats_text,
+ parse_mode='Markdown'
+ )
+
+ def show_user_stats(self, message):
+ """Отображение статистики пользователя"""
+ user_id = message.from_user.id
+ username = message.from_user.username or message.from_user.first_name
+ user_stats = self.monitoring.get_user_stats(user_id)
+
+        if user_stats['total_jobs'] > 0:
+            success_rate = f"{user_stats['successful_jobs'] / user_stats['total_jobs'] * 100:.1f}%"
+        else:
+            success_rate = "0%"
+
+        stats_text = (
+            f"👤 *Ваша статистика* ({username})\n\n"
+            f"• Всего задач: {user_stats['total_jobs']}\n"
+            f"• Успешных: {user_stats['successful_jobs']}\n"
+            f"• Неудачных: {user_stats['failed_jobs']}\n"
+            f"• Процент успеха: {success_rate}"
+        )
+
+ self.bot.send_message(
+ message.chat.id,
+ stats_text,
+ parse_mode='Markdown'
+ )
+
+ def start_ibash(self, message):
+ """Начало процесса создания анонсов с интерактивным меню"""
+ self.logger.info(f"Start ibash requested by {message.from_user.username}")
+
+ markup = types.InlineKeyboardMarkup()
+ markup.row(
+ types.InlineKeyboardButton("Все анонсы", callback_data="ibash_all"),
+ types.InlineKeyboardButton("Только новые", callback_data="ibash_new")
+ )
+ markup.row(types.InlineKeyboardButton("Отмена", callback_data="cancel"))
+
+ self.bot.send_message(
+ message.chat.id,
+ "🔧 *Создание анонсов*\n\nВыберите тип обработки:",
+ reply_markup=markup,
+ parse_mode='Markdown'
+ )
+
+ def process_ibash(self, message, all_announcements=False):
+ """Обработка создания анонсов"""
+ user_id = message.from_user.id
+ username = message.from_user.username or message.from_user.first_name
+
+ self.bot.send_chat_action(message.chat.id, 'typing')
+
+ try:
+ # Загрузка данных
+ osheet = self.load_osheet(message)
+ start = self.get_sheet_data(osheet, 'Start', header=1)
+
+ if not all_announcements:
+ start = start[start['STATE'] == False] # Только новые анонсы
+
+ pack = self.get_sheet_data(osheet, 'SPORT', header=0, index_col='SPORT')
+ pack = pack[pack.index.notna()]
+ logos = self.get_sheet_data(osheet, 'TEAMS', header=0, index_col=[0, 1])
+
+ # Очистка старых задач
+ self.cleanup_old_jobs()
+
+ # Создание задач
+ self.bot.send_chat_action(message.chat.id, 'record_video')
+ watch_list = []
+
+ for i, row in start.iterrows():
+ dd = self.make_data_dict(row)
+ jobs = self.make_job_dicts(dd, pack, logos, message)
+
+ for job in jobs:
+ if job:
+ job_data = {
+ 'job_id': job['uid'],
+ 'name': job['outname'],
+ 'user_id': user_id,
+ 'user': username,
+ 'status': 'started',
+ 'started_at': datetime.datetime.now()
+ }
+ self.monitoring.add_job(job_data)
+ watch_list.append(job)
+
+ self.logger.info(f"В очереди {len(watch_list)} задач")
+ self.bot.send_message(
+ message.chat.id,
+ f"🚀 Запущено {len(watch_list)} задач на рендеринг",
+ parse_mode='Markdown'
+ )
+
+ # Отслеживание выполнения
+ self.track_jobs(message, watch_list, user_id)
+
+ except Exception as e:
+ self.logger.exception("Ошибка в process_ibash")
+ self.bot.send_message(
+ message.chat.id,
+ f"❌ Ошибка при создании анонсов: {str(e)}",
+ parse_mode='Markdown'
+ )
+
+ def track_jobs(self, message, watch_list, user_id):
+ """Отслеживание выполнения задач"""
+ while watch_list:
+ self.bot.send_chat_action(message.chat.id, 'record_video')
+ sleep(25)
+
+ for job in watch_list[:]:
+ try:
+ if self.PLACEHOLDER:
+ r = self._fake_get()
+ else:
+ r = requests.get(f"{self.config.nexrender_url}/{job['uid']}")
+
+ if r.status_code == 200:
+ state = r.json()['state']
+ if state == 'finished':
+ watch_list.remove(job)
+ self.monitoring.job_completed(job['uid'], True, user_id)
+
+ self.logger.info(f"{job['outname']} готов, осталось {len(watch_list)}")
+ self.bot.send_message(
+ message.chat.id,
+ f"✅ *{job['outname']}* готов\nОсталось задач: {len(watch_list)}",
+ parse_mode='Markdown'
+ )
+
+ elif state == 'error':
+ watch_list.remove(job)
+ self.monitoring.job_completed(job['uid'], False, user_id)
+
+ self.logger.warning(f"{job['outname']} завершился с ошибкой")
+ self.bot.send_message(
+ message.chat.id,
+ f"❌ *{job['outname']}* завершился с ошибкой",
+ parse_mode='Markdown'
+ )
+
+ except Exception as e:
+ self.logger.error(f"Ошибка проверки статуса задачи {job['uid']}: {e}")
+
+ self.bot.send_message(
+ message.chat.id,
+ "🎉 Все задачи завершены!",
+ reply_markup=types.ReplyKeyboardRemove(),
+ parse_mode='Markdown'
+ )
+ self.show_main_menu(message)
+
+ def cleanup_old_jobs(self):
+ """Очистка завершенных задач"""
+ try:
+ r = requests.get(self.config.nexrender_url)
+ if r.status_code == 200:
+ jobs = r.json()
+ for job in jobs:
+ if job['state'] in ('finished', 'error'):
+ requests.delete(f"{self.config.nexrender_url}/{job['uid']}")
+ except Exception as e:
+ self.logger.error(f"Ошибка очистки старых задач: {e}")
+
+ def cancel_jobs(self, message):
+ """Отмена всех активных задач"""
+ try:
+ r = requests.get(self.config.nexrender_url)
+ if r.status_code == 200:
+ jobs = r.json()
+ cancelled = 0
+
+ for job in jobs:
+ if job['state'] in ('queued', 'picked'):
+ requests.delete(f"{self.config.nexrender_url}/{job['uid']}")
+ cancelled += 1
+
+ self.logger.info(f"Отменено {cancelled} задач")
+ self.bot.send_message(
+ message.chat.id,
+ f"⏹ Отменено {cancelled} активных задач",
+ parse_mode='Markdown'
+ )
+ else:
+ self.bot.send_message(
+ message.chat.id,
+ "⚠ Не удалось получить список задач для отмены",
+ parse_mode='Markdown'
+ )
+ except Exception as e:
+ self.logger.error(f"Ошибка отмены задач: {e}")
+ self.bot.send_message(
+ message.chat.id,
+ f"❌ Ошибка при отмене задач: {str(e)}",
+ parse_mode='Markdown'
+ )
+
+ def load_osheet(self, message):
+ """Загрузка файла с Synology Drive"""
+ self.logger.debug('Получение данных')
+ try:
+ synd = SynologyDrive(
+ self.config.nas_user,
+ self.config.nas_pass,
+ self.config.nas_ip,
+ self.config.nas_port,
+ https=True,
+ dsm_version='7'
+ )
+
+ self.logger.debug(synd.login()) # Проверка сессии
+ try:
+ self.logger.debug('Попытка загрузки таблицы')
+ bio = synd.download_synology_office_file(self.config.nas_file)
+ self.logger.debug('Успешная загрузка')
+ return bio
+ except Exception as e:
+ self.logger.exception('Ошибка загрузки')
+ self.bot.send_message(
+ message.chat.id,
+ 'Не удалось скачать таблицу',
+ parse_mode='html'
+ )
+ raise
+ except Exception as e:
+ self.logger.exception('Ошибка авторизации')
+ self.bot.send_message(
+ message.chat.id,
+ 'Не удалось авторизоваться',
+ parse_mode='html'
+ )
+ raise
+
+ def get_sheet_data(self, osheet, sheet_name, **kwargs):
+ """Получение данных из листа Excel"""
+ self.logger.debug(f'Чтение листа {sheet_name}')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name=sheet_name, **kwargs)
+ self.logger.debug('Успешное чтение')
+ return sheet
+ except Exception as e:
+ self.logger.exception(f'Ошибка чтения листа {sheet_name}')
+ raise
+
+ def get_sport_logo(self, sport, pack, message):
+ """Получение логотипа вида спорта"""
+ self.logger.info(f'Получение оформления для {sport}')
+ self.bot.send_message(
+ message.chat.id,
+ f'Ищем оформления для {sport}',
+ parse_mode='html'
+ )
+ try:
+ d = pack.loc[sport]['LINK']
+ self.logger.debug(d)
+ if pd.isna(d):
+ self.logger.warning(f'Нет LINK для вида спорта "{sport}"')
+ return ''
+ return d
+ except Exception as e:
+ self.logger.exception(f"Не удалось получить оформление для {sport}")
+ return ''
+
+ def get_team_logo(self, team, sport, logos, message):
+ """Получение логотипа команды"""
+ self.logger.info(f'Получение логотипа {team}/{sport}')
+ self.bot.send_message(
+ message.chat.id,
+ f'Поиск логотипа {sport}-{team}',
+ parse_mode='html'
+ )
+ try:
+ d = logos.loc[team, sport]['LINK']
+ self.logger.debug(d)
+ return d
+ except KeyError:
+ self.logger.warning(f"Нет LINK для {team}/{sport}")
+ return ''
+ except Exception as e:
+ self.logger.exception(f"Ошибка при получении логотипа {sport}")
+ return ''
+
+ def make_data_dict(self, ds):
+ """Создание словаря с данными"""
+ return {
+ 'date': ds['DATA'],
+ 'time': ds['TIME'],
+ 'channel': ds['CHANEL'],
+ 'sport': ds['SPORT'],
+ 'league': ds['LEAGUE'],
+ 'team_a': ds['TEAM A'],
+ 'team_b': ds['TEAM B'],
+ 'index': ds.name
+ }
+
+ def unc2uri(self, unc):
+ """Преобразование UNC пути в URI"""
+ self.logger.debug('Преобразование пути')
+ try:
+ p = urlparse(unc)
+ if len(p.scheme) > 2 or not unc:
+ return unc
+ else:
+ p = PureWindowsPath(unc)
+ return p.as_uri()
+ except Exception as e:
+ self.logger.exception('Ошибка преобразования пути')
+ return unc
+
+ def send_job(self, data, message):
+ """Отправка задачи на рендеринг"""
+ if self.PLACEHOLDER:
+ return self._send_job_dumb(data)
+
+ payload = {
+ "template": {
+ "src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
+ "composition": "pack",
+ "outputModule": "Start_h264",
+ "outputExt": "mp4"
+ },
+ "actions": {
+ "postrender": [
+ {
+ "module": "@nexrender/action-encode",
+ "preset": "mp4",
+ "output": "encoded.mp4"
+ },
+ {
+ "module": "@nexrender/action-copy",
+ "input": "encoded.mp4",
+ "output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
+ }
+ ]
+ },
+ "assets": []
+ }
+
+ # Добавление данных в payload
+ self._add_data_to_payload(payload, data, message)
+
+ url = self.config.nexrender_url
+ try:
+ r = requests.post(url, json=payload)
+ if r.status_code == 200:
+ res = r.json()
+ uid = res['uid']
+ return {'uid': uid, 'outname': data['outfile_name']}
+ except Exception as e:
+ self.logger.exception('Ошибка отправки задачи')
+ return None
+
+ def _add_data_to_payload(self, payload, data, message):
+ """Добавление данных в payload"""
+ # Добавление даты
+ self._add_date_data(payload, data, message)
+
+ # Добавление времени
+ self._add_time_data(payload, data, message)
+
+ # Добавление лиги
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "LEAGUE",
+ "property": "Source Text",
+ "value": data['league']
+ })
+
+ # Добавление спорта
+ if data['sport']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "SPORT",
+ "property": "Source Text",
+ "value": data['sport']
+ })
+
+ # Добавление команд и логотипов
+ self._add_team_data(payload, data, message, 'A')
+ self._add_team_data(payload, data, message, 'B')
+
+ # Добавление оформления
+ if data['pack']:
+ payload['assets'].append({
+ "src": data['pack'],
+ "type": "video",
+ "layerName": "TOP"
+ })
+
+ def _add_date_data(self, payload, data, message):
+ """Добавление данных о дате"""
+ if data['data'] == 'сегодня':
+ self._add_specific_date_style(payload, data, message, "105", [0, 5])
+ elif data['data'] == 'завтра':
+ self._add_specific_date_style(payload, data, message, "115", [0, 25])
+ elif len(data['data']) < 6:
+ self._add_specific_date_style(payload, data, message, "120", [0, 20])
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "DATA",
+ "property": "Source Text",
+ "value": data['data']
+ })
+
+ def _add_specific_date_style(self, payload, data, message, font_size, anchor_point):
+ """Добавление стилей для конкретной даты"""
+ payload['assets'].extend([
+ {
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": font_size
+ },
+ {
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchor_point
+ }
+ ])
+ self.logger.info(f'Для "{data["data"]}" шрифт установлен {font_size}')
+ self.bot.send_message(
+ message.chat.id,
+ f'Для "{data["data"]}" размер шрифта установлен {font_size}',
+ parse_mode='html'
+ )
+ self.logger.info(f'Сдвиг "{data["data"]}" на {anchor_point} пикселей')
+ self.bot.send_message(
+ message.chat.id,
+ f'Сдвигаем "{data["data"]}" на {anchor_point} пикселей',
+ parse_mode='html'
+ )
+
+ def _add_time_data(self, payload, data, message):
+ """Добавление данных о времени"""
+ if len(data['time_h']) < 2:
+ anchor_point = [40, 0]
+ for layer in ["TIME_H", "TIME_M", "TIME"]:
+ payload['assets'].append({
+ "layerName": layer,
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchor_point
+ })
+ self.logger.info(f'Сдвиг "{data["time_h"]}:{data["time_m"]}" на {anchor_point} пикселей')
+ self.bot.send_message(
+ message.chat.id,
+                f'Сдвигаем "{data["time_h"]}:{data["time_m"]}" на {anchor_point} пикселей',
+ parse_mode='html'
+ )
+
+ payload['assets'].extend([
+ {
+ "type": "data",
+ "layerName": "TIME_H",
+ "property": "Source Text",
+ "value": data['time_h']
+ },
+ {
+ "type": "data",
+ "layerName": "TIME_M",
+ "property": "Source Text",
+ "value": data['time_m']
+ }
+ ])
+
+ def _add_team_data(self, payload, data, message, team):
+ """Добавление данных о команде"""
+ team_key = f'team_{team.lower()}'
+ if data[team_key]:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": f"TEAM_{team}",
+ "property": "Source Text",
+ "value": data[team_key]
+ })
+
+ logo_key = f'{team_key}_logo'
+ if data[logo_key]:
+ payload['assets'].append({
+ "src": data[logo_key],
+ "type": "image",
+ "layerName": f"TEAM_{team}_LOGO"
+ })
+
+ logo_res_key = f'{team_key}_logo_res'
+ if data.get(logo_res_key):
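+            # Explanatory note: this After Effects expression rescales the logo layer so that its
+            # longer side matches the pixel size taken from the team cell (the value after the
+            # second '#'), assuming the layer starts at 100% scale.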
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": f"if (width > height) {{max_size = width;}} else {{max_size = height;}} var real_size = {data[logo_res_key][0]}/max_size*100;[real_size,real_size]",
+ "layerName": f"TEAM_{team}_LOGO"
+ })
+ self.logger.info(f'{data[team_key]} логотип изменен до {data[logo_res_key][0]}')
+ self.bot.send_message(
+ message.chat.id,
+ f'{data[team_key]} масштабирован под {data[logo_res_key][0]} пикселей',
+ parse_mode='html'
+ )
+
+ def make_job_dicts(self, dd, pack, logos, message):
+ """Создание задач рендеринга"""
+ self.logger.debug('Начало создания имени')
+ fn = ''
+ data = {}
+ empty_sport = pack.iloc[0].name
+
+ # Дата
+ if isinstance(dd['date'], str):
+ fn += f"{dd['date'][6:]}{dd['date'][3:5]}{dd['date'][0:2]}"
+ d = dd['date'].split('.')
+ data['data'] = f"{int(d[0])} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][int(d[1])]}"
+ elif isinstance(dd['date'], datetime.date):
+ fn += f"{dd['date'].year}{dd['date'].month:02}{dd['date'].day:02}"
+ data['data'] = f"{dd['date'].day} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][dd['date'].month]}"
+
+ # Вид спорта и оформление
+ if dd['sport'] != empty_sport:
+ fn += f"_{dd['sport']}"
+ data['sport'] = dd['sport']
+ data['pack'] = self.unc2uri(self.get_sport_logo(dd['sport'], pack, message))
+ else:
+ data['sport'] = ''
+ data['pack'] = ''
+
+ # Лига
+ if dd["league"][-1] == '.':
+ self.logger.debug('Точка в названии лиги!')
+ fn += f'_{dd["league"][:-1]}'
+ data['league'] = dd['league'][:-1]
+ else:
+ data['league'] = dd['league']
+ fn += f'_{dd["league"]}'
+
+        # Команды (метод возвращает дополненное имя файла fn)
+        fn = self._process_team_data(dd, data, fn, 'A', logos, message)
+        fn = self._process_team_data(dd, data, fn, 'B', logos, message)
+
+ # Канал
+ if not pd.isna(dd['channel']):
+ self.logger.debug('Канал установлен ' + dd['channel'])
+ fn += f"_{dd['channel']}"
+
+ # Финальное форматирование имени файла
+ fn = translit(fn, reversed=True)
+ fn = fn.replace(' ', '-').replace("'", '')
+ data['outfile_name'] = fn
+
+ # Время
+ if isinstance(dd['time'], str):
+ t = dd['time'].split(':')
+ data['time_h'] = t[0]
+ data['time_m'] = t[1]
+ elif isinstance(dd['time'], datetime.time):
+ data['time_h'] = str(dd['time'].hour)
+ data['time_m'] = str(dd['time'].minute)
+
+ self.logger.debug('Время ' + data['time_h'] + ':' + data['time_m'])
+ self.logger.debug("Конец создания имени")
+
+ # Создание задач
+ watch_list = []
+ watch_list.append(self.send_job(data, message))
+
+ if True: # TODO: Заменить на условие, если нужно
+ data['data'] = 'сегодня'
+ data['outfile_name'] = fn + '_Today'
+ watch_list.append(self.send_job(data, message))
+ data['data'] = 'завтра'
+ data['outfile_name'] = fn + '_Tomorrow'
+ watch_list.append(self.send_job(data, message))
+
+ pprint(watch_list)
+ return list(filter(None, watch_list))
+
+    def _process_team_data(self, dd, data, fn, team, logos, message):
+        """Обработка данных команды; возвращает дополненное имя файла fn"""
+        team_key = f'team_{team.lower()}'
+        if pd.isna(dd[team_key]):
+            self.logger.info(f'Нет команды {team}')
+            self.bot.send_message(
+                message.chat.id,
+                f'Нет команды {team}',
+                parse_mode='html'
+            )
+            data[team_key] = ''
+            data[f'{team_key}_logo'] = ''
+            data[f'{team_key}_logo_res'] = ''
+        else:
+            name = dd[team_key].split('#')
+            fn += f"_{name[0]}"
+            data[f'{team_key}_logo_res'] = name[2:]
+            data[team_key] = name[0]
+            data[f'{team_key}_logo'] = self.unc2uri(
+                self.get_team_logo(dd[team_key], dd['sport'], logos, message)
+            )
+        return fn
+
+def run_flask():
+ """Запуск Flask сервера для панели мониторинга"""
+ flask_app.run(host='0.0.0.0', port=5000)
+
+@flask_app.route('/admin/stats')
+def admin_stats():
+ """API endpoint для получения статистики"""
+ stats = monitoring.get_stats()
+ return jsonify({
+ 'status': 'success',
+ 'data': stats
+ })
+
+@flask_app.route('/admin/jobs')
+def admin_jobs():
+ """API endpoint для получения списка задач"""
+ jobs = monitoring.get_recent_jobs(50)
+ return jsonify({
+ 'status': 'success',
+ 'data': jobs
+ })
+
+@flask_app.route('/admin/users')
+def admin_users():
+ """API endpoint для получения статистики по пользователям"""
+ stats = monitoring.get_stats()
+ return jsonify({
+ 'status': 'success',
+ 'data': stats.get('users', {})
+ })
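+# Usage sketch: with the script running, the dashboard can be polled directly, e.g.
+#   curl http://localhost:5000/admin/stats  ->  {"status": "success", "data": {...}}
+# The routes rely on the module-level `monitoring` object created in the __main__ block below.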
+
+if __name__ == '__main__':
+ try:
+ # Проверяем наличие файла окружения
+ if not os.path.exists('AF_environment.env'):
+ raise FileNotFoundError(
+ "Файл окружения AF_environment.env не найден. "
+ "Создайте его по образцу AF_environment.example.env"
+ )
+
+ # Инициализация компонентов
+ config = Config()
+ monitoring = Monitoring()
+ job_manager = JobManager(config, monitoring)
+
+ # Запуск Flask в отдельном потоке
+ flask_thread = threading.Thread(target=run_flask, daemon=True)
+ flask_thread.start()
+
+ # Запуск Telegram бота
+ job_manager.bot.infinity_polling()
+
+    except FileNotFoundError as e:
+        logging.error(str(e))
+        print(str(e))
+        sys.exit(1)
+    except ValueError as e:
+        logging.error(f"Ошибка конфигурации: {e}")
+        print(f"Ошибка конфигурации: {e}")
+        sys.exit(1)
+    except Exception as e:
+        logging.error(f"Неожиданная ошибка: {e}", exc_info=True)
+        print(f"Неожиданная ошибка: {e}")
+        sys.exit(1)
\ No newline at end of file
diff --git a/AF_script_test_5.py b/AF_script_test_5.py
new file mode 100644
index 0000000..3c5b754
--- /dev/null
+++ b/AF_script_test_5.py
@@ -0,0 +1,716 @@
+import os
+from dotenv import load_dotenv
+import logging
+import logging.config
+from pprint import pprint
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+import sys
+from urllib.parse import urlparse
+from pathlib import PureWindowsPath
+import telebot
+from synology_drive_api.drive import SynologyDrive
+
+# Загрузка переменных окружения из файла
+load_dotenv('AF_environment.env')
+
+class Config:
+ """Класс для хранения конфигурации"""
+ def __init__(self):
+ # Обязательные переменные окружения
+ self.nas_user = os.getenv('NAS_USER')
+ self.nas_pass = os.getenv('NAS_PASS')
+ self.nas_ip = os.getenv('NAS_IP')
+ self.nas_port = os.getenv('NAS_PORT')
+ self.nas_file = os.getenv('NAS_FILE')
+ self.token = os.getenv('TELEGRAM_TOKEN')
+ self.group_chat = os.getenv('TELEGRAM_GROUP_CHAT')
+ self.nexrender_url = os.getenv('NEXRENDER_URL')
+
+ # Валидация конфигурации
+ self._validate_config()
+
+ def _validate_config(self):
+ """Проверяет, что все обязательные переменные окружения установлены"""
+ missing_vars = []
+ for var, value in vars(self).items():
+ if value is None:
+ missing_vars.append(var.upper())
+
+ if missing_vars:
+ raise ValueError(
+ f"Отсутствуют обязательные переменные окружения: {', '.join(missing_vars)}. "
+ "Пожалуйста, проверьте файл AF_environment.env"
+ )
+
+class JobManager:
+ """Класс для управления задачами рендеринга"""
+ def __init__(self, config):
+ self.config = config
+ self.bot = telebot.TeleBot(config.token)
+ self.PLACEHOLDER = sys.platform == 'win32'
+
+ if self.PLACEHOLDER:
+ self._init_placeholders()
+
+ def _init_placeholders(self):
+ """Инициализация заглушек для тестирования"""
+ from random import random, choices
+
+        def send_job_dumb(data, message=None):
+ if random() < 0.8:
+ uid = ''.join(choices('abcdefghijklmnopqrstuvwxyz_', k=8))
+ return {'uid': uid, 'outname': data['outfile_name']}
+ return None
+
+ class FakeResp:
+ def __init__(self, state='queued', *args, **kwargs):
+ self.state = state
+ self.status_code = 200
+
+ def json(self):
+ return {'state': self.state}
+
+ def fake_get(*args, **kwargs):
+ rand = random()
+ if rand < 0.8:
+ return FakeResp()
+ elif rand < 0.95:
+ return FakeResp('finished')
+ else:
+ return FakeResp('error')
+
+        self._send_job_real = self.send_job
+        self._send_job_dumb = send_job_dumb  # отдельная ссылка для ранней проверки в send_job()
+        self.send_job = send_job_dumb
+        self._fake_get = fake_get
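+        # Explanatory note: on Windows the real nexrender calls are swapped for the stubs above,
+        # so the bot flow can be exercised end to end without a render node.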
+
+ def setup_logging(self):
+ """Настройка логирования"""
+ LOG_CONFIG = {
+ 'version': 1,
+ 'handlers': {
+ 'console': {
+ 'class': 'logging.StreamHandler',
+ 'level': 'DEBUG',
+ 'formatter': 'simple',
+ 'stream': 'ext://sys.stdout'
+ },
+ 'file': {
+ 'class': 'logging.FileHandler',
+ 'level': 'DEBUG',
+ 'formatter': 'simple',
+ 'encoding': 'utf-8',
+ 'filename': 'AF_script.log'
+ },
+ },
+ 'loggers': {
+ __name__: {
+ 'handlers': ['console', 'file'],
+ 'level': 'DEBUG'
+ }
+ },
+ 'formatters': {
+ 'simple': {
+ 'class': 'logging.Formatter',
+ 'format': '%(asctime)s %(levelname)-8s %(funcName)12s() - %(message)s',
+ 'datefmt': '%d.%m.%Y %H:%M:%S'
+ }
+ }
+ }
+ logging.config.dictConfig(LOG_CONFIG)
+ self.logger = logging.getLogger(__name__)
+ telebot.logger.addHandler(self.logger.handlers[0])
+
+ def load_osheet(self, message):
+ """Загрузка файла с Synology Drive"""
+ self.logger.debug('Получение данных')
+ try:
+ synd = SynologyDrive(
+ self.config.nas_user,
+ self.config.nas_pass,
+ self.config.nas_ip,
+ self.config.nas_port,
+ https=True,
+ dsm_version='7'
+ )
+
+ self.logger.debug(synd.login()) # Проверка сессии
+ try:
+ self.logger.debug('Попытка загрузки таблицы')
+ bio = synd.download_synology_office_file(self.config.nas_file)
+ self.logger.debug('Успешная загрузка')
+ return bio
+ except Exception as e:
+ self.logger.exception('Ошибка загрузки')
+ self.bot.send_message(
+ message.chat.id,
+ 'Не удалось скачать таблицу',
+ parse_mode='html'
+ )
+ raise
+ except Exception as e:
+ self.logger.exception('Ошибка авторизации')
+ self.bot.send_message(
+ message.chat.id,
+ 'Не удалось авторизоваться',
+ parse_mode='html'
+ )
+ raise
+
+ def get_sheet_data(self, osheet, sheet_name, **kwargs):
+ """Получение данных из листа Excel"""
+ self.logger.debug(f'Чтение листа {sheet_name}')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name=sheet_name, **kwargs)
+ self.logger.debug('Успешное чтение')
+ return sheet
+ except Exception as e:
+ self.logger.exception(f'Ошибка чтения листа {sheet_name}')
+ raise
+
+ def get_sport_logo(self, sport, pack, message):
+ """Получение логотипа вида спорта"""
+ self.logger.info(f'Получение оформления для {sport}')
+ self.bot.send_message(
+ message.chat.id,
+ f'Ищем оформления для {sport}',
+ parse_mode='html'
+ )
+ try:
+ d = pack.loc[sport]['LINK']
+ self.logger.debug(d)
+ if pd.isna(d):
+ self.logger.warning(f'Нет LINK для вида спорта "{sport}"')
+ return ''
+ return d
+ except Exception as e:
+ self.logger.exception(f"Не удалось получить оформление для {sport}")
+ return ''
+
+ def get_team_logo(self, team, sport, logos, message):
+ """Получение логотипа команды"""
+ self.logger.info(f'Получение логотипа {team}/{sport}')
+ self.bot.send_message(
+ message.chat.id,
+ f'Поиск логотипа {sport}-{team}',
+ parse_mode='html'
+ )
+ try:
+ d = logos.loc[team, sport]['LINK']
+ self.logger.debug(d)
+ return d
+ except KeyError:
+ self.logger.warning(f"Нет LINK для {team}/{sport}")
+ return ''
+ except Exception as e:
+ self.logger.exception(f"Ошибка при получении логотипа {sport}")
+ return ''
+
+ def make_data_dict(self, ds):
+ """Создание словаря с данными"""
+ return {
+ 'date': ds['DATA'],
+ 'time': ds['TIME'],
+ 'channel': ds['CHANEL'],
+ 'sport': ds['SPORT'],
+ 'league': ds['LEAGUE'],
+ 'TEAM A': ds['TEAM A'],
+ 'TEAM B': ds['TEAM B'],
+ 'index': ds.name
+ }
+
+ def unc2uri(self, unc):
+ """Преобразование UNC пути в URI"""
+ self.logger.debug('Преобразование пути')
+ try:
+ p = urlparse(unc)
+ if len(p.scheme) > 2 or not unc:
+ return unc
+ else:
+ p = PureWindowsPath(unc)
+ return p.as_uri()
+ except Exception as e:
+ self.logger.exception('Ошибка преобразования пути')
+ return unc
+
+ def send_job(self, data, message):
+ """Отправка задачи на рендеринг"""
+        if self.PLACEHOLDER:
+            return self._send_job_dumb(data, message)
+
+ payload = {
+ "template": {
+ "src": "file:///c:/users/virtVmix-2/Downloads/PackShot_Sborka_eng.aepx",
+ "composition": "pack",
+ "outputModule": "Start_h264",
+ "outputExt": "mp4"
+ },
+ "actions": {
+ "postrender": [
+ {
+ "module": "@nexrender/action-encode",
+ "preset": "mp4",
+ "output": "encoded.mp4"
+ },
+ {
+ "module": "@nexrender/action-copy",
+ "input": "encoded.mp4",
+ "output": f"//10.10.35.3/edit/Auto_Anons/{data['outfile_name']}.mp4"
+ }
+ ]
+ },
+ "assets": []
+ }
+
+ # Добавление данных в payload
+ self._add_data_to_payload(payload, data, message)
+
+ url = self.config.nexrender_url
+ try:
+ r = requests.post(url, json=payload)
+ if r.status_code == 200:
+ res = r.json()
+ uid = res['uid']
+ return {'uid': uid, 'outname': data['outfile_name']}
+ except Exception as e:
+ self.logger.exception('Ошибка отправки задачи')
+ return None
+
+ def _add_data_to_payload(self, payload, data, message):
+ """Добавление данных в payload"""
+ # Добавление даты
+ self._add_date_data(payload, data, message)
+
+ # Добавление времени
+ self._add_time_data(payload, data, message)
+
+ # Добавление лиги
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "LEAGUE",
+ "property": "Source Text",
+ "value": data['league']
+ })
+
+ # Добавление спорта
+ if data['sport']:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "SPORT",
+ "property": "Source Text",
+ "value": data['sport']
+ })
+
+ # Добавление команд и логотипов
+ self._add_team_data(payload, data, message, 'A')
+ self._add_team_data(payload, data, message, 'B')
+
+ # Добавление оформления
+ if data['pack']:
+ payload['assets'].append({
+ "src": data['pack'],
+ "type": "video",
+ "layerName": "TOP"
+ })
+
+ def _add_date_data(self, payload, data, message):
+ """Добавление данных о дате"""
+ if data['data'] == 'сегодня':
+ self._add_specific_date_style(payload, data, message, "105", [0, 5])
+ elif data['data'] == 'завтра':
+ self._add_specific_date_style(payload, data, message, "115", [0, 25])
+ elif len(data['data']) < 6:
+ self._add_specific_date_style(payload, data, message, "120", [0, 20])
+
+ payload['assets'].append({
+ "type": "data",
+ "layerName": "DATA",
+ "property": "Source Text",
+ "value": data['data']
+ })
+
+ def _add_specific_date_style(self, payload, data, message, font_size, anchor_point):
+ """Добавление стилей для конкретной даты"""
+ payload['assets'].extend([
+ {
+ "layerName": "DATA",
+ "property": "Source Text.fontSize",
+ "type": "data",
+ "value": font_size
+ },
+ {
+ "layerName": "DATA",
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchor_point
+ }
+ ])
+ self.logger.info(f'Для "{data["data"]}" шрифт установлен {font_size}')
+ self.bot.send_message(
+ message.chat.id,
+ f'Для "{data["data"]}" размер шрифта установлен {font_size}',
+ parse_mode='html'
+ )
+ self.logger.info(f'Сдвиг "{data["data"]}" на {anchor_point} пикселей')
+ self.bot.send_message(
+ message.chat.id,
+ f'Сдвигаем "{data["data"]}" на {anchor_point} пикселей',
+ parse_mode='html'
+ )
+
+ def _add_time_data(self, payload, data, message):
+ """Добавление данных о времени"""
+ if len(data['time_h']) < 2:
+ anchor_point = [40, 0]
+ for layer in ["TIME_H", "TIME_M", "TIME"]:
+ payload['assets'].append({
+ "layerName": layer,
+ "property": "transform.anchorPoint",
+ "type": "data",
+ "value": anchor_point
+ })
+ self.logger.info(f'Сдвиг "{data["time_h"]}:{data["time_m"]}" на {anchor_point} пикселей')
+ self.bot.send_message(
+ message.chat.id,
+                f'Сдвигаем "{data["time_h"]}:{data["time_m"]}" на {anchor_point} пикселей',
+ parse_mode='html'
+ )
+
+ payload['assets'].extend([
+ {
+ "type": "data",
+ "layerName": "TIME_H",
+ "property": "Source Text",
+ "value": data['time_h']
+ },
+ {
+ "type": "data",
+ "layerName": "TIME_M",
+ "property": "Source Text",
+ "value": data['time_m']
+ }
+ ])
+
+ def _add_team_data(self, payload, data, message, team):
+ """Добавление данных о команде"""
+ team_key = f'team_{team.lower()}'
+ if data[team_key]:
+ payload['assets'].append({
+ "type": "data",
+ "layerName": f"TEAM_{team}",
+ "property": "Source Text",
+ "value": data[team_key]
+ })
+
+ logo_key = f'{team_key}_logo'
+ if data[logo_key]:
+ payload['assets'].append({
+ "src": data[logo_key],
+ "type": "image",
+ "layerName": f"TEAM_{team}_LOGO"
+ })
+
+ logo_res_key = f'{team_key}_logo_res'
+ if data.get(logo_res_key):
+ payload['assets'].append({
+ "property": "scale",
+ "type": "data",
+ "expression": f"if (width > height) {{max_size = width;}} else {{max_size = height;}} var real_size = {data[logo_res_key][0]}/max_size*100;[real_size,real_size]",
+ "layerName": f"TEAM_{team}_LOGO"
+ })
+ self.logger.info(f'{data[team_key]} логотип изменен до {data[logo_res_key][0]}')
+ self.bot.send_message(
+ message.chat.id,
+ f'{data[team_key]} масштабирован под {data[logo_res_key][0]} пикселей',
+ parse_mode='html'
+ )
+
+ def make_job_dicts(self, dd, pack, logos, message):
+ """Создание задач рендеринга"""
+ self.logger.debug('Начало создания имени')
+ fn = ''
+ data = {}
+ empty_sport = pack.iloc[0].name
+
+ # Дата
+ if isinstance(dd['date'], str):
+ fn += f"{dd['date'][6:]}{dd['date'][3:5]}{dd['date'][0:2]}"
+ d = dd['date'].split('.')
+ data['data'] = f"{int(d[0])} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][int(d[1])]}"
+ elif isinstance(dd['date'], datetime.date):
+ fn += f"{dd['date'].year}{dd['date'].month:02}{dd['date'].day:02}"
+ data['data'] = f"{dd['date'].day} {['','января','февраля','марта','апреля','мая','июня','июля','августа','сентября','октября','ноября','декабря'][dd['date'].month]}"
+
+ # Вид спорта и оформление
+ if dd['sport'] != empty_sport:
+ fn += f"_{dd['sport']}"
+ data['sport'] = dd['sport']
+ data['pack'] = self.unc2uri(self.get_sport_logo(dd['sport'], pack, message))
+ else:
+ data['sport'] = ''
+ data['pack'] = ''
+
+ # Лига
+ if dd["league"][-1] == '.':
+ self.logger.debug('Точка в названии лиги!')
+ fn += f'_{dd["league"][:-1]}'
+ data['league'] = dd['league'][:-1]
+ else:
+ data['league'] = dd['league']
+ fn += f'_{dd["league"]}'
+
+        # Команды (метод возвращает дополненное имя файла fn)
+        fn = self._process_team_data(dd, data, fn, 'A', logos, message)
+        fn = self._process_team_data(dd, data, fn, 'B', logos, message)
+
+ # Канал
+ if not pd.isna(dd['channel']):
+ self.logger.debug('Канал установлен ' + dd['channel'])
+ fn += f"_{dd['channel']}"
+
+ # Финальное форматирование имени файла
+ fn = translit(fn, reversed=True)
+ fn = fn.replace(' ', '-').replace("'", '')
+ data['outfile_name'] = fn
+
+ # Время
+ if isinstance(dd['time'], str):
+ t = dd['time'].split(':')
+ data['time_h'] = t[0]
+ data['time_m'] = t[1]
+ elif isinstance(dd['time'], datetime.time):
+ data['time_h'] = str(dd['time'].hour)
+ data['time_m'] = str(dd['time'].minute)
+
+ self.logger.debug('Время ' + data['time_h'] + ':' + data['time_m'])
+ self.logger.debug("Конец создания имени")
+
+ # Создание задач
+ watch_list = []
+ watch_list.append(self.send_job(data, message))
+
+ if True: # TODO: Заменить на условие, если нужно
+ data['data'] = 'сегодня'
+ data['outfile_name'] = fn + '_Today'
+ watch_list.append(self.send_job(data, message))
+ data['data'] = 'завтра'
+ data['outfile_name'] = fn + '_Tomorrow'
+ watch_list.append(self.send_job(data, message))
+
+ pprint(watch_list)
+ return list(filter(None, watch_list))
+
+    def _process_team_data(self, dd, data, fn, team, logos, message):
+        """Обработка данных команды; возвращает дополненное имя файла fn"""
+        team_key = f'team_{team.lower()}'
+        if pd.isna(dd[f'TEAM {team}']):
+            self.logger.info(f'Нет команды {team}')
+            self.bot.send_message(
+                message.chat.id,
+                f'Нет команды {team}',
+                parse_mode='html'
+            )
+            data[team_key] = ''
+            data[f'{team_key}_logo'] = ''
+            data[f'{team_key}_logo_res'] = ''
+        else:
+            name = dd[f'TEAM {team}'].split('#')
+            fn += f"_{name[0]}"
+            data[f'{team_key}_logo_res'] = name[2:]
+            data[team_key] = name[0]
+            data[f'{team_key}_logo'] = self.unc2uri(
+                self.get_team_logo(dd[f'TEAM {team}'], dd['sport'], logos, message)
+            )
+        return fn
+
+ def run(self):
+ """Запуск бота"""
+ @self.bot.message_handler(commands=['help', 'start'])
+ def send_welcome(message):
+ self.bot.send_chat_action(message.chat.id, 'typing')
+ user = f" {message.from_user.username}" if message.from_user.username else '!'
+ sleep(1)
+ self.bot.reply_to(
+ message,
+ f"Привет{user}\nЯ помогу тебе сделать Анонсы!\nВот список команд которые я могу выполнить:\n/ибаш - наибашу обработку и рендер!\n/харе - остановит нах!"
+ )
+
+ @self.bot.message_handler(commands=['чёкак', 'status'])
+ def status(message):
+ self.logger.info(f'Статус запрошен {message.from_user.username}')
+ try:
+ r = requests.get(self.config.nexrender_url)
+ if r.status_code == 200:
+ jobs = r.json()
+ s = [{'uid': i['uid'], 'state': i['state']} for i in jobs]
+ queued = sum(1 for job in s if job['state'] in ('queued', 'picked'))
+
+ if s:
+ self.logger.info(f"{queued} в очереди")
+ self.bot.send_message(
+ message.chat.id,
+ f"В очереди {queued}"
+ )
+ else:
+ self.logger.info("Нет задач в очереди")
+ self.bot.send_message(
+ message.chat.id,
+ "Нет задач в очереди"
+ )
+ except Exception as e:
+ self.logger.exception("Ошибка получения статуса")
+ self.bot.send_message(
+ message.chat.id,
+ "Ошибка получения статуса"
+ )
+
+ @self.bot.message_handler(commands=['харе', 'stop'])
+ def stop(message):
+ try:
+ r = requests.get(self.config.nexrender_url)
+ if r.status_code == 200:
+ jobs = r.json()
+ s = [{'uid': i['uid'], 'state': i['state']} for i in jobs]
+ cancelled = 0
+
+ if s:
+ for job in s:
+ requests.delete(f"{self.config.nexrender_url}/{job['uid']}")
+ cancelled += 1
+
+ self.logger.info(f"Отменено {cancelled} задач пользователем {message.from_user.username}")
+ self.bot.send_message(
+ message.chat.id,
+ f"Отменено {cancelled}"
+ )
+ else:
+ self.logger.info(f"{message.from_user.username} запросил отмену, но нет задач для отмены")
+ self.bot.send_message(
+ message.chat.id,
+ "Нет задач для отмены"
+ )
+ except Exception as e:
+ self.logger.exception("Ошибка отмены задач")
+ self.bot.send_message(
+ message.chat.id,
+ "Ошибка отмены задач"
+ )
+
+ @self.bot.message_handler(commands=['ибаш', 'ibash'])
+ def ibash(message):
+ self.logger.info(f'Запуск задач для {message.from_user.username}')
+ self.bot.send_chat_action(message.chat.id, 'typing')
+
+ user = message.from_user.username if message.from_user.username else '!'
+ self.bot.send_message(
+ message.chat.id,
+ f"Ну что ж {user}, давай попробуем \nНАИБАШИТЬ!!!",
+ parse_mode='html'
+ )
+
+ self.bot.send_chat_action(message.chat.id, 'upload_document')
+
+ try:
+ osheet = self.load_osheet(message)
+ start = self.get_sheet_data(osheet, 'Start', header=1)
+ start = start[start['STATE'] == False] # keep only rows whose STATE checkbox is not set
+ start = start.dropna(subset=['DATA', 'TIME', 'SPORT', 'LEAGUE']) # drop incomplete rows
+ pack = self.get_sheet_data(osheet, 'SPORT', header=0, index_col='SPORT')
+ pack = pack[pack.index.notna()]
+ logos = self.get_sheet_data(osheet, 'TEAMS', header=0, index_col=[0, 1])
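+ # Expected workbook layout, reconstructed from how the sheets are used
+ # below (an assumption, not a documented schema):
+ # Start — one row per announce: DATA, TIME, SPORT, LEAGUE, TEAM A, TEAM B, STATE
+ # SPORT — indexed by SPORT, LINK points at the sport's design pack
+ # TEAMS — indexed by (team, sport), LINK points at the team logo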
+
+ # Clean up finished and errored jobs left in the nexrender queue
+ try:
+ r = requests.get(self.config.nexrender_url)
+ if r.status_code == 200:
+ jobs = r.json()
+ for job in jobs:
+ if job['state'] in ('finished', 'error'):
+ requests.delete(f"{self.config.nexrender_url}/{job['uid']}")
+ except Exception as e:
+ self.logger.exception("Ошибка очистки старых задач")
+
+ self.bot.send_chat_action(message.chat.id, 'record_video')
+
+ watch_list = []
+ for i, row in start.iterrows():
+ dd = self.make_data_dict(row)
+ watch_list += self.make_job_dicts(dd, pack, logos, message)
+
+ self.logger.info(f"В очереди {len(watch_list)} задач")
+ self.bot.send_message(
+ message.chat.id,
+ f"В очереди {len(watch_list)} задач"
+ )
+
+ while watch_list:
+ self.bot.send_chat_action(message.chat.id, 'record_video')
+ sleep(25)
+
+ for job in watch_list[:]: # iterate over a copy so jobs can be removed while looping
+ try:
+ if self.PLACEHOLDER:
+ r = self._fake_get()
+ else:
+ r = requests.get(f"{self.config.nexrender_url}/{job['uid']}")
+
+ if r.status_code == 200:
+ state = r.json()['state']
+ if state == 'finished':
+ watch_list.remove(job)
+ self.logger.info(f"{job['outname']}, {state}, {len(watch_list)} осталось")
+ self.bot.send_message(
+ message.chat.id,
+ f"{job['outname']}, готов, {len(watch_list)} осталось выполнить",
+ parse_mode='html'
+ )
+ elif state == 'error':
+ watch_list.remove(job)
+ self.logger.warning(f"{job}, {state}, {len(watch_list)} осталось")
+ self.bot.send_message(
+ message.chat.id,
+ f"!!!{job}, {state}, {len(watch_list)} осталось выполнить",
+ parse_mode='html'
+ )
+ except Exception as e:
+ self.logger.exception(f"Ошибка проверки статуса задачи {job['uid']}")
+
+ self.bot.send_message(message.chat.id, 'Пойду спать :)')
+
+ except Exception as e:
+ self.logger.exception("Ошибка выполнения команды ибаш")
+ self.bot.send_message(
+ message.chat.id,
+ "Произошла ошибка при обработке команды"
+ )
+
+ self.logger.info('Запуск бота')
+ self.bot.infinity_polling()
+ self.logger.info('Завершение работы бота')
+
+if __name__ == '__main__':
+ try:
+ # Make sure the environment file exists before building the config
+ if not os.path.exists('AF_environment.env'):
+ raise FileNotFoundError(
+ "Файл окружения AF_environment.env не найден. "
+ )
+
+ config = Config()
+ job_manager = JobManager(config)
+ job_manager.setup_logging()
+ job_manager.run()
+
+ except FileNotFoundError as e:
+ logging.error(str(e))
+ print(str(e))
+ sys.exit(1)
+ except ValueError as e:
+ logging.error(f"Ошибка конфигурации: {e}")
+ print(f"Ошибка конфигурации: {e}")
+ sys.exit(1)
+ except Exception as e:
+ logging.error(f"Неожиданная ошибка: {e}", exc_info=True)
+ print(f"Неожиданная ошибка: {e}")
+ sys.exit(1)
\ No newline at end of file
diff --git a/test.py b/test.py
new file mode 100644
index 0000000..bd4704c
--- /dev/null
+++ b/test.py
@@ -0,0 +1,79 @@
+# NAS_IP='walle.barabanov.tv'
+# NAS_PORT='443'
+# NAS_FILE='/mydrive/Drive/Anons.osheet'
+
+NAS_USER='aescript'
+NAS_PASS='@5j15SduIhP7'
+NAS_IP='edit.tvstart.ru'
+NAS_PORT='443'
+NAS_FILE='/team-folders/nexrender/TEST.osheet'
+PUT_FILE='/team-folders/nexrender/TEST.osheet'
+
+
+import logging
+from pprint import pprint
+from synology_drive_api.drive import SynologyDrive
+import pandas as pd
+from transliterate import translit
+import requests
+from time import sleep
+import datetime
+
+logger = logging.getLogger(__name__)
+
+logging.basicConfig(filename='AF_script.log', level=logging.INFO,format='%(asctime)s %(levelname)s %(message)s')
+
+def load_osheet():
+ logger.info('Get data')
+ synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP,NAS_PORT,https=True,dsm_version='7')
+
+ try:
+ logger.info(synd.login()) # check that the session is established
+ try:
+ logger.debug('Try to download sheet')
+ bio = synd.download_synology_office_file(NAS_FILE)
+ logger.debug(bio)
+ logger.info('Download Success')
+ return bio
+ except Exception:
+ logger.exception('Download fails')
+ except Exception:
+ logger.exception('Login error')
+
+def upload_osheet(xls):
+ logger.info('Put data')
+ synd = SynologyDrive(NAS_USER, NAS_PASS, NAS_IP,NAS_PORT,https=True,dsm_version='7')
+
+ try:
+ logger.info(synd.login()) # check that the session is established
+ try:
+ logger.debug('Try to upload sheet')
+ bio = synd.upload_file(xls,PUT_FILE+'.xlsx')
+ synd.convert_to_online_office(PUT_FILE+'.xlsx/Anons.xlsx')
+ logger.debug(bio)
+ logger.info('Upload Success')
+ return bio
+ except Exception:
+ logger.exception('Upload fails')
+ except Exception:
+ logger.exception('Login error')
+
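+# Rough usage sketch for upload_osheet (an assumption, not exercised below):
+# SynologyDrive.upload_file appears to expect a binary file object, so e.g.
+#
+# with open('Anons.xlsx', 'rb') as f:
+# upload_osheet(f)
+#
+# would push the workbook to PUT_FILE and convert it back to an osheet.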
+
+def get_logos(osheet):
+ logger.debug('Read Sheet1')
+ try:
+ sheet = pd.read_excel(osheet, sheet_name='Sheet1')
+ logger.debug('Parsing OK')
+ return sheet
+ except Exception:
+ logger.exception('error while reading excel sheet')
+
+xls=load_osheet()
+print(get_logos(xls))
+
+from openpyxl import load_workbook
+
+xls.seek(0) # rewind the BytesIO in case pd.read_excel left it at EOF
+wb=load_workbook(xls,data_only=True)
+ws=wb['Sheet1']
+print(ws['C1'].value)
\ No newline at end of file
diff --git a/test_xls.py b/test_xls.py
new file mode 100644
index 0000000..32d65de
--- /dev/null
+++ b/test_xls.py
@@ -0,0 +1,33 @@
+import pandas as pd
+import re
+
+from xlrd import open_workbook
+
+# INFILE='Анонсы_Старт.xls'
+INFILE='Анонсы_Триумф.xls'
+
+with open_workbook(INFILE) as wb:
+ chanel=wb.sheet_by_index(0).cell_value(0,2) # channel name lives in cell C1 of the first sheet
+
+
+df=pd.read_excel(INFILE,header=1)
+
+
+for i,row in df.iterrows():
+ title=row.at['Title']
+ if pd.isna(title):
+ continue # skip rows without a title
+ title=re.sub(r'\(.+?\)','',title)
+
+ m=re.match(r'(?:Прямой эфир\.)?\s*([^\.]+)\.\s*(.+)\.\s*([^\.]+?)\s*-\s*([^\.]+?)\s*\.',title)
+
+ if m:
+ #print(title)
+ sport,league,team_a,team_b = m.groups()
+
+ r={'date':row.at['Date'],'time':row.at['Start Time'],
+ 'chanel':'START' if chanel=='Старт' else 'TRIUMPH',
+ 'sport':sport,'league':league,
+ 'team_a':team_a,'team_b':team_b}
+ print(r)
+
+ else:
+ print(title)
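+
+# Hypothetical smoke test for the title pattern above (the sample title is
+# invented for illustration; real titles come from the .xls schedule):
+sample='Прямой эфир. Футбол. Лига Чемпионов. Спартак - Зенит.'
+m=re.match(r'(?:Прямой эфир\.)?\s*([^\.]+)\.\s*(.+)\.\s*([^\.]+?)\s*-\s*([^\.]+?)\s*\.',sample)
+print(m.groups() if m else 'no match') # expect ('Футбол', 'Лига Чемпионов', 'Спартак', 'Зенит')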