3 changes: 2 additions & 1 deletion .gitignore
@@ -382,4 +382,5 @@ src/subtitle/models/base.pt
src/subtitle/models/small.pt
src/burn/mergevideo.txt
src/upload/upload.yaml
src/upload/uploadVideoQueue.txt
src/upload/uploadVideoQueue.txt
src/db/data.db
2 changes: 2 additions & 0 deletions requirements.txt
@@ -13,3 +13,5 @@ triton==3.1.0
zhconv==1.4.3
bilitool
zhipuai
pysqlite3
uuid
14 changes: 11 additions & 3 deletions src/burn/render_then_merge.py
@@ -3,12 +3,15 @@
import argparse
import os
import subprocess
from src.config import GPU_EXIST, SRC_DIR
from src.config import GPU_EXIST, SRC_DIR, VIDEOS_DIR
from src.danmaku.generate_danmakus import get_resolution, process_danmakus
from src.subtitle.generate_subtitles import generate_subtitles
from src.burn.render_command import render_command
from src.upload.extract_video_info import get_video_info
from src.log.logger import scan_log
from db.conn import insert_upload_queue
from src.upload.generate_yaml import generate_yaml_template
from uuid import uuid4

def normalize_video_path(filepath):
"""Normalize the video path to upload
@@ -90,5 +93,10 @@ def render_then_merge(video_path_list):
merge_command(output_video_path, title, artist, date, merge_list)
subprocess.run(['rm', '-r', tmp])

with open(f"{SRC_DIR}/upload/uploadVideoQueue.txt", "a") as file:
file.write(f"{output_video_path}\n")
yaml_template = generate_yaml_template(output_video_path)
template_path = os.path.join(VIDEOS_DIR, f'upload_conf/{uuid4()}.yaml')
with open(template_path, 'w', encoding='utf-8') as f:
f.write(yaml_template)

if not insert_upload_queue(output_video_path, template_path):
scan_log.error('插入待上传条目失败')
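
Both render scripts now finish the same way: render the output file, write a per-video YAML config under Videos/upload_conf/ with a UUID filename, and register the (video, config) pair in the SQLite upload queue. A minimal sketch of that shared step, assuming only the imports already shown in this diff; the helper name enqueue_for_upload is hypothetical:

import os
from uuid import uuid4
from src.config import VIDEOS_DIR
from src.log.logger import scan_log
from src.upload.generate_yaml import generate_yaml_template
from db.conn import insert_upload_queue

def enqueue_for_upload(video_path):
    # Render a YAML upload config for this video and give it a unique name.
    yaml_template = generate_yaml_template(video_path)
    template_path = os.path.join(VIDEOS_DIR, f'upload_conf/{uuid4()}.yaml')
    with open(template_path, 'w', encoding='utf-8') as f:
        f.write(yaml_template)
    # Register the pair in the queue; the insert fails if this path is already queued.
    if not insert_upload_queue(video_path, template_path):
        scan_log.error(f"Failed to enqueue {video_path} for upload")
    return template_path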
26 changes: 19 additions & 7 deletions src/burn/render_video.py
@@ -3,7 +3,7 @@
import argparse
import os
import subprocess
from src.config import GPU_EXIST, SRC_DIR, MODEL_TYPE, AUTO_SLICE, SLICE_DURATION, MIN_VIDEO_SIZE, SLICE_NUM, SLICE_OVERLAP, SLICE_STEP
from src.config import GPU_EXIST, SRC_DIR, MODEL_TYPE, AUTO_SLICE, SLICE_DURATION, MIN_VIDEO_SIZE, VIDEOS_DIR , SLICE_NUM, SLICE_OVERLAP, SLICE_STEP
from src.danmaku.generate_danmakus import get_resolution, process_danmakus
from src.subtitle.generate_subtitles import generate_subtitles
from src.burn.render_command import render_command
@@ -12,6 +12,9 @@
from src.autoslice.zhipu_sdk import zhipu_glm_4v_plus_generate_title
from src.upload.extract_video_info import get_video_info
from src.log.logger import scan_log
from db.conn import insert_upload_queue
from src.upload.generate_yaml import generate_yaml_template, generate_slice_yaml_template
from uuid import uuid4

def normalize_video_path(filepath):
"""Normalize the video path to upload
@@ -69,9 +72,13 @@ def render_video(video_path):
slice_video_flv_path = slice_path[:-4] + '.flv'
inject_metadata(slice_path, glm_title, slice_video_flv_path)
os.remove(slice_path)
with open(f"{SRC_DIR}/upload/uploadVideoQueue.txt", "a") as file:
scan_log.info(f"Complete {slice_video_flv_path} and wait for uploading!")
file.write(f"{slice_video_flv_path}\n")
slice_yaml_template = generate_slice_yaml_template(slice_video_flv_path)
slice_template_path = os.path.join(VIDEOS_DIR, f'upload_conf/{uuid4()}.yaml')
with open(slice_template_path, 'w', encoding='utf-8') as f:
f.write(slice_yaml_template)

if not insert_upload_queue(slice_video_flv_path, slice_template_path):
scan_log.error('插入待上传条目失败')
except Exception as e:
scan_log.error(f"Error in {slice_path}: {e}")

@@ -83,6 +90,11 @@ def render_video(video_path):
# # For test
# test_path = original_video_path[:-4]
# os.rename(original_video_path, test_path)

with open(f"{SRC_DIR}/upload/uploadVideoQueue.txt", "a") as file:
file.write(f"{format_video_path}\n")

yaml_template = generate_yaml_template(format_video_path)
template_path = os.path.join(VIDEOS_DIR, f'upload_conf/{uuid4()}.yaml')
with open(template_path, 'w', encoding='utf-8') as f:
f.write(yaml_template)

if not insert_upload_queue(format_video_path, template_path):
scan_log.error('插入待上传条目失败')
11 changes: 11 additions & 0 deletions src/config.py
@@ -4,6 +4,7 @@
from pathlib import Path
from datetime import datetime
import configparser
from db.conn import create_table

# ============================ Your configuration ============================
GPU_EXIST=True
@@ -33,6 +34,16 @@
LOG_DIR = os.path.join(BILIVE_DIR, 'logs')
VIDEOS_DIR = os.path.join(BILIVE_DIR, 'Videos')


if not os.path.exists(SRC_DIR + '/db/data.db'):
print("初始化数据库")
create_table()

if not os.path.exists(VIDEOS_DIR):
os.makedirs(VIDEOS_DIR)
if not os.path.exists(VIDEOS_DIR + '/upload_conf'):
os.makedirs(VIDEOS_DIR + '/upload_conf')

def get_model_path():
SRC_DIR = str(Path(os.path.abspath(__file__)).parent)
model_dir = os.path.join(SRC_DIR, 'subtitle', 'models')
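The new block in config.py bootstraps storage on import: it creates the SQLite schema the first time the data file is missing, and makes sure Videos/upload_conf exists before any renderer writes a per-video config there. A minimal, equivalent sketch of those checks, assuming the DATA_BASE_FILE constant from src/db/conn.py below; os.makedirs(..., exist_ok=True) is used here only to keep the sketch idempotent:

import os
from db.conn import create_table, DATA_BASE_FILE

def init_storage(videos_dir):
    # Create the schema once; connect() creates the .db file on first use.
    if not os.path.exists(DATA_BASE_FILE):
        print("Initializing database")
        create_table()
    # Ensure the directory for per-video YAML configs exists.
    os.makedirs(os.path.join(videos_dir, 'upload_conf'), exist_ok=True)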
1 change: 1 addition & 0 deletions src/db/__init__.py
@@ -0,0 +1 @@
from db.conn import *
87 changes: 87 additions & 0 deletions src/db/conn.py
@@ -0,0 +1,87 @@
import sqlite3
import os

# DATA_BASE_FILE ='./data.db'
DATA_BASE_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'data.db')

def connect():
db = sqlite3.connect(DATA_BASE_FILE)
return db

def create_table():
try:
db = connect()
cursor = db.cursor()
sql = [
"create table upload_queue (id integer primary key autoincrement, video_path text, config_path text, locked integer default 0);",
"create unique index idx_video_path on upload_queue(video_path);",
]
for s in sql:
cursor.execute(s)
db.commit()
db.close()
return True
except:
print("Create table failed.")
return False

def get_single_upload_queue():
db = connect()
cursor = db.cursor()
cursor.execute("select video_path, config_path from upload_queue where locked = 0 limit 1;")
row = cursor.fetchone()
result = {'video_path': row[0], 'config_path': row[1]} if row else None
db.close()
return result

def insert_upload_queue(video_path: str, config_path: str):
try:
db = connect()
cursor = db.cursor()
cursor.execute("insert into upload_queue (video_path, config_path) values (?, ?);", (video_path, config_path))
db.commit()
db.close()
return True
except sqlite3.IntegrityError:
print("Insert Upload Queue failed, the video path already exists.")
return False

def delete_upload_queue(video_path: str):
try:
db = connect()
cursor = db.cursor()
cursor.execute("delete from upload_queue where video_path = ?;", (video_path,))
db.commit()
db.close()
return True
except:
print("Delete Upload Queue failed.")
return False

def update_upload_queue_lock(video_path: str, locked: int):
try:
db = connect()
cursor = db.cursor()
cursor.execute("update upload_queue set locked = ? where video_path = ?;", (locked, video_path))
db.commit()
db.close()
return True
except:
print("Update Upload Queue failed.")
return False



if __name__ == "__main__":
# Create Table
create_table()
# Insert Test Data
insert_upload_queue('test.mp4', 'config.yaml')
# Insert again to check the unique index
print(insert_upload_queue('test.mp4', 'config.yaml'))
# Get the single upload queue, should be {'video_path': 'test.mp4', 'config_path': 'config.yaml'}
print(get_single_upload_queue())
# Delete the upload queue
delete_upload_queue('test.mp4')
# Get the single upload queue after delete, should be None
print(get_single_upload_queue())
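
The locked column is what lets several uploader threads share this queue: a row with locked = 0 is free, and a worker sets it to 1 before touching the files. upload.py below serializes the read-then-lock pair with a threading.Lock inside the process; a sketch of an alternative that claims a row atomically on the database side instead, reusing connect() from above (the name claim_single_upload_queue is hypothetical, and BEGIN IMMEDIATE is what stops two connections from grabbing the same row):

def claim_single_upload_queue():
    # Mark one free row as locked and return it, or None if the queue is empty.
    db = connect()
    try:
        cursor = db.cursor()
        # Take the write lock up front so concurrent workers cannot claim the same row.
        cursor.execute("begin immediate;")
        cursor.execute("select id, video_path, config_path from upload_queue where locked = 0 limit 1;")
        row = cursor.fetchone()
        if row is None:
            db.commit()
            return None
        cursor.execute("update upload_queue set locked = 1 where id = ?;", (row[0],))
        db.commit()
        return {'video_path': row[1], 'config_path': row[2]}
    finally:
        db.close()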
119 changes: 62 additions & 57 deletions src/upload/upload.py
@@ -10,6 +10,11 @@
from src.log.logger import upload_log
import time
import fcntl
from concurrent.futures import ThreadPoolExecutor, as_completed
from db.conn import get_single_upload_queue, delete_upload_queue, update_upload_queue_lock
import threading

read_lock = threading.Lock()

def upload_video(upload_path, yaml_file_path):
try:
@@ -32,12 +37,16 @@ def upload_video(upload_path, yaml_file_path):
if result.returncode == 0:
upload_log.info("Upload successfully, then delete the video")
os.remove(upload_path)
os.remove(yaml_file_path)
delete_upload_queue(upload_path)
else:
upload_log.error("Fail to upload, the files will be reserved.")
update_upload_queue_lock(upload_path, 0)
return False

except subprocess.CalledProcessError as e:
upload_log.error(f"The upload_video called failed, the files will be reserved. error: {e}")
update_upload_queue_lock(upload_path, 0)
return False

def find_bv_number(target_str, my_list):
@@ -48,57 +57,6 @@ def find_bv_number(target_str, my_list):
return parts[1].strip()
return None

def read_append_and_delete_lines(file_path):
try:
while True:
if os.path.getsize(file_path) == 0:
upload_log.info("Empty queue, wait 2 minutes and check again.")
time.sleep(120)
return

with open(file_path, "r+") as file:
fcntl.flock(file, fcntl.LOCK_EX)
lines = file.readlines()
upload_video_path = lines.pop(0).strip()
file.seek(0)
file.writelines(lines)
# Truncate the file to the current position
file.truncate()
# Release the lock
fcntl.flock(file, fcntl.LOCK_UN)

upload_log.info(f"deal with {upload_video_path}")
# check if the live is already uploaded
if upload_video_path.endswith('.flv'):
# upload slice video
yaml_template = generate_slice_yaml_template(upload_video_path)
yaml_file_path = SRC_DIR + "/upload/upload.yaml"
with open(yaml_file_path, 'w', encoding='utf-8') as file:
file.write(yaml_template)
upload_video(upload_video_path, yaml_file_path)
return
else:
query = generate_title(upload_video_path)
result = subprocess.check_output("bilitool" + " list", shell=True)
# print(result.decode("utf-8"), flush=True)
upload_list = result.decode("utf-8").splitlines()
bv_result = find_bv_number(query, upload_list)
if bv_result:
upload_log.info(f"The series of videos has already been uploaded, the BV number is: {bv_result}")
append_upload(upload_video_path, bv_result)
else:
upload_log.info("First upload this live")
# generate the yaml template
yaml_template = generate_yaml_template(upload_video_path)
yaml_file_path = SRC_DIR + "/upload/upload.yaml"
with open(yaml_file_path, 'w', encoding='utf-8') as file:
file.write(yaml_template)
upload_video(upload_video_path, yaml_file_path)
return

except subprocess.CalledProcessError as e:
upload_log.error(f"The read_append_and_delete_lines called failed, the files will be reserved. error: {e}")
return False

def append_upload(upload_path, bv_result):
try:
@@ -120,18 +78,65 @@ def append_upload(upload_path, bv_result):
if result.returncode == 0:
upload_log.info("Upload successfully, then delete the video")
os.remove(upload_path)
delete_upload_queue(upload_path)
else:
upload_log.error("Fail to append, the files will be reserved.")
update_upload_queue_lock(upload_path, 0)
return False

except subprocess.CalledProcessError as e:
upload_log.error(f"The append_upload called failed, the files will be reserved. error: {e}")
update_upload_queue_lock(upload_path, 0)
return False


def read_append_and_delete_lines():
while True:
upload_queue = None
# read the queue and update lock status to prevent other threads from reading the same data
with read_lock:
upload_queue = get_single_upload_queue()
# if there is a task in the queue, try to lock the task
if upload_queue:
video_path, config_path = upload_queue.values()
# lock the task by updating the locked status to 1
update_result = update_upload_queue_lock(video_path, 1)
# if locking fails, log the error and let the next thread handle the task
if not update_result:
upload_log.error(f"Failed to lock task for {video_path}, possibly already locked by another thread or database error.")
upload_queue = None
continue
else:
upload_log.info("Empty queue, wait 2 minutes and check again.")
time.sleep(120)
continue

if upload_queue:
video_path, config_path = upload_queue.values()
upload_log.info(f"deal with {video_path}")
# check if the live is already uploaded
if video_path.endswith('.flv'):
# upload slice video
upload_video(video_path, config_path)
return
else:
query = generate_title(video_path)
result = subprocess.check_output("bilitool" + " list", shell=True)
# print(result.decode("utf-8"), flush=True)
upload_list = result.decode("utf-8").splitlines()
bv_result = find_bv_number(query, upload_list)
if bv_result:
upload_log.info(f"The series of videos has already been uploaded, the BV number is: {bv_result}")
append_upload(video_path, bv_result)
else:
upload_log.info("First upload this live")
upload_video(video_path, config_path)
return
time.sleep(20)


if __name__ == "__main__":
# read the queue and upload the video
queue_path = SRC_DIR + "/upload/uploadVideoQueue.txt"
while True:
read_append_and_delete_lines(queue_path)
upload_log.info("wait for 20 seconds")
time.sleep(20)
# MAX_WORKERS arrives from the environment as a string, so cast it before use.
max_workers = int(os.getenv("MAX_WORKERS", 5))
with ThreadPoolExecutor(max_workers=max_workers) as executor:
future_to_upload = {executor.submit(read_append_and_delete_lines) for _ in range(max_workers)}
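
Taken together, the uploader's bookkeeping is spread across three functions: a worker claims a row (locked 0 -> 1), uploads it, and on success upload_video/append_upload delete the row and the files, while on failure they reset locked to 0 so the task can be retried. A condensed sketch of that claim/upload/release cycle as a single loop, reusing the db.conn helpers; worker_loop and do_upload are hypothetical names:

import time
from db.conn import get_single_upload_queue, update_upload_queue_lock, delete_upload_queue

def worker_loop(do_upload, idle_seconds=120):
    # do_upload(video_path, config_path) returns True on success, False otherwise.
    while True:
        task = get_single_upload_queue()
        if task is None:
            # Empty queue: back off before polling again.
            time.sleep(idle_seconds)
            continue
        video_path, config_path = task['video_path'], task['config_path']
        # Claim the row so no other worker picks it up.
        if not update_upload_queue_lock(video_path, 1):
            continue
        if do_upload(video_path, config_path):
            delete_upload_queue(video_path)
        else:
            # Release the row so the upload can be retried later.
            update_upload_queue_lock(video_path, 0)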