Move DB utility functions to the proper module

Samantaz Fox 2021-12-06 18:03:37 +01:00
parent 914cfbd953
commit 6704ce3214
GPG Key ID: F42821059186176E
4 changed files with 117 additions and 116 deletions


@@ -113,19 +113,19 @@ LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level)
 # Check table integrity
 if CONFIG.check_tables
-  check_enum(PG_DB, "privacy", PlaylistPrivacy)
+  Invidious::Database.check_enum(PG_DB, "privacy", PlaylistPrivacy)
 
-  check_table(PG_DB, "channels", InvidiousChannel)
-  check_table(PG_DB, "channel_videos", ChannelVideo)
-  check_table(PG_DB, "playlists", InvidiousPlaylist)
-  check_table(PG_DB, "playlist_videos", PlaylistVideo)
-  check_table(PG_DB, "nonces", Nonce)
-  check_table(PG_DB, "session_ids", SessionId)
-  check_table(PG_DB, "users", User)
-  check_table(PG_DB, "videos", Video)
+  Invidious::Database.check_table(PG_DB, "channels", InvidiousChannel)
+  Invidious::Database.check_table(PG_DB, "channel_videos", ChannelVideo)
+  Invidious::Database.check_table(PG_DB, "playlists", InvidiousPlaylist)
+  Invidious::Database.check_table(PG_DB, "playlist_videos", PlaylistVideo)
+  Invidious::Database.check_table(PG_DB, "nonces", Nonce)
+  Invidious::Database.check_table(PG_DB, "session_ids", SessionId)
+  Invidious::Database.check_table(PG_DB, "users", User)
+  Invidious::Database.check_table(PG_DB, "videos", Video)
 
   if CONFIG.cache_annotations
-    check_table(PG_DB, "annotations", Annotation)
+    Invidious::Database.check_table(PG_DB, "annotations", Annotation)
   end
 end
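
Note that the only change above is the Invidious::Database prefix; the argument lists are untouched. This works because the new module (added in the next file) declares extend self, which makes its defs callable as module-level methods. A minimal sketch of that pattern in plain Crystal; the module and method names below are illustrative only, not part of Invidious:

module ExampleUtils
  extend self

  # `extend self` exposes this def as ExampleUtils.greet(...)
  def greet(name : String)
    "hello #{name}"
  end
end

puts ExampleUtils.greet("world") # => hello world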


@@ -1,4 +1,110 @@
 require "pg"
 
 module Invidious::Database
+  extend self
+
+  def check_enum(db, enum_name, struct_type = nil)
+    return # TODO
+
+    if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
+      LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
+
+      db.using_connection do |conn|
+        conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
+      end
+    end
+  end
+
+  def check_table(db, table_name, struct_type = nil)
+    # Create table if it doesn't exist
+    begin
+      db.exec("SELECT * FROM #{table_name} LIMIT 0")
+    rescue ex
+      LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
+
+      db.using_connection do |conn|
+        conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
+      end
+    end
+
+    return if !struct_type
+
+    struct_array = struct_type.type_array
+    column_array = get_column_array(db, table_name)
+    column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
+      .try &.["types"].split(",").map(&.strip).reject &.starts_with?("CONSTRAINT")
+
+    return if !column_types
+
+    struct_array.each_with_index do |name, i|
+      if name != column_array[i]?
+        if !column_array[i]?
+          new_column = column_types.select(&.starts_with?(name))[0]
+          LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+          db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+          next
+        end
+
+        # Column doesn't exist
+        if !column_array.includes? name
+          new_column = column_types.select(&.starts_with?(name))[0]
+          db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        end
+
+        # Column exists but in the wrong position, rotate
+        if struct_array.includes? column_array[i]
+          until name == column_array[i]
+            new_column = column_types.select(&.starts_with?(column_array[i]))[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
+
+            # There's a column we didn't expect
+            if !new_column
+              LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+              db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+
+              column_array = get_column_array(db, table_name)
+              next
+            end
+
+            LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+            db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+
+            LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+            db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+
+            LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+            db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+
+            LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+            db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+
+            column_array = get_column_array(db, table_name)
+          end
+        else
+          LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+          db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+        end
+      end
+    end
+
+    return if column_array.size <= struct_array.size
+
+    column_array.each do |column|
+      if !struct_array.includes? column
+        LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+        db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+      end
+    end
+  end
+
+  def get_column_array(db, table_name)
+    column_array = [] of String
+
+    db.query("SELECT * FROM #{table_name} LIMIT 0") do |rs|
+      rs.column_count.times do |i|
+        column = rs.as(PG::ResultSet).field(i)
+        column_array << column.name
+      end
+    end
+
+    return column_array
+  end
 end
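
Everything check_table does hinges on get_column_array above: it runs a zero-row SELECT purely to read the result-set metadata, and check_table then compares that column list with the struct's type_array, adding, rotating, or dropping columns until the two agree. A standalone sketch of the same metadata trick, assuming only the pg shard and a reachable PostgreSQL server; the connection URL and table name are placeholders:

require "pg"

# Placeholders: adjust the URL and table name for a real database.
db = DB.open("postgres://postgres@localhost/invidious")

columns = [] of String
db.query("SELECT * FROM videos LIMIT 0") do |rs|
  rs.column_count.times do |i|
    # Same call chain as Invidious::Database.get_column_array
    columns << rs.as(PG::ResultSet).field(i).name
  end
end

puts columns.join(", ")
db.close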


@@ -60,111 +60,6 @@ def html_to_content(description_html : String)
   return description
 end
 
-def check_enum(db, enum_name, struct_type = nil)
-  return # TODO
-
-  if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
-    LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
-
-    db.using_connection do |conn|
-      conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
-    end
-  end
-end
-
-def check_table(db, table_name, struct_type = nil)
-  # Create table if it doesn't exist
-  begin
-    db.exec("SELECT * FROM #{table_name} LIMIT 0")
-  rescue ex
-    LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
-
-    db.using_connection do |conn|
-      conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
-    end
-  end
-
-  return if !struct_type
-
-  struct_array = struct_type.type_array
-  column_array = get_column_array(db, table_name)
-  column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
-    .try &.["types"].split(",").map(&.strip).reject &.starts_with?("CONSTRAINT")
-
-  return if !column_types
-
-  struct_array.each_with_index do |name, i|
-    if name != column_array[i]?
-      if !column_array[i]?
-        new_column = column_types.select(&.starts_with?(name))[0]
-        LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-        next
-      end
-
-      # Column doesn't exist
-      if !column_array.includes? name
-        new_column = column_types.select(&.starts_with?(name))[0]
-        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-      end
-
-      # Column exists but in the wrong position, rotate
-      if struct_array.includes? column_array[i]
-        until name == column_array[i]
-          new_column = column_types.select(&.starts_with?(column_array[i]))[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
-
-          # There's a column we didn't expect
-          if !new_column
-            LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
-            db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-
-            column_array = get_column_array(db, table_name)
-            next
-          end
-
-          LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-          db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-
-          LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
-          db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
-
-          LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-          db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-
-          LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
-          db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
-
-          column_array = get_column_array(db, table_name)
-        end
-      else
-        LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-        db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-      end
-    end
-  end
-
-  return if column_array.size <= struct_array.size
-
-  column_array.each do |column|
-    if !struct_array.includes? column
-      LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
-      db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
-    end
-  end
-end
-
-def get_column_array(db, table_name)
-  column_array = [] of String
-
-  db.query("SELECT * FROM #{table_name} LIMIT 0") do |rs|
-    rs.column_count.times do |i|
-      column = rs.as(PG::ResultSet).field(i)
-      column_array << column.name
-    end
-  end
-
-  return column_array
-end
-
 def cache_annotation(db, id, annotations)
   if !CONFIG.cache_annotations
     return


@@ -25,7 +25,7 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
           spawn do
            begin
              # Drop outdated views
-              column_array = get_column_array(db, view_name)
+              column_array = Invidious::Database.get_column_array(db, view_name)
              ChannelVideo.type_array.each_with_index do |name, i|
                if name != column_array[i]?
                  LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
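
The only change in this last hunk is again the module prefix on get_column_array. For context, the surrounding loop compares ChannelVideo.type_array with the view's actual column order and drops the materialized view on the first mismatch. A condensed sketch of that comparison with plain placeholder arrays; in the job, expected is ChannelVideo.type_array and actual comes from Invidious::Database.get_column_array(db, view_name):

# Placeholder data for this sketch; real values come from the database.
expected = ["id", "ucid", "title", "published"]
actual   = ["id", "ucid", "published", "title"]

outdated = false
expected.each_with_index do |name, i|
  outdated = true if name != actual[i]?
end

puts "view is outdated, drop and rebuild it" if outdated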