diff --git a/README.md b/README.md index 091d79dc..1f1d383e 100644 --- a/README.md +++ b/README.md @@ -87,13 +87,13 @@ The goal of Fireshare is to provide a very simple and easy way for you to share ![list-view] -

Foldered Sorting

-

Fireshare will use the top most directory that your videos are in as an easy and simple way for you to organize your videos into categories of your choosing.

- -![folders] +

NEW! Automatic Organizing by Games

+

Organize your video clips by game so you know exactly where they are. Fireshare can automatically scan your clips, match them to the games they belong to, and display them with beautiful, relevant cover art.

+Gamestab

Uploading

Allow your community or the public to upload videos. Of course, this feature can be disabled or limited to administrator access only.

+![Uploading Gamestab.png…]() ![uploading] @@ -138,15 +138,6 @@ Connect Fireshare to a central user directory and keep user access organised. - [Python](https://www.python.org/) - [Material UI](https://mui.com/) - - -# Changelog - -## v1.2.13 -``` - Added a catch for finding corrupt or malformed files when initiating a scan -``` - # Installation diff --git a/app/client/package-lock.json b/app/client/package-lock.json index 6bdc228f..9a6584bf 100644 --- a/app/client/package-lock.json +++ b/app/client/package-lock.json @@ -1,12 +1,12 @@ { "name": "fireshare", - "version": "1.3.2", + "version": "1.3.3", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "fireshare", - "version": "1.3.2", + "version": "1.3.3", "dependencies": { "@emotion/react": "^11.9.0", "@emotion/styled": "^11.8.1", diff --git a/app/client/package.json b/app/client/package.json index 42415e6e..662c63f5 100644 --- a/app/client/package.json +++ b/app/client/package.json @@ -1,6 +1,6 @@ { "name": "fireshare", - "version": "1.3.3", + "version": "1.4.0", "private": true, "dependencies": { "@emotion/react": "^11.9.0", diff --git a/app/client/src/components/admin/CompactVideoCard.js b/app/client/src/components/admin/CompactVideoCard.js index 0bced8eb..9a5fa07d 100644 --- a/app/client/src/components/admin/CompactVideoCard.js +++ b/app/client/src/components/admin/CompactVideoCard.js @@ -10,6 +10,7 @@ import VideoService from '../../services/VideoService' import _ from 'lodash' import UpdateDetailsModal from '../modal/UpdateDetailsModal' import LightTooltip from '../misc/LightTooltip' +import GameDetectionCard from '../game/GameDetectionCard' const URL = getUrl() const PURL = getPublicWatchUrl() @@ -36,6 +37,8 @@ const CompactVideoCard = ({ const [privateView, setPrivateView] = React.useState(video.info?.private) const [detailsModalOpen, setDetailsModalOpen] = React.useState(false) + const [gameSuggestion, setGameSuggestion] = React.useState(null) + const [showSuggestion, setShowSuggestion] = React.useState(true) const previousVideoRef = React.useRef() const previousVideo = previousVideoRef.current @@ -51,6 +54,25 @@ const CompactVideoCard = ({ previousVideoRef.current = video }) + React.useEffect(() => { + // Fetch game suggestion when component mounts + VideoService.getGameSuggestion(video.video_id) + .then((response) => { + if (response.data) { + setGameSuggestion(response.data) + setShowSuggestion(true) + } + }) + .catch(() => { + // No suggestion or error - that's fine + }) + }, [video.video_id]) + + const handleSuggestionComplete = () => { + setShowSuggestion(false) + setGameSuggestion(null) + } + const debouncedMouseEnter = React.useRef( _.debounce(() => { setHover(true) @@ -164,9 +186,8 @@ const CompactVideoCard = ({ variant="contained" size="small" sx={{ - width: '100%', + width: cardWidth, background: '#0b132b', - borderRadius: '6px', borderBottomLeftRadius: 0, borderBottomRightRadius: 0, @@ -239,6 +260,7 @@ const CompactVideoCard = ({ + + {/* Game Detection Suggestion Card */} + {authenticated && gameSuggestion && showSuggestion && !editMode && ( + + )} ) diff --git a/app/client/src/components/game/GameDetectionCard.js b/app/client/src/components/game/GameDetectionCard.js new file mode 100644 index 00000000..d2656f2d --- /dev/null +++ b/app/client/src/components/game/GameDetectionCard.js @@ -0,0 +1,181 @@ +import React, { useState } from 'react' +import { Box, IconButton, Typography, Fade } from '@mui/material' +import CheckIcon from '@mui/icons-material/Check' +import CloseIcon from 
'@mui/icons-material/Close' +import SportsEsportsIcon from '@mui/icons-material/SportsEsports' +import VideoService from '../../services/VideoService' +import GameService from '../../services/GameService' + +/** + * GameDetectionCard - Shows automatic game detection suggestions + * Appears below video thumbnails when a game is detected from the filename + */ +export default function GameDetectionCard({ videoId, suggestion, onComplete, cardWidth }) { + const [loading, setLoading] = useState(false) + const [status, setStatus] = useState('pending') // 'pending', 'accepted', 'rejected' + + const handleAccept = async (e) => { + e.stopPropagation() // Prevent triggering video card click + setLoading(true) + + try { + let gameId = suggestion.game_id + + // If game doesn't exist in our DB (came from SteamGridDB), create it first + if (!gameId && suggestion.steamgriddb_id) { + // Reuse the same logic as GameSearch.js + const assets = (await GameService.getGameAssets(suggestion.steamgriddb_id)).data + const gameData = { + steamgriddb_id: suggestion.steamgriddb_id, + name: suggestion.game_name, + hero_url: assets.hero_url, + logo_url: assets.logo_url, + icon_url: assets.icon_url, + } + const createdGame = (await GameService.createGame(gameData)).data + gameId = createdGame.id + } + + // Link video to game using existing service + await GameService.linkVideoToGame(videoId, gameId) + + // Remove the suggestion from cache + await VideoService.rejectGameSuggestion(videoId) + + setStatus('accepted') + // Auto-hide after showing success + setTimeout(() => { + onComplete?.() + }, 2000) + } catch (err) { + console.error('Failed to accept game suggestion:', err) + setLoading(false) + } + } + + const handleReject = async (e) => { + e.stopPropagation() // Prevent triggering video card click + setLoading(true) + + try { + await VideoService.rejectGameSuggestion(videoId) + setStatus('rejected') + // Hide immediately + setTimeout(() => { + onComplete?.() + }, 300) + } catch (err) { + console.error('Failed to reject game suggestion:', err) + setLoading(false) + } + } + + if (status === 'accepted') { + return ( + + + + + Linked to {suggestion.game_name} + + + + ) + } + + if (status === 'rejected') { + return null + } + + return ( + + e.stopPropagation()} // Prevent triggering video card click + sx={{ + display: 'flex', + alignItems: 'center', + justifyContent: 'space-between', + p: 1, + width: cardWidth, + background: '#101c3c', + borderLeft: '1px solid #3399FFAE', + borderRight: '1px solid #3399FFAE', + borderBottom: '1px solid #3399FFAE', + borderBottomLeftRadius: '6px', + borderBottomRightRadius: '6px', + lineHeight: 0, + }} + > + + + + Detected: {suggestion.game_name} + + + + + + + + + + + + + ) +} diff --git a/app/client/src/components/modal/VideoModal.js b/app/client/src/components/modal/VideoModal.js index 261947c7..e62521ac 100644 --- a/app/client/src/components/modal/VideoModal.js +++ b/app/client/src/components/modal/VideoModal.js @@ -94,6 +94,11 @@ const VideoModal = ({ open, onClose, videoId, feedView, authenticated, updateCal } } if (videoId) { + // Reset video state before loading new video to prevent showing old data + setVideo(null) + setTitle('') + setDescription('') + setSelectedGame(null) fetch() } }, [videoId]) @@ -272,12 +277,13 @@ const VideoModal = ({ open, onClose, videoId, feedView, authenticated, updateCal { + const [loading, setLoading] = React.useState(false) + const folderHighlightSx = { color: '#3399FF', fontWeight: 600 } + const countHighlightSx = { color: '#3399FF', fontWeight: 700 } 
+ + if (!suggestion || !folderName) return null + + const handleApply = async () => { + setLoading(true) + try { + const gamesRes = await GameService.getGames() + let game = gamesRes.data.find(g => g.steamgriddb_id === suggestion.steamgriddb_id) + + if (!game) { + const createRes = await GameService.createGame({ + name: suggestion.game_name, + steamgriddb_id: suggestion.steamgriddb_id, + release_date: suggestion.release_date, + }) + game = createRes.data + } + + const linkPromises = suggestion.video_ids.map(videoId => + GameService.linkVideoToGame(videoId, game.id) + ) + await Promise.all(linkPromises) + + await GameService.dismissFolderSuggestion(folderName) + onApplied(folderName, game.name, suggestion.video_ids.length) + } catch (err) { + console.error('Error applying folder suggestion:', err) + } finally { + setLoading(false) + } + } + + const handleDismiss = async () => { + setLoading(true) + try { + await GameService.dismissFolderSuggestion(folderName) + onDismiss() + } catch (err) { + console.error('Error dismissing folder suggestion:', err) + } finally { + setLoading(false) + } + } + + if (open) { + return ( + theme.palette.background.paper, + }} + > + + + Game Detected + + + + Found folder " + + {folderName} + + ". Link{' '} + + {suggestion.video_count} + {' '} + clips to{' '} + + {suggestion.game_name} + + ? + + {loading ? ( + + + + ) : ( + + + + + + + + + )} + + ) + } + + // Collapsed view + return ( + + theme.palette.background.paper, + cursor: 'pointer', + '&:hover': { bgcolor: 'rgba(194, 224, 255, 0.08)' }, + }} + > + + + + ) +} + +export default FolderSuggestionInline diff --git a/app/client/src/components/nav/GameScanStatus.js b/app/client/src/components/nav/GameScanStatus.js new file mode 100644 index 00000000..2910d3e8 --- /dev/null +++ b/app/client/src/components/nav/GameScanStatus.js @@ -0,0 +1,140 @@ +import * as React from 'react' +import Box from '@mui/material/Box' +import Typography from '@mui/material/Typography' +import Tooltip from '@mui/material/Tooltip' +import SyncIcon from '@mui/icons-material/Sync' +import { StatsService } from '../../services' + +const spinAnimation = { + animation: 'spin 1s linear infinite', + '@keyframes spin': { + '0%': { transform: 'rotate(0deg)' }, + '100%': { transform: 'rotate(360deg)' }, + } +} + +const GameScanStatus = ({ open, onComplete }) => { + const [scanStatus, setScanStatus] = React.useState(null) + const [pollKey, setPollKey] = React.useState(0) + + React.useEffect(() => { + const shouldPoll = localStorage.getItem('gameScanInProgress') === 'true' + console.log('[GameScanStatus] shouldPoll:', shouldPoll) + if (!shouldPoll) { + setScanStatus(null) + return + } + + const checkStatus = async () => { + try { + console.log('[GameScanStatus] Checking status...') + const res = await StatsService.getGameScanStatus() + console.log('[GameScanStatus] Status response:', res.data) + if (res.data.is_running) { + setScanStatus(res.data) + } else { + // Scan finished + console.log('[GameScanStatus] Scan finished, calling onComplete') + onComplete?.(res.data) + setScanStatus(null) + localStorage.removeItem('gameScanInProgress') + setPollKey(prev => prev + 1) + } + } catch (e) { + console.error('[GameScanStatus] Error checking status:', e) + } + } + + checkStatus() + const interval = setInterval(checkStatus, 2000) + return () => clearInterval(interval) + }, [pollKey, onComplete]) + + // Listen for localStorage changes from Settings page + React.useEffect(() => { + const handleStorageChange = (e) => { + console.log('[GameScanStatus] Storage 
event:', e.key) + if (e.key === 'gameScanInProgress') { + console.log('[GameScanStatus] gameScanInProgress changed, triggering poll') + setPollKey(prev => prev + 1) + } + } + window.addEventListener('storage', handleStorageChange) + return () => window.removeEventListener('storage', handleStorageChange) + }, []) + + if (!scanStatus) return null + + if (open) { + return ( + + + + {scanStatus.total === 0 ? ( + 'Preparing scan...' + ) : ( + <> + Scanning for games{' '} + + {scanStatus.current}/{scanStatus.total} + + + )} + + + ) + } + + const tooltipText = scanStatus.total === 0 + ? 'Preparing scan...' + : `Scanning: ${scanStatus.current}/${scanStatus.total}` + + return ( + + + + + + ) +} + +export default GameScanStatus diff --git a/app/client/src/components/nav/Navbar20.js b/app/client/src/components/nav/Navbar20.js index 10a4264e..3ae398b7 100644 --- a/app/client/src/components/nav/Navbar20.js +++ b/app/client/src/components/nav/Navbar20.js @@ -43,13 +43,16 @@ import LightTooltip from '../misc/LightTooltip' import SnackbarAlert from '../alert/SnackbarAlert' import { getSetting, setSetting } from '../../common/utils' import SliderWrapper from '../misc/SliderWrapper' +import GameScanStatus from './GameScanStatus' +import FolderSuggestionInline from './FolderSuggestionInline' +import { GameService } from '../../services' const drawerWidth = 240 const minimizedDrawerWidth = 57 const CARD_SIZE_DEFAULT = 375 const CARD_SIZE_MULTIPLIER = 2 -const pages = [ +const allPages = [ { title: 'My Videos', icon: , href: '/', private: true }, { title: 'Public Videos', icon: , href: '/feed', private: false }, { title: 'Games', icon: , href: '/games', private: false }, @@ -147,8 +150,18 @@ function Navbar20({ const [cardSize, setCardSize] = React.useState(getSetting('cardSize') || CARD_SIZE_DEFAULT) const [alert, setAlert] = React.useState({ open: false }) + const [folderSuggestions, setFolderSuggestions] = React.useState({}) + const [currentSuggestionFolder, setCurrentSuggestionFolder] = React.useState(null) const navigate = useNavigate() + const uiConfig = getSetting('ui_config') || {} + const pages = allPages.filter((p) => { + if (p.href === '/' && uiConfig.show_my_videos === false) return false + if (p.href === '/feed' && uiConfig.show_public_videos === false) return false + if (p.href === '/games' && uiConfig.show_games === false) return false + return true + }) + const handleDrawerToggle = () => { setMobileOpen(!mobileOpen) } @@ -203,6 +216,77 @@ function Navbar20({ fetchFolderSize(); }, []); + // Load pending folder suggestions on mount + React.useEffect(() => { + if (!authenticated) return; + + const loadPendingSuggestions = async () => { + try { + const res = await GameService.getFolderSuggestions(); + const suggestions = res.data; + if (Object.keys(suggestions).length > 0) { + setFolderSuggestions(suggestions); + setCurrentSuggestionFolder(Object.keys(suggestions)[0]); + } + } catch (err) { + // Ignore errors + } + }; + + loadPendingSuggestions(); + }, [authenticated]); + + // Game scan complete handler + const handleGameScanComplete = React.useCallback(async (data) => { + console.log('[Navbar20] handleGameScanComplete called with data:', data) + setAlert({ + open: true, + type: 'success', + message: data.total > 0 + ? `Game scan complete! 
Check remaining suggestions in My Videos.` + : 'Game scan complete!', + }); + + await new Promise(resolve => setTimeout(resolve, 500)) + + try { + console.log('[Navbar20] Fetching folder suggestions...') + const res = await GameService.getFolderSuggestions() + const suggestions = res.data + console.log('[Navbar20] Folder suggestions response:', suggestions) + if (Object.keys(suggestions).length > 0) { + console.log('[Navbar20] Setting folder suggestions for:', Object.keys(suggestions)[0]) + setFolderSuggestions(suggestions) + setCurrentSuggestionFolder(Object.keys(suggestions)[0]) + } + } catch (err) { + console.error('[Navbar20] Error fetching folder suggestions:', err) + } + }, []); + + const handleFolderSuggestionApplied = (folderName, gameName, videoCount) => { + setAlert({ + open: true, + type: 'success', + message: `Linked ${videoCount} clips to ${gameName}`, + }) + // Show next suggestion or close + const remaining = { ...folderSuggestions } + delete remaining[folderName] + setFolderSuggestions(remaining) + const nextFolder = Object.keys(remaining)[0] + setCurrentSuggestionFolder(nextFolder || null) + } + + const handleFolderSuggestionClose = () => { + // Show next suggestion or close + const remaining = { ...folderSuggestions } + delete remaining[currentSuggestionFolder] + setFolderSuggestions(remaining) + const nextFolder = Object.keys(remaining)[0] + setCurrentSuggestionFolder(nextFolder || null) + } + const drawer = (
+ + {authenticated && ( @@ -440,10 +532,8 @@ function Navbar20({ }} /> } - - )} - + )} {open ? ( { {/* Link to Game Dialog */} - Link Videos to Game - + Link {selectedVideos.size} Clip{selectedVideos.size !== 1 ? 's' : ''} to Game + {!showAddNewGame ? ( <> - - Select a game to link {selectedVideos.size} video{selectedVideos.size > 1 ? 's' : ''} to: - option.name || ''} @@ -415,7 +412,7 @@ const Dashboard = ({ authenticated, searchText, cardSize, listStyle }) => { setSelectedGame(newValue) } }} - renderInput={(params) => } + renderInput={(params) => } renderOption={(props, option) => ( { ) : ( <> - - Search for a game to add and link {selectedVideos.size} video{selectedVideos.size > 1 ? 's' : ''} to: - diff --git a/app/client/src/views/GameVideos.js b/app/client/src/views/GameVideos.js index b00febbf..e7504977 100644 --- a/app/client/src/views/GameVideos.js +++ b/app/client/src/views/GameVideos.js @@ -7,13 +7,21 @@ import VideoList from '../components/admin/VideoList' import LoadingSpinner from '../components/misc/LoadingSpinner' import SnackbarAlert from '../components/alert/SnackbarAlert' -const GameVideos = ({ cardSize, listStyle, authenticated }) => { +const GameVideos = ({ cardSize, listStyle, authenticated, searchText }) => { const { gameId } = useParams() const [videos, setVideos] = React.useState([]) + const [filteredVideos, setFilteredVideos] = React.useState([]) + const [search, setSearch] = React.useState(searchText) const [game, setGame] = React.useState(null) const [loading, setLoading] = React.useState(true) const [alert, setAlert] = React.useState({ open: false }) + // Filter videos when searchText changes + if (searchText !== search) { + setSearch(searchText) + setFilteredVideos(videos.filter((v) => v.info.title.search(new RegExp(searchText, 'i')) >= 0)) + } + React.useEffect(() => { Promise.all([ GameService.getGames(), @@ -23,6 +31,7 @@ const GameVideos = ({ cardSize, listStyle, authenticated }) => { const foundGame = gamesRes.data.find(g => g.steamgriddb_id === parseInt(gameId)) setGame(foundGame) setVideos(videosRes.data) + setFilteredVideos(videosRes.data) setLoading(false) }) .catch((err) => { @@ -59,13 +68,13 @@ const GameVideos = ({ cardSize, listStyle, authenticated }) => { )} {listStyle === 'list' ? ( ) : ( { - {games.map((game) => { + {[...games] + .sort((a, b) => (a.name || '').localeCompare(b.name || '', undefined, { sensitivity: 'base' })) + .map((game) => { const isHovered = hoveredGame === game.steamgriddb_id const heroTransform = isHovered ? 
`translate(${mousePos.x * -15}px, ${mousePos.y * -15}px) scale(1.1)` diff --git a/app/client/src/views/Settings.js b/app/client/src/views/Settings.js index 5f3cfe48..ddd7e61a 100644 --- a/app/client/src/views/Settings.js +++ b/app/client/src/views/Settings.js @@ -14,6 +14,7 @@ import { import SnackbarAlert from '../components/alert/SnackbarAlert' import SaveIcon from '@mui/icons-material/Save' import SensorsIcon from '@mui/icons-material/Sensors' +import SportsEsportsIcon from '@mui/icons-material/SportsEsports' import VisibilityIcon from '@mui/icons-material/Visibility' import VisibilityOffIcon from '@mui/icons-material/VisibilityOff' import { ConfigService, VideoService } from '../services' @@ -36,7 +37,6 @@ const Settings = ({ authenticated }) => { const [showSteamGridKey, setShowSteamGridKey] = React.useState(false) const isDiscordUsed = discordUrl.trim() !== '' - React.useEffect(() => { async function fetch() { try { @@ -88,6 +88,32 @@ const Settings = ({ authenticated }) => { }) } + const handleScanGames = async () => { + try { + const response = await VideoService.scanGames() + if (response.status === 202) { + // Scan started successfully + localStorage.setItem('gameScanInProgress', 'true') + // Dispatch storage event for same-tab updates + window.dispatchEvent(new StorageEvent('storage', { key: 'gameScanInProgress' })) + } + } catch (err) { + if (err.response?.status === 409) { + setAlert({ + open: true, + type: 'warning', + message: 'A game scan is already in progress.', + }) + } else { + setAlert({ + open: true, + type: 'error', + message: err.response?.data?.error || 'Failed to start game scan', + }) + } + } + } + const checkForWarnings = async () =>{ let warnings = await WarningService.getAdminWarnings() @@ -281,13 +307,13 @@ const Settings = ({ authenticated }) => { + checked={updatedConfig.ui_config?.autoplay || false} + onChange={(e) => setUpdatedConfig((prev) => ({ ...prev, - ui_config: { - ...prev.ui_config, - autoplay: e.target.checked + ui_config: { + ...prev.ui_config, + autoplay: e.target.checked } })) } @@ -296,6 +322,54 @@ const Settings = ({ authenticated }) => { label="Auto Play Videos" /> + + + Sidebar + + + + setUpdatedConfig((prev) => ({ + ...prev, + ui_config: { ...prev.ui_config, show_my_videos: e.target.checked }, + })) + } + /> + } + label="My Videos" + /> + + setUpdatedConfig((prev) => ({ + ...prev, + ui_config: { ...prev.ui_config, show_public_videos: e.target.checked }, + })) + } + /> + } + label="Public Videos" + /> + + setUpdatedConfig((prev) => ({ + ...prev, + ui_config: { ...prev.ui_config, show_games: e.target.checked }, + })) + } + /> + } + label="Games" + /> + Integrations @@ -375,10 +449,13 @@ const Settings = ({ authenticated }) => { - + + @@ -387,4 +464,4 @@ const Settings = ({ authenticated }) => { ) } -export default Settings \ No newline at end of file +export default Settings diff --git a/app/client/src/views/Watch.js b/app/client/src/views/Watch.js index 6f11dba2..805e982b 100644 --- a/app/client/src/views/Watch.js +++ b/app/client/src/views/Watch.js @@ -203,7 +203,7 @@ const Watch = ({ authenticated }) => { /dismiss', methods=['POST']) +@login_required +def dismiss_folder_suggestion(folder_name): + """Dismiss a folder suggestion""" + from fireshare.cli import _load_suggestions, _save_suggestions + + suggestions = _load_suggestions() + folder_suggestions = suggestions.get('_folders', {}) + + if folder_name not in folder_suggestions: + logger.warning(f"Folder suggestion not found: {folder_name}") + return jsonify({'error': 'Folder suggestion 
not found'}), 404 + + video_count = len(folder_suggestions[folder_name].get('video_ids', [])) + del folder_suggestions[folder_name] + suggestions['_folders'] = folder_suggestions + _save_suggestions(suggestions) + + logger.info(f"Dismissed folder suggestion: {folder_name} ({video_count} videos)") + return jsonify({'dismissed': True}) + +@api.route('/api/manual/scan-games') +@login_required +def manual_scan_games(): + """Start game scan in background thread""" + from fireshare import util + from fireshare.cli import save_game_suggestion, _load_suggestions + + # Check if already running + with _game_scan_state['lock']: + if _game_scan_state['is_running']: + return jsonify({'already_running': True}), 409 + _game_scan_state['is_running'] = True + _game_scan_state['current'] = 0 + _game_scan_state['total'] = 0 + _game_scan_state['suggestions_created'] = 0 + + # Get app context for background thread + app = current_app._get_current_object() + + def run_scan(): + with app.app_context(): + try: + steamgriddb_api_key = get_steamgriddb_api_key() + logger.info(f"Starting game scan, API key configured: {bool(steamgriddb_api_key)}") + + # Get all videos + videos = Video.query.join(VideoInfo).all() + logger.info(f"Found {len(videos)} total videos in database") + + # Load existing suggestions and linked videos upfront (single queries) + existing_suggestions = _load_suggestions() + linked_video_ids = {link.video_id for link in VideoGameLink.query.all()} + existing_folder_suggestions = existing_suggestions.get('_folders', {}) + logger.info(f"Existing suggestions: {len(existing_suggestions) - 1 if '_folders' in existing_suggestions else len(existing_suggestions)} individual, {len(existing_folder_suggestions)} folders") + logger.info(f"Already linked videos: {len(linked_video_ids)}") + + # Get all unlinked videos for folder grouping + unlinked_videos = [v for v in videos if v.video_id not in linked_video_ids] + logger.info(f"Unlinked videos for folder grouping: {len(unlinked_videos)}") + + # Videos needing individual suggestions (not linked and no existing suggestion) + videos_needing_suggestions = [ + video for video in unlinked_videos + if video.video_id not in existing_suggestions + ] + logger.info(f"Videos needing individual suggestions: {len(videos_needing_suggestions)}") + + # Set total for progress tracking + _game_scan_state['total'] = len(unlinked_videos) + + # If nothing unlinked, we're done + if not unlinked_videos: + logger.info("Game scan complete: no unlinked videos to process") + return + suggestions_created = 0 + + # Group ALL unlinked videos by folder (not just those without suggestions) + folder_videos = {} + for video in unlinked_videos: + parts = video.path.split('/') + folder = parts[0] if len(parts) > 1 else None + if folder: + if folder not in folder_videos: + folder_videos[folder] = [] + folder_videos[folder].append(video) + + logger.info(f"Grouped videos into {len(folder_videos)} folders") + for folder, vids in folder_videos.items(): + logger.info(f" Folder '{folder}': {len(vids)} videos") + + # Process folder suggestions (folders with 2+ videos) + folder_suggestions = existing_suggestions.get('_folders', {}) + processed_video_ids = set() + + # Skip upload folders + from fireshare.constants import DEFAULT_CONFIG + upload_folders = { + DEFAULT_CONFIG['app_config']['admin_upload_folder_name'].lower(), + DEFAULT_CONFIG['app_config']['public_upload_folder_name'].lower(), + } + + for folder, folder_vids in folder_videos.items(): + # Skip upload folders + if folder.lower() in 
upload_folders: + logger.info(f"Skipping upload folder '{folder}' for game detection") + continue + logger.info(f"Processing folder '{folder}': {len(folder_vids)} videos, already in suggestions: {folder in folder_suggestions}") + if len(folder_vids) >= 2 and folder not in folder_suggestions: + logger.info(f"Attempting game detection for folder: '{folder}'") + detected_game = util.detect_game_from_filename(folder, steamgriddb_api_key, path=f"{folder}/") + + if detected_game: + logger.info(f"Detection result for '{folder}': {detected_game['game_name']} (confidence: {detected_game['confidence']:.2f})") + else: + logger.info(f"No game detected for folder '{folder}'") + + if detected_game and detected_game['confidence'] >= 0.65: + video_ids = [v.video_id for v in folder_vids] + folder_suggestions[folder] = { + 'game_name': detected_game['game_name'], + 'steamgriddb_id': detected_game.get('steamgriddb_id'), + 'game_id': detected_game.get('game_id'), + 'confidence': detected_game['confidence'], + 'source': detected_game['source'], + 'video_ids': video_ids, + 'video_count': len(video_ids) + } + processed_video_ids.update(video_ids) + suggestions_created += 1 + _game_scan_state['suggestions_created'] = suggestions_created + logger.info(f"Created folder suggestion: {folder} -> {detected_game['game_name']} ({len(video_ids)} videos)") + elif detected_game: + logger.info(f"Skipping folder '{folder}' - confidence {detected_game['confidence']:.2f} below threshold 0.65") + + # Save folder suggestions + if folder_suggestions: + existing_suggestions['_folders'] = folder_suggestions + from fireshare.cli import _save_suggestions + _save_suggestions(existing_suggestions) + + # Process remaining individual videos (not in folder suggestions and no existing suggestion) + for i, video in enumerate(videos_needing_suggestions): + _game_scan_state['current'] = i + 1 + + if video.video_id in processed_video_ids: + continue + + filename = Path(video.path).stem + detected_game = util.detect_game_from_filename(filename, steamgriddb_api_key, path=video.path) + + if detected_game and detected_game['confidence'] >= 0.65: + save_game_suggestion(video.video_id, detected_game) + suggestions_created += 1 + _game_scan_state['suggestions_created'] = suggestions_created + logger.info(f"Created game suggestion for video {video.video_id}: {detected_game['game_name']} (confidence: {detected_game['confidence']:.2f}, source: {detected_game['source']})") + + logger.info(f"Game scan complete: {suggestions_created} suggestions created from {len(unlinked_videos)} unlinked videos") + + except Exception as e: + logger.error(f"Error scanning videos for games: {e}") + finally: + # Brief delay so frontend can display the completed status before hiding + import time + time.sleep(2) + _game_scan_state['is_running'] = False + + thread = threading.Thread(target=run_scan) + thread.daemon = True + thread.start() + + return jsonify({'started': True}), 202 + @api.route('/api/videos') @login_required def get_videos(): @@ -291,10 +615,13 @@ def get_video_poster(): video_id = request.args['id'] webm_poster_path = Path(current_app.config["PROCESSED_DIRECTORY"], "derived", video_id, "boomerang-preview.webm") jpg_poster_path = Path(current_app.config["PROCESSED_DIRECTORY"], "derived", video_id, "poster.jpg") + if request.args.get('animated'): - return send_file(webm_poster_path, mimetype='video/webm') + response = send_file(webm_poster_path, mimetype='video/webm') else: - return send_file(jpg_poster_path, mimetype='image/jpg') + response = 
send_file(jpg_poster_path, mimetype='image/jpg') + + return add_cache_headers(response, video_id) @api.route('/api/video/view', methods=['POST']) def add_video_view(): @@ -631,7 +958,7 @@ def get_games(): # If user is authenticated, show all games if current_user.is_authenticated: - games = GameMetadata.query.all() + games = GameMetadata.query.order_by(GameMetadata.name).all() else: # For public users, only show games that have at least one public (available) video games = ( @@ -644,6 +971,7 @@ def get_games(): VideoInfo.private.is_(False), ) .distinct() + .order_by(GameMetadata.name) .all() ) @@ -660,6 +988,20 @@ def create_game(): if not data.get('steamgriddb_id'): return Response(status=400, response='SteamGridDB ID is required.') + existing_game = GameMetadata.query.filter_by(steamgriddb_id=data['steamgriddb_id']).first() + if existing_game: + updated = False + if data.get('name') and data['name'] != existing_game.name: + existing_game.name = data['name'] + updated = True + if data.get('release_date') and data.get('release_date') != existing_game.release_date: + existing_game.release_date = data['release_date'] + updated = True + if updated: + existing_game.updated_at = datetime.utcnow() + db.session.commit() + return jsonify(existing_game.json()), 200 + # Get API key and initialize client api_key = get_steamgriddb_api_key() if not api_key: @@ -681,6 +1023,12 @@ def create_game(): response=f"Failed to download game assets: {result['error']}" ) + # Re-check for existing game after asset download (handles race condition) + existing_game = GameMetadata.query.filter_by(steamgriddb_id=data['steamgriddb_id']).first() + if existing_game: + current_app.logger.info(f"Game {data['name']} was created by another request, returning existing") + return jsonify(existing_game.json()), 200 + # Create game metadata (without URL fields - they will be constructed dynamically) game = GameMetadata( steamgriddb_id=data['steamgriddb_id'], @@ -811,7 +1159,8 @@ def get_game_asset(steamgriddb_id, filename): } mime_type = mime_types.get(ext, 'image/png') - return send_file(asset_path, mimetype=mime_type) + response = send_file(asset_path, mimetype=mime_type) + return add_cache_headers(response, f"{steamgriddb_id}-{filename}") @api.route('/api/games//videos', methods=["GET"]) def get_game_videos(steamgriddb_id): @@ -823,6 +1172,9 @@ def get_game_videos(steamgriddb_id): videos_json = [] for link in game.videos: + if not link.video: + continue + if not current_user.is_authenticated: # Only show available, non-private videos to public users if not link.video.available: @@ -859,6 +1211,10 @@ def delete_game(steamgriddb_id): paths = current_app.config['PATHS'] for link in video_links: video = link.video + if video is None: + # Orphaned link - just delete it + db.session.delete(link) + continue logger.info(f"Deleting video: {video.video_id}") file_path = paths['video'] / video.path @@ -866,6 +1222,8 @@ def delete_game(steamgriddb_id): derived_path = paths['processed'] / 'derived' / video.video_id # Delete from database + VideoGameLink.query.filter_by(video_id=video.video_id).delete() + VideoView.query.filter_by(video_id=video.video_id).delete() VideoInfo.query.filter_by(video_id=video.video_id).delete() Video.query.filter_by(video_id=video.video_id).delete() @@ -904,6 +1262,93 @@ def delete_game(steamgriddb_id): logger.info(f"Successfully deleted game {game.name}") return Response(status=200) +@api.route('/api/videos//game/suggestion', methods=["GET"]) +def get_video_game_suggestion(video_id): + """Get automatic game 
detection suggestion for a video""" + from fireshare.cli import get_game_suggestion + + # Check if video is already linked to a game + existing_link = VideoGameLink.query.filter_by(video_id=video_id).first() + if existing_link: + return jsonify(None) + + suggestion = get_game_suggestion(video_id) + if not suggestion: + return jsonify(None) + + return jsonify({ + 'video_id': video_id, + 'game_id': suggestion.get('game_id'), + 'game_name': suggestion.get('game_name'), + 'steamgriddb_id': suggestion.get('steamgriddb_id'), + 'confidence': suggestion.get('confidence'), + 'source': suggestion.get('source') + }) + +@api.route('/api/videos//game/suggestion', methods=["DELETE"]) +@login_required +def reject_game_suggestion(video_id): + """User rejected the game suggestion - remove from storage""" + from fireshare.cli import delete_game_suggestion + + if delete_game_suggestion(video_id): + logger.info(f"User rejected game suggestion for video {video_id}") + + return Response(status=204) + +@api.route('/api/videos/corrupt', methods=["GET"]) +@login_required +def get_corrupt_videos(): + """Get a list of all videos marked as corrupt""" + from fireshare.cli import get_all_corrupt_videos + + corrupt_video_ids = get_all_corrupt_videos() + + # Get video details for all corrupt videos in a single query + video_info_map = {} + if corrupt_video_ids: + video_infos = VideoInfo.query.filter(VideoInfo.video_id.in_(corrupt_video_ids)).all() + video_info_map = {vi.video_id: vi for vi in video_infos} + + corrupt_videos = [] + for video_id in corrupt_video_ids: + vi = video_info_map.get(video_id) + if vi: + corrupt_videos.append({ + 'video_id': video_id, + 'title': vi.title, + 'path': vi.video.path if vi.video else None + }) + else: + # Video may have been deleted but still in corrupt list + corrupt_videos.append({ + 'video_id': video_id, + 'title': None, + 'path': None + }) + return jsonify(corrupt_videos) + +@api.route('/api/videos//corrupt', methods=["DELETE"]) +@login_required +def clear_corrupt_status(video_id): + """Clear the corrupt status for a specific video so it can be retried""" + from fireshare.cli import clear_video_corrupt, is_video_corrupt + + if not is_video_corrupt(video_id): + return Response(status=400, response="Video is not marked as corrupt") + + clear_video_corrupt(video_id) + return Response(status=204) + +@api.route('/api/videos/corrupt/clear-all', methods=["DELETE"]) +@login_required +def clear_all_corrupt_status(): + """Clear the corrupt status for all videos so they can be retried""" + from fireshare.cli import clear_all_corrupt_videos + + count = clear_all_corrupt_videos() + return jsonify({'cleared': count}) + @api.after_request def after_request(response): response.headers.add('Accept-Ranges', 'bytes') diff --git a/app/server/fireshare/cli.py b/app/server/fireshare/cli.py index ac46efbb..f057af31 100755 --- a/app/server/fireshare/cli.py +++ b/app/server/fireshare/cli.py @@ -15,6 +15,116 @@ from .constants import SUPPORTED_FILE_EXTENSIONS +# Helper functions for persistent game suggestions storage +def _get_suggestions_file(): + """Get path to the suggestions JSON file""" + from flask import current_app + data_dir = Path(current_app.config.get('DATA_DIRECTORY', '/data')) + return data_dir / 'game_suggestions.json' + +def _load_suggestions(): + """Load suggestions from JSON file""" + suggestions_file = _get_suggestions_file() + if suggestions_file.exists(): + try: + with open(suggestions_file, 'r') as f: + return json.load(f) + except (json.JSONDecodeError, IOError): + return {} + return 
{} + +def _save_suggestions(suggestions): + """Save suggestions to JSON file""" + suggestions_file = _get_suggestions_file() + suggestions_file.parent.mkdir(parents=True, exist_ok=True) + try: + with open(suggestions_file, 'w') as f: + json.dump(suggestions, f) + except IOError as e: + logger.error(f"Failed to save game suggestions: {e}") + +def get_game_suggestion(video_id): + """Get a game suggestion for a video""" + suggestions = _load_suggestions() + return suggestions.get(video_id) + +def save_game_suggestion(video_id, suggestion): + """Save a game suggestion for a video""" + suggestions = _load_suggestions() + suggestions[video_id] = suggestion + _save_suggestions(suggestions) + +def delete_game_suggestion(video_id): + """Delete a game suggestion for a video""" + suggestions = _load_suggestions() + if video_id in suggestions: + del suggestions[video_id] + _save_suggestions(suggestions) + return True + return False + +# Helper functions for persistent corrupt video tracking +def _get_corrupt_videos_file(): + """Get path to the corrupt videos JSON file""" + from flask import current_app + data_dir = Path(current_app.config.get('DATA_DIRECTORY', '/data')) + return data_dir / 'corrupt_videos.json' + +def _load_corrupt_videos(): + """Load corrupt videos list from JSON file""" + corrupt_file = _get_corrupt_videos_file() + if corrupt_file.exists(): + try: + with open(corrupt_file, 'r') as f: + return json.load(f) + except (json.JSONDecodeError, IOError): + return [] + return [] + +def _save_corrupt_videos(corrupt_list): + """Save corrupt videos list to JSON file""" + corrupt_file = _get_corrupt_videos_file() + corrupt_file.parent.mkdir(parents=True, exist_ok=True) + try: + with open(corrupt_file, 'w') as f: + json.dump(corrupt_list, f) + except IOError as e: + logger.error(f"Failed to save corrupt videos list: {e}") + +def is_video_corrupt(video_id): + """Check if a video is marked as corrupt""" + corrupt_list = _load_corrupt_videos() + return video_id in corrupt_list + +def mark_video_corrupt(video_id): + """Mark a video as corrupt""" + corrupt_list = _load_corrupt_videos() + if video_id not in corrupt_list: + corrupt_list.append(video_id) + _save_corrupt_videos(corrupt_list) + logger.info(f"Marked video {video_id} as corrupt") + +def clear_video_corrupt(video_id): + """Clear the corrupt status for a video""" + corrupt_list = _load_corrupt_videos() + if video_id in corrupt_list: + corrupt_list.remove(video_id) + _save_corrupt_videos(corrupt_list) + logger.info(f"Cleared corrupt status for video {video_id}") + return True + return False + +def get_all_corrupt_videos(): + """Get list of all corrupt video IDs""" + return _load_corrupt_videos() + +def clear_all_corrupt_videos(): + """Clear all corrupt video statuses""" + count = len(_load_corrupt_videos()) + _save_corrupt_videos([]) + logger.info(f"Cleared corrupt status for {count} video(s)") + return count + def send_discord_webhook(webhook_url=None, video_url=None): payload = { "content": video_url, @@ -160,6 +270,25 @@ def scan_videos(root): video_url = get_public_watch_url(nv.video_id, config, domain) send_discord_webhook(webhook_url=discord_webhook_url, video_url=video_url) + # Automatic game detection for new videos + steamgriddb_api_key = config.get("integrations", {}).get("steamgriddb_api_key") + if new_videos: + logger.info(f"Running game detection for {len(new_videos)} new video(s)...") + for nv in new_videos: + filename = Path(nv.path).stem + logger.info(f"[Game Detection] Video: {nv.video_id}, Path: {nv.path}, Filename: 
{filename}") + detected_game = util.detect_game_from_filename(filename, steamgriddb_api_key, path=nv.path) + + if detected_game: + logger.info(f"[Game Detection] Result: {detected_game['game_name']} (confidence: {detected_game['confidence']:.2f}, source: {detected_game['source']})") + if detected_game['confidence'] >= 0.65: + save_game_suggestion(nv.video_id, detected_game) + logger.info(f"[Game Detection] Saved suggestion for {nv.video_id}") + else: + logger.info(f"[Game Detection] Confidence too low, skipping suggestion") + else: + logger.info(f"[Game Detection] No match found for {nv.video_id}") + existing_videos = Video.query.filter_by(available=True).all() logger.info(f"Verifying {len(existing_videos):,} video files still exist...") for ev in existing_videos: @@ -249,6 +378,18 @@ def scan_video(ctx, path): db.session.add(info) db.session.commit() + # Automatic game detection + logger.info("Attempting automatic game detection...") + steamgriddb_api_key = config.get("integrations", {}).get("steamgriddb_api_key") + filename = Path(v.path).stem + detected_game = util.detect_game_from_filename(filename, steamgriddb_api_key, path=v.path) + + if detected_game and detected_game['confidence'] >= 0.65: + save_game_suggestion(v.video_id, detected_game) + logger.info(f"Created game suggestion for video {v.video_id}: {detected_game['game_name']} (confidence: {detected_game['confidence']:.2f}, source: {detected_game['source']})") + else: + logger.info(f"No confident game match found for video {v.video_id}") + logger.info("Syncing metadata") ctx.invoke(sync_metadata, video=video_id) info = VideoInfo.query.filter(VideoInfo.video_id==video_id).one() @@ -434,7 +575,8 @@ def create_boomerang_posters(regenerate): @cli.command() @click.option("--regenerate", "-r", help="Overwrite existing transcoded videos", is_flag=True) @click.option("--video", "-v", help="Transcode a specific video by id", default=None) -def transcode_videos(regenerate, video): +@click.option("--include-corrupt", help="Include videos previously marked as corrupt", is_flag=True) +def transcode_videos(regenerate, video, include_corrupt): """Transcode videos to 1080p and 720p variants""" with create_app().app_context(): if not current_app.config.get('ENABLE_TRANSCODING'): @@ -447,6 +589,16 @@ def transcode_videos(regenerate, video): # Get videos to transcode vinfos = VideoInfo.query.filter(VideoInfo.video_id==video).all() if video else VideoInfo.query.all() + + # Filter out corrupt videos unless explicitly included + corrupt_videos = set(get_all_corrupt_videos()) + if not include_corrupt and not video: + original_count = len(vinfos) + vinfos = [vi for vi in vinfos if vi.video_id not in corrupt_videos] + skipped_count = original_count - len(vinfos) + if skipped_count > 0: + logger.info(f"Skipping {skipped_count} video(s) previously marked as corrupt. 
Use --include-corrupt to retry them.") + logger.info(f'Processing {len(vinfos):,} videos for transcoding (GPU: {use_gpu}, Base timeout: {base_timeout}s)') for vi in vinfos: @@ -462,18 +614,25 @@ def transcode_videos(regenerate, video): # Determine which qualities to transcode original_height = vi.height or 0 + video_is_corrupt = False # Transcode to 1080p if original is higher and 1080p doesn't exist transcode_1080p_path = derived_path / f"{vi.video_id}-1080p.mp4" if original_height > 1080 and (not transcode_1080p_path.exists() or regenerate): logger.info(f"Transcoding {vi.video_id} to 1080p") - # Pass None for timeout to use smart calculation, or pass base_timeout if needed timeout = None # Uses smart calculation based on video duration - success = util.transcode_video_quality(video_path, transcode_1080p_path, 1080, use_gpu, timeout) + success, failure_reason = util.transcode_video_quality(video_path, transcode_1080p_path, 1080, use_gpu, timeout) if success: vi.has_1080p = True + # Clear corrupt status if transcode succeeds (file may have been replaced) + if is_video_corrupt(vi.video_id): + clear_video_corrupt(vi.video_id) db.session.add(vi) db.session.commit() + elif failure_reason == 'corruption': + logger.warning(f"Skipping video {vi.video_id} 1080p transcode - source file appears corrupt") + mark_video_corrupt(vi.video_id) + video_is_corrupt = True else: logger.warning(f"Skipping video {vi.video_id} 1080p transcode - all encoders failed") elif transcode_1080p_path.exists(): @@ -482,17 +641,26 @@ def transcode_videos(regenerate, video): db.session.add(vi) db.session.commit() + # Skip 720p transcode if video was marked as corrupt + if video_is_corrupt: + continue + # Transcode to 720p if original is higher than 720p and 720p doesn't exist transcode_720p_path = derived_path / f"{vi.video_id}-720p.mp4" if original_height > 720 and (not transcode_720p_path.exists() or regenerate): logger.info(f"Transcoding {vi.video_id} to 720p") - # Pass None for timeout to use smart calculation, or pass base_timeout if needed timeout = None # Uses smart calculation based on video duration - success = util.transcode_video_quality(video_path, transcode_720p_path, 720, use_gpu, timeout) + success, failure_reason = util.transcode_video_quality(video_path, transcode_720p_path, 720, use_gpu, timeout) if success: vi.has_720p = True + # Clear corrupt status if transcode succeeds (file may have been replaced) + if is_video_corrupt(vi.video_id): + clear_video_corrupt(vi.video_id) db.session.add(vi) db.session.commit() + elif failure_reason == 'corruption': + logger.warning(f"Skipping video {vi.video_id} 720p transcode - source file appears corrupt") + mark_video_corrupt(vi.video_id) else: logger.warning(f"Skipping video {vi.video_id} 720p transcode - all encoders failed") elif transcode_720p_path.exists(): diff --git a/app/server/fireshare/schedule.py b/app/server/fireshare/schedule.py index 7e19ae37..9a9e429a 100644 --- a/app/server/fireshare/schedule.py +++ b/app/server/fireshare/schedule.py @@ -1,6 +1,6 @@ -from tabnanny import check from apscheduler.schedulers.background import BackgroundScheduler from apscheduler.jobstores.sqlalchemy import SQLAlchemyJobStore +from sqlalchemy.pool import NullPool import logging from subprocess import Popen @@ -15,6 +15,17 @@ def fireshare_scan(): def init_schedule(dburl, mins_between_scan=5): if mins_between_scan > 0: logger.info(f'Initializing scheduled video scan. 
minutes={mins_between_scan}') - scheduler = BackgroundScheduler(jobstores={'default': SQLAlchemyJobStore(url=dburl)}) + # Configure SQLite connection for better concurrency handling + # NullPool disables connection pooling - prevents stale connections and lock issues + engine_options = { + 'poolclass': NullPool, + 'connect_args': { + 'timeout': 30, + 'check_same_thread': False, + }, + } + scheduler = BackgroundScheduler( + jobstores={'default': SQLAlchemyJobStore(url=dburl, engine_options=engine_options)} + ) scheduler.add_job(fireshare_scan, 'interval', minutes=mins_between_scan, id='fireshare_scan', replace_existing=True) scheduler.start() diff --git a/app/server/fireshare/util.py b/app/server/fireshare/util.py index d956cb49..19d9d162 100644 --- a/app/server/fireshare/util.py +++ b/app/server/fireshare/util.py @@ -6,6 +6,44 @@ from fireshare import logger import time import glob +import shutil + +# Corruption indicators to detect during video validation +# These are ffmpeg error messages that indicate file corruption +VIDEO_CORRUPTION_INDICATORS = [ + "Corrupt frame detected", + "No sequence header", + "Error submitting packet to decoder", + "Invalid data found when processing input", + "Decode error rate", + "moov atom not found", + "Invalid NAL unit size", + "non-existing PPS", + "Could not find codec parameters", +] + +# Corruption indicators that are known false positives for AV1 files +# These warnings can occur during initial frame decoding of valid AV1 files +# and should be ignored if the decode test succeeds (returncode 0) +# Note: Values are lowercase for consistent case-insensitive matching +AV1_FALSE_POSITIVE_INDICATORS = frozenset([ + "corrupt frame detected", + "no sequence header", + "error submitting packet to decoder", + "decode error rate", + "invalid nal unit size", + "non-existing pps", +]) + +# Known AV1 codec names as reported by ffprobe (lowercase for matching) +# These are used to detect AV1-encoded source files for special handling +AV1_CODEC_NAMES = frozenset([ + 'av1', + 'libaom-av1', + 'libsvtav1', + 'av1_nvenc', + 'av1_qsv', +]) def lock_exists(path: Path): """ @@ -71,6 +109,138 @@ def get_video_duration(path): logger.debug(f'Could not extract video duration: {ex}') return None +def validate_video_file(path, timeout=30): + """ + Validate that a video file is not corrupt and can be decoded. + + This function performs a quick decode test on the first few seconds of the video + to detect corruption issues like missing sequence headers, corrupt frames, etc. + + For AV1 files, validation is more lenient as some AV1 encoders produce files + that generate warnings during initial frame decoding but play back correctly. 
+ + Args: + path: Path to the video file + timeout: Maximum time in seconds to wait for validation (default: 30) + + Returns: + tuple: (is_valid: bool, error_message: str or None) + - (True, None) if the video is valid + - (False, error_message) if the video is corrupt or unreadable + """ + # Check if ffprobe and ffmpeg are available using shutil.which + if not shutil.which('ffprobe'): + return False, "ffprobe command not found - ensure ffmpeg is installed" + if not shutil.which('ffmpeg'): + return False, "ffmpeg command not found - ensure ffmpeg is installed" + + try: + # First, check if ffprobe can read the stream information + probe_cmd = [ + 'ffprobe', '-v', 'error', '-select_streams', 'v:0', + '-show_entries', 'stream=codec_name,width,height', + '-of', 'json', str(path) + ] + logger.debug(f"Validating video file: {' '.join(probe_cmd)}") + + probe_result = sp.run(probe_cmd, capture_output=True, text=True, timeout=timeout) + + if probe_result.returncode != 0: + error_msg = probe_result.stderr.strip() if probe_result.stderr else "Unknown error reading video metadata" + return False, f"ffprobe failed: {error_msg}" + + # Check if we got valid stream data + # Note: -select_streams v:0 in probe_cmd ensures only video streams are returned + try: + probe_data = json.loads(probe_result.stdout) + streams = probe_data.get('streams', []) + if not streams: + return False, "No video streams found in file" + except json.JSONDecodeError: + return False, "Failed to parse video metadata" + + # Get the codec name from the video stream + # Safe to access streams[0] because we checked for empty streams above + video_stream = streams[0] + codec_name = video_stream.get('codec_name', '').lower() + + # Detect if the source file is AV1-encoded + # AV1 files may produce false positive corruption warnings during initial frame decoding + is_av1_source = codec_name in AV1_CODEC_NAMES + + # Now perform a quick decode test by decoding the first 2 seconds + # This catches issues like "No sequence header" or "Corrupt frame detected" + decode_cmd = [ + 'ffmpeg', '-v', 'error', '-t', '2', + '-i', str(path), '-f', 'null', '-' + ] + logger.debug(f"Decode test: {' '.join(decode_cmd)}") + + decode_result = sp.run(decode_cmd, capture_output=True, text=True, timeout=timeout) + + # Check for decode errors - only treat as error if return code is non-zero + # or if stderr contains known corruption indicators + stderr = decode_result.stderr.strip() if decode_result.stderr else "" + stderr_lower = stderr.lower() + + # For AV1 files, be more lenient about certain error messages + # Some AV1 encoders produce files that generate warnings/errors during initial + # frame decoding (e.g., "Corrupt frame detected", "No sequence header") but + # play back correctly. This is especially common with files that use temporal + # scalability or have non-standard sequence header placement. 
+ if is_av1_source: + # Check if the only errors are known false positives for AV1 + found_real_error = False + found_false_positive = False + + for indicator in VIDEO_CORRUPTION_INDICATORS: + indicator_lower = indicator.lower() + if indicator_lower in stderr_lower: + if indicator_lower in AV1_FALSE_POSITIVE_INDICATORS: + found_false_positive = True + else: + found_real_error = True + # Found a real error, fail immediately + return False, f"Video file appears to be corrupt: {indicator}" + + # If we only found false positives (no real errors), the file is valid + if found_false_positive and not found_real_error: + logger.debug(f"AV1 file had known false positive warnings during validation (ignoring): {stderr[:200]}") + return True, None + + # If returncode is non-zero, fail (either with stderr message or generic failure) + if decode_result.returncode != 0: + if stderr: + return False, f"Decode test failed: {stderr[:200]}" + else: + return False, "Decode test failed with no error message" + + return True, None + else: + # For non-AV1 files, use strict validation + if decode_result.returncode != 0: + # Decode failed - check for specific corruption indicators + for indicator in VIDEO_CORRUPTION_INDICATORS: + if indicator.lower() in stderr_lower: + return False, f"Video file appears to be corrupt: {indicator}" + # Generic decode failure + return False, f"Decode test failed: {stderr[:200] if stderr else 'Unknown error'}" + + # Return code is 0 (success), but check for corruption indicators in warnings + for indicator in VIDEO_CORRUPTION_INDICATORS: + if indicator.lower() in stderr_lower: + return False, f"Video file appears to be corrupt: {indicator}" + + return True, None + + except sp.TimeoutExpired: + return False, f"Validation timed out after {timeout} seconds" + except FileNotFoundError: + return False, "Video file not found" + except Exception as ex: + return False, f"Validation error: {str(ex)}" + + def calculate_transcode_timeout(video_path, base_timeout=7200): """ Calculate a smart timeout for video transcoding based on video duration. 
@@ -336,11 +506,22 @@ def transcode_video_quality(video_path, out_path, height, use_gpu=False, timeout
         timeout_seconds: Maximum time allowed for encoding (default: calculated based on video duration)
 
     Returns:
-        bool: True if transcoding succeeded, False if all encoders failed
+        tuple: (success: bool, failure_reason: str or None)
+            - (True, None) if transcoding succeeded
+            - (False, 'corruption') if source file appears corrupt
+            - (False, 'encoders') if all encoders failed
     """
     global _working_encoder_cache
     s = time.time()
 
+    # Validate the source video file before attempting transcoding
+    # This catches corrupt files early instead of trying all encoders
+    is_valid, error_msg = validate_video_file(video_path)
+    if not is_valid:
+        logger.error(f"Source video validation failed: {error_msg}")
+        logger.warning("Skipping transcoding for this video due to file corruption or read errors")
+        return (False, 'corruption')
+
     # Calculate smart timeout based on video duration if not provided
     if timeout_seconds is None:
         timeout_seconds = calculate_transcode_timeout(video_path)
@@ -368,7 +549,7 @@ def transcode_video_quality(video_path, out_path, height, use_gpu=False, timeout
             if result.returncode == 0:
                 e = time.time()
                 logger.info(f'Transcoded {str(out_path)} to {height}p in {e-s:.2f}s')
-                return True
+                return (True, None)
             else:
                 # Cached encoder failed - clear cache and fall through to try all encoders
                 logger.warning(f"Cached encoder {encoder['name']} failed with exit code {result.returncode}")
@@ -498,7 +679,7 @@ def transcode_video_quality(video_path, out_path, height, use_gpu=False, timeout
                 _working_encoder_cache[mode] = encoder
                 e = time.time()
                 logger.info(f'Transcoded {str(out_path)} to {height}p in {e-s:.2f}s')
-                return True
+                return (True, None)
             else:
                 logger.warning(f"✗ {encoder['name']} failed with exit code {result.returncode}")
                 last_exception = Exception(f"Transcode failed with exit code {result.returncode}")
@@ -536,9 +717,9 @@ def transcode_video_quality(video_path, out_path, height, use_gpu=False, timeout
     if last_exception:
         logger.error(f"Last error was: {last_exception}")
 
-    # Return False to indicate failure instead of raising exception
+    # Return failure with 'encoders' reason to indicate encoder failure (not corruption)
     # This allows the calling code to continue processing other videos
-    return False
+    return (False, 'encoders')
 
 def create_boomerang_preview(video_path, out_path, clip_duration=1.5):
     # https://stackoverflow.com/questions/65874316/trim-a-video-and-add-the-boomerang-effect-on-it-with-ffmpeg
@@ -582,4 +763,155 @@ def seconds_to_dur_string(sec):
     if hours:
         return ':'.join([str(hours), str(mins).zfill(2), str(s).zfill(2)])
     else:
-        return ':'.join([str(mins), str(s).zfill(2)])
\ No newline at end of file
+        return ':'.join([str(mins), str(s).zfill(2)])
+
+def detect_game_from_filename(filename: str, steamgriddb_api_key: str = None, path: str = None):
+    """
+    Fuzzy match a video filename against existing games in database using RapidFuzz.
+    Falls back to SteamGridDB search if no local match found.
+
+    Args:
+        filename: Video filename without extension
+        steamgriddb_api_key: Optional API key for SteamGridDB fallback
+        path: Optional relative path (e.g. "Game Name/clip.mp4") - folder name is tried first
+
+    Returns:
+        dict with 'game_id', 'game_name', 'steamgriddb_id', 'confidence', 'source' or None
+    """
+    from rapidfuzz import fuzz, process
+    from fireshare.models import GameMetadata
+    import re
+
+    # Step 0: Try folder name first (highest confidence source)
+    # Skip folder-based detection for upload folders (they're not game names)
+    from fireshare.constants import DEFAULT_CONFIG
+    upload_folders = {
+        DEFAULT_CONFIG['app_config']['admin_upload_folder_name'].lower(),
+        DEFAULT_CONFIG['app_config']['public_upload_folder_name'].lower(),
+    }
+
+    if path:
+        parts = path.split('/')
+        if len(parts) > 1:  # Has at least one folder
+            folder_name = parts[0]  # Top-level folder
+
+            # Skip folder-based detection for upload folders
+            if folder_name.lower() not in upload_folders:
+                # Try matching folder name against local game database
+                games = GameMetadata.query.all()
+                if games:
+                    game_choices = [(game.name, game) for game in games]
+                    result = process.extractOne(
+                        folder_name,
+                        game_choices,
+                        scorer=fuzz.token_set_ratio,
+                        score_cutoff=80  # Higher threshold for folder match
+                    )
+
+                    if result:
+                        matched_name, score, matched_game = result[0], result[1], result[2]
+                        best_match = {
+                            'game_id': matched_game.id,
+                            'game_name': matched_game.name,
+                            'steamgriddb_id': matched_game.steamgriddb_id,
+                            'confidence': score / 100,
+                            'source': 'folder_local'
+                        }
+                        logger.info(f"Folder-based game match: {best_match['game_name']} (confidence: {score:.0f}%)")
+                        return best_match
+
+                # Try SteamGridDB with folder name
+                if steamgriddb_api_key:
+                    logger.info(f"No local folder match, searching SteamGridDB for folder: '{folder_name}'")
+                    from fireshare.steamgrid import SteamGridDBClient
+                    client = SteamGridDBClient(steamgriddb_api_key)
+
+                    try:
+                        results = client.search_games(folder_name)
+                        if results and len(results) > 0:
+                            top_result = results[0]
+                            # Use higher confidence for folder-based SteamGridDB match
+                            detected = {
+                                'game_id': None,
+                                'game_name': top_result.get('name'),
+                                'steamgriddb_id': top_result.get('id'),
+                                'confidence': 0.85,  # Higher than filename-based
+                                'source': 'folder_steamgriddb',
+                                'release_date': top_result.get('release_date')
+                            }
+                            logger.info(f"Folder-based SteamGridDB match: {detected['game_name']} (id: {detected['steamgriddb_id']})")
+                            return detected
+                    except Exception as ex:
+                        logger.warning(f"SteamGridDB folder search failed: {ex}")
+            else:
+                logger.debug(f"Skipping folder-based detection for upload folder: '{folder_name}'")
+
+    # Clean filename for better matching
+    clean_name = filename.lower()
+    # Remove common patterns: dates, numbers, "gameplay", etc.
+    clean_name = re.sub(r'\d{4}-\d{2}-\d{2}', '', clean_name)  # Remove dates like 2024-01-14
+    clean_name = re.sub(r'\d{8}', '', clean_name)  # Remove YYYYMMDD format
+    clean_name = re.sub(r'\b(gameplay|clip|highlights?|match|game|recording|video)\b', '', clean_name, flags=re.IGNORECASE)
+    clean_name = re.sub(r'[_\-]+', ' ', clean_name)  # Replace _ and - with spaces
+    clean_name = re.sub(r'\s+', ' ', clean_name)  # Normalize whitespace
+    clean_name = clean_name.strip()
+
+    if not clean_name:
+        logger.debug("Filename cleaned to empty string, cannot detect game")
+        return None
+
+    # Step 1: Try local database first
+    games = GameMetadata.query.all()
+
+    if not games:
+        logger.debug("No games in database to match against")
+    else:
+        # Create list of (game_name, game_object) tuples for rapidfuzz
+        game_choices = [(game.name, game) for game in games]
+
+        # Use token_set_ratio - ignores word order and extra words
+        result = process.extractOne(
+            clean_name,
+            game_choices,
+            scorer=fuzz.token_set_ratio,
+            score_cutoff=65  # Minimum confidence (0-100 scale)
+        )
+
+        if result:
+            matched_name, score, matched_game = result[0], result[1], result[2]
+            best_match = {
+                'game_id': matched_game.id,
+                'game_name': matched_game.name,
+                'steamgriddb_id': matched_game.steamgriddb_id,
+                'confidence': score / 100,  # Convert to 0-1 scale
+                'source': 'local'
+            }
+            logger.info(f"Local game match: {best_match['game_name']} (confidence: {score:.0f}%)")
+            return best_match
+
+    # Step 2: Fallback to SteamGridDB search
+    if steamgriddb_api_key:
+        logger.info(f"No local match found, searching SteamGridDB for: '{clean_name}'")
+        from fireshare.steamgrid import SteamGridDBClient
+        client = SteamGridDBClient(steamgriddb_api_key)
+
+        try:
+            results = client.search_games(clean_name)
+            if results and len(results) > 0:
+                # Take the first result (SteamGridDB returns best matches first)
+                top_result = results[0]
+                detected = {
+                    'game_id': None,  # Not in our DB yet
+                    'game_name': top_result.get('name'),
+                    'steamgriddb_id': top_result.get('id'),
+                    'confidence': 0.75,  # Assume SteamGridDB results are good
+                    'source': 'steamgriddb',
+                    'release_date': top_result.get('release_date')
+                }
+                logger.info(f"SteamGridDB match: {detected['game_name']} (id: {detected['steamgriddb_id']})")
+                return detected
+        except Exception as ex:
+            logger.warning(f"SteamGridDB search failed: {ex}")
+
+    logger.debug(f"No game match found for filename: '{clean_name}'")
+    return None
diff --git a/app/server/gunicorn.conf.py b/app/server/gunicorn.conf.py
new file mode 100644
index 00000000..704433ad
--- /dev/null
+++ b/app/server/gunicorn.conf.py
@@ -0,0 +1,54 @@
+import multiprocessing
+import os
+
+# Server socket
+bind = "127.0.0.1:5000"
+backlog = 2048  # Number of pending connections
+
+# Worker processes
+workers = multiprocessing.cpu_count() * 2 + 1  # Recommended formula
+worker_class = "gthread"  # Use threaded workers
+threads = 4  # 4 threads per worker
+worker_connections = 1000
+max_requests = 2000  # Restart workers after N requests (prevents memory leaks)
+max_requests_jitter = 100  # Add randomness to prevent all workers restarting at once
+
+# Timeouts
+timeout = 120  # Worker timeout (normal requests)
+graceful_timeout = 30
+keepalive = 5  # Keep connections alive
+
+# Logging
+loglevel = "info"
+accesslog = "-"  # Log to stdout
+errorlog = "-"  # Log to stderr
+access_log_format = '%(h)s %(l)s %(u)s %(t)s "%(r)s" %(s)s %(b)s "%(f)s" "%(a)s" %(D)s'
+
+# Process naming
+proc_name = "fireshare"
+
+# Server mechanics
+daemon = False
+pidfile = None
+umask = 0
+user = None  # Set by command line
+group = None  # Set by command line
+tmp_upload_dir = None
+
+# Preload app (be careful with SQLite - might need to disable)
+preload_app = False  # Changed from True - SQLite doesn't like forking
+
+# Worker tmp directory
+worker_tmp_dir = "/dev/shm"  # Use RAM for worker tmp files
+
+def on_starting(server):
+    """Called just before the master process is initialized."""
+    server.log.info("Starting Fireshare")
+
+def when_ready(server):
+    """Called just after the server is started."""
+    server.log.info("Fireshare is ready")
+
+def on_reload(server):
+    """Called to recycle workers during a reload."""
+    server.log.info("Reloading Fireshare")
diff --git a/app/server/requirements.txt b/app/server/requirements.txt
index eb334a0e..5f40ff58 100644
--- a/app/server/requirements.txt
+++ b/app/server/requirements.txt
@@ -21,4 +21,5 @@ zipp==3.8.0
 xxhash==3.0.0
 apscheduler==3.9.1
 python-ldap==3.4.3
-requests==2.27.1
\ No newline at end of file
+requests==2.27.1
+rapidfuzz==3.6.1
\ No newline at end of file
diff --git a/entrypoint.sh b/entrypoint.sh
index 5df099c1..25501a64 100755
--- a/entrypoint.sh
+++ b/entrypoint.sh
@@ -1,42 +1,50 @@
-#/bin/bash
+#!/bin/bash
+set -e
 
-nginx -g 'daemon on;'
+echo "=== Fireshare Startup ==="
 
 PUID=${PUID:-1000}
 PGID=${PGID:-1000}
 
-useradd appuser || true
+# Create user if it doesn't exist
+useradd appuser 2>/dev/null || true
 
+# Update user and group IDs
 groupmod -o -g "$PGID" appuser
 usermod -o -u "$PUID" appuser
 
+# Set ownership of directories
 chown -R appuser:appuser $DATA_DIRECTORY
 chown -R appuser:appuser $VIDEO_DIRECTORY
 chown -R appuser:appuser $PROCESSED_DIRECTORY
 
-su appuser
-
-echo '
--------------------------------------'
-echo "
-User uid: $(id -u appuser)
-User gid: $(id -g appuser)
--------------------------------------
-"
+echo '-------------------------------------'
+echo "User uid: $(id -u appuser)"
+echo "User gid: $(id -g appuser)"
+echo '-------------------------------------'
 
 # Remove any lockfiles on startup
-runuser -u appuser -- rm $DATA_DIRECTORY/*.lock 2> /dev/null
+rm -f $DATA_DIRECTORY/*.lock 2>/dev/null || true
+rm -f $DATA_DIRECTORY/jobs.sqlite 2>/dev/null || true
 
-# Remove job db on start
-runuser -u appuser -- rm /jobs.sqlite
-# Ensure PATH and LD_LIBRARY_PATH are set for all processes
+# Start nginx as ROOT (it will drop to nginx user automatically)
+echo "Starting nginx..."
+nginx -g 'daemon on;'
+echo "Nginx started successfully"
+
+# Ensure PATH and LD_LIBRARY_PATH are set
 export PATH=/usr/local/bin:$PATH
 export LD_LIBRARY_PATH=/usr/local/nvidia/lib:/usr/local/nvidia/lib64:/usr/local/lib:/usr/local/cuda/lib64:${LD_LIBRARY_PATH}
 
+# Run migrations - try different user-switching commands
+echo "Running database migrations..."
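+# (Illustrative note, not part of the original script: "flask db upgrade" below applies
+# any pending Flask-Migrate/Alembic migrations - presumably against the SQLite database
+# kept under $DATA_DIRECTORY - before the application starts.)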
 runuser -u appuser -- env PATH="$PATH" LD_LIBRARY_PATH="$LD_LIBRARY_PATH" flask db upgrade
-# Run gunicorn with environment variables preserved
+echo "Database migrations complete"
+
+# Start gunicorn with config file if it exists, otherwise use command-line args
+echo "Starting gunicorn..."
 exec env PATH="$PATH" LD_LIBRARY_PATH="$LD_LIBRARY_PATH" \
     gunicorn --bind=127.0.0.1:5000 "fireshare:create_app(init_schedule=True)" \
     --user appuser --group appuser --workers 3 --threads 3 --preload
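With this change, `transcode_video_quality` returns a `(success, reason)` tuple instead of a bare boolean, so its caller needs a small adjustment. The caller is not shown in this diff; the sketch below only illustrates how the `'corruption'` and `'encoders'` reasons might be handled (everything except `transcode_video_quality` itself is hypothetical):

```python
# Hypothetical caller - the real scan/processing code is not part of this diff.
success, reason = transcode_video_quality(video_path, out_path, height=720)
if not success:
    if reason == 'corruption':
        # Source failed validation; skip it rather than retrying every encoder
        logger.warning(f"Skipping corrupt source: {video_path}")
    elif reason == 'encoders':
        # Every available encoder failed; keep the original file untranscoded
        logger.error(f"All encoders failed for: {video_path}")
```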