Make Dopamine snappier #1172

Open · wants to merge 1 commit into master
138 changes: 82 additions & 56 deletions Dopamine.Core/IO/FileOperations.cs
@@ -1,113 +1,139 @@
using Digimezzo.Foundation.Core.Utils;
using Digimezzo.Foundation.Core.Logging;
using System;
using System.Collections.Concurrent;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Threading.Tasks;
using System.Diagnostics;
using System.Threading;

namespace Dopamine.Core.IO
{
    public sealed class FileOperations
    {
        public static Task<List<FolderPathInfo>> GetValidFolderPathsAsync(
            long folderId,
            string directory,
            string[] validExtensions,
            CancellationToken cancellationToken)
        {
            return Task.Run(() =>
            {
                return GetValidFolderPaths(folderId, directory, validExtensions, cancellationToken);
            });
        }

        private static List<FolderPathInfo> GetValidFolderPaths(
            long folderId,
            string directory,
            string[] validExtensions,
            CancellationToken cancellationToken)
        {
            LogClient.Info("Get paths of directory {0}", directory);

            var folderPaths = new List<FolderPathInfo>();
            var validExtensionSet = new HashSet<string>(validExtensions);

            var sw = Stopwatch.StartNew();

            try
            {
                var files = new List<FileInfo>();
                var exceptions = new ConcurrentQueue<Exception>();

                var sw2 = Stopwatch.StartNew();

                TryDirectoryRecursiveGetFiles(directory, files, exceptions, cancellationToken);

                sw2.Stop();

                LogClient.Info("Retrieved {0} files from {1} ({2} ms)", files.Count, directory, sw2.ElapsedMilliseconds);

                foreach (Exception ex in exceptions)
                {
                    LogClient.Error("Error occurred while getting files recursively. Exception: {0}", ex.Message);
                }

                folderPaths.Capacity = files.Count;

                Parallel.ForEach(
                    files,
                    new ParallelOptions { CancellationToken = cancellationToken },
                    file =>
                    {
                        try
                        {
                            var extension = file.Extension.ToLower();

                            // Only add the file if it has a valid extension
                            if (validExtensionSet.Contains(extension))
                            {
                                var dateModifiedTicks = file.LastWriteTime.Ticks;

                                lock (folderPaths)
                                {
                                    folderPaths.Add(new FolderPathInfo(folderId, file.FullName, dateModifiedTicks));
                                }
                            }
                        }
                        catch (Exception ex)
                        {
                            LogClient.Error("Error occurred while getting folder path for file '{0}'. Exception: {1}", file, ex.Message);
                        }
                    });
            }
            catch (Exception ex)
            {
                LogClient.Error("Unexpected error occurred while getting folder paths. Exception: {0}", ex.Message);
            }

            sw.Stop();

            LogClient.Info("Get paths of directory {0} finished ({1} ms)", directory, sw.ElapsedMilliseconds);

            return folderPaths;
        }

        private static void TryDirectoryRecursiveGetFiles(
            string path,
            List<FileInfo> files,
            ConcurrentQueue<Exception> exceptions,
            CancellationToken cancellationToken)
        {
            // Process the list of files found in the directory.
            try
            {
                var fileEntries = new DirectoryInfo(path).GetFiles();

                lock (files)
                {
                    files.AddRange(fileEntries);
                }
            }
            catch (Exception ex)
            {
                exceptions.Enqueue(ex);
            }

            // Recurse into subdirectories of this directory.
            try
            {
                var subdirectoryEntries = Directory.GetDirectories(path);

                Parallel.ForEach(
                    subdirectoryEntries,
                    new ParallelOptions { CancellationToken = cancellationToken },
                    subdirectory =>
                    {
                        try
                        {
                            TryDirectoryRecursiveGetFiles(subdirectory, files, exceptions, cancellationToken);
                        }
                        catch (Exception ex)
                        {
                            exceptions.Enqueue(ex);
                        }
                    });
            }
            catch (Exception ex)
            {
                // … (remaining lines collapsed in the diff view)
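
For context, a minimal sketch of how a caller might consume the new cancellable API; the IndexFolderAsync method and the ValidAudioExtensions array are illustrative assumptions, not part of this diff.

    // Sketch only: an indexing service that owns a CancellationTokenSource could cancel
    // a running scan (e.g. when the user removes the folder) instead of waiting for it.
    private static readonly string[] ValidAudioExtensions = { ".mp3", ".flac", ".ogg" };

    public async Task IndexFolderAsync(long folderId, string directory, CancellationToken cancellationToken)
    {
        // Runs the recursive, parallel scan on the thread pool and stops early on cancellation.
        List<FolderPathInfo> paths = await FileOperations.GetValidFolderPathsAsync(
            folderId, directory, ValidAudioExtensions, cancellationToken);

        // ... hand the paths to the indexer ...
    }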
45 changes: 36 additions & 9 deletions Dopamine.Data/DbMigrator.cs
@@ -25,7 +25,7 @@ public int Version
// NOTE: whenever there is a change in the database schema,
// this version MUST be incremented and a migration method
// MUST be supplied to match the new version number
protected const int CURRENT_VERSION = 27;
private ISQLiteConnectionFactory factory;
private int userDatabaseVersion;

@@ -116,14 +116,15 @@ private void CreateTablesAndIndexes()
"DateRemoved INTEGER," +
"PRIMARY KEY(TrackID));");

conn.Execute("CREATE TABLE QueuedTrack (" +
"QueuedTrackID INTEGER," +
"Path TEXT," +
"SafePath TEXT," +
"IsPlaying INTEGER," +
"ProgressSeconds INTEGER," +
"OrderID INTEGER," +
"PRIMARY KEY(QueuedTrackID));");
conn.Execute(@"
CREATE TABLE QueuedTrack
(
TrackID INTEGER NOT NULL,
IsPlaying INTEGER,
ProgressSeconds INTEGER,
OrderID INTEGER,
FOREIGN KEY(TrackID) REFERENCES Track(TrackID) ON DELETE CASCADE
);");

conn.Execute("CREATE TABLE TrackStatistic (" +
"TrackStatisticID INTEGER PRIMARY KEY AUTOINCREMENT," +
@@ -1079,6 +1080,32 @@ private void Migrate26()
}
}

[DatabaseVersion(27)]
private void Migrate27()
{
using (var conn = this.factory.GetConnection())
{
conn.Execute("PRAGMA foreign_keys = ON;");

conn.Execute("BEGIN TRANSACTION;");
conn.Execute("DROP TABLE IF EXISTS QueuedTrack;");

conn.Execute(@"
CREATE TABLE QueuedTrack
(
TrackID INTEGER NOT NULL,
IsPlaying INTEGER,
ProgressSeconds INTEGER,
OrderID INTEGER,
FOREIGN KEY(TrackID) REFERENCES Track(TrackID) ON DELETE CASCADE
);
");

conn.Execute("COMMIT;");
conn.Execute("VACUUM;");
}
}

public void Migrate()
{
try
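
One caveat worth noting for the new schema: SQLite enforces the ON DELETE CASCADE clause only while PRAGMA foreign_keys = ON is active, and that pragma is per connection rather than stored in the database file. A minimal sketch, assuming the same connection-factory pattern used above and a hypothetical trackId variable:

    // Sketch only: any connection that deletes rows from Track needs the pragma enabled
    // for the cascade into QueuedTrack to fire.
    using (var conn = this.factory.GetConnection())
    {
        conn.Execute("PRAGMA foreign_keys = ON;"); // per-connection setting

        // With the cascade active, the queued entry for this track is removed as well.
        conn.Execute("DELETE FROM Track WHERE TrackID = ?;", trackId);
    }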
12 changes: 4 additions & 8 deletions Dopamine.Data/Entities/QueuedTrack.cs
@@ -4,12 +4,8 @@ namespace Dopamine.Data.Entities
{
public class QueuedTrack
{
[PrimaryKey()]
public long TrackID { get; set; }

public long IsPlaying { get; set; }

@@ -24,12 +20,12 @@ public override bool Equals(object obj)
return false;
}

return TrackID == ((QueuedTrack)obj).TrackID;
}

public override int GetHashCode()
{
return TrackID.GetHashCode();
}
}
}
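
Because equality and hashing are now keyed on TrackID, two queue entries for the same track compare equal; a small sketch of the resulting semantics (the HashSet usage is illustrative only):

    // Sketch only: instances with the same TrackID collapse in hashed collections.
    var queued = new HashSet<QueuedTrack>
    {
        new QueuedTrack { TrackID = 7, IsPlaying = 0 },
        new QueuedTrack { TrackID = 7, IsPlaying = 1 } // same TrackID, not added again
    };
    // queued.Count == 1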
8 changes: 4 additions & 4 deletions Dopamine.Data/MetaDataUtils.cs
@@ -134,7 +134,7 @@ private static string GetAlbumTitle(FileMetadata fileMetadata)
return string.IsNullOrWhiteSpace(fileMetadata.Album.Value) ? string.Empty : FormatUtils.TrimValue(fileMetadata.Album.Value);
}

public static void FillTrackBase(FileMetadata fileMetadata, Track track)
{
track.TrackTitle = FormatUtils.TrimValue(fileMetadata.Title.Value);
track.Year = SafeConvertToLong(fileMetadata.Year.Value);
@@ -150,7 +150,7 @@ public static void FillTrackBase(FileMetadata fileMetadata, ref Track track)
track.AlbumKey = GenerateInitialAlbumKey(track.AlbumTitle, track.AlbumArtists);
}

public static void FillTrack(FileMetadata fileMetadata, Track track)
{
string path = fileMetadata.Path;
long nowTicks = DateTime.Now.Ticks;
@@ -171,7 +171,7 @@ public static void FillTrack(FileMetadata fileMetadata, ref Track track)
track.DateLastSynced = nowTicks;
track.Rating = fileMetadata.Rating.Value;

FillTrackBase(fileMetadata, track);
}

private static string GenerateInitialAlbumKey(string albumTitle, string albumArtists)
@@ -199,7 +199,7 @@ public static async Task<Track> Path2TrackAsync(string path)

await Task.Run(() =>
{
MetadataUtils.FillTrack(fileMetadata, track);
});
}

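
The ref modifiers could be dropped because Track is a reference type: FillTrack mutates the instance the caller already holds, so no reassignment is needed. A minimal sketch, where the FileMetadata constructor taking a path is assumed from the existing code base:

    // Sketch only: property writes inside FillTrack are visible to the caller without ref.
    var track = new Track();
    var fileMetadata = new FileMetadata(path);    // assumption: existing metadata reader for 'path'

    MetadataUtils.FillTrack(fileMetadata, track); // fills TrackTitle, Year, AlbumKey, ... in place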
7 changes: 4 additions & 3 deletions Dopamine.Data/Repositories/IQueuedTrackRepository.cs
@@ -1,13 +1,14 @@
using Dopamine.Data.Entities;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;

namespace Dopamine.Data.Repositories
{
public interface IQueuedTrackRepository
{
Task<List<Track>> GetSavedQueuedTracksAsync();
Task SaveQueuedTracksAsync(IList<Track> tracks, long? currentTrackId, long progressSeconds);
Task<Tuple<Track, long>> GetPlayingTrackAsync();
}
}
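
A hedged sketch of how the playback side might use the reshaped contract; the queueManager, queuedTrackRepository and GetProgressSeconds names are assumptions for illustration, not part of this diff:

    // Sketch only: persist the queue as Track entities plus the playing track and its position.
    IList<Track> queue = this.queueManager.Queue;
    Track playing = this.queueManager.CurrentTrack;        // may be null when nothing is playing
    long progressSeconds = this.GetProgressSeconds();

    await this.queuedTrackRepository.SaveQueuedTracksAsync(queue, playing?.TrackID, progressSeconds);

    // Restoring on startup:
    List<Track> savedQueue = await this.queuedTrackRepository.GetSavedQueuedTracksAsync();
    Tuple<Track, long> playingAndProgress = await this.queuedTrackRepository.GetPlayingTrackAsync();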