|
|
@@ -50,12 +50,12 @@ public:
|
|
|
|
|
|
std::filesystem::path dirPath(path);
|
|
|
if (!std::filesystem::exists(dirPath)) {
|
|
|
- std::cerr << "Directory does not exist: " << path << std::endl;
|
|
|
+ LOG_ERROR("Directory does not exist: " + path);
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
if (!std::filesystem::is_directory(dirPath)) {
|
|
|
- std::cerr << "Path is not a directory: " << path << std::endl;
|
|
|
+ LOG_ERROR("Path is not a directory: " + path);
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
@@ -276,7 +276,7 @@ public:
|
|
|
});
|
|
|
|
|
|
if (future.wait_for(std::chrono::milliseconds(timeoutMs)) == std::future_status::timeout) {
|
|
|
- std::cerr << "Timeout getting file info for " << filePath << std::endl;
|
|
|
+ LOG_ERROR("Timeout getting file info for " + filePath.string());
|
|
|
return {false, {0, fs::file_time_type{}}};
|
|
|
}
|
|
|
|
|
|
@@ -410,10 +410,7 @@ public:
|
|
|
info.useFolderBasedDetection = (cachedEntry.detectionSource == "folder");
|
|
|
info.detectionSource = cachedEntry.detectionSource;
|
|
|
|
|
|
- if (verbose) {
|
|
|
- std::cout << "Using cached detection for " << info.name
|
|
|
- << " (source: " << cachedEntry.detectionSource << ")" << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Using cached detection for " + info.name + " (source: " + cachedEntry.detectionSource + ")");
|
|
|
} else {
|
|
|
// Perform new detection
|
|
|
try {
|
|
|
@@ -661,9 +658,7 @@ bool ModelManager::loadModel(const std::string& name, const std::string& path, M
|
|
|
|
|
|
// Apply detected model type and parameters
|
|
|
if (detection.architecture != ModelArchitecture::UNKNOWN) {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Detected model architecture: " << detection.architectureName << " for " << name << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Detected model architecture: " + detection.architectureName + " for " + name);
|
|
|
|
|
|
// Set model type from detection if available
|
|
|
if (detection.suggestedParams.count("model_type")) {
|
|
|
@@ -681,18 +676,13 @@ bool ModelManager::loadModel(const std::string& name, const std::string& path, M
|
|
|
// Check if the resolved VAE file exists before setting the path
|
|
|
if (fs::exists(resolvedVAEPath) && fs::is_regular_file(resolvedVAEPath)) {
|
|
|
loadParams.vaePath = resolvedVAEPath;
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Using VAE: " << fs::absolute(resolvedVAEPath).string() << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Using VAE: " + fs::absolute(resolvedVAEPath).string());
|
|
|
} else {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "VAE file not found: \"" << fs::absolute(resolvedVAEPath).string()
|
|
|
- << "\" - continuing without VAE" << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("VAE file not found: \"" + fs::absolute(resolvedVAEPath).string() + "\" - continuing without VAE");
|
|
|
// Don't set vaePath if file doesn't exist, continue without VAE
|
|
|
}
|
|
|
} else {
|
|
|
- std::cerr << "VAE directory not configured - continuing without VAE" << std::endl;
|
|
|
+ LOG_ERROR("VAE directory not configured - continuing without VAE");
|
|
|
}
|
|
|
}
|
|
|
|
|
|
@@ -707,20 +697,16 @@ bool ModelManager::loadModel(const std::string& name, const std::string& path, M
|
|
|
// These would need to be passed through the underlying stable-diffusion.cpp library directly
|
|
|
}
|
|
|
} else {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Could not detect model architecture for " << name << ", using defaults" << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Could not detect model architecture for " + name + ", using defaults");
|
|
|
}
|
|
|
} catch (const std::exception& e) {
|
|
|
- std::cerr << "Model detection failed for " << name << ": " << e.what() << " - using defaults" << std::endl;
|
|
|
+ LOG_ERROR("Model detection failed for " + name + ": " + std::string(e.what()) + " - using defaults");
|
|
|
}
|
|
|
}
|
|
|
|
|
|
// Handle upscaler models differently - they don't need to be pre-loaded
|
|
|
if (type == ModelType::ESRGAN || type == ModelType::UPSCALER) {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Upscaler model '" << name << "' does not need pre-loading, marking as available for use" << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Upscaler model '" + name + "' does not need pre-loading, marking as available for use");
|
|
|
|
|
|
// For upscaler models, we don't create a wrapper or call loadModel
|
|
|
// They are loaded dynamically during upscaling
|
|
|
@@ -736,7 +722,7 @@ bool ModelManager::loadModel(const std::string& name, const std::string& path, M
|
|
|
|
|
|
// Try to load the model (for checkpoint and diffusion models)
|
|
|
if (!wrapper->loadModel(path, loadParams)) {
|
|
|
- std::cerr << "Failed to load model '" << name << "': " << wrapper->getLastError() << std::endl;
|
|
|
+ LOG_ERROR("Failed to load model '" + name + "': " + wrapper->getLastError());
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
@@ -762,7 +748,7 @@ bool ModelManager::loadModel(const std::string& name, const std::string& path, M
|
|
|
info.modifiedAt = fs::last_write_time(path);
|
|
|
info.createdAt = info.modifiedAt; // Use modified time as creation time for now
|
|
|
} catch (const fs::filesystem_error& e) {
|
|
|
- std::cerr << "Error getting file info for " << path << ": " << e.what() << std::endl;
|
|
|
+ LOG_ERROR("Error getting file info for " + path + ": " + std::string(e.what()));
|
|
|
info.fileSize = 0;
|
|
|
info.modifiedAt = fs::file_time_type{};
|
|
|
info.createdAt = fs::file_time_type{};
|
|
|
@@ -784,7 +770,7 @@ bool ModelManager::loadModel(const std::string& name) {
|
|
|
// Check if model exists in available models
|
|
|
auto it = pImpl->availableModels.find(name);
|
|
|
if (it == pImpl->availableModels.end()) {
|
|
|
- std::cerr << "Model '" << name << "' not found in available models" << std::endl;
|
|
|
+ LOG_ERROR("Model '" + name + "' not found in available models");
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
@@ -1102,9 +1088,7 @@ void ModelManager::unloadAllModels() {
|
|
|
|
|
|
// Unload each model
|
|
|
for (const auto& modelName : loadedModelNames) {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Unloading model: " << modelName << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Unloading model: " + modelName);
|
|
|
|
|
|
auto it = pImpl->loadedModels.find(modelName);
|
|
|
if (it != pImpl->loadedModels.end()) {
|
|
|
@@ -1121,9 +1105,7 @@ void ModelManager::unloadAllModels() {
|
|
|
info.isLoaded = false;
|
|
|
}
|
|
|
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Unloaded " << loadedModelNames.size() << " models" << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Unloaded " + std::to_string(loadedModelNames.size()) + " models");
|
|
|
}
|
|
|
// SHA256 Hashing Implementation
|
|
|
|
|
|
@@ -1132,7 +1114,7 @@ std::string ModelManager::computeModelHash(const std::string& modelName) {
|
|
|
|
|
|
auto it = pImpl->availableModels.find(modelName);
|
|
|
if (it == pImpl->availableModels.end()) {
|
|
|
- std::cerr << "Model not found: " << modelName << std::endl;
|
|
|
+ LOG_ERROR("Model not found: " + modelName);
|
|
|
return "";
|
|
|
}
|
|
|
|
|
|
@@ -1141,19 +1123,19 @@ std::string ModelManager::computeModelHash(const std::string& modelName) {
|
|
|
|
|
|
std::ifstream file(filePath, std::ios::binary);
|
|
|
if (!file.is_open()) {
|
|
|
- std::cerr << "Failed to open file for hashing: " << filePath << std::endl;
|
|
|
+ LOG_ERROR("Failed to open file for hashing: " + filePath);
|
|
|
return "";
|
|
|
}
|
|
|
|
|
|
// Create and initialize EVP context for SHA256
|
|
|
EVP_MD_CTX* mdctx = EVP_MD_CTX_new();
|
|
|
if (mdctx == nullptr) {
|
|
|
- std::cerr << "Failed to create EVP context" << std::endl;
|
|
|
+ LOG_ERROR("Failed to create EVP context");
|
|
|
return "";
|
|
|
}
|
|
|
|
|
|
if (EVP_DigestInit_ex(mdctx, EVP_sha256(), nullptr) != 1) {
|
|
|
- std::cerr << "Failed to initialize SHA256 digest" << std::endl;
|
|
|
+ LOG_ERROR("Failed to initialize SHA256 digest");
|
|
|
EVP_MD_CTX_free(mdctx);
|
|
|
return "";
|
|
|
}
|
|
|
@@ -1168,7 +1150,7 @@ std::string ModelManager::computeModelHash(const std::string& modelName) {
|
|
|
while (file.read(buffer, bufferSize) || file.gcount() > 0) {
|
|
|
size_t bytesRead = file.gcount();
|
|
|
if (EVP_DigestUpdate(mdctx, buffer, bytesRead) != 1) {
|
|
|
- std::cerr << "Failed to update digest" << std::endl;
|
|
|
+ LOG_ERROR("Failed to update digest");
|
|
|
EVP_MD_CTX_free(mdctx);
|
|
|
return "";
|
|
|
}
|
|
|
@@ -1187,7 +1169,7 @@ std::string ModelManager::computeModelHash(const std::string& modelName) {
|
|
|
unsigned char hash[EVP_MAX_MD_SIZE];
|
|
|
unsigned int hashLen = 0;
|
|
|
if (EVP_DigestFinal_ex(mdctx, hash, &hashLen) != 1) {
|
|
|
- std::cerr << "Failed to finalize digest" << std::endl;
|
|
|
+ LOG_ERROR("Failed to finalize digest");
|
|
|
EVP_MD_CTX_free(mdctx);
|
|
|
return "";
|
|
|
}
|
|
|
@@ -1235,7 +1217,7 @@ std::string ModelManager::loadModelHashFromFile(const std::string& modelName) {
|
|
|
return j["sha256"].get<std::string>();
|
|
|
}
|
|
|
} catch (const std::exception& e) {
|
|
|
- std::cerr << "Error loading hash from JSON: " << e.what() << std::endl;
|
|
|
+ LOG_ERROR("Error loading hash from JSON: " + std::string(e.what()));
|
|
|
}
|
|
|
|
|
|
return "";
|
|
|
@@ -1261,7 +1243,7 @@ bool ModelManager::saveModelHashToFile(const std::string& modelName, const std::
|
|
|
|
|
|
std::ofstream jsonFile(jsonPath);
|
|
|
if (!jsonFile.is_open()) {
|
|
|
- std::cerr << "Failed to open file for writing: " << jsonPath << std::endl;
|
|
|
+ LOG_ERROR("Failed to open file for writing: " + jsonPath);
|
|
|
return false;
|
|
|
}
|
|
|
|
|
|
@@ -1271,14 +1253,14 @@ bool ModelManager::saveModelHashToFile(const std::string& modelName, const std::
|
|
|
// Hash save output removed from stdout
|
|
|
return true;
|
|
|
} catch (const std::exception& e) {
|
|
|
- std::cerr << "Error saving hash to JSON: " << e.what() << std::endl;
|
|
|
+ LOG_ERROR("Error saving hash to JSON: " + std::string(e.what()));
|
|
|
return false;
|
|
|
}
|
|
|
}
|
|
|
|
|
|
std::string ModelManager::findModelByHash(const std::string& hash) {
|
|
|
if (hash.length() < 10) {
|
|
|
- std::cerr << "Hash must be at least 10 characters" << std::endl;
|
|
|
+ LOG_ERROR("Hash must be at least 10 characters");
|
|
|
return "";
|
|
|
}
|
|
|
|
|
|
@@ -1403,7 +1385,7 @@ bool ModelManager::ModelPathSelector::isModelInDirectory(const std::string& mode
|
|
|
|
|
|
return isUnderDirectory;
|
|
|
} catch (const std::filesystem::filesystem_error& e) {
|
|
|
- std::cerr << "Error checking if model is in directory: " << e.what() << std::endl;
|
|
|
+ LOG_ERROR("Error checking if model is in directory: " + std::string(e.what()));
|
|
|
return false;
|
|
|
}
|
|
|
}
|
|
|
@@ -1426,13 +1408,13 @@ ModelManager::ModelDetectionCache::CacheEntry ModelManager::ModelDetectionCache:
|
|
|
|
|
|
// Check if cache is still valid (file hasn't been modified)
|
|
|
if (entry.fileModifiedAt == currentModifiedTime && entry.isValid) {
|
|
|
- std::cout << "Using cached detection result for: " << modelPath << std::endl;
|
|
|
+ LOG_DEBUG("Using cached detection result for: " + modelPath);
|
|
|
return entry;
|
|
|
}
|
|
|
|
|
|
// Cache is stale, remove it
|
|
|
cache_.erase(it);
|
|
|
- std::cout << "Cache entry expired for: " << modelPath << std::endl;
|
|
|
+ LOG_DEBUG("Cache entry expired for: " + modelPath);
|
|
|
return CacheEntry{}; // Return invalid entry
|
|
|
}
|
|
|
|
|
|
@@ -1549,8 +1531,7 @@ void ModelManager::ModelDetectionCache::cacheDetectionResult(
|
|
|
}
|
|
|
|
|
|
cache_[modelPath] = entry;
|
|
|
- std::cout << "Cached detection result for: " << modelPath
|
|
|
- << " (source: " << detectionSource << ", path type: " << pathType << ")" << std::endl;
|
|
|
+ LOG_DEBUG("Cached detection result for: " + modelPath + " (source: " + detectionSource + ", path type: " + pathType + ")");
|
|
|
}
|
|
|
|
|
|
void ModelManager::ModelDetectionCache::invalidateCache(const std::string& modelPath) {
|
|
|
@@ -1559,7 +1540,7 @@ void ModelManager::ModelDetectionCache::invalidateCache(const std::string& model
|
|
|
auto it = cache_.find(modelPath);
|
|
|
if (it != cache_.end()) {
|
|
|
cache_.erase(it);
|
|
|
- std::cout << "Invalidated cache for: " << modelPath << std::endl;
|
|
|
+ LOG_DEBUG("Invalidated cache for: " + modelPath);
|
|
|
}
|
|
|
}
|
|
|
|
|
|
@@ -1568,7 +1549,7 @@ void ModelManager::ModelDetectionCache::clearAllCache() {
|
|
|
|
|
|
size_t count = cache_.size();
|
|
|
cache_.clear();
|
|
|
- std::cout << "Cleared " << count << " cache entries" << std::endl;
|
|
|
+ LOG_DEBUG("Cleared " + std::to_string(count) + " cache entries");
|
|
|
}
|
|
|
|
|
|
std::vector<ModelManager::ModelDetails> ModelManager::checkRequiredModelsExistence(const std::vector<std::string>& requiredModels) {
|
|
|
@@ -1652,22 +1633,18 @@ std::vector<ModelManager::ModelDetails> ModelManager::checkRequiredModelsExisten
|
|
|
}
|
|
|
}
|
|
|
} catch (const std::exception& e) {
|
|
|
- std::cerr << "Error loading hash for " << fullPath << ": " << e.what() << std::endl;
|
|
|
+ LOG_ERROR("Error loading hash for " + fullPath + ": " + std::string(e.what()));
|
|
|
}
|
|
|
}
|
|
|
|
|
|
if (pImpl->verbose) {
|
|
|
- std::ostringstream info_oss;
|
|
|
- info_oss << "Found required model: " << modelType << " at " << details.path;
|
|
|
- LOG_INFO(info_oss.str());
|
|
|
+ LOG_DEBUG("Found required model: " + modelType + " at " + details.path);
|
|
|
}
|
|
|
} else {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Missing required model: " << modelType << " - expected at " << fullPath << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Missing required model: " + modelType + " - expected at " + fullPath);
|
|
|
}
|
|
|
} catch (const fs::filesystem_error& e) {
|
|
|
- std::cerr << "Error checking model existence for " << fullPath << ": " << e.what() << std::endl;
|
|
|
+ LOG_ERROR("Error checking model existence for " + fullPath + ": " + std::string(e.what()));
|
|
|
}
|
|
|
|
|
|
modelDetails.push_back(details);
|
|
|
@@ -1678,42 +1655,36 @@ std::vector<ModelManager::ModelDetails> ModelManager::checkRequiredModelsExisten
|
|
|
|
|
|
bool ModelManager::hashAllModels(std::function<void(int, int, const std::string&)> progressCallback) {
|
|
|
std::shared_lock<std::shared_mutex> lock(pImpl->modelsMutex);
|
|
|
-
|
|
|
+
|
|
|
if (pImpl->availableModels.empty()) {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "No models found to hash" << std::endl;
|
|
|
- }
|
|
|
+ LOG_WARNING("No models found to hash");
|
|
|
return true;
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Create a copy of model names to avoid holding the lock during hashing
|
|
|
std::vector<std::string> modelNames;
|
|
|
for (const auto& [name, info] : pImpl->availableModels) {
|
|
|
modelNames.push_back(name);
|
|
|
}
|
|
|
lock.unlock();
|
|
|
-
|
|
|
+
|
|
|
const int totalModels = static_cast<int>(modelNames.size());
|
|
|
- int processedModels = 0;
|
|
|
- int successfulHashes = 0;
|
|
|
-
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Hashing " << totalModels << " models..." << std::endl;
|
|
|
- }
|
|
|
-
|
|
|
+ int processedModels = 0;
|
|
|
+ int successfulHashes = 0;
|
|
|
+
|
|
|
+ LOG_INFO("Hashing " + std::to_string(totalModels) + " models...");
|
|
|
+
|
|
|
for (const auto& modelName : modelNames) {
|
|
|
if (pImpl->scanCancelled.load()) {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Model hashing cancelled" << std::endl;
|
|
|
- }
|
|
|
+ LOG_DEBUG("Model hashing cancelled");
|
|
|
return false;
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Report progress
|
|
|
if (progressCallback) {
|
|
|
progressCallback(processedModels + 1, totalModels, modelName);
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Get model info
|
|
|
ModelInfo modelInfo;
|
|
|
{
|
|
|
@@ -1727,22 +1698,18 @@ bool ModelManager::hashAllModels(std::function<void(int, int, const std::string&
|
|
|
continue;
|
|
|
}
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Check if model already has a hash
|
|
|
if (!modelInfo.sha256.empty()) {
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Model " << modelName << " already has hash: " << modelInfo.sha256.substr(0, 10) << "..." << std::endl;
|
|
|
- }
|
|
|
+ LOG_INFO("Model " + modelName + " already has hash: " + modelInfo.sha256.substr(0, 10) + "...");
|
|
|
processedModels++;
|
|
|
successfulHashes++;
|
|
|
continue;
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
// Hash the model
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Hashing model " << modelName << " (" << std::filesystem::absolute(modelInfo.fullPath).string() << ")" << std::endl;
|
|
|
- }
|
|
|
-
|
|
|
+ LOG_INFO("Hashing model " + modelName + " (" + std::filesystem::absolute(modelInfo.fullPath).string() + ")");
|
|
|
+
|
|
|
std::string hash = computeModelHash(modelName);
|
|
|
if (!hash.empty()) {
|
|
|
// Save hash to file
|
|
|
@@ -1756,23 +1723,19 @@ bool ModelManager::hashAllModels(std::function<void(int, int, const std::string&
|
|
|
}
|
|
|
}
|
|
|
successfulHashes++;
|
|
|
-
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Successfully hashed model " << modelName << ": " << hash.substr(0, 10) << "..." << std::endl;
|
|
|
- }
|
|
|
+
|
|
|
+ LOG_INFO("Successfully hashed model " + modelName + ": " + hash.substr(0, 10) + "...");
|
|
|
} else {
|
|
|
- std::cerr << "Failed to save hash for model: " << modelName << std::endl;
|
|
|
+ LOG_ERROR("Failed to save hash for model: " + modelName);
|
|
|
}
|
|
|
} else {
|
|
|
- std::cerr << "Failed to compute hash for model: " << modelName << std::endl;
|
|
|
+ LOG_ERROR("Failed to compute hash for model: " + modelName);
|
|
|
}
|
|
|
-
|
|
|
+
|
|
|
processedModels++;
|
|
|
}
|
|
|
-
|
|
|
- if (pImpl->verbose) {
|
|
|
- std::cout << "Hashing complete. Successfully hashed " << successfulHashes << " out of " << totalModels << " models" << std::endl;
|
|
|
- }
|
|
|
-
|
|
|
+
|
|
|
+ LOG_INFO("Hashing complete. Successfully hashed " + std::to_string(successfulHashes) + " out of " + std::to_string(totalModels) + " models");
|
|
|
+
|
|
|
return successfulHashes == totalModels; // Return true only if all models were hashed successfully
|
|
|
}
|