diff --git a/lib/database.js b/lib/database.js
index 768e588..1c8552d 100644
--- a/lib/database.js
+++ b/lib/database.js
@@ -18,7 +18,7 @@ export const File = defineFile(sequelize);
 export const QueryCount = defineQueryCount(sequelize);
 export const Metadata = defineMetadata(sequelize)
 Metadata.hasMany(File)
-File.belongsTo(Metadata)
+File.belongsTo(Metadata, {as: "details"})
 
 export async function initDB() {
   try {
diff --git a/lib/dboptimize.js b/lib/dboptimize.js
index 80c08e4..4b9f764 100644
--- a/lib/dboptimize.js
+++ b/lib/dboptimize.js
@@ -5,7 +5,7 @@ import { readFileSync } from "fs";
 import { fileURLToPath } from "url";
 import { dirname, resolve } from "path";
 import { Piscina, FixedQueue } from "piscina";
-import { timer } from "./time.js";
+import { Timer } from "./time.js";
 
 let piscina = new Piscina({
   filename: resolve("./lib", "dbkwworker.js"),
@@ -28,7 +28,7 @@ const keywords = {
 };
 
 export async function optimizeDatabaseKws() {
-  let proctime = new timer();
+  let proctime = new Timer();
   let changes = 0;
   console.log("Optimizing DB Keywords...");
   let dbLength = await File.count();
diff --git a/lib/dircrawl.js b/lib/dircrawl.js
index b8d379a..4ac9809 100644
--- a/lib/dircrawl.js
+++ b/lib/dircrawl.js
@@ -5,7 +5,7 @@ import debugPrint from "./debugprint.js";
 import { File } from './models/index.js';
 import { bulkIndexFiles } from './services/elasticsearch.js';
 import { optimizeDatabaseKws } from "./dboptimize.js";
-import { timer } from "./time.js";
+import { Timer } from "./time.js";
 
 let piscina = new Piscina({
   filename: resolve("./lib", "fileworker.js"),
@@ -15,7 +15,7 @@ let piscina = new Piscina({
 const BATCH_SIZE = 1000; // Process files in batches for better performance
 
 export default async function getAllFiles(catList) {
-  var proctime = new timer()
+  var proctime = new Timer()
   const url = "https://myrient.erista.me/files/";
   let parentRows = await getTableRows({ url: url, base: "" });
   let parents = [];
@@ -161,7 +161,8 @@ async function processBatch(files) {
       type: file.type,
       date: file.date,
       region: file.region,
-      group: file.group
+      group: file.group,
+      nongame: file.nongame
    })),
    {
      returning: true,
diff --git a/lib/fileworker.js b/lib/fileworker.js
index d5dde1c..ebfceb1 100644
--- a/lib/fileworker.js
+++ b/lib/fileworker.js
@@ -1,5 +1,8 @@
 import innertext from "innertext";
 import HTMLParse from "node-html-parser";
+import { fileURLToPath } from 'url';
+import { dirname, resolve } from "path";
+import { readFileSync } from "fs";
 
 export async function getTableRows(data) {
   let retryLeft = 5;
@@ -55,7 +58,8 @@ export async function parseOutFile(data) {
     type: findType(fullName, data.catList),
     date: innertext(file.querySelector(".date").innerHTML).trim(),
     region: findRegion(fullName, data.catList),
-    group: findGroup(fullName)
+    group: findGroup(fullName),
+    nongame: checkNonGame(name)
   };
   return processedFile;
 }
@@ -166,6 +170,29 @@ function findGroup(str){
   }
 }
 
+// Cache for nonGameTerms
+let nonGameTermsCache = null;
+
+function getNonGameTerms() {
+  if (nonGameTermsCache) {
+    return nonGameTermsCache;
+  }
+
+  const __filename = fileURLToPath(import.meta.url);
+  const __dirname = dirname(__filename);
+
+  const nonGameTermsPath = resolve(__dirname, 'nonGameTerms.json');
+  nonGameTermsCache = JSON.parse(readFileSync(nonGameTermsPath, 'utf8'));
+
+  return nonGameTermsCache;
+}
+
+function checkNonGame(str){
+  const nonGameTerms = getNonGameTerms();
+  const termPatterns = nonGameTerms.terms.map(term => new RegExp(term, 'i'));
+  return termPatterns.some(pattern => pattern.test(str));
+}
+
 class HTTPResponseError extends Error {
   constructor(response) {
     super(`HTTP Error Response: ${response.status} ${response.statusText}`);
diff --git a/lib/metadatasearch.js b/lib/metadatasearch.js
new file mode 100644
index 0000000..c339f16
--- /dev/null
+++ b/lib/metadatasearch.js
@@ -0,0 +1,223 @@
+import {
+  twitchAccessToken,
+  igdb,
+  request,
+  multi,
+} from "@phalcode/ts-igdb-client";
+import {
+  fields,
+  or,
+  and,
+  where,
+  whereIn,
+  WhereFlags,
+  WhereInFlags,
+  sort,
+  limit,
+  offset,
+} from "@phalcode/ts-igdb-client";
+import { File, Metadata } from "./database.js";
+import { Sequelize } from "sequelize";
+
+export default class MetadataSearch {
+  constructor() {
+    this.twitchSecrets = {
+      client_id: process.env.TWITCH_CLIENT_ID,
+      client_secret: process.env.TWITCH_CLIENT_SECRET,
+    };
+    this.setupClient();
+  }
+  gameFields = [
+    "name",
+    "alternative_names.comment",
+    "alternative_names.name",
+    "cover.image_id",
+    "total_rating",
+    "first_release_date",
+    "summary",
+    "genres.name",
+    "involved_companies.company.name",
+    "involved_companies.developer",
+    "involved_companies.publisher",
+    "involved_companies.supporting",
+    "game_modes.name",
+    "game_localizations.name",
+    "game_localizations.region",
+    "game_localizations.region.name",
+    "platforms.name",
+    "game_type.type",
+  ];
+
+  async setupClient() {
+    try {
+      if (this.twitchSecrets.client_id && this.twitchSecrets.client_secret) {
+        this.accessToken = await twitchAccessToken(this.twitchSecrets);
+        this.client = igdb(this.twitchSecrets.client_id, this.accessToken);
+        if (this.accessToken) {
+          this.authorized = true;
+          return;
+        }
+      }
+      this.authorized = false; //disable
+    } catch (error) {
+      this.authorized = false;
+    }
+  }
+
+  async getMetadata(query, retrying = false) {
+    try {
+      if (!this.authorized) return;
+      const { data } = await this.client
+        .multi(...this.buildGameMultiQuery(query))
+        .execute();
+      return data;
+    } catch (error) {
+      if (error === "ERR_BAD_REQUEST" && !retrying) {
+        this.setupClient();
+        return this.getMetadata(query, true);
+      }
+      console.error("Failed to retrieve metadata:", error);
+    }
+  }
+
+  buildGameMultiQuery(query) {
+    let multiQuery = [];
+    for (let x in query) {
+      multiQuery.push(
+        request("games")
+          .alias(x)
+          .pipe(
+            fields(this.gameFields),
+            and(
+              ...this.buildAndClauses("name", "~", query[x].name),
+              where("game_type.type", "=", "Main Game"),
+              where("platforms.name", "~", query[x].platform)
+            ),
+            limit(1)
+          )
+      );
+    }
+    return multiQuery;
+  }
+
+  buildAndClauses(field, op, string) {
+    let andClauses = [];
+    let name = [...new Set(string.split(" "))].filter((n) => n); //dedupe;
+    for (let x in name) {
+      andClauses.push(where(field, op, name[x], WhereFlags.CONTAINS));
+    }
+    return andClauses;
+  }
+
+  normalizeName(filename) {
+    if (!filename) return;
+    return filename
+      .replace(/\.[A-z]{3,3}|\.|&|-|,|v[0-9]+\.[0-9]+|\[.*?\]|\(.*?\)/g, "")
+      .trim();
+  }
+
+  async getGamesMetadata(games) {
+    try {
+      if (!this.authorized || !games.length) return [];
+      let gameQuery = [];
+      for (let x in games) {
+        if (!(await games[x].getDetails()))
+          if (!games[x].nongame) {
+            if (!games[x].blockmetadata) {
+              gameQuery.push({
+                name: this.normalizeName(games[x].filename),
+                platform: games[x].category,
+                id: x,
+              });
+            }
+          }
+      }
+      if (!gameQuery.length) return [];
+      let gameMetas = await this.getMetadata(gameQuery);
+      if (!gameMetas.length) return [];
+      for (let x in gameMetas) {
+        if (gameMetas[x].result.length) {
+          await this.addMetadataToDb(
+            gameMetas[x].result[0],
+            games[gameQuery[x].id]
+          );
+        }
+      }
+      let details = await Promise.all(games.map((game) => game.getDetails()));
+      return details.map((details) => details?.dataValues);
+    } catch (error) {
+      console.error("Error getting metadata:", error);
+    }
+  }
+
+  async addMetadataToDb(metadata, game) {
+    try {
+      let md = await Metadata.findOne({
+        where: {
+          id: metadata.id,
+        },
+      });
+      if (!md) {
+        md = await Metadata.build(
+          {
+            id: metadata.id,
+            title: metadata.name,
+
+            description: metadata.summary,
+            rating: metadata.total_rating,
+            coverartid: metadata.cover?.image_id,
+            releasedate: metadata.first_release_date
+              ? new Date(metadata.first_release_date * 1000)
+              : null,
+            genre: JSON.stringify(metadata.genres?.map((genre) => genre.name)),
+            gamemodes: JSON.stringify(
+              metadata.game_modes?.map((gm) => gm.name)
+            ),
+            platforms: JSON.stringify(
+              metadata.platforms?.map((platform) => platform.name)
+            ),
+          },
+          {
+            returning: true,
+            updateOnDuplicate: ["id"],
+            include: File,
+          }
+        );
+      }
+      //these don't work right unless I do them after the fact.
+      md.developers = JSON.stringify(
+        metadata.involved_companies
+          ?.filter((ic) => ic.developer)
+          ?.map((ic) => ic.company.name)
+      );
+      md.publishers = JSON.stringify(
+        metadata.involved_companies
+          ?.filter((ic) => ic.publisher)
+          ?.map((ic) => ic.company.name)
+      );
+      let alternates = [];
+      if (metadata.alternative_names) {
+        alternates.push(
+          metadata.alternative_names.map((an) => ({
+            type: an.comment,
+            name: an.name,
+          }))
+        );
+      }
+      if (metadata.game_localizations) {
+        alternates.push(
+          metadata.game_localizations.map((gn) => ({
+            type: gn.region.name,
+            name: gn.name,
+          }))
+        );
+      }
+      md.alternatetiles = JSON.stringify(alternates);
+      await md.save();
+      await game.setDetails(md);
+      await md.addFile(game);
+    } catch (error) {
+      console.error("Error adding metadata:", error);
+    }
+  }
+}
diff --git a/lib/metadataworker.js b/lib/metadataworker.js
deleted file mode 100644
index ddd3102..0000000
--- a/lib/metadataworker.js
+++ /dev/null
@@ -1,113 +0,0 @@
-import { twitchAccessToken, igdb, request } from "@phalcode/ts-igdb-client";
-import {
-  fields,
-  or,
-  and,
-  where,
-  whereIn,
-  WhereFlags,
-  WhereInFlags,
-  sort,
-  limit,
-  offset,
-} from "@phalcode/ts-igdb-client";
-
-const twitchSecrets = {
-  client_id: process.env.TWITCH_CLIENT_ID,
-  client_secret: process.env.TWITCH_CLIENT_SECRET,
-};
-const accessToken = await twitchAccessToken(twitchSecrets);
-
-const client = igdb(twitchSecrets.client_id, accessToken);
-
-const gameFields = [
-  "name",
-  "alternative_names.comment",
-  "alternative_names.name",
-  "cover.image_id",
-  "total_rating",
-  "first_release_date",
-  "summary",
-  "genres.name",
-  "involved_companies.company.name",
-  "involved_companies.developer",
-  "involved_companies.publisher",
-  "involved_companies.supporting",
-  "multiplayer_modes.*",
-  "game_localizations.name",
-  "game_localizations.region",
-  "platforms.name",
-];
-
-export async function getMetadata(query) {
-  const data = await client
-    .request("games")
-    .pipe(
-      fields(gameFields),
-      or(...buildOrAndClauses("name", "~", query)),
-      sort("name", "asc")
-    )
-    .execute();
-  return data;
-}
-
-function buildOrClauses(field, op, queries) {
-  let orClauses = [];
-  for (let x in queries) {
-    orClauses.push(where(field, op, queries[x], WhereFlags.CONTAINS));
-  }
-  return orClauses;
-}
-
-function buildOrAndClauses(field, op, queries) {
-  let orClauses = [];
-
-  for (let x in queries) {
-    let name = [...new Set(queries[x].split(" "))]; //dedupe;
-    let andClauses = [];
-    for (let y in name) {
-      andClauses.push(where(field, op, name[y], WhereFlags.CONTAINS));
-    }
-    orClauses.push(and(...andClauses));
-  }
-  return orClauses;
-}
-
-function normalizeName(filename) {
-  if (!filename) return;
-  return filename
-    .replace(/\.[A-z]{3,3}|\.|&|-|,|v[0-9]+\.[0-9]+|\[.*?\]|\(.*?\)/g, "")
-    .trim();
-}
-
-function getBestMatch(filename, data) {
-  const words = filename.split(" ");
-  let bestIndex = null;
-  let bestMatchCount = 0;
-  let lengthDifference = 0;
-  for (let x in data) {
-    let matchingWords = 0;
-    for (let y in words) {
-      if (data[x].name.toLowerCase().includes(words[y].toLowerCase()))
-        matchingWords++;
-    }
-    let diff = matchingWords - dataWords.length;
-    if (matchingWords > bestMatchCount && diff < lengthDifference) {
-      bestIndex = x;
-      bestMatchCount = matchingWords;
-      lengthDifference = diff;
-      if (lengthDifference < 0) lengthDifference = 0;
-    }
-  }
-  if (bestIndex != null) {
-    return data[bestIndex];
-  }
-  return;
-}
-
-let games = await getMetadata([
-  "The Legend of Zelda A Link to the Past",
-  "Super Mario Sunshine",
-]);
-console.log(JSON.stringify(games.data, null, 2));
-//console.log(await getMetadata(games))
diff --git a/lib/models/file.js b/lib/models/file.js
index b3f0467..754ba21 100644
--- a/lib/models/file.js
+++ b/lib/models/file.js
@@ -55,6 +55,11 @@ export default function (sequelize) {
       type: DataTypes.BOOLEAN,
       defaultValue: false,
       allowNull: false
+    },
+    nongame: {
+      type: DataTypes.BOOLEAN,
+      defaultValue: false,
+      allowNull: false
     }
   }, {
     indexes: [
diff --git a/lib/models/metadata.js b/lib/models/metadata.js
index f86fa9a..19f9e9e 100644
--- a/lib/models/metadata.js
+++ b/lib/models/metadata.js
@@ -2,10 +2,9 @@ import { DataTypes } from "sequelize"
 
 export default function (sequelize) {
   const Metadata = sequelize.define('Metadata', {
-    id: {
+    id: {//these will match the igdbid to make things a little easier
      type: DataTypes.INTEGER,
      primaryKey: true,
-      autoIncrement: true
    },
    title: {
      type: DataTypes.STRING,
@@ -15,23 +14,17 @@
      type: DataTypes.STRING
    },
    description: {
-      type: DataTypes.STRING
+      type: DataTypes.TEXT
    },
    rating: {
      type: DataTypes.STRING
    },
-    coverarturl: {
+    coverartid: {
      type: DataTypes.STRING
    },
    releasedate: {
      type: DataTypes.DATE
    },
-    igdbid: {
-      type: DataTypes.INTEGER
-    },
-    timetobeat: {
-      type: DataTypes.STRING
-    },
    genre: {
      type: DataTypes.STRING
    },
@@ -43,11 +36,14 @@
    },
    gamemodes:{
      type: DataTypes.STRING
+    },
+    platforms: {
+      type: DataTypes.STRING
    }
  }, {
    indexes: [
      { fields: ['title'] },
-      { fields: ['description'] }//If this slows down the db may want to not index this.
+      { fields: ['description'] },//If this slows down the db may want to not index this.
] }) diff --git a/lib/nonGameTerms.json b/lib/nonGameTerms.json index e5088f4..1259ee0 100644 --- a/lib/nonGameTerms.json +++ b/lib/nonGameTerms.json @@ -7,6 +7,7 @@ "beta", "box", "boxart", + "cbr", "cheat", "config", "cfg", @@ -32,6 +33,7 @@ "mod", "movie", "music", + "mp4", "ost", "overlay", "patch", diff --git a/lib/services/elasticsearch.js b/lib/services/elasticsearch.js index f407793..1f96701 100644 --- a/lib/services/elasticsearch.js +++ b/lib/services/elasticsearch.js @@ -1,32 +1,13 @@ -import { Client } from '@elastic/elasticsearch'; -import debugPrint from '../debugprint.js'; -import { File } from '../models/index.js'; -import { readFileSync } from 'fs'; -import { fileURLToPath } from 'url'; -import { dirname, resolve } from 'path'; +import { Client } from "@elastic/elasticsearch"; +import debugPrint from "../debugprint.js"; +import { File } from "../models/index.js"; +import { Timer } from "../time.js"; const client = new Client({ - node: process.env.ELASTICSEARCH_URL || 'http://localhost:9200' + node: process.env.ELASTICSEARCH_URL || "http://localhost:9200", }); -const INDEX_NAME = 'myrient_files'; - -// Cache for nonGameTerms -let nonGameTermsCache = null; - -function getNonGameTerms() { - if (nonGameTermsCache) { - return nonGameTermsCache; - } - - const __filename = fileURLToPath(import.meta.url); - const __dirname = dirname(__filename); - - const nonGameTermsPath = resolve(__dirname, '../../lib/nonGameTerms.json'); - nonGameTermsCache = JSON.parse(readFileSync(nonGameTermsPath, 'utf8')); - - return nonGameTermsCache; -} +const INDEX_NAME = "myrient_files"; export async function initElasticsearch() { try { @@ -40,56 +21,59 @@ export async function initElasticsearch() { analysis: { analyzer: { filename_analyzer: { - type: 'custom', - tokenizer: 'standard', - filter: ['lowercase', 'word_delimiter_graph'] - } - } - } + type: "custom", + tokenizer: "standard", + filter: ["lowercase", "word_delimiter_graph"], + }, + }, + }, }, mappings: { properties: { filename: { - type: 'text', - analyzer: 'filename_analyzer' + type: "text", + analyzer: "filename_analyzer", }, category: { - type: 'text', - analyzer: 'standard', + type: "text", + analyzer: "standard", fields: { keyword: { - type: 'keyword' - } - } + type: "keyword", + }, + }, }, type: { - type: 'text', - analyzer: 'standard' + type: "text", + analyzer: "standard", }, region: { - type: 'text', - analyzer: 'standard' + type: "text", + analyzer: "standard", }, filenamekws: { - type: 'text', - analyzer: 'standard' + type: "text", + analyzer: "standard", }, categorykws: { - type: 'text', - analyzer: 'standard' + type: "text", + analyzer: "standard", }, regionkws: { - type: 'text', - analyzer: 'standard' - } - } - } - } + type: "text", + analyzer: "standard", + }, + nongame: { + type: "boolean", + }, + }, + }, + }, }); - console.log('Elasticsearch index created'); + console.log("Elasticsearch index created"); } } catch (error) { - console.error('Elasticsearch init error:', error); + console.error("Elasticsearch init error:", error); process.exit(1); } } @@ -99,16 +83,16 @@ export async function indexFile(file) { await client.index({ index: INDEX_NAME, id: file.id.toString(), - document: file + document: file, }); debugPrint(`Indexed file: ${file.filename}`); } catch (error) { - console.error('Error indexing file:', error); + console.error("Error indexing file:", error); } } export async function bulkIndexFiles(files) { - const operations = files.flatMap(file => [ + const operations = files.flatMap((file) => [ { index: { _index: 
INDEX_NAME, _id: file.id.toString() } }, { filename: file.filename, @@ -117,19 +101,20 @@ export async function bulkIndexFiles(files) { region: file.region, filenamekws: file.filenamekws, categorykws: file.categorykws, - regionkws: file.regionkws - } + regionkws: file.regionkws, + nongame: file.nongame + }, ]); try { const { errors, items } = await client.bulk({ refresh: true, - operations + operations, }); if (errors) { - console.error('Bulk indexing had errors'); - items.forEach(item => { + console.error("Bulk indexing had errors"); + items.forEach((item) => { if (item.index.error) { console.error(item.index.error); } @@ -138,46 +123,52 @@ export async function bulkIndexFiles(files) { debugPrint(`Bulk indexed ${files.length} files`); } catch (error) { - console.error('Bulk indexing error:', error); + console.error("Bulk indexing error:", error); } } export async function search(query, options) { //add kws for selected fields - let builtFields = [] - for(let field in options.fields){ - builtFields.push(options.fields[field]) - builtFields.push(options.fields[field] + 'kws') + let builtFields = []; + for (let field in options.fields) { + builtFields.push(options.fields[field]); + builtFields.push(options.fields[field] + "kws"); } const searchQuery = { index: INDEX_NAME, body: { - size: 1500, + size: options.pageSize, + from: options.pageSize * options.page, query: { bool: { must: buildMustClauses(query, options, builtFields), - should: buildShouldClauses(query, options, builtFields) - } + should: buildShouldClauses(query, options, builtFields), + }, }, highlight: { fields: { filename: {}, category: {}, type: {}, - region: {} - } - } - } + region: {}, + }, + }, + }, }; + if (options.hideNonGame) { + searchQuery.body.query.bool["filter"] = { + term: { nongame: false }, + }; + } try { - const startTime = process.hrtime(); + let timer = new Timer(); const response = await client.search(searchQuery); // Fetch full records from PostgreSQL for the search results - const ids = response.hits.hits.map(hit => hit._id); + const ids = response.hits.hits.map((hit) => hit._id); const fullRecords = await File.findAll({ - where: { id: ids } + where: { id: ids }, }); // Create a map of full records by id @@ -187,49 +178,44 @@ export async function search(query, options) { }, {}); // Build results with full PostgreSQL records - let results = response.hits.hits.map(hit => ({ - ...recordMap[hit._id].dataValues, + let results = response.hits.hits.map((hit) => ({ + ...recordMap[hit._id]?.dataValues, score: hit._score, - highlights: hit.highlight + highlights: hit.highlight, })); - // Apply non-game content filtering in JavaScript if the option is enabled - if (options.hideNonGame) { - const nonGameTerms = getNonGameTerms(); - const termPatterns = nonGameTerms.terms.map(term => new RegExp(term, 'i')); + //Filter out anything that couldn't be found in postgres + results = results.filter(result => result.filename) - // Filter results in JavaScript (much faster than complex Elasticsearch queries) - results = results.filter(item => { - // Check if filename contains any of the non-game terms - return !termPatterns.some(pattern => pattern.test(item.filename)); - }); - } - - const elapsed = parseHrtimeToSeconds(process.hrtime(startTime)); + const elapsed = timer.elapsedSeconds(); return { items: results, - elapsed + db: fullRecords, + count: response.hits.total.value || 0, + elapsed, }; } catch (error) { - console.error('Search error:', error); - return { items: [], elapsed: 0 }; + console.error("Search error:", error); 
+ return { items: [], elapsed: 0, count: 0 }; } } function buildMustClauses(query, options, builtFields) { const clauses = []; - if (options.combineWith === 'AND') { - query.split(' ').forEach(term => { + if (options.combineWith === "AND") { + query.split(" ").forEach((term) => { clauses.push({ multi_match: { query: term, - fields: builtFields.map(field => - field === 'filename' || 'filenamekws' ? `${field}^2` : field + fields: builtFields.map((field) => + field === "filename" || field === "filenamekws" + ? `${field}^2` + : field ), fuzziness: options.fuzzy || 0, - type: 'best_fields' - } + type: "best_fields", + }, }); }); } @@ -240,26 +226,22 @@ function buildMustClauses(query, options, builtFields) { function buildShouldClauses(query, options, builtFields) { const clauses = []; - if (options.combineWith !== 'AND') { + if (options.combineWith !== "AND") { clauses.push({ multi_match: { query, - fields: builtFields.map(field => - field === 'filename' || 'filenamekws' ? `${field}^2` : field + fields: builtFields.map((field) => + field === "filename" || field === "filenamekws" ? `${field}^2` : field ), fuzziness: options.fuzzy || 0, - type: 'best_fields' - } + type: "best_fields", + }, }); } return clauses; } -function parseHrtimeToSeconds(hrtime) { - return (hrtime[0] + (hrtime[1] / 1e9)).toFixed(3); -} - export async function getSuggestions(query, options) { try { const response = await client.search({ @@ -268,26 +250,26 @@ export async function getSuggestions(query, options) { query: { multi_match: { query, - fields: ['filename^2', 'filenamekws^2', 'category', 'categorykws'], - fuzziness: 'AUTO', - type: 'best_fields' - } + fields: ["filename^2", "filenamekws^2", "category", "categorykws"], + fuzziness: "AUTO", + type: "best_fields", + }, }, - _source: ['filename', 'category'], - size: 10 - } + _source: ["filename", "category"], + size: 10, + }, }); - return response.hits.hits.map(hit => ({ - suggestion: hit._source.filename + return response.hits.hits.map((hit) => ({ + suggestion: hit._source.filename, })); } catch (error) { - console.error('Suggestion error:', error); + console.error("Suggestion error:", error); return []; } } -export async function getSample(query, options){ +export async function getSample(query, options) { try { const response = await client.search({ index: INDEX_NAME, @@ -295,18 +277,18 @@ export async function getSample(query, options){ query: { match: { filename: query, - } + }, }, - _source: ['filename'], - size: 30 - } + _source: ["filename"], + size: 30, + }, }); - return response.hits.hits.map(hit => ({ - sample: hit._source.filename + return response.hits.hits.map((hit) => ({ + sample: hit._source.filename, })); } catch (error) { - console.error('Sample error:', error); + console.error("Sample error:", error); return []; } -} \ No newline at end of file +} diff --git a/lib/time.js b/lib/time.js index 501fa0b..5275d38 100644 --- a/lib/time.js +++ b/lib/time.js @@ -1,4 +1,4 @@ -export class timer { +export class Timer { constructor() { this.startTime = process.hrtime(); } @@ -13,4 +13,7 @@ export class timer { let s = Math.floor(elapsed % 60); return `${h ? h + "h" : ""}${m ? 
m + "m" : ""}${s + "s"}`; } -} + elapsedSeconds(){ + return this.parseHrtimetoSeconds(process.hrtime(this.startTime)); + } +} \ No newline at end of file diff --git a/package-lock.json b/package-lock.json index 4eed92c..88efe08 100644 --- a/package-lock.json +++ b/package-lock.json @@ -24,7 +24,7 @@ "pg-hstore": "^2.3.4", "piscina": "^4.7.0", "sanitize": "^2.1.2", - "sequelize": "^6.37.1", + "sequelize": "^6.37.7", "sequelize-cli": "^6.6.2", "to-words": "^4.5.1", "uuid": "^11.1.0" @@ -3175,9 +3175,9 @@ "license": "MIT" }, "node_modules/sequelize": { - "version": "6.37.5", - "resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.37.5.tgz", - "integrity": "sha512-10WA4poUb3XWnUROThqL2Apq9C2NhyV1xHPMZuybNMCucDsbbFuKg51jhmyvvAUyUqCiimwTZamc3AHhMoBr2Q==", + "version": "6.37.7", + "resolved": "https://registry.npmjs.org/sequelize/-/sequelize-6.37.7.tgz", + "integrity": "sha512-mCnh83zuz7kQxxJirtFD7q6Huy6liPanI67BSlbzSYgVNl5eXVdE2CN1FuAeZwG1SNpGsNRCV+bJAVVnykZAFA==", "funding": [ { "type": "opencollective", diff --git a/package.json b/package.json index 29d1dba..9e78c8f 100644 --- a/package.json +++ b/package.json @@ -19,7 +19,7 @@ "pg-hstore": "^2.3.4", "piscina": "^4.7.0", "sanitize": "^2.1.2", - "sequelize": "^6.37.1", + "sequelize": "^6.37.7", "sequelize-cli": "^6.6.2", "to-words": "^4.5.1", "uuid": "^11.1.0" diff --git a/server.js b/server.js index 4eb3e73..887bfed 100644 --- a/server.js +++ b/server.js @@ -21,6 +21,7 @@ import { initElasticsearch } from "./lib/services/elasticsearch.js"; import i18n, { locales } from "./config/i18n.js"; import { v4 as uuidv4 } from "uuid"; import { optimizeDatabaseKws } from "./lib/dboptimize.js"; +import MetadataSearch from "./lib/metadatasearch.js"; let categoryListPath = "./lib/categories.json"; let nonGameTermsPath = "./lib/nonGameTerms.json"; @@ -51,6 +52,7 @@ let defaultSettings = { fuzzy: 0, prefix: true, hideNonGame: true, + useOldResults: false, }; //programmatically set the default boosts while reducing overhead when adding another search field @@ -64,6 +66,7 @@ for (let field in searchFields) { } let search = new Searcher(searchFields); +let metadataSearch = new MetadataSearch(); async function getFilesJob() { console.log("Updating the file list."); @@ -106,7 +109,7 @@ app.use((req, res, next) => { }); //static files -app.use('/public', express.static('views/public')) +app.use("/public", express.static("views/public")); //middleware app.use(sanitize.middleware); @@ -193,22 +196,35 @@ app.get("/search", async function (req, res) { if (settings.combineWith != "AND") { delete settings.combineWith; } + settings.pageSize = settings.useOldResults ? 100 : 10; + settings.page = pageNum - 1; let results = await search.findAllMatches(query, settings); debugPrint(results); - if (results.items.length && pageNum == 1) { + let metas = await metadataSearch.getGamesMetadata(results.db); + if (results.count && pageNum == 1) { queryCount += 1; await QueryCount.update({ count: queryCount }, { where: { id: 1 } }); updateDefaults(); } + let resultOutput = []; + for (let x in results.items) { + resultOutput.push({ + file: results.items[x], + metadata: metas[x] || [], + }); + } let options = { query: query, - results: results, + results: resultOutput, + count: results.count, + elapsed: results.elapsed, pageNum: pageNum, + pageCount: Math.ceil(results.count / settings.pageSize), indexing: search.indexing, urlPrefix: urlPrefix, settings: settings, }; - let page = "resultsnew"; + let page = settings.useOldResults ? 
"resultsold" : "results"; options = buildOptions(page, options); res.render(indexPage, options); }); @@ -511,4 +527,4 @@ if ( await getFilesJob(); } -cron.schedule("0 30 2 * * *", getFilesJob); \ No newline at end of file +cron.schedule("0 30 2 * * *", getFilesJob); diff --git a/views/pages/results.ejs b/views/pages/results.ejs index 74efa8e..c14b3ea 100644 --- a/views/pages/results.ejs +++ b/views/pages/results.ejs @@ -1,12 +1,8 @@ <% - let pageCount = Math.ceil(results.items.length / 100) pageCount = pageCount ? pageCount : 1 //always ensure 1 page if(pageNum > pageCount){ pageNum = 1 } - let entryStart = Math.floor((pageNum - 1) * 100) - let entryEnd = entryStart + 100 - entryEnd = entryEnd > results.items.length ? results.items.length : entryEnd %> @@ -31,7 +27,7 @@

- <%= __('search.found_plural', { count: results.items.length }) %> <%= __('search.in_seconds', { seconds: results.elapsed }) %>. + <%= __('search.found_plural', { count: count }) %> <%= __('search.in_seconds', { seconds: elapsed }) %>. <%= indexing ? __('search.indexing') : "" %> <% if (settings.hideNonGame) { %> @@ -43,63 +39,11 @@

-

<%= __('search.displaying_results', { start: entryStart, end: entryEnd }) %>

- - - - - - - - - - - - <% if (process.env.EMULATOR_ENABLED === 'true') { %> - - <% } %> - - - <% for (let x = entryStart; x < entryEnd; x++) { %> - - - - - - - - - - <% if (process.env.EMULATOR_ENABLED === 'true') { %> - - <% } %> - +
+ <% for (let x = 0; x < results.length; x++) { %> + <%- include("../partials/result", {result: results[x]}) %> <% } %> -
<%= __('results.table.name') %><%= __('results.table.group') %><%= __('results.table.category') %><%= __('results.table.region') %><%= __('results.table.type') %><%= __('results.table.size') %><%= __('results.table.date') %><%= __('results.table.score') %><%= __('results.table.play') %>
- - <%= results.items[x].filename %> - - - <%= results.items[x].group %> - - <%= results.items[x].category %> - - <%= results.items[x].region %> - - <%= results.items[x].type %> - - <%= results.items[x].size %> - - <%= results.items[x].date %> - - <%= results.items[x].score.toFixed(2) %> - - <% if (isEmulatorCompatible(results.items[x].category)) { %> - <%= __('emulator.play') %> - <% } else { %> - - <% } %> -
+
 <% if(pageCount > 1) { %>
@@ -150,33 +94,4 @@
 <% } %>
-
-
\ No newline at end of file
+
\ No newline at end of file
diff --git a/views/pages/resultsnew.ejs b/views/pages/resultsold.ejs
similarity index 66%
rename from views/pages/resultsnew.ejs
rename to views/pages/resultsold.ejs
index 0bdf09f..74efa8e 100644
--- a/views/pages/resultsnew.ejs
+++ b/views/pages/resultsold.ejs
@@ -44,11 +44,62 @@

<%= __('search.displaying_results', { start: entryStart, end: entryEnd }) %>

-
+ + + + + + + + + + + + <% if (process.env.EMULATOR_ENABLED === 'true') { %> + + <% } %> + + <% for (let x = entryStart; x < entryEnd; x++) { %> - <%- include("../partials/result", {result: results.items[x]}) %> + + + + + + + + + + <% if (process.env.EMULATOR_ENABLED === 'true') { %> + + <% } %> + <% } %> - +
<%= __('results.table.name') %><%= __('results.table.group') %><%= __('results.table.category') %><%= __('results.table.region') %><%= __('results.table.type') %><%= __('results.table.size') %><%= __('results.table.date') %><%= __('results.table.score') %><%= __('results.table.play') %>
+ + <%= results.items[x].filename %> + + + <%= results.items[x].group %> + + <%= results.items[x].category %> + + <%= results.items[x].region %> + + <%= results.items[x].type %> + + <%= results.items[x].size %> + + <%= results.items[x].date %> + + <%= results.items[x].score.toFixed(2) %> + + <% if (isEmulatorCompatible(results.items[x].category)) { %> + <%= __('emulator.play') %> + <% } else { %> + + <% } %> +
<% if(pageCount > 1) { %> @@ -99,4 +150,33 @@
<% } %>
-
\ No newline at end of file
+
+
\ No newline at end of file
diff --git a/views/partials/opengraphresults.ejs b/views/partials/opengraphresults.ejs
index 622f049..358f974 100644
--- a/views/partials/opengraphresults.ejs
+++ b/views/partials/opengraphresults.ejs
@@ -1,9 +1,8 @@
 <%
-let resultStart = Math.floor((pageNum - 1) * 100)
-let length = results.items.length > 5 + resultStart ? 5 + resultStart : results.items.length
+let length = results.length > 5 ? 5 : results.length
 let resultString = ''
-for(let x = resultStart ; x < length; x++){
-    resultString += `${x + 1}: ${results.items[x].filename}\n\n`
+for(let x = 0 ; x < length; x++){
+    resultString += `${x + 1}: ${results[x].filename}\n\n`
 }
 resultString = resultString.trim()
 %>
diff --git a/views/partials/result.ejs b/views/partials/result.ejs
index 68c6d78..552cebc 100644
--- a/views/partials/result.ejs
+++ b/views/partials/result.ejs
@@ -1,14 +1,19 @@
+<%
+    const metadata = result.metadata || new Object()
+    const file = result.file || new Object()
+    const coverUrl = metadata.coverartid ? `/proxy-image?url=https://images.igdb.com/igdb/image/upload/t_cover_big/${metadata.coverartid}.webp` : "/public/images/coverart/nocoverart.png"
+%>
- "> +
-

<%= result.title || result.filename %>

-

Released: <%= result.releaseDate || result.date %> Region: <%= result.region %>

-

<%= result.description || "No description was found." %>

- <% if(result.title) {%> -

Filename: <%= result.filename %>

+

<%= metadata.title || file.filename %>

+

Released: <%= metadata.releasedate || file.date %> Region: <%= file.region %> Platform: <%= file.category %>

+

<%= metadata.description || "No description was found." %>

+ <% if(metadata.title) {%> +

Filename: <%= file.filename %>

<% } %> -

+

\ No newline at end of file