diff --git a/src/bootstrap/bootstrap.js b/bootstrap/bootstrap.js
similarity index 100%
rename from src/bootstrap/bootstrap.js
rename to bootstrap/bootstrap.js
diff --git a/package.json b/package.json
index fa6a963..b35be66 100644
--- a/package.json
+++ b/package.json
@@ -2,7 +2,7 @@
   "name": "scenery",
   "version": "2.0.0",
   "scripts": {
-    "bootstrap": "node ./src/bootstrap/bootstrap.js",
+    "bootstrap": "node ./bootstrap/bootstrap.js",
     "dev": "cross-env NODE_OPTIONS=--openssl-legacy-provider nodemon",
     "build": "cross-env NODE_OPTIONS=--openssl-legacy-provider next build && tsc --project tsconfig.server.json",
     "build_pages": "next build",
diff --git a/src/config/config.ts b/src/config/config.ts
index fe07301..9ee47f1 100644
--- a/src/config/config.ts
+++ b/src/config/config.ts
@@ -18,5 +18,6 @@ const server_config = {
     use_backup_file_server: false,
     backup_file_server_url: "http://127.0.0.1:8787",
     optimize_images: true,
+    import_images_bot_password: "" //if "" then deactivated
 }
 export default server_config
\ No newline at end of file
diff --git a/src/server/bulk_import_images/bulk_import_images_without_check.ts b/src/server/bulk_import_images/bulk_import_images.ts
similarity index 69%
rename from src/server/bulk_import_images/bulk_import_images_without_check.ts
rename to src/server/bulk_import_images/bulk_import_images.ts
index b871289..64d2087 100644
--- a/src/server/bulk_import_images/bulk_import_images_without_check.ts
+++ b/src/server/bulk_import_images/bulk_import_images.ts
@@ -3,15 +3,26 @@ import path from 'path'
 import image_ops from "../helpers/image_ops"
 import config from "../../config/config"
 import db_ops from "../helpers/db_ops"
+// import { exit } from 'process'
 const myArgs:any = {}
+let PATH_TO_IMAGE_IMPORT = path.join(config.root_path, 'import', 'images')
+
+
+console.log(process.argv.slice(2))
 for (const arg of process.argv.slice(2)){
     if(arg === "--use_filename_id"){
         myArgs["use_filename_id"]=true
+    }else if(arg === "--move"){
+        myArgs["move"]=true
+    }else if (arg.startsWith("--path=")){
+        PATH_TO_IMAGE_IMPORT=path.resolve(arg.slice(arg.indexOf("--path=")+7))
+        console.log(PATH_TO_IMAGE_IMPORT)
+    }else if(arg === "--bypass_ambience"){
+        myArgs["bypass_ambience"]=true
     }
 }
 const fsPromises = fs.promises;
-const PATH_TO_IMAGE_IMPORT = path.join(config.root_path, 'import', 'images')
 const IMAGES = fs.readdirSync(PATH_TO_IMAGE_IMPORT)
 async function import_images() {
     for (const image_file_name of IMAGES) {
@@ -27,7 +38,7 @@ async function import_images() {
             console.log(`id: ${img_id} is already in db`)
             break
         }
-        const img_data = await image_ops.import_image(img_buffer, [], "", true, img_id)
+        const img_data = await image_ops.import_image(img_buffer, [], "", myArgs["bypass_ambience"], img_id, myArgs["move"]?img_path:"")
         console.log(img_data)
         // fsPromises.unlink(img_path)
     }
diff --git a/src/server/bulk_import_images/bulk_import_images_with_check.ts b/src/server/bulk_import_images/bulk_import_images_with_check.ts
deleted file mode 100644
index b416b10..0000000
--- a/src/server/bulk_import_images/bulk_import_images_with_check.ts
+++ /dev/null
@@ -1,35 +0,0 @@
-import fs from 'fs'
-import path from 'path'
-import image_ops from "../helpers/image_ops"
-import config from "../../config/config"
-import db_ops from "../helpers/db_ops"
-const myArgs:any = {}
-for (const arg of process.argv.slice(2)){
-    if(arg === "--use_filename_id"){
-        myArgs["use_filename_id"]=true
-    }
-}
-const fsPromises = fs.promises;
-const PATH_TO_IMAGE_IMPORT = path.join(config.root_path, 'import', 'images')
-const IMAGES = fs.readdirSync(PATH_TO_IMAGE_IMPORT)
-async function import_images() {
-    for (const image_file_name of IMAGES) {
-        const img_path = `${PATH_TO_IMAGE_IMPORT}/${image_file_name}`
-        const img_buffer = await fsPromises.readFile(img_path)
-        const img_id = myArgs["use_filename_id"]? parseInt(path.parse(img_path).name) : -1
-        if (isNaN(img_id)) {
-            console.log(`${path.parse(img_path).name} is not a number`)
-            break
-        }
-        const img_exists = await db_ops.image_ops.check_if_image_exists_by_id(img_id)
-        if (img_exists){
-            console.log(`id: ${img_id} is already in db`)
-            break
-        }
-        const img_data = await image_ops.import_image(img_buffer, [], "", false, img_id)
-        console.log(img_data)
-        // fsPromises.unlink(img_path)
-    }
-    process.exit()
-}
-import_images()
diff --git a/src/server/helpers/image_ops.ts b/src/server/helpers/image_ops.ts
index d7d8c97..90dd115 100644
--- a/src/server/helpers/image_ops.ts
+++ b/src/server/helpers/image_ops.ts
@@ -206,14 +206,14 @@ function parse_author(tags: string[]) { //tags like "artist:shishkin"
     return "???"
 }
-async function import_image(image_buffer: Buffer, tags: string[] = [], source_url = "", local_add = false, img_id=-1) {
+async function import_image(image_buffer: Buffer, tags: string[] = [], source_url = "", bypass_checks = false, img_id=-1,move_path="") {
     try {
         const sha256_hash = crypto_ops.image_buffer_sha256_hash(image_buffer)
         const found_img = await db_ops.image_ops.find_image_by_sha256(sha256_hash)
         if (found_img) {
             return `Image with the same sha256 is already in the db. Image id = ${found_img.id} `
         }
-        if (!local_add && !tags.includes("bypass_dup_check")) {
+        if (!bypass_checks && !tags.includes("bypass_dup_check")) {
            const res = await reverse_search(image_buffer,true)
            if (res["local_features_res"] !== undefined) {
                return `Similar image is already in the db. Image ids = ${JSON.stringify(res["local_features_res"])} `
            }
@@ -251,16 +251,20 @@ async function import_image(image_buffer: Buffer, tags: string[] = [], source_ur
         const author = parse_author(tags)
         let generated_tags = []
         let caption = ""
-        if(!local_add){
+        if(!bypass_checks){
             [generated_tags, caption] = (await Promise.allSettled([get_image_tags(image_buffer), get_image_caption(image_buffer)])).map((promise: any) => promise.value)
             tags.push(...generated_tags)
         }
         const new_image_id = img_id === -1 ? (await db_ops.image_ops.get_max_image_id()) + 1 : img_id
         await db_ops.image_ops.add_image({ id: new_image_id, caption, source_url, file_ext, width, height, author, size, tags: [...new Set(tags)], sha256: sha256_hash, created_at: new Date() })
-        await fs.writeFile(`${PATH_TO_IMAGES}/${new_image_id}.${file_ext}`, image_buffer, 'binary')
+        if (move_path){
+            await fs.rename(move_path, `${PATH_TO_IMAGES}/${new_image_id}.${file_ext}`)
+        }else{
+            await fs.writeFile(`${PATH_TO_IMAGES}/${new_image_id}.${file_ext}`, image_buffer, 'binary')
+        }
         await fs.writeFile(`${PATH_TO_THUMBNAILS}/${new_image_id}.jpg`, thumbnail_buffer, 'binary')
-        if(!local_add){
+        if(!bypass_checks){
             const res = await calculate_all_image_features(new_image_id, image_buffer)
             if (!res) {
                 return "Can't calculate_all_image_features"
             }
@@ -279,7 +283,7 @@ async function import_image(image_buffer: Buffer, tags: string[] = [], source_ur
             }
         }
     }
-        console.log(`OK. New image_id: ${new_image_id}. local_add = ${local_add}`)
+        console.log(`OK. New image_id: ${new_image_id}. bypass_checks = ${bypass_checks}`)
         return `Success! Image id = ${new_image_id}`
     } catch (error) {
         console.error(error);
diff --git a/src/server/routes/import_image.ts b/src/server/routes/import_image.ts
index d819a17..00ef910 100644
--- a/src/server/routes/import_image.ts
+++ b/src/server/routes/import_image.ts
@@ -1,7 +1,9 @@
 import db_ops from './../helpers/db_ops'
 import image_ops from './../helpers/image_ops'
 import { FastifyRequest, FastifyReply } from "fastify"
-import { FromSchema } from "json-schema-to-ts";
+import { FromSchema } from "json-schema-to-ts"
+import config from "./../../config/config"
+
 const body_schema_import_image = {
     type: 'object',
     properties: {
@@ -26,6 +28,12 @@ const body_schema_import_image = {
                 value: { type: "string" }
             }
         },
+        import_images_bot_password: {
+            type: "object",
+            properties: {
+                value: { type: "string" }
+            }
+        },
     },
     required: ['image'],
 } as const;
@@ -67,7 +75,14 @@ async function import_image(req: FastifyRequest<{ Body: FromSchema