
Merge pull request #762 from marvin-wtt/v3-db-clients

[feat] Add support for other db clients
Pouria Ezzati, 1 year ago
parent
commit
0c310ccb53

+ 10 - 3
.example.env

@@ -10,13 +10,20 @@ DEFAULT_DOMAIN=localhost:3000
 # Generated link length
 LINK_LENGTH=6
 
-# Postgres database credential details
+# Database client. Available clients for the supported databases:
+# pg | pg-native | sqlite3 | better-sqlite3 | mysql | mysql2 | oracledb | tedious
+DB_CLIENT=sqlite3
+# SQL database credential details
 DB_HOST=localhost
 DB_PORT=5432
-DB_NAME=postgres
-DB_USER=
+DB_NAME=kutt
+DB_USER=postgres
 DB_PASSWORD=
 DB_SSL=false
+DB_POOL_MIN=0
+DB_POOL_MAX=10
+# SQLite database file name. Only if you use SQLite.
+DB_FILENAME=data
 
 # Redis host and port
 REDIS_HOST=127.0.0.1
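
For clients other than SQLite the same variables apply; a hypothetical MySQL setup, for example, would only change the client and connection values (illustrative values, not project defaults):

DB_CLIENT=mysql2
DB_HOST=localhost
DB_PORT=3306
DB_NAME=kutt
DB_USER=kutt
DB_PASSWORD=change-me
DB_SSL=false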

+ 2 - 0
.gitignore

@@ -12,3 +12,5 @@ production-server
 dump.rdb
 docs/api/static
 **/.DS_Store
+db/*
+!db/.gitkeep

+ 2 - 0
db/.gitkeep

@@ -0,0 +1,2 @@
+# keep this folder in git
+# if you use a file-based database such as sqlite3, the database files will be stored here

+ 17 - 15
knexfile.js

@@ -1,20 +1,22 @@
 const env = require("./server/env");
 
+const isSQLite = env.DB_CLIENT === "sqlite3" || env.DB_CLIENT === "better-sqlite3";
+
 module.exports = {
-  production: {
-    client: "postgresql",
-    connection: {
-      host: env.DB_HOST,
-      database: env.DB_NAME,
-      user: env.DB_USER,
-      port: env.DB_PORT,
-      password: env.DB_PASSWORD,
-      ssl: env.DB_SSL,
-    },
-    migrations: {
-      tableName: "knex_migrations",
-      directory: "server/migrations",
-      disableMigrationsListValidation: true,
-    }
+  client: env.DB_CLIENT,
+  connection: {
+    ...(isSQLite && { filename: "db/" + env.DB_FILENAME }),
+    host: env.DB_HOST,
+    database: env.DB_NAME,
+    user: env.DB_USER,
+    port: env.DB_PORT,
+    password: env.DB_PASSWORD,
+    ssl: env.DB_SSL,
+  },
+  useNullAsDefault: true,
+  migrations: {
+    tableName: "knex_migrations",
+    directory: "server/migrations",
+    disableMigrationsListValidation: true,
   }
 };
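
Since the file now exports one flat config object instead of per-environment keys, the knex CLI no longer needs the --env production flag (see the package.json scripts below), and application code can consume the same object. A minimal sketch:

// both `npx knex migrate:latest` and programmatic use read the same flat object
const knex = require("knex");
const config = require("./knexfile");

const db = knex(config); // client is pg, mysql2, sqlite3, ... depending on DB_CLIENT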

File diff suppressed because it is too large
+ 707 - 16
package-lock.json


+ 5 - 3
package.json

@@ -6,10 +6,10 @@
   "scripts": {
     "docker:build": "docker build -t kutt .",
     "docker:run": "docker run -p 3000:3000 --env-file .env -d kutt:latest",
-    "dev": "node --watch-path=./server server/server.js",
+    "dev": "npm run migrate && node --watch-path=./server server/server.js",
     "start": "npm run migrate && cross-env NODE_ENV=production node server/server.js",
-    "migrate": "knex migrate:latest --env production",
-    "migrate:make": "knex migrate:make --env production",
+    "migrate": "knex migrate:latest",
+    "migrate:make": "knex migrate:make",
     "docs:build": "cd docs/api && node generate && cd ../.."
   },
   "repository": {
@@ -45,6 +45,7 @@
     "knex": "3.1.0",
     "morgan": "1.10.0",
     "ms": "2.1.3",
+    "mysql2": "3.11.3",
     "nanoid": "2.1.11",
     "node-cron": "3.0.2",
     "nodemailer": "^6.9.15",
@@ -54,6 +55,7 @@
     "passport-localapikey-update": "0.6.0",
     "pg": "8.12.0",
     "pg-query-stream": "4.6.0",
+    "sqlite3": "5.1.7",
     "useragent": "2.3.0",
     "uuid": "10.0.0"
   },

+ 3 - 1
server/cron.js

@@ -1,6 +1,7 @@
 const cron = require("node-cron");
 
 const query = require("./queries");
+const utils = require("./utils");
 const env = require("./env");
 
 if (env.NON_USER_COOLDOWN) {
@@ -9,6 +10,7 @@ if (env.NON_USER_COOLDOWN) {
   });
 }
 
+// check for and delete expired links every 15 seconds
 cron.schedule("*/15 * * * * *", function() {
-  query.link.batchRemove({ expire_in: ["<", new Date().toISOString()] }).catch();
+  query.link.batchRemove({ expire_in: ["<", utils.dateToUTC(new Date())] }).catch();
 });

+ 14 - 3
server/env.js

@@ -1,18 +1,29 @@
 require("dotenv").config();
 const { cleanEnv, num, str, bool } = require("envalid");
 
+const supportedDBClients = [
+  "pg",
+  "pg-native",
+  "sqlite3",
+  "better-sqlite3",
+  "mysql",
+  "mysql2"
+];
+
 const env = cleanEnv(process.env, {
   PORT: num({ default: 3000 }),
   SITE_NAME: str({ example: "Kutt" }),
   DEFAULT_DOMAIN: str({ example: "kutt.it" }),
   LINK_LENGTH: num({ default: 6 }),
+  DB_CLIENT: str({ choices: supportedDBClients }),
+  DB_FILENAME: str({ default: "data" }),
   DB_HOST: str({ default: "localhost" }),
   DB_PORT: num({ default: 5432 }),
   DB_NAME: str({ default: "postgres" }),
-  DB_USER: str(),
-  DB_PASSWORD: str(),
+  DB_USER: str({ default: "postgres" }),
+  DB_PASSWORD: str({ default: "" }),
   DB_SSL: bool({ default: false }),
-  DB_POOL_MIN: num({ default: 2 }),
+  DB_POOL_MIN: num({ default: 0 }),
   DB_POOL_MAX: num({ default: 10 }),
   REDIS_HOST: str({ default: "127.0.0.1" }),
   REDIS_PORT: num({ default: 6379 }),
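
DB_CLIENT has no default, so startup now fails fast when it is missing or set to an unsupported value. A minimal, standalone sketch of envalid's choices behavior (shortened list, fake environment):

const { cleanEnv, str } = require("envalid");

const env = cleanEnv(
  { DB_CLIENT: "sqlite3" },                                  // pretend process.env
  { DB_CLIENT: str({ choices: ["pg", "sqlite3", "mysql2"] }) }
);

console.log(env.DB_CLIENT); // "sqlite3"; a value outside the list would abort startup with a validation error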

+ 18 - 17
server/handlers/auth.handler.js

@@ -87,12 +87,11 @@ async function cooldown(req, res, next) {
   
   const ip = await query.ip.find({
     ip: req.realIP.toLowerCase(),
-    created_at: [">", subMinutes(new Date(), cooldownConfig).toISOString()]
+    created_at: [">", utils.dateToUTC(subMinutes(new Date(), cooldownConfig))]
   });
   
   if (ip) {
-    const timeToWait =
-      cooldownConfig - differenceInMinutes(new Date(), new Date(ip.created_at));
+    const timeToWait = cooldownConfig - differenceInMinutes(new Date(), utils.parseDatetime(ip.created_at));
     throw new CustomError(
       `Non-logged in users are limited. Wait ${timeToWait} minutes or log in.`,
       400
@@ -139,12 +138,16 @@ function login(req, res) {
 
 async function verify(req, res, next) {
   if (!req.params.verificationToken) return next();
+
+  const user = await query.user.find({
+    verification_token: req.params.verificationToken,
+    verification_expires: [">", utils.dateToUTC(new Date())]
+  });
+
+  if (!user) return next();
   
-  const [user] = await query.user.update(
-    {
-      verification_token: req.params.verificationToken,
-      verification_expires: [">", new Date().toISOString()]
-    },
+  const [updatedUser] = await query.user.update(
+    { id: user.id },
     {
       verified: true,
       verification_token: null,
@@ -152,7 +155,7 @@ async function verify(req, res, next) {
     }
   );
   
-  if (user) {
+  if (updatedUser) {
     const token = utils.signToken(user);
     utils.deleteCurrentToken(res);
     utils.setToken(res, token);
@@ -219,7 +222,7 @@ async function resetPasswordRequest(req, res) {
     { email: req.body.email },
     {
       reset_password_token: uuid(),
-      reset_password_expires: addMinutes(new Date(), 30).toISOString()
+      reset_password_expires: utils.dateToUTC(addMinutes(new Date(), 30))
     }
   );
 
@@ -248,7 +251,7 @@ async function resetPassword(req, res, next) {
     const [user] = await query.user.update(
       {
         reset_password_token: resetPasswordToken,
-        reset_password_expires: [">", new Date().toISOString()]
+        reset_password_expires: [">", utils.dateToUTC(new Date())]
       },
       { reset_password_expires: null, reset_password_token: null }
     );
@@ -289,7 +292,7 @@ async function changeEmailRequest(req, res) {
     {
       change_email_address: email,
       change_email_token: uuid(),
-      change_email_expires: addMinutes(new Date(), 30).toISOString()
+      change_email_expires: utils.dateToUTC(addMinutes(new Date(), 30))
     }
   );
   
@@ -317,16 +320,14 @@ async function changeEmail(req, res, next) {
   
   if (changeEmailToken) {
     const foundUser = await query.user.find({
-      change_email_token: changeEmailToken
+      change_email_token: changeEmailToken,
+      change_email_expires: [">", utils.dateToUTC(new Date())]
     });
   
     if (!foundUser) return next();
   
     const [user] = await query.user.update(
-      {
-        change_email_token: changeEmailToken,
-        change_email_expires: [">", new Date().toISOString()]
-      },
+      { id: foundUser.id },
       {
         change_email_token: null,
         change_email_expires: null,
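
The recurring change in this handler (and in domains.handler.js and the queries below) is the same: instead of filtering an UPDATE by token/expiry columns and relying on UPDATE ... RETURNING *, which only Postgres provides, the row is looked up first and then updated by primary key. A condensed sketch of the pattern, using the query and utils modules already imported by the handler:

// portable find-then-update-by-id flow, as in verify() above
async function verifyByToken(verificationToken) {
  const user = await query.user.find({
    verification_token: verificationToken,
    verification_expires: [">", utils.dateToUTC(new Date())]
  });
  if (!user) return null;

  const [updatedUser] = await query.user.update(
    { id: user.id },
    { verified: true, verification_token: null, verification_expires: null }
  );
  return updatedUser;
}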

+ 14 - 8
server/handlers/domains.handler.js

@@ -26,20 +26,26 @@ async function add(req, res) {
 };
 
 async function remove(req, res) {
-  const [domain] = await query.domain.update(
-    {
-      uuid: req.params.id,
-      user_id: req.user.id
-    },
+  const domain = await query.domain.find({
+    uuid: req.params.id,
+    user_id: req.user.id
+  });
+
+  if (!domain) {
+    throw new CustomError("Could not delete the domain.", 500);
+  }
+  
+  const [updatedDomain] = await query.domain.update(
+    { id: domain.id },
     { user_id: null }
   );
 
-  redis.remove.domain(domain);
-
-  if (!domain) {
+  if (!updatedDomain) {
     throw new CustomError("Could not delete the domain.", 500);
   }
 
+  redis.remove.domain(updatedDomain);
+
   if (req.isHTML) {
     const domains = (await query.domain.get({ user_id: req.user.id })).map(sanitize.domain);
     res.setHeader("HX-Reswap", "outerHTML");

+ 2 - 2
server/handlers/links.handler.js

@@ -151,8 +151,8 @@ async function edit(req, res) {
       delete req.body[name];
       return;
     }
-    if (name === "expire_in")
-      if (differenceInSeconds(new Date(value), new Date(link.expire_in)) <= 60) 
+    if (name === "expire_in" && link.expire_in)
+      if (Math.abs(differenceInSeconds(utils.parseDatetime(value), utils.parseDatetime(link.expire_in))) < 60)
           return;
     if (name === "password")
       if (value && value.replace(/•/ig, "").length === 0) {

+ 12 - 15
server/handlers/validators.handler.js

@@ -1,4 +1,4 @@
-const { isAfter, subDays, subHours, addMilliseconds } = require("date-fns");
+const { isAfter, subDays, subHours, addMilliseconds, differenceInHours } = require("date-fns");
 const { body, param } = require("express-validator");
 const promisify = require("util").promisify;
 const bcrypt = require("bcryptjs");
@@ -75,7 +75,7 @@ const createLink = [
     .customSanitizer(ms)
     .custom(value => value >= ms("1m"))
     .withMessage("Expire time should be more than 1 minute.")
-    .customSanitizer(value => addMilliseconds(new Date(), value).toISOString()),
+    .customSanitizer(value => utils.dateToUTC(addMilliseconds(new Date(), value))),
   body("domain")
     .optional({ nullable: true, checkFalsy: true })
     .customSanitizer(value => value === env.DEFAULT_DOMAIN ? null : value)
@@ -138,7 +138,7 @@ const editLink = [
     .customSanitizer(ms)
     .custom(value => value >= ms("1m"))
     .withMessage("Expire time should be more than 1 minute.")
-    .customSanitizer(value => addMilliseconds(new Date(), value).toISOString()),
+    .customSanitizer(value => utils.dateToUTC(addMilliseconds(new Date(), value))),
   body("description")
     .optional({ nullable: true, checkFalsy: true })
     .isString()
@@ -342,12 +342,11 @@ const deleteUser = [
 
 // TODO: if user has posted malware should do something better
 function cooldown(user) {
-  if (!env.GOOGLE_SAFE_BROWSING_KEY || !user || !user.cooldowns) return;
 
-  // If has active cooldown then throw error
-  const hasCooldownNow = user.cooldowns.some(cooldown =>
-    isAfter(subHours(new Date(), 12), new Date(cooldown))
-  );
+  if (!user?.cooldown) return;
+
+  // If user has active cooldown then throw error
+  const hasCooldownNow = differenceInHours(new Date(), utils.parseDatetime(user.cooldown)) < 12;
 
   if (hasCooldownNow) {
     throw new utils.CustomError("Cooldown because of a malware URL. Wait 12h");
@@ -386,20 +385,18 @@ async function malware(user, target) {
       })
     }
   ).then(res => res.json());
+
   if (!isMalware.data || !isMalware.data.matches) return;
 
   if (user) {
     const [updatedUser] = await query.user.update(
       { id: user.id },
-      {
-        cooldowns: knex.raw("array_append(cooldowns, ?)", [
-          new Date().toISOString()
-        ])
-      }
+      { cooldown: utils.dateToUTC(new Date()) },
+      { increments: ["malicious_attempts"] }
     );
 
     // Ban if too many cooldowns
-    if (updatedUser.cooldowns.length > 2) {
+    if (updatedUser.malicious_attempts > 2) {
       await query.user.update({ id: user.id }, { banned: true });
       throw new utils.CustomError("Too much malware requests. You are now banned.");
     }
@@ -415,7 +412,7 @@ async function linksCount(user) {
 
   const count = await query.link.total({
     user_id: user.id,
-    "links.created_at": [">", subDays(new Date(), 1).toISOString()]
+    "links.created_at": [">", utils.dateToUTC(subDays(new Date(), 1))]
   });
 
   if (count > env.USER_LIMIT_PER_DAY) {

+ 12 - 2
server/knex.js

@@ -2,9 +2,14 @@ const knex = require("knex");
 
 const env = require("./env");
 
+const isSQLite = env.DB_CLIENT === "sqlite3" || env.DB_CLIENT === "better-sqlite3";
+const isPostgres = env.DB_CLIENT === "pg" || env.DB_CLIENT === "pg-native";
+const isMySQL = env.DB_CLIENT === "mysql" || env.DB_CLIENT === "mysql2";
+
 const db = knex({
-  client: "postgres",
+  client: env.DB_CLIENT,
   connection: {
+    ...(isSQLite && { filename: "db/" + env.DB_FILENAME }),
     host: env.DB_HOST,
     port: env.DB_PORT,
     database: env.DB_NAME,
@@ -15,7 +20,12 @@ const db = knex({
       min: env.DB_POOL_MIN,
       max: env.DB_POOL_MAX
     }
-  }
+  },
+  useNullAsDefault: true,
 });
 
+db.isPostgres = isPostgres;
+db.isSQLite = isSQLite;
+db.isMySQL = isMySQL;
+
 module.exports = db;
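
Besides swapping the client, the connection gains the SQLite filename when applicable, and useNullAsDefault is enabled: when knex pads a batch insert whose rows have differing keys, it binds NULL for the missing columns instead of the DEFAULT keyword, which SQLite does not support. A small sketch of the case it covers (illustrative rows):

const db = require("./knex");

async function seedHosts() {
  // rows with differing keys: the missing "banned" in the second row is sent
  // as NULL thanks to useNullAsDefault, rather than as DEFAULT
  await db("hosts").insert([
    { address: "a.example.com", banned: true },
    { address: "b.example.com" }
  ]);
}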

+ 0 - 27
server/migrations/20200211220920_constraints.js

@@ -7,33 +7,6 @@ async function up(knex) {
   await models.createHostTable(knex);
   await models.createLinkTable(knex);
   await models.createVisitTable(knex);
-
-  await Promise.all([
-    knex.raw(`
-      ALTER TABLE domains
-      DROP CONSTRAINT domains_user_id_foreign,
-      ADD CONSTRAINT domains_user_id_foreign
-        FOREIGN KEY (user_id) 
-        REFERENCES users (id)
-        ON DELETE SET NULL;
-    `),
-    knex.raw(`
-      ALTER TABLE links
-      DROP CONSTRAINT links_user_id_foreign,
-      ADD CONSTRAINT links_user_id_foreign
-        FOREIGN KEY (user_id)
-        REFERENCES users (id)
-        ON DELETE CASCADE;
-    `),
-    knex.raw(`
-      ALTER TABLE visits
-      DROP CONSTRAINT visits_link_id_foreign,
-      ADD CONSTRAINT visits_link_id_foreign
-        FOREIGN KEY (link_Id)
-        REFERENCES links (id)
-        ON DELETE CASCADE;
-    `)
-  ]);
 }
 
 async function down() {

+ 14 - 6
server/migrations/20200510140704_domains.js

@@ -8,16 +8,24 @@ async function up(knex) {
   await models.createLinkTable(knex);
   await models.createVisitTable(knex);
 
-  await Promise.all([
+  // drop the unique user id constraint only if the database is postgres,
+  // because other databases only run the new version of the app and start fresh with the correct model.
+  // using the table.dropUnique() method would throw an error on a fresh install because the constraint does not exist,
+  // and if it throws, the rest of the transaction fails as well
+  if (knex.client.driverName === "pg") {
     knex.raw(`
       ALTER TABLE domains
       DROP CONSTRAINT IF EXISTS domains_user_id_unique
-    `),
-    knex.raw(`
-      ALTER TABLE domains
-      ADD COLUMN IF NOT EXISTS uuid UUID DEFAULT uuid_generate_v4()
     `)
-  ]);
+  }
+
+  const hasUUID = await knex.schema.hasColumn("domains", "uuid");
+
+  if (!hasUUID) {
+    await knex.schema.alterTable("domains", (table) => {
+      table.uuid("uuid").notNullable().defaultTo(knex.fn.uuid());
+    });
+  }
 }
 
 async function down() {

+ 31 - 0
server/migrations/20240911230301_change_cooldown.js

@@ -0,0 +1,31 @@
+async function up(knex) {
+
+  const hasCooldowns = await knex.schema.hasColumn("users", "cooldowns");
+  if (hasCooldowns) {
+    await knex.schema.alterTable("users", table => {
+      table.dropColumn("cooldowns");
+    });
+  }
+
+  const hasCooldown = await knex.schema.hasColumn("users", "cooldown");
+  if (!hasCooldown) {
+    await knex.schema.alterTable("users", table => {
+      table.datetime("cooldown").nullable();
+    });
+  }
+
+  const hasMaliciousAttempts = await knex.schema.hasColumn("users", "malicious_attempts");
+  if (!hasMaliciousAttempts) {
+    await knex.schema.alterTable("users", table => {
+      table.integer("malicious_attempts").notNullable().defaultTo(0);
+    });
+  }
+}
+
+async function down(knex) {}
+
+module.exports = {
+  up,
+  down
+};
+
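
The hasColumn guards make the migration idempotent across both upgraded Postgres databases (which still have the old cooldowns array column) and fresh installs on any client. The same guard pattern, distilled into a small hypothetical helper:

// hypothetical helper; mirrors the guard pattern used in the migration above
async function addColumnIfMissing(knex, tableName, columnName, build) {
  const exists = await knex.schema.hasColumn(tableName, columnName);
  if (exists) return;
  await knex.schema.alterTable(tableName, table => build(table));
}

// usage, equivalent to the cooldown block above:
// await addColumnIfMissing(knex, "users", "cooldown", t => t.datetime("cooldown").nullable());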

+ 8 - 3
server/models/domain.model.js

@@ -1,7 +1,6 @@
 async function createDomainTable(knex) {
   const hasTable = await knex.schema.hasTable("domains");
   if (!hasTable) {
-    await knex.schema.raw('create extension if not exists "uuid-ossp"');
     await knex.schema.createTable("domains", table => {
       table.increments("id").primary();
       table
@@ -10,6 +9,7 @@ async function createDomainTable(knex) {
         .defaultTo(false);
       table
         .integer("banned_by_id")
+        .unsigned()
         .references("id")
         .inTable("users");
       table
@@ -19,14 +19,19 @@ async function createDomainTable(knex) {
       table.string("homepage").nullable();
       table
         .integer("user_id")
+        .unsigned();
+      table
+        .foreign("user_id")
         .references("id")
         .inTable("users")
-        .onDelete("SET NULL");
+        .onDelete("SET NULL")
+        .withKeyName("domains_user_id_foreign");
       table
         .uuid("uuid")
         .notNullable()
-        .defaultTo(knex.raw("uuid_generate_v4()"));
+        .defaultTo(knex.fn.uuid());
       table.timestamps(false, true);
+      
     });
   }
 }
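
Two portability details recur across all the model changes: foreign-key columns are declared .unsigned() so their type matches the unsigned integer that increments() produces on MySQL, and the constraint is created via .foreign(...).withKeyName(...) so it gets a predictable name on every client. Distilled into a sketch (table name illustrative):

// sketch of the cross-client foreign key pattern used in these models
async function createExampleTable(knex) {
  await knex.schema.createTable("examples", table => {
    table.increments("id").primary();    // unsigned integer on MySQL
    table.integer("user_id").unsigned(); // must match the referenced column's type
    table
      .foreign("user_id")
      .references("id")
      .inTable("users")
      .onDelete("SET NULL")
      .withKeyName("examples_user_id_foreign"); // explicit, portable constraint name
  });
}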

+ 1 - 0
server/models/host.model.js

@@ -13,6 +13,7 @@ async function createHostTable(knex) {
         .defaultTo(false);
       table
         .integer("banned_by_id")
+        .unsigned()
         .references("id")
         .inTable("users");
       table.timestamps(false, true);

+ 12 - 5
server/models/link.model.js

@@ -2,9 +2,7 @@ async function createLinkTable(knex) {
   const hasTable = await knex.schema.hasTable("links");
 
   if (!hasTable) {
-    await knex.schema.raw('create extension if not exists "uuid-ossp"');
     await knex.schema.createTable("links", table => {
-      knex.raw('create extension if not exists "uuid-ossp"');
       table.increments("id").primary();
       table.string("address").notNullable();
       table.string("description");
@@ -14,10 +12,12 @@ async function createLinkTable(knex) {
         .defaultTo(false);
       table
         .integer("banned_by_id")
+        .unsigned()
         .references("id")
         .inTable("users");
       table
         .integer("domain_id")
+        .unsigned()
         .references("id")
         .inTable("domains");
       table.string("password");
@@ -25,25 +25,32 @@ async function createLinkTable(knex) {
       table.string("target", 2040).notNullable();
       table
         .integer("user_id")
+        .unsigned();
+      table
+        .foreign("user_id")
         .references("id")
         .inTable("users")
-        .onDelete("CASCADE");
+        .onDelete("CASCADE")
+        .withKeyName("links_user_id_foreign");
       table
         .integer("visit_count")
         .notNullable()
         .defaultTo(0);
+      table
+        .uuid("uuid")
+        .notNullable()
+        .defaultTo(knex.fn.uuid());
       table.timestamps(false, true);
     });
   }
 
   const hasUUID = await knex.schema.hasColumn("links", "uuid");
   if (!hasUUID) {
-    await knex.schema.raw('create extension if not exists "uuid-ossp"');
     await knex.schema.alterTable("links", table => {
       table
         .uuid("uuid")
         .notNullable()
-        .defaultTo(knex.raw("uuid_generate_v4()"));
+        .defaultTo(knex.fn.uuid());
     });
   }
 }

+ 3 - 1
server/models/user.model.js

@@ -10,14 +10,16 @@ async function createUserTable(knex) {
         .defaultTo(false);
       table
         .integer("banned_by_id")
+        .unsigned()
         .references("id")
         .inTable("users");
-      table.specificType("cooldowns", "timestamptz[]");
       table
         .string("email")
         .unique()
         .notNullable();
       table.string("password").notNullable();
+      table.datetime("cooldown").nullable();
+      table.integer("malicious_attempts").notNullable().defaultTo(0);
       table.dateTime("reset_password_expires");
       table.string("reset_password_token");
       table.dateTime("change_email_expires");

+ 5 - 2
server/models/visit.model.js

@@ -11,10 +11,13 @@ async function createVisitTable(knex) {
       table.dateTime("updated_at").defaultTo(knex.fn.now());
       table
         .integer("link_id")
+        .unsigned();
+      table
+        .foreign("link_id")
         .references("id")
         .inTable("links")
-        .notNullable()
-        .onDelete("CASCADE");
+        .onDelete("CASCADE")
+        .withKeyName("visits_link_id_foreign");
       table.jsonb("referrers").defaultTo("{}");
       table
         .integer("total")

+ 33 - 29
server/queries/domain.queries.js

@@ -1,4 +1,5 @@
 const redis = require("../redis");
+const utils = require("../utils");
 const knex = require("../knex");
 
 async function find(match) {
@@ -6,9 +7,9 @@ async function find(match) {
     const cachedDomain = await redis.client.get(redis.key.domain(match.address));
     if (cachedDomain) return JSON.parse(cachedDomain);
   }
-  
+
   const domain = await knex("domains").where(match).first();
-  
+
   if (domain) {
     redis.client.set(
       redis.key.domain(domain.address),
@@ -17,7 +18,7 @@ async function find(match) {
       60 * 60 * 6
     );
   }
-  
+
   return domain;
 }
 
@@ -27,44 +28,47 @@ function get(match) {
 
 async function add(params) {
   params.address = params.address.toLowerCase();
-  const exists = await knex("domains").where("address", params.address).first();
-  
+
+  const existingDomain = await knex("domains").where("address", params.address).first();
+
+  let id = existingDomain?.id;
+
   const newDomain = {
     address: params.address,
-    homepage: params.homepage || null,
-    user_id: params.user_id || null,
-    banned: !!params.banned
+    homepage: params.homepage,
+    user_id: params.user_id,
+    banned: !!params.banned,
+    banned_by_id: params.banned_by_id
   };
-  
-  let domain;
-  if (exists) {
-    const [response] = await knex("domains")
-      .where("id", exists.id)
-      .update(
-        {
-          ...newDomain,
-          updated_at: params.updated_at || new Date().toISOString()
-        },
-        "*"
-      );
-    domain = response;
+
+  if (id) {
+    await knex("domains").where("id", id).update({
+      ...newDomain,
+      updated_at: params.updated_at || utils.dateToUTC(new Date())
+    });
   } else {
-    const [response] = await knex("domains").insert(newDomain, "*");
-    domain = response;
+    // Mysql and sqlite don't support returning but return the inserted id by default
+    const [createdDomain] = await knex("domains").insert(newDomain).returning("id");
+    id = createdDomain.id;
   }
-  
+
+  // Query the domain instead of using returning, as sqlite and mysql don't support it
+  const domain = await knex("domains").where("id", id).first();
+
   redis.remove.domain(domain);
-  
+
   return domain;
 }
 
 async function update(match, update) {
-  const domains = await knex("domains")
+  await knex("domains")
     .where(match)
-    .update({ ...update, updated_at: new Date().toISOString() }, "*");
-  
+    .update({ ...update, updated_at: utils.dateToUTC(new Date()) });
+
+  const domains = await knex("domains").select("*").where(match);
+
   domains.forEach(redis.remove.domain);
-  
+
   return domains;
 }
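
The rewritten add() resolves the row id first (from the existing row, or from the insert), then re-selects the full record, because only Postgres can hand back whole rows from INSERT/UPDATE ... RETURNING *. The flow, condensed (table name illustrative; return shapes follow the comment in the code above):

async function upsertByAddress(knex, row) {
  const existing = await knex("examples").where("address", row.address).first();
  let id = existing?.id;

  if (id) {
    await knex("examples").where("id", id).update(row);
  } else {
    // ask only for the id, which the supported clients can all provide
    const [created] = await knex("examples").insert(row).returning("id");
    id = created.id;
  }

  // re-select instead of relying on RETURNING *
  return knex("examples").where("id", id).first();
}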
 

+ 19 - 20
server/queries/host.queries.js

@@ -1,4 +1,5 @@
 const redis = require("../redis");
+const utils = require("../utils");
 const knex = require("../knex");
 
 async function find(match) {
@@ -21,41 +22,39 @@ async function find(match) {
   }
 
   return host;
-};
+}
 
 async function add(params) {
   params.address = params.address.toLowerCase();
 
-  const exists = await knex("hosts")
-    .where("address", params.address)
-    .first();
+  const existingHost = await knex("hosts").where("address", params.address).first();
+
+  let id = existingHost?.id;
 
   const newHost = {
     address: params.address,
-    banned: !!params.banned
+    banned: !!params.banned,
+    banned_by_id: params.banned_by_id,
   };
 
-  let host;
-  if (exists) {
-    const [response] = await knex("hosts")
-      .where("id", exists.id)
-      .update(
-        {
-          ...newHost,
-          updated_at: params.updated_at || new Date().toISOString()
-        },
-        "*"
-      );
-    host = response;
+  if (id) {
+    await knex("hosts").where("id", id).update({
+      ...newHost,
+      updated_at: params.updated_at || utils.dateToUTC(new Date())
+    });
   } else {
-    const [response] = await knex("hosts").insert(newHost, "*");
-    host = response;
+    // Mysql and sqlite don't support returning but return the inserted id by default
+    const [createdHost] = await knex("hosts").insert(newHost).returning("id");
+    id = createdHost.id;
   }
 
+  // Query the host instead of using returning, as sqlite and mysql don't support it
+  const host = await knex("hosts").where("id", id).first();
+
   redis.remove.host(host);
 
   return host;
-};
+}
 
 module.exports = {
   add,

+ 3 - 2
server/queries/ip.queries.js

@@ -1,5 +1,6 @@
 const { subMinutes } = require("date-fns");
 
+const utils = require("../utils");
 const knex = require("../knex");
 const env = require("../env");
 
@@ -9,7 +10,7 @@ async function add(ipToAdd) {
   const currentIP = await knex("ips").where("ip", ip).first();
   
   if (currentIP) {
-    const currentDate = new Date().toISOString();
+    const currentDate = utils.dateToUTC(new Date());
     await knex("ips")
       .where({ ip })
       .update({
@@ -41,7 +42,7 @@ function clear() {
   .where(
     "created_at",
     "<",
-    subMinutes(new Date(), env.NON_USER_COOLDOWN).toISOString()
+    utils.dateToUTC(subMinutes(new Date(), env.NON_USER_COOLDOWN))
   )
   .delete();
 }

+ 15 - 13
server/queries/link.queries.js

@@ -1,9 +1,11 @@
 const bcrypt = require("bcryptjs");
 
-const CustomError = require("../utils").CustomError;
+const utils = require("../utils");
 const redis = require("../redis");
 const knex = require("../knex");
 
+const CustomError = utils.CustomError;
+
 const selectable = [
   "links.id",
   "links.address",
@@ -40,7 +42,7 @@ function normalizeMatch(match) {
   }
 
   return newMatch;
-};
+}
 
 async function total(match, params) {
   const normalizedMatch = normalizeMatch(match);
@@ -70,7 +72,7 @@ async function get(match, params) {
     .where(normalizeMatch(match))
     .offset(params.skip)
     .limit(params.limit)
-    .orderBy("created_at", "desc");
+    .orderBy("links.created_at", "desc");
   
   if (params?.search) {
     query.andWhereRaw(
@@ -80,10 +82,8 @@ async function get(match, params) {
   }
   
   query.leftJoin("domains", "links.domain_id", "domains.id");
-  
-  const links = await query;
-  
-  return links;
+
+  return query;
 }
 
 async function find(match) {
@@ -138,7 +138,7 @@ async function remove(match) {
   
   if (!link) {
     return { isRemoved: false, error: "Could not find the link.", link: null }
-  };
+  }
 
   const deletedLink = await knex("links").where("id", link.id).delete();
   redis.remove.link(link);
@@ -157,9 +157,9 @@ async function batchRemove(match) {
   
   const links = await findQuery;
   
-  links.forEach(redis.remove.link);
-  
   await deleteQuery.delete();
+  
+  links.forEach(redis.remove.link);
 }
 
 async function update(match, update) {
@@ -168,10 +168,12 @@ async function update(match, update) {
     update.password = await bcrypt.hash(update.password, salt);
   }
   
-  const links = await knex("links")
+  await knex("links")
     .where(match)
-    .update({ ...update, updated_at: new Date().toISOString() }, "*");
-  
+    .update({ ...update, updated_at: utils.dateToUTC(new Date()) });
+
+  const links = await knex("links").select('*').where(match);
+
   links.forEach(redis.remove.link);
   
   return links;

+ 23 - 12
server/queries/user.queries.js

@@ -1,6 +1,7 @@
 const { addMinutes } = require("date-fns");
 const { v4: uuid } = require("uuid");
 
+const utils = require("../utils");
 const redis = require("../redis");
 const knex = require("../knex");
 
@@ -10,8 +11,13 @@ async function find(match) {
     const cachedUser = await redis.client.get(key);
     if (cachedUser) return JSON.parse(cachedUser);
   }
-  
-  const user = await knex("users").where(match).first();
+
+  const query = knex("users");
+  Object.entries(match).forEach(([key, value]) => {
+    query.andWhere(key, ...(Array.isArray(value) ? value : [value]));
+  });
+
+  const user = await query.first();
   
   if (user) {
     const emailKey = redis.key.user(user.email);
@@ -24,7 +30,6 @@ async function find(match) {
   }
   
   return user;
-
 }
 
 async function add(params, user) {
@@ -32,13 +37,13 @@ async function add(params, user) {
     email: params.email,
     password: params.password,
     verification_token: uuid(),
-    verification_expires: addMinutes(new Date(), 60).toISOString()
+    verification_expires: utils.dateToUTC(addMinutes(new Date(), 60))
   };
   
   if (user) {
     await knex("users")
       .where("id", user.id)
-      .update({ ...data, updated_at: new Date().toISOString() });
+      .update({ ...data, updated_at: utils.dateToUTC(new Date()) });
   } else {
     await knex("users").insert(data);
   }
@@ -51,18 +56,24 @@ async function add(params, user) {
   };
 }
 
-async function update(match, update) {
+async function update(match, update, methods) {
   const query = knex("users");
-  
+
   Object.entries(match).forEach(([key, value]) => {
     query.andWhere(key, ...(Array.isArray(value) ? value : [value]));
   });
+
+  const updateQuery = query.clone();
+  if (methods?.increments) {
+    methods.increments.forEach(columnName => {
+      updateQuery.increment(columnName);
+    });
+  }
   
-  const users = await query.update(
-    { ...update, updated_at: new Date().toISOString() },
-    "*"
-  );
-  
+  await updateQuery.update({ ...update, updated_at: utils.dateToUTC(new Date()) });
+
+  const users = await query.select("*");
+
   users.forEach(redis.remove.user);
   
   return users;
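
The new optional third argument lets callers combine a regular update with atomic column increments in a single statement; validators.handler.js above uses it to stamp the cooldown and bump malicious_attempts together. Usage, as in that handler:

// inside an async handler, with user, query and utils in scope as above
const [updatedUser] = await query.user.update(
  { id: user.id },
  { cooldown: utils.dateToUTC(new Date()) },
  { increments: ["malicious_attempts"] }
);

if (updatedUser.malicious_attempts > 2) {
  // too many cooldowns: ban the user, as the malware check above does
}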

+ 62 - 46
server/queries/visit.queries.js

@@ -1,4 +1,4 @@
-const { isAfter, subDays, subHours, set } = require("date-fns");
+const { isAfter, subDays, subHours, set, format } = require("date-fns");
 
 const utils = require("../utils");
 const redis = require("../redis");
@@ -11,45 +11,60 @@ async function add(params) {
     referrer: params.referrer.toLowerCase()
   };
 
-  const visit = await knex("visits")
-    .where({ link_id: params.id })
-    .andWhere(
-      knex.raw("date_trunc('hour', created_at) = date_trunc('hour', ?)", [
-        knex.fn.now()
-      ])
-    )
-    .first();
-
-  if (visit) {
-    await knex("visits")
-      .where({ id: visit.id })
-      .increment(`br_${data.browser}`, 1)
-      .increment(`os_${data.os}`, 1)
-      .increment("total", 1)
-      .update({
-        updated_at: new Date().toISOString(),
-        countries: knex.raw(
-          "jsonb_set(countries, '{??}', (COALESCE(countries->>?,'0')::int + 1)::text::jsonb)",
-          [data.country, data.country]
-        ),
-        referrers: knex.raw(
-          "jsonb_set(referrers, '{??}', (COALESCE(referrers->>?,'0')::int + 1)::text::jsonb)",
-          [data.referrer, data.referrer]
-        )
+  const nowUTC = new Date().toISOString();
+  const truncatedNow = nowUTC.substring(0, 10) + " " + nowUTC.substring(11, 14) + "00:00";
+
+  return knex.transaction(async (trx) => {
+    // Create a subquery that truncates each visit's created_at to the hour
+    const subquery = trx("visits")
+      .select("visits.*")
+      .select({
+        created_at_hours: utils.knexUtils(trx).truncatedTimestamp("created_at", "hour")
+      })
+      .where({ link_id: params.id })
+      .as("subquery");
+
+    const visit = await trx
+      .select("*")
+      .from(subquery)
+      .where("created_at_hours", "=", truncatedNow)
+      .forUpdate()
+      .first();
+      
+    if (visit) {
+      const countries = typeof visit.countries === "string" ? JSON.parse(visit.countries) : visit.countries;
+      const referrers = typeof visit.referrers === "string" ? JSON.parse(visit.referrers) : visit.referrers;
+      await trx("visits")
+        .where({ id: visit.id })
+        .increment(`br_${data.browser}`, 1)
+        .increment(`os_${data.os}`, 1)
+        .increment("total", 1)
+        .update({
+          updated_at: utils.dateToUTC(new Date()),
+          countries: JSON.stringify({
+            ...countries,
+            [data.country]: (countries[data.country] ?? 0) + 1
+          }),
+          referrers: JSON.stringify({
+            ...referrers,
+             [data.referrer]: (referrers[data.referrer] ?? 0) + 1
+          })
+        });
+    } else {
+      // This must also happen inside the transaction to avoid race conditions
+      await trx("visits").insert({
+        [`br_${data.browser}`]: 1,
+        countries: { [data.country]: 1 },
+        referrers: { [data.referrer]: 1 },
+        [`os_${data.os}`]: 1,
+        total: 1,
+        link_id: data.id
       });
-  } else {
-    await knex("visits").insert({
-      [`br_${data.browser}`]: 1,
-      countries: { [data.country]: 1 },
-      referrers: { [data.referrer]: 1 },
-      [`os_${data.os}`]: 1,
-      total: 1,
-      link_id: data.id
-    });
-  }
+    }
 
-  return visit;
-};
+    return visit;
+  });
+}
 
 async function find(match, total) {
   if (match.link_id) {
@@ -82,20 +97,21 @@ async function find(match, total) {
   };
 
   const visitsStream = knex("visits").where(match).stream();
-  const nowUTC = utils.getUTCDate();
   const now = new Date();
 
   const periods = utils.getStatsPeriods(now);
 
   for await (const visit of visitsStream) {
     periods.forEach(([type, fromDate]) => {
-      const isIncluded = isAfter(new Date(visit.created_at), fromDate);
+      const isIncluded = isAfter(utils.parseDatetime(visit.created_at), fromDate);
       if (!isIncluded) return;
       const diffFunction = utils.getDifferenceFunction(type);
-      const diff = diffFunction(now, new Date(visit.created_at));
+      const diff = diffFunction(now, utils.parseDatetime(visit.created_at));
       const index = stats[type].views.length - diff - 1;
       const view = stats[type].views[index];
       const period = stats[type].stats;
+      const countries = typeof visit.countries === "string" ? JSON.parse(visit.countries) : visit.countries;
+      const referrers = typeof visit.referrers === "string" ? JSON.parse(visit.referrers) : visit.referrers;
       stats[type].stats = {
         browser: {
           chrome: period.browser.chrome + visit.br_chrome,
@@ -116,7 +132,7 @@ async function find(match, total) {
         },
         country: {
           ...period.country,
-          ...Object.entries(visit.countries).reduce(
+          ...Object.entries(countries).reduce(
             (obj, [country, count]) => ({
               ...obj,
               [country]: (period.country[country] || 0) + count
@@ -126,7 +142,7 @@ async function find(match, total) {
         },
         referrer: {
           ...period.referrer,
-          ...Object.entries(visit.referrers).reduce(
+          ...Object.entries(referrers).reduce(
             (obj, [referrer, count]) => ({
               ...obj,
               [referrer]: (period.referrer[referrer] || 0) + count
@@ -161,7 +177,7 @@ async function find(match, total) {
       views: stats.lastWeek.views,
       total: stats.lastWeek.total
     },
-    updatedAt: new Date().toISOString()
+    updatedAt: new Date()
   };
 
   if (match.link_id) {
@@ -176,5 +192,5 @@ async function find(match, total) {
 
 module.exports = {
   add,
-  find,
-}
+  find
+};

+ 55 - 0
server/utils/knex.js

@@ -0,0 +1,55 @@
+
+function knexUtils(knex) {
+  function truncatedTimestamp(columnName, precision = "hour") {
+    switch (knex.client.driverName) {
+      case "sqlite3":
+      case "better-sqlite3":
+        // SQLite uses strftime for date truncation
+        const sqliteFormats = {
+          second: "%Y-%m-%d %H:%M:%S",
+          minute: "%Y-%m-%d %H:%M:00",
+          hour: "%Y-%m-%d %H:00:00",
+          day: "%Y-%m-%d 00:00:00",
+        };
+        return knex.raw(`strftime('${sqliteFormats[precision]}', ${columnName})`); // Default to 'hour'
+      case "mssql":
+        // For MSSQL, we can use FORMAT or CONVERT to truncate the timestamp
+        const mssqlFormats = {
+          second: "yyyy-MM-dd HH:mm:ss",
+          minute: "yyyy-MM-dd HH:mm:00",
+          hour: "yyyy-MM-dd HH:00:00",
+          day: "yyyy-MM-dd 00:00:00",
+        };
+        return knex.raw(`FORMAT(${columnName}, '${mssqlFormats[precision]}')`);
+      case "pg":
+      case "pgnative":
+      case "cockroachdb":
+        // PostgreSQL has the `date_trunc` function, which is ideal for this task
+        return knex.raw(`date_trunc(?, ${columnName})`, [precision]);
+      case "oracle":
+      case "oracledb":
+        // Oracle truncates dates using the `TRUNC` function
+        return knex.raw(`TRUNC(${columnName}, ?)`, [precision]);
+      case "mysql":
+      case "mysql2":
+        // MySQL can use the DATE_FORMAT function to truncate
+        const mysqlFormats = {
+          second: "%Y-%m-%d %H:%i:%s",
+          minute: "%Y-%m-%d %H:%i:00",
+          hour: "%Y-%m-%d %H:00:00",
+          day: "%Y-%m-%d 00:00:00",
+        };
+        return knex.raw(`DATE_FORMAT(${columnName}, '${mysqlFormats[precision]}')`);
+      default:
+        throw new Error(`${knex.client.driverName} does not support timestamp truncation with precision`);
+    }
+  }
+
+  return {
+    truncatedTimestamp
+  }
+}
+
+module.exports = {
+  knexUtils
+}
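
Usage mirrors server/queries/visit.queries.js above: the helper emits a dialect-specific expression that buckets created_at by hour, and the caller compares it against a pre-truncated "now" string. A short sketch (link id illustrative):

const knex = require("../knex");
const { knexUtils } = require("./knex");

function visitsThisHour(linkId) {
  const nowUTC = new Date().toISOString();
  const truncatedNow = nowUTC.substring(0, 10) + " " + nowUTC.substring(11, 14) + "00:00"; // 'YYYY-MM-DD HH:00:00'

  // the alias can only be filtered from an outer query, hence the subquery wrapper
  const subquery = knex("visits")
    .select("visits.*")
    .select({ created_at_hours: knexUtils(knex).truncatedTimestamp("created_at", "hour") })
    .where({ link_id: linkId })
    .as("subquery");

  return knex.select("*").from(subquery).where("created_at_hours", "=", truncatedNow);
}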

+ 53 - 23
server/utils/utils.js

@@ -1,6 +1,8 @@
-const { differenceInDays, differenceInHours, differenceInMonths, differenceInMilliseconds, addDays, subHours, subDays, subMonths, subYears } = require("date-fns");
+const { differenceInDays, differenceInHours, differenceInMonths, differenceInMilliseconds, addDays, subHours, subDays, subMonths, subYears, format } = require("date-fns");
 const nanoid = require("nanoid/generate");
+const knexUtils = require("./knex");
 const JWT = require("jsonwebtoken");
+const knex = require("../knex");
 const path = require("path");
 const hbs = require("hbs");
 const ms = require("ms");
@@ -112,14 +114,33 @@ function getDifferenceFunction(type) {
   throw new Error("Unknown type.");
 }
 
-function getUTCDate(dateString) {
-  const date = new Date(dateString || Date.now());
-  return new Date(
-    date.getUTCFullYear(),
-    date.getUTCMonth(),
-    date.getUTCDate(),
-    date.getUTCHours()
-  );
+function parseDatetime(date) {
+  // postgres and mysql return a Date object, while sqlite returns a formatted ISO 8601 string in UTC
+  return date instanceof Date ? date : new Date(date + "Z");
+}
+
+function parseTimestamps(item) {
+  return {
+    created_at: parseDatetime(item.created_at),
+    updated_at: parseDatetime(item.updated_at),
+  }
+}
+
+function dateToUTC(date) {
+  const dateUTC = date instanceof Date ? date.toISOString() : new Date(date).toISOString();
+
+  // format the utc date in 'YYYY-MM-DD hh:mm:ss' for SQLite
+  if (knex.isSQLite) {
+    return dateUTC.substring(0, 10) + " " + dateUTC.substring(11, 19);
+  }
+  
+  // mysql doesn't save time in utc, so format the date in local timezone instead
+  if (knex.isMySQL) {
+    return format(new Date(date), "yyyy-MM-dd HH:mm:ss");
+  }
+  
+  // return unformatted utc string for postgres
+  return dateUTC;
 }
 
 function getStatsPeriods(now) {
@@ -196,7 +217,8 @@ const MINUTE = 60,
       WEEK = DAY * 7,
       MONTH = DAY * 30,
       YEAR = DAY * 365;
-function getTimeAgo(date) {
+function getTimeAgo(dateString) {
+  const date = new Date(dateString);
   const secondsAgo = Math.round((Date.now() - Number(date)) / 1000);
 
   if (secondsAgo < MINUTE) {
@@ -228,23 +250,28 @@ function getTimeAgo(date) {
 const sanitize = {
   domain: domain => ({
     ...domain,
+    ...parseTimestamps(domain),
     id: domain.uuid,
     uuid: undefined,
     user_id: undefined,
     banned_by_id: undefined
   }),
-  link: link => ({
-    ...link,
-    banned_by_id: undefined,
-    domain_id: undefined,
-    user_id: undefined,
-    uuid: undefined,
-    id: link.uuid,
-    relative_created_at: getTimeAgo(link.created_at),
-    relative_expire_in: link.expire_in && ms(differenceInMilliseconds(new Date(link.expire_in), new Date()), { long: true }),
-    password: !!link.password,
-    link: getShortURL(link.address, link.domain)
-  })
+  link: link => {
+    const timestamps = parseTimestamps(link);
+    return {
+      ...link,
+      ...timestamps,
+      banned_by_id: undefined,
+      domain_id: undefined,
+      user_id: undefined,
+      uuid: undefined,
+      id: link.uuid,
+      relative_created_at: getTimeAgo(timestamps.created_at),
+      relative_expire_in: link.expire_in && ms(differenceInMilliseconds(parseDatetime(link.expire_in), new Date()), { long: true }),
+      password: !!link.password,
+      link: getShortURL(link.address, link.domain)
+    }
+  }
 };
 
 function sleep(ms) {
@@ -285,6 +312,7 @@ function registerHandlebarsHelpers() {
 module.exports = {
   addProtocol,
   CustomError,
+  dateToUTC,
   deleteCurrentToken,
   generateId,
   getDifferenceFunction,
@@ -294,8 +322,9 @@ module.exports = {
   getStatsCacheTime,
   getStatsLimit,
   getStatsPeriods,
-  getUTCDate,
   isAdmin,
+  parseDatetime,
+  parseTimestamps,
   preservedURLs,
   registerHandlebarsHelpers,
   removeWww,
@@ -305,4 +334,5 @@ module.exports = {
   sleep,
   statsObjectToArray,
   urlRegex,
+  ...knexUtils,
 }
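
Round trip in practice: dateToUTC() formats JS dates the way the active client stores them, and parseDatetime() turns whatever the driver returns back into a Date (SQLite hands back plain strings with no timezone marker, hence the appended "Z"). Illustrative values, assuming DB_CLIENT=sqlite3:

const utils = require("../utils"); // as the query/handler modules above do

utils.dateToUTC(new Date("2024-09-11T23:03:01.000Z")); // -> "2024-09-11 23:03:01"
utils.parseDatetime("2024-09-11 23:03:01");            // -> Date at 23:03:01 UTC
utils.parseDatetime(new Date());                       // Date instances (pg/mysql) pass through unchanged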

Some files were not shown because too many files changed in this diff