Add files

2024-08-23 16:45:28 +02:00
parent 2b8a6e1428
commit 448e25de50
128 changed files with 5134 additions and 0 deletions

backend/.env.example Normal file

@@ -0,0 +1,12 @@
DB_PATH=/nb/database.db
API_TOKEN=test
UPLOAD_DIR=/nb/uploads/
DESCRIBE_IMAGES=1
DESCRIBE_IMAGES_API=ollama
DESCRIBE_IMAGES_PROMPT="Your task is to describe images to your friend in a friendly, detailed but concise manner.\n"
DESCRIBE_IMAGES_TEMPERATURE=0.5
DESCRIBE_IMAGES_MAX_TOKENS=8192
OPENAI_API_KEY=sk-blahblahblahblahblahImAnAPIKeyWoopDeeDoo
OPENAI_MODEL=gpt-4o
OLLAMA_URL=http://localhost:11434
OLLAMA_MODEL=moondream

backend/.gitignore vendored Normal file

@@ -0,0 +1,175 @@
# Based on https://raw.githubusercontent.com/github/gitignore/main/Node.gitignore
# Logs
logs
*.log
npm-debug.log*
yarn-debug.log*
yarn-error.log*
lerna-debug.log*
.pnpm-debug.log*
# Caches
.cache
# Diagnostic reports (https://nodejs.org/api/report.html)
report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json
# Runtime data
pids
*.pid
*.seed
*.pid.lock
# Directory for instrumented libs generated by jscoverage/JSCover
lib-cov
# Coverage directory used by tools like istanbul
coverage
*.lcov
# nyc test coverage
.nyc_output
# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files)
.grunt
# Bower dependency directory (https://bower.io/)
bower_components
# node-waf configuration
.lock-wscript
# Compiled binary addons (https://nodejs.org/api/addons.html)
build/Release
# Dependency directories
node_modules/
jspm_packages/
# Snowpack dependency directory (https://snowpack.dev/)
web_modules/
# TypeScript cache
*.tsbuildinfo
# Optional npm cache directory
.npm
# Optional eslint cache
.eslintcache
# Optional stylelint cache
.stylelintcache
# Microbundle cache
.rpt2_cache/
.rts2_cache_cjs/
.rts2_cache_es/
.rts2_cache_umd/
# Optional REPL history
.node_repl_history
# Output of 'npm pack'
*.tgz
# Yarn Integrity file
.yarn-integrity
# dotenv environment variable files
.env
.env.development.local
.env.test.local
.env.production.local
.env.local
# parcel-bundler cache (https://parceljs.org/)
.parcel-cache
# Next.js build output
.next
out
# Nuxt.js build / generate output
.nuxt
dist
# Gatsby files
# Comment in the public line in if your project uses Gatsby and not Next.js
# https://nextjs.org/blog/next-9-1#public-directory-support
# public
# vuepress build output
.vuepress/dist
# vuepress v2.x temp and cache directory
.temp
# Docusaurus cache and generated files
.docusaurus
# Serverless directories
.serverless/
# FuseBox cache
.fusebox/
# DynamoDB Local files
.dynamodb/
# TernJS port file
.tern-port
# Stores VSCode versions used for testing VSCode extensions
.vscode-test
# yarn v2
.yarn/cache
.yarn/unplugged
.yarn/build-state.yml
.yarn/install-state.gz
.pnp.*
# IntelliJ based IDEs
.idea
# Finder (MacOS) folder config
.DS_Store

backend/README.md Normal file

@@ -0,0 +1,15 @@
# notebrook-backend
To install dependencies:
```bash
bun install
```
To run:
```bash
bun run src/server.ts
```
This project was created using `bun init` in bun v1.1.21. [Bun](https://bun.sh) is a fast all-in-one JavaScript runtime.
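
For reference, a minimal client sketch (not part of this commit): it assumes the server is running locally on the default port 3000 and that the raw `API_TOKEN` value (`test` in `.env.example`) is sent as the `Authorization` header, which is all the auth middleware checks.

```ts
// Hypothetical usage sketch; base URL and token are assumptions taken from the defaults above.
const BASE_URL = "http://localhost:3000";
const TOKEN = "test"; // raw API_TOKEN value, compared verbatim by the auth middleware

const headers = { "Authorization": TOKEN, "Content-Type": "application/json" };

// Create a channel, then post a message into it.
const channel = await fetch(`${BASE_URL}/channels`, {
  method: "POST",
  headers,
  body: JSON.stringify({ name: "notes" }),
}).then((res) => res.json());

const message = await fetch(`${BASE_URL}/channels/${channel.id}/messages`, {
  method: "POST",
  headers,
  body: JSON.stringify({ content: "hello from the API" }),
}).then((res) => res.json());

console.log(channel, message);
```

Messages are always scoped to a channel route parameter, so a channel has to exist before anything can be posted.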

backend/bun.lockb Normal file

Binary file not shown.

backend/package.json Normal file

@@ -0,0 +1,31 @@
{
"name": "notebrook-backend",
"module": "src/server.ts",
"type": "module",
"scripts": {
"start": "bun run src/server.ts",
"dev": "bun run --watch src/server.ts",
"test": "echo \"Error: no test specified\" && exit 1"
},
"devDependencies": {
"@types/bun": "latest"
},
"peerDependencies": {
"typescript": "^5.5.4"
},
"dependencies": {
"@types/better-sqlite3": "^7.6.11",
"@types/cors": "^2.8.17",
"@types/express": "^4.17.21",
"@types/jsonwebtoken": "^9.0.6",
"@types/multer": "^1.4.11",
"@types/ws": "^8.5.12",
"cors": "^2.8.5",
"express": "^4.19.2",
"multer": "^1.4.5-lts.1",
"ollama": "^0.5.8",
"openai": "^4.56.0",
"sharp": "^0.33.5",
"ws": "^8.18.0"
}
}

backend/schema.sql Normal file

@@ -0,0 +1,31 @@
CREATE TABLE IF NOT EXISTS channels (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP
);
CREATE TABLE IF NOT EXISTS files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
channelId INTEGER,
filePath TEXT,
fileType TEXT,
fileSize INTEGER,
originalName TEXT,
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (channelId) REFERENCES channels (id) ON DELETE CASCADE
);
CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
channelId INTEGER,
content TEXT,
fileId INTEGER NULL,
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (channelId) REFERENCES channels (id) ON DELETE CASCADE,
FOREIGN KEY (fileId) REFERENCES files (id) ON DELETE SET NULL
);
CREATE VIRTUAL TABLE IF NOT EXISTS messages_fts USING fts5(
content,
content = 'messages',
content_rowid = 'id'
);

backend/src/app.ts Normal file

@@ -0,0 +1,27 @@
import express from "express";
import cors from "cors";
import * as ChannelRoutes from "./routes/channel";
import * as FileRoutes from "./routes/file";
import * as MessageRoutes from "./routes/message";
import * as SearchRoutes from "./routes/search";
import { authenticate } from "./middleware/auth";
import { initializeDB } from "./db";
import { FRONTEND_DIR, UPLOAD_DIR } from "./config";
export const app = express();
app.use(express.json());
app.use(cors());
app.use('/uploads', express.static(UPLOAD_DIR));
app.use(express.static(FRONTEND_DIR));
app.use("/channels", ChannelRoutes.router);
app.use("/channels/:channelId/messages", MessageRoutes.router);
app.use("/channels/:channelId/messages/:messageId/files", FileRoutes.router);
app.use("/search", SearchRoutes.router);
app.get('/check-token', authenticate, (req, res) => {
res.json({ message: 'Token is valid' });
});

backend/src/config.ts Normal file

@@ -0,0 +1,15 @@
export const DB_PATH = process.env["DB_PATH"] || "/usr/src/app/data/db.sqlite";
export const SECRET_KEY = process.env["API_TOKEN"] || "";
export const UPLOAD_DIR = process.env["UPLOAD_DIR"] || "/usr/src/app/data/uploads/";
export const FRONTEND_DIR = process.env["FRONTEND_DIR"] || "/usr/src/app/backend/public";
export const DESCRIBE_IMAGES: boolean = process.env["DESCRIBE_IMAGES"] === "1";
export const DESCRIBE_IMAGES_API = process.env["DESCRIBE_IMAGES_API"] || "ollama";
export const DESCRIBE_IMAGES_PROMPT = process.env["DESCRIBE_IMAGES_PROMPT"] || "Describe this image.";
export const DESCRIBE_IMAGES_TEMPERATURE = parseFloat(process.env["DESCRIBE_IMAGES_TEMPERATURE"]!) || 0.5;
export const DESCRIBE_IMAGES_MAX_TOKENS = parseInt(process.env["DESCRIBE_IMAGES_MAX_TOKENS"]!) || 1024;
export const OPENAI_API_KEY = process.env["OPENAI_API_KEY"] || "";
export const OPENAI_MODEL = process.env["OPENAI_MODEL"] || "gpt-4o";
export const OLLAMA_URL = process.env["OLLAMA_URL"] || "http://localhost:11434";
export const OLLAMA_MODEL = process.env["OLLAMA_MODEL"] || "moondream";
// list all files in /usr/src/app/data/

backend/src/controllers/channel-controller.ts Normal file

@@ -0,0 +1,57 @@
import type { Request, Response } from "express";
import * as ChannelService from "../services/channel-service";
export const createChannel = async (req: Request, res: Response) => {
const { name } = req.body;
if (!name) {
return res.status(400).json({ error: 'Name is required' });
}
const chan = await ChannelService.createChannel(name);
res.json(chan);
}
export const deleteChannel = async (req: Request, res: Response) => {
const { channelId } = req.params;
if (!channelId) {
return res.status(400).json({ error: 'Channel ID is required' });
}
const result = await ChannelService.deleteChannel(channelId);
if (result.changes === 0) {
return res.status(404).json({ error: 'Channel not found' });
}
res.json({ message: 'Channel deleted successfully' });
}
export const getChannels = async (req: Request, res: Response) => {
const channels = await ChannelService.getChannels();
res.json({ channels });
}
export const mergeChannel = async (req: Request, res: Response) => {
const { channelId } = req.params;
const { targetChannelId } = req.body;
if (!channelId || !targetChannelId) {
return res.status(400).json({ error: 'Channel ID and targetChannelId are required' });
}
const result = await ChannelService.mergeChannel(channelId, targetChannelId);
res.json({ message: 'Channels merged successfully' });
}
export const updateChannel = async (req: Request, res: Response) => {
const { channelId } = req.params;
const { name } = req.body;
if (!channelId || !name) {
return res.status(400).json({ error: 'Channel ID and name are required' });
}
const result = await ChannelService.updateChannel(channelId, name);
if (result.changes === 0) {
return res.status(404).json({ error: 'Channel not found' });
}
res.json({ message: 'Channel updated successfully' });
}

backend/src/controllers/file-controller.ts Normal file

@@ -0,0 +1,31 @@
import type { Request, Response } from "express";
import * as FileService from "../services/file-service";
export const uploadFile = async (req: Request, res: Response) => {
const { channelId, messageId } = req.params;
const filePath = req.file?.path;
const fileType = req.file?.mimetype;
const fileSize = req.file?.size;
const originalName = req.file?.originalname;
if (!channelId || !messageId) {
return res.status(400).json({ error: 'Channel ID and message ID are required' });
}
if (!filePath || !fileType || !fileSize || !originalName) {
return res.status(400).json({ error: 'File is required' });
}
const result = await FileService.uploadFile(channelId, messageId, filePath, fileType!, fileSize!, originalName!);
res.json({ id: result.lastInsertRowid, channelId, messageId, filePath, fileType });
}
export const getFiles = async (req: Request, res: Response) => {
const { messageId } = req.params;
if (!messageId) {
return res.status(400).json({ error: 'Message ID is required' });
}
const files = await FileService.getFiles(messageId);
res.json({ files });
}

backend/src/controllers/message-controller.ts Normal file

@@ -0,0 +1,47 @@
import type { Request, Response } from "express";
import * as MessageService from "../services/message-service";
export const createMessage = async (req: Request, res: Response) => {
const { content } = req.body;
const { channelId } = req.params;
if (!content || !channelId) {
return res.status(400).json({ error: 'Content and channel ID are required' });
}
const messageId = await MessageService.createMessage(channelId, content);
res.json({ id: messageId, channelId, content, createdAt: new Date().toISOString() });
};
export const updateMessage = async (req: Request, res: Response) => {
const { content } = req.body;
const { messageId } = req.params;
if (!content || !messageId) {
return res.status(400).json({ error: 'Content and message ID are required' });
}
const result = await MessageService.updateMessage(messageId, content);
res.json({ id: messageId, content });
}
export const deleteMessage = async (req: Request, res: Response) => {
const { messageId } = req.params;
if (!messageId) {
return res.status(400).json({ error: 'Message ID is required' });
}
const result = await MessageService.deleteMessage(messageId);
if (result.changes === 0) {
return res.status(404).json({ error: 'Message not found' });
}
res.json({ message: 'Message deleted successfully' });
}
export const getMessages = async (req: Request, res: Response) => {
const { channelId } = req.params;
if (!channelId) {
return res.status(400).json({ error: 'Channel ID is required' });
}
const messages = await MessageService.getMessages(channelId);
res.json({ messages });
}

backend/src/controllers/search-controller.ts Normal file

@@ -0,0 +1,11 @@
import type { Request, Response } from "express";
import * as SearchService from "../services/search-service";
export const search = async (req: Request, res: Response) => {
const { query, channelId } = req.query;
if (!query) {
return res.status(400).json({ error: 'Query is required' });
}
const results = await SearchService.search(query as string, channelId as string);
res.json({ results });
}

backend/src/controllers/websocket-controller.ts Normal file

@@ -0,0 +1,29 @@
import { events } from "../globals";
import { WebSocket } from "ws";
export const attachEvents = (ws: WebSocket) => {
events.on('file-uploaded', (id, channelId, messageId, filePath, fileType, fileSize, originalName) => {
ws.send(JSON.stringify({ type: 'file-uploaded', id, channelId, messageId, filePath, fileType, fileSize, originalName }));
});
events.on('message-created', (id, channelId, content) => {
ws.send(JSON.stringify({ type: 'message-created', id, channelId, content }));
});
events.on('message-updated', (id, content) => {
ws.send(JSON.stringify({ type: 'message-updated', id, content }));
});
events.on('message-deleted', (id) => {
ws.send(JSON.stringify({ type: 'message-deleted', id }));
});
events.on('channel-created', (channel) => {
ws.send(JSON.stringify({ type: 'channel-created', channel }));
});
events.on('channel-deleted', (id) => {
ws.send(JSON.stringify({ type: 'channel-deleted', id }));
});
events.on('channel-merged', (channelId, targetChannelId) => {
ws.send(JSON.stringify({ type: 'channel-merged', channelId, targetChannelId }));
});
events.on('channel-updated', (id, name) => {
ws.send(JSON.stringify({ type: 'channel-updated', id, name }));
});
}

backend/src/db.ts Normal file

@@ -0,0 +1,73 @@
import { Database } from 'bun:sqlite';
import { DB_PATH } from './config';
import { logger } from './globals';
export let FTS5Enabled = true;
export const initializeDB = () => {
logger.info("Checking fts");
const ftstest = db.query(`pragma compile_options;`);
const result = ftstest.all() as { compile_options: string }[];
if (result.find((o) => o["compile_options"].includes("ENABLE_FTS5"))) {
logger.info("FTS5 is enabled");
} else {
logger.info("FTS5 is not enabled. Attempting to load...");
try {
db.loadExtension('./fts5');
} catch (e) {
logger.warn("Failed to load FTS5 extension. Disabling FTS5");
FTS5Enabled = false;
}
}
db.run(`
CREATE TABLE IF NOT EXISTS channels (
id INTEGER PRIMARY KEY AUTOINCREMENT,
name TEXT NOT NULL,
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP
)
`);
db.run(`
CREATE TABLE IF NOT EXISTS files (
id INTEGER PRIMARY KEY AUTOINCREMENT,
channelId INTEGER,
filePath TEXT,
fileType TEXT,
fileSize INTEGER,
originalName TEXT,
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (channelId) REFERENCES channels (id) ON DELETE CASCADE
)
`);
db.run(`
CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY AUTOINCREMENT,
channelId INTEGER,
content TEXT,
fileId INTEGER NULL,
createdAt DATETIME DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (channelId) REFERENCES channels (id) ON DELETE CASCADE,
FOREIGN KEY (fileId) REFERENCES files (id) ON DELETE SET NULL
)
`);
db.run(`
CREATE VIRTUAL TABLE IF NOT EXISTS messages_fts USING fts5(
content,
content='messages',
content_rowid='id'
);
`)
return FTS5Enabled;
}
logger.info(`Loading database at ${DB_PATH}`);
export const db = new Database(DB_PATH);
initializeDB();

backend/src/globals.ts Normal file

@@ -0,0 +1,14 @@
import { EventEmitter } from "events";
import { Scheduler } from "./utils/scheduler";
import { jobs } from "./jobs";
import { Logger } from "./logging/logger";
import { ConsoleAdapter } from "./logging/adapters/console-adapter";
export const events = new EventEmitter();
export const scheduler = new Scheduler();
export const logger = new Logger();
logger.addAdapter(new ConsoleAdapter());
jobs.forEach((job) => {
job();
});


@@ -0,0 +1,6 @@
import { loadImage, describeWithOpenAI, describeImage } from "./services/image-description";
import { DESCRIBE_IMAGES_PROMPT, OPENAI_API_KEY } from "./config";
(async () => {
console.log(await describeImage("d:/avatar.jpg"));
})();

backend/src/jobs/describe-image.ts Normal file

@@ -0,0 +1,16 @@
import { events, logger } from "../globals"
import { describeImage } from "../services/image-description";
import { getMessage, updateMessage } from "../services/message-service";
export const describeImageJob = () => {
events.on("file-uploaded", (id, channelId, messageId, filePath, fileType, fileSize, originalName) => {
if (fileType.includes("image")) {
describeImage(filePath).then(async (description) => {
// getMessage returns a promise; await it before appending the description to any existing content.
const msg = await getMessage(messageId) as any;
await updateMessage(messageId, `${msg?.content ? msg.content : ''}\n\n${description}`);
}).catch((e) => {
logger.warn(`Failed to describe image: ${e.message}`);
});
}
});
}

backend/src/jobs/index.ts Normal file

@@ -0,0 +1,7 @@
import { describeImageJob } from "./describe-image";
import { scheduleVacuum } from "./vacuum";
export const jobs = [
scheduleVacuum,
describeImageJob
]

backend/src/jobs/vacuum.ts Normal file

@@ -0,0 +1,9 @@
import { Scheduler, TimeUnit } from "../utils/scheduler";
import { scheduler } from "../globals";
import { db } from "../db";
export const scheduleVacuum = () => {
scheduler.register('vacuum', () => {
// db.query() only prepares a statement; db.run() actually executes the VACUUM.
db.run('VACUUM');
}, 1, TimeUnit.DAY);
}

backend/src/logging/adapter.ts Normal file

@@ -0,0 +1,15 @@
import { type LogEntry } from "./log-entry";
export abstract class LogAdapter {
public log(message: LogEntry) {
if (this.shouldLog(message)) {
this.logImpl(message);
}
}
public abstract logImpl(message: LogEntry): boolean;
public shouldLog(message: LogEntry): boolean {
return true;
}
}

backend/src/logging/adapters/console-adapter.ts Normal file

@@ -0,0 +1,10 @@
import { LogAdapter } from "../adapter";
import { type LogEntry, LogLevel } from "../log-entry";
export class ConsoleAdapter extends LogAdapter {
public logImpl(message: LogEntry): boolean {
console.log(`${LogLevel[message.level]}: ${message.message}; ${new Date(message.timestamp).toLocaleString()}:`);
if (message.additionalInfo) console.log(message.additionalInfo);
return true;
}
}

backend/src/logging/log-entry.ts Normal file

@@ -0,0 +1,12 @@
export interface LogEntry {
level: LogLevel;
timestamp: number;
message: string;
additionalInfo?: any;
}
export enum LogLevel {
info,
warning,
critical
}

backend/src/logging/logger.ts Normal file

@@ -0,0 +1,49 @@
import { LogAdapter } from "./adapter";
import { type LogEntry, LogLevel } from "./log-entry";
export class Logger {
private adapters: LogAdapter[];
public constructor() {
this.adapters = [];
}
public log(message: LogEntry) {
this.adapters.forEach((adapter) => adapter.log(message));
}
public info(message: string, additionalInfo?: any) {
this.log({
level: LogLevel.info,
message,
additionalInfo,
timestamp: Date.now()
})
}
public warn(message: string, additionalInfo?: any) {
this.log({
level: LogLevel.warning,
message,
additionalInfo,
timestamp: Date.now()
})
}
public critical(message: string, additionalInfo?: any) {
this.log({
level: LogLevel.critical,
message,
additionalInfo,
timestamp: Date.now()
})
}
public addAdapter(adapter: LogAdapter) {
this.adapters.push(adapter);
}
public removeAdapter(adapter: LogAdapter) {
// splice (not slice) actually removes the adapter from the array.
const index = this.adapters.indexOf(adapter);
if (index !== -1) this.adapters.splice(index, 1);
}
}

backend/src/middleware/auth.ts Normal file

@@ -0,0 +1,16 @@
import type { NextFunction, Request, Response } from "express";
import { SECRET_KEY } from "../config";
import { logger } from "../globals";
export const authenticate = (req: Request, res: Response, next: NextFunction) => {
const token = req.headers['authorization'];
logger.info(`Checking ${SECRET_KEY} against ${token}`);
if (!token) {
return res.status(403).json({ error: 'No token provided' });
}
if (token === SECRET_KEY) {
next();
} else {
res.status(401).json({ error: "Unauthenticated" })
}
}

backend/src/routes/channel.ts Normal file

@@ -0,0 +1,10 @@
import { Router } from 'express';
import * as ChannelController from '../controllers/channel-controller';
import { authenticate } from '../middleware/auth';
export const router = Router({mergeParams: true});
router.post('/', authenticate, ChannelController.createChannel);
router.get('/', authenticate, ChannelController.getChannels);
router.delete('/:channelId', authenticate, ChannelController.deleteChannel);
router.put('/:channelId/merge', authenticate, ChannelController.mergeChannel);

backend/src/routes/file.ts Normal file

@@ -0,0 +1,9 @@
import { Router } from "express";
import { upload } from "../utils/multer";
import * as FileController from "../controllers/file-controller";
import { authenticate } from "../middleware/auth";
export const router = Router({mergeParams: true});
router.post("/", authenticate, upload.single("file"), FileController.uploadFile);
router.get("/", authenticate, FileController.getFiles);

backend/src/routes/message.ts Normal file

@@ -0,0 +1,11 @@
import { Router } from 'express';
import * as MessageController from '../controllers/message-controller';
import { authenticate } from '../middleware/auth';
export const router = Router({mergeParams: true});
router.post('/', authenticate, MessageController.createMessage);
router.put('/:messageId', authenticate, MessageController.updateMessage);
router.delete('/:messageId', authenticate, MessageController.deleteMessage);
router.get('/', authenticate, MessageController.getMessages);

backend/src/routes/search.ts Normal file

@@ -0,0 +1,7 @@
import { Router } from "express";
import * as SearchController from "../controllers/search-controller";
import { authenticate } from "../middleware/auth";
export const router = Router({mergeParams: true});
router.get("/", authenticate, SearchController.search);

backend/src/server.ts Normal file

@@ -0,0 +1,29 @@
import { app } from "./app";
import { createServer } from "http";
import { WebSocket, WebSocketServer } from "ws";
import { attachEvents } from "./controllers/websocket-controller";
import { logger } from "./globals";
const PORT = process.env.PORT || 3000;
const server = createServer(app);
const wss = new WebSocketServer({ server });
wss.on('connection', (ws: WebSocket) => {
logger.info('Websocket client connected');
attachEvents(ws);
ws.on('message', (message: string) => {
logger.info(`Received message: ${message}`);
});
ws.on('close', () => {
logger.info('Websocket client disconnected');
});
});
server.listen(PORT, () => {
logger.info(`Server is running on http://localhost:${PORT}`);
});

backend/src/services/channel-service.ts Normal file

@@ -0,0 +1,37 @@
import { db } from "../db";
import { events } from "../globals";
export const createChannel = async (name: string) => {
const query = db.query(`INSERT INTO channels (name) VALUES ($name)`);
const result = query.run({ $name: name });
events.emit('channel-created', { id: result.lastInsertRowid, name });
return { id: result.lastInsertRowid, name };
}
export const deleteChannel = async (id: string) => {
const query = db.query(`DELETE FROM channels WHERE id = $channelId`);
const result = query.run({ $channelId: id });
// No need to manually delete messages and files as they are set to cascade on delete in the schema
events.emit('channel-deleted', id);
return result;
}
export const getChannels = async () => {
const query = db.query(`SELECT * FROM channels`);
const rows = query.all();
return rows;
}
export const mergeChannel = async (channelId: string, targetChannelId: string) => {
const query = db.query(`UPDATE messages SET channelId = $targetChannelId WHERE channelId = $channelId`);
const result = query.run({ $channelId: channelId, $targetChannelId: targetChannelId });
events.emit('channel-merged', channelId, targetChannelId);
return result;
}
export const updateChannel = async (id: string, name: string) => {
const query = db.query(`UPDATE channels SET name = $name WHERE id = $id`);
const result = query.run({ $id: id, $name: name });
events.emit('channel-updated', id, name);
return result;
}

backend/src/services/file-service.ts Normal file

@@ -0,0 +1,21 @@
import { db } from "../db";
import { events } from "../globals";
export const uploadFile = async (channelId: string, messageId: string, filePath: string, fileType: string, fileSize: number, originalName: string) => {
const query = db.query(`INSERT INTO files (channelId, filePath, fileType, fileSize, originalName) VALUES ($channelId, $filePath, $fileType, $fileSize, $originalName)`);
const result = query.run({ $channelId: channelId, $filePath: filePath, $fileType: fileType, $fileSize: fileSize, $originalName: originalName } as any);
const fileId = result.lastInsertRowid;
const updateQuery = db.query(`UPDATE messages SET fileId = $fileId WHERE id = $messageId`);
const result2 = updateQuery.run({ $fileId: fileId, $messageId: messageId });
events.emit('file-uploaded', result.lastInsertRowid, channelId, messageId, filePath, fileType, fileSize, originalName);
return result2;
}
export const getFiles = async (messageId: string) => {
// The files table has no messageId column; resolve files through the message's fileId reference.
const query = db.query(`SELECT files.* FROM files JOIN messages ON messages.fileId = files.id WHERE messages.id = $messageId`);
const rows = query.all({ $messageId: messageId });
return rows;
}

backend/src/services/image-description.ts Normal file

@@ -0,0 +1,83 @@
import { Ollama } from "ollama";
import OpenAI from "openai";
import { DESCRIBE_IMAGES_API, DESCRIBE_IMAGES_MAX_TOKENS, DESCRIBE_IMAGES_PROMPT, DESCRIBE_IMAGES_TEMPERATURE, OLLAMA_MODEL, OLLAMA_URL, OPENAI_API_KEY, OPENAI_MODEL } from "../config";
import { readFile } from "fs/promises";
import sharp from "sharp";
export const describeWithOllama = async (image: Buffer) => {
const client = new Ollama({ host: OLLAMA_URL });
const response = await client.chat({
model: OLLAMA_MODEL,
options: {
temperature: DESCRIBE_IMAGES_TEMPERATURE,
},
messages: [
{ role: "system", content: DESCRIBE_IMAGES_PROMPT },
{ role: "user", images: [image], content: "Describe this image." },
]
});
return response.message.content;
}
export const describeWithOpenAI = async (image: Buffer) => {
const client = new OpenAI({
apiKey: OPENAI_API_KEY,
});
const response = await client.chat.completions.create({
model: OPENAI_MODEL,
max_tokens: DESCRIBE_IMAGES_MAX_TOKENS,
temperature: DESCRIBE_IMAGES_TEMPERATURE,
messages: [
{ role: "system", content: DESCRIBE_IMAGES_PROMPT },
{ role: "user", content: [{ type: "text", text: "Describe the following image in a detailed but concise manner." }, { type: "image_url", image_url: { url: imageToBase64URL(image) } }] },
]
})
return response.choices[0].message.content;
}
export const describeImage = async (filePath: string) => {
const image = await loadImage(filePath);
if (DESCRIBE_IMAGES_API === "ollama") {
return describeWithOllama(image);
} else {
return describeWithOpenAI(image);
}
return "";
}
export const loadImage = async (filePath: string) => {
return processImage(filePath);
}
async function processImage(imagePath: string): Promise<Buffer> {
try {
const image = sharp(imagePath);
const metadata = await image.metadata();
const maxDimension = 1024;
// Check if the image needs to be resized
let resizedImage = image;
if (metadata.width && metadata.height && (metadata.width > maxDimension || metadata.height > maxDimension)) {
resizedImage = image.resize({
width: Math.min(metadata.width, maxDimension),
height: Math.min(metadata.height, maxDimension),
fit: sharp.fit.inside,
withoutEnlargement: true
});
}
// Convert the image to JPG
const jpgBuffer = await resizedImage.jpeg().toBuffer();
return jpgBuffer;
} catch (error) {
console.error('Error processing the image:', error);
throw new Error('Failed to process the image.');
}
}
export const imageToBase64URL = (input: Buffer) => {
return `data:image/jpeg;base64,${input.toString('base64')}`;
}

backend/src/services/message-service.ts Normal file

@@ -0,0 +1,83 @@
import { db, FTS5Enabled } from "../db";
import { events } from "../globals";
export const createMessage = async (channelId: string, content: string) => {
const query = db.query(`INSERT INTO messages (channelId, content) VALUES ($channelId, $content)`);
const result = query.run({ $channelId: channelId, $content: content });
const messageId = result.lastInsertRowid;
console.log(`Adding message for search with id ${messageId}`);
// Insert into FTS table if FTS is enabled.
if (FTS5Enabled) {
const query2 = db.query(`INSERT INTO messages_fts (rowid, content) VALUES ($rowId, $content)`);
const result2 = query2.run({ $rowId: messageId, $content: content });
}
events.emit('message-created', messageId, channelId, content);
return messageId;
}
export const updateMessage = async (messageId: string, content: string, append: boolean = false) => {
const query = db.query(`UPDATE messages SET content = $content WHERE id = $id`);
const result = query.run({ $content: content, $id: messageId });
// Update FTS table if enabled. FTS5 virtual tables don't support UPSERT, so replace the indexed row instead.
if (FTS5Enabled) {
const deleteQuery = db.query(`DELETE FROM messages_fts WHERE rowid = $rowId`);
deleteQuery.run({ $rowId: messageId });
const insertQuery = db.query(`INSERT INTO messages_fts (rowid, content) VALUES ($rowId, $content)`);
insertQuery.run({ $rowId: messageId, $content: content });
}
events.emit('message-updated', messageId, content);
return result;
}
export const deleteMessage = async (messageId: string) => {
const query = db.query(`DELETE FROM messages WHERE id = $id`);
const result = query.run({ $id: messageId });
// Remove from FTS table if enabled
if (FTS5Enabled) {
const query2 = db.query(`DELETE FROM messages_fts WHERE rowid = $rowId`);
const result2 = query2.run({ $rowId: messageId });
}
events.emit('message-deleted', messageId);
return result;
}
export const getMessages = async (channelId: string) => {
const query = db.query(`
SELECT
messages.id, messages.channelId, messages.content, messages.createdAt,
files.id as fileId, files.filePath, files.fileType, files.createdAt as fileCreatedAt, files.originalName, files.fileSize
FROM
messages
LEFT JOIN
files
ON
messages.fileId = files.id
WHERE
messages.channelId = $channelId
`);
const rows = query.all({ $channelId: channelId });
return rows;
}
export const getMessage = async (id: string) => {
const query = db.query(`
SELECT
messages.id, messages.channelId, messages.content, messages.createdAt,
files.id as fileId, files.filePath, files.fileType, files.createdAt as fileCreatedAt, files.originalName, files.fileSize
FROM
messages
LEFT JOIN
files
ON
messages.fileId = files.id
WHERE
messages.id = $id
`);
const row = query.get({ $id: id });
return row;
}

backend/src/services/search-service.ts Normal file

@@ -0,0 +1,44 @@
import { db, FTS5Enabled } from "../db";
export const search = async (query: string, channelId?: string) => {
let sql: string;
let params: any;
if (FTS5Enabled) {
if (channelId) {
sql = `
SELECT messages.id, messages.channelId, messages.content, messages.createdAt
FROM messages_fts
JOIN messages ON messages_fts.rowid = messages.id
WHERE messages_fts MATCH lower($query) AND messages.channelId = $channelId
`;
params = { $channelId: channelId, $query: (query || '').toString().toLowerCase() };
} else {
sql = `
SELECT messages.id, messages.channelId, messages.content, messages.createdAt
FROM messages_fts
JOIN messages ON messages_fts.rowid = messages.id
WHERE messages_fts MATCH lower($query)
`;
params = { $query: (query || '').toString().toLowerCase() };
}
} else {
console.log("Performing search without FTS5. This might be very slow.");
if (channelId) {
sql = `
SELECT * FROM messages WHERE LOWER(content) LIKE '%' || LOWER($query) || '%' AND channelId = $channelId
`;
params = { $channelId: channelId, $query: query };
} else {
sql = `
SELECT * FROM messages WHERE LOWER(content) LIKE '%' || LOWER($query) || '%'
`;
params = { $query: query };
}
}
const sqlquery = db.query(sql);
const rows = sqlquery.all(params);
return rows;
}

backend/src/utils/multer.ts Normal file

@@ -0,0 +1,4 @@
import multer from "multer";
import { UPLOAD_DIR } from "../config";
export const upload = multer({ dest: UPLOAD_DIR });

backend/src/utils/scheduler.ts Normal file

@@ -0,0 +1,54 @@
export enum TimeUnit {
SECOND = 1000,
MINUTE = 60 * 1000,
HOUR = 60 * 60 * 1000,
DAY = 24 * 60 * 60 * 1000,
WEEK = 7 * 24 * 60 * 60 * 1000
}
export type Task = () => void;
export interface TaskEntry {
id: Timer;
task: Task;
remainingRuns: number;
}
export class Scheduler {
private tasks: Map<string, TaskEntry> = new Map();
static toMilliseconds(time: number, unit: TimeUnit): number {
return time * unit;
}
register(taskName: string, task: Task, delay: number, unit: TimeUnit, runs: number = Infinity): void {
if (this.tasks.has(taskName)) {
throw new Error(`Task ${taskName} is already registered.`);
}
const performTask = () => {
task();
const taskEntry = this.tasks.get(taskName);
if (taskEntry) {
taskEntry.remainingRuns--;
if (taskEntry.remainingRuns > 0) {
taskEntry.id = setTimeout(performTask, Scheduler.toMilliseconds(delay, unit));
} else {
this.tasks.delete(taskName);
}
}
};
this.tasks.set(taskName, { id: setTimeout(performTask, Scheduler.toMilliseconds(delay, unit)), task, remainingRuns: runs });
}
unregister(taskName: string): void {
const taskEntry = this.tasks.get(taskName);
if (taskEntry) {
clearTimeout(taskEntry.id);
this.tasks.delete(taskName);
}
}
getTasks(): Map<string, TaskEntry> {
return this.tasks;
}
}

backend/tsconfig.json Normal file

@@ -0,0 +1,27 @@
{
"compilerOptions": {
// Enable latest features
"lib": ["ESNext", "DOM"],
"target": "ESNext",
"module": "ESNext",
"moduleDetection": "force",
"jsx": "react-jsx",
"allowJs": true,
// Bundler mode
"moduleResolution": "bundler",
"allowImportingTsExtensions": true,
"verbatimModuleSyntax": true,
"noEmit": true,
// Best practices
"strict": true,
"skipLibCheck": true,
"noFallthroughCasesInSwitch": true,
// Some stricter flags (disabled by default)
"noUnusedLocals": false,
"noUnusedParameters": false,
"noPropertyAccessFromIndexSignature": false
}
}

backend/types.ts Normal file

@@ -0,0 +1,21 @@
export interface Channel {
id: number;
name: string;
created_at: string;
}
export interface Message {
id: number;
channel_id: number;
content: string;
created_at: string;
}
export interface File {
id: number;
channel_id: number;
message_id: number;
file_path: string;
file_type: string;
created_at: string;
}