Compare commits

...

25 Commits

Author SHA1 Message Date
github-actions[bot]
1dad903199 Update version to v1.4.146 and commit 2025-02-27 06:16:20 +00:00
Eugen Eisler
0bec53360e Merge pull request #1319 from jmd1010/pdf-integration-clean
Enhancement: PDF to Markdown Conversion Functionality to the Web Svelte Chat Interface
2025-02-27 07:15:03 +01:00
JM
cf637e4137 Merge branch 'main' into pdf-integration-clean 2025-02-27 01:02:04 -05:00
jmd1010
9507c2cca1 Reinstate file in original location to resolve PR conflict 2025-02-27 01:01:16 -05:00
jmd1010
fa575638d1 Remove pr-1284-update.md from tracking to resolve PR conflict 2025-02-27 00:55:58 -05:00
jmd1010
51220c40d9 Add required UI image assets for feature implementation 2025-02-27 00:11:04 -05:00
jmd1010
d1d62fcc4c Complete directory reorganization by moving pr-1284-update.md to new location 2025-02-26 23:44:56 -05:00
jmd1010
96e6a56e5f Restore file to original location to resolve path conflict 2025-02-26 23:38:14 -05:00
jmd1010
0d7514ea0e Remove pr-1284-update.md from PR scope 2025-02-26 23:21:06 -05:00
jmd1010
a74da4acff Rename pattern descriptions directory to follow consistent naming convention 2025-02-26 23:14:51 -05:00
jmd1010
6d8c3eb6e2 Update README files directory structure and naming convention 2025-02-26 22:23:53 -05:00
jmd1010
adbfa2f6ba Remove pdf-to-markdown folder from PR 2025-02-26 21:57:04 -05:00
github-actions[bot]
f5776637d9 Update version to v1.4.145 and commit 2025-02-26 16:54:02 +00:00
Eugen Eisler
34db384265 Merge pull request #1324 from jaredmontoya/nix-fix
flake: fix/update and enhance
2025-02-26 17:52:47 +01:00
jaredmontoya
1f765c5b53 flake: fix/update 2025-02-26 16:12:05 +01:00
github-actions[bot]
f9395fa108 Update version to v1.4.144 and commit 2025-02-26 08:55:35 +00:00
Eugen Eisler
22d2a3ee19 Upgrade upload artifacts to v4 2025-02-26 09:54:44 +01:00
github-actions[bot]
b64178c292 Update version to v1.4.143 and commit 2025-02-26 08:53:09 +00:00
Eugen Eisler
f7d38fb51f Merge pull request #1264 from danielmiessler/feat/exolab
feat: implement support for exolab
2025-02-26 09:52:13 +01:00
Eugen Eisler
ea6c0b9025 Merge branch 'main' into feat/exolab 2025-02-26 09:50:53 +01:00
jmd1010
4725a94f00 Add complete PDF to Markdown documentation F 2025-02-24 22:33:27 -05:00
jmd1010
15ac5351cf Add Svelte implementation files for PDF integration 2025-02-24 21:46:03 -05:00
jmd1010
f69cda8fab Add PDF to Markdown integration documentation 2025-02-24 21:39:43 -05:00
jmd1010
a0e1f7204d Add PDF to Markdown conversion functionality to the web Svelte chat interface 2025-02-24 17:24:02 -05:00
Eugen Eisler
d25be21939 feat: implement support for https://github.com/exo-explore/exo 2025-01-18 19:55:24 +01:00
53 changed files with 514 additions and 120 deletions

2
.envrc
View File

@@ -1,2 +1,2 @@
watch_file shell.nix
watch_file nix/shell.nix
use flake

View File

@@ -22,6 +22,9 @@ jobs:
- name: Checkout code
uses: actions/checkout@v4
- name: Install Nix
uses: DeterminateSystems/nix-installer-action@main
- name: Set up Go
uses: actions/setup-go@v4
with:
@@ -29,3 +32,6 @@ jobs:
- name: Run tests
run: go test -v ./...
- name: Check Formatting
run: nix flake check

View File

@@ -27,7 +27,7 @@ jobs:
run: zip -r patterns.zip patterns/
- name: Upload Patterns Artifact
uses: actions/upload-artifact@v3
uses: actions/upload-artifact@v4
with:
name: patterns
path: patterns.zip

View File

@@ -25,9 +25,6 @@ jobs:
- name: Install Nix
uses: DeterminateSystems/nix-installer-action@main
- name: Setup Nix Cache
uses: DeterminateSystems/magic-nix-cache-action@main
- name: Set up Git
run: |
git config user.name "github-actions[bot]"
@@ -63,21 +60,21 @@ jobs:
- name: Update version.nix file
run: |
echo "\"${{ env.new_version }}\"" > pkgs/fabric/version.nix
echo "\"${{ env.new_version }}\"" > nix/pkgs/fabric/version.nix
- name: Format source codes
- name: Format source code
run: |
go fmt ./...
nix fmt
- name: Update gomod2nix.toml file
run: |
nix run .#gomod2nix
nix run .#gomod2nix -- --outdir nix/pkgs/fabric
- name: Commit changes
run: |
git add version.go
git add pkgs/fabric/version.nix
git add gomod2nix.toml
git add nix/pkgs/fabric/version.nix
git add nix/pkgs/fabric/gomod2nix.toml
git add .
if ! git diff --staged --quiet; then
git commit -m "Update version to ${{ env.new_tag }} and commit $commit_hash"

View File

@@ -1,10 +1,11 @@
package cli
import (
"github.com/danielmiessler/fabric/core"
"os"
"testing"
"github.com/danielmiessler/fabric/core"
"github.com/stretchr/testify/assert"
)

View File

@@ -2,8 +2,9 @@ package cli
import (
"fmt"
"github.com/atotto/clipboard"
"os"
"github.com/atotto/clipboard"
)
func CopyToClipboard(message string) (err error) {

View File

@@ -6,11 +6,12 @@ import (
"encoding/base64"
"encoding/json"
"fmt"
"github.com/gabriel-vasile/mimetype"
"io/ioutil"
"net/http"
"os"
"path/filepath"
"github.com/gabriel-vasile/mimetype"
)
type Attachment struct {

View File

@@ -1,9 +1,10 @@
package common
import (
"testing"
goopenai "github.com/sashabaranov/go-openai"
"github.com/stretchr/testify/assert"
"testing"
)
func TestNormalizeMessages(t *testing.T) {

View File

@@ -2,6 +2,7 @@ package common
import (
"fmt"
"github.com/samber/lo"
)

View File

@@ -7,6 +7,8 @@ import (
"path/filepath"
"strconv"
"github.com/danielmiessler/fabric/plugins/ai/exolab"
"github.com/samber/lo"
"github.com/danielmiessler/fabric/common"
@@ -55,7 +57,7 @@ func NewPluginRegistry(db *fsdb.Db) (ret *PluginRegistry, err error) {
gemini.NewClient(),
//gemini_openai.NewClient(),
anthropic.NewClient(), siliconcloud.NewClient(),
openrouter.NewClient(), lmstudio.NewClient(), mistral.NewClient(), deepseek.NewClient())
openrouter.NewClient(), lmstudio.NewClient(), mistral.NewClient(), deepseek.NewClient(), exolab.NewClient())
_ = ret.Configure()
return

18
flake.lock generated
View File

@@ -26,11 +26,11 @@
]
},
"locked": {
"lastModified": 1729448365,
"narHash": "sha256-oquZeWTYWTr5IxfwEzgsxjtD8SSFZYLdO9DaQb70vNU=",
"lastModified": 1733668782,
"narHash": "sha256-tPsqU00FhgdFr0JiQUiBMgPVbl1jbPCY5gbFiJycL3I=",
"owner": "nix-community",
"repo": "gomod2nix",
"rev": "5d387097aa716f35dd99d848dc26d8d5b62a104c",
"rev": "514283ec89c39ad0079ff2f3b1437404e4cba608",
"type": "github"
},
"original": {
@@ -41,11 +41,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1729665710,
"narHash": "sha256-AlcmCXJZPIlO5dmFzV3V2XF6x/OpNWUV8Y/FMPGd8Z4=",
"lastModified": 1736344531,
"narHash": "sha256-8YVQ9ZbSfuUk2bUf2KRj60NRraLPKPS0Q4QFTbc+c2c=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "2768c7d042a37de65bb1b5b3268fc987e534c49d",
"rev": "bffc22eb12172e6db3c5dde9e3e5628f8e3e7912",
"type": "github"
},
"original": {
@@ -100,11 +100,11 @@
]
},
"locked": {
"lastModified": 1729613947,
"narHash": "sha256-XGOvuIPW1XRfPgHtGYXd5MAmJzZtOuwlfKDgxX5KT3s=",
"lastModified": 1736154270,
"narHash": "sha256-p2r8xhQZ3TYIEKBoiEhllKWQqWNJNoT9v64Vmg4q8Zw=",
"owner": "numtide",
"repo": "treefmt-nix",
"rev": "aac86347fb5063960eccb19493e0cadcdb4205ca",
"rev": "13c913f5deb3a5c08bb810efd89dc8cb24dd968b",
"type": "github"
},
"original": {

View File

@@ -33,7 +33,7 @@
let
pkgs = nixpkgs.legacyPackages.${system};
in
treefmt-nix.lib.evalModule pkgs ./treefmt.nix
treefmt-nix.lib.evalModule pkgs ./nix/treefmt.nix
);
in
{
@@ -49,7 +49,7 @@
pkgs = nixpkgs.legacyPackages.${system};
goEnv = gomod2nix.legacyPackages.${system}.mkGoEnv { pwd = ./.; };
in
import ./shell.nix {
import ./nix/shell.nix {
inherit pkgs goEnv;
inherit (gomod2nix.legacyPackages.${system}) gomod2nix;
}
@@ -62,7 +62,7 @@
in
{
default = self.packages.${system}.fabric;
fabric = pkgs.callPackage ./pkgs/fabric {
fabric = pkgs.callPackage ./nix/pkgs/fabric {
inherit (gomod2nix.legacyPackages.${system}) buildGoApplication;
};
inherit (gomod2nix.legacyPackages.${system}) gomod2nix;

View File

@@ -2,9 +2,10 @@ package main
import (
"fmt"
"github.com/jessevdk/go-flags"
"os"
"github.com/jessevdk/go-flags"
"github.com/danielmiessler/fabric/cli"
)

View File

@@ -6,9 +6,11 @@
buildGoApplication {
pname = "fabric-ai";
version = import ./version.nix;
src = ../../.;
pwd = ../../.;
modules = ../../gomod2nix.toml;
src = ../../../.;
pwd = ../../../.;
modules = ./gomod2nix.toml;
doCheck = false;
ldflags = [
"-s"

View File

@@ -0,0 +1 @@
"1.4.146"

View File

@@ -6,6 +6,7 @@
statix.enable = true;
nixfmt.enable = true;
goimports.enable = true;
gofmt.enable = true;
};
}

View File

@@ -1 +0,0 @@
"1.4.142"

View File

@@ -1,10 +1,11 @@
package azure
import (
"strings"
"github.com/danielmiessler/fabric/plugins"
"github.com/danielmiessler/fabric/plugins/ai/openai"
goopenai "github.com/sashabaranov/go-openai"
"strings"
)
func NewClient() (ret *Client) {

View File

@@ -1,10 +1,11 @@
package dryrun
import (
"github.com/danielmiessler/fabric/common"
"github.com/sashabaranov/go-openai"
"reflect"
"testing"
"github.com/danielmiessler/fabric/common"
"github.com/sashabaranov/go-openai"
)
// Test generated using Keploy

View File

@@ -0,0 +1,45 @@
package exolab
import (
"strings"
"github.com/danielmiessler/fabric/plugins"
"github.com/danielmiessler/fabric/plugins/ai/openai"
goopenai "github.com/sashabaranov/go-openai"
)
func NewClient() (ret *Client) {
ret = &Client{}
ret.Client = openai.NewClientCompatibleNoSetupQuestions("Exolab", ret.configure)
ret.ApiBaseURL = ret.AddSetupQuestion("API Base URL", true)
ret.ApiBaseURL.Value = "http://localhost:52415"
ret.ApiModels = ret.AddSetupQuestionCustom("models", true,
"Enter your deployed Exolab models (comma separated)")
return
}
type Client struct {
*openai.Client
ApiModels *plugins.SetupQuestion
apiModels []string
}
func (oi *Client) configure() (err error) {
oi.apiModels = strings.Split(oi.ApiModels.Value, ",")
config := goopenai.DefaultConfig("")
config.BaseURL = oi.ApiBaseURL.Value
oi.ApiClient = goopenai.NewClientWithConfig(config)
return
}
func (oi *Client) ListModels() (ret []string, err error) {
ret = oi.apiModels
return
}

View File

@@ -4,9 +4,10 @@ import (
"context"
"errors"
"fmt"
"strings"
"github.com/danielmiessler/fabric/plugins"
goopenai "github.com/sashabaranov/go-openai"
"strings"
"github.com/danielmiessler/fabric/common"
"github.com/google/generative-ai-go/genai"

View File

@@ -1,8 +1,9 @@
package gemini
import (
"github.com/google/generative-ai-go/genai"
"testing"
"github.com/google/generative-ai-go/genai"
)
// Test generated using Keploy

View File

@@ -4,10 +4,11 @@ import (
"context"
"errors"
"fmt"
"github.com/danielmiessler/fabric/plugins"
"io"
"log/slog"
"github.com/danielmiessler/fabric/plugins"
"github.com/danielmiessler/fabric/common"
"github.com/samber/lo"
"github.com/sashabaranov/go-openai"
@@ -18,6 +19,16 @@ func NewClient() (ret *Client) {
}
func NewClientCompatible(vendorName string, defaultBaseUrl string, configureCustom func() error) (ret *Client) {
ret = NewClientCompatibleNoSetupQuestions(vendorName, configureCustom)
ret.ApiKey = ret.AddSetupQuestion("API Key", true)
ret.ApiBaseURL = ret.AddSetupQuestion("API Base URL", false)
ret.ApiBaseURL.Value = defaultBaseUrl
return
}
func NewClientCompatibleNoSetupQuestions(vendorName string, configureCustom func() error) (ret *Client) {
ret = &Client{}
if configureCustom == nil {
@@ -30,10 +41,6 @@ func NewClientCompatible(vendorName string, defaultBaseUrl string, configureCust
ConfigureCustom: configureCustom,
}
ret.ApiKey = ret.AddSetupQuestion("API Key", true)
ret.ApiBaseURL = ret.AddSetupQuestion("API Base URL", false)
ret.ApiBaseURL.Value = defaultBaseUrl
return
}

View File

@@ -2,6 +2,7 @@ package ai
import (
"context"
"github.com/danielmiessler/fabric/plugins"
goopenai "github.com/sashabaranov/go-openai"

View File

@@ -4,8 +4,9 @@ import (
"bytes"
"context"
"fmt"
"github.com/danielmiessler/fabric/plugins"
"sync"
"github.com/danielmiessler/fabric/plugins"
)
func NewVendorsManager() *VendorsManager {

View File

@@ -2,10 +2,11 @@ package fsdb
import (
"fmt"
"github.com/joho/godotenv"
"os"
"path/filepath"
"time"
"github.com/joho/godotenv"
)
func NewDb(dir string) (db *Db) {

View File

@@ -2,6 +2,7 @@ package fsdb
import (
"fmt"
"github.com/danielmiessler/fabric/common"
goopenai "github.com/sashabaranov/go-openai"
)

View File

@@ -1,8 +1,9 @@
package fsdb
import (
goopenai "github.com/sashabaranov/go-openai"
"testing"
goopenai "github.com/sashabaranov/go-openai"
)
func TestSessions_GetOrCreateSession(t *testing.T) {

View File

@@ -2,6 +2,7 @@ package converter
import (
"bytes"
"github.com/go-shiori/go-readability"
)

View File

@@ -5,13 +5,14 @@ import (
"context"
"encoding/json"
"fmt"
"github.com/danielmiessler/fabric/core"
"github.com/gin-gonic/gin"
"io"
"log"
"net/http"
"strings"
"time"
"github.com/danielmiessler/fabric/core"
"github.com/gin-gonic/gin"
)
type OllamaModel struct {

View File

@@ -2,10 +2,11 @@ package restapi
import (
"fmt"
"github.com/danielmiessler/fabric/plugins/db"
"github.com/gin-gonic/gin"
"io"
"net/http"
"github.com/danielmiessler/fabric/plugins/db"
"github.com/gin-gonic/gin"
)
// StorageHandler defines the handler for storage-related operations

View File

@@ -1,3 +1,3 @@
package main
var version = "v1.4.142"
var version = "v1.4.146"

View File

@@ -0,0 +1,77 @@
## PDF TO MARKDOWN CONVERSION IMPLEMENTATION
- PDF to Markdown conversion functionality for the web interface
- Automatic detection and processing of PDF files in chat
- Conversion to markdown format for LLM processing
- Installation instructions from the pdf-to-markdown repository
The PDF conversion module has been integrated into the Svelte web interface. Once installed, it automatically detects PDF files in the chat interface and converts them to Markdown for LLM processing.
## HOW TO INSTALL
# FROM FABRIC ROOT DIRECTORY
cd web
# Install in this sequence:
# Step 1
npm install -D patch-package
# Step 2
npm install -D pdfjs-dist@2.5.207
# Step 3
npm install -D github:jzillmann/pdf-to-markdown#modularize
## 🎥 Demo Video (4 min)
https://youtu.be/bhwtWXoMASA
# Integration with Svelte
The integration approach focused on using the library's high-level API while maintaining SSR compatibility:
- Create PdfConversionService for PDF processing
- Handle file uploads in ChatInput component
- Convert PDF content to markdown text
- Integrate with existing chat processing flow
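A condensed sketch of the SSR-compatibility piece, mirroring the `PdfConversionService` constructor that appears later in this diff: the PDF.js worker is only configured when running in the browser.

```typescript
import * as pdfjs from 'pdfjs-dist';

// During SvelteKit server-side rendering there is no `window`, so the
// PDF.js worker must only be configured on the client.
if (typeof window !== 'undefined') {
  const workerUrl = new URL('pdfjs-dist/build/pdf.worker.min.js', import.meta.url);
  pdfjs.GlobalWorkerOptions.workerSrc = workerUrl.href;
}
```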
### How it Works
The PDF to Markdown conversion is implemented as a separate module located in the `pdf-to-markdown` directory. It leverages the `pdf-parse` library (likely via `PdfParser.ts`) to parse PDF documents and extract text content. The core logic resides in `PdfPipeline.ts`, which orchestrates the PDF parsing and conversion process. `pdf-to-markdown` is a fork of `pdf.js`, Mozilla's PDF parsing and rendering platform, which is used as the raw parser.
Here's a simplified breakdown of the process:
1. **PDF Parsing:** The `PdfParser.ts` uses `pdf-parse` to read the PDF file and extract text content from each page.
2. **Content Extraction:** The extracted text content is processed to identify text elements, formatting, and structure.
3. **Markdown Conversion:** The `PdfPipeline.ts` then converts the extracted and processed text content into Markdown format. This involves mapping PDF elements to Markdown syntax, attempting to preserve formatting like headings, lists, and basic text styles.
4. **Frontend Integration:** The `PdfConversionService.ts` in the `web/src/lib/services` directory acts as a frontend service that utilizes the `pdf-to-markdown` module. It provides a `convertToMarkdown` function that takes a File object (PDF file) as input, calls the `pdf-to-markdown` module to perform the conversion, and returns the Markdown output as a string.
5. **Chat Input Integration:** The `ChatInput.svelte` component uses the `PdfConversionService` to convert uploaded PDF files to Markdown before sending the content to the chat service for pattern processing.
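Putting the steps together, a minimal sketch of the upload path, assuming only the `convertToMarkdown(file): Promise<string>` signature shown later in this diff (`fileToMarkdown` is a hypothetical helper name, not part of the PR):

```typescript
import { PdfConversionService } from '$lib/services/PdfConversionService';

const pdfService = new PdfConversionService();

// Convert an uploaded file to text suitable for pattern processing:
// PDFs go through the pdf-to-markdown pipeline, everything else is read as plain text.
async function fileToMarkdown(file: File): Promise<string> {
  if (file.type === 'application/pdf') {
    return pdfService.convertToMarkdown(file);
  }
  return file.text();
}
```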
### File Changes
The following files were added or modified to implement the PDF to Markdown conversion:
**New files:**
* `pdf-to-markdown/`: (New directory for the PDF to Markdown module)
* `pdf-to-markdown/package.json`: Defines dependencies and build scripts for the PDF to Markdown module.
* `pdf-to-markdown/tsconfig.json`: TypeScript configuration for the PDF to Markdown module.
* `pdf-to-markdown/src/`: Source code directory for the PDF to Markdown module.
* `pdf-to-markdown/src/index.ts`: Entry point of the PDF to Markdown module.
* `pdf-to-markdown/src/PdfPipeline.ts`: Core logic for PDF to Markdown conversion pipeline.
* `pdf-to-markdown/src/PdfParser.ts`: PDF parsing logic using `pdf-parse`.
* `web/src/lib/services/PdfConversionService.ts`: (New file)
* Frontend service to use the `pdf-to-markdown` module and expose `convertToMarkdown` function.
**Modified files:**
* `web/src/lib/components/chat/ChatInput.svelte`:
* Modified to import and use the `PdfConversionService` in the `readFileContent` function to handle PDF files.
* Modified `readFileContent` to call `pdfService.convertToMarkdown` for PDF files.
These file changes introduce the new PDF to Markdown conversion functionality and integrate it into the chat input component of the web interface.

View File

@@ -13,6 +13,12 @@
// import { obsidianSettings } from '$lib/store/obsidian-store';
import { languageStore } from '$lib/store/language-store';
import { obsidianSettings, updateObsidianSettings } from '$lib/store/obsidian-store';
import { PdfConversionService } from '$lib/services/PdfConversionService';
const pdfService = new PdfConversionService();
const chatService = new ChatService();
let userInput = "";
@@ -22,7 +28,8 @@
let uploadedFiles: string[] = [];
let fileContents: string[] = [];
let isProcessingFiles = false;
let isFileIndicatorVisible = false; // Controls visibility of the "files attached" indicator
let fileButtonKey = false; // Toggled to force re-creation of the FileButton
function detectYouTubeURL(input: string): boolean {
const youtubePattern = /(?:https?:\/\/)?(?:www\.)?(?:youtube\.com|youtu\.be)/i;
const isYoutube = youtubePattern.test(input);
@@ -76,43 +83,162 @@
}
async function handleFileUpload(e: Event) {
if (!files || files.length === 0) return;
uploadedFiles = []; // Clear uploadedFiles at the beginning
if (!files || files.length === 0) return;
if (uploadedFiles.length >= 5 || (uploadedFiles.length + files.length) > 5) {
toastStore.trigger({
message: 'Maximum 5 files allowed',
background: 'variant-filled-error'
});
return;
}
isProcessingFiles = true;
try {
for (let i = 0; i < files.length && uploadedFiles.length < 5; i++) {
const file = files[i];
const content = await readFileContent(file);
fileContents.push(content);
uploadedFiles = [...uploadedFiles, file.name];
}
} catch (error) {
toastStore.trigger({
message: 'Error processing files: ' + (error as Error).message,
background: 'variant-filled-error'
});
} finally {
isProcessingFiles = false;
}
}
function readFileContent(file: File): Promise<string> {
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = (e) => resolve(e.target?.result as string);
reader.onerror = (e) => reject(new Error('Failed to read file'));
reader.readAsText(file);
if (uploadedFiles.length >= 5 || (uploadedFiles.length + files.length) > 5) {
toastStore.trigger({
message: 'Maximum 5 files allowed',
background: 'variant-filled-error'
});
return;
}
isProcessingFiles = true;
try {
// Add processing indicator to message store
messageStore.update(messages => [...messages, {
role: 'system',
content: 'Processing files...',
format: 'loading'
}]);
for (let i = 0; i < files.length && uploadedFiles.length < 5; i++) {
const file = files[i];
const content = await readFileContent(file);
fileContents.push(content);
uploadedFiles = [...uploadedFiles, file.name];
// Update processing status per file
messageStore.update(messages => {
const newMessages = [...messages];
const lastMessage = newMessages[newMessages.length - 1];
if (lastMessage?.format === 'loading') {
lastMessage.content = `Processing ${file.name} (${file.type})...`;
}
return newMessages;
});
}
// Remove processing message on completion
messageStore.update(messages =>
messages.filter(m => m.format !== 'loading')
);
} catch (error) {
toastStore.trigger({
message: 'Error processing files: ' + (error as Error).message,
background: 'variant-filled-error'
});
// Clean up processing message on error
messageStore.update(messages =>
messages.filter(m => m.format !== 'loading')
);
} finally {
isProcessingFiles = false;
}
}
async function readFileContent(file: File): Promise<string> {
// Log initial file metadata
console.log('Reading file:', {
name: file.name,
type: file.type,
size: file.size,
lastModified: new Date(file.lastModified).toISOString()
});
// Handle PDF files
if (file.type === 'application/pdf') {
try {
// Start PDF processing
console.log('Starting PDF conversion process');
const markdown = await pdfService.convertToMarkdown(file);
// Validate conversion result
console.log('PDF conversion completed:', {
resultLength: markdown.length,
preview: markdown.substring(0, 100)
});
// Ensure we have valid content
if (!markdown || markdown.trim().length === 0) {
throw new Error('PDF conversion returned empty content');
}
// Add to fileContents for pattern processing
fileContents.push(markdown);
// Prepare enhanced prompt with system instructions
const enhancedPrompt = `${$systemPrompt}\nAnalyze and process the provided content according to these instructions.`;
// Format final content with proper labeling
const finalContent = `${userInput}\n\nFile Contents (PDF):\n${markdown}`;
// Process through pattern system
await sendMessage(finalContent, enhancedPrompt);
return markdown;
} catch (error) {
console.error('PDF Conversion error:', {
error,
fileName: file.name,
fileSize: file.size
});
const errorMessage = error instanceof Error
? error.message
: 'Unknown error during PDF conversion';
throw new Error(`Failed to convert PDF ${file.name}: ${errorMessage}`);
}
}
// Handle text files
return new Promise((resolve, reject) => {
const reader = new FileReader();
reader.onload = async (e) => {
const content = e.target?.result as string;
console.log('Text file processed:', {
fileName: file.name,
contentLength: content.length,
preview: content.substring(0, 100)
});
// resolve(content);
const enhancedPrompt = `${$systemPrompt}\nAnalyze and process the provided content according to these instructions.`;
const finalContent = `${userInput}\n\nFile Contents (Text):\n${content}`;
await sendMessage(finalContent, enhancedPrompt);
resolve(content);
};
reader.onerror = (e) => {
console.error('FileReader error:', {
error: reader.error,
fileName: file.name
});
reject(new Error(`Failed to read ${file.name}: ${reader.error?.message}`));
};
// Start reading the file
reader.readAsText(file);
});
}
async function saveToObsidian(content: string) {
if (!$obsidianSettings.saveToObsidian) {
console.log('Obsidian saving is disabled');
@@ -251,31 +377,56 @@
}
}
async function handleSubmit() {
if (!userInput.trim()) return;
try {
console.log('\n=== Submit Handler Start ===');
if (isYouTubeURL) {
console.log('2a. Starting YouTube flow');
await processYouTubeURL(userInput);
return;
}
const finalContent = fileContents.length > 0
? userInput + '\n\nFile Contents:\n' + fileContents.join('\n\n')
: userInput;
await sendMessage(finalContent);
userInput = "";
uploadedFiles = [];
fileContents = [];
} catch (error) {
console.error('Chat submission error:', error);
async function handleSubmit() {
if (!userInput.trim()) return;
try {
console.log('\n=== Submit Handler Start ===');
if (isYouTubeURL) {
console.log('2a. Starting YouTube flow');
await processYouTubeURL(userInput);
return;
}
const enhancedPrompt = fileContents.length > 0
? `${$systemPrompt}\nAnalyze and process the provided content according to these instructions.`
: $systemPrompt;
// Hide raw content from display but keep it for processing
messageStore.update(messages => [...messages, {
role: 'system',
content: 'Processing content...',
format: 'loading'
}]);
userInput = ""; // Reset userInput BEFORE sendMessage
uploadedFiles = []; // Reset uploadedFiles BEFORE sendMessage
fileContents = []; // Reset fileContents BEFORE sendMessage
fileButtonKey = !fileButtonKey; // Toggle key to force re-creation
const finalContent = fileContents.length > 0
? `${userInput}\n\nFile Contents (${uploadedFiles.map(f => f.endsWith('.pdf') ? 'PDF' : 'Text').join(', ')}):\n${fileContents.join('\n\n---\n\n')}`
: userInput;
await sendMessage(finalContent, enhancedPrompt);
} catch (error) {
console.error('Chat submission error:', error);
}
}
function handleKeydown(event: KeyboardEvent) {
if (event.key === 'Enter' && !event.shiftKey) {
@@ -300,11 +451,12 @@
/>
<div class="absolute bottom-3 right-3 flex items-center gap-2">
<div class="flex items-center gap-2">
{#if uploadedFiles.length > 0}
{#if isFileIndicatorVisible}
<span class="text-xs text-white/70">
{uploadedFiles.length} file{uploadedFiles.length > 1 ? 's' : ''} attached
</span>
{/if}
{#key fileButtonKey}
<FileButton
name="file-upload"
button="btn-icon variant-ghost"
@@ -313,12 +465,10 @@
disabled={isProcessingFiles || uploadedFiles.length >= 5}
class="h-10 w-10 bg-primary-800/30 hover:bg-primary-800/50 rounded-full transition-colors"
>
{#if uploadedFiles.length > 0}
<FileCheck class="w-5 h-5" />
{:else}
<Paperclip class="w-5 h-5" />
{/if}
<Paperclip class="w-5 h-5" />
</FileButton>
{/key}
<Button
type="button"
variant="ghost"

View File

@@ -48,8 +48,8 @@
</section>
<section>
<h3 class="text-base font-bold text-primary-300 mb-2">File Support</h3>
<p class="text-sm text-muted-foreground">You can attach files to your messages using the paperclip icon. The AI will analyze the content of text files.</p>
<h3 class="text-base font-bold text-primary-300 mb-2">Upload PDFs and other text files</h3>
<p class="text-sm text-muted-foreground">Now accepts PDFs and other text files as input using the paperclip icon. PDFs are converted to Markdown automatically.</p>
</section>
<section>

View File

@@ -159,28 +159,31 @@ export class ChatService {
}
});
}
private createChatPrompt(userInput: string, systemPromptText?: string): ChatPrompt {
const config = get(modelConfig);
const language = get(languageStore);
const languageInstruction = language !== 'en'
? `You MUST respond in ${language} language. ALL output, including section headers, titles, and formatting, MUST be translated into ${language}. It is CRITICAL that you translate ALL headers, such as SUMMARY, IDEAS, QUOTES, TAKEAWAYS, MAIN POINTS, etc., into ${language}. Maintain markdown formatting in the response. Do not output any English headers.`
? `You MUST respond in ${language} language. All output must be in ${language}. `
// ? `You MUST respond in ${language} language. ALL output, including section headers, titles, and formatting, MUST be translated into ${language}. It is CRITICAL that you translate ALL headers, such as SUMMARY, IDEAS, QUOTES, TAKEAWAYS, MAIN POINTS, etc., into ${language}. Maintain markdown formatting in the response. Do not output any English headers.`
: '';
const finalSystemPrompt = languageInstruction + (systemPromptText ?? get(systemPrompt));
console.log('Final system prompt in createChatPrompt:', finalSystemPrompt);
const finalUserInput = language !== 'en'
? `${userInput}\n\nIMPORTANT: Respond in ${language} language only.`
: userInput;
return {
userInput: finalUserInput,
systemPrompt: finalSystemPrompt,
model: config.model,
patternName: get(selectedPatternName)
};
}
}

View File

@@ -0,0 +1,81 @@
import { createPipeline, transformers } from 'pdf-to-markdown-core/lib/src';
import { PARSE_SCHEMA } from 'pdf-to-markdown-core/lib/src/PdfParser';
import * as pdfjs from 'pdfjs-dist';
export class PdfConversionService {
constructor() {
if (typeof window !== 'undefined') {
console.log('PDF.js version:', pdfjs.version);
const workerUrl = new URL(
'pdfjs-dist/build/pdf.worker.min.js',
import.meta.url
);
console.log('Worker URL:', workerUrl.href);
pdfjs.GlobalWorkerOptions.workerSrc = workerUrl.href;
console.log('Worker configuration complete');
}
}
async convertToMarkdown(file: File): Promise<string> {
console.log('Starting PDF conversion:', {
fileName: file.name,
fileSize: file.size
});
const buffer = await file.arrayBuffer();
console.log('Buffer created:', buffer.byteLength);
const pipeline = createPipeline(pdfjs, {
transformConfig: {
transformers
}
});
console.log('Pipeline created');
const result = await pipeline.parse(
buffer,
(progress) => console.log('Processing:', {
stage: progress.stages,
details: progress.stageDetails,
progress: progress.stageProgress
})
);
console.log('Parse complete, validating result');
const transformed = result.transform();
console.log('Transform applied:', transformed);
const markdown = transformed.convert({
convert: (items) => {
console.log('PDF Structure:', {
itemCount: items.length,
firstItem: items[0],
schema: PARSE_SCHEMA // ['transform', 'width', 'height', 'str', 'fontName', 'dir']
});
const text = items
.map(item => item.value('str')) // Using 'str' instead of 'text' based on PARSE_SCHEMA
.filter(Boolean)
.join('\n');
console.log('Converted text:', {
length: text.length,
preview: text.substring(0, 100)
});
return text;
}
});
return markdown;
}
}

BIN
web/static/brain.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 2.0 MiB

BIN
web/static/electric.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 1.7 MiB

BIN
web/static/fabric-logo.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 42 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 491 KiB

BIN
web/static/favicon.png Normal file

Binary file not shown.

After

Width:  |  Height:  |  Size: 42 MiB

Binary file not shown.

After

Width:  |  Height:  |  Size: 86 KiB

View File

@@ -4,6 +4,11 @@ import { defineConfig } from 'vite';
export default defineConfig({
plugins: [sveltekit(), purgeCss()],
build: {
commonjsOptions: {
transformMixedEsModules: true
}
},
define: {
'process.env': {
NODE_ENV: JSON.stringify(process.env.NODE_ENV)