Mirror of https://github.com/foambubble/foam.git (synced 2026-01-10 22:48:09 -05:00)

Compare commits: 128 commits (SHA1):

129482a43e, 0c1c4da154, 7f4b700b21, 686e05ed25, b2c7ecbb3d, 2c643e0c63, 3b33d3d696, 87633e68b1,
6c7b558f36, 12037704d7, e549fb8c21, ac7d3243c4, 748df5e352, dcd46f1378, f9f751a27a, 0764da0dd6,
f747d7445a, eb74e57a9e, a01cf8ec8d, 5b63fa8108, ddf7ddf7b3, 4b263667ea, 309194b3c3, c4f35b7649,
b9e18de7e7, 23cf5a021e, 8231ed14c5, 3bea283c04, a3cffe8418, 675e7fa216, 87d12bf3af, e118ab74b5,
04a61eed0e, 350b3005f1, f7293b1eb4, 672eb6ed20, 37a9bc49bc, 38741ca52e, ed762618ed, 21a32382a2,
7e6c041b87, c9a0a1d53c, 0516088656, f98ff336bf, 1b1396d949, ebaab2ee59, c6a754f1a8, 3fb35494d4,
a7af7689a4, 5b7a2ab022, 88227d4028, a531c9f9cd, ff172dd709, 8bad56f71e, 4e608a67a9, a2f7c8a549,
63c6b7056e, b48268e20f, f5f476e717, 25172ee100, cbb0dab124, d570983e16, b5e979ead6, aed907663a,
a65325a6e1, 772cba4b43, f1a0054141, 854e329c90, 0978bebd5b, 6eaae23e19, 4c615bdb02, 3adf853b89,
111c7718c4, 9c7f03d62e, 0d90fc5c5a, 537c78b630, 6d210590b2, ab8e97ce0b, f756d9c966, aa7669f8ad,
38bd5f67f2, 336b8cfbba, ea03b86338, 449c062566, 880c2e3d3b, 17cb619480, 6deae95d80, 1c0ebb8af7,
fe56823e76, 6956e0779a, c8f1f8e03a, 9589071760, 6f65c10746, e04409e74f, dba0a72d98, fd23b1d010,
cfb946a5f2, 32c3a484d6, 4195797024, fa405f5f65, 4fd573b9e4, f613e1b9e2, 0ada7d8e2c, 8b39bcdf16,
6073dc246d, 5b671d59a8, 8abea48b5c, 2eeb2e156b, dc76660a63, e8eeffa4ca, 7d4f5e1532, e7749cd52b,
c6a4eab744, c88bd6f2f0, 304a803310, 632c41ac5f, ec636809d8, af43a31ae8, 7235af70dd, de84541692,
84fab168ce, 4f116cfc88, fd71dbe557, df4bf5a5cb, 122db20695, 3b40e26a83, bbe44ea21b, 59bb2eb38f
@@ -788,6 +788,132 @@
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "iam-yan",
|
||||
"name": "Yan",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/48427014?v=4",
|
||||
"profile": "https://github.com/iam-yan",
|
||||
"contributions": [
|
||||
"doc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "jimt",
|
||||
"name": "Jim Tittsler",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/180326?v=4",
|
||||
"profile": "https://WikiEducator.org/User:JimTittsler",
|
||||
"contributions": [
|
||||
"doc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "MalcolmMielle",
|
||||
"name": "Malcolm Mielle",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/4457840?v=4",
|
||||
"profile": "http://malcolmmielle.wordpress.com/",
|
||||
"contributions": [
|
||||
"doc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "veesar",
|
||||
"name": "Veesar",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/74916913?v=4",
|
||||
"profile": "https://snippets.page/",
|
||||
"contributions": [
|
||||
"doc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "bentongxyz",
|
||||
"name": "bentongxyz",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/60358804?v=4",
|
||||
"profile": "https://github.com/bentongxyz",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "techCarpenter",
|
||||
"name": "Brian DeVries",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/42778030?v=4",
|
||||
"profile": "https://brianjdevries.com",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "cliffordfajardo",
|
||||
"name": "Clifford Fajardo ",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/6743796?v=4",
|
||||
"profile": "http://Cliffordfajardo.com",
|
||||
"contributions": [
|
||||
"tool"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "chrisUsick",
|
||||
"name": "Chris Usick",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/6589365?v=4",
|
||||
"profile": "http://cu-dev.ca",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "josephdecock",
|
||||
"name": "Joe DeCock",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/1145533?v=4",
|
||||
"profile": "https://github.com/josephdecock",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "drewtyler",
|
||||
"name": "Drew Tyler",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/5640816?v=4",
|
||||
"profile": "http://www.drewtyler.com",
|
||||
"contributions": [
|
||||
"doc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "Lauviah0622",
|
||||
"name": "Lauviah0622",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/43416399?v=4",
|
||||
"profile": "https://github.com/Lauviah0622",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "joshdover",
|
||||
"name": "Josh Dover",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/1813008?v=4",
|
||||
"profile": "https://www.elastic.co/elastic-agent",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "phelma",
|
||||
"name": "Phil Helm",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/4057948?v=4",
|
||||
"profile": "http://phelm.co.uk",
|
||||
"contributions": [
|
||||
"doc"
|
||||
]
|
||||
},
|
||||
{
|
||||
"login": "lingyv-li",
|
||||
"name": "Larry Li",
|
||||
"avatar_url": "https://avatars.githubusercontent.com/u/8937944?v=4",
|
||||
"profile": "https://github.com/lingyv-li",
|
||||
"contributions": [
|
||||
"code"
|
||||
]
|
||||
}
|
||||
],
|
||||
"contributorsPerLine": 7,
|
||||
|
||||
@@ -5,14 +5,56 @@
|
||||
"ecmaVersion": 6,
|
||||
"sourceType": "module"
|
||||
},
|
||||
"plugins": ["@typescript-eslint", "import"],
|
||||
"env": { "node": true, "es6": true },
|
||||
"plugins": ["@typescript-eslint", "import", "jest"],
|
||||
"extends": [
|
||||
"eslint:recommended",
|
||||
"plugin:@typescript-eslint/recommended",
|
||||
"plugin:import/recommended",
|
||||
"plugin:import/typescript",
|
||||
"plugin:jest/recommended"
|
||||
],
|
||||
"rules": {
|
||||
"@typescript-eslint/class-name-casing": "warn",
|
||||
"@typescript-eslint/semi": "warn",
|
||||
"curly": "warn",
|
||||
"eqeqeq": "warn",
|
||||
"no-throw-literal": "warn",
|
||||
"semi": "off",
|
||||
"require-await": "warn"
|
||||
}
|
||||
"no-use-before-define": "off",
|
||||
"@typescript-eslint/no-use-before-define": "off",
|
||||
"@typescript-eslint/no-explicit-any": "off",
|
||||
"@typescript-eslint/no-non-null-assertion": "off",
|
||||
"@typescript-eslint/explicit-function-return-type": "off",
|
||||
"@typescript-eslint/interface-name-prefix": "off",
|
||||
"import/no-extraneous-dependencies": [
|
||||
"error",
|
||||
{
|
||||
"devDependencies": ["**/src/test/**", "**/src/**/*{test,spec}.ts"]
|
||||
}
|
||||
]
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
// Restrict usage of fs module outside tests to keep foam compatible with the browser
|
||||
"files": ["**/src/**"],
|
||||
"excludedFiles": ["**/src/test/**", "**/src/**/*{test,spec}.ts"],
|
||||
"rules": {
|
||||
"no-restricted-imports": [
|
||||
"error",
|
||||
{
|
||||
"name": "fs",
|
||||
"message": "Extension code must not rely Node.js filesystem, use vscode.workspace.fs instead."
|
||||
}
|
||||
]
|
||||
}
|
||||
}
|
||||
],
|
||||
"settings": {
|
||||
"import/core-modules": ["vscode"],
|
||||
"import/parsers": {
|
||||
"@typescript-eslint/parser": [".ts", ".tsx"]
|
||||
},
|
||||
"import/resolver": {
|
||||
"typescript": {
|
||||
"alwaysTryTypes": true
|
||||
}
|
||||
}
|
||||
},
|
||||
"ignorePatterns": ["**/core/common/**", "*.js"],
|
||||
"reportUnusedDisableDirectives": true
|
||||
}
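
The `no-restricted-imports` override above is what keeps extension code browser-compatible: outside tests, file access must go through VS Code's virtual filesystem API rather than Node's `fs`. As a minimal sketch of what that looks like in practice (the helper name and its behaviour are illustrative, not code from this repository):

```ts
import * as vscode from 'vscode';

// Read and write a note through the workspace filesystem instead of Node's `fs`.
// This works both on desktop and in browser/virtual-filesystem environments.
export async function appendLineToNote(uri: vscode.Uri, line: string): Promise<void> {
  const bytes = await vscode.workspace.fs.readFile(uri); // Uint8Array
  const text = new TextDecoder('utf-8').decode(bytes);
  const updated = text.endsWith('\n') ? `${text}${line}\n` : `${text}\n${line}\n`;
  await vscode.workspace.fs.writeFile(uri, new TextEncoder().encode(updated));
}
```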
|
||||
|
||||
23
.github/ISSUE_TEMPLATE/bug.md
vendored
23
.github/ISSUE_TEMPLATE/bug.md
vendored
@@ -1,23 +0,0 @@
|
||||
---
|
||||
name: Bug report
|
||||
about: Create a report to help us be foamier
|
||||
labels: bug
|
||||
---
|
||||
|
||||
- Foam version: <!-- Check in the VSCode extension tab. -->
|
||||
- Platform: Windows | Mac | Linux
|
||||
- Issue occur on the [foam template](https://github.com/foambubble/foam-template) repo: Yes | No
|
||||
|
||||
**Summary**
|
||||
<!-- A clear and concise description of what the bug is.-->
|
||||
|
||||
**Steps to reproduce**
|
||||
1.
|
||||
2.
|
||||
|
||||
**Additional information**
|
||||
<!-- Add any other context about the problem here. -->
|
||||
Feel free to attach any of the following that might help with debugging the issue:
|
||||
- screenshots
|
||||
- a zip with a minimal repo to reproduce the issue
|
||||
- the Foam log in VsCode (see [instructions](https://github.com/foambubble/foam/blob/master/docs/features/foam-logging-in-vscode.md))
|
||||
97
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
97
.github/ISSUE_TEMPLATE/bug_report.yml
vendored
Normal file
@@ -0,0 +1,97 @@
|
||||
name: 'Bug report'
|
||||
description: Create a report to help us improve
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
Thank you for reporting an issue :pray:.
|
||||
|
||||
This issue tracker is for reporting bugs found in `foam` (https://github.com/foambubble).
|
||||
If you have a question about how to achieve something and are struggling, please post a question
|
||||
inside of either of the following places:
|
||||
- Foam's Discussions tab: https://github.com/foambubble/foam/discussions
|
||||
- Foam's Discord channel: https://foambubble.github.io/join-discord/g
|
||||
|
||||
|
||||
Before submitting a new bug/issue, please check the links below to see if there is a solution or question posted there already:
|
||||
- Foam's Issues tab: https://github.com/foambubble/foam/pulls?q=is%3Apr+is%3Aopen+sort%3Aupdated-desc
|
||||
- Foam's closed issues tab: https://github.com/foambubble/foam/issues?q=is%3Aissue+sort%3Aupdated-desc+is%3Aclosed
|
||||
- Foam's Discussions tab: https://github.com/foambubble/foam/discussions
|
||||
|
||||
The more information you fill in, the better the community can help you.
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Describe the bug
|
||||
description: Provide a clear and concise description of the challenge you are running into.
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: reproducible_example
|
||||
attributes:
|
||||
label: Small Reproducible Example
|
||||
description: |
|
||||
Note:
|
||||
- Your bug may get fixed much faster if there is a way we can run your example or code.
|
||||
- To create a shareable example, consider cloning the following Foam Github template: https://github.com/foambubble/foam-template
|
||||
- Please read these tips for providing a minimal example: https://stackoverflow.com/help/mcve.
|
||||
placeholder: |
|
||||
e.g. Link to your github repository containing a small reproducible example that the team can run.
|
||||
validations:
|
||||
required: false
|
||||
- type: textarea
|
||||
id: steps
|
||||
attributes:
|
||||
label: Steps to Reproduce the Bug or Issue
|
||||
description: Describe the steps we have to take to reproduce the behavior.
|
||||
placeholder: |
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. Scroll down to '....'
|
||||
4. See error
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: expected
|
||||
attributes:
|
||||
label: Expected behavior
|
||||
description: Provide a clear and concise description of what you expected to happen.
|
||||
placeholder: |
|
||||
As a user, I expected ___ behavior but I am seeing ___
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: screenshots_or_videos
|
||||
attributes:
|
||||
label: Screenshots or Videos
|
||||
description: |
|
||||
If applicable, add screenshots or a video to help explain your problem.
|
||||
For more information on the supported file image/file types and the file size limits, please refer
|
||||
to the following link: https://docs.github.com/en/github/writing-on-github/working-with-advanced-formatting/attaching-files
|
||||
placeholder: |
|
||||
You can drag your video or image files inside of this editor ↓
|
||||
- type: input
|
||||
id: os
|
||||
attributes:
|
||||
label: Operating System Version
|
||||
description: What operating system are you using?
|
||||
placeholder: |
|
||||
- OS: [e.g. macOS, Windows, Linux]
|
||||
validations:
|
||||
required: true
|
||||
- type: input
|
||||
id: vscode_version
|
||||
attributes:
|
||||
label: Visual Studio Code Version
|
||||
description: |
|
||||
What version of Visual Studio Code are you using?
|
||||
How to find Visual Studio Code Version: https://code.visualstudio.com/docs/supporting/FAQ#_how-do-i-find-the-version
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: additional
|
||||
attributes:
|
||||
label: Additional context
|
||||
description: |
|
||||
Add any other context about the problem here.
|
||||
The Foam log output for VSCode can be found here: https://github.com/foambubble/foam/blob/master/docs/features/foam-logging-in-vscode.md
|
||||
6
.github/ISSUE_TEMPLATE/feature.md
vendored
6
.github/ISSUE_TEMPLATE/feature.md
vendored
@@ -1,6 +0,0 @@
|
||||
---
|
||||
name: Feature request
|
||||
about: Suggest an idea to help us be foamier
|
||||
---
|
||||
|
||||
<!-- Describe the feature you'd like. -->
|
||||
42
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
42
.github/ISSUE_TEMPLATE/feature_request.yml
vendored
Normal file
@@ -0,0 +1,42 @@
|
||||
name: Feature request
|
||||
description: Suggest an idea for the `Foam` project
|
||||
body:
|
||||
- type: markdown
|
||||
attributes:
|
||||
value: |
|
||||
This issue form is for requesting features only!
|
||||
If you want to report a bug, please use the [bug report](https://github.com/foambubble/foam/issues/new?assignees=&labels=&template=bug_report.yml) form.
|
||||
- type: textarea
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: Is your feature request related to a problem? Please describe.
|
||||
description: A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
|
||||
- type: textarea
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: Describe the solution you'd like
|
||||
description: A clear and concise description of what you want to happen.
|
||||
placeholder: |
|
||||
As a user, I expected ___ behavior but ___ ...
|
||||
|
||||
Ideal Steps I would like to see:
|
||||
1. Go to '...'
|
||||
2. Click on '....'
|
||||
3. ....
|
||||
- type: textarea
|
||||
validations:
|
||||
required: true
|
||||
attributes:
|
||||
label: Describe alternatives you've considered
|
||||
description: A clear and concise description of any alternative solutions or features you've considered.
|
||||
- type: textarea
|
||||
attributes:
|
||||
label: Screenshots or Videos
|
||||
description: |
|
||||
If applicable, add screenshots or a video to help explain your problem.
|
||||
For more information on the supported file image/file types and the file size limits, please refer
|
||||
to the following link: https://docs.github.com/en/github/writing-on-github/working-with-advanced-formatting/attaching-files
|
||||
placeholder: |
|
||||
You can drag your video or image files inside of this editor ↓
|
||||
4
.github/workflows/ci.yml
vendored
4
.github/workflows/ci.yml
vendored
@@ -12,6 +12,7 @@ jobs:
|
||||
lint:
|
||||
name: Lint
|
||||
runs-on: ubuntu-18.04
|
||||
timeout-minutes: 10
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Setup Node
|
||||
@@ -39,6 +40,7 @@ jobs:
|
||||
runs-on: ${{ matrix.os }}
|
||||
env:
|
||||
OS: ${{ matrix.os }}
|
||||
timeout-minutes: 15
|
||||
steps:
|
||||
- uses: actions/checkout@v1
|
||||
- name: Setup Node
|
||||
@@ -60,4 +62,4 @@ jobs:
|
||||
- name: Run Tests
|
||||
uses: GabrielBB/xvfb-action@v1.4
|
||||
with:
|
||||
run: yarn test
|
||||
run: yarn test --stream
|
||||
|
||||
1
.gitignore
vendored
1
.gitignore
vendored
@@ -9,3 +9,4 @@ dist
|
||||
docs/_site
|
||||
docs/.sass-cache
|
||||
docs/.jekyll-metadata
|
||||
.test-workspace
|
||||
|
||||
38
.vscode/launch.json
vendored
38
.vscode/launch.json
vendored
@@ -6,15 +6,20 @@
|
||||
"version": "0.2.0",
|
||||
"configurations": [
|
||||
{
|
||||
"type": "node",
|
||||
"name": "Debug Jest Tests",
|
||||
"type": "extensionHost",
|
||||
"request": "launch",
|
||||
"runtimeArgs": ["workspace", "foam-vscode", "run", "test"], // ${yarnWorkspaceName} is what we're missing
|
||||
"args": ["--runInBand"],
|
||||
"runtimeExecutable": "yarn",
|
||||
"console": "integratedTerminal",
|
||||
"internalConsoleOptions": "neverOpen",
|
||||
"disableOptimisticBPs": true
|
||||
"args": [
|
||||
"${workspaceFolder}/packages/foam-vscode/.test-workspace",
|
||||
"--disable-extensions",
|
||||
"--disable-workspace-trust",
|
||||
"--extensionDevelopmentPath=${workspaceFolder}/packages/foam-vscode",
|
||||
"--extensionTestsPath=${workspaceFolder}/packages/foam-vscode/out/test/suite"
|
||||
],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/packages/foam-vscode/out/**/*.js"
|
||||
],
|
||||
"preLaunchTask": "${defaultBuildTask}"
|
||||
},
|
||||
{
|
||||
"name": "Run VSCode Extension",
|
||||
@@ -24,8 +29,25 @@
|
||||
"args": [
|
||||
"--extensionDevelopmentPath=${workspaceFolder}/packages/foam-vscode"
|
||||
],
|
||||
"outFiles": ["${workspaceFolder}/packages/foam-vscode/out/**/*.js"],
|
||||
"outFiles": [
|
||||
"${workspaceFolder}/packages/foam-vscode/out/**/*.js"
|
||||
],
|
||||
"preLaunchTask": "${defaultBuildTask}"
|
||||
},
|
||||
{
|
||||
"type": "node",
|
||||
"name": "vscode-jest-tests",
|
||||
"request": "launch",
|
||||
"console": "integratedTerminal",
|
||||
"internalConsoleOptions": "neverOpen",
|
||||
"disableOptimisticBPs": true,
|
||||
"cwd": "${workspaceFolder}/packages/foam-vscode",
|
||||
"runtimeExecutable": "yarn",
|
||||
"args": [
|
||||
"jest",
|
||||
"--runInBand",
|
||||
"--watchAll=false"
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
12
.vscode/settings.json
vendored
12
.vscode/settings.json
vendored
@@ -20,13 +20,13 @@
|
||||
"**/node_modules/**/*",
|
||||
"packages/**/*"
|
||||
],
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"prettier.requireConfig": true,
|
||||
"editor.formatOnSave": true,
|
||||
"editor.tabSize": 2,
|
||||
"jest.debugCodeLens.showWhenTestStateIn": ["fail", "unknown", "pass"],
|
||||
"editor.formatOnSave": true,
|
||||
"editor.formatOnSaveMode": "file",
|
||||
"editor.defaultFormatter": "esbenp.prettier-vscode",
|
||||
"jest.autoRun": "off",
|
||||
"jest.rootPath": "packages/foam-vscode",
|
||||
"jest.jestCommandLine": "yarn jest",
|
||||
"gitdoc.enabled": false,
|
||||
"jest.autoEnable": false,
|
||||
"jest.runAllTestsFirst": false,
|
||||
"search.mode": "reuseEditor"
|
||||
}
|
||||
|
||||
BIN
assets/screenshots/feature-link-sync.gif
Normal file
BIN
assets/screenshots/feature-link-sync.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 934 KiB |
27
docs/dev/releasing-foam.md
Normal file
27
docs/dev/releasing-foam.md
Normal file
@@ -0,0 +1,27 @@
|
||||
# Releasing Foam
|
||||
|
||||
1. Get to the latest code
|
||||
- `git checkout master && git fetch && git rebase`
|
||||
2. Sanity checks
|
||||
- `yarn reset`
|
||||
- `yarn test`
|
||||
3. Update change log
|
||||
- `./packages/foam-vscode/CHANGELOG.md`
|
||||
- `git add *`
|
||||
- `git commit -m"Preparation for next release"`
|
||||
4. Update version
|
||||
- `$ cd packages/foam-vscode`
|
||||
- `foam-vscode$ yarn lerna version <version>` (where `version` is `patch/minor/major`)
|
||||
- `cd ../..`
|
||||
5. Package extension
|
||||
- `$ yarn vscode:package-extension`
|
||||
6. Publish extension
|
||||
- `$ yarn vscode:publish-extension`
|
||||
7. Update the release notes in GitHub
|
||||
- in GitHub, top right, click on "releases"
|
||||
- select "tags" in top left
|
||||
- select the tag that was just released, click "edit" and copy release information from changelog
|
||||
- publish (no need to attach artifacts)
|
||||
8. Announce on Discord
|
||||
|
||||
Steps 1 to 6 should really be replaced by a GitHub action...
|
||||
@@ -1,6 +1,6 @@
|
||||
# Backlinking
|
||||
|
||||
When using [[wikilinks]], you can find all notes that link to a specific note in the [VS Code Markdown Notes](https://marketplace.visualstudio.com/items?itemName=kortina.vscode-markdown-notes) **Backlinks Explorer**
|
||||
When using [[wikilinks]], you can find all notes that link to a specific note in the **Backlinks Explorer**
|
||||
|
||||
- Run `Cmd` + `Shift` + `P` (`Ctrl` + `Shift` + `P` for Windows), type "backlinks" and run the **Explorer: Focus on Backlinks** view.
|
||||
- Keep this pane always visible to discover relationships between your thoughts
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
# Creating New Notes
|
||||
|
||||
- Write out a new `[[wikilink]]` and `Cmd` + `Click` to create a new file and enter it.
|
||||
- For keyboard navigation, use the 'Follow Definition' key `F12` (or [remap key binding](https://code.visualstudio.com/docs/getstarted/keybindings) to something more ergonomic)
|
||||
- For keyboard navigation, use the 'Follow Definition' key `F12` (or [remap the 'editor.action.revealDefinition' key binding](https://code.visualstudio.com/docs/getstarted/keybindings) to something more ergonomic)
|
||||
- `Cmd` + `Shift` + `P` (`Ctrl` + `Shift` + `P` for Windows), execute `Foam: Create New Note` and enter a **Title Case Name** to create `Title Case Name.md`
|
||||
- Add a keyboard binding to make creating new notes easier.
|
||||
- The [[note-templates]] used by this command can be customized.
|
||||
|
||||
@@ -45,10 +45,9 @@ In addition, you can also use variables provided by Foam:
|
||||
| -------------------- | --------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
|
||||
| `FOAM_SELECTED_TEXT` | Foam will fill it with selected text when creating a new note, if any text is selected. Selected text will be replaced with a wikilink to the new note. |
|
||||
| `FOAM_TITLE` | The title of the note. If used, Foam will prompt you to enter a title for the note. |
|
||||
| `FOAM_SLUG` | The sluggified title of the note (using the default github slug method). If used, Foam will prompt you to enter a title for the note unless `FOAM_TITLE` has already caused the prompt. |
|
||||
| `FOAM_DATE_*` | `FOAM_DATE_YEAR`, `FOAM_DATE_MONTH`, etc. Foam-specific versions of [VS Code's datetime snippet variables](https://code.visualstudio.com/docs/editor/userdefinedsnippets#_variables). Prefer these versions over VS Code's. |
|
||||
|
||||
**Note:** neither the defaulting feature (eg. `${variable:default}`) nor the format feature (eg. `${variable/(.*)/${1:/upcase}/}`) (available to other variables) are available for these Foam-provided variables. See [#693](https://github.com/foambubble/foam/issues/693).
|
||||
|
||||
### `FOAM_DATE_*` variables
|
||||
|
||||
Foam defines its own set of datetime variables that have a similar behaviour as [VS Code's datetime snippet variables](https://code.visualstudio.com/docs/editor/userdefinedsnippets#_variables).
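To make the behaviour of these template variables concrete, here is a small TypeScript sketch of how a template string could be expanded. This is an illustration only, not Foam's actual resolver: it handles only the `${VARIABLE}` form, uses a simplified slug rule as an approximation of the GitHub slug method, and covers just the variables named above.

```ts
// Illustrative only: expand a few Foam-style template variables in a string.
// The real extension resolves these through the VS Code snippet machinery.
function expandTemplate(template: string, title: string, date = new Date()): string {
  // Rough approximation of a GitHub-style slug: lowercase, strip punctuation, hyphenate spaces.
  const slug = title.toLowerCase().trim().replace(/[^\w\s-]/g, '').replace(/\s+/g, '-');
  const values: Record<string, string> = {
    FOAM_TITLE: title,
    FOAM_SLUG: slug,
    FOAM_DATE_YEAR: String(date.getFullYear()),
    FOAM_DATE_MONTH: String(date.getMonth() + 1).padStart(2, '0'),
  };
  return template.replace(/\$\{(\w+)\}/g, (whole, name) => values[name] ?? whole);
}

// expandTemplate('# ${FOAM_TITLE}\ncreated: ${FOAM_DATE_YEAR}-${FOAM_DATE_MONTH}', 'My New Note')
// => '# My New Note' plus a line with the current year and month filled in.
```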
|
||||
|
||||
@@ -10,7 +10,7 @@
|
||||
|
||||
- Ensure that you have all the [[recommended-extensions]] installed in Visual Studio Code
|
||||
- Reload Visual Studio Code by running `Cmd` + `Shift` + `P` (`Ctrl` + `Shift` + `P` for Windows), type "reload" and run the **Developer: Reload Window** command for the updated extensions to take effect
|
||||
- Check the formatting rules for links on [[foam-file-format]], [[wikilinks]] and [[link-formatting-and-autocompletion]]
|
||||
- Check the formatting rules for links on [[foam-file-format]] and [[wikilinks]]
|
||||
|
||||
## I don't want Foam enabled for all my workspaces
|
||||
Any extension you install in Visual Studio Code is enabled by default. Given the philosophy of Foam, it works out of the box without any upfront configuration. If you want to disable Foam for a specific workspace, or disable it by default and enable it only for specific workspaces, follow the best practices [documented by Visual Studio Code](https://code.visualstudio.com/docs/editor/extension-marketplace#_manage-extensions)
|
||||
|
||||
@@ -31,8 +31,6 @@ You can use **Foam** for organising your research, keeping re-discoverable notes
|
||||
|
||||
**Foam** is a tool that supports creating relationships between thoughts and information to help you think better.
|
||||
|
||||

|
||||
|
||||
Whether you want to build a [Second Brain](https://www.buildingasecondbrain.com/) or a [Zettelkasten](https://zettelkasten.de/posts/overview/), write a book, or just get better at long-term learning, **Foam** can help you organise your thoughts if you follow these simple rules:
|
||||
|
||||
1. Create a single **Foam** workspace for all your knowledge and research following the [Getting started](#getting-started) guide.
|
||||
@@ -218,6 +216,24 @@ If that sounds like something you're interested in, I'd love to have you along o
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center"><a href="https://github.com/AndreiD049"><img src="https://avatars.githubusercontent.com/u/52671223?v=4?s=60" width="60px;" alt=""/><br /><sub><b>AndreiD049</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=AndreiD049" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/iam-yan"><img src="https://avatars.githubusercontent.com/u/48427014?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Yan</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=iam-yan" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="https://WikiEducator.org/User:JimTittsler"><img src="https://avatars.githubusercontent.com/u/180326?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Jim Tittsler</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=jimt" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="http://malcolmmielle.wordpress.com/"><img src="https://avatars.githubusercontent.com/u/4457840?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Malcolm Mielle</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=MalcolmMielle" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="https://snippets.page/"><img src="https://avatars.githubusercontent.com/u/74916913?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Veesar</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=veesar" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="https://github.com/bentongxyz"><img src="https://avatars.githubusercontent.com/u/60358804?v=4?s=60" width="60px;" alt=""/><br /><sub><b>bentongxyz</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=bentongxyz" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://brianjdevries.com"><img src="https://avatars.githubusercontent.com/u/42778030?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Brian DeVries</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=techCarpenter" title="Code">💻</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center"><a href="http://Cliffordfajardo.com"><img src="https://avatars.githubusercontent.com/u/6743796?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Clifford Fajardo </b></sub></a><br /><a href="#tool-cliffordfajardo" title="Tools">🔧</a></td>
|
||||
<td align="center"><a href="http://cu-dev.ca"><img src="https://avatars.githubusercontent.com/u/6589365?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Chris Usick</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=chrisUsick" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://github.com/josephdecock"><img src="https://avatars.githubusercontent.com/u/1145533?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Joe DeCock</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=josephdecock" title="Code">💻</a></td>
|
||||
<td align="center"><a href="http://www.drewtyler.com"><img src="https://avatars.githubusercontent.com/u/5640816?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Drew Tyler</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=drewtyler" title="Documentation">📖</a></td>
|
||||
<td align="center"><a href="https://github.com/Lauviah0622"><img src="https://avatars.githubusercontent.com/u/43416399?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Lauviah0622</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=Lauviah0622" title="Code">💻</a></td>
|
||||
<td align="center"><a href="https://www.elastic.co/elastic-agent"><img src="https://avatars.githubusercontent.com/u/1813008?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Josh Dover</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=joshdover" title="Code">💻</a></td>
|
||||
<td align="center"><a href="http://phelm.co.uk"><img src="https://avatars.githubusercontent.com/u/4057948?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Phil Helm</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=phelma" title="Documentation">📖</a></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td align="center"><a href="https://github.com/lingyv-li"><img src="https://avatars.githubusercontent.com/u/8937944?v=4?s=60" width="60px;" alt=""/><br /><sub><b>Larry Li</b></sub></a><br /><a href="https://github.com/foambubble/foam/commits?author=lingyv-li" title="Code">💻</a></td>
|
||||
</tr>
|
||||
</table>
|
||||
|
||||
|
||||
@@ -6,7 +6,7 @@ Wikilinks can refer to any note or attachment in the repo: `[[note.md]]`, `[[doc
|
||||
|
||||
The usual wikilink syntax without extension refers to notes: `[[wikilink]]` and `[[wikilink.md]]` are equivalent.
|
||||
|
||||
The goal of wikilinks is to uniquily identify a file in a repo, no matter in which directory it lives.
|
||||
The goal of wikilinks is to uniquely identify a file in a repo, no matter in which directory it lives.
|
||||
|
||||
Sometimes in a repo you can have files with the same name in different directories.
|
||||
Foam allows you to identify those files using the minimum effort needed to disambiguate them.
|
||||
@@ -67,10 +67,6 @@ Basically we could say as a rule:
|
||||
|
||||
## Compatibility with other apps
|
||||
|
||||
Foam's identifiers are a super set of Obsidian's: all Obsidian links are supported by Foam, but Foam multi-part identifier (scenario 6) is only supported by Foam.
|
||||
|
||||
To improve compatibility this option should either be behind a configuration key, or it should be easily updated e.g. via the janitor.
|
||||
|
||||
| Scenario | Obsidian | Foam |
|
||||
| --------------------------- | ------------------------------- | ------------------------------- |
|
||||
| 1 `[[notes]]` | ✔ unique identifier in repo | ✔ unique identifier in repo |
|
||||
@@ -78,7 +74,7 @@ To improve compatibility this option should either be behind a configuration key
|
||||
| 3 `[[work/notes]]` | ✔ valid path from repo root | ✔ valid identifier in repo |
|
||||
| 4 `[[project/house/todo]]` | ✔ valid path from repo root | ✔ valid unique identifier |
|
||||
| 5 `[[/project/house/todo]]` | ✔ valid path from repo root | ✔ valid path from repo root |
|
||||
| 6 `[[house/todo]]` | ✘ incorrect path from repo root | ✔ valid unique identifier |
|
||||
| 6 `[[house/todo]]` | ✔ valid unique identifier | ✔ valid unique identifier |
|
||||
| 7 `[[todo]]` | ✘ ambiguous identifier | ✘ ambiguous identifier |
|
||||
| 8 `[[/house/todo]]` | ✘ incorrect path from repo root | ✘ incorrect path from repo root |
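The table is easier to reason about with the resolution rule written out: an identifier resolves to the notes whose workspace-relative path ends with the identifier's segments, and a leading slash forces a full path from the repo root. The sketch below illustrates that rule only; it is not Foam's actual resolver, and paths are shown without the `.md` extension.

```ts
// Illustrative sketch of the identifier rule from the table above.
function resolveIdentifier(identifier: string, paths: string[]): string[] {
  if (identifier.startsWith('/')) {
    // A leading slash means "path from the repo root" (scenario 5).
    const full = identifier.slice(1);
    return paths.filter(p => p === full);
  }
  const segmentCount = identifier.split('/').length;
  return paths.filter(p => p.split('/').slice(-segmentCount).join('/') === identifier);
}

const notes = ['project/house/todo', 'project/garden/todo', 'work/notes'];
resolveIdentifier('house/todo', notes); // ['project/house/todo']  -> unique identifier (scenario 6)
resolveIdentifier('todo', notes);       // two matches             -> ambiguous (scenario 7)
```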
|
||||
|
||||
|
||||
@@ -1,10 +1,10 @@
|
||||
# GitHub Pages
|
||||
|
||||
- In VSCode workspace settings set `"foam.edit.linkReferenceDefinitions": "withoutExtensions"`
|
||||
- Execute the “Foam: Run Janitor” command from the command palette.
|
||||
- [Turn **GitHub Pages** on in your repository settings](https://guides.github.com/features/pages/).
|
||||
- The default GitHub Pages template is called [Primer](https://github.com/pages-themes/primer). See Primer docs for how to customise html layouts and templates.
|
||||
- GitHub Pages is built on [Jekyll](https://jekyllrb.com/), so it supports things like permalinks, front matter metadata etc.
|
||||
1. In VSCode workspace settings set `"foam.edit.linkReferenceDefinitions": "withoutExtensions"`
|
||||
2. Execute the “Foam: Run Janitor” command from the command palette.
|
||||
3. [Turn **GitHub Pages** on in your repository settings](https://guides.github.com/features/pages/).
|
||||
- The default GitHub Pages template is called [Primer](https://github.com/pages-themes/primer). See Primer docs for how to customise html layouts and templates.
|
||||
- GitHub Pages is built on [Jekyll](https://jekyllrb.com/), so it supports things like permalinks, front matter metadata etc.
|
||||
|
||||
## How to publish locally
|
||||
|
||||
|
||||
@@ -2,13 +2,175 @@
|
||||
|
||||
You don't have to use GitHub to serve Foam pages. You can also use GitLab.
|
||||
|
||||
GitLab Pages can be kept private for a private repo, so that your notes stay private.
|
||||
|
||||
## Setup a project
|
||||
|
||||
### Generate the directory from GitHub
|
||||
|
||||
Generate a solution using the [Foam template].
|
||||
Generate a solution using the [Foam template](https://github.com/foambubble/foam-template).
|
||||
|
||||
Change the remote to GitLab, or copy all the files into a new GitLab repo.
|
||||
Change the remote to GitLab, or copy all the files into a new GitLab repo
|
||||
|
||||
## Publishing pages with Gatsby
|
||||
|
||||
### Setup the Gatsby config
|
||||
|
||||
Add a `gatsby-config.js` file where:
|
||||
|
||||
* `$REPO_NAME` corresponds to the name of your GitLab repo.
|
||||
* `$USER_NAME` corresponds to your GitLab username.
|
||||
|
||||
```js
|
||||
const path = require("path");
|
||||
const pathPrefix = `/$REPO_NAME`;
|
||||
|
||||
// Change me
|
||||
const siteMetadata = {
|
||||
title: "A title",
|
||||
shortName: "A short name",
|
||||
description: "",
|
||||
imageUrl: "/graph-visualisation.jpg",
|
||||
siteUrl: "https://$USER_NAME.gitlab.io",
|
||||
};
|
||||
module.exports = {
|
||||
siteMetadata,
|
||||
pathPrefix,
|
||||
flags: {
|
||||
DEV_SSR: true,
|
||||
},
|
||||
plugins: [
|
||||
`gatsby-plugin-sharp`,
|
||||
{
|
||||
resolve: "gatsby-theme-primer-wiki",
|
||||
options: {
|
||||
defaultColorMode: "night",
|
||||
icon: "./path_to/logo.png",
|
||||
sidebarComponents: ["tag", "category"],
|
||||
nav: [
|
||||
{
|
||||
title: "Github",
|
||||
url: "https://github.com/$USER_NAME/",
|
||||
},
|
||||
{
|
||||
title: "Gitlab",
|
||||
url: "https://gitlab.com/$USER_NAME/",
|
||||
},
|
||||
],
|
||||
editUrl:
|
||||
"https://gitlab.com/$USER_NAME/$REPO_NAME/tree/main/",
|
||||
},
|
||||
},
|
||||
{
|
||||
resolve: "gatsby-source-filesystem",
|
||||
options: {
|
||||
name: "content",
|
||||
path: `${__dirname}`,
|
||||
ignore: [`**/\.*/**/*`],
|
||||
},
|
||||
},
|
||||
|
||||
{
|
||||
resolve: "gatsby-plugin-manifest",
|
||||
options: {
|
||||
name: siteMetadata.title,
|
||||
short_name: siteMetadata.shortName,
|
||||
start_url: pathPrefix,
|
||||
background_color: `#f7f0eb`,
|
||||
display: `standalone`,
|
||||
icon: path.resolve(__dirname, "./path_to/logo.png"),
|
||||
},
|
||||
},
|
||||
{
|
||||
resolve: `gatsby-plugin-sitemap`,
|
||||
},
|
||||
{
|
||||
resolve: "gatsby-plugin-robots-txt",
|
||||
options: {
|
||||
host: siteMetadata.siteUrl,
|
||||
sitemap: `${siteMetadata.siteUrl}/sitemap/sitemap-index.xml`,
|
||||
policy: [{ userAgent: "*", allow: "/" }],
|
||||
},
|
||||
},
|
||||
],
|
||||
};
|
||||
```
|
||||
|
||||
And a `package.json` file containing:
|
||||
|
||||
```json
|
||||
{
|
||||
"private": true,
|
||||
"name": "wiki",
|
||||
"version": "1.0.0",
|
||||
"license": "MIT",
|
||||
"scripts": {
|
||||
"develop": "gatsby develop -H 0.0.0.0",
|
||||
"start": "gatsby develop -H 0.0.0.0",
|
||||
"build": "gatsby build",
|
||||
"clean": "gatsby clean",
|
||||
"serve": "gatsby serve",
|
||||
"test": "echo test"
|
||||
},
|
||||
"dependencies": {
|
||||
"@primer/react": "^34.1.0",
|
||||
"@primer/css": "^17.5.0",
|
||||
"foam-cli": "^0.11.0",
|
||||
"gatsby": "^3.12.0",
|
||||
"gatsby-plugin-manifest": "^3.12.0",
|
||||
"gatsby-plugin-robots-txt": "^1.6.9",
|
||||
"gatsby-plugin-sitemap": "^5.4.0",
|
||||
"gatsby-source-filesystem": "^3.12.0",
|
||||
"gatsby-theme-primer-wiki": "^1.14.5",
|
||||
"react": "^17.0.2",
|
||||
"react-dom": "^17.0.2"
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
The theme will be based on [gatsby-theme-primer-wiki](https://github.com/theowenyoung/gatsby-theme-primer-wiki).
|
||||
|
||||
To test the theme locally first run `yarn install` and then use `gatsby develop` to serve the website.
|
||||
See gatsby documentation for more details.
|
||||
|
||||
### Set-up the CI for deployment
|
||||
|
||||
Create a `.gitlab-ci.yml` file containing:
|
||||
|
||||
```yml
|
||||
# To contribute improvements to CI/CD templates, please follow the Development guide at:
|
||||
# https://docs.gitlab.com/ee/development/cicd/templates.html
|
||||
# This specific template is located at:
|
||||
# https://gitlab.com/gitlab-org/gitlab/-/blob/master/lib/gitlab/ci/templates/Pages/Gatsby.gitlab-ci.yml
|
||||
|
||||
image: node:latest
|
||||
|
||||
stages:
|
||||
- deploy
|
||||
|
||||
pages:
|
||||
stage: deploy
|
||||
# This folder is cached between builds
|
||||
# https://docs.gitlab.com/ee/ci/yaml/index.html#cache
|
||||
cache:
|
||||
paths:
|
||||
- node_modules/
|
||||
# Enables GitLab CI caching. Both .cache and public must be cached, otherwise builds will fail.
|
||||
- .cache/
|
||||
- public/
|
||||
script:
|
||||
- yarn install
|
||||
- ./node_modules/.bin/gatsby build --prefix-paths
|
||||
artifacts:
|
||||
paths:
|
||||
- public
|
||||
rules:
|
||||
- if: $CI_COMMIT_BRANCH == $CI_DEFAULT_BRANCH
|
||||
```
|
||||
|
||||
This pipeline will build and publish your website on every push to the default branch of your project.
|
||||
|
||||
## Publish with Jekyll
|
||||
|
||||
### Add a _config.yaml
|
||||
|
||||
@@ -47,14 +209,14 @@ gem "jekyll-optional-front-matter"
|
||||
|
||||
Commit the file and push it to gitlab.
|
||||
|
||||
## Setup CI/CD
|
||||
### Setup CI/CD
|
||||
|
||||
1. From the project home in GitLab click `Set up CI/CD`
|
||||
2. Choose `Jekyll` as your template from the template dropdown
|
||||
3. Click `commit`
|
||||
4. Now when you go to CI / CD > Pipelines, you should see the code running
|
||||
|
||||
## Troubleshooting
|
||||
### Troubleshooting
|
||||
|
||||
- *Could not locate Gemfile*: you didn't follow the steps above in [#Add a Gemlock file]
|
||||
- *Conversion error: Jekyll::Converters::Scss encountered an error while converting* You need to reference a theme.
|
||||
|
||||
@@ -11,8 +11,6 @@ We have two alternative #recipe for displaying diagrams in markdown:
|
||||
|
||||
You can use [Mermaid](https://marketplace.visualstudio.com/items?itemName=bierner.markdown-mermaid) plugin to draw and preview diagrams in your content.
|
||||
|
||||
⚠️ Be aware that Mermaid diagrams don't automatically get rendered in published Foams in [[publish-to-github-pages]], and would require you to eject to another static site generation approach that supports Mermaid plugins.
|
||||
|
||||
## Draw.io
|
||||
|
||||
[Draw.io](https://marketplace.visualstudio.com/items?itemName=hediet.vscode-drawio) extension allows you to create, edit, and display your diagrams without leaving Visual Studio Code. The `.drawio.svg` or `.drawio.png` files can be automatically embedded and displayed in published Foams, no export needed. FYI, the diagram below was made using Draw.io! You can check the diagram [here](../assets/images/diagram-drawio-demo.drawio.svg).
|
||||
|
||||
@@ -62,7 +62,7 @@ A #recipe is a guide, tip or strategy for getting the most out of your Foam work
|
||||
|
||||
- Quick commits with VS Code's built in [[git-integration]]
|
||||
- Store your workspace in an auto-synced GitHub repo with [[write-your-notes-in-github-gist]]
|
||||
- Sync your GitHub repo automatically [[todo]].
|
||||
- Sync your GitHub repo automatically using the [GitDoc VSCode Plugin](https://marketplace.visualstudio.com/items?itemName=vsls-contrib.gitdoc).
|
||||
|
||||
## Publish
|
||||
|
||||
|
||||
@@ -9,3 +9,6 @@ There are a couple of options when it comes to clipping web pages:
|
||||
|
||||
- [Markdown Clipper](https://github.com/deathau/markdownload)
|
||||
- A Firefox and Google Chrome extension to clip websites and download them into a readable markdown file.
|
||||
|
||||
- [Web Clipper](https://clipper.website/)
|
||||
- A Firefox, Chrome and Edge extension to clip websites and save them directly to the GitHub repository into a readable markdown file.
|
||||
|
||||
@@ -3,13 +3,16 @@
|
||||
Foam enables you to link pages together using `[[file-name]]` annotations (i.e. `[[MediaWiki]]`-style links).
|
||||
|
||||
- Type `[[` and start typing a file name for autocompletion.
|
||||
- See [[link-formatting-and-autocompletion]] for more information, and how to setup your link autocompletions to make this easier.
|
||||
- `Cmd` + `Click` ( `Ctrl` + `Click` on Windows ) on file name to navigate to file (`F12` also works while your cursor is on the file name)
|
||||
- `Cmd` + `Click` ( `Ctrl` + `Click` on Windows ) on non-existent file to create that file in the workspace.
|
||||
- The note creation makes use of the special [`new-note.md` note template](features/note-templates)
|
||||
|
||||
> If the `F12` shortcut feels unnatural you can rebind it at File > Preferences > Keyboard Shortcuts by searching for `editor.action.revealDefinition`.
|
||||
|
||||
## Support for sections
|
||||
|
||||
Foam supports autocompletion, navigation, embedding and diagnostics for note sections. Just use the standard wiki syntax of `[[resource#Section Title]]`.
|
||||
|
||||
## Markdown compatibility
|
||||
|
||||
The [Foam for VSCode](https://marketplace.visualstudio.com/items?itemName=foam.foam-vscode) extension automatically generates [[link-reference-definitions]] at the bottom of the file to make wikilinks compatible with Markdown tools and parsers.
|
||||
@@ -18,7 +21,6 @@ The [Foam for VSCode](https://marketplace.visualstudio.com/items?itemName=foam.f
|
||||
|
||||
- [[foam-file-format]]
|
||||
- [[note-templates]]
|
||||
- [[link-formatting-and-autocompletion]]
|
||||
- See [[link-reference-definition-improvements]] for further discussion on current problems and potential solutions.
|
||||
|
||||
[//begin]: # "Autogenerated link references for markdown compatibility"
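As a rough illustration of what the autogenerated definitions look like, the sketch below turns the wikilinks found in a note into Markdown link reference definitions between the begin/end markers. It is not Foam's actual generator; the `withoutExtensions` flag mirrors the `foam.edit.linkReferenceDefinitions` setting mentioned in the GitHub Pages recipe, and the exact marker text is taken from the example above.

```ts
// Illustrative sketch: derive link reference definitions from [[wikilinks]] in a note.
function generateLinkReferences(noteText: string, withoutExtensions: boolean): string {
  const pattern = /\[\[([^\]|#]+)/g; // capture the target, ignoring aliases and sections
  const targets = new Set<string>();
  let match: RegExpExecArray | null;
  while ((match = pattern.exec(noteText)) !== null) {
    targets.add(match[1].trim());
  }
  const definitions = [...targets].map(
    t => `[${t}]: ${withoutExtensions ? t : `${t}.md`} "${t}"`
  );
  return [
    '[//begin]: # "Autogenerated link references for markdown compatibility"',
    ...definitions,
    '[//end]: # "Autogenerated link references"',
  ].join('\n');
}

// generateLinkReferences('See [[note-templates]] and [[wikilinks#Support for sections]]', true)
// produces definitions for `note-templates` and `wikilinks`.
```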
|
||||
|
||||
@@ -4,5 +4,5 @@
|
||||
],
|
||||
"npmClient": "yarn",
|
||||
"useWorkspaces": true,
|
||||
"version": "0.16.1"
|
||||
"version": "0.19.0"
|
||||
}
|
||||
|
||||
0
packages/foam-vscode/.test-workspace/.keep
Normal file
0
packages/foam-vscode/.test-workspace/.keep
Normal file
@@ -8,3 +8,5 @@ vsc-extension-quickstart.md
|
||||
**/.eslintrc.json
|
||||
**/*.map
|
||||
**/*.ts
|
||||
assets/screenshots
|
||||
node_modules
|
||||
|
||||
@@ -4,6 +4,121 @@ All notable changes to the "foam-vscode" extension will be documented in this fi
|
||||
|
||||
Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file.
|
||||
|
||||
## [0.19.0] - 2022-07-07
|
||||
|
||||
New Features:
|
||||
- Support for attachments (PDF) and images (#1027)
|
||||
- Support for opening day notes for other days as well (#1026, thanks @alper)
|
||||
|
||||
## [0.18.5] - 2022-06-29
|
||||
|
||||
Fixes and Improvements:
|
||||
- Support for `alias` YAML property to define note alias (#1014 - thanks @lingyv-li)
|
||||
|
||||
Internal:
|
||||
- Improved extension bundling (#1015 - thanks @lingyv-li)
|
||||
- Use `vscode.workspace.fs` instead of `fs` (#1005 - thanks @joshdover)
|
||||
|
||||
## [0.18.4] - 2022-06-03
|
||||
|
||||
Fixes and Improvements:
|
||||
- move past `]]` when writing wikilinks (#998 - thanks @Lauviah0622)
|
||||
- highlight improvements (#890 - thanks @memeplex)
|
||||
|
||||
## [0.18.3] - 2022-04-17
|
||||
|
||||
Fixes and Improvements:
|
||||
- Better reporting when links fail to resolve
|
||||
- Failing link resolution during graph computation no longer fatal
|
||||
|
||||
## [0.18.2] - 2022-04-14
|
||||
|
||||
Fixes and Improvements:
|
||||
- Fixed parsing error on empty direct links (#980 - thanks @chrisUsick)
|
||||
- Improved rendering in preview of wikilinks that have link definitions (#979 - thanks @josephdecock)
|
||||
- Restored handling of section-only wikilinks (#981)
|
||||
|
||||
## [0.18.1] - 2022-04-13
|
||||
|
||||
Fixes and Improvements:
|
||||
- Fixed parsing error for direct links with square brackets in them (#977)
|
||||
- Improved markdown direct link resolution (#972)
|
||||
- Improved templates support for custom paths (#970)
|
||||
|
||||
## [0.18.0] - 2022-04-11
|
||||
|
||||
Features:
|
||||
- Link synchronization on file rename
|
||||
|
||||
Internal:
|
||||
- Changed graph computation on workspace change to simplify code
|
||||
|
||||
## [0.17.8] - 2022-04-01
|
||||
|
||||
Fixes and Improvements:
|
||||
- Do not add ignored files to Foam upon change (#480)
|
||||
- Restore full use of editor.action.openLink (#693)
|
||||
- Minor performance improvements
|
||||
|
||||
## [0.17.7] - 2022-03-29
|
||||
|
||||
Fixes and Improvements:
|
||||
- Include links with sections in backlinks (#895)
|
||||
- Improved navigation when document editor is already open
|
||||
|
||||
## [0.17.6] - 2022-03-03
|
||||
|
||||
Fixes and Improvements:
|
||||
- Don't fail on error when scanning workspace (#943 - thanks @develmusa)
|
||||
|
||||
## [0.17.5] - 2022-02-22
|
||||
|
||||
Fixes and Improvements:
|
||||
- Added FOAM_SLUG template variable (#865 - Thanks @techCarpenter)
|
||||
|
||||
## [0.17.4] - 2022-02-13
|
||||
|
||||
Fixes and Improvements:
|
||||
- Improvements to Foam variables in templates (#882 - thanks @movermeyer)
|
||||
- Foam variables can now be used just like any other VS Code variable, including in combination with placeholders and transformers
|
||||
|
||||
## [0.17.3] - 2022-01-14
|
||||
|
||||
Fixes and Improvements:
|
||||
- Fixed autocompletion with tags (#885 - thanks @memeplex)
|
||||
- Improved "Open Daily Note" to be usabled in tasks (#897 - thanks @MCluck90)
|
||||
|
||||
## [0.17.2] - 2021-12-22
|
||||
|
||||
Fixes and Improvements:
|
||||
- Improved support for wikilinks in titles (#878)
|
||||
- Use syntax injection for wikilinks (#876 - thanks @memeplex)
|
||||
- Fix when applying text edits in last line
|
||||
|
||||
Internal:
|
||||
- DX: Clean up of testing setup (#881 - thanks @memeplex)
|
||||
|
||||
## [0.17.1] - 2021-12-16
|
||||
|
||||
Fixes and Improvements:
|
||||
- Decorate markdown files only (#857)
|
||||
- Fix template placeholders issue (#859)
|
||||
- Improved replacement range for link completion
|
||||
|
||||
Internal:
|
||||
- Major URI/path handling refactoring (#858 - thanks @memeplex)
|
||||
|
||||
## [0.17.0] - 2021-12-08
|
||||
|
||||
Features:
|
||||
|
||||
- Added first class support for sections (#856)
|
||||
- Sections can be referred to in wikilinks
|
||||
- Sections can be embedded
|
||||
- Autocompletion for sections
|
||||
- Diagnostic for sections
|
||||
- Embed sections
|
||||
|
||||
## [0.16.1] - 2021-11-30
|
||||
|
||||
Fixes and Improvements:
|
||||
|
||||
@@ -27,6 +27,12 @@ Foam helps you create the connections between your notes, and your placeholders
|
||||
|
||||

|
||||
|
||||
### Sync links on file rename
|
||||
|
||||
Foam updates the links to renamed files, so your notes stay consistent.
|
||||
|
||||

|
||||
|
||||
### Unique identifiers across directories
|
||||
|
||||
Foam supports files with the same name in multiple directories.
|
||||
@@ -58,6 +64,11 @@ Embed the content from other notes.
|
||||
|
||||

|
||||
|
||||
### Support for sections
|
||||
|
||||
Foam supports autocompletion, navigation, embedding and diagnostics for note sections.
|
||||
Just use the standard wiki syntax of `[[resource#Section Title]]`.
|
||||
|
||||
### Link Alias
|
||||
|
||||
Foam supports link aliasing, so you can have a `[[wikilink]]`, or a `[[wikilink|alias]]`.
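A small sketch of how the alias form splits into a target and display text; this is illustrative only, not the extension's parser:

```ts
// Illustrative: split a wikilink body into its target and optional alias.
function parseWikilink(raw: string): { target: string; alias?: string } {
  const body = raw.replace(/^\[\[/, '').replace(/\]\]$/, '');
  const [target, alias] = body.split('|', 2);
  return alias !== undefined ? { target, alias } : { target };
}

parseWikilink('[[wikilink]]');       // { target: 'wikilink' }
parseWikilink('[[wikilink|alias]]'); // { target: 'wikilink', alias: 'alias' }
```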
|
||||
|
||||
BIN
packages/foam-vscode/assets/screenshots/feature-link-sync.gif
Normal file
BIN
packages/foam-vscode/assets/screenshots/feature-link-sync.gif
Normal file
Binary file not shown.
|
After Width: | Height: | Size: 934 KiB |
@@ -1,7 +0,0 @@
|
||||
module.exports = {
|
||||
presets: [
|
||||
["@babel/preset-env", { targets: { node: "current" } }],
|
||||
"@babel/preset-typescript"
|
||||
],
|
||||
plugins: [["@babel/plugin-transform-runtime", { helpers: false }]]
|
||||
};
|
||||
@@ -82,7 +82,7 @@ module.exports = {
|
||||
// moduleNameMapper: {},
|
||||
|
||||
// An array of regexp pattern strings, matched against all module paths before considered 'visible' to the module loader
|
||||
// modulePathIgnorePatterns: [],
|
||||
modulePathIgnorePatterns: ['.vscode-test'],
|
||||
|
||||
// Activates notifications for test results
|
||||
// notify: false,
|
||||
@@ -91,7 +91,7 @@ module.exports = {
|
||||
// notifyMode: "failure-change",
|
||||
|
||||
// A preset that is used as a base for Jest's configuration
|
||||
// preset: undefined,
|
||||
preset: 'ts-jest',
|
||||
|
||||
// Run tests from one or more projects
|
||||
// projects: undefined,
|
||||
@@ -126,13 +126,13 @@ module.exports = {
|
||||
// setupFiles: [],
|
||||
|
||||
// A list of paths to modules that run some code to configure or set up the testing framework before each test
|
||||
// setupFilesAfterEnv: [],
|
||||
setupFilesAfterEnv: ['jest-extended'],
|
||||
|
||||
// A list of paths to snapshot serializer modules Jest should use for snapshot testing
|
||||
// snapshotSerializers: [],
|
||||
|
||||
// The test environment that will be used for testing
|
||||
testEnvironment: "node"
|
||||
testEnvironment: 'node',
|
||||
|
||||
// Options that will be passed to the testEnvironment
|
||||
// testEnvironmentOptions: {},
|
||||
@@ -152,7 +152,10 @@ module.exports = {
|
||||
// ],
|
||||
|
||||
// The regexp pattern or array of patterns that Jest uses to detect test files
|
||||
// testRegex: [],
|
||||
// This is overridden in every runCLI invocation but it's here as the default
|
||||
// for vscode-jest. We only want unit tests in the test explorer (sidebar),
|
||||
// since spec tests require the entire extension host to be launched beforehand.
|
||||
testRegex: ['\\.test\\.ts$'],
|
||||
|
||||
// This option allows the use of a custom results processor
|
||||
// testResultsProcessor: undefined,
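In other words, only files ending in `.test.ts` are picked up by the `testRegex` above and run under plain Jest; anything that needs the `vscode` API belongs in a `.spec.ts` file, which runs inside the extension host instead. A minimal example of the former kind of test (the function under test is defined inline and purely illustrative):

```ts
// A file named `*.test.ts` matches the pattern above and runs under plain Jest,
// without launching the VS Code extension host.
const toSlug = (title: string): string =>
  title.toLowerCase().trim().replace(/\s+/g, '-');

describe('toSlug (illustrative unit test)', () => {
  it('lowercases and hyphenates a title', () => {
    expect(toSlug('Title Case Name')).toBe('title-case-name');
  });
});
```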
|
||||
|
||||
@@ -8,7 +8,7 @@
|
||||
"type": "git"
|
||||
},
|
||||
"homepage": "https://github.com/foambubble/foam",
|
||||
"version": "0.16.1",
|
||||
"version": "0.19.0",
|
||||
"license": "MIT",
|
||||
"publisher": "foam",
|
||||
"engines": {
|
||||
@@ -37,30 +37,50 @@
|
||||
"markdown.previewStyles": [
|
||||
"./static/preview/style.css"
|
||||
],
|
||||
"grammars": [
|
||||
{
|
||||
"path": "./syntaxes/injection.json",
|
||||
"scopeName": "foam.wikilink.injection",
|
||||
"injectTo": [
|
||||
"text.html.markdown"
|
||||
]
|
||||
}
|
||||
],
|
||||
"colors": [
|
||||
{
|
||||
"id": "foam.placeholder",
|
||||
"description": "Color of foam placeholders.",
|
||||
"defaults": {
|
||||
"dark": "editorWarning.foreground",
|
||||
"light": "editorWarning.foreground",
|
||||
"highContrast": "editorWarning.foreground"
|
||||
}
|
||||
}
|
||||
],
|
||||
"views": {
|
||||
"explorer": [
|
||||
{
|
||||
"id": "foam-vscode.backlinks",
|
||||
"name": "Backlinks",
|
||||
"icon": "media/dep.svg",
|
||||
"icon": "$(references)",
|
||||
"contextualTitle": "Backlinks"
|
||||
},
|
||||
{
|
||||
"id": "foam-vscode.tags-explorer",
|
||||
"name": "Tag Explorer",
|
||||
"icon": "media/dep.svg",
|
||||
"icon": "$(tag)",
|
||||
"contextualTitle": "Tags Explorer"
|
||||
},
|
||||
{
|
||||
"id": "foam-vscode.orphans",
|
||||
"name": "Orphans",
|
||||
"icon": "media/dep.svg",
|
||||
"icon": "$(debug-gripper)",
|
||||
"contextualTitle": "Orphans"
|
||||
},
|
||||
{
|
||||
"id": "foam-vscode.placeholders",
|
||||
"name": "Placeholders",
|
||||
"icon": "media/dep.svg",
|
||||
"icon": "$(debug-disconnect)",
|
||||
"contextualTitle": "Placeholders"
|
||||
}
|
||||
]
|
||||
@@ -126,6 +146,10 @@
|
||||
{
|
||||
"command": "foam-vscode.open-resource",
|
||||
"when": "false"
|
||||
},
|
||||
{
|
||||
"command": "foam-vscode.completion-move-cursor",
|
||||
"when": "false"
}
]
},
@@ -144,6 +168,10 @@
},
{
"command": "foam-vscode.open-daily-note",
"title": "Foam: Open Today's Note"
},
{
"command": "foam-vscode.open-daily-note-for-date",
"title": "Foam: Open Daily Note"
},
{
@@ -193,6 +221,10 @@
{
"command": "foam-vscode.create-new-template",
"title": "Foam: Create New Template"
},
{
"command": "foam-vscode.completion-move-cursor",
"title": "Foam: Move cursor after completion"
}
],
"configuration": {
@@ -235,13 +267,13 @@
"Disable wikilink definitions generation"
]
},
"foam.links.hover.enable": {
"description": "Enable displaying note content on hover links",
"foam.links.sync.enable": {
"description": "Enable synching links when moving/renaming notes",
"type": "boolean",
"default": true
},
"foam.decorations.links.enable": {
"description": "Enable decorations for links",
"foam.links.hover.enable": {
"description": "Enable displaying note content on hover links",
"type": "boolean",
"default": true
},
@@ -333,6 +365,11 @@
],
"description": "Whether or not to navigate to the target daily note when a daily note snippet is selected."
},
"foam.preview.embedNoteInContainer": {
"type": "boolean",
"default": true,
"description": "Wrap embedded notes in a container when displayed in preview panel"
},
"foam.graph.titleMaxLength": {
"type": "number",
"default": 24,
@@ -349,6 +386,10 @@
{
"command": "foam-vscode.open-daily-note",
"key": "alt+d"
},
{
"command": "foam-vscode.open-daily-note-for-date",
"key": "alt+h"
}
]
},
@@ -356,28 +397,24 @@
"build": "tsc -p ./",
"pretest": "yarn build",
"test": "node ./out/test/run-tests.js",
"pretest:unit": "yarn build",
"test:unit": "node ./out/test/run-tests.js --unit",
"pretest:e2e": "yarn build",
"test:e2e": "node ./out/test/run-tests.js --e2e",
"lint": "tsdx lint src",
"clean": "rimraf out",
"watch": "tsc --build ./tsconfig.json --watch",
"vscode:start-debugging": "yarn clean && yarn watch",
"vscode:prepublish": "yarn npm-install && yarn run build",
"npm-install": "rimraf node_modules && npm i",
"npm-cleanup": "rimraf package-lock.json node_modules && yarn",
"package-extension": "npx vsce package && yarn npm-cleanup",
"esbuild-base": "esbuild ./src/extension.ts --bundle --outfile=out/extension.js --external:vscode --format=cjs --platform=node",
"vscode:prepublish": "yarn run esbuild-base -- --minify",
"package-extension": "npx vsce package --yarn",
"install-extension": "code --install-extension ./foam-vscode-$npm_package_version.vsix",
"publish-extension-openvsx": "npx ovsx publish foam-vscode-$npm_package_version.vsix -p $OPENVSX_TOKEN",
"publish-extension-vscode": "npx vsce publish --packagePath foam-vscode-$npm_package_version.vsix",
"publish-extension": "yarn publish-extension-vscode && yarn publish-extension-openvsx && yarn npm-cleanup"
},
"devDependencies": {
"@babel/core": "^7.11.0",
"@babel/plugin-transform-runtime": "^7.10.4",
"@babel/preset-env": "^7.11.0",
"@babel/preset-typescript": "^7.10.4",
"@types/dateformat": "^3.0.1",
"@types/github-slugger": "^1.3.0",
"@types/glob": "^7.1.1",
"@types/lodash": "^4.14.157",
"@types/markdown-it": "^12.0.1",
@@ -388,12 +425,13 @@
"@types/vscode": "^1.47.1",
"@typescript-eslint/eslint-plugin": "^2.30.0",
"@typescript-eslint/parser": "^2.30.0",
"babel-jest": "^26.2.2",
"esbuild": "^0.14.45",
"eslint": "^6.8.0",
"eslint-import-resolver-typescript": "^2.5.0",
"eslint-plugin-import": "^2.24.2",
"eslint-plugin-jest": "^25.3.0",
"husky": "^4.2.5",
"jest": "^26.2.2",
"jest-environment-vscode": "^1.0.0",
"jest-extended": "^0.11.5",
"markdown-it": "^12.0.4",
"rimraf": "^3.0.2",
@@ -401,13 +439,14 @@
"tsdx": "^0.13.2",
"tslib": "^2.0.0",
"typescript": "^3.9.5",
"vscode-test": "^1.3.0"
"vscode-test": "^1.3.0",
"wait-for-expect": "^3.0.2"
},
"dependencies": {
"dateformat": "^3.0.3",
"detect-newline": "^3.1.0",
"fast-array-diff": "^1.0.1",
"github-slugger": "^1.3.0",
"github-slugger": "^1.4.0",
"glob": "^7.1.6",
"gray-matter": "^4.0.2",
"lodash": "^4.17.21",
811  packages/foam-vscode/src/core/common/snippetParser.test.ts  Normal file
@@ -0,0 +1,811 @@
/*---------------------------------------------------------------------------------------------
|
||||
* Originally taken from https://github.com/microsoft/vscode/blob/d31496c866683bdbccfc85bc11a3107d6c789b52/src/vs/editor/contrib/snippet/test/snippetParser.test.ts
|
||||
* Here was the license:
|
||||
*
|
||||
* MIT License
|
||||
*
|
||||
* Copyright (c) 2015 - present Microsoft Corporation
|
||||
*
|
||||
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
* of this software and associated documentation files (the "Software"), to deal
|
||||
* in the Software without restriction, including without limitation the rights
|
||||
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
* copies of the Software, and to permit persons to whom the Software is
|
||||
* furnished to do so, subject to the following conditions:
|
||||
*
|
||||
* The above copyright notice and this permission notice shall be included in all
|
||||
* copies or substantial portions of the Software.
|
||||
*
|
||||
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
* SOFTWARE.
|
||||
*
|
||||
*--------------------------------------------------------------------------------------------*/
|
||||
|
||||
import * as assert from 'assert';
|
||||
import { Choice, FormatString, Marker, Placeholder, Scanner, SnippetParser, Text, TextmateSnippet, TokenType, Transform, Variable } from './snippetParser';
|
||||
|
||||
describe('SnippetParser', () => {
|
||||
|
||||
test('Scanner', () => {
|
||||
|
||||
const scanner = new Scanner();
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('abc');
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('{{abc}}');
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyOpen);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyOpen);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyClose);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyClose);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('abc() ');
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Format);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('abc 123');
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Format);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Int);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('$foo');
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dollar);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('$foo_bar');
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dollar);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('$foo-bar');
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dollar);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dash);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('${foo}');
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dollar);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyOpen);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyClose);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('${1223:foo}');
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dollar);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyOpen);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Int);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Colon);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyClose);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
|
||||
scanner.text('\\${}');
|
||||
assert.strictEqual(scanner.next().type, TokenType.Backslash);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dollar);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyOpen);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyClose);
|
||||
|
||||
scanner.text('${foo/regex/format/option}');
|
||||
assert.strictEqual(scanner.next().type, TokenType.Dollar);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyOpen);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Forwardslash);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Forwardslash);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.Forwardslash);
|
||||
assert.strictEqual(scanner.next().type, TokenType.VariableName);
|
||||
assert.strictEqual(scanner.next().type, TokenType.CurlyClose);
|
||||
assert.strictEqual(scanner.next().type, TokenType.EOF);
|
||||
});
|
||||
|
||||
function assertText(value: string, expected: string) {
|
||||
const p = new SnippetParser();
|
||||
const actual = p.text(value);
|
||||
assert.strictEqual(actual, expected);
|
||||
}
|
||||
|
||||
function assertMarker(input: TextmateSnippet | Marker[] | string, ...ctors: Function[]) {
|
||||
let marker: Marker[];
|
||||
if (input instanceof TextmateSnippet) {
|
||||
marker = input.children;
|
||||
} else if (typeof input === 'string') {
|
||||
const p = new SnippetParser();
|
||||
marker = p.parse(input).children;
|
||||
} else {
|
||||
marker = input;
|
||||
}
|
||||
while (marker.length > 0) {
|
||||
let m = marker.pop();
|
||||
let ctor = ctors.pop()!;
|
||||
assert.ok(m instanceof ctor);
|
||||
}
|
||||
assert.strictEqual(marker.length, ctors.length);
|
||||
assert.strictEqual(marker.length, 0);
|
||||
}
|
||||
|
||||
function assertTextAndMarker(value: string, escaped: string, ...ctors: Function[]) {
|
||||
assertText(value, escaped);
|
||||
assertMarker(value, ...ctors);
|
||||
}
|
||||
|
||||
function assertEscaped(value: string, expected: string) {
|
||||
const actual = SnippetParser.escape(value);
|
||||
assert.strictEqual(actual, expected);
|
||||
}
|
||||
|
||||
test('Parser, escaped', function () {
|
||||
assertEscaped('foo$0', 'foo\\$0');
|
||||
assertEscaped('foo\\$0', 'foo\\\\\\$0');
|
||||
assertEscaped('f$1oo$0', 'f\\$1oo\\$0');
|
||||
assertEscaped('${1:foo}$0', '\\${1:foo\\}\\$0');
|
||||
assertEscaped('$', '\\$');
|
||||
});
|
||||
|
||||
test('Parser, text', () => {
|
||||
assertText('$', '$');
|
||||
assertText('\\\\$', '\\$');
|
||||
assertText('{', '{');
|
||||
assertText('\\}', '}');
|
||||
assertText('\\abc', '\\abc');
|
||||
assertText('foo${f:\\}}bar', 'foo}bar');
|
||||
assertText('\\{', '\\{');
|
||||
assertText('I need \\\\\\$', 'I need \\$');
|
||||
assertText('\\', '\\');
|
||||
assertText('\\{{', '\\{{');
|
||||
assertText('{{', '{{');
|
||||
assertText('{{dd', '{{dd');
|
||||
assertText('}}', '}}');
|
||||
assertText('ff}}', 'ff}}');
|
||||
|
||||
assertText('farboo', 'farboo');
|
||||
assertText('far{{}}boo', 'far{{}}boo');
|
||||
assertText('far{{123}}boo', 'far{{123}}boo');
|
||||
assertText('far\\{{123}}boo', 'far\\{{123}}boo');
|
||||
assertText('far{{id:bern}}boo', 'far{{id:bern}}boo');
|
||||
assertText('far{{id:bern {{basel}}}}boo', 'far{{id:bern {{basel}}}}boo');
|
||||
assertText('far{{id:bern {{id:basel}}}}boo', 'far{{id:bern {{id:basel}}}}boo');
|
||||
assertText('far{{id:bern {{id2:basel}}}}boo', 'far{{id:bern {{id2:basel}}}}boo');
|
||||
});
|
||||
|
||||
|
||||
test('Parser, TM text', () => {
|
||||
assertTextAndMarker('foo${1:bar}}', 'foobar}', Text, Placeholder, Text);
|
||||
assertTextAndMarker('foo${1:bar}${2:foo}}', 'foobarfoo}', Text, Placeholder, Placeholder, Text);
|
||||
|
||||
assertTextAndMarker('foo${1:bar\\}${2:foo}}', 'foobar}foo', Text, Placeholder);
|
||||
|
||||
let [, placeholder] = new SnippetParser().parse('foo${1:bar\\}${2:foo}}').children;
|
||||
let { children } = (<Placeholder>placeholder);
|
||||
|
||||
assert.strictEqual((<Placeholder>placeholder).index, 1);
|
||||
assert.ok(children[0] instanceof Text);
|
||||
assert.strictEqual(children[0].toString(), 'bar}');
|
||||
assert.ok(children[1] instanceof Placeholder);
|
||||
assert.strictEqual(children[1].toString(), 'foo');
|
||||
});
|
||||
|
||||
test('Parser, placeholder', () => {
|
||||
assertTextAndMarker('farboo', 'farboo', Text);
|
||||
assertTextAndMarker('far{{}}boo', 'far{{}}boo', Text);
|
||||
assertTextAndMarker('far{{123}}boo', 'far{{123}}boo', Text);
|
||||
assertTextAndMarker('far\\{{123}}boo', 'far\\{{123}}boo', Text);
|
||||
});
|
||||
|
||||
test('Parser, literal code', () => {
|
||||
assertTextAndMarker('far`123`boo', 'far`123`boo', Text);
|
||||
assertTextAndMarker('far\\`123\\`boo', 'far\\`123\\`boo', Text);
|
||||
});
|
||||
|
||||
test('Parser, variables/tabstop', () => {
|
||||
assertTextAndMarker('$far-boo', '-boo', Variable, Text);
|
||||
assertTextAndMarker('\\$far-boo', '$far-boo', Text);
|
||||
assertTextAndMarker('far$farboo', 'far', Text, Variable);
|
||||
assertTextAndMarker('far${farboo}', 'far', Text, Variable);
|
||||
assertTextAndMarker('$123', '', Placeholder);
|
||||
assertTextAndMarker('$farboo', '', Variable);
|
||||
assertTextAndMarker('$far12boo', '', Variable);
|
||||
assertTextAndMarker('000_${far}_000', '000__000', Text, Variable, Text);
|
||||
assertTextAndMarker('FFF_${TM_SELECTED_TEXT}_FFF$0', 'FFF__FFF', Text, Variable, Text, Placeholder);
|
||||
});
|
||||
|
||||
test('Parser, variables/placeholder with defaults', () => {
|
||||
assertTextAndMarker('${name:value}', 'value', Variable);
|
||||
assertTextAndMarker('${1:value}', 'value', Placeholder);
|
||||
assertTextAndMarker('${1:bar${2:foo}bar}', 'barfoobar', Placeholder);
|
||||
|
||||
assertTextAndMarker('${name:value', '${name:value', Text);
|
||||
assertTextAndMarker('${1:bar${2:foobar}', '${1:barfoobar', Text, Placeholder);
|
||||
});
|
||||
|
||||
test('Parser, variable transforms', function () {
|
||||
assertTextAndMarker('${foo///}', '', Variable);
|
||||
assertTextAndMarker('${foo/regex/format/gmi}', '', Variable);
|
||||
assertTextAndMarker('${foo/([A-Z][a-z])/format/}', '', Variable);
|
||||
|
||||
// invalid regex
|
||||
assertTextAndMarker('${foo/([A-Z][a-z])/format/GMI}', '${foo/([A-Z][a-z])/format/GMI}', Text);
|
||||
assertTextAndMarker('${foo/([A-Z][a-z])/format/funky}', '${foo/([A-Z][a-z])/format/funky}', Text);
|
||||
assertTextAndMarker('${foo/([A-Z][a-z]/format/}', '${foo/([A-Z][a-z]/format/}', Text);
|
||||
|
||||
// tricky regex
|
||||
assertTextAndMarker('${foo/m\\/atch/$1/i}', '', Variable);
|
||||
assertMarker('${foo/regex\/format/options}', Text);
|
||||
|
||||
// incomplete
|
||||
assertTextAndMarker('${foo///', '${foo///', Text);
|
||||
assertTextAndMarker('${foo/regex/format/options', '${foo/regex/format/options', Text);
|
||||
|
||||
// format string
|
||||
assertMarker('${foo/.*/${0:fooo}/i}', Variable);
|
||||
assertMarker('${foo/.*/${1}/i}', Variable);
|
||||
assertMarker('${foo/.*/$1/i}', Variable);
|
||||
assertMarker('${foo/.*/This-$1-encloses/i}', Variable);
|
||||
assertMarker('${foo/.*/complex${1:else}/i}', Variable);
|
||||
assertMarker('${foo/.*/complex${1:-else}/i}', Variable);
|
||||
assertMarker('${foo/.*/complex${1:+if}/i}', Variable);
|
||||
assertMarker('${foo/.*/complex${1:?if:else}/i}', Variable);
|
||||
assertMarker('${foo/.*/complex${1:/upcase}/i}', Variable);
|
||||
|
||||
});
|
||||
|
||||
test('Parser, placeholder transforms', function () {
|
||||
assertTextAndMarker('${1///}', '', Placeholder);
|
||||
assertTextAndMarker('${1/regex/format/gmi}', '', Placeholder);
|
||||
assertTextAndMarker('${1/([A-Z][a-z])/format/}', '', Placeholder);
|
||||
|
||||
// tricky regex
|
||||
assertTextAndMarker('${1/m\\/atch/$1/i}', '', Placeholder);
|
||||
assertMarker('${1/regex\/format/options}', Text);
|
||||
|
||||
// incomplete
|
||||
assertTextAndMarker('${1///', '${1///', Text);
|
||||
assertTextAndMarker('${1/regex/format/options', '${1/regex/format/options', Text);
|
||||
});
|
||||
|
||||
test('No way to escape forward slash in snippet regex #36715', function () {
|
||||
assertMarker('${TM_DIRECTORY/src\\//$1/}', Variable);
|
||||
});
|
||||
|
||||
test('No way to escape forward slash in snippet format section #37562', function () {
|
||||
assertMarker('${TM_SELECTED_TEXT/a/\\/$1/g}', Variable);
|
||||
assertMarker('${TM_SELECTED_TEXT/a/in\\/$1ner/g}', Variable);
|
||||
assertMarker('${TM_SELECTED_TEXT/a/end\\//g}', Variable);
|
||||
});
|
||||
|
||||
test('Parser, placeholder with choice', () => {
|
||||
|
||||
assertTextAndMarker('${1|one,two,three|}', 'one', Placeholder);
|
||||
assertTextAndMarker('${1|one|}', 'one', Placeholder);
|
||||
assertTextAndMarker('${1|one1,two2|}', 'one1', Placeholder);
|
||||
assertTextAndMarker('${1|one1\\,two2|}', 'one1,two2', Placeholder);
|
||||
assertTextAndMarker('${1|one1\\|two2|}', 'one1|two2', Placeholder);
|
||||
assertTextAndMarker('${1|one1\\atwo2|}', 'one1\\atwo2', Placeholder);
|
||||
assertTextAndMarker('${1|one,two,three,|}', '${1|one,two,three,|}', Text);
|
||||
assertTextAndMarker('${1|one,', '${1|one,', Text);
|
||||
|
||||
const p = new SnippetParser();
|
||||
const snippet = p.parse('${1|one,two,three|}');
|
||||
assertMarker(snippet, Placeholder);
|
||||
const expected = [Placeholder, Text, Text, Text];
|
||||
snippet.walk(marker => {
|
||||
assert.strictEqual(marker, expected.shift());
|
||||
return true;
|
||||
});
|
||||
});
|
||||
|
||||
test('Snippet choices: unable to escape comma and pipe, #31521', function () {
|
||||
assertTextAndMarker('console.log(${1|not\\, not, five, 5, 1 23|});', 'console.log(not, not);', Text, Placeholder, Text);
|
||||
});
|
||||
|
||||
test('Marker, toTextmateString()', function () {
|
||||
|
||||
function assertTextsnippetString(input: string, expected: string): void {
|
||||
const snippet = new SnippetParser().parse(input);
|
||||
const actual = snippet.toTextmateString();
|
||||
assert.strictEqual(actual, expected);
|
||||
}
|
||||
|
||||
assertTextsnippetString('$1', '$1');
|
||||
assertTextsnippetString('\\$1', '\\$1');
|
||||
assertTextsnippetString('console.log(${1|not\\, not, five, 5, 1 23|});', 'console.log(${1|not\\, not, five, 5, 1 23|});');
|
||||
assertTextsnippetString('console.log(${1|not\\, not, \\| five, 5, 1 23|});', 'console.log(${1|not\\, not, \\| five, 5, 1 23|});');
|
||||
assertTextsnippetString('this is text', 'this is text');
|
||||
assertTextsnippetString('this ${1:is ${2:nested with $var}}', 'this ${1:is ${2:nested with ${var}}}');
|
||||
assertTextsnippetString('this ${1:is ${2:nested with $var}}}', 'this ${1:is ${2:nested with ${var}}}\\}');
|
||||
});
|
||||
|
||||
test('Marker, toTextmateString() <-> identity', function () {
|
||||
|
||||
function assertIdent(input: string): void {
|
||||
// full loop: (1) parse input, (2) generate textmate string, (3) parse, (4) ensure both trees are equal
|
||||
const snippet = new SnippetParser().parse(input);
|
||||
const input2 = snippet.toTextmateString();
|
||||
const snippet2 = new SnippetParser().parse(input2);
|
||||
|
||||
function checkCheckChildren(marker1: Marker, marker2: Marker) {
|
||||
assert.ok(marker1 instanceof Object.getPrototypeOf(marker2).constructor);
|
||||
assert.ok(marker2 instanceof Object.getPrototypeOf(marker1).constructor);
|
||||
|
||||
assert.strictEqual(marker1.children.length, marker2.children.length);
|
||||
assert.strictEqual(marker1.toString(), marker2.toString());
|
||||
|
||||
for (let i = 0; i < marker1.children.length; i++) {
|
||||
checkCheckChildren(marker1.children[i], marker2.children[i]);
|
||||
}
|
||||
}
|
||||
|
||||
checkCheckChildren(snippet, snippet2);
|
||||
}
|
||||
|
||||
assertIdent('$1');
|
||||
assertIdent('\\$1');
|
||||
assertIdent('console.log(${1|not\\, not, five, 5, 1 23|});');
|
||||
assertIdent('console.log(${1|not\\, not, \\| five, 5, 1 23|});');
|
||||
assertIdent('this is text');
|
||||
assertIdent('this ${1:is ${2:nested with $var}}');
|
||||
assertIdent('this ${1:is ${2:nested with $var}}}');
|
||||
assertIdent('this ${1:is ${2:nested with $var}} and repeating $1');
|
||||
});
|
||||
|
||||
test('Parser, choise marker', () => {
|
||||
const { placeholders } = new SnippetParser().parse('${1|one,two,three|}');
|
||||
|
||||
assert.strictEqual(placeholders.length, 1);
|
||||
assert.ok(placeholders[0].choice instanceof Choice);
|
||||
assert.ok(placeholders[0].children[0] instanceof Choice);
|
||||
assert.strictEqual((<Choice>placeholders[0].children[0]).options.length, 3);
|
||||
|
||||
assertText('${1|one,two,three|}', 'one');
|
||||
assertText('\\${1|one,two,three|}', '${1|one,two,three|}');
|
||||
assertText('${1\\|one,two,three|}', '${1\\|one,two,three|}');
|
||||
assertText('${1||}', '${1||}');
|
||||
});
|
||||
|
||||
test('Backslash character escape in choice tabstop doesn\'t work #58494', function () {
|
||||
|
||||
const { placeholders } = new SnippetParser().parse('${1|\\,,},$,\\|,\\\\|}');
|
||||
assert.strictEqual(placeholders.length, 1);
|
||||
assert.ok(placeholders[0].choice instanceof Choice);
|
||||
});
|
||||
|
||||
test('Parser, only textmate', () => {
|
||||
const p = new SnippetParser();
|
||||
assertMarker(p.parse('far{{}}boo'), Text);
|
||||
assertMarker(p.parse('far{{123}}boo'), Text);
|
||||
assertMarker(p.parse('far\\{{123}}boo'), Text);
|
||||
|
||||
assertMarker(p.parse('far$0boo'), Text, Placeholder, Text);
|
||||
assertMarker(p.parse('far${123}boo'), Text, Placeholder, Text);
|
||||
assertMarker(p.parse('far\\${123}boo'), Text);
|
||||
});
|
||||
|
||||
test('Parser, real world', () => {
|
||||
let marker = new SnippetParser().parse('console.warn(${1: $TM_SELECTED_TEXT })').children;
|
||||
|
||||
assert.strictEqual(marker[0].toString(), 'console.warn(');
|
||||
assert.ok(marker[1] instanceof Placeholder);
|
||||
assert.strictEqual(marker[2].toString(), ')');
|
||||
|
||||
const placeholder = <Placeholder>marker[1];
|
||||
assert.strictEqual(placeholder.index, 1);
|
||||
assert.strictEqual(placeholder.children.length, 3);
|
||||
assert.ok(placeholder.children[0] instanceof Text);
|
||||
assert.ok(placeholder.children[1] instanceof Variable);
|
||||
assert.ok(placeholder.children[2] instanceof Text);
|
||||
assert.strictEqual(placeholder.children[0].toString(), ' ');
|
||||
assert.strictEqual(placeholder.children[1].toString(), '');
|
||||
assert.strictEqual(placeholder.children[2].toString(), ' ');
|
||||
|
||||
const nestedVariable = <Variable>placeholder.children[1];
|
||||
assert.strictEqual(nestedVariable.name, 'TM_SELECTED_TEXT');
|
||||
assert.strictEqual(nestedVariable.children.length, 0);
|
||||
|
||||
marker = new SnippetParser().parse('$TM_SELECTED_TEXT').children;
|
||||
assert.strictEqual(marker.length, 1);
|
||||
assert.ok(marker[0] instanceof Variable);
|
||||
});
|
||||
|
||||
test('Parser, transform example', () => {
|
||||
let { children } = new SnippetParser().parse('${1:name} : ${2:type}${3/\\s:=(.*)/${1:+ :=}${1}/};\n$0');
|
||||
|
||||
//${1:name}
|
||||
assert.ok(children[0] instanceof Placeholder);
|
||||
assert.strictEqual(children[0].children.length, 1);
|
||||
assert.strictEqual(children[0].children[0].toString(), 'name');
|
||||
assert.strictEqual((<Placeholder>children[0]).transform, undefined);
|
||||
|
||||
// :
|
||||
assert.ok(children[1] instanceof Text);
|
||||
assert.strictEqual(children[1].toString(), ' : ');
|
||||
|
||||
//${2:type}
|
||||
assert.ok(children[2] instanceof Placeholder);
|
||||
assert.strictEqual(children[2].children.length, 1);
|
||||
assert.strictEqual(children[2].children[0].toString(), 'type');
|
||||
|
||||
//${3/\\s:=(.*)/${1:+ :=}${1}/}
|
||||
assert.ok(children[3] instanceof Placeholder);
|
||||
assert.strictEqual(children[3].children.length, 0);
|
||||
assert.notStrictEqual((<Placeholder>children[3]).transform, undefined);
|
||||
let transform = (<Placeholder>children[3]).transform!;
|
||||
assert.deepStrictEqual(transform.regexp.source, /\s:=(.*)/.source);
|
||||
assert.strictEqual(transform.children.length, 2);
|
||||
assert.ok(transform.children[0] instanceof FormatString);
|
||||
assert.strictEqual((<FormatString>transform.children[0]).index, 1);
|
||||
assert.strictEqual((<FormatString>transform.children[0]).ifValue, ' :=');
|
||||
assert.ok(transform.children[1] instanceof FormatString);
|
||||
assert.strictEqual((<FormatString>transform.children[1]).index, 1);
|
||||
assert.ok(children[4] instanceof Text);
|
||||
assert.strictEqual(children[4].toString(), ';\n');
|
||||
|
||||
});
|
||||
|
||||
// TODO @jrieken making this strictEqul causes circular json conversion errors
|
||||
test('Parser, default placeholder values', () => {
|
||||
|
||||
assertMarker('errorContext: `${1:err}`, error: $1', Text, Placeholder, Text, Placeholder);
|
||||
|
||||
const [, p1, , p2] = new SnippetParser().parse('errorContext: `${1:err}`, error:$1').children;
|
||||
|
||||
assert.strictEqual((<Placeholder>p1).index, 1);
|
||||
assert.strictEqual((<Placeholder>p1).children.length, 1);
|
||||
assert.strictEqual((<Text>(<Placeholder>p1).children[0]).toString(), 'err');
|
||||
|
||||
assert.strictEqual((<Placeholder>p2).index, 1);
|
||||
assert.strictEqual((<Placeholder>p2).children.length, 1);
|
||||
assert.strictEqual((<Text>(<Placeholder>p2).children[0]).toString(), 'err');
|
||||
});
|
||||
|
||||
// TODO @jrieken making this strictEqul causes circular json conversion errors
|
||||
test('Parser, default placeholder values and one transform', () => {
|
||||
|
||||
assertMarker('errorContext: `${1:err}`, error: ${1/err/ok/}', Text, Placeholder, Text, Placeholder);
|
||||
|
||||
const [, p3, , p4] = new SnippetParser().parse('errorContext: `${1:err}`, error:${1/err/ok/}').children;
|
||||
|
||||
assert.strictEqual((<Placeholder>p3).index, 1);
|
||||
assert.strictEqual((<Placeholder>p3).children.length, 1);
|
||||
assert.strictEqual((<Text>(<Placeholder>p3).children[0]).toString(), 'err');
|
||||
assert.strictEqual((<Placeholder>p3).transform, undefined);
|
||||
|
||||
assert.strictEqual((<Placeholder>p4).index, 1);
|
||||
assert.strictEqual((<Placeholder>p4).children.length, 1);
|
||||
assert.strictEqual((<Text>(<Placeholder>p4).children[0]).toString(), 'err');
|
||||
assert.notStrictEqual((<Placeholder>p4).transform, undefined);
|
||||
});
|
||||
|
||||
test('Repeated snippet placeholder should always inherit, #31040', function () {
|
||||
assertText('${1:foo}-abc-$1', 'foo-abc-foo');
|
||||
assertText('${1:foo}-abc-${1}', 'foo-abc-foo');
|
||||
assertText('${1:foo}-abc-${1:bar}', 'foo-abc-foo');
|
||||
assertText('${1}-abc-${1:foo}', 'foo-abc-foo');
|
||||
});
|
||||
|
||||
test('backspace esapce in TM only, #16212', () => {
|
||||
const actual = new SnippetParser().text('Foo \\\\${abc}bar');
|
||||
assert.strictEqual(actual, 'Foo \\bar');
|
||||
});
|
||||
|
||||
test('colon as variable/placeholder value, #16717', () => {
|
||||
let actual = new SnippetParser().text('${TM_SELECTED_TEXT:foo:bar}');
|
||||
assert.strictEqual(actual, 'foo:bar');
|
||||
|
||||
actual = new SnippetParser().text('${1:foo:bar}');
|
||||
assert.strictEqual(actual, 'foo:bar');
|
||||
});
|
||||
|
||||
test('incomplete placeholder', () => {
|
||||
assertTextAndMarker('${1:}', '', Placeholder);
|
||||
});
|
||||
|
||||
test('marker#len', () => {
|
||||
|
||||
function assertLen(template: string, ...lengths: number[]): void {
|
||||
const snippet = new SnippetParser().parse(template, true);
|
||||
snippet.walk(m => {
|
||||
const expected = lengths.shift();
|
||||
assert.strictEqual(m.len(), expected);
|
||||
return true;
|
||||
});
|
||||
assert.strictEqual(lengths.length, 0);
|
||||
}
|
||||
|
||||
assertLen('text$0', 4, 0);
|
||||
assertLen('$1text$0', 0, 4, 0);
|
||||
assertLen('te$1xt$0', 2, 0, 2, 0);
|
||||
assertLen('errorContext: `${1:err}`, error: $0', 15, 0, 3, 10, 0);
|
||||
assertLen('errorContext: `${1:err}`, error: $1$0', 15, 0, 3, 10, 0, 3, 0);
|
||||
assertLen('$TM_SELECTED_TEXT$0', 0, 0);
|
||||
assertLen('${TM_SELECTED_TEXT:def}$0', 0, 3, 0);
|
||||
});
|
||||
|
||||
test('parser, parent node', function () {
|
||||
let snippet = new SnippetParser().parse('This ${1:is ${2:nested}}$0', true);
|
||||
|
||||
assert.strictEqual(snippet.placeholders.length, 3);
|
||||
let [first, second] = snippet.placeholders;
|
||||
assert.strictEqual(first.index, 1);
|
||||
assert.strictEqual(second.index, 2);
|
||||
assert.ok(second.parent === first);
|
||||
assert.ok(first.parent === snippet);
|
||||
|
||||
snippet = new SnippetParser().parse('${VAR:default${1:value}}$0', true);
|
||||
assert.strictEqual(snippet.placeholders.length, 2);
|
||||
[first] = snippet.placeholders;
|
||||
assert.strictEqual(first.index, 1);
|
||||
|
||||
assert.ok(snippet.children[0] instanceof Variable);
|
||||
assert.ok(first.parent === snippet.children[0]);
|
||||
});
|
||||
|
||||
test('TextmateSnippet#enclosingPlaceholders', () => {
|
||||
let snippet = new SnippetParser().parse('This ${1:is ${2:nested}}$0', true);
|
||||
let [first, second] = snippet.placeholders;
|
||||
|
||||
assert.deepStrictEqual(snippet.enclosingPlaceholders(first), []);
|
||||
assert.deepStrictEqual(snippet.enclosingPlaceholders(second), [first]);
|
||||
});
|
||||
|
||||
test('TextmateSnippet#offset', () => {
|
||||
let snippet = new SnippetParser().parse('te$1xt', true);
|
||||
assert.strictEqual(snippet.offset(snippet.children[0]), 0);
|
||||
assert.strictEqual(snippet.offset(snippet.children[1]), 2);
|
||||
assert.strictEqual(snippet.offset(snippet.children[2]), 2);
|
||||
|
||||
snippet = new SnippetParser().parse('${TM_SELECTED_TEXT:def}', true);
|
||||
assert.strictEqual(snippet.offset(snippet.children[0]), 0);
|
||||
assert.strictEqual(snippet.offset((<Variable>snippet.children[0]).children[0]), 0);
|
||||
|
||||
// forgein marker
|
||||
assert.strictEqual(snippet.offset(new Text('foo')), -1);
|
||||
});
|
||||
|
||||
test('TextmateSnippet#placeholder', () => {
|
||||
let snippet = new SnippetParser().parse('te$1xt$0', true);
|
||||
let placeholders = snippet.placeholders;
|
||||
assert.strictEqual(placeholders.length, 2);
|
||||
|
||||
snippet = new SnippetParser().parse('te$1xt$1$0', true);
|
||||
placeholders = snippet.placeholders;
|
||||
assert.strictEqual(placeholders.length, 3);
|
||||
|
||||
|
||||
snippet = new SnippetParser().parse('te$1xt$2$0', true);
|
||||
placeholders = snippet.placeholders;
|
||||
assert.strictEqual(placeholders.length, 3);
|
||||
|
||||
snippet = new SnippetParser().parse('${1:bar${2:foo}bar}$0', true);
|
||||
placeholders = snippet.placeholders;
|
||||
assert.strictEqual(placeholders.length, 3);
|
||||
});
|
||||
|
||||
test('TextmateSnippet#replace 1/2', function () {
|
||||
let snippet = new SnippetParser().parse('aaa${1:bbb${2:ccc}}$0', true);
|
||||
|
||||
assert.strictEqual(snippet.placeholders.length, 3);
|
||||
const [, second] = snippet.placeholders;
|
||||
assert.strictEqual(second.index, 2);
|
||||
|
||||
const enclosing = snippet.enclosingPlaceholders(second);
|
||||
assert.strictEqual(enclosing.length, 1);
|
||||
assert.strictEqual(enclosing[0].index, 1);
|
||||
|
||||
let nested = new SnippetParser().parse('ddd$1eee$0', true);
|
||||
snippet.replace(second, nested.children);
|
||||
|
||||
assert.strictEqual(snippet.toString(), 'aaabbbdddeee');
|
||||
assert.strictEqual(snippet.placeholders.length, 4);
|
||||
assert.strictEqual(snippet.placeholders[0].index, 1);
|
||||
assert.strictEqual(snippet.placeholders[1].index, 1);
|
||||
assert.strictEqual(snippet.placeholders[2].index, 0);
|
||||
assert.strictEqual(snippet.placeholders[3].index, 0);
|
||||
|
||||
const newEnclosing = snippet.enclosingPlaceholders(snippet.placeholders[1]);
|
||||
assert.ok(newEnclosing[0] === snippet.placeholders[0]);
|
||||
assert.strictEqual(newEnclosing.length, 1);
|
||||
assert.strictEqual(newEnclosing[0].index, 1);
|
||||
});
|
||||
|
||||
test('TextmateSnippet#replace 2/2', function () {
|
||||
let snippet = new SnippetParser().parse('aaa${1:bbb${2:ccc}}$0', true);
|
||||
|
||||
assert.strictEqual(snippet.placeholders.length, 3);
|
||||
const [, second] = snippet.placeholders;
|
||||
assert.strictEqual(second.index, 2);
|
||||
|
||||
let nested = new SnippetParser().parse('dddeee$0', true);
|
||||
snippet.replace(second, nested.children);
|
||||
|
||||
assert.strictEqual(snippet.toString(), 'aaabbbdddeee');
|
||||
assert.strictEqual(snippet.placeholders.length, 3);
|
||||
});
|
||||
|
||||
test('Snippet order for placeholders, #28185', function () {
|
||||
|
||||
const _10 = new Placeholder(10);
|
||||
const _2 = new Placeholder(2);
|
||||
|
||||
assert.strictEqual(Placeholder.compareByIndex(_10, _2), 1);
|
||||
});
|
||||
|
||||
test('Maximum call stack size exceeded, #28983', function () {
|
||||
new SnippetParser().parse('${1:${foo:${1}}}');
|
||||
});
|
||||
|
||||
test('Snippet can freeze the editor, #30407', function () {
|
||||
|
||||
const seen = new Set<Marker>();
|
||||
|
||||
seen.clear();
|
||||
new SnippetParser().parse('class ${1:${TM_FILENAME/(?:\\A|_)([A-Za-z0-9]+)(?:\\.rb)?/(?2::\\u$1)/g}} < ${2:Application}Controller\n $3\nend').walk(marker => {
|
||||
assert.ok(!seen.has(marker));
|
||||
seen.add(marker);
|
||||
return true;
|
||||
});
|
||||
|
||||
seen.clear();
|
||||
new SnippetParser().parse('${1:${FOO:abc$1def}}').walk(marker => {
|
||||
assert.ok(!seen.has(marker));
|
||||
seen.add(marker);
|
||||
return true;
|
||||
});
|
||||
});
|
||||
|
||||
test('Snippets: make parser ignore `${0|choice|}`, #31599', function () {
|
||||
assertTextAndMarker('${0|foo,bar|}', '${0|foo,bar|}', Text);
|
||||
assertTextAndMarker('${1|foo,bar|}', 'foo', Placeholder);
|
||||
});
|
||||
|
||||
|
||||
test('Transform -> FormatString#resolve', function () {
|
||||
|
||||
// shorthand functions
|
||||
assert.strictEqual(new FormatString(1, 'upcase').resolve('foo'), 'FOO');
|
||||
assert.strictEqual(new FormatString(1, 'downcase').resolve('FOO'), 'foo');
|
||||
assert.strictEqual(new FormatString(1, 'capitalize').resolve('bar'), 'Bar');
|
||||
assert.strictEqual(new FormatString(1, 'capitalize').resolve('bar no repeat'), 'Bar no repeat');
|
||||
assert.strictEqual(new FormatString(1, 'pascalcase').resolve('bar-foo'), 'BarFoo');
|
||||
assert.strictEqual(new FormatString(1, 'pascalcase').resolve('bar-42-foo'), 'Bar42Foo');
|
||||
assert.strictEqual(new FormatString(1, 'camelcase').resolve('bar-foo'), 'barFoo');
|
||||
assert.strictEqual(new FormatString(1, 'camelcase').resolve('bar-42-foo'), 'bar42Foo');
|
||||
assert.strictEqual(new FormatString(1, 'notKnown').resolve('input'), 'input');
|
||||
|
||||
// if
|
||||
assert.strictEqual(new FormatString(1, undefined, 'foo', undefined).resolve(undefined), '');
|
||||
assert.strictEqual(new FormatString(1, undefined, 'foo', undefined).resolve(''), '');
|
||||
assert.strictEqual(new FormatString(1, undefined, 'foo', undefined).resolve('bar'), 'foo');
|
||||
|
||||
// else
|
||||
assert.strictEqual(new FormatString(1, undefined, undefined, 'foo').resolve(undefined), 'foo');
|
||||
assert.strictEqual(new FormatString(1, undefined, undefined, 'foo').resolve(''), 'foo');
|
||||
assert.strictEqual(new FormatString(1, undefined, undefined, 'foo').resolve('bar'), 'bar');
|
||||
|
||||
// if-else
|
||||
assert.strictEqual(new FormatString(1, undefined, 'bar', 'foo').resolve(undefined), 'foo');
|
||||
assert.strictEqual(new FormatString(1, undefined, 'bar', 'foo').resolve(''), 'foo');
|
||||
assert.strictEqual(new FormatString(1, undefined, 'bar', 'foo').resolve('baz'), 'bar');
|
||||
});
|
||||
|
||||
test('Snippet variable transformation doesn\'t work if regex is complicated and snippet body contains \'$$\' #55627', function () {
|
||||
const snippet = new SnippetParser().parse('const fileName = "${TM_FILENAME/(.*)\\..+$/$1/}"');
|
||||
assert.strictEqual(snippet.toTextmateString(), 'const fileName = "${TM_FILENAME/(.*)\\..+$/${1}/}"');
|
||||
});
|
||||
|
||||
test('[BUG] HTML attribute suggestions: Snippet session does not have end-position set, #33147', function () {
|
||||
|
||||
const { placeholders } = new SnippetParser().parse('src="$1"', true);
|
||||
const [first, second] = placeholders;
|
||||
|
||||
assert.strictEqual(placeholders.length, 2);
|
||||
assert.strictEqual(first.index, 1);
|
||||
assert.strictEqual(second.index, 0);
|
||||
|
||||
});
|
||||
|
||||
test('Snippet optional transforms are not applied correctly when reusing the same variable, #37702', function () {
|
||||
|
||||
const transform = new Transform();
|
||||
transform.appendChild(new FormatString(1, 'upcase'));
|
||||
transform.appendChild(new FormatString(2, 'upcase'));
|
||||
transform.regexp = /^(.)|-(.)/g;
|
||||
|
||||
assert.strictEqual(transform.resolve('my-file-name'), 'MyFileName');
|
||||
|
||||
const clone = transform.clone();
|
||||
assert.strictEqual(clone.resolve('my-file-name'), 'MyFileName');
|
||||
});
|
||||
|
||||
test('problem with snippets regex #40570', function () {
|
||||
|
||||
const snippet = new SnippetParser().parse('${TM_DIRECTORY/.*src[\\/](.*)/$1/}');
|
||||
assertMarker(snippet, Variable);
|
||||
});
|
||||
|
||||
test('Variable transformation doesn\'t work if undefined variables are used in the same snippet #51769', function () {
|
||||
let transform = new Transform();
|
||||
transform.appendChild(new Text('bar'));
|
||||
transform.regexp = new RegExp('foo', 'gi');
|
||||
assert.strictEqual(transform.toTextmateString(), '/foo/bar/ig');
|
||||
});
|
||||
|
||||
test('Snippet parser freeze #53144', function () {
|
||||
let snippet = new SnippetParser().parse('${1/(void$)|(.+)/${1:?-\treturn nil;}/}');
|
||||
assertMarker(snippet, Placeholder);
|
||||
});
|
||||
|
||||
test('snippets variable not resolved in JSON proposal #52931', function () {
|
||||
assertTextAndMarker('FOO${1:/bin/bash}', 'FOO/bin/bash', Text, Placeholder);
|
||||
});
|
||||
|
||||
test('Mirroring sequence of nested placeholders not selected properly on backjumping #58736', function () {
|
||||
let snippet = new SnippetParser().parse('${3:nest1 ${1:nest2 ${2:nest3}}} $3');
|
||||
assert.strictEqual(snippet.children.length, 3);
|
||||
assert.ok(snippet.children[0] instanceof Placeholder);
|
||||
assert.ok(snippet.children[1] instanceof Text);
|
||||
assert.ok(snippet.children[2] instanceof Placeholder);
|
||||
|
||||
function assertParent(marker: Marker) {
|
||||
marker.children.forEach(assertParent);
|
||||
if (!(marker instanceof Placeholder)) {
|
||||
return;
|
||||
}
|
||||
let found = false;
|
||||
let m: Marker = marker;
|
||||
while (m && !found) {
|
||||
if (m.parent === snippet) {
|
||||
found = true;
|
||||
}
|
||||
m = m.parent;
|
||||
}
|
||||
assert.ok(found);
|
||||
}
|
||||
let [, , clone] = snippet.children;
|
||||
assertParent(clone);
|
||||
});
|
||||
|
||||
test('Backspace can\'t be escaped in snippet variable transforms #65412', function () {
|
||||
|
||||
let snippet = new SnippetParser().parse('namespace ${TM_DIRECTORY/[\\/]/\\\\/g};');
|
||||
assertMarker(snippet, Text, Variable, Text);
|
||||
});
|
||||
|
||||
test('Snippet cannot escape closing bracket inside conditional insertion variable replacement #78883', function () {
|
||||
|
||||
let snippet = new SnippetParser().parse('${TM_DIRECTORY/(.+)/${1:+import { hello \\} from world}/}');
|
||||
let variable = <Variable>snippet.children[0];
|
||||
assert.strictEqual(snippet.children.length, 1);
|
||||
assert.ok(variable instanceof Variable);
|
||||
assert.ok(variable.transform);
|
||||
assert.strictEqual(variable.transform!.children.length, 1);
|
||||
assert.ok(variable.transform!.children[0] instanceof FormatString);
|
||||
assert.strictEqual((<FormatString>variable.transform!.children[0]).ifValue, 'import { hello } from world');
|
||||
assert.strictEqual((<FormatString>variable.transform!.children[0]).elseValue, undefined);
|
||||
});
|
||||
|
||||
test('Snippet escape backslashes inside conditional insertion variable replacement #80394', function () {
|
||||
|
||||
let snippet = new SnippetParser().parse('${CURRENT_YEAR/(.+)/${1:+\\\\}/}');
|
||||
let variable = <Variable>snippet.children[0];
|
||||
assert.strictEqual(snippet.children.length, 1);
|
||||
assert.ok(variable instanceof Variable);
|
||||
assert.ok(variable.transform);
|
||||
assert.strictEqual(variable.transform!.children.length, 1);
|
||||
assert.ok(variable.transform!.children[0] instanceof FormatString);
|
||||
assert.strictEqual((<FormatString>variable.transform!.children[0]).ifValue, '\\');
|
||||
assert.strictEqual((<FormatString>variable.transform!.children[0]).elseValue, undefined);
|
||||
});
|
||||
});
|
||||
1166  packages/foam-vscode/src/core/common/snippetParser.ts  Normal file
File diff suppressed because it is too large.
@@ -13,8 +13,8 @@ export const applyTextEdit = (text: string, textEdit: TextEdit): string => {
const eol = detectNewline(text) || os.EOL;
const lines = text.split(eol);
const characters = text.split('');
let startOffset = getOffset(lines, textEdit.range.start, eol);
let endOffset = getOffset(lines, textEdit.range.end, eol);
const startOffset = getOffset(lines, textEdit.range.start, eol);
const endOffset = getOffset(lines, textEdit.range.end, eol);
const deleteCount = endOffset - startOffset;

const textToAppend = `${textEdit.newText}`;
@@ -34,5 +34,5 @@ const getOffset = (
offset = offset + lines[i].length + eolLen;
i++;
}
return offset + Math.min(position.character, lines[i].length);
return offset + Math.min(position.character, lines[i]?.length ?? 0);
};
@@ -1,10 +1,9 @@
import { generateHeading } from '.';
import { TEST_DATA_DIR } from '../../test/test-utils';
import { MarkdownResourceProvider } from '../markdown-provider';
import { readFileFromFs, TEST_DATA_DIR } from '../../test/test-utils';
import { MarkdownResourceProvider } from '../services/markdown-provider';
import { bootstrap } from '../model/foam';
import { Resource } from '../model/note';
import { Range } from '../model/range';
import { URI } from '../model/uri';
import { FoamWorkspace } from '../model/workspace';
import { FileDataStore, Matcher } from '../services/datastore';
import { Logger } from '../utils/log';
@@ -16,13 +15,14 @@ describe('generateHeadings', () => {
const findBySlug = (slug: string): Resource => {
return _workspace
.list()
.find(res => URI.getBasename(res.uri) === slug) as Resource;
.find(res => res.uri.getName() === slug) as Resource;
};

beforeAll(async () => {
const matcher = new Matcher([URI.joinPath(TEST_DATA_DIR, '__scaffold__')]);
const mdProvider = new MarkdownResourceProvider(matcher);
const foam = await bootstrap(matcher, new FileDataStore(), [mdProvider]);
const matcher = new Matcher([TEST_DATA_DIR.joinPath('__scaffold__')]);
const dataStore = new FileDataStore(readFileFromFs);
const mdProvider = new MarkdownResourceProvider(matcher, dataStore);
const foam = await bootstrap(matcher, dataStore, [mdProvider]);
_workspace = foam.workspace;
});
@@ -1,28 +1,34 @@
import { generateLinkReferences } from '.';
import { TEST_DATA_DIR } from '../../test/test-utils';
import { MarkdownResourceProvider } from '../markdown-provider';
import { MarkdownResourceProvider } from '../services/markdown-provider';
import { bootstrap } from '../model/foam';
import { Resource } from '../model/note';
import { Range } from '../model/range';
import { URI } from '../model/uri';
import { FoamWorkspace } from '../model/workspace';
import { FileDataStore, Matcher } from '../services/datastore';
import { Logger } from '../utils/log';
import fs from 'fs';
import { URI } from '../model/uri';

Logger.setLevel('error');

describe('generateLinkReferences', () => {
let _workspace: FoamWorkspace;
// TODO slug must be reserved for actual slugs, not file names
const findBySlug = (slug: string): Resource => {
return _workspace
.list()
.find(res => URI.getBasename(res.uri) === slug) as Resource;
.find(res => res.uri.getName() === slug) as Resource;
};

beforeAll(async () => {
const matcher = new Matcher([URI.joinPath(TEST_DATA_DIR, '__scaffold__')]);
const mdProvider = new MarkdownResourceProvider(matcher);
const foam = await bootstrap(matcher, new FileDataStore(), [mdProvider]);
const matcher = new Matcher([TEST_DATA_DIR.joinPath('__scaffold__')]);
/** Use fs for reading files in units where vscode.workspace is unavailable */
const readFile = async (uri: URI) =>
(await fs.promises.readFile(uri.toFsPath())).toString();
const dataStore = new FileDataStore(readFile);
const mdProvider = new MarkdownResourceProvider(matcher, dataStore);
const foam = await bootstrap(matcher, dataStore, [mdProvider]);
_workspace = foam.workspace;
});
@@ -1,19 +1,15 @@
import GithubSlugger from 'github-slugger';
import { Resource } from '../model/note';
import { Range } from '../model/range';
import {
createMarkdownReferences,
stringifyMarkdownLinkReferenceDefinition,
} from '../markdown-provider';
} from '../services/markdown-provider';
import { getHeadingFromFileName } from '../utils';
import { FoamWorkspace } from '../model/workspace';
import { uriToSlug } from '../utils/slug';

export const LINK_REFERENCE_DEFINITION_HEADER = `[//begin]: # "Autogenerated link references for markdown compatibility"`;
export const LINK_REFERENCE_DEFINITION_FOOTER = `[//end]: # "Autogenerated link references"`;

const slugger = new GithubSlugger();

export interface TextEdit {
range: Range;
newText: string;
@@ -60,7 +56,7 @@ export const generateLinkReferences = (
const first = note.definitions[0];
const last = note.definitions[note.definitions.length - 1];

var nonGeneratedReferenceDefinitions = note.definitions;
let nonGeneratedReferenceDefinitions = note.definitions;

// if we have more definitions then referenced pages AND the page refers to a page
// we expect non-generated link definitions to be present
@@ -119,7 +115,7 @@
return null;
}

var fullReferences = `${newReferences}`;
let fullReferences = `${newReferences}`;
// If there are any non-generated definitions, add those to the output as well
if (
nonGeneratedReferenceDefinitions.length > 0 &&
@@ -168,7 +164,7 @@ export const generateHeading = (note: Resource): TextEdit | null => {

return {
newText: `${paddingStart}# ${getHeadingFromFileName(
uriToSlug(note.uri)
note.uri.getName()
)}${paddingEnd}`,
range: Range.createFromPosition(
note.source.contentStart,
@@ -176,14 +172,3 @@ export const generateHeading = (note: Resource): TextEdit | null => {
),
};
};

/**
*
* @param fileName
* @returns null if file name is already in kebab case otherise returns
* the kebab cased file name
*/
export const getKebabCaseFileName = (fileName: string) => {
const kebabCasedFileName = slugger.slug(fileName);
return kebabCasedFileName === fileName ? null : kebabCasedFileName;
};
@@ -1,492 +0,0 @@
|
||||
import {
|
||||
createMarkdownParser,
|
||||
createMarkdownReferences,
|
||||
ParserPlugin,
|
||||
} from './markdown-provider';
|
||||
import { DirectLink, WikiLink } from './model/note';
|
||||
import { Logger } from './utils/log';
|
||||
import { uriToSlug } from './utils/slug';
|
||||
import { URI } from './model/uri';
|
||||
import { FoamGraph } from './model/graph';
|
||||
import { Range } from './model/range';
|
||||
import { createTestWorkspace } from '../test/test-utils';
|
||||
|
||||
Logger.setLevel('error');
|
||||
|
||||
const pageA = `
|
||||
# Page A
|
||||
|
||||
## Section
|
||||
- [[page-b]]
|
||||
- [[page-c]]
|
||||
- [[Page D]]
|
||||
- [[page e]]
|
||||
`;
|
||||
|
||||
const pageB = `
|
||||
# Page B
|
||||
|
||||
This references [[page-a]]`;
|
||||
|
||||
const pageC = `
|
||||
# Page C
|
||||
`;
|
||||
|
||||
const pageD = `
|
||||
# Page D
|
||||
`;
|
||||
|
||||
const pageE = `
|
||||
# Page E
|
||||
`;
|
||||
|
||||
const createNoteFromMarkdown = (path: string, content: string) =>
|
||||
createMarkdownParser([]).parse(URI.file(path), content);
|
||||
|
||||
describe('Markdown loader', () => {
|
||||
it('Converts markdown to notes', () => {
|
||||
const workspace = createTestWorkspace();
|
||||
workspace.set(createNoteFromMarkdown('/page-a.md', pageA));
|
||||
workspace.set(createNoteFromMarkdown('/page-b.md', pageB));
|
||||
workspace.set(createNoteFromMarkdown('/page-c.md', pageC));
|
||||
workspace.set(createNoteFromMarkdown('/page-d.md', pageD));
|
||||
workspace.set(createNoteFromMarkdown('/page-e.md', pageE));
|
||||
|
||||
expect(
|
||||
workspace
|
||||
.list()
|
||||
.map(n => n.uri)
|
||||
.map(uriToSlug)
|
||||
.sort()
|
||||
).toEqual(['page-a', 'page-b', 'page-c', 'page-d', 'page-e']);
|
||||
});
|
||||
|
||||
it('Ingores external links', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/path/to/page-a.md',
|
||||
`
|
||||
this is a [link to google](https://www.google.com)
|
||||
`
|
||||
);
|
||||
expect(note.links.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('Ignores references to sections in the same file', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/path/to/page-a.md',
|
||||
`
|
||||
this is a [link to intro](#introduction)
|
||||
`
|
||||
);
|
||||
expect(note.links.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('Parses internal links correctly', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/path/to/page-a.md',
|
||||
'this is a [link to page b](../doc/page-b.md)'
|
||||
);
|
||||
expect(note.links.length).toEqual(1);
|
||||
const link = note.links[0] as DirectLink;
|
||||
expect(link.type).toEqual('link');
|
||||
expect(link.label).toEqual('link to page b');
|
||||
expect(link.target).toEqual('../doc/page-b.md');
|
||||
});
|
||||
|
||||
it('Parses links that have formatting in label', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/path/to/page-a.md',
|
||||
'this is [**link** with __formatting__](../doc/page-b.md)'
|
||||
);
|
||||
expect(note.links.length).toEqual(1);
|
||||
const link = note.links[0] as DirectLink;
|
||||
expect(link.type).toEqual('link');
|
||||
expect(link.label).toEqual('link with formatting');
|
||||
expect(link.target).toEqual('../doc/page-b.md');
|
||||
});
|
||||
|
||||
it('Parses wikilinks correctly', () => {
|
||||
const workspace = createTestWorkspace();
|
||||
const noteA = createNoteFromMarkdown('/page-a.md', pageA);
|
||||
const noteB = createNoteFromMarkdown('/page-b.md', pageB);
|
||||
const noteC = createNoteFromMarkdown('/page-c.md', pageC);
|
||||
const noteD = createNoteFromMarkdown('/Page D.md', pageD);
|
||||
const noteE = createNoteFromMarkdown('/page e.md', pageE);
|
||||
|
||||
workspace
|
||||
.set(noteA)
|
||||
.set(noteB)
|
||||
.set(noteC)
|
||||
.set(noteD)
|
||||
.set(noteE);
|
||||
const graph = FoamGraph.fromWorkspace(workspace);
|
||||
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
noteB.uri,
|
||||
noteC.uri,
|
||||
noteD.uri,
|
||||
noteE.uri,
|
||||
]);
|
||||
});
|
||||
|
||||
it('Parses backlinks with an alias', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/path/to/page-a.md',
|
||||
'this is [[link|link alias]]. A link with spaces [[other link | spaced]]'
|
||||
);
|
||||
expect(note.links.length).toEqual(2);
|
||||
let link = note.links[0] as WikiLink;
|
||||
expect(link.type).toEqual('wikilink');
|
||||
expect(link.rawText).toEqual('[[link|link alias]]');
|
||||
expect(link.label).toEqual('link alias');
|
||||
expect(link.target).toEqual('link');
|
||||
link = note.links[1] as WikiLink;
|
||||
expect(link.type).toEqual('wikilink');
|
||||
expect(link.rawText).toEqual('[[other link | spaced]]');
|
||||
expect(link.label).toEqual('spaced');
|
||||
expect(link.target).toEqual('other link');
|
||||
});
|
||||
|
||||
it('Skips wikilinks in codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
this is some text with our [[first-wikilink]].
|
||||
|
||||
\`\`\`
|
||||
this is inside a [[codeblock]]
|
||||
\`\`\`
|
||||
|
||||
this is some text with our [[second-wikilink]].
|
||||
`
|
||||
);
|
||||
expect(noteA.links.map(l => l.label)).toEqual([
|
||||
'first-wikilink',
|
||||
'second-wikilink',
|
||||
]);
|
||||
});
|
||||
|
||||
it('Skips wikilinks in inlined codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
this is some text with our [[first-wikilink]].
|
||||
|
||||
this is \`inside a [[codeblock]]\`
|
||||
|
||||
this is some text with our [[second-wikilink]].
|
||||
`
|
||||
);
|
||||
expect(noteA.links.map(l => l.label)).toEqual([
|
||||
'first-wikilink',
|
||||
'second-wikilink',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Note Title', () => {
|
||||
it('should initialize note title if heading exists', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/page-a.md',
|
||||
`
|
||||
# Page A
|
||||
this note has a title
|
||||
`
|
||||
);
|
||||
expect(note.title).toBe('Page A');
|
||||
});
|
||||
|
||||
it('should default to file name if heading does not exist', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/page-d.md',
|
||||
`
|
||||
This file has no heading.
|
||||
`
|
||||
);
|
||||
|
||||
expect(note.title).toEqual('page-d');
|
||||
});
|
||||
|
||||
it('should give precedence to frontmatter title over other headings', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/page-e.md',
|
||||
`
|
||||
---
|
||||
title: Note Title
|
||||
date: 20-12-12
|
||||
---
|
||||
|
||||
# Other Note Title
|
||||
`
|
||||
);
|
||||
|
||||
expect(note.title).toBe('Note Title');
|
||||
});
|
||||
|
||||
it('should support numbers', () => {
|
||||
const note1 = createNoteFromMarkdown('/157.md', `hello`);
|
||||
expect(note1.title).toBe('157');
|
||||
|
||||
const note2 = createNoteFromMarkdown('/157.md', `# 158`);
|
||||
expect(note2.title).toBe('158');
|
||||
|
||||
const note3 = createNoteFromMarkdown(
|
||||
'/157.md',
|
||||
`
|
||||
---
|
||||
title: 159
|
||||
---
|
||||
|
||||
# 158
|
||||
`
|
||||
);
|
||||
expect(note3.title).toBe('159');
|
||||
});
|
||||
|
||||
it('should not break on empty titles (see #276)', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/Hello Page.md',
|
||||
`
|
||||
#
|
||||
|
||||
this note has an empty title line
|
||||
`
|
||||
);
|
||||
expect(note.title).toEqual('Hello Page');
|
||||
});
|
||||
});
|
||||
|
||||
describe('frontmatter', () => {
|
||||
it('should parse yaml frontmatter', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/page-e.md',
|
||||
`
|
||||
---
|
||||
title: Note Title
|
||||
date: 20-12-12
|
||||
---
|
||||
|
||||
# Other Note Title`
|
||||
);
|
||||
|
||||
expect(note.properties.title).toBe('Note Title');
|
||||
expect(note.properties.date).toBe('20-12-12');
|
||||
});
|
||||
|
||||
it('should parse empty frontmatter', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/page-f.md',
|
||||
`
|
||||
---
|
||||
---
|
||||
|
||||
# Empty Frontmatter
|
||||
`
|
||||
);
|
||||
|
||||
expect(note.properties).toEqual({});
|
||||
});
|
||||
|
||||
it('should not fail when there are issues with parsing frontmatter', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'/page-f.md',
|
||||
`
|
||||
---
|
||||
title: - one
|
||||
- two
|
||||
- #
|
||||
---
|
||||
|
||||
`
|
||||
);
|
||||
|
||||
expect(note.properties).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('wikilinks definitions', () => {
|
||||
it('can generate links without file extension when includeExtension = false', () => {
|
||||
const workspace = createTestWorkspace();
|
||||
const noteA = createNoteFromMarkdown('/dir1/page-a.md', pageA);
|
||||
workspace
|
||||
.set(noteA)
|
||||
.set(createNoteFromMarkdown('/dir1/page-b.md', pageB))
|
||||
.set(createNoteFromMarkdown('/dir1/page-c.md', pageC));
|
||||
|
||||
const noExtRefs = createMarkdownReferences(workspace, noteA.uri, false);
|
||||
expect(noExtRefs.map(r => r.url)).toEqual(['page-b', 'page-c']);
|
||||
});
|
||||
|
||||
it('can generate links with file extension when includeExtension = true', () => {
|
||||
const workspace = createTestWorkspace();
|
||||
const noteA = createNoteFromMarkdown('/dir1/page-a.md', pageA);
|
||||
workspace
|
||||
.set(noteA)
|
||||
.set(createNoteFromMarkdown('/dir1/page-b.md', pageB))
|
||||
.set(createNoteFromMarkdown('/dir1/page-c.md', pageC));
|
||||
|
||||
const extRefs = createMarkdownReferences(workspace, noteA.uri, true);
|
||||
expect(extRefs.map(r => r.url)).toEqual(['page-b.md', 'page-c.md']);
|
||||
});
|
||||
|
||||
it('use relative paths', () => {
|
||||
const workspace = createTestWorkspace();
|
||||
const noteA = createNoteFromMarkdown('/dir1/page-a.md', pageA);
|
||||
workspace
|
||||
.set(noteA)
|
||||
.set(createNoteFromMarkdown('/dir2/page-b.md', pageB))
|
||||
.set(createNoteFromMarkdown('/dir3/page-c.md', pageC));
|
||||
|
||||
const extRefs = createMarkdownReferences(workspace, noteA.uri, true);
|
||||
expect(extRefs.map(r => r.url)).toEqual([
|
||||
'../dir2/page-b.md',
|
||||
'../dir3/page-c.md',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('tags plugin', () => {
|
||||
it('can find tags in the text of the note', () => {
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
# this is a #heading
|
||||
#this is some #text that includes #tags we #care-about.
|
||||
`
|
||||
);
|
||||
expect(noteA.tags).toEqual([
|
||||
{ label: 'heading', range: Range.create(1, 12, 1, 20) },
|
||||
{ label: 'this', range: Range.create(2, 0, 2, 5) },
|
||||
{ label: 'text', range: Range.create(2, 14, 2, 19) },
|
||||
{ label: 'tags', range: Range.create(2, 34, 2, 39) },
|
||||
{ label: 'care-about', range: Range.create(2, 43, 2, 54) },
|
||||
]);
|
||||
});
|
||||
|
||||
it('will skip tags in codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
this is some #text that includes #tags we #care-about.
|
||||
|
||||
\`\`\`
|
||||
this is a #codeblock
|
||||
\`\`\`
|
||||
`
|
||||
);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
|
||||
it('will skip tags in inlined codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
this is some #text that includes #tags we #care-about.
|
||||
this is a \`inlined #codeblock\`
|
||||
`
|
||||
);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
it('can find tags as text in yaml', () => {
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
---
|
||||
tags: hello, world this_is_good
|
||||
---
|
||||
# this is a heading
|
||||
this is some #text that includes #tags we #care-about.
|
||||
`
|
||||
);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'hello',
|
||||
'world',
|
||||
'this_is_good',
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
|
||||
it('can find tags as array in yaml', () => {
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
---
|
||||
tags: [hello, world, this_is_good]
|
||||
---
|
||||
# this is a heading
|
||||
this is some #text that includes #tags we #care-about.
|
||||
`
|
||||
);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'hello',
|
||||
'world',
|
||||
'this_is_good',
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
|
||||
it('provides rough range for tags in yaml', () => {
|
||||
// For now it's enough to just get the YAML block range
|
||||
// in the future we might want to be more specific
|
||||
|
||||
const noteA = createNoteFromMarkdown(
|
||||
'/dir1/page-a.md',
|
||||
`
|
||||
---
|
||||
tags: [hello, world, this_is_good]
|
||||
---
|
||||
# this is a heading
|
||||
this is some text
|
||||
`
|
||||
);
|
||||
expect(noteA.tags[0]).toEqual({
|
||||
label: 'hello',
|
||||
range: Range.create(1, 0, 3, 3),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('parser plugins', () => {
|
||||
const testPlugin: ParserPlugin = {
|
||||
visit: (node, note) => {
|
||||
if (node.type === 'heading') {
|
||||
note.properties.hasHeading = true;
|
||||
}
|
||||
},
|
||||
};
|
||||
const parser = createMarkdownParser([testPlugin]);
|
||||
|
||||
it('can augment the parsing of the file', () => {
|
||||
const note1 = parser.parse(
|
||||
URI.file('/path/to/a'),
|
||||
`
|
||||
This is a test note without headings.
|
||||
But with some content.
|
||||
`
|
||||
);
|
||||
expect(note1.properties.hasHeading).toBeUndefined();
|
||||
|
||||
const note2 = parser.parse(
|
||||
URI.file('/path/to/a'),
|
||||
`
|
||||
# This is a note with header
|
||||
and some content`
|
||||
);
|
||||
expect(note2.properties.hasHeading).toBeTruthy();
|
||||
});
|
||||
});
|
||||
@@ -1,530 +0,0 @@
|
||||
import { Node, Position as AstPosition } from 'unist';
|
||||
import unified from 'unified';
|
||||
import markdownParse from 'remark-parse';
|
||||
import wikiLinkPlugin from 'remark-wiki-link';
|
||||
import frontmatterPlugin from 'remark-frontmatter';
|
||||
import { parse as parseYAML } from 'yaml';
|
||||
import visit from 'unist-util-visit';
|
||||
import { Parent, Point } from 'unist';
|
||||
import detectNewline from 'detect-newline';
|
||||
import os from 'os';
|
||||
import {
|
||||
NoteLinkDefinition,
|
||||
Resource,
|
||||
ResourceLink,
|
||||
WikiLink,
|
||||
ResourceParser,
|
||||
} from './model/note';
|
||||
import { Position } from './model/position';
|
||||
import { Range } from './model/range';
|
||||
import {
|
||||
dropExtension,
|
||||
extractHashtags,
|
||||
extractTagsFromProp,
|
||||
isNone,
|
||||
isSome,
|
||||
} from './utils';
|
||||
import { Logger } from './utils/log';
|
||||
import { URI } from './model/uri';
|
||||
import { FoamWorkspace } from './model/workspace';
|
||||
import { IDataStore, FileDataStore, IMatcher } from './services/datastore';
|
||||
import { IDisposable } from './common/lifecycle';
|
||||
import { ResourceProvider } from './model/provider';
|
||||
|
||||
const ALIAS_DIVIDER_CHAR = '|';
|
||||
|
||||
export interface ParserPlugin {
|
||||
name?: string;
|
||||
visit?: (node: Node, note: Resource, noteSource: string) => void;
|
||||
onDidInitializeParser?: (parser: unified.Processor) => void;
|
||||
onWillParseMarkdown?: (markdown: string) => string;
|
||||
onWillVisitTree?: (tree: Node, note: Resource) => void;
|
||||
onDidVisitTree?: (tree: Node, note: Resource) => void;
|
||||
onDidFindProperties?: (properties: any, note: Resource, node: Node) => void;
|
||||
}
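// Illustrative sketch (not part of the original file): a minimal plugin built
// against the interface above. It uses only the `visit` hook; the property
// name `wikilinkCount` is a hypothetical example, not an existing Foam property.
const wikilinkCountPlugin: ParserPlugin = {
name: 'wikilink-count',
visit: (node, note) => {
if (node.type === 'wikiLink') {
// properties is a free-form bag, so we can accumulate a counter on it
note.properties.wikilinkCount = (note.properties.wikilinkCount ?? 0) + 1;
}
},
};
// Assumed usage: createMarkdownParser([wikilinkCountPlugin]).parse(uri, text)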
|
||||
|
||||
export class MarkdownResourceProvider implements ResourceProvider {
|
||||
private disposables: IDisposable[] = [];
|
||||
|
||||
constructor(
|
||||
private readonly matcher: IMatcher,
|
||||
private readonly watcherInit?: (triggers: {
|
||||
onDidChange: (uri: URI) => void;
|
||||
onDidCreate: (uri: URI) => void;
|
||||
onDidDelete: (uri: URI) => void;
|
||||
}) => IDisposable[],
|
||||
private readonly parser: ResourceParser = createMarkdownParser([]),
|
||||
private readonly dataStore: IDataStore = new FileDataStore()
|
||||
) {}
|
||||
|
||||
async init(workspace: FoamWorkspace) {
|
||||
const filesByFolder = await Promise.all(
|
||||
this.matcher.include.map(glob =>
|
||||
this.dataStore.list(glob, this.matcher.exclude)
|
||||
)
|
||||
);
|
||||
const files = this.matcher
|
||||
.match(filesByFolder.flat())
|
||||
.filter(this.supports);
|
||||
|
||||
await Promise.all(
|
||||
files.map(async uri => {
|
||||
Logger.info('Found: ' + URI.toString(uri));
|
||||
const content = await this.dataStore.read(uri);
|
||||
if (isSome(content)) {
|
||||
workspace.set(this.parser.parse(uri, content));
|
||||
}
|
||||
})
|
||||
);
|
||||
|
||||
this.disposables =
|
||||
this.watcherInit?.({
|
||||
onDidChange: async uri => {
|
||||
if (this.matcher.isMatch(uri) && this.supports(uri)) {
|
||||
const content = await this.dataStore.read(uri);
|
||||
isSome(content) &&
|
||||
workspace.set(await this.parser.parse(uri, content));
|
||||
}
|
||||
},
|
||||
onDidCreate: async uri => {
|
||||
if (this.matcher.isMatch(uri) && this.supports(uri)) {
|
||||
const content = await this.dataStore.read(uri);
|
||||
isSome(content) &&
|
||||
workspace.set(await this.parser.parse(uri, content));
|
||||
}
|
||||
},
|
||||
onDidDelete: uri => {
|
||||
this.supports(uri) && workspace.delete(uri);
|
||||
},
|
||||
}) ?? [];
|
||||
}
|
||||
|
||||
supports(uri: URI) {
|
||||
return URI.isMarkdownFile(uri);
|
||||
}
|
||||
|
||||
read(uri: URI): Promise<string | null> {
|
||||
return this.dataStore.read(uri);
|
||||
}
|
||||
|
||||
readAsMarkdown(uri: URI): Promise<string | null> {
|
||||
return this.dataStore.read(uri);
|
||||
}
|
||||
|
||||
async fetch(uri: URI) {
|
||||
const content = await this.read(uri);
|
||||
return isSome(content) ? this.parser.parse(uri, content) : null;
|
||||
}
|
||||
|
||||
resolveLink(
|
||||
workspace: FoamWorkspace,
|
||||
resource: Resource,
|
||||
link: ResourceLink
|
||||
) {
|
||||
let targetUri: URI | undefined;
|
||||
switch (link.type) {
|
||||
case 'wikilink':
|
||||
const definitionUri = resource.definitions.find(
|
||||
def => def.label === link.target
|
||||
)?.url;
|
||||
if (isSome(definitionUri)) {
|
||||
const definedUri = URI.resolve(definitionUri, resource.uri);
|
||||
targetUri =
|
||||
workspace.find(definedUri, resource.uri)?.uri ??
|
||||
URI.placeholder(definedUri.path);
|
||||
} else {
|
||||
targetUri =
|
||||
workspace.find(link.target, resource.uri)?.uri ??
|
||||
URI.placeholder(link.target);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'link':
|
||||
targetUri =
|
||||
workspace.find(link.target, resource.uri)?.uri ??
|
||||
URI.placeholder(URI.resolve(link.target, resource.uri).path);
|
||||
break;
|
||||
}
|
||||
return targetUri;
|
||||
}
|
||||
|
||||
dispose() {
|
||||
this.disposables.forEach(d => d.dispose());
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Traverses all the children of the given node, extracts
|
||||
* the text from them, and returns it concatenated.
|
||||
*
|
||||
* @param root the node from which to start collecting text
|
||||
*/
|
||||
const getTextFromChildren = (root: Node): string => {
|
||||
let text = '';
|
||||
visit(root, 'text', node => {
|
||||
if (node.type === 'text') {
|
||||
text = text + (node as any).value;
|
||||
}
|
||||
});
|
||||
return text;
|
||||
};
|
||||
|
||||
const tagsPlugin: ParserPlugin = {
|
||||
name: 'tags',
|
||||
onDidFindProperties: (props, note, node) => {
|
||||
if (isSome(props.tags)) {
|
||||
const yamlTags = extractTagsFromProp(props.tags);
|
||||
yamlTags.forEach(t => {
|
||||
note.tags.push({
|
||||
label: t,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
});
|
||||
}
|
||||
},
|
||||
visit: (node, note) => {
|
||||
if (node.type === 'text') {
|
||||
const tags = extractHashtags((node as any).value);
|
||||
tags.forEach(tag => {
|
||||
let start = astPointToFoamPosition(node.position!.start);
|
||||
start.character = start.character + tag.offset;
|
||||
const end: Position = {
|
||||
line: start.line,
|
||||
character: start.character + tag.label.length + 1,
|
||||
};
|
||||
note.tags.push({
|
||||
label: tag.label,
|
||||
range: Range.createFromPosition(start, end),
|
||||
});
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const titlePlugin: ParserPlugin = {
|
||||
name: 'title',
|
||||
visit: (node, note) => {
|
||||
if (
|
||||
note.title === '' &&
|
||||
node.type === 'heading' &&
|
||||
(node as any).depth === 1
|
||||
) {
|
||||
note.title =
|
||||
((node as Parent)!.children?.[0] as any)?.value || note.title;
|
||||
}
|
||||
},
|
||||
onDidFindProperties: (props, note) => {
|
||||
// Give precedence to the title from the frontmatter if it exists
|
||||
note.title = props.title?.toString() ?? note.title;
|
||||
},
|
||||
onDidVisitTree: (tree, note) => {
|
||||
if (note.title === '') {
|
||||
note.title = URI.getBasename(note.uri);
|
||||
}
|
||||
},
|
||||
};
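// Illustrative summary (not part of the original file) of the title precedence
// implemented above, matching the "should support numbers" tests earlier on
// this page: frontmatter `title:` wins over the first level-1 heading, which
// wins over the file basename. E.g. a note at /157.md with frontmatter
// `title: 159` and a `# 158` heading resolves to '159'; with no frontmatter it
// resolves to '158'; with neither, to '157'.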
|
||||
|
||||
const wikilinkPlugin: ParserPlugin = {
|
||||
name: 'wikilink',
|
||||
visit: (node, note, noteSource) => {
|
||||
if (node.type === 'wikiLink') {
|
||||
const text = (node as any).value;
|
||||
const alias = node.data?.alias as string;
|
||||
const literalContent = noteSource.substring(
|
||||
node.position!.start.offset!,
|
||||
node.position!.end.offset!
|
||||
);
|
||||
|
||||
const hasAlias =
|
||||
literalContent !== text && literalContent.includes(ALIAS_DIVIDER_CHAR);
|
||||
note.links.push({
|
||||
type: 'wikilink',
|
||||
rawText: literalContent,
|
||||
label: hasAlias
|
||||
? alias.trim()
|
||||
: literalContent.substring(2, literalContent.length - 2),
|
||||
target: hasAlias
|
||||
? literalContent
|
||||
.substring(2, literalContent.indexOf(ALIAS_DIVIDER_CHAR))
|
||||
.replace(/\\/g, '')
|
||||
.trim()
|
||||
: text.trim(),
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
if (node.type === 'link') {
|
||||
const targetUri = (node as any).url;
|
||||
const uri = URI.resolve(targetUri, note.uri);
|
||||
if (uri.scheme !== 'file' || uri.path === note.uri.path) {
|
||||
return;
|
||||
}
|
||||
const label = getTextFromChildren(node);
|
||||
note.links.push({
|
||||
type: 'link',
|
||||
target: targetUri,
|
||||
label: label,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
},
|
||||
};
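// Illustrative sketch (not part of the original file) of how the plugin above
// decomposes the two wikilink forms, following the logic in `visit`:
//   raw text `[[page-a]]` =>
//     { type: 'wikilink', rawText: '[[page-a]]', label: 'page-a', target: 'page-a' }
//   raw text `[[page-a|My Page]]` (alias divider is '|') =>
//     { type: 'wikilink', rawText: '[[page-a|My Page]]', label: 'My Page', target: 'page-a' }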
|
||||
|
||||
const definitionsPlugin: ParserPlugin = {
|
||||
name: 'definitions',
|
||||
visit: (node, note) => {
|
||||
if (node.type === 'definition') {
|
||||
note.definitions.push({
|
||||
label: (node as any).label,
|
||||
url: (node as any).url,
|
||||
title: (node as any).title,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
},
|
||||
onDidVisitTree: (tree, note) => {
|
||||
note.definitions = getFoamDefinitions(note.definitions, note.source.end);
|
||||
},
|
||||
};
|
||||
|
||||
const handleError = (
|
||||
plugin: ParserPlugin,
|
||||
fnName: string,
|
||||
uri: URI | undefined,
|
||||
e: Error
|
||||
): void => {
|
||||
const name = plugin.name || '';
|
||||
Logger.warn(
|
||||
`Error while executing [${fnName}] in plugin [${name}]${
uri ? ` for file [${URI.toString(uri)}]` : ''
}.`,
|
||||
e
|
||||
);
|
||||
};
|
||||
|
||||
export function createMarkdownParser(
|
||||
extraPlugins: ParserPlugin[]
|
||||
): ResourceParser {
|
||||
const parser = unified()
|
||||
.use(markdownParse, { gfm: true })
|
||||
.use(frontmatterPlugin, ['yaml'])
|
||||
.use(wikiLinkPlugin, { aliasDivider: ALIAS_DIVIDER_CHAR });
|
||||
|
||||
const plugins = [
|
||||
titlePlugin,
|
||||
wikilinkPlugin,
|
||||
definitionsPlugin,
|
||||
tagsPlugin,
|
||||
...extraPlugins,
|
||||
];
|
||||
|
||||
plugins.forEach(plugin => {
|
||||
try {
|
||||
plugin.onDidInitializeParser?.(parser);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onDidInitializeParser', undefined, e);
|
||||
}
|
||||
});
|
||||
|
||||
const foamParser: ResourceParser = {
|
||||
parse: (uri: URI, markdown: string): Resource => {
|
||||
Logger.debug('Parsing:', URI.toString(uri));
|
||||
markdown = plugins.reduce((acc, plugin) => {
|
||||
try {
|
||||
return plugin.onWillParseMarkdown?.(acc) || acc;
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onWillParseMarkdown', uri, e);
|
||||
return acc;
|
||||
}
|
||||
}, markdown);
|
||||
const tree = parser.parse(markdown);
|
||||
const eol = detectNewline(markdown) || os.EOL;
|
||||
|
||||
var note: Resource = {
|
||||
uri: uri,
|
||||
type: 'note',
|
||||
properties: {},
|
||||
title: '',
|
||||
tags: [],
|
||||
links: [],
|
||||
definitions: [],
|
||||
source: {
|
||||
text: markdown,
|
||||
contentStart: astPointToFoamPosition(tree.position!.start),
|
||||
end: astPointToFoamPosition(tree.position!.end),
|
||||
eol: eol,
|
||||
},
|
||||
};
|
||||
|
||||
plugins.forEach(plugin => {
|
||||
try {
|
||||
plugin.onWillVisitTree?.(tree, note);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onWillVisitTree', uri, e);
|
||||
}
|
||||
});
|
||||
visit(tree, node => {
|
||||
if (node.type === 'yaml') {
|
||||
try {
|
||||
const yamlProperties = parseYAML((node as any).value) ?? {};
|
||||
note.properties = {
|
||||
...note.properties,
|
||||
...yamlProperties,
|
||||
};
|
||||
// Update the start position of the note by excluding the metadata
|
||||
note.source.contentStart = Position.create(
|
||||
node.position!.end.line! + 2,
|
||||
0
|
||||
);
|
||||
|
||||
for (let i = 0, len = plugins.length; i < len; i++) {
|
||||
try {
|
||||
plugins[i].onDidFindProperties?.(yamlProperties, note, node);
|
||||
} catch (e) {
|
||||
handleError(plugins[i], 'onDidFindProperties', uri, e);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
Logger.warn(
|
||||
`Error while parsing YAML for [${URI.toString(uri)}]`,
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
for (let i = 0, len = plugins.length; i < len; i++) {
|
||||
try {
|
||||
plugins[i].visit?.(node, note, markdown);
|
||||
} catch (e) {
|
||||
handleError(plugins[i], 'visit', uri, e);
|
||||
}
|
||||
}
|
||||
});
|
||||
plugins.forEach(plugin => {
|
||||
try {
|
||||
plugin.onDidVisitTree?.(tree, note);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onDidVisitTree', uri, e);
|
||||
}
|
||||
});
|
||||
Logger.debug('Result:', note);
|
||||
return note;
|
||||
},
|
||||
};
|
||||
return foamParser;
|
||||
}
|
||||
|
||||
function getFoamDefinitions(
|
||||
defs: NoteLinkDefinition[],
|
||||
fileEndPoint: Position
|
||||
): NoteLinkDefinition[] {
|
||||
let previousLine = fileEndPoint.line;
|
||||
let foamDefinitions = [];
|
||||
|
||||
// walk through each definition in reverse order
|
||||
// (last one first)
|
||||
for (const def of defs.reverse()) {
|
||||
// if this definition is more than 2 lines above the
|
||||
// previous one below it (or file end), that means we
|
||||
// have exited the trailing definition block, and should bail
|
||||
const start = def.range!.start.line;
|
||||
if (start < previousLine - 2) {
|
||||
break;
|
||||
}
|
||||
|
||||
foamDefinitions.unshift(def);
|
||||
previousLine = def.range!.end.line;
|
||||
}
|
||||
|
||||
return foamDefinitions;
|
||||
}
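// Illustrative sketch (not part of the original file): only the trailing block
// of definitions is kept. Given example definitions on (0-indexed) lines 3, 20
// and 21 of a note whose content ends at line 21:
//   getFoamDefinitions(allDefs, { line: 21, character: 0 })
// keeps the definitions on lines 20 and 21 (each within 2 lines of the one
// below it, or of the file end) and drops the one on line 3, because the walk
// in reverse order bails out as soon as the gap exceeds 2 lines.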
|
||||
|
||||
export function stringifyMarkdownLinkReferenceDefinition(
|
||||
definition: NoteLinkDefinition
|
||||
) {
|
||||
let url =
|
||||
definition.url.indexOf(' ') > 0 ? `<${definition.url}>` : definition.url;
|
||||
let text = `[${definition.label}]: ${url}`;
|
||||
if (definition.title) {
|
||||
text = `${text} "${definition.title}"`;
|
||||
}
|
||||
|
||||
return text;
|
||||
}
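// Illustrative sketch (not part of the original file): expected output shapes.
// stringifyMarkdownLinkReferenceDefinition({ label: 'page-b', url: 'page-b.md' })
//   => '[page-b]: page-b.md'
// stringifyMarkdownLinkReferenceDefinition({ label: 'page c', url: 'dir one/page-c.md', title: 'Page C' })
//   => '[page c]: <dir one/page-c.md> "Page C"'   (URLs containing spaces are wrapped in <>)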
|
||||
export function createMarkdownReferences(
|
||||
workspace: FoamWorkspace,
|
||||
noteUri: URI,
|
||||
includeExtension: boolean
|
||||
): NoteLinkDefinition[] {
|
||||
const source = workspace.find(noteUri);
|
||||
// Should never occur since we're already in a file.
|
||||
if (source?.type !== 'note') {
|
||||
console.warn(
|
||||
`Note ${URI.toString(
|
||||
noteUri
|
||||
)} not found in workspace when attempting to generate markdown reference list`
|
||||
);
|
||||
return [];
|
||||
}
|
||||
|
||||
return source.links
|
||||
.filter(isWikilink)
|
||||
.map(link => {
|
||||
const targetUri = workspace.resolveLink(source, link);
|
||||
const target = workspace.find(targetUri);
|
||||
if (isNone(target)) {
|
||||
Logger.warn(
|
||||
`Link ${URI.toString(targetUri)} in ${URI.toString(
|
||||
noteUri
|
||||
)} is not valid.`
|
||||
);
|
||||
return null;
|
||||
}
|
||||
if (target.type === 'placeholder') {
|
||||
// no need to create definitions for placeholders
|
||||
return null;
|
||||
}
|
||||
|
||||
const relativePath = URI.relativePath(noteUri, target.uri);
|
||||
const pathToNote = includeExtension
|
||||
? relativePath
|
||||
: dropExtension(relativePath);
|
||||
|
||||
// [wikilink-text]: path/to/file.md "Page title"
|
||||
return {
|
||||
label:
|
||||
link.rawText.indexOf('[[') > -1
|
||||
? link.rawText.substring(2, link.rawText.length - 2)
|
||||
: link.rawText || link.label,
|
||||
url: pathToNote,
|
||||
title: target.title,
|
||||
};
|
||||
})
|
||||
.filter(isSome)
|
||||
.sort();
|
||||
}
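// Illustrative sketch (not part of the original file), consistent with the
// "wikilinks definitions" tests earlier on this page: for a note at
// /dir1/page-a.md containing the wikilink [[page-b]], where page-b lives at
// /dir2/page-b.md and (hypothetically) has title "Page B", the generated
// definition is roughly:
//   { label: 'page-b', url: '../dir2/page-b.md', title: 'Page B' }
// with includeExtension = true; without it the url is '../dir2/page-b'.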
|
||||
|
||||
/**
|
||||
* Converts the 1-indexed ast Point object into the 0-indexed Foam Position object
|
||||
* @param point ast Point (1-indexed)
|
||||
* @returns Foam Position (0-indexed)
|
||||
*/
|
||||
const astPointToFoamPosition = (point: Point): Position => {
|
||||
return Position.create(point.line - 1, point.column - 1);
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts the 1-indexed ast Position object into the 0-indexed Foam Range object
|
||||
* @param position an ast Position object (1-indexed)
|
||||
* @returns Foam Range (0-indexed)
|
||||
*/
|
||||
const astPositionToFoamRange = (pos: AstPosition): Range =>
|
||||
Range.create(
|
||||
pos.start.line - 1,
|
||||
pos.start.column - 1,
|
||||
pos.end.line - 1,
|
||||
pos.end.column - 1
|
||||
);
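// Illustrative sketch (not part of the original file): remark reports the
// heading `# Title` on the first line of a file as
//   { start: { line: 1, column: 1 }, end: { line: 1, column: 8 } }
// which the two helpers above convert to the 0-indexed
//   Position.create(0, 0) and Range.create(0, 0, 0, 7)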
|
||||
|
||||
const isWikilink = (link: ResourceLink): link is WikiLink => {
|
||||
return link.type === 'wikilink';
|
||||
};
|
||||
@@ -4,8 +4,9 @@ import { FoamWorkspace } from './workspace';
|
||||
import { FoamGraph } from './graph';
|
||||
import { ResourceParser } from './note';
|
||||
import { ResourceProvider } from './provider';
|
||||
import { createMarkdownParser } from '../markdown-provider';
|
||||
import { createMarkdownParser } from '../services/markdown-parser';
|
||||
import { FoamTags } from './tags';
|
||||
import { Logger } from '../utils/log';
|
||||
|
||||
export interface Services {
|
||||
dataStore: IDataStore;
|
||||
@@ -27,10 +28,19 @@ export const bootstrap = async (
|
||||
) => {
|
||||
const parser = createMarkdownParser([]);
|
||||
const workspace = new FoamWorkspace();
|
||||
const tsStart = Date.now();
|
||||
|
||||
await Promise.all(initialProviders.map(p => workspace.registerProvider(p)));
|
||||
const tsWsDone = Date.now();
|
||||
Logger.info(`Workspace loaded in ${tsWsDone - tsStart}ms`);
|
||||
|
||||
const graph = FoamGraph.fromWorkspace(workspace, true);
|
||||
const tsGraphDone = Date.now();
|
||||
Logger.info(`Graph loaded in ${tsGraphDone - tsWsDone}ms`);
|
||||
|
||||
const tags = FoamTags.fromWorkspace(workspace, true);
|
||||
const tsTagsEnd = Date.now();
|
||||
Logger.info(`Tags loaded in ${tsTagsEnd - tsGraphDone}ms`);
|
||||
|
||||
const foam: Foam = {
|
||||
workspace,
|
||||
|
||||
681 packages/foam-vscode/src/core/model/graph.test.ts Normal file
@@ -0,0 +1,681 @@
|
||||
import { createTestNote, createTestWorkspace } from '../../test/test-utils';
|
||||
import { FoamGraph } from './graph';
|
||||
import { URI } from './uri';
|
||||
|
||||
describe('Graph', () => {
|
||||
it('should use wikilink slugs to connect nodes', () => {
|
||||
const workspace = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/page-a.md',
|
||||
links: [
|
||||
{ slug: 'page-b' },
|
||||
{ slug: 'page-c' },
|
||||
{ slug: 'Page D' },
|
||||
{ slug: 'page e' },
|
||||
],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/page-b.md',
|
||||
links: [{ slug: 'page-a' }],
|
||||
});
|
||||
const noteC = createTestNote({ uri: '/page-c.md' });
|
||||
const noteD = createTestNote({ uri: '/Page D.md' });
|
||||
const noteE = createTestNote({ uri: '/page e.md' });
|
||||
|
||||
workspace
|
||||
.set(noteA)
|
||||
.set(noteB)
|
||||
.set(noteC)
|
||||
.set(noteD)
|
||||
.set(noteE);
|
||||
const graph = FoamGraph.fromWorkspace(workspace);
|
||||
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
noteB.uri,
|
||||
noteC.uri,
|
||||
noteD.uri,
|
||||
noteE.uri,
|
||||
]);
|
||||
});
|
||||
|
||||
it('should include resources and placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(
|
||||
createTestNote({
|
||||
uri: '/page-a.md',
|
||||
links: [{ slug: 'placeholder-link' }],
|
||||
})
|
||||
);
|
||||
ws.set(createTestNote({ uri: '/file.pdf' }));
|
||||
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getAllNodes()
|
||||
.map(uri => uri.path)
|
||||
.sort()
|
||||
).toEqual(['/file.pdf', '/page-a.md', 'placeholder-link']);
|
||||
});
|
||||
|
||||
it('should support multiple connections between the same resources', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/note-a.md',
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/note-b.md',
|
||||
links: [{ to: noteA.uri.path }, { to: noteA.uri.path }],
|
||||
});
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
expect(graph.getBacklinks(noteA.uri)).toEqual([
|
||||
{
|
||||
source: noteB.uri,
|
||||
target: noteA.uri,
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
},
|
||||
{
|
||||
source: noteB.uri,
|
||||
target: noteA.uri,
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('should keep the connection when removing a single link amongst several between two resources', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/note-a.md',
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/note-b.md',
|
||||
links: [{ to: noteA.uri.path }, { to: noteA.uri.path }],
|
||||
});
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getBacklinks(noteA.uri).length).toEqual(2);
|
||||
|
||||
const noteBBis = createTestNote({
|
||||
uri: '/note-b.md',
|
||||
links: [{ to: noteA.uri.path }],
|
||||
});
|
||||
ws.set(noteBBis);
|
||||
expect(graph.getBacklinks(noteA.uri).length).toEqual(1);
|
||||
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('should create inbound connections for target note', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(
|
||||
createTestNote({
|
||||
uri: '/somewhere/page-b.md',
|
||||
links: [{ slug: 'page-a' }],
|
||||
})
|
||||
)
|
||||
.set(
|
||||
createTestNote({
|
||||
uri: '/path/another/page-c.md',
|
||||
links: [{ slug: '/path/to/page-a' }],
|
||||
})
|
||||
)
|
||||
.set(
|
||||
createTestNote({
|
||||
uri: '/absolute/path/page-d.md',
|
||||
links: [{ slug: '../to/page-a.md' }],
|
||||
})
|
||||
);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getBacklinks(noteA.uri)
|
||||
.map(link => link.source.path)
|
||||
.sort()
|
||||
).toEqual(['/path/another/page-c.md', '/somewhere/page-b.md']);
|
||||
});
|
||||
|
||||
it('should support attachments', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [
|
||||
// wikilink with extension
|
||||
{ slug: 'attachment-a.pdf' },
|
||||
// wikilink without extension
|
||||
{ slug: 'attachment-b' },
|
||||
],
|
||||
});
|
||||
const attachmentA = createTestNote({
|
||||
uri: '/path/to/more/attachment-a.pdf',
|
||||
});
|
||||
const attachmentB = createTestNote({
|
||||
uri: '/path/to/more/attachment-b.pdf',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(attachmentA)
|
||||
.set(attachmentB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getBacklinks(attachmentA.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
// Attachments require extension
|
||||
expect(graph.getBacklinks(attachmentB.uri).map(l => l.source)).toEqual([]);
|
||||
});
|
||||
|
||||
it('should resolve conflicts alphabetically - part 1', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'attachment-a.pdf' }],
|
||||
});
|
||||
const attachmentA = createTestNote({
|
||||
uri: '/path/to/more/attachment-a.pdf',
|
||||
});
|
||||
const attachmentABis = createTestNote({
|
||||
uri: '/path/to/attachment-a.pdf',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(attachmentA)
|
||||
.set(attachmentABis);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
attachmentABis.uri,
|
||||
]);
|
||||
});
|
||||
|
||||
it('should resolve conflicts alphabetically - part 2', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'attachment-a.pdf' }],
|
||||
});
|
||||
const attachmentA = createTestNote({
|
||||
uri: '/path/to/more/attachment-a.pdf',
|
||||
});
|
||||
const attachmentABis = createTestNote({
|
||||
uri: '/path/to/attachment-a.pdf',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(attachmentABis)
|
||||
.set(attachmentA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
attachmentABis.uri,
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Placeholders', () => {
|
||||
it('should treat direct links to non-existing files as placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/somewhere/from/page-a.md',
|
||||
links: [{ to: '../page-b.md' }, { to: '/path/to/page-c.md' }],
|
||||
});
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getAllConnections()[0]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/somewhere/page-b.md'),
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
});
|
||||
expect(graph.getAllConnections()[1]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/path/to/page-c.md'),
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('should treat wikilinks without matching file as placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/somewhere/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getAllConnections()[0]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('page-b'),
|
||||
link: expect.objectContaining({ type: 'wikilink' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('should treat wikilink with definition to non-existing file as placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/somewhere/page-a.md',
|
||||
links: [{ slug: 'page-b' }, { slug: 'page-c' }],
|
||||
});
|
||||
noteA.definitions.push({
|
||||
label: 'page-b',
|
||||
url: './page-b.md',
|
||||
});
|
||||
noteA.definitions.push({
|
||||
label: 'page-c',
|
||||
url: '/path/to/page-c.md',
|
||||
});
|
||||
ws.set(noteA).set(
|
||||
createTestNote({ uri: '/different/location/for/note-b.md' })
|
||||
);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getAllConnections()[0]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/somewhere/page-b.md'),
|
||||
link: expect.objectContaining({ type: 'wikilink' }),
|
||||
});
|
||||
expect(graph.getAllConnections()[1]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/path/to/page-c.md'),
|
||||
link: expect.objectContaining({ type: 'wikilink' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('should work with a placeholder named like a JS prototype property', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/page-a.md',
|
||||
links: [{ slug: 'constructor' }],
|
||||
});
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getAllNodes()
|
||||
.map(uri => uri.path)
|
||||
.sort()
|
||||
).toEqual(['/page-a.md', 'constructor']);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Regenerating graph after workspace changes', () => {
|
||||
it('should update links when modifying a resource', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
const noteC = createTestNote({
|
||||
uri: '/path/to/more/page-c.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(noteB)
|
||||
.set(noteC);
|
||||
let graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getBacklinks(noteC.uri).map(l => l.source)).toEqual([
|
||||
noteB.uri,
|
||||
]);
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
// change is not propagated immediately
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getBacklinks(noteC.uri).map(l => l.source)).toEqual([
|
||||
noteB.uri,
|
||||
]);
|
||||
|
||||
// recompute the links
|
||||
graph = FoamGraph.fromWorkspace(ws);
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteC.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([]);
|
||||
expect(
|
||||
graph
|
||||
.getBacklinks(noteC.uri)
|
||||
.map(link => link.source.path)
|
||||
.sort()
|
||||
).toEqual(['/path/to/another/page-b.md', '/path/to/page-a.md']);
|
||||
graph.dispose();
|
||||
ws.dispose();
|
||||
});
|
||||
|
||||
it('should produce a placeholder for wikilinks pointing to a removed resource', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
const graph2 = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
expect(graph2.contains(URI.placeholder('page-b'))).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should turn a placeholder into a connection when adding a resource matching a wikilink', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('page-b'),
|
||||
]);
|
||||
expect(graph.contains(URI.placeholder('page-b'))).toBeTruthy();
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
});
|
||||
|
||||
it('should produce a placeholder for direct links pointing to a removed resource', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
const graph2 = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
expect(
|
||||
graph2.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should turn a placeholder into a connection when adding a resource matching a direct link', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('/path/to/another/page-b.md'),
|
||||
]);
|
||||
expect(() =>
|
||||
ws.get(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toThrow();
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
});
|
||||
|
||||
it('should remove the placeholder from graph when removing all links to it', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace().set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
const graph2 = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph2.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Updating graph on workspace state', () => {
|
||||
it('should automatically update the links when modifying a resource', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
const noteC = createTestNote({
|
||||
uri: '/path/to/more/page-c.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(noteB)
|
||||
.set(noteC);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getBacklinks(noteC.uri).map(l => l.source)).toEqual([
|
||||
noteB.uri,
|
||||
]);
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteC.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([]);
|
||||
expect(
|
||||
graph
|
||||
.getBacklinks(noteC.uri)
|
||||
.map(link => link.source.path)
|
||||
.sort()
|
||||
).toEqual(['/path/to/another/page-b.md', '/path/to/page-a.md']);
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('should produce a placeholder for wikilinks pointing to a removed resource', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('should turn a placeholder into a connection when adding a resource matching a wikilink', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('page-b'),
|
||||
]);
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('should produce a placeholder for direct links pointing to a removed resource', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('should turn a placeholder into a connection when adding a resource matching a direct link', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('/path/to/another/page-b.md'),
|
||||
]);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('should remove the placeholder from graph when removing all links to it', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeFalsy();
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
});
|
||||
@@ -1,10 +1,10 @@
|
||||
import { diff } from 'fast-array-diff';
|
||||
import { isEqual } from 'lodash';
|
||||
import { Resource, ResourceLink } from './note';
|
||||
import { debounce } from 'lodash';
|
||||
import { ResourceLink } from './note';
|
||||
import { URI } from './uri';
|
||||
import { FoamWorkspace } from './workspace';
|
||||
import { Range } from './range';
|
||||
import { IDisposable } from '../common/lifecycle';
|
||||
import { Logger } from '../utils/log';
|
||||
import { Emitter } from '../common/event';
|
||||
|
||||
export type Connection = {
|
||||
source: URI;
|
||||
@@ -29,6 +29,9 @@ export class FoamGraph implements IDisposable {
|
||||
*/
|
||||
public readonly backlinks: Map<string, Connection[]> = new Map();
|
||||
|
||||
private onDidUpdateEmitter = new Emitter<void>();
|
||||
onDidUpdate = this.onDidUpdateEmitter.event;
|
||||
|
||||
/**
|
||||
* List of disposables to destroy with the workspace
|
||||
*/
|
||||
@@ -72,88 +75,56 @@ export class FoamGraph implements IDisposable {
|
||||
*
|
||||
* @param workspace the target workspace
|
||||
* @param keepMonitoring whether to recompute the links when the workspace changes
|
||||
* @param debounceFor how long to wait between change detection and graph update
|
||||
* @returns the FoamGraph
|
||||
*/
|
||||
public static fromWorkspace(
|
||||
workspace: FoamWorkspace,
|
||||
keepMonitoring: boolean = false
|
||||
keepMonitoring = false,
|
||||
debounceFor = 0
|
||||
): FoamGraph {
|
||||
let graph = new FoamGraph(workspace);
|
||||
|
||||
workspace.list().forEach(resource => graph.resolveResource(resource));
|
||||
const graph = new FoamGraph(workspace);
|
||||
graph.update();
|
||||
if (keepMonitoring) {
|
||||
const updateGraph =
|
||||
debounceFor > 0
|
||||
? debounce(graph.update.bind(graph), 500)
|
||||
: graph.update.bind(graph);
|
||||
graph.disposables.push(
|
||||
workspace.onDidAdd(resource => {
|
||||
graph.updateLinksRelatedToAddedResource(resource);
|
||||
}),
|
||||
workspace.onDidUpdate(change => {
|
||||
graph.updateLinksForResource(change.old, change.new);
|
||||
}),
|
||||
workspace.onDidDelete(resource => {
|
||||
graph.updateLinksRelatedToDeletedResource(resource);
|
||||
})
|
||||
workspace.onDidAdd(updateGraph),
|
||||
workspace.onDidUpdate(updateGraph),
|
||||
workspace.onDidDelete(updateGraph)
|
||||
);
|
||||
}
|
||||
return graph;
|
||||
}
|
||||
|
||||
private updateLinksRelatedToAddedResource(resource: Resource) {
|
||||
// check if any existing connection can be filled by new resource
|
||||
let resourcesToUpdate: Resource[] = [];
|
||||
for (const placeholderId of this.placeholders.keys()) {
|
||||
// quick and dirty check for affected resources
|
||||
if (resource.uri.path.endsWith(placeholderId + '.md')) {
|
||||
resourcesToUpdate.push(resource);
|
||||
private update() {
|
||||
const start = Date.now();
|
||||
this.backlinks.clear();
|
||||
this.links.clear();
|
||||
this.placeholders.clear();
|
||||
|
||||
for (const resource of this.workspace.resources()) {
|
||||
for (const link of resource.links) {
|
||||
try {
|
||||
const targetUri = this.workspace.resolveLink(resource, link);
|
||||
this.connect(resource.uri, targetUri, link);
|
||||
} catch (e) {
|
||||
Logger.error(
|
||||
`Error while resolving link ${
|
||||
link.rawText
|
||||
} in ${resource.uri.toFsPath()}, skipping.`,
|
||||
link,
|
||||
e
|
||||
);
|
||||
}
|
||||
}
|
||||
}
|
||||
resourcesToUpdate.forEach(res =>
|
||||
this.resolveResource(this.workspace.get(res.uri))
|
||||
);
|
||||
// resolve the resource
|
||||
this.resolveResource(resource);
|
||||
}
|
||||
|
||||
private updateLinksForResource(oldResource: Resource, newResource: Resource) {
|
||||
if (oldResource.uri.path !== newResource.uri.path) {
|
||||
throw new Error(
|
||||
'Unexpected State: update should only be called on same resource ' +
|
||||
{
|
||||
old: oldResource,
|
||||
new: newResource,
|
||||
}
|
||||
);
|
||||
}
|
||||
if (oldResource.type === 'note' && newResource.type === 'note') {
|
||||
const patch = diff(oldResource.links, newResource.links, isEqual);
|
||||
patch.removed.forEach(link => {
|
||||
const target = this.workspace.resolveLink(oldResource, link);
|
||||
return this.disconnect(oldResource.uri, target, link);
|
||||
}, this);
|
||||
patch.added.forEach(link => {
|
||||
const target = this.workspace.resolveLink(newResource, link);
|
||||
return this.connect(newResource.uri, target, link);
|
||||
}, this);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
private updateLinksRelatedToDeletedResource(resource: Resource) {
|
||||
const uri = resource.uri;
|
||||
|
||||
// remove forward links from old resource
|
||||
const resourcesPointedByDeletedNote = this.links.get(uri.path) ?? [];
|
||||
this.links.delete(uri.path);
|
||||
resourcesPointedByDeletedNote.forEach(connection =>
|
||||
this.disconnect(uri, connection.target, connection.link)
|
||||
);
|
||||
|
||||
// recompute previous links to old resource
|
||||
const notesPointingToDeletedResource = this.backlinks.get(uri.path) ?? [];
|
||||
this.backlinks.delete(uri.path);
|
||||
notesPointingToDeletedResource.forEach(link =>
|
||||
this.resolveResource(this.workspace.get(link.source))
|
||||
);
|
||||
return this;
|
||||
const end = Date.now();
|
||||
Logger.info(`Graph updated in ${end - start}ms`);
|
||||
this.onDidUpdateEmitter.fire();
|
||||
}
|
||||
|
||||
private connect(source: URI, target: URI, link: ResourceLink) {
|
||||
@@ -164,77 +135,20 @@ export class FoamGraph implements IDisposable {
|
||||
}
|
||||
this.links.get(source.path)?.push(connection);
|
||||
|
||||
if (!this.backlinks.get(target.path)) {
|
||||
if (!this.backlinks.has(target.path)) {
|
||||
this.backlinks.set(target.path, []);
|
||||
}
|
||||
|
||||
this.backlinks.get(target.path)?.push(connection);
|
||||
|
||||
if (URI.isPlaceholder(target)) {
|
||||
if (target.isPlaceholder()) {
|
||||
this.placeholders.set(uriToPlaceholderId(target), target);
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
/**
|
||||
* Removes a connection, or all connections, between the source and
|
||||
* target resources
|
||||
*
|
||||
* @param workspace the Foam workspace
|
||||
* @param source the source resource
|
||||
* @param target the target resource
|
||||
* @param link the link reference, or `true` to remove all links
|
||||
* @returns the updated Foam workspace
|
||||
*/
|
||||
private disconnect(source: URI, target: URI, link: ResourceLink | true) {
|
||||
const connectionsToKeep =
|
||||
link === true
|
||||
? (c: Connection) =>
|
||||
!URI.isEqual(source, c.source) || !URI.isEqual(target, c.target)
|
||||
: (c: Connection) => !isSameConnection({ source, target, link }, c);
|
||||
|
||||
this.links.set(
|
||||
source.path,
|
||||
this.links.get(source.path)?.filter(connectionsToKeep) ?? []
|
||||
);
|
||||
if (this.links.get(source.path)?.length === 0) {
|
||||
this.links.delete(source.path);
|
||||
}
|
||||
this.backlinks.set(
|
||||
target.path,
|
||||
this.backlinks.get(target.path)?.filter(connectionsToKeep) ?? []
|
||||
);
|
||||
if (this.backlinks.get(target.path)?.length === 0) {
|
||||
this.backlinks.delete(target.path);
|
||||
if (URI.isPlaceholder(target)) {
|
||||
this.placeholders.delete(uriToPlaceholderId(target));
|
||||
}
|
||||
}
|
||||
return this;
|
||||
}
|
||||
|
||||
public resolveResource(resource: Resource) {
|
||||
this.links.delete(resource.uri.path);
|
||||
// prettier-ignore
|
||||
resource.links.forEach(link => {
|
||||
const targetUri = this.workspace.resolveLink(resource, link);
|
||||
this.connect(resource.uri, targetUri, link);
|
||||
});
|
||||
return this;
|
||||
}
|
||||
|
||||
public dispose(): void {
|
||||
this.onDidUpdateEmitter.dispose();
|
||||
this.disposables.forEach(d => d.dispose());
|
||||
this.disposables = [];
|
||||
}
|
||||
}
|
||||
|
||||
// TODO move these utility fns to appropriate places
|
||||
|
||||
const isSameConnection = (a: Connection, b: Connection) =>
|
||||
URI.isEqual(a.source, b.source) &&
|
||||
URI.isEqual(a.target, b.target) &&
|
||||
isSameLink(a.link, b.link);
|
||||
|
||||
const isSameLink = (a: ResourceLink, b: ResourceLink) =>
|
||||
a.type === b.type && Range.isEqual(a.range, b.range);
|
||||
|
||||
@@ -9,23 +9,12 @@ export interface NoteSource {
|
||||
eol: string;
|
||||
}
|
||||
|
||||
export interface WikiLink {
|
||||
type: 'wikilink';
|
||||
target: string;
|
||||
label: string;
|
||||
export interface ResourceLink {
|
||||
type: 'wikilink' | 'link';
|
||||
rawText: string;
|
||||
range: Range;
|
||||
}
|
||||
|
||||
export interface DirectLink {
|
||||
type: 'link';
|
||||
label: string;
|
||||
target: string;
|
||||
range: Range;
|
||||
}
|
||||
|
||||
export type ResourceLink = WikiLink | DirectLink;
|
||||
|
||||
export interface NoteLinkDefinition {
|
||||
label: string;
|
||||
url: string;
|
||||
@@ -38,13 +27,24 @@ export interface Tag {
|
||||
range: Range;
|
||||
}
|
||||
|
||||
export interface Alias {
|
||||
title: string;
|
||||
range: Range;
|
||||
}
|
||||
|
||||
export interface Section {
|
||||
label: string;
|
||||
range: Range;
|
||||
}
|
||||
|
||||
export interface Resource {
|
||||
uri: URI;
|
||||
type: string;
|
||||
title: string;
|
||||
properties: any;
|
||||
// sections: NoteSection[]
|
||||
sections: Section[];
|
||||
tags: Tag[];
|
||||
aliases: Alias[];
|
||||
links: ResourceLink[];
|
||||
|
||||
// TODO to remove
|
||||
@@ -66,12 +66,20 @@ export abstract class Resource {
|
||||
return false;
|
||||
}
|
||||
return (
|
||||
URI.isUri((thing as Resource).uri) &&
|
||||
(thing as Resource).uri instanceof URI &&
|
||||
typeof (thing as Resource).title === 'string' &&
|
||||
typeof (thing as Resource).type === 'string' &&
|
||||
typeof (thing as Resource).properties === 'object' &&
|
||||
typeof (thing as Resource).tags === 'object' &&
|
||||
typeof (thing as Resource).aliases === 'object' &&
|
||||
typeof (thing as Resource).links === 'object'
|
||||
);
|
||||
}
|
||||
|
||||
public static findSection(resource: Resource, label: string): Section | null {
|
||||
if (label) {
|
||||
return resource.sections.find(s => s.label === label) ?? null;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
@@ -59,7 +59,8 @@ describe('FoamTags', () => {
|
||||
tags: ['primary'],
|
||||
});
|
||||
|
||||
tags.updateResourceWithinTagIndex(taglessPage, newPage);
|
||||
ws.set(newPage);
|
||||
tags.update();
|
||||
|
||||
expect(tags.tags).toEqual(new Map([['primary', [page.uri, newPage.uri]]]));
|
||||
});
|
||||
@@ -86,7 +87,8 @@ describe('FoamTags', () => {
|
||||
tags: ['new'],
|
||||
});
|
||||
|
||||
tags.updateResourceWithinTagIndex(page, pageEdited);
|
||||
ws.set(pageEdited);
|
||||
tags.update();
|
||||
|
||||
expect(tags.tags).toEqual(new Map([['new', [page.uri]]]));
|
||||
});
|
||||
@@ -112,12 +114,14 @@ describe('FoamTags', () => {
|
||||
tags: ['primary'],
|
||||
});
|
||||
|
||||
tags.updateResourceWithinTagIndex(page, pageEdited);
|
||||
ws.delete(page.uri);
|
||||
ws.set(pageEdited);
|
||||
tags.update();
|
||||
|
||||
expect(tags.tags).toEqual(new Map([['primary', [pageEdited.uri]]]));
|
||||
});
|
||||
|
||||
it('Updates the metadata of a tag when a note is delete', () => {
|
||||
it('Updates the metadata of a tag when a note is deleted', () => {
|
||||
const ws = createTestWorkspace();
|
||||
|
||||
const page = createTestNote({
|
||||
@@ -131,7 +135,8 @@ describe('FoamTags', () => {
|
||||
const tags = FoamTags.fromWorkspace(ws);
|
||||
expect(tags.tags).toEqual(new Map([['primary', [page.uri]]]));
|
||||
|
||||
tags.removeResourceFromTagIndex(page);
|
||||
ws.delete(page.uri);
|
||||
tags.update();
|
||||
|
||||
expect(tags.tags).toEqual(new Map());
|
||||
});
|
||||
|
||||
@@ -1,81 +1,66 @@
|
||||
import { FoamWorkspace } from './workspace';
|
||||
import { URI } from './uri';
|
||||
import { Resource } from './note';
|
||||
import { IDisposable } from '../common/lifecycle';
|
||||
import { debounce } from 'lodash';
|
||||
import { Emitter } from '../common/event';
|
||||
|
||||
export class FoamTags implements IDisposable {
|
||||
public readonly tags: Map<string, URI[]> = new Map();
|
||||
|
||||
private onDidUpdateEmitter = new Emitter<void>();
|
||||
onDidUpdate = this.onDidUpdateEmitter.event;
|
||||
|
||||
/**
|
||||
* List of disposables to destroy with the tags
|
||||
*/
|
||||
private disposables: IDisposable[] = [];
|
||||
|
||||
constructor(private readonly workspace: FoamWorkspace) {}
|
||||
|
||||
/**
|
||||
* Computes all tags in the workspace and keeps them up-to-date
|
||||
*
|
||||
* @param workspace the target workspace
|
||||
* @param keepMonitoring whether to recompute the links when the workspace changes
|
||||
* @param debounceFor how long to wait between change detection and tags update
|
||||
* @returns the FoamTags
|
||||
*/
|
||||
public static fromWorkspace(
|
||||
workspace: FoamWorkspace,
|
||||
keepMonitoring: boolean = false
|
||||
keepMonitoring = false,
|
||||
debounceFor = 0
|
||||
): FoamTags {
|
||||
let tags = new FoamTags();
|
||||
|
||||
workspace
|
||||
.list()
|
||||
.forEach(resource => tags.addResourceFromTagIndex(resource));
|
||||
const tags = new FoamTags(workspace);
|
||||
tags.update();
|
||||
|
||||
if (keepMonitoring) {
|
||||
const updateTags =
|
||||
debounceFor > 0
|
||||
? debounce(tags.update.bind(tags), 500)
|
||||
: tags.update.bind(tags);
|
||||
tags.disposables.push(
|
||||
workspace.onDidAdd(resource => {
|
||||
tags.addResourceFromTagIndex(resource);
|
||||
}),
|
||||
workspace.onDidUpdate(change => {
|
||||
tags.updateResourceWithinTagIndex(change.old, change.new);
|
||||
}),
|
||||
workspace.onDidDelete(resource => {
|
||||
tags.removeResourceFromTagIndex(resource);
|
||||
})
|
||||
workspace.onDidAdd(updateTags),
|
||||
workspace.onDidUpdate(updateTags),
|
||||
workspace.onDidDelete(updateTags)
|
||||
);
|
||||
}
|
||||
return tags;
|
||||
}
|
||||
|
||||
update(): void {
this.tags.clear();
for (const resource of this.workspace.resources()) {
for (const tag of new Set(resource.tags.map(t => t.label))) {
const tagMeta = this.tags.get(tag) ?? [];
tagMeta.push(resource.uri);
this.tags.set(tag, tagMeta);
}
}
this.onDidUpdateEmitter.fire();
}

dispose(): void {
this.disposables.forEach(d => d.dispose());
this.disposables = [];
}

updateResourceWithinTagIndex(oldResource: Resource, newResource: Resource) {
this.removeResourceFromTagIndex(oldResource);
this.addResourceFromTagIndex(newResource);
}

addResourceFromTagIndex(resource: Resource) {
new Set(resource.tags.map(t => t.label)).forEach(tag => {
const tagMeta = this.tags.get(tag) ?? [];
tagMeta.push(resource.uri);
this.tags.set(tag, tagMeta);
});
}

removeResourceFromTagIndex(resource: Resource) {
resource.tags.forEach(t => {
const tag = t.label;
if (this.tags.has(tag)) {
const remainingLocations = this.tags
.get(tag)
?.filter(uri => !URI.isEqual(uri, resource.uri));

if (remainingLocations && remainingLocations.length > 0) {
this.tags.set(tag, remainingLocations);
} else {
this.tags.delete(tag);
}
}
});
}
}
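
The class above replaces incremental index maintenance with a full recompute: `update()` rebuilds the whole tag map and fires `onDidUpdate`, and `fromWorkspace` can subscribe to workspace events so the rebuild happens automatically. A minimal usage sketch, assuming the same import paths and `createTestNote`/`createTestWorkspace` helpers used by the tests in this diff:

```ts
import { FoamTags } from './tags';
import { createTestNote, createTestWorkspace } from '../../test/test-utils';

// A workspace with a couple of tagged notes.
const ws = createTestWorkspace()
  .set(createTestNote({ uri: '/inbox/idea.md', tags: ['primary'] }))
  .set(createTestNote({ uri: '/projects/car.md', tags: ['primary', 'todo'] }));

// keepMonitoring = true subscribes to onDidAdd/onDidUpdate/onDidDelete;
// debounceFor is meant to batch rapid changes into one update() (per the JSDoc above).
const tags = FoamTags.fromWorkspace(ws, true, 100);

tags.onDidUpdate(() => {
  console.log([...tags.tags.keys()]); // e.g. ['primary', 'todo']
});

// Release the event subscriptions when done.
tags.dispose();
ws.dispose();
```
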
@@ -1,5 +1,4 @@
|
||||
import { Logger } from '../utils/log';
|
||||
import { uriToSlug } from '../utils/slug';
|
||||
import { URI } from './uri';
|
||||
|
||||
Logger.setLevel('error');
|
||||
@@ -11,13 +10,13 @@ describe('Foam URI', () => {
|
||||
['https://www.google.com', URI.parse('https://www.google.com')],
|
||||
['/path/to/a/file.md', URI.file('/path/to/a/file.md')],
|
||||
['../relative/file.md', URI.file('/path/relative/file.md')],
|
||||
['#section', URI.create({ ...base, fragment: 'section' })],
|
||||
['#section', base.withFragment('section')],
|
||||
[
|
||||
'../relative/file.md#section',
|
||||
URI.parse('file:/path/relative/file.md#section'),
|
||||
],
|
||||
])('URI Parsing (%s)', (input, exp) => {
|
||||
const result = URI.resolve(input, base);
|
||||
const result = base.resolve(input);
|
||||
expect(result.scheme).toEqual(exp.scheme);
|
||||
expect(result.authority).toEqual(exp.authority);
|
||||
expect(result.path).toEqual(exp.path);
|
||||
@@ -30,8 +29,8 @@ describe('Foam URI', () => {
|
||||
const lowerCase = URI.parse('file:///c:/this/is/a/Path');
|
||||
expect(upperCase.path).toEqual('/C:/this/is/a/Path');
|
||||
expect(lowerCase.path).toEqual('/C:/this/is/a/Path');
|
||||
expect(URI.toFsPath(upperCase)).toEqual('C:\\this\\is\\a\\Path');
|
||||
expect(URI.toFsPath(lowerCase)).toEqual('C:\\this\\is\\a\\Path');
|
||||
expect(upperCase.toFsPath()).toEqual('C:\\this\\is\\a\\Path');
|
||||
expect(lowerCase.toFsPath()).toEqual('C:\\this\\is\\a\\Path');
|
||||
});
|
||||
|
||||
it('consistently parses file paths', () => {
|
||||
@@ -48,13 +47,13 @@ describe('Foam URI', () => {
|
||||
const winUri = URI.file('c:\\this\\is\\a\\path');
|
||||
const unixUri = URI.file('/this/is/a/path');
|
||||
expect(winUri).toEqual(
|
||||
URI.create({
|
||||
new URI({
|
||||
scheme: 'file',
|
||||
path: '/C:/this/is/a/path',
|
||||
})
|
||||
);
|
||||
expect(unixUri).toEqual(
|
||||
URI.create({
|
||||
new URI({
|
||||
scheme: 'file',
|
||||
path: '/this/is/a/path',
|
||||
})
|
||||
@@ -63,26 +62,22 @@ describe('Foam URI', () => {
|
||||
});
|
||||
|
||||
it('supports computing relative paths', () => {
|
||||
expect(
|
||||
URI.computeRelativeURI(URI.file('/my/file.md'), '../hello.md')
|
||||
).toEqual(URI.file('/hello.md'));
|
||||
expect(URI.computeRelativeURI(URI.file('/my/file.md'), '../hello')).toEqual(
|
||||
expect(URI.file('/my/file.md').resolve('../hello.md')).toEqual(
|
||||
URI.file('/hello.md')
|
||||
);
|
||||
expect(URI.file('/my/file.md').resolve('../hello')).toEqual(
|
||||
URI.file('/hello.md')
|
||||
);
|
||||
expect(URI.file('/my/file.markdown').resolve('../hello')).toEqual(
|
||||
URI.file('/hello.markdown')
|
||||
);
|
||||
expect(
|
||||
URI.computeRelativeURI(URI.file('/my/file.markdown'), '../hello')
|
||||
).toEqual(URI.file('/hello.markdown'));
|
||||
});
|
||||
|
||||
it('can be slugified', () => {
|
||||
expect(uriToSlug(URI.file('/this/is/a/path.md'))).toEqual('path');
|
||||
expect(uriToSlug(URI.file('../a/relative/path.md'))).toEqual('path');
|
||||
expect(uriToSlug(URI.file('another/relative/path.md'))).toEqual('path');
|
||||
expect(uriToSlug(URI.file('no-directory.markdown'))).toEqual(
|
||||
'no-directory'
|
||||
);
|
||||
expect(uriToSlug(URI.file('many.dots.name.markdown'))).toEqual(
|
||||
'manydotsname'
|
||||
);
|
||||
URI.file('/path/to/a/note.md').resolve('../another-note.md')
|
||||
).toEqual(URI.file('/path/to/another-note.md'));
|
||||
expect(
|
||||
URI.file('/path/to/a/note.md').relativeTo(
|
||||
URI.file('/path/to/another/note.md').getDirectory()
|
||||
)
|
||||
).toEqual(URI.file('../a/note.md'));
|
||||
});
|
||||
});
|
||||
|
||||
@@ -4,8 +4,8 @@
|
||||
// Some code in this file comes from https://github.com/microsoft/vscode/main/src/vs/base/common/uri.ts
|
||||
// See LICENSE for details
|
||||
|
||||
import * as paths from 'path';
|
||||
import { CharCode } from '../common/charCode';
|
||||
import * as pathUtils from '../utils/path';
|
||||
|
||||
/**
|
||||
* Uniform Resource Identifier (URI) http://tools.ietf.org/html/rfc3986.
|
||||
@@ -23,248 +23,141 @@ import { CharCode } from '../common/charCode';
|
||||
* urn:example:animal:ferret:nose
|
||||
* ```
|
||||
*/
|
||||
export interface URI {
|
||||
scheme: string;
|
||||
authority: string;
|
||||
path: string;
|
||||
query: string;
|
||||
fragment: string;
|
||||
}
|
||||
|
||||
const { posix } = paths;
|
||||
const _empty = '';
|
||||
const _slash = '/';
|
||||
const _regexp = /^(([^:/?#]{2,}?):)?(\/\/([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?/;
|
||||
|
||||
export abstract class URI {
|
||||
static create(from: Partial<URI>): URI {
|
||||
// When using this method we assume the path is already posix
|
||||
// so we don't check whether it's a Windows path, nor we do any
|
||||
// conversion
|
||||
return {
|
||||
scheme: from.scheme ?? _empty,
|
||||
authority: from.authority ?? _empty,
|
||||
path: from.path ?? _empty,
|
||||
query: from.query ?? _empty,
|
||||
fragment: from.fragment ?? _empty,
|
||||
};
|
||||
export class URI {
|
||||
readonly scheme: string;
|
||||
readonly authority: string;
|
||||
readonly path: string;
|
||||
readonly query: string;
|
||||
readonly fragment: string;
|
||||
|
||||
constructor(from: Partial<URI> = {}) {
|
||||
this.scheme = from.scheme ?? _empty;
|
||||
this.authority = from.authority ?? _empty;
|
||||
this.path = from.path ?? _empty; // We assume the path is already posix
|
||||
this.query = from.query ?? _empty;
|
||||
this.fragment = from.fragment ?? _empty;
|
||||
}
|
||||
|
||||
static parse(value: string): URI {
|
||||
const match = _regexp.exec(value);
|
||||
if (!match) {
|
||||
return URI.create({});
|
||||
return new URI();
|
||||
}
|
||||
let path = percentDecode(match[5] ?? _empty);
|
||||
if (URI.isWindowsPath(path)) {
|
||||
path = windowsPathToUriPath(path);
|
||||
}
|
||||
return URI.create({
|
||||
return new URI({
|
||||
scheme: match[2] || 'file',
|
||||
authority: percentDecode(match[4] ?? _empty),
|
||||
path: path,
|
||||
path: pathUtils.fromFsPath(percentDecode(match[5] ?? _empty))[0],
|
||||
query: percentDecode(match[7] ?? _empty),
|
||||
fragment: percentDecode(match[9] ?? _empty),
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Parses a URI from value, taking into consideration possible relative paths.
|
||||
*
|
||||
* @param reference the URI to use as reference in case value is a relative path
|
||||
* @param value the value to parse for a URI
|
||||
* @returns the URI from the given value. In case of a relative path, the URI will take into account
|
||||
* the reference from which it is computed
|
||||
*/
|
||||
static resolve(value: string, reference: URI): URI {
|
||||
let uri = URI.parse(value);
|
||||
if (uri.scheme === 'file' && !value.startsWith('/')) {
|
||||
const [path, fragment] = value.split('#');
|
||||
uri =
|
||||
path.length > 0 ? URI.computeRelativeURI(reference, path) : reference;
|
||||
if (fragment) {
|
||||
uri = URI.create({
|
||||
...uri,
|
||||
fragment: fragment,
|
||||
});
|
||||
static file(value: string): URI {
|
||||
const [path, authority] = pathUtils.fromFsPath(value);
|
||||
return new URI({ scheme: 'file', authority, path });
|
||||
}
|
||||
|
||||
static placeholder(path: string): URI {
|
||||
return new URI({ scheme: 'placeholder', path: path });
|
||||
}
|
||||
|
||||
resolve(value: string | URI, isDirectory = false): URI {
|
||||
const uri = value instanceof URI ? value : URI.parse(value);
|
||||
if (!uri.isAbsolute()) {
|
||||
if (uri.scheme === 'file' || uri.scheme === 'placeholder') {
|
||||
let newUri = this.withFragment(uri.fragment);
|
||||
if (uri.path) {
|
||||
newUri = (isDirectory ? newUri : newUri.getDirectory())
|
||||
.joinPath(uri.path)
|
||||
.changeExtension('', this.getExtension());
|
||||
}
|
||||
return newUri;
|
||||
}
|
||||
}
|
||||
return uri;
|
||||
}
|
||||
|
||||
static computeRelativeURI(reference: URI, relativeSlug: string): URI {
|
||||
// if no extension is provided, use the same extension as the source file
|
||||
const slug =
|
||||
posix.extname(relativeSlug) !== ''
|
||||
? relativeSlug
|
||||
: `${relativeSlug}${posix.extname(reference.path)}`;
|
||||
return URI.create({
|
||||
...reference,
|
||||
path: posix.join(posix.dirname(reference.path), slug),
|
||||
});
|
||||
isAbsolute(): boolean {
|
||||
return pathUtils.isAbsolute(this.path);
|
||||
}
|
||||
|
||||
static file(path: string): URI {
|
||||
let authority = _empty;
|
||||
|
||||
// normalize to fwd-slashes on windows,
|
||||
// on other systems bwd-slashes are valid
|
||||
// filename character, eg /f\oo/ba\r.txt
|
||||
if (URI.isWindowsPath(path)) {
|
||||
path = windowsPathToUriPath(path);
|
||||
}
|
||||
|
||||
// check for authority as used in UNC shares
|
||||
// or use the path as given
|
||||
if (path[0] === _slash && path[1] === _slash) {
|
||||
const idx = path.indexOf(_slash, 2);
|
||||
if (idx === -1) {
|
||||
authority = path.substring(2);
|
||||
path = _slash;
|
||||
} else {
|
||||
authority = path.substring(2, idx);
|
||||
path = path.substring(idx) || _slash;
|
||||
}
|
||||
}
|
||||
|
||||
return URI.create({ scheme: 'file', authority, path });
|
||||
getDirectory(): URI {
|
||||
const path = pathUtils.getDirectory(this.path);
|
||||
return new URI({ ...this, path });
|
||||
}
|
||||
|
||||
static placeholder(key: string): URI {
|
||||
return URI.create({
|
||||
scheme: 'placeholder',
|
||||
path: key,
|
||||
});
|
||||
getBasename(): string {
|
||||
return pathUtils.getBasename(this.path);
|
||||
}
|
||||
|
||||
static relativePath(source: URI, target: URI): string {
|
||||
const relativePath = posix.relative(
|
||||
posix.dirname(source.path),
|
||||
target.path
|
||||
);
|
||||
return relativePath;
|
||||
getName(): string {
|
||||
return pathUtils.getName(this.path);
|
||||
}
|
||||
|
||||
static getBasename(uri: URI) {
|
||||
return posix.parse(uri.path).name;
|
||||
getExtension(): string {
|
||||
return pathUtils.getExtension(this.path);
|
||||
}
|
||||
|
||||
static getDir(uri: URI) {
|
||||
return URI.file(posix.dirname(uri.path));
|
||||
changeExtension(from: string, to: string): URI {
|
||||
const path = pathUtils.changeExtension(this.path, from, to);
|
||||
return new URI({ ...this, path });
|
||||
}
|
||||
|
||||
static getFileNameWithoutExtension(uri: URI) {
|
||||
return URI.getBasename(uri).replace(/\.[^.]+$/, '');
|
||||
joinPath(...paths: string[]) {
|
||||
const path = pathUtils.joinPath(this.path, ...paths);
|
||||
return new URI({ ...this, path });
|
||||
}
|
||||
|
||||
relativeTo(uri: URI) {
|
||||
const path = pathUtils.relativeTo(this.path, uri.path);
|
||||
return new URI({ ...this, path });
|
||||
}
|
||||
|
||||
withFragment(fragment: string): URI {
|
||||
return new URI({ ...this, fragment });
|
||||
}
|
||||
|
||||
/**
|
||||
* Uses a placeholder URI, and a reference directory, to generate
|
||||
* the URI of the corresponding resource
|
||||
*
|
||||
* @param placeholderUri the placeholder URI
|
||||
* @param basedir the dir to be used as reference
|
||||
* @returns the target resource URI
|
||||
* Returns a URI without the fragment and query information
|
||||
*/
|
||||
static createResourceUriFromPlaceholder(
|
||||
basedir: URI,
|
||||
placeholderUri: URI
|
||||
): URI {
|
||||
const tokens = placeholderUri.path.split('/');
|
||||
const path = tokens.slice(0, -1);
|
||||
const filename = tokens.slice(-1);
|
||||
return URI.joinPath(basedir, ...path, `${filename}.md`);
|
||||
asPlain(): URI {
|
||||
return new URI({ ...this, fragment: '', query: '' });
|
||||
}
|
||||
|
||||
/**
|
||||
* Join a URI path with path fragments and normalizes the resulting path.
|
||||
*
|
||||
* @param uri The input URI.
|
||||
* @param pathFragment The path fragment to add to the URI path.
|
||||
* @returns The resulting URI.
|
||||
*/
|
||||
static joinPath(uri: URI, ...pathFragment: string[]): URI {
|
||||
if (!uri.path) {
|
||||
throw new Error(`[UriError]: cannot call joinPath on URI without path`);
|
||||
}
|
||||
let newPath: string;
|
||||
if (URI.isWindowsPath(uri.path) && uri.scheme === 'file') {
|
||||
newPath = URI.file(paths.win32.join(URI.toFsPath(uri), ...pathFragment))
|
||||
.path;
|
||||
} else {
|
||||
newPath = paths.posix.join(uri.path, ...pathFragment);
|
||||
}
|
||||
return URI.create({ ...uri, path: newPath });
|
||||
isPlaceholder(): boolean {
|
||||
return this.scheme === 'placeholder';
|
||||
}
|
||||
|
||||
static toFsPath(uri: URI): string {
|
||||
let value: string;
|
||||
if (uri.authority && uri.path.length > 1 && uri.scheme === 'file') {
|
||||
// unc path: file://shares/c$/far/boo
|
||||
value = `//${uri.authority}${uri.path}`;
|
||||
} else if (
|
||||
uri.path.charCodeAt(0) === CharCode.Slash &&
|
||||
((uri.path.charCodeAt(1) >= CharCode.A &&
|
||||
uri.path.charCodeAt(1) <= CharCode.Z) ||
|
||||
(uri.path.charCodeAt(1) >= CharCode.a &&
|
||||
uri.path.charCodeAt(1) <= CharCode.z)) &&
|
||||
uri.path.charCodeAt(2) === CharCode.Colon
|
||||
) {
|
||||
// windows drive letter: file:///C:/far/boo
|
||||
value = uri.path[1].toUpperCase() + uri.path.substr(2);
|
||||
} else {
|
||||
// other path
|
||||
value = uri.path;
|
||||
}
|
||||
if (URI.isWindowsPath(value)) {
|
||||
value = value.replace(/\//g, '\\');
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
static toString(uri: URI): string {
|
||||
return encode(uri, false);
|
||||
}
|
||||
|
||||
// --- utility
|
||||
|
||||
static isWindowsPath(path: string) {
|
||||
return (
|
||||
(path.length >= 2 && path.charCodeAt(1) === CharCode.Colon) ||
|
||||
(path.length >= 3 &&
|
||||
path.charCodeAt(0) === CharCode.Slash &&
|
||||
path.charCodeAt(2) === CharCode.Colon)
|
||||
toFsPath() {
|
||||
return pathUtils.toFsPath(
|
||||
this.path,
|
||||
this.scheme === 'file' ? this.authority : ''
|
||||
);
|
||||
}
|
||||
|
||||
static isUri(thing: any): thing is URI {
|
||||
if (!thing) {
|
||||
return false;
|
||||
}
|
||||
return (
|
||||
typeof (thing as URI).authority === 'string' &&
|
||||
typeof (thing as URI).fragment === 'string' &&
|
||||
typeof (thing as URI).path === 'string' &&
|
||||
typeof (thing as URI).query === 'string' &&
|
||||
typeof (thing as URI).scheme === 'string'
|
||||
);
|
||||
toString(): string {
|
||||
return encode(this, false);
|
||||
}
|
||||
|
||||
static isPlaceholder(uri: URI): boolean {
|
||||
return uri.scheme === 'placeholder';
|
||||
isMarkdown(): boolean {
|
||||
const ext = this.getExtension();
|
||||
return ext === '.md' || ext === '.markdown';
|
||||
}
|
||||
|
||||
static isEqual(a: URI, b: URI): boolean {
|
||||
isEqual(uri: URI): boolean {
|
||||
return (
|
||||
a.authority === b.authority &&
|
||||
a.scheme === b.scheme &&
|
||||
a.path === b.path &&
|
||||
a.fragment === b.fragment &&
|
||||
a.query === b.query
|
||||
this.authority === uri.authority &&
|
||||
this.scheme === uri.scheme &&
|
||||
this.path === uri.path &&
|
||||
this.fragment === uri.fragment &&
|
||||
this.query === uri.query
|
||||
);
|
||||
}
|
||||
static isMarkdownFile(uri: URI): boolean {
|
||||
return uri.path.endsWith('.md');
|
||||
}
|
||||
}
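
In short, the `URI` interface plus static helpers becomes a class with instance methods. A sketch of the new call style, mirroring the updated tests earlier in this diff (expected values shown as comments; the exact path-utility semantics are assumed from those tests):

```ts
import { URI } from './uri';

const note = URI.file('/path/to/a/note.md');

// Relative references resolve against the note; a missing extension
// defaults to the extension of the source file.
note.resolve('../another-note'); // file URI for /path/to/another-note.md
note.resolve('#section');        // same path, fragment 'section'

// Relative path from one resource to a directory.
note.relativeTo(URI.file('/path/to/another/note.md').getDirectory());
// -> URI.file('../a/note.md')

// Fragment/query handling and comparisons.
note.withFragment('intro').asPlain().isEqual(note); // true
URI.placeholder('some-key').isPlaceholder();        // true
note.isMarkdown();                                  // true ('.md' or '.markdown')

// Windows drive letters are normalized on parse and restored by toFsPath().
URI.parse('file:///c:/this/is/a/Path').toFsPath();  // C:\this\is\a\Path
```
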
// --- encode / decode
|
||||
@@ -292,33 +185,6 @@ function percentDecode(str: string): string {
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts a windows-like path to standard URI path
|
||||
* - Normalize the Windows drive letter to upper case
|
||||
* - replace \ with /
|
||||
* - always start with /
|
||||
*
|
||||
* see https://github.com/foambubble/foam/issues/813
|
||||
* see https://github.com/microsoft/vscode/issues/43959
|
||||
* see https://github.com/microsoft/vscode/issues/116298
|
||||
*
|
||||
* @param path the path to convert
|
||||
* @returns the URI compatible path
|
||||
*/
|
||||
function windowsPathToUriPath(path: string): string {
|
||||
path = path.charCodeAt(0) === CharCode.Slash ? path : `/${path}`;
|
||||
path = path.replace(/\\/g, _slash);
|
||||
const code = path.charCodeAt(1);
|
||||
if (
|
||||
path.charCodeAt(2) === CharCode.Colon &&
|
||||
code >= CharCode.a &&
|
||||
code <= CharCode.z
|
||||
) {
|
||||
path = `/${String.fromCharCode(code - 32)}:${path.substr(3)}`; // "/C:".length === 3
|
||||
}
|
||||
return path;
|
||||
}
|
|
||||
* Create the external version of a uri
|
||||
*/
|
||||
@@ -328,6 +194,7 @@ function encode(uri: URI, skipEncoding: boolean): string {
|
||||
: encodeURIComponentMinimal;
|
||||
|
||||
let res = '';
|
||||
// eslint-disable-next-line prefer-const
|
||||
let { scheme, authority, path, query, fragment } = uri;
|
||||
if (scheme) {
|
||||
res += scheme;
|
||||
|
||||
@@ -1,31 +1,12 @@
|
||||
import { FoamWorkspace, getReferenceType } from './workspace';
|
||||
import { FoamGraph } from './graph';
|
||||
import { FoamWorkspace } from './workspace';
|
||||
import { Logger } from '../utils/log';
|
||||
import { URI } from './uri';
|
||||
import { createTestNote, createTestWorkspace } from '../../test/test-utils';
|
||||
|
||||
Logger.setLevel('error');
|
||||
|
||||
describe('Reference types', () => {
|
||||
it('Detects absolute references', () => {
|
||||
expect(getReferenceType('/hello')).toEqual('absolute-path');
|
||||
expect(getReferenceType('/hello/there')).toEqual('absolute-path');
|
||||
});
|
||||
it('Detects relative references', () => {
|
||||
expect(getReferenceType('../hello')).toEqual('relative-path');
|
||||
expect(getReferenceType('./hello')).toEqual('relative-path');
|
||||
expect(getReferenceType('./hello/there')).toEqual('relative-path');
|
||||
});
|
||||
it('Detects key references', () => {
|
||||
expect(getReferenceType('hello')).toEqual('key');
|
||||
});
|
||||
it('Detects URIs', () => {
|
||||
expect(getReferenceType(URI.file('/path/to/file.md'))).toEqual('uri');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Workspace resources', () => {
|
||||
it('Adds notes to workspace', () => {
|
||||
it('should allow adding notes to the workspace', () => {
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(createTestNote({ uri: '/page-a.md' }));
|
||||
ws.set(createTestNote({ uri: '/page-b.md' }));
|
||||
@@ -39,7 +20,7 @@ describe('Workspace resources', () => {
|
||||
).toEqual(['/page-a.md', '/page-b.md', '/page-c.md']);
|
||||
});
|
||||
|
||||
it('Listing resources includes all notes', () => {
|
||||
it('should include all notes when listing resources', () => {
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(createTestNote({ uri: '/page-a.md' }));
|
||||
ws.set(createTestNote({ uri: '/file.pdf' }));
|
||||
@@ -52,7 +33,7 @@ describe('Workspace resources', () => {
|
||||
).toEqual(['/file.pdf', '/page-a.md']);
|
||||
});
|
||||
|
||||
it('Fails if getting non-existing note', () => {
|
||||
it('should fail when trying to get a non-existing note', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
});
|
||||
@@ -65,91 +46,46 @@ describe('Workspace resources', () => {
|
||||
expect(() => ws.get(uri)).toThrow();
|
||||
});
|
||||
|
||||
it('Should work with a resource named like a JS prototype property', () => {
|
||||
it('should work with a resource named like a JS prototype property', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({ uri: '/somewhere/constructor.md' });
|
||||
ws.set(noteA);
|
||||
expect(ws.list()).toEqual([noteA]);
|
||||
});
|
||||
|
||||
it('#851 - listing by ID should not return files with same suffix', () => {
|
||||
it('should not return files with same suffix when listing by ID - #851', () => {
|
||||
const ws = createTestWorkspace()
|
||||
.set(createTestNote({ uri: 'test-file.md' }))
|
||||
.set(createTestNote({ uri: 'file.md' }));
|
||||
expect(ws.listById('file').length).toEqual(1);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Graph', () => {
|
||||
it('contains notes and placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(
|
||||
createTestNote({
|
||||
uri: '/page-a.md',
|
||||
links: [{ slug: 'placeholder-link' }],
|
||||
})
|
||||
);
|
||||
ws.set(createTestNote({ uri: '/file.pdf' }));
|
||||
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getAllNodes()
|
||||
.map(uri => uri.path)
|
||||
.sort()
|
||||
).toEqual(['/file.pdf', '/page-a.md', 'placeholder-link']);
|
||||
expect(ws.listByIdentifier('file').length).toEqual(1);
|
||||
});
|
||||
|
||||
it('Supports multiple connections between the same resources', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/note-a.md',
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/note-b.md',
|
||||
links: [{ to: noteA.uri.path }, { to: noteA.uri.path }],
|
||||
});
|
||||
it('should support dendron-style names', () => {
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
expect(graph.getBacklinks(noteA.uri)).toEqual([
|
||||
{
|
||||
source: noteB.uri,
|
||||
target: noteA.uri,
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
},
|
||||
{
|
||||
source: noteB.uri,
|
||||
target: noteA.uri,
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
},
|
||||
]);
|
||||
.set(createTestNote({ uri: 'note.pdf' }))
|
||||
.set(createTestNote({ uri: 'note.md' }))
|
||||
.set(createTestNote({ uri: 'note.yo.md' }))
|
||||
.set(createTestNote({ uri: 'note2.md' }));
|
||||
for (const [reference, path] of [
|
||||
['note', '/note.md'],
|
||||
['note.md', '/note.md'],
|
||||
['note.yo', '/note.yo.md'],
|
||||
['note.yo.md', '/note.yo.md'],
|
||||
['note.pdf', '/note.pdf'],
|
||||
['note2', '/note2.md'],
|
||||
]) {
|
||||
expect(ws.listByIdentifier(reference)[0].uri.path).toEqual(path);
|
||||
expect(ws.find(reference).uri.path).toEqual(path);
|
||||
}
|
||||
});
|
||||
it('Supports removing a single link amongst several between two resources', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/note-a.md',
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/note-b.md',
|
||||
links: [{ to: noteA.uri.path }, { to: noteA.uri.path }],
|
||||
});
|
||||
|
||||
it('should keep the fragment information when finding a resource', () => {
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
.set(createTestNote({ uri: 'test-file.md' }))
|
||||
.set(createTestNote({ uri: 'file.md' }));
|
||||
|
||||
expect(graph.getBacklinks(noteA.uri).length).toEqual(2);
|
||||
|
||||
const noteBBis = createTestNote({
|
||||
uri: '/note-b.md',
|
||||
links: [{ to: noteA.uri.path }],
|
||||
});
|
||||
ws.set(noteBBis);
|
||||
expect(graph.getBacklinks(noteA.uri).length).toEqual(1);
|
||||
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
const res = ws.find('test-file#my-section');
|
||||
expect(res.uri.fragment).toEqual('my-section');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -173,770 +109,81 @@ describe('Identifier computation', () => {
|
||||
expect(ws.getIdentifier(second.uri)).toEqual('way/for/page-a');
|
||||
expect(ws.getIdentifier(third.uri)).toEqual('path/for/page-a');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Wikilinks', () => {
|
||||
it('Can be defined with basename, relative path, absolute path, extension', () => {
|
||||
const noteA = createTestNote({
|
||||
it('should support sections in identifier computation', () => {
|
||||
const first = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [
|
||||
// wikilink
|
||||
{ slug: 'page-b' },
|
||||
// relative path wikilink
|
||||
{ slug: '../another/page-c.md' },
|
||||
// absolute path wikilink
|
||||
{ slug: '/absolute/path/page-d' },
|
||||
// wikilink with extension
|
||||
{ slug: 'page-e.md' },
|
||||
// wikilink to placeholder
|
||||
{ slug: 'placeholder-test' },
|
||||
],
|
||||
});
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(createTestNote({ uri: '/somewhere/page-b.md' }))
|
||||
.set(createTestNote({ uri: '/path/another/page-c.md' }))
|
||||
.set(createTestNote({ uri: '/absolute/path/page-d.md' }))
|
||||
.set(createTestNote({ uri: '/absolute/path/page-e.md' }));
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
const second = createTestNote({
|
||||
uri: '/another/way/for/page-a.md',
|
||||
});
|
||||
const third = createTestNote({
|
||||
uri: '/another/path/for/page-a.md',
|
||||
});
|
||||
const ws = new FoamWorkspace()
|
||||
.set(first)
|
||||
.set(second)
|
||||
.set(third);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getLinks(noteA.uri)
|
||||
.map(link => link.target.path)
|
||||
.sort()
|
||||
).toEqual([
|
||||
'/absolute/path/page-d.md',
|
||||
'/absolute/path/page-e.md',
|
||||
'/path/another/page-c.md',
|
||||
'/somewhere/page-b.md',
|
||||
'placeholder-test',
|
||||
]);
|
||||
});
|
||||
|
||||
it('Creates inbound connections for target note', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(
|
||||
createTestNote({
|
||||
uri: '/somewhere/page-b.md',
|
||||
links: [{ slug: 'page-a' }],
|
||||
})
|
||||
)
|
||||
.set(
|
||||
createTestNote({
|
||||
uri: '/path/another/page-c.md',
|
||||
links: [{ slug: '/path/to/page-a' }],
|
||||
})
|
||||
)
|
||||
.set(
|
||||
createTestNote({
|
||||
uri: '/absolute/path/page-d.md',
|
||||
links: [{ slug: '../to/page-a.md' }],
|
||||
})
|
||||
);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getBacklinks(noteA.uri)
|
||||
.map(link => link.source.path)
|
||||
.sort()
|
||||
).toEqual(['/path/another/page-c.md', '/somewhere/page-b.md']);
|
||||
});
|
||||
|
||||
it('Uses wikilink definitions when available to resolve target', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/somewhere/from/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
noteA.definitions.push({
|
||||
label: 'page-b',
|
||||
url: '../to/page-b.md',
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/somewhere/to/page-b.md',
|
||||
});
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getAllConnections()[0]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: noteB.uri,
|
||||
link: expect.objectContaining({ type: 'wikilink', label: 'page-b' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('Resolves wikilink referencing more than one note', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB1 = createTestNote({ uri: '/path/to/another/page-b.md' });
|
||||
const noteB2 = createTestNote({ uri: '/path/to/more/page-b.md' });
|
||||
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(noteB1)
|
||||
.set(noteB2);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri)).toEqual([
|
||||
{
|
||||
source: noteA.uri,
|
||||
target: noteB1.uri,
|
||||
link: expect.objectContaining({ type: 'wikilink' }),
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
it('Resolves path wikilink in case of name conflict', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: './more/page-b' }, { slug: 'yet/page-b' }],
|
||||
});
|
||||
const noteB1 = createTestNote({ uri: '/path/to/another/page-b.md' });
|
||||
const noteB2 = createTestNote({ uri: '/path/to/more/page-b.md' });
|
||||
const noteB3 = createTestNote({ uri: '/path/to/yet/page-b.md' });
|
||||
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(noteB1)
|
||||
.set(noteB2)
|
||||
.set(noteB3);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
noteB2.uri,
|
||||
noteB3.uri,
|
||||
]);
|
||||
});
|
||||
|
||||
it('Supports attachments', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [
|
||||
// wikilink with extension
|
||||
{ slug: 'attachment-a.pdf' },
|
||||
// wikilink without extension
|
||||
{ slug: 'attachment-b' },
|
||||
],
|
||||
});
|
||||
const attachmentA = createTestNote({
|
||||
uri: '/path/to/more/attachment-a.pdf',
|
||||
});
|
||||
const attachmentB = createTestNote({
|
||||
uri: '/path/to/more/attachment-b.pdf',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(attachmentA)
|
||||
.set(attachmentB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getBacklinks(attachmentA.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
// Attachments require extension
|
||||
expect(graph.getBacklinks(attachmentB.uri).map(l => l.source)).toEqual([]);
|
||||
});
|
||||
|
||||
it('Resolves conflicts alphabetically - part 1', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'attachment-a.pdf' }],
|
||||
});
|
||||
const attachmentA = createTestNote({
|
||||
uri: '/path/to/more/attachment-a.pdf',
|
||||
});
|
||||
const attachmentABis = createTestNote({
|
||||
uri: '/path/to/attachment-a.pdf',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(attachmentA)
|
||||
.set(attachmentABis);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
attachmentABis.uri,
|
||||
]);
|
||||
});
|
||||
|
||||
it('Resolves conflicts alphabetically - part 2', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'attachment-a.pdf' }],
|
||||
});
|
||||
const attachmentA = createTestNote({
|
||||
uri: '/path/to/more/attachment-a.pdf',
|
||||
});
|
||||
const attachmentABis = createTestNote({
|
||||
uri: '/path/to/attachment-a.pdf',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(attachmentABis)
|
||||
.set(attachmentA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
attachmentABis.uri,
|
||||
]);
|
||||
});
|
||||
|
||||
it('Allows for dendron-style wikilinks, including a dot', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'dendron.style' }],
|
||||
});
|
||||
const noteB1 = createTestNote({ uri: '/path/to/another/dendron.style.md' });
|
||||
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB1);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB1.uri]);
|
||||
});
|
||||
|
||||
it('Handles capitalization of files and wikilinks correctly', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [
|
||||
// uppercased filename, lowercased slug
|
||||
{ slug: 'page-b' },
|
||||
// lowercased filename, camelcased wikilink
|
||||
{ slug: 'Page-C' },
|
||||
// lowercased filename, lowercased wikilink
|
||||
{ slug: 'page-d' },
|
||||
],
|
||||
});
|
||||
const ws = createTestWorkspace()
|
||||
.set(noteA)
|
||||
.set(createTestNote({ uri: '/somewhere/PAGE-B.md' }))
|
||||
.set(createTestNote({ uri: '/path/another/page-c.md' }))
|
||||
.set(createTestNote({ uri: '/path/another/page-d.md' }));
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getLinks(noteA.uri)
|
||||
.map(link => link.target.path)
|
||||
.sort()
|
||||
).toEqual([
|
||||
'/path/another/page-c.md',
|
||||
'/path/another/page-d.md',
|
||||
'/somewhere/PAGE-B.md',
|
||||
]);
|
||||
});
|
||||
});
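
The wikilink tests above pin down the resolution rules: plain basenames, relative and absolute paths, mandatory extensions for attachments, alphabetical tie-breaks on conflicts, and dendron-style dotted names. A small sketch of querying the resulting graph with the same test helpers (import paths as used in this file):

```ts
import { FoamGraph } from './graph';
import { URI } from './uri';
import { createTestNote, createTestWorkspace } from '../../test/test-utils';

const noteA = createTestNote({
  uri: '/notes/page-a.md',
  links: [{ slug: 'page-b' }, { slug: 'missing-note' }],
});
const noteB = createTestNote({ uri: '/notes/page-b.md' });

const ws = createTestWorkspace().set(noteA).set(noteB);
const graph = FoamGraph.fromWorkspace(ws);

graph.getLinks(noteA.uri).map(l => l.target.path);
// -> ['/notes/page-b.md', 'missing-note'] (unresolved wikilink becomes a placeholder)

graph.getBacklinks(noteB.uri).map(l => l.source.path); // -> ['/notes/page-a.md']
graph.contains(URI.placeholder('missing-note'));       // -> true
```
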
describe('markdown direct links', () => {
|
||||
it('Support absolute and relative path', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: './another/page-b.md' }, { to: 'more/page-c.md' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
links: [{ to: '../../to/page-a.md' }],
|
||||
});
|
||||
const noteC = createTestNote({
|
||||
uri: '/path/to/more/page-c.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(noteB)
|
||||
.set(noteC);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getLinks(noteA.uri)
|
||||
.map(link => link.target.path)
|
||||
.sort()
|
||||
).toEqual(['/path/to/another/page-b.md', '/path/to/more/page-c.md']);
|
||||
|
||||
expect(graph.getLinks(noteB.uri).map(l => l.target)).toEqual([noteA.uri]);
|
||||
expect(graph.getBacklinks(noteA.uri).map(l => l.source)).toEqual([
|
||||
noteB.uri,
|
||||
]);
|
||||
expect(graph.getConnections(noteA.uri)).toEqual([
|
||||
{
|
||||
source: noteA.uri,
|
||||
target: noteB.uri,
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
},
|
||||
{
|
||||
source: noteA.uri,
|
||||
target: noteC.uri,
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
},
|
||||
{
|
||||
source: noteB.uri,
|
||||
target: noteA.uri,
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Placeholders', () => {
|
||||
it('Treats direct links to non-existing files as placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/somewhere/from/page-a.md',
|
||||
links: [{ to: '../page-b.md' }, { to: '/path/to/page-c.md' }],
|
||||
});
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getAllConnections()[0]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/somewhere/page-b.md'),
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
});
|
||||
expect(graph.getAllConnections()[1]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/path/to/page-c.md'),
|
||||
link: expect.objectContaining({ type: 'link' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('Treats wikilinks without matching file as placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/somewhere/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getAllConnections()[0]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('page-b'),
|
||||
link: expect.objectContaining({ type: 'wikilink' }),
|
||||
});
|
||||
});
|
||||
it('Treats wikilink with definition to non-existing file as placeholders', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/somewhere/page-a.md',
|
||||
links: [{ slug: 'page-b' }, { slug: 'page-c' }],
|
||||
});
|
||||
noteA.definitions.push({
|
||||
label: 'page-b',
|
||||
url: './page-b.md',
|
||||
});
|
||||
noteA.definitions.push({
|
||||
label: 'page-c',
|
||||
url: '/path/to/page-c.md',
|
||||
});
|
||||
ws.set(noteA).set(
|
||||
createTestNote({ uri: '/different/location/for/note-b.md' })
|
||||
expect(ws.getIdentifier(first.uri.withFragment('section name'))).toEqual(
|
||||
'to/page-a#section name'
|
||||
);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getAllConnections()[0]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/somewhere/page-b.md'),
|
||||
link: expect.objectContaining({ type: 'wikilink' }),
|
||||
});
|
||||
expect(graph.getAllConnections()[1]).toEqual({
|
||||
source: noteA.uri,
|
||||
target: URI.placeholder('/path/to/page-c.md'),
|
||||
link: expect.objectContaining({ type: 'wikilink' }),
|
||||
});
|
||||
});
|
||||
|
||||
it('Should work with a placeholder named like a JS prototype property', () => {
|
||||
const ws = createTestWorkspace();
|
||||
const noteA = createTestNote({
|
||||
uri: '/page-a.md',
|
||||
links: [{ slug: 'constructor' }],
|
||||
});
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
const needle = '/project/car/todo';
|
||||
|
||||
expect(
|
||||
graph
|
||||
.getAllNodes()
|
||||
.map(uri => uri.path)
|
||||
.sort()
|
||||
).toEqual(['/page-a.md', 'constructor']);
|
||||
test.each([
|
||||
[['/project/home/todo', '/other/todo', '/something/else'], 'car/todo'],
|
||||
[['/family/car/todo', '/other/todo'], 'project/car/todo'],
|
||||
[[], 'todo'],
|
||||
])('should find shortest identifier', (haystack, id) => {
|
||||
expect(FoamWorkspace.getShortestIdentifier(needle, haystack)).toEqual(id);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Updating workspace happy path', () => {
|
||||
it('Update links when modifying note', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
const noteC = createTestNote({
|
||||
uri: '/path/to/more/page-c.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
it('should ignore same string in haystack', () => {
|
||||
const haystack = [
|
||||
needle,
|
||||
'/project/home/todo',
|
||||
'/other/todo',
|
||||
'/something/else',
|
||||
];
|
||||
const identifier = FoamWorkspace.getShortestIdentifier(needle, haystack);
|
||||
expect(identifier).toEqual('car/todo');
|
||||
});
|
||||
|
||||
it('should return the best guess when no solution is possible', () => {
|
||||
/**
|
||||
* In this case there is no way to uniquely identify the element,
|
||||
* our fallback is to just return the "least wrong" result, basically
|
||||
* a full identifier
|
||||
* This is an edge case that should never happen in a real repo
|
||||
*/
|
||||
const haystack = [
|
||||
'/parent/' + needle,
|
||||
'/project/home/todo',
|
||||
'/other/todo',
|
||||
'/something/else',
|
||||
];
|
||||
const identifier = FoamWorkspace.getShortestIdentifier(needle, haystack);
|
||||
expect(identifier).toEqual('project/car/todo');
|
||||
});
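
The cases above characterize `FoamWorkspace.getShortestIdentifier`: it returns the shortest trailing portion of the needle that no haystack entry also ends with, skips the needle itself, and falls back to a near-full identifier when nothing shorter is unique. Restating the same test data as plain calls:

```ts
import { FoamWorkspace } from './workspace';

const needle = '/project/car/todo';

// 'todo' clashes with '/other/todo'; 'car/todo' is unique.
FoamWorkspace.getShortestIdentifier(needle, [
  '/project/home/todo',
  '/other/todo',
  '/something/else',
]); // -> 'car/todo'

// 'car/todo' clashes with '/family/car/todo'; one more segment is needed.
FoamWorkspace.getShortestIdentifier(needle, [
  '/family/car/todo',
  '/other/todo',
]); // -> 'project/car/todo'

// With no competitors the basename is enough.
FoamWorkspace.getShortestIdentifier(needle, []); // -> 'todo'
```
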
it('should ignore elements from the exclude list', () => {
|
||||
const workspace = new FoamWorkspace();
|
||||
const noteA = createTestNote({ uri: '/path/to/note-a.md' });
|
||||
const noteB = createTestNote({ uri: '/path/to/note-b.md' });
|
||||
const noteC = createTestNote({ uri: '/path/to/note-c.md' });
|
||||
const noteD = createTestNote({ uri: '/path/to/note-d.md' });
|
||||
const noteABis = createTestNote({ uri: '/path/to/another/note-a.md' });
|
||||
|
||||
workspace
|
||||
.set(noteA)
|
||||
.set(noteB)
|
||||
.set(noteC);
|
||||
let graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getBacklinks(noteC.uri).map(l => l.source)).toEqual([
|
||||
noteB.uri,
|
||||
]);
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
// change is not propagated immediately
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getBacklinks(noteC.uri).map(l => l.source)).toEqual([
|
||||
noteB.uri,
|
||||
]);
|
||||
|
||||
// recompute the links
|
||||
graph = FoamGraph.fromWorkspace(ws);
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteC.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([]);
|
||||
.set(noteC)
|
||||
.set(noteD);
|
||||
expect(workspace.getIdentifier(noteABis.uri)).toEqual('another/note-a');
|
||||
expect(
|
||||
graph
|
||||
.getBacklinks(noteC.uri)
|
||||
.map(link => link.source.path)
|
||||
.sort()
|
||||
).toEqual(['/path/to/another/page-b.md', '/path/to/page-a.md']);
|
||||
});
|
||||
|
||||
it('Removing target note should produce placeholder for wikilinks', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
const graph2 = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
expect(graph2.contains(URI.placeholder('page-b'))).toBeTruthy();
|
||||
});
|
||||
|
||||
it('Adding note should replace placeholder for wikilinks', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('page-b'),
|
||||
]);
|
||||
expect(graph.contains(URI.placeholder('page-b'))).toBeTruthy();
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
});
|
||||
|
||||
it('Removing target note should produce placeholder for direct links', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
const graph2 = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
expect(
|
||||
graph2.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
});
|
||||
|
||||
it('Adding note should replace placeholder for direct links', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('/path/to/another/page-b.md'),
|
||||
]);
|
||||
expect(() =>
|
||||
ws.get(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toThrow();
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
});
|
||||
|
||||
it('removing link to placeholder should remove placeholder', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace().set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
const graph2 = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
expect(
|
||||
graph2.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeFalsy();
|
||||
});
|
||||
});
|
||||
|
||||
describe('Monitoring of workspace state', () => {
|
||||
it('Update links when modifying note', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
const noteC = createTestNote({
|
||||
uri: '/path/to/more/page-c.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA)
|
||||
.set(noteB)
|
||||
.set(noteC);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(graph.getBacklinks(noteC.uri).map(l => l.source)).toEqual([
|
||||
noteB.uri,
|
||||
]);
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-c' }],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteC.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([]);
|
||||
expect(
|
||||
graph
|
||||
.getBacklinks(noteC.uri)
|
||||
.map(link => link.source.path)
|
||||
.sort()
|
||||
).toEqual(['/path/to/another/page-b.md', '/path/to/page-a.md']);
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('Removing target note should produce placeholder for wikilinks', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('Adding note should replace placeholder for wikilinks', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ slug: 'page-b' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('page-b'),
|
||||
]);
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('Removing target note should produce placeholder for direct links', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA).set(noteB);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([noteB.uri]);
|
||||
expect(graph.getBacklinks(noteB.uri).map(l => l.source)).toEqual([
|
||||
noteA.uri,
|
||||
]);
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
|
||||
// remove note-b
|
||||
ws.delete(noteB.uri);
|
||||
|
||||
expect(() => ws.get(noteB.uri)).toThrow();
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('Adding note should replace placeholder for direct links', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
|
||||
expect(graph.getLinks(noteA.uri).map(l => l.target)).toEqual([
|
||||
URI.placeholder('/path/to/another/page-b.md'),
|
||||
]);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
|
||||
// add note-b
|
||||
const noteB = createTestNote({
|
||||
uri: '/path/to/another/page-b.md',
|
||||
});
|
||||
|
||||
ws.set(noteB);
|
||||
|
||||
expect(() => ws.get(URI.placeholder('page-b'))).toThrow();
|
||||
expect(ws.get(noteB.uri).type).toEqual('note');
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
});
|
||||
|
||||
it('removing link to placeholder should remove placeholder', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [{ to: '/path/to/another/page-b.md' }],
|
||||
});
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(noteA);
|
||||
const graph = FoamGraph.fromWorkspace(ws, true);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeTruthy();
|
||||
|
||||
// update the note
|
||||
const noteABis = createTestNote({
|
||||
uri: '/path/to/page-a.md',
|
||||
links: [],
|
||||
});
|
||||
ws.set(noteABis);
|
||||
expect(
|
||||
graph.contains(URI.placeholder('/path/to/another/page-b.md'))
|
||||
).toBeFalsy();
|
||||
ws.dispose();
|
||||
graph.dispose();
|
||||
workspace.getIdentifier(noteABis.uri, [noteB.uri, noteA.uri])
|
||||
).toEqual('note-a');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,30 +1,11 @@
|
||||
import { Resource, ResourceLink } from './note';
|
||||
import { URI } from './uri';
|
||||
import { isSome, isNone, getShortestIdentifier } from '../utils';
|
||||
import { isAbsolute, getExtension, changeExtension } from '../utils/path';
|
||||
import { isSome } from '../utils';
|
||||
import { Emitter } from '../common/event';
|
||||
import { ResourceProvider } from './provider';
|
||||
import { IDisposable } from '../common/lifecycle';
|
||||
|
||||
export function getReferenceType(
|
||||
reference: URI | string
|
||||
): 'uri' | 'absolute-path' | 'relative-path' | 'key' {
|
||||
if (URI.isUri(reference)) {
|
||||
return 'uri';
|
||||
}
|
||||
if (reference.startsWith('/')) {
|
||||
return 'absolute-path';
|
||||
}
|
||||
if (reference.startsWith('./') || reference.startsWith('../')) {
|
||||
return 'relative-path';
|
||||
}
|
||||
return 'key';
|
||||
}
function hasExtension(path: string): boolean {
|
||||
const dotIdx = path.lastIndexOf('.');
|
||||
return dotIdx > 0 && path.length - dotIdx <= 4;
|
||||
}
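
For context, the two helpers above belong to the code path that this change retires in favour of `listByIdentifier`/`find`: `getReferenceType` classified a link reference, and `hasExtension` guessed whether a path already carried a short extension. Their behaviour, as fixed by the (now removed) tests earlier in this diff:

```ts
getReferenceType(URI.file('/path/to/file.md')); // 'uri'
getReferenceType('/hello/there');               // 'absolute-path'
getReferenceType('./hello');                    // 'relative-path'
getReferenceType('hello');                      // 'key'

hasExtension('note.md');       // true  (dot plus at most three trailing characters)
hasExtension('note.markdown'); // false (extension too long for the heuristic)
hasExtension('note');          // false (no dot)
```
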
export class FoamWorkspace implements IDisposable {
|
||||
private onDidAddEmitter = new Emitter<Resource>();
|
||||
private onDidUpdateEmitter = new Emitter<{ old: Resource; new: Resource }>();
|
||||
@@ -38,7 +19,7 @@ export class FoamWorkspace implements IDisposable {
|
||||
/**
|
||||
* Resources by path
|
||||
*/
|
||||
private resources: Map<string, Resource> = new Map();
|
||||
private _resources: Map<string, Resource> = new Map();
|
||||
|
||||
registerProvider(provider: ResourceProvider) {
|
||||
this.providers.push(provider);
|
||||
@@ -47,7 +28,7 @@ export class FoamWorkspace implements IDisposable {
|
||||
|
||||
set(resource: Resource) {
|
||||
const old = this.find(resource.uri);
|
||||
this.resources.set(normalize(resource.uri.path), resource);
|
||||
this._resources.set(normalize(resource.uri.path), resource);
|
||||
isSome(old)
|
||||
? this.onDidUpdateEmitter.fire({ old: old, new: resource })
|
||||
: this.onDidAddEmitter.fire(resource);
|
||||
@@ -55,21 +36,23 @@ export class FoamWorkspace implements IDisposable {
|
||||
}
|
||||
|
||||
delete(uri: URI) {
|
||||
const deleted = this.resources.get(normalize(uri.path));
|
||||
this.resources.delete(normalize(uri.path));
|
||||
const deleted = this._resources.get(normalize(uri.path));
|
||||
this._resources.delete(normalize(uri.path));
|
||||
|
||||
isSome(deleted) && this.onDidDeleteEmitter.fire(deleted);
|
||||
return deleted ?? null;
|
||||
}
|
||||
|
||||
public exists(uri: URI): boolean {
|
||||
return (
|
||||
!URI.isPlaceholder(uri) && isSome(this.resources.get(normalize(uri.path)))
|
||||
);
|
||||
return isSome(this.find(uri));
|
||||
}
|
||||
|
||||
public list(): Resource[] {
|
||||
return Array.from(this.resources.values());
|
||||
return Array.from(this._resources.values());
|
||||
}
|
||||
|
||||
public resources(): IterableIterator<Resource> {
|
||||
return this._resources.values();
|
||||
}
|
||||
|
||||
public get(uri: URI): Resource {
|
||||
@@ -81,19 +64,17 @@ export class FoamWorkspace implements IDisposable {
|
||||
}
|
||||
}
|
||||
|
||||
public listById(resourceId: string): Resource[] {
|
||||
let needle = '/' + resourceId;
|
||||
if (!hasExtension(needle)) {
|
||||
needle = needle + '.md';
|
||||
}
|
||||
needle = normalize(needle);
|
||||
let resources = [];
|
||||
for (const key of this.resources.keys()) {
|
||||
if (key.endsWith(needle)) {
|
||||
resources.push(this.resources.get(normalize(key)));
|
||||
public listByIdentifier(identifier: string): Resource[] {
|
||||
const needle = normalize('/' + identifier);
|
||||
const mdNeedle =
|
||||
getExtension(needle) !== '.md' ? needle + '.md' : undefined;
|
||||
const resources = [];
|
||||
for (const key of this._resources.keys()) {
|
||||
if ((mdNeedle && key.endsWith(mdNeedle)) || key.endsWith(needle)) {
|
||||
resources.push(this._resources.get(normalize(key)));
|
||||
}
|
||||
}
|
||||
return resources;
|
||||
return resources.sort((a, b) => a.uri.path.localeCompare(b.uri.path));
|
||||
}
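
`listByIdentifier` replaces `listById`: the identifier may be given with or without an extension, a `.md` variant is tried alongside the literal name, and results come back sorted by path, which is what makes the alphabetical tie-break in `find` deterministic. From the dendron-style test earlier in this diff (same test helpers assumed):

```ts
const ws = createTestWorkspace()
  .set(createTestNote({ uri: 'note.pdf' }))
  .set(createTestNote({ uri: 'note.md' }))
  .set(createTestNote({ uri: 'note.yo.md' }));

ws.listByIdentifier('note')[0].uri.path;     // '/note.md'
ws.listByIdentifier('note.yo')[0].uri.path;  // '/note.yo.md'
ws.listByIdentifier('note.pdf')[0].uri.path; // '/note.pdf'
```
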
/**
|
||||
@@ -101,81 +82,90 @@ export class FoamWorkspace implements IDisposable {
|
||||
*
|
||||
* @param forResource the resource to compute the identifier for
|
||||
*/
|
||||
public getIdentifier(forResource: URI): string {
|
||||
public getIdentifier(forResource: URI, exclude?: URI[]): string {
|
||||
const amongst = [];
|
||||
const base = forResource.path.split('/').pop();
|
||||
for (const res of this.resources.values()) {
|
||||
// Just a quick optimization to only add the elements that might match
|
||||
if (res.uri.path.endsWith(base)) {
|
||||
if (!URI.isEqual(res.uri, forResource)) {
|
||||
amongst.push(res.uri);
|
||||
}
|
||||
const basename = forResource.getBasename();
|
||||
for (const res of this._resources.values()) {
|
||||
// skip elements that cannot possibly match
|
||||
if (!res.uri.path.endsWith(basename)) {
|
||||
continue;
|
||||
}
|
||||
// skip self
|
||||
if (res.uri.isEqual(forResource)) {
|
||||
continue;
|
||||
}
|
||||
// skip exclude list
|
||||
if (exclude && exclude.find(ex => ex.isEqual(res.uri))) {
|
||||
continue;
|
||||
}
|
||||
amongst.push(res.uri);
|
||||
}
|
||||
const identifier = getShortestIdentifier(
|
||||
|
||||
let identifier = FoamWorkspace.getShortestIdentifier(
|
||||
forResource.path,
|
||||
amongst.map(uri => uri.path)
|
||||
);
|
||||
|
||||
return identifier.endsWith('.md') ? identifier.slice(0, -3) : identifier;
|
||||
identifier = changeExtension(identifier, '.md', '');
|
||||
if (forResource.fragment) {
|
||||
identifier += `#${forResource.fragment}`;
|
||||
}
|
||||
return identifier;
|
||||
}
|
||||
|
||||
public find(resourceId: URI | string, reference?: URI): Resource | null {
|
||||
const refType = getReferenceType(resourceId);
|
||||
switch (refType) {
|
||||
case 'uri':
|
||||
const uri = resourceId as URI;
|
||||
return this.exists(uri)
|
||||
? this.resources.get(normalize(uri.path)) ?? null
|
||||
: null;
|
||||
|
||||
case 'key':
|
||||
const resources = this.listById(resourceId as string);
|
||||
const sorted = resources.sort((a, b) =>
|
||||
a.uri.path.localeCompare(b.uri.path)
|
||||
);
|
||||
return sorted[0] ?? null;
|
||||
|
||||
case 'absolute-path':
|
||||
if (!hasExtension(resourceId as string)) {
|
||||
resourceId = resourceId + '.md';
|
||||
}
|
||||
const resourceUri = URI.file(resourceId as string);
|
||||
return this.resources.get(normalize(resourceUri.path)) ?? null;
|
||||
|
||||
case 'relative-path':
|
||||
if (isNone(reference)) {
|
||||
return null;
|
||||
}
|
||||
if (!hasExtension(resourceId as string)) {
|
||||
resourceId = resourceId + '.md';
|
||||
}
|
||||
const relativePath = resourceId as string;
|
||||
const targetUri = URI.computeRelativeURI(reference, relativePath);
|
||||
return this.resources.get(normalize(targetUri.path)) ?? null;
|
||||
|
||||
default:
|
||||
throw new Error('Unexpected reference type: ' + refType);
|
||||
public find(reference: URI | string, baseUri?: URI): Resource | null {
|
||||
if (reference instanceof URI) {
|
||||
return this._resources.get(normalize((reference as URI).path)) ?? null;
|
||||
}
|
||||
let resource: Resource | null = null;
|
||||
const [path, fragment] = (reference as string).split('#');
|
||||
if (FoamWorkspace.isIdentifier(path)) {
|
||||
resource = this.listByIdentifier(path)[0];
|
||||
} else {
|
||||
if (isAbsolute(path) || isSome(baseUri)) {
|
||||
if (getExtension(path) !== '.md') {
|
||||
const uri = baseUri.resolve(path + '.md');
|
||||
resource = uri ? this._resources.get(normalize(uri.path)) : null;
|
||||
}
|
||||
if (!resource) {
|
||||
const uri = baseUri.resolve(path);
|
||||
resource = uri ? this._resources.get(normalize(uri.path)) : null;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (resource && fragment) {
|
||||
resource = { ...resource, uri: resource.uri.withFragment(fragment) };
|
||||
}
|
||||
return resource ?? null;
|
||||
}
|
||||
|
||||
public resolveLink(resource: Resource, link: ResourceLink): URI {
|
||||
// TODO add tests
|
||||
const provider = this.providers.find(p => p.supports(resource.uri));
|
||||
return (
|
||||
provider?.resolveLink(this, resource, link) ??
|
||||
URI.placeholder(link.target)
|
||||
for (const provider of this.providers) {
|
||||
if (provider.supports(resource.uri)) {
|
||||
return provider.resolveLink(this, resource, link);
|
||||
}
|
||||
}
|
||||
throw new Error(
|
||||
`Couldn't find provider for resource "${resource.uri.toString()}"`
|
||||
);
|
||||
}
|
||||
|
||||
public read(uri: URI): Promise<string | null> {
|
||||
const provider = this.providers.find(p => p.supports(uri));
|
||||
return provider?.read(uri) ?? Promise.resolve(null);
|
||||
for (const provider of this.providers) {
|
||||
if (provider.supports(uri)) {
|
||||
return provider.read(uri);
|
||||
}
|
||||
}
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
public readAsMarkdown(uri: URI): Promise<string | null> {
|
||||
const provider = this.providers.find(p => p.supports(uri));
|
||||
return provider?.readAsMarkdown(uri) ?? Promise.resolve(null);
|
||||
for (const provider of this.providers) {
|
||||
if (provider.supports(uri)) {
|
||||
return provider.readAsMarkdown(uri);
|
||||
}
|
||||
}
|
||||
return Promise.resolve(null);
|
||||
}
|
||||
|
||||
public dispose(): void {
|
||||
@@ -183,6 +173,51 @@ export class FoamWorkspace implements IDisposable {
|
||||
this.onDidDeleteEmitter.dispose();
|
||||
this.onDidUpdateEmitter.dispose();
|
||||
}
|
||||
|
||||
static isIdentifier(path: string): boolean {
|
||||
return !(
|
||||
path.startsWith('/') ||
|
||||
path.startsWith('./') ||
|
||||
path.startsWith('../')
|
||||
);
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns the minimal identifier for the given string amongst others
|
||||
*
|
||||
* @param forPath the value to compute the identifier for
|
||||
* @param amongst the set of strings within which to find the identifier
|
||||
*/
|
||||
static getShortestIdentifier(forPath: string, amongst: string[]): string {
|
||||
const needleTokens = forPath.split('/').reverse();
|
||||
const haystack = amongst
|
||||
.filter(value => value !== forPath)
|
||||
.map(value => value.split('/').reverse());
|
||||
|
||||
let tokenIndex = 0;
|
||||
let res = needleTokens;
|
||||
while (tokenIndex < needleTokens.length) {
|
||||
for (let j = haystack.length - 1; j >= 0; j--) {
|
||||
if (
|
||||
haystack[j].length < tokenIndex ||
|
||||
needleTokens[tokenIndex] !== haystack[j][tokenIndex]
|
||||
) {
|
||||
haystack.splice(j, 1);
|
||||
}
|
||||
}
|
||||
if (haystack.length === 0) {
|
||||
res = needleTokens.splice(0, tokenIndex + 1);
|
||||
break;
|
||||
}
|
||||
tokenIndex++;
|
||||
}
|
||||
const identifier = res
|
||||
.filter(token => token.trim() !== '')
|
||||
.reverse()
|
||||
.join('/');
|
||||
|
||||
return identifier;
|
||||
}
|
||||
}
|
||||
|
||||
const normalize = (v: string) => v.toLocaleLowerCase();
|
||||
|
||||
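A minimal usage sketch of the reworked identifier logic above (illustrative only, not part of the changeset; the paths are invented, and the import path mirrors the relative imports used elsewhere in this diff):

import { FoamWorkspace } from '../model/workspace';

// getShortestIdentifier keeps only as many trailing path segments as are
// needed to tell the path apart from the competing ones.
const id = FoamWorkspace.getShortestIdentifier('/docs/one/page-b.md', [
  '/docs/two/page-b.md',
]);
// id === 'one/page-b.md'; getIdentifier() then strips '.md' via
// changeExtension and appends the URI fragment if present, yielding the
// wikilink identifier 'one/page-b'.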
packages/foam-vscode/src/core/services/attachment-provider.ts (Normal file, 111 lines)
@@ -0,0 +1,111 @@
import { Resource, ResourceLink } from '../model/note';
import { Logger } from '../utils/log';
import { URI } from '../model/uri';
import { FoamWorkspace } from '../model/workspace';
import { IDataStore, IMatcher } from '../services/datastore';
import { IDisposable } from '../common/lifecycle';
import { ResourceProvider } from '../model/provider';
import { Position } from '../model/position';

const imageExtensions = ['.png', '.jpg', '.gif'];
const attachmentExtensions = ['.pdf', ...imageExtensions];

const asResource = (uri: URI): Resource => {
  const type = imageExtensions.includes(uri.getExtension())
    ? 'image'
    : 'attachment';
  return {
    uri: uri,
    title: uri.getBasename(),
    type: type,
    aliases: [],
    properties: { type: type },
    sections: [],
    links: [],
    tags: [],
    definitions: [],
    source: {
      contentStart: Position.create(0, 0),
      end: Position.create(0, 0),
      eol: '\n',
      text: '',
    },
  };
};

export class AttachmentResourceProvider implements ResourceProvider {
  private disposables: IDisposable[] = [];

  constructor(
    private readonly matcher: IMatcher,
    private readonly dataStore: IDataStore,
    private readonly watcherInit?: (triggers: {
      onDidChange: (uri: URI) => void;
      onDidCreate: (uri: URI) => void;
      onDidDelete: (uri: URI) => void;
    }) => IDisposable[]
  ) {}

  async init(workspace: FoamWorkspace) {
    const filesByFolder = await Promise.all(
      this.matcher.include.map(glob =>
        this.dataStore.list(glob, this.matcher.exclude)
      )
    );
    const files = this.matcher
      .match(filesByFolder.flat())
      .filter(this.supports);

    for (const uri of files) {
      Logger.info('Found: ' + uri.toString());
      workspace.set(asResource(uri));
    }

    this.disposables =
      this.watcherInit?.({
        onDidChange: async uri => {
          if (this.matcher.isMatch(uri) && this.supports(uri)) {
            workspace.set(asResource(uri));
          }
        },
        onDidCreate: async uri => {
          if (this.matcher.isMatch(uri) && this.supports(uri)) {
            workspace.set(asResource(uri));
          }
        },
        onDidDelete: uri => {
          this.supports(uri) && workspace.delete(uri);
        },
      }) ?? [];
  }

  supports(uri: URI) {
    return attachmentExtensions.includes(uri.getExtension());
  }

  read(uri: URI): Promise<string | null> {
    return null;
  }

  async readAsMarkdown(uri: URI): Promise<string | null> {
    if (imageExtensions.includes(uri.getExtension())) {
      return `}|height=200)`;
    }
    return `### ${uri.getBasename()}`;
  }

  async fetch(uri: URI) {
    return asResource(uri);
  }

  resolveLink(w: FoamWorkspace, resource: Resource, l: ResourceLink) {
    throw new Error('not supported');
    // Silly workaround to make VS Code and es-lint happy
    // eslint-disable-next-line
    return resource.uri;
  }

  dispose() {
    this.disposables.forEach(d => d.dispose());
  }
}
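A rough wiring sketch for the new provider (illustrative, not part of the file above; it assumes a FoamWorkspace can be constructed directly, and reuses the Matcher and FileDataStore shown later in this diff):

import { FoamWorkspace } from '../model/workspace';
import { AttachmentResourceProvider } from './attachment-provider';
import { Matcher, FileDataStore } from './datastore';
import { URI } from '../model/uri';

async function loadAttachments(readFile: (uri: URI) => Promise<string>) {
  const matcher = new Matcher([URI.file('/my/notes')], ['**/*'], []);
  const dataStore = new FileDataStore(readFile);
  const workspace = new FoamWorkspace(); // assumption: default constructor
  const provider = new AttachmentResourceProvider(matcher, dataStore);
  workspace.registerProvider(provider);
  // init() lists the matching files and adds every .png/.jpg/.gif/.pdf
  // to the workspace as an 'image' or 'attachment' resource.
  await provider.init(workspace);
  return workspace.list().filter(r => r.type !== 'note');
}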
@@ -1,18 +1,18 @@
import { TEST_DATA_DIR } from '../../test/test-utils';
import { readFileFromFs, TEST_DATA_DIR } from '../../test/test-utils';
import { URI } from '../model/uri';
import { Logger } from '../utils/log';
import { FileDataStore, Matcher, toMatcherPathFormat } from './datastore';

Logger.setLevel('error');

const testFolder = URI.joinPath(TEST_DATA_DIR, 'test-datastore');
const testFolder = TEST_DATA_DIR.joinPath('test-datastore');

describe('Matcher', () => {
  it('generates globs with the base dir provided', () => {
    const matcher = new Matcher([testFolder], ['*'], []);
    expect(matcher.folders).toEqual([toMatcherPathFormat(testFolder)]);
    expect(matcher.include).toEqual([
      toMatcherPathFormat(URI.joinPath(testFolder, '*')),
      toMatcherPathFormat(testFolder.joinPath('*')),
    ]);
  });

@@ -20,7 +20,7 @@ describe('Matcher', () => {
    const matcher = new Matcher([testFolder]);
    expect(matcher.exclude).toEqual([]);
    expect(matcher.include).toEqual([
      toMatcherPathFormat(URI.joinPath(testFolder, '**', '*')),
      toMatcherPathFormat(testFolder.joinPath('**', '*')),
    ]);
  });

@@ -28,32 +28,32 @@ describe('Matcher', () => {
    const matcher = new Matcher([testFolder], ['g1', 'g2'], []);
    expect(matcher.exclude).toEqual([]);
    expect(matcher.include).toEqual([
      toMatcherPathFormat(URI.joinPath(testFolder, 'g1')),
      toMatcherPathFormat(URI.joinPath(testFolder, 'g2')),
      toMatcherPathFormat(testFolder.joinPath('g1')),
      toMatcherPathFormat(testFolder.joinPath('g2')),
    ]);
  });

  it('has a match method to filter strings', () => {
    const matcher = new Matcher([testFolder], ['*.md'], []);
    const files = [
      URI.joinPath(testFolder, 'file1.md'),
      URI.joinPath(testFolder, 'file2.md'),
      URI.joinPath(testFolder, 'file3.mdx'),
      URI.joinPath(testFolder, 'sub', 'file4.md'),
      testFolder.joinPath('file1.md'),
      testFolder.joinPath('file2.md'),
      testFolder.joinPath('file3.mdx'),
      testFolder.joinPath('sub', 'file4.md'),
    ];
    expect(matcher.match(files)).toEqual([
      URI.joinPath(testFolder, 'file1.md'),
      URI.joinPath(testFolder, 'file2.md'),
      testFolder.joinPath('file1.md'),
      testFolder.joinPath('file2.md'),
    ]);
  });

  it('has a isMatch method to see whether a file is matched or not', () => {
    const matcher = new Matcher([testFolder], ['*.md'], []);
    const files = [
      URI.joinPath(testFolder, 'file1.md'),
      URI.joinPath(testFolder, 'file2.md'),
      URI.joinPath(testFolder, 'file3.mdx'),
      URI.joinPath(testFolder, 'sub', 'file4.md'),
      testFolder.joinPath('file1.md'),
      testFolder.joinPath('file2.md'),
      testFolder.joinPath('file3.mdx'),
      testFolder.joinPath('sub', 'file4.md'),
    ];
    expect(matcher.isMatch(files[0])).toEqual(true);
    expect(matcher.isMatch(files[1])).toEqual(true);
@@ -72,10 +72,10 @@ describe('Matcher', () => {
  it('ignores files in the exclude list', () => {
    const matcher = new Matcher([testFolder], ['*.md'], ['file1.*']);
    const files = [
      URI.joinPath(testFolder, 'file1.md'),
      URI.joinPath(testFolder, 'file2.md'),
      URI.joinPath(testFolder, 'file3.mdx'),
      URI.joinPath(testFolder, 'sub', 'file4.md'),
      testFolder.joinPath('file1.md'),
      testFolder.joinPath('file2.md'),
      testFolder.joinPath('file3.mdx'),
      testFolder.joinPath('sub', 'file4.md'),
    ];
    expect(matcher.isMatch(files[0])).toEqual(false);
    expect(matcher.isMatch(files[1])).toEqual(true);
@@ -87,7 +87,7 @@ describe('Matcher', () => {
describe('Datastore', () => {
  it('uses the matcher to get the file list', async () => {
    const matcher = new Matcher([testFolder], ['**/*.md'], []);
    const ds = new FileDataStore();
    const ds = new FileDataStore(readFileFromFs);
    expect((await ds.list(matcher.include[0])).length).toEqual(4);
  });
});
@@ -1,12 +1,12 @@
import micromatch from 'micromatch';
import fs from 'fs';
import { URI } from '../model/uri';
import { Logger } from '../utils/log';
import glob from 'glob';
import { glob } from 'glob';
import { promisify } from 'util';
import { isWindows } from '../common/platform';

const findAllFiles = promisify(glob);

export interface IMatcher {
  /**
   * Filters the given list of URIs, keepin only the ones that
@@ -39,8 +39,8 @@ export interface IMatcher {
 * we convert the fs path on the way in and out
 */
export const toMatcherPathFormat = isWindows
  ? (uri: URI) => URI.toFsPath(uri).replace(/\\/g, '/')
  : (uri: URI) => URI.toFsPath(uri);
  ? (uri: URI) => uri.toFsPath().replace(/\\/g, '/')
  : (uri: URI) => uri.toFsPath();

export const toFsPath = isWindows
  ? (path: string): string => path.replace(/\//g, '\\')
@@ -76,7 +76,7 @@ export class Matcher implements IMatcher {

  match(files: URI[]) {
    const matches = micromatch(
      files.map(f => URI.toFsPath(f)),
      files.map(f => f.toFsPath()),
      this.include,
      {
        ignore: this.exclude,
@@ -114,16 +114,19 @@ export interface IDataStore {
 * File system based data store
 */
export class FileDataStore implements IDataStore {
  constructor(private readFile: (uri: URI) => Promise<string>) {}

  async list(glob: string, ignoreGlob?: string | string[]): Promise<URI[]> {
    const res = await findAllFiles(glob, {
      ignore: ignoreGlob,
      strict: false,
    });
    return res.map(URI.file);
  }

  async read(uri: URI) {
    try {
      return (await fs.promises.readFile(URI.toFsPath(uri))).toString();
      return await this.readFile(uri);
    } catch (e) {
      Logger.error(
        `FileDataStore: error while reading uri: ${uri.path} - ${e}`
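The key change to FileDataStore above is that file reading is now injected rather than performed via fs directly. A sketch of how a node-based caller might wire it (illustrative, not part of the diff; it simply mirrors the removed fs.promises.readFile line):

import fs from 'fs';
import { URI } from '../model/uri';
import { FileDataStore } from './datastore';

// The read function receives a URI and resolves to the file content as text.
const ds = new FileDataStore(async (uri: URI) =>
  (await fs.promises.readFile(uri.toFsPath())).toString()
);
// ds.read(uri) now delegates to the injected function, so other environments
// can supply their own reader instead of depending on node's fs module.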
packages/foam-vscode/src/core/services/markdown-link.test.ts (Normal file, 238 lines)
@@ -0,0 +1,238 @@
import { getRandomURI } from '../../test/test-utils';
|
||||
import { ResourceLink } from '../model/note';
|
||||
import { Range } from '../model/range';
|
||||
import { createMarkdownParser } from '../services/markdown-parser';
|
||||
import { MarkdownLink } from './markdown-link';
|
||||
|
||||
describe('MarkdownLink', () => {
|
||||
const parser = createMarkdownParser([]);
|
||||
describe('parse wikilink', () => {
|
||||
it('should parse target', () => {
|
||||
const link = parser.parse(getRandomURI(), `this is a [[wikilink]]`)
|
||||
.links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('wikilink');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('');
|
||||
});
|
||||
it('should parse target and section', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink#section]]`
|
||||
).links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('wikilink');
|
||||
expect(parsed.section).toEqual('section');
|
||||
expect(parsed.alias).toEqual('');
|
||||
});
|
||||
it('should parse target and alias', () => {
|
||||
const link = parser.parse(getRandomURI(), `this is a [[wikilink|alias]]`)
|
||||
.links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('wikilink');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('alias');
|
||||
});
|
||||
it('should parse links with square brackets #975', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink [with] brackets]]`
|
||||
).links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('wikilink [with] brackets');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('');
|
||||
});
|
||||
it('should parse links with square brackets in alias #975', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink|alias [with] brackets]]`
|
||||
).links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('wikilink');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('alias [with] brackets');
|
||||
});
|
||||
it('should parse target and alias with escaped separator', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink\\|alias]]`
|
||||
).links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('wikilink');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('alias');
|
||||
});
|
||||
it('should parse target section and alias', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink with spaces#section with spaces|alias with spaces]]`
|
||||
).links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('wikilink with spaces');
|
||||
expect(parsed.section).toEqual('section with spaces');
|
||||
expect(parsed.alias).toEqual('alias with spaces');
|
||||
});
|
||||
it('should parse section', () => {
|
||||
const link = parser.parse(getRandomURI(), `this is a [[#section]]`)
|
||||
.links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('');
|
||||
expect(parsed.section).toEqual('section');
|
||||
expect(parsed.alias).toEqual('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('parse direct link', () => {
|
||||
it('should parse target', () => {
|
||||
const link = parser.parse(getRandomURI(), `this is a [link](to/path.md)`)
|
||||
.links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('to/path.md');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('link');
|
||||
});
|
||||
it('should parse target and section', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [link](to/path.md#section)`
|
||||
).links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('to/path.md');
|
||||
expect(parsed.section).toEqual('section');
|
||||
expect(parsed.alias).toEqual('link');
|
||||
});
|
||||
it('should parse section only', () => {
|
||||
const link: ResourceLink = {
|
||||
type: 'link',
|
||||
rawText: '[link](#section)',
|
||||
range: Range.create(0, 0),
|
||||
};
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('');
|
||||
expect(parsed.section).toEqual('section');
|
||||
expect(parsed.alias).toEqual('link');
|
||||
});
|
||||
it('should parse links with square brackets in label #975', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [inbox [xyz]](to/path.md)`
|
||||
).links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('to/path.md');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('inbox [xyz]');
|
||||
});
|
||||
it('should parse links with empty label #975', () => {
|
||||
const link = parser.parse(getRandomURI(), `this is a [](to/path.md)`)
|
||||
.links[0];
|
||||
const parsed = MarkdownLink.analyzeLink(link);
|
||||
expect(parsed.target).toEqual('to/path.md');
|
||||
expect(parsed.section).toEqual('');
|
||||
expect(parsed.alias).toEqual('');
|
||||
});
|
||||
});
|
||||
|
||||
describe('rename wikilink', () => {
|
||||
it('should rename the target only', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink#section]]`
|
||||
).links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
target: 'new-link',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[[new-link#section]]`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
it('should rename the section only', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink#section]]`
|
||||
).links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
section: 'new-section',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[[wikilink#new-section]]`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
it('should rename both target and section', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink#section]]`
|
||||
).links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
target: 'new-link',
|
||||
section: 'new-section',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[[new-link#new-section]]`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
it('should be able to remove the section', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [[wikilink#section]]`
|
||||
).links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
section: '',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[[wikilink]]`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
it('should be able to rename the alias', () => {
|
||||
const link = parser.parse(getRandomURI(), `this is a [[wikilink|alias]]`)
|
||||
.links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
alias: 'new-alias',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[[wikilink|new-alias]]`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
});
|
||||
|
||||
describe('rename direct link', () => {
|
||||
it('should rename the target only', () => {
|
||||
const link = parser.parse(getRandomURI(), `this is a [link](to/path.md)`)
|
||||
.links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
target: 'to/another-path.md',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[link](to/another-path.md)`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
it('should rename the section only', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [link](to/path.md#section)`
|
||||
).links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
section: 'section2',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[link](to/path.md#section2)`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
it('should rename both target and section', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [link](to/path.md#section)`
|
||||
).links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
target: 'to/another-path.md',
|
||||
section: 'section2',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[link](to/another-path.md#section2)`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
it('should be able to remove the section', () => {
|
||||
const link = parser.parse(
|
||||
getRandomURI(),
|
||||
`this is a [link](to/path.md#section)`
|
||||
).links[0];
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(link, {
|
||||
section: '',
|
||||
});
|
||||
expect(edit.newText).toEqual(`[link](to/path.md)`);
|
||||
expect(edit.selection).toEqual(link.range);
|
||||
});
|
||||
});
|
||||
});
|
||||
packages/foam-vscode/src/core/services/markdown-link.ts (Normal file, 65 lines)
@@ -0,0 +1,65 @@
import { ResourceLink } from '../model/note';

export abstract class MarkdownLink {
  private static wikilinkRegex = new RegExp(
    /\[\[([^#|]+)?#?([^|]+)?\|?(.*)?\]\]/
  );
  private static directLinkRegex = new RegExp(
    /\[(.*)\]\(([^#]*)?#?([^\]]+)?\)/
  );

  public static analyzeLink(link: ResourceLink) {
    try {
      if (link.type === 'wikilink') {
        const [, target, section, alias] = this.wikilinkRegex.exec(
          link.rawText
        );
        return {
          target: target?.replace(/\\/g, '') ?? '',
          section: section ?? '',
          alias: alias ?? '',
        };
      }
      if (link.type === 'link') {
        const [, alias, target, section] = this.directLinkRegex.exec(
          link.rawText
        );
        return {
          target: target ?? '',
          section: section ?? '',
          alias: alias ?? '',
        };
      }
      throw new Error(`Link of type ${link.type} is not supported`);
    } catch (e) {
      throw new Error(`Couldn't parse link ${link.rawText} - ${e}`);
    }
  }

  public static createUpdateLinkEdit(
    link: ResourceLink,
    delta: { target?: string; section?: string; alias?: string }
  ) {
    const { target, section, alias } = MarkdownLink.analyzeLink(link);
    const newTarget = delta.target ?? target;
    const newSection = delta.section ?? section ?? '';
    const newAlias = delta.alias ?? alias ?? '';
    const sectionDivider = newSection ? '#' : '';
    const aliasDivider = newAlias ? '|' : '';
    if (link.type === 'wikilink') {
      return {
        newText: `[[${newTarget}${sectionDivider}${newSection}${aliasDivider}${newAlias}]]`,
        selection: link.range,
      };
    }
    if (link.type === 'link') {
      return {
        newText: `[${newAlias}](${newTarget}${sectionDivider}${newSection})`,
        selection: link.range,
      };
    }
    throw new Error(
      `Unexpected state: link of type ${link.type} is not supported`
    );
  }
}
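A short usage sketch (illustrative, not part of the new file above), mirroring the expectations in markdown-link.test.ts: analyzeLink splits a link into target/section/alias, and createUpdateLinkEdit rebuilds the raw text with the requested changes.

import { createMarkdownParser } from './markdown-parser';
import { MarkdownLink } from './markdown-link';
import { URI } from '../model/uri';

const parser = createMarkdownParser([]);
const link = parser.parse(URI.file('/a.md'), 'see [[wikilink#section|alias]]')
  .links[0];

MarkdownLink.analyzeLink(link);
// => { target: 'wikilink', section: 'section', alias: 'alias' }

MarkdownLink.createUpdateLinkEdit(link, { target: 'new-link' });
// => { newText: '[[new-link#section|alias]]', selection: link.range }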
packages/foam-vscode/src/core/services/markdown-parser.test.ts (Normal file, 440 lines)
@@ -0,0 +1,440 @@
import { createMarkdownParser, ParserPlugin } from './markdown-parser';
|
||||
import { Logger } from '../utils/log';
|
||||
import { URI } from '../model/uri';
|
||||
import { Range } from '../model/range';
|
||||
import { getRandomURI } from '../../test/test-utils';
|
||||
|
||||
Logger.setLevel('error');
|
||||
|
||||
const parser = createMarkdownParser([]);
|
||||
const createNoteFromMarkdown = (content: string, path?: string) =>
|
||||
parser.parse(path ? URI.file(path) : getRandomURI(), content);
|
||||
|
||||
describe('Markdown parsing', () => {
|
||||
it('should create a Resource from a markdown file', () => {
|
||||
const note = createNoteFromMarkdown('Note content', '/a/path.md');
|
||||
expect(note.uri).toEqual(URI.file('/a/path.md'));
|
||||
});
|
||||
|
||||
describe('Links', () => {
|
||||
it('should skip external links', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
`this is a [link to google](https://www.google.com)`
|
||||
);
|
||||
expect(note.links.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should skip links to a section within the file', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
`this is a [link to intro](#introduction)`
|
||||
);
|
||||
expect(note.links.length).toEqual(0);
|
||||
});
|
||||
|
||||
it('should detect regular markdown links', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'this is a [link to page b](../doc/page-b.md)'
|
||||
);
|
||||
expect(note.links.length).toEqual(1);
|
||||
const link = note.links[0];
|
||||
expect(link.type).toEqual('link');
|
||||
expect(link.rawText).toEqual('[link to page b](../doc/page-b.md)');
|
||||
});
|
||||
|
||||
it('should detect links that have formatting in label', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'this is [**link** with __formatting__](../doc/page-b.md)'
|
||||
);
|
||||
expect(note.links.length).toEqual(1);
|
||||
const link = note.links[0];
|
||||
expect(link.type).toEqual('link');
|
||||
});
|
||||
|
||||
it('should detect wikilinks', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'Some content and [[a link]] to [[a file]]'
|
||||
);
|
||||
expect(note.links.length).toEqual(2);
|
||||
let link = note.links[0];
|
||||
expect(link.type).toEqual('wikilink');
|
||||
expect(link.rawText).toEqual('[[a link]]');
|
||||
link = note.links[1];
|
||||
expect(link.type).toEqual('wikilink');
|
||||
expect(link.rawText).toEqual('[[a file]]');
|
||||
});
|
||||
|
||||
it('should detect wikilinks that have aliases', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
'this is [[link|link alias]]. A link with spaces [[other link | spaced]]'
|
||||
);
|
||||
expect(note.links.length).toEqual(2);
|
||||
let link = note.links[0];
|
||||
expect(link.type).toEqual('wikilink');
|
||||
expect(link.rawText).toEqual('[[link|link alias]]');
|
||||
link = note.links[1];
|
||||
expect(link.type).toEqual('wikilink');
|
||||
expect(link.rawText).toEqual('[[other link | spaced]]');
|
||||
});
|
||||
|
||||
it('should skip wikilinks in codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
this is some text with our [[first-wikilink]].
|
||||
|
||||
\`\`\`
|
||||
this is inside a [[codeblock]]
|
||||
\`\`\`
|
||||
|
||||
this is some text with our [[second-wikilink]].
|
||||
`);
|
||||
expect(noteA.links.map(l => l.rawText)).toEqual([
|
||||
'[[first-wikilink]]',
|
||||
'[[second-wikilink]]',
|
||||
]);
|
||||
});
|
||||
|
||||
it('should skip wikilinks in inlined codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
this is some text with our [[first-wikilink]].
|
||||
|
||||
this is \`inside a [[codeblock]]\`
|
||||
|
||||
this is some text with our [[second-wikilink]].
|
||||
`);
|
||||
expect(noteA.links.map(l => l.rawText)).toEqual([
|
||||
'[[first-wikilink]]',
|
||||
'[[second-wikilink]]',
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Note Title', () => {
|
||||
it('should initialize note title if heading exists', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
# Page A
|
||||
this note has a title
|
||||
`);
|
||||
expect(note.title).toBe('Page A');
|
||||
});
|
||||
|
||||
it('should support wikilinks and urls in title', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
# Page A with [[wikilink]] and a [url](https://google.com)
|
||||
this note has a title
|
||||
`);
|
||||
expect(note.title).toBe('Page A with wikilink and a url');
|
||||
});
|
||||
|
||||
it('should default to file name if heading does not exist', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
`This file has no heading.`,
|
||||
'/page-d.md'
|
||||
);
|
||||
|
||||
expect(note.title).toEqual('page-d');
|
||||
});
|
||||
|
||||
it('should give precedence to frontmatter title over other headings', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
---
|
||||
title: Note Title
|
||||
date: 20-12-12
|
||||
---
|
||||
|
||||
# Other Note Title
|
||||
`);
|
||||
|
||||
expect(note.title).toBe('Note Title');
|
||||
});
|
||||
|
||||
it('should support numbers as title', () => {
|
||||
const note1 = createNoteFromMarkdown(`hello`, '/157.md');
|
||||
expect(note1.title).toBe('157');
|
||||
|
||||
const note2 = createNoteFromMarkdown(`# 158`, '/157.md');
|
||||
expect(note2.title).toBe('158');
|
||||
|
||||
const note3 = createNoteFromMarkdown(
|
||||
`
|
||||
---
|
||||
title: 159
|
||||
---
|
||||
|
||||
# 158
|
||||
`,
|
||||
'/157.md'
|
||||
);
|
||||
expect(note3.title).toBe('159');
|
||||
});
|
||||
|
||||
it('should support empty titles (see #276)', () => {
|
||||
const note = createNoteFromMarkdown(
|
||||
`
|
||||
#
|
||||
|
||||
this note has an empty title line
|
||||
`,
|
||||
'/Hello Page.md'
|
||||
);
|
||||
expect(note.title).toEqual('Hello Page');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Frontmatter', () => {
|
||||
it('should parse yaml frontmatter', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
---
|
||||
title: Note Title
|
||||
date: 20-12-12
|
||||
---
|
||||
|
||||
# Other Note Title`);
|
||||
|
||||
expect(note.properties.title).toBe('Note Title');
|
||||
expect(note.properties.date).toBe('20-12-12');
|
||||
});
|
||||
|
||||
it('should parse empty frontmatter', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
---
|
||||
---
|
||||
|
||||
# Empty Frontmatter
|
||||
`);
|
||||
|
||||
expect(note.properties).toEqual({});
|
||||
});
|
||||
|
||||
it('should not fail when there are issues with parsing frontmatter', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
---
|
||||
title: - one
|
||||
- two
|
||||
- #
|
||||
---
|
||||
|
||||
`);
|
||||
|
||||
expect(note.properties).toEqual({});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Tags', () => {
|
||||
it('can find tags in the text of the note', () => {
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
# this is a #heading
|
||||
#this is some #text that includes #tags we #care-about.
|
||||
`);
|
||||
expect(noteA.tags).toEqual([
|
||||
{ label: 'heading', range: Range.create(1, 12, 1, 20) },
|
||||
{ label: 'this', range: Range.create(2, 0, 2, 5) },
|
||||
{ label: 'text', range: Range.create(2, 14, 2, 19) },
|
||||
{ label: 'tags', range: Range.create(2, 34, 2, 39) },
|
||||
{ label: 'care-about', range: Range.create(2, 43, 2, 54) },
|
||||
]);
|
||||
});
|
||||
|
||||
it('will skip tags in codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
this is some #text that includes #tags we #care-about.
|
||||
|
||||
\`\`\`
|
||||
this is a #codeblock
|
||||
\`\`\`
|
||||
`);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
|
||||
it('will skip tags in inlined codeblocks', () => {
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
this is some #text that includes #tags we #care-about.
|
||||
this is a \`inlined #codeblock\` `);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
it('can find tags as text in yaml', () => {
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
---
|
||||
tags: hello, world this_is_good
|
||||
---
|
||||
# this is a heading
|
||||
this is some #text that includes #tags we #care-about.
|
||||
`);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'hello',
|
||||
'world',
|
||||
'this_is_good',
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
|
||||
it('can find tags as array in yaml', () => {
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
---
|
||||
tags: [hello, world, this_is_good]
|
||||
---
|
||||
# this is a heading
|
||||
this is some #text that includes #tags we #care-about.
|
||||
`);
|
||||
expect(noteA.tags.map(t => t.label)).toEqual([
|
||||
'hello',
|
||||
'world',
|
||||
'this_is_good',
|
||||
'text',
|
||||
'tags',
|
||||
'care-about',
|
||||
]);
|
||||
});
|
||||
|
||||
it('provides rough range for tags in yaml', () => {
|
||||
// For now it's enough to just get the YAML block range
|
||||
// in the future we might want to be more specific
|
||||
|
||||
const noteA = createNoteFromMarkdown(`
|
||||
---
|
||||
tags: [hello, world, this_is_good]
|
||||
---
|
||||
# this is a heading
|
||||
this is some text
|
||||
`);
|
||||
expect(noteA.tags[0]).toEqual({
|
||||
label: 'hello',
|
||||
range: Range.create(1, 0, 3, 3),
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('Sections', () => {
|
||||
it('should find sections within the note', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
# Section 1
|
||||
|
||||
This is the content of section 1.
|
||||
|
||||
## Section 1.1
|
||||
|
||||
This is the content of section 1.1.
|
||||
|
||||
# Section 2
|
||||
|
||||
This is the content of section 2.
|
||||
`);
|
||||
expect(note.sections).toHaveLength(3);
|
||||
expect(note.sections[0].label).toEqual('Section 1');
|
||||
expect(note.sections[0].range).toEqual(Range.create(1, 0, 9, 0));
|
||||
expect(note.sections[1].label).toEqual('Section 1.1');
|
||||
expect(note.sections[1].range).toEqual(Range.create(5, 0, 9, 0));
|
||||
expect(note.sections[2].label).toEqual('Section 2');
|
||||
expect(note.sections[2].range).toEqual(Range.create(9, 0, 13, 0));
|
||||
});
|
||||
|
||||
it('should support wikilinks and links in the section label', () => {
|
||||
const note = createNoteFromMarkdown(`
|
||||
# Section with [[wikilink]]
|
||||
|
||||
This is the content of section with wikilink
|
||||
|
||||
## Section with [url](https://google.com)
|
||||
|
||||
This is the content of section with url`);
|
||||
expect(note.sections).toHaveLength(2);
|
||||
expect(note.sections[0].label).toEqual('Section with wikilink');
|
||||
expect(note.sections[1].label).toEqual('Section with url');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Parser plugins', () => {
|
||||
const testPlugin: ParserPlugin = {
|
||||
visit: (node, note) => {
|
||||
if (node.type === 'heading') {
|
||||
note.properties.hasHeading = true;
|
||||
}
|
||||
},
|
||||
};
|
||||
const parser = createMarkdownParser([testPlugin]);
|
||||
|
||||
it('can augment the parsing of the file', () => {
|
||||
const note1 = parser.parse(
|
||||
URI.file('/path/to/a'),
|
||||
`
|
||||
This is a test note without headings.
|
||||
But with some content.
|
||||
`
|
||||
);
|
||||
expect(note1.properties.hasHeading).toBeUndefined();
|
||||
|
||||
const note2 = parser.parse(
|
||||
URI.file('/path/to/a'),
|
||||
`
|
||||
# This is a note with header
|
||||
and some content`
|
||||
);
|
||||
expect(note2.properties.hasHeading).toBeTruthy();
|
||||
});
|
||||
});
|
||||
describe('Alias', () => {
|
||||
it('can find tags in comma separated string', () => {
|
||||
const note = parser.parse(
|
||||
URI.file('/path/to/a'),
|
||||
`
|
||||
---
|
||||
alias: alias 1, alias 2 , alias3
|
||||
---
|
||||
This is a test note without headings.
|
||||
But with some content.
|
||||
`
|
||||
);
|
||||
expect(note.aliases).toEqual([
|
||||
{
|
||||
range: Range.create(1, 0, 3, 3),
|
||||
title: 'alias 1',
|
||||
},
|
||||
{
|
||||
range: Range.create(1, 0, 3, 3),
|
||||
title: 'alias 2',
|
||||
},
|
||||
{
|
||||
range: Range.create(1, 0, 3, 3),
|
||||
title: 'alias3',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
it('can find tags in yaml array', () => {
|
||||
const note = parser.parse(
|
||||
URI.file('/path/to/a'),
|
||||
`
|
||||
---
|
||||
alias:
|
||||
- alias 1
|
||||
- alias 2
|
||||
- alias3
|
||||
---
|
||||
This is a test note without headings.
|
||||
But with some content.
|
||||
`
|
||||
);
|
||||
expect(note.aliases).toEqual([
|
||||
{
|
||||
range: Range.create(1, 0, 6, 3),
|
||||
title: 'alias 1',
|
||||
},
|
||||
{
|
||||
range: Range.create(1, 0, 6, 3),
|
||||
title: 'alias 2',
|
||||
},
|
||||
{
|
||||
range: Range.create(1, 0, 6, 3),
|
||||
title: 'alias3',
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
packages/foam-vscode/src/core/services/markdown-parser.ts (Normal file, 388 lines)
@@ -0,0 +1,388 @@
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
import { Point, Node, Position as AstPosition } from 'unist';
|
||||
import unified from 'unified';
|
||||
import markdownParse from 'remark-parse';
|
||||
import wikiLinkPlugin from 'remark-wiki-link';
|
||||
import frontmatterPlugin from 'remark-frontmatter';
|
||||
import { parse as parseYAML } from 'yaml';
|
||||
import visit from 'unist-util-visit';
|
||||
import detectNewline from 'detect-newline';
|
||||
import os from 'os';
|
||||
import { NoteLinkDefinition, Resource, ResourceParser } from '../model/note';
|
||||
import { Position } from '../model/position';
|
||||
import { Range } from '../model/range';
|
||||
import { extractHashtags, extractTagsFromProp, isSome } from '../utils';
|
||||
import { Logger } from '../utils/log';
|
||||
import { URI } from '../model/uri';
|
||||
|
||||
export interface ParserPlugin {
|
||||
name?: string;
|
||||
visit?: (node: Node, note: Resource, noteSource: string) => void;
|
||||
onDidInitializeParser?: (parser: unified.Processor) => void;
|
||||
onWillParseMarkdown?: (markdown: string) => string;
|
||||
onWillVisitTree?: (tree: Node, note: Resource) => void;
|
||||
onDidVisitTree?: (tree: Node, note: Resource) => void;
|
||||
onDidFindProperties?: (properties: any, note: Resource, node: Node) => void;
|
||||
}
|
||||
|
||||
const ALIAS_DIVIDER_CHAR = '|';
|
||||
|
||||
export function createMarkdownParser(
|
||||
extraPlugins: ParserPlugin[]
|
||||
): ResourceParser {
|
||||
const parser = unified()
|
||||
.use(markdownParse, { gfm: true })
|
||||
.use(frontmatterPlugin, ['yaml'])
|
||||
.use(wikiLinkPlugin, { aliasDivider: ALIAS_DIVIDER_CHAR });
|
||||
|
||||
const plugins = [
|
||||
titlePlugin,
|
||||
wikilinkPlugin,
|
||||
definitionsPlugin,
|
||||
tagsPlugin,
|
||||
aliasesPlugin,
|
||||
sectionsPlugin,
|
||||
...extraPlugins,
|
||||
];
|
||||
|
||||
for (const plugin of plugins) {
|
||||
try {
|
||||
plugin.onDidInitializeParser?.(parser);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onDidInitializeParser', undefined, e);
|
||||
}
|
||||
}
|
||||
|
||||
const foamParser: ResourceParser = {
|
||||
parse: (uri: URI, markdown: string): Resource => {
|
||||
Logger.debug('Parsing:', uri.toString());
|
||||
for (const plugin of plugins) {
|
||||
try {
|
||||
plugin.onWillParseMarkdown?.(markdown);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onWillParseMarkdown', uri, e);
|
||||
}
|
||||
}
|
||||
const tree = parser.parse(markdown);
|
||||
const eol = detectNewline(markdown) || os.EOL;
|
||||
|
||||
const note: Resource = {
|
||||
uri: uri,
|
||||
type: 'note',
|
||||
properties: {},
|
||||
title: '',
|
||||
sections: [],
|
||||
tags: [],
|
||||
aliases: [],
|
||||
links: [],
|
||||
definitions: [],
|
||||
source: {
|
||||
text: markdown,
|
||||
contentStart: astPointToFoamPosition(tree.position!.start),
|
||||
end: astPointToFoamPosition(tree.position!.end),
|
||||
eol: eol,
|
||||
},
|
||||
};
|
||||
|
||||
for (const plugin of plugins) {
|
||||
try {
|
||||
plugin.onWillVisitTree?.(tree, note);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onWillVisitTree', uri, e);
|
||||
}
|
||||
}
|
||||
visit(tree, node => {
|
||||
if (node.type === 'yaml') {
|
||||
try {
|
||||
const yamlProperties = parseYAML((node as any).value) ?? {};
|
||||
note.properties = {
|
||||
...note.properties,
|
||||
...yamlProperties,
|
||||
};
|
||||
// Update the start position of the note by exluding the metadata
|
||||
note.source.contentStart = Position.create(
|
||||
node.position!.end.line! + 2,
|
||||
0
|
||||
);
|
||||
|
||||
for (const plugin of plugins) {
|
||||
try {
|
||||
plugin.onDidFindProperties?.(yamlProperties, note, node);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onDidFindProperties', uri, e);
|
||||
}
|
||||
}
|
||||
} catch (e) {
|
||||
Logger.warn(`Error while parsing YAML for [${uri.toString()}]`, e);
|
||||
}
|
||||
}
|
||||
|
||||
for (const plugin of plugins) {
|
||||
try {
|
||||
plugin.visit?.(node, note, markdown);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'visit', uri, e);
|
||||
}
|
||||
}
|
||||
});
|
||||
for (const plugin of plugins) {
|
||||
try {
|
||||
plugin.onDidVisitTree?.(tree, note);
|
||||
} catch (e) {
|
||||
handleError(plugin, 'onDidVisitTree', uri, e);
|
||||
}
|
||||
}
|
||||
Logger.debug('Result:', note);
|
||||
return note;
|
||||
},
|
||||
};
|
||||
return foamParser;
|
||||
}
|
||||
|
||||
/**
|
||||
* Traverses all the children of the given node, extracts
|
||||
* the text from them, and returns it concatenated.
|
||||
*
|
||||
* @param root the node from which to start collecting text
|
||||
*/
|
||||
const getTextFromChildren = (root: Node): string => {
|
||||
let text = '';
|
||||
visit(root, node => {
|
||||
if (node.type === 'text' || node.type === 'wikiLink') {
|
||||
text = text + ((node as any).value || '');
|
||||
}
|
||||
});
|
||||
return text;
|
||||
};
|
||||
|
||||
const tagsPlugin: ParserPlugin = {
|
||||
name: 'tags',
|
||||
onDidFindProperties: (props, note, node) => {
|
||||
if (isSome(props.tags)) {
|
||||
const yamlTags = extractTagsFromProp(props.tags);
|
||||
for (const tag of yamlTags) {
|
||||
note.tags.push({
|
||||
label: tag,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
visit: (node, note) => {
|
||||
if (node.type === 'text') {
|
||||
const tags = extractHashtags((node as any).value);
|
||||
for (const tag of tags) {
|
||||
const start = astPointToFoamPosition(node.position!.start);
|
||||
start.character = start.character + tag.offset;
|
||||
const end: Position = {
|
||||
line: start.line,
|
||||
character: start.character + tag.label.length + 1,
|
||||
};
|
||||
note.tags.push({
|
||||
label: tag.label,
|
||||
range: Range.createFromPosition(start, end),
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
let sectionStack: Array<{ label: string; level: number; start: Position }> = [];
|
||||
const sectionsPlugin: ParserPlugin = {
|
||||
name: 'section',
|
||||
onWillVisitTree: () => {
|
||||
sectionStack = [];
|
||||
},
|
||||
visit: (node, note) => {
|
||||
if (node.type === 'heading') {
|
||||
const level = (node as any).depth;
|
||||
const label = getTextFromChildren(node);
|
||||
if (!label || !level) {
|
||||
return;
|
||||
}
|
||||
const start = astPositionToFoamRange(node.position!).start;
|
||||
|
||||
// Close all the sections that are not parents of the current section
|
||||
while (
|
||||
sectionStack.length > 0 &&
|
||||
sectionStack[sectionStack.length - 1].level >= level
|
||||
) {
|
||||
const section = sectionStack.pop();
|
||||
note.sections.push({
|
||||
label: section.label,
|
||||
range: Range.createFromPosition(section.start, start),
|
||||
});
|
||||
}
|
||||
|
||||
// Add the new section to the stack
|
||||
sectionStack.push({ label, level, start });
|
||||
}
|
||||
},
|
||||
onDidVisitTree: (tree, note) => {
|
||||
const end = Position.create(note.source.end.line + 1, 0);
|
||||
// Close all the remainig sections
|
||||
while (sectionStack.length > 0) {
|
||||
const section = sectionStack.pop();
|
||||
note.sections.push({
|
||||
label: section.label,
|
||||
range: { start: section.start, end },
|
||||
});
|
||||
}
|
||||
note.sections.sort((a, b) =>
|
||||
Position.compareTo(a.range.start, b.range.start)
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
const titlePlugin: ParserPlugin = {
|
||||
name: 'title',
|
||||
visit: (node, note) => {
|
||||
if (
|
||||
note.title === '' &&
|
||||
node.type === 'heading' &&
|
||||
(node as any).depth === 1
|
||||
) {
|
||||
const title = getTextFromChildren(node);
|
||||
note.title = title.length > 0 ? title : note.title;
|
||||
}
|
||||
},
|
||||
onDidFindProperties: (props, note) => {
|
||||
// Give precendence to the title from the frontmatter if it exists
|
||||
note.title = props.title?.toString() ?? note.title;
|
||||
},
|
||||
onDidVisitTree: (tree, note) => {
|
||||
if (note.title === '') {
|
||||
note.title = note.uri.getName();
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const aliasesPlugin: ParserPlugin = {
|
||||
name: 'aliases',
|
||||
onDidFindProperties: (props, note, node) => {
|
||||
if (isSome(props.alias)) {
|
||||
const aliases = Array.isArray(props.alias)
|
||||
? props.alias
|
||||
: props.alias.split(',').map(m => m.trim());
|
||||
for (const alias of aliases) {
|
||||
note.aliases.push({
|
||||
title: alias,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const wikilinkPlugin: ParserPlugin = {
|
||||
name: 'wikilink',
|
||||
visit: (node, note, noteSource) => {
|
||||
if (node.type === 'wikiLink') {
|
||||
const literalContent = noteSource.substring(
|
||||
node.position!.start.offset!,
|
||||
node.position!.end.offset!
|
||||
);
|
||||
|
||||
note.links.push({
|
||||
type: 'wikilink',
|
||||
rawText: literalContent,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
if (node.type === 'link') {
|
||||
const targetUri = (node as any).url;
|
||||
const uri = note.uri.resolve(targetUri);
|
||||
if (uri.scheme !== 'file' || uri.path === note.uri.path) {
|
||||
return;
|
||||
}
|
||||
const literalContent = noteSource.substring(
|
||||
node.position!.start.offset!,
|
||||
node.position!.end.offset!
|
||||
);
|
||||
note.links.push({
|
||||
type: 'link',
|
||||
rawText: literalContent,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
const definitionsPlugin: ParserPlugin = {
|
||||
name: 'definitions',
|
||||
visit: (node, note) => {
|
||||
if (node.type === 'definition') {
|
||||
note.definitions.push({
|
||||
label: (node as any).label,
|
||||
url: (node as any).url,
|
||||
title: (node as any).title,
|
||||
range: astPositionToFoamRange(node.position!),
|
||||
});
|
||||
}
|
||||
},
|
||||
onDidVisitTree: (tree, note) => {
|
||||
note.definitions = getFoamDefinitions(note.definitions, note.source.end);
|
||||
},
|
||||
};
|
||||
|
||||
const handleError = (
|
||||
plugin: ParserPlugin,
|
||||
fnName: string,
|
||||
uri: URI | undefined,
|
||||
e: Error
|
||||
): void => {
|
||||
const name = plugin.name || '';
|
||||
Logger.warn(
|
||||
`Error while executing [${fnName}] in plugin [${name}]. ${
|
||||
uri ? 'for file [' + uri.toString() : ']'
|
||||
}.`,
|
||||
e
|
||||
);
|
||||
};
|
||||
|
||||
function getFoamDefinitions(
|
||||
defs: NoteLinkDefinition[],
|
||||
fileEndPoint: Position
|
||||
): NoteLinkDefinition[] {
|
||||
let previousLine = fileEndPoint.line;
|
||||
const foamDefinitions = [];
|
||||
|
||||
// walk through each definition in reverse order
|
||||
// (last one first)
|
||||
for (const def of defs.reverse()) {
|
||||
// if this definition is more than 2 lines above the
|
||||
// previous one below it (or file end), that means we
|
||||
// have exited the trailing definition block, and should bail
|
||||
const start = def.range!.start.line;
|
||||
if (start < previousLine - 2) {
|
||||
break;
|
||||
}
|
||||
|
||||
foamDefinitions.unshift(def);
|
||||
previousLine = def.range!.end.line;
|
||||
}
|
||||
|
||||
return foamDefinitions;
|
||||
}
|
||||
|
||||
/**
|
||||
* Converts the 1-index Point object into the VS Code 0-index Position object
|
||||
* @param point ast Point (1-indexed)
|
||||
* @returns Foam Position (0-indexed)
|
||||
*/
|
||||
const astPointToFoamPosition = (point: Point): Position => {
|
||||
return Position.create(point.line - 1, point.column - 1);
|
||||
};
|
||||
|
||||
/**
|
||||
* Converts the 1-index Position object into the VS Code 0-index Range object
|
||||
* @param position an ast Position object (1-indexed)
|
||||
* @returns Foam Range (0-indexed)
|
||||
*/
|
||||
const astPositionToFoamRange = (pos: AstPosition): Range =>
|
||||
Range.create(
|
||||
pos.start.line - 1,
|
||||
pos.start.column - 1,
|
||||
pos.end.line - 1,
|
||||
pos.end.column - 1
|
||||
);
|
||||
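A minimal ParserPlugin sketch (illustrative, not part of the new file above): a custom plugin that counts headings, passed to createMarkdownParser the same way the test plugin in markdown-parser.test.ts is.

import { createMarkdownParser, ParserPlugin } from './markdown-parser';
import { URI } from '../model/uri';

const headingCounter: ParserPlugin = {
  name: 'heading-counter',
  visit: (node, note) => {
    // properties is a free-form bag, as the test plugin above also assumes
    if (node.type === 'heading') {
      note.properties.headingCount = (note.properties.headingCount ?? 0) + 1;
    }
  },
};

const parser = createMarkdownParser([headingCounter]);
const note = parser.parse(URI.file('/a.md'), '# One\n\n## Two\n');
// note.properties.headingCount === 2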
packages/foam-vscode/src/core/services/markdown-provider.test.ts (Normal file, 292 lines)
@@ -0,0 +1,292 @@
import { createMarkdownParser } from './markdown-parser';
|
||||
import { createMarkdownReferences } from './markdown-provider';
|
||||
import { Logger } from '../utils/log';
|
||||
import { URI } from '../model/uri';
|
||||
import {
|
||||
createTestNote,
|
||||
createTestWorkspace,
|
||||
getRandomURI,
|
||||
} from '../../test/test-utils';
|
||||
|
||||
Logger.setLevel('error');
|
||||
|
||||
const parser = createMarkdownParser([]);
|
||||
const createNoteFromMarkdown = (content: string, path?: string) =>
  parser.parse(path ? URI.file(path) : getRandomURI(), content);

describe('Link resolution', () => {
  describe('Wikilinks', () => {
    it('should resolve basename wikilinks with files in same directory', () => {
      const workspace = createTestWorkspace();
      const noteA = createNoteFromMarkdown('Link to [[page b]]', './page-a.md');
      const noteB = createNoteFromMarkdown('Content of page b', './page b.md');
      workspace.set(noteA).set(noteB);
      expect(workspace.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should resolve basename wikilinks with files in other directory', () => {
      const workspace = createTestWorkspace();
      const noteA = createNoteFromMarkdown('Link to [[page b]]', './page-a.md');
      const noteB = createNoteFromMarkdown('Page b', './folder/page b.md');
      workspace.set(noteA).set(noteB);
      expect(workspace.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should resolve wikilinks that represent an absolute path', () => {
      const workspace = createTestWorkspace();
      const noteA = createNoteFromMarkdown(
        'Link to [[/folder/page b]]',
        '/page-a.md'
      );
      const noteB = createNoteFromMarkdown('Page b', '/folder/page b.md');
      workspace.set(noteA).set(noteB);
      expect(workspace.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should resolve wikilinks that represent a relative path', () => {
      const workspace = createTestWorkspace();
      const noteA = createNoteFromMarkdown(
        'Link to [[../two/page b]]',
        '/path/one/page-a.md'
      );
      const noteB = createNoteFromMarkdown('Page b', '/path/one/page b.md');
      const noteB2 = createNoteFromMarkdown('Page b 2', '/path/two/page b.md');
      workspace
        .set(noteA)
        .set(noteB)
        .set(noteB2);
      expect(workspace.resolveLink(noteA, noteA.links[0])).toEqual(noteB2.uri);
    });

    it('should resolve ambiguous wikilinks', () => {
      const workspace = createTestWorkspace();
      const noteA = createNoteFromMarkdown('Link to [[page b]]', '/page-a.md');
      const noteB = createNoteFromMarkdown('Page b', '/path/one/page b.md');
      const noteB2 = createNoteFromMarkdown('Page b2', '/path/two/page b.md');
      workspace
        .set(noteA)
        .set(noteB)
        .set(noteB2);
      expect(workspace.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should resolve path wikilink even with other ambiguous notes', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [{ slug: './more/page-b' }, { slug: 'yet/page-b' }],
      });
      const noteB1 = createTestNote({ uri: '/path/to/another/page-b.md' });
      const noteB2 = createTestNote({ uri: '/path/to/more/page-b.md' });
      const noteB3 = createTestNote({ uri: '/path/to/yet/page-b.md' });

      const ws = createTestWorkspace();
      ws.set(noteA)
        .set(noteB1)
        .set(noteB2)
        .set(noteB3);

      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(noteB2.uri);
      expect(ws.resolveLink(noteA, noteA.links[1])).toEqual(noteB3.uri);
    });

    it('should resolve Foam wikilinks', () => {
      const workspace = createTestWorkspace();
      const noteA = createNoteFromMarkdown(
        'Link to [[two/page b]] and [[one/page b]]',
        '/page-a.md'
      );
      const noteB = createNoteFromMarkdown('Page b', '/path/one/page b.md');
      const noteB2 = createNoteFromMarkdown('Page b2', '/path/two/page b.md');
      workspace
        .set(noteA)
        .set(noteB)
        .set(noteB2);
      expect(workspace.resolveLink(noteA, noteA.links[0])).toEqual(noteB2.uri);
      expect(workspace.resolveLink(noteA, noteA.links[1])).toEqual(noteB.uri);
    });

    it('should use wikilink definitions when available to resolve target', () => {
      const ws = createTestWorkspace();
      const noteA = createTestNote({
        uri: '/somewhere/from/page-a.md',
        links: [{ slug: 'page-b' }],
      });
      noteA.definitions.push({
        label: 'page-b',
        url: '../to/page-b.md',
      });
      const noteB = createTestNote({
        uri: '/somewhere/to/page-b.md',
      });
      ws.set(noteA).set(noteB);
      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should support case insensitive wikilink resolution', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [
          // uppercased filename, lowercased slug
          { slug: 'page-b' },
          // lowercased filename, camelcased wikilink
          { slug: 'Page-C' },
          // lowercased filename, lowercased wikilink
          { slug: 'page-d' },
        ],
      });
      const noteB = createTestNote({ uri: '/somewhere/PAGE-B.md' });
      const noteC = createTestNote({ uri: '/path/another/page-c.md' });
      const noteD = createTestNote({ uri: '/path/another/page-d.md' });
      const ws = createTestWorkspace()
        .set(noteA)
        .set(noteB)
        .set(noteC)
        .set(noteD);

      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
      expect(ws.resolveLink(noteA, noteA.links[1])).toEqual(noteC.uri);
      expect(ws.resolveLink(noteA, noteA.links[2])).toEqual(noteD.uri);
    });

    it('should resolve wikilink with section identifier', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [
          // uppercased filename, lowercased slug
          { slug: 'page-b#section' },
        ],
      });
      const noteB = createTestNote({ uri: '/somewhere/PAGE-B.md' });
      const ws = createTestWorkspace()
        .set(noteA)
        .set(noteB);

      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(
        noteB.uri.withFragment('section')
      );
    });

    it('should resolve section-only wikilinks', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [
          // uppercased filename, lowercased slug
          { slug: '#section' },
        ],
      });
      const ws = createTestWorkspace().set(noteA);

      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(
        noteA.uri.withFragment('section')
      );
    });
  });

  describe('Markdown direct links', () => {
    it('should support absolute path 1', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [{ to: '/path/to/another/page-b.md' }],
      });
      const noteB = createTestNote({
        uri: '/path/to/another/page-b.md',
        links: [{ to: '../../to/page-a.md' }],
      });

      const ws = createTestWorkspace();
      ws.set(noteA).set(noteB);
      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should support relative path 1', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [{ to: './another/page-b.md' }],
      });
      const noteB = createTestNote({
        uri: '/path/to/another/page-b.md',
        links: [{ to: '../../to/page-a.md' }],
      });

      const ws = createTestWorkspace();
      ws.set(noteA).set(noteB);
      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should support relative path 2', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [{ to: 'more/page-b.md' }],
      });
      const noteB = createTestNote({
        uri: '/path/to/more/page-b.md',
      });
      const ws = createTestWorkspace();
      ws.set(noteA).set(noteB);
      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });

    it('should default to relative path', () => {
      const noteA = createTestNote({
        uri: '/path/to/page-a.md',
        links: [{ to: 'page .md' }],
      });
      const noteB = createTestNote({
        uri: '/path/to/page .md',
      });
      const ws = createTestWorkspace();
      ws.set(noteA).set(noteB);
      expect(ws.resolveLink(noteA, noteA.links[0])).toEqual(noteB.uri);
    });
  });
});

describe('Generation of markdown references', () => {
  it('should generate links without file extension when includeExtension = false', () => {
    const workspace = createTestWorkspace();
    const noteA = createNoteFromMarkdown(
      'Link to [[page-b]] and [[page-c]]',
      '/dir1/page-a.md'
    );
    workspace
      .set(noteA)
      .set(createNoteFromMarkdown('Content of note B', '/dir1/page-b.md'))
      .set(createNoteFromMarkdown('Content of note C', '/dir1/page-c.md'));

    const references = createMarkdownReferences(workspace, noteA.uri, false);
    expect(references.map(r => r.url)).toEqual(['page-b', 'page-c']);
  });

  it('should generate links with file extension when includeExtension = true', () => {
    const workspace = createTestWorkspace();
    const noteA = createNoteFromMarkdown(
      'Link to [[page-b]] and [[page-c]]',
      '/dir1/page-a.md'
    );
    workspace
      .set(noteA)
      .set(createNoteFromMarkdown('Content of note B', '/dir1/page-b.md'))
      .set(createNoteFromMarkdown('Content of note C', '/dir1/page-c.md'));

    const references = createMarkdownReferences(workspace, noteA.uri, true);
    expect(references.map(r => r.url)).toEqual(['page-b.md', 'page-c.md']);
  });

  it('should use relative paths', () => {
    const workspace = createTestWorkspace();
    const noteA = createNoteFromMarkdown(
      'Link to [[page-b]] and [[page-c]]',
      '/dir1/page-a.md'
    );
    workspace
      .set(noteA)
      .set(createNoteFromMarkdown('Content of note B', '/dir2/page-b.md'))
      .set(createNoteFromMarkdown('Content of note C', '/dir3/page-c.md'));

    const references = createMarkdownReferences(workspace, noteA.uri, true);
    expect(references.map(r => r.url)).toEqual([
      '../dir2/page-b.md',
      '../dir3/page-c.md',
    ]);
  });
});
221
packages/foam-vscode/src/core/services/markdown-provider.ts
Normal file
@@ -0,0 +1,221 @@
import {
  NoteLinkDefinition,
  Resource,
  ResourceLink,
  ResourceParser,
} from '../model/note';
import { isNone, isSome } from '../utils';
import { Logger } from '../utils/log';
import { URI } from '../model/uri';
import { FoamWorkspace } from '../model/workspace';
import { IDataStore, IMatcher } from '../services/datastore';
import { IDisposable } from '../common/lifecycle';
import { ResourceProvider } from '../model/provider';
import { createMarkdownParser } from './markdown-parser';
import { MarkdownLink } from './markdown-link';

export class MarkdownResourceProvider implements ResourceProvider {
  private disposables: IDisposable[] = [];

  constructor(
    private readonly matcher: IMatcher,
    private readonly dataStore: IDataStore,
    private readonly watcherInit?: (triggers: {
      onDidChange: (uri: URI) => void;
      onDidCreate: (uri: URI) => void;
      onDidDelete: (uri: URI) => void;
    }) => IDisposable[],
    private readonly parser: ResourceParser = createMarkdownParser([])
  ) {}

  async init(workspace: FoamWorkspace) {
    const filesByFolder = await Promise.all(
      this.matcher.include.map(glob =>
        this.dataStore.list(glob, this.matcher.exclude)
      )
    );
    const files = this.matcher
      .match(filesByFolder.flat())
      .filter(this.supports);

    await Promise.all(
      files.map(async uri => {
        Logger.info('Found: ' + uri.toString());
        const content = await this.dataStore.read(uri);
        if (isSome(content)) {
          workspace.set(this.parser.parse(uri, content));
        }
      })
    );

    this.disposables =
      this.watcherInit?.({
        onDidChange: async uri => {
          if (this.matcher.isMatch(uri) && this.supports(uri)) {
            const content = await this.dataStore.read(uri);
            isSome(content) &&
              workspace.set(await this.parser.parse(uri, content));
          }
        },
        onDidCreate: async uri => {
          if (this.matcher.isMatch(uri) && this.supports(uri)) {
            const content = await this.dataStore.read(uri);
            isSome(content) &&
              workspace.set(await this.parser.parse(uri, content));
          }
        },
        onDidDelete: uri => {
          this.supports(uri) && workspace.delete(uri);
        },
      }) ?? [];
  }

  supports(uri: URI) {
    return uri.isMarkdown();
  }

  read(uri: URI): Promise<string | null> {
    return this.dataStore.read(uri);
  }

  async readAsMarkdown(uri: URI): Promise<string | null> {
    let content = await this.dataStore.read(uri);
    if (isSome(content) && uri.fragment) {
      const resource = this.parser.parse(uri, content);
      const section = Resource.findSection(resource, uri.fragment);
      if (isSome(section)) {
        const rows = content.split('\n');
        content = rows
          .slice(section.range.start.line, section.range.end.line)
          .join('\n');
      }
    }
    return content;
  }

  async fetch(uri: URI) {
    const content = await this.read(uri);
    return isSome(content) ? this.parser.parse(uri, content) : null;
  }

  resolveLink(
    workspace: FoamWorkspace,
    resource: Resource,
    link: ResourceLink
  ) {
    let targetUri: URI | undefined;
    const { target, section } = MarkdownLink.analyzeLink(link);
    switch (link.type) {
      case 'wikilink': {
        let definitionUri = undefined;
        for (const def of resource.definitions) {
          if (def.label === target) {
            definitionUri = def.url;
            break;
          }
        }
        if (isSome(definitionUri)) {
          const definedUri = resource.uri.resolve(definitionUri);
          targetUri =
            workspace.find(definedUri, resource.uri)?.uri ??
            URI.placeholder(definedUri.path);
        } else {
          targetUri =
            target === ''
              ? resource.uri
              : workspace.find(target, resource.uri)?.uri ??
                URI.placeholder(target);

          if (section) {
            targetUri = targetUri.withFragment(section);
          }
        }
        break;
      }
      case 'link': {
        // force ambiguous links to be treated as relative
        const path =
          target.startsWith('/') ||
          target.startsWith('./') ||
          target.startsWith('../')
            ? target
            : './' + target;
        targetUri =
          workspace.find(path, resource.uri)?.uri ??
          URI.placeholder(resource.uri.resolve(path).path);
        if (section && !targetUri.isPlaceholder()) {
          targetUri = targetUri.withFragment(section);
        }
        break;
      }
    }
    return targetUri;
  }

  dispose() {
    this.disposables.forEach(d => d.dispose());
  }
}

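// Editor's sketch (illustrative only, not part of this file): how resolveLink
// behaves for the two link types above, using invented paths and hypothetical
// helpers ('noteAt', 'wikilink', 'directLink') just to keep the example short.
//
//   provider.resolveLink(ws, noteAt('/docs/page-a.md'), wikilink('[[page-b]]'));
//   // looks up 'page-b' via workspace.find(target, resource.uri), falling back
//   // to URI.placeholder('page-b') when no note matches.
//
//   provider.resolveLink(ws, noteAt('/docs/page-a.md'), directLink('page-b.md'));
//   // bare targets are forced to be relative, i.e. resolved as './page-b.md'
//   // against /docs/page-a.md.
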
export function createMarkdownReferences(
  workspace: FoamWorkspace,
  noteUri: URI,
  includeExtension: boolean
): NoteLinkDefinition[] {
  const source = workspace.find(noteUri);
  // Should never occur since we're already in a file
  if (source?.type !== 'note') {
    console.warn(
      `Note ${noteUri.toString()} not found in workspace when attempting \
to generate markdown reference list`
    );
    return [];
  }

  return source.links
    .filter(link => link.type === 'wikilink')
    .map(link => {
      const targetUri = workspace.resolveLink(source, link);
      const target = workspace.find(targetUri);
      if (isNone(target)) {
        Logger.warn(
          `Link ${targetUri.toString()} in ${noteUri.toString()} is not valid.`
        );
        return null;
      }
      if (target.type === 'placeholder') {
        // no need to create definitions for placeholders
        return null;
      }

      let relativeUri = target.uri.relativeTo(noteUri.getDirectory());
      if (!includeExtension) {
        relativeUri = relativeUri.changeExtension('*', '');
      }

      // [wikilink-text]: path/to/file.md "Page title"
      return {
        label:
          link.rawText.indexOf('[[') > -1
            ? link.rawText.substring(2, link.rawText.length - 2)
            : link.rawText,
        url: relativeUri.path,
        title: target.title,
      };
    })
    .filter(isSome)
    .sort();
}

export function stringifyMarkdownLinkReferenceDefinition(
  definition: NoteLinkDefinition
) {
  const url =
    definition.url.indexOf(' ') > 0 ? `<${definition.url}>` : definition.url;
  let text = `[${definition.label}]: ${url}`;
  if (definition.title) {
    text = `${text} "${definition.title}"`;
  }

  return text;
}
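A minimal sketch of what the serializer above produces, with invented definition
values (not taken from the diff): URLs containing a space are wrapped in angle
brackets, and the title, when present, is appended in double quotes.

  stringifyMarkdownLinkReferenceDefinition({
    label: 'page b',
    url: '../dir2/page b.md',
    title: 'Page B',
  });
  // => '[page b]: <../dir2/page b.md> "Page B"'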
@@ -1,45 +0,0 @@
import { getShortestIdentifier } from './core';
import { extractHashtags } from './index';
import { Logger } from './log';

Logger.setLevel('error');

describe('getShortestIdentifier', () => {
  const needle = '/project/car/todo';

  test.each([
    [['/project/home/todo', '/other/todo', '/something/else'], 'car/todo'],
    [['/family/car/todo', '/other/todo'], 'project/car/todo'],
    [[], 'todo'],
  ])('Find shortest identifier', (haystack, id) => {
    expect(getShortestIdentifier(needle, haystack)).toEqual(id);
  });

  it('should ignore same string in haystack', () => {
    const haystack = [
      needle,
      '/project/home/todo',
      '/other/todo',
      '/something/else',
    ];

    expect(getShortestIdentifier(needle, haystack)).toEqual('car/todo');
  });

  it('should return best guess when no solution is possible', () => {
    /**
     * In this case there is no way to uniquely identify the element,
     * our fallback is to just return the "least wrong" result, basically
     * a full identifier
     * This is an edge case that should never happen in a real repo
     */
    const haystack = [
      '/parent/' + needle,
      '/project/home/todo',
      '/other/todo',
      '/something/else',
    ];

    expect(getShortestIdentifier(needle, haystack)).toEqual('project/car/todo');
  });
});
@@ -1,19 +1,19 @@
import crypto from 'crypto';

export function isNotNull<T>(value: T | null): value is T {
  return value != null; // eslint-disable-line
  return value != null;
}

export function isSome<T>(
  value: T | null | undefined | void
): value is NonNullable<T> {
  return value != null; // eslint-disable-line
  return value != null;
}

export function isNone<T>(
  value: T | null | undefined | void
): value is null | undefined | void {
  return value == null; // eslint-disable-line
  return value == null;
}

export function isNumeric(value: string): boolean {
@@ -25,43 +25,3 @@ export const hash = (text: string) =>
    .createHash('sha1')
    .update(text)
    .digest('hex');

/**
 * Returns the minimal identifier for the given string amongst others
 *
 * @param forValue the value to compute the identifier for
 * @param amongst the set of strings within which to find the identifier
 */
export const getShortestIdentifier = (
  forValue: string,
  amongst: string[]
): string => {
  const needleTokens = forValue.split('/').reverse();
  const haystack = amongst
    .filter(value => value !== forValue)
    .map(value => value.split('/').reverse());

  let tokenIndex = 0;
  let res = needleTokens;
  while (tokenIndex < needleTokens.length) {
    for (let j = haystack.length - 1; j >= 0; j--) {
      if (
        haystack[j].length < tokenIndex ||
        needleTokens[tokenIndex] !== haystack[j][tokenIndex]
      ) {
        haystack.splice(j, 1);
      }
    }
    if (haystack.length === 0) {
      res = needleTokens.splice(0, tokenIndex + 1);
      break;
    }
    tokenIndex++;
  }
  const identifier = res
    .filter(token => token.trim() !== '')
    .reverse()
    .join('/');

  return identifier;
};

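For reference, a quick sketch of what getShortestIdentifier computes; the values
mirror the tests being removed above.

  // The shortest trailing path fragment that still uniquely identifies the
  // value among the other paths.
  getShortestIdentifier('/project/car/todo', [
    '/project/home/todo',
    '/other/todo',
  ]);
  // => 'car/todo'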
@@ -2,12 +2,6 @@ import { titleCase } from 'title-case';
export { extractHashtags, extractTagsFromProp } from './hashtags';
export * from './core';

export function dropExtension(path: string): string {
  const parts = path.split('.');
  parts.pop();
  return parts.join('.');
}

/**
 *
 * @param filename

@@ -58,7 +58,9 @@ export class ConsoleLogger extends BaseLogger {
}

export class NoOpLogger extends BaseLogger {
  log(_l: LogLevel, _m?: string, ..._p: any[]): void {}
  log(_l: LogLevel, _m?: string, ..._p: any[]): void {
    // do nothing
  }
}

export class Logger {

176
packages/foam-vscode/src/core/utils/path.ts
Normal file
@@ -0,0 +1,176 @@
import { CharCode } from '../common/charCode';
import { posix } from 'path';

/**
 * Converts filesystem path to POSIX path. Supported inputs are:
 * - Windows path starting with a drive letter, e.g. C:\dir\file.ext
 * - UNC path for a shared file, e.g. \\server\share\path\file.ext
 * - POSIX path, e.g. /dir/file.ext
 *
 * @param path A supported filesystem path.
 * @returns [path, authority] where path is a POSIX representation for the
 *   given input and authority is undefined except for UNC paths.
 */
export function fromFsPath(path: string): [string, string] {
  let authority: string;
  if (isUNCShare(path)) {
    [path, authority] = parseUNCShare(path);
    path = path.replace(/\\/g, '/');
  } else if (hasDrive(path)) {
    path = '/' + path[0].toUpperCase() + path.substr(1).replace(/\\/g, '/');
  } else if (path[0] === '/' && hasDrive(path, 1)) {
    // POSIX representation of a Windows path: just normalize drive letter case
    path = '/' + path[1].toUpperCase() + path.substr(2);
  }
  return [path, authority];
}

/**
 * Converts a POSIX path to a filesystem path.
 *
 * @param path A POSIX path.
 * @param authority An optional authority used to build UNC paths. This only
 *   makes sense for the Windows platform.
 * @returns A platform-specific representation of the given POSIX path.
 */
export function toFsPath(path: string, authority?: string): string {
  if (path[0] === '/' && hasDrive(path, 1)) {
    path = path.substr(1).replace(/\//g, '\\');
    if (authority) {
      path = `\\\\${authority}${path}`;
    }
  }
  return path;
}

/**
 * Checks whether a POSIX path is absolute, e.g.
 * - /d1/d2/f.ext -> true
 * - d1/d2 -> false
 *
 * @param path A POSIX path.
 * @returns true if the path is absolute, false otherwise.
 */
export function isAbsolute(path: string): boolean {
  return posix.isAbsolute(path);
}

/**
 * Extracts the containing directory of a POSIX path, e.g.
 * - /d1/d2/f.ext -> /d1/d2
 * - /d1/d2 -> /d1
 *
 * @param path A POSIX path.
 * @returns The containing directory of the given path.
 */
export function getDirectory(path: string): string {
  return posix.dirname(path);
}

/**
 * Extracts the basename of a POSIX path, e.g. /d/f.ext -> f.ext.
 *
 * @param path A POSIX path.
 * @returns The basename of the given path.
 */
export function getBasename(path: string): string {
  return posix.basename(path);
}

/**
 * Extracts the name of a POSIX path, e.g. /d/f.ext -> f.
 *
 * @param path A POSIX path.
 * @returns The name of the given path.
 */
export function getName(path: string): string {
  return changeExtension(getBasename(path), '*', '');
}

/**
 * Extracts the extension of a POSIX path, e.g.
 * - /d/f.ext -> .ext
 * - /d/f.g.ext -> .ext
 * - /d/f -> ''
 *
 * @param path A POSIX path.
 * @returns The extension of the given path.
 */
export function getExtension(path: string): string {
  return posix.extname(path);
}

/**
 * Changes a POSIX path matching some extension to have another extension.
 *
 * @param path A POSIX path.
 * @param from The required current extension, or '*' to match any extension.
 * @param to The target extension.
 * @returns A POSIX path with its extension possibly changed.
 */
export function changeExtension(
  path: string,
  from: string,
  to: string
): string {
  const old = getExtension(path);
  if ((from === '*' && old !== to) || old === from) {
    path = path.substring(0, path.length - old.length);
    return to ? path + to : path;
  }
  return path;
}

/**
 * Joins a number of POSIX paths into a single POSIX path, e.g.
 * - /d1, d2, f.ext -> /d1/d2/f.ext
 * - /d1/d2, .., f.ext -> /d1/f.ext
 *
 * @param paths A variable number of POSIX paths.
 * @returns A POSIX path built from the given POSIX paths.
 */
export function joinPath(...paths: string[]): string {
  return posix.join(...paths);
}

/**
 * Makes a POSIX path relative to another POSIX path, e.g.
 * - /d1/d2 relative to /d1 -> d2
 * - /d1/d2 relative to /d1/d3 -> ../d2
 *
 * @param path The POSIX path to be made relative.
 * @param basePath The POSIX base path.
 * @returns A POSIX path relative to the base path.
 */
export function relativeTo(path: string, basePath: string): string {
  return posix.relative(basePath, path);
}

function hasDrive(path: string, idx = 0): boolean {
  if (path.length <= idx) {
    return false;
  }
  const c = path.charCodeAt(idx);
  return (
    ((c >= CharCode.A && c <= CharCode.Z) ||
      (c >= CharCode.a && c <= CharCode.z)) &&
    path.charCodeAt(idx + 1) === CharCode.Colon
  );
}

function isUNCShare(fsPath: string): boolean {
  return (
    fsPath.length >= 2 &&
    fsPath.charCodeAt(0) === CharCode.Backslash &&
    fsPath.charCodeAt(1) === CharCode.Backslash
  );
}

function parseUNCShare(uncPath: string): [string, string] {
  const idx = uncPath.indexOf('\\', 2);
  if (idx === -1) {
    return [uncPath.substring(2), '\\'];
  } else {
    return [uncPath.substring(2, idx), uncPath.substring(idx) || '\\'];
  }
}
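A short usage sketch of the new path helpers, based only on the examples in the
docblocks above; the concrete values are illustrative.

  // Windows drive paths round-trip through the POSIX form.
  fromFsPath('C:\\dir\\file.ext'); // => ['/C:/dir/file.ext', undefined]
  toFsPath('/C:/dir/file.ext'); // => 'C:\\dir\\file.ext'

  // Extension and relative-path helpers.
  changeExtension('/dir/file.md', '*', ''); // => '/dir/file'
  relativeTo('/d1/d2', '/d1/d3'); // => '../d2'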
@@ -1,5 +0,0 @@
import GithubSlugger from 'github-slugger';
import { URI } from '../model/uri';

export const uriToSlug = (uri: URI): string =>
  GithubSlugger.slug(URI.getBasename(uri));
@@ -1,7 +1,6 @@
|
||||
import { workspace } from 'vscode';
|
||||
import { createDailyNoteIfNotExists, getDailyNotePath } from './dated-notes';
|
||||
import { URI } from './core/model/uri';
|
||||
import { isWindows } from './utils';
|
||||
import { isWindows } from './core/common/platform';
|
||||
import {
|
||||
cleanWorkspace,
|
||||
closeEditors,
|
||||
@@ -20,11 +19,9 @@ describe('getDailyNotePath', () => {
|
||||
test('Adds the root directory to relative directories', async () => {
|
||||
const config = 'journal';
|
||||
|
||||
const expectedPath = URI.joinPath(
|
||||
fromVsCodeUri(workspace.workspaceFolders[0].uri),
|
||||
config,
|
||||
`${isoDate}.md`
|
||||
);
|
||||
const expectedPath = fromVsCodeUri(
|
||||
workspace.workspaceFolders[0].uri
|
||||
).joinPath(config, `${isoDate}.md`);
|
||||
|
||||
const oldValue = await workspace
|
||||
.getConfiguration('foam')
|
||||
@@ -34,8 +31,8 @@ describe('getDailyNotePath', () => {
|
||||
.update('openDailyNote.directory', config);
|
||||
const foamConfiguration = workspace.getConfiguration('foam');
|
||||
|
||||
expect(URI.toFsPath(getDailyNotePath(foamConfiguration, date))).toEqual(
|
||||
URI.toFsPath(expectedPath)
|
||||
expect(getDailyNotePath(foamConfiguration, date).toFsPath()).toEqual(
|
||||
expectedPath.toFsPath()
|
||||
);
|
||||
|
||||
await workspace
|
||||
@@ -60,7 +57,7 @@ describe('getDailyNotePath', () => {
|
||||
.update('openDailyNote.directory', config);
|
||||
const foamConfiguration = workspace.getConfiguration('foam');
|
||||
|
||||
expect(URI.toFsPath(getDailyNotePath(foamConfiguration, date))).toMatch(
|
||||
expect(getDailyNotePath(foamConfiguration, date).toFsPath()).toMatch(
|
||||
expectedPath
|
||||
);
|
||||
|
||||
@@ -84,7 +81,7 @@ describe('Daily note template', () => {
|
||||
const config = workspace.getConfiguration('foam');
|
||||
const uri = getDailyNotePath(config, targetDate);
|
||||
|
||||
await createDailyNoteIfNotExists(config, uri, targetDate);
|
||||
await createDailyNoteIfNotExists(targetDate);
|
||||
|
||||
const doc = await showInEditor(uri);
|
||||
const content = doc.editor.document.getText();
|
||||
|
||||
@@ -1,9 +1,8 @@
|
||||
import { workspace, WorkspaceConfiguration } from 'vscode';
|
||||
import dateFormat from 'dateformat';
|
||||
import { isAbsolute } from 'path';
|
||||
import { focusNote, pathExists } from './utils';
|
||||
import { focusNote } from './utils';
|
||||
import { URI } from './core/model/uri';
|
||||
import { fromVsCodeUri } from './utils/vsc-utils';
|
||||
import { fromVsCodeUri, toVsCodeUri } from './utils/vsc-utils';
|
||||
import { NoteFactory } from './services/templates';
|
||||
|
||||
/**
|
||||
@@ -12,20 +11,20 @@ import { NoteFactory } from './services/templates';
|
||||
* In the case that the daily note file does not exist,
|
||||
* it gets created along with any folders in its path.
|
||||
*
|
||||
* @param date A given date to be formatted as filename.
|
||||
* @param date The target date. If not provided, the function returns immediately.
|
||||
*/
|
||||
export async function openDailyNoteFor(date?: Date) {
|
||||
const foamConfiguration = workspace.getConfiguration('foam');
|
||||
const currentDate = date !== undefined ? date : new Date();
|
||||
if (date == null) {
|
||||
return;
|
||||
}
|
||||
|
||||
const dailyNotePath = getDailyNotePath(foamConfiguration, currentDate);
|
||||
|
||||
const isNew = await createDailyNoteIfNotExists(
|
||||
foamConfiguration,
|
||||
dailyNotePath,
|
||||
currentDate
|
||||
);
|
||||
await focusNote(dailyNotePath, isNew);
|
||||
const { didCreateFile, uri } = await createDailyNoteIfNotExists(date);
|
||||
// if a new file is created, the editor is automatically created
|
||||
// but forcing the focus will block the template placeholders from working
|
||||
// so we only explicitly focus on the note if the file already exists
|
||||
if (!didCreateFile) {
|
||||
await focusNote(uri, didCreateFile);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -45,16 +44,16 @@ export function getDailyNotePath(
|
||||
configuration: WorkspaceConfiguration,
|
||||
date: Date
|
||||
): URI {
|
||||
const dailyNoteDirectory: string =
|
||||
configuration.get('openDailyNote.directory') ?? '.';
|
||||
const dailyNoteDirectory = URI.file(
|
||||
configuration.get('openDailyNote.directory') ?? '.'
|
||||
);
|
||||
const dailyNoteFilename = getDailyNoteFileName(configuration, date);
|
||||
|
||||
if (isAbsolute(dailyNoteDirectory)) {
|
||||
return URI.joinPath(URI.file(dailyNoteDirectory), dailyNoteFilename);
|
||||
if (dailyNoteDirectory.isAbsolute()) {
|
||||
return dailyNoteDirectory.joinPath(dailyNoteFilename);
|
||||
} else {
|
||||
return URI.joinPath(
|
||||
fromVsCodeUri(workspace.workspaceFolders[0].uri),
|
||||
dailyNoteDirectory,
|
||||
return fromVsCodeUri(workspace.workspaceFolders[0].uri).joinPath(
|
||||
dailyNoteDirectory.path,
|
||||
dailyNoteFilename
|
||||
);
|
||||
}
|
||||
@@ -91,37 +90,31 @@ export function getDailyNoteFileName(
|
||||
* In the case that the folders referenced in the file path also do not exist,
|
||||
* this function will create all folders in the path.
|
||||
*
|
||||
* @param configuration The current workspace configuration.
|
||||
* @param dailyNotePath The path to daily note file.
|
||||
* @param currentDate The current date, to be used as a title.
|
||||
* @returns Whether the file was created.
|
||||
*/
|
||||
export async function createDailyNoteIfNotExists(
|
||||
configuration: WorkspaceConfiguration,
|
||||
dailyNotePath: URI,
|
||||
targetDate: Date
|
||||
) {
|
||||
if (await pathExists(dailyNotePath)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
export async function createDailyNoteIfNotExists(targetDate: Date) {
|
||||
const configuration = workspace.getConfiguration('foam');
|
||||
const pathFromLegacyConfiguration = getDailyNotePath(
|
||||
configuration,
|
||||
targetDate
|
||||
);
|
||||
const titleFormat: string =
|
||||
configuration.get('openDailyNote.titleFormat') ??
|
||||
configuration.get('openDailyNote.filenameFormat');
|
||||
|
||||
const templateFallbackText: string = `---
|
||||
const templateFallbackText = `---
|
||||
foam_template:
|
||||
name: New Daily Note
|
||||
description: Foam's default daily note template
|
||||
filepath: "${workspace.asRelativePath(
|
||||
toVsCodeUri(pathFromLegacyConfiguration)
|
||||
)}"
|
||||
---
|
||||
# ${dateFormat(targetDate, titleFormat, false)}
|
||||
`;
|
||||
|
||||
await NoteFactory.createFromDailyNoteTemplate(
|
||||
dailyNotePath,
|
||||
return await NoteFactory.createFromDailyNoteTemplate(
|
||||
pathFromLegacyConfiguration,
|
||||
templateFallbackText,
|
||||
targetDate
|
||||
);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
@@ -1,13 +1,15 @@
|
||||
import { workspace, ExtensionContext, window } from 'vscode';
|
||||
import { MarkdownResourceProvider } from './core/markdown-provider';
|
||||
import { MarkdownResourceProvider } from './core/services/markdown-provider';
|
||||
import { bootstrap } from './core/model/foam';
|
||||
import { URI } from './core/model/uri';
|
||||
import { FileDataStore, Matcher } from './core/services/datastore';
|
||||
import { Logger } from './core/utils/log';
|
||||
|
||||
import { features } from './features';
|
||||
import { VsCodeOutputLogger, exposeLogger } from './services/logging';
|
||||
import { getIgnoredFilesSetting } from './settings';
|
||||
import { fromVsCodeUri } from './utils/vsc-utils';
|
||||
import { fromVsCodeUri, toVsCodeUri } from './utils/vsc-utils';
|
||||
import { AttachmentResourceProvider } from './core/services/attachment-provider';
|
||||
|
||||
export async function activate(context: ExtensionContext) {
|
||||
const logger = new VsCodeOutputLogger();
|
||||
@@ -18,30 +20,56 @@ export async function activate(context: ExtensionContext) {
|
||||
Logger.info('Starting Foam');
|
||||
|
||||
// Prepare Foam
|
||||
const dataStore = new FileDataStore();
|
||||
const readFile = async (uri: URI) =>
|
||||
(await workspace.fs.readFile(toVsCodeUri(uri))).toString();
|
||||
const dataStore = new FileDataStore(readFile);
|
||||
const matcher = new Matcher(
|
||||
workspace.workspaceFolders.map(dir => fromVsCodeUri(dir.uri)),
|
||||
['**/*'],
|
||||
getIgnoredFilesSetting().map(g => g.toString())
|
||||
);
|
||||
const markdownProvider = new MarkdownResourceProvider(matcher, triggers => {
|
||||
const watcher = workspace.createFileSystemWatcher('**/*');
|
||||
return [
|
||||
watcher.onDidChange(uri => triggers.onDidChange(fromVsCodeUri(uri))),
|
||||
watcher.onDidCreate(uri => triggers.onDidCreate(fromVsCodeUri(uri))),
|
||||
watcher.onDidDelete(uri => triggers.onDidDelete(fromVsCodeUri(uri))),
|
||||
watcher,
|
||||
];
|
||||
});
|
||||
const watcher = workspace.createFileSystemWatcher('**/*');
|
||||
const markdownProvider = new MarkdownResourceProvider(
|
||||
matcher,
|
||||
dataStore,
|
||||
triggers => {
|
||||
return [
|
||||
watcher.onDidChange(uri => triggers.onDidChange(fromVsCodeUri(uri))),
|
||||
watcher.onDidCreate(uri => triggers.onDidCreate(fromVsCodeUri(uri))),
|
||||
watcher.onDidDelete(uri => triggers.onDidDelete(fromVsCodeUri(uri))),
|
||||
watcher,
|
||||
];
|
||||
}
|
||||
);
|
||||
const attachmentProvider = new AttachmentResourceProvider(
|
||||
matcher,
|
||||
dataStore,
|
||||
triggers => {
|
||||
return [
|
||||
watcher.onDidChange(uri => triggers.onDidChange(fromVsCodeUri(uri))),
|
||||
watcher.onDidCreate(uri => triggers.onDidCreate(fromVsCodeUri(uri))),
|
||||
watcher.onDidDelete(uri => triggers.onDidDelete(fromVsCodeUri(uri))),
|
||||
watcher,
|
||||
];
|
||||
}
|
||||
);
|
||||
|
||||
const foamPromise = bootstrap(matcher, dataStore, [markdownProvider]);
|
||||
const foamPromise = bootstrap(matcher, dataStore, [
|
||||
markdownProvider,
|
||||
attachmentProvider,
|
||||
]);
|
||||
|
||||
// Load the features
|
||||
const resPromises = features.map(f => f.activate(context, foamPromise));
|
||||
|
||||
const foam = await foamPromise;
|
||||
Logger.info(`Loaded ${foam.workspace.list().length} notes`);
|
||||
context.subscriptions.push(foam, markdownProvider);
|
||||
context.subscriptions.push(
|
||||
foam,
|
||||
watcher,
|
||||
markdownProvider,
|
||||
attachmentProvider
|
||||
);
|
||||
|
||||
const res = (await Promise.all(resPromises)).filter(r => r != null);
|
||||
|
||||
|
||||
@@ -37,7 +37,7 @@ describe('Backlinks panel', () => {
|
||||
const noteB = createTestNote({
|
||||
root: rootUri,
|
||||
uri: './note-b.md',
|
||||
links: [{ slug: 'note-a' }, { slug: 'note-a' }],
|
||||
links: [{ slug: 'note-a' }, { slug: 'note-a#section' }],
|
||||
});
|
||||
const noteC = createTestNote({
|
||||
root: rootUri,
|
||||
|
||||
@@ -10,7 +10,7 @@ import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { FoamGraph } from '../core/model/graph';
|
||||
import { Resource, ResourceLink } from '../core/model/note';
|
||||
import { Range } from '../core/model/range';
|
||||
import { fromVsCodeUri } from '../utils/vsc-utils';
|
||||
import { fromVsCodeUri, toVsCodeUri } from '../utils/vsc-utils';
|
||||
|
||||
const feature: FoamFeature = {
|
||||
activate: async (
|
||||
@@ -30,9 +30,7 @@ const feature: FoamFeature = {
|
||||
|
||||
context.subscriptions.push(
|
||||
vscode.window.registerTreeDataProvider('foam-vscode.backlinks', provider),
|
||||
foam.workspace.onDidAdd(() => provider.refresh()),
|
||||
foam.workspace.onDidUpdate(() => provider.refresh()),
|
||||
foam.workspace.onDidDelete(() => provider.refresh())
|
||||
foam.graph.onDidUpdate(() => provider.refresh())
|
||||
);
|
||||
},
|
||||
};
|
||||
@@ -63,7 +61,10 @@ export class BacklinksTreeDataProvider
|
||||
const backlinkRefs = Promise.all(
|
||||
resource.links
|
||||
.filter(link =>
|
||||
URI.isEqual(this.workspace.resolveLink(resource, link), uri)
|
||||
this.workspace
|
||||
.resolveLink(resource, link)
|
||||
.asPlain()
|
||||
.isEqual(uri)
|
||||
)
|
||||
.map(async link => {
|
||||
const item = new BacklinkTreeItem(resource, link);
|
||||
@@ -72,7 +73,7 @@ export class BacklinksTreeDataProvider
|
||||
).split('\n');
|
||||
if (link.range.start.line < lines.length) {
|
||||
const line = lines[link.range.start.line];
|
||||
let start = Math.max(0, link.range.start.character - 15);
|
||||
const start = Math.max(0, link.range.start.character - 15);
|
||||
const ellipsis = start === 0 ? '' : '...';
|
||||
|
||||
item.label = `${link.range.start.line}: ${ellipsis}${line.substr(
|
||||
@@ -93,7 +94,9 @@ export class BacklinksTreeDataProvider
|
||||
}
|
||||
|
||||
const backlinksByResourcePath = groupBy(
|
||||
this.graph.getConnections(uri).filter(c => URI.isEqual(c.target, uri)),
|
||||
this.graph
|
||||
.getConnections(uri)
|
||||
.filter(c => c.target.asPlain().isEqual(uri)),
|
||||
b => b.source.path
|
||||
);
|
||||
|
||||
@@ -126,11 +129,11 @@ export class BacklinkTreeItem extends vscode.TreeItem {
|
||||
public readonly resource: Resource,
|
||||
public readonly link: ResourceLink
|
||||
) {
|
||||
super(link.label, vscode.TreeItemCollapsibleState.None);
|
||||
super(link.rawText, vscode.TreeItemCollapsibleState.None);
|
||||
this.label = `${link.range.start.line}: ${this.label}`;
|
||||
this.command = {
|
||||
command: 'vscode.open',
|
||||
arguments: [resource.uri, { selection: link.range }],
|
||||
arguments: [toVsCodeUri(resource.uri), { selection: link.range }],
|
||||
title: 'Go to link',
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1,21 +1,13 @@
|
||||
// import { env, window, Uri, Position, Selection, commands } from 'vscode';
|
||||
// import * as vscode from 'vscode';
|
||||
import { env, Position, Selection, commands } from 'vscode';
|
||||
import { createFile, showInEditor } from '../test/test-utils-vscode';
|
||||
|
||||
describe('copyWithoutBrackets', () => {
|
||||
it('should pass CI', () => {
|
||||
expect(true).toBe(true);
|
||||
it('should get the input from the active editor selection', async () => {
|
||||
const { uri } = await createFile('This is my [[test-content]].');
|
||||
const { editor } = await showInEditor(uri);
|
||||
editor.selection = new Selection(new Position(0, 0), new Position(1, 0));
|
||||
await commands.executeCommand('foam-vscode.copy-without-brackets');
|
||||
const value = await env.clipboard.readText();
|
||||
expect(value).toEqual('This is my Test Content.');
|
||||
});
|
||||
// it('should get the input from the active editor selection', async () => {
|
||||
// const doc = await vscode.workspace.openTextDocument(
|
||||
// Uri.parse('untitled:/hello.md')
|
||||
// );
|
||||
// const editor = await window.showTextDocument(doc);
|
||||
// editor.edit(builder => {
|
||||
// builder.insert(new Position(0, 0), 'This is my [[test-content]].');
|
||||
// });
|
||||
// editor.selection = new Selection(new Position(0, 0), new Position(1, 0));
|
||||
// await commands.executeCommand('foam-vscode.copy-without-brackets');
|
||||
// const value = await env.clipboard.readText();
|
||||
// expect(value).toEqual('This is my Test Content.');
|
||||
// });
|
||||
});
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
import { Uri, commands, window, workspace } from 'vscode';
|
||||
import { URI } from '../core/model/uri';
|
||||
import path from 'path';
|
||||
import { toVsCodeUri } from '../utils/vsc-utils';
|
||||
import { commands, window, workspace } from 'vscode';
|
||||
import { createFile } from '../test/test-utils-vscode';
|
||||
import * as editor from '../services/editor';
|
||||
|
||||
@@ -122,12 +121,12 @@ Template A
|
||||
});
|
||||
|
||||
it('should create a new template', async () => {
|
||||
const template = path.join(
|
||||
workspace.workspaceFolders[0].uri.fsPath,
|
||||
const template = Uri.joinPath(
|
||||
workspace.workspaceFolders[0].uri,
|
||||
'.foam',
|
||||
'templates',
|
||||
'hello-world.md'
|
||||
);
|
||||
).fsPath;
|
||||
|
||||
window.showInputBox = jest.fn(() => {
|
||||
return Promise.resolve(template);
|
||||
@@ -142,12 +141,12 @@ Template A
|
||||
|
||||
it('can be cancelled', async () => {
|
||||
// This is the default template which would be created.
|
||||
const template = path.join(
|
||||
workspace.workspaceFolders[0].uri.fsPath,
|
||||
const template = Uri.joinPath(
|
||||
workspace.workspaceFolders[0].uri,
|
||||
'.foam',
|
||||
'templates',
|
||||
'new-template.md'
|
||||
);
|
||||
).fsPath;
|
||||
window.showInputBox = jest.fn(() => {
|
||||
return Promise.resolve(undefined);
|
||||
});
|
||||
|
||||
@@ -1,5 +1,3 @@
|
||||
import { URI } from '../core/model/uri';
|
||||
import * as path from 'path';
|
||||
import { commands, ExtensionContext, QuickPickItem, window } from 'vscode';
|
||||
import { FoamFeature } from '../types';
|
||||
import {
|
||||
@@ -60,7 +58,7 @@ async function askUserForTemplate() {
|
||||
await Promise.all(
|
||||
templates.map(async templateUri => {
|
||||
const metadata = await getTemplateMetadata(templateUri);
|
||||
metadata.set('templatePath', path.basename(templateUri.path));
|
||||
metadata.set('templatePath', templateUri.getBasename());
|
||||
return metadata;
|
||||
})
|
||||
)
|
||||
@@ -105,7 +103,7 @@ const feature: FoamFeature = {
|
||||
const templateFilename =
|
||||
(selectedTemplate as QuickPickItem).description ||
|
||||
(selectedTemplate as QuickPickItem).label;
|
||||
const templateUri = URI.joinPath(TEMPLATES_DIR, templateFilename);
|
||||
const templateUri = TEMPLATES_DIR.joinPath(templateFilename);
|
||||
|
||||
const resolver = new Resolver(new Map(), new Date());
|
||||
|
||||
@@ -119,7 +117,7 @@ const feature: FoamFeature = {
|
||||
() => {
|
||||
const resolver = new Resolver(new Map(), new Date());
|
||||
|
||||
NoteFactory.createFromTemplate(
|
||||
return NoteFactory.createFromTemplate(
|
||||
DEFAULT_TEMPLATE_URI,
|
||||
resolver,
|
||||
undefined,
|
||||
|
||||
@@ -1,7 +1,5 @@
|
||||
import * as vscode from 'vscode';
|
||||
import * as path from 'path';
|
||||
import { FoamFeature } from '../types';
|
||||
import { URI } from '../core/model/uri';
|
||||
import { TextDecoder } from 'util';
|
||||
import { getGraphStyle, getTitleMaxLength } from '../settings';
|
||||
import { isSome } from '../utils';
|
||||
@@ -35,13 +33,9 @@ const feature: FoamFeature = {
|
||||
updateGraph(panel, foam);
|
||||
};
|
||||
|
||||
const noteAddedListener = foam.workspace.onDidAdd(onFoamChanged);
|
||||
const noteUpdatedListener = foam.workspace.onDidUpdate(onFoamChanged);
|
||||
const noteDeletedListener = foam.workspace.onDidDelete(onFoamChanged);
|
||||
const noteUpdatedListener = foam.graph.onDidUpdate(onFoamChanged);
|
||||
panel.onDidDispose(() => {
|
||||
noteAddedListener.dispose();
|
||||
noteUpdatedListener.dispose();
|
||||
noteDeletedListener.dispose();
|
||||
panel = undefined;
|
||||
});
|
||||
|
||||
@@ -77,7 +71,7 @@ function generateGraphData(foam: Foam) {
|
||||
|
||||
foam.workspace.list().forEach(n => {
|
||||
const type = n.type === 'note' ? n.properties.type ?? 'note' : n.type;
|
||||
const title = n.type === 'note' ? n.title : path.basename(n.uri.path);
|
||||
const title = n.type === 'note' ? n.title : n.uri.getBasename();
|
||||
graph.nodeInfo[n.uri.path] = {
|
||||
id: n.uri.path,
|
||||
type: type,
|
||||
@@ -92,7 +86,7 @@ function generateGraphData(foam: Foam) {
|
||||
source: c.source.path,
|
||||
target: c.target.path,
|
||||
});
|
||||
if (URI.isPlaceholder(c.target)) {
|
||||
if (c.target.isPlaceholder()) {
|
||||
graph.nodeInfo[c.target.path] = {
|
||||
id: c.target.path,
|
||||
type: 'placeholder',
|
||||
@@ -133,7 +127,7 @@ async function createGraphPanel(foam: Foam, context: vscode.ExtensionContext) {
|
||||
panel.webview.onDidReceiveMessage(
|
||||
async message => {
|
||||
switch (message.type) {
|
||||
case 'webviewDidLoad':
|
||||
case 'webviewDidLoad': {
|
||||
const styles = getGraphStyle();
|
||||
panel.webview.postMessage({
|
||||
type: 'didUpdateStyle',
|
||||
@@ -141,8 +135,8 @@ async function createGraphPanel(foam: Foam, context: vscode.ExtensionContext) {
|
||||
});
|
||||
updateGraph(panel, foam);
|
||||
break;
|
||||
|
||||
case 'webviewDidSelectNode':
|
||||
}
|
||||
case 'webviewDidSelectNode': {
|
||||
const noteUri = vscode.Uri.parse(message.payload);
|
||||
const selectedNote = foam.workspace.get(fromVsCodeUri(noteUri));
|
||||
|
||||
@@ -153,10 +147,11 @@ async function createGraphPanel(foam: Foam, context: vscode.ExtensionContext) {
|
||||
vscode.window.showTextDocument(doc, vscode.ViewColumn.One);
|
||||
}
|
||||
break;
|
||||
|
||||
case 'error':
|
||||
}
|
||||
case 'error': {
|
||||
Logger.error('An error occurred in the graph view', message.payload);
|
||||
break;
|
||||
}
|
||||
}
|
||||
},
|
||||
undefined,
|
||||
@@ -170,25 +165,27 @@ async function getWebviewContent(
|
||||
context: vscode.ExtensionContext,
|
||||
panel: vscode.WebviewPanel
|
||||
) {
|
||||
const datavizPath = [context.extensionPath, 'static', 'dataviz'];
|
||||
const datavizPath = vscode.Uri.joinPath(
|
||||
vscode.Uri.file(context.extensionPath),
|
||||
'static',
|
||||
'dataviz'
|
||||
);
|
||||
|
||||
const getWebviewUri = (fileName: string) =>
|
||||
panel.webview.asWebviewUri(
|
||||
vscode.Uri.file(path.join(...datavizPath, fileName))
|
||||
);
|
||||
panel.webview.asWebviewUri(vscode.Uri.joinPath(datavizPath, fileName));
|
||||
|
||||
const indexHtml = await vscode.workspace.fs.readFile(
|
||||
vscode.Uri.file(path.join(...datavizPath, 'index.html'))
|
||||
vscode.Uri.joinPath(datavizPath, 'index.html')
|
||||
);
|
||||
|
||||
// Replace the script paths with the appropriate webview URI.
|
||||
const filled = new TextDecoder('utf-8')
|
||||
.decode(indexHtml)
|
||||
.replace(/<script data-replace src="([^"]+")/g, match => {
|
||||
const fileName = match
|
||||
.slice('<script data-replace src="'.length, -1)
|
||||
.trim();
|
||||
return '<script src="' + getWebviewUri(fileName).toString() + '"';
|
||||
.replace(/data-replace (src|href)="[^"]+"/g, match => {
|
||||
const i = match.indexOf(' ');
|
||||
const j = match.indexOf('=');
|
||||
const uri = getWebviewUri(match.slice(j + 2, -1).trim());
|
||||
return match.slice(i + 1, j) + '="' + uri.toString() + '"';
|
||||
});
|
||||
|
||||
return filled;
|
||||
|
||||
@@ -1,55 +1,44 @@
|
||||
import { debounce } from 'lodash';
|
||||
import * as vscode from 'vscode';
|
||||
import { URI } from '../core/model/uri';
|
||||
import { FoamFeature } from '../types';
|
||||
import {
|
||||
ConfigurationMonitor,
|
||||
monitorFoamVsCodeConfig,
|
||||
} from '../services/config';
|
||||
import { ResourceParser } from '../core/model/note';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { Foam } from '../core/model/foam';
|
||||
import { Range } from '../core/model/range';
|
||||
import { fromVsCodeUri } from '../utils/vsc-utils';
|
||||
|
||||
export const CONFIG_KEY = 'decorations.links.enable';
|
||||
|
||||
const linkDecoration = vscode.window.createTextEditorDecorationType({
|
||||
rangeBehavior: vscode.DecorationRangeBehavior.ClosedClosed,
|
||||
textDecoration: 'none',
|
||||
color: { id: 'textLink.foreground' },
|
||||
cursor: 'pointer',
|
||||
});
|
||||
|
||||
const placeholderDecoration = vscode.window.createTextEditorDecorationType({
|
||||
rangeBehavior: vscode.DecorationRangeBehavior.ClosedClosed,
|
||||
textDecoration: 'none',
|
||||
color: { id: 'editorWarning.foreground' },
|
||||
color: { id: 'foam.placeholder' },
|
||||
cursor: 'pointer',
|
||||
});
|
||||
|
||||
const updateDecorations = (
|
||||
areDecorationsEnabled: () => boolean,
|
||||
parser: ResourceParser,
|
||||
workspace: FoamWorkspace
|
||||
) => (editor: vscode.TextEditor) => {
|
||||
if (!editor || !areDecorationsEnabled()) {
|
||||
if (!editor || editor.document.languageId !== 'markdown') {
|
||||
return;
|
||||
}
|
||||
const note = parser.parse(
|
||||
fromVsCodeUri(editor.document.uri),
|
||||
editor.document.getText()
|
||||
);
|
||||
let linkRanges = [];
|
||||
let placeholderRanges = [];
|
||||
const placeholderRanges = [];
|
||||
note.links.forEach(link => {
|
||||
const linkUri = workspace.resolveLink(note, link);
|
||||
if (URI.isPlaceholder(linkUri)) {
|
||||
placeholderRanges.push(link.range);
|
||||
} else {
|
||||
linkRanges.push(link.range);
|
||||
if (linkUri.isPlaceholder()) {
|
||||
placeholderRanges.push(
|
||||
Range.create(
|
||||
link.range.start.line,
|
||||
link.range.start.character + (link.type === 'wikilink' ? 2 : 0),
|
||||
link.range.end.line,
|
||||
link.range.end.character - (link.type === 'wikilink' ? 2 : 0)
|
||||
)
|
||||
);
|
||||
}
|
||||
});
|
||||
editor.setDecorations(linkDecoration, linkRanges);
|
||||
editor.setDecorations(placeholderDecoration, placeholderRanges);
|
||||
};
|
||||
|
||||
@@ -58,14 +47,10 @@ const feature: FoamFeature = {
|
||||
context: vscode.ExtensionContext,
|
||||
foamPromise: Promise<Foam>
|
||||
) => {
|
||||
const areDecorationsEnabled: ConfigurationMonitor<boolean> = monitorFoamVsCodeConfig(
|
||||
CONFIG_KEY
|
||||
);
|
||||
const foam = await foamPromise;
|
||||
let activeEditor = vscode.window.activeTextEditor;
|
||||
|
||||
const immediatelyUpdateDecorations = updateDecorations(
|
||||
areDecorationsEnabled,
|
||||
foam.services.parser,
|
||||
foam.workspace
|
||||
);
|
||||
@@ -78,8 +63,6 @@ const feature: FoamFeature = {
|
||||
immediatelyUpdateDecorations(activeEditor);
|
||||
|
||||
context.subscriptions.push(
|
||||
areDecorationsEnabled,
|
||||
linkDecoration,
|
||||
placeholderDecoration,
|
||||
vscode.window.onDidChangeActiveTextEditor(editor => {
|
||||
activeEditor = editor;
|
||||
|
||||
@@ -1,11 +1,9 @@
|
||||
import * as vscode from 'vscode';
|
||||
import {
|
||||
createMarkdownParser,
|
||||
MarkdownResourceProvider,
|
||||
} from '../core/markdown-provider';
|
||||
import { createMarkdownParser } from '../core/services/markdown-parser';
|
||||
import { MarkdownResourceProvider } from '../core/services/markdown-provider';
|
||||
import { FoamGraph } from '../core/model/graph';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { Matcher } from '../core/services/datastore';
|
||||
import { FileDataStore, Matcher } from '../core/services/datastore';
|
||||
import {
|
||||
cleanWorkspace,
|
||||
closeEditors,
|
||||
@@ -14,6 +12,7 @@ import {
|
||||
} from '../test/test-utils-vscode';
|
||||
import { fromVsCodeUri, toVsCodeUri } from '../utils/vsc-utils';
|
||||
import { HoverProvider } from './hover-provider';
|
||||
import { readFileFromFs } from '../test/test-utils';
|
||||
|
||||
// We can't use createTestWorkspace from /packages/foam-vscode/src/test/test-utils.ts
|
||||
// because we need a MarkdownResourceProvider with a real instance of FileDataStore.
|
||||
@@ -21,7 +20,8 @@ const createWorkspace = () => {
|
||||
const matcher = new Matcher(
|
||||
vscode.workspace.workspaceFolders.map(f => fromVsCodeUri(f.uri))
|
||||
);
|
||||
const resourceProvider = new MarkdownResourceProvider(matcher);
|
||||
const dataStore = new FileDataStore(readFileFromFs);
|
||||
const resourceProvider = new MarkdownResourceProvider(matcher, dataStore);
|
||||
const workspace = new FoamWorkspace();
|
||||
workspace.registerProvider(resourceProvider);
|
||||
return workspace;
|
||||
|
||||
@@ -1,6 +1,5 @@
|
||||
import { uniqWith } from 'lodash';
|
||||
import * as vscode from 'vscode';
|
||||
import { URI } from '../core/model/uri';
|
||||
import { FoamFeature } from '../types';
|
||||
import { getNoteTooltip, mdDocSelector, isSome } from '../utils';
|
||||
import { fromVsCodeUri, toVsCodeRange } from '../utils/vsc-utils';
|
||||
@@ -81,9 +80,9 @@ export class HoverProvider implements vscode.HoverProvider {
|
||||
const sources = uniqWith(
|
||||
this.graph
|
||||
.getBacklinks(targetUri)
|
||||
.filter(link => !URI.isEqual(link.source, documentUri))
|
||||
.filter(link => !link.source.isEqual(documentUri))
|
||||
.map(link => link.source),
|
||||
URI.isEqual
|
||||
(u1, u2) => u1.isEqual(u2)
|
||||
);
|
||||
|
||||
const links = sources.slice(0, 10).map(ref => {
|
||||
@@ -101,7 +100,7 @@ export class HoverProvider implements vscode.HoverProvider {
|
||||
);
|
||||
|
||||
let mdContent = null;
|
||||
if (!URI.isPlaceholder(targetUri)) {
|
||||
if (!targetUri.isPlaceholder()) {
|
||||
const content: string = await this.workspace.readAsMarkdown(targetUri);
|
||||
|
||||
mdContent = isSome(content)
|
||||
|
||||
@@ -13,14 +13,17 @@ import backlinks from './backlinks';
|
||||
import utilityCommands from './utility-commands';
|
||||
import hoverProvider from './hover-provider';
|
||||
import previewNavigation from './preview-navigation';
|
||||
import completionProvider from './link-completion';
|
||||
import completionProvider, { completionCursorMove } from './link-completion';
|
||||
import tagCompletionProvider from './tag-completion';
|
||||
import linkDecorations from './document-decorator';
|
||||
import navigationProviders from './navigation-provider';
|
||||
import wikilinkDiagnostics from './wikilink-diagnostics';
|
||||
// import completionMoveCursor from './completion-cursor-move';
|
||||
import refactor from './refactor';
|
||||
import { FoamFeature } from '../types';
|
||||
|
||||
export const features: FoamFeature[] = [
|
||||
refactor,
|
||||
navigationProviders,
|
||||
wikilinkDiagnostics,
|
||||
tagsExplorer,
|
||||
@@ -41,4 +44,5 @@ export const features: FoamFeature[] = [
|
||||
previewNavigation,
|
||||
completionProvider,
|
||||
tagCompletionProvider,
|
||||
completionCursorMove,
|
||||
];
|
||||
|
||||
@@ -5,15 +5,17 @@ import {
|
||||
commands,
|
||||
ProgressLocation,
|
||||
} from 'vscode';
|
||||
import * as fs from 'fs';
|
||||
import { FoamFeature } from '../types';
|
||||
import { URI } from '../core/model/uri';
|
||||
|
||||
import {
|
||||
getWikilinkDefinitionSetting,
|
||||
LinkReferenceDefinitionsSetting,
|
||||
} from '../settings';
|
||||
import { toVsCodePosition, toVsCodeRange } from '../utils/vsc-utils';
|
||||
import {
|
||||
toVsCodePosition,
|
||||
toVsCodeRange,
|
||||
toVsCodeUri,
|
||||
} from '../utils/vsc-utils';
|
||||
import { Foam } from '../core/model/foam';
|
||||
import { Resource } from '../core/model/note';
|
||||
import { generateHeading, generateLinkReferences } from '../core/janitor';
|
||||
@@ -69,7 +71,7 @@ async function janitor(foam: Foam) {
|
||||
async function runJanitor(foam: Foam) {
|
||||
const notes: Resource[] = foam.workspace
|
||||
.list()
|
||||
.filter(r => URI.isMarkdownFile(r.uri));
|
||||
.filter(r => r.uri.isMarkdown());
|
||||
|
||||
let updatedHeadingCount = 0;
|
||||
let updatedDefinitionListCount = 0;
|
||||
@@ -86,11 +88,11 @@ async function runJanitor(foam: Foam) {
|
||||
);
|
||||
|
||||
const dirtyNotes = notes.filter(note =>
|
||||
dirtyEditorsFileName.includes(URI.toFsPath(note.uri))
|
||||
dirtyEditorsFileName.includes(note.uri.toFsPath())
|
||||
);
|
||||
|
||||
const nonDirtyNotes = notes.filter(
|
||||
note => !dirtyEditorsFileName.includes(URI.toFsPath(note.uri))
|
||||
note => !dirtyEditorsFileName.includes(note.uri.toFsPath())
|
||||
);
|
||||
|
||||
const wikilinkSetting = getWikilinkDefinitionSetting();
|
||||
@@ -98,12 +100,12 @@ async function runJanitor(foam: Foam) {
|
||||
// Apply Text Edits to Non Dirty Notes using fs module just like CLI
|
||||
|
||||
const fileWritePromises = nonDirtyNotes.map(note => {
|
||||
let heading = generateHeading(note);
|
||||
const heading = generateHeading(note);
|
||||
if (heading) {
|
||||
updatedHeadingCount += 1;
|
||||
}
|
||||
|
||||
let definitions =
|
||||
const definitions =
|
||||
wikilinkSetting === LinkReferenceDefinitionsSetting.off
|
||||
? null
|
||||
: generateLinkReferences(
|
||||
@@ -126,7 +128,7 @@ async function runJanitor(foam: Foam) {
|
||||
text = definitions ? applyTextEdit(text, definitions) : text;
|
||||
text = heading ? applyTextEdit(text, heading) : text;
|
||||
|
||||
return fs.promises.writeFile(URI.toFsPath(note.uri), text);
|
||||
return workspace.fs.writeFile(toVsCodeUri(note.uri), Buffer.from(text));
|
||||
});
|
||||
|
||||
await Promise.all(fileWritePromises);
|
||||
@@ -136,12 +138,12 @@ async function runJanitor(foam: Foam) {
|
||||
for (const doc of dirtyTextDocuments) {
|
||||
const editor = await window.showTextDocument(doc);
|
||||
const note = dirtyNotes.find(
|
||||
n => URI.toFsPath(n.uri) === editor.document.uri.fsPath
|
||||
n => n.uri.toFsPath() === editor.document.uri.fsPath
|
||||
)!;
|
||||
|
||||
// Get edits
|
||||
const heading = generateHeading(note);
|
||||
let definitions =
|
||||
const definitions =
|
||||
wikilinkSetting === LinkReferenceDefinitionsSetting.off
|
||||
? null
|
||||
: generateLinkReferences(
|
||||
|
||||
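The janitor hunks above capture a broader API migration in this changeset: static URI helpers (URI.isMarkdownFile, URI.toFsPath) become instance methods, and direct fs writes go through the VS Code filesystem API. A minimal sketch of the new calls, assuming a Foam instance is in scope:

import { workspace } from 'vscode';
import { Foam } from '../core/model/foam';
import { toVsCodeUri } from '../utils/vsc-utils';

// Sketch only: filter markdown notes and write them back via workspace.fs,
// mirroring the replacements made in runJanitor above.
async function writeBack(foam: Foam, newText: string) {
  const notes = foam.workspace.list().filter(r => r.uri.isMarkdown());
  for (const note of notes) {
    // note.uri.toFsPath() replaces URI.toFsPath(note.uri) when matching editors
    await workspace.fs.writeFile(toVsCodeUri(note.uri), Buffer.from(newText));
  }
}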
@@ -1,7 +1,7 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { createMarkdownParser } from '../core/services/markdown-parser';
|
||||
import { FoamGraph } from '../core/model/graph';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { createTestNote } from '../test/test-utils';
|
||||
import { createTestNote, createTestWorkspace } from '../test/test-utils';
|
||||
import {
|
||||
cleanWorkspace,
|
||||
closeEditors,
|
||||
@@ -9,15 +9,20 @@ import {
|
||||
showInEditor,
|
||||
} from '../test/test-utils-vscode';
|
||||
import { fromVsCodeUri } from '../utils/vsc-utils';
|
||||
import { CompletionProvider } from './link-completion';
|
||||
import {
|
||||
CompletionProvider,
|
||||
SectionCompletionProvider,
|
||||
} from './link-completion';
|
||||
|
||||
describe('Link Completion', () => {
|
||||
const parser = createMarkdownParser([]);
|
||||
const root = fromVsCodeUri(vscode.workspace.workspaceFolders[0].uri);
|
||||
const ws = new FoamWorkspace();
|
||||
const ws = createTestWorkspace();
|
||||
ws.set(
|
||||
createTestNote({
|
||||
root,
|
||||
uri: 'file-name.md',
|
||||
sections: ['Section One', 'Section Two'],
|
||||
})
|
||||
)
|
||||
.set(
|
||||
@@ -82,24 +87,100 @@ describe('Link Completion', () => {
|
||||
});
|
||||
|
||||
it('should return notes with unique identifiers, and placeholders', async () => {
|
||||
const { uri } = await createFile('[[file]] [[');
|
||||
for (const text of ['[[', '[[file]] [[', '[[file]] #tag [[']) {
|
||||
const { uri } = await createFile(text);
|
||||
const { doc } = await showInEditor(uri);
|
||||
const provider = new CompletionProvider(ws, graph);
|
||||
|
||||
const links = await provider.provideCompletionItems(
|
||||
doc,
|
||||
new vscode.Position(0, text.length)
|
||||
);
|
||||
|
||||
expect(links.items.length).toEqual(5);
|
||||
expect(new Set(links.items.map(i => i.insertText))).toEqual(
|
||||
new Set([
|
||||
'to/file',
|
||||
'another/file',
|
||||
'File name with spaces',
|
||||
'file-name',
|
||||
'placeholder text',
|
||||
])
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it('should return sections for other notes', async () => {
|
||||
for (const text of [
|
||||
'[[file-name#',
|
||||
'[[file]] [[file-name#',
|
||||
'[[file]] #tag [[file-name#',
|
||||
]) {
|
||||
const { uri } = await createFile(text);
|
||||
const { doc } = await showInEditor(uri);
|
||||
const provider = new SectionCompletionProvider(ws);
|
||||
|
||||
const links = await provider.provideCompletionItems(
|
||||
doc,
|
||||
new vscode.Position(0, text.length)
|
||||
);
|
||||
|
||||
expect(new Set(links.items.map(i => i.label))).toEqual(
|
||||
new Set(['Section One', 'Section Two'])
|
||||
);
|
||||
}
|
||||
});
|
||||
|
||||
it('should return sections within the note', async () => {
|
||||
const { uri, content } = await createFile(`
|
||||
# Section 1
|
||||
|
||||
Content of section 1
|
||||
|
||||
# Section 2
|
||||
|
||||
Content of section 2
|
||||
|
||||
[[#
|
||||
`);
|
||||
ws.set(parser.parse(uri, content));
|
||||
|
||||
const { doc } = await showInEditor(uri);
|
||||
const provider = new SectionCompletionProvider(ws);
|
||||
|
||||
const links = await provider.provideCompletionItems(
|
||||
doc,
|
||||
new vscode.Position(9, 3)
|
||||
);
|
||||
|
||||
expect(new Set(links.items.map(i => i.label))).toEqual(
|
||||
new Set(['Section 1', 'Section 2'])
|
||||
);
|
||||
});
|
||||
|
||||
it('should return page alias', async () => {
|
||||
const { uri, content } = await createFile(
|
||||
`
|
||||
---
|
||||
alias: alias-a
|
||||
---
|
||||
[[
|
||||
`,
|
||||
['new-note-with-alias.md']
|
||||
);
|
||||
ws.set(parser.parse(uri, content));
|
||||
|
||||
const { doc } = await showInEditor(uri);
|
||||
const provider = new CompletionProvider(ws, graph);
|
||||
|
||||
const links = await provider.provideCompletionItems(
|
||||
doc,
|
||||
new vscode.Position(0, 11)
|
||||
new vscode.Position(4, 2)
|
||||
);
|
||||
|
||||
expect(links.items.length).toEqual(5);
|
||||
expect(new Set(links.items.map(i => i.insertText))).toEqual(
|
||||
new Set([
|
||||
'to/file',
|
||||
'another/file',
|
||||
'File name with spaces',
|
||||
'file-name',
|
||||
'placeholder text',
|
||||
])
|
||||
);
|
||||
const aliasCompletionItem = links.items.find(i => i.label === 'alias-a');
|
||||
expect(aliasCompletionItem).not.toBeNull();
|
||||
expect(aliasCompletionItem.label).toBe('alias-a');
|
||||
expect(aliasCompletionItem.insertText).toBe('new-note-with-alias|alias-a');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,12 +1,23 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { Foam } from '../core/model/foam';
|
||||
import { FoamGraph } from '../core/model/graph';
|
||||
import { Resource } from '../core/model/note';
|
||||
import { URI } from '../core/model/uri';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { FoamFeature } from '../types';
|
||||
import { getNoteTooltip, mdDocSelector } from '../utils';
|
||||
import { toVsCodeUri } from '../utils/vsc-utils';
|
||||
import { fromVsCodeUri, toVsCodeUri } from '../utils/vsc-utils';
|
||||
|
||||
export const aliasCommitCharacters = ['#'];
|
||||
export const linkCommitCharacters = ['#', '|'];
|
||||
export const sectionCommitCharacters = ['|'];
|
||||
|
||||
const COMPLETION_CURSOR_MOVE = {
|
||||
command: 'foam-vscode.completion-move-cursor',
|
||||
title: 'Foam: Move cursor after completion',
|
||||
};
|
||||
|
||||
export const WIKILINK_REGEX = /\[\[[^[\]]*(?!.*\]\])/;
|
||||
export const SECTION_REGEX = /\[\[([^[\]]*#(?!.*\]\]))/;
|
||||
|
||||
const feature: FoamFeature = {
|
||||
activate: async (
|
||||
@@ -19,11 +30,135 @@ const feature: FoamFeature = {
|
||||
mdDocSelector,
|
||||
new CompletionProvider(foam.workspace, foam.graph),
|
||||
'['
|
||||
),
|
||||
vscode.languages.registerCompletionItemProvider(
|
||||
mdDocSelector,
|
||||
new SectionCompletionProvider(foam.workspace),
|
||||
'#'
|
||||
)
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
/**
 * Always jump the cursor past the closing brackets after a completion, but
 * move it back inside when the completion was committed with the alias
 * divider `|` or the section divider `#`.
 * See https://github.com/foambubble/foam/issues/962
 */
|
||||
|
||||
export const completionCursorMove: FoamFeature = {
|
||||
activate: (context: vscode.ExtensionContext, foamPromise: Promise<Foam>) => {
|
||||
context.subscriptions.push(
|
||||
vscode.commands.registerCommand(
|
||||
COMPLETION_CURSOR_MOVE.command,
|
||||
async () => {
|
||||
const activeEditor = vscode.window.activeTextEditor;
|
||||
const document = activeEditor.document;
|
||||
const currentPosition = activeEditor.selection.active;
|
||||
const cursorChange = vscode.window.onDidChangeTextEditorSelection(
|
||||
async e => {
|
||||
const changedPosition = e.selections[0].active;
|
||||
const preChar = document
|
||||
.lineAt(changedPosition.line)
|
||||
.text.charAt(changedPosition.character - 1);
|
||||
|
||||
const {
|
||||
character: selectionChar,
|
||||
line: selectionLine,
|
||||
} = e.selections[0].active;
|
||||
|
||||
const {
|
||||
line: completionLine,
|
||||
character: completionChar,
|
||||
} = currentPosition;
|
||||
|
||||
const inCompleteBySectionDivider =
|
||||
linkCommitCharacters.includes(preChar) &&
|
||||
selectionLine === completionLine &&
|
||||
selectionChar === completionChar + 1;
|
||||
|
||||
cursorChange.dispose();
|
||||
if (inCompleteBySectionDivider) {
|
||||
await vscode.commands.executeCommand('cursorMove', {
|
||||
to: 'left',
|
||||
by: 'character',
|
||||
value: 2,
|
||||
});
|
||||
}
|
||||
}
|
||||
);
|
||||
|
||||
await vscode.commands.executeCommand('cursorMove', {
|
||||
to: 'right',
|
||||
by: 'character',
|
||||
value: 2,
|
||||
});
|
||||
}
|
||||
)
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
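To tie the pieces together: the providers below attach COMPLETION_CURSOR_MOVE to every item, so accepting a completion first jumps the cursor past the closing `]]`, and the listener above moves it back in when the commit character was `#` or `|`. A sketch of that wiring (the item label is a hypothetical example):

const exampleItem = new vscode.CompletionItem(
  'file-name',
  vscode.CompletionItemKind.File
);
exampleItem.command = COMPLETION_CURSOR_MOVE; // jump past `]]`, then back for `#`/`|`
exampleItem.commitCharacters = linkCommitCharacters; // ['#', '|']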
export class SectionCompletionProvider
|
||||
implements vscode.CompletionItemProvider<vscode.CompletionItem> {
|
||||
constructor(private ws: FoamWorkspace) {}
|
||||
|
||||
provideCompletionItems(
|
||||
document: vscode.TextDocument,
|
||||
position: vscode.Position
|
||||
): vscode.ProviderResult<vscode.CompletionList<vscode.CompletionItem>> {
|
||||
const cursorPrefix = document
|
||||
.lineAt(position)
|
||||
.text.substr(0, position.character);
|
||||
|
||||
// Autocomplete is only required if cursorPrefix contains a `[[` that is not yet closed by `]]`.
// See https://github.com/foambubble/foam/pull/596#issuecomment-825748205 for details.
|
||||
const match = cursorPrefix.match(SECTION_REGEX);
|
||||
|
||||
if (!match) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const resourceId =
|
||||
match[1] === '#' ? fromVsCodeUri(document.uri) : match[1].slice(0, -1);
|
||||
|
||||
const resource = this.ws.find(resourceId);
|
||||
const replacementRange = new vscode.Range(
|
||||
position.line,
|
||||
cursorPrefix.lastIndexOf('#') + 1,
|
||||
position.line,
|
||||
position.character
|
||||
);
|
||||
if (resource) {
|
||||
const items = resource.sections.map(b => {
|
||||
const item = new ResourceCompletionItem(
|
||||
b.label,
|
||||
vscode.CompletionItemKind.Text,
|
||||
resource.uri.withFragment(b.label)
|
||||
);
|
||||
item.sortText = String(b.range.start.line).padStart(5, '0');
|
||||
item.range = replacementRange;
|
||||
item.commitCharacters = sectionCommitCharacters;
|
||||
item.command = COMPLETION_CURSOR_MOVE;
|
||||
return item;
|
||||
});
|
||||
return new vscode.CompletionList(items);
|
||||
}
|
||||
}
|
||||
|
||||
resolveCompletionItem(
|
||||
item: ResourceCompletionItem | vscode.CompletionItem
|
||||
): vscode.ProviderResult<vscode.CompletionItem> {
|
||||
if (item instanceof ResourceCompletionItem) {
|
||||
return this.ws.readAsMarkdown(item.resourceUri).then(text => {
|
||||
item.documentation = getNoteTooltip(text);
|
||||
return item;
|
||||
});
|
||||
}
|
||||
return item;
|
||||
}
|
||||
}
|
||||
|
||||
export class CompletionProvider
|
||||
implements vscode.CompletionItemProvider<vscode.CompletionItem> {
|
||||
constructor(private ws: FoamWorkspace, private graph: FoamGraph) {}
|
||||
@@ -38,24 +173,52 @@ export class CompletionProvider
|
||||
|
||||
// Autocomplete is only required if cursorPrefix contains a `[[` that is not yet closed by `]]`.
// See https://github.com/foambubble/foam/pull/596#issuecomment-825748205 for details.
|
||||
// eslint-disable-next-line no-useless-escape
|
||||
const requiresAutocomplete = cursorPrefix.match(/\[\[[^\[\]]*(?!.*\]\])/);
|
||||
|
||||
if (!requiresAutocomplete) {
|
||||
const requiresAutocomplete = cursorPrefix.match(WIKILINK_REGEX);
|
||||
if (!requiresAutocomplete || requiresAutocomplete[0].indexOf('#') >= 0) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const text = requiresAutocomplete[0];
|
||||
|
||||
const replacementRange = new vscode.Range(
|
||||
position.line,
|
||||
position.character - (text.length - 2),
|
||||
position.line,
|
||||
position.character
|
||||
);
|
||||
const resources = this.ws.list().map(resource => {
|
||||
const label = vscode.workspace.asRelativePath(toVsCodeUri(resource.uri));
|
||||
const item = new ResourceCompletionItem(
|
||||
label,
|
||||
vscode.CompletionItemKind.File,
|
||||
resource
|
||||
resource.uri
|
||||
);
|
||||
item.filterText = URI.getBasename(resource.uri);
|
||||
item.sortText =
|
||||
resource.type === 'attachment' ? `1-${item.label}` : `0-${item.label}`;
|
||||
item.filterText = resource.uri.getName();
|
||||
item.insertText = this.ws.getIdentifier(resource.uri);
|
||||
item.range = replacementRange;
|
||||
item.command = COMPLETION_CURSOR_MOVE;
|
||||
item.commitCharacters = linkCommitCharacters;
|
||||
return item;
|
||||
});
|
||||
const aliases = this.ws.list().flatMap(resource =>
|
||||
resource.aliases.map(a => {
|
||||
const item = new ResourceCompletionItem(
|
||||
a.title,
|
||||
vscode.CompletionItemKind.Reference,
|
||||
resource.uri
|
||||
);
|
||||
item.insertText = this.ws.getIdentifier(resource.uri) + '|' + a.title;
|
||||
item.detail = `Alias of ${vscode.workspace.asRelativePath(
|
||||
toVsCodeUri(resource.uri)
|
||||
)}`;
|
||||
item.range = replacementRange;
|
||||
item.command = COMPLETION_CURSOR_MOVE;
|
||||
item.commitCharacters = aliasCommitCharacters;
|
||||
return item;
|
||||
})
|
||||
);
|
||||
const placeholders = Array.from(this.graph.placeholders.values()).map(
|
||||
uri => {
|
||||
const item = new vscode.CompletionItem(
|
||||
@@ -63,18 +226,27 @@ export class CompletionProvider
|
||||
vscode.CompletionItemKind.Interface
|
||||
);
|
||||
item.insertText = uri.path;
|
||||
item.command = COMPLETION_CURSOR_MOVE;
|
||||
item.range = replacementRange;
|
||||
return item;
|
||||
}
|
||||
);
|
||||
|
||||
return new vscode.CompletionList([...resources, ...placeholders]);
|
||||
return new vscode.CompletionList([
|
||||
...resources,
|
||||
...aliases,
|
||||
...placeholders,
|
||||
]);
|
||||
}
|
||||
|
||||
resolveCompletionItem(
|
||||
item: ResourceCompletionItem | vscode.CompletionItem
|
||||
): vscode.ProviderResult<vscode.CompletionItem> {
|
||||
if (item instanceof ResourceCompletionItem) {
|
||||
item.documentation = getNoteTooltip(item.resource.source.text);
|
||||
return this.ws.readAsMarkdown(item.resourceUri).then(text => {
|
||||
item.documentation = getNoteTooltip(text);
|
||||
return item;
|
||||
});
|
||||
}
|
||||
return item;
|
||||
}
|
||||
@@ -87,7 +259,7 @@ class ResourceCompletionItem extends vscode.CompletionItem {
|
||||
constructor(
|
||||
label: string,
|
||||
type: vscode.CompletionItemKind,
|
||||
public resource: Resource
|
||||
public resourceUri: URI
|
||||
) {
|
||||
super(label, type);
|
||||
}
|
||||
|
||||
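The two regular expressions exported above drive when each provider kicks in; a few worked matches (easy to verify in a Node REPL) make the intent concrete:

import { SECTION_REGEX, WIKILINK_REGEX } from './link-completion';

WIKILINK_REGEX.test('[[file]]'); // false – the link is already closed
'[[file]] [['.match(WIKILINK_REGEX)?.[0]; // '[[' – only the still-open link matches
'[[file-name#'.match(SECTION_REGEX)?.[1]; // 'file-name#' – section of another note
'[[#'.match(SECTION_REGEX)?.[1]; // '#' – section of the current note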
@@ -10,8 +10,7 @@ import {
|
||||
import { NavigationProvider } from './navigation-provider';
|
||||
import { OPEN_COMMAND } from './utility-commands';
|
||||
import { toVsCodeUri } from '../utils/vsc-utils';
|
||||
import { createMarkdownParser } from '../core/markdown-provider';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { createMarkdownParser } from '../core/services/markdown-parser';
|
||||
import { FoamGraph } from '../core/model/graph';
|
||||
|
||||
describe('Document navigation', () => {
|
||||
@@ -31,7 +30,7 @@ describe('Document navigation', () => {
|
||||
describe('Document links provider', () => {
|
||||
it('should not return any link for empty documents', async () => {
|
||||
const { uri, content } = await createFile('');
|
||||
const ws = new FoamWorkspace().set(parser.parse(uri, content));
|
||||
const ws = createTestWorkspace().set(parser.parse(uri, content));
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
const doc = await vscode.workspace.openTextDocument(toVsCodeUri(uri));
|
||||
@@ -45,7 +44,7 @@ describe('Document navigation', () => {
|
||||
const { uri, content } = await createFile(
|
||||
'This is some content without links'
|
||||
);
|
||||
const ws = new FoamWorkspace().set(parser.parse(uri, content));
|
||||
const ws = createTestWorkspace().set(parser.parse(uri, content));
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
|
||||
const doc = await vscode.workspace.openTextDocument(toVsCodeUri(uri));
|
||||
@@ -69,12 +68,12 @@ describe('Document navigation', () => {
|
||||
|
||||
expect(links.length).toEqual(1);
|
||||
expect(links[0].target).toEqual(OPEN_COMMAND.asURI(fileA.uri));
|
||||
expect(links[0].range).toEqual(new vscode.Range(0, 18, 0, 28));
|
||||
expect(links[0].range).toEqual(new vscode.Range(0, 20, 0, 26));
|
||||
});
|
||||
|
||||
it('should create links for placeholders', async () => {
|
||||
const fileA = await createFile(`this is a link to [[a placeholder]].`);
|
||||
const ws = new FoamWorkspace().set(
|
||||
const ws = createTestWorkspace().set(
|
||||
parser.parse(fileA.uri, fileA.content)
|
||||
);
|
||||
const graph = FoamGraph.fromWorkspace(ws);
|
||||
@@ -87,7 +86,7 @@ describe('Document navigation', () => {
|
||||
expect(links[0].target).toEqual(
|
||||
OPEN_COMMAND.asURI(URI.placeholder('a placeholder'))
|
||||
);
|
||||
expect(links[0].range).toEqual(new vscode.Range(0, 18, 0, 35));
|
||||
expect(links[0].range).toEqual(new vscode.Range(0, 20, 0, 33));
|
||||
});
|
||||
});
|
||||
|
||||
@@ -232,6 +231,6 @@ describe('Document navigation', () => {
|
||||
range: new vscode.Range(0, 23, 0, 23 + 9),
|
||||
});
|
||||
});
|
||||
it('should provide references for placeholders', async () => {});
|
||||
it.todo('should provide references for placeholders');
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,11 +1,11 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { FoamFeature } from '../types';
|
||||
import { mdDocSelector } from '../utils';
|
||||
import { toVsCodeRange, toVsCodeUri } from '../utils/vsc-utils';
|
||||
import { toVsCodeRange, toVsCodeUri, fromVsCodeUri } from '../utils/vsc-utils';
|
||||
import { OPEN_COMMAND } from './utility-commands';
|
||||
import { Foam } from '../core/model/foam';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { ResourceLink, ResourceParser } from '../core/model/note';
|
||||
import { Resource, ResourceLink, ResourceParser } from '../core/model/note';
|
||||
import { URI } from '../core/model/uri';
|
||||
import { Range } from '../core/model/range';
|
||||
import { FoamGraph } from '../core/model/graph';
|
||||
@@ -70,7 +70,10 @@ export class NavigationProvider
|
||||
document: vscode.TextDocument,
|
||||
position: vscode.Position
|
||||
): vscode.ProviderResult<vscode.Location[]> {
|
||||
const resource = this.parser.parse(document.uri, document.getText());
|
||||
const resource = this.parser.parse(
|
||||
fromVsCodeUri(document.uri),
|
||||
document.getText()
|
||||
);
|
||||
const targetLink: ResourceLink | undefined = resource.links.find(link =>
|
||||
Range.containsPosition(link.range, position)
|
||||
);
|
||||
@@ -95,7 +98,10 @@ export class NavigationProvider
|
||||
document: vscode.TextDocument,
|
||||
position: vscode.Position
|
||||
): vscode.LocationLink[] {
|
||||
const resource = this.parser.parse(document.uri, document.getText());
|
||||
const resource = this.parser.parse(
|
||||
fromVsCodeUri(document.uri),
|
||||
document.getText()
|
||||
);
|
||||
const targetLink: ResourceLink | undefined = resource.links.find(link =>
|
||||
Range.containsPosition(link.range, position)
|
||||
);
|
||||
@@ -104,27 +110,35 @@ export class NavigationProvider
|
||||
}
|
||||
|
||||
const uri = this.workspace.resolveLink(resource, targetLink);
|
||||
if (URI.isPlaceholder(uri)) {
|
||||
if (uri.isPlaceholder()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const targetResource = this.workspace.get(uri);
|
||||
const section = Resource.findSection(targetResource, uri.fragment);
|
||||
|
||||
const result: vscode.LocationLink = {
|
||||
originSelectionRange: toVsCodeRange(targetLink.range),
|
||||
targetUri: toVsCodeUri(uri),
|
||||
targetRange: toVsCodeRange(
|
||||
Range.createFromPosition(
|
||||
const targetRange = section
|
||||
? section.range
|
||||
: Range.createFromPosition(
|
||||
targetResource.source.contentStart,
|
||||
targetResource.source.end
|
||||
)
|
||||
),
|
||||
targetSelectionRange: toVsCodeRange(
|
||||
Range.createFromPosition(
|
||||
targetResource.source.contentStart,
|
||||
targetResource.source.contentStart
|
||||
)
|
||||
);
|
||||
const targetSelectionRange = section
|
||||
? section.range
|
||||
: Range.createFromPosition(targetRange.start);
|
||||
|
||||
const result: vscode.LocationLink = {
|
||||
originSelectionRange: new vscode.Range(
|
||||
targetLink.range.start.line,
|
||||
targetLink.range.start.character +
|
||||
(targetLink.type === 'wikilink' ? 2 : 0),
|
||||
targetLink.range.end.line,
|
||||
targetLink.range.end.character -
|
||||
(targetLink.type === 'wikilink' ? 2 : 0)
|
||||
),
|
||||
targetUri: toVsCodeUri(uri.asPlain()),
|
||||
targetRange: toVsCodeRange(targetRange),
|
||||
targetSelectionRange: toVsCodeRange(targetSelectionRange),
|
||||
};
|
||||
return [result];
|
||||
}
|
||||
@@ -135,7 +149,10 @@ export class NavigationProvider
|
||||
public provideDocumentLinks(
|
||||
document: vscode.TextDocument
|
||||
): vscode.DocumentLink[] {
|
||||
const resource = this.parser.parse(document.uri, document.getText());
|
||||
const resource = this.parser.parse(
|
||||
fromVsCodeUri(document.uri),
|
||||
document.getText()
|
||||
);
|
||||
|
||||
const targets: { link: ResourceLink; target: URI }[] = resource.links.map(
|
||||
link => ({
|
||||
@@ -145,14 +162,19 @@ export class NavigationProvider
|
||||
);
|
||||
|
||||
return targets.map(o => {
|
||||
const command = OPEN_COMMAND.asURI(toVsCodeUri(o.target));
|
||||
const command = OPEN_COMMAND.asURI(o.target);
|
||||
const documentLink = new vscode.DocumentLink(
|
||||
toVsCodeRange(o.link.range),
|
||||
new vscode.Range(
|
||||
o.link.range.start.line,
|
||||
o.link.range.start.character + 2,
|
||||
o.link.range.end.line,
|
||||
o.link.range.end.character - 2
|
||||
),
|
||||
command
|
||||
);
|
||||
documentLink.tooltip = URI.isPlaceholder(o.target)
|
||||
documentLink.tooltip = o.target.isPlaceholder()
|
||||
? `Create note for '${o.target.path}'`
|
||||
: `Go to ${URI.toFsPath(o.target)}`;
|
||||
: `Go to ${o.target.toFsPath()}`;
|
||||
return documentLink;
|
||||
});
|
||||
}
|
||||
|
||||
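The updated spec expectations above (ranges shrinking from (0, 18, 0, 28) to (0, 20, 0, 26) and so on) come from the provider now excluding the wikilink brackets from the ranges it reports. A minimal sketch of that trimming, assuming a parsed ResourceLink as used in the provider:

import * as vscode from 'vscode';
import { ResourceLink } from '../core/model/note';

// Exclude the surrounding `[[` and `]]` from the range surfaced to VS Code.
function clickableRange(link: ResourceLink): vscode.Range {
  const pad = link.type === 'wikilink' ? 2 : 0;
  return new vscode.Range(
    link.range.start.line,
    link.range.start.character + pad,
    link.range.end.line,
    link.range.end.character - pad
  );
}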
27
packages/foam-vscode/src/features/open-daily-note.spec.ts
Normal file
@@ -0,0 +1,27 @@
|
||||
import dateFormat from 'dateformat';
|
||||
import { commands, window } from 'vscode';
|
||||
|
||||
describe('Open daily note command', () => {
|
||||
it('offers to pick which template to use', async () => {
|
||||
const spy = jest
|
||||
.spyOn(window, 'showQuickPick')
|
||||
.mockImplementationOnce(jest.fn(() => Promise.resolve(undefined)));
|
||||
|
||||
await commands.executeCommand('foam-vscode.open-daily-note-for-date');
|
||||
|
||||
expect(spy).toBeCalledWith(
|
||||
expect.objectContaining([
|
||||
expect.objectContaining({
|
||||
label: expect.stringContaining(
|
||||
dateFormat(new Date(), 'mmm dd, yyyy')
|
||||
),
|
||||
}),
|
||||
]),
|
||||
{
|
||||
placeHolder: 'Choose or type a date (YYYY-MM-DD)',
|
||||
matchOnDescription: true,
|
||||
matchOnDetail: true,
|
||||
}
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -1,18 +1,83 @@
|
||||
import { ExtensionContext, commands, workspace } from 'vscode';
|
||||
import { ExtensionContext, commands, window, QuickPickItem } from 'vscode';
|
||||
import { FoamFeature } from '../types';
|
||||
import { getFoamVsCodeConfig } from '../services/config';
|
||||
import { openDailyNoteFor } from '../dated-notes';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { range } from 'lodash';
|
||||
import dateFormat from 'dateformat';
|
||||
|
||||
const feature: FoamFeature = {
|
||||
activate: (context: ExtensionContext) => {
|
||||
activate: (context: ExtensionContext, foamPromise) => {
|
||||
context.subscriptions.push(
|
||||
commands.registerCommand('foam-vscode.open-daily-note', openDailyNoteFor)
|
||||
commands.registerCommand('foam-vscode.open-daily-note', () =>
|
||||
openDailyNoteFor(new Date())
|
||||
)
|
||||
);
|
||||
if (
|
||||
workspace.getConfiguration('foam').get('openDailyNote.onStartup', false)
|
||||
) {
|
||||
|
||||
context.subscriptions.push(
|
||||
commands.registerCommand(
|
||||
'foam-vscode.open-daily-note-for-date',
|
||||
async () => {
|
||||
const ws = (await foamPromise).workspace;
|
||||
const date = await window
|
||||
.showQuickPick<DateItem>(generateDateItems(ws), {
|
||||
placeHolder: 'Choose or type a date (YYYY-MM-DD)',
|
||||
matchOnDescription: true,
|
||||
matchOnDetail: true,
|
||||
})
|
||||
.then(item => {
|
||||
return item?.date;
|
||||
});
|
||||
return openDailyNoteFor(date);
|
||||
}
|
||||
)
|
||||
);
|
||||
|
||||
if (getFoamVsCodeConfig('openDailyNote.onStartup', false)) {
|
||||
commands.executeCommand('foam-vscode.open-daily-note');
|
||||
}
|
||||
},
|
||||
};
|
||||
|
||||
class DateItem implements QuickPickItem {
|
||||
public label: string;
|
||||
public detail: string;
|
||||
public description: string;
|
||||
public alwaysShow?: boolean;
|
||||
constructor(public date: Date, offset: number, public exists: boolean) {
|
||||
const icon = exists ? '$(calendar)' : '$(new-file)';
|
||||
this.label = `${icon} ${dateFormat(date, 'mmm dd, yyyy')}`;
|
||||
this.detail = dateFormat(date, 'dddd');
|
||||
if (offset === 0) {
|
||||
this.detail = 'Today';
|
||||
} else if (offset === -1) {
|
||||
this.detail = 'Yesterday';
|
||||
} else if (offset === 1) {
|
||||
this.detail = 'Tomorrow';
|
||||
} else if (offset > -8 && offset < -1) {
|
||||
this.detail = `Last ${dateFormat(date, 'dddd')}`;
|
||||
} else if (offset > 1 && offset < 8) {
|
||||
this.detail = `Next ${dateFormat(date, 'dddd')}`;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function generateDateItems(ws: FoamWorkspace): DateItem[] {
|
||||
const items = [
|
||||
...range(0, 32), // next month
|
||||
...range(-31, 0), // last month
|
||||
].map(offset => {
|
||||
const date = new Date();
|
||||
date.setDate(date.getDate() + offset);
|
||||
// TODO this is only compatible with default settings as it would
|
||||
// be otherwise hard to "guess" the daily note path
|
||||
// Ideally we would read the daily note path from the config or template to properly match
|
||||
const noteBasename = dateFormat(date, 'yyyy-mm-dd', false);
|
||||
const exists = ws.find(noteBasename) ? true : false;
|
||||
return new DateItem(date, offset, exists);
|
||||
});
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
export default feature;
|
||||
|
||||
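The DateItem above turns a day offset into a friendly description; the same logic, isolated as a small function for clarity (it mirrors the constructor and uses the same 'dateformat' package):

import dateFormat from 'dateformat';

function detailFor(date: Date, offset: number): string {
  if (offset === 0) return 'Today';
  if (offset === -1) return 'Yesterday';
  if (offset === 1) return 'Tomorrow';
  if (offset > -8 && offset < -1) return `Last ${dateFormat(date, 'dddd')}`;
  if (offset > 1 && offset < 8) return `Next ${dateFormat(date, 'dddd')}`;
  return dateFormat(date, 'dddd'); // beyond one week: just the weekday name
}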
@@ -13,7 +13,6 @@ import {
|
||||
createDailyNoteIfNotExists,
|
||||
getDailyNoteFileName,
|
||||
openDailyNoteFor,
|
||||
getDailyNotePath,
|
||||
} from '../dated-notes';
|
||||
import { FoamFeature } from '../types';
|
||||
|
||||
@@ -215,11 +214,7 @@ const datedNoteCommand = (date: Date) => {
|
||||
return openDailyNoteFor(date);
|
||||
}
|
||||
if (foamNavigateOnSelect === 'createNote') {
|
||||
return createDailyNoteIfNotExists(
|
||||
foamConfig,
|
||||
getDailyNotePath(foamConfig, date),
|
||||
date
|
||||
);
|
||||
return createDailyNoteIfNotExists(date);
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
import { URI } from '../core/model/uri';
|
||||
import { ExtensionContext, commands, window } from 'vscode';
|
||||
import { FoamFeature } from '../types';
|
||||
import { focusNote } from '../utils';
|
||||
@@ -10,9 +9,7 @@ const feature: FoamFeature = {
|
||||
commands.registerCommand('foam-vscode.open-random-note', async () => {
|
||||
const foam = await foamPromise;
|
||||
const currentFile = window.activeTextEditor?.document.uri.path;
|
||||
const notes = foam.workspace
|
||||
.list()
|
||||
.filter(r => URI.isMarkdownFile(r.uri));
|
||||
const notes = foam.workspace.list().filter(r => r.uri.isMarkdown());
|
||||
if (notes.length <= 1) {
|
||||
window.showInformationMessage(
|
||||
'Could not find another note to open. If you believe this is a bug, please file an issue.'
|
||||
|
||||
@@ -29,7 +29,7 @@ const feature: FoamFeature = {
|
||||
workspacesURIs,
|
||||
() => foam.graph.getAllNodes().filter(uri => isOrphan(uri, foam.graph)),
|
||||
uri => {
|
||||
if (URI.isPlaceholder(uri)) {
|
||||
if (uri.isPlaceholder()) {
|
||||
return new UriTreeItem(uri);
|
||||
}
|
||||
const resource = foam.workspace.find(uri);
|
||||
@@ -40,9 +40,7 @@ const feature: FoamFeature = {
|
||||
context.subscriptions.push(
|
||||
vscode.window.registerTreeDataProvider('foam-vscode.orphans', provider),
|
||||
...provider.commands,
|
||||
foam.workspace.onDidAdd(() => provider.refresh()),
|
||||
foam.workspace.onDidUpdate(() => provider.refresh()),
|
||||
foam.workspace.onDidDelete(() => provider.refresh())
|
||||
foam.graph.onDidUpdate(() => provider.refresh())
|
||||
);
|
||||
},
|
||||
};
|
||||
|
||||
@@ -30,7 +30,7 @@ const feature: FoamFeature = {
|
||||
.getAllNodes()
|
||||
.filter(uri => isPlaceholderResource(uri, foam.workspace)),
|
||||
uri => {
|
||||
if (URI.isPlaceholder(uri)) {
|
||||
if (uri.isPlaceholder()) {
|
||||
return new UriTreeItem(uri);
|
||||
}
|
||||
const resource = foam.workspace.find(uri);
|
||||
@@ -44,9 +44,7 @@ const feature: FoamFeature = {
|
||||
provider
|
||||
),
|
||||
...provider.commands,
|
||||
foam.workspace.onDidAdd(() => provider.refresh()),
|
||||
foam.workspace.onDidUpdate(() => provider.refresh()),
|
||||
foam.workspace.onDidDelete(() => provider.refresh())
|
||||
foam.graph.onDidUpdate(() => provider.refresh())
|
||||
);
|
||||
},
|
||||
};
|
||||
@@ -54,7 +52,7 @@ const feature: FoamFeature = {
|
||||
export default feature;
|
||||
|
||||
export function isPlaceholderResource(uri: URI, workspace: FoamWorkspace) {
|
||||
if (URI.isPlaceholder(uri)) {
|
||||
if (uri.isPlaceholder()) {
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
@@ -1,11 +1,19 @@
|
||||
import MarkdownIt from 'markdown-it';
|
||||
import { createMarkdownParser } from '../core/services/markdown-parser';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { createTestNote } from '../test/test-utils';
|
||||
import { getUriInWorkspace } from '../test/test-utils-vscode';
|
||||
import {
|
||||
createFile,
|
||||
deleteFile,
|
||||
getUriInWorkspace,
|
||||
withModifiedFoamConfiguration,
|
||||
} from '../test/test-utils-vscode';
|
||||
import {
|
||||
CONFIG_EMBED_NOTE_IN_CONTAINER,
|
||||
markdownItWithFoamLinks,
|
||||
markdownItWithFoamTags,
|
||||
markdownItWithNoteInclusion,
|
||||
markdownItWithRemoveLinkReferences,
|
||||
} from './preview-navigation';
|
||||
|
||||
describe('Link generation in preview', () => {
|
||||
@@ -17,7 +25,11 @@ describe('Link generation in preview', () => {
|
||||
links: [{ slug: 'placeholder' }],
|
||||
});
|
||||
const ws = new FoamWorkspace().set(noteA);
|
||||
const md = markdownItWithFoamLinks(MarkdownIt(), ws);
|
||||
|
||||
const md = [
|
||||
markdownItWithFoamLinks,
|
||||
markdownItWithRemoveLinkReferences,
|
||||
].reduce((acc, extension) => extension(acc, ws), MarkdownIt());
|
||||
|
||||
it('generates a link to a note', () => {
|
||||
expect(md.render(`[[note-a]]`)).toEqual(
|
||||
@@ -36,15 +48,18 @@ describe('Link generation in preview', () => {
|
||||
`<p><a class='foam-placeholder-link' title="Link to non-existing resource" href="javascript:void(0);">random-text</a></p>\n`
|
||||
);
|
||||
});
|
||||
|
||||
it('generates a wikilink even when there is a link reference', () => {
|
||||
const note = `[[note-a]]
|
||||
[note-a]: <note-a.md> "Note A"`;
|
||||
expect(md.render(note)).toEqual(
|
||||
`<p><a class='foam-note-link' title='${noteA.title}' href='/path/to/note-a.md' data-href='/path/to/note-a.md'>note-a</a>\n[note-a]: <note-a.md> "Note A"</p>\n`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Stylable tag generation in preview', () => {
|
||||
const noteB = createTestNote({
|
||||
uri: 'note-b.md',
|
||||
title: 'Note B',
|
||||
});
|
||||
const ws = new FoamWorkspace().set(noteB);
|
||||
const md = markdownItWithFoamTags(MarkdownIt(), ws);
|
||||
const md = markdownItWithFoamTags(MarkdownIt(), new FoamWorkspace());
|
||||
|
||||
it('transforms a string containing multiple tags to a stylable html element', () => {
|
||||
expect(md.render(`Lorem #ipsum dolor #sit`)).toMatch(
|
||||
@@ -60,53 +75,114 @@ describe('Stylable tag generation in preview', () => {
|
||||
});
|
||||
|
||||
describe('Displaying included notes in preview', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: 'note-a.md',
|
||||
text: 'This is the text of note A',
|
||||
});
|
||||
const noteC = createTestNote({
|
||||
uri: 'note-c.md',
|
||||
text: 'This is the text of note C which includes ![[note-d]]',
|
||||
});
|
||||
const noteD = createTestNote({
|
||||
uri: 'note-d.md',
|
||||
text: 'This is the text of note D which includes ![[note-c]]',
|
||||
});
|
||||
const ws = new FoamWorkspace()
|
||||
.set(noteA)
|
||||
.set(noteC)
|
||||
.set(noteD);
|
||||
const md = markdownItWithNoteInclusion(MarkdownIt(), ws);
|
||||
it('should render an included note in flat mode', async () => {
|
||||
const note = createTestNote({
|
||||
uri: 'note-a.md',
|
||||
text: 'This is the text of note A',
|
||||
});
|
||||
const ws = new FoamWorkspace().set(note);
|
||||
await withModifiedFoamConfiguration(
|
||||
CONFIG_EMBED_NOTE_IN_CONTAINER,
|
||||
false,
|
||||
() => {
|
||||
const md = markdownItWithNoteInclusion(MarkdownIt(), ws);
|
||||
|
||||
it('renders an included note', () => {
|
||||
expect(
|
||||
md.render(`This is the root node.
|
||||
|
||||
![[note-a]]`)
|
||||
).toMatch(
|
||||
`<p>This is the root node.</p>
|
||||
expect(
|
||||
md.render(`This is the root node.
|
||||
|
||||
![[note-a]]`)
|
||||
).toMatch(
|
||||
`<p>This is the root node.</p>
|
||||
<p><p>This is the text of note A</p>
|
||||
</p>`
|
||||
);
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it('displays the syntax when a note is not found', () => {
|
||||
expect(
|
||||
md.render(`This is the root node.
|
||||
![[note-b]]`)
|
||||
).toMatch(
|
||||
`<p>This is the root node.
|
||||
![[note-b]]</p>
|
||||
`
|
||||
it('should render an included note in container mode', async () => {
|
||||
const note = createTestNote({
|
||||
uri: 'note-a.md',
|
||||
text: 'This is the text of note A',
|
||||
});
|
||||
const ws = new FoamWorkspace().set(note);
|
||||
await withModifiedFoamConfiguration(
|
||||
CONFIG_EMBED_NOTE_IN_CONTAINER,
|
||||
true,
|
||||
() => {
|
||||
const md = markdownItWithNoteInclusion(MarkdownIt(), ws);
|
||||
|
||||
const res = md.render(`This is the root node. ![[note-a]]`);
|
||||
expect(res).toContain('This is the root node');
|
||||
expect(res).toContain('embed-container-note');
|
||||
expect(res).toContain('This is the text of note A');
|
||||
}
|
||||
);
|
||||
});
|
||||
|
||||
it('displays a warning in case of cyclical inclusions', () => {
|
||||
expect(md.render(noteD.source.text)).toMatch(
|
||||
`<p>This is the text of note D which includes <p>This is the text of note C which includes <p>This is the text of note D which includes <div class="foam-cyclic-link-warning">Cyclic link detected for wikilink: note-c</div></p>
|
||||
</p>
|
||||
</p>
|
||||
`
|
||||
it('should render an included section', async () => {
|
||||
// here we use createFile as the test note doesn't fill in
|
||||
// all the metadata we need
|
||||
const note = await createFile(
|
||||
`
|
||||
# Section 1
|
||||
This is the first section of note D
|
||||
|
||||
# Section 2
|
||||
This is the second section of note D
|
||||
|
||||
# Section 3
|
||||
This is the third section of note D
|
||||
`,
|
||||
['note-e.md']
|
||||
);
|
||||
const parser = createMarkdownParser([]);
|
||||
const ws = new FoamWorkspace().set(parser.parse(note.uri, note.content));
|
||||
const md = markdownItWithNoteInclusion(MarkdownIt(), ws);
|
||||
|
||||
await withModifiedFoamConfiguration(
|
||||
CONFIG_EMBED_NOTE_IN_CONTAINER,
|
||||
false,
|
||||
() => {
|
||||
expect(
|
||||
md.render(`This is the root node.
|
||||
|
||||
![[note-e#Section 2]]`)
|
||||
).toMatch(
|
||||
`<p>This is the root node.</p>
|
||||
<p><h1>Section 2</h1>
|
||||
<p>This is the second section of note D</p>
|
||||
</p>`
|
||||
);
|
||||
}
|
||||
);
|
||||
|
||||
await deleteFile(note);
|
||||
});
|
||||
|
||||
it('should fallback to the bare text when the note is not found', () => {
|
||||
const md = markdownItWithNoteInclusion(MarkdownIt(), new FoamWorkspace());
|
||||
|
||||
expect(md.render(`This is the root node. ![[non-existing-note]]`)).toMatch(
|
||||
`<p>This is the root node. ![[non-existing-note]]</p>`
|
||||
);
|
||||
});
|
||||
|
||||
it('should display a warning in case of cyclical inclusions', () => {
|
||||
const noteA = createTestNote({
|
||||
uri: 'note-a.md',
|
||||
text: 'This is the text of note A which includes ![[note-b]]',
|
||||
});
|
||||
const noteB = createTestNote({
|
||||
uri: 'note-b.md',
|
||||
text: 'This is the text of note B which includes ![[note-a]]',
|
||||
});
|
||||
const ws = new FoamWorkspace().set(noteA).set(noteB);
|
||||
const md = markdownItWithNoteInclusion(MarkdownIt(), ws);
|
||||
const res = md.render(noteB.source.text);
|
||||
|
||||
expect(res).toContain('This is the text of note B which includes');
|
||||
expect(res).toContain('This is the text of note A which includes');
|
||||
expect(res).toContain('Cyclic link detected for wikilink: note-a');
|
||||
});
|
||||
});
|
||||
|
||||
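The spec above composes the preview plugins by reducing over a list of extensions; the same pattern generalizes to the full set exported by preview-navigation.ts. A sketch (the exact set and order used by the extension itself may differ):

import MarkdownIt from 'markdown-it';
import { FoamWorkspace } from '../core/model/workspace';
import {
  markdownItWithFoamLinks,
  markdownItWithFoamTags,
  markdownItWithNoteInclusion,
  markdownItWithRemoveLinkReferences,
} from './preview-navigation';

const ws = new FoamWorkspace();
const md = [
  markdownItWithNoteInclusion,
  markdownItWithFoamLinks,
  markdownItWithRemoveLinkReferences,
  markdownItWithFoamTags,
].reduce((acc, extension) => extension(acc, ws), MarkdownIt());

md.render('Some #tag, a [[wikilink]] and an embed ![[note-a]]');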
@@ -1,14 +1,16 @@
|
||||
import markdownItRegex from 'markdown-it-regex';
|
||||
import * as vscode from 'vscode';
|
||||
import { FoamFeature } from '../types';
|
||||
import { isNone } from '../utils';
|
||||
import { isNone, isSome } from '../utils';
|
||||
import { Foam } from '../core/model/foam';
|
||||
import { FoamWorkspace } from '../core/model/workspace';
|
||||
import { Logger } from '../core/utils/log';
|
||||
import { toVsCodeUri } from '../utils/vsc-utils';
|
||||
|
||||
const ALIAS_DIVIDER_CHAR = '|';
|
||||
const refsStack: string[] = [];
|
||||
import { Resource } from '../core/model/note';
|
||||
import { MarkdownLink } from '../core/services/markdown-link';
|
||||
import { Range } from '../core/model/range';
|
||||
import { isEmpty } from 'lodash';
|
||||
import { getFoamVsCodeConfig } from '../services/config';
|
||||
|
||||
const feature: FoamFeature = {
|
||||
activate: async (
|
||||
@@ -30,6 +32,8 @@ const feature: FoamFeature = {
|
||||
},
|
||||
};
|
||||
|
||||
export const CONFIG_EMBED_NOTE_IN_CONTAINER = 'preview.embedNoteInContainer';
|
||||
const refsStack: string[] = [];
|
||||
export const markdownItWithNoteInclusion = (
|
||||
md: markdownit,
|
||||
workspace: FoamWorkspace
|
||||
@@ -45,20 +49,53 @@ export const markdownItWithNoteInclusion = (
|
||||
return `![[${wikilink}]]`;
|
||||
}
|
||||
|
||||
const cyclicLinkDetected = refsStack.includes(wikilink);
|
||||
const cyclicLinkDetected = refsStack.includes(
|
||||
includedNote.uri.path.toLocaleLowerCase()
|
||||
);
|
||||
|
||||
if (!cyclicLinkDetected) {
|
||||
refsStack.push(wikilink.toLowerCase());
|
||||
refsStack.push(includedNote.uri.path.toLocaleLowerCase());
|
||||
}
|
||||
|
||||
const html = cyclicLinkDetected
|
||||
? `<div class="foam-cyclic-link-warning">Cyclic link detected for wikilink: ${wikilink}</div>`
|
||||
: md.render(includedNote.source.text);
|
||||
|
||||
if (!cyclicLinkDetected) {
|
||||
refsStack.pop();
|
||||
if (cyclicLinkDetected) {
|
||||
return `<div class="foam-cyclic-link-warning">Cyclic link detected for wikilink: ${wikilink}</div>`;
|
||||
}
|
||||
|
||||
let content = `Embed for [[${wikilink}]]`;
|
||||
switch (includedNote.type) {
|
||||
case 'note':
|
||||
content = getFoamVsCodeConfig(CONFIG_EMBED_NOTE_IN_CONTAINER)
|
||||
? `<div class="embed-container-note">${md.render(
|
||||
includedNote.source.text
|
||||
)}</div>`
|
||||
: includedNote.source.text;
|
||||
break;
|
||||
case 'attachment':
|
||||
content = `
|
||||
<div class="embed-container-attachment">
|
||||
${md.renderInline('[[' + wikilink + ']]')}<br/>
|
||||
Embed for attachments is not supported
|
||||
</div>`;
|
||||
break;
|
||||
case 'image':
|
||||
content = `<div class="embed-container-image">${md.render(
|
||||
`
|
||||
)})`
|
||||
)}</div>`;
|
||||
break;
|
||||
}
|
||||
const section = Resource.findSection(
|
||||
includedNote,
|
||||
includedNote.uri.fragment
|
||||
);
|
||||
if (isSome(section)) {
|
||||
const rows = content.split('\n');
|
||||
content = rows
|
||||
.slice(section.range.start.line, section.range.end.line)
|
||||
.join('\n');
|
||||
}
|
||||
const html = md.render(content);
|
||||
refsStack.pop();
|
||||
return html;
|
||||
} catch (e) {
|
||||
Logger.error(
|
||||
@@ -80,22 +117,20 @@ export const markdownItWithFoamLinks = (
|
||||
regex: /\[\[([^[\]]+?)\]\]/,
|
||||
replace: (wikilink: string) => {
|
||||
try {
|
||||
const linkHasAlias = wikilink.includes(ALIAS_DIVIDER_CHAR);
|
||||
const resourceLink = linkHasAlias
|
||||
? wikilink.substring(0, wikilink.indexOf('|'))
|
||||
: wikilink;
|
||||
const { target, alias } = MarkdownLink.analyzeLink({
|
||||
rawText: '[[' + wikilink + ']]',
|
||||
type: 'wikilink',
|
||||
range: Range.create(0, 0),
|
||||
});
|
||||
const label = isEmpty(alias) ? target : alias;
|
||||
|
||||
const resource = workspace.find(resourceLink);
|
||||
const resource = workspace.find(target);
|
||||
if (isNone(resource)) {
|
||||
return getPlaceholderLink(resourceLink);
|
||||
return getPlaceholderLink(label);
|
||||
}
|
||||
|
||||
const linkLabel = linkHasAlias
|
||||
? wikilink.substr(wikilink.indexOf('|') + 1)
|
||||
: wikilink;
|
||||
|
||||
const link = vscode.workspace.asRelativePath(toVsCodeUri(resource.uri));
|
||||
return `<a class='foam-note-link' title='${resource.title}' href='/${link}' data-href='/${link}'>${linkLabel}</a>`;
|
||||
return `<a class='foam-note-link' title='${resource.title}' href='/${link}' data-href='/${link}'>${label}</a>`;
|
||||
} catch (e) {
|
||||
Logger.error(
|
||||
`Error while creating link for [[${wikilink}]] in Preview panel`,
|
||||
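MarkdownLink.analyzeLink now does the alias splitting that the old string slicing handled. Its return shape is only implied by the usage above, so treat this as an assumption-laden sketch rather than documented behaviour:

import { MarkdownLink } from '../core/services/markdown-link';
import { Range } from '../core/model/range';

const { target, alias } = MarkdownLink.analyzeLink({
  rawText: '[[note-a|My alias]]',
  type: 'wikilink',
  range: Range.create(0, 0),
});
// expected here (assumption): target === 'note-a', alias === 'My alias'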
@@ -143,18 +178,17 @@ export const markdownItWithRemoveLinkReferences = (
|
||||
) => {
|
||||
md.inline.ruler.before('link', 'clear-references', state => {
|
||||
if (state.env.references) {
|
||||
Object.keys(state.env.references).forEach(refKey => {
|
||||
// Forget about reference links that contain an alias divider
|
||||
// Aliased reference links will lead the MarkdownParser to include wrong link references
|
||||
if (refKey.includes(ALIAS_DIVIDER_CHAR)) {
|
||||
delete state.env.references[refKey];
|
||||
}
|
||||
const src = state.src.toLowerCase();
|
||||
const foamLinkRegEx = /\[\[([^[\]]+?)\]\]/g;
|
||||
const foamLinks = [...src.matchAll(foamLinkRegEx)].map(m =>
|
||||
m[1].toLowerCase()
|
||||
);
|
||||
|
||||
// When the reference is present due to an inclusion of that note, we
|
||||
// need to remove that reference. This ensures the MarkdownIt parser
|
||||
// will not replace the wikilink syntax with an <a href> link and as a result
|
||||
// break our inclusion logic.
|
||||
if (state.src.toLowerCase().includes(`![[${refKey.toLowerCase()}]]`)) {
|
||||
Object.keys(state.env.references).forEach(refKey => {
|
||||
// Remove all references that have corresponding wikilinks.
|
||||
// If the markdown parser sees a reference, it will format it before
|
||||
// we get a chance to create the wikilink.
|
||||
if (foamLinks.includes(refKey.toLowerCase())) {
|
||||
delete state.env.references[refKey];
|
||||
}
|
||||
});
|
||||
|
||||
207
packages/foam-vscode/src/features/refactor.spec.ts
Normal file
@@ -0,0 +1,207 @@
|
||||
import { wait, waitForExpect } from '../test/test-utils';
|
||||
import {
|
||||
closeEditors,
|
||||
createFile,
|
||||
cleanWorkspace,
|
||||
readFile,
|
||||
renameFile,
|
||||
showInEditor,
|
||||
} from '../test/test-utils-vscode';
|
||||
|
||||
describe('Note rename sync', () => {
|
||||
beforeAll(async () => {
|
||||
await closeEditors();
|
||||
await cleanWorkspace();
|
||||
});
|
||||
afterAll(closeEditors);
|
||||
|
||||
describe('wikilinks', () => {
|
||||
it('should sync wikilinks to renamed notes', async () => {
|
||||
const noteA = await createFile(`Content of note A`, [
|
||||
'refactor',
|
||||
'wikilinks',
|
||||
'rename-note-a.md',
|
||||
]);
|
||||
const noteB = await createFile(
|
||||
`Link to [[${noteA.name}]]. Also a [[placeholder]] and again [[${noteA.name}]]`,
|
||||
['refactor', 'wikilinks', 'rename-note-b.md']
|
||||
);
|
||||
const noteC = await createFile(`Link to [[${noteA.name}]] from note C.`, [
|
||||
'refactor',
|
||||
'wikilinks',
|
||||
'rename-note-c.md',
|
||||
]);
|
||||
const { doc } = await showInEditor(noteB.uri);
|
||||
|
||||
const newName = 'renamed-note-a';
|
||||
const newUri = noteA.uri.resolve(newName);
|
||||
|
||||
// wait for workspace files to be added to graph (because of graph debounced update)
|
||||
// TODO this should be replaced by either a force-refresh command or by Foam updating immediately in test mode
|
||||
await wait(600);
|
||||
await renameFile(noteA.uri, newUri);
|
||||
|
||||
await waitForExpect(async () => {
|
||||
// check it updates documents open in editors
|
||||
expect(doc.getText().trim()).toEqual(
|
||||
`Link to [[${newName}]]. Also a [[placeholder]] and again [[${newName}]]`
|
||||
);
|
||||
// and documents not open in editors
|
||||
expect((await readFile(noteC.uri)).trim()).toEqual(
|
||||
`Link to [[${newName}]] from note C.`
|
||||
);
|
||||
}, 1000);
|
||||
});
|
||||
|
||||
it('should use the best identifier based on the new note location', async () => {
|
||||
const noteA = await createFile(`Content of note A`, [
|
||||
'refactor',
|
||||
'wikilink',
|
||||
'first',
|
||||
'note-a.md',
|
||||
]);
|
||||
await createFile(`Content of note B`, [
|
||||
'refactor',
|
||||
'wikilink',
|
||||
'second',
|
||||
'note-b.md',
|
||||
]);
|
||||
const noteC = await createFile(`Link to [[${noteA.name}]] from note C.`);
|
||||
|
||||
const { doc } = await showInEditor(noteC.uri);
|
||||
|
||||
// rename note A
|
||||
const newUri = noteA.uri.resolve('note-b.md');
|
||||
|
||||
// wait for workspace files to be added to graph (because of graph debounced update)
|
||||
// TODO this should be replaced by either a force-refresh command or by Foam updating immediately in test mode
|
||||
await wait(600);
|
||||
await renameFile(noteA.uri, newUri);
|
||||
|
||||
await waitForExpect(async () => {
|
||||
expect(doc.getText().trim()).toEqual(
|
||||
`Link to [[first/note-b]] from note C.`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should use the best identifier when moving the note to another directory', async () => {
|
||||
const noteA = await createFile(`Content of note A`, [
|
||||
'refactor',
|
||||
'wikilink',
|
||||
'first',
|
||||
'note-a.md',
|
||||
]);
|
||||
await createFile(`Content of note B`, [
|
||||
'refactor',
|
||||
'wikilink',
|
||||
'second',
|
||||
'note-b.md',
|
||||
]);
|
||||
const noteC = await createFile(`Link to [[${noteA.name}]] from note C.`);
|
||||
|
||||
const { doc } = await showInEditor(noteC.uri);
|
||||
|
||||
const newUri = noteA.uri.resolve('../second/note-a.md');
|
||||
|
||||
// wait for workspace files to be added to graph (because of graph debounced update)
|
||||
// TODO this should be replaced by either a force-refresh command or by Foam updating immediately in test mode
|
||||
await wait(600);
|
||||
await renameFile(noteA.uri, newUri);
|
||||
|
||||
await waitForExpect(async () => {
|
||||
expect(doc.getText().trim()).toEqual(`Link to [[note-a]] from note C.`);
|
||||
});
|
||||
});
|
||||
|
||||
it('should keep the alias in wikilinks', async () => {
|
||||
const noteA = await createFile(`Content of note A`);
|
||||
const noteB = await createFile(`Link to [[${noteA.name}|Alias]]`);
|
||||
|
||||
const { doc } = await showInEditor(noteB.uri);
|
||||
|
||||
// rename note A
|
||||
const newUri = noteA.uri.resolve('new-note-a.md');
|
||||
// wait for workspace files to be added to graph (because of graph debounced update)
|
||||
// TODO this should be replaced by either a force-refresh command or by Foam updating immediately in test mode
|
||||
await wait(600);
|
||||
await renameFile(noteA.uri, newUri);
|
||||
|
||||
await waitForExpect(async () => {
|
||||
expect(doc.getText().trim()).toEqual(`Link to [[new-note-a|Alias]]`);
|
||||
});
|
||||
});
|
||||
|
||||
it('should keep the section part of the wikilink', async () => {
|
||||
const noteA = await createFile(`Content of note A`);
|
||||
const noteB = await createFile(`Link to [[${noteA.name}#Section]]`);
|
||||
|
||||
const { doc } = await showInEditor(noteB.uri);
|
||||
|
||||
// rename note A
|
||||
const newUri = noteA.uri.resolve('new-note-with-section.md');
|
||||
// wait for workspace files to be added to graph (because of graph debounced update)
|
||||
// TODO this should be replaced by either a force-refresh command or by Foam updating immediately in test mode
|
||||
await wait(600);
|
||||
await renameFile(noteA.uri, newUri);
|
||||
|
||||
await waitForExpect(async () => {
|
||||
expect(doc.getText().trim()).toEqual(
|
||||
`Link to [[new-note-with-section#Section]]`
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
it('should sync when moving the note to a new folder', async () => {
|
||||
const noteA = await createFile(`Content of note A`, [
|
||||
'refactor',
|
||||
'first',
|
||||
'note-a.md',
|
||||
]);
|
||||
const noteC = await createFile(`Link to [[note-a]] from note C.`);
|
||||
|
||||
const newUri = noteA.uri.resolve('../note-a.md');
|
||||
// wait for workspace files to be added to graph (because of graph debounced update)
|
||||
// TODO this should be replaced by either a force-refresh command or by Foam updating immediately in test mode
|
||||
await wait(600);
|
||||
await renameFile(noteA.uri, newUri);
|
||||
|
||||
const content = await readFile(noteC.uri);
|
||||
await waitForExpect(async () => {
|
||||
expect(content.trim()).toEqual(`Link to [[note-a]] from note C.`);
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe('direct links', () => {
|
||||
beforeAll(async () => {
|
||||
await closeEditors();
|
||||
await cleanWorkspace();
|
||||
});
|
||||
beforeEach(closeEditors);
|
||||
|
||||
it('should rename relative direct links', async () => {
|
||||
const noteA = await createFile(
|
||||
`Content of note A. Lorem etc etc etc etc`,
|
||||
['refactor', 'direct-links', 'f1', 'note-a.md']
|
||||
);
|
||||
const noteB = await createFile(
|
||||
`Link to [note](../f1/note-a.md) from note B.`,
|
||||
['refactor', 'direct-links', 'f2', 'note-b.md']
|
||||
);
|
||||
const { doc } = await showInEditor(noteB.uri);
|
||||
|
||||
const newUri = noteA.uri.resolve('../note-a.md');
|
||||
// wait for workspace files to be added to graph (because of graph debounced update)
|
||||
// TODO this should be replaced by either a force-refresh command or by Foam updating immediately in test mode
|
||||
await wait(600);
|
||||
await renameFile(noteA.uri, newUri);
|
||||
|
||||
await waitForExpect(async () => {
|
||||
expect(doc.getText().trim()).toEqual(
|
||||
`Link to [note](../note-a.md) from note B.`
|
||||
);
|
||||
});
|
||||
});
|
||||
});
|
||||
});
|
||||
108
packages/foam-vscode/src/features/refactor.ts
Normal file
@@ -0,0 +1,108 @@
|
||||
import * as vscode from 'vscode';
|
||||
import { Foam } from '../core/model/foam';
|
||||
import { MarkdownLink } from '../core/services/markdown-link';
|
||||
import { Logger } from '../core/utils/log';
|
||||
import { isAbsolute } from '../core/utils/path';
|
||||
import { getFoamVsCodeConfig } from '../services/config';
|
||||
import { FoamFeature } from '../types';
|
||||
import { fromVsCodeUri, toVsCodeRange, toVsCodeUri } from '../utils/vsc-utils';
|
||||
|
||||
const feature: FoamFeature = {
|
||||
activate: async (
|
||||
context: vscode.ExtensionContext,
|
||||
foamPromise: Promise<Foam>
|
||||
) => {
|
||||
const foam = await foamPromise;
|
||||
|
||||
context.subscriptions.push(
|
||||
vscode.workspace.onWillRenameFiles(async e => {
|
||||
if (!getFoamVsCodeConfig<boolean>('links.sync.enable')) {
|
||||
return;
|
||||
}
|
||||
const renameEdits = new vscode.WorkspaceEdit();
|
||||
e.files.forEach(({ oldUri, newUri }) => {
|
||||
const connections = foam.graph.getBacklinks(fromVsCodeUri(oldUri));
|
||||
connections.forEach(async connection => {
|
||||
const { target } = MarkdownLink.analyzeLink(connection.link);
|
||||
switch (connection.link.type) {
|
||||
case 'wikilink': {
|
||||
const identifier = foam.workspace.getIdentifier(
|
||||
fromVsCodeUri(newUri),
|
||||
[fromVsCodeUri(oldUri)]
|
||||
);
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(
|
||||
connection.link,
|
||||
{ target: identifier }
|
||||
);
|
||||
renameEdits.replace(
|
||||
toVsCodeUri(connection.source),
|
||||
toVsCodeRange(edit.selection),
|
||||
edit.newText
|
||||
);
|
||||
break;
|
||||
}
|
||||
case 'link': {
|
||||
const path = isAbsolute(target)
|
||||
? '/' + vscode.workspace.asRelativePath(newUri)
|
||||
: fromVsCodeUri(newUri).relativeTo(
|
||||
connection.source.getDirectory()
|
||||
).path;
|
||||
const edit = MarkdownLink.createUpdateLinkEdit(
|
||||
connection.link,
|
||||
{ target: path }
|
||||
);
|
||||
renameEdits.replace(
|
||||
toVsCodeUri(connection.source),
|
||||
toVsCodeRange(edit.selection),
|
||||
edit.newText
|
||||
);
|
||||
break;
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
try {
|
||||
if (renameEdits.size > 0) {
|
||||
// We break the update by file because applying it at once was causing
|
||||
// dirty state and editors not always saving or closing
|
||||
for (const renameEditForUri of renameEdits.entries()) {
|
||||
const [uri, edits] = renameEditForUri;
|
||||
const fileEdits = new vscode.WorkspaceEdit();
|
||||
fileEdits.set(uri, edits);
|
||||
await vscode.workspace.applyEdit(fileEdits);
|
||||
const editor = await vscode.workspace.openTextDocument(uri);
|
||||
// Because the save happens within 50ms of opening the doc, it will then be closed
|
||||
editor.save();
|
||||
}
|
||||
|
||||
// Reporting
|
||||
const nUpdates = renameEdits.entries().reduce((acc, entry) => {
|
||||
return (acc += entry[1].length);
|
||||
}, 0);
|
||||
const links = nUpdates > 1 ? 'links' : 'link';
|
||||
const nFiles = renameEdits.size;
|
||||
const files = nFiles > 1 ? 'files' : 'file';
|
||||
Logger.info(
|
||||
`Updated links in the following files:`,
|
||||
...renameEdits
|
||||
.entries()
|
||||
.map(e => vscode.workspace.asRelativePath(e[0]))
|
||||
);
|
||||
vscode.window.showInformationMessage(
|
||||
`Updated ${nUpdates} ${links} across ${nFiles} ${files}.`
|
||||
);
|
||||
}
|
||||
} catch (e) {
|
||||
Logger.error('Error while updating references to file', e);
|
||||
vscode.window.showErrorMessage(
|
||||
`Foam couldn't update the links to ${vscode.workspace.asRelativePath(
|
||||
e.newUri
|
||||
)}. Check the logs for error details.`
|
||||
);
|
||||
}
|
||||
})
|
||||
);
|
||||
},
|
||||
};
|
||||
export default feature;
|
||||
@@ -79,4 +79,18 @@ describe('Tag Completion', () => {
|
||||
expect(foamTags.tags.get('primary')).toBeTruthy();
|
||||
expect(tags.items.length).toEqual(3);
|
||||
});
|
||||
|
||||
it('should not provide suggestions when inside a wikilink', async () => {
|
||||
const { uri } = await createFile('[[#prim');
|
||||
const { doc } = await showInEditor(uri);
|
||||
const provider = new TagCompletionProvider(foamTags);
|
||||
|
||||
const tags = await provider.provideCompletionItems(
|
||||
doc,
|
||||
new vscode.Position(0, 7)
|
||||
);
|
||||
|
||||
expect(foamTags.tags.get('primary')).toBeTruthy();
|
||||
expect(tags).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
@@ -3,6 +3,9 @@ import { Foam } from '../core/model/foam';
|
||||
import { FoamTags } from '../core/model/tags';
|
||||
import { FoamFeature } from '../types';
|
||||
import { mdDocSelector } from '../utils';
|
||||
import { SECTION_REGEX } from './link-completion';
|
||||
|
||||
export const TAG_REGEX = /#(.*)/;
|
||||
|
||||
const feature: FoamFeature = {
|
||||
activate: async (
|
||||
@@ -32,7 +35,8 @@ export class TagCompletionProvider
|
||||
.lineAt(position)
|
||||
.text.substr(0, position.character);
|
||||
|
||||
const requiresAutocomplete = cursorPrefix.match(/#(.*)/);
|
||||
const requiresAutocomplete =
|
||||
cursorPrefix.match(TAG_REGEX) && !cursorPrefix.match(SECTION_REGEX);
|
||||
|
||||
if (!requiresAutocomplete) {
|
||||
return null;
|
||||
|
||||
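The new guard above keeps tag completion out of wikilink sections; both regexes are the constants defined in these files, and the matches are easy to check by hand:

// TAG_REGEX and SECTION_REGEX are the constants imported/defined above.
TAG_REGEX.test('Lorem #prim'); // true  → tag suggestions
SECTION_REGEX.test('Lorem #prim'); // false
TAG_REGEX.test('[[note#prim'); // true, but…
SECTION_REGEX.test('[[note#prim'); // true  → provider returns null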
@@ -1,16 +1,17 @@
-import { createTestNote } from '../test/test-utils';
+import { createTestNote, readFileFromFs } from '../test/test-utils';
 import { cleanWorkspace, closeEditors } from '../test/test-utils-vscode';
 import { TagItem, TagReference, TagsProvider } from './tags-tree-view';
 import { bootstrap, Foam } from '../core/model/foam';
-import { MarkdownResourceProvider } from '../core/markdown-provider';
+import { MarkdownResourceProvider } from '../core/services/markdown-provider';
 import { FileDataStore, Matcher } from '../core/services/datastore';
 
 describe('Tags tree panel', () => {
   let _foam: Foam;
   let provider: TagsProvider;
 
+  const dataStore = new FileDataStore(readFileFromFs);
   const matcher = new Matcher([]);
-  const mdProvider = new MarkdownResourceProvider(matcher);
+  const mdProvider = new MarkdownResourceProvider(matcher, dataStore);
 
   beforeAll(async () => {
     await cleanWorkspace();
@@ -22,7 +23,7 @@ describe('Tags tree panel', () => {
   });
 
   beforeEach(async () => {
-    _foam = await bootstrap(matcher, new FileDataStore(), [mdProvider]);
+    _foam = await bootstrap(matcher, dataStore, [mdProvider]);
     provider = new TagsProvider(_foam, _foam.workspace);
     await closeEditors();
   });
@@ -62,6 +63,7 @@ describe('Tags tree panel', () => {
 
     childTreeItems.forEach(child => {
       if (child instanceof TagItem) {
+        // eslint-disable-next-line jest/no-conditional-expect
         expect(child.title).toEqual('child');
       }
     });
@@ -94,7 +96,9 @@ describe('Tags tree panel', () => {
 
     childTreeItems.forEach(child => {
       if (child instanceof TagItem) {
+        // eslint-disable-next-line jest/no-conditional-expect
         expect(['child', 'subchild']).toContain(child.title);
+        // eslint-disable-next-line jest/no-conditional-expect
         expect(child.title).not.toEqual('parent');
       }
     });
@@ -21,9 +21,7 @@ const feature: FoamFeature = {
         provider
       )
     );
-    foam.workspace.onDidUpdate(() => provider.refresh());
-    foam.workspace.onDidAdd(() => provider.refresh());
-    foam.workspace.onDidDelete(() => provider.refresh());
+    foam.tags.onDidUpdate(() => provider.refresh());
   },
 };
 
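The hunk above collapses three workspace-level subscriptions into a single `foam.tags.onDidUpdate` subscription, presumably because the tags model itself already tracks workspace changes, so one event source is enough to keep the view current. The refresh itself is the standard VS Code tree-view pattern; a generic sketch (not Foam's actual TagsProvider):

```ts
import * as vscode from 'vscode';

// Generic sketch of the refresh wiring, not Foam's TagsProvider.
class SimpleTreeProvider implements vscode.TreeDataProvider<string> {
  private readonly _onDidChangeTreeData =
    new vscode.EventEmitter<string | undefined | null | void>();
  readonly onDidChangeTreeData = this._onDidChangeTreeData.event;

  constructor(private items: string[]) {}

  // Firing the event makes VS Code re-query getChildren/getTreeItem.
  refresh(): void {
    this._onDidChangeTreeData.fire();
  }

  getTreeItem(element: string): vscode.TreeItem {
    return new vscode.TreeItem(element);
  }

  getChildren(): string[] {
    return this.items;
  }
}

// In activate(), a single model-level event is enough to keep the view fresh:
//   model.onDidUpdate(() => provider.refresh());
```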
@@ -166,16 +164,14 @@ export class TagReference extends vscode.TreeItem {
   public readonly title: string;
   constructor(public readonly tag: Tag, public readonly note: Resource) {
     super(note.title, vscode.TreeItemCollapsibleState.None);
+    const uri = toVsCodeUri(note.uri);
     this.title = note.title;
-    this.description = note.uri.path.replace(
-      vscode.workspace.getWorkspaceFolder(toVsCodeUri(note.uri))?.uri.path,
-      ''
-    );
+    this.description = vscode.workspace.asRelativePath(uri);
     this.tooltip = undefined;
     this.command = {
       command: 'vscode.open',
       arguments: [
-        note.uri,
+        uri,
         {
           preview: true,
           selection: toVsCodeRange(tag.range),
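The TagReference change above replaces a manual prefix-strip of the workspace folder path with `vscode.workspace.asRelativePath`, which returns a workspace-relative path when the URI sits inside a workspace folder and the original path otherwise. A small sketch of the same idea in an extension context (hypothetical helper, not Foam's Resource/TreeItem wiring):

```ts
import * as vscode from 'vscode';

// Hypothetical helper: derive a tree-item label and description for a note URI.
function describeNote(noteUri: vscode.Uri): { label: string; description: string } {
  return {
    label: noteUri.path.split('/').pop() ?? noteUri.path,
    // Workspace-relative when inside a workspace folder, full path otherwise.
    description: vscode.workspace.asRelativePath(noteUri),
  };
}
```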
@@ -3,52 +3,64 @@ import { FoamFeature } from '../types';
 import { URI } from '../core/model/uri';
 import { fromVsCodeUri, toVsCodeUri } from '../utils/vsc-utils';
 import { NoteFactory } from '../services/templates';
+import { Foam } from '../core/model/foam';
 
 export const OPEN_COMMAND = {
   command: 'foam-vscode.open-resource',
   title: 'Foam: Open Resource',
 
-  execute: async (params: { uri: URI }) => {
-    const { uri } = params;
-    switch (uri.scheme) {
-      case 'file':
-        return vscode.commands.executeCommand('vscode.open', toVsCodeUri(uri));
-
-      case 'placeholder':
-        const title = uri.path.split('/').slice(-1)[0];
-
-        const basedir =
-          vscode.workspace.workspaceFolders.length > 0
-            ? fromVsCodeUri(vscode.workspace.workspaceFolders[0].uri)
-            : fromVsCodeUri(vscode.window.activeTextEditor?.document.uri)
-            ? URI.getDir(
-                fromVsCodeUri(vscode.window.activeTextEditor!.document.uri)
-              )
-            : undefined;
-
-        if (basedir === undefined) {
-          return;
-        }
-
-        const target = URI.createResourceUriFromPlaceholder(basedir, uri);
-
-        await NoteFactory.createForPlaceholderWikilink(title, target);
-        return;
-    }
-  },
-
   asURI: (uri: URI) =>
     vscode.Uri.parse(`command:${OPEN_COMMAND.command}`).with({
-      query: encodeURIComponent(JSON.stringify({ uri: URI.create(uri) })),
+      query: encodeURIComponent(JSON.stringify({ uri })),
     }),
 };
 
 const feature: FoamFeature = {
-  activate: (context: vscode.ExtensionContext) => {
+  activate: (context: vscode.ExtensionContext, foamPromise: Promise<Foam>) => {
     context.subscriptions.push(
       vscode.commands.registerCommand(
         OPEN_COMMAND.command,
-        OPEN_COMMAND.execute
+        async (params: { uri: URI }) => {
+          const uri = new URI(params.uri);
+          switch (uri.scheme) {
+            case 'file': {
+              const targetUri =
+                uri.path === vscode.window.activeTextEditor?.document.uri.path
+                  ? vscode.window.activeTextEditor?.document.uri
+                  : toVsCodeUri(uri.asPlain());
+              const targetEditor = vscode.window.visibleTextEditors.find(
+                ed => targetUri.path === ed.document.uri.path
+              );
+              const column = targetEditor?.viewColumn;
+              return vscode.window.showTextDocument(targetUri, {
+                viewColumn: column,
+              });
+            }
+            case 'placeholder': {
+              const title = uri.getName();
+              if (uri.isAbsolute()) {
+                return NoteFactory.createForPlaceholderWikilink(
+                  title,
+                  URI.file(uri.path)
+                );
+              }
+              const basedir =
+                vscode.workspace.workspaceFolders.length > 0
+                  ? vscode.workspace.workspaceFolders[0].uri
+                  : vscode.window.activeTextEditor?.document.uri
+                  ? vscode.window.activeTextEditor!.document.uri
+                  : undefined;
+              if (basedir === undefined) {
+                return;
+              }
+              const target = fromVsCodeUri(basedir)
+                .resolve(uri, true)
+                .changeExtension('', '.md');
+              await NoteFactory.createForPlaceholderWikilink(title, target);
+              return;
+            }
+          }
+        }
       )
     );
   },
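In the rewritten `open-resource` handler above, a `placeholder:` URI (a wikilink target that does not exist yet) is resolved against a base directory, either the first workspace folder or the active editor's location, and given an `.md` extension before a note is created for it. As a rough, framework-free illustration of that resolution step (plain Node path logic, not Foam's URI class with its `resolve`/`changeExtension` methods):

```ts
import * as path from 'path';

// Illustration only: mimics the observable result of resolving a placeholder
// wikilink target into a concrete markdown file path.
function resolvePlaceholder(basedir: string, placeholder: string): string {
  // An absolute placeholder is used as-is; a relative one is resolved against
  // the base directory (first workspace folder or active editor's folder).
  const resolved = path.isAbsolute(placeholder)
    ? placeholder
    : path.join(basedir, placeholder);
  // Ensure the target ends with `.md` so a note file can be created for it.
  return resolved.endsWith('.md') ? resolved : `${resolved}.md`;
}

console.log(resolvePlaceholder('/workspace/notes', 'projects/new-idea'));
// -> /workspace/notes/projects/new-idea.md
```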
Some files were not shown because too many files have changed in this diff.