Compare commits
12 Commits: b0442e5685 ... 1e094eeea0

SHA1:
1e094eeea0
3adb745576
8f7b785664
c7aacfe756
b71c253942
7d3aaa143a
6619f1f0a9
bc59b6f575
c262418f88
2dd3a5904b
ff1da716e9
7c6d634287
Cargo.lock (generated, 959 lines changed)
File diff suppressed because it is too large.
@@ -1,5 +1,6 @@
 [workspace]
 members = [
     "reid",
-    "reid-llvm-lib"
+    "reid-llvm-lib",
+    "reid-lsp"
 ]
README.md (21 lines changed)
@@ -76,12 +76,12 @@ Big features that I want later but are not necessary:
 - LSP implementation

 Smaller features:
-- ~~Hex-numbers~~
-- ~~Bitwise operations~~
-- ~~Easier way to initialize arrays with a single value~~
-- ~~Void-returns (`return;` for void-returning functions)~~
-- ~~Only include standard library at all if it is imported~~
-- Lexical scopes for Debug Information
+- ~~Hex-numbers~~ (DONE)
+- ~~Bitwise operations~~ (DONE)
+- ~~Easier way to initialize arrays with a single value~~ (DONE)
+- ~~Void-returns (`return;` for void-returning functions)~~ (DONE)
+- ~~Only include standard library at all if it is imported~~ (DONE)
+- ~~Lexical scopes for Debug Information~~ (DONE)

 ### Why "Reid"

@@ -157,10 +157,6 @@ cmake llvm -B build -DCMAKE_BUILD_TYPE=MinSizeRel -DLLVM_ENABLE_ASSERTIONS=ON -D
 ninja -j23
 ```

-*Also Note:* Building LLVM with `Ninja` was not successful for me, but this
-method was. Ninja may be successful with you, to try it, add `-G Ninja` to the
-`cmake`-command, and instead of `make` run `ninja install`.
-
 ### Building this crate itself

 Assuming `llvm-project` from the previous step was at
@@ -170,6 +166,5 @@ Assuming `llvm-project` from the previous step was at
 LLVM_SYS_201_PREFIX=/path/llvm-project/build cargo build
 ```

-## In conclusion
-Good luck! It took me a good 10 hours to figure this out for myself, I sure hope
-these instructions help both myself and someone else in the future!
+Alternatively, assuming you have LLVM 20.1 or newer installed, you may omit
+the environment variable entirely and use dynamic linking instead.
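Taken together, the two build modes the README now describes would look roughly like this; the path below is the placeholder used in the README, not a verified location:

```sh
# Static: point llvm-sys at a locally built LLVM 20.1 (placeholder path from the README)
LLVM_SYS_201_PREFIX=/path/llvm-project/build cargo build

# Dynamic: with LLVM 20.1 or newer installed system-wide, the prefix can be omitted
cargo build
```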
examples/mutable_inner.reid (new file, 25 lines)
struct Game {}

impl Game {
    pub fn run_frame(&mut self) {}
}

struct Platform {
    game: Game,
}

impl Platform {
    pub fn new() -> Platform {
        return Platform { game: Game {} };
    }

    pub fn run_frame(&mut self) {
        *self.game.run_frame();
    }
}

fn main() -> i32 {
    let mut platform = Platform::new();
    platform.run_frame();
    return 0;
}
@@ -16,7 +16,7 @@ BINARY="$(echo $1 | cut -d'.' -f1)"".out"

 echo $1

 cargo run --example cli $@ && \
 ./$BINARY ; echo "Return value: ""$?"

 ## Command from: clang -v hello.o -o test

@@ -52,7 +52,5 @@ fn main() {

     else_b.terminate(TerminatorKind::Ret(add)).unwrap();

-    dbg!(&context);
-
     context.compile(None, Vec::new());
 }

@@ -223,7 +223,6 @@ impl Builder {
         unsafe {
             let mut modules = self.modules.borrow_mut();
             let module = modules.get_unchecked_mut(module.0);
-            dbg!(module.functions.iter().map(|f| f.data.name.clone()).collect::<Vec<_>>());
             module.functions.iter().find(|f| f.data.name == *name).map(|f| f.value)
         }
     }

@@ -123,8 +123,6 @@ impl CompiledModule {
         let llvm_ir =
             from_cstring(LLVMPrintModuleToString(self.module_ref)).expect("Unable to print LLVM IR to string");

-        println!("{}", llvm_ir);
-
         let mut err = ErrorMessageHolder::null();
         LLVMVerifyModule(
             self.module_ref,
reid-lsp/.gitignore (new file, 6 lines, vendored)
.vscode
node_modules
dist
package-lock.json
pnpm-lock.yaml
tsconfig.tsbuildinfo

reid-lsp/.npmrc (new file, 1 line)
enable-pre-post-scripts = true

reid-lsp/.vscode-test.mjs (new file, 5 lines)
import { defineConfig } from '@vscode/test-cli';

export default defineConfig({
  files: 'out/test/**/*.test.js',
});

reid-lsp/.vscodeignore (new file, 15 lines)
.vscode/**
.vscode-test/**
out/**
node_modules/**
src/**
client/**
.gitignore
.yarnrc
webpack.config.js
vsc-extension-quickstart.md
**/tsconfig.json
**/eslint.config.mjs
**/*.map
**/*.ts
**/.vscode-test.*

reid-lsp/CHANGELOG.md (new file, 9 lines)
# Change Log

All notable changes to the "reid-lsp" extension will be documented in this file.

Check [Keep a Changelog](http://keepachangelog.com/) for recommendations on how to structure this file.

## [Unreleased]

- Initial release

reid-lsp/Cargo.toml (new file, 11 lines)
[package]
name = "reid-lsp"
version = "0.1.0"
edition = "2024"

[dependencies]
socket = "0.0.7"
tokio = { version = "1.47.0", features = ["full"] }
tower-lsp = "0.20.0"
reid = { path = "../reid", version = "1.0.0-beta.2", registry="gitea-teascade", features=[] }
dashmap = "6.1.0"
reid-lsp/README.md (new file, 71 lines)
# reid-lsp README

This is the README for your extension "reid-lsp". After writing up a brief description, we recommend including the following sections.

## Features

Describe specific features of your extension including screenshots of your extension in action. Image paths are relative to this README file.

For example if there is an image subfolder under your extension project workspace:

\!\[feature X\]\(images/feature-x.png\)

> Tip: Many popular extensions utilize animations. This is an excellent way to show off your extension! We recommend short, focused animations that are easy to follow.

## Requirements

If you have any requirements or dependencies, add a section describing those and how to install and configure them.

## Extension Settings

Include if your extension adds any VS Code settings through the `contributes.configuration` extension point.

For example:

This extension contributes the following settings:

* `myExtension.enable`: Enable/disable this extension.
* `myExtension.thing`: Set to `blah` to do something.

## Known Issues

Calling out known issues can help limit users opening duplicate issues against your extension.

## Release Notes

Users appreciate release notes as you update your extension.

### 1.0.0

Initial release of ...

### 1.0.1

Fixed issue #.

### 1.1.0

Added features X, Y, and Z.

---

## Following extension guidelines

Ensure that you've read through the extensions guidelines and follow the best practices for creating your extension.

* [Extension Guidelines](https://code.visualstudio.com/api/references/extension-guidelines)

## Working with Markdown

You can author your README using Visual Studio Code. Here are some useful editor keyboard shortcuts:

* Split the editor (`Cmd+\` on macOS or `Ctrl+\` on Windows and Linux).
* Toggle preview (`Shift+Cmd+V` on macOS or `Shift+Ctrl+V` on Windows and Linux).
* Press `Ctrl+Space` (Windows, Linux, macOS) to see a list of Markdown snippets.

## For more information

* [Visual Studio Code's Markdown Support](http://code.visualstudio.com/docs/languages/markdown)
* [Markdown Syntax Reference](https://help.github.com/articles/markdown-basics/)

**Enjoy!**
reid-lsp/client/package.json (new file, 27 lines)
{
  "name": "reid-lsp",
  "displayName": "Reid Language Server",
  "description": "Language Server Extension for Reid",
  "version": "0.0.1",
  "engines": {
    "vscode": "^1.102.0"
  },
  "main": "../out/extension.js",
  "devDependencies": {
    "@types/mocha": "^10.0.10",
    "@types/node": "20.x",
    "@types/vscode": "^1.102.0",
    "@typescript-eslint/eslint-plugin": "^8.31.1",
    "@typescript-eslint/parser": "^8.31.1",
    "@vscode/test-cli": "^0.0.11",
    "@vscode/test-electron": "^2.5.2",
    "eslint": "^9.25.1",
    "ts-loader": "^9.5.2",
    "typescript": "^5.8.3",
    "webpack": "^5.99.7",
    "webpack-cli": "^6.0.1"
  },
  "dependencies": {
    "vscode-languageclient": "^9.0.1"
  }
}
reid-lsp/client/src/extension.ts (new file, 72 lines)
/* --------------------------------------------------------------------------------------------
 * Copyright (c) Microsoft Corporation. All rights reserved.
 * Licensed under the MIT License. See License.txt in the project root for license information.
 * ------------------------------------------------------------------------------------------ */

import * as path from 'path';
import { workspace, ExtensionContext, window } from 'vscode';

import {
  Executable,
  LanguageClient,
  LanguageClientOptions,
  ServerOptions,
  TransportKind
} from 'vscode-languageclient/node';

let client: LanguageClient;

export function activate(context: ExtensionContext) {
  const traceOutputChannel = window.createOutputChannel("Reid Language Server trace");
  const command = process.env.SERVER_PATH || "reid-language-server";

  const run: Executable = {
    command,
    options: {
      env: {
        ...process.env,
        RUST_LOG: "debug",
      }
    }
  };

  const serverOptions: ServerOptions = {
    run,
    debug: run,
  };

  // Options to control the language client
  const clientOptions: LanguageClientOptions = {
    // Register the server for plain text documents
    documentSelector: [{ scheme: 'file', language: 'reid' }],
    synchronize: {
      // Notify the server about file changes to '.clientrc files contained in the workspace
      fileEvents: workspace.createFileSystemWatcher('**/.clientrc')
    }
  };

  // Create the language client and start the client.
  client = new LanguageClient(
    'reid-lsp',
    'Reid Language Server',
    serverOptions,
    clientOptions
  );

  client.info("hello");

  workspace.onDidOpenTextDocument((e) => {
  });

  // Start the client. This will also launch the server
  client.start();
}

export function deactivate(): Thenable<void> | undefined {
  if (!client) {
    return undefined;
  }
  return client.stop();
}
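For context, the client above resolves the server binary from the `SERVER_PATH` environment variable and otherwise falls back to a `reid-language-server` executable on the PATH. A hedged sketch of pointing it at a locally built server during development (binary path and launch method are assumptions, not part of this change):

```sh
# Build the tower-lsp server (package name "reid-lsp" per the Cargo.toml above)
cargo build -p reid-lsp

# Launch VS Code from the same shell so the extension host inherits SERVER_PATH
SERVER_PATH="$PWD/target/debug/reid-lsp" code .
```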
reid-lsp/client/src/test/extension.test.ts (new file, 15 lines)
import * as assert from 'assert';

// You can import and use all API from the 'vscode' module
// as well as import your extension to test it
import * as vscode from 'vscode';
// import * as myExtension from '../../extension';

suite('Extension Test Suite', () => {
  vscode.window.showInformationMessage('Start all tests.');

  test('Sample test', () => {
    assert.strictEqual(-1, [1, 2, 3].indexOf(5));
    assert.strictEqual(-1, [1, 2, 3].indexOf(0));
  });
});

reid-lsp/client/tsconfig.json (new file, 24 lines)
{
  "compilerOptions": {
    "module": "Node16",
    "target": "ES2022",
    "lib": [
      "ES2022"
    ],
    "sourceMap": true,
    "rootDir": "src",
    "outDir": "../dist",
    "strict": true /* enable all strict type-checking options */
    /* Additional Checks */
    // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
    // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
    // "noUnusedParameters": true, /* Report errors on unused parameters. */
  },
  "include": [
    "src"
  ],
  "exclude": [
    "node_modules",
    ".vscode-test"
  ]
}

reid-lsp/eslint.config.mjs (new file, 28 lines)
import typescriptEslint from "@typescript-eslint/eslint-plugin";
import tsParser from "@typescript-eslint/parser";

export default [{
  files: ["**/*.ts"],
}, {
  plugins: {
    "@typescript-eslint": typescriptEslint,
  },

  languageOptions: {
    parser: tsParser,
    ecmaVersion: 2022,
    sourceType: "module",
  },

  rules: {
    "@typescript-eslint/naming-convention": ["warn", {
      selector: "import",
      format: ["camelCase", "PascalCase"],
    }],

    curly: "warn",
    eqeqeq: "warn",
    "no-throw-literal": "warn",
    semi: "warn",
  },
}];
reid-lsp/package.json (new file, 76 lines)
{
  "name": "reid-lsp",
  "displayName": "Reid Language Server",
  "description": "Language Server Extension for Reid",
  "version": "0.0.1",
  "engines": {
    "vscode": "^1.102.0"
  },
  "categories": [
    "Other"
  ],
  "activationEvents": [
    "onLanguage:reid"
  ],
  "main": "./dist/extension.js",
  "contributes": {
    "languages": [
      {
        "id": "reid",
        "extensions": [
          ".reid"
        ]
      }
    ],
    "configuration": {
      "type": "object",
      "title": "reid-language-server",
      "properties": {
        "nrs-language-server.trace.server": {
          "type": "string",
          "scope": "window",
          "enum": [
            "off",
            "messages",
            "verbose"
          ],
          "enumDescriptions": [
            "No traces",
            "Error only",
            "Full log"
          ],
          "default": "off",
          "description": "Traces the communication between VS Code and the language server."
        }
      }
    }
  },
  "scripts": {
    "vscode:prepublish": "pnpm run package",
    "compile": "webpack",
    "watch": "webpack --watch",
    "package": "webpack --mode production --devtool hidden-source-map",
    "compile-tests": "tsc -p . --outDir out",
    "watch-tests": "tsc -p . -w --outDir out",
    "pretest": "pnpm run compile-tests && pnpm run compile && pnpm run lint",
    "lint": "eslint src",
    "test": "vscode-test"
  },
  "devDependencies": {
    "@types/mocha": "^10.0.10",
    "@types/node": "20.x",
    "@types/vscode": "^1.102.0",
    "@typescript-eslint/eslint-plugin": "^8.31.1",
    "@typescript-eslint/parser": "^8.31.1",
    "@vscode/test-cli": "^0.0.11",
    "@vscode/test-electron": "^2.5.2",
    "eslint": "^9.25.1",
    "ts-loader": "^9.5.2",
    "typescript": "^5.8.3",
    "webpack": "^5.99.7",
    "webpack-cli": "^6.0.1"
  },
  "dependencies": {
    "vscode-languageclient": "^9.0.1"
  }
}
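The scripts above assume a pnpm-based workflow (`vscode:prepublish` and `pretest` invoke `pnpm run`, and the `.npmrc` enables pre/post scripts). A typical local sequence, sketched from those script definitions:

```sh
pnpm install
pnpm run compile   # bundle the client with webpack
pnpm run lint      # eslint over src
pnpm run test      # pretest compiles and lints, then vscode-test runs the suite
```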
reid-lsp/src/main.rs (new file, 442 lines)
use std::collections::HashMap;
use std::path::PathBuf;

use dashmap::DashMap;
use reid::ast::lexer::{FullToken, Position};
use reid::error_raporting::{ErrorModules, ReidError};
use reid::mir::{
    self, Context, FunctionCall, FunctionDefinition, FunctionParam, IfExpression, SourceModuleId, StructType, TypeKind,
    WhileStatement,
};
use reid::{compile_module, parse_module, perform_all_passes};
use tower_lsp::lsp_types::{
    self, CompletionItem, CompletionOptions, CompletionParams, CompletionResponse, Diagnostic, DiagnosticSeverity,
    DidChangeTextDocumentParams, DidOpenTextDocumentParams, Hover, HoverContents, HoverParams, HoverProviderCapability,
    InitializeParams, InitializeResult, InitializedParams, MarkedString, MessageType, OneOf, Range, ServerCapabilities,
    TextDocumentItem, TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
    WorkspaceFoldersServerCapabilities, WorkspaceServerCapabilities,
};
use tower_lsp::{Client, LanguageServer, LspService, Server, jsonrpc};

#[derive(Debug)]
struct Backend {
    client: Client,
    tokens: DashMap<String, Vec<FullToken>>,
    ast: DashMap<String, reid::ast::Module>,
    types: DashMap<String, DashMap<FullToken, Option<TypeKind>>>,
}

#[tower_lsp::async_trait]
impl LanguageServer for Backend {
    async fn initialize(&self, _: InitializeParams) -> jsonrpc::Result<InitializeResult> {
        self.client
            .log_message(MessageType::INFO, "Initializing Reid Language Server")
            .await;

        let sync = TextDocumentSyncOptions {
            open_close: Some(true),
            change: Some(TextDocumentSyncKind::FULL),
            will_save: None,
            will_save_wait_until: None,
            save: None,
        };
        Ok(InitializeResult {
            capabilities: ServerCapabilities {
                hover_provider: Some(HoverProviderCapability::Simple(true)),
                completion_provider: Some(CompletionOptions { ..Default::default() }),
                text_document_sync: Some(TextDocumentSyncCapability::Options(sync)),
                workspace: Some(WorkspaceServerCapabilities {
                    workspace_folders: Some(WorkspaceFoldersServerCapabilities {
                        supported: Some(true),
                        change_notifications: Some(OneOf::Left(true)),
                    }),
                    file_operations: None,
                }),
                ..Default::default()
            },
            ..Default::default()
        })
    }

    async fn initialized(&self, _: InitializedParams) {
        self.client
            .log_message(MessageType::INFO, "Reid Language Server initialized hello!")
            .await;
    }

    async fn shutdown(&self) -> jsonrpc::Result<()> {
        Ok(())
    }

    async fn completion(&self, params: CompletionParams) -> jsonrpc::Result<Option<CompletionResponse>> {
        Ok(Some(CompletionResponse::Array(vec![
            CompletionItem::new_simple("Hello".to_string(), "Some detail".to_string()),
            CompletionItem::new_simple("Bye".to_string(), "More detail".to_string()),
        ])))
    }

    async fn hover(&self, params: HoverParams) -> jsonrpc::Result<Option<Hover>> {
        let path = PathBuf::from(params.text_document_position_params.text_document.uri.path());
        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();
        let tokens = self.tokens.get(&file_name);
        let position = params.text_document_position_params.position;

        let token = if let Some(tokens) = &tokens {
            tokens.iter().find(|tok| {
                tok.position.1 == position.line + 1
                    && (tok.position.0 <= position.character + 1
                        && (tok.position.0 + tok.token.len() as u32) > position.character + 1)
            })
        } else {
            None
        };

        let ty = if let Some(token) = token {
            if let Some(possible_ty) = self.types.get(&file_name).unwrap().get(token) {
                if let Some(ty) = possible_ty.clone() {
                    format!("{}", ty)
                } else {
                    String::from("no type")
                }
            } else {
                String::from("no token")
            }
        } else {
            String::from("no token")
        };

        Ok(Some(Hover {
            contents: HoverContents::Scalar(MarkedString::String(format!("{}", ty))),
            range: None,
        }))
    }

    async fn did_open(&self, params: DidOpenTextDocumentParams) {
        self.recompile(TextDocumentItem {
            uri: params.text_document.uri,
            language_id: params.text_document.language_id,
            version: params.text_document.version,
            text: params.text_document.text,
        })
        .await
    }

    async fn did_change(&self, params: DidChangeTextDocumentParams) {
        self.recompile(TextDocumentItem {
            text: params.content_changes[0].text.clone(),
            uri: params.text_document.uri,
            version: params.text_document.version,
            language_id: String::new(),
        })
        .await
    }
}

impl Backend {
    async fn recompile(&self, params: TextDocumentItem) {
        let path = PathBuf::from(params.uri.clone().path());
        let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();

        let mut map = Default::default();
        let parse_res = parse(&params.text, path.clone(), &mut map);
        let (tokens, result) = match parse_res {
            Ok((module_id, tokens)) => (tokens.clone(), compile(module_id, tokens, path, &mut map)),
            Err(e) => (Vec::new(), Err(e)),
        };

        let mut diagnostics = Vec::new();
        match result {
            Ok(Some(result)) => {
                self.tokens.insert(file_name.clone(), result.tokens);
                self.types.insert(file_name.clone(), result.types);
            }
            Ok(_) => {}
            Err(mut reid_error) => {
                reid_error.errors.dedup();
                for error in reid_error.errors {
                    let meta = error.get_meta();
                    let positions = meta
                        .range
                        .into_position(&tokens)
                        .unwrap_or((Position(0, 0), Position(0, 0)));
                    self.client.log_message(MessageType::INFO, format!("{:?}", &meta)).await;
                    self.client
                        .log_message(MessageType::INFO, format!("{:?}", &positions))
                        .await;

                    diagnostics.push(Diagnostic {
                        range: Range {
                            start: lsp_types::Position {
                                line: ((positions.0.1 as i32) - 1).max(0) as u32,
                                character: ((positions.0.0 as i32) - 1).max(0) as u32,
                            },
                            end: lsp_types::Position {
                                line: ((positions.1.1 as i32) - 1).max(0) as u32,
                                character: ((positions.1.0 as i32) - 1).max(0) as u32,
                            },
                        },
                        severity: Some(DiagnosticSeverity::ERROR),
                        code: None,
                        code_description: None,
                        source: Some(error.get_type_str().to_owned()),
                        message: format!("{}", error),
                        related_information: None,
                        tags: None,
                        data: None,
                    });
                    self.client.log_message(MessageType::INFO, format!("{}", error)).await;
                }
            }
        }

        self.client
            .publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
            .await;
    }
}

struct CompileResult {
    tokens: Vec<FullToken>,
    types: DashMap<FullToken, Option<TypeKind>>,
}

fn parse(source: &str, path: PathBuf, map: &mut ErrorModules) -> Result<(SourceModuleId, Vec<FullToken>), ReidError> {
    let file_name = path.file_name().unwrap().to_str().unwrap().to_owned();

    Ok(parse_module(source, file_name.clone(), map)?)
}

fn compile(
    module_id: SourceModuleId,
    tokens: Vec<FullToken>,
    path: PathBuf,
    map: &mut ErrorModules,
) -> Result<Option<CompileResult>, ReidError> {
    let token_types = DashMap::new();

    let module = compile_module(module_id, tokens, map, Some(path.clone()), true)?;

    let module_id = module.module_id;
    let mut context = Context::from(vec![module], path.parent().unwrap().to_owned());
    perform_all_passes(&mut context, map)?;

    for module in context.modules.into_values() {
        if module.module_id != module_id {
            continue;
        }
        for (idx, token) in module.tokens.iter().enumerate() {
            token_types.insert(token.clone(), find_type_in_context(&module, idx));
        }

        return Ok(Some(CompileResult {
            tokens: module.tokens,
            types: token_types,
        }));
    }
    return Ok(None);
}

#[tokio::main]
async fn main() {
    let stdin = tokio::io::stdin();
    let stdout = tokio::io::stdout();

    let (service, socket) = LspService::new(|client| Backend {
        client,
        ast: DashMap::new(),
        tokens: DashMap::new(),
        types: DashMap::new(),
    });
    Server::new(stdin, stdout, socket).serve(service).await;
}

pub fn find_type_in_context(module: &mir::Module, token_idx: usize) -> Option<TypeKind> {
    for import in &module.imports {
        if import.1.contains(token_idx) {
            return None;
        }
    }
    for typedef in &module.typedefs {
        if !typedef.meta.contains(token_idx) {
            continue;
        }

        match &typedef.kind {
            mir::TypeDefinitionKind::Struct(StructType(fields)) => {
                for field in fields {
                    if field.2.contains(token_idx) {
                        return Some(field.1.clone());
                    }
                }
            }
        }
    }

    for function in &module.functions {
        if !(function.signature() + function.block_meta()).contains(token_idx) {
            continue;
        }

        for param in &function.parameters {
            if param.meta.contains(token_idx) {
                return Some(param.ty.clone());
            }
        }

        return match &function.kind {
            mir::FunctionDefinitionKind::Local(block, _) => find_type_in_block(&block, module.module_id, token_idx),
            mir::FunctionDefinitionKind::Extern(_) => None,
            mir::FunctionDefinitionKind::Intrinsic(_) => None,
        };
    }
    None
}

pub fn find_type_in_block(block: &mir::Block, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !block.meta.contains(token_idx) {
        return None;
    }

    for statement in &block.statements {
        if !statement.1.contains(token_idx) {
            continue;
        }
        match &statement.0 {
            mir::StmtKind::Let(named_variable_ref, _, expression) => {
                if named_variable_ref.2.contains(token_idx) {
                    return expression
                        .return_type(&Default::default(), module_id)
                        .ok()
                        .map(|(_, ty)| ty);
                } else {
                    return find_type_in_expr(&expression, module_id, token_idx);
                }
            }
            mir::StmtKind::Set(lhs, rhs) => {
                return find_type_in_expr(lhs, module_id, token_idx).or(find_type_in_expr(rhs, module_id, token_idx));
            }
            mir::StmtKind::Import(_) => {}
            mir::StmtKind::Expression(expression) => return find_type_in_expr(expression, module_id, token_idx),
            mir::StmtKind::While(WhileStatement { condition, block, .. }) => {
                return find_type_in_expr(condition, module_id, token_idx)
                    .or(find_type_in_block(block, module_id, token_idx));
            }
        }
    }

    if let Some((_, Some(return_exp))) = &block.return_expression {
        if let Some(ty) = find_type_in_expr(return_exp, module_id, token_idx) {
            return Some(ty);
        }
    }

    None
}

pub fn find_type_in_expr(expr: &mir::Expression, module_id: SourceModuleId, token_idx: usize) -> Option<TypeKind> {
    if !expr.1.contains(token_idx) {
        return None;
    }

    match &expr.0 {
        mir::ExprKind::Variable(named_variable_ref) => Some(named_variable_ref.0.clone()),
        mir::ExprKind::Indexed(value, type_kind, index_expr) => Some(
            find_type_in_expr(&value, module_id, token_idx)
                .or(find_type_in_expr(&index_expr, module_id, token_idx))
                .unwrap_or(type_kind.clone()),
        ),
        mir::ExprKind::Accessed(expression, type_kind, _, meta) => {
            if meta.contains(token_idx) {
                Some(type_kind.clone())
            } else {
                find_type_in_expr(&expression, module_id, token_idx)
            }
        }
        mir::ExprKind::Array(expressions) => {
            for expr in expressions {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            None
        }
        mir::ExprKind::Struct(name, items) => {
            for (_, expr, meta) in items {
                if meta.contains(token_idx) {
                    return expr.return_type(&Default::default(), module_id).map(|(_, t)| t).ok();
                }
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(TypeKind::CustomType(mir::CustomTypeKey(name.clone(), module_id)))
        }
        mir::ExprKind::Literal(literal) => Some(literal.as_type()),
        mir::ExprKind::BinOp(binary_operator, lhs, rhs, type_kind) => {
            if let Some(ty) = find_type_in_expr(lhs, module_id, token_idx) {
                return Some(ty);
            }
            if let Some(ty) = find_type_in_expr(rhs, module_id, token_idx) {
                return Some(ty);
            }
            Some(type_kind.clone())
        }
        mir::ExprKind::FunctionCall(FunctionCall {
            return_type,
            parameters,
            ..
        }) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::AssociatedFunctionCall(
            _,
            FunctionCall {
                return_type,
                parameters,
                ..
            },
        ) => {
            for expr in parameters {
                if let Some(ty) = find_type_in_expr(expr, module_id, token_idx) {
                    return Some(ty);
                }
            }
            Some(return_type.clone())
        }
        mir::ExprKind::If(IfExpression(cond, then_e, else_e)) => find_type_in_expr(&cond, module_id, token_idx)
            .or(find_type_in_expr(&then_e, module_id, token_idx))
            .or(else_e.clone().and_then(|e| find_type_in_expr(&e, module_id, token_idx))),
        mir::ExprKind::Block(block) => find_type_in_block(block, module_id, token_idx),
        mir::ExprKind::Borrow(expression, mutable) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            if let Ok(inner) = expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty) {
                Some(TypeKind::Borrow(Box::new(inner.clone()), *mutable))
            } else {
                None
            }
        }
        mir::ExprKind::Deref(expression) => {
            if let Some(ty) = find_type_in_expr(&expression, module_id, token_idx) {
                return Some(ty);
            }
            if let Ok(TypeKind::Borrow(inner, _)) =
                expression.return_type(&Default::default(), module_id).map(|(_, ty)| ty)
            {
                Some(*inner.clone())
            } else {
                None
            }
        }
        mir::ExprKind::CastTo(expression, type_kind) => {
            Some(find_type_in_expr(&expression, module_id, token_idx).unwrap_or(type_kind.clone()))
        }
        mir::ExprKind::GlobalRef(_, type_kind) => Some(type_kind.clone()),
    }
}
reid-lsp/tsconfig.json (new file, 29 lines)
{
  "compilerOptions": {
    "module": "Node16",
    "target": "ES2022",
    "lib": [
      "ES2022"
    ],
    "sourceMap": true,
    "rootDir": "src",
    "outDir": "out",
    "strict": true /* enable all strict type-checking options */
    /* Additional Checks */
    // "noImplicitReturns": true, /* Report error when not all code paths in function return a value. */
    // "noFallthroughCasesInSwitch": true, /* Report errors for fallthrough cases in switch statement. */
    // "noUnusedParameters": true, /* Report errors on unused parameters. */
  },
  "include": [
    "src"
  ],
  "exclude": [
    "node_modules",
    ".vscode-test"
  ],
  "references": [
    {
      "path": "./client/"
    },
  ]
}
reid-lsp/vsc-extension-quickstart.md (new file, 48 lines)
# Welcome to your VS Code Extension

## What's in the folder

* This folder contains all of the files necessary for your extension.
* `package.json` - this is the manifest file in which you declare your extension and command.
  * The sample plugin registers a command and defines its title and command name. With this information VS Code can show the command in the command palette. It doesn’t yet need to load the plugin.
* `src/extension.ts` - this is the main file where you will provide the implementation of your command.
  * The file exports one function, `activate`, which is called the very first time your extension is activated (in this case by executing the command). Inside the `activate` function we call `registerCommand`.
  * We pass the function containing the implementation of the command as the second parameter to `registerCommand`.

## Setup

* install the recommended extensions (amodio.tsl-problem-matcher, ms-vscode.extension-test-runner, and dbaeumer.vscode-eslint)

## Get up and running straight away

* Press `F5` to open a new window with your extension loaded.
* Run your command from the command palette by pressing (`Ctrl+Shift+P` or `Cmd+Shift+P` on Mac) and typing `Hello World`.
* Set breakpoints in your code inside `src/extension.ts` to debug your extension.
* Find output from your extension in the debug console.

## Make changes

* You can relaunch the extension from the debug toolbar after changing code in `src/extension.ts`.
* You can also reload (`Ctrl+R` or `Cmd+R` on Mac) the VS Code window with your extension to load your changes.

## Explore the API

* You can open the full set of our API when you open the file `node_modules/@types/vscode/index.d.ts`.

## Run tests

* Install the [Extension Test Runner](https://marketplace.visualstudio.com/items?itemName=ms-vscode.extension-test-runner)
* Run the "watch" task via the **Tasks: Run Task** command. Make sure this is running, or tests might not be discovered.
* Open the Testing view from the activity bar and click the "Run Test" button, or use the hotkey `Ctrl/Cmd + ; A`
* See the output of the test result in the Test Results view.
* Make changes to `src/test/extension.test.ts` or create new test files inside the `test` folder.
  * The provided test runner will only consider files matching the name pattern `**.test.ts`.
  * You can create folders inside the `test` folder to structure your tests any way you want.

## Go further

* Reduce the extension size and improve the startup time by [bundling your extension](https://code.visualstudio.com/api/working-with-extensions/bundling-extension).
* [Publish your extension](https://code.visualstudio.com/api/working-with-extensions/publishing-extension) on the VS Code extension marketplace.
* Automate builds by setting up [Continuous Integration](https://code.visualstudio.com/api/working-with-extensions/continuous-integration).
reid-lsp/webpack.config.js (new file, 48 lines)
//@ts-check

'use strict';

const path = require('path');

//@ts-check
/** @typedef {import('webpack').Configuration} WebpackConfig **/

/** @type WebpackConfig */
const extensionConfig = {
  target: 'node', // VS Code extensions run in a Node.js-context 📖 -> https://webpack.js.org/configuration/node/
  mode: 'none', // this leaves the source code as close as possible to the original (when packaging we set this to 'production')

  entry: './client/src/extension.ts', // the entry point of this extension, 📖 -> https://webpack.js.org/configuration/entry-context/
  output: {
    // the bundle is stored in the 'dist' folder (check package.json), 📖 -> https://webpack.js.org/configuration/output/
    path: path.resolve(__dirname, 'dist'),
    filename: 'extension.js',
    libraryTarget: 'commonjs2'
  },
  externals: {
    vscode: 'commonjs vscode' // the vscode-module is created on-the-fly and must be excluded. Add other modules that cannot be webpack'ed, 📖 -> https://webpack.js.org/configuration/externals/
    // modules added here also need to be added in the .vscodeignore file
  },
  resolve: {
    // support reading TypeScript and JavaScript files, 📖 -> https://github.com/TypeStrong/ts-loader
    extensions: ['.ts', '.js']
  },
  module: {
    rules: [
      {
        test: /\.ts$/,
        exclude: /node_modules/,
        use: [
          {
            loader: 'ts-loader'
          }
        ]
      }
    ]
  },
  devtool: 'nosources-source-map',
  infrastructureLogging: {
    level: "log", // enables logging required for problem matchers
  },
};

module.exports = [extensionConfig];
@@ -10,6 +10,8 @@ edition = "2021"
 default = ["color"]

 color = ["colored"]
+log_output = []
+context_debug = []

 [dependencies]
 ## Make it easier to generate errors
@@ -12,7 +12,6 @@ fn main() -> Result<(), std::io::Error> {
         libraries.push(libname);
     }

-    dbg!(&filename);
     let path = PathBuf::from(filename).canonicalize().unwrap();
     let parent = path.with_extension("");
     let llvm_ir_path = parent.with_extension("ll");
@@ -21,6 +20,7 @@ fn main() -> Result<(), std::io::Error> {
     let mir_path = parent.with_extension("mir");
     let asm_path = parent.with_extension("asm");

+    #[cfg(feature = "log_output")]
     let before = std::time::SystemTime::now();

     let text = fs::read_to_string(&path)?;
@@ -31,33 +31,39 @@ fn main() -> Result<(), std::io::Error> {
     match compile_simple(&text, PathBuf::from(&path), Some(cpu), vec![features]) {
         Ok((
             CompileOutput {
-                triple,
+                triple: _triple,
                 assembly,
                 obj_buffer,
-                llvm_ir,
+                llvm_ir: _llvm_ir,
             },
             CustomIRs { llir, mir },
         )) => {
-            println!("{}", llvm_ir);
+            #[cfg(feature = "log_output")]
+            {
+                println!("{}", _llvm_ir);
+                println!("Compiled with triple: {}\n", &_triple);
+                println!("Output LLVM IR to {:?}", llvm_ir_path);
+                println!("Output Assembly to {:?}", asm_path);
+                println!("Output Object-file to {:?}\n", object_path);
+                println!("Output LLIR-file to {:?}\n", llir_path);
+                println!("Output MIR-file to {:?}\n", mir_path);
+            }

-            let after = std::time::SystemTime::now();
-            println!("Compiled with triple: {}\n", &triple);
-            fs::write(&llvm_ir_path, &llvm_ir).expect("Could not write LLVM IR -file!");
-            println!("Output LLVM IR to {:?}", llvm_ir_path);
+            fs::write(&llvm_ir_path, &_llvm_ir).expect("Could not write LLVM IR -file!");
             fs::write(&asm_path, &assembly).expect("Could not write Assembly-file!");
-            println!("Output Assembly to {:?}", asm_path);
             fs::write(&object_path, &obj_buffer).expect("Could not write Object-file!");
-            println!("Output Object-file to {:?}\n", object_path);
             fs::write(&llir_path, &llir).expect("Could not write LLIR-file!");
-            println!("Output LLIR-file to {:?}\n", llir_path);
             fs::write(&mir_path, &mir).expect("Could not write MIR-file!");
-            println!("Output MIR-file to {:?}\n", mir_path);
-            println!(
-                "Compilation took: {:.2}ms\n",
-                (after.duration_since(before).unwrap().as_micros() as f32) / 1000.
-            );
+            #[cfg(feature = "log_output")]
+            {
+                let after = std::time::SystemTime::now();
+                println!(
+                    "Compilation took: {:.2}ms\n",
+                    (after.duration_since(before).unwrap().as_micros() as f32) / 1000.
+                );

-            println!("Linking {:?}", &object_path);
+                println!("Linking {:?}", &object_path);
+            }

             let linker = std::env::var("LD").unwrap_or("ld".to_owned());
             let mut linker = LDRunner::from_command(&linker).with_library("c");
@@ -69,6 +75,7 @@ fn main() -> Result<(), std::io::Error> {
             Err(e) => panic!("{}", e),
         };
     } else {
+        #[cfg(feature = "log_output")]
         println!("Please input compiled file path!")
     }
     Ok(())
@@ -7,7 +7,7 @@ static HEXADECIMAL_NUMERICS: &[char] = &[
 '0', '1', '2', '3', '4', '5', '6', '7', '8', '9', 'a', 'b', 'c', 'd', 'e', 'f',
 ];

-#[derive(Eq, PartialEq, Clone, PartialOrd, Ord)]
+#[derive(Eq, PartialEq, Clone, PartialOrd, Ord, Hash)]
 pub enum Token {
     /// Values
     Identifier(String),
@@ -114,6 +114,8 @@ pub enum Token {

     Unknown(char),

+    Whitespace(String),
+    Comment(String),
     Eof,
 }

@@ -192,6 +194,8 @@ impl ToString for Token {
             Token::Eof => String::new(),
             Token::Slash => String::from('/'),
             Token::Percent => String::from('%'),
+            Token::Whitespace(val) => val.clone(),
+            Token::Comment(val) => format!("//{}", val.clone()),
             Token::Unknown(val) => val.to_string(),
         }
     }
@@ -207,7 +211,7 @@ impl std::fmt::Debug for Token {
 }

 /// A token with a position
-#[derive(Clone, PartialEq, Eq, PartialOrd, Ord)]
+#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
 pub struct FullToken {
     pub token: Token,
     pub position: Position,
@@ -293,13 +297,25 @@ pub fn tokenize<T: Into<String>>(to_tokenize: T) -> Result<Vec<FullToken>, Error

         let variant = match character {
             // Whitespace
-            w if w.is_whitespace() => continue,
+            w if w.is_whitespace() => {
+                let mut whitespace = String::from(*w);
+                while let Some(w) = cursor.first() {
+                    if !w.is_whitespace() {
+                        break;
+                    }
+                    whitespace.push(cursor.next().unwrap());
+                }
+                Token::Whitespace(whitespace)
+            }
             // Comments
             '/' if cursor.first() == Some('/') => {
+                let mut comment = String::new();
                 while !matches!(cursor.first(), Some('\n') | None) {
-                    cursor.next();
+                    if let Some(c) = cursor.next() {
+                        comment.push(c);
+                    }
                 }
-                continue;
+                Token::Comment(comment)
             }
             '\"' | '\'' => {
                 let mut value = String::new();
@@ -1,7 +1,7 @@
 //! This is the module that contains relevant code to parsing Reid, that is to
 //! say transforming a Vec of FullTokens into a loose parsed AST that can be
 //! used for unwrapping syntax sugar, and then be transformed into Reid MIR.
-use std::path::PathBuf;
+use std::{fs::Metadata, path::PathBuf};

 use token_stream::TokenRange;

@@ -88,7 +88,7 @@ pub enum ExpressionKind {
     /// Array-indexed, e.g. <expr>[<expr>]
     Indexed(Box<Expression>, Box<Expression>),
     /// Struct-accessed, e.g. <expr>.<expr>
-    Accessed(Box<Expression>, String),
+    Accessed(Box<Expression>, String, TokenRange),
     /// Associated function call, but with a shorthand
     AccessCall(Box<Expression>, Box<FunctionCallExpression>),
     Binop(BinaryOperator, Box<Expression>, Box<Expression>),
@@ -193,7 +193,7 @@ pub struct FunctionDefinition(pub FunctionSignature, pub bool, pub Block, pub To
 pub struct FunctionSignature {
     pub name: String,
     pub self_kind: SelfKind,
-    pub params: Vec<(String, Type)>,
+    pub params: Vec<(String, Type, TokenRange)>,
     pub return_type: Option<Type>,
     #[allow(dead_code)]
     pub range: TokenRange,
@@ -216,7 +216,7 @@ pub enum ReturnType {
 #[derive(Debug, Clone)]
 pub struct StructExpression {
     name: String,
-    fields: Vec<(String, Expression)>,
+    fields: Vec<(String, Expression, TokenRange)>,
     range: TokenRange,
 }

@@ -191,10 +191,11 @@ where
             ),
             expr.0 .1,
         ),
-        ExpressionKind::Accessed(value_expr, index_name) => Expression(
+        ExpressionKind::Accessed(value_expr, index_name, range) => Expression(
             ExpressionKind::Accessed(
                 Box::new(apply_inner(PrimaryExpression(*value_expr.clone()), fun)),
                 index_name.clone(),
+                *range,
             ),
             expr.0 .1,
         ),
@@ -399,9 +400,9 @@ impl Parse for PrimaryExpression {
                     );
                 }
                 ValueIndex::Dot(val) => match val {
-                    DotIndexKind::StructValueIndex(name) => {
+                    DotIndexKind::StructValueIndex(name, range) => {
                         expr = Expression(
-                            ExpressionKind::Accessed(Box::new(expr), name),
+                            ExpressionKind::Accessed(Box::new(expr), name, range),
                             stream.get_range().unwrap(),
                         );
                     }
@@ -668,7 +669,7 @@ impl Parse for FunctionDefinition {
 }

 #[derive(Debug)]
-struct FunctionParam(String, Type);
+struct FunctionParam(String, Type, TokenRange);

 impl Parse for FunctionParam {
     fn parse(mut stream: TokenStream) -> Result<Self, Error> {
@@ -676,7 +677,7 @@ impl Parse for FunctionParam {
             return Err(stream.expected_err("parameter name")?);
         };
         stream.expect(Token::Colon)?;
-        Ok(FunctionParam(arg_name, stream.parse()?))
+        Ok(FunctionParam(arg_name, stream.parse()?, stream.get_range().unwrap()))
     }
 }

@@ -738,11 +739,11 @@ impl Parse for FunctionSignature {
         match &self_kind {
             SelfKind::None => {
                 if let Ok(param) = stream.parse::<FunctionParam>() {
-                    params.push((param.0, param.1));
+                    params.push((param.0, param.1, param.2));
                     while let Some(Token::Comma) = stream.peek() {
                         stream.next();
                         let param = stream.parse::<FunctionParam>()?;
-                        params.push((param.0, param.1));
+                        params.push((param.0, param.1, param.2));
                     }
                 }
             }
@@ -750,7 +751,7 @@ impl Parse for FunctionSignature {
                 while let Some(Token::Comma) = stream.peek() {
                     stream.next();
                     let param = stream.parse::<FunctionParam>()?;
-                    params.push((param.0, param.1));
+                    params.push((param.0, param.1, param.2));
                 }
             }
         }
@@ -818,9 +819,10 @@ impl Parse for StructExpression {
         let Some(Token::Identifier(name)) = stream.next() else {
             return Err(stream.expected_err("struct identifier")?);
         };
+
         stream.expect(Token::BraceOpen)?;
         let named_list = stream.parse::<NamedFieldList<Expression>>()?;
-        let fields = named_list.0.into_iter().map(|f| (f.0, f.1)).collect();
+        let fields = named_list.0.into_iter().map(|f| (f.0, f.1, f.2)).collect();

         stream.expect(Token::BraceClose)?;

@@ -897,7 +899,7 @@ impl Parse for ArrayValueIndex {

 #[derive(Debug, Clone)]
 pub enum DotIndexKind {
-    StructValueIndex(String),
+    StructValueIndex(String, TokenRange),
     FunctionCall(FunctionCallExpression),
 }

@@ -913,7 +915,7 @@ impl Parse for DotIndexKind {
                 is_macro: false,
             }))
         } else {
-            Ok(Self::StructValueIndex(name))
+            Ok(Self::StructValueIndex(name, stream.get_range().unwrap()))
         }
     } else {
         return Err(stream.expected_err("struct index (number)")?);
@@ -51,7 +51,7 @@ impl ast::Module
.map(|p| mir::FunctionParam {
name: p.0,
ty: p.1 .0.into_mir(module_id),
- meta: p.1 .1.as_meta(module_id),
+ meta: p.2.as_meta(module_id),
})
.collect(),
kind: mir::FunctionDefinitionKind::Extern(false),
@@ -164,7 +164,7 @@ impl ast::FunctionDefinition
params.extend(signature.params.iter().cloned().map(|p| FunctionParam {
name: p.0,
ty: p.1 .0.into_mir(module_id),
- meta: p.1 .1.as_meta(module_id),
+ meta: p.2.as_meta(module_id),
}));
mir::FunctionDefinition {
name: signature.name.clone(),
@@ -375,13 +375,14 @@ impl ast::Expression
struct_init
.fields
.iter()
- .map(|(n, e)| (n.clone(), e.process(module_id)))
+ .map(|(n, e, r)| (n.clone(), e.process(module_id), r.as_meta(module_id)))
.collect(),
),
- ast::ExpressionKind::Accessed(expression, name) => mir::ExprKind::Accessed(
+ ast::ExpressionKind::Accessed(expression, name, name_range) => mir::ExprKind::Accessed(
Box::new(expression.process(module_id)),
mir::TypeKind::Vague(mir::VagueType::Unknown),
name.clone(),
+ name_range.as_meta(module_id),
),
ast::ExpressionKind::Borrow(expr, mutable) => {
mir::ExprKind::Borrow(Box::new(expr.process(module_id)), *mutable)
@@ -42,20 +42,18 @@ impl<'a, 'b> TokenStream<'a, 'b>
/// Useful in conjunction with [`TokenStream::next`]
pub fn expecting_err<T: Into<String>>(&mut self, expected: T) -> Result<Error, Error> {
let next_token = self.peek().unwrap_or(Token::Eof);
+ let pos = self.next_token(self.position).0;
Ok(Error::Expected(
expected.into(),
next_token,
- TokenRange {
- start: self.position,
- end: self.position,
- },
+ TokenRange { start: pos, end: pos },
))
}

pub fn expect(&mut self, token: Token) -> Result<(), Error> {
- if let Some(peeked) = self.peek() {
- if token == peeked {
- self.position += 1;
+ if let (pos, Some(peeked)) = self.next_token(self.position) {
+ if token == peeked.token {
+ self.position = pos + 1;
Ok(())
} else {
Err(self.expecting_err(token)?)
@@ -66,37 +64,25 @@ impl<'a, 'b> TokenStream<'a, 'b>
}

pub fn next(&mut self) -> Option<Token> {
- let value = if self.tokens.len() < self.position {
- None
- } else {
- Some(self.tokens[self.position].token.clone())
- };
- self.position += 1;
- value
+ let (position, token) = self.next_token(self.position);
+ self.position = position + 1;
+ token.map(|t| t.token.clone())
}

pub fn previous(&mut self) -> Option<Token> {
- if (self.position as i32 - 1) < 0 {
- None
- } else {
- Some(self.tokens[self.position - 1].token.clone())
- }
+ let (_, token) = self.previous_token(self.position);
+ token.map(|t| t.token.clone())
}

pub fn peek(&mut self) -> Option<Token> {
- if self.tokens.len() < self.position {
- None
- } else {
- Some(self.tokens[self.position].token.clone())
- }
+ let (_, token) = self.next_token(self.position);
+ token.map(|t| t.token.clone())
}

pub fn peek2(&mut self) -> Option<Token> {
- if self.tokens.len() < (self.position + 1) {
- None
- } else {
- Some(self.tokens[self.position + 1].token.clone())
- }
+ let (pos2, _) = self.next_token(self.position);
+ let (_, token) = self.next_token(pos2 + 1);
+ token.map(|t| t.token.clone())
}

/// Parse the next value of trait Parse. If the parse succeeded, the related
@@ -185,9 +171,32 @@ impl<'a, 'b> TokenStream<'a, 'b>
pub fn get_range_prev(&self) -> Option<TokenRange> {
self.ref_position.as_ref().map(|ref_pos| TokenRange {
start: **ref_pos,
- end: self.position - 1,
+ end: self.previous_token(self.position).0,
})
}

+ fn previous_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
+ from -= 1;
+ while let Some(token) = self.tokens.get(from) {
+ if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
+ from -= 1;
+ } else {
+ break;
+ }
+ }
+ (from, self.tokens.get(from))
+ }
+
+ fn next_token(&self, mut from: usize) -> (usize, Option<&'a FullToken>) {
+ while let Some(token) = self.tokens.get(from) {
+ if matches!(token.token, Token::Whitespace(_) | Token::Comment(_)) {
+ from += 1;
+ } else {
+ break;
+ }
+ }
+ (from, self.tokens.get(from))
+ }
}

impl Drop for TokenStream<'_, '_> {
@@ -217,8 +226,8 @@ impl std::ops::Add for TokenRange

fn add(self, rhs: Self) -> Self::Output {
TokenRange {
- start: self.start.min(rhs.start),
- end: self.end.min(rhs.end),
+ start: self.start.min(rhs.start).min(rhs.end),
+ end: self.end.max(rhs.end).max(rhs.start),
}
}
}
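The two new private helpers above, `next_token` and `previous_token`, are what lets the rest of `TokenStream` ignore whitespace and comment tokens that are now kept in the token buffer. Below is a rough standalone sketch of the same lookup pattern; the `Token` and `FullToken` types here are simplified stand-ins for illustration, not Reid's actual lexer definitions.

```rust
// Illustrative stand-ins only; Reid's real Token/FullToken live in its lexer module.
#[derive(Clone, Debug, PartialEq)]
enum Token {
    Whitespace(String),
    Comment(String),
    Identifier(String),
    Comma,
}

#[derive(Clone, Debug)]
struct FullToken {
    token: Token,
}

/// Starting at `from`, skip whitespace and comment tokens and return the index
/// of the first significant token together with the token itself (if any),
/// mirroring the `next_token` helper added in the diff above.
fn next_significant(tokens: &[FullToken], mut from: usize) -> (usize, Option<&FullToken>) {
    while let Some(t) = tokens.get(from) {
        if matches!(t.token, Token::Whitespace(_) | Token::Comment(_)) {
            from += 1;
        } else {
            break;
        }
    }
    (from, tokens.get(from))
}

fn main() {
    let tokens = vec![
        FullToken { token: Token::Identifier("a".into()) },
        FullToken { token: Token::Whitespace(" ".into()) },
        FullToken { token: Token::Comment("// note".into()) },
        FullToken { token: Token::Comma },
    ];
    // Starting right after the identifier, the next significant token is the comma at index 3.
    let (idx, tok) = next_significant(&tokens, 1);
    assert_eq!(idx, 3);
    assert!(matches!(tok, Some(ft) if ft.token == Token::Comma));
    println!("next significant token at index {}: {:?}", idx, tok);
}
```

The reworked `peek2` composes the same helper twice: find the next significant token, then search again starting one past its index.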
@@ -133,7 +133,7 @@ impl mir::Expression
allocated.extend(expr.allocate(scope));
allocated.extend(idx.allocate(scope));
}
- mir::ExprKind::Accessed(expression, _, _) => {
+ mir::ExprKind::Accessed(expression, ..) => {
allocated.extend(expression.allocate(scope));
}
mir::ExprKind::Array(expressions) => {
@@ -159,7 +159,7 @@ impl mir::Expression
.unwrap();
allocated.push(Allocation(self.1, ty, allocation));

- for (field_name, expression) in items {
+ for (field_name, expression, _) in items {
allocated.extend(expression.allocate(scope));

let (_, ty) = expression.return_type(&Default::default(), scope.mod_id).unwrap();
@@ -728,7 +728,6 @@ impl mir::Statement
mir::StmtKind::Let(NamedVariableRef(ty, name, meta), mutable, expression) => {
let value = expression.codegen(scope, &state)?.unwrap();

- dbg!(&scope.allocator, &meta, &value.1);
let alloca = scope
.allocate(meta, &value.1)
.unwrap()
@@ -1161,7 +1160,7 @@ impl mir::Expression
TypeKind::Array(Box::new(elem_ty_kind), instr_list.len() as u64),
))
}
- mir::ExprKind::Accessed(expression, type_kind, field) => {
+ mir::ExprKind::Accessed(expression, type_kind, field, _) => {
let struct_val = expression.codegen(scope, &state.load(false))?.unwrap();

let TypeKind::CodegenPtr(inner) = &struct_val.1 else {
@@ -1223,7 +1222,7 @@ impl mir::Expression
.unwrap()
.maybe_location(&mut scope.block, location.clone());

- for (field_n, exp) in items {
+ for (field_n, exp, _) in items {
let gep_n = format!("{}.{}.gep", name, field_n);
let store_n = format!("{}.{}.store", name, field_n);
let i = indices.clone().find(|(_, f)| f.0 == *field_n).unwrap().0;
@@ -50,7 +50,7 @@ impl ErrorKind
}

impl ErrorKind {
- fn get_meta(&self) -> Metadata {
+ pub fn get_meta(&self) -> Metadata {
match &self {
ErrorKind::LexerError(error) => error.metadata,
ErrorKind::ParserError(error) => error.metadata,
@@ -63,6 +63,18 @@ impl ErrorKind
ErrorKind::MacroError(error) => error.metadata,
}
}

+ pub fn get_type_str(&self) -> &str {
+ match self {
+ ErrorKind::LexerError(_) => "lexer",
+ ErrorKind::ParserError(_) => "parser",
+ ErrorKind::TypeCheckError(_) => "typechecker",
+ ErrorKind::TypeInferenceError(_) => "type-inferrer",
+ ErrorKind::LinkerError(_) => "linker",
+ ErrorKind::MacroError(_) => "macro-pass",
+ ErrorKind::CodegenError(_) => "codegen",
+ }
+ }
}

impl PartialOrd for ErrorKind {
@@ -120,7 +132,7 @@ impl ErrorModules
#[derive(Debug, Clone, PartialEq)]
pub struct ReidError {
map: ErrorModules,
- errors: Vec<ErrorKind>,
+ pub errors: Vec<ErrorKind>,
}

impl ReidError {
@@ -185,9 +197,7 @@ impl std::fmt::Display for ReidError
let module = self.map.module(&meta.source_module_id);
let position = if let Some(module) = module {
if let Some(tokens) = &module.tokens {
- let range_tokens = meta.range.into_tokens(&tokens);
-
- get_position(&range_tokens).or(meta.position.map(|p| (p, p)))
+ meta.range.into_position(tokens).or(meta.position.map(|p| (p, p)))
} else if let Some(position) = meta.position {
Some((position, position))
} else {
@@ -237,6 +247,11 @@ impl TokenRange
.take(self.end + 1 - self.start)
.collect::<Vec<_>>()
}

+ pub fn into_position<'v>(&self, tokens: &'v Vec<FullToken>) -> Option<(Position, Position)> {
+ let tokens = self.into_tokens(tokens);
+ get_position(&tokens)
+ }
}

fn get_position(tokens: &Vec<&FullToken>) -> Option<(Position, Position)> {
@@ -26,12 +26,11 @@ impl LDRunner
let dyn_linker_path = find_objectfile(&self.dynamic_linker);
let crt1_path = find_objectfile("crt1.o");

+ #[cfg(feature = "log_output")]
println!("LDRunner: Using dynamic linker at: {:?}", dyn_linker_path);

let mut ld = Command::new(&self.command);
- ld.arg("-dynamic-linker")
- .arg(dyn_linker_path)
- .arg(crt1_path);
+ ld.arg("-dynamic-linker").arg(dyn_linker_path).arg(crt1_path);

for library in &self.libraries {
ld.arg(format!("-l{}", library));
@@ -41,22 +40,21 @@ impl LDRunner
.arg("-o")
.arg(out_path.to_str().unwrap());

+ #[cfg(feature = "log_output")]
println!(
"LDRunner: Executing linker to objfile at {:?} => {:?}",
input_path, out_path
);
+ #[cfg(feature = "log_output")]
dbg!(&ld);

ld.spawn().expect("Unable to execute ld!");

thread::sleep(Duration::from_millis(100));

+ #[cfg(feature = "log_output")]
println!("Setting executable bit to {:?}..", out_path);
- Command::new("chmod")
- .arg("+x")
- .arg(out_path)
- .spawn()
- .unwrap();
+ Command::new("chmod").arg("+x").arg(out_path).spawn().unwrap();
thread::sleep(Duration::from_millis(100));
}
}
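A pattern worth noting in the `LDRunner` changes above, and repeated throughout the `lib.rs` changes below, is hiding debug prints behind a `log_output` cargo feature instead of deleting them. A minimal sketch of how such a feature gate works, assuming a `log_output` feature is declared in the crate's Cargo.toml; the function and names here are illustrative, not the crate's actual API:

```rust
// Illustrative only. The feature itself would be declared in Cargo.toml, e.g.
//
//     [features]
//     log_output = []
//
// and turned on with `cargo build --features log_output`.

fn link_objects(input: &str, output: &str) {
    // With the attribute, this statement is compiled only when the feature is
    // enabled; in all other builds it disappears entirely at compile time.
    #[cfg(feature = "log_output")]
    println!("linking {} => {}", input, output);

    // ... the actual linking work would go here ...
    let _ = (input, output);
}

fn main() {
    link_objects("main.o", "main.out");
}
```

Unlike a runtime flag, nothing of the gated call remains in the binary when the feature is off, which is presumably why it is stacked with `#[cfg(debug_assertions)]` in the compiler driver below.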
@@ -8,22 +8,36 @@
//! Much of the syntax in Reid is directly inspired by rust, but mostly it is
//! driven by simplicity.
//!
+ //! Specifications and a bunch of [documentation for the language can be found
+ //! here](./documentation/).
+ //!
+ //! An example of a real whole program (a CPU pathtracer) can be found [in
+ //! examples/cpu_raytracer.reid](./examples/cpu_raytracer.reid), go have a look!
+ //!
//! Reid is currently able to (non-exhaustively):
- //! - Do basic algebra (e.g. Add, Sub, Mult)
+ //! - Do basic algebra binary and unary-operations (e.g. Add, Sub, Div, Mult,
+ //!   And, Not)
//! - Resolve complex one-liners correctly using PEDMAS (e.g. `5 + 2 * 5 - 5 *
//!   5` is calculated correctly)
+ //! - Handle borrows/derefs, pointers.
//! - Declare and call functions with varying parameters and return types
//! - Perform type-checking and type-inference such that return-types and
//!   parameter types must always match.
//! - Do simple logic-operations (e.g. If/And/Or)
+ //! - Handle, access, define and initialize structs and arrays.
+ //! - Define and execute For/While loops
+ //! - Output detailed debug information
+ //! - Define extern functions that can be linked to outside modules such as
+ //!   `libc`.
+ //! - Define custom binary operations for any two types that hasn't been defined
+ //!   previously (such as `u16 + u32`).
//!
- //! An example program of Reid, that calculates the 5th fibonacci number (and
- //! uses Rust for highlighting) is:
+ //!
+ //! An example program of Reid, that calculates the 5th fibonacci number:
//! ```reid
//! fn main() -> u16 {
//!     return fibonacci(5);
//! }
- //!
//! fn fibonacci(n: u16) -> u16 {
//!     if n <= 2 {
//!         return 1;
@@ -32,16 +46,13 @@
//!     }
//! }
//! ```
//!
- //! Currently missing relevant features (TODOs) are:
- //! - ~~Arrays~~ (DONE)
- //! - Structs (and custom types as such)
- //! - ~~Extern functions~~ (DONE)
- //! - ~~Strings~~ (DONE)
- //! - Loops
- //! - Debug Symbols
+ //! TODOs still (see README.md for more)
+ //! - Error handling
+ //! - Lexing & parsing of whitespace and comments as well
+ //! - LSP implementation
//! ```

- use std::{collections::HashMap, path::PathBuf, thread, time::Duration};
+ use std::{collections::HashMap, path::PathBuf};

use ast::{
lexer::{self, FullToken, Token},
@@ -61,7 +72,7 @@ use crate::{
mir::macros::{form_macros, MacroModule, MacroPass},
};

- mod ast;
+ pub mod ast;
mod codegen;
pub mod error_raporting;
pub mod ld;
@@ -82,6 +93,7 @@ pub fn parse_module<'map, T: Into<String>>(
map.set_tokens(id, tokens.clone());

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:#?}", &tokens);

Ok((id, tokens))
@@ -116,6 +128,7 @@ pub fn compile_module<'map>(
};

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
dbg!(&ast_module);

Ok(ast_module.process(module_id))
@@ -126,9 +139,11 @@ pub fn perform_all_passes<'map>(
module_map: &'map mut ErrorModules,
) -> Result<(), ReidError> {
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
dbg!(&context);

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:#}", &context);

let state = context.pass(&mut LinkerPass {
@@ -143,10 +158,13 @@ pub fn perform_all_passes<'map>(
}

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:-^100}", "LINKER OUTPUT");
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:#}", &context);
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
dbg!(&state);

if !state.errors.is_empty() {
@@ -168,10 +186,13 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut macro_pass)?;

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:-^100}", "MACRO OUTPUT");
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:#}", &context);
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
dbg!(&state);

if !state.errors.is_empty() {
@@ -206,12 +227,16 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut TypeInference { refs: &mut refs })?;

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:-^100}", "TYPE INFERRER OUTPUT");
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{}", &refs);
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:#}", &context);
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
dbg!(&state);

if !state.errors.is_empty() {
@@ -228,10 +253,13 @@ pub fn perform_all_passes<'map>(
let state = context.pass(&mut TypeCheck { refs: &refs })?;

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:-^100}", "TYPECHECKER OUTPUT");
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:#}", &context);
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
dbg!(&state);

if !state.errors.is_empty() {
@@ -245,6 +273,9 @@ pub fn perform_all_passes<'map>(
));
}

+ #[cfg(feature = "context_debug")]
+ dbg!(&context);
+
Ok(())
}

@@ -269,8 +300,10 @@ pub fn compile_and_pass<'map>(
perform_all_passes(&mut mir_context, module_map)?;

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:-^100}", "FINAL OUTPUT");
#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{:#}", &mir_context);

let mut context = Context::new(format!("Reid ({})", env!("CARGO_PKG_VERSION")));
@@ -280,6 +313,7 @@ pub fn compile_and_pass<'map>(
};

#[cfg(debug_assertions)]
+ #[cfg(feature = "log_output")]
println!("{}", &codegen_modules.context);

let compiled = codegen_modules.compile(cpu, features);
@@ -279,9 +279,9 @@ impl Display for ExprKind
let mut state = Default::default();
let mut inner_f = PadAdapter::wrap(f, &mut state);
let mut iter = items.iter();
- if let Some((name, expr)) = iter.next() {
+ if let Some((name, expr, _)) = iter.next() {
write!(inner_f, "\n{}: {}", name, expr)?;
- while let Some((name, expr)) = iter.next() {
+ while let Some((name, expr, _)) = iter.next() {
writeln!(inner_f, ",")?;
write!(inner_f, "{}: {}", name, expr)?;
}
@@ -289,7 +289,7 @@ impl Display for ExprKind
}
f.write_char('}')
}
- ExprKind::Accessed(expression, type_kind, name) => {
+ ExprKind::Accessed(expression, type_kind, name, _) => {
Display::fmt(&expression, f)?;
write_access(f, name)?;
write!(f, "<{}>", type_kind)
@@ -405,7 +405,7 @@ impl Expression
TypeKind::Array(Box::new(first.1), expressions.len() as u64),
))
}
- Accessed(_, type_kind, _) => Ok((ReturnKind::Soft, type_kind.clone())),
+ Accessed(_, type_kind, ..) => Ok((ReturnKind::Soft, type_kind.clone())),
Struct(name, _) => Ok((
ReturnKind::Soft,
TypeKind::CustomType(CustomTypeKey(name.clone(), mod_id)),
@@ -437,7 +437,7 @@ impl Expression
match &self.0 {
ExprKind::Variable(var_ref) => Some(var_ref),
ExprKind::Indexed(lhs, _, _) => lhs.backing_var(),
- ExprKind::Accessed(lhs, _, _) => lhs.backing_var(),
+ ExprKind::Accessed(lhs, ..) => lhs.backing_var(),
ExprKind::Borrow(expr, _) => expr.backing_var(),
ExprKind::Deref(expr) => expr.backing_var(),
ExprKind::Block(block) => block.backing_var(),
@@ -459,7 +459,7 @@ impl<'map> Pass for LinkerPass<'map>
super::ExprKind::Indexed(.., type_kind, _) => {
*type_kind = type_kind.update_imported(extern_types, mod_id)
}
- super::ExprKind::Accessed(.., type_kind, _) => {
+ super::ExprKind::Accessed(.., type_kind, _, _) => {
*type_kind = type_kind.update_imported(extern_types, mod_id)
}
super::ExprKind::BinOp(.., type_kind) => *type_kind = type_kind.update_imported(extern_types, mod_id),
@@ -41,16 +41,19 @@ impl Metadata
}

pub fn into_positions(&self, tokens: &Vec<FullToken>) -> Option<(Position, Position)> {
- let mut iter = tokens
- .iter()
- .skip(self.range.start)
- .take(self.range.end - self.range.start);
- if let Some(first) = iter.next() {
- let last = iter.last().unwrap_or(first);
- Some((first.position, last.position.add(last.token.len() as u32)))
- } else {
- None
- }
+ self.range.into_position(tokens)
+ }
+
+ pub fn is_after(&self, token_idx: usize) -> bool {
+ return token_idx < self.range.start;
+ }
+
+ pub fn is_before(&self, token_idx: usize) -> bool {
+ return token_idx > self.range.end;
+ }
+
+ pub fn contains(&self, token_idx: usize) -> bool {
+ return token_idx >= self.range.start && token_idx <= self.range.end;
}
}

@@ -259,9 +262,9 @@ pub struct Import(pub Vec<String>, pub Metadata);
pub enum ExprKind {
Variable(NamedVariableRef),
Indexed(Box<Expression>, TypeKind, Box<Expression>),
- Accessed(Box<Expression>, TypeKind, String),
+ Accessed(Box<Expression>, TypeKind, String, Metadata),
Array(Vec<Expression>),
- Struct(String, Vec<(String, Expression)>),
+ Struct(String, Vec<(String, Expression, Metadata)>),
Literal(Literal),
BinOp(BinaryOperator, Box<Expression>, Box<Expression>, TypeKind),
FunctionCall(FunctionCall),
@@ -585,7 +585,7 @@ impl Expression
}
}
ExprKind::Struct(_, items) => {
- for (_, expr) in items {
+ for (_, expr, _) in items {
expr.pass(pass, state, scope, mod_id)?;
}
}
@@ -97,9 +97,10 @@ fn check_typedefs_for_recursion<'a, 'b>(
typedef.meta,
);
} else {
- seen.insert(name.clone());
if let Some(inner_typedef) = defmap.get(name) {
- check_typedefs_for_recursion(defmap, inner_typedef, seen.clone(), state)
+ let mut inner_seen = seen.clone();
+ inner_seen.insert(name.clone());
+ check_typedefs_for_recursion(defmap, inner_typedef, inner_seen.clone(), state)
}
}
}
@@ -596,7 +597,7 @@ impl Expression
}
}
}
- ExprKind::Accessed(expression, type_kind, field_name) => {
+ ExprKind::Accessed(expression, type_kind, field_name, _) => {
// Resolve expected type
let expected_ty = type_kind.resolve_ref(typerefs);

@@ -640,7 +641,7 @@ impl Expression
HashSet::new()
};

- for (field_name, field_expr) in items {
+ for (field_name, field_expr, _) in items {
// Get expected type, or error if field does not exist
let expected_ty = state.or_else(
struct_def
@@ -171,8 +171,9 @@ impl FunctionDefinition
let scope_refs = ScopeTypeRefs::from(type_refs);
for param in &self.parameters {
let param_t = state.or_else(param.ty.assert_unvague(), Vague(Unknown), self.signature());
+ let mutable = matches!(param_t, TypeKind::Borrow(_, true));
let res = scope_refs
- .new_var(param.name.clone(), false, &param_t)
+ .new_var(param.name.clone(), mutable, &param_t)
.or(Err(ErrorKind::VariableAlreadyDefined(param.name.clone())));
state.ok(res, self.signature());
}
@@ -526,7 +527,7 @@ impl Expression
}
}
}
- ExprKind::Accessed(expression, type_kind, field_name) => {
+ ExprKind::Accessed(expression, type_kind, field_name, _) => {
let expr_ty = expression.infer_types(state, type_refs)?;

// Check that the resolved type is at least a struct, no
@@ -605,7 +606,7 @@ impl Expression
.parameters
.get_mut(0)
.expect("Unknown-type associated function NEEDS to always have at least one parameter!");
- let param_ty = first_param.infer_types(state, type_refs).unwrap().resolve_deep();
+ let param_ty = first_param.infer_types(state, type_refs)?.resolve_deep();
*type_kind = state
.or_else(
param_ty.ok_or(ErrorKind::CouldNotInferType(format!("{}", first_param))),
@@ -613,24 +614,36 @@ impl Expression
first_param.1,
)
.resolve_ref(type_refs.types);
- let backing_var = first_param.backing_var().expect("todo").1.clone();
- if let TypeKind::Borrow(inner, _) = type_kind {
- if let TypeKind::Borrow(..) = *inner.clone() {
- *type_kind = type_kind.unroll_borrow();
- let ExprKind::Borrow(val, _) = &first_param.0 else {
- panic!()
- };
- *first_param = *val.clone();
- }
- }
-
- if let Some((mutable, _)) = type_refs.find_var(&backing_var) {
- if !mutable {
- first_param.remove_borrow_mutability();
- }
- } else {
- return Err(ErrorKind::VariableNotDefined(backing_var));
- }
+ let backing_var = first_param.backing_var();
+ let is_mutable = if let Some(backing_var) = first_param.backing_var() {
+ if let Some((mutable, _)) = type_refs.find_var(&backing_var.1) {
+ mutable
+ } else {
+ return Err(ErrorKind::VariableNotDefined(backing_var.1.clone()));
+ }
+ } else {
+ false
+ };
+
+ if backing_var.is_some() {
+ if let TypeKind::Borrow(inner, _) = type_kind {
+ if let TypeKind::Borrow(..) = *inner.clone() {
+ *type_kind = type_kind.unroll_borrow();
+ let ExprKind::Borrow(val, _) = &first_param.0 else {
+ panic!()
+ };
+ *first_param = *val.clone();
+ }
+ }
+ } else {
+ let ExprKind::Borrow(val, _) = &first_param.0 else {
+ panic!()
+ };
+ *first_param = *val.clone();
+ }
+
+ if !is_mutable {
+ first_param.remove_borrow_mutability();
+ }
}
}
@@ -152,3 +152,8 @@ fn associated_functions()
Some(4),
);
}
+
+ #[test]
+ fn mutable_inner_functions() {
+ test(include_str!("../../examples/mutable_inner.reid"), "test", Some(0));
+ }