first working playground prototype
This commit is contained in:
parent 87f911e055
commit 1a5dcd24bc
@@ -1,8 +1,8 @@
import { defineConfig } from 'astro/config';
import starlight from '@astrojs/starlight';
import react from "@astrojs/react";
import starlightLinksValidator from "starlight-links-validator";
import shikiConfig from './src/utils/shiki';
import react from "@astrojs/react";

// https://astro.build/config
export default defineConfig({
@@ -10,6 +10,11 @@ import FileView from "./playground/FileView";
import Editor from "./playground/Editor";
import Header from "./playground/Header";

import initWasm, {
  compile,
  compileZip,
} from "@wasm/webcompiler/pkg/webcompiler";

export type File = {
  language?: string;
  content: string;

@@ -42,6 +47,10 @@ const DEFAULT_FILES = {
};

export default function Playground() {
  initWasm().catch((err) => {
    console.error(err);
  });

  const [rootDir, updateRootDir] = useImmer(
    getStorageOrDefault(FILE_STORAGE_KEY, DEFAULT_FILES) as Directory
  );
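The wasm module is initialized above in a fire-and-forget manner; the exported functions are only safe to call once that promise has settled. A minimal sketch of guarding for that, using the imports shown in this hunk (the ensureWasm helper is hypothetical, not part of this commit):

let wasmReady: Promise<void> | null = null;

// Hypothetical helper (not in this commit): cache the init promise so the
// exported functions are only called after the wasm module is instantiated.
function ensureWasm(): Promise<void> {
  if (!wasmReady) {
    wasmReady = initWasm().then(() => undefined);
  }
  return wasmReady;
}

async function compileWhenReady(files: unknown) {
  await ensureWasm();
  return compile(files); // `compile` as imported from the generated pkg above
}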
@@ -51,21 +60,29 @@ export default function Playground() {

  const onBuild = () => {
    if (monaco) {
      console.log(getFiles(monaco));
      const dist = JSON.parse(
        JSON.stringify(compile(getFiles(monaco)), jsonReplacer)
      );
      const withRoot = {
        dirs: {
          dist: dist,
        },
      } as Directory;
      loadFiles(monaco, updateRootDir, withRoot);
    } else {
      console.error("monaco has not loaded");
    }
  };
  const onZip = () => {
    if (monaco) {
      loadFile(
        monaco,
        updateRootDir,
        { content: "zip" },
        "dist/pack.zip"
      );
      const data =
        "data:application/zip;base64," + compileZip(getFiles(monaco));
      const a = document.createElement("a");
      a.href = data;
      a.download = "shulkerscript-pack.zip";
      a.click();
    } else {
      console.error("onZip not set");
      console.error("monaco has not loaded");
    }
  };
  const onSave = () => {
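onZip above embeds the base64 payload from compileZip directly in a data: URL. An alternative sketch, assuming the same compileZip output (the downloadZip helper name is made up), that decodes the payload into a Blob before triggering the download:

// Sketch only: turn the base64 zip returned by compileZip into a Blob download.
function downloadZip(base64: string, filename = "shulkerscript-pack.zip") {
  const bytes = Uint8Array.from(atob(base64), (c) => c.charCodeAt(0));
  const blob = new Blob([bytes], { type: "application/zip" });
  const url = URL.createObjectURL(blob);
  const a = document.createElement("a");
  a.href = url;
  a.download = filename;
  a.click();
  URL.revokeObjectURL(url); // release the object URL once the download has started
}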
@@ -226,6 +243,7 @@ function loadFile(
    monaco.editor.createModel(file.content, file.language, uri);
  }
  updater((dir) => {
    if (dir) {
      let current = dir;
      const parts = name.split("/").filter((s) => s !== "");
      const last = parts.pop()!;

@@ -233,12 +251,16 @@ function loadFile(
        if (!current.dirs) {
          current.dirs = {};
        }
        if (!current.dirs[part]) {
          current.dirs[part] = {};
        }
        current = current.dirs[part];
      }
      if (!current.files) {
        current.files = {};
      }
      current.files[last] = file;
    }
  });
}
@@ -250,3 +272,15 @@ function getStorageOrDefault(key: string, def: any) {
    return def;
  }
}

function jsonReplacer(key: any, value: any): any {
  if (value instanceof Map) {
    const res: { [key: string]: any } = {};
    for (const [k, v] of value.entries()) {
      res[k] = v;
    }
    return res;
  } else {
    return value;
  }
}
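jsonReplacer is needed because serde_wasm_bindgen serializes the Rust BTreeMaps as JavaScript Map instances, which JSON.stringify would otherwise turn into empty objects. A small illustration (the values are made up):

// Illustration only: a nested Map like the one compile() can return.
const fromWasm = new Map<string, unknown>([
  ["files", new Map([["pack.mcmeta", { content: "{}" }]])],
]);

// Round-tripping through JSON with jsonReplacer flattens every Map into a plain
// object, so the result can be treated as a Directory by the rest of the playground.
const plain = JSON.parse(JSON.stringify(fromWasm, jsonReplacer));
// -> { files: { "pack.mcmeta": { content: "{}" } } }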
@@ -67,10 +67,10 @@ export const shulkerscriptGrammar: LanguageInput = {
      end: "}",
      captures: {
        1: {
          name: "keyword.control.public.shulkerscript"
          name: "keyword.control.function.shulkerscript",
        },
        2: {
          name: "keyword.control.function.shulkerscript",
          name: "keyword.control.public.shulkerscript",
        },
        3: {
          name: "entity.name.function.shulkerscript",
@@ -0,0 +1,3 @@
[build]
target = "wasm32-unknown-unknown"
target-dir = "target"
@@ -0,0 +1,2 @@
/target
/pkg
@@ -0,0 +1,18 @@
[package]
name = "webcompiler"
version = "0.1.0"
edition = "2021"

[lib]
crate-type = ["cdylib"]

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[dependencies]
wasm-bindgen = "0.2.92"
shulkerscript = { git = "https://github.com/moritz-hoelting/shulkerscript-lang.git", default-features = false, features = ["serde", "shulkerbox"], rev = "af544ac79eea4498ef4563acfb7e8dd14ec5c84e" }
serde = "1.0"
serde-wasm-bindgen = "0.6.5"
anyhow = "1.0.86"
zip = { version = "2.1.3", default-features = false, features = ["deflate"] }
base64 = "0.22.1"
@@ -0,0 +1,86 @@
use std::collections::BTreeMap;

use serde::{Deserialize, Serialize};
use shulkerscript::shulkerbox::virtual_fs::{VFile, VFolder};

#[allow(dead_code)]
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct File {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) language: Option<String>,
    pub(crate) content: String,
}
impl File {
    pub fn with_lang(self, lang: String) -> Self {
        Self {
            language: Some(lang),
            ..self
        }
    }
}

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Directory {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) dirs: Option<BTreeMap<String, Directory>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub(crate) files: Option<BTreeMap<String, File>>,
}

impl From<Directory> for VFolder {
    fn from(value: Directory) -> Self {
        let mut folder = VFolder::new();

        if let Some(dirs) = value.dirs {
            for (name, dir) in dirs {
                folder.add_existing_folder(&name, dir.into())
            }
        }

        if let Some(files) = value.files {
            for (name, file) in files {
                folder.add_file(&name, file.into());
            }
        }

        folder
    }
}
impl From<File> for VFile {
    fn from(value: File) -> Self {
        VFile::Text(value.content)
    }
}

impl From<VFolder> for Directory {
    fn from(value: VFolder) -> Self {
        let mut dirs = BTreeMap::new();
        let mut files = BTreeMap::new();

        for (name, item) in value.get_folders() {
            dirs.insert(name.to_string(), item.clone().into());
        }

        for (name, item) in value.get_files() {
            files.insert(name.to_string(), item.clone().into());
        }

        Self {
            dirs: Some(dirs),
            files: Some(files),
        }
    }
}

impl From<VFile> for File {
    fn from(value: VFile) -> Self {
        let content = match value {
            VFile::Text(content) => content,
            VFile::Binary(bin) => String::from_utf8_lossy(&bin).to_string(),
        };
        Self {
            content,
            language: None,
        }
    }
}
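The playground's directory trees cross the wasm boundary as plain JSON objects, so the TypeScript side has to mirror these serde definitions. A sketch of the matching shape (the Directory type itself is not shown in this commit; field names follow the Rust structs above):

// Mirror of the serde structs: optional `dirs`/`files` maps keyed by name.
export type File = {
  language?: string; // Option<String>, omitted when not set
  content: string;
};

export type Directory = {
  dirs?: { [name: string]: Directory };
  files?: { [name: string]: File };
};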
@@ -0,0 +1,116 @@
use std::{
    cell::Cell,
    fmt::Display,
    io::{Cursor, Write},
    path::PathBuf,
};

use anyhow::Result;
use base64::prelude::*;
use fs::Directory;
use shulkerscript::{
    base::Handler,
    shulkerbox::virtual_fs::{VFile, VFolder},
};
use wasm_bindgen::prelude::*;
use zip::{write::SimpleFileOptions, ZipWriter};

mod fs;
mod util;

#[wasm_bindgen]
extern "C" {
    #[wasm_bindgen(js_namespace = console)]
    fn log(s: &str);
    #[wasm_bindgen(js_namespace = console, js_name = error)]
    fn log_err(s: &str);
}

/// Compiles the given directory into datapack files.
#[wasm_bindgen]
pub fn compile(root_dir: JsValue) -> JsValue {
    let root_dir = VFolder::from(serde_wasm_bindgen::from_value::<Directory>(root_dir).unwrap());

    log("Compiling...");
    if let Ok(folder) = _compile(&root_dir) {
        let folder = Directory::from(folder);
        serde_wasm_bindgen::to_value(&folder).unwrap()
    } else {
        JsValue::null()
    }
}

/// Returns a base64 encoded zip file containing the compiled datapack.
#[wasm_bindgen(js_name = compileZip)]
pub fn compile_zip(root_dir: JsValue) -> String {
    let root_dir = VFolder::from(serde_wasm_bindgen::from_value::<Directory>(root_dir).unwrap());

    let datapack = _compile(&root_dir).unwrap();

    let mut buffer = Cursor::new(Vec::new());
    let mut writer = ZipWriter::new(&mut buffer);
    let virtual_files = datapack.flatten();

    // write each file to the zip archive
    for (path, file) in virtual_files {
        writer
            .start_file(path, SimpleFileOptions::default())
            .unwrap();
        match file {
            VFile::Text(text) => {
                writer.write_all(text.as_bytes()).unwrap();
            }
            VFile::Binary(data) => {
                writer.write_all(data).unwrap();
            }
        }
    }

    writer.set_comment("Data pack created with Shulkerscript web compiler");

    writer.finish().unwrap();

    BASE64_STANDARD.encode(buffer.into_inner())
}

fn _compile(root_dir: &VFolder) -> Result<VFolder> {
    let printer = Printer::new();
    util::compile(&printer, root_dir, &get_script_paths(root_dir))
}

struct Printer {
    printed: Cell<bool>,
}
impl<T: Display> Handler<T> for Printer {
    fn receive<E: Into<T>>(&self, error: E) {
        log_err(&error.into().to_string());
        self.printed.set(true);
    }

    fn has_received(&self) -> bool {
        self.has_printed()
    }
}
impl Printer {
    /// Creates a new [`Printer`].
    fn new() -> Self {
        Self {
            printed: Cell::new(false),
        }
    }

    fn has_printed(&self) -> bool {
        self.printed.get()
    }
}

fn get_script_paths(root: &VFolder) -> Vec<(String, PathBuf)> {
    root.flatten()
        .into_iter()
        .filter_map(|(p, _)| {
            p.strip_suffix(".shu")
                .and_then(|p| p.strip_prefix("src/"))
                .map(|ident| (ident.to_string(), PathBuf::from(&p)))
        })
        .collect()
}
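Seen from the playground, the two exports have a simple contract: compile returns the compiled tree, or null when _compile fails (the Printer has already logged the diagnostics via console.error), and compileZip returns a base64 string. A hedged calling sketch reusing the identifiers from Playground.tsx above:

// Sketch only: defensive handling of the null-on-error contract of compile().
const result = compile(getFiles(monaco));
if (result === null) {
  console.error("compile() returned null; see the diagnostics logged by the Printer");
} else {
  const dist = JSON.parse(JSON.stringify(result, jsonReplacer));
  loadFiles(monaco, updateRootDir, { dirs: { dist } } as Directory);
}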
@@ -0,0 +1,96 @@
use anyhow::Result;
use std::path::Path;

use shulkerscript::{
    base::{source_file::SourceFile, Error, FileProvider},
    lexical::token_stream::TokenStream,
    shulkerbox::{datapack::Datapack, util::compile::CompileOptions, virtual_fs::VFolder},
    syntax::{parser::Parser, syntax_tree::program::ProgramFile},
    transpile::transpiler::Transpiler,
};

use crate::Printer;

/// Tokenizes the source code at the given path.
fn tokenize(
    printer: &Printer,
    file_provider: &impl FileProvider,
    path: &Path,
) -> Result<TokenStream> {
    let source_file = SourceFile::load(path, file_provider)?;

    Ok(TokenStream::tokenize(&source_file, printer))
}

/// Parses the source code at the given path.
fn parse(printer: &Printer, file_provider: &impl FileProvider, path: &Path) -> Result<ProgramFile> {
    let tokens = tokenize(printer, file_provider, path)?;

    if printer.has_printed() {
        return Err(Error::Other("An error occurred while tokenizing the source code.").into());
    }

    let mut parser = Parser::new(&tokens);
    let program = parser.parse_program(printer).ok_or(Error::Other(
        "An error occurred while parsing the source code.",
    ))?;

    if printer.has_printed() {
        return Err(Error::Other("An error occurred while parsing the source code.").into());
    }

    Ok(program)
}

/// Transpiles the source code at the given paths into a shulkerbox [`Datapack`].
fn transpile<F, P>(
    printer: &Printer,
    file_provider: &F,
    script_paths: &[(String, P)],
) -> Result<Datapack>
where
    F: FileProvider,
    P: AsRef<Path>,
{
    let programs = script_paths
        .iter()
        .map(|(program_identifier, path)| {
            let program = parse(printer, file_provider, path.as_ref())?;

            Ok((program_identifier, program))
        })
        .collect::<Vec<_>>();

    if programs.iter().any(Result::is_err) {
        return Err(programs.into_iter().find_map(Result::err).unwrap());
    }
    let programs = programs
        .into_iter()
        .filter_map(Result::ok)
        .collect::<Vec<_>>();

    let mut transpiler = Transpiler::new(48);
    transpiler.transpile(&programs, printer)?;
    let datapack = transpiler.into_datapack();

    if printer.has_printed() {
        return Err(Error::Other("An error occurred while transpiling the source code.").into());
    }

    Ok(datapack)
}

/// Compiles the source code at the given paths.
pub fn compile<F, P>(
    printer: &Printer,
    file_provider: &F,
    script_paths: &[(String, P)],
) -> Result<VFolder>
where
    F: FileProvider,
    P: AsRef<Path>,
{
    let datapack = transpile(printer, file_provider, script_paths)?;

    Ok(datapack.compile(&CompileOptions::default()))
}
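Combined with get_script_paths in lib.rs, this pipeline only treats virtual-folder entries under src/ with a .shu suffix as programs; the program identifier is the path with that prefix and suffix stripped. A sketch of a minimal input the playground could pass to compile() (file name and empty content are illustrative):

// Illustrative input: "src/main.shu" is selected as a script and transpiled;
// its program identifier becomes "main".
const project = {
  dirs: {
    src: {
      files: { "main.shu": { content: "" } },
    },
  },
};
const output = compile(project); // null if tokenizing, parsing or transpiling reported errors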