// MIT License // // Copyright (c) 2025 Adam Snyder // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. package app import ( "slices" "unicode" "unicode/utf8" "github.com/armsnyder/gdshader-language-server/internal/lsp" "github.com/samber/lo" ) type completionContext struct { shaderType string functionName string lineTokens []string } func (c completionContext) lastToken() string { if len(c.lineTokens) == 0 { return "" } return c.lineTokens[len(c.lineTokens)-1] } type completionPredicate func(c completionContext) bool type completionItemPredicate struct { predicate completionPredicate item lsp.CompletionItem } var alwaysTrue = func(completionContext) bool { return true } func ifLastTokenOneOf(tokens ...string) completionPredicate { return func(c completionContext) bool { return slices.Contains(tokens, c.lastToken()) } } func ifFirstTokenOneOf(tokens ...string) completionPredicate { return func(c completionContext) bool { if len(c.lineTokens) == 0 { return false } return slices.Contains(tokens, c.lineTokens[0]) } } func ifIsFirst(c completionContext) bool { return len(c.lineTokens) == 0 } func ifShaderType(shaderType string) completionPredicate { return func(c completionContext) bool { return c.shaderType == shaderType } } func and(predicates ...completionPredicate) completionPredicate { return func(c completionContext) bool { for _, predicate := range predicates { if !predicate(c) { return false } } return true } } func or(predicates ...completionPredicate) completionPredicate { return func(c completionContext) bool { for _, predicate := range predicates { if predicate(c) { return true } } return false } } func not(predicate completionPredicate) completionPredicate { return func(c completionContext) bool { return !predicate(c) } } func ifTokensContain(search string) completionPredicate { return func(c completionContext) bool { return slices.Contains(c.lineTokens, search) } } func inFunction(name string) completionPredicate { return func(c completionContext) bool { return c.functionName == name } } // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/shading_language.html#data-types var dataTypes = map[string]string{ "void": "Void datatype, useful only for functions that return nothing.", "bool": "Boolean datatype, can only contain `true` or `false`.", "bvec2": "Two-component vector of booleans.", "bvec3": "Three-component vector of booleans.", "bvec4": "Four-component vector of booleans.", "int": "32 bit signed scalar integer.", "ivec2": "Two-component vector of 
signed integers.", "ivec3": "Three-component vector of signed integers.", "ivec4": "Four-component vector of signed integers.", "uint": "Unsigned scalar integer; can't contain negative numbers.", "uvec2": "Two-component vector of unsigned integers.", "uvec3": "Three-component vector of unsigned integers.", "uvec4": "Four-component vector of unsigned integers.", "float": "32 bit floating-point scalar.", "vec2": "Two-component vector of floating-point values.", "vec3": "Three-component vector of floating-point values.", "vec4": "Four-component vector of floating-point values.", "mat2": "2x2 matrix, in column major order.", "mat3": "3x3 matrix, in column major order.", "mat4": "4x4 matrix, in column major order.", "sampler2D": "Sampler type for binding 2D textures, which are read as float.", "isampler2D": "Sampler type for binding 2D textures, which are read as signed integer.", "usampler2D": "Sampler type for binding 2D textures, which are read as unsigned integer.", "sampler2DArray": "Sampler type for binding 2D texture arrays, which are read as float.", "isampler2DArray": "Sampler type for binding 2D texture arrays, which are read as signed integer.", "usampler2DArray": "Sampler type for binding 2D texture arrays, which are read as unsigned integer.", "sampler3D": "Sampler type for binding 3D textures, which are read as float.", "isampler3D": "Sampler type for binding 3D textures, which are read as signed integer.", "usampler3D": "Sampler type for binding 3D textures, which are read as unsigned integer.", "samplerCube": "Sampler type for binding Cubemaps, which are read as float.", "samplerCubeArray": "Sampler type for binding Cubemap arrays, which are read as float. Only supported in Forward+ and Mobile, not Compatibility.", "samplerExternalOES": "External sampler type. 
Only supported in Compatibility/Android platform.", } func isLastTokenDataType(c completionContext) bool { _, ok := dataTypes[c.lastToken()] return ok } var simpleKeywords = []string{"break", "case", "continue", "default", "do", "else", "for", "if", "return", "switch", "while", "const", "struct"} func sequence(keyword string, tokens ...string) completionPredicate { return func(c completionContext) bool { return keyword == tokens[len(tokens)-1] && slices.Equal(c.lineTokens[:len(c.lineTokens)-1], tokens[:len(tokens)-1]) } } func isLastTokenPunctuation(c completionContext) bool { if len(c.lineTokens) == 0 { return false } lastToken := c.lastToken() r, _ := utf8.DecodeRuneInString(lastToken) return unicode.IsPunct(r) } var completionItems = func() []completionItemPredicate { var items []completionItemPredicate for label, doc := range dataTypes { items = append(items, completionItemPredicate{ predicate: or( isLastTokenPunctuation, ifLastTokenOneOf("uniform", "varying", "in", "out", "inout", "flat", "smooth", "lowp", "mediump", "highp"), ), item: lsp.CompletionItem{ Label: label, Kind: lsp.CompletionClass, Documentation: &lsp.MarkupContent{Kind: lsp.MarkupMarkdown, Value: doc}, }, }) } for _, keyword := range simpleKeywords { items = append(items, completionItemPredicate{ predicate: or( not(or(isLastTokenDataType, ifLastTokenOneOf(simpleKeywords...))), sequence(keyword, "else", "if"), ), item: lsp.CompletionItem{ Label: keyword, Kind: lsp.CompletionKeyword, }, }) } type predicateDescription struct { predicate completionPredicate description string } isFirstInArgument := ifLastTokenOneOf("(", ",") describedKeywords := map[string]predicateDescription{ // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/shading_language.html#precision "lowp": {alwaysTrue, "low precision, usually 8 bits per component mapped to 0-1"}, "mediump": {alwaysTrue, "medium precision, usually 16 bits or half float"}, "highp": {alwaysTrue, "high precision, uses full float or integer range (32 bit default)"}, // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/shading_language.html#discarding "discard": {ifIsFirst, "Discards the current fragment, preventing it from being drawn. Used in fragment shaders to skip rendering under certain conditions."}, "in": {isFirstInArgument, "An argument only for reading"}, "out": {isFirstInArgument, "An argument only for writing"}, "inout": {isFirstInArgument, "An argument that is fully passed via reference"}, "shader_type": {ifIsFirst, "Declares the type of shader being written, such as `canvas_item`, `spatial`, or `particles`."}, "render_mode": {ifIsFirst, "Declares one or more render modes of the shader"}, "uniform": {ifIsFirst, "Declares a variable that can be set from outside the shader"}, "varying": {ifIsFirst, "Declares a variable that is passed between vertex and fragment shaders"}, "flat": {ifLastTokenOneOf("varying"), "The value is not interpolated"}, "smooth": {ifLastTokenOneOf("varying"), "The value is interpolated in a perspective-correct fashion. 
This is the default."}, "group_uniforms": {ifIsFirst, "Group multiple uniforms together in the inspector"}, } for label, predDesc := range describedKeywords { items = append(items, completionItemPredicate{ predicate: and(predDesc.predicate, not(func(c completionContext) bool { _, ok := describedKeywords[c.lastToken()] return ok })), item: lsp.CompletionItem{ Label: label, Kind: lsp.CompletionKeyword, Documentation: &lsp.MarkupContent{ Kind: lsp.MarkupMarkdown, Value: predDesc.description, }, }, }) } // Non-function uniform hints. uniformHints := map[string]string{ "source_color": "Used as color.", "hint_normal": "Used as normalmap.", "hint_default_white": "As value or albedo color, default to opaque white.", "hint_default_black": "As value or albedo color, default to opaque black.", "hint_default_transparent": "As value or albedo color, default to transparent black.", "hint_anisotropy": "As flowmap, default to right.", "repeat_enable": "Enables texture repeating.", "repeat_disable": "Disables texture repeating.", "hint_screen_texture": "Texture is the screen texture.", "hint_depth_texture": "Texture is the depth texture.", "hint_normal_roughness_texture": "Texture is the normal roughness texture (only supported in Forward+).", } roughnessHints := []string{"r", "g", "b", "a", "normal", "gray"} for _, channel := range roughnessHints { uniformHints["hint_roughness_"+channel] = "Used for roughness limiter on import (attempts reducing specular aliasing). `_normal` is a normal map that guides the roughness limiter, with roughness increasing in areas that have high-frequency detail." } filterHints := []string{"nearest", "linear", "nearest_mipmap_nearest", "linear_mipmap_nearest", "nearest_mipmap_linear", "linear_mipmap_linear"} for _, filter := range filterHints { uniformHints["hint_filter_"+filter] = "Enables specified texture filtering." } for label, doc := range uniformHints { items = append(items, completionItemPredicate{ predicate: and(ifFirstTokenOneOf("uniform"), ifTokensContain(":")), item: lsp.CompletionItem{ Label: label, Kind: lsp.CompletionKeyword, Documentation: &lsp.MarkupContent{Kind: lsp.MarkupMarkdown, Value: doc}, }, }) } // Function uniform hints. functionUniformHints := map[string]string{ "hint_enum": "Displays int input as a dropdown widget in the editor.", "hint_range": "Displays float input as a slider in the editor.", } for label, doc := range functionUniformHints { items = append(items, completionItemPredicate{ predicate: and(ifFirstTokenOneOf("uniform"), ifTokensContain(":")), item: lsp.CompletionItem{ Label: label, Kind: lsp.CompletionFunction, Documentation: &lsp.MarkupContent{Kind: lsp.MarkupMarkdown, Value: doc}, }, }) } // Shader types shaderTypes := map[string]string{ "canvas_item": "Canvas item shader, used for 2D rendering.", "spatial": "Spatial shader, used for 3D rendering.", "particles": "Particle shader, used for particle systems.", "sky": "Sky shader, used for rendering skyboxes or skydomes.", "fog": "Fog shader, used for rendering fog effects.", } for label, doc := range shaderTypes { items = append(items, completionItemPredicate{ predicate: ifLastTokenOneOf("shader_type"), item: lsp.CompletionItem{ Label: label, Kind: lsp.CompletionKeyword, Documentation: &lsp.MarkupContent{Kind: lsp.MarkupMarkdown, Value: doc}, }, }) } // Built-in variables // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/shading_language.html#built-in-variables // TODO(asnyder): Set variables based on shader type. 
type renderMode struct { mode string description string } makeRenderModeItems := func(modes ...renderMode) []completionItemPredicate { return lo.Map(modes, func(mode renderMode, _ int) completionItemPredicate { return completionItemPredicate{ predicate: ifFirstTokenOneOf("render_mode"), item: lsp.CompletionItem{ Label: mode.mode, Kind: lsp.CompletionKeyword, Documentation: &lsp.MarkupContent{Kind: lsp.MarkupMarkdown, Value: mode.description}, }, } }) } type constant struct { name string shortDesc string longDesc string } makeConstantItems := func(constants ...constant) []completionItemPredicate { return lo.Map(constants, func(c constant, _ int) completionItemPredicate { return completionItemPredicate{ predicate: alwaysTrue, item: lsp.CompletionItem{ Label: c.name, Kind: lsp.CompletionConstant, Detail: c.shortDesc, Documentation: &lsp.MarkupContent{Kind: lsp.MarkupMarkdown, Value: c.longDesc}, }, } }) } makeFunctionConstantItems := func(name string, constants ...constant) []completionItemPredicate { return lo.Map(constants, func(c constant, _ int) completionItemPredicate { return completionItemPredicate{ predicate: inFunction(name), item: lsp.CompletionItem{ Label: c.name, Kind: lsp.CompletionConstant, Detail: c.shortDesc, Documentation: &lsp.MarkupContent{Kind: lsp.MarkupMarkdown, Value: c.longDesc}, }, } }) } byShaderType := map[string][]completionItemPredicate{ "spatial": append( // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/canvas_item_shader.html#render-modes makeRenderModeItems( renderMode{"blend_mix", "Mix blend mode (alpha is transparency), default."}, renderMode{"blend_add", "Additive blend mode."}, renderMode{"blend_sub", "Subtractive blend mode."}, renderMode{"blend_mul", "Multiplicative blend mode."}, renderMode{"blend_premul_alpha", "Premultiplied alpha blend mode (fully transparent = add, fully opaque = mix)."}, renderMode{"depth_draw_opaque", "Only draw depth for opaque geometry (not transparent)."}, renderMode{"depth_draw_always", "Always draw depth (opaque and transparent)."}, renderMode{"depth_draw_never", "Never draw depth."}, renderMode{"depth_prepass_alpha", "Do opaque depth pre-pass for transparent geometry."}, renderMode{"depth_test_disabled", "Disable depth testing."}, renderMode{"sss_mode_skin", "Subsurface Scattering mode for skin (optimizes visuals for human skin, e.g. boosted red channel)."}, renderMode{"cull_back", "Cull back-faces (default)."}, renderMode{"cull_front", "Cull front-faces."}, renderMode{"cull_disabled", "Culling disabled (double sided)."}, renderMode{"unshaded", "Result is just albedo. No lighting/shading happens in material, making it faster to render."}, renderMode{"wireframe", "Geometry draws using lines (useful for troubleshooting)."}, renderMode{"debug_shadow_splits", "Directional shadows are drawn using different colors for each split (useful for troubleshooting)."}, renderMode{"diffuse_burley", "Burley (Disney PBS) for diffuse (default)."}, renderMode{"diffuse_lambert", "Lambert shading for diffuse."}, renderMode{"diffuse_lambert_wrap", "Lambert-wrap shading (roughness-dependent) for diffuse."}, renderMode{"diffuse_toon", "Toon shading for diffuse."}, renderMode{"specular_schlick_ggx", "Schlick-GGX for direct light specular lobes (default)."}, renderMode{"specular_toon", "Toon for direct light specular lobes."}, renderMode{"specular_disabled", "Disable direct light specular lobes. 
Doesn't affect reflected light (use `SPECULAR = 0.0` instead)."}, renderMode{"skip_vertex_transform", "`VERTEX`, `NORMAL`, `TANGENT`, and `BITANGENT` need to be transformed manually in the `vertex()` function."}, renderMode{"world_vertex_coords", "`VERTEX`, `NORMAL`, `TANGENT`, and `BITANGENT` are modified in world space instead of model space."}, renderMode{"ensure_correct_normals", "Use when non-uniform scale is applied to mesh *(note: currently unimplemented)*."}, renderMode{"shadows_disabled", "Disable computing shadows in shader. The shader will not receive shadows, but can still cast them."}, renderMode{"ambient_light_disabled", "Disable contribution from ambient light and radiance map."}, renderMode{"shadow_to_opacity", "Lighting modifies the alpha so shadowed areas are opaque and non-shadowed areas are transparent. Useful for overlaying shadows onto a camera feed in AR."}, renderMode{"vertex_lighting", "Use vertex-based lighting instead of per-pixel lighting."}, renderMode{"particle_trails", "Enables the trails when used on particles geometry."}, renderMode{"alpha_to_coverage", "Alpha antialiasing mode, see [this PR](https://github.com/godotengine/godot/pull/40364) for more."}, renderMode{"alpha_to_coverage_and_one", "Alpha antialiasing mode, see [this PR](https://github.com/godotengine/godot/pull/40364) for more."}, renderMode{"fog_disabled", "Disable receiving depth-based or volumetric fog. Useful for `blend_add` materials like particles."}, ), append( // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/canvas_item_shader.html#global-built-ins makeConstantItems( constant{"TIME", "in float TIME", "Global time since the engine has started, in seconds. It repeats after every `3,600` seconds (which can be changed with the `rollover` setting). It's affected by `time_scale` but not by pausing. If you need a `TIME` variable that is not affected by time scale, add your own global shader uniform and update it each frame."}, constant{"PI", "in float PI", "A `PI` constant (`3.141592`). A ratio of a circle's circumference to its diameter and amount of radians in half turn."}, constant{"TAU", "in float TAU", "A `TAU` constant (`6.283185`). An equivalent of `PI * 2` and amount of radians in full turn."}, constant{"E", "in float E", "An `E` constant (`2.718281`). Euler's number and a base of the natural logarithm."}, ), append( // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/canvas_item_shader.html#vertex-built-ins makeFunctionConstantItems("vertex", constant{"MODEL_MATRIX", "in mat4 MODEL_MATRIX", "Local space to world space transform. World space is the coordinates you normally use in the editor."}, constant{"CANVAS_MATRIX", "in mat4 CANVAS_MATRIX", "World space to canvas space transform. In canvas space the origin is the upper-left corner of the screen and coordinates ranging from `(0.0, 0.0)` to viewport size."}, constant{"SCREEN_MATRIX", "in mat4 SCREEN_MATRIX", "Canvas space to clip space. 
In clip space coordinates range from `(-1.0, -1.0)` to `(1.0, 1.0)`."}, constant{"VIEWPORT_SIZE", "in vec2 VIEWPORT_SIZE", "Size of viewport (in pixels)."}, constant{"VIEW_MATRIX", "in mat4 VIEW_MATRIX", "World space to view space transform."}, constant{"INV_VIEW_MATRIX", "in mat4 INV_VIEW_MATRIX", "View space to world space transform."}, constant{"MAIN_CAM_INV_VIEW_MATRIX", "in mat4 MAIN_CAM_INV_VIEW_MATRIX", "View space to world space transform of camera used to draw the current viewport."}, constant{"INV_PROJECTION_MATRIX", "in mat4 INV_PROJECTION_MATRIX", "Clip space to view space transform."}, constant{"NODE_POSITION_WORLD", "in vec3 NODE_POSITION_WORLD", "Node position, in world space."}, constant{"NODE_POSITION_VIEW", "in vec3 NODE_POSITION_VIEW", "Node position, in view space."}, constant{"CAMERA_POSITION_WORLD", "in vec3 CAMERA_POSITION_WORLD", "Camera position, in world space."}, constant{"CAMERA_DIRECTION_WORLD", "in vec3 CAMERA_DIRECTION_WORLD", "Camera direction, in world space."}, constant{"CAMERA_VISIBLE_LAYERS", "in uint CAMERA_VISIBLE_LAYERS", "Cull layers of the camera rendering the current pass."}, constant{"INSTANCE_ID", "in int INSTANCE_ID", "Instance ID for instancing."}, constant{"INSTANCE_CUSTOM", "in vec4 INSTANCE_CUSTOM", "Instance custom data (for particles, mostly)."}, constant{"VIEW_INDEX", "in int VIEW_INDEX", "`VIEW_MONO_LEFT` (`0`) for Mono (not multiview) or left eye, `VIEW_RIGHT` (`1`) for right eye."}, constant{"VIEW_MONO_LEFT", "in int VIEW_MONO_LEFT", "Constant for Mono or left eye, always `0`."}, constant{"VIEW_RIGHT", "in int VIEW_RIGHT", "Constant for right eye, always `1`."}, constant{"EYE_OFFSET", "in vec3 EYE_OFFSET", "Position offset for the eye being rendered. Only applicable for multiview rendering."}, constant{"VERTEX", "inout vec3 VERTEX", "Position of the vertex, in model space. In world space if `world_vertex_coords` is used."}, constant{"VERTEX_ID", "in int VERTEX_ID", "The index of the current vertex in the vertex buffer."}, constant{"NORMAL", "inout vec3 NORMAL", "Normal in model space. In world space if `world_vertex_coords` is used."}, constant{"TANGENT", "inout vec3 TANGENT", "Tangent in model space. In world space if `world_vertex_coords` is used."}, constant{"BINORMAL", "inout vec3 BINORMAL", "Binormal in model space. In world space if `world_vertex_coords` is used."}, constant{"POSITION", "out vec4 POSITION", "If written to, overrides final vertex position in clip space."}, constant{"UV", "inout vec2 UV", "UV main channel."}, constant{"UV2", "inout vec2 UV2", "UV secondary channel."}, constant{"COLOR", "inout vec4 COLOR", "Color from vertices."}, constant{"ROUGHNESS", "out float ROUGHNESS", "Roughness for vertex lighting."}, constant{"POINT_SIZE", "inout float POINT_SIZE", "Point size for point rendering."}, constant{"MODELVIEW_MATRIX", "inout mat4 MODELVIEW_MATRIX", "Model/local space to view space transform (use if possible)."}, constant{"MODELVIEW_NORMAL_MATRIX", "inout mat3 MODELVIEW_NORMAL_MATRIX", ""}, constant{"MODEL_NORMAL_MATRIX", "in mat3 MODEL_NORMAL_MATRIX", ""}, constant{"PROJECTION_MATRIX", "inout mat4 PROJECTION_MATRIX", "View space to clip space transform."}, constant{"BONE_INDICES", "in uvec4 BONE_INDICES", ""}, constant{"BONE_WEIGHTS", "in vec4 BONE_WEIGHTS", ""}, constant{"CUSTOM0", "in vec4 CUSTOM0", "Custom value from vertex primitive. When using extra UVs, `xy` is UV3 and `zw` is UV4."}, constant{"CUSTOM1", "in vec4 CUSTOM1", "Custom value from vertex primitive. 
When using extra UVs, `xy` is UV5 and `zw` is UV6."}, constant{"CUSTOM2", "in vec4 CUSTOM2", "Custom value from vertex primitive. When using extra UVs, `xy` is UV7 and `zw` is UV8."}, constant{"CUSTOM3", "in vec4 CUSTOM3", "Custom value from vertex primitive."}, ), append( // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/canvas_item_shader.html#fragment-built-ins makeFunctionConstantItems("fragment", constant{"VIEWPORT_SIZE", "in vec2 VIEWPORT_SIZE", "Size of viewport (in pixels)."}, constant{"FRAGCOORD", "in vec4 FRAGCOORD", "Coordinate of pixel center in screen space. `xy` specifies position in window (origin is lower-left). `z` is fragment depth and output unless `DEPTH` is written."}, constant{"FRONT_FACING", "in bool FRONT_FACING", "`true` if current face is front facing, `false` otherwise."}, constant{"VIEW", "in vec3 VIEW", "Normalized vector from fragment position to camera (in view space)."}, constant{"UV", "in vec2 UV", "UV that comes from the `vertex()` function."}, constant{"UV2", "in vec2 UV2", "UV2 that comes from the `vertex()` function."}, constant{"COLOR", "in vec4 COLOR", "COLOR that comes from the `vertex()` function."}, constant{"POINT_COORD", "in vec2 POINT_COORD", "Point coordinate for drawing points with `POINT_SIZE`."}, constant{"MODEL_MATRIX", "in mat4 MODEL_MATRIX", "Model/local space to world space transform."}, constant{"MODEL_NORMAL_MATRIX", "in mat3 MODEL_NORMAL_MATRIX", "`transpose(inverse(mat3(MODEL_MATRIX)))` for non-uniform scale. Matches `MODEL_MATRIX` otherwise."}, constant{"VIEW_MATRIX", "in mat4 VIEW_MATRIX", "World space to view space transform."}, constant{"INV_VIEW_MATRIX", "in mat4 INV_VIEW_MATRIX", "View space to world space transform."}, constant{"PROJECTION_MATRIX", "in mat4 PROJECTION_MATRIX", "View space to clip space transform."}, constant{"INV_PROJECTION_MATRIX", "in mat4 INV_PROJECTION_MATRIX", "Clip space to view space transform."}, constant{"NODE_POSITION_WORLD", "in vec3 NODE_POSITION_WORLD", "Node position, in world space."}, constant{"NODE_POSITION_VIEW", "in vec3 NODE_POSITION_VIEW", "Node position, in view space."}, constant{"CAMERA_POSITION_WORLD", "in vec3 CAMERA_POSITION_WORLD", "Camera position, in world space."}, constant{"CAMERA_DIRECTION_WORLD", "in vec3 CAMERA_DIRECTION_WORLD", "Camera direction, in world space."}, constant{"CAMERA_VISIBLE_LAYERS", "in uint CAMERA_VISIBLE_LAYERS", "Cull layers of the camera rendering the current pass."}, constant{"VERTEX", "in vec3 VERTEX", "`VERTEX` from `vertex()` transformed into view space. May differ if `skip_vertex_transform` is enabled."}, constant{"LIGHT_VERTEX", "inout vec3 LIGHT_VERTEX", "Writable version of `VERTEX` for lighting calculations. Does not change fragment position."}, constant{"VIEW_INDEX", "in int VIEW_INDEX", "`VIEW_MONO_LEFT` (0) or `VIEW_RIGHT` (1) for stereo rendering."}, constant{"VIEW_MONO_LEFT", "in int VIEW_MONO_LEFT", "Constant for Mono or left eye, always `0`."}, constant{"VIEW_RIGHT", "in int VIEW_RIGHT", "Constant for right eye, always `1`."}, constant{"EYE_OFFSET", "in vec3 EYE_OFFSET", "Position offset for the eye being rendered in multiview rendering."}, constant{"SCREEN_UV", "in vec2 SCREEN_UV", "Screen UV coordinate for current pixel."}, constant{"DEPTH", "out float DEPTH", "Custom depth value `[0.0, 1.0]`. 
Must be set in all branches if written."}, constant{"NORMAL", "inout vec3 NORMAL", "Normal from `vertex()`, in view space (unless `skip_vertex_transform` is used)."}, constant{"TANGENT", "inout vec3 TANGENT", "Tangent from `vertex()`, in view space (unless `skip_vertex_transform` is used)."}, constant{"BINORMAL", "inout vec3 BINORMAL", "Binormal from `vertex()`, in view space (unless `skip_vertex_transform` is used)."}, constant{"NORMAL_MAP", "out vec3 NORMAL_MAP", "Set normal here when reading from a texture instead of using `NORMAL`."}, constant{"NORMAL_MAP_DEPTH", "out float NORMAL_MAP_DEPTH", "Depth from `NORMAL_MAP`. Defaults to `1.0`."}, constant{"ALBEDO", "out vec3 ALBEDO", "Base color (default white)."}, constant{"ALPHA", "out float ALPHA", "Alpha value `[0.0, 1.0]`. Triggers transparency pipeline if used."}, constant{"ALPHA_SCISSOR_THRESHOLD", "out float ALPHA_SCISSOR_THRESHOLD", "Alpha discard threshold."}, constant{"ALPHA_HASH_SCALE", "out float ALPHA_HASH_SCALE", "Alpha hash dither scale (higher = more visible pixels)."}, constant{"ALPHA_ANTIALIASING_EDGE", "out float ALPHA_ANTIALIASING_EDGE", "Alpha to coverage antialiasing edge threshold. Requires `alpha_to_coverage` render mode."}, constant{"ALPHA_TEXTURE_COORDINATE", "out vec2 ALPHA_TEXTURE_COORDINATE", "UV for alpha-to-coverage AA. Typically `UV * texture_size`."}, constant{"PREMUL_ALPHA_FACTOR", "out float PREMUL_ALPHA_FACTOR", "Premultiplied alpha lighting interaction. Used with `blend_premul_alpha`."}, constant{"METALLIC", "out float METALLIC", "Metallic value `[0.0, 1.0]`."}, constant{"SPECULAR", "out float SPECULAR", "Specular value (default `0.5`). `0.0` disables reflections."}, constant{"ROUGHNESS", "out float ROUGHNESS", "Roughness value `[0.0, 1.0]`."}, constant{"RIM", "out float RIM", "Rim lighting intensity `[0.0, 1.0]`."}, constant{"RIM_TINT", "out float RIM_TINT", "Rim tint: `0.0` = white, `1.0` = albedo."}, constant{"CLEARCOAT", "out float CLEARCOAT", "Adds a secondary specular layer."}, constant{"CLEARCOAT_GLOSS", "out float CLEARCOAT_GLOSS", "Glossiness of clearcoat layer."}, constant{"ANISOTROPY", "out float ANISOTROPY", "Distortion factor for specular highlight."}, constant{"ANISOTROPY_FLOW", "out vec2 ANISOTROPY_FLOW", "Direction of anisotropy flow (e.g. from flowmaps)."}, constant{"SSS_STRENGTH", "out float SSS_STRENGTH", "Subsurface scattering strength."}, constant{"SSS_TRANSMITTANCE_COLOR", "out vec4 SSS_TRANSMITTANCE_COLOR", "Color for subsurface transmittance effect."}, constant{"SSS_TRANSMITTANCE_DEPTH", "out float SSS_TRANSMITTANCE_DEPTH", "Depth for transmittance penetration."}, constant{"SSS_TRANSMITTANCE_BOOST", "out float SSS_TRANSMITTANCE_BOOST", "Boost to force SSS to appear even when lit."}, constant{"BACKLIGHT", "inout vec3 BACKLIGHT", "Backlighting color for light received on opposite side of surface."}, constant{"AO", "out float AO", "Ambient occlusion intensity (for pre-baked AO)."}, constant{"AO_LIGHT_AFFECT", "out float AO_LIGHT_AFFECT", "How much AO dims direct lighting. `[0.0, 1.0]`."}, constant{"EMISSION", "out vec3 EMISSION", "Emissive color. 
Can exceed `1.0` for HDR."}, constant{"FOG", "out vec4 FOG", "If written to, blends final color with `FOG.rgb` using `FOG.a`."}, constant{"RADIANCE", "out vec4 RADIANCE", "Environment map radiance override."}, constant{"IRRADIANCE", "out vec4 IRRADIANCE", "Environment map irradiance override."}, ), // https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/canvas_item_shader.html#light-built-ins makeFunctionConstantItems("light", constant{"VIEWPORT_SIZE", "in vec2 VIEWPORT_SIZE", "Size of viewport (in pixels)."}, constant{"FRAGCOORD", "in vec4 FRAGCOORD", "Pixel center coordinate in screen space. `xy` is position in window, `z` is depth unless `DEPTH` is used. Origin is lower-left."}, constant{"MODEL_MATRIX", "in mat4 MODEL_MATRIX", "Model/local space to world space transform."}, constant{"INV_VIEW_MATRIX", "in mat4 INV_VIEW_MATRIX", "View space to world space transform."}, constant{"VIEW_MATRIX", "in mat4 VIEW_MATRIX", "World space to view space transform."}, constant{"PROJECTION_MATRIX", "in mat4 PROJECTION_MATRIX", "View space to clip space transform."}, constant{"INV_PROJECTION_MATRIX", "in mat4 INV_PROJECTION_MATRIX", "Clip space to view space transform."}, constant{"NORMAL", "in vec3 NORMAL", "Normal vector, in view space."}, constant{"SCREEN_UV", "in vec2 SCREEN_UV", "Screen UV coordinate for current pixel."}, constant{"UV", "in vec2 UV", "UV that comes from the `vertex()` function."}, constant{"UV2", "in vec2 UV2", "UV2 that comes from the `vertex()` function."}, constant{"VIEW", "in vec3 VIEW", "View vector, in view space."}, constant{"LIGHT", "in vec3 LIGHT", "Light vector, in view space."}, constant{"LIGHT_COLOR", "in vec3 LIGHT_COLOR", "`light_color * light_energy * PI`. Includes `PI` because physically-based models divide by `PI`."}, constant{"SPECULAR_AMOUNT", "in float SPECULAR_AMOUNT", "`2.0 * light_specular` for Omni and Spot lights. `1.0` for Directional lights."}, constant{"LIGHT_IS_DIRECTIONAL", "in bool LIGHT_IS_DIRECTIONAL", "`true` if this pass is a DirectionalLight3D."}, constant{"ATTENUATION", "in float ATTENUATION", "Attenuation from distance or shadow."}, constant{"ALBEDO", "in vec3 ALBEDO", "Base albedo color."}, constant{"BACKLIGHT", "in vec3 BACKLIGHT", "Backlighting color."}, constant{"METALLIC", "in float METALLIC", "Metallic factor."}, constant{"ROUGHNESS", "in float ROUGHNESS", "Roughness factor."}, constant{"DIFFUSE_LIGHT", "out vec3 DIFFUSE_LIGHT", "Diffuse light result."}, constant{"SPECULAR_LIGHT", "out vec3 SPECULAR_LIGHT", "Specular light result."}, constant{"ALPHA", "out float ALPHA", "Alpha value `[0.0, 1.0]`. Enables transparent pipeline if written."}, )..., )..., )..., )..., ), } for label, typeItems := range byShaderType { for _, item := range typeItems { items = append(items, completionItemPredicate{ predicate: and(ifShaderType(label), item.predicate), item: item.item, }) } } return items }()
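// Illustrative sketch, not part of the original source: how the predicate
// combinators above gate a completion item. For a cursor on a line that so far
// reads "render_mode " in a file whose first line declares "shader_type
// spatial;", a spatial render-mode item is selected roughly like this:
//
//	c := completionContext{
//		shaderType: "spatial",
//		lineTokens: []string{"render_mode"},
//	}
//	pred := and(ifShaderType("spatial"), ifFirstTokenOneOf("render_mode"))
//	_ = pred(c) // true, so items such as "blend_mix" are offered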
// MIT License // // Copyright (c) 2025 Adam Snyder // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. package app import ( "bytes" "context" "fmt" "io" "slices" "strings" "unicode" "unicode/utf8" "github.com/armsnyder/gdshader-language-server/internal/lsp" "github.com/samber/lo" ) // Handler encapsulates the logic of the Godot shader language server. type Handler struct { lsp.Filesystem } // Initialize implements lsp.Handler. func (h *Handler) Initialize(context.Context, lsp.ClientCapabilities) (*lsp.ServerCapabilities, error) { return &lsp.ServerCapabilities{ TextDocumentSync: &lsp.TextDocumentSyncOptions{ OpenClose: true, Change: lsp.SyncIncremental, }, CompletionProvider: &lsp.CompletionOptions{}, }, nil } // Completion implements lsp.Handler. 
func (h *Handler) Completion(_ context.Context, params lsp.CompletionParams) (*lsp.CompletionList, error) { currentWord, c, err := h.getCompletionContext(params) if err != nil { return nil, fmt.Errorf("failed to get current word: %w", err) } // TODO(asnyder): // - Swizzling https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/shading_language.html#swizzling // - Constructors https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/shading_language.html#constructing // - Array .length() // - Reference variables and functions // - Struct fields // - Built-in functions https://docs.godotengine.org/en/stable/tutorials/shaders/shader_reference/shader_functions.html# return &lsp.CompletionList{ Items: lo.FilterMap(completionItems, func(item completionItemPredicate, _ int) (lsp.CompletionItem, bool) { return item.item, strings.HasPrefix(item.item.Label, currentWord) && item.predicate(*c) }), }, nil } func (h *Handler) getCompletionContext(params lsp.CompletionParams) (currentWord string, c *completionContext, err error) { doc, ok := h.Documents[params.TextDocument.URI] if !ok { return "", nil, fmt.Errorf("document not found: %s", params.TextDocument.URI) } lineStartPos := params.Position lineStartPos.Character = 0 line, err := h.readBetweenPositions(doc, lineStartPos, params.Position) if err != nil { return "", nil, fmt.Errorf("reading current line: %w", err) } firstLine, err := h.readLine(doc, 0) if err != nil { return "", nil, fmt.Errorf("reading first line: %w", err) } c = &completionContext{} c.functionName, err = h.getCurrentFunction(doc, params.Position) if err != nil { return "", nil, fmt.Errorf("getting current function: %w", err) } firstLineTokens := tokenize(firstLine) if i := slices.Index(firstLineTokens, "shader_type"); i >= 0 && i < len(firstLineTokens)-1 { c.shaderType = firstLineTokens[i+1] } tokens := tokenize(line) if len(tokens) == 0 { return "", c, nil } c.lineTokens = tokens[:len(tokens)-1] return tokens[len(tokens)-1], c, nil } func (h *Handler) readBetweenPositions(doc *lsp.Document, startPos, endPos lsp.Position) ([]byte, error) { startOffset, err := doc.PositionToOffset(startPos) if err != nil { return nil, fmt.Errorf("start position to offset: %w", err) } endOffset, err := doc.PositionToOffset(endPos) if err != nil { return nil, fmt.Errorf("end position to offset: %w", err) } return io.ReadAll(io.NewSectionReader(doc, int64(startOffset), int64(endOffset-startOffset))) } func (h *Handler) readLine(doc *lsp.Document, lineNumber int) ([]byte, error) { startPos := lsp.Position{Line: lineNumber, Character: 0} endPos := lsp.Position{Line: lineNumber + 1, Character: 0} line, err := h.readBetweenPositions(doc, startPos, endPos) if err != nil { return nil, fmt.Errorf("reading line %d: %w", lineNumber, err) } return line, nil } func (h *Handler) getCurrentFunction(doc *lsp.Document, pos lsp.Position) (string, error) { for lineNumber := pos.Line; lineNumber >= 0; lineNumber-- { line, err := h.readLine(doc, lineNumber) if err != nil { return "", fmt.Errorf("reading line %d: %w", lineNumber, err) } tokens := tokenize(line) for i := len(tokens) - 2; i > 1; i-- { if tokens[i] == ")" && tokens[i+1] == "{" { return tokens[1], nil } } } return "", nil } func tokenize(line []byte) []string { var tokens []string isNotWord := func(r rune) bool { return !unicode.IsLetter(r) && !unicode.IsDigit(r) && r != '_' } for { // Skip over any whitespace. line = bytes.TrimLeftFunc(line, unicode.IsSpace) // Capture each punctuation as a separate token. 
if i := bytes.IndexFunc(line, isNotWord); i == 0 { r, size := utf8.DecodeRune(line) tokens = append(tokens, string(r)) line = line[size:] continue } // Capture the word. if i := bytes.IndexFunc(line, isNotWord); i >= 0 { tokens = append(tokens, string(line[:i])) line = line[i:] continue } // This is the last token if len(line) > 0 { tokens = append(tokens, string(line)) } return tokens } } var _ lsp.Handler = &Handler{}
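// Illustrative sketch, not part of the original source: expected output of the
// tokenize helper above. Runs of letters, digits, and underscores form one
// token each, and every punctuation rune becomes its own token:
//
//	tokenize([]byte("uniform vec4 tint : source_color;"))
//	// -> ["uniform" "vec4" "tint" ":" "source_color" ";"]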
// MIT License // // Copyright (c) 2025 Adam Snyder // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. package ast import ( "io" "github.com/alecthomas/participle/v2" ) var parser *participle.Parser[File] func init() { parser = participle.MustBuild[File]() } // Parse parses a .gdshader file into a tree of AST nodes. func Parse(filename string, reader io.Reader) (*File, error) { return parser.Parse(filename, reader, participle.AllowTrailing(true)) }
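// Illustrative usage sketch, not part of the original source (the File node
// type is defined elsewhere in this package); a caller would parse a shader
// roughly like this:
//
//	file, err := ast.Parse("example.gdshader", strings.NewReader("shader_type spatial;\n"))
//	if err != nil {
//		// handle the parse error
//	}
//	_ = file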
// MIT License // // Copyright (c) 2025 Adam Snyder // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. package lsp import ( "bytes" "context" "errors" "fmt" "io" "slices" "strings" "unicode/utf8" "github.com/zyedidia/rope" ) // BufferType represents a buffer implementation. type BufferType int const ( // BufferTypeDefault chooses a buffer for you. BufferTypeDefault BufferType = iota // BufferTypeGap uses a gap buffer for amortized O(1) insertions and // deletions and fast reads, at the cost of poorer random access. BufferTypeGap // BufferTypeRope uses a rope data structure for efficient random // access and insertions/deletions in large documents. BufferTypeRope ) // Filesystem can be embedded into handlers in order to implement the basic // document sync methods of the LSP. type Filesystem struct { Documents map[string]*Document BufferType BufferType } // DidOpenTextDocument implements DocumentSyncHandler. func (f *Filesystem) DidOpenTextDocument(_ context.Context, params DidOpenTextDocumentParams) error { var buf Buffer switch f.BufferType { case BufferTypeGap: buf = &GapBuffer{} case BufferTypeRope: buf = &RopeBuffer{} } if f.Documents == nil { f.Documents = make(map[string]*Document) } f.Documents[params.TextDocument.URI] = NewDocument([]byte(params.TextDocument.Text), buf) return nil } // DidCloseTextDocument implements lsp.Handler. func (f *Filesystem) DidCloseTextDocument(_ context.Context, params DidCloseTextDocumentParams) error { delete(f.Documents, params.TextDocument.URI) return nil } // DidChangeTextDocument implements lsp.Handler. func (f *Filesystem) DidChangeTextDocument(_ context.Context, params DidChangeTextDocumentParams) error { doc, ok := f.Documents[params.TextDocument.URI] if !ok { return fmt.Errorf("document not found: %s", params.TextDocument.URI) } for _, change := range params.ContentChanges { if err := doc.ApplyChange(change); err != nil { return err } } return nil } var _ DocumentSyncHandler = (*Filesystem)(nil) // Buffer implements large text storage with methods for random access. type Buffer interface { io.ReaderAt io.WriterAt // Reset reinitializes the buffer with the given text. Reset(b []byte) // Bytes returns the full content of the buffer as a byte slice. Bytes() []byte // Delete deletes a range of bytes from the buffer. Delete(start, end int) // Len returns the number of bytes in the buffer. Len() int } // Document represents a text document with methods to manipulate its content. 
type Document struct { buffer Buffer lineStart []int cache []byte charBuf []byte } // NewDocument creates a new Document with the given initial text and buffer. // If buf is nil, a GapBuffer is used. func NewDocument(text []byte, buf Buffer) *Document { if buf == nil { buf = &GapBuffer{} } doc := &Document{buffer: buf, charBuf: make([]byte, 1024)} doc.Reset(text) return doc } // Reset reinitializes the document with the given text. func (d *Document) Reset(text []byte) { d.cache = nil d.buffer.Reset(text) d.lineStart = computeLineStart(text) } // Bytes returns the full content of the document. func (d *Document) Bytes() []byte { if d.cache != nil { return d.cache } d.cache = d.buffer.Bytes() return d.cache } // ReadAt implements io.ReaderAt. func (d *Document) ReadAt(p []byte, off int64) (n int, err error) { if d.cache != nil { return copy(p, d.cache[off:]), nil } return d.buffer.ReadAt(p, off) } // Len returns the number of bytes in the document. func (d *Document) Len() int { if d.cache != nil { return len(d.cache) } return d.buffer.Len() } // ApplyChange applies a content change to the document. func (d *Document) ApplyChange(change TextDocumentContentChangeEvent) error { d.cache = nil if len(d.charBuf) == 0 { d.charBuf = make([]byte, 1024) } if change.Range == nil { d.Reset([]byte(change.Text)) return nil } startOffset, endOffset, err := d.getChangeOffsets(change) if err != nil { return fmt.Errorf("get change offsets: %w", err) } if startOffset != endOffset { d.buffer.Delete(startOffset, endOffset) } if change.Text != "" { if err := d.writeText(change.Text, startOffset); err != nil { return fmt.Errorf("write text at offset %d: %w", startOffset, err) } } d.lineStart = updateLineStart(d.lineStart, change, startOffset, endOffset) return nil } func (d *Document) getChangeOffsets(change TextDocumentContentChangeEvent) (start, end int, err error) { startOffset, err := d.PositionToOffset(change.Range.Start) if err != nil { return 0, 0, err } // Optimize for basic typing, where end == start endOffset := startOffset if change.Range.End != change.Range.Start { endOffset, err = d.PositionToOffset(change.Range.End) if err != nil { return 0, 0, err } } return startOffset, endOffset, nil } func (d *Document) writeText(text string, off int) error { var toWrite []byte if len([]byte(text)) <= len(d.charBuf) { n := copy(d.charBuf, text) toWrite = d.charBuf[:n] } else { toWrite = []byte(text) } _, err := d.buffer.WriteAt(toWrite, int64(off)) return err } // PositionToOffset converts a Position (line and character) to a byte offset // in the document. It correctly handles UTF-16 character widths. 
func (d *Document) PositionToOffset(pos Position) (int, error) { if pos.Line >= len(d.lineStart) { return 0, fmt.Errorf("invalid line: %d", pos.Line) } start, end := d.lineBounds(pos.Line) offset, u16Count := start, 0 for offset < end { chunkSize := min(len(d.charBuf), end-offset) n, err := d.buffer.ReadAt(d.charBuf[:chunkSize], int64(offset)) if err != nil && !errors.Is(err, io.EOF) { return 0, fmt.Errorf("buffer read at line %d: %w", pos.Line, err) } if n == 0 { break } deltaOffset, done, err := decodeUntilTargetOffset(d.charBuf[:n], pos.Character, &u16Count) if err != nil { return 0, err } if done { return offset + deltaOffset, nil } offset += n } if u16Count >= pos.Character { return offset, nil } return 0, fmt.Errorf("line %d: target units %d out of bounds (only %d utf16 units)", pos.Line, pos.Character, u16Count) } func (d *Document) lineBounds(line int) (start, end int) { start = d.lineStart[line] if line+1 < len(d.lineStart) { return start, d.lineStart[line+1] } return start, d.buffer.Len() } func decodeUntilTargetOffset(buf []byte, targetU16Offset int, u16Count *int) (deltaOffset int, done bool, err error) { for i := 0; i < len(buf); { r, size := utf8.DecodeRune(buf[i:]) if r == utf8.RuneError && size == 1 { return 0, false, fmt.Errorf("invalid utf-8 at byte offset %d", i) } if *u16Count >= targetU16Offset { return i, true, nil } *u16Count += utf16Width(r) i += size } return 0, false, nil } func utf16Width(r rune) int { if r <= 0xFFFF { return 1 } return 2 } // ArrayBuffer is the simplest implementation of Buffer, using a byte slice // for storage. It is optimized for reads. Insertions and deletions are O(n) // due to slice copying. // // This implementation is not recommended and is mainly used as a testing // benchmark baseline for smarter buffer implementations. type ArrayBuffer struct { data []byte } // Bytes implements Buffer. func (a *ArrayBuffer) Bytes() []byte { return a.data } // Delete implements Buffer. func (a *ArrayBuffer) Delete(start, end int) { a.data = slices.Delete(a.data, start, end) } // Len implements Buffer. func (a *ArrayBuffer) Len() int { return len(a.data) } // ReadAt implements Buffer. func (a *ArrayBuffer) ReadAt(p []byte, off int64) (n int, err error) { n = copy(p, a.data[off:]) return n, nil } // Reset implements Buffer. func (a *ArrayBuffer) Reset(b []byte) { a.data = b } // WriteAt implements Buffer. func (a *ArrayBuffer) WriteAt(p []byte, off int64) (n int, err error) { a.data = slices.Insert(a.data, int(off), p...) return len(p), nil } var _ Buffer = (*ArrayBuffer)(nil) // GapBuffer implements a gap buffer for amortized O(1) insertions and // deletions at the cursor position and O(n) for random access. Its reads // are fast compared to [RopeBuffer]. type GapBuffer struct { buf []byte gapStart int gapEnd int } const initialGapSize = 128 // Bytes implements Buffer. func (g *GapBuffer) Bytes() []byte { return append(g.buf[:g.gapStart], g.buf[g.gapEnd:]...) } // Delete implements Buffer. func (g *GapBuffer) Delete(start, end int) { count := end - start switch g.gapStart { // Specific cases where we do not need to copy data case start: g.gapEnd += count case end: g.gapStart -= count // General case where we put the buffer into a state optimized for a // follow-up write to the start offset. default: g.moveGapTo(start) g.gapEnd += count } } // Len implements Buffer. func (g *GapBuffer) Len() int { return len(g.buf) - g.gapSize() } // ReadAt implements Buffer. 
func (g *GapBuffer) ReadAt(p []byte, off int64) (n int, err error) { if int(off) < g.gapStart { n = copy(p, g.buf[int(off):g.gapStart]) n += copy(p[n:], g.buf[g.gapEnd:]) } else { n = copy(p, g.buf[g.physicalOffset(int(off)):]) } if n < len(p) { err = io.EOF } return n, err } // Reset implements Buffer. func (g *GapBuffer) Reset(b []byte) { g.buf = make([]byte, len(b)+initialGapSize) // preallocate a gap copy(g.buf, b) g.gapStart = len(b) g.gapEnd = len(g.buf) } // WriteAt implements Buffer. func (g *GapBuffer) WriteAt(p []byte, off int64) (n int, err error) { g.moveGapTo(int(off)) g.growGap(len(p) - g.gapSize()) n = copy(g.buf[g.gapStart:], p) g.gapStart += n g.shrinkGapTo(1024) return n, nil } func (g *GapBuffer) physicalOffset(off int) int { if off < g.gapStart { return off } return off + g.gapSize() } func (g *GapBuffer) gapSize() int { return g.gapEnd - g.gapStart } func (g *GapBuffer) moveGapTo(off int) { count := off - g.gapStart switch { case count < 0: copy(g.buf[g.gapEnd+count:], g.buf[off:g.gapStart]) case count > 0: copy(g.buf[g.gapStart:g.gapStart+count], g.buf[g.gapEnd:g.gapEnd+count]) } g.gapEnd += count g.gapStart += count } func (g *GapBuffer) shrinkGapTo(n int) { excess := g.gapSize() - n if excess <= 0 { return } g.buf = slices.Delete(g.buf, g.gapStart, g.gapStart+excess) g.gapEnd -= excess } func (g *GapBuffer) growGap(n int) { if n <= 0 { return } n += initialGapSize g.buf = slices.Insert(g.buf, g.gapEnd, make([]byte, n)...) g.gapEnd += n } var _ Buffer = (*GapBuffer)(nil) // RopeBuffer implements Buffer using a rope data structure. This is best at // scale, for large documents with frequent random insertions and deletions. type RopeBuffer struct { node *rope.Node } // Bytes implements Buffer. func (r *RopeBuffer) Bytes() []byte { return r.node.Value() } // Delete implements Buffer. func (r *RopeBuffer) Delete(start, end int) { r.node.Remove(start, end) } // Len implements Buffer. func (r *RopeBuffer) Len() int { return r.node.Len() } // ReadAt implements Buffer. func (r *RopeBuffer) ReadAt(p []byte, off int64) (n int, err error) { return r.node.ReadAt(p, off) } // Reset implements Buffer. func (r *RopeBuffer) Reset(b []byte) { r.node = rope.New(b) } // WriteAt implements Buffer. func (r *RopeBuffer) WriteAt(p []byte, off int64) (n int, err error) { r.node.Insert(int(off), p) return len(p), nil } var _ Buffer = (*RopeBuffer)(nil) func computeLineStart(text []byte) []int { lineStart := []int{0} for i, b := range text { if b == '\n' { lineStart = append(lineStart, i+1) } } return lineStart } func updateLineStart(lineStart []int, change TextDocumentContentChangeEvent, startOffset, endOffset int) []int { // 1. We know all offsets before the change will remain the same. // 2. Since we know the text inserted, we can calculate any new line // offsets that are created by the change. // 3. Since we know the length of the text inserted, and we are passed // the length of the text removed, we can use those to update the // offsets of the lines after the change. newLinesInserted := strings.Count(change.Text, "\n") newLinesRemoved := change.Range.End.Line - change.Range.Start.Line growth := newLinesInserted - newLinesRemoved switch { case growth > 0: // Shift the existing elements right lineStart = slices.Insert(lineStart, change.Range.End.Line+1, make([]int, growth)...) 
case growth < 0: // Shift the existing elements left lineStart = slices.Delete(lineStart, change.Range.End.Line+growth+1, change.Range.End.Line+1) } if newLinesInserted > 0 { // Save the new offsets asBytes := []byte(change.Text) off := 0 for i := range newLinesInserted { off += bytes.IndexByte(asBytes[off:], '\n') + 1 lineStart[change.Range.Start.Line+i+1] = startOffset + off } } // Update the offsets for lines after the change for i := change.Range.End.Line + 1 + growth; i < len(lineStart); i++ { lineStart[i] += len([]byte(change.Text)) + startOffset - endOffset } return lineStart }
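// Illustrative sketch, not part of the original source: Position.Character is
// counted in UTF-16 code units, so PositionToOffset can return a byte offset
// larger than the character index for non-ASCII text. For a document whose
// only line is "a𝄞b" (U+1D11E is 4 UTF-8 bytes but 2 UTF-16 units):
//
//	doc := NewDocument([]byte("a𝄞b"), nil)
//	off, _ := doc.PositionToOffset(Position{Line: 0, Character: 3})
//	// off == 5: 1 byte for "a" plus 4 bytes for "𝄞"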
// MIT License // // Copyright (c) 2025 Adam Snyder // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in all // copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE // SOFTWARE. package lsp import ( "bufio" "bytes" "context" "encoding/json" "errors" "fmt" "io" "log/slog" "net/textproto" "os" "strconv" ) // DocumentSyncHandler defines methods for handling document synchronization. type DocumentSyncHandler interface { DidOpenTextDocument(ctx context.Context, params DidOpenTextDocumentParams) error DidCloseTextDocument(ctx context.Context, params DidCloseTextDocumentParams) error DidChangeTextDocument(ctx context.Context, params DidChangeTextDocumentParams) error } // Handler provides the logic for handling LSP requests and notifications. type Handler interface { DocumentSyncHandler Initialize(ctx context.Context, clientCapabilities ClientCapabilities) (*ServerCapabilities, error) Completion(ctx context.Context, params CompletionParams) (*CompletionList, error) } // Server manages the LSP server lifecycle and dispatches requests and // notifications to a handler. type Server struct { Stdin io.Reader Stdout io.Writer Info ServerInfo Handler Handler } // Serve runs the LSP server. It blocks until the client sends an "exit" notification. 
func (s *Server) Serve() error { if s.Stdin == nil { s.Stdin = os.Stdin } scanner := bufio.NewScanner(s.Stdin) scanner.Split(jsonRPCSplit) slog.Info("Server is running", "name", s.Info.Name, "version", s.Info.Version) for scanner.Scan() { if !s.processMessage(scanner.Bytes()) { return nil } } slog.Error("Scanner error", "error", scanner.Err()) return scanner.Err() } func (s *Server) processMessage(payload []byte) bool { // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#requestMessage var request struct { ID json.RawMessage `json:"id"` Method string `json:"method"` Params json.RawMessage `json:"params"` } if err := json.Unmarshal(payload, &request); err != nil { slog.Error("Bad request", "error", err) return true } if len(request.ID) == 0 { logger := slog.With("method", request.Method) logger.Debug("Received notification", "params", string(request.Params)) if request.Method == "exit" { logger.Info("Exiting") return false } if err := s.handleNotification(request.Method, request.Params); err != nil { logger.Error("Error handling notification", "error", err) } return true } logger := slog.With("request_id", request.ID, "method", request.Method) debugEnabled := logger.Enabled(context.TODO(), slog.LevelDebug) if debugEnabled { logger.Debug("Received request", "params", string(request.Params)) } response, err := s.handleRequest(request.Method, request.Params) if err != nil { logger.Error("Error handling request", "error", err) var asResponseError *ResponseError if errors.As(err, &asResponseError) { response = asResponseError } else { response = &ResponseError{ Code: CodeInternalError, Message: err.Error(), } } } if err := s.write(request.ID, response); err != nil { logger.Error("Write error", "error", err) return true } if debugEnabled { logger.Debug("Sent response", "response", fmt.Sprintf("%#v", response)) } return true } func jsonRPCSplit(data []byte, _ bool) (advance int, token []byte, err error) { const headerDelimiter = "\r\n\r\n" i := bytes.Index(data, []byte(headerDelimiter)) if i == -1 { return 0, nil, nil } payloadIndex := i + len(headerDelimiter) header, err := textproto.NewReader(bufio.NewReader(bytes.NewReader(data[:payloadIndex]))).ReadMIMEHeader() if err != nil { return 0, nil, fmt.Errorf("bad header: %w", err) } contentLength, err := strconv.Atoi(header.Get("content-length")) if err != nil { return 0, nil, fmt.Errorf("bad content-length: %w", err) } restBytes := data[payloadIndex:] if len(restBytes) < contentLength { return 0, nil, nil } return payloadIndex + contentLength, restBytes[:contentLength], nil } func (s *Server) handleNotification(method string, paramsRaw json.RawMessage) error { switch method { case "initialized": case "cancelRequest": // TODO(asnyder): Handle cancelRequest and make everything // async. 
case "textDocument/didOpen": var params DidOpenTextDocumentParams if err := parseParams(paramsRaw, ¶ms); err != nil { return err } return s.Handler.DidOpenTextDocument(context.TODO(), params) case "textDocument/didClose": var params DidCloseTextDocumentParams if err := parseParams(paramsRaw, ¶ms); err != nil { return err } return s.Handler.DidCloseTextDocument(context.TODO(), params) case "textDocument/didChange": var params DidChangeTextDocumentParams if err := parseParams(paramsRaw, ¶ms); err != nil { return err } return s.Handler.DidChangeTextDocument(context.TODO(), params) default: slog.Warn("Unknown notification", "method", method) } return nil } func (s *Server) handleRequest(method string, paramsRaw json.RawMessage) (any, error) { switch method { case "initialize": // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initializeParams var params struct { ClientInfo struct { Name string `json:"name"` Version string `json:"version"` } `json:"clientInfo"` Capabilities ClientCapabilities `json:"capabilities"` } if err := parseParams(paramsRaw, ¶ms); err != nil { return nil, err } slog.Info("Client info", "name", params.ClientInfo.Name, "version", params.ClientInfo.Version) serverCapabilities, err := s.Handler.Initialize(context.TODO(), params.Capabilities) if err != nil { return nil, err } // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initializeResult return struct { Capabilities *ServerCapabilities `json:"capabilities"` ServerInfo ServerInfo `json:"serverInfo"` }{Capabilities: serverCapabilities, ServerInfo: s.Info}, nil case "shutdown": return nil, nil case "textDocument/completion": var params CompletionParams if err := parseParams(paramsRaw, ¶ms); err != nil { return nil, err } return s.Handler.Completion(context.TODO(), params) default: return nil, &ResponseError{ Code: CodeMethodNotFound, Message: fmt.Sprintf("Unknown method %q", method), } } } func parseParams(paramsRaw json.RawMessage, result any) error { if err := json.Unmarshal(paramsRaw, result); err != nil { return &ResponseError{ Code: CodeInvalidParams, Message: err.Error(), InternalError: err, } } return nil } func (s *Server) write(requestID json.RawMessage, result any) error { // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#responseMessage message := struct { JSONRPC string `json:"jsonrpc"` ID json.RawMessage `json:"id"` Result any `json:"result"` }{JSONRPC: "2.0", ID: requestID, Result: result} data, err := json.Marshal(message) if err != nil { return fmt.Errorf("invalid response: %w", err) } if s.Stdout == nil { s.Stdout = os.Stdout } _, err = s.Stdout.Write(append([]byte("Content-Length: "+strconv.Itoa(len(data))+"\r\nContent-Type: application/vscode-jsonrpc; charset=utf-8\r\n\r\n"), data...)) return err }
// MIT License
//
// Copyright (c) 2025 Adam Snyder
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package lsp

import "fmt"

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#clientCapabilities
type ClientCapabilities struct{}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#serverCapabilities
type ServerCapabilities struct {
    TextDocumentSync      *TextDocumentSyncOptions `json:"textDocumentSync,omitempty"`
    CompletionProvider    *CompletionOptions       `json:"completionProvider,omitempty"`
    HoverProvider         bool                     `json:"hoverProvider,omitempty"`
    DefinitionProvider    bool                     `json:"definitionProvider,omitempty"`
    ReferencesProvider    bool                     `json:"referencesProvider,omitempty"`
    SignatureHelpProvider bool                     `json:"signatureHelpProvider,omitempty"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentSyncOptions
type TextDocumentSyncOptions struct {
    OpenClose bool                 `json:"openClose,omitempty"`
    Change    TextDocumentSyncKind `json:"change"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionOptions
type CompletionOptions struct{}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentSyncKind
type TextDocumentSyncKind int

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentSyncKind
const (
    SyncNone        TextDocumentSyncKind = 0
    SyncFull        TextDocumentSyncKind = 1
    SyncIncremental TextDocumentSyncKind = 2
)

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#initializeResult
type ServerInfo struct {
    Name    string `json:"name"`
    Version string `json:"version"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#responseError
type ResponseError struct {
    Code          ErrorCode `json:"code"`
    Message       string    `json:"message"`
    InternalError error     `json:"-"`
}

func (e *ResponseError) Error() string {
    msg := fmt.Sprintf("error code %d: %s", e.Code, e.Message)
    if e.InternalError != nil {
        msg += fmt.Sprintf(": %s", e.InternalError.Error())
    }
    return msg
}

func (e *ResponseError) Unwrap() error {
    return e.InternalError
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#errorCodes
type ErrorCode int

// LSP error codes.
const (
    CodeParseError     ErrorCode = -32700
    CodeInvalidRequest ErrorCode = -32600
    CodeMethodNotFound ErrorCode = -32601
    CodeInvalidParams  ErrorCode = -32602
    CodeInternalError  ErrorCode = -32603
)

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#didOpenTextDocumentParams
type DidOpenTextDocumentParams struct {
    TextDocument TextDocumentItem `json:"textDocument"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#didChangeTextDocumentParams
type DidChangeTextDocumentParams struct {
    TextDocument   TextDocumentIdentifier           `json:"textDocument"`
    ContentChanges []TextDocumentContentChangeEvent `json:"contentChanges"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentContentChangeEvent
type TextDocumentContentChangeEvent struct {
    Text  string `json:"text"`
    Range *Range `json:"range,omitempty"`
}

func (e TextDocumentContentChangeEvent) String() string {
    return fmt.Sprintf("%q @ %s", e.Text, e.Range)
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#didCloseTextDocumentParams
type DidCloseTextDocumentParams struct {
    TextDocument TextDocumentIdentifier `json:"textDocument"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#position
type Position struct {
    Line      int `json:"line"`
    Character int `json:"character"`
}

func (p Position) String() string {
    return fmt.Sprintf("%d:%d", p.Line, p.Character)
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#range
type Range struct {
    Start Position `json:"start"`
    End   Position `json:"end"`
}

func (r Range) String() string {
    return fmt.Sprintf("%s-%s", r.Start, r.End)
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentItem
type TextDocumentItem struct {
    URI  string `json:"uri"`
    Text string `json:"text"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentIdentifier
type TextDocumentIdentifier struct {
    URI string `json:"uri"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocumentPositionParams
type TextDocumentPositionParams struct {
    TextDocument TextDocumentIdentifier `json:"textDocument"`
    Position     Position               `json:"position"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#location
type Location struct {
    URI   string `json:"uri"`
    Range Range  `json:"range"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionParams
type CompletionParams struct {
    TextDocumentPositionParams
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionList
type CompletionList struct {
    IsIncomplete bool             `json:"isIncomplete"`
    Items        []CompletionItem `json:"items"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionItem
type CompletionItem struct {
    Label         string             `json:"label"`
    Kind          CompletionItemKind `json:"kind,omitempty"`
    Detail        string             `json:"detail,omitempty"`
    Documentation *MarkupContent     `json:"documentation,omitempty"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionItemKind
type CompletionItemKind int
// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#completionItemKind
const (
    CompletionText          CompletionItemKind = 1
    CompletionMethod        CompletionItemKind = 2
    CompletionFunction      CompletionItemKind = 3
    CompletionConstructor   CompletionItemKind = 4
    CompletionField         CompletionItemKind = 5
    CompletionVariable      CompletionItemKind = 6
    CompletionClass         CompletionItemKind = 7
    CompletionInterface     CompletionItemKind = 8
    CompletionModule        CompletionItemKind = 9
    CompletionProperty      CompletionItemKind = 10
    CompletionUnit          CompletionItemKind = 11
    CompletionValue         CompletionItemKind = 12
    CompletionEnum          CompletionItemKind = 13
    CompletionKeyword       CompletionItemKind = 14
    CompletionSnippet       CompletionItemKind = 15
    CompletionColor         CompletionItemKind = 16
    CompletionFile          CompletionItemKind = 17
    CompletionReference     CompletionItemKind = 18
    CompletionFolder        CompletionItemKind = 19
    CompletionEnumMember    CompletionItemKind = 20
    CompletionConstant      CompletionItemKind = 21
    CompletionStruct        CompletionItemKind = 22
    CompletionEvent         CompletionItemKind = 23
    CompletionOperator      CompletionItemKind = 24
    CompletionTypeParameter CompletionItemKind = 25
)

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#markupContentInnerDefinition
type MarkupContent struct {
    Kind  MarkupKind `json:"kind"`
    Value string     `json:"value"`
}

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#markupContent
type MarkupKind string

// https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#markupContent
const (
    MarkupPlainText MarkupKind = "plaintext"
    MarkupMarkdown  MarkupKind = "markdown"
)
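// Illustrative sketch, not part of the package above: constructing a
// CompletionList with a markdown-documented item, roughly as a Completion
// handler might return it, and printing its wire shape. The label and
// documentation text are hypothetical example values.
package lsp_test

import (
    "encoding/json"
    "fmt"

    "github.com/armsnyder/gdshader-language-server/internal/lsp"
)

func ExampleCompletionList() {
    list := &lsp.CompletionList{
        Items: []lsp.CompletionItem{{
            Label:  "vec3",
            Kind:   lsp.CompletionClass,
            Detail: "Three-component vector of floating-point values.",
            Documentation: &lsp.MarkupContent{
                Kind:  lsp.MarkupMarkdown,
                Value: "Three-component vector of floating-point values.",
            },
        }},
    }

    // Marshals to the JSON the client receives in the completion response,
    // with omitempty suppressing unset optional fields.
    data, _ := json.Marshal(list)
    fmt.Println(string(data))
}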
// MIT License
//
// Copyright (c) 2025 Adam Snyder
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package testutil

import (
    "io"
    "log/slog"
    "path/filepath"
    "testing"
)

// SetupLogger configures the default slog logger for testing.
func SetupLogger(t testing.TB) {
    originalHandler := slog.Default().Handler()
    t.Cleanup(func() {
        slog.SetDefault(slog.New(originalHandler))
    })

    slog.SetDefault(slog.New(slog.NewTextHandler(TestWriter{t}, &slog.HandlerOptions{
        Level:     slog.LevelDebug,
        AddSource: true,
        ReplaceAttr: func(_ []string, a slog.Attr) slog.Attr {
            switch a.Key {
            case slog.TimeKey:
                // Suppress time
                return slog.Attr{}
            case slog.SourceKey:
                // Simplify file name
                source := a.Value.Any().(*slog.Source) //nolint:revive
                source.File = filepath.Base(source.File)
            }
            return a
        },
    })))
}

// TestWriter is an io.Writer that writes to test logs.
type TestWriter struct {
    T testing.TB
}

// Write implements io.Writer.
func (t TestWriter) Write(p []byte) (n int, err error) {
    t.T.Helper()
    t.T.Logf("%s", p)
    return len(p), nil
}

var _ io.Writer = TestWriter{}
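// Illustrative sketch, not part of the package above: how a test might use
// SetupLogger so slog output is routed to the test log for the duration of
// the test and the previous handler is restored afterwards. The test name and
// the testutil import path are assumptions for the sake of the example.
package testutil_test

import (
    "log/slog"
    "testing"

    "github.com/armsnyder/gdshader-language-server/internal/testutil"
)

func TestSomething(t *testing.T) {
    // Route the default slog logger through t.Logf; the cleanup registered by
    // SetupLogger restores the original handler when this test finishes.
    testutil.SetupLogger(t)

    slog.Debug("this line shows up in `go test -v` output, attributed to this test")
}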
// MIT License
//
// Copyright (c) 2025 Adam Snyder
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in all
// copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
// SOFTWARE.

package main

import (
    _ "embed"
    "flag"
    "log/slog"
    "os"
    "path/filepath"
    "strings"

    "github.com/armsnyder/gdshader-language-server/internal/app"
    "github.com/armsnyder/gdshader-language-server/internal/lsp"
)

//go:embed version.txt
var version string

func main() {
    var flags struct {
        Debug bool
    }

    flag.BoolVar(&flags.Debug, "debug", false, "Enable debug logging")
    flag.Parse()

    setupLogger(flags.Debug)

    server := &lsp.Server{
        Info: lsp.ServerInfo{
            Name:    "gdshader-language-server",
            Version: strings.TrimSpace(version),
        },
        Handler: &app.Handler{},
    }

    if err := server.Serve(); err != nil {
        os.Exit(1)
    }
}

func setupLogger(debug bool) { //nolint:revive
    level := slog.LevelInfo
    if debug {
        level = slog.LevelDebug
    }

    slog.SetDefault(slog.New(slog.NewTextHandler(os.Stderr, &slog.HandlerOptions{
        Level:     level,
        AddSource: true,
        ReplaceAttr: func(_ []string, a slog.Attr) slog.Attr {
            switch a.Key {
            case slog.TimeKey:
                // Suppress time
                return slog.Attr{}
            case slog.SourceKey:
                // Simplify file name
                source := a.Value.Any().(*slog.Source) //nolint:revive
                source.File = filepath.Base(source.File)
            }
            return a
        },
    })))
}
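// Illustrative sketch, not part of this repository: launching the built binary
// the way an editor would, speaking LSP over stdin/stdout while -debug logs go
// to stderr. The binary name on PATH, the frame helper, and the raw request
// bytes are assumptions made only for this example.
package main

import (
    "fmt"
    "os"
    "os/exec"
    "strings"
)

// frame wraps a JSON-RPC payload in the Content-Length header the server expects.
func frame(payload string) string {
    return fmt.Sprintf("Content-Length: %d\r\n\r\n%s", len(payload), payload)
}

func main() {
    // An initialize request followed by an exit notification, so the server
    // process terminates after responding.
    input := frame(`{"jsonrpc":"2.0","id":1,"method":"initialize","params":{"capabilities":{}}}`) +
        frame(`{"jsonrpc":"2.0","method":"exit"}`)

    cmd := exec.Command("gdshader-language-server", "-debug")
    cmd.Stdin = strings.NewReader(input)
    cmd.Stdout = os.Stdout // framed initialize response
    cmd.Stderr = os.Stderr // slog debug output

    if err := cmd.Run(); err != nil {
        fmt.Fprintln(os.Stderr, "run error:", err)
    }
}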