package main import ( "log" "net" "os" "strconv" eng "github.com/PDOK/gokoala/internal/engine" "github.com/PDOK/gokoala/internal/ogc" "github.com/urfave/cli/v2" _ "go.uber.org/automaxprocs" ) var ( cliFlags = []cli.Flag{ &cli.StringFlag{ Name: "host", Usage: "bind host for OGC server", Value: "0.0.0.0", Required: false, EnvVars: []string{"HOST"}, }, &cli.IntFlag{ Name: "port", Usage: "bind port for OGC server", Value: 8080, Required: false, EnvVars: []string{"PORT"}, }, &cli.IntFlag{ Name: "debug-port", Usage: "bind port for debug server (disabled by default), do not expose this port publicly", Value: -1, Required: false, EnvVars: []string{"DEBUG_PORT"}, }, &cli.IntFlag{ Name: "shutdown-delay", Usage: "delay (in seconds) before initiating graceful shutdown (e.g. useful in k8s to allow ingress controller to update their endpoints list)", Value: 0, Required: false, EnvVars: []string{"SHUTDOWN_DELAY"}, }, &cli.StringFlag{ Name: "config-file", Usage: "reference to YAML configuration file", Required: true, EnvVars: []string{"CONFIG_FILE"}, }, &cli.StringFlag{ Name: "openapi-file", Usage: "reference to a (customized) OGC OpenAPI spec for the dynamic parts of your OGC API", Required: false, EnvVars: []string{"OPENAPI_FILE"}, }, &cli.BoolFlag{ Name: "enable-trailing-slash", Usage: "allow API calls to URLs with a trailing slash.", Value: false, // to satisfy https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-48 Required: false, EnvVars: []string{"ALLOW_TRAILING_SLASH"}, }, &cli.BoolFlag{ Name: "enable-cors", Usage: "enable Cross-Origin Resource Sharing (CORS) as required by OGC API specs. 
Disable if you handle CORS elsewhere.", Value: false, Required: false, EnvVars: []string{"ENABLE_CORS"}, }, } ) func main() { app := cli.NewApp() app.Name = "GoKoala" app.Usage = "Cloud Native OGC APIs server, written in Go" app.Flags = cliFlags app.Action = func(c *cli.Context) error { log.Printf("%s - %s\n", app.Name, app.Usage) address := net.JoinHostPort(c.String("host"), strconv.Itoa(c.Int("port"))) debugPort := c.Int("debug-port") shutdownDelay := c.Int("shutdown-delay") configFile := c.String("config-file") openAPIFile := c.String("openapi-file") trailingSlash := c.Bool("enable-trailing-slash") cors := c.Bool("enable-cors") // Engine encapsulates shared non-OGC API specific logic engine, err := eng.NewEngine(configFile, openAPIFile, trailingSlash, cors) if err != nil { return err } // Each OGC API building block makes use of said Engine ogc.SetupBuildingBlocks(engine) return engine.Start(address, debugPort, shutdownDelay) } err := app.Run(os.Args) if err != nil { log.Fatal(err) } }
package config

import (
	"encoding/json"
	"log"
	"sort"

	"dario.cat/mergo"
	orderedmap "github.com/wk8/go-ordered-map/v2"
	"gopkg.in/yaml.v3"
)

// GeoSpatialCollections is a list of GeoSpatialCollection entries.
type GeoSpatialCollections []GeoSpatialCollection

// GeoSpatialCollection is a single collection offered through this OGC API.
// It can carry 3D GeoVolumes, Tiles and/or Features specific configuration.
// +kubebuilder:object:generate=true
type GeoSpatialCollection struct {
	// Unique ID of the collection
	// +kubebuilder:validation:Pattern=`^[a-z0-9"]([a-z0-9_-]*[a-z0-9"]+|)$`
	ID string `yaml:"id" validate:"required,lowercase_id" json:"id"`

	// Metadata describing the collection contents
	// +optional
	Metadata *GeoSpatialCollectionMetadata `yaml:"metadata,omitempty" json:"metadata,omitempty"`

	// Links pertaining to this collection (e.g., downloads, documentation)
	// +optional
	Links *CollectionLinks `yaml:"links,omitempty" json:"links,omitempty"`

	// 3D GeoVolumes specific to this collection
	// +optional
	GeoVolumes *CollectionEntry3dGeoVolumes `yaml:",inline" json:",inline"`

	// Tiles specific to this collection
	// +optional
	Tiles *CollectionEntryTiles `yaml:",inline" json:",inline"`

	// Features specific to this collection
	// +optional
	Features *CollectionEntryFeatures `yaml:",inline" json:",inline"`
}

// GeoSpatialCollectionJSON mirrors GeoSpatialCollection but uses embedded
// (instead of named) struct pointers, since encoding/json only supports
// ",inline"-style flattening for embedded structs.
type GeoSpatialCollectionJSON struct {
	// Keep this in sync with the GeoSpatialCollection struct!
	ID                           string                        `json:"id"`
	Metadata                     *GeoSpatialCollectionMetadata `json:"metadata,omitempty"`
	Links                        *CollectionLinks              `json:"links,omitempty"`
	*CollectionEntry3dGeoVolumes `json:",inline"`
	*CollectionEntryTiles        `json:",inline"`
	*CollectionEntryFeatures     `json:",inline"`
}

// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (c GeoSpatialCollection) MarshalJSON() ([]byte, error) {
	// Copy the named fields into the embedded-struct mirror so the
	// GeoVolumes/Tiles/Features parts are flattened into the JSON output.
	return json.Marshal(GeoSpatialCollectionJSON{
		ID:                          c.ID,
		Metadata:                    c.Metadata,
		Links:                       c.Links,
		CollectionEntry3dGeoVolumes: c.GeoVolumes,
		CollectionEntryTiles:        c.Tiles,
		CollectionEntryFeatures:     c.Features,
	})
}

// UnmarshalJSON parses a string to GeoSpatialCollection.
// JSON is a subset of YAML, so delegating to the YAML unmarshaller also
// handles the ",inline" fields which encoding/json cannot.
func (c *GeoSpatialCollection) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}

// HasDateTime true when collection has temporal support, false otherwise
func (c *GeoSpatialCollection) HasDateTime() bool {
	return c.Metadata != nil && c.Metadata.TemporalProperties != nil
}

// GeoSpatialCollectionMetadata holds descriptive metadata of a collection.
// +kubebuilder:object:generate=true
type GeoSpatialCollectionMetadata struct {
	// Human friendly title of this collection. When no title is specified the collection ID is used.
	// +optional
	Title *string `yaml:"title,omitempty" json:"title,omitempty"`

	// Describes the content of this collection
	Description *string `yaml:"description" json:"description" validate:"required"`

	// Reference to a PNG image to use as thumbnail on the collections.
	// The full path is constructed by appending Resources + Thumbnail.
	// +optional
	Thumbnail *string `yaml:"thumbnail,omitempty" json:"thumbnail,omitempty"`

	// Keywords to make this collection better discoverable
	// +optional
	Keywords []string `yaml:"keywords,omitempty" json:"keywords,omitempty"`

	// Moment in time when the collection was last updated
	//
	// +optional
	// +kubebuilder:validation:Type=string
	// +kubebuilder:validation:Format="date-time"
	LastUpdated *string `yaml:"lastUpdated,omitempty" json:"lastUpdated,omitempty" validate:"omitempty,datetime=2006-01-02T15:04:05Z"`

	// Who updated this collection
	// +optional
	LastUpdatedBy string `yaml:"lastUpdatedBy,omitempty" json:"lastUpdatedBy,omitempty"`

	// Fields in the datasource to be used in temporal queries
	// +optional
	TemporalProperties *TemporalProperties `yaml:"temporalProperties,omitempty" json:"temporalProperties,omitempty" validate:"omitempty,required_with=Extent.Interval"`

	// Extent of the collection, both geospatial and/or temporal
	// +optional
	Extent *Extent `yaml:"extent,omitempty" json:"extent,omitempty"`

	// The CRS identifier which the features are originally stored, meaning no CRS transformations are applied when features are retrieved in this CRS.
	// WGS84 is the default storage CRS.
	//
	// +kubebuilder:default="http://www.opengis.net/def/crs/OGC/1.3/CRS84"
	// +kubebuilder:validation:Pattern=`^http:\/\/www\.opengis\.net\/def\/crs\/.*$`
	// +optional
	StorageCrs *string `yaml:"storageCrs,omitempty" json:"storageCrs,omitempty" default:"http://www.opengis.net/def/crs/OGC/1.3/CRS84" validate:"startswith=http://www.opengis.net/def/crs"`
}

// Extent describes the geospatial and/or temporal extent of a collection.
// +kubebuilder:object:generate=true
type Extent struct {
	// Projection (SRS/CRS) to be used. When none is provided WGS84 (http://www.opengis.net/def/crs/OGC/1.3/CRS84) is used.
	// +optional
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs,omitempty" json:"srs,omitempty" validate:"omitempty,startswith=EPSG:"`

	// Geospatial extent
	Bbox []string `yaml:"bbox" json:"bbox"`

	// Temporal extent
	// +optional
	// +kubebuilder:validation:MinItems=2
	// +kubebuilder:validation:MaxItems=2
	Interval []string `yaml:"interval,omitempty" json:"interval,omitempty" validate:"omitempty,len=2"`
}

// CollectionLinks groups links pertaining to a collection.
// +kubebuilder:object:generate=true
type CollectionLinks struct {
	// Links to downloads of entire collection. These will be rendered as rel=enclosure links
	// +optional
	Downloads []DownloadLink `yaml:"downloads,omitempty" json:"downloads,omitempty" validate:"dive"`

	// Links to documentation describing the collection. These will be rendered as rel=describedby links
	// <placeholder>
}

// DownloadLink describes a single downloadable file offered for a collection.
// +kubebuilder:object:generate=true
type DownloadLink struct {
	// Name of the provided download
	Name string `yaml:"name" json:"name" validate:"required"`

	// Full URL to the file to be downloaded
	AssetURL *URL `yaml:"assetUrl" json:"assetUrl" validate:"required"`

	// Approximate size of the file to be downloaded
	// +optional
	Size string `yaml:"size,omitempty" json:"size,omitempty"`

	// Media type of the file to be downloaded
	MediaType MediaType `yaml:"mediaType" json:"mediaType" validate:"required"`
}

// HasCollections does this API offer collections with for example features, tiles, 3d tiles, etc
func (c *Config) HasCollections() bool {
	return c.AllCollections() != nil
}

// AllCollections get all collections - with for example features, tiles, 3d tiles - offered through this OGC API.
// Results are returned in alphabetic or literal order.
func (c *Config) AllCollections() GeoSpatialCollections {
	var result GeoSpatialCollections
	if c.OgcAPI.GeoVolumes != nil {
		result = append(result, c.OgcAPI.GeoVolumes.Collections...)
	}
	if c.OgcAPI.Tiles != nil {
		result = append(result, c.OgcAPI.Tiles.Collections...)
	}
	if c.OgcAPI.Features != nil {
		result = append(result, c.OgcAPI.Features.Collections...)
	}

	// sort
	if len(c.OgcAPICollectionOrder) > 0 {
		sortByLiteralOrder(result, c.OgcAPICollectionOrder)
	} else {
		sortByAlphabet(result)
	}
	return result
}

// Unique lists all unique GeoSpatialCollections (no duplicate IDs).
// Don't use in hot path (creates a map on every invocation).
func (g GeoSpatialCollections) Unique() []GeoSpatialCollection {
	collectionsByID := g.toMap()
	result := make([]GeoSpatialCollection, 0, collectionsByID.Len())
	// iterate in insertion order to keep the output deterministic
	for pair := collectionsByID.Oldest(); pair != nil; pair = pair.Next() {
		result = append(result, pair.Value)
	}
	return result
}

// ContainsID check if given collection - by ID - exists.
// Don't use in hot path (creates a map on every invocation).
func (g GeoSpatialCollections) ContainsID(id string) bool {
	collectionsByID := g.toMap()
	_, ok := collectionsByID.Get(id)
	return ok
}

// toMap indexes the collections by ID, merging entries that share the same ID
// (e.g. one collection configured for both tiles and features).
func (g GeoSpatialCollections) toMap() orderedmap.OrderedMap[string, GeoSpatialCollection] {
	collectionsByID := orderedmap.New[string, GeoSpatialCollection]()
	for _, current := range g {
		existing, ok := collectionsByID.Get(current.ID)
		if ok {
			// merge duplicate IDs into a single combined collection entry
			err := mergo.Merge(&existing, current)
			if err != nil {
				log.Fatalf("failed to merge 2 collections with the same name '%s': %v", current.ID, err)
			}
			collectionsByID.Set(current.ID, existing)
		} else {
			collectionsByID.Set(current.ID, current)
		}
	}
	return *collectionsByID
}

// sortByAlphabet sorts collections alphabetically, preferring the metadata
// title over the collection ID when a title is available.
func sortByAlphabet(collection []GeoSpatialCollection) {
	sort.Slice(collection, func(i, j int) bool {
		iName := collection[i].ID
		jName := collection[j].ID
		// prefer to sort by title when available, collection ID otherwise
		if collection[i].Metadata != nil && collection[i].Metadata.Title != nil {
			iName = *collection[i].Metadata.Title
		}
		if collection[j].Metadata != nil && collection[j].Metadata.Title != nil {
			jName = *collection[j].Metadata.Title
		}
		return iName < jName
	})
}

// sortByLiteralOrder sorts collections according to the explicit order given
// in literalOrder (i.e. the configured OgcAPICollectionOrder).
// NOTE(review): IDs that do not appear in literalOrder map to index 0 and
// therefore sort before (or interleaved with) the first listed ID — confirm
// this is the intended behavior for partially specified orderings.
func sortByLiteralOrder(collections []GeoSpatialCollection, literalOrder []string) {
	collectionOrderIndex := make(map[string]int)
	for i, id := range literalOrder {
		collectionOrderIndex[id] = i
	}
	sort.Slice(collections, func(i, j int) bool {
		// sort according to the explicit/literal order specified in OgcAPICollectionOrder
		return collectionOrderIndex[collections[i].ID] < collectionOrderIndex[collections[j].ID]
	})
}
//go:generate ../hack/generate-deepcopy.sh package config import ( "errors" "fmt" "os" "github.com/creasty/defaults" "github.com/go-playground/validator/v10" "golang.org/x/text/language" "gopkg.in/yaml.v3" ) const ( CookieMaxAge = 60 * 60 * 24 DefaultSrs = "EPSG:28992" ) // NewConfig read YAML config file, required to start GoKoala func NewConfig(configFile string) (*Config, error) { yamlData, err := os.ReadFile(configFile) if err != nil { return nil, fmt.Errorf("failed to read config file %w", err) } // expand environment variables yamlData = []byte(os.ExpandEnv(string(yamlData))) var config *Config err = yaml.Unmarshal(yamlData, &config) if err != nil { return nil, fmt.Errorf("failed to unmarshal config file, error: %w", err) } err = validateLocalPaths(config) if err != nil { return nil, fmt.Errorf("validation error in config file, error: %w", err) } return config, nil } // +kubebuilder:object:generate=true type Config struct { // Version of the API. When releasing a new version which contains backwards-incompatible changes, a new major version must be released. Version string `yaml:"version" json:"version" validate:"required,semver"` // Human friendly title of the API. Don't include "OGC API" in the title, this is added automatically. Title string `yaml:"title" json:"title" validate:"required"` // Shorted title / abbreviation describing the API. ServiceIdentifier string `yaml:"serviceIdentifier" json:"serviceIdentifier" validate:"required"` // Human friendly description of the API and dataset. 
Abstract string `yaml:"abstract" json:"abstract" validate:"required"` // Licensing term that apply to this API and dataset License License `yaml:"license" json:"license" validate:"required"` // The base URL - that's the part until the OGC API landing page - under which this API is served BaseURL URL `yaml:"baseUrl" json:"baseUrl" validate:"required"` // Optional reference to a catalog/portal/registry that lists all datasets, not just this one // +optional DatasetCatalogURL URL `yaml:"datasetCatalogUrl,omitempty" json:"datasetCatalogUrl,omitempty"` // The languages/translations to offer, valid options are Dutch (nl) and English (en). Dutch is the default. // +optional AvailableLanguages []Language `yaml:"availableLanguages,omitempty" json:"availableLanguages,omitempty"` // Define which OGC API building blocks this API supports OgcAPI OgcAPI `yaml:"ogcApi" json:"ogcApi" validate:"required"` // Order in which collections (containing features, tiles, 3d tiles, etc.) should be returned. // When not specified collections are returned in alphabetic order. // +optional OgcAPICollectionOrder []string `yaml:"collectionOrder,omitempty" json:"collectionOrder,omitempty"` // Reference to a PNG image to use a thumbnail on the landing page. // The full path is constructed by appending Resources + Thumbnail. 
// +optional Thumbnail *string `yaml:"thumbnail,omitempty" json:"thumbnail,omitempty"` // Keywords to make this API beter discoverable // +optional Keywords []string `yaml:"keywords,omitempty" json:"keywords,omitempty"` // Moment in time when the dataset was last updated // +optional // +kubebuilder:validation:Type=string // +kubebuilder:validation:Format="date-time" LastUpdated *string `yaml:"lastUpdated,omitempty" json:"lastUpdated,omitempty" validate:"omitempty,datetime=2006-01-02T15:04:05Z"` // Who updated the dataset // +optional LastUpdatedBy string `yaml:"lastUpdatedBy,omitempty" json:"lastUpdatedBy,omitempty"` // Available support channels // +optional Support *Support `yaml:"support,omitempty" json:"support,omitempty"` // Metadata links // +optional MetadataLinks []MetadataLink `yaml:"metadataLinks,omitempty" json:"metadataLinks,omitempty"` // Key/value pairs to add extra information to the landing page // +optional DatasetDetails []DatasetDetail `yaml:"datasetDetails,omitempty" json:"datasetDetails,omitempty"` // Location where resources (e.g. thumbnails) specific to the given dataset are hosted // +optional Resources *Resources `yaml:"resources,omitempty" json:"resources,omitempty"` } // UnmarshalYAML hooks into unmarshalling to set defaults and validate config func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error { type cfg Config if err := unmarshal((*cfg)(c)); err != nil { return err } // init config if err := setDefaults(c); err != nil { return err } if err := validate(c); err != nil { return err } return nil } func (c *Config) UnmarshalJSON(b []byte) error { return yaml.Unmarshal(b, c) } func (c *Config) CookieMaxAge() int { return CookieMaxAge } // +kubebuilder:object:generate=true type OgcAPI struct { // Enable when this API should offer OGC API 3D GeoVolumes. This includes OGC 3D Tiles. 
// +optional GeoVolumes *OgcAPI3dGeoVolumes `yaml:"3dgeovolumes,omitempty" json:"3dgeovolumes,omitempty"` // Enable when this API should offer OGC API Tiles. This also requires OGC API Styles. // +optional Tiles *OgcAPITiles `yaml:"tiles,omitempty" json:"tiles,omitempty" validate:"required_with=Styles"` // Enable when this API should offer OGC API Styles. // +optional Styles *OgcAPIStyles `yaml:"styles,omitempty" json:"styles,omitempty"` // Enable when this API should offer OGC API Features. // +optional Features *OgcAPIFeatures `yaml:"features,omitempty" json:"features,omitempty"` // Enable when this API should offer OGC API Processes. // +optional Processes *OgcAPIProcesses `yaml:"processes,omitempty" json:"processes,omitempty"` } // +kubebuilder:object:generate=true type Support struct { // Name of the support organization Name string `yaml:"name" json:"name" validate:"required"` // URL to external support webpage // +kubebuilder:validation:Type=string URL URL `yaml:"url" json:"url" validate:"required"` // Email for support questions // +optional Email string `yaml:"email,omitempty" json:"email,omitempty" validate:"omitempty,email"` } // +kubebuilder:object:generate=true type MetadataLink struct { // Name of the metadata collection/site/organization Name string `yaml:"name" json:"name" validate:"required"` // Which category of the API this metadata concerns. E.g. 
dataset (in general), tiles or features // +kubebuilder:default="dataset" Category string `yaml:"category" json:"category" validate:"required" default:"dataset"` // URL to external metadata detail page // +kubebuilder:validation:Type=string URL URL `yaml:"url" json:"url" validate:"required"` } // +kubebuilder:object:generate=true type DatasetDetail struct { // Arbitrary name to add extra information to the landing page Name string `yaml:"name" json:"name"` // Arbitrary value associated with the given name Value string `yaml:"value" json:"value"` } // +kubebuilder:object:generate=true type Resources struct { // Location where resources (e.g. thumbnails) specific to the given dataset are hosted. This is optional if Directory is set // +optional URL *URL `yaml:"url,omitempty" json:"url,omitempty" validate:"required_without=Directory,omitempty"` // Location where resources (e.g. thumbnails) specific to the given dataset are hosted. This is optional if URL is set // +optional Directory *string `yaml:"directory,omitempty" json:"directory,omitempty" validate:"required_without=URL,omitempty,dirpath|filepath"` } // +kubebuilder:object:generate=true type License struct { // Name of the license, e.g. 
MIT, CC0, etc Name string `yaml:"name" json:"name" validate:"required"` // URL to license text on the web URL URL `yaml:"url" json:"url" validate:"required"` } func setDefaults(config *Config) error { // process 'default' tags if err := defaults.Set(config); err != nil { return fmt.Errorf("failed to set default configuration: %w", err) } // custom default logic if len(config.AvailableLanguages) == 0 { config.AvailableLanguages = append(config.AvailableLanguages, Language{language.Dutch}) // default to Dutch only } if config.OgcAPI.Tiles != nil { config.OgcAPI.Tiles.Defaults() } return nil } func validate(config *Config) error { // process 'validate' tags v := validator.New() err := v.RegisterValidation(lowercaseID, LowercaseID) if err != nil { return err } err = v.Struct(config) if err != nil { var ive *validator.InvalidValidationError if ok := errors.Is(err, ive); ok { return fmt.Errorf("failed to validate config: %w", err) } var errMessages []string var valErrs validator.ValidationErrors if errors.As(err, &valErrs) { for _, valErr := range valErrs { errMessages = append(errMessages, valErr.Error()+"\n") } } return fmt.Errorf("invalid config provided:\n%v", errMessages) } // custom validations var errs []error if config.OgcAPI.Features != nil { errs = append(errs, validateFeatureCollections(config.OgcAPI.Features.Collections)) } if config.OgcAPI.Tiles != nil { errs = append(errs, validateTileProjections(config.OgcAPI.Tiles)) } err = errors.Join(errs...) if err != nil { return err } return nil } // validateLocalPaths validates the existence of local paths. // Not suitable for general validation while unmarshalling. // Because that could happen on another machine. func validateLocalPaths(config *Config) error { // Could use a deep dive and reflection. // But the settings with a path are not recursive and relatively limited in numbers. // GeoPackageCloudCache.Path is not verified. It will be created anyway in cloud_sqlite_vfs.createCacheDir during startup time. 
if config.Resources != nil && config.Resources.Directory != nil && *config.Resources.Directory != "" && !isExistingLocalDir(*config.Resources.Directory) { return errors.New("Config.Resources.Directory should be an existing directory: " + *config.Resources.Directory) } if config.OgcAPI.Styles != nil && !isExistingLocalDir(config.OgcAPI.Styles.StylesDir) { return errors.New("Config.OgcAPI.Styles.StylesDir should be an existing directory: " + config.OgcAPI.Styles.StylesDir) } return nil } func isExistingLocalDir(path string) bool { fileInfo, err := os.Stat(path) return err == nil && fileInfo.IsDir() }
package config import ( "encoding/json" "time" "gopkg.in/yaml.v3" ) // Duration Custom time.Duration compatible with YAML and JSON (un)marshalling and kubebuilder. // (Already supported in yaml/v3 but not encoding/json.) // // +kubebuilder:validation:Type=string // +kubebuilder:validation:Format=duration type Duration struct { time.Duration } // MarshalJSON turn duration tag into JSON // Value instead of pointer receiver because only that way it can be used for both. func (d Duration) MarshalJSON() ([]byte, error) { return json.Marshal(d.Duration.String()) } func (d *Duration) UnmarshalJSON(b []byte) error { return yaml.Unmarshal(b, &d.Duration) } // MarshalYAML turn duration tag into YAML // Value instead of pointer receiver because only that way it can be used for both. func (d Duration) MarshalYAML() (interface{}, error) { return d.Duration, nil } func (d *Duration) UnmarshalYAML(unmarshal func(any) error) error { return unmarshal(&d.Duration) } // DeepCopyInto copy the receiver, write into out. in must be non-nil. func (d *Duration) DeepCopyInto(out *Duration) { if out != nil { *out = *d } } // DeepCopy copy the receiver, create a new Duration. func (d *Duration) DeepCopy() *Duration { if d == nil { return nil } out := &Duration{} d.DeepCopyInto(out) return out }
package config import ( "encoding/json" "golang.org/x/text/language" ) // Language represents a BCP 47 language tag. // +kubebuilder:validation:Type=string type Language struct { language.Tag } // MarshalJSON turn language tag into JSON // Value instead of pointer receiver because only that way it can be used for both. func (l Language) MarshalJSON() ([]byte, error) { return json.Marshal(l.Tag.String()) } // UnmarshalJSON turn JSON into Language func (l *Language) UnmarshalJSON(b []byte) error { var s string if err := json.Unmarshal(b, &s); err != nil { return err } *l = Language{language.Make(s)} return nil } // DeepCopyInto copy the receiver, write into out. in must be non-nil. func (l *Language) DeepCopyInto(out *Language) { *out = *l } // DeepCopy copy the receiver, create a new Language. func (l *Language) DeepCopy() *Language { if l == nil { return nil } out := &Language{} l.DeepCopyInto(out) return out }
package config import ( "log" "regexp" "github.com/go-playground/validator/v10" ) var ( lowercaseIDRegexp = regexp.MustCompile("^[a-z0-9\"]([a-z0-9_-]*[a-z0-9\"]+|)$") ) const ( lowercaseID = "lowercase_id" ) // LowercaseID is the validation function for validating if the current field // is not empty and contains only lowercase chars, numbers, hyphens or underscores. // It's similar to RFC 1035 DNS label but not the same. func LowercaseID(fl validator.FieldLevel) bool { valAsString := fl.Field().String() valid := lowercaseIDRegexp.MatchString(valAsString) if !valid { log.Printf("Invalid ID %s", valAsString) } return valid }
package config import ( "encoding/json" "github.com/elnormous/contenttype" ) // MediaType represents a IANA media type as described in RFC 6838. Media types were formerly known as MIME types. // +kubebuilder:validation:Type=string type MediaType struct { contenttype.MediaType } // MarshalJSON turn MediaType into JSON // Value instead of pointer receiver because only that way it can be used for both. func (m MediaType) MarshalJSON() ([]byte, error) { return json.Marshal(m.String()) } // UnmarshalJSON turn JSON into MediaType func (m *MediaType) UnmarshalJSON(b []byte) error { var s string if err := json.Unmarshal(b, &s); err != nil { return err } mt, err := contenttype.ParseMediaType(s) if err != nil { return err } m.MediaType = mt return nil } // MarshalYAML turns MediaType into YAML. // Value instead of pointer receiver because only that way it can be used for both. func (m MediaType) MarshalYAML() (interface{}, error) { return m.MediaType.String(), nil } // UnmarshalYAML parses a string to MediaType func (m *MediaType) UnmarshalYAML(unmarshal func(any) error) error { var s string if err := unmarshal(&s); err != nil { return err } mt, err := contenttype.ParseMediaType(s) if err != nil { return err } m.MediaType = mt return nil } // DeepCopyInto copy the receiver, write into out. in must be non-nil. func (m *MediaType) DeepCopyInto(out *MediaType) { *out = *m } // DeepCopy copy the receiver, create a new MediaType. func (m *MediaType) DeepCopy() *MediaType { if m == nil { return nil } out := &MediaType{} m.DeepCopyInto(out) return out }
package config // +kubebuilder:object:generate=true type OgcAPI3dGeoVolumes struct { // Reference to the server (or object storage) hosting the 3D Tiles TileServer URL `yaml:"tileServer" json:"tileServer" validate:"required"` // Collections to be served as 3D GeoVolumes Collections GeoSpatialCollections `yaml:"collections" json:"collections"` // Whether JSON responses will be validated against the OpenAPI spec // since it has significant performance impact when dealing with large JSON payloads. // // +kubebuilder:default=true // +optional ValidateResponses *bool `yaml:"validateResponses,omitempty" json:"validateResponses,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49 } // +kubebuilder:object:generate=true type CollectionEntry3dGeoVolumes struct { // Optional basepath to 3D tiles on the tileserver. Defaults to the collection ID. // +optional TileServerPath *string `yaml:"tileServerPath,omitempty" json:"tileServerPath,omitempty"` // URI template for individual 3D tiles. // +optional URITemplate3dTiles *string `yaml:"uriTemplate3dTiles,omitempty" json:"uriTemplate3dTiles,omitempty" validate:"required_without_all=URITemplateDTM"` // Optional URI template for subtrees, only required when "implicit tiling" extension is used. // +optional URITemplateImplicitTilingSubtree *string `yaml:"uriTemplateImplicitTilingSubtree,omitempty" json:"uriTemplateImplicitTilingSubtree,omitempty"` // URI template for digital terrain model (DTM) in Quantized Mesh format, REQUIRED when you want to serve a DTM. // +optional URITemplateDTM *string `yaml:"uriTemplateDTM,omitempty" json:"uriTemplateDTM,omitempty" validate:"required_without_all=URITemplate3dTiles"` //nolint:tagliatelle // grandfathered // Optional URL to 3D viewer to visualize the given collection of 3D Tiles. 
// +optional URL3DViewer *URL `yaml:"3dViewerUrl,omitempty" json:"3dViewerUrl,omitempty"` } func (gv *CollectionEntry3dGeoVolumes) Has3DTiles() bool { return gv.URITemplate3dTiles != nil } func (gv *CollectionEntry3dGeoVolumes) HasDTM() bool { return gv.URITemplateDTM != nil }
package config import ( "encoding/json" "fmt" "math/rand" "os" "path/filepath" "slices" "strconv" "strings" "github.com/PDOK/gokoala/internal/engine/util" "github.com/docker/go-units" "gopkg.in/yaml.v3" ) // +kubebuilder:object:generate=true type OgcAPIFeatures struct { // Basemap to use in embedded viewer on the HTML pages. // +kubebuilder:default="OSM" // +kubebuilder:validation:Enum=OSM;BRT // +optional Basemap string `yaml:"basemap,omitempty" json:"basemap,omitempty" default:"OSM" validate:"oneof=OSM BRT"` // Collections to be served as features through this API Collections GeoSpatialCollections `yaml:"collections" json:"collections" validate:"required,dive"` // Limits the amount of features to retrieve with a single call // +optional Limit Limit `yaml:"limit,omitempty" json:"limit,omitempty"` // One or more datasources to get the features from (geopackages, postgis, etc). // Optional since you can also define datasources at the collection level // +optional Datasources *Datasources `yaml:"datasources,omitempty" json:"datasources,omitempty"` // Whether GeoJSON/JSON-FG responses will be validated against the OpenAPI spec // since it has significant performance impact when dealing with large JSON payloads. 
// // +kubebuilder:default=true // +optional ValidateResponses *bool `yaml:"validateResponses,omitempty" json:"validateResponses,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49 } func (oaf *OgcAPIFeatures) ProjectionsForCollections() []string { return oaf.ProjectionsForCollection("") } func (oaf *OgcAPIFeatures) ProjectionsForCollection(collectionID string) []string { uniqueSRSs := make(map[string]struct{}) if oaf.Datasources != nil { for _, a := range oaf.Datasources.Additional { uniqueSRSs[a.Srs] = struct{}{} } } for _, coll := range oaf.Collections { if (coll.ID == collectionID || collectionID == "") && coll.Features != nil && coll.Features.Datasources != nil { for _, a := range coll.Features.Datasources.Additional { uniqueSRSs[a.Srs] = struct{}{} } break } } result := util.Keys(uniqueSRSs) slices.Sort(result) return result } // +kubebuilder:object:generate=true type CollectionEntryFeatures struct { // Optional way to explicitly map a collection ID to the underlying table in the datasource. // +optional TableName *string `yaml:"tableName,omitempty" json:"tableName,omitempty"` // Optional collection specific datasources. Mutually exclusive with top-level defined datasources. // +optional Datasources *Datasources `yaml:"datasources,omitempty" json:"datasources,omitempty"` // Filters available for this collection // +optional Filters FeatureFilters `yaml:"filters,omitempty" json:"filters,omitempty"` // Optional way to exclude feature properties and/or determine the ordering of properties in the response. // +optional *FeatureProperties `yaml:",inline" json:",inline"` // Downloads available for this collection through map sheets. Note that 'map sheets' refer to a map // divided in rectangle areas that can be downloaded individually. 
// +optional MapSheetDownloads *MapSheetDownloads `yaml:"mapSheetDownloads,omitempty" json:"mapSheetDownloads,omitempty"` // Configuration specifically related to HTML/Web representation // +optional Web *WebConfig `yaml:"web,omitempty" json:"web,omitempty"` } // MarshalJSON custom because inlining only works on embedded structs. // Value instead of pointer receiver because only that way it can be used for both. func (c CollectionEntryFeatures) MarshalJSON() ([]byte, error) { return json.Marshal(c) } // UnmarshalJSON parses a string to CollectionEntryFeatures func (c *CollectionEntryFeatures) UnmarshalJSON(b []byte) error { return yaml.Unmarshal(b, c) } // +kubebuilder:object:generate=true type Datasources struct { // Features should always be available in WGS84 (according to spec). // This specifies the datasource to be used for features in the WGS84 projection DefaultWGS84 Datasource `yaml:"defaultWGS84" json:"defaultWGS84" validate:"required"` //nolint:tagliatelle // grandfathered // One or more additional datasources for features in other projections. GoKoala doesn't do // any on-the-fly reprojection so additional datasources need to be reprojected ahead of time. // +optional Additional []AdditionalDatasource `yaml:"additional" json:"additional" validate:"dive"` } // +kubebuilder:object:generate=true type Datasource struct { // GeoPackage to get the features from. // +optional GeoPackage *GeoPackage `yaml:"geopackage,omitempty" json:"geopackage,omitempty" validate:"required_without_all=PostGIS"` // PostGIS database to get the features from (not implemented yet). 
// +optional PostGIS *PostGIS `yaml:"postgis,omitempty" json:"postgis,omitempty" validate:"required_without_all=GeoPackage"` // Add more datasources here such as Mongo, Elastic, etc } // +kubebuilder:object:generate=true type AdditionalDatasource struct { // Projection (SRS/CRS) used for the features in this datasource // +kubebuilder:validation:Pattern=`^EPSG:\d+$` Srs string `yaml:"srs" json:"srs" validate:"required,startswith=EPSG:"` // The additional datasource Datasource `yaml:",inline" json:",inline"` } // +kubebuilder:object:generate=true type PostGIS struct { // placeholder } // +kubebuilder:object:generate=true type GeoPackage struct { // Settings to read a GeoPackage from local disk // +optional Local *GeoPackageLocal `yaml:"local,omitempty" json:"local,omitempty" validate:"required_without_all=Cloud"` // Settings to read a GeoPackage as a Cloud-Backed SQLite database // +optional Cloud *GeoPackageCloud `yaml:"cloud,omitempty" json:"cloud,omitempty" validate:"required_without_all=Local"` } // +kubebuilder:object:generate=true type GeoPackageCommon struct { // Feature id column name // +kubebuilder:default="fid" // +optional Fid string `yaml:"fid,omitempty" json:"fid,omitempty" validate:"required" default:"fid"` // External feature id column name. When specified this ID column will be exposed to clients instead of the regular FID column. // It allows one to offer a more stable ID to clients instead of an auto-generated FID. External FID column should contain UUIDs. // +optional ExternalFid string `yaml:"externalFid" json:"externalFid"` // Optional timeout after which queries are canceled // +kubebuilder:default="15s" // +optional QueryTimeout Duration `yaml:"queryTimeout,omitempty" json:"queryTimeout,omitempty" validate:"required" default:"15s"` // ADVANCED SETTING. When the number of features in a bbox stay within the given value use an RTree index, otherwise use a BTree index. 
// +kubebuilder:default=8000 // +optional MaxBBoxSizeToUseWithRTree int `yaml:"maxBBoxSizeToUseWithRTree,omitempty" json:"maxBBoxSizeToUseWithRTree,omitempty" validate:"required" default:"8000"` // ADVANCED SETTING. Sets the SQLite "cache_size" pragma which determines how many pages are cached in-memory. // See https://sqlite.org/pragma.html#pragma_cache_size for details. // Default in SQLite is 2000 pages, which equates to 2000KiB (2048000 bytes). Which is denoted as -2000. // +kubebuilder:default=-2000 // +optional InMemoryCacheSize int `yaml:"inMemoryCacheSize,omitempty" json:"inMemoryCacheSize,omitempty" validate:"required" default:"-2000"` } // +kubebuilder:object:generate=true type GeoPackageLocal struct { // GeoPackageCommon shared config between local and cloud GeoPackage GeoPackageCommon `yaml:",inline" json:",inline"` // Location of GeoPackage on disk. // You can place the GeoPackage here manually (out-of-band) or you can specify Download // and let the application download the GeoPackage for you and store it at this location. File string `yaml:"file" json:"file" validate:"required,omitempty,filepath"` // Optional initialization task to download a GeoPackage during startup. GeoPackage will be // downloaded to local disk and stored at the location specified in File. // +optional Download *GeoPackageDownload `yaml:"download,omitempty" json:"download,omitempty"` } // +kubebuilder:object:generate=true type GeoPackageDownload struct { // Location of GeoPackage on remote HTTP(S) URL. GeoPackage will be downloaded to local disk // during startup and stored at the location specified in "file". From URL `yaml:"from" json:"from" validate:"required"` // ADVANCED SETTING. Determines how many workers (goroutines) in parallel will download the specified GeoPackage. // Setting this to 1 will disable concurrent downloads. 
// +kubebuilder:default=4 // +kubebuilder:validation:Minimum=1 // +optional Parallelism int `yaml:"parallelism,omitempty" json:"parallelism,omitempty" validate:"required,gte=1" default:"4"` // ADVANCED SETTING. When true TLS certs are NOT validated, false otherwise. Only use true for your own self-signed certificates! // +kubebuilder:default=false // +optional TLSSkipVerify bool `yaml:"tlsSkipVerify,omitempty" json:"tlsSkipVerify,omitempty" default:"false"` // ADVANCED SETTING. HTTP request timeout when downloading (part of) GeoPackage. // +kubebuilder:default="2m" // +optional Timeout Duration `yaml:"timeout,omitempty" json:"timeout,omitempty" validate:"required" default:"2m"` // ADVANCED SETTING. Minimum delay to use when retrying HTTP request to download (part of) GeoPackage. // +kubebuilder:default="1s" // +optional RetryDelay Duration `yaml:"retryDelay,omitempty" json:"retryDelay,omitempty" validate:"required" default:"1s"` // ADVANCED SETTING. Maximum overall delay of the exponential backoff while retrying HTTP requests to download (part of) GeoPackage. // +kubebuilder:default="30s" // +optional RetryMaxDelay Duration `yaml:"retryMaxDelay,omitempty" json:"retryMaxDelay,omitempty" validate:"required" default:"30s"` // ADVANCED SETTING. Maximum number of retries when retrying HTTP requests to download (part of) GeoPackage. // +kubebuilder:default=5 // +kubebuilder:validation:Minimum=1 // +optional MaxRetries int `yaml:"maxRetries,omitempty" json:"maxRetries,omitempty" validate:"required,gte=1" default:"5"` } // +kubebuilder:object:generate=true type GeoPackageCloud struct { // GeoPackageCommon shared config between local and cloud GeoPackage GeoPackageCommon `yaml:",inline" json:",inline"` // Reference to the cloud storage (either azure or google at the moment). 
// For example 'azure?emulator=127.0.0.1:10000&sas=0' or 'google' Connection string `yaml:"connection" json:"connection" validate:"required"` // Username of the storage account, like devstoreaccount1 when using Azurite User string `yaml:"user" json:"user" validate:"required"` // Some kind of credential like a password or key to authenticate with the storage backend, e.g: // 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==' when using Azurite Auth string `yaml:"auth" json:"auth" validate:"required"` // Container/bucket on the storage account Container string `yaml:"container" json:"container" validate:"required"` // Filename of the GeoPackage File string `yaml:"file" json:"file" validate:"required"` // Local cache of fetched blocks from cloud storage // +optional Cache GeoPackageCloudCache `yaml:"cache,omitempty" json:"cache,omitempty"` // ADVANCED SETTING. Only for debug purposes! When true all HTTP requests executed by sqlite to cloud object storage are logged to stdout // +kubebuilder:default=false // +optional LogHTTPRequests bool `yaml:"logHttpRequests,omitempty" json:"logHttpRequests,omitempty" default:"false"` } func (gc *GeoPackageCloud) CacheDir() (string, error) { fileNameWithoutExt := strings.TrimSuffix(gc.File, filepath.Ext(gc.File)) if gc.Cache.Path != nil { randomSuffix := strconv.Itoa(rand.Intn(99999)) //nolint:gosec // random isn't used for security purposes return filepath.Join(*gc.Cache.Path, fileNameWithoutExt+"-"+randomSuffix), nil } cacheDir, err := os.MkdirTemp("", fileNameWithoutExt) if err != nil { return "", fmt.Errorf("failed to create tempdir to cache %s, error %w", fileNameWithoutExt, err) } return cacheDir, nil } // +kubebuilder:object:generate=true type GeoPackageCloudCache struct { // Optional path to directory for caching cloud-backed GeoPackage blocks, when omitted a temp dir will be used. 
// +optional Path *string `yaml:"path,omitempty" json:"path,omitempty" validate:"omitempty,dirpath|filepath"` // Max size of the local cache. Accepts human-readable size such as 100Mb, 4Gb, 1Tb, etc. When omitted 1Gb is used. // +kubebuilder:default="1Gb" // +optional MaxSize string `yaml:"maxSize,omitempty" json:"maxSize,omitempty" default:"1Gb"` // When true a warm-up query is executed on startup which aims to fill the local cache. Does increase startup time. // +kubebuilder:default=false // +optional WarmUp bool `yaml:"warmUp,omitempty" json:"warmUp,omitempty" default:"false"` } func (cache *GeoPackageCloudCache) MaxSizeAsBytes() (int64, error) { return units.FromHumanSize(cache.MaxSize) } // +kubebuilder:object:generate=true type FeatureFilters struct { // OAF Part 1: filter on feature properties // https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameters_for_filtering_on_feature_properties // // +optional Properties []PropertyFilter `yaml:"properties,omitempty" json:"properties,omitempty" validate:"dive"` // OAF Part 3: add config for complex/CQL filters here // <placeholder> } // +kubebuilder:object:generate=true type FeatureProperties struct { // Properties/fields of features in this collection. This setting controls two things: // // A) allows one to exclude certain properties, when propertiesExcludeUnknown=true // B) allows one sort the properties in the given order, when propertiesInSpecificOrder=true // // When not set all available properties are returned in API responses, in alphabetical order. // +optional Properties []string `yaml:"properties,omitempty" json:"properties,omitempty"` // When true properties not listed under 'properties' are excluded from API responses. When false // unlisted properties are also included in API responses. 
// +optional // +kubebuilder:default=false PropertiesExcludeUnknown bool `yaml:"propertiesExcludeUnknown,omitempty" json:"propertiesExcludeUnknown,omitempty" default:"false"` // When true properties are returned according to the ordering specified under 'properties'. When false // properties are returned in alphabetical order. // +optional // +kubebuilder:default=false PropertiesInSpecificOrder bool `yaml:"propertiesInSpecificOrder,omitempty" json:"propertiesInSpecificOrder,omitempty" default:"false"` } // +kubebuilder:object:generate=true type MapSheetDownloads struct { // Properties that provide the download details per map sheet. Note that 'map sheets' refer to a map // divided in rectangle areas that can be downloaded individually. Properties MapSheetDownloadProperties `yaml:"properties" json:"properties" validate:"required"` } // +kubebuilder:object:generate=true type MapSheetDownloadProperties struct { // Property/column containing file download URL AssetURL string `yaml:"assetUrl" json:"assetUrl" validate:"required"` // Property/column containing file size Size string `yaml:"size" json:"size" validate:"required"` // The actual media type (not a property/column) of the download, like application/zip. MediaType MediaType `yaml:"mediaType" json:"mediaType" validate:"required"` // Property/column containing the map sheet identifier MapSheetID string `yaml:"mapSheetId" json:"mapSheetId" validate:"required"` } // +kubebuilder:object:generate=true type WebConfig struct { // Viewer config for displaying multiple features on a map // +optional FeaturesViewer *FeaturesViewer `yaml:"featuresViewer,omitempty" json:"featuresViewer,omitempty"` // Viewer config for displaying a single feature on a map // +optional FeatureViewer *FeaturesViewer `yaml:"featureViewer,omitempty" json:"featureViewer,omitempty"` // Whether URLs (to external resources) in the HTML representation of features should be rendered as hyperlinks. 
// +optional URLAsHyperlink bool `yaml:"urlAsHyperlink,omitempty" json:"urlAsHyperlink,omitempty"` } // +kubebuilder:object:generate=true type FeaturesViewer struct { // Maximum initial zoom level of the viewer when rendering features, specified by scale denominator. // Defaults to 1000 (= scale 1:1000). // +optional MinScale int `yaml:"minScale,omitempty" json:"minScale,omitempty" validate:"gt=0" default:"1000"` // Minimal initial zoom level of the viewer when rendering features, specified by scale denominator // (not set by default). // +optional MaxScale *int `yaml:"maxScale,omitempty" json:"maxScale,omitempty" validate:"omitempty,gt=0,gtefield=MinScale"` } // +kubebuilder:object:generate=true type Limit struct { // Number of features to return by default. // +kubebuilder:default=10 // +kubebuilder:validation:Minimum=2 // +optional Default int `yaml:"default,omitempty" json:"default,omitempty" validate:"gt=1" default:"10"` // Max number of features to return. Should be larger than 100 since the HTML interface always offers a 100 limit option. // +kubebuilder:default=1000 // +kubebuilder:validation:Minimum=100 // +optional Max int `yaml:"max,omitempty" json:"max,omitempty" validate:"gte=100" default:"1000"` } // +kubebuilder:object:generate=true type PropertyFilter struct { // Needs to match with a column name in the feature table (in the configured datasource) Name string `yaml:"name" json:"name" validate:"required"` // Explains this property filter // +kubebuilder:default="Filter features by this property" // +optional Description string `yaml:"description,omitempty" json:"description,omitempty" default:"Filter features by this property"` // When true the property/column in the feature table needs to be indexed. Initialization will fail // when no index is present, when false the index check is skipped. For large tables an index is recommended! 
// // +kubebuilder:default=true // +optional IndexRequired *bool `yaml:"indexRequired,omitempty" json:"indexRequired,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49 // Static list of allowed values to be used as input for this property filter. Will be enforced by OpenAPI spec. // +optional AllowedValues []string `yaml:"allowedValues,omitempty" json:"allowedValues,omitempty"` // Derive list of allowed values for this property filter from the corresponding column in the datastore. // Use with caution since it can increase startup time when used on large tables. Make sure an index in present. // // +kubebuilder:default=false // +optional DeriveAllowedValuesFromDatasource *bool `yaml:"deriveAllowedValuesFromDatasource,omitempty" json:"deriveAllowedValuesFromDatasource,omitempty" default:"false"` } // +kubebuilder:object:generate=true type TemporalProperties struct { // Name of field in datasource to be used in temporal queries as the start date StartDate string `yaml:"startDate" json:"startDate" validate:"required"` // Name of field in datasource to be used in temporal queries as the end date EndDate string `yaml:"endDate" json:"endDate" validate:"required"` } func validateFeatureCollections(collections GeoSpatialCollections) error { var errMessages []string for _, collection := range collections { if collection.Metadata != nil && collection.Metadata.TemporalProperties != nil && (collection.Metadata.Extent == nil || collection.Metadata.Extent.Interval == nil) { errMessages = append(errMessages, fmt.Sprintf("validation failed for collection '%s'; "+ "field 'Extent.Interval' is required with field 'TemporalProperties'\n", collection.ID)) } if collection.Features != nil && collection.Features.Filters.Properties != nil { for _, pf := range collection.Features.Filters.Properties { if pf.AllowedValues != nil && *pf.DeriveAllowedValuesFromDatasource { errMessages = append(errMessages, fmt.Sprintf("validation failed for property filter 
'%s'; "+ "field 'AllowedValues' and field 'DeriveAllowedValuesFromDatasource' are mutually exclusive\n", pf.Name)) } } } } if len(errMessages) > 0 { return fmt.Errorf("invalid config provided:\n%v", errMessages) } return nil }
package config

import (
	"encoding/json"
	"fmt"
	"slices"
	"sort"

	"github.com/PDOK/gokoala/internal/engine/util"
	"gopkg.in/yaml.v3"
)

// +kubebuilder:object:generate=true
type OgcAPITiles struct {
	// Tiles for the entire dataset, these are hosted at the root of the API (/tiles endpoint).
	// +optional
	DatasetTiles *Tiles `yaml:",inline" json:",inline"`

	// Tiles per collection. When no collections are specified tiles should be hosted at the root of the API (/tiles endpoint).
	// +optional
	Collections GeoSpatialCollections `yaml:"collections,omitempty" json:"collections,omitempty"`
}

// OgcAPITilesJSON helper type to marshal OgcAPITiles to JSON, since JSON inlining
// only works on embedded structs (the yaml ",inline" tag has no JSON equivalent).
type OgcAPITilesJSON struct {
	*Tiles      `json:",inline"`
	Collections GeoSpatialCollections `json:"collections,omitempty"`
}

// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (o OgcAPITiles) MarshalJSON() ([]byte, error) {
	return json.Marshal(OgcAPITilesJSON{
		Tiles:       o.DatasetTiles,
		Collections: o.Collections,
	})
}

// UnmarshalJSON parses a string to OgcAPITiles
func (o *OgcAPITiles) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, o)
}

// Defaults derives a health check tile path for dataset tiles OR per-collection tiles
// when the default SRS is used and no explicit tile path was configured.
// NOTE(review): dataset-level and collection-level tiles are treated as mutually
// exclusive here (else-if) — confirm that both can never be configured at once.
func (o *OgcAPITiles) Defaults() {
	if o.DatasetTiles != nil &&
		o.DatasetTiles.HealthCheck.Srs == DefaultSrs &&
		o.DatasetTiles.HealthCheck.TilePath == nil {
		o.DatasetTiles.deriveHealthCheckTilePath()
	} else if o.Collections != nil {
		for _, coll := range o.Collections {
			if coll.Tiles != nil &&
				coll.Tiles.GeoDataTiles.HealthCheck.Srs == DefaultSrs &&
				coll.Tiles.GeoDataTiles.HealthCheck.TilePath == nil {
				coll.Tiles.GeoDataTiles.deriveHealthCheckTilePath()
			}
		}
	}
}

// +kubebuilder:object:generate=true
type CollectionEntryTiles struct {
	// Tiles specific to this collection. Called 'geodata tiles' in OGC spec.
	GeoDataTiles Tiles `yaml:",inline" json:",inline" validate:"required"`
}

// CollectionEntryTilesJSON helper type to marshal CollectionEntryTiles to JSON,
// since JSON inlining only works on embedded structs.
type CollectionEntryTilesJSON struct {
	Tiles `json:",inline"`
}

// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (c CollectionEntryTiles) MarshalJSON() ([]byte, error) {
	return json.Marshal(CollectionEntryTilesJSON{
		Tiles: c.GeoDataTiles,
	})
}

// UnmarshalJSON parses a string to CollectionEntryTiles
func (c *CollectionEntryTiles) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}

// +kubebuilder:validation:Enum=raster;vector
type TilesType string

const (
	TilesTypeRaster TilesType = "raster"
	TilesTypeVector TilesType = "vector"
)

// HasType true when the given tile type (raster/vector) is offered by the dataset
// tiles or by at least one collection.
func (o *OgcAPITiles) HasType(t TilesType) bool {
	if o.DatasetTiles != nil && slices.Contains(o.DatasetTiles.Types, t) {
		return true
	}
	for _, coll := range o.Collections {
		if coll.Tiles != nil && slices.Contains(coll.Tiles.GeoDataTiles.Types, t) {
			return true
		}
	}
	return false
}

// AllTileProjections projections supported by GoKoala for serving (vector) tiles, regardless of the dataset.
// When adding a new projection also add corresponding HTML/JSON templates.
var AllTileProjections = map[string]string{
	"EPSG:28992": "NetherlandsRDNewQuad",
	"EPSG:3035":  "EuropeanETRS89_LAEAQuad",
	"EPSG:3857":  "WebMercatorQuad",
}

// HasProjection true when the given projection is supported for this dataset
func (o *OgcAPITiles) HasProjection(srs string) bool {
	for _, projection := range o.GetProjections() {
		if projection.Srs == srs {
			return true
		}
	}
	return false
}

// GetProjections projections supported for this dataset
func (o *OgcAPITiles) GetProjections() []SupportedSrs {
	supportedSrsSet := map[SupportedSrs]struct{}{}
	if o.DatasetTiles != nil {
		for _, supportedSrs := range o.DatasetTiles.SupportedSrs {
			supportedSrsSet[supportedSrs] = struct{}{}
		}
	}
	for _, coll := range o.Collections {
		if coll.Tiles == nil {
			continue
		}
		for _, supportedSrs := range coll.Tiles.GeoDataTiles.SupportedSrs {
			supportedSrsSet[supportedSrs] = struct{}{}
		}
	}
	result := util.Keys(supportedSrsSet)
	// sort by length of the SRS string (descending) for a deterministic order
	sort.Slice(result, func(i, j int) bool {
		return len(result[i].Srs) > len(result[j].Srs)
	})
	return result
}

// +kubebuilder:object:generate=true
type Tiles struct {
	// Reference to the server (or object storage) hosting the tiles.
	// Note: Only marked as optional in CRD to support top-level OR collection-level tiles
	// +optional
	TileServer URL `yaml:"tileServer" json:"tileServer" validate:"required"`

	// Could be 'vector' and/or 'raster' to indicate the types of tiles offered
	// Note: Only marked as optional in CRD to support top-level OR collection-level tiles
	// +optional
	Types []TilesType `yaml:"types" json:"types" validate:"required"`

	// Specifies in what projections (SRS/CRS) the tiles are offered
	// Note: Only marked as optional in CRD to support top-level OR collection-level tiles
	// +optional
	SupportedSrs []SupportedSrs `yaml:"supportedSrs" json:"supportedSrs" validate:"required,dive"`

	// Optional template to the vector tiles on the tileserver. Defaults to {tms}/{z}/{x}/{y}.pbf.
	// +optional
	URITemplateTiles *string `yaml:"uriTemplateTiles,omitempty" json:"uriTemplateTiles,omitempty"`

	// Optional health check configuration
	// +optional
	HealthCheck HealthCheck `yaml:"healthCheck" json:"healthCheck"`
}

// deriveHealthCheckTilePath derives the path to a known tile at the deepest
// available zoom level for the default SRS.
// NOTE(review): assumes the deepest zoom level has an entry in HealthCheckDefaultTiles
// (zoom 0-16); deeper levels would fall back to the zero value tile {0,0} — verify.
func (t *Tiles) deriveHealthCheckTilePath() {
	var deepestZoomLevel int
	for _, srs := range t.SupportedSrs {
		if srs.Srs == DefaultSrs {
			deepestZoomLevel = srs.ZoomLevelRange.End
		}
	}
	defaultTile := HealthCheckDefaultTiles[deepestZoomLevel]
	tileMatrixSet := AllTileProjections[DefaultSrs]
	tilePath := fmt.Sprintf("/%s/%d/%d/%d.pbf", tileMatrixSet, deepestZoomLevel, defaultTile.x, defaultTile.y)
	t.HealthCheck.TilePath = &tilePath
}

// +kubebuilder:object:generate=true
type SupportedSrs struct {
	// Projection (SRS/CRS) used
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs" json:"srs" validate:"required,startswith=EPSG:"`

	// Available zoom levels
	ZoomLevelRange ZoomLevelRange `yaml:"zoomLevelRange" json:"zoomLevelRange" validate:"required"`
}

// +kubebuilder:object:generate=true
type ZoomLevelRange struct {
	// Start zoom level
	// +kubebuilder:validation:Minimum=0
	Start int `yaml:"start" json:"start" validate:"gte=0,ltefield=End"`

	// End zoom level
	End int `yaml:"end" json:"end" validate:"required,gtefield=Start"`
}

// TileCoordinates the x/y address of a single tile within a tile matrix.
type TileCoordinates struct {
	x int
	y int
}

// default tiles for EPSG:28992 - location centered just outside a village in the province of Friesland
var HealthCheckDefaultTiles = map[int]TileCoordinates{
	0:  {x: 0, y: 0},
	1:  {x: 1, y: 0},
	2:  {x: 2, y: 1},
	3:  {x: 4, y: 2},
	4:  {x: 8, y: 5},
	5:  {x: 17, y: 11},
	6:  {x: 35, y: 22},
	7:  {x: 71, y: 45},
	8:  {x: 143, y: 91},
	9:  {x: 286, y: 182},
	10: {x: 572, y: 365},
	11: {x: 1144, y: 731},
	12: {x: 2288, y: 1462},
	13: {x: 4576, y: 2925},
	14: {x: 9152, y: 5851},
	15: {x: 18304, y: 11702},
	16: {x: 36608, y: 23404},
}

// +kubebuilder:object:generate=true
type HealthCheck struct {
	// Projection (SRS/CRS) used for tile healthcheck
	// +kubebuilder:default="EPSG:28992"
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	// +optional
	Srs string `yaml:"srs" json:"srs" default:"EPSG:28992" validate:"required,startswith=EPSG:"`

	// Path to specific tile used for healthcheck
	// +optional
	TilePath *string `yaml:"tilePath,omitempty" json:"tilePath,omitempty" validate:"required_unless=Srs EPSG:28992"`
}

// validateTileProjections checks that every configured SRS (dataset-level and
// collection-level) is one of the projections supported by GoKoala.
func validateTileProjections(tiles *OgcAPITiles) error {
	var errMessages []string
	if tiles.DatasetTiles != nil {
		for _, srs := range tiles.DatasetTiles.SupportedSrs {
			if _, ok := AllTileProjections[srs.Srs]; !ok {
				errMessages = append(errMessages,
					fmt.Sprintf("validation failed for srs '%s'; srs is not supported", srs.Srs))
			}
		}
	}
	for _, collection := range tiles.Collections {
		// Guard against collections without tile config; dereferencing blindly would
		// panic. Consistent with the nil checks in HasType/GetProjections/Defaults.
		if collection.Tiles == nil {
			continue
		}
		for _, srs := range collection.Tiles.GeoDataTiles.SupportedSrs {
			if _, ok := AllTileProjections[srs.Srs]; !ok {
				errMessages = append(errMessages,
					fmt.Sprintf("validation failed for srs '%s'; srs is not supported", srs.Srs))
			}
		}
	}
	if len(errMessages) > 0 {
		return fmt.Errorf("invalid config provided:\n%v", errMessages)
	}
	return nil
}
package config

import (
	"encoding/json"
	"fmt"
	"net/url"
	"regexp"
	"strings"

	"gopkg.in/yaml.v3"
)

var (
	validURLRegexp = regexp.MustCompile(`^(https?://.+|\$\{.+\}.*)$`) // https://regex101.com/r/IvhP6H/1
)

// URL Custom net.URL compatible with YAML and JSON (un)marshalling and kubebuilder.
// In addition, it also removes trailing slash if present, so we can easily
// append a longer path without having to worry about double slashes.
//
// Allow only http/https URLs or environment variables like ${FOOBAR}
// +kubebuilder:validation:Pattern=`^(https?://.+)|(\$\{.+\}.*)`
// +kubebuilder:validation:Type=string
type URL struct {
	// This is a pointer so the wrapper can directly be used in templates, e.g.: {{ .Config.BaseURL }}
	// Otherwise you would need .String() or template.URL(). (Might be a bug.)
	*url.URL
}

// UnmarshalYAML parses a string to URL and also removes trailing slash if present,
// so we can easily append a longer path without having to worry about double slashes.
func (u *URL) UnmarshalYAML(unmarshal func(any) error) error {
	var raw string
	if err := unmarshal(&raw); err != nil {
		return err
	}
	parsed, err := parseURL(raw)
	if err != nil {
		return err
	}
	if parsed != nil {
		u.URL = parsed
	}
	return nil
}

// MarshalJSON turns URL into JSON.
// Value instead of pointer receiver because only that way it can be used for both.
func (u URL) MarshalJSON() ([]byte, error) {
	if u.URL == nil {
		// no wrapped URL present: encode as an empty string
		return json.Marshal("")
	}
	return json.Marshal(u.URL.String())
}

// UnmarshalJSON parses a string to URL and also removes trailing slash if present,
// so we can easily append a longer path without having to worry about double slashes.
func (u *URL) UnmarshalJSON(b []byte) error {
	// JSON is a subset of YAML, so delegate to the YAML unmarshaller
	return yaml.Unmarshal(b, u)
}

// MarshalYAML turns URL into YAML.
// Value instead of pointer receiver because only that way it can be used for both.
func (u URL) MarshalYAML() (interface{}, error) { if u.URL == nil { return "", nil } return u.URL.String(), nil } // DeepCopyInto copies the receiver, writes into out. func (u *URL) DeepCopyInto(out *URL) { if out != nil { *out = *u } } // DeepCopy copies the receiver, creates a new URL. func (u *URL) DeepCopy() *URL { if u == nil { return nil } out := &URL{} u.DeepCopyInto(out) return out } func parseURL(s string) (*url.URL, error) { if !validURLRegexp.MatchString(s) { return nil, fmt.Errorf("invalid URL: %s", s) } return url.Parse(strings.TrimSuffix(s, "/")) }
package engine

import (
	"log"
	"net/http"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine/util"
	"github.com/elnormous/contenttype"
	"golang.org/x/text/language"
)

const (
	// Query parameters used to select output format and language.
	FormatParam   = "f"
	languageParam = "lang"

	// Media types supported across the OGC APIs served by GoKoala.
	MediaTypeJSON          = "application/json"
	MediaTypeXML           = "application/xml"
	MediaTypeHTML          = "text/html"
	MediaTypeTileJSON      = "application/vnd.mapbox.tile+json"
	MediaTypeMVT           = "application/vnd.mapbox-vector-tile"
	MediaTypeMapboxStyle   = "application/vnd.mapbox.style+json"
	MediaTypeSLD           = "application/vnd.ogc.sld+xml;version=1.0"
	MediaTypeOpenAPI       = "application/vnd.oai.openapi+json;version=3.0"
	MediaTypeGeoJSON       = "application/geo+json"
	MediaTypeJSONFG        = "application/vnd.ogc.fg+json" // https://docs.ogc.org/per/21-017r1.html#toc17
	MediaTypeQuantizedMesh = "application/vnd.quantized-mesh"

	// Short format names, usable as ?f= values.
	FormatHTML           = "html"
	FormatXML            = "xml"
	FormatJSON           = "json"
	FormatTileJSON       = "tilejson"
	FormatMVT            = "mvt"
	FormatMVTAlternative = "pbf"
	FormatMapboxStyle    = "mapbox"
	FormatSLD            = "sld10"
	FormatGeoJSON        = "geojson" // ?=json should also work for geojson
	FormatJSONFG         = "jsonfg"
	FormatGzip           = "gzip"
)

var (
	// MediaTypeJSONFamily media types that are JSON under the hood.
	MediaTypeJSONFamily = []string{MediaTypeTileJSON, MediaTypeMapboxStyle, MediaTypeGeoJSON, MediaTypeJSONFG}

	// OutputFormatDefault the default output format offered in HTML format pickers.
	OutputFormatDefault = map[string]string{FormatJSON: "JSON"}

	// OutputFormatFeatures output formats offered for feature responses.
	OutputFormatFeatures = map[string]string{FormatJSON: "GeoJSON", FormatJSONFG: "JSON-FG"}

	// CompressibleMediaTypes media types eligible for response compression.
	CompressibleMediaTypes = []string{
		MediaTypeJSON,
		MediaTypeGeoJSON,
		MediaTypeJSONFG,
		MediaTypeTileJSON,
		MediaTypeMapboxStyle,
		MediaTypeOpenAPI,
		MediaTypeHTML,
		// common web media types
		"text/css",
		"text/plain",
		"text/javascript",
		"application/javascript",
		"image/svg+xml",
	}

	// StyleFormatExtension file extension per style format.
	StyleFormatExtension = map[string]string{
		FormatMapboxStyle: ".json",
		FormatSLD:         ".sld",
	}
)

// ContentNegotiation performs format (media type) and language negotiation for incoming requests.
type ContentNegotiation struct {
	availableMediaTypes []contenttype.MediaType
	availableLanguages  []language.Tag

	formatsByMediaType map[string]string
	mediaTypesByFormat map[string]string
}

// newContentNegotiation creates a ContentNegotiation for the given configured languages,
// with the fixed set of media types supported by GoKoala (listed in preference order).
func newContentNegotiation(availableLanguages []config.Language) *ContentNegotiation {
	availableMediaTypes := []contenttype.MediaType{
		// in order
		contenttype.NewMediaType(MediaTypeJSON),
		contenttype.NewMediaType(MediaTypeXML),
		contenttype.NewMediaType(MediaTypeHTML),
		contenttype.NewMediaType(MediaTypeTileJSON),
		contenttype.NewMediaType(MediaTypeGeoJSON),
		contenttype.NewMediaType(MediaTypeJSONFG),
		contenttype.NewMediaType(MediaTypeMVT),
		contenttype.NewMediaType(MediaTypeMapboxStyle),
		contenttype.NewMediaType(MediaTypeSLD),
		contenttype.NewMediaType(MediaTypeOpenAPI),
	}

	formatsByMediaType := map[string]string{
		MediaTypeJSON:        FormatJSON,
		MediaTypeXML:         FormatXML,
		MediaTypeHTML:        FormatHTML,
		MediaTypeTileJSON:    FormatTileJSON,
		MediaTypeGeoJSON:     FormatGeoJSON,
		MediaTypeJSONFG:      FormatJSONFG,
		MediaTypeMVT:         FormatMVT,
		MediaTypeMapboxStyle: FormatMapboxStyle,
		MediaTypeSLD:         FormatSLD,
	}

	mediaTypesByFormat := util.Inverse(formatsByMediaType)

	languageTags := make([]language.Tag, 0, len(availableLanguages))
	for _, availableLanguage := range availableLanguages {
		languageTags = append(languageTags, availableLanguage.Tag)
	}

	return &ContentNegotiation{
		availableMediaTypes: availableMediaTypes,
		availableLanguages:  languageTags,
		formatsByMediaType:  formatsByMediaType,
		mediaTypesByFormat:  mediaTypesByFormat,
	}
}

// GetSupportedStyleFormats returns the style formats offered (Mapbox and SLD 1.0).
func (cn *ContentNegotiation) GetSupportedStyleFormats() []string {
	return []string{FormatMapboxStyle, FormatSLD}
}

// GetStyleFormatExtension returns the file extension for the given style format,
// or an empty string for unknown formats.
func (cn *ContentNegotiation) GetStyleFormatExtension(format string) string {
	if extension, exists := StyleFormatExtension[format]; exists {
		return extension
	}
	return ""
}

// NegotiateFormat performs content negotiation, not idempotent (since it removes the ?f= param)
func (cn *ContentNegotiation) NegotiateFormat(req *http.Request) string {
	// precedence: explicit ?f= param, then Accept header, then JSON default
	requestedFormat := cn.getFormatFromQueryParam(req)
	if requestedFormat == "" {
		requestedFormat = cn.getFormatFromAcceptHeader(req)
	}
	if requestedFormat == "" {
		requestedFormat = FormatJSON // default
	}
	return requestedFormat
}

// NegotiateLanguage performs language negotiation, not idempotent (since it removes the ?lang= param)
func (cn *ContentNegotiation) NegotiateLanguage(w http.ResponseWriter, req *http.Request) language.Tag {
	// precedence: explicit ?lang= param, then cookie, then Accept-Language header, then Dutch default
	requestedLanguage := cn.getLanguageFromQueryParam(w, req)
	if requestedLanguage == language.Und {
		requestedLanguage = cn.getLanguageFromCookie(req)
	}
	if requestedLanguage == language.Und {
		requestedLanguage = cn.getLanguageFromHeader(req)
	}
	if requestedLanguage == language.Und {
		requestedLanguage = language.Dutch // default
	}
	return requestedLanguage
}

// formatToMediaType returns the media type belonging to the given short format name.
func (cn *ContentNegotiation) formatToMediaType(format string) string {
	return cn.mediaTypesByFormat[format]
}

// getFormatFromQueryParam reads the ?f= param and strips it from the request URL
// (to prepare for URL rewriting). Returns "" when absent.
func (cn *ContentNegotiation) getFormatFromQueryParam(req *http.Request) string {
	var requestedFormat = ""
	queryParams := req.URL.Query()
	if queryParams.Get(FormatParam) != "" {
		requestedFormat = queryParams.Get(FormatParam)

		// remove ?f= parameter, to prepare for rewrite
		queryParams.Del(FormatParam)
		req.URL.RawQuery = queryParams.Encode()
	}
	return requestedFormat
}

// getFormatFromAcceptHeader negotiates against the Accept header.
// Returns "" when the header is absent or unparsable.
func (cn *ContentNegotiation) getFormatFromAcceptHeader(req *http.Request) string {
	accepted, _, err := contenttype.GetAcceptableMediaType(req, cn.availableMediaTypes)
	if err != nil {
		log.Printf("Failed to parse Accept header: %v. Continuing\n", err)
		return ""
	}
	return cn.formatsByMediaType[accepted.String()]
}

// getLanguageFromQueryParam reads the ?lang= param, matches it against the available
// languages, persists the match in a cookie, and strips the param from the request
// URL (to prepare for URL rewriting). Returns language.Und when absent or unparsable.
func (cn *ContentNegotiation) getLanguageFromQueryParam(w http.ResponseWriter, req *http.Request) language.Tag {
	var requestedLanguage = language.Und
	queryParams := req.URL.Query()
	if queryParams.Get(languageParam) != "" {
		lang := queryParams.Get(languageParam)
		accepted, _, err := language.ParseAcceptLanguage(lang)
		if err != nil {
			return requestedLanguage
		}
		m := language.NewMatcher(cn.availableLanguages)
		_, langIndex, _ := m.Match(accepted...)
		requestedLanguage = cn.availableLanguages[langIndex]
		// override for use in cookie
		lang = requestedLanguage.String()
		// set requested language in cookie
		setLanguageCookie(w, lang)
		// remove ?lang= parameter, to prepare for rewrite
		queryParams.Del(languageParam)
		req.URL.RawQuery = queryParams.Encode()
	}
	return requestedLanguage
}

// setLanguageCookie persists the negotiated language in a strict, secure cookie.
func setLanguageCookie(w http.ResponseWriter, lang string) {
	cookie := &http.Cookie{
		Name:     languageParam,
		Value:    lang,
		Path:     "/",
		MaxAge:   config.CookieMaxAge,
		SameSite: http.SameSiteStrictMode,
		Secure:   true,
	}
	http.SetCookie(w, cookie)
}

// getLanguageFromCookie reads a previously set language cookie and matches it against
// the available languages. Returns language.Und when absent or unparsable.
func (cn *ContentNegotiation) getLanguageFromCookie(req *http.Request) language.Tag {
	var requestedLanguage = language.Und
	cookie, err := req.Cookie(languageParam)
	if err != nil {
		return requestedLanguage
	}
	lang := cookie.Value
	accepted, _, err := language.ParseAcceptLanguage(lang)
	if err != nil {
		return requestedLanguage
	}
	m := language.NewMatcher(cn.availableLanguages)
	_, langIndex, _ := m.Match(accepted...)
	requestedLanguage = cn.availableLanguages[langIndex]
	return requestedLanguage
}

// getLanguageFromHeader negotiates against the Accept-Language header.
// Returns language.Und when the header is absent or unparsable.
func (cn *ContentNegotiation) getLanguageFromHeader(req *http.Request) language.Tag {
	var requestedLanguage = language.Und
	if req.Header.Get(HeaderAcceptLanguage) != "" {
		accepted, _, err := language.ParseAcceptLanguage(req.Header.Get(HeaderAcceptLanguage))
		if err != nil {
			log.Printf("Failed to parse Accept-Language header: %v. Continuing\n", err)
			return requestedLanguage
		}
		m := language.NewMatcher(cn.availableLanguages)
		_, langIndex, _ := m.Match(accepted...)
		requestedLanguage = cn.availableLanguages[langIndex]
	}
	return requestedLanguage
}
package engine import ( "context" "crypto/tls" "fmt" "io" "net/http" "net/url" "os" "time" "github.com/failsafe-go/failsafe-go/failsafehttp" "golang.org/x/sync/errgroup" ) const bufferSize = 1 * 1024 * 1024 // 1MiB // Part piece of the file to download when HTTP Range Requests are supported type Part struct { Start int64 End int64 Size int64 } // Download downloads file from the given URL and stores the result in the given output location. // Will utilize multiple concurrent connections to increase transfer speed. The latter is only // possible when the remote server supports HTTP Range Requests, otherwise it falls back // to a regular/single connection download. Additionally, failed requests will be retried according // to the given settings. func Download(url url.URL, outputFilepath string, parallelism int, tlsSkipVerify bool, timeout time.Duration, retryDelay time.Duration, retryMaxDelay time.Duration, maxRetries int) (*time.Duration, error) { client := createHTTPClient(tlsSkipVerify, timeout, retryDelay, retryMaxDelay, maxRetries) outputFile, err := os.OpenFile(outputFilepath, os.O_CREATE|os.O_RDWR, 0644) if err != nil { return nil, err } defer outputFile.Close() start := time.Now() supportRanges, contentLength, err := checkRemoteFile(url, client) if err != nil { return nil, err } if supportRanges && parallelism > 1 { err = downloadWithMultipleConnections(url, outputFile, contentLength, int64(parallelism), client) } else { err = downloadWithSingleConnection(url, outputFile, client) } if err != nil { return nil, err } err = assertFileValid(outputFile, contentLength) if err != nil { return nil, err } timeSpent := time.Since(start) return &timeSpent, err } func checkRemoteFile(url url.URL, client *http.Client) (supportRanges bool, contentLength int64, err error) { res, err := client.Head(url.String()) if err != nil { return } defer res.Body.Close() contentLength = res.ContentLength supportRanges = res.Header.Get(HeaderAcceptRanges) == "bytes" && contentLength != 0 
return } func downloadWithSingleConnection(url url.URL, outputFile *os.File, client *http.Client) error { res, err := client.Get(url.String()) if err != nil { return err } defer res.Body.Close() buf := make([]byte, bufferSize) _, err = io.CopyBuffer(outputFile, res.Body, buf) return err } func downloadWithMultipleConnections(url url.URL, outputFile *os.File, contentLength int64, parallelism int64, client *http.Client) error { parts := make([]Part, parallelism) partSize := contentLength / parallelism remainder := contentLength % parallelism wg, _ := errgroup.WithContext(context.Background()) for i, part := range parts { start := int64(i) * partSize end := start + partSize if remainder != 0 && i == len(parts)-1 { end += remainder } part = Part{start, end, partSize} wg.Go(func() error { return downloadPart(client, url, outputFile.Name(), part) }) } return wg.Wait() } func downloadPart(client *http.Client, url url.URL, outputFilepath string, part Part) error { outputFile, err := os.OpenFile(outputFilepath, os.O_RDWR, 0664) if err != nil { return err } defer outputFile.Close() _, err = outputFile.Seek(part.Start, 0) if err != nil { return err } req, err := http.NewRequest(http.MethodGet, url.String(), nil) if err != nil { return err } req.Header.Set(HeaderRange, fmt.Sprintf("bytes=%d-%d", part.Start, part.End-1)) res, err := client.Do(req) if err != nil { return err } defer res.Body.Close() if res.StatusCode != http.StatusPartialContent { return fmt.Errorf("server advertises HTTP Range Request support "+ "but doesn't return status %d", http.StatusPartialContent) } buf := make([]byte, bufferSize) _, err = io.CopyBuffer(outputFile, res.Body, buf) return err } func assertFileValid(outputFile *os.File, contentLength int64) error { fi, err := outputFile.Stat() if err != nil { return err } if fi.Size() != contentLength { return fmt.Errorf("invalid file, content-length %d and file size %d mismatch", contentLength, fi.Size()) } return nil } func createHTTPClient(tlsSkipVerify 
bool, timeout time.Duration, retryDelay time.Duration, retryMaxDelay time.Duration, maxRetries int) *http.Client { transport := &http.Transport{ TLSClientConfig: &tls.Config{ InsecureSkipVerify: tlsSkipVerify, //nolint:gosec // on purpose, default is false }, } //nolint:bodyclose // false positive retryPolicy := failsafehttp.RetryPolicyBuilder(). WithBackoff(retryDelay, retryMaxDelay). //nolint:bodyclose // false positive WithMaxRetries(maxRetries). //nolint:bodyclose // false positive Build() //nolint:bodyclose // false positive return &http.Client{ Timeout: timeout, Transport: failsafehttp.NewRoundTripper(transport, retryPolicy), } }
package engine import ( "bytes" "compress/gzip" "context" "errors" "fmt" htmltemplate "html/template" "io" "log" "net/http" "net/http/httputil" "net/url" "os" "os/signal" "syscall" texttemplate "text/template" "time" "github.com/PDOK/gokoala/config" "github.com/go-chi/chi/v5" "github.com/go-chi/chi/v5/middleware" ) const ( templatesDir = "internal/engine/templates/" shutdownTimeout = 5 * time.Second HeaderLink = "Link" HeaderAccept = "Accept" HeaderAcceptLanguage = "Accept-Language" HeaderAcceptRanges = "Accept-Ranges" HeaderRange = "Range" HeaderContentType = "Content-Type" HeaderContentLength = "Content-Length" HeaderContentCrs = "Content-Crs" HeaderContentEncoding = "Content-Encoding" HeaderBaseURL = "X-BaseUrl" HeaderRequestedWith = "X-Requested-With" HeaderAPIVersion = "API-Version" ) // Engine encapsulates shared non-OGC API specific logic type Engine struct { Config *config.Config OpenAPI *OpenAPI Templates *Templates CN *ContentNegotiation Router *chi.Mux shutdownHooks []func() } // NewEngine builds a new Engine func NewEngine(configFile string, openAPIFile string, enableTrailingSlash bool, enableCORS bool) (*Engine, error) { cfg, err := config.NewConfig(configFile) if err != nil { return nil, err } return NewEngineWithConfig(cfg, openAPIFile, enableTrailingSlash, enableCORS), nil } // NewEngineWithConfig builds a new Engine func NewEngineWithConfig(config *config.Config, openAPIFile string, enableTrailingSlash bool, enableCORS bool) *Engine { contentNegotiation := newContentNegotiation(config.AvailableLanguages) templates := newTemplates(config) openAPI := newOpenAPI(config, []string{openAPIFile}, nil) router := newRouter(config.Version, enableTrailingSlash, enableCORS) engine := &Engine{ Config: config, OpenAPI: openAPI, Templates: templates, CN: contentNegotiation, Router: router, } // Default (non-OGC) endpoints newSitemap(engine) newHealthEndpoint(engine) newResourcesEndpoint(engine) return engine } // Start the engine by initializing all components 
and starting the server func (e *Engine) Start(address string, debugPort int, shutdownDelay int) error { // debug server (binds to localhost). if debugPort > 0 { go func() { debugAddress := fmt.Sprintf("localhost:%d", debugPort) debugRouter := chi.NewRouter() debugRouter.Use(middleware.Logger) debugRouter.Mount("/debug", middleware.Profiler()) err := e.startServer("debug server", debugAddress, 0, debugRouter) if err != nil { log.Fatalf("debug server failed %v", err) } }() } // main server return e.startServer("main server", address, shutdownDelay, e.Router) } // startServer creates and starts an HTTP server, also takes care of graceful shutdown func (e *Engine) startServer(name string, address string, shutdownDelay int, router *chi.Mux) error { // create HTTP server server := http.Server{ Addr: address, Handler: router, ReadTimeout: 15 * time.Second, ReadHeaderTimeout: 15 * time.Second, } ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT) defer stop() go func() { log.Printf("%s listening on http://%2s", name, address) // ListenAndServe always returns a non-nil error. After Shutdown or // Close, the returned error is ErrServerClosed if err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) { log.Fatalf("failed to shutdown %s: %v", name, err) } }() // listen for interrupt signal and then perform shutdown <-ctx.Done() stop() // execute shutdown hooks for _, shutdownHook := range e.shutdownHooks { shutdownHook() } if shutdownDelay > 0 { log.Printf("stop signal received, initiating shutdown of %s after %d seconds delay", name, shutdownDelay) time.Sleep(time.Duration(shutdownDelay) * time.Second) } log.Printf("shutting down %s gracefully", name) // shutdown with a max timeout. timeoutCtx, cancel := context.WithTimeout(context.Background(), shutdownTimeout) defer cancel() return server.Shutdown(timeoutCtx) } // RegisterShutdownHook register a func to execute during graceful shutdown, e.g. 
to clean up resources. func (e *Engine) RegisterShutdownHook(fn func()) { e.shutdownHooks = append(e.shutdownHooks, fn) } // RebuildOpenAPI rebuild the full OpenAPI spec with the newly given parameters. // Use only once during bootstrap for specific use cases! For example: when you want to expand a // specific part of the OpenAPI spec with data outside the configuration file (e.g. from a database). func (e *Engine) RebuildOpenAPI(openAPIParams any) { e.OpenAPI = newOpenAPI(e.Config, e.OpenAPI.extraOpenAPIFiles, openAPIParams) } // ParseTemplate parses both HTML and non-HTML templates depending on the format given in the TemplateKey and // stores it in the engine for future rendering using RenderAndServePage. func (e *Engine) ParseTemplate(key TemplateKey) { e.Templates.parseAndSaveTemplate(key) } // RenderTemplates renders both HTML and non-HTML templates depending on the format given in the TemplateKey. // This method also performs OpenAPI validation of the rendered template, therefore we also need the URL path. // The rendered templates are stored in the engine for future serving using ServePage. func (e *Engine) RenderTemplates(urlPath string, breadcrumbs []Breadcrumb, keys ...TemplateKey) { e.renderTemplates(urlPath, nil, breadcrumbs, true, keys...) } // RenderTemplatesWithParams renders both HTMl and non-HTML templates depending on the format given in the TemplateKey. func (e *Engine) RenderTemplatesWithParams(urlPath string, params any, breadcrumbs []Breadcrumb, keys ...TemplateKey) { e.renderTemplates(urlPath, params, breadcrumbs, true, keys...) 
} func (e *Engine) renderTemplates(urlPath string, params any, breadcrumbs []Breadcrumb, validate bool, keys ...TemplateKey) { for _, key := range keys { e.Templates.renderAndSaveTemplate(key, breadcrumbs, params) if validate { // we already perform OpenAPI validation here during startup to catch // issues early on, in addition to runtime OpenAPI response validation // all templates are created in all available languages, hence all are checked for lang := range e.Templates.localizers { key.Language = lang if err := e.validateStaticResponse(key, urlPath); err != nil { log.Fatal(err) } } } } } // RenderAndServePage renders an already parsed HTML or non-HTML template on-the-fly depending // on the format in the given TemplateKey. The result isn't store in engine, it's served directly to the client. // // NOTE: only used this for dynamic pages that can't be pre-rendered and cached (e.g. with data from a datastore), // otherwise use ServePage for pre-rendered pages. func (e *Engine) RenderAndServePage(w http.ResponseWriter, r *http.Request, key TemplateKey, params any, breadcrumbs []Breadcrumb) { // validate request if err := e.OpenAPI.ValidateRequest(r); err != nil { log.Printf("%v", err.Error()) RenderProblem(ProblemBadRequest, w, err.Error()) return } // get template parsedTemplate, err := e.Templates.getParsedTemplate(key) if err != nil { log.Printf("%v", err.Error()) RenderProblem(ProblemServerError, w) } // render output var output []byte if key.Format == FormatHTML { htmlTmpl := parsedTemplate.(*htmltemplate.Template) output = e.Templates.renderHTMLTemplate(htmlTmpl, r.URL, params, breadcrumbs, "") } else { jsonTmpl := parsedTemplate.(*texttemplate.Template) output = e.Templates.renderNonHTMLTemplate(jsonTmpl, params, key, "") } contentType := e.CN.formatToMediaType(key.Format) // validate response if err := e.OpenAPI.ValidateResponse(contentType, output, r); err != nil { log.Printf("%v", err.Error()) RenderProblem(ProblemServerError, w, err.Error()) return } 
writeResponse(w, contentType, output) } // ServePage serves a pre-rendered template while also validating against the OpenAPI spec func (e *Engine) ServePage(w http.ResponseWriter, r *http.Request, templateKey TemplateKey) { e.serve(w, r, &templateKey, true, true, "", nil) } // Serve serves the given response (arbitrary bytes) while also validating against the OpenAPI spec func (e *Engine) Serve(w http.ResponseWriter, r *http.Request, validateRequest bool, validateResponse bool, contentType string, output []byte) { e.serve(w, r, nil, validateRequest, validateResponse, contentType, output) } func (e *Engine) serve(w http.ResponseWriter, r *http.Request, templateKey *TemplateKey, validateRequest bool, validateResponse bool, contentType string, output []byte) { if validateRequest { if err := e.OpenAPI.ValidateRequest(r); err != nil { log.Printf("%v", err.Error()) RenderProblem(ProblemBadRequest, w, err.Error()) return } } if templateKey != nil { // render output var err error output, err = e.Templates.getRenderedTemplate(*templateKey) if err != nil { log.Printf("%v", err.Error()) RenderProblem(ProblemNotFound, w) return } contentType = e.CN.formatToMediaType(templateKey.Format) } if validateResponse { if err := e.OpenAPI.ValidateResponse(contentType, output, r); err != nil { log.Printf("%v", err.Error()) RenderProblem(ProblemServerError, w, err.Error()) return } } writeResponse(w, contentType, output) } // ReverseProxy forwards given HTTP request to given target server, and optionally tweaks response func (e *Engine) ReverseProxy(w http.ResponseWriter, r *http.Request, target *url.URL, prefer204 bool, contentTypeOverwrite string) { e.ReverseProxyAndValidate(w, r, target, prefer204, contentTypeOverwrite, false) } // ReverseProxyAndValidate forwards given HTTP request to given target server, and optionally tweaks and validates response func (e *Engine) ReverseProxyAndValidate(w http.ResponseWriter, r *http.Request, target *url.URL, prefer204 bool, contentTypeOverwrite 
string, validateResponse bool) { rewrite := func(r *httputil.ProxyRequest) { r.Out.URL = target r.Out.Host = "" // Don't pass Host header (similar to Traefik's passHostHeader=false) r.SetXForwarded() // Set X-Forwarded-* headers. r.Out.Header.Set(HeaderBaseURL, e.Config.BaseURL.String()) } errorHandler := func(w http.ResponseWriter, _ *http.Request, err error) { log.Printf("failed to proxy request: %v", err) RenderProblem(ProblemBadGateway, w) } modifyResponse := func(proxyRes *http.Response) error { if prefer204 { // OGC spec: If the tile has no content due to lack of data in the area, but is within the data // resource its tile matrix sets and tile matrix sets limits, the HTTP response will use the status // code either 204 (indicating an empty tile with no content) or a 200 if proxyRes.StatusCode == http.StatusNotFound { proxyRes.StatusCode = http.StatusNoContent removeBody(proxyRes) } } if contentTypeOverwrite != "" { proxyRes.Header.Set(HeaderContentType, contentTypeOverwrite) } if contentType := proxyRes.Header.Get(HeaderContentType); contentType == MediaTypeJSON && validateResponse { var reader io.ReadCloser var err error if proxyRes.Header.Get(HeaderContentEncoding) == FormatGzip { reader, err = gzip.NewReader(proxyRes.Body) if err != nil { return err } } else { reader = proxyRes.Body } res, err := io.ReadAll(reader) if err != nil { return err } e.Serve(w, r, false, true, contentType, res) } return nil } reverseProxy := &httputil.ReverseProxy{ Rewrite: rewrite, ModifyResponse: modifyResponse, ErrorHandler: errorHandler, } reverseProxy.ServeHTTP(w, r) } func removeBody(proxyRes *http.Response) { buf := bytes.NewBuffer(make([]byte, 0)) proxyRes.Body = io.NopCloser(buf) proxyRes.Header[HeaderContentLength] = []string{"0"} proxyRes.Header[HeaderContentType] = []string{} } func (e *Engine) validateStaticResponse(key TemplateKey, urlPath string) error { template, _ := e.Templates.getRenderedTemplate(key) serverURL := normalizeBaseURL(e.Config.BaseURL.String()) 
req, err := http.NewRequest(http.MethodGet, serverURL+urlPath, nil) if err != nil { return fmt.Errorf("failed to construct request to validate %s "+ "template against OpenAPI spec %v", key.Name, err) } err = e.OpenAPI.ValidateResponse(e.CN.formatToMediaType(key.Format), template, req) if err != nil { return fmt.Errorf("validation of template %s failed: %w", key.Name, err) } return nil } // return response output to client func writeResponse(w http.ResponseWriter, contentType string, output []byte) { if contentType != "" { w.Header().Set(HeaderContentType, contentType) } SafeWrite(w.Write, output) } // SafeWrite executes the given http.ResponseWriter.Write while logging errors func SafeWrite(write func([]byte) (int, error), body []byte) { _, err := write(body) if err != nil { log.Printf("failed to write response: %v", err) } }
package engine import ( "log" "net/http" "net/url" "time" ) func newHealthEndpoint(e *Engine) { var target *url.URL if tilesConfig := e.Config.OgcAPI.Tiles; tilesConfig != nil { var err error switch { case tilesConfig.DatasetTiles != nil: target, err = url.Parse(tilesConfig.DatasetTiles.TileServer.String() + *tilesConfig.DatasetTiles.HealthCheck.TilePath) case len(tilesConfig.Collections) > 0 && tilesConfig.Collections[0].Tiles != nil: target, err = url.Parse(tilesConfig.Collections[0].Tiles.GeoDataTiles.TileServer.String() + *tilesConfig.Collections[0].Tiles.GeoDataTiles.HealthCheck.TilePath) default: log.Println("cannot determine health check tilepath, falling back to basic check") } if err != nil { log.Fatalf("invalid health check tilepath: %v", err) } } if target != nil { client := &http.Client{Timeout: time.Duration(500) * time.Millisecond} e.Router.Get("/health", func(w http.ResponseWriter, _ *http.Request) { resp, err := client.Head(target.String()) if err != nil { // exact error is irrelevant for health monitoring, but log it for insight log.Printf("healthcheck failed: %v", err) w.WriteHeader(http.StatusNotFound) } else { w.WriteHeader(resp.StatusCode) resp.Body.Close() } }) } else { e.Router.Get("/health", func(w http.ResponseWriter, _ *http.Request) { SafeWrite(w.Write, []byte("OK")) }) } }
package engine import ( "github.com/PDOK/gokoala/config" "github.com/nicksnyder/go-i18n/v2/i18n" "golang.org/x/text/language" "gopkg.in/yaml.v3" ) func newLocalizers(availableLanguages []config.Language) map[language.Tag]i18n.Localizer { localizers := make(map[language.Tag]i18n.Localizer) // add localizer for each available language for _, lang := range availableLanguages { bundle := i18n.NewBundle(lang.Tag) bundle.RegisterUnmarshalFunc("yaml", yaml.Unmarshal) bundle.MustLoadMessageFile("assets/i18n/" + lang.String() + ".yaml") localizers[lang.Tag] = *i18n.NewLocalizer(bundle, lang.String()) } return localizers }
package engine

import (
	"bytes"
	"context"
	"encoding/json"
	"errors"
	"fmt"
	"io"
	"log"
	"net/http"
	"net/url"
	"path/filepath"
	"regexp"
	"strings"
	texttemplate "text/template"

	gokoalaconfig "github.com/PDOK/gokoala/config"
	orderedmap "github.com/wk8/go-ordered-map/v2"

	"github.com/PDOK/gokoala/internal/engine/util"
	"github.com/getkin/kin-openapi/openapi3"
	"github.com/getkin/kin-openapi/openapi3filter"
	"github.com/getkin/kin-openapi/routers"
	"github.com/getkin/kin-openapi/routers/gorillamux"
)

const (
	// locations of the OpenAPI spec templates, one per OGC API building block
	specPath          = templatesDir + "openapi/"
	preamble          = specPath + "preamble.go.json"
	problems          = specPath + "problems.go.json"
	commonCollections = specPath + "common-collections.go.json"
	featuresSpec      = specPath + "features.go.json"
	tilesSpec         = specPath + "tiles.go.json"
	stylesSpec        = specPath + "styles.go.json"
	geoVolumesSpec    = specPath + "3dgeovolumes.go.json"
	commonSpec        = specPath + "common.go.json"

	// HTMLRegex rough pattern used to verify that a response body contains HTML
	HTMLRegex = `<[/]?([a-zA-Z]+).*?>`
)

// OpenAPI the (merged) OpenAPI spec for this server, plus the machinery to
// validate requests/responses against it.
type OpenAPI struct {
	spec     *openapi3.T
	SpecJSON []byte

	config            *gokoalaconfig.Config
	router            routers.Router
	extraOpenAPIFiles []string
}

// newOpenAPI merges the default spec templates (for the OGC building blocks enabled
// in the config) with any extra user-provided spec files, validates the result and
// prepares a router for request/response validation.
func newOpenAPI(config *gokoalaconfig.Config, extraOpenAPIFiles []string, openAPIParams any) *OpenAPI {
	setupRequestResponseValidation()
	ctx := context.Background()

	// order matters, see mergeSpecs for details.
	defaultOpenAPIFiles := []string{commonSpec}
	if config.AllCollections() != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, commonCollections)
	}
	if config.OgcAPI.Tiles != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, tilesSpec)
	}
	if config.OgcAPI.Features != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, featuresSpec)
	}
	if config.OgcAPI.Styles != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, stylesSpec)
	}
	if config.OgcAPI.GeoVolumes != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, geoVolumesSpec)
	}

	// add preamble first
	openAPIFiles := []string{preamble}
	// add extra spec(s) thereafter, to allow it to override default openapi specs
	openAPIFiles = append(openAPIFiles, extraOpenAPIFiles...)
	openAPIFiles = append(openAPIFiles, defaultOpenAPIFiles...)

	resultSpec, resultSpecJSON := mergeSpecs(ctx, config, openAPIFiles, openAPIParams)
	validateSpec(ctx, resultSpec, resultSpecJSON)

	// normalize server URLs so the validation router can match incoming requests
	for _, server := range resultSpec.Servers {
		server.URL = normalizeBaseURL(server.URL)
	}

	return &OpenAPI{
		config:            config,
		spec:              resultSpec,
		SpecJSON:          util.PrettyPrintJSON(resultSpecJSON, ""),
		router:            newOpenAPIRouter(resultSpec),
		extraOpenAPIFiles: extraOpenAPIFiles,
	}
}

// setupRequestResponseValidation registers body decoders used during OpenAPI
// validation: HTML responses are checked against a rough HTML regex, JSON-family
// responses are checked to contain well-formed JSON.
func setupRequestResponseValidation() {
	htmlRegex := regexp.MustCompile(HTMLRegex)
	openapi3filter.RegisterBodyDecoder(MediaTypeHTML,
		func(body io.Reader, _ http.Header, _ *openapi3.SchemaRef,
			_ openapi3filter.EncodingFn) (any, error) {
			data, err := io.ReadAll(body)
			if err != nil {
				return nil, errors.New("failed to read response body")
			}
			if !htmlRegex.Match(data) {
				return nil, errors.New("response doesn't contain HTML")
			}
			return string(data), nil
		})

	for _, mediaType := range MediaTypeJSONFamily {
		openapi3filter.RegisterBodyDecoder(mediaType,
			func(body io.Reader, _ http.Header, _ *openapi3.SchemaRef,
				_ openapi3filter.EncodingFn) (any, error) {
				var value any
				dec := json.NewDecoder(body)
				dec.UseNumber() // preserve number precision instead of defaulting to float64
				if err := dec.Decode(&value); err != nil {
					return nil, errors.New("response doesn't contain valid JSON")
				}
				return value, nil
			})
	}
}

// mergeSpecs merges the given OpenAPI specs.
//
// Order matters! We start with the preamble, it is highest in rank and there's no way to override it.
// Then the files are merged according to their given order. Files that are merged first
// have a higher change of getting their changes in the final spec than files that follow later.
//
// The OpenAPI spec optionally provided through the CLI should be the second (after preamble) item in the
// `files` slice since it allows the user to override other/default specs.
func mergeSpecs(ctx context.Context, config *gokoalaconfig.Config, files []string, params any) (*openapi3.T, []byte) {
	loader := &openapi3.Loader{Context: ctx, IsExternalRefsAllowed: false}

	if len(files) < 1 {
		log.Fatalf("files can't be empty, at least OGC Common is expected")
	}
	var resultSpecJSON []byte
	var resultSpec *openapi3.T

	for _, file := range files {
		if file == "" {
			continue
		}
		// each spec file is a Go template that's rendered before merging
		specJSON := renderOpenAPITemplate(config, file, params)
		var mergedJSON []byte
		if resultSpecJSON == nil {
			// first spec: nothing to merge yet
			mergedJSON = specJSON
		} else {
			var err error
			mergedJSON, err = util.MergeJSON(resultSpecJSON, specJSON, orderByOpenAPIConvention)
			if err != nil {
				log.Print(string(mergedJSON))
				log.Fatalf("failed to merge OpenAPI specs: %v", err)
			}
		}
		resultSpecJSON = mergedJSON
		resultSpec = loadSpec(loader, mergedJSON)
	}
	return resultSpec, resultSpecJSON
}

// orderByOpenAPIConvention orders the top-level keys of the merged spec according
// to the sequence commonly used in OpenAPI documents; remaining keys follow after.
func orderByOpenAPIConvention(output map[string]any) any {
	result := orderedmap.New[string, any]()

	// OpenAPI specs are commonly ordered according to the following sequence.
	desiredOrder := []string{"openapi", "info", "servers", "paths", "components"}
	for _, order := range desiredOrder {
		for k, v := range output {
			if k == order {
				result.Set(k, v)
			}
		}
	}
	// add remaining keys
	for k, v := range output {
		result.Set(k, v)
	}
	return result
}

// loadSpec parses the given (merged) JSON into an OpenAPI document, aborting the
// application when the JSON isn't a loadable spec.
func loadSpec(loader *openapi3.Loader, mergedJSON []byte, fileName ...string) *openapi3.T {
	resultSpec, err := loader.LoadFromData(mergedJSON)
	if err != nil {
		log.Print(string(mergedJSON))
		log.Fatalf("failed to load merged OpenAPI spec %s, due to %v", fileName, err)
	}
	return resultSpec
}

// validateSpec asserts the final merged spec itself is a valid OpenAPI document,
// aborting the application otherwise.
func validateSpec(ctx context.Context, finalSpec *openapi3.T, finalSpecRaw []byte) {
	// Validate OGC OpenAPI spec. Note: the examples provided in the official spec aren't valid.
	err := finalSpec.Validate(ctx, openapi3.DisableExamplesValidation())
	if err != nil {
		log.Print(string(finalSpecRaw))
		log.Fatalf("invalid OpenAPI spec: %v", err)
	}
}

// newOpenAPIRouter creates the router used to match incoming requests against the
// spec during request/response validation.
func newOpenAPIRouter(doc *openapi3.T) routers.Router {
	openAPIRouter, err := gorillamux.NewRouter(doc)
	if err != nil {
		log.Fatalf("failed to setup OpenAPI router: %v", err)
	}
	return openAPIRouter
}

// renderOpenAPITemplate renders the given spec template file (with the config and
// optional params as template data) and returns the resulting JSON.
func renderOpenAPITemplate(config *gokoalaconfig.Config, fileName string, params any) []byte {
	file := filepath.Clean(fileName)
	files := []string{problems, file} // add problems template too since it's an "include" template
	parsed := texttemplate.Must(texttemplate.New(filepath.Base(file)).Funcs(globalTemplateFuncs).ParseFiles(files...))

	var rendered bytes.Buffer
	if err := parsed.Execute(&rendered, &TemplateData{Config: config, Params: params}); err != nil {
		log.Fatalf("failed to render %s, error: %v", file, err)
	}
	return rendered.Bytes()
}

// ValidateRequest validates the given request against the OpenAPI spec. Requests
// for routes not present in the spec are skipped (not treated as failures).
func (o *OpenAPI) ValidateRequest(r *http.Request) error {
	requestValidationInput, _ := o.getRequestValidationInput(r)
	if requestValidationInput != nil {
		err := openapi3filter.ValidateRequest(context.Background(), requestValidationInput)
		if err != nil {
			var schemaErr *openapi3.SchemaError
			// Don't fail on maximum constraints because OGC has decided these are soft limits, for instance
			// in features: "If the value of the limit parameter is larger than the maximum value, this
			// SHALL NOT result in an error (instead use the maximum as the parameter value)."
			if errors.As(err, &schemaErr) && schemaErr.SchemaField == "maximum" {
				return nil
			}
			return fmt.Errorf("request doesn't conform to OpenAPI spec: %w", err)
		}
	}
	return nil
}

// ValidateResponse validates the given response body (assumed to belong to a 200
// response with the given content type) against the OpenAPI spec. Responses for
// routes not present in the spec are skipped.
func (o *OpenAPI) ValidateResponse(contentType string, body []byte, r *http.Request) error {
	requestValidationInput, _ := o.getRequestValidationInput(r)
	if requestValidationInput != nil {
		responseHeaders := http.Header{HeaderContentType: []string{contentType}}
		responseCode := 200
		responseValidationInput := &openapi3filter.ResponseValidationInput{
			RequestValidationInput: requestValidationInput,
			Status:                 responseCode,
			Header:                 responseHeaders,
		}
		responseValidationInput.SetBodyBytes(body)

		err := openapi3filter.ValidateResponse(context.Background(), responseValidationInput)
		if err != nil {
			return fmt.Errorf("response doesn't conform to OpenAPI spec: %w", err)
		}
	}
	return nil
}

// getRequestValidationInput matches the request against the spec's routes and
// prepares the validation input; returns (nil, err) when the route isn't in the spec.
func (o *OpenAPI) getRequestValidationInput(r *http.Request) (*openapi3filter.RequestValidationInput, error) {
	route, pathParams, err := o.router.FindRoute(r)
	if err != nil {
		log.Printf("route not found in OpenAPI spec for url %s (host: %s), "+
			"skipping OpenAPI validation", r.URL, r.Host)
		return nil, err
	}
	opts := &openapi3filter.Options{
		SkipSettingDefaults: true,
	}
	opts.WithCustomSchemaErrorFunc(func(err *openapi3.SchemaError) string {
		return err.Reason
	})
	return &openapi3filter.RequestValidationInput{
		Request:    r,
		PathParams: pathParams,
		Route:      route,
		Options:    opts,
	}, nil
}

// normalizeBaseURL normalizes the given base URL so our OpenAPI validator is able to match
// requests against the OpenAPI spec. This involves:
//
// - striping the context root (path) from the base URL. If you use a context root we expect
// you to have a proxy fronting GoKoala, therefore we also need to strip it from the base
// URL used during OpenAPI validation
//
// - replacing HTTPS scheme with HTTP. Since GoKoala doesn't support HTTPS we always perform
// OpenAPI validation against HTTP requests. Note: it's possible to offer GoKoala over HTTPS, but you'll
// need to take care of that in your proxy server (or loadbalancer/service mesh/etc) fronting GoKoala.
func normalizeBaseURL(baseURL string) string {
	serverURL, _ := url.Parse(baseURL)
	result := strings.Replace(baseURL, serverURL.Scheme, "http", 1)
	result = strings.Replace(result, serverURL.Path, "", 1)
	return result
}
package engine import ( "log" "net/http" "time" "schneider.vip/problem" ) const ( timestampKey = "timeStamp" defaultMessageServerErr = "An unexpected error has occurred, try again or contact support if the problem persists" defaultMessageBadGateway = "Failed to proxy request, try again or contact support if the problem persists" ) type ProblemKind int var Now = time.Now // allow mocking // The following problems should be added to openapi/problems.go.json var ( ProblemBadRequest = ProblemKind(http.StatusBadRequest) ProblemNotFound = ProblemKind(http.StatusNotFound) ProblemNotAcceptable = ProblemKind(http.StatusNotAcceptable) ProblemServerError = ProblemKind(http.StatusInternalServerError) ProblemBadGateway = ProblemKind(http.StatusBadGateway) ) // RenderProblem writes RFC 7807 (https://tools.ietf.org/html/rfc7807) problem to client. // Only the listed problem kinds are supported since they should be advertised in the OpenAPI spec. // Optionally a caller may add a details (single string) about the problem. Warning: Be sure to not // include sensitive information in the details string! func RenderProblem(kind ProblemKind, w http.ResponseWriter, details ...string) { p := problem.Of(int(kind)) if len(details) > 0 { //nolint:gocritic // switch not handy here p = p.Append(problem.Detail(details[0])) } else if kind == ProblemServerError { p = p.Append(problem.Detail(defaultMessageServerErr)) } else if kind == ProblemBadGateway { p = p.Append(problem.Detail(defaultMessageBadGateway)) } p = p.Append(problem.Custom(timestampKey, Now().UTC().Format(time.RFC3339))) _, err := p.WriteTo(w) if err != nil { log.Printf("failed to write response: %v", err) } } // RenderProblemAndLog writes RFC 7807 (https://tools.ietf.org/html/rfc7807) problem to client + logs message to stdout. func RenderProblemAndLog(kind ProblemKind, w http.ResponseWriter, err error, details ...string) { log.Printf("%v", err.Error()) RenderProblem(kind, w, details...) }
package engine import ( "log" "net/http" "net/url" "github.com/go-chi/chi/v5" ) // Resources endpoint to serve static assets, either from local storage or through reverse proxy func newResourcesEndpoint(e *Engine) { res := e.Config.Resources if res == nil { return } if res.Directory != nil && *res.Directory != "" { resourcesPath := *res.Directory e.Router.Handle("/resources/*", http.StripPrefix("/resources", http.FileServer(http.Dir(resourcesPath)))) } else if res.URL != nil && res.URL.String() != "" { e.Router.Get("/resources/*", proxy(e.ReverseProxy, res.URL.String())) } } type revProxy func(w http.ResponseWriter, r *http.Request, target *url.URL, prefer204 bool, overwrite string) func proxy(revProxy revProxy, resourcesURL string) func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) { resourcePath, _ := url.JoinPath("/", chi.URLParam(r, "*")) target, err := url.ParseRequestURI(resourcesURL + resourcePath) if err != nil { log.Printf("invalid target url, can't proxy resources: %v", err) RenderProblem(ProblemServerError, w) return } revProxy(w, r, target, true, "") } }
package engine

import (
	"net/http"
	"runtime/debug"
	"time"

	"github.com/go-chi/chi/v5"
	"github.com/go-chi/chi/v5/middleware"
	"github.com/go-chi/cors"
)

// newRouter creates the Chi router with the middleware stack shared by all OGC
// endpoints: real-IP resolution, logging, panic recovery, HEAD support, optional
// trailing-slash stripping and CORS, an API-Version header and gzip compression.
func newRouter(version string, enableTrailingSlash bool, enableCORS bool) *chi.Mux {
	router := chi.NewRouter()
	router.Use(middleware.RealIP)  // should be first middleware
	router.Use(middleware.Logger)  // log to console
	router.Use(problemRecoverer)   // catch panics and turn into 500s
	router.Use(middleware.GetHead) // support HEAD requests https://docs.ogc.org/is/17-069r4/17-069r4.html#_http_1_1
	if enableTrailingSlash {
		router.Use(middleware.StripSlashes)
	}
	if enableCORS {
		router.Use(cors.Handler(cors.Options{
			AllowedOrigins:   []string{"*"},
			AllowedMethods:   []string{http.MethodGet, http.MethodHead, http.MethodOptions},
			AllowedHeaders:   []string{HeaderRequestedWith},
			ExposedHeaders:   []string{HeaderContentCrs, HeaderLink},
			AllowCredentials: false,
			MaxAge:           int((time.Hour * 24).Seconds()),
		}))
	}
	// some GIS clients don't send proper CORS preflight requests, still respond with OK for any OPTIONS request
	router.Use(optionsFallback)
	// add semver header, implements https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-57
	router.Use(middleware.SetHeader(HeaderAPIVersion, version))
	router.Use(middleware.Compress(5, CompressibleMediaTypes...)) // enable gzip responses
	return router
}

// optionsFallback answers any OPTIONS request with 200 OK, short-circuiting the
// rest of the handler chain; all other requests pass through untouched.
func optionsFallback(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		if r.Method == http.MethodOptions {
			w.WriteHeader(http.StatusOK)
			return
		}
		next.ServeHTTP(w, r)
	})
}

// Custom middleware.Recoverer adapted from Chi (https://github.com/go-chi/chi/blob/master/middleware/recoverer.go)
// to return RFC-7807 Problem messages.
func problemRecoverer(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if rvr := recover(); rvr != nil {
				if rvr == http.ErrAbortHandler { //nolint:errorlint // already so in Chi
					// we don't recover http.ErrAbortHandler so the response
					// to the client is aborted, this should not be logged
					panic(rvr)
				}

				logEntry := middleware.GetLogEntry(r)
				if logEntry != nil {
					logEntry.Panic(rvr, debug.Stack())
				} else {
					middleware.PrintPrettyStack(rvr)
				}

				// skip writing a problem during protocol upgrades (e.g. websockets)
				if r.Header.Get("Connection") != "Upgrade" {
					RenderProblem(ProblemServerError, w)
				}
			}
		}()
		next.ServeHTTP(w, r)
	})
}
package engine

import "net/http"

// newSitemap pre-renders /sitemap.xml and /robots.txt from their templates
// and mounts a route for each.
func newSitemap(e *Engine) {
	pages := map[string]string{
		"/sitemap.xml": "sitemap.go.xml",
		"/robots.txt":  "robots.go.txt",
	}
	for path, tmpl := range pages {
		// templateKey is declared per iteration, so each closure below
		// captures its own key
		templateKey := NewTemplateKey(templatesDir + tmpl)
		e.renderTemplates(path, nil, nil, false, templateKey)
		e.Router.Get(path, func(w http.ResponseWriter, r *http.Request) {
			e.serve(w, r, &templateKey, false, false, "", nil)
		})
	}
}
package engine

import (
	"bytes"
	"fmt"
	htmltemplate "html/template"
	"log"
	"net/url"
	"path/filepath"
	"strings"
	texttemplate "text/template"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine/util"
	"github.com/nicksnyder/go-i18n/v2/i18n"
	"golang.org/x/text/language"
)

const (
	// layoutFile is the shared HTML layout every HTML template is parsed together with
	layoutFile = "layout.go.html"
)

// TemplateKey unique key to register and lookup Go templates
type TemplateKey struct {
	// Name of the template, the filename including extension
	Name string

	// Directory in which the template resides
	Directory string

	// Format the file format based on the filename extension, 'html' or 'json'
	Format string

	// Language of the contents of the template
	Language language.Tag

	// Optional. Only required when you want to render the same template multiple times (with different content).
	// By specifying an 'instance name' you can refer to a certain instance of a rendered template later on.
	InstanceName string
}

// TemplateData the data/variables passed as an argument into the template.
type TemplateData struct {
	// Config set during startup based on the given config file
	Config *config.Config

	// Params optional parameters not part of GoKoala's config file. You can use
	// this to provide extra data to a template at rendering time.
	Params any

	// Breadcrumb path to the page, in key-value pairs of name->path
	Breadcrumbs []Breadcrumb

	// Request URL (unexported, only used to derive formats/query strings below)
	url *url.URL
}

// AvailableFormats returns the output formats available for the current page
func (td *TemplateData) AvailableFormats() map[string]string {
	// feature pages (anything with "/items" in the path) support extra formats
	if td.url != nil && strings.Contains(td.url.Path, "/items") {
		return td.AvailableFormatsFeatures()
	}
	return OutputFormatDefault
}

// AvailableFormatsFeatures convenience function
func (td *TemplateData) AvailableFormatsFeatures() map[string]string {
	return OutputFormatFeatures
}

// QueryString returns ?foo=a&bar=b style query string of the current page,
// with the format parameter overridden by the given format (when non-empty)
func (td *TemplateData) QueryString(format string) string {
	if td.url != nil {
		q := td.url.Query()
		if format != "" {
			q.Set(FormatParam, format)
		}
		return "?" + q.Encode()
	}
	// no request URL available: fall back to a query string with only the format param
	return fmt.Sprintf("?%s=%s", FormatParam, format)
}

// Breadcrumb a single name->path entry in the breadcrumb trail of a page
type Breadcrumb struct {
	Name string
	Path string
}

// NewTemplateKey build TemplateKeys
func NewTemplateKey(path string) TemplateKey {
	return NewTemplateKeyWithName(path, "")
}

// NewTemplateKeyWithLanguage build TemplateKey for the given language
func NewTemplateKeyWithLanguage(path string, language language.Tag) TemplateKey {
	return NewTemplateKeyWithNameAndLanguage(path, "", language)
}

// NewTemplateKeyWithName build TemplateKey with InstanceName (see docs in struct)
func NewTemplateKeyWithName(path string, instanceName string) TemplateKey {
	// Dutch is used as the default language here
	return NewTemplateKeyWithNameAndLanguage(path, instanceName, language.Dutch)
}

// NewTemplateKeyWithNameAndLanguage build fully-specified TemplateKey
func NewTemplateKeyWithNameAndLanguage(path string, instanceName string, language language.Tag) TemplateKey {
	cleanPath := filepath.Clean(path)
	return TemplateKey{
		Name:         filepath.Base(cleanPath),
		Directory:    filepath.Dir(cleanPath),
		Format:       strings.TrimPrefix(filepath.Ext(path), "."),
		Language:     language,
		InstanceName: instanceName,
	}
}

// ExpandTemplateKey returns a copy of the given key with its language replaced
func ExpandTemplateKey(key TemplateKey, language language.Tag) TemplateKey {
	copyKey := key
	copyKey.Language = language
	return copyKey
}

// Templates holds all templates of this application, both in parsed and in pre-rendered form
type Templates struct {
	// ParsedTemplates templates loaded from disk and parsed to an in-memory Go representation.
	ParsedTemplates map[TemplateKey]any

	// RenderedTemplates templates parsed + rendered to their actual output format like JSON, HTMl, etc.
	// We prefer pre-rendered templates whenever possible. These are stored in this map.
	RenderedTemplates map[TemplateKey][]byte

	// config the application config, passed into every template as TemplateData.Config
	config *config.Config
	// localizers one i18n localizer per supported language
	localizers map[language.Tag]i18n.Localizer
}

// newTemplates construct empty template registry for the given config
func newTemplates(config *config.Config) *Templates {
	templates := &Templates{
		ParsedTemplates:   make(map[TemplateKey]any),
		RenderedTemplates: make(map[TemplateKey][]byte),
		config:            config,
		localizers:        newLocalizers(config.AvailableLanguages),
	}
	return templates
}

// getParsedTemplate lookup a previously parsed template by key
func (t *Templates) getParsedTemplate(key TemplateKey) (any, error) {
	if parsedTemplate, ok := t.ParsedTemplates[key]; ok {
		return parsedTemplate, nil
	}
	return nil, fmt.Errorf("no parsed template with name %s", key.Name)
}

// getRenderedTemplate lookup a previously rendered template by key
func (t *Templates) getRenderedTemplate(key TemplateKey) ([]byte, error) {
	if RenderedTemplate, ok := t.RenderedTemplates[key]; ok {
		return RenderedTemplate, nil
	}
	return nil, fmt.Errorf("no rendered template with name %s", key.Name)
}

// parseAndSaveTemplate parse the template once per supported language and cache the results
func (t *Templates) parseAndSaveTemplate(key TemplateKey) {
	for lang := range t.localizers {
		keyWithLang := ExpandTemplateKey(key, lang)
		if key.Format == FormatHTML {
			_, parsed := t.parseHTMLTemplate(keyWithLang, lang)
			t.ParsedTemplates[keyWithLang] = parsed
		} else {
			_, parsed := t.parseNonHTMLTemplate(keyWithLang, lang)
			t.ParsedTemplates[keyWithLang] = parsed
		}
	}
}

// renderAndSaveTemplate render the template once per supported language and cache the output bytes
func (t *Templates) renderAndSaveTemplate(key TemplateKey, breadcrumbs []Breadcrumb, params any) {
	for lang := range t.localizers {
		var result []byte
		if key.Format == FormatHTML {
			file, parsed := t.parseHTMLTemplate(key, lang)
			result = t.renderHTMLTemplate(parsed, nil, params, breadcrumbs, file)
		} else {
			file, parsed := t.parseNonHTMLTemplate(key, lang)
			result = t.renderNonHTMLTemplate(parsed, params, key, file)
		}
		// Store rendered template per language
		// (key is a value copy, mutating Language here doesn't affect the caller)
		key.Language = lang
		t.RenderedTemplates[key] = result
	}
}

// parseHTMLTemplate parse an HTML template (together with the shared layout file),
// returns the resolved file path and the parsed template. Panics on parse errors (startup-time only).
func (t *Templates) parseHTMLTemplate(key TemplateKey, lang language.Tag) (string, *htmltemplate.Template) {
	file := filepath.Clean(filepath.Join(key.Directory, key.Name))
	templateFuncs := t.createTemplateFuncs(lang)
	parsed := htmltemplate.Must(htmltemplate.New(layoutFile).
		Funcs(templateFuncs).ParseFiles(templatesDir+layoutFile, file))
	return file, parsed
}

// renderHTMLTemplate execute a parsed HTML template, terminates the process on failure
func (t *Templates) renderHTMLTemplate(parsed *htmltemplate.Template, url *url.URL,
	params any, breadcrumbs []Breadcrumb, file string) []byte {
	var rendered bytes.Buffer
	if err := parsed.Execute(&rendered, &TemplateData{
		Config:      t.config,
		Params:      params,
		Breadcrumbs: breadcrumbs,
		url:         url,
	}); err != nil {
		log.Fatalf("failed to execute HTML template %s, error: %v", file, err)
	}
	return rendered.Bytes()
}

// parseNonHTMLTemplate parse a non-HTML (e.g. JSON/XML/text) template,
// returns the resolved file path and the parsed template. Panics on parse errors (startup-time only).
func (t *Templates) parseNonHTMLTemplate(key TemplateKey, lang language.Tag) (string, *texttemplate.Template) {
	file := filepath.Clean(filepath.Join(key.Directory, key.Name))
	templateFuncs := t.createTemplateFuncs(lang)
	parsed := texttemplate.Must(texttemplate.New(filepath.Base(file)).
		Funcs(templateFuncs).Parse(util.ReadFile(file)))
	return file, parsed
}

// renderNonHTMLTemplate execute a parsed non-HTML template, terminates the process on failure
func (t *Templates) renderNonHTMLTemplate(parsed *texttemplate.Template, params any, key TemplateKey, file string) []byte {
	var rendered bytes.Buffer
	if err := parsed.Execute(&rendered, &TemplateData{
		Config: t.config,
		Params: params,
	}); err != nil {
		log.Fatalf("failed to execute template %s, error: %v", file, err)
	}
	var result = rendered.Bytes()
	if strings.Contains(key.Format, FormatJSON) {
		// pretty print all JSON (or derivatives like TileJSON)
		result = util.PrettyPrintJSON(result, key.Name)
	}
	return result
}

// createTemplateFuncs combine the global template functions with
// language-specific ones (currently only "i18n")
func (t *Templates) createTemplateFuncs(lang language.Tag) map[string]any {
	return combineFuncMaps(globalTemplateFuncs, texttemplate.FuncMap{
		// create func just-in-time based on TemplateKey
		"i18n": func(messageID string) htmltemplate.HTML {
			localizer := t.localizers[lang]
			translated := localizer.MustLocalize(&i18n.LocalizeConfig{MessageID: messageID})
			return htmltemplate.HTML(translated) //nolint:gosec // since we trust our language files
		},
	})
}
package engine

import (
	htmltemplate "html/template"
	"log"
	"regexp"
	"strconv"
	"strings"
	texttemplate "text/template"
	"time"

	"github.com/docker/go-units"
	sprig "github.com/go-task/slim-sprig"
	gomarkdown "github.com/gomarkdown/markdown"
	gomarkdownhtml "github.com/gomarkdown/markdown/html"
	gomarkdownparser "github.com/gomarkdown/markdown/parser"
	stripmd "github.com/writeas/go-strip-markdown/v2"
)

var (
	// globalTemplateFuncs functions available in all templates, populated by init below
	globalTemplateFuncs texttemplate.FuncMap
	// linkRegex matches a string consisting solely of a single HTTP(S) URL
	linkRegex = regexp.MustCompile(`^https?://\S+$`)
)

// Initialize functions to be used in html/json/etc templates
func init() {
	customFuncs := texttemplate.FuncMap{
		// custom template functions (keep lowercase)
		"markdown":   markdown,
		"unmarkdown": unmarkdown,
		"truncate":   truncateText,
		"humansize":  humanSize,
		"bytessize":  bytesSize,
		"isdate":     isDate,
		"islink":     isLink,
	}
	sprigFuncs := sprig.FuncMap() // we also support https://github.com/go-task/slim-sprig functions
	globalTemplateFuncs = combineFuncMaps(customFuncs, sprigFuncs)
}

// combine given FuncMaps; on duplicate keys the last map wins
func combineFuncMaps(funcMaps ...map[string]any) map[string]any {
	result := make(map[string]any)
	for _, funcMap := range funcMaps {
		for k, v := range funcMap {
			result[k] = v
		}
	}
	return result
}

// markdown turn Markdown into HTML
func markdown(s *string) htmltemplate.HTML {
	if s == nil {
		return ""
	}

	// always normalize newlines, this library only supports Unix LF newlines
	md := gomarkdown.NormalizeNewlines([]byte(*s))

	// create Markdown parser
	extensions := gomarkdownparser.CommonExtensions
	parser := gomarkdownparser.NewWithExtensions(extensions)

	// parse Markdown into AST tree
	doc := parser.Parse(md)

	// create HTML renderer
	htmlFlags := gomarkdownhtml.CommonFlags | gomarkdownhtml.HrefTargetBlank | gomarkdownhtml.SkipHTML
	renderer := gomarkdownhtml.NewRenderer(gomarkdownhtml.RendererOptions{Flags: htmlFlags})

	return htmltemplate.HTML(gomarkdown.Render(doc, renderer)) //nolint:gosec
}

// unmarkdown remove Markdown, so we can use the given string in non-HTML (JSON) output
func unmarkdown(s *string) string {
	if s == nil {
		return ""
	}
	withoutMarkdown := stripmd.Strip(*s)
	withoutLinebreaks := strings.ReplaceAll(withoutMarkdown, "\n", " ")
	return withoutLinebreaks
}

// truncateText truncate text to avoid overly long text on overview pages.
// Returns the input unchanged when it's nil or within the limit.
func truncateText(s *string, limit int) *string {
	if s == nil {
		return s
	}
	if len(*s) > limit {
		// truncate at last space or newline before given character limit
		cutoff := strings.LastIndexAny((*s)[:limit], " \n")
		if cutoff < 0 {
			// BUGFIX: no space or newline found within the limit. Previously
			// cutoff was -1 here, causing a slice-bounds panic on (*s)[:cutoff].
			// Hard-cut at the limit instead.
			// (note: limit is a byte count, a multi-byte rune could be cut in half here — TODO confirm acceptable)
			cutoff = limit
		}
		t := (*s)[:cutoff] + "..."
		return &t
	}
	return s
}

// humanSize converts size in bytes to a human-readable size
func humanSize(a any) string {
	if i, ok := a.(int64); ok {
		return units.HumanSize(float64(i))
	} else if f, ok := a.(float64); ok {
		return units.HumanSize(f)
	} else if s, ok := a.(string); ok {
		fs, err := strconv.ParseFloat(s, 64)
		if err == nil {
			return units.HumanSize(fs)
		}
	}
	log.Printf("cannot convert '%v' to float", a)
	return "0"
}

// bytesSize converts human-readable size to size in bytes (base-10, not base-2)
func bytesSize(s string) int64 {
	i, err := units.FromHumanSize(s)
	if err != nil {
		log.Printf("cannot convert '%s' to bytes", s)
		return 0
	}
	return i
}

// isDate true when given input is a date, false otherwise
func isDate(v any) bool {
	if _, ok := v.(time.Time); ok {
		return true
	}
	return false
}

// isLink true when given input is an HTTP(s) URL (without any additional text), false otherwise
func isLink(v any) bool {
	if text, ok := v.(string); ok {
		return linkRegex.MatchString(text)
	}
	return false
}
package util

import (
	"compress/gzip"
	"errors"
	"io"
	"io/fs"
	"log"
	"os"
)

// ReadFile read a plain or gzipped file and return contents as string.
// When a sibling "<filePath>.gz" exists, the gzipped variant takes precedence.
// Terminates the process when the file can't be read (used for required files at startup).
func ReadFile(filePath string) string {
	gzipFile := filePath + ".gz"
	var fileContents string
	if _, err := os.Stat(gzipFile); !errors.Is(err, fs.ErrNotExist) {
		fileContents, err = readGzipContents(gzipFile)
		if err != nil {
			// include the underlying cause, previously silently dropped
			log.Fatalf("unable to decompress gzip file %s, error: %v", gzipFile, err)
		}
	} else {
		fileContents, err = readPlainContents(filePath)
		if err != nil {
			// include the underlying cause, previously silently dropped
			log.Fatalf("unable to read file %s, error: %v", filePath, err)
		}
	}
	return fileContents
}

// decompress gzip files, return contents as string
func readGzipContents(filePath string) (string, error) {
	gzipFile, err := os.Open(filePath)
	if err != nil {
		return "", err
	}
	defer func() {
		if err := gzipFile.Close(); err != nil {
			log.Println("failed to close gzip file")
		}
	}()
	gzipReader, err := gzip.NewReader(gzipFile)
	if err != nil {
		return "", err
	}
	defer func() {
		if err := gzipReader.Close(); err != nil {
			log.Println("failed to close gzip reader")
		}
	}()
	// note: reads the whole (decompressed) file into memory, intended for
	// config/template-sized files only
	contents, err := io.ReadAll(gzipReader) //nolint:gosec
	if err != nil {
		return "", err
	}
	return string(contents), nil
}

// read file, return contents as string
func readPlainContents(filePath string) (string, error) {
	// os.ReadFile replaces the previous manual Open + io.Copy + Close dance
	contents, err := os.ReadFile(filePath)
	if err != nil {
		return "", err
	}
	return string(contents), nil
}
package util

import (
	"bytes"
	"encoding/json"
	"log"

	"dario.cat/mergo"
)

// PrettyPrintJSON returns the given JSON content re-indented with a single
// space. Terminates the process (after logging the offending content) when
// the input isn't valid JSON.
func PrettyPrintJSON(content []byte, name string) []byte {
	var indented bytes.Buffer
	err := json.Indent(&indented, content, "", " ")
	if err != nil {
		log.Print(string(content))
		log.Fatalf("invalid json in %s: %v, see json output above", name, err)
	}
	return indented.Bytes()
}

// MergeJSON merges the two JSON byte slices. It returns an error if x1 or x2 cannot be JSON-unmarshalled,
// or the merged JSON is invalid.
//
// Optionally, an orderBy function can be provided to alter the key order in the resulting JSON
func MergeJSON(x1, x2 []byte, orderBy func(output map[string]any) any) ([]byte, error) {
	var first map[string]any
	if err := json.Unmarshal(x1, &first); err != nil {
		return nil, err
	}
	var second map[string]any
	if err := json.Unmarshal(x2, &second); err != nil {
		return nil, err
	}
	// merge the second document into the first
	if err := mergo.Merge(&first, &second); err != nil {
		return nil, err
	}
	if orderBy != nil {
		return json.Marshal(orderBy(first))
	}
	return json.Marshal(first)
}
package util // Keys returns the keys of the map m. The keys will be an indeterminate order. func Keys[M ~map[K]V, K comparable, V any](input M) []K { output := make([]K, 0, len(input)) for k := range input { output = append(output, k) } return output } // Inverse switches the values to keys and the keys to values. func Inverse(input map[string]string) map[string]string { output := make(map[string]string) for k, v := range input { output[v] = k } return output } // Cast turns a map[K]V to a map[K]any, so values will downcast to 'any' type. func Cast[M ~map[K]V, K comparable, V any](input M) map[K]any { output := make(map[K]any, len(input)) for k, v := range input { output[k] = v } return output }
package core

import (
	"net/http"

	"github.com/PDOK/gokoala/internal/engine"
)

const (
	templatesDir       = "internal/ogc/common/core/templates/"
	rootPath           = "/"
	apiPath            = "/api"
	alternativeAPIPath = "/openapi.json"
	conformancePath    = "/conformance"
)

// CommonCore implements the OGC API Common 'core' building block:
// landing page, OpenAPI description and conformance declaration.
type CommonCore struct {
	engine *engine.Engine
}

// NewCommonCore pre-renders the landing page, OpenAPI page and conformance
// declaration templates and mounts their routes on the given engine.
func NewCommonCore(e *engine.Engine) *CommonCore {
	conformanceBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "Conformance",
			Path: "conformance",
		},
	}
	apiBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "OpenAPI specification",
			Path: "api",
		},
	}
	e.RenderTemplates(rootPath,
		nil,
		engine.NewTemplateKey(templatesDir+"landing-page.go.json"),
		engine.NewTemplateKey(templatesDir+"landing-page.go.html"))
	e.RenderTemplates(rootPath,
		apiBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"api.go.html"))
	e.RenderTemplates(conformancePath,
		conformanceBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"conformance.go.json"),
		engine.NewTemplateKey(templatesDir+"conformance.go.html"))

	core := &CommonCore{engine: e}

	e.Router.Get(rootPath, core.LandingPage())
	e.Router.Get(apiPath, core.API())
	// implements https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-17
	e.Router.Get(alternativeAPIPath, func(w http.ResponseWriter, r *http.Request) {
		core.apiAsJSON(w, r)
	})
	e.Router.Get(conformancePath, core.Conformance())
	// everything else falls through to static assets
	e.Router.Handle("/*", http.FileServer(http.Dir("assets")))
	return core
}

// LandingPage serves the landing page in the negotiated format and language.
func (c *CommonCore) LandingPage() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		key := engine.NewTemplateKeyWithLanguage(
			templatesDir+"landing-page.go."+c.engine.CN.NegotiateFormat(r),
			c.engine.CN.NegotiateLanguage(w, r))
		c.engine.ServePage(w, r, key)
	}
}

// API serves the OpenAPI spec as HTML or JSON depending on content negotiation.
func (c *CommonCore) API() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		switch c.engine.CN.NegotiateFormat(r) {
		case engine.FormatHTML:
			c.apiAsHTML(w, r)
		case engine.FormatJSON:
			c.apiAsJSON(w, r)
		default:
			engine.RenderProblem(engine.ProblemNotFound, w)
		}
	}
}

// apiAsHTML serves the HTML rendition of the OpenAPI page.
func (c *CommonCore) apiAsHTML(w http.ResponseWriter, r *http.Request) {
	key := engine.NewTemplateKeyWithLanguage(templatesDir+"api.go.html", c.engine.CN.NegotiateLanguage(w, r))
	c.engine.ServePage(w, r, key)
}

// apiAsJSON serves the raw OpenAPI spec as JSON.
func (c *CommonCore) apiAsJSON(w http.ResponseWriter, r *http.Request) {
	c.engine.Serve(w, r, true, true, engine.MediaTypeOpenAPI, c.engine.OpenAPI.SpecJSON)
}

// Conformance serves the conformance declaration in the negotiated format and language.
func (c *CommonCore) Conformance() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		key := engine.NewTemplateKeyWithLanguage(
			templatesDir+"conformance.go."+c.engine.CN.NegotiateFormat(r),
			c.engine.CN.NegotiateLanguage(w, r))
		c.engine.ServePage(w, r, key)
	}
}
package geospatial

import (
	"net/http"

	"github.com/PDOK/gokoala/internal/engine"
	"github.com/go-chi/chi/v5"
)

const (
	CollectionsPath = "/collections"
	templatesDir    = "internal/ogc/common/geospatial/templates/"
)

type Collections struct {
	engine *engine.Engine
}

// NewCollections enables support for OGC APIs that organize data in the concept of collections.
// A collection, also known as a geospatial data resource, is a common way to organize data in various OGC APIs.
func NewCollections(e *engine.Engine) *Collections {
	if e.Config.HasCollections() {
		collectionsBreadcrumbs := []engine.Breadcrumb{
			{
				Name: "Collections",
				Path: "collections",
			},
		}
		e.RenderTemplates(CollectionsPath,
			collectionsBreadcrumbs,
			engine.NewTemplateKey(templatesDir+"collections.go.json"),
			engine.NewTemplateKey(templatesDir+"collections.go.html"))

		for _, coll := range e.Config.AllCollections().Unique() {
			title := coll.ID
			if coll.Metadata != nil && coll.Metadata.Title != nil {
				title = *coll.Metadata.Title
			}
			// build per-collection breadcrumbs on a fresh copy: appending directly
			// to collectionsBreadcrumbs could share its backing array across loop
			// iterations (slice aliasing), letting a later iteration overwrite an
			// earlier one's breadcrumb
			collectionBreadcrumbs := append([]engine.Breadcrumb{}, collectionsBreadcrumbs...)
			collectionBreadcrumbs = append(collectionBreadcrumbs, engine.Breadcrumb{
				Name: title,
				Path: "collections/" + coll.ID,
			})
			e.RenderTemplatesWithParams(CollectionsPath+"/"+coll.ID,
				coll,
				nil,
				engine.NewTemplateKeyWithName(templatesDir+"collection.go.json", coll.ID))
			e.RenderTemplatesWithParams(CollectionsPath+"/"+coll.ID,
				coll,
				collectionBreadcrumbs,
				engine.NewTemplateKeyWithName(templatesDir+"collection.go.html", coll.ID))
		}
	}

	instance := &Collections{
		engine: e,
	}

	e.Router.Get(CollectionsPath, instance.Collections())
	e.Router.Get(CollectionsPath+"/{collectionId}", instance.Collection())
	return instance
}

// Collections returns list of collections
func (c *Collections) Collections() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		key := engine.NewTemplateKeyWithLanguage(
			templatesDir+"collections.go."+c.engine.CN.NegotiateFormat(r),
			c.engine.CN.NegotiateLanguage(w, r))
		c.engine.ServePage(w, r, key)
	}
}

// Collection provides METADATA about a specific collection. To get the CONTENTS of a collection each OGC API
// building block must provide a separate/specific endpoint.
//
// For example in:
// - OGC API Features you would have: /collections/{collectionId}/items
// - OGC API Tiles could have: /collections/{collectionId}/tiles
// - OGC API Maps could have: /collections/{collectionId}/map
// - OGC API 3d GeoVolumes would have: /collections/{collectionId}/3dtiles
func (c *Collections) Collection() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collectionID := chi.URLParam(r, "collectionId")
		key := engine.NewTemplateKeyWithNameAndLanguage(
			templatesDir+"collection.go."+c.engine.CN.NegotiateFormat(r),
			collectionID,
			c.engine.CN.NegotiateLanguage(w, r))
		c.engine.ServePage(w, r, key)
	}
}
package geopackage

import (
	"errors"
	"fmt"
	"strings"

	"github.com/PDOK/gokoala/config"
	"github.com/jmoiron/sqlx"
)

// assertIndexesExist asserts required indexes in the GeoPackage exists
func assertIndexesExist(
	configuredCollections config.GeoSpatialCollections,
	featureTableByCollectionID map[string]*featureTable,
	db *sqlx.DB, fidColumn string) error {

	// index needs to contain these columns in the given order
	defaultSpatialBtreeColumns := strings.Join([]string{fidColumn, "minx", "maxx", "miny", "maxy"}, ",")
	for collID, table := range featureTableByCollectionID {
		if table == nil {
			return errors.New("given table can't be nil")
		}
		for _, coll := range configuredCollections {
			if coll.ID == collID && coll.Features != nil {
				spatialBtreeColumns := defaultSpatialBtreeColumns

				// assert temporal columns are indexed if configured
				if coll.Metadata != nil && coll.Metadata.TemporalProperties != nil {
					temporalBtreeColumns := strings.Join([]string{coll.Metadata.TemporalProperties.StartDate, coll.Metadata.TemporalProperties.EndDate}, ",")
					spatialBtreeColumns = strings.Join([]string{defaultSpatialBtreeColumns, coll.Metadata.TemporalProperties.StartDate, coll.Metadata.TemporalProperties.EndDate}, ",")
					if err := assertIndexExists(table.TableName, db, temporalBtreeColumns, true); err != nil {
						return err
					}
				}

				// assert spatial b-tree index exists, this index substitutes the r-tree when querying large bounding boxes
				// if temporal columns are configured, they must be included in this index as well
				if err := assertIndexExists(table.TableName, db, spatialBtreeColumns, true); err != nil {
					return err
				}

				// assert the column for each property filter is indexed.
				for _, propertyFilter := range coll.Features.Filters.Properties {
					if err := assertIndexExists(table.TableName, db, propertyFilter.Name, false); err != nil && *propertyFilter.IndexRequired {
						return fmt.Errorf("%w. To disable this check set 'indexRequired' to 'false'", err)
					}
				}
				break
			}
		}
	}
	return nil
}

// assertIndexExists checks whether an index exists on the given comma-separated
// columns (in that exact order). With prefixMatch an index whose columns merely
// start with the expected columns also qualifies.
func assertIndexExists(tableName string, db *sqlx.DB, columns string, prefixMatch bool) error {
	// NOTE: tableName originates from GeoPackage metadata (trusted config),
	// not user input, but is interpolated into the query below.
	query := fmt.Sprintf(`
select group_concat(info.name) as indexed_columns
from pragma_index_list('%s') as list,
     pragma_index_info(list.name) as info
group by list.name`, tableName)
	rows, err := db.Queryx(query)
	if err != nil {
		// wrap the underlying cause (previously dropped)
		return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
	}
	// close before iterating finishes in every code path (previously deferred
	// only after the loop, which skipped cleanup if Scan handling changed)
	defer rows.Close()
	exists := false
	for rows.Next() {
		var indexedColumns string
		if err = rows.Scan(&indexedColumns); err != nil {
			// previously the Scan error was silently discarded
			return fmt.Errorf("failed to read index columns from table '%s': %w", tableName, err)
		}
		if columns == indexedColumns {
			exists = true // index on expected columns
		} else if prefixMatch && strings.HasPrefix(indexedColumns, columns) {
			exists = true // index with expected prefix columns
		}
	}
	// surface iteration errors (previously never checked)
	if err = rows.Err(); err != nil {
		return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
	}
	if !exists {
		return fmt.Errorf("missing required index: no index exists on column(s) '%s' in table '%s'", columns, tableName)
	}
	return nil
}
//go:build cgo && !darwin && !windows package geopackage import ( "fmt" "log" "github.com/PDOK/gokoala/config" "github.com/google/uuid" cloudsqlitevfs "github.com/PDOK/go-cloud-sqlite-vfs" "github.com/jmoiron/sqlx" ) // Cloud-Backed SQLite (CBS) GeoPackage in Azure or Google object storage type cloudGeoPackage struct { db *sqlx.DB cloudVFS *cloudsqlitevfs.VFS } func newCloudBackedGeoPackage(gpkg *config.GeoPackageCloud) geoPackageBackend { cacheDir, err := gpkg.CacheDir() if err != nil { log.Fatalf("invalid cache dir, error: %v", err) } cacheSize, err := gpkg.Cache.MaxSizeAsBytes() if err != nil { log.Fatalf("invalid cache size provided, error: %v", err) } msg := fmt.Sprintf("Cloud-Backed GeoPackage '%s' in container '%s' on '%s'", gpkg.File, gpkg.Container, gpkg.Connection) log.Printf("connecting to %s\n", msg) vfsName := uuid.New().String() // important: each geopackage must use a unique VFS name vfs, err := cloudsqlitevfs.NewVFS(vfsName, gpkg.Connection, gpkg.User, gpkg.Auth, gpkg.Container, cacheDir, cacheSize, gpkg.LogHTTPRequests) if err != nil { log.Fatalf("failed to connect with %s, error: %v", msg, err) } log.Printf("connected to %s\n", msg) conn := fmt.Sprintf("/%s/%s?vfs=%s&mode=ro&_cache_size=%d", gpkg.Container, gpkg.File, vfsName, gpkg.InMemoryCacheSize) db, err := sqlx.Open(sqliteDriverName, conn) if err != nil { log.Fatalf("failed to open %s, error: %v", msg, err) } return &cloudGeoPackage{db, &vfs} } func (g *cloudGeoPackage) getDB() *sqlx.DB { return g.db } func (g *cloudGeoPackage) close() { err := g.db.Close() if err != nil { log.Printf("failed to close GeoPackage: %v", err) } if g.cloudVFS != nil { err = g.cloudVFS.Close() if err != nil { log.Printf("failed to close Cloud-Backed GeoPackage: %v", err) } } }
package geopackage

import (
	"fmt"
	"log"
	"time"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine"
	"github.com/jmoiron/sqlx"
)

// GeoPackage on local disk
type localGeoPackage struct {
	db *sqlx.DB
}

// newLocalGeoPackage opens a read-only GeoPackage from local disk, first
// downloading it when a download config is present. Terminates the process
// on failure (startup-time only).
func newLocalGeoPackage(gpkg *config.GeoPackageLocal) geoPackageBackend {
	if gpkg.Download != nil {
		downloadGeoPackage(gpkg)
	}
	// immutable=1: the file is never modified while we have it open
	conn := fmt.Sprintf("file:%s?immutable=1&_cache_size=%d", gpkg.File, gpkg.InMemoryCacheSize)
	db, err := sqlx.Open(sqliteDriverName, conn)
	if err != nil {
		log.Fatalf("failed to open GeoPackage: %v", err)
	}
	log.Printf("connected to local GeoPackage: %s", gpkg.File)
	return &localGeoPackage{db}
}

// downloadGeoPackage fetches the GeoPackage from its configured source URL,
// terminating the process when the download ultimately fails.
func downloadGeoPackage(gpkg *config.GeoPackageLocal) {
	source := *gpkg.Download.From.URL
	log.Printf("start download of GeoPackage: %s", source.String())
	downloadTime, err := engine.Download(source, gpkg.File, gpkg.Download.Parallelism, gpkg.Download.TLSSkipVerify,
		gpkg.Download.Timeout.Duration, gpkg.Download.RetryDelay.Duration,
		gpkg.Download.RetryMaxDelay.Duration, gpkg.Download.MaxRetries)
	if err != nil {
		log.Fatalf("failed to download GeoPackage: %v", err)
	}
	log.Printf("successfully downloaded GeoPackage to %s in %s", gpkg.File, downloadTime.Round(time.Second))
}

// getDB returns the underlying database handle.
func (g *localGeoPackage) getDB() *sqlx.DB {
	return g.db
}

// close releases the database handle, logging (not failing on) errors.
func (g *localGeoPackage) close() {
	if err := g.db.Close(); err != nil {
		log.Printf("failed to close GeoPackage: %v", err)
	}
}
// Package encoding based on https://github.com/go-spatial/geom/blob/master/encoding/gpkg/binary_header.go
//
// Copyright (c) 2017 go-spatial. Modified by PDOK.
// Licensed under the MIT license. See https://github.com/go-spatial/geom/blob/master/LICENSE for details.
package encoding

import (
	"encoding/binary"
	"errors"
	"fmt"
	"math"

	"github.com/twpayne/go-geom"
	"github.com/twpayne/go-geom/encoding/wkb"
	"github.com/twpayne/go-geom/encoding/wkbcommon"
)

// EnvelopeType indicates which (optional) envelope layout is encoded in a
// GeoPackage geometry header.
type EnvelopeType uint8

// Magic is the magic number encode in the header. It should be 0x4750
var Magic = [2]byte{0x47, 0x50}

// Decipher empty points with NaN as coordinates, in line with Requirement 152 of the spec (http://www.geopackage.org/spec/).
var gpkgNaNHandling = wkbcommon.WKBOptionEmptyPointHandling(wkbcommon.EmptyPointHandlingNaN)

const (
	EnvelopeTypeNone    = EnvelopeType(0)
	EnvelopeTypeXY      = EnvelopeType(1)
	EnvelopeTypeXYZ     = EnvelopeType(2)
	EnvelopeTypeXYM     = EnvelopeType(3)
	EnvelopeTypeXYZM    = EnvelopeType(4)
	EnvelopeTypeInvalid = EnvelopeType(5)
)

// NumberOfElements that the particular Envelope Type will have.
// Returns -1 for unknown/invalid types.
func (et EnvelopeType) NumberOfElements() int {
	switch et { //nolint:exhaustive
	case EnvelopeTypeNone:
		return 0
	case EnvelopeTypeXY:
		return 4
	case EnvelopeTypeXYZ:
		return 6
	case EnvelopeTypeXYM:
		return 6
	case EnvelopeTypeXYZM:
		return 8
	default:
		return -1
	}
}

// String returns the human-readable name of the envelope type.
func (et EnvelopeType) String() string {
	// all names are packed into one constant string; each case slices its own
	// name out of it (e.g. [0:4]="NONE", [4:8]="XYZM", [8:11]="XYM", [11:]="INVALID")
	str := "NONEXYZMXYMINVALID"
	switch et { //nolint:exhaustive
	case EnvelopeTypeNone:
		return str[0:4]
	case EnvelopeTypeXY:
		return str[4 : 4+2]
	case EnvelopeTypeXYZ:
		return str[4 : 4+3]
	case EnvelopeTypeXYM:
		return str[8 : 8+3]
	case EnvelopeTypeXYZM:
		return str[4 : 4+4]
	default:
		return str[11:]
	}
}

// HEADER FLAG LAYOUT
// 7 6 5 4 3 2 1 0
// R R X Y E E E B
// R Reserved for future use. (should be set to 0)
// X GeoPackageBinary type (normal or extended)
// Y empty geometry
// E Envelope type
// B ByteOrder
// http://www.geopackage.org/spec/#flags_layout
const (
	maskByteOrder        = 1 << 0
	maskEnvelopeType     = 1<<3 | 1<<2 | 1<<1
	maskEmptyGeometry    = 1 << 4
	maskGeoPackageBinary = 1 << 5
)

// headerFlags holds the single flags byte of a GeoPackage geometry header.
type headerFlags byte

// String renders the flags byte as hex, e.g. "0x1d".
func (hf headerFlags) String() string {
	return fmt.Sprintf("0x%02x", uint8(hf))
}

// Endian will return the encoded Endianess
func (hf headerFlags) Endian() binary.ByteOrder {
	if hf&maskByteOrder == 0 {
		return binary.BigEndian
	}
	return binary.LittleEndian
}

// Envelope returns the type of the envelope.
func (hf headerFlags) Envelope() EnvelopeType {
	// envelope type occupies bits 1-3; any value >= 5 is invalid per spec
	et := uint8((hf & maskEnvelopeType) >> 1)
	if et >= uint8(EnvelopeTypeInvalid) {
		return EnvelopeTypeInvalid
	}
	return EnvelopeType(et)
}

// IsEmpty returns whether or not the geometry is empty.
func (hf headerFlags) IsEmpty() bool {
	return ((hf & maskEmptyGeometry) >> 4) == 1
}

// IsStandard returns whether or not the geometry is a standard GeoPackage geometry type.
func (hf headerFlags) IsStandard() bool {
	return ((hf & maskGeoPackageBinary) >> 5) == 0
}

// BinaryHeader is the gpkg header that accompanies every feature.
type BinaryHeader struct { // See: http://www.geopackage.org/spec/
	magic    [2]byte // should be 0x47 0x50 (GP in ASCII)
	version  uint8   // should be 0
	flags    headerFlags
	srsid    int32
	envelope []float64
}

// decodeBinaryHeader decodes the data into the BinaryHeader
func decodeBinaryHeader(data []byte) (*BinaryHeader, error) {
	// fixed header part is 8 bytes: magic(2) + version(1) + flags(1) + srsid(4)
	if len(data) < 8 {
		return nil, errors.New("not enough bytes")
	}

	var bh BinaryHeader
	bh.magic[0] = data[0]
	bh.magic[1] = data[1]
	bh.version = data[2]
	bh.flags = headerFlags(data[3])
	en := bh.flags.Endian()
	bh.srsid = int32(en.Uint32(data[4 : 4+4])) //nolint:gosec

	bytes := data[8:]
	et := bh.flags.Envelope()
	if et == EnvelopeTypeInvalid {
		return nil, errors.New("invalid envelope type")
	}
	if et == EnvelopeTypeNone {
		return &bh, nil
	}
	num := et.NumberOfElements()
	// there are 8 bytes per float64 value and we need num of them.
	if len(bytes) < (num * 8) {
		return nil, errors.New("not enough bytes")
	}
	bh.envelope = make([]float64, 0, num)
	for i := 0; i < num; i++ {
		bits := en.Uint64(bytes[i*8 : (i*8)+8])
		bh.envelope = append(bh.envelope, math.Float64frombits(bits))
	}
	// NOTE(review): the magic number is validated last, so a (partially
	// parsed) header is still returned alongside the error — callers must
	// check the error before using the header
	if bh.magic[0] != Magic[0] || bh.magic[1] != Magic[1] {
		return &bh, errors.New("invalid magic number")
	}
	return &bh, nil
}

// Magic is the magic number encode in the header. It should be 0x4750
func (h *BinaryHeader) Magic() [2]byte {
	if h == nil {
		return Magic
	}
	return h.magic
}

// Version is the version number encode in the header.
func (h *BinaryHeader) Version() uint8 {
	if h == nil {
		return 0
	}
	return h.version
}

// EnvelopeType is the type of the envelope that is provided.
func (h *BinaryHeader) EnvelopeType() EnvelopeType {
	if h == nil {
		return EnvelopeTypeInvalid
	}
	return h.flags.Envelope()
}

// SRSID is the SRS id of the feature.
func (h *BinaryHeader) SRSID() int32 {
	if h == nil {
		return 0
	}
	return h.srsid
}

// Envelope is the bounding box of the feature, used for searching. If the
// EnvelopeType is EnvelopeTypeNone, then there isn't a envelope encoded
// and a search without an index will need to be performed. This is to save space.
func (h *BinaryHeader) Envelope() []float64 {
	if h == nil {
		return nil
	}
	return h.envelope
}

// IsGeometryEmpty tells us if the geometry should be considered empty.
func (h *BinaryHeader) IsGeometryEmpty() bool {
	if h == nil {
		return true
	}
	return h.flags.IsEmpty()
}

// IsStandardGeometry is the geometry a core/extended geometry type, or a user defined geometry type.
func (h *BinaryHeader) IsStandardGeometry() bool {
	if h == nil {
		return true
	}
	return h.flags.IsStandard()
}

// Size is the size of the header in bytes.
func (h *BinaryHeader) Size() int {
	if h == nil {
		return 0
	}
	// 8 fixed bytes plus 8 bytes per envelope float64
	return (len(h.envelope) * 8) + 8
}

// StandardBinary is the binary encoding plus some metadata
// should be stored as a blob
type StandardBinary struct {
	Header   *BinaryHeader
	SRSID    int32
	Geometry geom.T
}

// DecodeGeometry decodes a GeoPackage geometry blob: first the binary header,
// then the WKB geometry that follows it.
func DecodeGeometry(bytes []byte) (*StandardBinary, error) {
	h, err := decodeBinaryHeader(bytes)
	if err != nil {
		return nil, err
	}
	geo, err := wkb.Unmarshal(bytes[h.Size():], gpkgNaNHandling)
	if err != nil {
		return nil, err
	}
	return &StandardBinary{
		Header:   h,
		SRSID:    h.SRSID(),
		Geometry: geo,
	}, nil
}
package geopackage import ( "context" "database/sql" "fmt" "log" "maps" "os" "path" "slices" "strings" "sync" "time" "github.com/PDOK/gokoala/config" "github.com/PDOK/gokoala/internal/engine/util" "github.com/PDOK/gokoala/internal/ogc/features/datasources" "github.com/PDOK/gokoala/internal/ogc/features/datasources/geopackage/encoding" "github.com/PDOK/gokoala/internal/ogc/features/domain" "github.com/twpayne/go-geom" "github.com/twpayne/go-geom/encoding/wkt" "github.com/google/uuid" "github.com/jmoiron/sqlx" "github.com/mattn/go-sqlite3" "github.com/qustavo/sqlhooks/v2" ) const ( sqliteDriverName = "sqlite3_with_extensions" selectAll = "*" ) var once sync.Once // Load sqlite (with extensions) once. // // Extensions are by default expected in /usr/lib. For spatialite you can // alternatively/optionally set SPATIALITE_LIBRARY_PATH. func loadDriver() { once.Do(func() { spatialite := path.Join(os.Getenv("SPATIALITE_LIBRARY_PATH"), "mod_spatialite") driver := &sqlite3.SQLiteDriver{Extensions: []string{spatialite}} sql.Register(sqliteDriverName, sqlhooks.Wrap(driver, datasources.NewSQLLogFromEnv())) }) } // geoPackageBackend abstraction over different kinds of GeoPackages, e.g. local file or cloud-backed sqlite. 
type geoPackageBackend interface { getDB() *sqlx.DB close() } // featureTable according to spec https://www.geopackage.org/spec121/index.html#_contents type featureTable struct { TableName string `db:"table_name"` DataType string `db:"data_type"` // always 'features' Identifier string `db:"identifier"` Description string `db:"description"` GeometryColumnName string `db:"column_name"` GeometryType string `db:"geometry_type_name"` LastChange time.Time `db:"last_change"` MinX sql.NullFloat64 `db:"min_x"` // bbox MinY sql.NullFloat64 `db:"min_y"` // bbox MaxX sql.NullFloat64 `db:"max_x"` // bbox MaxY sql.NullFloat64 `db:"max_y"` // bbox SRS sql.NullInt64 `db:"srs_id"` ColumnsWithDateType map[string]string } func (ft featureTable) ColumnsWithDataType() map[string]string { return ft.ColumnsWithDateType } type GeoPackage struct { backend geoPackageBackend preparedStmtCache *PreparedStatementCache fidColumn string externalFidColumn string featureTableByCollectionID map[string]*featureTable propertyFiltersByCollectionID map[string]datasources.PropertyFiltersWithAllowedValues propertiesByCollectionID map[string]*config.FeatureProperties queryTimeout time.Duration maxBBoxSizeToUseWithRTree int selectClauseFids []string } func NewGeoPackage(collections config.GeoSpatialCollections, gpkgConfig config.GeoPackage) *GeoPackage { loadDriver() g := &GeoPackage{} g.preparedStmtCache = NewCache() g.propertiesByCollectionID = cacheFeatureProperties(collections) warmUp := false switch { case gpkgConfig.Local != nil: g.backend = newLocalGeoPackage(gpkgConfig.Local) g.fidColumn = gpkgConfig.Local.Fid g.externalFidColumn = gpkgConfig.Local.ExternalFid g.queryTimeout = gpkgConfig.Local.QueryTimeout.Duration g.maxBBoxSizeToUseWithRTree = gpkgConfig.Local.MaxBBoxSizeToUseWithRTree case gpkgConfig.Cloud != nil: g.backend = newCloudBackedGeoPackage(gpkgConfig.Cloud) g.fidColumn = gpkgConfig.Cloud.Fid g.externalFidColumn = gpkgConfig.Cloud.ExternalFid g.queryTimeout = 
gpkgConfig.Cloud.QueryTimeout.Duration g.maxBBoxSizeToUseWithRTree = gpkgConfig.Cloud.MaxBBoxSizeToUseWithRTree warmUp = gpkgConfig.Cloud.Cache.WarmUp default: log.Fatal("unknown GeoPackage config encountered") } g.selectClauseFids = []string{g.fidColumn, domain.PrevFid, domain.NextFid} metadata, err := readDriverMetadata(g.backend.getDB()) if err != nil { log.Fatalf("failed to connect with GeoPackage: %v", err) } log.Println(metadata) g.featureTableByCollectionID, err = readGpkgContents(collections, g.backend.getDB()) if err != nil { log.Fatal(err) } g.propertyFiltersByCollectionID, err = readPropertyFiltersWithAllowedValues(g.featureTableByCollectionID, collections, g.backend.getDB()) if err != nil { log.Fatal(err) } if err = assertIndexesExist(collections, g.featureTableByCollectionID, g.backend.getDB(), g.fidColumn); err != nil { log.Fatal(err) } if warmUp { // perform warmup async since it can take a long time go func() { if err = warmUpFeatureTables(collections, g.featureTableByCollectionID, g.backend.getDB()); err != nil { log.Fatal(err) } }() } return g } func (g *GeoPackage) Close() { g.preparedStmtCache.Close() g.backend.close() } func (g *GeoPackage) GetFeatureIDs(ctx context.Context, collection string, criteria datasources.FeaturesCriteria) ([]int64, domain.Cursors, error) { table, err := g.getFeatureTable(collection) if err != nil { return nil, domain.Cursors{}, err } queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout) // https://go.dev/doc/database/cancel-operations defer cancel() stmt, query, queryArgs, err := g.makeFeaturesQuery(queryCtx, g.propertiesByCollectionID[collection], table, true, criteria) //nolint:sqlclosecheck // prepared statement is cached, will be closed when evicted from cache if err != nil { return nil, domain.Cursors{}, fmt.Errorf("failed to create query '%s' error: %w", query, err) } rows, err := stmt.QueryxContext(queryCtx, queryArgs) if err != nil { return nil, domain.Cursors{}, fmt.Errorf("failed to execute query '%s' 
error: %w", query, err) } defer rows.Close() featureIDs, prevNext, err := domain.MapRowsToFeatureIDs(queryCtx, rows) if err != nil { return nil, domain.Cursors{}, err } if prevNext == nil { return nil, domain.Cursors{}, nil } return featureIDs, domain.NewCursors(*prevNext, criteria.Cursor.FiltersChecksum), queryCtx.Err() } func (g *GeoPackage) GetFeaturesByID(ctx context.Context, collection string, featureIDs []int64, profile domain.Profile) (*domain.FeatureCollection, error) { table, err := g.getFeatureTable(collection) if err != nil { return nil, err } queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout) // https://go.dev/doc/database/cancel-operations defer cancel() fids := map[string]any{"fids": featureIDs} query, queryArgs, err := sqlx.Named(fmt.Sprintf("select * from %s where %s in (:fids)", table.TableName, g.fidColumn), fids) if err != nil { return nil, fmt.Errorf("failed to make features query, error: %w", err) } query, queryArgs, err = sqlx.In(query, queryArgs...) if err != nil { return nil, fmt.Errorf("failed to make IN-clause, error: %w", err) } rows, err := g.backend.getDB().QueryxContext(queryCtx, g.backend.getDB().Rebind(query), queryArgs...) 
if err != nil { return nil, fmt.Errorf("failed to execute query '%s' error: %w", query, err) } defer rows.Close() fc := domain.FeatureCollection{} fc.Features, _, err = domain.MapRowsToFeatures(queryCtx, rows, g.fidColumn, g.externalFidColumn, table.GeometryColumnName, g.propertiesByCollectionID[collection], mapGpkgGeometry, profile.MapRelationUsingProfile) if err != nil { return nil, err } fc.NumberReturned = len(fc.Features) return &fc, queryCtx.Err() } func (g *GeoPackage) GetFeatures(ctx context.Context, collection string, criteria datasources.FeaturesCriteria, profile domain.Profile) (*domain.FeatureCollection, domain.Cursors, error) { table, err := g.getFeatureTable(collection) if err != nil { return nil, domain.Cursors{}, err } queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout) // https://go.dev/doc/database/cancel-operations defer cancel() stmt, query, queryArgs, err := g.makeFeaturesQuery(queryCtx, g.propertiesByCollectionID[collection], table, false, criteria) //nolint:sqlclosecheck // prepared statement is cached, will be closed when evicted from cache if err != nil { return nil, domain.Cursors{}, fmt.Errorf("failed to create query '%s' error: %w", query, err) } rows, err := stmt.QueryxContext(queryCtx, queryArgs) if err != nil { return nil, domain.Cursors{}, fmt.Errorf("failed to execute query '%s' error: %w", query, err) } defer rows.Close() var prevNext *domain.PrevNextFID fc := domain.FeatureCollection{} fc.Features, prevNext, err = domain.MapRowsToFeatures(queryCtx, rows, g.fidColumn, g.externalFidColumn, table.GeometryColumnName, g.propertiesByCollectionID[collection], mapGpkgGeometry, profile.MapRelationUsingProfile) if err != nil { return nil, domain.Cursors{}, err } if prevNext == nil { return nil, domain.Cursors{}, nil } fc.NumberReturned = len(fc.Features) return &fc, domain.NewCursors(*prevNext, criteria.Cursor.FiltersChecksum), queryCtx.Err() } func (g *GeoPackage) GetFeature(ctx context.Context, collection string, featureID any, 
profile domain.Profile) (*domain.Feature, error) { table, err := g.getFeatureTable(collection) if err != nil { return nil, err } queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout) // https://go.dev/doc/database/cancel-operations defer cancel() var fidColumn string switch featureID.(type) { case int64: if g.externalFidColumn != "" { // Features should be retrieved by UUID log.Println("feature requested by int while external fid column is defined") return nil, nil } fidColumn = g.fidColumn case uuid.UUID: if g.externalFidColumn == "" { // Features should be retrieved by int64 log.Println("feature requested by UUID while external fid column is not defined") return nil, nil } fidColumn = g.externalFidColumn } query := fmt.Sprintf("select * from %s f where f.%s = :fid limit 1", table.TableName, fidColumn) rows, err := g.backend.getDB().NamedQueryContext(queryCtx, query, map[string]any{"fid": featureID}) if err != nil { return nil, fmt.Errorf("query '%s' failed: %w", query, err) } defer rows.Close() features, _, err := domain.MapRowsToFeatures(queryCtx, rows, g.fidColumn, g.externalFidColumn, table.GeometryColumnName, g.propertiesByCollectionID[collection], mapGpkgGeometry, profile.MapRelationUsingProfile) if err != nil { return nil, err } if len(features) != 1 { return nil, nil } return features[0], queryCtx.Err() } func (g *GeoPackage) GetFeatureTableMetadata(collection string) (datasources.FeatureTableMetadata, error) { val, ok := g.featureTableByCollectionID[collection] if !ok { return nil, fmt.Errorf("no metadata for %s", collection) } return val, nil } func (g *GeoPackage) GetPropertyFiltersWithAllowedValues(collection string) datasources.PropertyFiltersWithAllowedValues { return g.propertyFiltersByCollectionID[collection] } // Build specific features queries based on the given options. 
// Make sure to use SQL bind variables and return named params: https://jmoiron.github.io/sqlx/#namedParams func (g *GeoPackage) makeFeaturesQuery(ctx context.Context, propConfig *config.FeatureProperties, table *featureTable, onlyFIDs bool, criteria datasources.FeaturesCriteria) (stmt *sqlx.NamedStmt, query string, queryArgs map[string]any, err error) { selectClause := selectAll if onlyFIDs { selectClause = columnsToSQL(g.selectClauseFids) } else if propConfig != nil && propConfig.Properties != nil { selectClause = g.selectSpecificColumnsInOrder(propConfig, table) } // make query if criteria.Bbox != nil { query, queryArgs, err = g.makeBboxQuery(table, selectClause, criteria) if err != nil { return } } else { query, queryArgs = g.makeDefaultQuery(table, selectClause, criteria) } // lookup prepared statement for given query, or create new one stmt, err = g.preparedStmtCache.Lookup(ctx, g.backend.getDB(), query) return } func (g *GeoPackage) makeDefaultQuery(table *featureTable, selectClause string, criteria datasources.FeaturesCriteria) (string, map[string]any) { pfClause, pfNamedParams := propertyFiltersToSQL(criteria.PropertyFilters) temporalClause, temporalNamedParams := temporalCriteriaToSQL(criteria.TemporalCriteria) defaultQuery := fmt.Sprintf(` with next as (select * from "%[1]s" where "%[2]s" >= :fid %[3]s %[4]s order by %[2]s asc limit :limit + 1), prev as (select * from "%[1]s" where "%[2]s" < :fid %[3]s %[4]s order by %[2]s desc limit :limit), nextprev as (select * from next union all select * from prev), nextprevfeat as (select *, lag("%[2]s", :limit) over (order by %[2]s) as %[6]s, lead("%[2]s", :limit) over (order by "%[2]s") as %[7]s from nextprev) select %[5]s from nextprevfeat where "%[2]s" >= :fid %[3]s %[4]s limit :limit `, table.TableName, g.fidColumn, temporalClause, pfClause, selectClause, domain.PrevFid, domain.NextFid) // don't add user input here, use named params for user input! 
namedParams := map[string]any{ "fid": criteria.Cursor.FID, "limit": criteria.Limit, } maps.Copy(namedParams, pfNamedParams) maps.Copy(namedParams, temporalNamedParams) return defaultQuery, namedParams } func (g *GeoPackage) makeBboxQuery(table *featureTable, selectClause string, criteria datasources.FeaturesCriteria) (string, map[string]any, error) { btreeIndexHint := fmt.Sprintf("indexed by \"%s_spatial_idx\"", table.TableName) pfClause, pfNamedParams := propertyFiltersToSQL(criteria.PropertyFilters) if pfClause != "" { // don't force btree index when using property filter, let SQLite decide // whether to use the BTree index or the property filter index btreeIndexHint = "" } temporalClause, temporalNamedParams := temporalCriteriaToSQL(criteria.TemporalCriteria) bboxQuery := fmt.Sprintf(` with given_bbox as (select geomfromtext(:bboxWkt, :bboxSrid)), bbox_size as (select iif(count(id) < %[3]d, 'small', 'big') as bbox_size from (select id from rtree_%[1]s_%[4]s where minx <= :maxx and maxx >= :minx and miny <= :maxy and maxy >= :miny limit %[3]d)), next_bbox_rtree as (select f.* from "%[1]s" f inner join rtree_%[1]s_%[4]s rf on f."%[2]s" = rf.id where rf.minx <= :maxx and rf.maxx >= :minx and rf.miny <= :maxy and rf.maxy >= :miny and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1 and f."%[2]s" >= :fid %[6]s %[7]s order by f."%[2]s" asc limit (select iif(bbox_size == 'small', :limit + 1, 0) from bbox_size)), next_bbox_btree as (select f.* from "%[1]s" f %[8]s where f.minx <= :maxx and f.maxx >= :minx and f.miny <= :maxy and f.maxy >= :miny and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1 and f."%[2]s" >= :fid %[6]s %[7]s order by f."%[2]s" asc limit (select iif(bbox_size == 'big', :limit + 1, 0) from bbox_size)), next as (select * from next_bbox_rtree union all select * from next_bbox_btree), prev_bbox_rtree as (select f.* from "%[1]s" f inner join rtree_%[1]s_%[4]s rf on f."%[2]s" = rf.id where rf.minx <= :maxx and 
rf.maxx >= :minx and rf.miny <= :maxy and rf.maxy >= :miny and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1 and f."%[2]s" < :fid %[6]s %[7]s order by f."%[2]s" desc limit (select iif(bbox_size == 'small', :limit, 0) from bbox_size)), prev_bbox_btree as (select f.* from "%[1]s" f %[8]s where f.minx <= :maxx and f.maxx >= :minx and f.miny <= :maxy and f.maxy >= :miny and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1 and f."%[2]s" < :fid %[6]s %[7]s order by f."%[2]s" desc limit (select iif(bbox_size == 'big', :limit, 0) from bbox_size)), prev as (select * from prev_bbox_rtree union all select * from prev_bbox_btree), nextprev as (select * from next union all select * from prev), nextprevfeat as (select *, lag("%[2]s", :limit) over (order by "%[2]s") as %[9]s, lead("%[2]s", :limit) over (order by "%[2]s") as %[10]s from nextprev) select %[5]s from nextprevfeat where "%[2]s" >= :fid %[6]s %[7]s limit :limit `, table.TableName, g.fidColumn, g.maxBBoxSizeToUseWithRTree, table.GeometryColumnName, selectClause, temporalClause, pfClause, btreeIndexHint, domain.PrevFid, domain.NextFid) // don't add user input here, use named params for user input! 
bboxAsWKT, err := wkt.Marshal(criteria.Bbox.Polygon()) if err != nil { return "", nil, err } namedParams := map[string]any{ "fid": criteria.Cursor.FID, "limit": criteria.Limit, "bboxWkt": bboxAsWKT, "maxx": criteria.Bbox.Max(0), "minx": criteria.Bbox.Min(0), "maxy": criteria.Bbox.Max(1), "miny": criteria.Bbox.Min(1), "bboxSrid": criteria.InputSRID} maps.Copy(namedParams, pfNamedParams) maps.Copy(namedParams, temporalNamedParams) return bboxQuery, namedParams, nil } func (g *GeoPackage) getFeatureTable(collection string) (*featureTable, error) { table, ok := g.featureTableByCollectionID[collection] if !ok { return nil, fmt.Errorf("can't query collection '%s' since it doesn't exist in "+ "geopackage, available in geopackage: %v", collection, util.Keys(g.featureTableByCollectionID)) } return table, nil } func (g *GeoPackage) selectSpecificColumnsInOrder(propConfig *config.FeatureProperties, table *featureTable) string { clause := g.selectClauseFids clause = append(clause, propConfig.Properties...) 
if !slices.Contains(clause, table.GeometryColumnName) { clause = append(clause, table.GeometryColumnName) } result := columnsToSQL(clause) if !propConfig.PropertiesExcludeUnknown { result += ", " + selectAll } return result } func mapGpkgGeometry(rawGeom []byte) (geom.T, error) { geomWithMetadata, err := encoding.DecodeGeometry(rawGeom) if err != nil { return nil, err } if geomWithMetadata == nil || geomWithMetadata.Geometry.Empty() { return nil, nil } return geomWithMetadata.Geometry, nil } func propertyFiltersToSQL(pf map[string]string) (sql string, namedParams map[string]any) { namedParams = make(map[string]any) if len(pf) > 0 { position := 0 for k, v := range pf { position++ namedParam := fmt.Sprintf("pf%d", position) // column name in double quotes in case it is a reserved keyword // also: we don't currently support LIKE since wildcard searches don't use the index sql += fmt.Sprintf(" and \"%s\" = :%s", k, namedParam) namedParams[namedParam] = v } } return sql, namedParams } func temporalCriteriaToSQL(temporalCriteria datasources.TemporalCriteria) (sql string, namedParams map[string]any) { namedParams = make(map[string]any) if !temporalCriteria.ReferenceDate.IsZero() { namedParams["referenceDate"] = temporalCriteria.ReferenceDate startDate := temporalCriteria.StartDateProperty endDate := temporalCriteria.EndDateProperty sql = fmt.Sprintf(" and \"%[1]s\" <= :referenceDate and (\"%[2]s\" >= :referenceDate or \"%[2]s\" is null)", startDate, endDate) } return sql, namedParams } func cacheFeatureProperties(collections config.GeoSpatialCollections) map[string]*config.FeatureProperties { result := make(map[string]*config.FeatureProperties) for _, collection := range collections { if collection.Features == nil { continue } result[collection.ID] = collection.Features.FeatureProperties } return result } func columnsToSQL(columns []string) string { return fmt.Sprintf("\"%s\"", strings.Join(columns, `", "`)) }
package geopackage import ( "errors" "fmt" "log" "github.com/PDOK/gokoala/config" ds "github.com/PDOK/gokoala/internal/ogc/features/datasources" "github.com/jmoiron/sqlx" ) // Read metadata about gpkg and sqlite driver func readDriverMetadata(db *sqlx.DB) (string, error) { type pragma struct { UserVersion string `db:"user_version"` } type metadata struct { Sqlite string `db:"sqlite"` Spatialite string `db:"spatialite"` Arch string `db:"arch"` } var m metadata err := db.QueryRowx(` select sqlite_version() as sqlite, spatialite_version() as spatialite, spatialite_target_cpu() as arch`).StructScan(&m) if err != nil { return "", err } var gpkgVersion pragma _ = db.QueryRowx(`pragma user_version`).StructScan(&gpkgVersion) if gpkgVersion.UserVersion == "" { gpkgVersion.UserVersion = "unknown" } return fmt.Sprintf("geopackage version: %s, sqlite version: %s, spatialite version: %s on %s", gpkgVersion.UserVersion, m.Sqlite, m.Spatialite, m.Arch), nil } // Read gpkg_contents table. This table contains metadata about feature tables. The result is a mapping from // collection ID -> feature table metadata. We match each feature table to the collection ID by looking at the // 'identifier' column. Also in case there's no exact match between 'collection ID' and 'identifier' we use // the explicitly configured table name. 
func readGpkgContents(collections config.GeoSpatialCollections, db *sqlx.DB) (map[string]*featureTable, error) { query := ` select c.table_name, c.data_type, c.identifier, c.description, c.last_change, c.min_x, c.min_y, c.max_x, c.max_y, c.srs_id, gc.column_name, gc.geometry_type_name from gpkg_contents c join gpkg_geometry_columns gc on c.table_name == gc.table_name where c.data_type = 'features'` rows, err := db.Queryx(query) if err != nil { return nil, fmt.Errorf("failed to retrieve gpkg_contents using query: %v\n, error: %w", query, err) } defer rows.Close() result := make(map[string]*featureTable, 10) for rows.Next() { row := featureTable{ ColumnsWithDateType: make(map[string]string), } if err = rows.StructScan(&row); err != nil { return nil, fmt.Errorf("failed to read gpkg_contents record, error: %w", err) } if row.TableName == "" { return nil, fmt.Errorf("feature table name is blank, error: %w", err) } if err = readFeatureTableInfo(db, row); err != nil { return nil, fmt.Errorf("failed to read feature table metadata, error: %w", err) } for _, collection := range collections { if row.TableName == collection.ID { result[collection.ID] = &row } else if hasMatchingTableName(collection, row) { result[collection.ID] = &row } } } if err = rows.Err(); err != nil { return nil, err } if len(result) == 0 { return nil, errors.New("no records for 'features' found in gpkg_contents and/or gpkg_geometry_columns") } uniqueTables := make(map[string]struct{}) for _, table := range result { uniqueTables[table.TableName] = struct{}{} } if len(uniqueTables) != len(result) { log.Printf("Warning: found %d unique table names for %d collections, "+ "usually each collection is backed by its own unique table\n", len(uniqueTables), len(result)) } return result, nil } func readPropertyFiltersWithAllowedValues(featTableByCollection map[string]*featureTable, collections config.GeoSpatialCollections, db *sqlx.DB) (map[string]ds.PropertyFiltersWithAllowedValues, error) { result := 
make(map[string]ds.PropertyFiltersWithAllowedValues) for _, collection := range collections { if collection.Features == nil { continue } result[collection.ID] = make(map[string]ds.PropertyFilterWithAllowedValues) featTable := featTableByCollection[collection.ID] for _, pf := range collection.Features.Filters.Properties { // result should contain ALL configured property filters, with or without allowed values. // when available, allowed values can be either static (from YAML config) or derived from the geopackage result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf} if pf.AllowedValues != nil { result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf, AllowedValues: pf.AllowedValues} continue } if *pf.DeriveAllowedValuesFromDatasource { if !*pf.IndexRequired { log.Printf("Warning: index is disabled for column %s, deriving allowed values "+ "from may take a long time. Index on this column is recommended", pf.Name) } // select distinct values from given column query := fmt.Sprintf("select distinct ft.%s from %s ft", pf.Name, featTable.TableName) var values []string err := db.Select(&values, query) if err != nil { return nil, fmt.Errorf("failed to derive allowed values using query: %v\n, error: %w", query, err) } result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf, AllowedValues: values} continue } } } return result, nil } func readFeatureTableInfo(db *sqlx.DB, table featureTable) error { rows, err := db.Queryx(fmt.Sprintf("select name, type from pragma_table_info('%s')", table.TableName)) if err != nil { return err } defer rows.Close() for rows.Next() { var colName, colType string err = rows.Scan(&colName, &colType) if err != nil { return err } table.ColumnsWithDateType[colName] = colType } return nil } func hasMatchingTableName(collection config.GeoSpatialCollection, row featureTable) bool { return collection.Features != nil && collection.Features.TableName != nil && 
row.TableName == *collection.Features.TableName }
package geopackage import ( "context" "log" lru "github.com/hashicorp/golang-lru/v2" "github.com/jmoiron/sqlx" ) var preparedStmtCacheSize = 25 // PreparedStatementCache is thread safe type PreparedStatementCache struct { cache *lru.Cache[string, *sqlx.NamedStmt] } // NewCache creates a new PreparedStatementCache that will evict least-recently used (LRU) statements. func NewCache() *PreparedStatementCache { cache, _ := lru.NewWithEvict[string, *sqlx.NamedStmt](preparedStmtCacheSize, func(_ string, stmt *sqlx.NamedStmt) { if stmt != nil { _ = stmt.Close() } }) return &PreparedStatementCache{cache: cache} } // Lookup gets a prepared statement from the cache for the given query, or creates a new one and adds it to the cache func (c *PreparedStatementCache) Lookup(ctx context.Context, db *sqlx.DB, query string) (*sqlx.NamedStmt, error) { cachedStmt, ok := c.cache.Get(query) if !ok { stmt, err := db.PrepareNamedContext(ctx, query) if err != nil { return nil, err } c.cache.Add(query, stmt) return stmt, nil } return cachedStmt, nil } // Close purges the cache, and closes remaining prepared statements func (c *PreparedStatementCache) Close() { log.Printf("closing %d prepared statements", c.cache.Len()) c.cache.Purge() }
package geopackage import ( "errors" "fmt" "log" "github.com/PDOK/gokoala/config" "github.com/jmoiron/sqlx" ) // warmUpFeatureTables executes a warmup query to speedup subsequent queries. // This encompasses traversing index(es) to fill the local cache. func warmUpFeatureTables( configuredCollections config.GeoSpatialCollections, featureTableByCollectionID map[string]*featureTable, db *sqlx.DB) error { for collID, table := range featureTableByCollectionID { if table == nil { return errors.New("given table can't be nil") } for _, coll := range configuredCollections { if coll.ID == collID && coll.Features != nil { if err := warmUpFeatureTable(table.TableName, db); err != nil { return err } break } } } return nil } func warmUpFeatureTable(tableName string, db *sqlx.DB) error { query := fmt.Sprintf(` select minx,maxx,miny,maxy from %[1]s where minx <= 0 and maxx >= 0 and miny <= 0 and maxy >= 0 `, tableName) log.Printf("start warm-up of feature table '%s'", tableName) _, err := db.Exec(query) if err != nil { return fmt.Errorf("failed to warm-up feature table '%s': %w", tableName, err) } log.Printf("end warm-up of feature table '%s'", tableName) return nil }
package postgis

import (
	"context"
	"log"

	"github.com/PDOK/gokoala/internal/ogc/features/datasources"
	"github.com/PDOK/gokoala/internal/ogc/features/domain"
)

// notImplementedMsg is logged by each placeholder method below; logged output is identical to before.
const notImplementedMsg = "PostGIS support is not implemented yet, this just serves to demonstrate that we can support multiple types of datasources"

// PostGIS !!! Placeholder implementation, for future reference !!!
type PostGIS struct {
}

func NewPostGIS() *PostGIS {
	return &PostGIS{}
}

func (PostGIS) Close() {
	// noop
}

func (pg PostGIS) GetFeatureIDs(_ context.Context, _ string, _ datasources.FeaturesCriteria) ([]int64, domain.Cursors, error) {
	log.Println(notImplementedMsg)
	return []int64{}, domain.Cursors{}, nil
}

func (pg PostGIS) GetFeaturesByID(_ context.Context, _ string, _ []int64, _ domain.Profile) (*domain.FeatureCollection, error) {
	log.Println(notImplementedMsg)
	return &domain.FeatureCollection{}, nil
}

func (pg PostGIS) GetFeatures(_ context.Context, _ string, _ datasources.FeaturesCriteria, _ domain.Profile) (*domain.FeatureCollection, domain.Cursors, error) {
	log.Println(notImplementedMsg)
	return nil, domain.Cursors{}, nil
}

func (pg PostGIS) GetFeature(_ context.Context, _ string, _ any, _ domain.Profile) (*domain.Feature, error) {
	log.Println(notImplementedMsg)
	return nil, nil
}

func (pg PostGIS) GetFeatureTableMetadata(_ string) (datasources.FeatureTableMetadata, error) {
	log.Println(notImplementedMsg)
	return nil, nil
}

func (pg PostGIS) GetPropertyFiltersWithAllowedValues(_ string) datasources.PropertyFiltersWithAllowedValues {
	log.Println(notImplementedMsg)
	return nil
}
package datasources import ( "context" "fmt" "log" "os" "strconv" "strings" "time" ) type contextKey int const ( envLogSQL = "LOG_SQL" envSlowQueryTime = "SLOW_QUERY_TIME" defaultSlowQueryTime = 5 * time.Second sqlContextKey contextKey = iota ) // SQLLog query logging for debugging purposes type SQLLog struct { LogSQL bool SlowQueryTime time.Duration } // NewSQLLogFromEnv build a SQLLog from environment variables listed in this file func NewSQLLogFromEnv() *SQLLog { var err error logSQL := false if os.Getenv(envLogSQL) != "" { logSQL, err = strconv.ParseBool(os.Getenv(envLogSQL)) if err != nil { log.Fatalf("invalid %s value provided, must be a boolean", envLogSQL) } } slowQueryTime := defaultSlowQueryTime if os.Getenv(envSlowQueryTime) != "" { slowQueryTime, err = time.ParseDuration(os.Getenv(envSlowQueryTime)) if err != nil { log.Fatalf("invalid %s value provided, value such as '5s' expected", envSlowQueryTime) } } return &SQLLog{LogSQL: logSQL, SlowQueryTime: slowQueryTime} } // Before callback prior to execution of the given SQL query func (s *SQLLog) Before(ctx context.Context, _ string, _ ...any) (context.Context, error) { return context.WithValue(ctx, sqlContextKey, time.Now()), nil } // After callback once execution of the given SQL query is done func (s *SQLLog) After(ctx context.Context, query string, args ...any) (context.Context, error) { start := ctx.Value(sqlContextKey).(time.Time) timeSpent := time.Since(start) if timeSpent > s.SlowQueryTime || s.LogSQL { query = replaceBindVars(query, args) log.Printf("\n--- SQL:\n%s\n--- SQL query took: %s\n", query, timeSpent) } return ctx, nil } // replaceBindVars replaces '?' bind vars in order to log a complete query func replaceBindVars(query string, args []any) string { for _, arg := range args { query = strings.Replace(query, "?", fmt.Sprintf("%v", arg), 1) } return query }
package domain

import (
	"bytes"
	"encoding/base64"
	"log"
	"math/big"
	neturl "net/url"
	"strings"
)

const (
	PrevFid   = "prevfid"
	NextFid   = "nextfid"
	separator = '|'
)

// Cursors holds next and previous cursor. Note that we use
// 'cursor-based pagination' as opposed to 'offset-based pagination'
type Cursors struct {
	Prev EncodedCursor
	Next EncodedCursor

	HasPrev bool
	HasNext bool
}

// EncodedCursor is a scrambled string representation of the fields defined in DecodedCursor
type EncodedCursor string

// DecodedCursor the cursor values after decoding EncodedCursor
type DecodedCursor struct {
	FiltersChecksum []byte
	FID             int64
}

// PrevNextFID previous and next feature id (fid) to encode in cursor.
type PrevNextFID struct {
	Prev int64
	Next int64
}

// NewCursors create Cursors based on the prev/next feature ids from the datasource
// and the provided filters (captured in a hash).
func NewCursors(fid PrevNextFID, filtersChecksum []byte) Cursors {
	return Cursors{
		Prev: encodeCursor(fid.Prev, filtersChecksum),
		Next: encodeCursor(fid.Next, filtersChecksum),

		HasPrev: fid.Prev > 0,
		HasNext: fid.Next > 0,
	}
}

// encodeCursor packs a feature id and filter checksum into an opaque cursor string.
// format of the cursor: <encoded fid><separator><encoded checksum>
func encodeCursor(fid int64, filtersChecksum []byte) EncodedCursor {
	fidAsBytes := big.NewInt(fid).Bytes()
	encodedFid := base64.RawURLEncoding.EncodeToString(fidAsBytes)
	encodedChecksum := base64.RawURLEncoding.EncodeToString(filtersChecksum)
	return EncodedCursor(encodedFid + string(separator) + encodedChecksum)
}

// Decode turns encoded cursor into DecodedCursor and verifies that
// the checksum of query params that act as filters hasn't changed.
// Any malformed or stale cursor silently resets to the first page.
func (c EncodedCursor) Decode(filtersChecksum []byte) DecodedCursor {
	unescaped, err := neturl.QueryUnescape(string(c))
	if err != nil || unescaped == "" {
		return DecodedCursor{filtersChecksum, 0}
	}

	// split first, then decode
	parts := strings.Split(unescaped, string(separator))
	if len(parts) < 2 {
		log.Printf("cursor '%s' doesn't contain expected separator %c", unescaped, separator)
		return DecodedCursor{filtersChecksum, 0}
	}
	rawFid, fidErr := base64.RawURLEncoding.DecodeString(parts[0])
	rawChecksum, checksumErr := base64.RawURLEncoding.DecodeString(parts[1])
	if fidErr != nil || checksumErr != nil {
		log.Printf("decoding cursor value '%s' failed, defaulting to first page", unescaped)
		return DecodedCursor{filtersChecksum, 0}
	}

	// feature id
	fid := big.NewInt(0).SetBytes(rawFid).Int64()
	if fid < 0 {
		log.Printf("negative feature ID detected: %d, defaulting to first page", fid)
		fid = 0
	}

	// checksum
	if !bytes.Equal(rawChecksum, filtersChecksum) {
		log.Printf("filters in query params have changed during pagination, resetting to first page")
		return DecodedCursor{filtersChecksum, 0}
	}

	return DecodedCursor{filtersChecksum, fid}
}

func (c EncodedCursor) String() string {
	return string(c)
}
package domain import ( "github.com/twpayne/go-geom/encoding/geojson" ) // featureCollectionType allows the GeoJSON type to be automatically set during json marshalling type featureCollectionType struct{} func (fc *featureCollectionType) MarshalJSON() ([]byte, error) { return []byte(`"FeatureCollection"`), nil } // featureType allows the type for Feature to be automatically set during json Marshalling type featureType struct{} func (ft *featureType) MarshalJSON() ([]byte, error) { return []byte(`"Feature"`), nil } // FeatureCollection is a GeoJSON FeatureCollection with extras such as links // Note: fields in this struct are sorted for optimal memory usage (field alignment) type FeatureCollection struct { Type featureCollectionType `json:"type"` Timestamp string `json:"timeStamp,omitempty"` Links []Link `json:"links,omitempty"` Features []*Feature `json:"features"` NumberReturned int `json:"numberReturned"` } // Feature is a GeoJSON Feature with extras such as links // Note: fields in this struct are sorted for optimal memory usage (field alignment) type Feature struct { Type featureType `json:"type"` Properties FeatureProperties `json:"properties"` Geometry *geojson.Geometry `json:"geometry"` // We expect feature ids to be auto-incrementing integers (which is the default in geopackages) // since we use it for cursor-based pagination. ID string `json:"id"` Links []Link `json:"links,omitempty"` } // Keys of the Feature properties. func (f *Feature) Keys() []string { return f.Properties.Keys() } // Link according to RFC 8288, https://datatracker.ietf.org/doc/html/rfc8288 // Note: fields in this struct are sorted for optimal memory usage (field alignment) type Link struct { Rel string `json:"rel"` Title string `json:"title,omitempty"` Type string `json:"type,omitempty"` Href string `json:"href"` Hreflang string `json:"hreflang,omitempty"` Length int64 `json:"length,omitempty"` Templated bool `json:"templated,omitempty"` }
package domain

import (
	"context"
	"fmt"
	"strings"
	"time"

	"github.com/PDOK/gokoala/config"
	"github.com/jmoiron/sqlx"
	"github.com/twpayne/go-geom"
	"github.com/twpayne/go-geom/encoding/geojson"
)

// MapRelation abstract function type to map feature relations
type MapRelation func(columnName string, columnValue any, externalFidColumn string) (newColumnName string, newColumnValue any)

// MapGeom abstract function type to map geometry from bytes to Geometry
type MapGeom func([]byte) (geom.T, error)

// MapRowsToFeatureIDs datasource agnostic mapper from SQL rows set feature IDs, including prev/next feature ID.
// Expects exactly 3 columns per row: (feature id, previous feature id, next feature id); the prev/next ids are
// only read from the first row. NOTE(review): values[0] is asserted to int64 without an ok-check and will panic
// on a different column type — assumes the datasource always yields int64 ids; confirm against the queries.
func MapRowsToFeatureIDs(ctx context.Context, rows *sqlx.Rows) (featureIDs []int64, prevNextID *PrevNextFID, err error) {
	firstRow := true
	for rows.Next() {
		var values []any
		if values, err = rows.SliceScan(); err != nil {
			return nil, nil, err
		}
		if len(values) != 3 {
			return nil, nil, fmt.Errorf("expected 3 columns containing the feature id, "+
				"the previous feature id and the next feature id. Got: %v", values)
		}
		featureID := values[0].(int64)
		featureIDs = append(featureIDs, featureID)
		if firstRow {
			// prev/next default to 0 (= "no page") when the datasource returns NULL
			prev := int64(0)
			if values[1] != nil {
				prev = values[1].(int64)
			}
			next := int64(0)
			if values[2] != nil {
				next = values[2].(int64)
			}
			prevNextID = &PrevNextFID{Prev: prev, Next: next}
			firstRow = false
		}
	}
	// surface context cancellation/timeout instead of silently returning partial results
	if ctx.Err() != nil {
		err = ctx.Err()
	}
	return
}

// MapRowsToFeatures datasource agnostic mapper from SQL rows/result set to Features domain model.
// fidColumn/geomColumn identify the id and geometry columns; mapGeom decodes raw geometry bytes,
// mapRel maps columns that reference other features (see mapExternalFid). Prev/next feature ids
// are taken from the first row only.
func MapRowsToFeatures(ctx context.Context, rows *sqlx.Rows, fidColumn string, externalFidColumn string, geomColumn string,
	propConfig *config.FeatureProperties, mapGeom MapGeom, mapRel MapRelation) ([]*Feature, *PrevNextFID, error) {

	result := make([]*Feature, 0)
	columns, err := rows.Columns()
	if err != nil {
		return result, nil, err
	}

	// ordered properties cost performance, only enabled when explicitly configured
	propertiesOrder := propConfig != nil && propConfig.PropertiesInSpecificOrder

	firstRow := true
	var prevNextID *PrevNextFID
	for rows.Next() {
		var values []any
		if values, err = rows.SliceScan(); err != nil {
			return result, nil, err
		}
		feature := &Feature{Properties: NewFeatureProperties(propertiesOrder)}
		np, err := mapColumnsToFeature(ctx, firstRow, feature, columns, values, fidColumn, externalFidColumn,
			geomColumn, mapGeom, mapRel)
		if err != nil {
			return result, nil, err
		} else if firstRow {
			prevNextID = np
			firstRow = false
		}
		result = append(result, feature)
	}
	return result, prevNextID, ctx.Err()
}

// mapColumnsToFeature fills a single Feature from one row: the fid column becomes the feature ID,
// the geometry column is decoded via mapGeom and encoded to GeoJSON, bookkeeping columns
// (bbox/zoom/prev/next) are skipped or captured, and every remaining non-nil column becomes a property.
//
//nolint:cyclop,funlen
func mapColumnsToFeature(ctx context.Context, firstRow bool, feature *Feature, columns []string, values []any,
	fidColumn string, externalFidColumn string, geomColumn string, mapGeom MapGeom, mapRel MapRelation) (*PrevNextFID, error) {

	prevNextID := PrevNextFID{}
	for i, columnName := range columns {
		columnValue := values[i]

		switch columnName {
		case fidColumn:
			feature.ID = fmt.Sprint(columnValue)

		case geomColumn:
			// NULL geometry is preserved as an explicit nil property
			if columnValue == nil {
				feature.Properties.Set(columnName, nil)
				continue
			}
			rawGeom, ok := columnValue.([]byte)
			if !ok {
				return nil, fmt.Errorf("failed to read geometry from %s column in datasource", geomColumn)
			}
			mappedGeom, err := mapGeom(rawGeom)
			if err != nil {
				return nil, fmt.Errorf("failed to map/decode geometry from datasource, error: %w", err)
			}
			if mappedGeom != nil {
				feature.Geometry, err = geojson.Encode(mappedGeom)
				if err != nil {
					return nil, fmt.Errorf("failed to map/encode geometry to JSON, error: %w", err)
				}
			}

		case "minx", "miny", "maxx", "maxy", "min_zoom", "max_zoom":
			// Skip these columns used for bounding box and zoom filtering
			continue

		case PrevFid:
			// Only the first row in the result set contains the previous feature id
			if firstRow && columnValue != nil {
				prevNextID.Prev = columnValue.(int64)
			}

		case NextFid:
			// Only the first row in the result set contains the next feature id
			if firstRow && columnValue != nil {
				prevNextID.Next = columnValue.(int64)
			}

		default:
			if columnValue == nil {
				feature.Properties.Set(columnName, nil)
				continue
			}
			// Grab any non-nil, non-id, non-bounding box, & non-geometry column as a tag
			switch v := columnValue.(type) {
			case []uint8:
				// copy the byte slice: the driver may reuse its buffer on the next row
				asBytes := make([]byte, len(v))
				copy(asBytes, v)
				feature.Properties.Set(columnName, string(asBytes))
			case int64:
				feature.Properties.Set(columnName, v)
			case float64:
				feature.Properties.Set(columnName, v)
			case time.Time:
				feature.Properties.Set(columnName, v)
			case string:
				feature.Properties.Set(columnName, v)
			case bool:
				feature.Properties.Set(columnName, v)
			default:
				return nil, fmt.Errorf("unexpected type for sqlite column data: %v: %T", columns[i], v)
			}
		}
	}
	mapExternalFid(columns, values, externalFidColumn, feature, mapRel)
	return &prevNextID, ctx.Err()
}

// mapExternalFid run a second pass over columns to map external feature ID, including relations to other features
func mapExternalFid(columns []string, values []any, externalFidColumn string, feature *Feature, mapRel MapRelation) {
	for i, columnName := range columns {
		columnValue := values[i]
		switch {
		case externalFidColumn == "":
			// no external fid configured: nothing to do for any column
			continue
		case columnName == externalFidColumn:
			// When externalFidColumn is configured, overwrite feature ID and drop externalFidColumn.
			// Note: This happens in a second pass over the feature, since we want to overwrite the
			// feature ID irrespective of the order of columns in the table
			feature.ID = fmt.Sprint(columnValue)
			feature.Properties.Delete(columnName)
		case strings.Contains(columnName, externalFidColumn):
			// When externalFidColumn is part of the column name (e.g. 'foobar_external_fid') we treat
			// it as a relation to another feature.
			newColumnName, newColumnValue := mapRel(columnName, columnValue, externalFidColumn)
			if newColumnName != "" {
				columnNameWithoutExternalFID := strings.ReplaceAll(columnName, externalFidColumn, "")
				feature.Properties.SetRelation(newColumnName, newColumnValue, columnNameWithoutExternalFID)
				feature.Properties.Delete(columnName)
			}
		}
	}
}
package domain

import (
	"fmt"
	"net/url"
	"regexp"
	"sort"
	"strings"
)

// regexRemoveSeparators matches an optional single non-alphanumeric separator character
// around the external fid column name (e.g. the '_' in 'building_external_fid').
const regexRemoveSeparators = "[^a-z0-9]?"

// featurePath URL template for a single feature: <base>/collections/<collection>/items/<feature>.
const featurePath = "%s/collections/%s/items/%s"

type ProfileName string

// Profiles from OAF Part 5 as specified in https://docs.ogc.org/DRAFTS/23-058r1.html#rc_profile-parameter
const (
	RelAsKey  ProfileName = "rel-as-key"  // RelAsKey a feature reference in the response SHALL be represented by: The featureId
	RelAsURI  ProfileName = "rel-as-uri"  // RelAsURI a feature reference in the response SHALL be represented by: an HTTP(S) URI.
	RelAsLink ProfileName = "rel-as-link" // RelAsLink a feature reference in the response SHALL be represented by: an object with the property "href" and, optionally a "title"
)

// Profile from OAF Part 5, used to express relations between features
type Profile struct {
	profileName     ProfileName
	baseURL         string
	collectionNames []string
}

// NewProfile creates a Profile for the given profile name, base URL and known collection names.
func NewProfile(profileName ProfileName, baseURL url.URL, collectionNames []string) Profile {
	// copy before sorting so the caller's slice isn't mutated as a side effect
	names := make([]string, len(collectionNames))
	copy(names, collectionNames)
	// sort longest-first so findCollection's fuzzy (prefix) match prefers the most specific name
	sort.Slice(names, func(i, j int) bool {
		return len(names[i]) > len(names[j])
	})
	return Profile{
		profileName:     profileName,
		baseURL:         baseURL.String(),
		collectionNames: names,
	}
}

// MapRelationUsingProfile maps a relation column (name + value) according to the active profile.
// Returns the new column name and value; newColumnValue may stay nil when no mapping applies.
func (p *Profile) MapRelationUsingProfile(columnName string, columnValue any,
	externalFidColumn string) (newColumnName string, newColumnValue any) {

	// QuoteMeta prevents regex metacharacters in the configured column name from being
	// interpreted as a pattern (and guarantees the expression compiles, hence MustCompile —
	// the original silently ignored the Compile error).
	regex := regexp.MustCompile(regexRemoveSeparators + regexp.QuoteMeta(externalFidColumn) + regexRemoveSeparators)
	switch p.profileName {
	case RelAsLink:
		newColumnName = regex.ReplaceAllString(columnName, "")
		collectionName := p.findCollection(newColumnName)
		newColumnName += ".href"
		if columnValue != nil && collectionName != "" {
			newColumnValue = fmt.Sprintf(featurePath, p.baseURL, collectionName, columnValue)
		}
	case RelAsKey:
		newColumnName = regex.ReplaceAllString(columnName, "")
		newColumnValue = columnValue
	case RelAsURI:
		// almost identical to rel-as-link except that there's no ".href" suffix (and potentially a title in the future)
		newColumnName = regex.ReplaceAllString(columnName, "")
		collectionName := p.findCollection(newColumnName)
		// NOTE(review): unlike RelAsLink this builds a URI even when no collection matches
		// (collectionName == ""), yielding '.../collections//items/...' — confirm intended.
		if columnValue != nil {
			newColumnValue = fmt.Sprintf(featurePath, p.baseURL, collectionName, columnValue)
		}
	}
	return
}

// findCollection returns the configured collection matching the given (cleaned-up) column name,
// preferring an exact match over a prefix match; empty string when nothing matches.
func (p *Profile) findCollection(name string) string {
	// prefer exact matches first
	for _, collName := range p.collectionNames {
		if name == collName {
			return collName
		}
	}
	// then prefer fuzzy match (to support infix)
	for _, collName := range p.collectionNames {
		if strings.HasPrefix(name, collName) {
			return collName
		}
	}
	return ""
}
package domain

import (
	"slices"
	"strings"

	"github.com/PDOK/gokoala/internal/engine/util"
	perfjson "github.com/goccy/go-json"
	orderedmap "github.com/wk8/go-ordered-map/v2"
)

// FeatureProperties the properties of a GeoJSON Feature. Properties are either unordered
// (default, and has the best performance!) or ordered in a specific way as described in the config.
// Exactly one of the two representations is populated; every method branches on 'unordered != nil'.
type FeatureProperties struct {
	unordered map[string]any
	ordered   orderedmap.OrderedMap[string, any]
}

// NewFeatureProperties creates empty properties; 'order' selects the ordered representation.
func NewFeatureProperties(order bool) FeatureProperties {
	return NewFeaturePropertiesWithData(order, make(map[string]any))
}

// NewFeaturePropertiesWithData creates properties pre-filled with the given data.
// Note: when 'order' is true the insertion order of 'data' (a Go map) is random;
// a specific order only emerges from subsequent Set calls.
func NewFeaturePropertiesWithData(order bool, data map[string]any) FeatureProperties {
	if order {
		// properties are allowed to contain anything, including for example XML/GML
		ordered := *orderedmap.New[string, any](orderedmap.WithDisableHTMLEscape[string, any]())
		for k, v := range data {
			ordered.Set(k, v)
		}
		return FeatureProperties{ordered: ordered}
	}
	return FeatureProperties{unordered: data}
}

// MarshalJSON returns the JSON representation of either the ordered or unordered properties
func (p *FeatureProperties) MarshalJSON() ([]byte, error) {
	if p.unordered != nil {
		// properties are allowed to contain anything, including for example XML/GML.
		return perfjson.MarshalWithOption(p.unordered, perfjson.DisableHTMLEscape())
	}
	return p.ordered.MarshalJSON()
}

// Value returns the property value for the given key (nil when absent).
func (p *FeatureProperties) Value(key string) any {
	if p.unordered != nil {
		return p.unordered[key]
	}
	return p.ordered.Value(key)
}

// Delete removes the property with the given key (no-op when absent).
func (p *FeatureProperties) Delete(key string) {
	if p.unordered != nil {
		delete(p.unordered, key)
	} else {
		p.ordered.Delete(key)
	}
}

// Set stores the given key/value pair, overwriting any existing value.
func (p *FeatureProperties) Set(key string, value any) {
	if p.unordered != nil {
		p.unordered[key] = value
	} else {
		p.ordered.Set(key, value)
	}
}

// SetRelation stores a feature-relation property and, when ordered, positions it
// near the related column it was derived from (see moveKeyBeforePrefix).
func (p *FeatureProperties) SetRelation(key string, value any, existingKeyPrefix string) {
	p.Set(key, value)
	p.moveKeyBeforePrefix(key, existingKeyPrefix)
}

// moveKeyBeforePrefix best-effort algorithm to place the feature relation BEFORE the first shortest of any similarly named keys.
// For example, places "building.href" before "building_fk" or "building_fid".
// No-op for unordered properties (there is no order to maintain).
func (p *FeatureProperties) moveKeyBeforePrefix(key string, keyPrefix string) {
	if p.unordered != nil {
		return
	}
	// find the shortest existing key starting with the prefix (first wins on ties)
	var existingKey string
	for pair := p.ordered.Oldest(); pair != nil; pair = pair.Next() {
		if strings.HasPrefix(pair.Key, keyPrefix) {
			if existingKey != "" && len(existingKey) <= len(pair.Key) {
				continue
			}
			existingKey = pair.Key
		}
	}
	if existingKey != "" {
		// error ignored: best-effort placement, key order isn't critical for correctness
		_ = p.ordered.MoveBefore(key, existingKey)
	}
}

// Keys of the Feature properties.
//
// Note: In the future we might replace this with Go 1.23 iterators (range-over-func) however at the moment this
// isn't supported in Go templates: https://github.com/golang/go/pull/68329
func (p *FeatureProperties) Keys() []string {
	if p.unordered != nil {
		keys := util.Keys(p.unordered)
		slices.Sort(keys) // preserve alphabetical order
		return keys
	}
	result := make([]string, 0, p.ordered.Len())
	for pair := p.ordered.Oldest(); pair != nil; pair = pair.Next() {
		result = append(result, pair.Key)
	}
	return result
}
package domain

import (
	"fmt"
	"strconv"
	"strings"
)

const (
	CrsURIPrefix  = "http://www.opengis.net/def/crs/"
	UndefinedSRID = 0
	WGS84SRID     = 100000 // We use the SRID for CRS84 (WGS84) as defined in the GeoPackage, instead of EPSG:4326 (due to axis order). In time, we may need to read this value dynamically from the GeoPackage.
	WGS84CodeOGC  = "CRS84"
	WGS84CrsURI   = CrsURIPrefix + "OGC/1.3/" + WGS84CodeOGC
)

// SRID Spatial Reference System Identifier: a unique value to unambiguously identify a spatial coordinate system.
// For example '28992' in https://www.opengis.net/def/crs/EPSG/0/28992
type SRID int

// GetOrDefault returns the SRID as a plain int, falling back to WGS84 for zero or negative values.
func (s SRID) GetOrDefault() int {
	if v := int(s); v > 0 {
		return v
	}
	return WGS84SRID
}

// EpsgToSrid parses a string like "EPSG:28992" into its numeric SRID.
func EpsgToSrid(srs string) (SRID, error) {
	const prefix = "EPSG:"
	code, hasPrefix := strings.CutPrefix(srs, prefix)
	if !hasPrefix {
		return -1, fmt.Errorf("expected SRS to start with '%s', got %s", prefix, srs)
	}
	numeric, err := strconv.Atoi(code)
	if err != nil {
		return -1, fmt.Errorf("expected EPSG code to have numeric value, got %s", code)
	}
	return SRID(numeric), nil
}

// ContentCrs the coordinate reference system (represented as a URI) of the content/output to return.
type ContentCrs string

// ToLink returns link target conforming to RFC 8288
func (c ContentCrs) ToLink() string {
	return "<" + string(c) + ">"
}

// IsWGS84 reports whether the content CRS is CRS84 (WGS84).
func (c ContentCrs) IsWGS84() bool {
	return string(c) == WGS84CrsURI
}
package features

import (
	"context"
	"errors"
	"fmt"
	"log"
	"net/http"

	"github.com/PDOK/gokoala/internal/engine"
)

// queryTimedOut reports whether the error stems from cancellation or an exceeded
// deadline, i.e. the datasource query hit the configured timeout.
func queryTimedOut(err error) bool {
	return errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded)
}

// handleCollectionNotFound renders a 404 problem for an unknown collection.
func handleCollectionNotFound(w http.ResponseWriter, collectionID string) {
	msg := fmt.Sprintf("collection %s doesn't exist in this features service", collectionID)
	log.Println(msg)
	engine.RenderProblem(engine.ProblemNotFound, w, msg)
}

// handleFeatureNotFound renders a 404 problem for an unknown feature in a collection.
func handleFeatureNotFound(w http.ResponseWriter, collectionID string, featureID any) {
	msg := fmt.Sprintf("the requested feature with id: %v does not exist in collection '%v'", featureID, collectionID)
	log.Println(msg)
	engine.RenderProblem(engine.ProblemNotFound, w, msg)
}

// handleFeaturesQueryError logs the error, but sends a generic message to the client
// to prevent possible information leakage from the datasource.
func handleFeaturesQueryError(w http.ResponseWriter, collectionID string, err error) {
	msg := "failed to retrieve feature collection " + collectionID
	if queryTimedOut(err) {
		// provide more context when user hits the query timeout
		msg += ": querying the features took too long (timeout encountered). Simplify your request and try again, or contact support"
	}
	log.Printf("%s, error: %v\n", msg, err)
	// don't include sensitive information in details msg
	engine.RenderProblem(engine.ProblemServerError, w, msg)
}

// handleFeatureQueryError logs the error, but sends a generic message to the client
// to prevent possible information leakage from the datasource.
func handleFeatureQueryError(w http.ResponseWriter, collectionID string, featureID any, err error) {
	msg := fmt.Sprintf("failed to retrieve feature %v in collection %s", featureID, collectionID)
	if queryTimedOut(err) {
		// provide more context when user hits the query timeout
		msg += ": querying the feature took too long (timeout encountered). Try again, or contact support"
	}
	log.Printf("%s, error: %v\n", msg, err)
	// don't include sensitive information in details msg
	engine.RenderProblem(engine.ProblemServerError, w, msg)
}
package features

import (
	"net/http"
	"time"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/ogc/features/datasources"

	"github.com/PDOK/gokoala/internal/engine"
	"github.com/PDOK/gokoala/internal/ogc/features/domain"
)

const (
	// collectionsCrumb path prefix used when building breadcrumb links
	collectionsCrumb = "collections/"
)

var (
	// collectionsBreadcrumb shared root breadcrumb; callers append to it. Safe because the
	// literal has len == cap == 1, so append always copies instead of mutating this shared slice.
	collectionsBreadcrumb = []engine.Breadcrumb{
		{
			Name: "Collections",
			Path: "collections",
		},
	}
	featuresKey = engine.NewTemplateKey(templatesDir + "features.go.html")
	featureKey  = engine.NewTemplateKey(templatesDir + "feature.go.html")
)

// htmlFeatures renders HTML representations of feature collections and single features.
type htmlFeatures struct {
	engine *engine.Engine
}

// newHTMLFeatures creates the HTML renderer and eagerly parses both templates.
func newHTMLFeatures(e *engine.Engine) *htmlFeatures {
	e.ParseTemplate(featuresKey)
	e.ParseTemplate(featureKey)
	return &htmlFeatures{
		engine: e,
	}
}

// featureCollectionPage enriched FeatureCollection for HTML representation.
type featureCollectionPage struct {
	domain.FeatureCollection
	CollectionID       string
	Metadata           *config.GeoSpatialCollectionMetadata
	Cursor             domain.Cursors
	PrevLink           string
	NextLink           string
	Limit              int
	ReferenceDate      *time.Time
	MapSheetProperties *config.MapSheetDownloadProperties
	WebConfig          *config.WebConfig

	// Property filters as supplied by the user in the URL: filter name + value(s)
	PropertyFilters map[string]string
	// Property filters as specified in the (YAML) config, enriched with allowed values. Does not contain user supplied values
	ConfiguredPropertyFilters map[string]datasources.PropertyFilterWithAllowedValues
}

// featurePage enriched Feature for HTML representation.
type featurePage struct {
	domain.Feature
	CollectionID       string
	FeatureID          string
	Metadata           *config.GeoSpatialCollectionMetadata
	MapSheetProperties *config.MapSheetDownloadProperties
	WebConfig          *config.WebConfig
}

// features renders the HTML page for a feature collection ("items" endpoint),
// including breadcrumbs, prev/next links and configured property filters.
func (hf *htmlFeatures) features(w http.ResponseWriter, r *http.Request, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL, limit int, referenceDate *time.Time,
	propertyFilters map[string]string, configuredPropertyFilters datasources.PropertyFiltersWithAllowedValues,
	configuredFC *config.CollectionEntryFeatures, fc *domain.FeatureCollection) {
	collection := configuredCollections[collectionID]

	breadcrumbs := collectionsBreadcrumb
	breadcrumbs = append(breadcrumbs, []engine.Breadcrumb{
		{
			Name: getCollectionTitle(collectionID, collection.Metadata),
			Path: collectionsCrumb + collectionID,
		},
		{
			Name: "Items",
			Path: collectionsCrumb + collectionID + "/items",
		},
	}...)

	// zero time means "no reference date": hide it from the template.
	// NOTE(review): dereferences the pointer — assumes callers never pass a nil *time.Time; confirm.
	if referenceDate.IsZero() {
		referenceDate = nil
	}
	var mapSheetProps *config.MapSheetDownloadProperties
	var wc *config.WebConfig
	if configuredFC != nil {
		if configuredFC.MapSheetDownloads != nil {
			mapSheetProps = &configuredFC.MapSheetDownloads.Properties
		}
		wc = configuredFC.Web
	}

	// positional literal: field order must match featureCollectionPage exactly
	pageContent := &featureCollectionPage{
		*fc,
		collectionID,
		collection.Metadata,
		cursor,
		featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatHTML),
		featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatHTML),
		limit,
		referenceDate,
		mapSheetProps,
		wc,
		propertyFilters,
		configuredPropertyFilters,
	}

	lang := hf.engine.CN.NegotiateLanguage(w, r)
	hf.engine.RenderAndServePage(w, r, engine.ExpandTemplateKey(featuresKey, lang), pageContent, breadcrumbs)
}

// feature renders the HTML page for a single feature, with a breadcrumb trail
// down to the feature itself.
func (hf *htmlFeatures) feature(w http.ResponseWriter, r *http.Request, collectionID string,
	configuredFC *config.CollectionEntryFeatures, feat *domain.Feature) {
	collection := configuredCollections[collectionID]

	breadcrumbs := collectionsBreadcrumb
	breadcrumbs = append(breadcrumbs, []engine.Breadcrumb{
		{
			Name: getCollectionTitle(collectionID, collection.Metadata),
			Path: collectionsCrumb + collectionID,
		},
		{
			Name: "Items",
			Path: collectionsCrumb + collectionID + "/items",
		},
		{
			Name: feat.ID,
			Path: collectionsCrumb + collectionID + "/items/" + feat.ID,
		},
	}...)

	var mapSheetProps *config.MapSheetDownloadProperties
	var wc *config.WebConfig
	if configuredFC != nil {
		if configuredFC.MapSheetDownloads != nil {
			mapSheetProps = &configuredFC.MapSheetDownloads.Properties
		}
		wc = configuredFC.Web
	}

	// positional literal: field order must match featurePage exactly
	pageContent := &featurePage{
		*feat,
		collectionID,
		feat.ID,
		collection.Metadata,
		mapSheetProps,
		wc,
	}

	lang := hf.engine.CN.NegotiateLanguage(w, r)
	hf.engine.RenderAndServePage(w, r, engine.ExpandTemplateKey(featureKey, lang), pageContent, breadcrumbs)
}

// getCollectionTitle returns the configured title of a collection,
// falling back to the collection ID when no title is configured.
func getCollectionTitle(collectionID string, metadata *config.GeoSpatialCollectionMetadata) string {
	if metadata != nil && metadata.Title != nil {
		return *metadata.Title
	}
	return collectionID
}
package features

import (
	"bytes"
	stdjson "encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
	"os"
	"strconv"
	"time"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine"
	"github.com/PDOK/gokoala/internal/ogc/features/domain"
	perfjson "github.com/goccy/go-json"
)

var (
	now = time.Now // allow mocking

	// escape hatch to fall back to the stdlib JSON encoder (see getEncoder)
	disableJSONPerfOptimization, _ = strconv.ParseBool(os.Getenv("DISABLE_JSON_PERF_OPTIMIZATION"))
)

// jsonFeatures renders GeoJSON and JSON-FG representations of features.
type jsonFeatures struct {
	engine           *engine.Engine
	validateResponse bool
}

// newJSONFeatures creates the JSON renderer; response validation follows the config setting.
func newJSONFeatures(e *engine.Engine) *jsonFeatures {
	if *e.Config.OgcAPI.Features.ValidateResponses {
		log.Println("JSON response validation is enabled (by default). When serving large feature collections " +
			"set 'validateResponses' to 'false' to improve performance")
	}
	return &jsonFeatures{
		engine:           e,
		validateResponse: *e.Config.OgcAPI.Features.ValidateResponses,
	}
}

// featuresAsGeoJSON serves a FeatureCollection as GeoJSON, adding timestamp,
// pagination links and (when configured) per-feature download links.
func (jf *jsonFeatures) featuresAsGeoJSON(w http.ResponseWriter, r *http.Request, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL, configuredFC *config.CollectionEntryFeatures,
	fc *domain.FeatureCollection) {
	fc.Timestamp = now().Format(time.RFC3339)
	fc.Links = jf.createFeatureCollectionLinks(engine.FormatGeoJSON, collectionID, cursor, featuresURL)
	jf.createFeatureDownloadLinks(configuredFC, fc)
	if jf.validateResponse {
		// NOTE(review): &fc is a **FeatureCollection; it marshals identically to *FeatureCollection
		jf.serveAndValidateJSON(&fc, engine.MediaTypeGeoJSON, r, w)
	} else {
		serveJSON(&fc, engine.MediaTypeGeoJSON, w)
	}
}

// featureAsGeoJSON serves a single Feature as GeoJSON, adding self/alternate links
// and (when configured) a download link.
func (jf *jsonFeatures) featureAsGeoJSON(w http.ResponseWriter, r *http.Request, collectionID string,
	configuredFC *config.CollectionEntryFeatures, feat *domain.Feature, url featureURL) {
	feat.Links = jf.createFeatureLinks(engine.FormatGeoJSON, url, collectionID, feat.ID)
	if mapSheetProperties := getMapSheetProperties(configuredFC); mapSheetProperties != nil {
		feat.Links = append(feat.Links, domain.Link{
			Rel:   "enclosure",
			Title: "Download feature",
			Type:  mapSheetProperties.MediaType.String(),
			Href:  fmt.Sprintf("%v", feat.Properties.Value(mapSheetProperties.AssetURL)),
		})
	}
	if jf.validateResponse {
		jf.serveAndValidateJSON(&feat, engine.MediaTypeGeoJSON, r, w)
	} else {
		serveJSON(&feat, engine.MediaTypeGeoJSON, w)
	}
}

// featuresAsJSONFG serves a FeatureCollection as JSON-FG (OGC Features & Geometries JSON),
// converting each GeoJSON feature and recording the coordinate reference system.
func (jf *jsonFeatures) featuresAsJSONFG(w http.ResponseWriter, r *http.Request, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL, configuredFC *config.CollectionEntryFeatures,
	fc *domain.FeatureCollection, crs domain.ContentCrs) {
	fgFC := domain.JSONFGFeatureCollection{}
	fgFC.ConformsTo = []string{domain.ConformanceJSONFGCore}
	fgFC.CoordRefSys = string(crs)
	if len(fc.Features) == 0 {
		// empty (non-nil) slice so JSON output contains "features": [] instead of null
		fgFC.Features = make([]*domain.JSONFGFeature, 0)
	} else {
		for _, f := range fc.Features {
			fgF := domain.JSONFGFeature{
				ID:         f.ID,
				Links:      f.Links,
				Properties: f.Properties,
			}
			// geometry goes into 'geometry' (WGS84) or 'place' (other CRS), see setGeom
			setGeom(crs, &fgF, f)
			fgFC.Features = append(fgFC.Features, &fgF)
		}
	}
	fgFC.NumberReturned = fc.NumberReturned
	fgFC.Timestamp = now().Format(time.RFC3339)
	fgFC.Links = jf.createFeatureCollectionLinks(engine.FormatJSONFG, collectionID, cursor, featuresURL)
	jf.createJSONFGFeatureDownloadLinks(configuredFC, &fgFC)
	if jf.validateResponse {
		jf.serveAndValidateJSON(&fgFC, engine.MediaTypeJSONFG, r, w)
	} else {
		serveJSON(&fgFC, engine.MediaTypeJSONFG, w)
	}
}

// featureAsJSONFG serves a single Feature as JSON-FG, with CRS info and download link.
func (jf *jsonFeatures) featureAsJSONFG(w http.ResponseWriter, r *http.Request, collectionID string,
	configuredFC *config.CollectionEntryFeatures, f *domain.Feature, url featureURL, crs domain.ContentCrs) {
	fgF := domain.JSONFGFeature{
		ID:          f.ID,
		Links:       f.Links,
		ConformsTo:  []string{domain.ConformanceJSONFGCore},
		CoordRefSys: string(crs),
		Properties:  f.Properties,
	}
	setGeom(crs, &fgF, f)
	fgF.Links = jf.createFeatureLinks(engine.FormatJSONFG, url, collectionID, fgF.ID)
	if mapSheetProperties := getMapSheetProperties(configuredFC); mapSheetProperties != nil {
		fgF.Links = append(fgF.Links, domain.Link{
			Rel:   "enclosure",
			Title: "Download feature",
			Type:  mapSheetProperties.MediaType.String(),
			Href:  fmt.Sprintf("%v", fgF.Properties.Value(mapSheetProperties.AssetURL)),
		})
	}
	if jf.validateResponse {
		jf.serveAndValidateJSON(&fgF, engine.MediaTypeJSONFG, r, w)
	} else {
		serveJSON(&fgF, engine.MediaTypeJSONFG, w)
	}
}

// createFeatureCollectionLinks builds self/alternate links for a collection response in the
// given format, plus next/prev pagination links when the cursor indicates more pages.
func (jf *jsonFeatures) createFeatureCollectionLinks(currentFormat string, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL) []domain.Link {
	links := make([]domain.Link, 0)
	switch currentFormat {
	case engine.FormatGeoJSON:
		links = append(links, domain.Link{
			Rel:   "self",
			Title: "This document as GeoJSON",
			Type:  engine.MediaTypeGeoJSON,
			Href:  featuresURL.toSelfURL(collectionID, engine.FormatJSON),
		})
		links = append(links, domain.Link{
			Rel:   "alternate",
			Title: "This document as JSON-FG",
			Type:  engine.MediaTypeJSONFG,
			Href:  featuresURL.toSelfURL(collectionID, engine.FormatJSONFG),
		})
	case engine.FormatJSONFG:
		links = append(links, domain.Link{
			Rel:   "self",
			Title: "This document as JSON-FG",
			Type:  engine.MediaTypeJSONFG,
			Href:  featuresURL.toSelfURL(collectionID, engine.FormatJSONFG),
		})
		links = append(links, domain.Link{
			Rel:   "alternate",
			Title: "This document as GeoJSON",
			Type:  engine.MediaTypeGeoJSON,
			Href:  featuresURL.toSelfURL(collectionID, engine.FormatJSON),
		})
	}
	// HTML alternate is offered regardless of the current JSON flavor
	links = append(links, domain.Link{
		Rel:   "alternate",
		Title: "This document as HTML",
		Type:  engine.MediaTypeHTML,
		Href:  featuresURL.toSelfURL(collectionID, engine.FormatHTML),
	})
	if cursor.HasNext {
		switch currentFormat {
		case engine.FormatGeoJSON:
			links = append(links, domain.Link{
				Rel:   "next",
				Title: "Next page",
				Type:  engine.MediaTypeGeoJSON,
				Href:  featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatJSON),
			})
		case engine.FormatJSONFG:
			links = append(links, domain.Link{
				Rel:   "next",
				Title: "Next page",
				Type:  engine.MediaTypeJSONFG,
				Href:  featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatJSONFG),
			})
		}
	}
	if cursor.HasPrev {
		switch currentFormat {
		case engine.FormatGeoJSON:
			links = append(links, domain.Link{
				Rel:   "prev",
				Title: "Previous page",
				Type:  engine.MediaTypeGeoJSON,
				Href:  featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatJSON),
			})
		case engine.FormatJSONFG:
			links = append(links, domain.Link{
				Rel:   "prev",
				Title: "Previous page",
				Type:  engine.MediaTypeJSONFG,
				Href:  featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatJSONFG),
			})
		}
	}
	return links
}

// createFeatureLinks builds self/alternate links for a single-feature response in the
// given format, plus a link back to the owning collection.
func (jf *jsonFeatures) createFeatureLinks(currentFormat string, url featureURL,
	collectionID string, featureID string) []domain.Link {
	links := make([]domain.Link, 0)
	switch currentFormat {
	case engine.FormatGeoJSON:
		links = append(links, domain.Link{
			Rel:   "self",
			Title: "This document as GeoJSON",
			Type:  engine.MediaTypeGeoJSON,
			Href:  url.toSelfURL(collectionID, featureID, engine.FormatJSON),
		})
		links = append(links, domain.Link{
			Rel:   "alternate",
			Title: "This document as JSON-FG",
			Type:  engine.MediaTypeJSONFG,
			Href:  url.toSelfURL(collectionID, featureID, engine.FormatJSONFG),
		})
	case engine.FormatJSONFG:
		links = append(links, domain.Link{
			Rel:   "self",
			Title: "This document as JSON-FG",
			Type:  engine.MediaTypeJSONFG,
			Href:  url.toSelfURL(collectionID, featureID, engine.FormatJSONFG),
		})
		links = append(links, domain.Link{
			Rel:   "alternate",
			Title: "This document as GeoJSON",
			Type:  engine.MediaTypeGeoJSON,
			Href:  url.toSelfURL(collectionID, featureID, engine.FormatJSON),
		})
	}
	links = append(links, domain.Link{
		Rel:   "alternate",
		Title: "This document as HTML",
		Type:  engine.MediaTypeHTML,
		Href:  url.toSelfURL(collectionID, featureID, engine.FormatHTML),
	})
	links = append(links, domain.Link{
		Rel:   "collection",
		Title: "The collection to which this feature belongs",
		Type:  engine.MediaTypeJSON,
		Href:  url.toCollectionURL(collectionID, engine.FormatJSON),
	})
	return links
}

// createFeatureDownloadLinks attaches an 'enclosure' download link to every feature in a
// GeoJSON collection when map sheet downloads are configured. Note: replaces existing links.
func (jf *jsonFeatures) createFeatureDownloadLinks(configuredFC *config.CollectionEntryFeatures,
	fc *domain.FeatureCollection) {
	if mapSheetProperties := getMapSheetProperties(configuredFC); mapSheetProperties != nil {
		for _, feature := range fc.Features {
			links := make([]domain.Link, 0)
			links = append(links, domain.Link{
				Rel:   "enclosure",
				Title: "Download feature",
				Type:  mapSheetProperties.MediaType.String(),
				Href:  fmt.Sprintf("%v", feature.Properties.Value(mapSheetProperties.AssetURL)),
			})
			feature.Links = links
		}
	}
}

// createJSONFGFeatureDownloadLinks is the JSON-FG counterpart of createFeatureDownloadLinks.
func (jf *jsonFeatures) createJSONFGFeatureDownloadLinks(configuredFC *config.CollectionEntryFeatures,
	fc *domain.JSONFGFeatureCollection) {
	if mapSheetProperties := getMapSheetProperties(configuredFC); mapSheetProperties != nil {
		for _, feature := range fc.Features {
			links := make([]domain.Link, 0)
			links = append(links, domain.Link{
				Rel:   "enclosure",
				Title: "Download feature",
				Type:  mapSheetProperties.MediaType.String(),
				Href:  fmt.Sprintf("%v", feature.Properties.Value(mapSheetProperties.AssetURL)),
			})
			feature.Links = links
		}
	}
}

// serveAndValidateJSON serves JSON after performing OpenAPI response validation.
func (jf *jsonFeatures) serveAndValidateJSON(input any, contentType string, r *http.Request, w http.ResponseWriter) {
	json := &bytes.Buffer{}
	if err := getEncoder(json).Encode(input); err != nil {
		handleJSONEncodingFailure(err, w)
		return
	}
	jf.engine.Serve(w, r, false /* performed earlier */, jf.validateResponse, contentType, json.Bytes())
}

// serveJSON serves JSON *WITHOUT* OpenAPI validation by writing directly to the response output stream
func serveJSON(input any, contentType string, w http.ResponseWriter) {
	w.Header().Set(engine.HeaderContentType, contentType)

	if err := getEncoder(w).Encode(input); err != nil {
		handleJSONEncodingFailure(err, w)
		return
	}
}

// jsonEncoder abstracts over the stdlib and goccy JSON encoders (see getEncoder).
type jsonEncoder interface {
	Encode(input any) error
}

// Create JSONEncoder. Note escaping of '<', '>' and '&' is disabled (HTMLEscape is false).
// Especially the '&' is important since we use this character in the next/prev links.
func getEncoder(w io.Writer) jsonEncoder {
	if disableJSONPerfOptimization {
		// use Go stdlib JSON encoder
		encoder := stdjson.NewEncoder(w)
		encoder.SetEscapeHTML(false)
		return encoder
	}
	// use ~7% overall faster 3rd party JSON encoder (in case of issues switch back to stdlib using env variable)
	encoder := perfjson.NewEncoder(w)
	encoder.SetEscapeHTML(false)
	return encoder
}

// handleJSONEncodingFailure logs the encoding error and renders a generic server problem.
func handleJSONEncodingFailure(err error, w http.ResponseWriter) {
	log.Printf("JSON encoding failed: %v", err)
	engine.RenderProblem(engine.ProblemServerError, w, "Failed to write JSON response")
}

// setGeom places the geometry in the JSON-FG 'geometry' member for WGS84 output,
// or in 'place' for any other CRS (per the JSON-FG spec).
func setGeom(crs domain.ContentCrs, jsonfgFeature *domain.JSONFGFeature, feature *domain.Feature) {
	if crs.IsWGS84() {
		jsonfgFeature.Geometry = feature.Geometry
	} else {
		jsonfgFeature.Place = feature.Geometry
	}
}

// getMapSheetProperties returns the configured map sheet download properties, or nil
// when the collection has no map sheet downloads configured.
func getMapSheetProperties(configuredFC *config.CollectionEntryFeatures) *config.MapSheetDownloadProperties {
	if configuredFC != nil && configuredFC.MapSheetDownloads != nil {
		return &configuredFC.MapSheetDownloads.Properties
	}
	return nil
}
package features

import (
	"errors"
	"fmt"
	"log"
	"net/http"
	"strconv"
	"time"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine/util"
	"github.com/google/uuid"
	"github.com/twpayne/go-geom"

	"github.com/PDOK/gokoala/internal/engine"
	"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
	ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
	"github.com/PDOK/gokoala/internal/ogc/features/datasources/geopackage"
	"github.com/PDOK/gokoala/internal/ogc/features/datasources/postgis"
	"github.com/PDOK/gokoala/internal/ogc/features/domain"
	"github.com/go-chi/chi/v5"
)

const (
	templatesDir = "internal/ogc/features/templates/"
)

var (
	// configuredCollections caches the feature collections from config, keyed by collection ID.
	// Populated once in NewFeatures; read from the request hot path.
	configuredCollections map[string]config.GeoSpatialCollection

	// emptyFeatureCollection is a shared sentinel returned when a query yields no features,
	// avoiding an allocation per request.
	emptyFeatureCollection = &domain.FeatureCollection{Features: make([]*domain.Feature, 0)}
)

// DatasourceKey identifies a datasource by projection (SRID) + collection.
type DatasourceKey struct {
	srid         int
	collectionID string
}

// DatasourceConfig couples a datasource config with the collections it serves.
type DatasourceConfig struct {
	collections config.GeoSpatialCollections
	ds          config.Datasource
}

// Features is the OGC API Features building block.
type Features struct {
	engine                    *engine.Engine
	datasources               map[DatasourceKey]ds.Datasource
	configuredPropertyFilters map[string]ds.PropertyFiltersWithAllowedValues
	defaultProfile            domain.Profile

	html *htmlFeatures
	json *jsonFeatures
}

// NewFeatures sets up datasources, rebuilds the OpenAPI spec with property-filter info
// and registers the /items and /items/{featureId} routes.
func NewFeatures(e *engine.Engine) *Features {
	datasources := createDatasources(e)
	configuredCollections = cacheConfiguredFeatureCollections(e)
	configuredPropertyFilters := configurePropertyFiltersWithAllowedValues(datasources, configuredCollections)

	rebuildOpenAPIForFeatures(e, datasources, configuredPropertyFilters)

	f := &Features{
		engine:                    e,
		datasources:               datasources,
		configuredPropertyFilters: configuredPropertyFilters,
		defaultProfile:            domain.NewProfile(domain.RelAsLink, *e.Config.BaseURL.URL, util.Keys(configuredCollections)),
		html:                      newHTMLFeatures(e),
		json:                      newJSONFeatures(e),
	}
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/items", f.Features())
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/items/{featureId}", f.Feature())
	return f
}

// Features serve a
FeatureCollection with the given collectionId
//
// Beware: this is one of the most performance sensitive pieces of code in the system.
// Try to do as much initialization work outside the hot path, and only do essential
// operations inside this method.
func (f *Features) Features() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := f.engine.OpenAPI.ValidateRequest(r); err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		collectionID := chi.URLParam(r, "collectionId")
		collection, ok := configuredCollections[collectionID]
		if !ok {
			handleCollectionNotFound(w, collectionID)
			return
		}
		url, encodedCursor, limit, inputSRID, outputSRID, contentCrs, bbox, referenceDate, propertyFilters, err := f.parseFeaturesURL(r, collection)
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		w.Header().Add(engine.HeaderContentCrs, contentCrs.ToLink())

		var newCursor domain.Cursors
		var fc *domain.FeatureCollection
		if querySingleDatasource(inputSRID, outputSRID, bbox) {
			// fast path: input and output CRS are served by the same datasource
			datasource := f.datasources[DatasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collectionID}]
			fc, newCursor, err = datasource.GetFeatures(r.Context(), collectionID, ds.FeaturesCriteria{
				Cursor:           encodedCursor.Decode(url.checksum()),
				Limit:            limit,
				InputSRID:        inputSRID.GetOrDefault(),
				OutputSRID:       outputSRID.GetOrDefault(),
				Bbox:             bbox,
				TemporalCriteria: getTemporalCriteria(collection, referenceDate),
				PropertyFilters:  propertyFilters,
				// Add filter, filter-lang
			}, f.defaultProfile)
			if err != nil {
				handleFeaturesQueryError(w, collectionID, err)
				return
			}
		} else {
			// slower path: get feature ids by input CRS (step 1), then the actual features in output CRS (step 2)
			var fids []int64
			datasource := f.datasources[DatasourceKey{srid: inputSRID.GetOrDefault(), collectionID: collectionID}]
			fids, newCursor, err = datasource.GetFeatureIDs(r.Context(), collectionID, ds.FeaturesCriteria{
				Cursor:           encodedCursor.Decode(url.checksum()),
				Limit:            limit,
				InputSRID:        inputSRID.GetOrDefault(),
				OutputSRID:       outputSRID.GetOrDefault(),
				Bbox:             bbox,
				TemporalCriteria: getTemporalCriteria(collection, referenceDate),
				PropertyFilters:  propertyFilters,
				// Add filter, filter-lang
			})
			if err == nil && fids != nil {
				datasource = f.datasources[DatasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collectionID}]
				fc, err = datasource.GetFeaturesByID(r.Context(), collectionID, fids, f.defaultProfile)
			}
			if err != nil {
				handleFeaturesQueryError(w, collectionID, err)
				return
			}
		}
		if fc == nil {
			// never return JSON null for an empty result
			fc = emptyFeatureCollection
		}

		format := f.engine.CN.NegotiateFormat(r)
		switch format {
		case engine.FormatHTML:
			f.html.features(w, r, collectionID, newCursor, url, limit, &referenceDate, propertyFilters,
				f.configuredPropertyFilters[collectionID], collection.Features, fc)
		case engine.FormatGeoJSON, engine.FormatJSON:
			f.json.featuresAsGeoJSON(w, r, collectionID, newCursor, url, collection.Features, fc)
		case engine.FormatJSONFG:
			f.json.featuresAsJSONFG(w, r, collectionID, newCursor, url, collection.Features, fc, contentCrs)
		default:
			engine.RenderProblem(engine.ProblemNotAcceptable, w, fmt.Sprintf("format '%s' is not supported", format))
			return
		}
	}
}

// Feature serves a single Feature
func (f *Features) Feature() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := f.engine.OpenAPI.ValidateRequest(r); err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		collectionID := chi.URLParam(r, "collectionId")
		collection, ok := configuredCollections[collectionID]
		if !ok {
			handleCollectionNotFound(w, collectionID)
			return
		}
		featureID, err := parseFeatureID(r)
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		url := featureURL{*f.engine.Config.BaseURL.URL, r.URL.Query()}
		outputSRID, contentCrs, err := url.parse()
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		w.Header().Add(engine.HeaderContentCrs, contentCrs.ToLink())

		datasource := f.datasources[DatasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collectionID}]
		feat, err := datasource.GetFeature(r.Context(), collectionID, featureID, f.defaultProfile)
		if err != nil {
			handleFeatureQueryError(w, collectionID, featureID, err)
			return
		}
		if feat == nil {
			handleFeatureNotFound(w, collectionID, featureID)
			return
		}

		format := f.engine.CN.NegotiateFormat(r)
		switch format {
		case engine.FormatHTML:
			f.html.feature(w, r, collectionID, collection.Features, feat)
		case engine.FormatGeoJSON, engine.FormatJSON:
			f.json.featureAsGeoJSON(w, r, collectionID, collection.Features, feat, url)
		case engine.FormatJSONFG:
			f.json.featureAsJSONFG(w, r, collectionID, collection.Features, feat, url, contentCrs)
		default:
			engine.RenderProblem(engine.ProblemNotAcceptable, w, fmt.Sprintf("format '%s' is not supported", format))
			return
		}
	}
}

// parseFeaturesURL parses all query parameters relevant for a features (collection items) request.
func (f *Features) parseFeaturesURL(r *http.Request, collection config.GeoSpatialCollection) (featureCollectionURL, domain.EncodedCursor,
	int, domain.SRID, domain.SRID, domain.ContentCrs, *geom.Bounds, time.Time, map[string]string, error) {

	url := featureCollectionURL{
		*f.engine.Config.BaseURL.URL,
		r.URL.Query(),
		f.engine.Config.OgcAPI.Features.Limit,
		f.configuredPropertyFilters[collection.ID],
		collection.HasDateTime(),
	}
	encodedCursor, limit, inputSRID, outputSRID, contentCrs, bbox, referenceDate, propertyFilters, err := url.parse()
	return url, encodedCursor, limit, inputSRID, outputSRID, contentCrs, bbox, referenceDate, propertyFilters, err
}

// parseFeatureID accepts either a UUID or a numerical feature ID.
func parseFeatureID(r *http.Request) (any, error) {
	var featureID any
	featureID, err := uuid.Parse(chi.URLParam(r, "featureId"))
	if err != nil {
		// fallback to numerical feature id
		featureID, err = strconv.ParseInt(chi.URLParam(r, "featureId"), 10, 0)
		if err != nil {
			return nil, errors.New("feature ID must be a UUID or number")
		}
	}
	return featureID, nil
}

func cacheConfiguredFeatureCollections(e *engine.Engine)
map[string]config.GeoSpatialCollection {
	result := make(map[string]config.GeoSpatialCollection)
	for _, collection := range e.Config.OgcAPI.Features.Collections {
		result[collection.ID] = collection
	}
	return result
}

// createDatasources instantiates one datasource per unique collection+projection
// combination found in the config, while sharing actual connections where possible.
func createDatasources(e *engine.Engine) map[DatasourceKey]ds.Datasource {
	configured := make(map[DatasourceKey]*DatasourceConfig, len(e.Config.OgcAPI.Features.Collections))

	// configure collection specific datasources first
	configureCollectionDatasources(e, configured)
	// now configure top-level datasources, for the whole dataset. But only when
	// there's no collection specific datasource already configured
	configureTopLevelDatasources(e, configured)

	if len(configured) == 0 {
		log.Fatal("no datasource(s) configured for OGC API Features, check config")
	}

	// now we have a mapping from collection+projection => desired datasource (the 'configured' map).
	// but the actual datasource connection still needs to be CREATED and associated with these collections.
	// this is what we're going to do now, but in the process we need to make sure no duplicate datasources
	// are instantiated, since multiple collections can point to the same datasource and we only want to have
	// a single datasource/connection-pool serving those collections.
	createdDatasources := make(map[config.Datasource]ds.Datasource)
	result := make(map[DatasourceKey]ds.Datasource, len(configured))
	for k, cfg := range configured {
		if cfg == nil {
			continue
		}
		existing, ok := createdDatasources[cfg.ds]
		if !ok {
			// make sure to only create a new datasource when it hasn't already been done before (for another collection)
			created := newDatasource(e, cfg.collections, cfg.ds)
			createdDatasources[cfg.ds] = created
			result[k] = created
		} else {
			result[k] = existing
		}
	}
	return result
}

// configurePropertyFiltersWithAllowedValues asks each datasource for its property filters
// (including allowed values) and verifies they match the filters configured per collection.
func configurePropertyFiltersWithAllowedValues(datasources map[DatasourceKey]ds.Datasource,
	collections map[string]config.GeoSpatialCollection) map[string]ds.PropertyFiltersWithAllowedValues {

	result := make(map[string]ds.PropertyFiltersWithAllowedValues)
	for k, datasource := range datasources {
		result[k.collectionID] = datasource.GetPropertyFiltersWithAllowedValues(k.collectionID)
	}

	// sanity check to make sure datasources return all configured property filters.
	for _, collection := range collections {
		actual := len(result[collection.ID])
		if collection.Features != nil && collection.Features.Filters.Properties != nil {
			expected := len(collection.Features.Filters.Properties)
			if expected != actual {
				log.Fatalf("number of property filters received from datasource for collection '%s' does not "+
					"match the number of configured property filters. Expected filters: %d, got from datasource: %d",
					collection.ID, expected, actual)
			}
		}
	}
	return result
}

// configureTopLevelDatasources assigns the dataset-wide datasources to every collection
// that doesn't already have a collection-specific datasource for the given projection.
func configureTopLevelDatasources(e *engine.Engine, result map[DatasourceKey]*DatasourceConfig) {
	cfg := e.Config.OgcAPI.Features
	if cfg.Datasources == nil {
		return
	}
	var defaultDS *DatasourceConfig
	for _, coll := range cfg.Collections {
		key := DatasourceKey{srid: domain.WGS84SRID, collectionID: coll.ID}
		if result[key] == nil {
			if defaultDS == nil {
				defaultDS = &DatasourceConfig{cfg.Collections, cfg.Datasources.DefaultWGS84}
			}
			result[key] = defaultDS
		}
	}
	for _, additional := range cfg.Datasources.Additional {
		for _, coll := range cfg.Collections {
			srid, err := domain.EpsgToSrid(additional.Srs)
			if err != nil {
				log.Fatal(err)
			}
			key := DatasourceKey{srid: srid.GetOrDefault(), collectionID: coll.ID}
			if result[key] == nil {
				result[key] = &DatasourceConfig{cfg.Collections, additional.Datasource}
			}
		}
	}
}

// configureCollectionDatasources assigns collection-specific datasources (default WGS84
// plus any additional projections) to their collections.
func configureCollectionDatasources(e *engine.Engine, result map[DatasourceKey]*DatasourceConfig) {
	cfg := e.Config.OgcAPI.Features
	for _, coll := range cfg.Collections {
		if coll.Features == nil || coll.Features.Datasources == nil {
			continue
		}
		defaultDS := &DatasourceConfig{cfg.Collections, coll.Features.Datasources.DefaultWGS84}
		result[DatasourceKey{srid: domain.WGS84SRID, collectionID: coll.ID}] = defaultDS

		for _, additional := range coll.Features.Datasources.Additional {
			srid, err := domain.EpsgToSrid(additional.Srs)
			if err != nil {
				log.Fatal(err)
			}
			additionalDS := &DatasourceConfig{cfg.Collections, additional.Datasource}
			result[DatasourceKey{srid: srid.GetOrDefault(), collectionID: coll.ID}] = additionalDS
		}
	}
}

// newDatasource creates the actual datasource connection for the given config.
// It registers a shutdown hook so the connection is closed on engine shutdown.
func newDatasource(e *engine.Engine, coll config.GeoSpatialCollections, dsConfig config.Datasource) ds.Datasource {
	var datasource ds.Datasource
	if dsConfig.GeoPackage != nil {
		datasource = geopackage.NewGeoPackage(coll, *dsConfig.GeoPackage)
	} else if dsConfig.PostGIS != nil {
		datasource = postgis.NewPostGIS()
	}
	if datasource == nil {
		// fail fast with a clear message instead of a nil-pointer panic on the
		// datasource.Close method value below when no supported datasource is configured
		log.Fatal("unsupported datasource configured for OGC API Features, check config")
	}
	e.RegisterShutdownHook(datasource.Close)
	return datasource
}

// querySingleDatasource reports whether a single datasource can answer the request:
// either there's no bbox, or input and output CRS are effectively the same
// (undefined CRS is treated as WGS84).
func querySingleDatasource(input domain.SRID, output domain.SRID, bbox *geom.Bounds) bool {
	return bbox == nil ||
		int(input) == int(output) ||
		(int(input) == domain.UndefinedSRID && int(output) == domain.WGS84SRID) ||
		(int(input) == domain.WGS84SRID && int(output) == domain.UndefinedSRID)
}

// getTemporalCriteria builds temporal criteria for collections that support datetime filtering;
// returns the zero value otherwise.
func getTemporalCriteria(collection config.GeoSpatialCollection, referenceDate time.Time) ds.TemporalCriteria {
	var temporalCriteria ds.TemporalCriteria
	if collection.HasDateTime() {
		temporalCriteria = ds.TemporalCriteria{
			ReferenceDate:     referenceDate,
			StartDateProperty: collection.Metadata.TemporalProperties.StartDate,
			EndDateProperty:   collection.Metadata.TemporalProperties.EndDate}
	}
	return temporalCriteria
}
package features

import (
	"fmt"
	"log"
	"slices"
	"strings"

	"github.com/PDOK/gokoala/internal/engine"
	ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
)

// OpenAPIPropertyFilter describes a property filter as it appears in the generated OpenAPI spec.
type OpenAPIPropertyFilter struct {
	Name          string
	Description   string
	DataType      string
	AllowedValues []string
}

// rebuildOpenAPIForFeatures Rebuild OpenAPI spec with additional info from given datasources
func rebuildOpenAPIForFeatures(e *engine.Engine, datasources map[DatasourceKey]ds.Datasource, filters map[string]ds.PropertyFiltersWithAllowedValues) {
	propertyFiltersByCollection, err := createPropertyFiltersByCollection(datasources, filters)
	if err != nil {
		log.Fatal(err)
	}
	e.RebuildOpenAPI(struct {
		PropertyFiltersByCollection map[string][]OpenAPIPropertyFilter
	}{
		PropertyFiltersByCollection: propertyFiltersByCollection,
	})
}

// createPropertyFiltersByCollection matches the configured property filters against the
// actual feature table columns of each datasource, and errors on any filter that
// references a non-existing column.
func createPropertyFiltersByCollection(datasources map[DatasourceKey]ds.Datasource,
	filters map[string]ds.PropertyFiltersWithAllowedValues) (map[string][]OpenAPIPropertyFilter, error) {

	result := make(map[string][]OpenAPIPropertyFilter)
	for k, datasource := range datasources {
		configuredPropertyFilters := filters[k.collectionID]
		if len(configuredPropertyFilters) == 0 {
			continue
		}
		// skip datasources that can't provide table metadata (best effort)
		featTable, err := datasource.GetFeatureTableMetadata(k.collectionID)
		if err != nil {
			continue
		}
		featTableColumns := featTable.ColumnsWithDataType()
		propertyFilters := make([]OpenAPIPropertyFilter, 0, len(featTableColumns))
		for _, fc := range configuredPropertyFilters {
			match := false
			for name, dataType := range featTableColumns {
				if fc.Name == name {
					// match found between property filter in config file and database column name
					dataType = datasourceToOpenAPI(dataType)
					propertyFilters = append(propertyFilters, OpenAPIPropertyFilter{
						Name:          name,
						Description:   fc.Description,
						DataType:      dataType,
						AllowedValues: fc.AllowedValues,
					})
					match = true
					break
				}
			}
			if !match {
				return nil, fmt.Errorf("invalid property filter specified, "+
					"column '%s' doesn't exist in datasource attached to collection '%s'", fc.Name,
k.collectionID)
			}
		}
		// deterministic order in the generated OpenAPI spec
		slices.SortFunc(propertyFilters, func(a, b OpenAPIPropertyFilter) int {
			return strings.Compare(a.Name, b.Name)
		})
		result[k.collectionID] = propertyFilters
	}
	return result, nil
}

// datasourceToOpenAPI translates database data types to OpenAPI data types.
// Any type that isn't explicitly recognized falls back to "string", so the
// former dedicated TEXT/VARCHAR case was redundant with the default.
func datasourceToOpenAPI(dataType string) string {
	switch strings.ToUpper(dataType) {
	case "INTEGER":
		return "integer"
	case "REAL", "NUMERIC":
		return "number"
	default:
		// includes TEXT and VARCHAR
		return "string"
	}
}
package features

import (
	"bytes"
	"errors"
	"fmt"
	"hash/fnv"
	"math"
	"net/url"
	"slices"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine"
	"github.com/PDOK/gokoala/internal/ogc/features/datasources"
	d "github.com/PDOK/gokoala/internal/ogc/features/domain"
	"github.com/twpayne/go-geom"
)

const (
	cursorParam    = "cursor"
	limitParam     = "limit"
	crsParam       = "crs"
	dateTimeParam  = "datetime"
	bboxParam      = "bbox"
	bboxCrsParam   = "bbox-crs"
	filterParam    = "filter"
	filterCrsParam = "filter-crs"

	// propertyFilterMaxLength caps the size of a single property filter value.
	propertyFilterMaxLength = 512
	propertyFilterWildcard  = "*"
)

var (
	checksumExcludedParams = []string{engine.FormatParam, cursorParam} // don't include these in checksum
)

// URL to a page in a collection of features
type featureCollectionURL struct {
	baseURL                   url.URL
	params                    url.Values
	limit                     config.Limit
	configuredPropertyFilters map[string]datasources.PropertyFilterWithAllowedValues
	supportsDatetime          bool
}

// parse the given URL to values required to deliver a set of Features.
// All parse errors are collected and joined into a single error.
func (fc featureCollectionURL) parse() (encodedCursor d.EncodedCursor, limit int, inputSRID d.SRID, outputSRID d.SRID,
	contentCrs d.ContentCrs, bbox *geom.Bounds, referenceDate time.Time, propertyFilters map[string]string, err error) {

	err = fc.validateNoUnknownParams()
	if err != nil {
		return
	}

	encodedCursor = d.EncodedCursor(fc.params.Get(cursorParam))
	limit, limitErr := parseLimit(fc.params, fc.limit)
	outputSRID, outputSRIDErr := parseCrsToSRID(fc.params, crsParam)
	contentCrs = parseCrsToContentCrs(fc.params)
	propertyFilters, pfErr := parsePropertyFilters(fc.configuredPropertyFilters, fc.params)
	bbox, bboxSRID, bboxErr := parseBbox(fc.params)
	referenceDate, dateTimeErr := parseDateTime(fc.params, fc.supportsDatetime)
	_, filterSRID, filterErr := parseFilter(fc.params)
	inputSRID, inputSRIDErr := consolidateSRIDs(bboxSRID, filterSRID)

	err = errors.Join(limitErr, outputSRIDErr, bboxErr, pfErr, dateTimeErr, filterErr, inputSRIDErr)
	return
}

// Calculate checksum over
the query parameters that have a "filtering effect" on
// the result set such as limit, bbox, property filters, CQL filters, etc. These query params
// aren't allowed to be changed during pagination. The checksum allows for the latter
// to be verified
func (fc featureCollectionURL) checksum() []byte {
	var valuesToHash bytes.Buffer
	sortedQueryParams := make([]string, 0, len(fc.params))
	for k := range fc.params {
		sortedQueryParams = append(sortedQueryParams, k)
	}
	sort.Strings(sortedQueryParams) // sort keys

OUTER:
	for _, k := range sortedQueryParams {
		for _, skip := range checksumExcludedParams {
			if k == skip {
				continue OUTER
			}
		}
		paramValues := fc.params[k]
		if paramValues != nil {
			slices.Sort(paramValues) // sort values belonging to key
		}
		for _, s := range paramValues {
			valuesToHash.WriteString(s)
		}
	}

	bytesToHash := valuesToHash.Bytes()
	if len(bytesToHash) > 0 {
		hasher := fnv.New32a() // fast non-cryptographic hash
		_, _ = hasher.Write(bytesToHash)
		return hasher.Sum(nil)
	}
	return []byte{}
}

// toSelfURL returns the canonical URL of the current page.
func (fc featureCollectionURL) toSelfURL(collectionID string, format string) string {
	copyParams := clone(fc.params)
	copyParams.Set(engine.FormatParam, format)

	result := fc.baseURL.JoinPath("collections", collectionID, "items")
	result.RawQuery = copyParams.Encode()
	return result.String()
}

// toPrevNextURL returns the URL of the previous/next page identified by the given cursor.
func (fc featureCollectionURL) toPrevNextURL(collectionID string, cursor d.EncodedCursor, format string) string {
	copyParams := clone(fc.params)
	copyParams.Set(engine.FormatParam, format)
	copyParams.Set(cursorParam, cursor.String())

	result := fc.baseURL.JoinPath("collections", collectionID, "items")
	result.RawQuery = copyParams.Encode()
	return result.String()
}

// implements req 7.6 (https://docs.ogc.org/is/17-069r4/17-069r4.html#query_parameters)
func (fc featureCollectionURL) validateNoUnknownParams() error {
	copyParams := clone(fc.params)
	copyParams.Del(engine.FormatParam)
	copyParams.Del(limitParam)
	copyParams.Del(cursorParam)
	copyParams.Del(crsParam)
	copyParams.Del(dateTimeParam)
	copyParams.Del(bboxParam)
	copyParams.Del(bboxCrsParam)
	copyParams.Del(filterParam)
	copyParams.Del(filterCrsParam)
	for pf := range fc.configuredPropertyFilters {
		copyParams.Del(pf)
	}
	if len(copyParams) > 0 {
		return fmt.Errorf("unknown query parameter(s) found: %v", copyParams.Encode())
	}
	return nil
}

// URL to a specific Feature
type featureURL struct {
	baseURL url.URL
	params  url.Values
}

// parse the given URL to values required to deliver a specific Feature
func (f featureURL) parse() (srid d.SRID, contentCrs d.ContentCrs, err error) {
	err = f.validateNoUnknownParams()
	if err != nil {
		return
	}
	srid, err = parseCrsToSRID(f.params, crsParam)
	contentCrs = parseCrsToContentCrs(f.params)
	return
}

// toSelfURL returns the canonical URL of this feature.
func (f featureURL) toSelfURL(collectionID string, featureID string, format string) string {
	newParams := url.Values{}
	newParams.Set(engine.FormatParam, format)

	result := f.baseURL.JoinPath("collections", collectionID, "items", featureID)
	result.RawQuery = newParams.Encode()
	return result.String()
}

// toCollectionURL returns the URL of the collection this feature belongs to.
func (f featureURL) toCollectionURL(collectionID string, format string) string {
	newParams := url.Values{}
	newParams.Set(engine.FormatParam, format)

	result := f.baseURL.JoinPath("collections", collectionID)
	result.RawQuery = newParams.Encode()
	return result.String()
}

// implements req 7.6 (https://docs.ogc.org/is/17-069r4/17-069r4.html#query_parameters)
func (f featureURL) validateNoUnknownParams() error {
	copyParams := clone(f.params)
	copyParams.Del(engine.FormatParam)
	copyParams.Del(crsParam)
	if len(copyParams) > 0 {
		return fmt.Errorf("unknown query parameter(s) found: %v", copyParams.Encode())
	}
	return nil
}

// clone returns a shallow copy of the given query parameters.
func clone(params url.Values) url.Values {
	copyParams := url.Values{}
	for k, v := range params {
		copyParams[k] = v
	}
	return copyParams
}

// consolidateSRIDs derives the single input SRID from bbox-crs and filter-crs; when both are
// given they must be equal.
func consolidateSRIDs(bboxSRID d.SRID, filterSRID d.SRID) (inputSRID d.SRID, err error) {
	if bboxSRID != d.UndefinedSRID && filterSRID != d.UndefinedSRID && bboxSRID != filterSRID {
		return 0, errors.New("bbox-crs and filter-crs need to be
equal. " +
			"Can't use more than one CRS as input, but input and output CRS may differ")
	}
	if bboxSRID != d.UndefinedSRID || filterSRID != d.UndefinedSRID {
		inputSRID = bboxSRID // or filterCrs, both the same
	}
	return inputSRID, err
}

// parseLimit parses and clamps the limit parameter. Values above the configured
// maximum are silently capped (required by the OGC spec, see quote below).
func parseLimit(params url.Values, limitCfg config.Limit) (int, error) {
	limit := limitCfg.Default
	var err error
	if params.Get(limitParam) != "" {
		limit, err = strconv.Atoi(params.Get(limitParam))
		if err != nil {
			err = errors.New("limit must be numeric")
		}
		// "If the value of the limit parameter is larger than the maximum value, this SHALL NOT result
		// in an error (instead use the maximum as the parameter value)."
		if limit > limitCfg.Max {
			limit = limitCfg.Max
		}
	}
	if limit < 0 {
		err = errors.New("limit can't be negative")
	}
	return limit, err
}

// parseBbox parses the bbox and bbox-crs parameters into bounds + SRID.
// Returns nil bounds when no bbox was given.
func parseBbox(params url.Values) (*geom.Bounds, d.SRID, error) {
	bboxSRID, err := parseCrsToSRID(params, bboxCrsParam)
	if err != nil {
		return nil, d.UndefinedSRID, err
	}

	if params.Get(bboxParam) == "" {
		return nil, d.UndefinedSRID, nil
	}
	bboxValues := strings.Split(params.Get(bboxParam), ",")
	if len(bboxValues) != 4 {
		return nil, bboxSRID, errors.New("bbox should contain exactly 4 values " +
			"separated by commas: minx,miny,maxx,maxy")
	}

	bboxFloats := make([]float64, len(bboxValues))
	for i, v := range bboxValues {
		bboxFloats[i], err = strconv.ParseFloat(v, 64)
		if err != nil {
			return nil, bboxSRID, fmt.Errorf("failed to parse value %s in bbox, error: %w", v, err)
		}
	}

	bbox := geom.NewBounds(geom.XY).Set(bboxFloats...)
	if surfaceArea(bbox) <= 0 {
		return nil, bboxSRID, errors.New("bbox has no surface area")
	}
	return bbox, bboxSRID, nil
}

func surfaceArea(bbox *geom.Bounds) float64 {
	// Use the same logic as bbox.Area() in https://github.com/go-spatial/geom to calculate surface area.
	// The bounds.Area() in github.com/twpayne/go-geom behaves differently and is not what we're looking for.
	return math.Abs((bbox.Max(1) - bbox.Min(1)) * (bbox.Max(0) - bbox.Min(0)))
}

// parseCrsToContentCrs returns the requested output CRS, defaulting to WGS84.
func parseCrsToContentCrs(params url.Values) d.ContentCrs {
	param := params.Get(crsParam)
	if param == "" {
		return d.WGS84CrsURI
	}
	return d.ContentCrs(param)
}

// parseCrsToSRID parses a CRS URI (e.g. .../EPSG/0/28992) from the given query param into an SRID.
func parseCrsToSRID(params url.Values, paramName string) (d.SRID, error) {
	param := params.Get(paramName)
	if param == "" {
		return d.UndefinedSRID, nil
	}
	param = strings.TrimSpace(param)
	if !strings.HasPrefix(param, d.CrsURIPrefix) {
		return d.UndefinedSRID, fmt.Errorf("%s param should start with %s, got: %s", paramName, d.CrsURIPrefix, param)
	}
	var srid d.SRID
	lastIndex := strings.LastIndex(param, "/")
	if lastIndex != -1 {
		crsCode := param[lastIndex+1:]
		if crsCode == d.WGS84CodeOGC {
			return d.WGS84SRID, nil // CRS84 is WGS84, just like EPSG:4326 (only axis order differs but SRID is the same)
		}
		val, err := strconv.Atoi(crsCode)
		if err != nil {
			return 0, fmt.Errorf("expected numerical CRS code, received: %s", crsCode)
		}
		srid = d.SRID(val)
	}
	return srid, nil
}

// Support simple filtering on properties: https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameters_for_filtering_on_feature_properties
func parsePropertyFilters(configuredPropertyFilters map[string]datasources.PropertyFilterWithAllowedValues, params url.Values) (map[string]string, error) {
	propertyFilters := make(map[string]string)
	for name := range configuredPropertyFilters {
		pf := params.Get(name)
		if pf != "" {
			if len(pf) > propertyFilterMaxLength {
				return nil, fmt.Errorf("property filter %s is too large, "+
					"value is limited to %d characters", name, propertyFilterMaxLength)
			}
			if strings.Contains(pf, propertyFilterWildcard) {
				// if/when we choose to support wildcards in the future, make sure wildcards are
				// only allowed at the END (suffix) of the filter
				return nil, fmt.Errorf("property filter %s contains a wildcard (%s), "+
					"wildcard filtering is not allowed", name, propertyFilterWildcard)
			}
			propertyFilters[name] = pf
		}
	}
	return propertyFilters, nil
}

// Support filtering on
datetime: https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameter_datetime
func parseDateTime(params url.Values, datetimeSupported bool) (time.Time, error) {
	datetime := params.Get(dateTimeParam)
	if datetime == "" {
		// no datetime filter requested
		return time.Time{}, nil
	}
	if !datetimeSupported {
		return time.Time{}, errors.New("datetime param is currently not supported for this collection")
	}
	if strings.Contains(datetime, "/") {
		return time.Time{}, fmt.Errorf("datetime param '%s' represents an interval, intervals are currently not supported", datetime)
	}
	return time.Parse(time.RFC3339, datetime)
}

// parseFilter parses the (currently unsupported) CQL filter param and its CRS.
func parseFilter(params url.Values) (filter string, filterSRID d.SRID, err error) {
	filter = params.Get(filterParam)
	filterSRID, _ = parseCrsToSRID(params, filterCrsParam)

	if filter == "" {
		return filter, filterSRID, nil
	}
	return filter, filterSRID, errors.New("CQL filter param is currently not supported")
}
package geovolumes

import (
	"errors"
	"log"
	"net/http"
	"net/url"
	"strings"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine"
	"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
	"github.com/go-chi/chi/v5"
)

// ThreeDimensionalGeoVolumes is the OGC 3D GeoVolumes building block. It reverse-proxies
// 3D Tiles and DTM/Quantized Mesh requests to a configured tileserver.
type ThreeDimensionalGeoVolumes struct {
	engine           *engine.Engine
	validateResponse bool
}

// NewThreeDimensionalGeoVolumes validates the tileserver URL and registers all
// 3D Tiles and DTM/Quantized Mesh routes.
func NewThreeDimensionalGeoVolumes(e *engine.Engine) *ThreeDimensionalGeoVolumes {
	_, err := url.ParseRequestURI(e.Config.OgcAPI.GeoVolumes.TileServer.String())
	if err != nil {
		log.Fatalf("invalid tileserver url provided: %v", err)
	}
	geoVolumes := &ThreeDimensionalGeoVolumes{
		engine:           e,
		validateResponse: *e.Config.OgcAPI.GeoVolumes.ValidateResponses,
	}

	// 3D Tiles
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles", geoVolumes.Tileset("tileset.json"))
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())

	// DTM/Quantized Mesh
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh", geoVolumes.Tileset("layer.json"))
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())

	// path '/3dtiles' or '/quantized-mesh' is preferred but optional when requesting the actual tiles/tileset.
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())

	return geoVolumes
}

// Tileset serves tileset.json manifest in case of OGC 3D Tiles (= separate spec from OGC 3D GeoVolumes) requests or
// layer.json manifest in case of quantized mesh requests. Both requests will be proxied to the configured tileserver.
func (t *ThreeDimensionalGeoVolumes) Tileset(fileName string) http.HandlerFunc {
	if !strings.HasSuffix(fileName, ".json") {
		log.Fatalf("manifest should be a JSON file")
	}
	return func(w http.ResponseWriter, r *http.Request) {
		t.tileSet(w, r, fileName)
	}
}

// ExplicitTileset serves OGC 3D Tiles manifest (= separate spec from OGC 3D GeoVolumes) or
// quantized mesh manifest. All requests will be proxied to the configured tileserver.
func (t *ThreeDimensionalGeoVolumes) ExplicitTileset() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tileSetName := chi.URLParam(r, "explicitTileSet")
		if tileSetName == "" {
			engine.RenderProblem(engine.ProblemNotFound, w)
			return
		}
		t.tileSet(w, r, tileSetName+".json")
	}
}

// Tile reverse proxy to tileserver for actual 3D tiles (from OGC 3D Tiles, separate spec
// from OGC 3D GeoVolumes) or DTM Quantized Mesh tiles
func (t *ThreeDimensionalGeoVolumes) Tile() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collectionID := chi.URLParam(r, "3dContainerId")
		collection, err := t.idToCollection(collectionID)
		if err != nil {
			engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
			return
		}
		// collections may override the path used on the tileserver; default is the collection ID
		tileServerPath := collectionID
		if collection.GeoVolumes != nil && collection.GeoVolumes.TileServerPath != nil {
			tileServerPath = *collection.GeoVolumes.TileServerPath
		}
		tilePathPrefix := chi.URLParam(r, "tilePathPrefix") // optional
		tileMatrix := chi.URLParam(r, "tileMatrix")
		tileRow := chi.URLParam(r, "tileRow")
		tileColAndSuffix := chi.URLParam(r, "tileColAndSuffix")

		contentType := ""
		if collection.GeoVolumes != nil && collection.GeoVolumes.HasDTM() {
			// DTM has a specialized mediatype, although application/octet-stream will also work with Cesium
			contentType = engine.MediaTypeQuantizedMesh
		}

		path, _ := url.JoinPath("/", tileServerPath, tilePathPrefix, tileMatrix, tileRow, tileColAndSuffix)
		t.reverseProxy(w, r, path, true, contentType)
	}
}

// tileSet proxies a tileset/layer manifest request for the given collection to the tileserver.
func (t *ThreeDimensionalGeoVolumes) tileSet(w http.ResponseWriter, r *http.Request, tileSet string) {
	collectionID := chi.URLParam(r, "3dContainerId")
	collection, err := t.idToCollection(collectionID)
	if err != nil {
		engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
		return
	}
	// collections may override the path used on the tileserver; default is the collection ID
	tileServerPath := collectionID
	if collection.GeoVolumes != nil && collection.GeoVolumes.TileServerPath != nil {
		tileServerPath = *collection.GeoVolumes.TileServerPath
	}
	path, _ := url.JoinPath("/", tileServerPath, tileSet)
	t.reverseProxy(w, r, path, false, "")
}

// reverseProxy forwards the request to the configured tileserver, optionally preferring
// 204 responses and/or overwriting the content type.
func (t *ThreeDimensionalGeoVolumes) reverseProxy(w http.ResponseWriter, r *http.Request, path string, prefer204 bool, contentTypeOverwrite string) {
	target, err := url.Parse(t.engine.Config.OgcAPI.GeoVolumes.TileServer.String() + path)
	if err != nil {
		log.Printf("invalid target url, can't proxy tiles: %v", err)
		engine.RenderProblem(engine.ProblemServerError, w)
		return
	}
	t.engine.ReverseProxyAndValidate(w, r, target, prefer204, contentTypeOverwrite, t.validateResponse)
}

// idToCollection looks up the GeoVolumes collection with the given ID.
func (t *ThreeDimensionalGeoVolumes) idToCollection(cid string) (*config.GeoSpatialCollection, error) {
	for _, collection := range t.engine.Config.OgcAPI.GeoVolumes.Collections {
		if collection.ID == cid {
			return &collection, nil
		}
	}
	return nil, errors.New("no matching collection found")
}
package processes

import (
	"net/http"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine"
)

// Processes implements the OGC API Processes building block by forwarding
// all process-related requests to a backing processes server.
type Processes struct {
	engine *engine.Engine
}

// NewProcesses registers forwarding routes for jobs, processes and the
// processes API description on the router.
func NewProcesses(e *engine.Engine) *Processes {
	p := &Processes{engine: e}
	server := e.Config.OgcAPI.Processes.ProcessesServer
	for _, pattern := range []string{"/jobs*", "/processes*", "/api*"} {
		e.Router.Handle(pattern, p.forwarder(server))
	}
	return p
}

// forwarder returns a handler that proxies the incoming request (path and query
// string included) to the configured processes server.
func (p *Processes) forwarder(processServer config.URL) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// shallow copy so the configured base URL itself is never mutated
		target := *processServer.URL
		target.Path = processServer.URL.Path + r.URL.Path
		target.RawQuery = r.URL.RawQuery
		p.engine.ReverseProxy(w, r, &target, false, "")
	}
}
package ogc import ( "github.com/PDOK/gokoala/internal/engine" "github.com/PDOK/gokoala/internal/ogc/common/core" "github.com/PDOK/gokoala/internal/ogc/common/geospatial" "github.com/PDOK/gokoala/internal/ogc/features" "github.com/PDOK/gokoala/internal/ogc/geovolumes" "github.com/PDOK/gokoala/internal/ogc/processes" "github.com/PDOK/gokoala/internal/ogc/styles" "github.com/PDOK/gokoala/internal/ogc/tiles" ) func SetupBuildingBlocks(engine *engine.Engine) { // OGC Common Part 1, will always be started core.NewCommonCore(engine) // OGC Common part 2 if engine.Config.HasCollections() { geospatial.NewCollections(engine) } // OGC 3D GeoVolumes API if engine.Config.OgcAPI.GeoVolumes != nil { geovolumes.NewThreeDimensionalGeoVolumes(engine) } // OGC Tiles API if engine.Config.OgcAPI.Tiles != nil { tiles.NewTiles(engine) } // OGC Styles API if engine.Config.OgcAPI.Styles != nil { styles.NewStyles(engine) } // OGC Features API if engine.Config.OgcAPI.Features != nil { features.NewFeatures(engine) } // OGC Processes API if engine.Config.OgcAPI.Processes != nil { processes.NewProcesses(engine) } }
package styles

import (
	"log"
	"net/http"
	"slices"
	"strings"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine"
	"github.com/PDOK/gokoala/internal/engine/util"
	"github.com/go-chi/chi/v5"
)

const (
	templatesDir = "internal/ogc/styles/templates/"
	stylesPath   = "/styles"
	stylesCrumb  = "styles/"
	// projectionDelimiter separates style ID and projection in a style instance
	// name, e.g. "mystyle__netherlandsrdnewquad".
	projectionDelimiter = "__"
)

var (
	// defaultProjection is assigned once in NewStyles (lowercased first configured projection).
	defaultProjection = ""
	stylesBreadcrumbs = []engine.Breadcrumb{
		{
			Name: "Styles",
			Path: "styles",
		},
	}
)

// stylesTemplateData is the params struct fed to the styles list templates.
type stylesTemplateData struct {
	// Projection used by default
	DefaultProjection string
	// All supported projections for this dataset
	SupportedProjections []config.SupportedSrs
	// All supported projections by GoKoala (for tiles)
	AllProjections map[string]any
}

// stylesMetadataTemplateData is the params struct fed to the style metadata templates.
type stylesMetadataTemplateData struct {
	// Metadata about this style
	Metadata config.Style
	// Projection used by this style
	Projection string
}

// Styles implements the OGC API Styles building block.
type Styles struct {
	engine *engine.Engine
}

// NewStyles pre-renders style (metadata) templates for every style/projection
// combination and registers the /styles endpoints on the router.
// Exits the process (log.Fatalf) on configuration errors.
// NOTE(review): reads e.Config.OgcAPI.Tiles, so this appears to assume OGC API
// Tiles is also configured — confirm against the config validation.
func NewStyles(e *engine.Engine) *Styles {
	// default style must be the first entry in supported styles
	if e.Config.OgcAPI.Styles.Default != e.Config.OgcAPI.Styles.SupportedStyles[0].ID {
		log.Fatalf("default style must be first entry in supported styles. '%s' does not match '%s'",
			e.Config.OgcAPI.Styles.SupportedStyles[0].ID, e.Config.OgcAPI.Styles.Default)
	}
	allProjections := util.Cast(config.AllTileProjections)
	supportedProjections := e.Config.OgcAPI.Tiles.GetProjections()
	if len(supportedProjections) == 0 {
		log.Fatalf("failed to setup OGC API Styles, no supported projections (SRS) found in OGC API Tiles")
	}
	// the first configured projection acts as the default for style URLs
	// that don't name a projection explicitly (see Style/StyleMetadata)
	defaultProjection = strings.ToLower(config.AllTileProjections[supportedProjections[0].Srs])

	e.RenderTemplatesWithParams(stylesPath,
		&stylesTemplateData{defaultProjection, supportedProjections, allProjections},
		stylesBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"styles.go.json"),
		engine.NewTemplateKey(templatesDir+"styles.go.html"))
	renderStylesPerProjection(e, supportedProjections)

	styles := &Styles{
		engine: e,
	}

	e.Router.Get(stylesPath, styles.Styles())
	e.Router.Get(stylesPath+"/{style}", styles.Style())
	e.Router.Get(stylesPath+"/{style}/metadata", styles.StyleMetadata())
	return styles
}

// Styles serves the list of available styles in the negotiated format and language.
func (s *Styles) Styles() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		key := engine.NewTemplateKeyWithLanguage(
			templatesDir+"styles.go."+s.engine.CN.NegotiateFormat(r),
			s.engine.CN.NegotiateLanguage(w, r))
		s.engine.ServePage(w, r, key)
	}
}

// Style serves a single style, either as an HTML page or as a style definition
// (e.g. Mapbox style) in the negotiated format.
func (s *Styles) Style() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		style := chi.URLParam(r, "style")
		styleID := strings.Split(style, projectionDelimiter)[0]

		// Previously, the API did not utilise separate styles per projection; whereas the current implementation
		// advertises all possible combinations of available styles and available projections as separate styles.
		// To ensure that the use of style URLs without projection remains possible for previously published APIs,
		// URLs without an explicit projection are defaulted to the first configured projection.
		if style == styleID {
			style += projectionDelimiter + defaultProjection
		}

		styleFormat := s.engine.CN.NegotiateFormat(r)
		var key engine.TemplateKey
		if styleFormat == engine.FormatHTML {
			key = engine.NewTemplateKeyWithNameAndLanguage(
				templatesDir+"style.go.html", style, s.engine.CN.NegotiateLanguage(w, r))
		} else {
			var instanceName string
			if slices.Contains(s.engine.CN.GetSupportedStyleFormats(), styleFormat) {
				instanceName = style + "." + styleFormat
			} else {
				// unsupported style format requested: fall back to Mapbox style
				styleFormat = engine.FormatMapboxStyle
				instanceName = style + "." + engine.FormatMapboxStyle
			}
			key = engine.TemplateKey{
				Name:         styleID + s.engine.CN.GetStyleFormatExtension(styleFormat),
				Directory:    s.engine.Config.OgcAPI.Styles.StylesDir,
				Format:       styleFormat,
				InstanceName: instanceName,
				Language:     s.engine.CN.NegotiateLanguage(w, r),
			}
		}
		s.engine.ServePage(w, r, key)
	}
}

// StyleMetadata serves metadata about a single style in the negotiated format and language.
func (s *Styles) StyleMetadata() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		style := chi.URLParam(r, "style")
		styleID := strings.Split(style, projectionDelimiter)[0]

		// Previously, the API did not utilise separate styles per projection; whereas the current implementation
		// advertises all possible combinations of available styles and available projections as separate styles.
		// To ensure that the use of style URLs without projection remains possible for previously published APIs,
		// URLs without an explicit projection are defaulted to the first configured projection.
		if style == styleID {
			style += projectionDelimiter + defaultProjection
		}

		key := engine.NewTemplateKeyWithNameAndLanguage(
			templatesDir+"styleMetadata.go."+s.engine.CN.NegotiateFormat(r),
			style, s.engine.CN.NegotiateLanguage(w, r))
		s.engine.ServePage(w, r, key)
	}
}

// renderStylesPerProjection pre-renders style metadata (JSON + HTML) for every
// configured style/projection combination, then renders the style definitions themselves.
func renderStylesPerProjection(e *engine.Engine, supportedProjections []config.SupportedSrs) {
	for _, style := range e.Config.OgcAPI.Styles.SupportedStyles {
		for _, supportedSrs := range supportedProjections {
			projection := config.AllTileProjections[supportedSrs.Srs]
			zoomLevelRange := supportedSrs.ZoomLevelRange
			// instance name, e.g. "mystyle__netherlandsrdnewquad"
			styleInstanceID := style.ID + projectionDelimiter + strings.ToLower(projection)
			styleProjectionBreadcrumb := engine.Breadcrumb{
				Name: style.Title + " (" + projection + ")",
				Path: stylesCrumb + styleInstanceID,
			}
			data := &stylesMetadataTemplateData{style, projection}

			// Render metadata template (JSON)
			path := stylesPath + "/" + styleInstanceID + "/metadata"
			e.RenderTemplatesWithParams(path, data, nil,
				engine.NewTemplateKeyWithName(templatesDir+"styleMetadata.go.json", styleInstanceID))

			// Render metadata template (HTML)
			styleMetadataBreadcrumbs := stylesBreadcrumbs
			styleMetadataBreadcrumbs = append(styleMetadataBreadcrumbs, []engine.Breadcrumb{
				styleProjectionBreadcrumb,
				{
					Name: "Metadata",
					Path: stylesCrumb + styleInstanceID + "/metadata",
				},
			}...)
			e.RenderTemplatesWithParams(path, data, styleMetadataBreadcrumbs,
				engine.NewTemplateKeyWithName(templatesDir+"styleMetadata.go.html", styleInstanceID))

			// Add existing style definitions to rendered templates
			renderStylePerFormat(e, style, styleInstanceID, projection, zoomLevelRange, styleProjectionBreadcrumb)
		}
	}
}

// renderStylePerFormat pre-renders the style definition (per configured format)
// and the style HTML page for one style instance (style + projection).
func renderStylePerFormat(e *engine.Engine, style config.Style, styleInstanceID string,
	projection string, zoomLevelRange config.ZoomLevelRange, styleProjectionBreadcrumb engine.Breadcrumb) {

	for _, styleFormat := range style.Formats {
		formatExtension := e.CN.GetStyleFormatExtension(styleFormat.Format)
		styleKey := engine.TemplateKey{
			Name:         style.ID + formatExtension,
			Directory:    e.Config.OgcAPI.Styles.StylesDir,
			Format:       styleFormat.Format,
			InstanceName: styleInstanceID + "." + styleFormat.Format,
		}
		path := stylesPath + "/" + styleInstanceID

		// Render template (JSON)
		e.RenderTemplatesWithParams(path,
			struct {
				Projection     string
				ZoomLevelRange config.ZoomLevelRange
			}{Projection: projection, ZoomLevelRange: zoomLevelRange}, nil, styleKey)

		// Render template (HTML)
		styleBreadCrumbs := stylesBreadcrumbs
		styleBreadCrumbs = append(styleBreadCrumbs, styleProjectionBreadcrumb)
		e.RenderTemplatesWithParams(path, style, styleBreadCrumbs,
			engine.NewTemplateKeyWithName(templatesDir+"style.go.html", styleInstanceID))
	}
}
package tiles

import (
	"errors"
	"fmt"
	"log"
	"net/http"
	"net/url"
	"os"
	"strconv"
	"strings"

	"github.com/PDOK/gokoala/config"
	"github.com/PDOK/gokoala/internal/engine"
	"github.com/PDOK/gokoala/internal/engine/util"
	g "github.com/PDOK/gokoala/internal/ogc/common/geospatial"
	"github.com/go-chi/chi/v5"
	"gopkg.in/yaml.v3"
)

const (
	templatesDir            = "internal/ogc/tiles/templates/"
	tilesPath               = "/tiles"
	tilesLocalPath          = "tiles/"
	tileMatrixSetsPath      = "/tileMatrixSets"
	tileMatrixSetsLocalPath = "tileMatrixSets/"
	// defaultTilesTmpl is the tileserver URL layout used when no URITemplateTiles is configured.
	defaultTilesTmpl = "{tms}/{z}/{x}/{y}." + engine.FormatMVTAlternative
	collectionsCrumb = "collections/"
	tilesCrumbTitle  = "Tiles"
	// tmsLimitsDir holds one YAML file per tile matrix set with row/col limits per zoomlevel.
	tmsLimitsDir = "internal/ogc/tiles/tileMatrixSetLimits/"
)

var (
	tilesBreadcrumbs = []engine.Breadcrumb{
		{
			Name: tilesCrumbTitle,
			Path: "tiles",
		},
	}
	tileMatrixSetsBreadcrumbs = []engine.Breadcrumb{
		{
			Name: "Tile Matrix Sets",
			Path: "tileMatrixSets",
		},
	}
	collectionsBreadcrumb = []engine.Breadcrumb{
		{
			Name: "Collections",
			Path: "collections",
		},
	}
)

// templateData is the params struct fed to the tiles templates.
type templateData struct {
	// Tiles top-level or collection-level tiles config
	config.Tiles

	// BaseURL part of the url prefixing "/tiles"
	BaseURL string

	// All supported projections by GoKoala (for tiles)
	AllProjections map[string]any
}

// Tiles implements the OGC API Tiles building block.
type Tiles struct {
	engine *engine.Engine

	// limits per tile matrix set ID, then per zoomlevel (tileMatrix)
	tileMatrixSetLimits map[string]map[int]TileMatrixSetLimits
}

// TileMatrixSetLimits defines the valid row/column range within one zoomlevel (tileMatrix).
type TileMatrixSetLimits struct {
	MinCol int `yaml:"minCol" json:"minCol"`
	MaxCol int `yaml:"maxCol" json:"maxCol"`
	MinRow int `yaml:"minRow" json:"minRow"`
	MaxRow int `yaml:"maxRow" json:"maxRow"`
}

// NewTiles loads tile matrix set limits, pre-renders tiles templates and registers
// tile matrix set, dataset-level and collection-level tiles endpoints on the router.
func NewTiles(e *engine.Engine) *Tiles {
	tiles := &Tiles{engine: e}

	// TileMatrixSetLimits
	supportedProjections := e.Config.OgcAPI.Tiles.GetProjections()
	tiles.tileMatrixSetLimits = readTileMatrixSetLimits(supportedProjections)

	// TileMatrixSets
	renderTileMatrixTemplates(e)
	e.Router.Get(tileMatrixSetsPath, tiles.TileMatrixSets())
	e.Router.Get(tileMatrixSetsPath+"/{tileMatrixSetId}", tiles.TileMatrixSet())

	// Top-level tiles (dataset tiles in OGC spec)
	if e.Config.OgcAPI.Tiles.DatasetTiles != nil {
		renderTilesTemplates(e, nil, templateData{
			*e.Config.OgcAPI.Tiles.DatasetTiles,
			e.Config.BaseURL.String(),
			util.Cast(config.AllTileProjections),
		})
		e.Router.Get(tilesPath, tiles.TilesetsList())
		e.Router.Get(tilesPath+"/{tileMatrixSetId}", tiles.Tileset())
		e.Router.Head(tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.Tile(*e.Config.OgcAPI.Tiles.DatasetTiles))
		e.Router.Get(tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.Tile(*e.Config.OgcAPI.Tiles.DatasetTiles))
	}

	// Collection-level tiles (geodata tiles in OGC spec)
	geoDataTiles := map[string]config.Tiles{}
	for _, coll := range e.Config.OgcAPI.Tiles.Collections {
		if coll.Tiles == nil {
			continue
		}
		// &coll is used synchronously inside renderTilesTemplates, so taking the
		// address of the loop variable is safe here
		renderTilesTemplates(e, &coll, templateData{
			coll.Tiles.GeoDataTiles,
			e.Config.BaseURL.String() + g.CollectionsPath + "/" + coll.ID,
			util.Cast(config.AllTileProjections),
		})
		geoDataTiles[coll.ID] = coll.Tiles.GeoDataTiles
	}
	if len(geoDataTiles) != 0 {
		e.Router.Get(g.CollectionsPath+"/{collectionId}"+tilesPath, tiles.TilesetsListForCollection())
		e.Router.Get(g.CollectionsPath+"/{collectionId}"+tilesPath+"/{tileMatrixSetId}", tiles.TilesetForCollection())
		e.Router.Head(g.CollectionsPath+"/{collectionId}"+tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.TileForCollection(geoDataTiles))
		e.Router.Get(g.CollectionsPath+"/{collectionId}"+tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.TileForCollection(geoDataTiles))
	}
	return tiles
}

// TileMatrixSets serves the list of supported tile matrix sets.
func (t *Tiles) TileMatrixSets() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		key := engine.NewTemplateKeyWithLanguage(templatesDir+"tileMatrixSets.go."+t.engine.CN.NegotiateFormat(r), t.engine.CN.NegotiateLanguage(w, r))
		t.engine.ServePage(w, r, key)
	}
}

// TileMatrixSet serves the definition of a single tile matrix set.
func (t *Tiles) TileMatrixSet() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId")
		key := engine.NewTemplateKeyWithLanguage(templatesDir+tileMatrixSetsLocalPath+tileMatrixSetID+".go."+t.engine.CN.NegotiateFormat(r), t.engine.CN.NegotiateLanguage(w, r))
		t.engine.ServePage(w, r, key)
	}
}

// TilesetsList serves the list of dataset-level tilesets.
func (t *Tiles) TilesetsList() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		key := engine.NewTemplateKeyWithLanguage(templatesDir+"tiles.go."+t.engine.CN.NegotiateFormat(r), t.engine.CN.NegotiateLanguage(w, r))
		t.engine.ServePage(w, r, key)
	}
}

// TilesetsListForCollection serves the list of tilesets for a single collection.
func (t *Tiles) TilesetsListForCollection() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collectionID := chi.URLParam(r, "collectionId")
		key := engine.NewTemplateKeyWithNameAndLanguage(templatesDir+"tiles.go."+t.engine.CN.NegotiateFormat(r), collectionID, t.engine.CN.NegotiateLanguage(w, r))
		t.engine.ServePage(w, r, key)
	}
}

// Tileset serves metadata about a dataset-level tileset (one per tile matrix set).
func (t *Tiles) Tileset() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId")
		key := engine.NewTemplateKeyWithLanguage(templatesDir+tilesLocalPath+tileMatrixSetID+".go."+t.engine.CN.NegotiateFormat(r), t.engine.CN.NegotiateLanguage(w, r))
		t.engine.ServePage(w, r, key)
	}
}

// TilesetForCollection serves metadata about a collection-level tileset (one per tile matrix set).
func (t *Tiles) TilesetForCollection() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collectionID := chi.URLParam(r, "collectionId")
		tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId")
		key := engine.NewTemplateKeyWithNameAndLanguage(templatesDir+tilesLocalPath+tileMatrixSetID+".go."+t.engine.CN.NegotiateFormat(r), collectionID, t.engine.CN.NegotiateLanguage(w, r))
		t.engine.ServePage(w, r, key)
	}
}

// Tile reverse proxy to configured tileserver/object storage. Assumes the backing resource is publicly accessible.
func (t *Tiles) Tile(tilesConfig config.Tiles) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId") tileMatrix := chi.URLParam(r, "tileMatrix") tileRow := chi.URLParam(r, "tileRow") tileCol, err := getTileColumn(r, t.engine.CN.NegotiateFormat(r)) if err != nil { engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error()) return } tm, tr, tc, err := parseTileParams(tileMatrix, tileRow, tileCol) if err != nil { engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, strings.ReplaceAll(err.Error(), "strconv.Atoi: ", "")) return } if _, ok := t.tileMatrixSetLimits[tileMatrixSetID]; !ok { // unknown tileMatrixSet err = fmt.Errorf("unknown tileMatrixSet '%s'", tileMatrixSetID) engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error()) return } err = checkTileMatrixSetLimits(t.tileMatrixSetLimits, tileMatrixSetID, tm, tr, tc) if err != nil { engine.RenderProblem(engine.ProblemNotFound, w, err.Error()) return } target, err := createTilesURL(tileMatrixSetID, tileMatrix, tileCol, tileRow, tilesConfig) if err != nil { engine.RenderProblemAndLog(engine.ProblemServerError, w, err) return } t.engine.ReverseProxy(w, r, target, true, engine.MediaTypeMVT) } } // TileForCollection reverse proxy to configured tileserver/object storage for tiles within a given collection. // Assumes the backing resource is publicly accessible. 
func (t *Tiles) TileForCollection(tilesConfigByCollection map[string]config.Tiles) http.HandlerFunc { return func(w http.ResponseWriter, r *http.Request) { collectionID := chi.URLParam(r, "collectionId") tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId") tileMatrix := chi.URLParam(r, "tileMatrix") tileRow := chi.URLParam(r, "tileRow") tileCol, err := getTileColumn(r, t.engine.CN.NegotiateFormat(r)) if err != nil { engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error()) return } tm, tr, tc, err := parseTileParams(tileMatrix, tileRow, tileCol) if err != nil { engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, strings.ReplaceAll(err.Error(), "strconv.Atoi: ", "")) return } if _, ok := t.tileMatrixSetLimits[tileMatrixSetID]; !ok { // unknown tileMatrixSet err = fmt.Errorf("unknown tileMatrixSet '%s'", tileMatrixSetID) engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error()) return } err = checkTileMatrixSetLimits(t.tileMatrixSetLimits, tileMatrixSetID, tm, tr, tc) if err != nil { engine.RenderProblem(engine.ProblemNotFound, w, err.Error()) return } tilesConfig, ok := tilesConfigByCollection[collectionID] if !ok { err = fmt.Errorf("no tiles available for collection: %s", collectionID) engine.RenderProblemAndLog(engine.ProblemNotFound, w, err, err.Error()) return } target, err := createTilesURL(tileMatrixSetID, tileMatrix, tileCol, tileRow, tilesConfig) if err != nil { engine.RenderProblemAndLog(engine.ProblemServerError, w, err) return } t.engine.ReverseProxy(w, r, target, true, engine.MediaTypeMVT) } } func getTileColumn(r *http.Request, format string) (string, error) { tileCol := chi.URLParam(r, "tileCol") // We support content negotiation using Accept header and ?f= param, but also // using the .pbf extension. This is for backwards compatibility. 
if !strings.HasSuffix(tileCol, "."+engine.FormatMVTAlternative) { // if no format is specified, default to mvt if f := strings.Replace(format, engine.FormatJSON, engine.FormatMVT, 1); f != engine.FormatMVT && f != engine.FormatMVTAlternative { return "", errors.New("specify tile format. Currently only Mapbox Vector Tiles (?f=mvt) tiles are supported") } } else { tileCol = tileCol[:len(tileCol)-4] // remove .pbf extension } return tileCol, nil } func createTilesURL(tileMatrixSetID string, tileMatrix string, tileCol string, tileRow string, tilesCfg config.Tiles) (*url.URL, error) { tilesTmpl := defaultTilesTmpl if tilesCfg.URITemplateTiles != nil { tilesTmpl = *tilesCfg.URITemplateTiles } // OGC spec is (default) z/row/col but tileserver is z/col/row (z/x/y) replacer := strings.NewReplacer("{tms}", tileMatrixSetID, "{z}", tileMatrix, "{x}", tileCol, "{y}", tileRow) path, _ := url.JoinPath("/", replacer.Replace(tilesTmpl)) target, err := url.Parse(tilesCfg.TileServer.String() + path) if err != nil { return nil, fmt.Errorf("invalid target url, can't proxy tiles: %w", err) } return target, nil } func renderTileMatrixTemplates(e *engine.Engine) { e.RenderTemplates(tileMatrixSetsPath, tileMatrixSetsBreadcrumbs, engine.NewTemplateKey(templatesDir+"tileMatrixSets.go.json"), engine.NewTemplateKey(templatesDir+"tileMatrixSets.go.html")) for _, projection := range config.AllTileProjections { breadcrumbs := tileMatrixSetsBreadcrumbs breadcrumbs = append(breadcrumbs, []engine.Breadcrumb{ { Name: projection, Path: tileMatrixSetsLocalPath + projection, }, }...) 
e.RenderTemplates(tileMatrixSetsPath+"/"+projection, breadcrumbs, engine.NewTemplateKey(templatesDir+tileMatrixSetsLocalPath+projection+".go.json"), engine.NewTemplateKey(templatesDir+tileMatrixSetsLocalPath+projection+".go.html")) } } func renderTilesTemplates(e *engine.Engine, collection *config.GeoSpatialCollection, data templateData) { var breadcrumbs []engine.Breadcrumb path := tilesPath collectionID := "" if collection != nil { collectionID = collection.ID path = g.CollectionsPath + "/" + collectionID + tilesPath breadcrumbs = collectionsBreadcrumb breadcrumbs = append(breadcrumbs, []engine.Breadcrumb{ { Name: getCollectionTitle(collectionID, collection.Metadata), Path: collectionsCrumb + collectionID, }, { Name: tilesCrumbTitle, Path: collectionsCrumb + collectionID + tilesPath, }, }...) } else { breadcrumbs = tilesBreadcrumbs } e.RenderTemplatesWithParams(path, data, breadcrumbs, engine.NewTemplateKeyWithName(templatesDir+"tiles.go.json", collectionID), engine.NewTemplateKeyWithName(templatesDir+"tiles.go.html", collectionID)) // Now render metadata about tiles per projection/SRS. for _, projection := range config.AllTileProjections { path = tilesPath + "/" + projection projectionBreadcrumbs := breadcrumbs if collection != nil { projectionBreadcrumbs = append(projectionBreadcrumbs, []engine.Breadcrumb{ { Name: projection, Path: collectionsCrumb + collectionID + path, }, }...) path = g.CollectionsPath + "/" + collectionID + tilesPath + "/" + projection } else { projectionBreadcrumbs = append(projectionBreadcrumbs, []engine.Breadcrumb{ { Name: projection, Path: tilesLocalPath + projection, }, }...) 
} e.RenderTemplatesWithParams(path, data, projectionBreadcrumbs, engine.NewTemplateKeyWithName(templatesDir+tilesLocalPath+projection+".go.json", collectionID), engine.NewTemplateKeyWithName(templatesDir+tilesLocalPath+projection+".go.html", collectionID)) e.RenderTemplatesWithParams(path, data, projectionBreadcrumbs, engine.NewTemplateKeyWithName(templatesDir+tilesLocalPath+projection+".go.tilejson", collectionID)) } } func getCollectionTitle(collectionID string, metadata *config.GeoSpatialCollectionMetadata) string { if metadata != nil && metadata.Title != nil { return *metadata.Title } return collectionID } func readTileMatrixSetLimits(supportedProjections []config.SupportedSrs) map[string]map[int]TileMatrixSetLimits { tileMatrixSetLimits := make(map[string]map[int]TileMatrixSetLimits) for _, supportedSrs := range supportedProjections { tileMatrixSetID := config.AllTileProjections[supportedSrs.Srs] yamlFile, err := os.ReadFile(tmsLimitsDir + tileMatrixSetID + ".yaml") if err != nil { log.Fatalf("unable to read file %s", tileMatrixSetID+".yaml") } tmsLimits := make(map[int]TileMatrixSetLimits) err = yaml.Unmarshal(yamlFile, &tmsLimits) if err != nil { log.Fatalf("cannot unmarshal yaml: %v", err) } // keep only the zoomlevels supported for tm := range tmsLimits { if tm < supportedSrs.ZoomLevelRange.Start || tm > supportedSrs.ZoomLevelRange.End { delete(tmsLimits, tm) } } tileMatrixSetLimits[tileMatrixSetID] = tmsLimits } return tileMatrixSetLimits } func parseTileParams(tileMatrix, tileRow, tileCol string) (int, int, int, error) { tm, tmErr := strconv.Atoi(tileMatrix) tr, trErr := strconv.Atoi(tileRow) tc, tcErr := strconv.Atoi(tileCol) return tm, tr, tc, errors.Join(tmErr, trErr, tcErr) } func checkTileMatrixSetLimits(tileMatrixSetLimits map[string]map[int]TileMatrixSetLimits, tileMatrixSetID string, tileMatrix, tileRow, tileCol int) error { if limits, ok := tileMatrixSetLimits[tileMatrixSetID][tileMatrix]; !ok { // tileMatrix out of supported range return 
fmt.Errorf("tileMatrix %d is out of range", tileMatrix) } else if tileRow < limits.MinRow || tileRow > limits.MaxRow || tileCol < limits.MinCol || tileCol > limits.MaxCol { // tileRow and/or tileCol out of supported range return fmt.Errorf("tileRow/tileCol %d/%d is out of range", tileRow, tileCol) } return nil }