package main
import (
"log"
"net"
"os"
"strconv"
"github.com/PDOK/gokoala/config"
eng "github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc"
"github.com/urfave/cli/v2"
)
// cliFlags defines all command-line flags accepted by GoKoala.
// Each flag can alternatively be supplied through the environment variable listed in EnvVars.
var (
	cliFlags = []cli.Flag{
		&cli.StringFlag{
			Name:     "host",
			Usage:    "bind host for OGC server",
			Value:    "0.0.0.0",
			Required: false,
			EnvVars:  []string{"HOST"},
		},
		&cli.IntFlag{
			Name:     "port",
			Usage:    "bind port for OGC server",
			Value:    8080,
			Required: false,
			EnvVars:  []string{"PORT"},
		},
		&cli.IntFlag{
			Name:     "debug-port",
			Usage:    "bind port for debug server (disabled by default), do not expose this port publicly",
			Value:    -1, // negative value means: debug server disabled
			Required: false,
			EnvVars:  []string{"DEBUG_PORT"},
		},
		&cli.IntFlag{
			Name:     "shutdown-delay",
			Usage:    "delay (in seconds) before initiating graceful shutdown (e.g. useful in k8s to allow ingress controller to update their endpoints list)",
			Value:    0,
			Required: false,
			EnvVars:  []string{"SHUTDOWN_DELAY"},
		},
		&cli.StringFlag{
			Name:     "config-file",
			Usage:    "reference to YAML configuration file",
			Required: true, // the only mandatory flag
			EnvVars:  []string{"CONFIG_FILE"},
		},
		&cli.StringFlag{
			Name:     "openapi-file",
			Usage:    "reference to a (customized) OGC OpenAPI spec for the dynamic parts of your OGC API",
			Required: false,
			EnvVars:  []string{"OPENAPI_FILE"},
		},
		&cli.BoolFlag{
			Name:     "enable-trailing-slash",
			Usage:    "allow API calls to URLs with a trailing slash.",
			Value:    false, // to satisfy https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-48
			Required: false,
			EnvVars:  []string{"ALLOW_TRAILING_SLASH"},
		},
		&cli.BoolFlag{
			Name:     "enable-cors",
			Usage:    "enable Cross-Origin Resource Sharing (CORS) as required by OGC API specs. Disable if you handle CORS elsewhere.",
			Value:    false,
			Required: false,
			EnvVars:  []string{"ENABLE_CORS"},
		},
		&cli.StringFlag{
			Name:     "theme-file",
			Usage:    "reference to a (customized) YAML configuration file for the theme",
			Required: false,
			EnvVars:  []string{"THEME_FILE"},
		},
	}
)
// main bootstraps the GoKoala CLI application: it parses flags/env vars,
// constructs the shared engine and wires up all OGC API building blocks.
func main() {
	app := cli.NewApp()
	app.Name = config.AppName
	app.Usage = "Cloud Native OGC APIs server, written in Go"
	app.Flags = cliFlags
	app.Action = func(c *cli.Context) error {
		log.Printf("%s - %s\n", app.Name, app.Usage)

		bindAddress := net.JoinHostPort(c.String("host"), strconv.Itoa(c.Int("port")))

		// Engine encapsulates shared non-OGC API specific logic
		engine, err := eng.NewEngine(
			c.String("config-file"),
			c.String("theme-file"),
			c.String("openapi-file"),
			c.Bool("enable-trailing-slash"),
			c.Bool("enable-cors"),
		)
		if err != nil {
			return err
		}
		// Each OGC API building block makes use of said Engine
		ogc.SetupBuildingBlocks(engine)
		return engine.Start(bindAddress, c.Int("debug-port"), c.Int("shutdown-delay"))
	}
	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
package config
import (
"encoding/json"
"log"
"sort"
"dario.cat/mergo"
orderedmap "github.com/wk8/go-ordered-map/v2"
"gopkg.in/yaml.v3"
)
// GeoSpatialCollections All collections configured for this OGC API. Can contain a mix of tiles/features/etc.
type GeoSpatialCollections []GeoSpatialCollection

// GeoSpatialCollection a single collection as offered by this OGC API.
// +kubebuilder:object:generate=true
type GeoSpatialCollection struct {
	// Unique ID of the collection
	// +kubebuilder:validation:Pattern=`^[a-z0-9]([a-z0-9_-]*[a-z0-9]+|)$`
	ID string `yaml:"id" validate:"required,lowercase_id" json:"id"`
	// Metadata describing the collection contents
	// +optional
	Metadata *GeoSpatialCollectionMetadata `yaml:"metadata,omitempty" json:"metadata,omitempty"`
	// Links pertaining to this collection (e.g., downloads, documentation)
	// +optional
	Links *CollectionLinks `yaml:"links,omitempty" json:"links,omitempty"`
	// 3D GeoVolumes specific to this collection
	// +optional
	GeoVolumes *CollectionEntry3dGeoVolumes `yaml:",inline" json:",inline"`
	// Tiles specific to this collection
	// +optional
	Tiles *CollectionEntryTiles `yaml:",inline" json:",inline"`
	// Features specific to this collection
	// +optional
	Features *CollectionEntryFeatures `yaml:",inline" json:",inline"`
}
// GeoSpatialCollectionJSON mirror of GeoSpatialCollection with embedded (instead of named)
// entry structs, so encoding/json can inline them. See MarshalJSON below.
type GeoSpatialCollectionJSON struct {
	// Keep this in sync with the GeoSpatialCollection struct!
	ID       string                        `json:"id"`
	Metadata *GeoSpatialCollectionMetadata `json:"metadata,omitempty"`
	Links    *CollectionLinks              `json:"links,omitempty"`
	*CollectionEntry3dGeoVolumes `json:",inline"`
	*CollectionEntryTiles        `json:",inline"`
	*CollectionEntryFeatures     `json:",inline"`
}
// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (c GeoSpatialCollection) MarshalJSON() ([]byte, error) {
	// copy into the mirror struct (with embedded fields) so the entries get inlined
	flattened := GeoSpatialCollectionJSON{
		ID:                          c.ID,
		Metadata:                    c.Metadata,
		Links:                       c.Links,
		CollectionEntry3dGeoVolumes: c.GeoVolumes,
		CollectionEntryTiles:        c.Tiles,
		CollectionEntryFeatures:     c.Features,
	}
	return json.Marshal(flattened)
}
// UnmarshalJSON parses a string to GeoSpatialCollection.
// Delegates to yaml.Unmarshal since the struct carries YAML tags and YAML is a superset of JSON.
func (c *GeoSpatialCollection) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}
// HasDateTime true when collection has temporal support, false otherwise.
func (c *GeoSpatialCollection) HasDateTime() bool {
	if c.Metadata == nil {
		return false
	}
	return c.Metadata.TemporalProperties != nil
}
// HasTableName true when collection uses the given table, false otherwise.
func (c *GeoSpatialCollection) HasTableName(table string) bool {
	if c.Features == nil || c.Features.TableName == nil {
		return false
	}
	return *c.Features.TableName == table
}
// GeoSpatialCollectionMetadata human- and machine-readable metadata describing a collection.
// +kubebuilder:object:generate=true
type GeoSpatialCollectionMetadata struct {
	// Human friendly title of this collection. When no title is specified the collection ID is used.
	// +optional
	Title *string `yaml:"title,omitempty" json:"title,omitempty"`
	// Describes the content of this collection
	Description *string `yaml:"description" json:"description" validate:"required"`
	// Reference to a PNG image to use as thumbnail on the collections.
	// The full path is constructed by appending Resources + Thumbnail.
	// +optional
	Thumbnail *string `yaml:"thumbnail,omitempty" json:"thumbnail,omitempty"`
	// Keywords to make this collection better discoverable
	// +optional
	Keywords []string `yaml:"keywords,omitempty" json:"keywords,omitempty"`
	// Moment in time when the collection was last updated
	//
	// +optional
	// +kubebuilder:validation:Type=string
	// +kubebuilder:validation:Format="date-time"
	LastUpdated *string `yaml:"lastUpdated,omitempty" json:"lastUpdated,omitempty" validate:"omitempty,datetime=2006-01-02T15:04:05Z"`
	// Who updated this collection
	// +optional
	LastUpdatedBy string `yaml:"lastUpdatedBy,omitempty" json:"lastUpdatedBy,omitempty"`
	// Fields in the datasource to be used in temporal queries
	// +optional
	TemporalProperties *TemporalProperties `yaml:"temporalProperties,omitempty" json:"temporalProperties,omitempty" validate:"omitempty,required_with=Extent.Interval"`
	// Extent of the collection, both geospatial and/or temporal
	// +optional
	Extent *Extent `yaml:"extent,omitempty" json:"extent,omitempty"`
	// The CRS identifier which the features are originally stored, meaning no CRS transformations are applied when features are retrieved in this CRS.
	// WGS84 is the default storage CRS.
	//
	// +kubebuilder:default="http://www.opengis.net/def/crs/OGC/1.3/CRS84"
	// +kubebuilder:validation:Pattern=`^http:\/\/www\.opengis\.net\/def\/crs\/.*$`
	// +optional
	StorageCrs *string `yaml:"storageCrs,omitempty" json:"storageCrs,omitempty" default:"http://www.opengis.net/def/crs/OGC/1.3/CRS84" validate:"startswith=http://www.opengis.net/def/crs"`
}
// Extent the geospatial and/or temporal extent of a collection.
// +kubebuilder:object:generate=true
type Extent struct {
	// Projection (SRS/CRS) to be used. When none is provided WGS84 (http://www.opengis.net/def/crs/OGC/1.3/CRS84) is used.
	// +optional
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs,omitempty" json:"srs,omitempty" validate:"omitempty,startswith=EPSG:"`
	// Geospatial extent
	Bbox []string `yaml:"bbox" json:"bbox"`
	// Temporal extent
	// +optional
	// +kubebuilder:validation:MinItems=2
	// +kubebuilder:validation:MaxItems=2
	Interval []string `yaml:"interval,omitempty" json:"interval,omitempty" validate:"omitempty,len=2"`
}
// CollectionLinks links to downloads/documentation pertaining to a collection.
// +kubebuilder:object:generate=true
type CollectionLinks struct {
	// Links to downloads of entire collection. These will be rendered as rel=enclosure links
	// +optional
	Downloads []DownloadLink `yaml:"downloads,omitempty" json:"downloads,omitempty" validate:"dive"`
	// Links to documentation describing the collection. These will be rendered as rel=describedby links
	// NOTE(review): the field for documentation links appears to be missing here — confirm against upstream schema.
}
// DownloadLink a single downloadable artifact belonging to a collection.
// +kubebuilder:object:generate=true
type DownloadLink struct {
	// Name of the provided download
	Name string `yaml:"name" json:"name" validate:"required"`
	// Full URL to the file to be downloaded
	AssetURL *URL `yaml:"assetUrl" json:"assetUrl" validate:"required"`
	// Approximate size of the file to be downloaded
	// +optional
	Size string `yaml:"size,omitempty" json:"size,omitempty"`
	// Media type of the file to be downloaded
	MediaType MediaType `yaml:"mediaType" json:"mediaType" validate:"required"`
}
// HasCollections does this API offer collections with for example features, tiles, 3d tiles, etc.
func (c *Config) HasCollections() bool {
	// use len() instead of a nil-check: the idiomatic emptiness test for slices,
	// and also correct should AllCollections ever return a non-nil empty slice
	return len(c.AllCollections()) > 0
}
// AllCollections get all collections - with for example features, tiles, 3d tiles - offered through this OGC API.
// Results are returned in alphabetic or literal order.
func (c *Config) AllCollections() GeoSpatialCollections {
	var all GeoSpatialCollections
	if geoVolumes := c.OgcAPI.GeoVolumes; geoVolumes != nil {
		all = append(all, geoVolumes.Collections...)
	}
	if tiles := c.OgcAPI.Tiles; tiles != nil {
		all = append(all, tiles.Collections...)
	}
	if features := c.OgcAPI.Features; features != nil {
		all = append(all, features.Collections...)
	}

	// explicit order when configured, alphabetic otherwise
	if len(c.OgcAPICollectionOrder) == 0 {
		sortByAlphabet(all)
	} else {
		sortByLiteralOrder(all, c.OgcAPICollectionOrder)
	}
	return all
}
// FeaturePropertiesByID returns a map of collection IDs to their corresponding FeatureProperties.
// Skips collections that do not have features defined.
func (g GeoSpatialCollections) FeaturePropertiesByID() map[string]*FeatureProperties {
	propertiesByID := make(map[string]*FeatureProperties)
	for _, coll := range g {
		if coll.Features != nil {
			propertiesByID[coll.ID] = coll.Features.FeatureProperties
		}
	}
	return propertiesByID
}
// Unique lists all unique GeoSpatialCollections (no duplicate IDs).
// Don't use in the hot path (creates a map on every invocation).
func (g GeoSpatialCollections) Unique() []GeoSpatialCollection {
	deduplicated := g.toMap()
	unique := make([]GeoSpatialCollection, 0, deduplicated.Len())
	// iterate in insertion order
	for entry := deduplicated.Oldest(); entry != nil; entry = entry.Next() {
		unique = append(unique, entry.Value)
	}
	return unique
}
// ContainsID check if given collection - by ID - exists.
// Don't use in the hot path (creates a map on every invocation).
func (g GeoSpatialCollections) ContainsID(id string) bool {
	_, found := g.toMap().Get(id)
	return found
}
// toMap builds an ordered map of collections keyed by ID, preserving first-seen order.
// Collections sharing the same ID are merged into a single entry.
func (g GeoSpatialCollections) toMap() orderedmap.OrderedMap[string, GeoSpatialCollection] {
	collectionsByID := orderedmap.New[string, GeoSpatialCollection]()
	for _, current := range g {
		existing, ok := collectionsByID.Get(current.ID)
		if ok {
			// duplicate ID: merge 'current' into 'existing' via mergo
			err := mergo.Merge(&existing, current)
			if err != nil {
				// NOTE: log.Fatalf terminates the process on a merge failure
				log.Fatalf("failed to merge 2 collections with the same name '%s': %v", current.ID, err)
			}
			collectionsByID.Set(current.ID, existing)
		} else {
			collectionsByID.Set(current.ID, current)
		}
	}
	return *collectionsByID
}
// sortByAlphabet sorts collections ascending by title (when available) or collection ID.
func sortByAlphabet(collection []GeoSpatialCollection) {
	// prefer to sort by title when available, collection ID otherwise
	displayName := func(c GeoSpatialCollection) string {
		if c.Metadata != nil && c.Metadata.Title != nil {
			return *c.Metadata.Title
		}
		return c.ID
	}
	sort.Slice(collection, func(i, j int) bool {
		return displayName(collection[i]) < displayName(collection[j])
	})
}
// sortByLiteralOrder sorts collections according to the explicit order given in literalOrder.
// Collections not present in literalOrder get the map zero value (0) and thus sort to the front.
func sortByLiteralOrder(collections []GeoSpatialCollection, literalOrder []string) {
	rank := make(map[string]int, len(literalOrder))
	for position, id := range literalOrder {
		rank[id] = position
	}
	sort.Slice(collections, func(i, j int) bool {
		// sort according to the explicit/literal order specified in OgcAPICollectionOrder
		return rank[collections[i].ID] < rank[collections[j].ID]
	})
}
//go:generate ../hack/generate-deepcopy.sh
package config
import (
"errors"
"fmt"
"os"
"github.com/creasty/defaults"
"github.com/go-playground/validator/v10"
"golang.org/x/text/language"
"gopkg.in/yaml.v3"
)
const (
	// AppName name of this application, shown in logs and UI.
	AppName = "GoKoala"
	// CookieMaxAge maximum cookie lifetime in seconds (24 hours).
	CookieMaxAge = 60 * 60 * 24
)
// Config the root of the GoKoala YAML configuration.
// +kubebuilder:object:generate=true
type Config struct {
	// Version of the API. When releasing a new version which contains backwards-incompatible changes, a new major version must be released.
	Version string `yaml:"version" json:"version" validate:"required,semver"`
	// Human friendly title of the API. Don't include "OGC API" in the title, this is added automatically.
	Title string `yaml:"title" json:"title" validate:"required"`
	// Shortened title / abbreviation describing the API.
	ServiceIdentifier string `yaml:"serviceIdentifier" json:"serviceIdentifier" validate:"required"`
	// Human friendly description of the API and dataset.
	Abstract string `yaml:"abstract" json:"abstract" validate:"required"`
	// Licensing terms that apply to this API and dataset
	License License `yaml:"license" json:"license" validate:"required"`
	// The base URL - that's the part until the OGC API landing page - under which this API is served
	BaseURL URL `yaml:"baseUrl" json:"baseUrl" validate:"required"`
	// Optional reference to a catalog/portal/registry that lists all datasets, not just this one
	// +optional
	DatasetCatalogURL URL `yaml:"datasetCatalogUrl,omitempty" json:"datasetCatalogUrl,omitempty"`
	// The languages/translations to offer, valid options are Dutch (nl) and English (en). Dutch is the default.
	// +optional
	AvailableLanguages []Language `yaml:"availableLanguages,omitempty" json:"availableLanguages,omitempty"`
	// Define which OGC API building blocks this API supports
	OgcAPI OgcAPI `yaml:"ogcApi" json:"ogcApi" validate:"required"`
	// Order in which collections (containing features, tiles, 3d tiles, etc.) should be returned.
	// When not specified collections are returned in alphabetic order.
	// +optional
	OgcAPICollectionOrder []string `yaml:"collectionOrder,omitempty" json:"collectionOrder,omitempty"`
	// Reference to a PNG image to use as thumbnail on the landing page.
	// The full path is constructed by appending Resources + Thumbnail.
	// +optional
	Thumbnail *string `yaml:"thumbnail,omitempty" json:"thumbnail,omitempty" validate:"omitempty"`
	// Keywords to make this API better discoverable
	// +optional
	Keywords []string `yaml:"keywords,omitempty" json:"keywords,omitempty"`
	// Moment in time when the dataset was last updated
	// +optional
	// +kubebuilder:validation:Type=string
	// +kubebuilder:validation:Format="date-time"
	LastUpdated *string `yaml:"lastUpdated,omitempty" json:"lastUpdated,omitempty" validate:"omitempty,datetime=2006-01-02T15:04:05Z"`
	// Who updated the dataset
	// +optional
	LastUpdatedBy string `yaml:"lastUpdatedBy,omitempty" json:"lastUpdatedBy,omitempty"`
	// Available support channels
	// +optional
	Support *Support `yaml:"support,omitempty" json:"support,omitempty"`
	// Metadata links
	// +optional
	MetadataLinks []MetadataLink `yaml:"metadataLinks,omitempty" json:"metadataLinks,omitempty"`
	// Key/value pairs to add extra information to the landing page
	// +optional
	DatasetDetails []DatasetDetail `yaml:"datasetDetails,omitempty" json:"datasetDetails,omitempty"`
	// Location where resources (e.g. thumbnails) specific to the given dataset are hosted
	// +optional
	Resources *Resources `yaml:"resources,omitempty" json:"resources,omitempty"`
}
// NewConfig read YAML config file, required to start GoKoala.
// Expands environment variables in the file, unmarshals it (which also applies
// defaults and runs validation, see Config.UnmarshalYAML) and finally validates
// any local filesystem paths referenced by the config.
func NewConfig(configFile string) (*Config, error) {
	yamlData, err := os.ReadFile(configFile)
	if err != nil {
		// fix: separator between message and wrapped error was missing
		return nil, fmt.Errorf("failed to read config file: %w", err)
	}

	// expand environment variables in the raw YAML before unmarshalling
	yamlData = []byte(os.ExpandEnv(string(yamlData)))

	var config *Config
	err = yaml.Unmarshal(yamlData, &config)
	if err != nil {
		return nil, fmt.Errorf("failed to unmarshal config file, error: %w", err)
	}
	// local path checks are done here (not while unmarshalling) since unmarshalling
	// may happen on another machine than the one running GoKoala
	err = validateLocalPaths(config)
	if err != nil {
		return nil, fmt.Errorf("validation error in config file, error: %w", err)
	}
	return config, nil
}
// UnmarshalYAML hooks into unmarshalling to set defaults and validate config.
func (c *Config) UnmarshalYAML(unmarshal func(any) error) error {
	// the type alias drops the UnmarshalYAML method, preventing infinite recursion
	type cfg Config
	if err := unmarshal((*cfg)(c)); err != nil {
		return err
	}

	// init config: apply 'default' tags and custom default logic
	if err := setDefaults(c); err != nil {
		return err
	}
	// then run struct-tag and custom validations
	if err := validate(c); err != nil {
		return err
	}
	return nil
}
// UnmarshalJSON parses JSON to Config. Delegates to yaml.Unmarshal since the
// structs carry YAML tags and YAML is a superset of JSON.
func (c *Config) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}

// CookieMaxAge returns the maximum cookie lifetime in seconds.
func (c *Config) CookieMaxAge() int {
	return CookieMaxAge
}
// OgcAPI determines which OGC API building blocks this API supports.
// +kubebuilder:object:generate=true
type OgcAPI struct {
	// Enable when this API should offer OGC API 3D GeoVolumes. This includes OGC 3D Tiles.
	// +optional
	GeoVolumes *OgcAPI3dGeoVolumes `yaml:"3dgeovolumes,omitempty" json:"3dgeovolumes,omitempty"`
	// Enable when this API should offer OGC API Tiles. This also requires OGC API Styles.
	// +optional
	Tiles *OgcAPITiles `yaml:"tiles,omitempty" json:"tiles,omitempty" validate:"required_with=Styles"`
	// Enable when this API should offer OGC API Styles.
	// +optional
	Styles *OgcAPIStyles `yaml:"styles,omitempty" json:"styles,omitempty"`
	// Enable when this API should offer OGC API Features.
	// +optional
	Features *OgcAPIFeatures `yaml:"features,omitempty" json:"features,omitempty"`
	// Enable when this API should offer OGC API Processes.
	// +optional
	Processes *OgcAPIProcesses `yaml:"processes,omitempty" json:"processes,omitempty"`
}
// Support describes where users can go with questions about this API.
// +kubebuilder:object:generate=true
type Support struct {
	// Name of the support organization
	Name string `yaml:"name" json:"name" validate:"required"`
	// URL to external support webpage
	// +kubebuilder:validation:Type=string
	URL URL `yaml:"url" json:"url" validate:"required"`
	// Email for support questions
	// +optional
	Email string `yaml:"email,omitempty" json:"email,omitempty" validate:"omitempty,email"`
}
// MetadataLink a link to an external metadata detail page.
// +kubebuilder:object:generate=true
type MetadataLink struct {
	// Name of the metadata collection/site/organization
	Name string `yaml:"name" json:"name" validate:"required"`
	// Which category of the API this metadata concerns. E.g. dataset (in general), tiles or features
	// +kubebuilder:default="dataset"
	Category string `yaml:"category" json:"category" validate:"required" default:"dataset"`
	// URL to external metadata detail page
	// +kubebuilder:validation:Type=string
	URL URL `yaml:"url" json:"url" validate:"required"`
}
// DatasetDetail an arbitrary name/value pair shown on the landing page.
// +kubebuilder:object:generate=true
type DatasetDetail struct {
	// Arbitrary name to add extra information to the landing page
	Name string `yaml:"name" json:"name"`
	// Arbitrary value associated with the given name
	Value string `yaml:"value" json:"value"`
}
// Resources where dataset-specific resources (e.g. thumbnails) are hosted: either remote (URL) or local (Directory).
// At least one of the two must be set (see required_without validations).
// +kubebuilder:object:generate=true
type Resources struct {
	// Location where resources (e.g. thumbnails) specific to the given dataset are hosted. This is optional if Directory is set
	// +optional
	URL *URL `yaml:"url,omitempty" json:"url,omitempty" validate:"required_without=Directory,omitempty"`
	// Location where resources (e.g. thumbnails) specific to the given dataset are hosted. This is optional if URL is set
	// +optional
	Directory *string `yaml:"directory,omitempty" json:"directory,omitempty" validate:"required_without=URL,omitempty,dirpath|filepath"`
}
// License the license that applies to this API and dataset.
// +kubebuilder:object:generate=true
type License struct {
	// Name of the license, e.g. MIT, CC0, etc
	Name string `yaml:"name" json:"name" validate:"required"`
	// URL to license text on the web
	URL URL `yaml:"url" json:"url" validate:"required"`
}
// setDefaults applies 'default' struct tags plus GoKoala-specific default logic to the config.
func setDefaults(config *Config) error {
	// process 'default' tags
	if err := defaults.Set(config); err != nil {
		return fmt.Errorf("failed to set default configuration: %w", err)
	}

	// custom default logic
	if len(config.AvailableLanguages) == 0 {
		// default to Dutch only
		config.AvailableLanguages = []Language{{language.Dutch}}
	}
	if tiles := config.OgcAPI.Tiles; tiles != nil {
		tiles.Defaults()
	}
	return nil
}
// validate runs struct-tag based validation plus custom cross-field validations on the config.
func validate(config *Config) error {
	// process 'validate' tags
	v := validator.New()
	if err := v.RegisterValidation(lowercaseID, LowercaseID); err != nil {
		return err
	}
	if err := v.Struct(config); err != nil {
		return formatValidationErr(err)
	}

	// custom validations; errors.Join yields nil when all entries are nil
	var errs []error
	if config.OgcAPI.Features != nil {
		errs = append(errs, validateFeatureCollections(config.OgcAPI.Features.Collections))
	}
	if config.OgcAPI.Tiles != nil {
		errs = append(errs, validateTileProjections(config.OgcAPI.Tiles))
	}
	return errors.Join(errs...)
}
// validateLocalPaths validates the existence of local paths.
// Not suitable for general validation while unmarshalling.
// Because that could happen on another machine.
func validateLocalPaths(config *Config) error {
	// Could use a deep dive and reflection.
	// But the settings with a path are not recursive and relatively limited in numbers.
	// GeoPackageCloudCache.Path is not verified. It will be created anyway in cloud_sqlite_vfs.createCacheDir during startup time.
	if config.Resources != nil {
		if config.Resources.Directory != nil && *config.Resources.Directory != "" &&
			!isExistingLocalDir(*config.Resources.Directory) {
			return errors.New("resources.directory should be an existing directory: " + *config.Resources.Directory)
		}
	} else if err := validateConfiguredResources(config); err != nil {
		// no 'resources' configured at all: make sure nothing in the config references resource files
		return err
	}
	if config.OgcAPI.Styles != nil && !isExistingLocalDir(config.OgcAPI.Styles.StylesDir) {
		return errors.New("stylesDir should be an existing directory: " + config.OgcAPI.Styles.StylesDir)
	}
	return nil
}
// validateConfiguredResources makes sure resources dir/url is configured when the config
// contains references to files like thumbnails or legends.
func validateConfiguredResources(config *Config) error {
	if config.Thumbnail != nil {
		return errors.New("thumbnail cannot be used when 'resources' isn't specified")
	}
	for _, coll := range config.AllCollections() {
		if coll.Metadata == nil || coll.Metadata.Thumbnail == nil {
			continue
		}
		return fmt.Errorf("thumbnail for collection %s cannot be used when 'resources' isn't specified", coll.ID)
	}
	if config.OgcAPI.Styles == nil {
		return nil
	}
	for _, style := range config.OgcAPI.Styles.SupportedStyles {
		if style.Thumbnail != nil {
			return fmt.Errorf("thumbnail for style %s option cannot be used when 'resources' isn't specified", style.ID)
		}
		if style.Legend != nil {
			return fmt.Errorf("legend for style %s option cannot be used when 'resources' isn't specified", style.ID)
		}
	}
	return nil
}
// isExistingLocalDir reports whether path points to an existing directory on the local filesystem.
func isExistingLocalDir(path string) bool {
	info, statErr := os.Stat(path)
	if statErr != nil {
		return false
	}
	return info.IsDir()
}
// formatValidationErr turns validator errors into a human-readable error message.
func formatValidationErr(err error) error {
	// fix: use errors.As to detect the error type; errors.Is compared against a
	// typed-nil pointer and therefore never matched
	var ive *validator.InvalidValidationError
	if errors.As(err, &ive) {
		// programming error (e.g. invalid value passed to Struct), not a config problem
		return fmt.Errorf("failed to validate config: %w", err)
	}
	var errMessages []string
	var valErrs validator.ValidationErrors
	if errors.As(err, &valErrs) {
		// list each offending field with its value
		for _, valErr := range valErrs {
			errMsg := fmt.Sprintf("field: '%s', value: '%v', error: %v\n", valErr.Field(), valErr.Value(), valErr.Error())
			errMessages = append(errMessages, errMsg)
		}
	}
	return fmt.Errorf("invalid config provided:\n%v", errMessages)
}
package config
import (
"encoding/json"
"time"
"gopkg.in/yaml.v3"
)
// Duration Custom time.Duration compatible with YAML and JSON (un)marshalling and kubebuilder.
// (Already supported in yaml/v3 but not encoding/json.)
//
// +kubebuilder:validation:Type=string
// +kubebuilder:validation:Format=duration
type Duration struct {
	// embedded so all time.Duration methods are available on Duration
	time.Duration
}
// MarshalJSON turn duration tag into JSON
// Value instead of pointer receiver because only that way it can be used for both.
func (d Duration) MarshalJSON() ([]byte, error) {
	// serialize as a string, e.g. "1h2m3s"
	return json.Marshal(d.String())
}

// UnmarshalJSON turn JSON into Duration.
func (d *Duration) UnmarshalJSON(b []byte) error {
	// delegate to yaml.v3 (presumably it handles parsing into time.Duration — confirm)
	return yaml.Unmarshal(b, &d.Duration)
}

// MarshalYAML turn duration tag into YAML
// Value instead of pointer receiver because only that way it can be used for both.
func (d Duration) MarshalYAML() (any, error) {
	return d.Duration, nil
}

// UnmarshalYAML parses YAML into Duration.
func (d *Duration) UnmarshalYAML(unmarshal func(any) error) error {
	return unmarshal(&d.Duration)
}
// DeepCopyInto copy the receiver, write into out. in must be non-nil.
func (d *Duration) DeepCopyInto(out *Duration) {
	// guard against a nil destination (no-op in that case)
	if out == nil {
		return
	}
	*out = *d
}
// DeepCopy copy the receiver, create a new Duration.
func (d *Duration) DeepCopy() *Duration {
	if d == nil {
		return nil
	}
	clone := new(Duration)
	d.DeepCopyInto(clone)
	return clone
}
package config
import (
"encoding/json"
"golang.org/x/text/language"
)
// Language represents a BCP 47 language tag.
// +kubebuilder:validation:Type=string
type Language struct {
	// embedded so all language.Tag methods are available on Language
	language.Tag
}
// MarshalJSON turn language tag into JSON
// Value instead of pointer receiver because only that way it can be used for both.
func (l Language) MarshalJSON() ([]byte, error) {
	// serialize as the BCP 47 string form, e.g. "nl"
	return json.Marshal(l.String())
}
// UnmarshalJSON turn JSON into Language.
func (l *Language) UnmarshalJSON(b []byte) error {
	var tagValue string
	if err := json.Unmarshal(b, &tagValue); err != nil {
		return err
	}
	l.Tag = language.Make(tagValue)
	return nil
}
// DeepCopyInto copy the receiver, write into out. in must be non-nil.
func (l *Language) DeepCopyInto(out *Language) {
	*out = *l
}
// DeepCopy copy the receiver, create a new Language.
func (l *Language) DeepCopy() *Language {
	if l == nil {
		return nil
	}
	clone := new(Language)
	l.DeepCopyInto(clone)
	return clone
}
package config
import (
"log"
"regexp"
"github.com/go-playground/validator/v10"
)
var (
	// lowercaseIDRegexp matches non-empty IDs consisting of lowercase letters and digits,
	// with optional hyphens/underscores in the middle (must start and end alphanumeric).
	// Fix: the character classes accidentally contained an escaped double quote (\"),
	// which allowed '"' in IDs — contradicting the documented LowercaseID contract.
	lowercaseIDRegexp = regexp.MustCompile(`^[a-z0-9]([a-z0-9_-]*[a-z0-9]+|)$`)
)
const (
	// lowercaseID name under which the LowercaseID validation is registered with the validator.
	lowercaseID = "lowercase_id"
)
// LowercaseID is the validation function for validating if the current field
// is not empty and contains only lowercase chars, numbers, hyphens or underscores.
// It's similar to RFC 1035 DNS label but not the same.
func LowercaseID(fl validator.FieldLevel) bool {
	id := fl.Field().String()
	if lowercaseIDRegexp.MatchString(id) {
		return true
	}
	// log the offending value to ease debugging config mistakes
	log.Printf("Invalid ID %s", id)
	return false
}
package config
import (
"encoding/json"
"github.com/elnormous/contenttype"
)
// MediaType represents a IANA media type as described in RFC 6838. Media types were formerly known as MIME types.
// +kubebuilder:validation:Type=string
type MediaType struct {
	// embedded so all contenttype.MediaType methods are available on MediaType
	contenttype.MediaType
}
// MarshalJSON turn MediaType into JSON
// Value instead of pointer receiver because only that way it can be used for both.
func (m MediaType) MarshalJSON() ([]byte, error) {
	// serialize as string form, e.g. "application/json"
	return json.Marshal(m.String())
}
// UnmarshalJSON turn JSON into MediaType.
func (m *MediaType) UnmarshalJSON(b []byte) error {
	var raw string
	if err := json.Unmarshal(b, &raw); err != nil {
		return err
	}
	parsed, parseErr := contenttype.ParseMediaType(raw)
	if parseErr != nil {
		return parseErr
	}
	m.MediaType = parsed
	return nil
}
// MarshalYAML turns MediaType into YAML.
// Value instead of pointer receiver because only that way it can be used for both.
func (m MediaType) MarshalYAML() (any, error) {
	// serialize as string form, e.g. "application/json"
	return m.String(), nil
}
// UnmarshalYAML parses a string to MediaType.
func (m *MediaType) UnmarshalYAML(unmarshal func(any) error) error {
	var raw string
	if err := unmarshal(&raw); err != nil {
		return err
	}
	parsed, parseErr := contenttype.ParseMediaType(raw)
	if parseErr != nil {
		return parseErr
	}
	m.MediaType = parsed
	return nil
}
// DeepCopyInto copy the receiver, write into out. in must be non-nil.
func (m *MediaType) DeepCopyInto(out *MediaType) {
	*out = *m
}
// DeepCopy copy the receiver, create a new MediaType.
func (m *MediaType) DeepCopy() *MediaType {
	if m == nil {
		return nil
	}
	clone := new(MediaType)
	m.DeepCopyInto(clone)
	return clone
}
package config
// OgcAPI3dGeoVolumes config for the OGC API 3D GeoVolumes building block.
// +kubebuilder:object:generate=true
type OgcAPI3dGeoVolumes struct {
	// Reference to the server (or object storage) hosting the 3D Tiles
	TileServer URL `yaml:"tileServer" json:"tileServer" validate:"required"`
	// Collections to be served as 3D GeoVolumes
	Collections GeoSpatialCollections `yaml:"collections" json:"collections"`
	// Whether JSON responses will be validated against the OpenAPI spec
	// since it has a significant performance impact when dealing with large JSON payloads.
	//
	// +kubebuilder:default=true
	// +optional
	ValidateResponses *bool `yaml:"validateResponses,omitempty" json:"validateResponses,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49
}
// CollectionEntry3dGeoVolumes 3D GeoVolumes settings for a single collection.
// +kubebuilder:object:generate=true
type CollectionEntry3dGeoVolumes struct {
	// Optional basepath to 3D tiles on the tileserver. Defaults to the collection ID.
	// +optional
	TileServerPath *string `yaml:"tileServerPath,omitempty" json:"tileServerPath,omitempty"`
	// URI template for individual 3D tiles.
	// +optional
	URITemplate3dTiles *string `yaml:"uriTemplate3dTiles,omitempty" json:"uriTemplate3dTiles,omitempty" validate:"required_without_all=URITemplateDTM"`
	// Optional URI template for subtrees, only required when "implicit tiling" extension is used.
	// +optional
	URITemplateImplicitTilingSubtree *string `yaml:"uriTemplateImplicitTilingSubtree,omitempty" json:"uriTemplateImplicitTilingSubtree,omitempty"`
	// URI template for digital terrain model (DTM) in Quantized Mesh format, REQUIRED when you want to serve a DTM.
	// +optional
	URITemplateDTM *string `yaml:"uriTemplateDTM,omitempty" json:"uriTemplateDTM,omitempty" validate:"required_without_all=URITemplate3dTiles"` //nolint:tagliatelle // grandfathered
	// Optional URL to 3D viewer to visualize the given collection of 3D Tiles.
	// +optional
	URL3DViewer *URL `yaml:"3dViewerUrl,omitempty" json:"3dViewerUrl,omitempty"`
}
// Has3DTiles true when this collection serves 3D Tiles (a 3D tiles URI template is configured).
func (gv *CollectionEntry3dGeoVolumes) Has3DTiles() bool {
	return gv.URITemplate3dTiles != nil
}

// HasDTM true when this collection serves a digital terrain model (a DTM URI template is configured).
func (gv *CollectionEntry3dGeoVolumes) HasDTM() bool {
	return gv.URITemplateDTM != nil
}
package config
import (
"encoding/json"
"fmt"
"slices"
"github.com/PDOK/gokoala/internal/engine/util"
"gopkg.in/yaml.v3"
)
// OgcAPIFeatures config for the OGC API Features building block.
// +kubebuilder:object:generate=true
type OgcAPIFeatures struct {
	// Basemap to use in embedded viewer on the HTML pages.
	// +kubebuilder:default="OSM"
	// +kubebuilder:validation:Enum=OSM;BRT
	// +optional
	Basemap string `yaml:"basemap,omitempty" json:"basemap,omitempty" default:"OSM" validate:"oneof=OSM BRT"`
	// Collections to be served as features through this API
	Collections GeoSpatialCollections `yaml:"collections" json:"collections" validate:"required,dive"`
	// Limits the number of features to retrieve with a single call
	// +optional
	Limit Limit `yaml:"limit,omitempty" json:"limit,omitempty"`
	// One or more datasources to get the features from (geopackages, postgres, etc).
	// Optional since you can also define datasources at the collection level
	// +optional
	Datasources *Datasources `yaml:"datasources,omitempty" json:"datasources,omitempty"`
	// Whether GeoJSON/JSON-FG responses will be validated against the OpenAPI spec
	// since it has a significant performance impact when dealing with large JSON payloads.
	//
	// +kubebuilder:default=true
	// +optional
	ValidateResponses *bool `yaml:"validateResponses,omitempty" json:"validateResponses,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49
	// Maximum number of decimals allowed in geometry coordinates. When not specified (default value of 0) no limit is enforced.
	// +optional
	// +kubebuilder:validation:Minimum=0
	MaxDecimals int `yaml:"maxDecimals,omitempty" json:"maxDecimals,omitempty" default:"0"`
	// Force timestamps in features to the UTC timezone.
	//
	// +kubebuilder:default=false
	// +optional
	ForceUTC bool `yaml:"forceUtc,omitempty" json:"forceUtc,omitempty"`
}
// CollectionsSRS returns the unique SRS/CRS identifiers across all collections
// (convenience wrapper around CollectionSRS with an empty collection ID).
func (oaf *OgcAPIFeatures) CollectionsSRS() []string {
	return oaf.CollectionSRS("")
}
// CollectionSRS returns all unique SRSs (coordinate reference systems) applicable to the
// given collection: those of its own datasources plus those of the top-level datasources.
// When collectionID is empty the SRSs of ALL collections are returned.
// The result is sorted alphabetically.
func (oaf *OgcAPIFeatures) CollectionSRS(collectionID string) []string {
	uniqueSRSs := make(map[string]struct{})
	// top-level datasources apply to every collection
	if oaf.Datasources != nil {
		for _, d := range oaf.Datasources.OnTheFly {
			for _, srs := range d.SupportedSrs {
				uniqueSRSs[srs.Srs] = struct{}{}
			}
		}
		for _, d := range oaf.Datasources.Additional {
			uniqueSRSs[d.Srs] = struct{}{}
		}
	}
	for _, coll := range oaf.Collections {
		if (coll.ID == collectionID || collectionID == "") && coll.Features != nil && coll.Features.Datasources != nil {
			for _, d := range coll.Features.Datasources.OnTheFly {
				for _, srs := range d.SupportedSrs {
					uniqueSRSs[srs.Srs] = struct{}{}
				}
			}
			for _, d := range coll.Features.Datasources.Additional {
				uniqueSRSs[d.Srs] = struct{}{}
			}
			// Only stop early when looking up one specific collection. Previously this
			// break was unconditional, so with an empty collectionID (= "all collections",
			// see CollectionsSRS) only the first matching collection contributed SRSs.
			if collectionID != "" {
				break
			}
		}
	}
	result := util.Keys(uniqueSRSs)
	slices.Sort(result)
	return result
}
// CollectionEntryFeatures configures serving a single collection as OGC API Features.
// +kubebuilder:object:generate=true
type CollectionEntryFeatures struct {
	// Optional way to explicitly map a collection ID to the underlying table in the datasource.
	// +optional
	TableName *string `yaml:"tableName,omitempty" json:"tableName,omitempty"`
	// Optional collection-specific datasources. Mutually exclusive with top-level defined datasources.
	// +optional
	Datasources *Datasources `yaml:"datasources,omitempty" json:"datasources,omitempty"`
	// Filters available for this collection
	// +optional
	Filters FeatureFilters `yaml:"filters,omitempty" json:"filters,omitempty"`
	// Optional way to exclude feature properties and/or determine the ordering of properties in the response.
	// Embedded (inline) so its fields appear directly on this struct in YAML/JSON.
	// +optional
	*FeatureProperties `yaml:",inline" json:",inline"`
	// Downloads available for this collection through map sheets. Note that 'map sheets' refer to a map
	// divided in rectangle areas that can be downloaded individually.
	// +optional
	MapSheetDownloads *MapSheetDownloads `yaml:"mapSheetDownloads,omitempty" json:"mapSheetDownloads,omitempty"`
	// Configuration specifically related to HTML/Web representation
	// +optional
	Web *WebConfig `yaml:"web,omitempty" json:"web,omitempty"`
}
// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (c CollectionEntryFeatures) MarshalJSON() ([]byte, error) {
	// Marshal via a local alias type. The alias has the same fields but no MarshalJSON
	// method, which prevents json.Marshal from recursively invoking this method again
	// (the previous `json.Marshal(c)` recursed until stack overflow).
	type collectionEntryFeatures CollectionEntryFeatures
	return json.Marshal(collectionEntryFeatures(c))
}
// UnmarshalJSON parses a string to CollectionEntryFeatures.
// Delegates to the YAML parser (valid JSON is also valid YAML) so the
// yaml ",inline" tags are honored for JSON input as well.
func (c *CollectionEntryFeatures) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}
// FeatureFilters filters that can be applied when querying features in a collection.
// +kubebuilder:object:generate=true
type FeatureFilters struct {
	// OAF Part 1: filter on feature properties
	// https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameters_for_filtering_on_feature_properties
	//
	// +optional
	Properties []PropertyFilter `yaml:"properties,omitempty" json:"properties,omitempty" validate:"dive"`
	// OAF Part 3: add config for complex/CQL filters here
	// <placeholder>
}
// FeatureProperties controls which feature properties are returned and in what order.
// +kubebuilder:object:generate=true
type FeatureProperties struct {
	// Properties/fields of features in this collection. This setting controls two things:
	//
	// A) allows one to exclude certain properties, when propertiesExcludeUnknown=true
	// B) allows one to sort the properties in the given order, when propertiesInSpecificOrder=true
	//
	// When not set, all available properties are returned in API responses, in alphabetical order.
	// +optional
	Properties []string `yaml:"properties,omitempty" json:"properties,omitempty"`
	// When true properties not listed under 'properties' are excluded from API responses. When false
	// unlisted properties are also included in API responses.
	// +optional
	// +kubebuilder:default=false
	PropertiesExcludeUnknown bool `yaml:"propertiesExcludeUnknown,omitempty" json:"propertiesExcludeUnknown,omitempty" default:"false"`
	// When true properties are returned according to the ordering specified under 'properties'. When false
	// properties are returned in alphabetical order.
	// +optional
	// +kubebuilder:default=false
	PropertiesInSpecificOrder bool `yaml:"propertiesInSpecificOrder,omitempty" json:"propertiesInSpecificOrder,omitempty" default:"false"`
}
// MapSheetDownloads config for offering downloads of a collection through map sheets.
// +kubebuilder:object:generate=true
type MapSheetDownloads struct {
	// Properties that provide the download details per map sheet. Note that 'map sheets' refer to a map
	// divided in rectangle areas that can be downloaded individually.
	Properties MapSheetDownloadProperties `yaml:"properties" json:"properties" validate:"required"`
}
// MapSheetDownloadProperties maps feature properties/columns to the download details of a map sheet.
// +kubebuilder:object:generate=true
type MapSheetDownloadProperties struct {
	// Property/column containing file download URL
	AssetURL string `yaml:"assetUrl" json:"assetUrl" validate:"required"`
	// Property/column containing file size
	Size string `yaml:"size" json:"size" validate:"required"`
	// The actual media type (not a property/column) of the download, like application/zip.
	MediaType MediaType `yaml:"mediaType" json:"mediaType" validate:"required"`
	// Property/column containing the map sheet identifier
	MapSheetID string `yaml:"mapSheetId" json:"mapSheetId" validate:"required"`
}
// WebConfig configuration specifically related to the HTML/Web representation of features.
// +kubebuilder:object:generate=true
type WebConfig struct {
	// Viewer config for displaying multiple features on a map
	// +optional
	FeaturesViewer *FeaturesViewer `yaml:"featuresViewer,omitempty" json:"featuresViewer,omitempty"`
	// Viewer config for displaying a single feature on a map
	// +optional
	FeatureViewer *FeaturesViewer `yaml:"featureViewer,omitempty" json:"featureViewer,omitempty"`
	// Whether URLs (to external resources) in the HTML representation of features should be rendered as hyperlinks.
	// +optional
	URLAsHyperlink bool `yaml:"urlAsHyperlink,omitempty" json:"urlAsHyperlink,omitempty"`
}
// FeaturesViewer config for the map viewer embedded in the HTML representation of features.
// +kubebuilder:object:generate=true
type FeaturesViewer struct {
	// Maximum initial zoom level of the viewer when rendering features, specified by scale denominator.
	// Defaults to 1000 (= scale 1:1000).
	// +optional
	MinScale int `yaml:"minScale,omitempty" json:"minScale,omitempty" validate:"gt=0" default:"1000"`
	// Minimal initial zoom level of the viewer when rendering features, specified by scale denominator
	// (not set by default).
	// +optional
	MaxScale *int `yaml:"maxScale,omitempty" json:"maxScale,omitempty" validate:"omitempty,gt=0,gtefield=MinScale"`
}
// Limit bounds the number of features returned by a single API call.
// +kubebuilder:object:generate=true
type Limit struct {
	// Number of features to return by default.
	// +kubebuilder:default=10
	// +kubebuilder:validation:Minimum=2
	// +optional
	Default int `yaml:"default,omitempty" json:"default,omitempty" validate:"gt=1" default:"10"`
	// Max number of features to return. Should be larger than 100 since the HTML interface always offers a 100 limit option.
	// +kubebuilder:default=1000
	// +kubebuilder:validation:Minimum=100
	// +optional
	Max int `yaml:"max,omitempty" json:"max,omitempty" validate:"gte=100" default:"1000"`
}
// PropertyFilter configures filtering of features on a single property/column.
// +kubebuilder:object:generate=true
type PropertyFilter struct {
	// Needs to match with a column name in the feature table (in the configured datasource)
	Name string `yaml:"name" json:"name" validate:"required"`
	// Explains this property filter
	// +kubebuilder:default="Filter features by this property"
	// +optional
	Description string `yaml:"description,omitempty" json:"description,omitempty" default:"Filter features by this property"`
	// When true the property/column in the feature table needs to be indexed. Initialization will fail
	// when no index is present, when false the index check is skipped. For large tables an index is recommended!
	//
	// +kubebuilder:default=true
	// +optional
	IndexRequired *bool `yaml:"indexRequired,omitempty" json:"indexRequired,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49
	// Static list of allowed values to be used as input for this property filter. Will be enforced by OpenAPI spec.
	// Mutually exclusive with DeriveAllowedValuesFromDatasource.
	// +optional
	AllowedValues []string `yaml:"allowedValues,omitempty" json:"allowedValues,omitempty"`
	// Derive a list of allowed values for this property filter from the corresponding column in the datastore.
	// Use with caution since it can increase startup time when used on large tables. Make sure an index in present.
	//
	// +kubebuilder:default=false
	// +optional
	DeriveAllowedValuesFromDatasource *bool `yaml:"deriveAllowedValuesFromDatasource,omitempty" json:"deriveAllowedValuesFromDatasource,omitempty" default:"false"`
}
// TemporalProperties datasource fields to use in temporal queries on a collection.
// +kubebuilder:object:generate=true
type TemporalProperties struct {
	// Name of field in datasource to be used in temporal queries as the start date
	StartDate string `yaml:"startDate" json:"startDate" validate:"required"`
	// Name of field in datasource to be used in temporal queries as the end date
	EndDate string `yaml:"endDate" json:"endDate" validate:"required"`
}
// validateFeatureCollections validates cross-field rules that can't be expressed with
// struct tags: collections with temporal properties require an extent interval, and a
// property filter can't combine a static allowed-values list with deriving those values
// from the datasource. Returns a single error listing all violations, or nil when valid.
func validateFeatureCollections(collections GeoSpatialCollections) error {
	var errMessages []string
	for _, collection := range collections {
		if collection.Metadata != nil && collection.Metadata.TemporalProperties != nil &&
			(collection.Metadata.Extent == nil || collection.Metadata.Extent.Interval == nil) {
			errMessages = append(errMessages, fmt.Sprintf("validation failed for collection '%s'; "+
				"field 'Extent.Interval' is required with field 'TemporalProperties'\n", collection.ID))
		}
		if collection.Features != nil && collection.Features.Filters.Properties != nil {
			for _, pf := range collection.Features.Filters.Properties {
				// nil check required: DeriveAllowedValuesFromDatasource is a *bool and may
				// still be nil when defaults haven't been applied yet (previously this
				// dereferenced the pointer unconditionally and could panic).
				if pf.AllowedValues != nil && pf.DeriveAllowedValuesFromDatasource != nil && *pf.DeriveAllowedValuesFromDatasource {
					errMessages = append(errMessages, fmt.Sprintf("validation failed for property filter '%s'; "+
						"field 'AllowedValues' and field 'DeriveAllowedValuesFromDatasource' are mutually exclusive\n", pf.Name))
				}
			}
		}
	}
	if len(errMessages) > 0 {
		return fmt.Errorf("invalid config provided:\n%v", errMessages)
	}
	return nil
}
package config
import (
"fmt"
"math/rand/v2"
"net"
"os"
"path/filepath"
"strconv"
"strings"
"github.com/docker/go-units"
)
// Datasources configures from where features are read, per coordinate reference system.
// +kubebuilder:object:generate=true
type Datasources struct {
	// Features should always be available in WGS84 (according to spec). This specifies the
	// datasource to be used for features in the WGS84 coordinate reference system.
	//
	// No on-the-fly transformation/reprojection is performed, so the features in this datasource need to be
	// either native WGS84 or reprojected/transformed to WGS84 ahead of time. For example, using ogr2ogr.
	// +optional
	DefaultWGS84 *Datasource `yaml:"defaultWGS84" json:"defaultWGS84"` //nolint:tagliatelle // grandfathered
	// One or more additional datasources for features in other (non-WGS84) coordinate reference systems.
	//
	// No on-the-fly transformation/reprojection is performed, so the features in these additional datasources
	// need to be transformed/reprojected ahead of time. For example, using ogr2ogr.
	// +optional
	Additional []AdditionalDatasource `yaml:"additional" json:"additional" validate:"dive"`
	// Datasource containing features which will be transformed/reprojected on-the-fly to the specified
	// coordinate reference systems. No need to transform/reproject ahead of time.
	//
	// Note: On-the-fly transformation/reprojection may impact performance when using (very) large geometries.
	// +optional
	OnTheFly []OnTheFlyDatasource `yaml:"transformOnTheFly" json:"transformOnTheFly" validate:"dive"`
}
// Datasource a single backend holding feature data. Exactly one of the fields should be set.
// +kubebuilder:object:generate=true
type Datasource struct {
	// GeoPackage to get the features from.
	// +optional
	GeoPackage *GeoPackage `yaml:"geopackage,omitempty" json:"geopackage,omitempty" validate:"required_without_all=Postgres"`
	// Postgres database to get the features from.
	// +optional
	Postgres *Postgres `yaml:"postgres,omitempty" json:"postgres,omitempty" validate:"required_without_all=GeoPackage"`
	// Add more data sources here such as Mongo, Elastic, etc.
}
// AdditionalDatasource a datasource holding features in a specific (non-WGS84) coordinate reference system.
// +kubebuilder:object:generate=true
type AdditionalDatasource struct {
	// SRS/CRS used for the features in this datasource
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs" json:"srs" validate:"required,startswith=EPSG:"`
	// The additional datasource
	Datasource `yaml:",inline" json:",inline"`
}
// OnTheFlyDatasource a datasource whose features are reprojected/transformed on-the-fly
// to the listed coordinate reference systems.
// +kubebuilder:object:generate=true
type OnTheFlyDatasource struct {
	// List of supported SRS/CRS
	SupportedSrs []OnTheFlySupportedSrs `yaml:"supportedSrs,omitempty" json:"supportedSrs,omitempty" validate:"dive,omitempty"`
	// The datasource capable of on-the-fly reprojection/transformation
	Datasource `yaml:",inline" json:",inline"`
}
// OnTheFlySupportedSrs a single SRS supported by an on-the-fly transforming datasource.
// +kubebuilder:object:generate=true
type OnTheFlySupportedSrs struct {
	// Supported coordinated reference systems (CRS/SRS) for on-the-fly reprojection/transformation.
	// Note: no need to add 'OGC:CRS84', since that one is required and included by default.
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs" json:"srs" validate:"required,startswith=EPSG:"`
}
// DatasourceCommon settings shared by all datasource types (GeoPackage, Postgres).
// +kubebuilder:object:generate=true
type DatasourceCommon struct {
	// Feature id column name
	// +kubebuilder:default="fid"
	// +optional
	Fid string `yaml:"fid,omitempty" json:"fid,omitempty" validate:"required" default:"fid"`
	// External feature id column name. When specified, this ID column will be exposed to clients instead of the regular FID column.
	// It allows one to offer a more stable ID to clients instead of an auto-generated FID. External FID column should contain UUIDs.
	// +optional
	ExternalFid string `yaml:"externalFid" json:"externalFid"`
	// Optional timeout after which queries are canceled
	// +kubebuilder:default="15s"
	// +optional
	QueryTimeout Duration `yaml:"queryTimeout,omitempty" json:"queryTimeout,omitempty" validate:"required" default:"15s"`
}
// Postgres connection settings for a PostgreSQL (with PostGIS) datasource.
// +kubebuilder:object:generate=true
type Postgres struct {
	DatasourceCommon `yaml:",inline" json:",inline"`
	// Hostname of the PostgreSQL server.
	// +kubebuilder:default="localhost"
	// +optional
	Host string `yaml:"host" json:"host" validate:"required" default:"localhost"`
	// Port number of the PostgreSQL server.
	// +kubebuilder:default=5432
	// +optional
	Port uint `yaml:"port" json:"port" validate:"required,port" default:"5432"`
	// Name of the PostgreSQL database containing the data.
	// +kubebuilder:default="postgres"
	// +optional
	DatabaseName string `yaml:"databaseName" json:"databaseName" validate:"required" default:"postgres"`
	// Name of the PostgreSQL schema containing the data.
	// +kubebuilder:default="public"
	// +optional
	Schema string `yaml:"schema" json:"schema" validate:"required" default:"public"`
	// The SSL mode to use, e.g. 'disable', 'allow', 'prefer', 'require', 'verify-ca' or 'verify-full'.
	// +kubebuilder:validation:Enum=disable;allow;prefer;require;verify-ca;verify-full
	// +kubebuilder:default="disable"
	// +optional
	SSLMode string `yaml:"sslMode" json:"sslMode" validate:"required" default:"disable"`
	// Username when connecting to the PostgreSQL server.
	// +kubebuilder:default="postgres"
	// +optional
	User string `yaml:"user" json:"user" validate:"required" default:"postgres"`
	// Password when connecting to the PostgreSQL server.
	// +kubebuilder:default="postgres"
	// +optional
	Pass string `yaml:"pass" json:"pass" validate:"required" default:"postgres"`
	// When true the geometry column in the feature table needs to be indexed. Initialization will fail
	// when no index is present, when false the index check is skipped. For large tables an index is recommended!
	//
	// +kubebuilder:default=true
	// +optional
	SpatialIndexRequired *bool `yaml:"spatialIndexRequired,omitempty" json:"spatialIndexRequired,omitempty" default:"true"`
}
// ConnectionString returns the postgres:// connection URL for this Postgres config.
func (p *Postgres) ConnectionString() string {
	// include these schemas in the search path, otherwise postgis extension isn't found
	const defaultSearchPath = "public, postgis, topology"
	hostPort := net.JoinHostPort(p.Host, strconv.FormatUint(uint64(p.Port), 10))
	return fmt.Sprintf("postgres://%s:%s@%s/%s?sslmode=%s&search_path=%s,%s&application_name=%s",
		p.User, p.Pass, hostPort, p.DatabaseName, p.SSLMode,
		p.Schema, defaultSearchPath, AppName)
}
// GeoPackage datasource config. Exactly one of Local or Cloud should be set.
// +kubebuilder:object:generate=true
type GeoPackage struct {
	// Settings to read a GeoPackage from local disk
	// +optional
	Local *GeoPackageLocal `yaml:"local,omitempty" json:"local,omitempty" validate:"required_without_all=Cloud"`
	// Settings to read a GeoPackage as a Cloud-Backed SQLite database
	// +optional
	Cloud *GeoPackageCloud `yaml:"cloud,omitempty" json:"cloud,omitempty" validate:"required_without_all=Local"`
}
// GeoPackageCommon settings shared between local and cloud-backed GeoPackages.
// +kubebuilder:object:generate=true
type GeoPackageCommon struct {
	DatasourceCommon `yaml:",inline" json:",inline"`
	// ADVANCED SETTING. When the number of features in a bbox stay within the given value use an RTree index, otherwise use a BTree index.
	// +kubebuilder:default=8000
	// +optional
	MaxBBoxSizeToUseWithRTree int `yaml:"maxBBoxSizeToUseWithRTree,omitempty" json:"maxBBoxSizeToUseWithRTree,omitempty" validate:"required" default:"8000"`
	// ADVANCED SETTING. Sets the SQLite "cache_size" pragma which determines how many pages are cached in-memory.
	// See https://sqlite.org/pragma.html#pragma_cache_size for details.
	// Default in SQLite is 2000 pages, which equates to 2000KiB (2048000 bytes). Which is denoted as -2000.
	// +kubebuilder:default=-2000
	// +optional
	InMemoryCacheSize int `yaml:"inMemoryCacheSize,omitempty" json:"inMemoryCacheSize,omitempty" validate:"required" default:"-2000"`
}
// GeoPackageLocal settings to read a GeoPackage from local disk.
// +kubebuilder:object:generate=true
type GeoPackageLocal struct {
	// GeoPackageCommon shared config between local and cloud GeoPackage
	GeoPackageCommon `yaml:",inline" json:",inline"`
	// Location of GeoPackage on disk.
	// You can place the GeoPackage here manually (out-of-band) or you can specify Download
	// and let the application download the GeoPackage for you and store it at this location.
	File string `yaml:"file" json:"file" validate:"required,omitempty,filepath"`
	// Optional initialization task to download a GeoPackage during startup. GeoPackage will be
	// downloaded to local disk and stored at the location specified in File.
	// +optional
	Download *GeoPackageDownload `yaml:"download,omitempty" json:"download,omitempty"`
}
// GeoPackageDownload startup task to download a GeoPackage over HTTP(S) to local disk.
// +kubebuilder:object:generate=true
type GeoPackageDownload struct {
	// Location of GeoPackage on remote HTTP(S) URL. GeoPackage will be downloaded to local disk
	// during startup and stored at the location specified in "file".
	From URL `yaml:"from" json:"from" validate:"required"`
	// ADVANCED SETTING. Determines how many workers (goroutines) in parallel will download the specified GeoPackage.
	// Setting this to 1 will disable concurrent downloads.
	// +kubebuilder:default=4
	// +kubebuilder:validation:Minimum=1
	// +optional
	Parallelism int `yaml:"parallelism,omitempty" json:"parallelism,omitempty" validate:"required,gte=1" default:"4"`
	// ADVANCED SETTING. When true TLS certs are NOT validated, false otherwise. Only use true for your own self-signed certificates!
	// +kubebuilder:default=false
	// +optional
	TLSSkipVerify bool `yaml:"tlsSkipVerify,omitempty" json:"tlsSkipVerify,omitempty" default:"false"`
	// ADVANCED SETTING. HTTP request timeout when downloading (part of) GeoPackage.
	// +kubebuilder:default="2m"
	// +optional
	Timeout Duration `yaml:"timeout,omitempty" json:"timeout,omitempty" validate:"required" default:"2m"`
	// ADVANCED SETTING. Minimum delay to use when retrying HTTP request to download (part of) GeoPackage.
	// +kubebuilder:default="1s"
	// +optional
	RetryDelay Duration `yaml:"retryDelay,omitempty" json:"retryDelay,omitempty" validate:"required" default:"1s"`
	// ADVANCED SETTING. Maximum overall delay of the exponential backoff while retrying HTTP requests to download (part of) GeoPackage.
	// +kubebuilder:default="30s"
	// +optional
	RetryMaxDelay Duration `yaml:"retryMaxDelay,omitempty" json:"retryMaxDelay,omitempty" validate:"required" default:"30s"`
	// ADVANCED SETTING. Maximum number of retries when retrying HTTP requests to download (part of) GeoPackage.
	// +kubebuilder:default=5
	// +kubebuilder:validation:Minimum=1
	// +optional
	MaxRetries int `yaml:"maxRetries,omitempty" json:"maxRetries,omitempty" validate:"required,gte=1" default:"5"`
}
// GeoPackageCloud settings to read a GeoPackage as a Cloud-Backed SQLite database.
// +kubebuilder:object:generate=true
type GeoPackageCloud struct {
	// GeoPackageCommon shared config between local and cloud GeoPackage
	GeoPackageCommon `yaml:",inline" json:",inline"`
	// Reference to the cloud storage (either azure or google at the moment).
	// For example, 'azure?emulator=127.0.0.1:10000&sas=0' or 'google'.
	Connection string `yaml:"connection" json:"connection" validate:"required"`
	// Username of the storage account, like devstoreaccount1 when using Azurite.
	User string `yaml:"user" json:"user" validate:"required"`
	// Some kind of credential like a password or key to authenticate with the storage backend, e.g:
	// 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==' when using Azurite.
	Auth string `yaml:"auth" json:"auth" validate:"required"`
	// Container/bucket on the storage account
	Container string `yaml:"container" json:"container" validate:"required"`
	// Filename of the GeoPackage
	File string `yaml:"file" json:"file" validate:"required"`
	// Local cache of fetched blocks from cloud storage
	// +optional
	Cache GeoPackageCloudCache `yaml:"cache,omitempty" json:"cache,omitempty"`
	// ADVANCED SETTING. Only for debug purposes! When true all HTTP requests executed by sqlite to cloud object storage are logged to stdout
	// +kubebuilder:default=false
	// +optional
	LogHTTPRequests bool `yaml:"logHttpRequests,omitempty" json:"logHttpRequests,omitempty" default:"false"`
}
// CacheDir returns the directory used to cache blocks of this cloud-backed GeoPackage:
// either the configured cache path (suffixed with a random number) or a fresh temp dir.
func (gc *GeoPackageCloud) CacheDir() (string, error) {
	baseName := strings.TrimSuffix(gc.File, filepath.Ext(gc.File))
	if gc.Cache.Path == nil {
		// no explicit cache path configured, fall back to a temp dir
		cacheDir, err := os.MkdirTemp("", baseName)
		if err != nil {
			return "", fmt.Errorf("failed to create tempdir to cache %s, error %w", baseName, err)
		}
		return cacheDir, nil
	}
	suffix := strconv.Itoa(rand.IntN(99999)) //nolint:gosec // random isn't used for security purposes
	return filepath.Join(*gc.Cache.Path, baseName+"-"+suffix), nil
}
// GeoPackageCloudCache local cache settings for blocks fetched from cloud storage.
// +kubebuilder:object:generate=true
type GeoPackageCloudCache struct {
	// Optional path to directory for caching cloud-backed GeoPackage blocks, when omitted a temp dir will be used.
	// +optional
	Path *string `yaml:"path,omitempty" json:"path,omitempty" validate:"omitempty,dirpath|filepath"`
	// Max size of the local cache. Accepts human-readable size such as 100Mb, 4Gb, 1Tb, etc. When omitted 1Gb is used.
	// +kubebuilder:default="1Gb"
	// +optional
	MaxSize string `yaml:"maxSize,omitempty" json:"maxSize,omitempty" default:"1Gb"`
	// When true a warm-up query is executed on startup which aims to fill the local cache. Does increase startup time.
	// +kubebuilder:default=false
	// +optional
	WarmUp bool `yaml:"warmUp,omitempty" json:"warmUp,omitempty" default:"false"`
}
// MaxSizeAsBytes returns the configured human-readable max cache size (e.g. "1Gb") as a number of bytes.
func (cache *GeoPackageCloudCache) MaxSizeAsBytes() (int64, error) {
	return units.FromHumanSize(cache.MaxSize)
}
package config
import (
"encoding/json"
"fmt"
"slices"
"sort"
"github.com/PDOK/gokoala/internal/engine/util"
"gopkg.in/yaml.v3"
)
var DefaultSrs = "EPSG:28992"
// OgcAPITiles config for the OGC API Tiles building block of this API.
// +kubebuilder:object:generate=true
type OgcAPITiles struct {
	// Tiles for the entire dataset, these are hosted at the root of the API (/tiles endpoint).
	// +optional
	DatasetTiles *Tiles `yaml:",inline" json:",inline"`
	// Tiles per collection. When no collections are specified tiles should be hosted at the root of the API (/tiles endpoint).
	// +optional
	Collections GeoSpatialCollections `yaml:"collections,omitempty" json:"collections,omitempty"`
}
// OgcAPITilesJSON helper type used by OgcAPITiles.MarshalJSON to inline the embedded Tiles.
type OgcAPITilesJSON struct {
	*Tiles      `json:",inline"`
	Collections GeoSpatialCollections `json:"collections,omitempty"`
}
// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (o OgcAPITiles) MarshalJSON() ([]byte, error) {
	// marshal via OgcAPITilesJSON which embeds Tiles (so it's inlined) and has no
	// MarshalJSON method of its own, preventing recursion into this method
	return json.Marshal(OgcAPITilesJSON{
		Tiles:       o.DatasetTiles,
		Collections: o.Collections,
	})
}
// UnmarshalJSON parses a string to OgcAPITiles.
// Delegates to the YAML parser (valid JSON is also valid YAML) so the
// yaml ",inline" tags are honored for JSON input as well.
func (o *OgcAPITiles) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, o)
}
// Defaults derives a health check tile path for dataset-level or collection-level tiles
// when the health check is enabled, uses the default SRS, and no explicit tile path is set.
func (o *OgcAPITiles) Defaults() {
	// HealthCheck.Enabled is a *bool (defaulted to true by the defaults library) and may
	// still be nil when defaults haven't been applied yet; guard before dereferencing.
	if o.DatasetTiles != nil && o.DatasetTiles.HealthCheck.Srs == DefaultSrs &&
		o.DatasetTiles.HealthCheck.TilePath == nil &&
		o.DatasetTiles.HealthCheck.Enabled != nil && *o.DatasetTiles.HealthCheck.Enabled {
		o.DatasetTiles.deriveHealthCheckTilePath()
	} else if o.Collections != nil {
		for _, coll := range o.Collections {
			if coll.Tiles != nil && coll.Tiles.GeoDataTiles.HealthCheck.Srs == DefaultSrs &&
				coll.Tiles.GeoDataTiles.HealthCheck.TilePath == nil &&
				coll.Tiles.GeoDataTiles.HealthCheck.Enabled != nil &&
				*coll.Tiles.GeoDataTiles.HealthCheck.Enabled {
				coll.Tiles.GeoDataTiles.deriveHealthCheckTilePath()
			}
		}
	}
}
// CollectionEntryTiles configures serving tiles for a single collection.
// +kubebuilder:object:generate=true
type CollectionEntryTiles struct {
	// Tiles specific to this collection. Called 'geodata tiles' in OGC spec.
	GeoDataTiles Tiles `yaml:",inline" json:",inline" validate:"required"`
}
// CollectionEntryTilesJSON helper type used by CollectionEntryTiles.MarshalJSON to inline the embedded Tiles.
type CollectionEntryTilesJSON struct {
	Tiles `json:",inline"`
}
// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (c CollectionEntryTiles) MarshalJSON() ([]byte, error) {
	// marshal via CollectionEntryTilesJSON which embeds Tiles (so it's inlined) and has
	// no MarshalJSON method of its own, preventing recursion into this method
	return json.Marshal(CollectionEntryTilesJSON{
		Tiles: c.GeoDataTiles,
	})
}
// UnmarshalJSON parses a string to CollectionEntryTiles.
// Delegates to the YAML parser (valid JSON is also valid YAML) so the
// yaml ",inline" tags are honored for JSON input as well.
func (c *CollectionEntryTiles) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}
// TilesType the type of tiles offered: raster and/or vector.
// +kubebuilder:validation:Enum=raster;vector
type TilesType string

const (
	// TilesTypeRaster raster (image) tiles
	TilesTypeRaster TilesType = "raster"
	// TilesTypeVector vector tiles
	TilesTypeVector TilesType = "vector"
)
// HasType reports whether tiles of the given type (raster/vector) are offered,
// either at dataset level or by any of the collections.
func (o *OgcAPITiles) HasType(t TilesType) bool {
	if o.DatasetTiles != nil && slices.Contains(o.DatasetTiles.Types, t) {
		return true
	}
	for _, coll := range o.Collections {
		if coll.Tiles == nil {
			continue
		}
		if slices.Contains(coll.Tiles.GeoDataTiles.Types, t) {
			return true
		}
	}
	return false
}
// AllTileProjections projections supported by GoKoala for serving (vector) tiles, regardless of the dataset.
// Maps an EPSG SRS identifier to the corresponding tile matrix set name.
// When adding a new projection also add corresponding HTML/JSON templates.
var AllTileProjections = map[string]string{
	"EPSG:28992": "NetherlandsRDNewQuad",
	"EPSG:3035":  "EuropeanETRS89_LAEAQuad",
	"EPSG:3857":  "WebMercatorQuad",
}
// HasProjection true when the given projection is supported for this dataset.
func (o *OgcAPITiles) HasProjection(srs string) bool {
	return slices.ContainsFunc(o.GetProjections(), func(p SupportedSrs) bool {
		return p.Srs == srs
	})
}
// GetProjections projections supported for this dataset: the union of dataset-level
// and collection-level supported SRSs, sorted by SRS string length (descending).
func (o *OgcAPITiles) GetProjections() []SupportedSrs {
	uniq := map[SupportedSrs]struct{}{}
	if o.DatasetTiles != nil {
		for _, srs := range o.DatasetTiles.SupportedSrs {
			uniq[srs] = struct{}{}
		}
	}
	for _, coll := range o.Collections {
		if coll.Tiles == nil {
			continue
		}
		for _, srs := range coll.Tiles.GeoDataTiles.SupportedSrs {
			uniq[srs] = struct{}{}
		}
	}
	projections := util.Keys(uniq)
	sort.Slice(projections, func(i, j int) bool {
		return len(projections[i].Srs) > len(projections[j].Srs)
	})
	return projections
}
// Tiles configures how and where tiles are served, at dataset or collection level.
// +kubebuilder:object:generate=true
type Tiles struct {
	// Reference to the server (or object storage) hosting the tiles.
	// Note: Only marked as optional in CRD to support top-level OR collection-level tiles
	// +optional
	TileServer URL `yaml:"tileServer" json:"tileServer" validate:"required"`
	// Could be 'vector' and/or 'raster' to indicate the types of tiles offered
	// Note: Only marked as optional in CRD to support top-level OR collection-level tiles
	// +optional
	Types []TilesType `yaml:"types" json:"types" validate:"required"`
	// Specifies in what projections (SRS/CRS) the tiles are offered
	// Note: Only marked as optional in CRD to support top-level OR collection-level tiles
	// +optional
	SupportedSrs []SupportedSrs `yaml:"supportedSrs" json:"supportedSrs" validate:"required,dive"`
	// Optional template to the vector tiles on the tileserver. Defaults to {tms}/{z}/{x}/{y}.pbf.
	// +optional
	URITemplateTiles *string `yaml:"uriTemplateTiles,omitempty" json:"uriTemplateTiles,omitempty"`
	// Optional health check configuration
	// +optional
	HealthCheck HealthCheck `yaml:"healthCheck" json:"healthCheck"`
}
// deriveHealthCheckTilePath derives and sets the path of the tile to use for health checks:
// a well-known tile at the deepest zoom level configured for the default SRS.
func (t *Tiles) deriveHealthCheckTilePath() {
	// find the deepest zoom level configured for the default SRS
	var deepestZoomLevel int
	for _, srs := range t.SupportedSrs {
		if srs.Srs == DefaultSrs {
			deepestZoomLevel = srs.ZoomLevelRange.End
		}
	}
	// NOTE(review): a zoom level without an entry in HealthCheckDefaultTiles yields the
	// zero coordinate {0,0} — confirm configured zoom levels stay within that map's range.
	defaultTile := HealthCheckDefaultTiles[deepestZoomLevel]
	tileMatrixSet := AllTileProjections[DefaultSrs]
	tilePath := fmt.Sprintf("/%s/%d/%d/%d.pbf", tileMatrixSet, deepestZoomLevel, defaultTile.x, defaultTile.y)
	t.HealthCheck.TilePath = &tilePath
}
// SupportedSrs a projection and the zoom levels available for it.
// +kubebuilder:object:generate=true
type SupportedSrs struct {
	// Projection (SRS/CRS) used
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs" json:"srs" validate:"required,startswith=EPSG:"`
	// Available zoom levels
	ZoomLevelRange ZoomLevelRange `yaml:"zoomLevelRange" json:"zoomLevelRange" validate:"required"`
}
// ZoomLevelRange an inclusive range of available zoom levels (Start <= End).
// +kubebuilder:object:generate=true
type ZoomLevelRange struct {
	// Start zoom level
	// +kubebuilder:validation:Minimum=0
	Start int `yaml:"start" json:"start" validate:"gte=0,ltefield=End"`
	// End zoom level
	End int `yaml:"end" json:"end" validate:"required,gtefield=Start"`
}
// TileCoordinates the column (x) and row (y) of a tile within a tile matrix.
type TileCoordinates struct {
	x int
	y int
}
// HealthCheckDefaultTiles default tiles per zoom level (0-16) for EPSG:28992 -
// location centered just outside a village in the province of Friesland.
var HealthCheckDefaultTiles = map[int]TileCoordinates{
	0:  {x: 0, y: 0},
	1:  {x: 1, y: 0},
	2:  {x: 2, y: 1},
	3:  {x: 4, y: 2},
	4:  {x: 8, y: 5},
	5:  {x: 17, y: 11},
	6:  {x: 35, y: 22},
	7:  {x: 71, y: 45},
	8:  {x: 143, y: 91},
	9:  {x: 286, y: 182},
	10: {x: 572, y: 365},
	11: {x: 1144, y: 731},
	12: {x: 2288, y: 1462},
	13: {x: 4576, y: 2925},
	14: {x: 9152, y: 5851},
	15: {x: 18304, y: 11702},
	16: {x: 36608, y: 23404},
}
// HealthCheck configures the health check performed on tiles.
// +kubebuilder:object:generate=true
type HealthCheck struct {
	// Enable/disable healthcheck on tiles. Defaults to true.
	// +kubebuilder:default=true
	// +optional
	Enabled *bool `yaml:"enabled" json:"enabled" default:"true"`
	// Projection (SRS/CRS) used for tile healthcheck
	// +kubebuilder:default="EPSG:28992"
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	// +optional
	Srs string `yaml:"srs" json:"srs" default:"EPSG:28992" validate:"required,startswith=EPSG:"`
	// Path to specific tile used for healthcheck
	// +optional
	TilePath *string `yaml:"tilePath,omitempty" json:"tilePath,omitempty" validate:"required_unless=Srs EPSG:28992"`
}
// validateTileProjections checks that every configured SRS (dataset-level and per
// collection) is one of the projections GoKoala supports (see AllTileProjections).
// Returns a single error listing all violations, or nil when the config is valid.
func validateTileProjections(tiles *OgcAPITiles) error {
	var errMessages []string
	// shared check for a list of supported SRSs
	checkSupported := func(supported []SupportedSrs) {
		for _, srs := range supported {
			if _, ok := AllTileProjections[srs.Srs]; !ok {
				errMessages = append(errMessages, fmt.Sprintf("validation failed for srs '%s'; srs is not supported", srs.Srs))
			}
		}
	}
	if tiles.DatasetTiles != nil {
		checkSupported(tiles.DatasetTiles.SupportedSrs)
	}
	for _, collection := range tiles.Collections {
		if collection.Tiles != nil {
			checkSupported(collection.Tiles.GeoDataTiles.SupportedSrs)
		}
	}
	if len(errMessages) > 0 {
		return fmt.Errorf("invalid config provided:\n%v", errMessages)
	}
	return nil
}
package config
import (
"fmt"
"html/template"
"log"
"os"
"path/filepath"
"dario.cat/mergo"
"github.com/go-playground/validator/v10"
"gopkg.in/yaml.v3"
)
const (
	// defaultThemeConfig path of the base theme bundled with the application; a custom
	// theme (when provided) is merged on top of this one.
	defaultThemeConfig = "themes/pdok/theme.yaml"
)
// Theme configures the look-and-feel (logos, colors, extra HTML) of the HTML pages.
type Theme struct {
	// Logos shown on the HTML pages
	Logo *ThemeLogo `yaml:"logo" json:"logo" validate:"required"`
	// Colors used on the HTML pages
	Color *ThemeColors `yaml:"color" json:"color" validate:"required"`
	// Optional extra HTML to include on the HTML pages
	Includes *ThemeIncludes `yaml:"includes" json:"includes"`
}
// ThemeLogo paths to the logo/favicon files of the theme. Relative paths are
// resolved against the directory containing the theme file (see getThemeFromFile).
type ThemeLogo struct {
	Header    string `yaml:"header" json:"header" validate:"required"`
	Footer    string `yaml:"footer" json:"footer" validate:"required"`
	Opengraph string `yaml:"opengraph" json:"opengraph" validate:"required"`
	Favicon   string `yaml:"favicon" json:"favicon" validate:"required"`
	Favicon16 string `yaml:"favicon16" json:"favicon16" validate:"required"`
	Favicon32 string `yaml:"favicon32" json:"favicon32" validate:"required"`
}
// ThemeColors the color scheme of the theme, as hex color codes.
type ThemeColors struct {
	Primary   string `yaml:"primary" json:"primary" validate:"required,hexcolor"`
	Secondary string `yaml:"secondary" json:"secondary" validate:"required,hexcolor"`
	Link      string `yaml:"link" json:"link" validate:"required,hexcolor"`
}
// ThemeIncludes extra HTML to include on the HTML pages.
type ThemeIncludes struct {
	// HTMLFile path to a file with extra HTML
	HTMLFile string `yaml:"html"`
	// ParsedHTML contents of HTMLFile, read by ParseHTML
	ParsedHTML template.HTML
}
// NewTheme creates a Theme from the default (base) theme, optionally overridden by the
// custom theme file referenced by cfg. The merged theme is validated before it's returned.
func NewTheme(cfg string) (*Theme, error) {
	theme, err := getThemeFromFile(defaultThemeConfig)
	if err != nil {
		return nil, err
	}
	if cfg != "" {
		// If a custom theme is present, also fetch it
		customTheme, err := getThemeFromFile(cfg)
		if err != nil {
			return nil, err
		}
		// Overwrite the base theme with the custom theme. Return the error instead of
		// log.Fatalf: the previous Fatalf exited the process from library code and made
		// the subsequent return unreachable; callers should decide how to handle it.
		if err = mergo.Merge(theme, customTheme, mergo.WithOverride); err != nil {
			return nil, err
		}
	}
	theme.ParseHTML()
	// check 'validate' tags
	v := validator.New()
	if err = v.Struct(theme); err != nil {
		return nil, formatValidationErr(err)
	}
	return theme, nil
}
// ParseHTML reads the (optional) HTML include file from disk and caches its
// contents as template.HTML for rendering. When no include file is configured,
// or the file can't be read, ParsedHTML is left empty.
func (t *Theme) ParseHTML() {
	if t.Includes == nil {
		t.Includes = &ThemeIncludes{}
	}
	if t.Includes.HTMLFile == "" {
		// no include configured: nothing to parse. Bail out early to avoid a
		// spurious "failed to read html file" log entry for the empty path.
		t.Includes.ParsedHTML = ""
		return
	}
	content, err := os.ReadFile(t.Includes.HTMLFile)
	if err != nil {
		log.Printf("failed to read html file %v", err)
		t.Includes.ParsedHTML = ""
		return
	}
	// #nosec G203 - trusted html so no threat
	t.Includes.ParsedHTML = template.HTML(content)
}
// getThemeFromFile reads and unmarshals the theme YAML at the given path.
// All file references inside the theme (logos, HTML include) are rewritten
// to absolute paths relative to the directory containing the theme file.
func getThemeFromFile(path string) (*Theme, error) {
	yamlData, err := os.ReadFile(path)
	if err != nil {
		// ": %w" separator added so the wrapped error reads correctly
		return nil, fmt.Errorf("failed to read theme file: %w", err)
	}
	absolutePath, err := filepath.Abs(path)
	if err != nil {
		return nil, fmt.Errorf("failed to get absolute path for theme file: %w", err)
	}
	dir := filepath.Dir(absolutePath)
	var theme Theme
	if err = yaml.Unmarshal(yamlData, &theme); err != nil {
		return nil, fmt.Errorf("failed to unmarshal theme file, error: %w", err)
	}
	if theme.Logo != nil {
		theme.Logo = &ThemeLogo{
			Header:    pathJoinIfPresent(dir, theme.Logo.Header),
			Footer:    pathJoinIfPresent(dir, theme.Logo.Footer),
			Opengraph: pathJoinIfPresent(dir, theme.Logo.Opengraph),
			Favicon:   pathJoinIfPresent(dir, theme.Logo.Favicon),
			Favicon16: pathJoinIfPresent(dir, theme.Logo.Favicon16),
			Favicon32: pathJoinIfPresent(dir, theme.Logo.Favicon32),
		}
	}
	if theme.Includes != nil {
		// use pathJoinIfPresent (like the logo paths above) so an unset
		// HTMLFile stays empty instead of becoming the theme directory itself
		theme.Includes.HTMLFile = pathJoinIfPresent(dir, theme.Includes.HTMLFile)
	}
	return &theme, nil
}
func pathJoinIfPresent(base string, file string) (result string) {
if file != "" {
result = filepath.Join(base, file)
}
return
}
package config
import (
"encoding/json"
"fmt"
"net/url"
"regexp"
"strings"
"gopkg.in/yaml.v3"
)
var (
validURLRegexp = regexp.MustCompile(`^(https?://.+|\$\{.+\}.*)$`) // https://regex101.com/r/IvhP6H/1
)
// URL Custom net.URL compatible with YAML and JSON (un)marshalling and kubebuilder.
// In addition, it also removes trailing slash if present, so we can easily
// append a longer path without having to worry about double slashes.
//
// Allow only http/https URLs or environment variables like ${FOOBAR}
// +kubebuilder:validation:Pattern=`^(https?://.+)|(\$\{.+\}.*)`
// +kubebuilder:validation:Type=string
type URL struct {
	// This is a pointer so the wrapper can directly be used in templates, e.g.: {{ .Config.BaseURL }}
	// Otherwise you would need .String() or template.URL(). (Might be a bug.)
	*url.URL
}
// UnmarshalYAML parses a string to URL and also removes trailing slash if present,
// so we can easily append a longer path without having to worry about double slashes.
func (u *URL) UnmarshalYAML(unmarshal func(any) error) error {
	var raw string
	if err := unmarshal(&raw); err != nil {
		return err
	}
	parsed, err := parseURL(raw)
	if err != nil {
		return err
	}
	if parsed != nil {
		u.URL = parsed
	}
	return nil
}
// MarshalJSON turns URL into JSON.
// Value instead of pointer receiver because only that way it can be used for both.
func (u URL) MarshalJSON() ([]byte, error) {
	if u.URL != nil {
		return json.Marshal(u.String())
	}
	// no wrapped URL: marshal as empty string
	return json.Marshal("")
}
// UnmarshalJSON parses a string to URL and also removes trailing slash if present,
// so we can easily append a longer path without having to worry about double slashes.
//
// Delegates to the YAML unmarshaller: JSON is a subset of YAML, so the raw
// JSON bytes can be parsed by yaml.Unmarshal, which in turn invokes UnmarshalYAML.
func (u *URL) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, u)
}
// MarshalYAML turns URL into YAML.
// Value instead of pointer receiver because only that way it can be used for both.
func (u URL) MarshalYAML() (any, error) {
	if u.URL != nil {
		return u.String(), nil
	}
	// no wrapped URL: marshal as empty string
	return "", nil
}
// DeepCopyInto copies the receiver, writes into out.
func (u *URL) DeepCopyInto(out *URL) {
	if out == nil {
		return
	}
	*out = *u
}
// DeepCopy copies the receiver, creates a new URL.
func (u *URL) DeepCopy() *URL {
	if u == nil {
		return nil
	}
	copied := &URL{}
	u.DeepCopyInto(copied)
	return copied
}
// parseURL validates the given string against validURLRegexp (http/https URLs
// or ${ENV_VAR} placeholders) and parses it, stripping a trailing slash.
func parseURL(s string) (*url.URL, error) {
	if validURLRegexp.MatchString(s) {
		return url.Parse(strings.TrimSuffix(s, "/"))
	}
	return nil, fmt.Errorf("invalid URL: %s", s)
}
package engine
import (
"log"
"net/http"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/elnormous/contenttype"
"golang.org/x/text/language"
)
const (
	// query parameter names used by the OGC APIs
	FormatParam   = "f"
	languageParam = "lang"

	// media types (Content-Type header values) served by the APIs
	MediaTypeJSON          = "application/json"
	MediaTypeXML           = "application/xml"
	MediaTypeHTML          = "text/html"
	MediaTypeTileJSON      = "application/vnd.mapbox.tile+json"
	MediaTypeMVT           = "application/vnd.mapbox-vector-tile"
	MediaTypeMapboxStyle   = "application/vnd.mapbox.style+json"
	MediaTypeSLD           = "application/vnd.ogc.sld+xml;version=1.0"
	MediaTypeOpenAPI       = "application/vnd.oai.openapi+json;version=3.0"
	MediaTypeGeoJSON       = "application/geo+json"
	MediaTypeJSONFG        = "application/vnd.ogc.fg+json" // https://docs.ogc.org/per/21-017r1.html#toc17
	MediaTypeJSONSchema    = "application/schema+json"
	MediaTypeQuantizedMesh = "application/vnd.quantized-mesh"

	// short format identifiers accepted in the ?f= query parameter
	FormatHTML           = "html"
	FormatXML            = "xml"
	FormatJSON           = "json"
	FormatTileJSON       = "tilejson"
	FormatMVT            = "mvt"
	FormatMVTAlternative = "pbf"
	FormatMapboxStyle    = "mapbox"
	FormatSLD            = "sld10"
	FormatGeoJSON        = "geojson" // ?=json should also work for geojson
	FormatJSONFG         = "jsonfg"
	FormatGzip           = "gzip"
)
var (
	// MediaTypeJSONFamily media types that carry a JSON payload and can be
	// decoded with a JSON decoder.
	MediaTypeJSONFamily = []string{
		MediaTypeTileJSON,
		MediaTypeMapboxStyle,
		MediaTypeGeoJSON,
		MediaTypeJSONFG,
		MediaTypeJSONSchema,
	}
	// CompressibleMediaTypes media types eligible for gzip compression.
	CompressibleMediaTypes = []string{
		MediaTypeJSON,
		MediaTypeGeoJSON,
		MediaTypeJSONFG,
		MediaTypeTileJSON,
		MediaTypeJSONSchema,
		MediaTypeMapboxStyle,
		MediaTypeOpenAPI,
		MediaTypeHTML,
		// common web media types
		"text/css",
		"text/plain",
		"text/javascript",
		"application/javascript",
		"image/svg+xml",
	}
	// OutputFormatDefault the output format offered when none is specified.
	OutputFormatDefault = []OutputFormat{
		{Key: FormatJSON, Name: "JSON"},
	}
	// StyleFormatExtension maps a style format to its file extension.
	StyleFormatExtension = map[string]string{
		FormatMapboxStyle: ".json",
		FormatSLD:         ".sld",
	}
)
// OutputFormat formats that can be returned by the API.
type OutputFormat struct {
	// Key short format identifier as used in the ?f= query parameter.
	Key string
	// Name human-readable name of the format.
	Name string
}
// ContentNegotiation handles HTTP content negotiation.
// See https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/Content_negotiation
type ContentNegotiation struct {
	// availableMediaTypes media types this server can produce, in order of preference.
	availableMediaTypes []contenttype.MediaType
	// availableLanguages languages this server can serve pages in.
	availableLanguages []language.Tag
	// formatsByMediaType lookup from media type (Content-Type) to short format key.
	formatsByMediaType map[string]string
	// mediaTypesByFormat inverse of formatsByMediaType.
	mediaTypesByFormat map[string]string
}
// newContentNegotiation builds a ContentNegotiation for the given languages,
// wiring up the supported media types and their short format keys.
func newContentNegotiation(availableLanguages []config.Language) *ContentNegotiation {
	// ordered by preference: the first acceptable media type wins
	mediaTypes := []contenttype.MediaType{
		contenttype.NewMediaType(MediaTypeJSON),
		contenttype.NewMediaType(MediaTypeXML),
		contenttype.NewMediaType(MediaTypeHTML),
		contenttype.NewMediaType(MediaTypeTileJSON),
		contenttype.NewMediaType(MediaTypeGeoJSON),
		contenttype.NewMediaType(MediaTypeJSONFG),
		contenttype.NewMediaType(MediaTypeMVT),
		contenttype.NewMediaType(MediaTypeMapboxStyle),
		contenttype.NewMediaType(MediaTypeSLD),
		contenttype.NewMediaType(MediaTypeOpenAPI),
	}
	formatsByMediaType := map[string]string{
		MediaTypeJSON:        FormatJSON,
		MediaTypeXML:         FormatXML,
		MediaTypeHTML:        FormatHTML,
		MediaTypeTileJSON:    FormatTileJSON,
		MediaTypeGeoJSON:     FormatGeoJSON,
		MediaTypeJSONFG:      FormatJSONFG,
		MediaTypeMVT:         FormatMVT,
		MediaTypeMapboxStyle: FormatMapboxStyle,
		MediaTypeSLD:         FormatSLD,
	}
	tags := make([]language.Tag, 0, len(availableLanguages))
	for _, lang := range availableLanguages {
		tags = append(tags, lang.Tag)
	}
	return &ContentNegotiation{
		availableMediaTypes: mediaTypes,
		availableLanguages:  tags,
		formatsByMediaType:  formatsByMediaType,
		mediaTypesByFormat:  util.Inverse(formatsByMediaType),
	}
}
// GetSupportedStyleFormats returns the style formats this server supports
// (Mapbox style JSON and SLD 1.0).
func (cn *ContentNegotiation) GetSupportedStyleFormats() []string {
	return []string{FormatMapboxStyle, FormatSLD}
}
// GetStyleFormatExtension returns the file extension belonging to the given
// style format, or the empty string for an unknown format.
func (cn *ContentNegotiation) GetStyleFormatExtension(format string) string {
	// map lookup yields the zero value ("") for unknown formats
	return StyleFormatExtension[format]
}
// NegotiateFormat performs content negotiation, not idempotent (since it removes the ?f= param).
// Precedence: explicit ?f= query parameter, then the Accept header, then JSON.
func (cn *ContentNegotiation) NegotiateFormat(req *http.Request) string {
	if format := cn.getFormatFromQueryParam(req); format != "" {
		return format
	}
	if format := cn.getFormatFromAcceptHeader(req); format != "" {
		return format
	}
	return FormatJSON // default
}
// NegotiateLanguage performs language negotiation, not idempotent (since it removes the ?lang= param).
// Precedence: ?lang= query parameter, then cookie, then Accept-Language header, then Dutch.
func (cn *ContentNegotiation) NegotiateLanguage(w http.ResponseWriter, req *http.Request) language.Tag {
	if lang := cn.getLanguageFromQueryParam(w, req); lang != language.Und {
		return lang
	}
	if lang := cn.getLanguageFromCookie(req); lang != language.Und {
		return lang
	}
	if lang := cn.getLanguageFromHeader(req); lang != language.Und {
		return lang
	}
	return language.Dutch // default
}
// formatToMediaType resolves the media type for the given template key,
// honoring an explicit media type overwrite when set.
func (cn *ContentNegotiation) formatToMediaType(key TemplateKey) string {
	if key.MediaTypeOverwrite == "" {
		return cn.mediaTypesByFormat[key.Format]
	}
	return key.MediaTypeOverwrite
}
// getFormatFromQueryParam returns the format given in the ?f= query parameter
// (or "" when absent) and strips that parameter from the request URL.
func (cn *ContentNegotiation) getFormatFromQueryParam(req *http.Request) string {
	query := req.URL.Query()
	format := query.Get(FormatParam)
	if format == "" {
		return ""
	}
	// remove ?f= parameter to prepare for rewrite
	query.Del(FormatParam)
	req.URL.RawQuery = query.Encode()
	return format
}
// getFormatFromAcceptHeader maps the request's Accept header to a short
// format key, or "" when the header can't be parsed or doesn't match.
func (cn *ContentNegotiation) getFormatFromAcceptHeader(req *http.Request) string {
	mediaType, _, err := contenttype.GetAcceptableMediaType(req, cn.availableMediaTypes)
	if err != nil {
		log.Printf("Failed to parse Accept header: %v. Continuing\n", err)
		return ""
	}
	return cn.formatsByMediaType[mediaType.String()]
}
// getLanguageFromQueryParam returns the language requested via the ?lang=
// query parameter (or language.Und when absent/unparsable). On success the
// matched language is persisted in a cookie and the ?lang= parameter is
// removed from the request URL.
func (cn *ContentNegotiation) getLanguageFromQueryParam(w http.ResponseWriter, req *http.Request) language.Tag {
	query := req.URL.Query()
	requested := query.Get(languageParam)
	if requested == "" {
		return language.Und
	}
	accepted, _, err := language.ParseAcceptLanguage(requested)
	if err != nil {
		return language.Und
	}
	matcher := language.NewMatcher(cn.availableLanguages)
	_, index, _ := matcher.Match(accepted...)
	match := cn.availableLanguages[index]
	// set requested (matched) language in cookie
	setLanguageCookie(w, match.String())
	// remove ?lang= parameter, to prepare for rewrite
	query.Del(languageParam)
	req.URL.RawQuery = query.Encode()
	return match
}
// setLanguageCookie stores the given language in a strict, secure cookie so
// subsequent requests keep the same language without a ?lang= parameter.
func setLanguageCookie(w http.ResponseWriter, lang string) {
	http.SetCookie(w, &http.Cookie{
		Name:     languageParam,
		Value:    lang,
		Path:     "/",
		MaxAge:   config.CookieMaxAge,
		SameSite: http.SameSiteStrictMode,
		Secure:   true,
	})
}
// getLanguageFromCookie returns the language stored in the language cookie,
// or language.Und when the cookie is absent or unparsable.
func (cn *ContentNegotiation) getLanguageFromCookie(req *http.Request) language.Tag {
	cookie, err := req.Cookie(languageParam)
	if err != nil {
		return language.Und
	}
	accepted, _, err := language.ParseAcceptLanguage(cookie.Value)
	if err != nil {
		return language.Und
	}
	matcher := language.NewMatcher(cn.availableLanguages)
	_, index, _ := matcher.Match(accepted...)
	return cn.availableLanguages[index]
}
// getLanguageFromHeader returns the best match for the request's
// Accept-Language header, or language.Und when absent or unparsable.
func (cn *ContentNegotiation) getLanguageFromHeader(req *http.Request) language.Tag {
	header := req.Header.Get(HeaderAcceptLanguage)
	if header == "" {
		return language.Und
	}
	accepted, _, err := language.ParseAcceptLanguage(header)
	if err != nil {
		log.Printf("Failed to parse Accept-Language header: %v. Continuing\n", err)
		return language.Und
	}
	matcher := language.NewMatcher(cn.availableLanguages)
	_, index, _ := matcher.Match(accepted...)
	return cn.availableLanguages[index]
}
package engine
import (
"context"
"crypto/tls"
"fmt"
"io"
"net/http"
"net/url"
"os"
"time"
"github.com/failsafe-go/failsafe-go/failsafehttp"
"golang.org/x/sync/errgroup"
)
// bufferSize size of the in-memory copy buffer used while downloading.
const bufferSize = 1 * 1024 * 1024 // 1MiB

// Part piece of the file to download when HTTP Range Requests are supported.
type Part struct {
	Start int64 // byte offset of the first byte of this part (inclusive)
	End   int64 // byte offset just past the last byte of this part (exclusive; Range header uses End-1)
	Size  int64 // nominal part size, excluding any remainder added to the last part
}
// Download downloads file from the given URL and stores the result in the given output location.
// Will utilize multiple concurrent connections to increase transfer speed. The latter is only
// possible when the remote server supports HTTP Range Requests, otherwise it falls back
// to a regular/single connection download. Additionally, failed requests will be retried according
// to the given settings.
//
// Returns the time spent downloading, or an error when any step fails.
func Download(url url.URL, outputFilepath string, parallelism int, tlsSkipVerify bool, timeout time.Duration,
	retryDelay time.Duration, retryMaxDelay time.Duration, maxRetries int) (*time.Duration, error) {
	client := createHTTPClient(tlsSkipVerify, timeout, retryDelay, retryMaxDelay, maxRetries)
	// O_TRUNC: without it, leftover bytes of a previous (larger) file at this
	// path would remain after the download and fail the size assertion below.
	outputFile, err := os.OpenFile(outputFilepath, os.O_CREATE|os.O_TRUNC|os.O_RDWR, 0644)
	if err != nil {
		return nil, err
	}
	defer outputFile.Close()
	start := time.Now()
	supportRanges, contentLength, err := checkRemoteFile(url, client)
	if err != nil {
		return nil, err
	}
	// use ranged, concurrent download only when the server supports it
	if supportRanges && parallelism > 1 {
		err = downloadWithMultipleConnections(url, outputFile, contentLength, int64(parallelism), client)
	} else {
		err = downloadWithSingleConnection(url, outputFile, client)
	}
	if err != nil {
		return nil, err
	}
	// sanity check: downloaded size must match the advertised content length
	if err = assertFileValid(outputFile, contentLength); err != nil {
		return nil, err
	}
	timeSpent := time.Since(start)
	return &timeSpent, nil
}
// checkRemoteFile issues a HEAD request to determine whether the server
// supports HTTP Range Requests and what the file's content length is.
func checkRemoteFile(url url.URL, client *http.Client) (supportRanges bool, contentLength int64, err error) {
	res, err := client.Head(url.String())
	if err != nil {
		return
	}
	defer res.Body.Close()
	contentLength = res.ContentLength
	// require a known, positive length: ContentLength is -1 when unknown,
	// and the previous "!= 0" check let -1 through, which would produce
	// negative part sizes in the ranged download path.
	supportRanges = res.Header.Get(HeaderAcceptRanges) == "bytes" && contentLength > 0
	return
}
// downloadWithSingleConnection downloads the whole file over one connection,
// streaming the response body into outputFile.
func downloadWithSingleConnection(url url.URL, outputFile *os.File, client *http.Client) error {
	res, err := client.Get(url.String())
	if err != nil {
		return err
	}
	defer res.Body.Close()
	// guard against writing an error page (e.g. 404/500 body) to the output file
	if res.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %d while downloading %s", res.StatusCode, url.String())
	}
	buf := make([]byte, bufferSize)
	_, err = io.CopyBuffer(outputFile, res.Body, buf)
	return err
}
func downloadWithMultipleConnections(url url.URL, outputFile *os.File, contentLength int64, parallelism int64, client *http.Client) error {
parts := make([]Part, parallelism)
partSize := contentLength / parallelism
remainder := contentLength % parallelism
wg, _ := errgroup.WithContext(context.Background())
for i, part := range parts {
start := int64(i) * partSize
end := start + partSize
if remainder != 0 && i == len(parts)-1 {
end += remainder
}
part = Part{start, end, partSize}
wg.Go(func() error {
return downloadPart(client, url, outputFile.Name(), part)
})
}
return wg.Wait()
}
// downloadPart fetches one byte range of the remote file and writes it at the
// matching offset of the (shared) output file.
func downloadPart(client *http.Client, url url.URL, outputFilepath string, part Part) error {
	file, err := os.OpenFile(outputFilepath, os.O_RDWR, 0664)
	if err != nil {
		return err
	}
	defer file.Close()
	if _, err = file.Seek(part.Start, io.SeekStart); err != nil {
		return err
	}
	req, err := http.NewRequest(http.MethodGet, url.String(), nil)
	if err != nil {
		return err
	}
	// Range header uses an inclusive end, hence End-1
	req.Header.Set(HeaderRange, fmt.Sprintf("bytes=%d-%d", part.Start, part.End-1))
	res, err := client.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	if res.StatusCode != http.StatusPartialContent {
		return fmt.Errorf("server advertises HTTP Range Request support "+
			"but doesn't return status %d", http.StatusPartialContent)
	}
	_, err = io.CopyBuffer(file, res.Body, make([]byte, bufferSize))
	return err
}
func assertFileValid(outputFile *os.File, contentLength int64) error {
fi, err := outputFile.Stat()
if err != nil {
return err
}
if fi.Size() != contentLength {
return fmt.Errorf("invalid file, content-length %d and file size %d mismatch", contentLength, fi.Size())
}
return nil
}
// createHTTPClient builds an HTTP client with the given timeout, an optional
// insecure TLS mode, and a retry policy with exponential backoff.
func createHTTPClient(tlsSkipVerify bool, timeout time.Duration, retryDelay time.Duration,
	retryMaxDelay time.Duration, maxRetries int) *http.Client {
	//nolint:bodyclose // false positive
	retryPolicy := failsafehttp.NewRetryPolicyBuilder().
		WithBackoff(retryDelay, retryMaxDelay). //nolint:bodyclose // false positive
		WithMaxRetries(maxRetries).             //nolint:bodyclose // false positive
		Build()                                 //nolint:bodyclose // false positive
	transport := &http.Transport{
		TLSClientConfig: &tls.Config{
			InsecureSkipVerify: tlsSkipVerify, //nolint:gosec // on purpose, default is false
		},
	}
	return &http.Client{
		Timeout:   timeout,
		Transport: failsafehttp.NewRoundTripper(transport, retryPolicy),
	}
}
package engine
import (
"bytes"
"compress/gzip"
"context"
"errors"
"fmt"
htmltemplate "html/template"
"io"
"log"
"net/http"
"net/http/httputil"
"net/url"
"os"
"os/signal"
"syscall"
texttemplate "text/template"
"time"
"github.com/PDOK/gokoala/config"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
)
const (
	// templatesDir location of the Go templates, relative to the working directory.
	templatesDir = "internal/engine/templates/"
	// shutdownTimeout max time to wait for in-flight requests during graceful shutdown.
	shutdownTimeout = 5 * time.Second
)
// Engine encapsulates shared non-OGC API specific logic.
type Engine struct {
	Config    *config.Config      // parsed YAML configuration
	OpenAPI   *OpenAPI            // merged OpenAPI spec, used for request/response validation
	Templates *Templates          // parsed and pre-rendered Go templates
	CN        *ContentNegotiation // format + language negotiation
	Router    *chi.Mux            // HTTP router serving all endpoints

	// shutdownHooks funcs executed during graceful shutdown, see RegisterShutdownHook.
	shutdownHooks []func()
}
// NewEngine builds a new Engine from the given config and theme files.
func NewEngine(configFile string, themeFile string, openAPIFile string, enableTrailingSlash bool, enableCORS bool) (*Engine, error) {
	cfg, err := config.NewConfig(configFile)
	if err != nil {
		return nil, err
	}
	theme, err := config.NewTheme(themeFile)
	if err != nil {
		return nil, err
	}
	engine := NewEngineWithConfig(cfg, theme, openAPIFile, enableTrailingSlash, enableCORS)
	return engine, nil
}
// NewEngineWithConfig builds a new Engine from an already-parsed config and theme.
func NewEngineWithConfig(config *config.Config, theme *config.Theme, openAPIFile string, enableTrailingSlash bool, enableCORS bool) *Engine {
	cn := newContentNegotiation(config.AvailableLanguages)
	tmpl := newTemplates(config, theme)
	api := newOpenAPI(config, []string{openAPIFile}, nil)
	mux := newRouter(config.Version, enableTrailingSlash, enableCORS)
	engine := &Engine{
		Config:    config,
		OpenAPI:   api,
		Templates: tmpl,
		CN:        cn,
		Router:    mux,
	}
	// Default (non-OGC) endpoints
	newSitemap(engine)
	newHealthEndpoint(engine)
	newResourcesEndpoint(engine)
	newThemeEndpoints(theme, engine)
	return engine
}
// Start the engine by initializing all components and starting the server.
// When debugPort > 0 an additional profiling server is started on localhost.
func (e *Engine) Start(address string, debugPort int, shutdownDelay int) error {
	if debugPort > 0 {
		// debug server (binds to localhost).
		go func() {
			profilingRouter := chi.NewRouter()
			profilingRouter.Use(middleware.Logger)
			profilingRouter.Mount("/debug", middleware.Profiler())
			listenAddr := fmt.Sprintf("localhost:%d", debugPort)
			if err := e.startServer("debug server", listenAddr, 0, profilingRouter); err != nil {
				log.Fatalf("debug server failed %v", err)
			}
		}()
	}
	// main server
	return e.startServer("main server", address, shutdownDelay, e.Router)
}
// RegisterShutdownHook register a func to execute during graceful shutdown, e.g. to clean up resources.
// Hooks run in registration order before the HTTP server is shut down (see startServer).
func (e *Engine) RegisterShutdownHook(fn func()) {
	e.shutdownHooks = append(e.shutdownHooks, fn)
}
// RebuildOpenAPI rebuild the full OpenAPI spec with the newly given parameters.
// Use only once during bootstrap for specific use cases! For example: when you want to expand a
// specific part of the OpenAPI spec with data outside the configuration file (e.g. from a database).
func (e *Engine) RebuildOpenAPI(openAPIParams any) {
	// re-merge using the same extra spec files the engine was created with
	e.OpenAPI = newOpenAPI(e.Config, e.OpenAPI.extraOpenAPIFiles, openAPIParams)
}
// ParseTemplate parses both HTML and non-HTML templates depending on the format given in the TemplateKey and
// stores it in the engine for future rendering using RenderAndServe.
func (e *Engine) ParseTemplate(key TemplateKey) {
	e.Templates.parseAndSaveTemplate(key)
}
// RenderTemplates renders both HTML and non-HTML templates depending on the format given in the TemplateKey.
// This method also performs OpenAPI validation of the rendered template, therefore we also need the URL path.
// The rendered templates are stored in the engine for future serving using ServePage.
func (e *Engine) RenderTemplates(urlPath string, breadcrumbs []Breadcrumb, keys ...TemplateKey) {
	// no template params; validation enabled
	e.renderTemplates(urlPath, nil, breadcrumbs, true, keys...)
}
// RenderTemplatesWithParams renders both HTML and non-HTML templates depending on the format given in the TemplateKey.
// Like RenderTemplates but with template params passed through to the templates.
func (e *Engine) RenderTemplatesWithParams(urlPath string, params any, breadcrumbs []Breadcrumb, keys ...TemplateKey) {
	e.renderTemplates(urlPath, params, breadcrumbs, true, keys...)
}
// RenderAndServe renders an already parsed HTML or non-HTML template on-the-fly depending
// on the format in the given TemplateKey. The result isn't stored in engine, it's served directly to the client.
//
// NOTE: only used this for dynamic pages that can't be pre-rendered and cached (e.g. with data from a datastore),
// otherwise use ServePage for pre-rendered pages.
func (e *Engine) RenderAndServe(w http.ResponseWriter, r *http.Request, key TemplateKey,
	params any, breadcrumbs []Breadcrumb, availableFormats []OutputFormat) {
	// validate request
	if err := e.OpenAPI.ValidateRequest(r); err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemBadRequest, w, err.Error())
		return
	}
	// get template
	parsedTemplate, err := e.Templates.getParsedTemplate(key)
	if err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemServerError, w)
		// BUG FIX: this return was missing; without it parsedTemplate is nil
		// and the type assertions below panic after the 500 was already rendered
		return
	}
	// render output
	var output []byte
	if key.Format == FormatHTML {
		htmlTmpl := parsedTemplate.(*htmltemplate.Template)
		output = e.Templates.renderHTMLTemplate(htmlTmpl, r.URL, params, breadcrumbs, "", availableFormats)
	} else {
		jsonTmpl := parsedTemplate.(*texttemplate.Template)
		output = e.Templates.renderNonHTMLTemplate(jsonTmpl, params, key, "")
	}
	contentType := e.CN.formatToMediaType(key)
	// validate response
	if err := e.OpenAPI.ValidateResponse(contentType, output, r); err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemServerError, w, err.Error())
		return
	}
	writeResponse(w, contentType, output)
}
// Serve serves a response (which is either a pre-rendered template based on TemplateKey or a slice of arbitrary bytes)
// while also validating against the OpenAPI spec.
func (e *Engine) Serve(w http.ResponseWriter, r *http.Request, opt ...ServeOption) {
	options := &serve{
		validateRequest:  true,
		validateResponse: true,
	}
	for _, applyOption := range opt {
		applyOption(options)
	}
	if options.validateRequest {
		if err := e.OpenAPI.ValidateRequest(r); err != nil {
			log.Printf("%v", err.Error())
			RenderProblem(ProblemBadRequest, w, err.Error())
			return
		}
	}
	output := options.output
	if options.templateKey != nil {
		// resolve the pre-rendered template as the output
		rendered, err := e.Templates.getRenderedTemplate(*options.templateKey)
		if err != nil {
			log.Printf("%v", err.Error())
			RenderProblem(ProblemNotFound, w)
			return
		}
		output = rendered
		if options.contentType == "" {
			options.contentType = e.CN.formatToMediaType(*options.templateKey)
		}
	}
	if options.validateResponse {
		if err := e.OpenAPI.ValidateResponse(options.contentType, output, r); err != nil {
			log.Printf("%v", err.Error())
			RenderProblem(ProblemServerError, w, err.Error())
			return
		}
	}
	writeResponse(w, options.contentType, output)
}
// serve collects the options for Engine.Serve; populated via ServeOption funcs.
type serve struct {
	templateKey      *TemplateKey // when set, serve this pre-rendered template
	output           []byte       // raw response bytes (ignored when templateKey is set)
	validateRequest  bool         // validate incoming request against the OpenAPI spec
	validateResponse bool         // validate outgoing response against the OpenAPI spec
	contentType      string       // explicit Content-Type; derived from templateKey when empty
}

// ServeOption mutates the serve options for Engine.Serve.
type ServeOption func(*serve)
// ServeTemplate option to serve the pre-rendered template behind the given key.
func ServeTemplate(templateKey TemplateKey) ServeOption {
	return func(s *serve) {
		s.templateKey = &templateKey
	}
}
// ServeOutput option to serve the given raw bytes as the response body.
func ServeOutput(output []byte) ServeOption {
	return func(s *serve) {
		s.output = output
	}
}
// ServeValidation option to toggle OpenAPI validation of request and/or response
// (both enabled by default).
func ServeValidation(validateRequest bool, validateResponse bool) ServeOption {
	return func(s *serve) {
		s.validateRequest = validateRequest
		s.validateResponse = validateResponse
	}
}
// ServeContentType option to set an explicit Content-Type for the response.
func ServeContentType(contentType string) ServeOption {
	return func(s *serve) {
		s.contentType = contentType
	}
}
// ReverseProxy forwards given HTTP request to given target server, and optionally tweaks response.
// Convenience wrapper around ReverseProxyAndValidate with response validation disabled.
func (e *Engine) ReverseProxy(w http.ResponseWriter, r *http.Request, target *url.URL,
	prefer204 bool, contentTypeOverwrite string) {
	e.ReverseProxyAndValidate(w, r, target, prefer204, contentTypeOverwrite, false)
}
// ReverseProxyAndValidate forwards given HTTP request to given target server, and optionally tweaks and validates response.
//
// Behavior:
//   - the outgoing request is rewritten to the target URL, the Host header is
//     dropped, X-Forwarded-* headers are set and the engine's base URL is
//     passed along in a custom header
//   - when prefer204 is set, upstream 404 responses are converted to 204 with
//     an empty body (per the OGC tiles spec, see comment below)
//   - when contentTypeOverwrite is set, it replaces the upstream Content-Type
//   - when validateResponse is set and the response is JSON, the (possibly
//     gzipped) body is read fully and re-served via Engine.Serve with
//     response validation enabled
func (e *Engine) ReverseProxyAndValidate(w http.ResponseWriter, r *http.Request, target *url.URL,
	prefer204 bool, contentTypeOverwrite string, validateResponse bool) {
	rewrite := func(r *httputil.ProxyRequest) {
		r.Out.URL = target
		r.Out.Host = ""   // Don't pass Host header (similar to Traefik's passHostHeader=false)
		r.SetXForwarded() // Set X-Forwarded-* headers.
		r.Out.Header.Set(HeaderBaseURL, e.Config.BaseURL.String())
	}
	errorHandler := func(w http.ResponseWriter, _ *http.Request, err error) {
		log.Printf("failed to proxy request: %v", err)
		RenderProblem(ProblemBadGateway, w)
	}
	modifyResponse := func(proxyRes *http.Response) error {
		if prefer204 {
			// OGC spec: If the tile has no content due to lack of data in the area, but is within the data
			// resource its tile matrix sets and tile matrix sets limits, the HTTP response will use the status
			// code either 204 (indicating an empty tile with no content) or a 200
			if proxyRes.StatusCode == http.StatusNotFound {
				proxyRes.StatusCode = http.StatusNoContent
				removeBody(proxyRes)
			}
		}
		if contentTypeOverwrite != "" {
			proxyRes.Header.Set(HeaderContentType, contentTypeOverwrite)
		}
		if contentType := proxyRes.Header.Get(HeaderContentType); contentType == MediaTypeJSON && validateResponse {
			var reader io.ReadCloser
			var err error
			if proxyRes.Header.Get(HeaderContentEncoding) == FormatGzip {
				reader, err = gzip.NewReader(proxyRes.Body)
				if err != nil {
					return err
				}
			} else {
				reader = proxyRes.Body
			}
			// NOTE(review): the body is consumed here and served through
			// e.Serve directly; the proxy's own body copy will then read an
			// exhausted stream — presumably intentional, verify when changing.
			res, err := io.ReadAll(reader)
			if err != nil {
				return err
			}
			e.Serve(w, r, ServeValidation(false, true), ServeContentType(contentType), ServeOutput(res))
		}
		return nil
	}
	reverseProxy := &httputil.ReverseProxy{
		Rewrite:        rewrite,
		ModifyResponse: modifyResponse,
		ErrorHandler:   errorHandler,
	}
	reverseProxy.ServeHTTP(w, r)
}
// startServer creates and starts an HTTP server, also takes care of graceful shutdown.
// Blocks until an interrupt/terminate signal arrives, then runs the registered
// shutdown hooks, optionally waits shutdownDelay seconds, and shuts down
// the server with a bounded timeout.
func (e *Engine) startServer(name string, address string, shutdownDelay int, router *chi.Mux) error {
	// create HTTP server
	server := http.Server{
		Addr:              address,
		Handler:           router,
		ReadTimeout:       15 * time.Second,
		ReadHeaderTimeout: 15 * time.Second,
	}
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT)
	defer stop()
	go func() {
		// BUG FIX: format verb was "%2s" (width-2 padded string), should be "%s"
		log.Printf("%s listening on http://%s", name, address)
		// ListenAndServe always returns a non-nil error. After Shutdown or
		// Close, the returned error is ErrServerClosed
		if err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
			log.Fatalf("failed to shutdown %s: %v", name, err)
		}
	}()
	// listen for interrupt signal and then perform shutdown
	<-ctx.Done()
	stop()
	// execute shutdown hooks
	for _, shutdownHook := range e.shutdownHooks {
		shutdownHook()
	}
	if shutdownDelay > 0 {
		log.Printf("stop signal received, initiating shutdown of %s after %d seconds delay", name, shutdownDelay)
		time.Sleep(time.Duration(shutdownDelay) * time.Second)
	}
	log.Printf("shutting down %s gracefully", name)
	// shutdown with a max timeout.
	timeoutCtx, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
	defer cancel()
	return server.Shutdown(timeoutCtx)
}
// renderTemplates renders and stores the given templates, optionally validating
// each rendered result against the OpenAPI spec. Validation failures are fatal
// since this runs at startup.
func (e *Engine) renderTemplates(urlPath string, params any, breadcrumbs []Breadcrumb, validate bool, keys ...TemplateKey) {
	for _, key := range keys {
		e.Templates.renderAndSaveTemplate(key, breadcrumbs, params)
		if validate {
			// we already perform OpenAPI validation here during startup to catch
			// issues early on, in addition to runtime OpenAPI response validation
			// all templates are created in all available languages, hence all are checked
			for lang := range e.Templates.localizers {
				// key is a loop-local copy: mutating Language here is safe
				key.Language = lang
				if err := e.validateStaticResponse(key, urlPath); err != nil {
					log.Fatal(err)
				}
			}
		}
	}
}
// removeBody replaces the response body with an empty one and clears the
// related headers (used when converting 404 tiles to empty 204 responses).
func removeBody(r *http.Response) {
	r.Body = io.NopCloser(bytes.NewReader(nil))
	r.Header[HeaderContentLength] = []string{"0"}
	r.Header[HeaderContentType] = []string{}
}
// validateStaticResponse validates a pre-rendered template against the OpenAPI
// spec by constructing a synthetic GET request for its URL path.
func (e *Engine) validateStaticResponse(key TemplateKey, urlPath string) error {
	// error deliberately ignored: a missing template yields nil bytes which
	// will fail response validation below with a more descriptive error
	template, _ := e.Templates.getRenderedTemplate(key)
	serverURL := normalizeBaseURL(e.Config.BaseURL.String())
	req, err := http.NewRequest(http.MethodGet, serverURL+urlPath, nil)
	if err != nil {
		return fmt.Errorf("failed to construct request to validate %s "+
			"template against OpenAPI spec %v", key.Name, err)
	}
	err = e.OpenAPI.ValidateResponse(e.CN.formatToMediaType(key), template, req)
	if err != nil {
		return fmt.Errorf("validation of template %s failed: %w", key.Name, err)
	}
	return nil
}
// writeResponse returns response output to client, setting the Content-Type
// header first when one is given.
func writeResponse(w http.ResponseWriter, contentType string, output []byte) {
	if contentType != "" {
		w.Header().Set(HeaderContentType, contentType)
	}
	SafeWrite(w.Write, output)
}
// SafeWrite executes the given http.ResponseWriter.Write while logging errors.
func SafeWrite(write func([]byte) (int, error), body []byte) {
_, err := write(body)
if err != nil {
log.Printf("failed to write response: %v", err)
}
}
package engine
import (
"log"
"net/http"
"net/url"
"time"
)
// newHealthEndpoint registers the /health endpoint. When a tiles health check
// is configured, /health performs a HEAD request against a known tile on the
// tile server; otherwise it returns a plain "OK".
func newHealthEndpoint(e *Engine) {
	var target *url.URL
	if tilesConfig := e.Config.OgcAPI.Tiles; tilesConfig != nil {
		var err error
		// NOTE(review): HealthCheck.Enabled and TilePath are dereferenced
		// without nil checks — presumably guaranteed set by config
		// defaulting/validation elsewhere; verify before changing.
		switch {
		case tilesConfig.DatasetTiles != nil && *tilesConfig.DatasetTiles.HealthCheck.Enabled:
			target, err = url.Parse(tilesConfig.DatasetTiles.TileServer.String() + *tilesConfig.DatasetTiles.HealthCheck.TilePath)
		case len(tilesConfig.Collections) > 0 && tilesConfig.Collections[0].Tiles != nil &&
			*tilesConfig.Collections[0].Tiles.GeoDataTiles.HealthCheck.Enabled:
			// only the first collection's tile config is consulted
			target, err = url.Parse(tilesConfig.Collections[0].Tiles.GeoDataTiles.TileServer.String() + *tilesConfig.Collections[0].Tiles.GeoDataTiles.HealthCheck.TilePath)
		default:
			log.Println("cannot determine health check tilepath or tiles health check is disabled, falling back to basic check")
		}
		if err != nil {
			log.Fatalf("invalid health check tilepath: %v", err)
		}
	}
	if target != nil {
		// deep health check: HEAD a known tile with a short timeout
		client := &http.Client{Timeout: time.Duration(500) * time.Millisecond}
		e.Router.Get("/health", func(w http.ResponseWriter, _ *http.Request) {
			resp, err := client.Head(target.String())
			if err != nil {
				// the exact error is irrelevant for health monitoring, but log it for insight
				log.Printf("healthcheck failed: %v", err)
				w.WriteHeader(http.StatusNotFound)
			} else {
				// propagate the tile server's status code as our health status
				w.WriteHeader(resp.StatusCode)
				resp.Body.Close()
			}
		})
	} else {
		// basic health check: always healthy while the process serves requests
		e.Router.Get("/health", func(w http.ResponseWriter, _ *http.Request) {
			SafeWrite(w.Write, []byte("OK"))
		})
	}
}
package engine
import (
"github.com/PDOK/gokoala/config"
"github.com/nicksnyder/go-i18n/v2/i18n"
"golang.org/x/text/language"
"gopkg.in/yaml.v3"
)
// newLocalizers creates an i18n localizer per available language, loading the
// corresponding message file from assets/i18n (panics when a file is missing).
func newLocalizers(availableLanguages []config.Language) map[language.Tag]i18n.Localizer {
	result := make(map[language.Tag]i18n.Localizer, len(availableLanguages))
	for _, lang := range availableLanguages {
		bundle := i18n.NewBundle(lang.Tag)
		bundle.RegisterUnmarshalFunc("yaml", yaml.Unmarshal)
		bundle.MustLoadMessageFile("assets/i18n/" + lang.String() + ".yaml")
		result[lang.Tag] = *i18n.NewLocalizer(bundle, lang.String())
	}
	return result
}
package engine
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"net/url"
"path/filepath"
"regexp"
"strings"
texttemplate "text/template"
gokoalaconfig "github.com/PDOK/gokoala/config"
orderedmap "github.com/wk8/go-ordered-map/v2"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/getkin/kin-openapi/openapi3"
"github.com/getkin/kin-openapi/openapi3filter"
"github.com/getkin/kin-openapi/routers"
"github.com/getkin/kin-openapi/routers/gorillamux"
)
const (
	// locations of the OpenAPI spec template fragments, merged in newOpenAPI
	specPath          = templatesDir + "openapi/"
	preamble          = specPath + "preamble.go.json"
	problems          = specPath + "problems.go.json"
	commonCollections = specPath + "common-collections.go.json"
	featuresSpec      = specPath + "features.go.json"
	tilesSpec         = specPath + "tiles.go.json"
	stylesSpec        = specPath + "styles.go.json"
	geoVolumesSpec    = specPath + "3dgeovolumes.go.json"
	commonSpec        = specPath + "common.go.json"

	// HTMLRegex loose match for an HTML tag, used to detect HTML response bodies
	HTMLRegex = `<[/]?([a-zA-Z]+).*?>`
)
// OpenAPI holds the merged OpenAPI spec for this server plus supporting state
// to validate requests/responses against that spec.
type OpenAPI struct {
// spec the parsed, merged OpenAPI document
spec *openapi3.T
// SpecJSON pretty-printed JSON rendering of the merged spec
SpecJSON []byte
config *gokoalaconfig.Config
// router matches incoming requests against spec operations (see getRequestValidationInput)
router routers.Router
// extraOpenAPIFiles user-provided spec files merged on top of the defaults
extraOpenAPIFiles []string
}
// Register body decoders once at package load, so HTML and JSON-family
// response bodies can be validated against the OpenAPI spec.
func init() {
	htmlPattern := regexp.MustCompile(HTMLRegex)
	htmlDecoder := func(body io.Reader, _ http.Header, _ *openapi3.SchemaRef,
		_ openapi3filter.EncodingFn) (any, error) {
		content, err := io.ReadAll(body)
		if err != nil {
			return nil, errors.New("failed to read response body")
		}
		// sanity check: require at least one HTML tag
		if !htmlPattern.Match(content) {
			return nil, errors.New("response doesn't contain HTML")
		}
		return string(content), nil
	}
	openapi3filter.RegisterBodyDecoder(MediaTypeHTML, htmlDecoder)

	jsonDecoder := func(body io.Reader, _ http.Header, _ *openapi3.SchemaRef,
		_ openapi3filter.EncodingFn) (any, error) {
		var decoded any
		decoder := json.NewDecoder(body)
		decoder.UseNumber() // keep numbers verbatim, avoids float64 round-tripping
		if err := decoder.Decode(&decoded); err != nil {
			return nil, errors.New("response doesn't contain valid JSON")
		}
		return decoded, nil
	}
	for _, mediaType := range MediaTypeJSONFamily {
		openapi3filter.RegisterBodyDecoder(mediaType, jsonDecoder)
	}
}
// newOpenAPI renders, merges and validates all applicable OpenAPI spec
// fragments — selected based on which OGC building blocks are enabled in the
// config — into a single spec.
func newOpenAPI(config *gokoalaconfig.Config, extraOpenAPIFiles []string, openAPIParams any) *OpenAPI {
	ctx := context.Background()

	// Order matters, see mergeSpecs for details: preamble first, then any
	// user-supplied spec(s) so they can override the defaults, then the defaults.
	openAPIFiles := make([]string, 0, len(extraOpenAPIFiles)+7)
	openAPIFiles = append(openAPIFiles, preamble)
	openAPIFiles = append(openAPIFiles, extraOpenAPIFiles...)
	openAPIFiles = append(openAPIFiles, commonSpec)
	if config.AllCollections() != nil {
		openAPIFiles = append(openAPIFiles, commonCollections)
	}
	if config.OgcAPI.Tiles != nil {
		openAPIFiles = append(openAPIFiles, tilesSpec)
	}
	if config.OgcAPI.Features != nil {
		openAPIFiles = append(openAPIFiles, featuresSpec)
	}
	if config.OgcAPI.Styles != nil {
		openAPIFiles = append(openAPIFiles, stylesSpec)
	}
	if config.OgcAPI.GeoVolumes != nil {
		openAPIFiles = append(openAPIFiles, geoVolumesSpec)
	}

	resultSpec, resultSpecJSON := mergeSpecs(ctx, config, openAPIFiles, openAPIParams)
	validateSpec(ctx, resultSpec, resultSpecJSON)
	// normalize server URLs so the validator can match incoming requests
	for _, server := range resultSpec.Servers {
		server.URL = normalizeBaseURL(server.URL)
	}
	return &OpenAPI{
		config:            config,
		spec:              resultSpec,
		SpecJSON:          util.PrettyPrintJSON(resultSpecJSON, ""),
		router:            newOpenAPIRouter(resultSpec),
		extraOpenAPIFiles: extraOpenAPIFiles,
	}
}
// mergeSpecs merges the given OpenAPI specs.
//
// Order matters! We start with the preamble, it is highest in rank and there's no way to override it.
// Then the files are merged according to their given order. Files that are merged first
// have a higher chance of getting their changes in the final spec than files that follow later.
//
// The OpenAPI spec optionally provided through the CLI should be the second (after preamble) item in the
// `files` slice since it allows the user to override other/default specs.
func mergeSpecs(ctx context.Context, config *gokoalaconfig.Config, files []string, params any) (*openapi3.T, []byte) {
// external refs are disallowed: each fragment must be self-contained
loader := &openapi3.Loader{Context: ctx, IsExternalRefsAllowed: false}
if len(files) < 1 {
log.Fatalf("files can't be empty, at least OGC Common is expected")
}
var resultSpecJSON []byte
var resultSpec *openapi3.T
for _, file := range files {
if file == "" {
// skip optional specs that weren't provided
continue
}
// render the Go template to concrete JSON before merging
specJSON := renderOpenAPITemplate(config, file, params)
var mergedJSON []byte
if resultSpecJSON == nil {
// first (non-empty) file: nothing to merge with yet
mergedJSON = specJSON
} else {
var err error
mergedJSON, err = util.MergeJSON(resultSpecJSON, specJSON, orderByOpenAPIConvention)
if err != nil {
log.Print(string(mergedJSON))
log.Fatalf("failed to merge OpenAPI specs: %v", err)
}
}
resultSpecJSON = mergedJSON
// (re)load after every merge, so we fail fast on an invalid intermediate spec
resultSpec = loadSpec(loader, mergedJSON)
}
return resultSpec, resultSpecJSON
}
// orderByOpenAPIConvention reorders the keys of the given (merged) spec
// according to the conventional OpenAPI document layout: first "openapi",
// "info", "servers", "paths" and "components", then all remaining keys in
// map-iteration (indeterminate) order.
func orderByOpenAPIConvention(output map[string]any) any {
	result := orderedmap.New[string, any]()
	// OpenAPI specs are commonly ordered according to the following sequence.
	desiredOrder := []string{"openapi", "info", "servers", "paths", "components"}
	for _, key := range desiredOrder {
		// direct lookup instead of scanning the whole map for every desired key
		if value, ok := output[key]; ok {
			result.Set(key, value)
		}
	}
	// add remaining keys; Set keeps the original position for keys already added
	for key, value := range output {
		result.Set(key, value)
	}
	return result
}
// loadSpec parses the given (merged) JSON into an OpenAPI document. Aborts the
// application when loading fails, dumping the offending spec to the log first.
func loadSpec(loader *openapi3.Loader, mergedJSON []byte, fileName ...string) *openapi3.T {
	spec, err := loader.LoadFromData(mergedJSON)
	if err == nil {
		return spec
	}
	log.Print(string(mergedJSON))
	log.Fatalf("failed to load merged OpenAPI spec %s, due to %v", fileName, err)
	return nil // unreachable, log.Fatalf exits
}
// validateSpec asserts that the final merged spec is valid OpenAPI, aborting
// the application otherwise.
func validateSpec(ctx context.Context, finalSpec *openapi3.T, finalSpecRaw []byte) {
	// Examples provided in the official OGC specs aren't valid, so skip example validation.
	if err := finalSpec.Validate(ctx, openapi3.DisableExamplesValidation()); err != nil {
		log.Print(string(finalSpecRaw))
		log.Fatalf("invalid OpenAPI spec: %v", err)
	}
}
// newOpenAPIRouter creates a router capable of matching incoming requests
// against the operations in the given spec. Aborts the application on failure.
func newOpenAPIRouter(doc *openapi3.T) routers.Router {
	result, err := gorillamux.NewRouter(doc)
	if err != nil {
		log.Fatalf("failed to setup OpenAPI router: %v", err)
	}
	return result
}
// renderOpenAPITemplate executes the given OpenAPI Go template with the config
// and params as input, returning the resulting JSON bytes. Aborts the
// application when rendering fails.
func renderOpenAPITemplate(config *gokoalaconfig.Config, fileName string, params any) []byte {
	cleanedFile := filepath.Clean(fileName)
	// problems.go.json is an "include" template, so always parse it alongside
	parsed := texttemplate.Must(texttemplate.New(filepath.Base(cleanedFile)).
		Funcs(globalTemplateFuncs).
		ParseFiles(problems, cleanedFile))
	var output bytes.Buffer
	if err := parsed.Execute(&output, &TemplateData{Config: config, Params: params}); err != nil {
		log.Fatalf("failed to render %s, error: %v", cleanedFile, err)
	}
	return output.Bytes()
}
// ValidateRequest validates the given request against the OpenAPI spec.
// Requests that can't be matched against the spec are silently skipped
// (the miss is already logged by getRequestValidationInput).
func (o *OpenAPI) ValidateRequest(r *http.Request) error {
	validationInput, _ := o.getRequestValidationInput(r)
	if validationInput == nil {
		return nil
	}
	err := openapi3filter.ValidateRequest(context.Background(), validationInput)
	if err == nil {
		return nil
	}
	// Don't fail on maximum constraints because OGC has decided these are soft limits, for instance
	// in features: "If the value of the limit parameter is larger than the maximum value, this
	// SHALL NOT result in an error (instead use the maximum as the parameter value)."
	var schemaErr *openapi3.SchemaError
	if errors.As(err, &schemaErr) && schemaErr.SchemaField == "maximum" {
		return nil
	}
	return fmt.Errorf("request doesn't conform to OpenAPI spec: %w", err)
}
// ValidateResponse validates the given response body against the OpenAPI spec.
// Responses to requests that can't be matched against the spec are silently
// skipped (the miss is already logged by getRequestValidationInput).
func (o *OpenAPI) ValidateResponse(contentType string, body []byte, r *http.Request) error {
	requestInput, _ := o.getRequestValidationInput(r)
	if requestInput == nil {
		return nil
	}
	if contentType == "" {
		contentType = MediaTypeJSON // default to JSON when not specified
	}
	responseInput := &openapi3filter.ResponseValidationInput{
		RequestValidationInput: requestInput,
		Status:                 200, // we only validate success responses
		Header:                 http.Header{HeaderContentType: []string{contentType}},
	}
	responseInput.SetBodyBytes(body)
	if err := openapi3filter.ValidateResponse(context.Background(), responseInput); err != nil {
		return fmt.Errorf("response doesn't conform to OpenAPI spec: %w", err)
	}
	return nil
}
// getRequestValidationInput matches the request against the OpenAPI spec and
// prepares the input required by the openapi3filter validators. Returns an
// error (after logging) when the route isn't part of the spec.
func (o *OpenAPI) getRequestValidationInput(r *http.Request) (*openapi3filter.RequestValidationInput, error) {
	route, pathParams, err := o.router.FindRoute(r)
	if err != nil {
		log.Printf("route not found in OpenAPI spec for url %s (host: %s), "+
			"skipping OpenAPI validation", r.URL, r.Host)
		return nil, err
	}
	options := &openapi3filter.Options{
		SkipSettingDefaults: true,
	}
	// report only the reason, keeps validation error messages compact
	options.WithCustomSchemaErrorFunc(func(schemaErr *openapi3.SchemaError) string {
		return schemaErr.Reason
	})
	return &openapi3filter.RequestValidationInput{
		Request:    r,
		PathParams: pathParams,
		Route:      route,
		Options:    options,
	}, nil
}
// normalizeBaseURL normalizes the given base URL so our OpenAPI validator is able to match
// requests against the OpenAPI spec. This involves:
//
//   - stripping the context root (path) from the base URL. If you use a context root we expect
//     you to have a proxy fronting GoKoala, therefore we also need to strip it from the base
//     URL used during OpenAPI validation
//
//   - replacing the HTTPS scheme with HTTP. Since GoKoala doesn't support HTTPS we always perform
//     OpenAPI validation against HTTP requests. Note: it's possible to offer GoKoala over HTTPS, but you'll
//     need to take care of that in your proxy server (or loadbalancer/service mesh/etc) fronting GoKoala.
//
// Unparsable base URLs are returned unchanged (previously this would panic on a nil url.URL).
func normalizeBaseURL(baseURL string) string {
	serverURL, err := url.Parse(baseURL)
	if err != nil || serverURL.Scheme == "" {
		return baseURL
	}
	// Replace the scheme prefix and strip the path suffix. Note: the previous
	// implementation replaced the FIRST occurrence of the scheme/path substring
	// anywhere in the string, which could corrupt the host (e.g. a path "/e"
	// matches inside "//example.com", and a path "/" matches inside "://").
	result := "http" + strings.TrimPrefix(baseURL, serverURL.Scheme)
	if serverURL.Path != "" {
		result = strings.TrimSuffix(result, serverURL.Path)
	}
	return result
}
package engine
import (
"log"
"net/http"
"time"
"schneider.vip/problem"
)
const (
// timestampKey custom key under which the problem's timestamp is added to the response
timestampKey = "timeStamp"
// default problem details, used when the caller doesn't supply any
defaultMessageServerErr = "An unexpected error has occurred, try again or contact support if the problem persists"
defaultMessageBadGateway = "Failed to proxy request, try again or contact support if the problem persists"
)
// ProblemKind identifies the kind of RFC 7807 problem, by HTTP status code.
type ProblemKind int

// Now returns the current time; declared as a variable to allow mocking in tests.
var Now = time.Now // allow mocking

// The following problems should be added to openapi/problems.go.json.
var (
ProblemBadRequest = ProblemKind(http.StatusBadRequest)
ProblemNotFound = ProblemKind(http.StatusNotFound)
ProblemNotAcceptable = ProblemKind(http.StatusNotAcceptable)
ProblemServerError = ProblemKind(http.StatusInternalServerError)
ProblemBadGateway = ProblemKind(http.StatusBadGateway)
)
// RenderProblem writes RFC 7807 (https://tools.ietf.org/html/rfc7807) problem to the client.
// Only the listed problem kinds are supported since they should be advertised in the OpenAPI spec.
// Optionally, a caller may add details (single string) about the problem. Warning: Be sure to not
// include sensitive information in the details string!
func RenderProblem(kind ProblemKind, w http.ResponseWriter, details ...string) {
	result := problem.Of(int(kind))
	switch {
	case len(details) > 0:
		// caller-supplied details take precedence over default messages
		result = result.Append(problem.Detail(details[0]))
	case kind == ProblemServerError:
		result = result.Append(problem.Detail(defaultMessageServerErr))
	case kind == ProblemBadGateway:
		result = result.Append(problem.Detail(defaultMessageBadGateway))
	}
	result = result.Append(problem.Custom(timestampKey, Now().UTC().Format(time.RFC3339)))
	if _, err := result.WriteTo(w); err != nil {
		log.Printf("failed to write response: %v", err)
	}
}
// RenderProblemAndLog writes RFC 7807 (https://tools.ietf.org/html/rfc7807) problem to client + logs message to stdout.
func RenderProblemAndLog(kind ProblemKind, w http.ResponseWriter, err error, details ...string) {
	log.Print(err.Error())
	RenderProblem(kind, w, details...)
}
package engine
import (
"log"
"net/http"
"net/url"
"github.com/go-chi/chi/v5"
)
// Resources endpoint to serve static assets, either from local storage or through reverse proxy.
// Does nothing when no resources are configured.
func newResourcesEndpoint(e *Engine) {
	res := e.Config.Resources
	if res == nil {
		return
	}
	var resourcesHandler http.Handler
	if res.Directory != nil && *res.Directory != "" {
		// serve from local storage
		resourcesPath := *res.Directory
		resourcesHandler = http.StripPrefix("/resources", http.FileServer(http.Dir(resourcesPath)))
	} else if res.URL != nil && res.URL.String() != "" {
		// serve through reverse proxy
		resourcesHandler = proxy(e.ReverseProxy, res.URL.String())
	}
	// guard against registering a nil handler, which would panic on the first
	// request when neither a directory nor a URL is configured
	if resourcesHandler == nil {
		log.Println("resources configured without directory or URL, skipping /resources endpoint")
		return
	}
	e.Router.Handle("/resources/*", resourcesHandler)
}
// revProxy abstraction of Engine.ReverseProxy, allows swapping it out in tests.
type revProxy func(w http.ResponseWriter, r *http.Request, target *url.URL, prefer204 bool, overwrite string)

// proxy creates a handler that forwards resource requests to the configured remote URL.
func proxy(reverseProxy revProxy, resourcesURL string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		subPath, _ := url.JoinPath("/", chi.URLParam(r, "*"))
		targetURL, err := url.ParseRequestURI(resourcesURL + subPath)
		if err != nil {
			log.Printf("invalid target url, can't proxy resources: %v", err)
			RenderProblem(ProblemServerError, w)
			return
		}
		reverseProxy(w, r, targetURL, false, "")
	}
}
package engine
import (
"net/http"
"runtime/debug"
"time"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"github.com/go-chi/cors"
)
// newRouter creates the Chi router with all generic middleware applied.
// Note: middleware order is significant, don't reorder without care.
func newRouter(version string, enableTrailingSlash bool, enableCORS bool) *chi.Mux {
router := chi.NewRouter()
router.Use(middleware.RealIP) // should be first middleware
router.Use(middleware.Logger) // log to console
router.Use(problemRecoverer) // catch panics and turn into 500s
router.Use(middleware.GetHead) // support HEAD requests https://docs.ogc.org/is/17-069r4/17-069r4.html#_http_1_1
if enableTrailingSlash {
router.Use(middleware.StripSlashes)
}
if enableCORS {
// CORS setup for read-only OGC API access from browsers
router.Use(cors.Handler(cors.Options{
AllowedOrigins: []string{"*"},
AllowedMethods: []string{http.MethodGet, http.MethodHead, http.MethodOptions},
AllowedHeaders: []string{HeaderRequestedWith},
ExposedHeaders: []string{HeaderContentCrs, HeaderLink},
AllowCredentials: false,
// cache preflight responses for a day
MaxAge: int((time.Hour * 24).Seconds()),
}))
}
// some GIS clients don't send proper CORS preflight requests, still respond with OK for any OPTIONS request
router.Use(optionsFallback)
// add semver header, implements https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-57
router.Use(middleware.SetHeader(HeaderAPIVersion, version))
router.Use(middleware.Compress(5, CompressibleMediaTypes...)) // enable gzip responses
return router
}
func optionsFallback(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodOptions {
w.WriteHeader(http.StatusOK)
return
}
next.ServeHTTP(w, r)
})
}
// Custom middleware.Recoverer adapted from Chi (https://github.com/go-chi/chi/blob/master/middleware/recoverer.go)
// to return RFC-7807 Problem messages.
func problemRecoverer(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
// recover() must be called directly inside this deferred function to work
defer func() {
if rvr := recover(); rvr != nil {
if rvr == http.ErrAbortHandler { //nolint:errorlint // already so in Chi
// we don't recover http.ErrAbortHandler so the response
// to the client is aborted, this should not be logged
panic(rvr)
}
// log via the request's log entry when present, otherwise print a stack trace
logEntry := middleware.GetLogEntry(r)
if logEntry != nil {
logEntry.Panic(rvr, debug.Stack())
} else {
middleware.PrintPrettyStack(rvr)
}
// don't write a problem response on upgraded (e.g. websocket) connections
if r.Header.Get("Connection") != "Upgrade" {
RenderProblem(ProblemServerError, w)
}
}
}()
next.ServeHTTP(w, r)
})
}
package engine
import "net/http"
// newSitemap pre-renders and serves the sitemap.xml and robots.txt endpoints.
func newSitemap(e *Engine) {
	endpoints := map[string]string{
		"/sitemap.xml": "sitemap.go.xml",
		"/robots.txt":  "robots.go.txt",
	}
	for path, templateFile := range endpoints {
		// templateKey is declared per iteration, so the closure below captures its own copy
		templateKey := NewTemplateKey(templatesDir + templateFile)
		e.renderTemplates(path, nil, nil, false, templateKey)
		e.Router.Get(path, func(w http.ResponseWriter, r *http.Request) {
			e.Serve(w, r, ServeTemplate(templateKey), ServeValidation(false, false))
		})
	}
}
package engine
import (
"bytes"
"fmt"
htmltemplate "html/template"
"log"
"net/http"
"net/url"
"path/filepath"
"strings"
texttemplate "text/template"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/nicksnyder/go-i18n/v2/i18n"
"golang.org/x/text/language"
)
const (
// layoutFile the shared layout template wrapping every HTML page
layoutFile = "layout.go.html"
)
// TemplateKey unique key to register and lookup Go templates.
// All fields are comparable: TemplateKey is used as a map key.
type TemplateKey struct {
// Name of the template, the filename including extension
Name string
// Directory in which the template resides
Directory string
// Format the file format based on the filename extension, 'html' or 'json'
Format string
// Optional. Use with caution, overwrite the Media-Type associated with the Format of this template.
MediaTypeOverwrite string
// Language of the contents of the template
Language language.Tag
// Optional. Only required when you want to render the same template multiple times (with different content).
// By specifying an 'instance name' you can refer to a certain instance of a rendered template later on.
InstanceName string
}
// TemplateKeyOption implements the functional option pattern for TemplateKey.
type TemplateKeyOption func(*TemplateKey)

// TemplateData the data/variables passed as an argument into the template.
type TemplateData struct {
// Config set during startup based on the given config file
Config *config.Config
// Theme set during startup
Theme *config.Theme
// Params optional parameters not part of GoKoala's config file. You can use
// this to provide extra data to a template at rendering time.
Params any
// Breadcrumb path to the page, in key-value pairs of name->path
Breadcrumbs []Breadcrumb
// AvailableFormats returns the output formats available for the current page
AvailableFormats []OutputFormat
// Request URL; may be nil for pre-rendered templates (see QueryString)
url *url.URL
}
// QueryString returns ?foo=a&bar=b style query string of the current page,
// with the format parameter set/overridden to the given format (when non-empty).
func (td *TemplateData) QueryString(format string) string {
if td.url != nil {
q := td.url.Query()
if format != "" {
q.Set(FormatParam, format)
}
return "?" + q.Encode()
}
// no request URL known (pre-rendered template): emit only the format param
return fmt.Sprintf("?%s=%s", FormatParam, format)
}
// Breadcrumb a single entry in the breadcrumb navigation path of a page.
type Breadcrumb struct {
// Name display name of the page
Name string
// Path URL path of the page
Path string
}
// WithLanguage sets the language of a TemplateKey.
func WithLanguage(language language.Tag) TemplateKeyOption {
return func(tk *TemplateKey) {
tk.Language = language
}
}

// WithNegotiatedLanguage sets the language of a TemplateKey based on content-negotiation.
func (e *Engine) WithNegotiatedLanguage(w http.ResponseWriter, r *http.Request) TemplateKeyOption {
return WithLanguage(e.CN.NegotiateLanguage(w, r))
}

// WithInstanceName sets the instance name of a TemplateKey.
// Use when rendering the same template multiple times with different content.
func WithInstanceName(instanceName string) TemplateKeyOption {
return func(tk *TemplateKey) {
tk.InstanceName = instanceName
}
}

// WithMediaTypeOverwrite overwrites the mediatype of the Format in a TemplateKey.
// Use with caution, see TemplateKey.MediaTypeOverwrite.
func WithMediaTypeOverwrite(mediaType string) TemplateKeyOption {
return func(tk *TemplateKey) {
tk.MediaTypeOverwrite = mediaType
}
}
// NewTemplateKey builds a TemplateKey with the given path and options.
func NewTemplateKey(path string, opts ...TemplateKeyOption) TemplateKey {
	cleanPath := filepath.Clean(path)
	result := TemplateKey{
		Name:      filepath.Base(cleanPath),
		Directory: filepath.Dir(cleanPath),
		Format:    strings.TrimPrefix(filepath.Ext(path), "."),
		Language:  language.Dutch, // default language
	}
	for _, applyOption := range opts {
		applyOption(&result)
	}
	return result
}
// ExpandTemplateKey returns a copy of the given key with its language replaced.
func ExpandTemplateKey(key TemplateKey, language language.Tag) TemplateKey {
	result := key
	result.Language = language
	return result
}
// Templates registry of all Go templates, both parsed and (pre)rendered.
type Templates struct {
// ParsedTemplates templates loaded from disk and parsed to an in-memory Go representation.
ParsedTemplates map[TemplateKey]any
// RenderedTemplates templates parsed + rendered to their actual output format like JSON, HTML, etc.
// We prefer pre-rendered templates whenever possible. These are stored in this map.
RenderedTemplates map[TemplateKey][]byte
// Theme styling/branding applied to HTML templates
Theme *config.Theme
config *config.Config
// localizers one i18n Localizer per available language (see newLocalizers)
localizers map[language.Tag]i18n.Localizer
}
// newTemplates constructs the template registry for the given config and theme.
func newTemplates(config *config.Config, theme *config.Theme) *Templates {
	return &Templates{
		ParsedTemplates:   make(map[TemplateKey]any),
		RenderedTemplates: make(map[TemplateKey][]byte),
		config:            config,
		Theme:             theme,
		localizers:        newLocalizers(config.AvailableLanguages),
	}
}
// getParsedTemplate looks up a previously parsed template, or errors when absent.
func (t *Templates) getParsedTemplate(key TemplateKey) (any, error) {
	parsed, ok := t.ParsedTemplates[key]
	if !ok {
		return nil, fmt.Errorf("no parsed template with name %s", key.Name)
	}
	return parsed, nil
}
// getRenderedTemplate looks up a previously rendered template, or errors when absent.
func (t *Templates) getRenderedTemplate(key TemplateKey) ([]byte, error) {
	// note: local renamed to lowercase, Go locals shouldn't use exported-style names
	if rendered, ok := t.RenderedTemplates[key]; ok {
		return rendered, nil
	}
	return nil, fmt.Errorf("no rendered template with name %s", key.Name)
}
// parseAndSaveTemplate parses the given template for every available language
// and caches the parsed representation for later (on-demand) rendering.
func (t *Templates) parseAndSaveTemplate(key TemplateKey) {
	for lang := range t.localizers {
		langKey := ExpandTemplateKey(key, lang)
		var parsed any
		if key.Format == FormatHTML {
			_, parsed = t.parseHTMLTemplate(langKey, lang)
		} else {
			_, parsed = t.parseNonHTMLTemplate(langKey, lang)
		}
		t.ParsedTemplates[langKey] = parsed
	}
}
// renderAndSaveTemplate renders the given template to its final output
// (HTML, JSON, ...) for every available language and caches the result per language.
func (t *Templates) renderAndSaveTemplate(key TemplateKey, breadcrumbs []Breadcrumb, params any) {
	for lang := range t.localizers {
		var output []byte
		if key.Format == FormatHTML {
			file, parsed := t.parseHTMLTemplate(key, lang)
			output = t.renderHTMLTemplate(parsed, nil, params, breadcrumbs, file, OutputFormatDefault)
		} else {
			file, parsed := t.parseNonHTMLTemplate(key, lang)
			output = t.renderNonHTMLTemplate(parsed, params, key, file)
		}
		// store rendered output under a language-specific key
		langKey := key
		langKey.Language = lang
		t.RenderedTemplates[langKey] = output
	}
}
// parseHTMLTemplate parses the given HTML template (wrapped in the shared layout)
// with language-specific template functions. Returns the file path and parse result.
func (t *Templates) parseHTMLTemplate(key TemplateKey, lang language.Tag) (string, *htmltemplate.Template) {
	templateFile := filepath.Clean(filepath.Join(key.Directory, key.Name))
	parsed := htmltemplate.Must(htmltemplate.New(layoutFile).
		Funcs(t.createTemplateFuncs(lang)).
		ParseFiles(templatesDir+layoutFile, templateFile))
	return templateFile, parsed
}
// renderHTMLTemplate executes the parsed HTML template with all page data.
// Aborts the application when rendering fails (a broken template is a
// programming error, not a runtime condition).
func (t *Templates) renderHTMLTemplate(parsed *htmltemplate.Template, url *url.URL,
	params any, breadcrumbs []Breadcrumb, file string, availableFormats []OutputFormat) []byte {
	pageData := &TemplateData{
		Config:           t.config,
		Theme:            t.Theme,
		Params:           params,
		Breadcrumbs:      breadcrumbs,
		AvailableFormats: availableFormats,
		url:              url,
	}
	var output bytes.Buffer
	if err := parsed.Execute(&output, pageData); err != nil {
		log.Fatalf("failed to execute HTML template %s, error: %v", file, err)
	}
	return output.Bytes()
}
// parseNonHTMLTemplate parses a non-HTML (JSON, XML, ...) template with
// language-specific template functions. Returns the file path and parse result.
func (t *Templates) parseNonHTMLTemplate(key TemplateKey, lang language.Tag) (string, *texttemplate.Template) {
	templateFile := filepath.Clean(filepath.Join(key.Directory, key.Name))
	parsed := texttemplate.Must(texttemplate.New(filepath.Base(templateFile)).
		Funcs(t.createTemplateFuncs(lang)).
		Parse(util.ReadFile(templateFile)))
	return templateFile, parsed
}
// renderNonHTMLTemplate executes the parsed non-HTML template, pretty-printing
// JSON(-derived) output. Aborts the application when rendering fails.
func (t *Templates) renderNonHTMLTemplate(parsed *texttemplate.Template, params any, key TemplateKey, file string) []byte {
	var output bytes.Buffer
	err := parsed.Execute(&output, &TemplateData{
		Config: t.config,
		Params: params,
	})
	if err != nil {
		log.Fatalf("failed to execute template %s, error: %v", file, err)
	}
	result := output.Bytes()
	if strings.Contains(key.Format, FormatJSON) {
		// pretty print all JSON (or derivatives like TileJSON)
		result = util.PrettyPrintJSON(result, key.Name)
	}
	return result
}
// createTemplateFuncs returns the global template functions extended with an
// "i18n" function bound (just-in-time) to the given language.
func (t *Templates) createTemplateFuncs(lang language.Tag) map[string]any {
	localized := texttemplate.FuncMap{
		"i18n": func(messageID string) htmltemplate.HTML {
			localizer := t.localizers[lang]
			message := localizer.MustLocalize(&i18n.LocalizeConfig{MessageID: messageID})
			return htmltemplate.HTML(message) //nolint:gosec // since we trust our language files
		},
	}
	return combineFuncMaps(globalTemplateFuncs, localized)
}
package engine
import (
htmltemplate "html/template"
"log"
"regexp"
"strconv"
"strings"
texttemplate "text/template"
"time"
"github.com/docker/go-units"
sprig "github.com/go-task/slim-sprig"
gomarkdown "github.com/gomarkdown/markdown"
gomarkdownhtml "github.com/gomarkdown/markdown/html"
gomarkdownparser "github.com/gomarkdown/markdown/parser"
stripmd "github.com/writeas/go-strip-markdown/v2"
)
var (
// globalTemplateFuncs functions available in every template, populated once in init
globalTemplateFuncs texttemplate.FuncMap
// linkRegex matches a bare absolute HTTP(S) URL without any surrounding text
linkRegex = regexp.MustCompile(`^https?://\S+$`)
)
// Initialize functions to be used in html/json/etc templates.
func init() {
	// custom template functions (keep lowercase)
	customFuncs := texttemplate.FuncMap{
		"markdown":   markdown,
		"unmarkdown": unmarkdown,
		"truncate":   truncateText,
		"humansize":  humanSize,
		"bytessize":  bytesSize,
		"isdate":     isDate,
		"islink":     isLink,
	}
	// we also support https://github.com/go-task/slim-sprig functions
	globalTemplateFuncs = combineFuncMaps(customFuncs, sprig.FuncMap())
}
// combineFuncMaps merges the given FuncMaps into one new map; on duplicate
// keys the later map wins.
func combineFuncMaps(funcMaps ...map[string]any) map[string]any {
	combined := map[string]any{}
	for _, m := range funcMaps {
		for name, fn := range m {
			combined[name] = fn
		}
	}
	return combined
}
// markdown turn Markdown into HTML. A nil input yields an empty string.
func markdown(s *string) htmltemplate.HTML {
	if s == nil {
		return ""
	}
	// always normalize newlines, this library only supports Unix LF newlines
	normalized := gomarkdown.NormalizeNewlines([]byte(*s))
	// parse Markdown into an AST tree
	mdParser := gomarkdownparser.NewWithExtensions(gomarkdownparser.CommonExtensions)
	document := mdParser.Parse(normalized)
	// render to HTML, opening links in a new tab and skipping raw inline HTML
	renderer := gomarkdownhtml.NewRenderer(gomarkdownhtml.RendererOptions{
		Flags: gomarkdownhtml.CommonFlags | gomarkdownhtml.HrefTargetBlank | gomarkdownhtml.SkipHTML,
	})
	return htmltemplate.HTML(gomarkdown.Render(document, renderer)) //nolint:gosec
}
// unmarkdown remove Markdown, so we can use the given string in non-HTML (JSON) output.
// A nil input yields an empty string.
func unmarkdown(s *string) string {
	if s == nil {
		return ""
	}
	plain := stripmd.Strip(*s)
	// collapse line breaks into spaces, JSON output should be single-line
	return strings.ReplaceAll(plain, "\n", " ")
}
// truncateText truncate text to avoid overly long text on overview pages.
// Truncation happens at the last space or newline before the limit; when no
// space/newline exists within the limit the text is hard-cut at the limit.
// (Previously LastIndexAny could return -1 here, causing a slice-bounds panic
// for texts without any space/newline before the limit.)
func truncateText(s *string, limit int) *string {
	if s == nil {
		return s
	}
	if len(*s) <= limit {
		return s
	}
	// truncate at last space or newline before given character limit
	cutoff := strings.LastIndexAny((*s)[:limit], " \n")
	if cutoff < 0 {
		cutoff = limit // no natural break found, hard cut at the limit
	}
	truncated := (*s)[:cutoff] + "..."
	return &truncated
}
// humanSize converts size in bytes to a human-readable size.
// Accepts int64, float64 or a numeric string; anything else logs and yields "0".
func humanSize(a any) string {
	switch size := a.(type) {
	case int64:
		return units.HumanSize(float64(size))
	case float64:
		return units.HumanSize(size)
	case string:
		if parsed, err := strconv.ParseFloat(size, 64); err == nil {
			return units.HumanSize(parsed)
		}
	}
	log.Printf("cannot convert '%v' to float", a)
	return "0"
}
// bytesSize converts human-readable size to size in bytes (base-10, not base-2).
// Unparsable input logs and yields 0.
func bytesSize(s string) int64 {
	sizeInBytes, err := units.FromHumanSize(s)
	if err != nil {
		log.Printf("cannot convert '%s' to bytes", s)
		return 0
	}
	return sizeInBytes
}
// isDate true when given input is a date, false otherwise.
func isDate(v any) bool {
if _, ok := v.(time.Time); ok {
return true
}
return false
}
// isLink true when given input is an HTTP(s) URL (without any additional text), false otherwise.
func isLink(v any) bool {
	text, ok := v.(string)
	return ok && linkRegex.MatchString(text)
}
package engine
import (
htmltemplate "html/template"
"log"
"net/http"
"path/filepath"
"strings"
"github.com/PDOK/gokoala/config"
)
const (
// styleTemplate Go template producing the theme CSS (served at /css/theme.css)
styleTemplate = "theme.go.css"
)
// newThemeEndpoints registers the theme CSS endpoint plus one endpoint per
// logo/favicon asset, rewriting the theme's Logo paths to absolute paths.
// NOTE(review): assumes theme.Logo is non-nil — confirm config validation guarantees this.
func newThemeEndpoints(theme *config.Theme, e *Engine) {
newCSSEndpoint(e)
// Replace the theme Logo properties with the absolute paths for the template
theme.Logo = &config.ThemeLogo{
Header: newThemeAssetEndpoint(e, theme.Logo.Header),
Footer: newThemeAssetEndpoint(e, theme.Logo.Footer),
Opengraph: newThemeAssetEndpoint(e, theme.Logo.Opengraph),
Favicon: newThemeAssetEndpoint(e, theme.Logo.Favicon),
Favicon16: newThemeAssetEndpoint(e, theme.Logo.Favicon16),
Favicon32: newThemeAssetEndpoint(e, theme.Logo.Favicon32),
}
}
// newCSSEndpoint serves the theme CSS, rendered from the CSS template with
// variables from the theme config. Template parse errors still abort startup
// (via Must), but render errors no longer kill the process at request time.
func newCSSEndpoint(e *Engine) {
	templatePath := filepath.Join(templatesDir, styleTemplate)
	template := htmltemplate.Must(
		htmltemplate.New(styleTemplate).ParseFiles(templatePath),
	)
	data := &TemplateData{
		Theme: e.Templates.Theme,
	}
	// Parse CSS with variables from the config file
	e.Router.Get("/css/theme.css", func(w http.ResponseWriter, _ *http.Request) {
		w.Header().Set(HeaderContentType, "text/css")
		if err := template.Execute(w, data); err != nil {
			// previously log.Fatal: that crashed the WHOLE server on a single
			// failed request; log instead and let the (partial) response go out
			log.Printf("failed to render theme CSS: %v", err)
		}
	})
}
// newThemeAssetEndpoint registers a route serving the given theme asset file
// and returns the file path as an absolute path (used in templates).
// NOTE(review): the registered route is "/theme/<basename>" while the returned
// value is the absolute input path — confirm templates resolve assets correctly.
func newThemeAssetEndpoint(e *Engine, file string) string {
	e.Router.Get("/theme/"+filepath.Base(file), func(w http.ResponseWriter, r *http.Request) {
		http.ServeFile(w, r, file)
	})
	// Return the new (absolute) path
	if strings.HasPrefix(file, "/") {
		return filepath.Clean(file)
	}
	return "/" + filepath.Clean(file)
}
// Package types package contains generic types
package types
import (
"encoding/json"
"time"
)
//nolint:recvcheck // see MarshalJSON comment
type Date struct {
time time.Time
}
func NewDate(t time.Time) Date {
return Date{t}
}
// MarshalJSON turn Date into JSON
// Value instead of pointer receiver because only that way it can be used for both.
func (d Date) MarshalJSON() ([]byte, error) {
if d.time.IsZero() {
return json.Marshal(nil)
}
return json.Marshal(d.time.Format(time.DateOnly))
}
// UnmarshalJSON turn JSON into Date.
func (d *Date) UnmarshalJSON(text []byte) (err error) {
var value string
err = json.Unmarshal(text, &value)
if err != nil {
return err
}
if value == "" {
return nil
}
d.time, err = time.Parse(time.DateOnly, value)
return err
}
func (d Date) String() string {
if d.time.IsZero() {
return ""
}
return d.time.Format(time.DateOnly)
}
// Package types package contains generic types
package types
import (
	"fmt"
	"math"
	"time"
)
// IsDate return true when time.Time doesn't contain a time component, false otherwise.
func IsDate(t time.Time) bool {
return t.Hour() == 0 && t.Minute() == 0 && t.Second() == 0
}
// IsFloat return true when float has decimals, false otherwise.
// Uses math.Trunc instead of a float64->int64 round-trip: converting a float
// outside the int64 range to int64 is implementation-defined in Go, which gave
// wrong answers for very large whole numbers (e.g. 1e20).
func IsFloat(f float64) bool {
	return f != math.Trunc(f)
}
// ToInt64 converts an int, int32 or int64 to int64; any other type yields an error.
func ToInt64(v any) (int64, error) {
	switch value := v.(type) {
	case int:
		return int64(value), nil
	case int32:
		return int64(value), nil
	case int64:
		return value, nil
	}
	return 0, fmt.Errorf("unsupported type: %T", v)
}
package util
import (
"bytes"
"compress/gzip"
"errors"
"io"
"io/fs"
"log"
"os"
)
// ReadFile read a plain or gzipped file and return contents as string.
func ReadFile(filePath string) string {
gzipFile := filePath + ".gz"
var fileContents string
if _, err := os.Stat(gzipFile); !errors.Is(err, fs.ErrNotExist) {
fileContents, err = readGzipContents(gzipFile)
if err != nil {
log.Fatalf("unable to decompress gzip file %s", gzipFile)
}
} else {
fileContents, err = readPlainContents(filePath)
if err != nil {
log.Fatalf("unable to read file %s", filePath)
}
}
return fileContents
}
// decompress gzip files, return contents as string.
func readGzipContents(filePath string) (string, error) {
gzipFile, err := os.Open(filePath)
if err != nil {
return "", err
}
defer func(gzipFile *os.File) {
err := gzipFile.Close()
if err != nil {
log.Println("failed to close gzip file")
}
}(gzipFile)
gzipReader, err := gzip.NewReader(gzipFile)
if err != nil {
return "", err
}
defer func(gzipReader *gzip.Reader) {
err := gzipReader.Close()
if err != nil {
log.Println("failed to close gzip reader")
}
}(gzipReader)
var buffer bytes.Buffer
_, err = io.Copy(&buffer, gzipReader) //nolint:gosec
if err != nil {
return "", err
}
return buffer.String(), nil
}
// read file, return contents as string.
func readPlainContents(filePath string) (string, error) {
file, err := os.Open(filePath)
if err != nil {
return "", err
}
defer func(file *os.File) {
err := file.Close()
if err != nil {
log.Println("failed to close file")
}
}(file)
var buffer bytes.Buffer
_, err = io.Copy(&buffer, file)
if err != nil {
return "", err
}
return buffer.String(), nil
}
package util
import (
"bytes"
"encoding/json"
"log"
"dario.cat/mergo"
)
// PrettyPrintJSON indents the given JSON (one space per level). Aborts the
// application on invalid JSON; the name is only used in the error message.
func PrettyPrintJSON(content []byte, name string) []byte {
	var indented bytes.Buffer
	err := json.Indent(&indented, content, "", " ")
	if err != nil {
		log.Print(string(content))
		log.Fatalf("invalid json in %s: %v, see json output above", name, err)
	}
	return indented.Bytes()
}
// MergeJSON merges the two JSON byte slices. It returns an error if x1 or x2 cannot be JSON-unmarshalled,
// or the merged JSON is invalid. On conflicting keys x1 wins (mergo keeps existing values).
//
// Optionally, an orderBy function can be provided to alter the key order in the resulting JSON.
func MergeJSON(x1, x2 []byte, orderBy func(output map[string]any) any) ([]byte, error) {
	var base map[string]any
	if err := json.Unmarshal(x1, &base); err != nil {
		return nil, err
	}
	var overlay map[string]any
	if err := json.Unmarshal(x2, &overlay); err != nil {
		return nil, err
	}
	if err := mergo.Merge(&base, &overlay); err != nil {
		return nil, err
	}
	if orderBy == nil {
		return json.Marshal(base)
	}
	return json.Marshal(orderBy(base))
}
package util
// Keys returns the keys of the map m. The keys will be an indeterminate order.
func Keys[M ~map[K]V, K comparable, V any](input M) []K {
output := make([]K, 0, len(input))
for k := range input {
output = append(output, k)
}
return output
}
// Inverse switches the values to keys and the keys to values.
// When multiple keys share a value, one of them wins (map iteration order).
func Inverse(input map[string]string) map[string]string {
	inverted := make(map[string]string, len(input))
	for key, value := range input {
		inverted[value] = key
	}
	return inverted
}
// Cast turns a map[K]V to a map[K]any, so values will downcast to 'any' type.
func Cast[M ~map[K]V, K comparable, V any](input M) map[K]any {
	result := make(map[K]any, len(input))
	for key, value := range input {
		result[key] = value
	}
	return result
}
package core
import (
"net/http"
"github.com/PDOK/gokoala/internal/engine"
)
const (
	// Location of the Go templates backing the common core pages.
	templatesDir = "internal/ogc/common/core/templates/"

	// URL paths served by the common core building block.
	rootPath           = "/"
	apiPath            = "/api"
	alternativeAPIPath = "/openapi.json"
	conformancePath    = "/conformance"
)

// ExtraConformanceClasses flags optional conformance classes; passed as
// params when rendering the conformance templates.
type ExtraConformanceClasses struct {
	AttributesConformance bool
}

// CommonCore implements the OGC API Common core: landing page,
// OpenAPI specification and conformance declaration endpoints.
type CommonCore struct {
	engine *engine.Engine
}
// NewCommonCore pre-renders the landing page, OpenAPI and conformance templates
// (JSON and HTML variants) and registers the corresponding routes on the given
// engine's router. Unmatched paths fall through to a static file server.
func NewCommonCore(e *engine.Engine, extraConformanceClasses ExtraConformanceClasses) *CommonCore {
	conformanceBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "Conformance",
			Path: "conformance",
		},
	}
	apiBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "OpenAPI specification",
			Path: "api",
		},
	}
	// landing page in both JSON and HTML flavors
	e.RenderTemplates(rootPath,
		nil,
		engine.NewTemplateKey(templatesDir+"landing-page.go.json"),
		engine.NewTemplateKey(templatesDir+"landing-page.go.html"))
	e.RenderTemplates(rootPath,
		apiBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"api.go.html"))
	// conformance templates receive the extra conformance classes as params
	e.RenderTemplatesWithParams(conformancePath,
		extraConformanceClasses,
		conformanceBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"conformance.go.json"),
		engine.NewTemplateKey(templatesDir+"conformance.go.html"))
	core := &CommonCore{
		engine: e,
	}
	e.Router.Get(rootPath, core.LandingPage())
	e.Router.Get(apiPath, core.API())
	// implements https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-17
	e.Router.Get(alternativeAPIPath, func(w http.ResponseWriter, r *http.Request) { core.apiAsJSON(w, r) })
	e.Router.Get(conformancePath, core.Conformance())
	// catch-all: serve static assets for any path not matched above
	e.Router.Handle("/*", http.FileServer(http.Dir("assets")))
	return core
}
// LandingPage serves the landing page in the format negotiated with the client.
func (c *CommonCore) LandingPage() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := c.engine.CN.NegotiateFormat(r)
		language := c.engine.WithNegotiatedLanguage(w, r)
		templateKey := engine.NewTemplateKey(templatesDir+"landing-page.go."+format, language)
		c.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// Conformance serves the conformance declaration in the negotiated format.
func (c *CommonCore) Conformance() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := c.engine.CN.NegotiateFormat(r)
		language := c.engine.WithNegotiatedLanguage(w, r)
		templateKey := engine.NewTemplateKey(templatesDir+"conformance.go."+format, language)
		c.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// API serves the OpenAPI specification as HTML or JSON depending on content
// negotiation; any other negotiated format yields a "not found" problem response.
func (c *CommonCore) API() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		switch c.engine.CN.NegotiateFormat(r) {
		case engine.FormatHTML:
			c.apiAsHTML(w, r)
		case engine.FormatJSON:
			c.apiAsJSON(w, r)
		default:
			engine.RenderProblem(engine.ProblemNotFound, w)
		}
	}
}
// apiAsHTML serves the HTML rendering of the OpenAPI specification.
func (c *CommonCore) apiAsHTML(w http.ResponseWriter, r *http.Request) {
	language := c.engine.WithNegotiatedLanguage(w, r)
	templateKey := engine.NewTemplateKey(templatesDir+"api.go.html", language)
	c.engine.Serve(w, r, engine.ServeTemplate(templateKey))
}
// apiAsJSON serves the raw OpenAPI spec JSON with the OpenAPI media type.
func (c *CommonCore) apiAsJSON(w http.ResponseWriter, r *http.Request) {
	contentType := engine.ServeContentType(engine.MediaTypeOpenAPI)
	specOutput := engine.ServeOutput(c.engine.OpenAPI.SpecJSON)
	c.engine.Serve(w, r, contentType, specOutput)
}
package geospatial
import (
"github.com/PDOK/gokoala/internal/engine"
"github.com/twpayne/go-geom"
)
// CollectionType is the type of the data in a collection.
type CollectionType string

// Supported collection types.
const (
	Features   CollectionType = "features"   // Geospatial data, https://docs.ogc.org/is/12-128r19/12-128r19.html#features
	Attributes CollectionType = "attributes" // Non-geospatial data. Same as features but without geometry, https://docs.ogc.org/is/12-128r19/12-128r19.html#attributes
)
// ItemType indicator about the type of the items in a collection. The default value is 'feature'.
// Other OGC-approved item types are e.g. 'record' and 'movingfeature'. We (PDOK) introduce 'attribute' as well.
//
// See https://docs.ogc.org/DRAFTS/20-024.html#collection-item-type-section
func (ct CollectionType) ItemType() string {
	// only attribute collections deviate from the default item type
	if ct == Attributes {
		return "attribute"
	}
	return "feature"
}
// AvailableFormats returns the output formats available for the current page.
func (ct CollectionType) AvailableFormats() []engine.OutputFormat {
	// only feature collections offer the geospatial output formats
	if ct == Features {
		return []engine.OutputFormat{
			{Key: engine.FormatJSON, Name: "GeoJSON"},
			{Key: engine.FormatJSONFG, Name: "JSON-FG"},
		}
	}
	return engine.OutputFormatDefault
}
// IsSpatialRequestAllowed returns true if the collection supports spatial requests such as bbox or other spatial filters.
func (ct CollectionType) IsSpatialRequestAllowed(bbox *geom.Bounds) bool {
	// attribute collections have no geometry, so they only allow requests without a bbox
	if ct == Attributes {
		return bbox == nil
	}
	return true
}
// CollectionTypes one or more CollectionType.
type CollectionTypes struct {
	// collection ID -> type of data in that collection
	types map[string]CollectionType
}

// NewCollectionTypes creates a CollectionTypes lookup from the given mapping
// of collection ID to collection type.
func NewCollectionTypes(types map[string]CollectionType) CollectionTypes {
	return CollectionTypes{types}
}

// Get returns the CollectionType of the given collection
// (the zero value "" when the collection is unknown).
func (cts CollectionTypes) Get(collection string) CollectionType {
	return cts.types[collection]
}
// HasAttributes reports whether at least one collection holds attributes (non-geospatial data).
func (cts CollectionTypes) HasAttributes() bool {
	for _, collectionType := range cts.types {
		if collectionType == Attributes {
			return true
		}
	}
	return false
}
package geospatial
import (
"net/http"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/go-chi/chi/v5"
)
const (
	// CollectionsPath is the URL path under which all collections are served.
	CollectionsPath = "/collections"
	// Location of the Go templates for collection pages.
	templatesDir = "internal/ogc/common/geospatial/templates/"
)

// Collections serves the collections overview and per-collection metadata pages.
type Collections struct {
	engine *engine.Engine
}

// Wrapper around collection+type to make it easier to access in the "collection" template.
type collectionWithType struct {
	Collection config.GeoSpatialCollection
	Type       CollectionType
}
// NewCollections enables support for OGC APIs that organize data in the concept of collections.
// A collection, also known as a geospatial data resource, is a common way to organize data in various OGC APIs.
func NewCollections(e *engine.Engine, types CollectionTypes) *Collections {
	if e.Config.HasCollections() {
		collectionsBreadcrumbs := []engine.Breadcrumb{
			{
				Name: "Collections",
				Path: "collections",
			},
		}
		// render the collections overview page (JSON + HTML)
		e.RenderTemplatesWithParams(CollectionsPath,
			types,
			collectionsBreadcrumbs,
			engine.NewTemplateKey(templatesDir+"collections.go.json"),
			engine.NewTemplateKey(templatesDir+"collections.go.html"))
		// render a metadata page per unique collection
		for _, coll := range e.Config.AllCollections().Unique() {
			// prefer the configured title, fall back to the collection ID
			title := coll.ID
			if coll.Metadata != nil && coll.Metadata.Title != nil {
				title = *coll.Metadata.Title
			}
			// NOTE(review): appends to a slice sharing collectionsBreadcrumbs' backing array;
			// safe today because the literal above has cap 1, so append always reallocates — confirm if that literal grows
			collectionBreadcrumbs := collectionsBreadcrumbs
			collectionBreadcrumbs = append(collectionBreadcrumbs, []engine.Breadcrumb{
				{
					Name: title,
					Path: "collections/" + coll.ID,
				},
			}...)
			collWithType := collectionWithType{coll, types.Get(coll.ID)}
			// JSON variant has no breadcrumbs, HTML variant does
			e.RenderTemplatesWithParams(CollectionsPath+"/"+coll.ID, collWithType, nil,
				engine.NewTemplateKey(templatesDir+"collection.go.json", engine.WithInstanceName(coll.ID)))
			e.RenderTemplatesWithParams(CollectionsPath+"/"+coll.ID, collWithType, collectionBreadcrumbs,
				engine.NewTemplateKey(templatesDir+"collection.go.html", engine.WithInstanceName(coll.ID)))
		}
	}
	instance := &Collections{
		engine: e,
	}
	e.Router.Get(CollectionsPath, instance.Collections())
	e.Router.Get(CollectionsPath+"/{collectionId}", instance.Collection())
	return instance
}
// Collections returns list of collections.
func (c *Collections) Collections() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := c.engine.CN.NegotiateFormat(r)
		language := c.engine.WithNegotiatedLanguage(w, r)
		templateKey := engine.NewTemplateKey(templatesDir+"collections.go."+format, language)
		c.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// Collection provides METADATA about a specific collection. To get the CONTENTS of a collection each OGC API
// building block must provide a separate/specific endpoint.
//
// For example, in:
// - OGC API Features you would have: /collections/{collectionId}/items
// - OGC API Tiles could have: /collections/{collectionId}/tiles
// - OGC API Maps could have: /collections/{collectionId}/map
// - OGC API 3d GeoVolumes would have: /collections/{collectionId}/3dtiles.
func (c *Collections) Collection() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collectionID := chi.URLParam(r, "collectionId")
		format := c.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(
			templatesDir+"collection.go."+format,
			engine.WithInstanceName(collectionID),
			c.engine.WithNegotiatedLanguage(w, r),
		)
		c.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
package common
import (
"fmt"
"log"
"slices"
"strings"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
orderedmap "github.com/wk8/go-ordered-map/v2"
)
const (
	// EnvLogSQL environment variable to enable logging of SQL queries.
	EnvLogSQL = "LOG_SQL"
	// selectAll fallback select clause used when no table schema is available.
	selectAll = "*"
)

// DatasourceCommon shared data and logic between data sources.
type DatasourceCommon struct {
	TransformOnTheFly bool          // reported via SupportsOnTheFlyTransformation
	QueryTimeout      time.Duration // assumed: max duration of a single query — confirm with callers
	FidColumn         string        // name of the feature ID column
	ExternalFidColumn string        // name of the external feature ID column, empty when not configured
	MaxDecimals       int           // max number of decimals when encoding geometry coordinates
	ForceUTC          bool          // when true, timestamps are normalized to UTC

	// lookups keyed by collection ID
	TableByCollectionID           map[string]*Table
	PropertyFiltersByCollectionID map[string]datasources.PropertyFiltersWithAllowedValues
	PropertiesByCollectionID      map[string]*config.FeatureProperties
}

// Table metadata about a table containing features or attributes in a data source.
type Table struct {
	Name               string
	Type               geospatial.CollectionType
	GeometryColumnName string
	GeometryType       string
	Schema             *domain.Schema // required
}
// GetSchema returns the schema of the table backing the given collection.
func (dc *DatasourceCommon) GetSchema(collection string) (*domain.Schema, error) {
	table, lookupErr := dc.CollectionToTable(collection)
	if lookupErr != nil {
		return nil, lookupErr
	}
	return table.Schema, nil
}
// GetCollectionType returns the collection type (features/attributes) of the
// table backing the given collection.
func (dc *DatasourceCommon) GetCollectionType(collection string) (geospatial.CollectionType, error) {
	table, lookupErr := dc.CollectionToTable(collection)
	if lookupErr != nil {
		return "", lookupErr
	}
	return table.Type, nil
}
// GetPropertyFiltersWithAllowedValues returns the property filters (with their
// allowed values) configured for the given collection; zero value when absent.
func (dc *DatasourceCommon) GetPropertyFiltersWithAllowedValues(collection string) datasources.PropertyFiltersWithAllowedValues {
	return dc.PropertyFiltersByCollectionID[collection]
}

// SupportsOnTheFlyTransformation reports whether this datasource can transform
// data on-the-fly.
func (dc *DatasourceCommon) SupportsOnTheFlyTransformation() bool {
	return dc.TransformOnTheFly
}
// CollectionToTable looks up the table backing the given collection; the error
// lists the collections that are available when the lookup fails.
func (dc *DatasourceCommon) CollectionToTable(collection string) (*Table, error) {
	if table, found := dc.TableByCollectionID[collection]; found {
		return table, nil
	}
	return nil, fmt.Errorf("can't query collection '%s' since it doesn't exist in "+
		"datasource, available in datasource: %v", collection, util.Keys(dc.TableByCollectionID))
}
// SelectGeom function signature to select geometry from a table
// while taking axis order into account. Implementations return the
// datasource-specific SQL fragment for the geometry column.
type SelectGeom func(order domain.AxisOrder, table *Table) string
// SelectColumns build select clause.
//
// Column order: configured properties first (when propConfig is given), then any
// remaining schema columns, then the feature ID column and — when includePrevNext
// is set — the prev/next feature ID columns. The geometry column is appended last
// via selectGeom. Falls back to "*" when neither propConfig nor a schema is available.
func (dc *DatasourceCommon) SelectColumns(table *Table, axisOrder domain.AxisOrder,
	selectGeom SelectGeom, propConfig *config.FeatureProperties, includePrevNext bool) string {
	columns := orderedmap.New[string, struct{}]() // map (actually a set) to prevent accidental duplicate columns
	switch {
	case propConfig != nil:
		// select columns in a specific order (we need an ordered map for this purpose!)
		for _, prop := range propConfig.Properties {
			if prop != table.GeometryColumnName {
				columns.Set(prop, struct{}{})
			}
		}
		if !propConfig.PropertiesExcludeUnknown {
			// select missing columns according to the table schema
			for _, field := range table.Schema.Fields {
				if field.Name != table.GeometryColumnName {
					_, ok := columns.Get(field.Name)
					if !ok {
						columns.Set(field.Name, struct{}{})
					}
				}
			}
		}
	case table.Schema != nil:
		// select all columns according to the table schema
		for _, field := range table.Schema.Fields {
			if field.Name != table.GeometryColumnName {
				columns.Set(field.Name, struct{}{})
			}
		}
	default:
		log.Println("Warning: table doesn't have a schema. Can't select columns by name, selecting all")
		return selectAll
	}
	// always select the feature ID, plus prev/next feature IDs when paging
	columns.Set(dc.FidColumn, struct{}{})
	if includePrevNext {
		columns.Set(domain.PrevFid, struct{}{})
		columns.Set(domain.NextFid, struct{}{})
	}
	result := ColumnsToSQL(slices.Collect(columns.KeysFromOldest()))
	result += selectGeom(axisOrder, table)
	return result
}
// PropertyFiltersToSQL turns property filters into SQL predicates ("and ..." clauses)
// using named bind parameters pf1, pf2, ... prefixed with the given symbol.
// Returns the SQL fragment and the parameter values keyed by parameter name.
func PropertyFiltersToSQL(pf map[string]string, symbol string) (sql string, namedParams map[string]any) {
	namedParams = make(map[string]any)
	i := 0
	for name, value := range pf {
		i++
		param := fmt.Sprintf("pf%d", i)
		// column name in double quotes in case it is a reserved keyword
		// also: we don't currently support LIKE since wildcard searches don't use the index
		sql += fmt.Sprintf(" and \"%s\" = %s%s", name, symbol, param)
		namedParams[param] = value
	}
	return sql, namedParams
}
// TemporalCriteriaToSQL turns temporal criteria into an SQL predicate with a
// named "referenceDate" bind parameter (prefixed with the given symbol).
// Returns empty SQL when no reference date is set.
func TemporalCriteriaToSQL(temporalCriteria datasources.TemporalCriteria, symbol string) (sql string, namedParams map[string]any) {
	namedParams = make(map[string]any)
	if temporalCriteria.ReferenceDate.IsZero() {
		return sql, namedParams
	}
	namedParams["referenceDate"] = temporalCriteria.ReferenceDate
	// match records whose validity interval contains the reference date
	// (an open-ended interval, i.e. null end date, also matches)
	sql = fmt.Sprintf(" and \"%[1]s\" <= %[3]sreferenceDate and (\"%[2]s\" >= %[3]sreferenceDate or \"%[2]s\" is null)",
		temporalCriteria.StartDateProperty, temporalCriteria.EndDateProperty, symbol)
	return sql, namedParams
}
// ColumnsToSQL joins the given column names into a double-quoted,
// comma-separated SQL column list.
func ColumnsToSQL(columns []string) string {
	return `"` + strings.Join(columns, `", "`) + `"`
}
// ValidateUniqueness logs a warning when multiple collections share the same
// backing table name.
func ValidateUniqueness(result map[string]*Table) {
	distinctTables := make(map[string]struct{}, len(result))
	for _, table := range result {
		distinctTables[table.Name] = struct{}{}
	}
	if len(distinctTables) != len(result) {
		log.Printf("Warning: found %d unique table names for %d collections, "+
			"usually each collection is backed by its own unique table\n", len(distinctTables), len(result))
	}
}
package common
import (
"context"
"fmt"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine/types"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/google/uuid"
"github.com/twpayne/go-geom"
)
// MapRelation function signature to map feature relations. Implementations
// return empty newColumnName when the column should not be mapped.
type MapRelation func(columnName string, columnValue any, externalFidColumn string) (newColumnName, newColumnNameWithoutProfile string, newColumnValue any)

// MapGeom function signature to map datasource-specific geometry
// (in GeoPackage, PostGIS, WKB, etc. format) to general-purpose geometry.
type MapGeom func(columnValue any) (geom.T, error)
// MapRowsToFeatureIDs datasource agnostic mapper from SQL rows set feature IDs, including prev/next feature ID.
//
// Expects each row to hold exactly 3 columns: feature ID, previous feature ID,
// next feature ID. Only the first row's prev/next IDs are captured.
//
//nolint:nakedret
func MapRowsToFeatureIDs(ctx context.Context, rows DatasourceRows) (featureIDs []int64, prevNextID *domain.PrevNextFID, err error) {
	firstRow := true
	for rows.Next() {
		var values []any
		if values, err = rows.SliceScan(); err != nil {
			return nil, nil, err
		}
		if len(values) != 3 {
			return nil, nil, fmt.Errorf("expected 3 columns containing the feature id, "+
				"the previous feature id and the next feature id. Got: %v", values)
		}
		// NOTE(review): unchecked assertion — panics if the driver yields a non-int64 ID; confirm drivers always return int64
		featureID := values[0].(int64)
		featureIDs = append(featureIDs, featureID)
		if firstRow {
			// prev/next IDs may be NULL (first/last page), default to 0 in that case
			prev := int64(0)
			if values[1] != nil {
				prev = values[1].(int64)
			}
			next := int64(0)
			if values[2] != nil {
				next = values[2].(int64)
			}
			prevNextID = &domain.PrevNextFID{Prev: prev, Next: next}
			firstRow = false
		}
	}
	// surface context cancellation/timeout over a partially read result
	if ctx.Err() != nil {
		err = ctx.Err()
	}
	return
}
// FormatOpts options that influence how feature values are formatted during mapping.
type FormatOpts struct {
	MaxDecimals int  // max number of decimals when encoding geometry coordinates
	ForceUTC    bool // when true, timestamps are converted to UTC
}
// MapRowsToFeatures datasource agnostic mapper from SQL rows/result set to Features domain model.
//
// Only the first row carries the prev/next feature IDs; these are returned
// alongside the mapped features.
func MapRowsToFeatures(ctx context.Context, rows DatasourceRows,
	fidColumn string, externalFidColumn string, geomColumn string,
	propConfig *config.FeatureProperties, schema *domain.Schema,
	mapGeom MapGeom, mapRel MapRelation, formatOpts FormatOpts) ([]*domain.Feature, *domain.PrevNextFID, error) {
	result := make([]*domain.Feature, 0)
	columns, err := rows.Columns()
	if err != nil {
		return result, nil, err
	}
	// preserve configured property order only when explicitly requested
	propertiesOrder := propConfig != nil && propConfig.PropertiesInSpecificOrder
	firstRow := true
	var prevNextID *domain.PrevNextFID
	for rows.Next() {
		var values []any
		if values, err = rows.SliceScan(); err != nil {
			return result, nil, err
		}
		feature := &domain.Feature{Properties: domain.NewFeatureProperties(propertiesOrder)}
		np, err := mapColumnsToFeature(ctx, firstRow, feature, columns, values, fidColumn,
			externalFidColumn, geomColumn, schema, mapGeom, mapRel, formatOpts)
		if err != nil {
			return result, nil, err
		} else if firstRow {
			prevNextID = np
			firstRow = false
		}
		result = append(result, feature)
	}
	// ctx.Err() surfaces cancellation/timeout that may have truncated the result
	return result, prevNextID, ctx.Err()
}
// mapColumnsToFeature maps one row (columns + values) onto the given feature:
// feature ID, geometry, prev/next IDs (first row only) and remaining columns
// as properties. A second pass (mapExternalFid) handles external feature IDs.
//
//nolint:cyclop
func mapColumnsToFeature(ctx context.Context, firstRow bool, feature *domain.Feature, columns []string, values []any,
	fidColumn string, externalFidColumn string, geomColumn string, schema *domain.Schema, mapGeom MapGeom, mapRel MapRelation,
	formatOpts FormatOpts) (*domain.PrevNextFID, error) {
	prevNextID := domain.PrevNextFID{}
	for i, columnName := range columns {
		columnValue := values[i]
		switch columnName {
		case fidColumn:
			feature.ID = fmt.Sprint(columnValue)
		case geomColumn:
			// NULL geometry is kept as an explicit nil property
			if columnValue == nil {
				feature.Properties.Set(columnName, nil)
				continue
			}
			mappedGeom, err := mapGeom(columnValue)
			if err != nil {
				return nil, fmt.Errorf("failed to map/decode geometry from datasource, error: %w", err)
			}
			if err = feature.SetGeom(mappedGeom, formatOpts.MaxDecimals); err != nil {
				return nil, fmt.Errorf("failed to map/encode geometry to JSON, error: %w", err)
			}
		case domain.MinxField, domain.MinyField, domain.MaxxField, domain.MaxyField:
			// Skip these columns used for bounding box handling
			continue
		case domain.PrevFid:
			// Only the first row in the result set contains the previous feature id
			if firstRow && columnValue != nil {
				val, err := types.ToInt64(columnValue)
				if err != nil {
					return nil, err
				}
				prevNextID.Prev = val
			}
		case domain.NextFid:
			// Only the first row in the result set contains the next feature id
			if firstRow && columnValue != nil {
				val, err := types.ToInt64(columnValue)
				if err != nil {
					return nil, err
				}
				prevNextID.Next = val
			}
		default:
			if columnValue == nil {
				feature.Properties.Set(columnName, nil)
				continue
			}
			// map any non-nil, non-id, non-bounding box and non-geometry column as a feature property
			if err := mapColumnValueToFeature(columnValue, feature, columnName, formatOpts, schema); err != nil {
				return nil, err
			}
		}
	}
	mapExternalFid(columns, values, externalFidColumn, feature, mapRel)
	return &prevNextID, ctx.Err()
}
// mapColumnValueToFeature maps a single non-nil column value to a feature
// property, converting datasource-specific types to plain Go types.
// Returns an error for column types it doesn't recognize.
func mapColumnValueToFeature(columnValue any, feature *domain.Feature, columnName string,
	formatOpts FormatOpts, schema *domain.Schema) error {
	switch v := columnValue.(type) {
	case []byte:
		// string(v) copies the bytes, so no manual copy is needed to detach
		// the value from any driver-owned buffer
		feature.Properties.Set(columnName, string(v))
	case int32, int64:
		feature.Properties.Set(columnName, v)
	case float64:
		// Check to determine whether the content of the columnValue is truly a floating point value.
		// (Because of non-strict tables in SQLite)
		if !types.IsFloat(v) {
			feature.Properties.Set(columnName, int64(v))
		} else {
			feature.Properties.Set(columnName, v)
		}
	case time.Time:
		timeVal := v
		if formatOpts.ForceUTC {
			timeVal = timeVal.UTC()
		}
		// Map as date (= without time) only when defined as such in the schema AND when no time component is present
		if types.IsDate(timeVal) && schema.IsDate(columnName) {
			feature.Properties.Set(columnName, types.NewDate(timeVal))
		} else {
			feature.Properties.Set(columnName, timeVal)
		}
	case string:
		feature.Properties.Set(columnName, v)
	case bool:
		feature.Properties.Set(columnName, v)
	case uuid.UUID:
		feature.Properties.Set(columnName, v.String())
	default:
		return fmt.Errorf("column %s has unexpected type: %T for value %v", columnName, v, columnValue)
	}
	return nil
}
// mapExternalFid run a second pass over columns to map external feature ID, including relations to other features.
func mapExternalFid(columns []string, values []any, externalFidColumn string, feature *domain.Feature, mapRel MapRelation) {
	// hoisted out of the loop: without a configured external FID column there is nothing to map
	if externalFidColumn == "" {
		return
	}
	for i, columnName := range columns {
		columnValue := values[i]
		switch {
		case columnName == externalFidColumn:
			// When externalFidColumn is configured, overwrite feature ID and drop externalFidColumn.
			// Note: This happens in a second pass over the feature, since we want to overwrite the
			// feature ID irrespective of the order of columns in the table
			feature.ID = fmt.Sprint(columnValue)
			feature.Properties.Delete(columnName)
		case domain.IsFeatureRelation(columnName, externalFidColumn):
			// When externalFidColumn is part of the column name (e.g. 'foobar_external_fid') we treat
			// it as a relation to another feature.
			newColumnName, newColumnNameWithoutProfile, newColumnValue := mapRel(columnName, columnValue, externalFidColumn)
			if newColumnName != "" {
				feature.Properties.SetRelation(newColumnName, newColumnValue, newColumnNameWithoutProfile)
				feature.Properties.Delete(columnName)
			}
		}
	}
}
package geopackage
import (
"errors"
"fmt"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
d "github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/jmoiron/sqlx"
)
// assertIndexesExist asserts required indexes in the GeoPackage exists.
// For every collection backed by a table, it checks the indexes needed for
// spatial, temporal and property-filter queries.
func assertIndexesExist(
	configuredCollections config.GeoSpatialCollections,
	tableByCollectionID map[string]*common.Table,
	db *sqlx.DB, fidColumn string) error {
	// index needs to contain these columns in the given order
	defaultSpatialBtreeColumns := strings.Join([]string{fidColumn, d.MinxField, d.MaxxField, d.MinyField, d.MaxyField}, ",")
	for collID, table := range tableByCollectionID {
		if table == nil {
			return errors.New("given table can't be nil")
		}
		// find the matching configured collection; only collections with
		// features configured are checked
		for _, coll := range configuredCollections {
			if coll.ID == collID && coll.Features != nil {
				err := assertIndexesExistsForTable(defaultSpatialBtreeColumns, coll, table, db)
				if err != nil {
					return err
				}
				break
			}
		}
	}
	return nil
}
// assertIndexesExistsForTable asserts the temporal, spatial and property-filter
// indexes exist for a single collection's table.
func assertIndexesExistsForTable(defaultSpatialBtreeColumns string, collection config.GeoSpatialCollection, table *common.Table, db *sqlx.DB) error {
	spatialBtreeColumns := defaultSpatialBtreeColumns
	// assert temporal columns are indexed if configured
	if collection.Metadata != nil && collection.Metadata.TemporalProperties != nil {
		temporalBtreeColumns := strings.Join([]string{collection.Metadata.TemporalProperties.StartDate, collection.Metadata.TemporalProperties.EndDate}, ",")
		spatialBtreeColumns = strings.Join([]string{defaultSpatialBtreeColumns, collection.Metadata.TemporalProperties.StartDate, collection.Metadata.TemporalProperties.EndDate}, ",")
		if err := assertIndexExists(table.Name, db, temporalBtreeColumns, true, false); err != nil {
			return err
		}
	}
	// assert spatial b-tree index exists, this index substitutes the r-tree when querying large bounding boxes
	// if temporal columns are configured, they must be included in this index as well
	if table.Type == geospatial.Features {
		if err := assertIndexExists(table.Name, db, spatialBtreeColumns, true, false); err != nil {
			return err
		}
	}
	// assert the column for each property filter is indexed.
	// NOTE(review): IndexRequired is dereferenced without a nil check — presumably
	// the config layer defaults it; confirm a default is always set
	for _, propertyFilter := range collection.Features.Filters.Properties {
		if err := assertIndexExists(table.Name, db, propertyFilter.Name, false, true); err != nil && *propertyFilter.IndexRequired {
			return fmt.Errorf("%w. To disable this check set 'indexRequired' to 'false'", err)
		}
	}
	return nil
}
// assertIndexExists asserts an index exists on the given column(s) in the given
// table. Matching is exact, by prefix (prefixMatch) or by substring (containsMatch).
func assertIndexExists(tableName string, db *sqlx.DB, columns string, prefixMatch bool, containsMatch bool) error {
	// list every index of the table as a comma-separated string of its columns
	query := fmt.Sprintf(`
select group_concat(info.name) as indexed_columns
from pragma_index_list('%s') as list,
     pragma_index_info(list.name) as info
group by list.name`, tableName)
	rows, err := db.Queryx(query)
	if err != nil {
		// wrap the cause instead of dropping it
		return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
	}
	defer rows.Close() // registered right after the error check, not after the loop
	exists := false
	for rows.Next() {
		var indexedColumns string
		_ = rows.Scan(&indexedColumns) // best-effort: an unscannable row simply won't match
		switch {
		case columns == indexedColumns:
			exists = true
		case prefixMatch && strings.HasPrefix(indexedColumns, columns):
			exists = true
		case containsMatch && strings.Contains(indexedColumns, columns):
			exists = true
		}
	}
	// surface iteration errors that would otherwise masquerade as a missing index
	if err := rows.Err(); err != nil {
		return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
	}
	if !exists {
		return fmt.Errorf("missing required index: no index exists on column(s) '%s' in table '%s'",
			columns, tableName)
	}
	return nil
}
//go:build cgo && !darwin && !windows
package geopackage
import (
"fmt"
"log"
"github.com/PDOK/gokoala/config"
"github.com/google/uuid"
cloudsqlitevfs "github.com/PDOK/go-cloud-sqlite-vfs"
"github.com/jmoiron/sqlx"
)
// Cloud-Backed SQLite (CBS) GeoPackage in Azure or Google object storage
type cloudGeoPackage struct {
	db *sqlx.DB
	// VFS bridging SQLite to object storage; closed together with the db
	cloudVFS *cloudsqlitevfs.VFS
}
// newCloudBackedGeoPackage connects to a Cloud-Backed GeoPackage in object
// storage through a (uniquely named) SQLite VFS and opens it read-only.
// Any failure during setup aborts the process via log.Fatalf.
func newCloudBackedGeoPackage(gpkg *config.GeoPackageCloud) geoPackageBackend {
	cacheDir, err := gpkg.CacheDir()
	if err != nil {
		log.Fatalf("invalid cache dir, error: %v", err)
	}
	cacheSize, err := gpkg.Cache.MaxSizeAsBytes()
	if err != nil {
		log.Fatalf("invalid cache size provided, error: %v", err)
	}
	msg := fmt.Sprintf("Cloud-Backed GeoPackage '%s' in container '%s' on '%s'",
		gpkg.File, gpkg.Container, gpkg.Connection)
	log.Printf("connecting to %s\n", msg)
	vfsName := uuid.New().String() // important: each geopackage must use a unique VFS name
	vfs, err := cloudsqlitevfs.NewVFS(vfsName, gpkg.Connection, gpkg.User, gpkg.Auth,
		gpkg.Container, cacheDir, cacheSize, gpkg.LogHTTPRequests)
	if err != nil {
		log.Fatalf("failed to connect with %s, error: %v", msg, err)
	}
	log.Printf("connected to %s\n", msg)
	// open the geopackage read-only through the VFS registered above
	conn := fmt.Sprintf("/%s/%s?vfs=%s&mode=ro&_cache_size=%d", gpkg.Container, gpkg.File, vfsName, gpkg.InMemoryCacheSize)
	db, err := sqlx.Open(sqliteDriverName, conn)
	if err != nil {
		log.Fatalf("failed to open %s, error: %v", msg, err)
	}
	return &cloudGeoPackage{db, &vfs}
}
// getDB returns the database handle of this Cloud-Backed GeoPackage.
func (g *cloudGeoPackage) getDB() *sqlx.DB {
	return g.db
}
// close closes the database connection and the underlying cloud VFS,
// logging (but not propagating) any errors.
func (g *cloudGeoPackage) close() {
	if err := g.db.Close(); err != nil {
		log.Printf("failed to close GeoPackage: %v", err)
	}
	if g.cloudVFS == nil {
		return
	}
	if err := g.cloudVFS.Close(); err != nil {
		log.Printf("failed to close Cloud-Backed GeoPackage: %v", err)
	}
}
package geopackage
import (
"fmt"
"log"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/jmoiron/sqlx"
)
// GeoPackage on local disk.
type localGeoPackage struct {
	db *sqlx.DB
}
// newLocalGeoPackage opens a GeoPackage from local disk (downloading it first
// when a download source is configured). Failure to open aborts the process.
func newLocalGeoPackage(gpkg *config.GeoPackageLocal) geoPackageBackend {
	if gpkg.Download != nil {
		downloadGeoPackage(gpkg)
	}
	// immutable=1: the file won't change while we have it open
	conn := fmt.Sprintf("file:%s?immutable=1&_cache_size=%d", gpkg.File, gpkg.InMemoryCacheSize)
	db, err := sqlx.Open(sqliteDriverName, conn)
	if err != nil {
		log.Fatalf("failed to open GeoPackage: %v", err)
	}
	log.Printf("connected to local GeoPackage: %s", gpkg.File)
	return &localGeoPackage{db}
}
// downloadGeoPackage downloads the GeoPackage from the configured URL to local
// disk, with the configured parallelism/timeout/retry behavior. A failed
// download aborts the process.
func downloadGeoPackage(gpkg *config.GeoPackageLocal) {
	url := *gpkg.Download.From.URL
	log.Printf("start download of GeoPackage: %s", url.String())
	downloadTime, err := engine.Download(url, gpkg.File, gpkg.Download.Parallelism, gpkg.Download.TLSSkipVerify,
		gpkg.Download.Timeout.Duration, gpkg.Download.RetryDelay.Duration, gpkg.Download.RetryMaxDelay.Duration, gpkg.Download.MaxRetries)
	if err != nil {
		log.Fatalf("failed to download GeoPackage: %v", err)
	}
	log.Printf("successfully downloaded GeoPackage to %s in %s", gpkg.File, downloadTime.Round(time.Second))
}
// getDB returns the database handle of this local GeoPackage.
func (g *localGeoPackage) getDB() *sqlx.DB {
	return g.db
}
// close closes the database connection, logging (but not propagating) any error.
func (g *localGeoPackage) close() {
	if err := g.db.Close(); err != nil {
		log.Printf("failed to close GeoPackage: %v", err)
	}
}
// Package encoding based on https://github.com/go-spatial/geom/blob/master/encoding/gpkg/binary_header.go
//
// Copyright (c) 2017 go-spatial. Modified by PDOK.
// Licensed under the MIT license. See https://github.com/go-spatial/geom/blob/master/LICENSE for details.
package encoding
import (
"encoding/binary"
"errors"
"fmt"
"math"
"github.com/twpayne/go-geom"
"github.com/twpayne/go-geom/encoding/wkb"
"github.com/twpayne/go-geom/encoding/wkbcommon"
)
// EnvelopeType describes which (if any) envelope/bounding box is encoded in
// a GeoPackage binary header.
type EnvelopeType uint8

// Magic is the magic number encode in the header. It should be 0x4750.
var Magic = [2]byte{0x47, 0x50}

// Decipher empty points with NaN as coordinates, in line with Requirement 152 of the spec (http://www.geopackage.org/spec/).
var gpkgNaNHandling = wkbcommon.WKBOptionEmptyPointHandling(wkbcommon.EmptyPointHandlingNaN)

// Envelope types as encoded in the header flags (values 0-4; >= 5 is invalid).
const (
	EnvelopeTypeNone    = EnvelopeType(0)
	EnvelopeTypeXY      = EnvelopeType(1)
	EnvelopeTypeXYZ     = EnvelopeType(2)
	EnvelopeTypeXYM     = EnvelopeType(3)
	EnvelopeTypeXYZM    = EnvelopeType(4)
	EnvelopeTypeInvalid = EnvelopeType(5)
)
// NumberOfElements that the particular Envelope Type will have.
// Returns -1 for invalid envelope types.
func (et EnvelopeType) NumberOfElements() int {
	switch et { //nolint:exhaustive
	case EnvelopeTypeNone:
		return 0
	case EnvelopeTypeXY:
		return 4
	case EnvelopeTypeXYZ, EnvelopeTypeXYM:
		// both add a single extra dimension (Z or M) on top of X/Y
		return 6
	case EnvelopeTypeXYZM:
		return 8
	default:
		return -1
	}
}
// String returns the human-readable name of the envelope type
// ("NONE", "XY", "XYZ", "XYM", "XYZM" or "INVALID").
func (et EnvelopeType) String() string {
	// plain literals instead of slicing a packed "NONEXYZMXYMINVALID" string
	switch et { //nolint:exhaustive
	case EnvelopeTypeNone:
		return "NONE"
	case EnvelopeTypeXY:
		return "XY"
	case EnvelopeTypeXYZ:
		return "XYZ"
	case EnvelopeTypeXYM:
		return "XYM"
	case EnvelopeTypeXYZM:
		return "XYZM"
	default:
		return "INVALID"
	}
}
// HEADER FLAG LAYOUT
//
// 7-6-5-4-3-2-1-0
// R-R-X-Y-E-E-E-B
//
// R Reserved for future use. (should be set to 0)
// X GeoPackageBinary type // Normal or extended
// Y empty geometry
// E Envelope type
// B ByteOrder
// http://www.geopackage.org/spec/#flags_layout
const (
	maskByteOrder        = 1 << 0
	maskEnvelopeType     = 1<<3 | 1<<2 | 1<<1
	maskEmptyGeometry    = 1 << 4
	maskGeoPackageBinary = 1 << 5
)

// headerFlags is the flags byte of a GeoPackage binary header (layout above).
type headerFlags byte

// String renders the flags as a hex byte, e.g. "0x01".
func (hf headerFlags) String() string { return fmt.Sprintf("0x%02x", uint8(hf)) }
// Endian will return the encoded Endianess.
func (hf headerFlags) Endian() binary.ByteOrder {
	// bit 0 set means little-endian, clear means big-endian
	if hf&maskByteOrder != 0 {
		return binary.LittleEndian
	}
	return binary.BigEndian
}
// Envelope returns the type of the envelope.
func (hf headerFlags) Envelope() EnvelopeType {
	// envelope type lives in bits 1-3
	encoded := uint8((hf & maskEnvelopeType) >> 1)
	if encoded < uint8(EnvelopeTypeInvalid) {
		return EnvelopeType(encoded)
	}
	return EnvelopeTypeInvalid
}
// IsEmpty returns whether or not the geometry is empty (bit 4).
func (hf headerFlags) IsEmpty() bool { return ((hf & maskEmptyGeometry) >> 4) == 1 }

// IsStandard returns whether or not the geometry is a standard GeoPackage geometry type (bit 5 clear).
func (hf headerFlags) IsStandard() bool { return ((hf & maskGeoPackageBinary) >> 5) == 0 }
// BinaryHeader is the gpkg header that accompanies every feature.
type BinaryHeader struct {
	// See: http://www.geopackage.org/spec/
	magic    [2]byte // should be 0x47 0x50 (GP in ASCII)
	version  uint8   // should be 0
	flags    headerFlags
	srsid    int32     // spatial reference system id
	envelope []float64 // bounding box values, length depends on the envelope type
}
// decodeBinaryHeader decodes the data into the BinaryHeader.
// Layout: magic (2 bytes), version (1), flags (1), srs id (4), then the
// optional envelope (8 bytes per float64, count determined by the flags).
// Note: the magic-number check happens last and returns the decoded header
// alongside the error.
func decodeBinaryHeader(data []byte) (*BinaryHeader, error) {
	if len(data) < 8 {
		return nil, errors.New("not enough bytes")
	}
	var bh BinaryHeader
	bh.magic[0] = data[0]
	bh.magic[1] = data[1]
	bh.version = data[2]
	bh.flags = headerFlags(data[3])
	// byte order of srsid and envelope is itself encoded in the flags
	en := bh.flags.Endian()
	bh.srsid = int32(en.Uint32(data[4 : 4+4])) //nolint:gosec
	bytes := data[8:]
	et := bh.flags.Envelope()
	if et == EnvelopeTypeInvalid {
		return nil, errors.New("invalid envelope type")
	}
	if et == EnvelopeTypeNone {
		return &bh, nil
	}
	num := et.NumberOfElements()
	// there are 8 bytes per float64 value and we need num of them.
	if len(bytes) < (num * 8) {
		return nil, errors.New("not enough bytes")
	}
	bh.envelope = make([]float64, 0, num)
	for i := range num {
		bits := en.Uint64(bytes[i*8 : (i*8)+8])
		bh.envelope = append(bh.envelope, math.Float64frombits(bits))
	}
	if bh.magic[0] != Magic[0] || bh.magic[1] != Magic[1] {
		return &bh, errors.New("invalid magic number")
	}
	return &bh, nil
}
// Magic is the magic number encode in the header. It should be 0x4750.
// On a nil receiver the expected magic number is returned.
func (h *BinaryHeader) Magic() [2]byte {
	if h == nil {
		return Magic
	}
	return h.magic
}

// Version is the version number encode in the header (0 on a nil receiver).
func (h *BinaryHeader) Version() uint8 {
	if h == nil {
		return 0
	}
	return h.version
}

// EnvelopeType is the type of the envelope that is provided
// (EnvelopeTypeInvalid on a nil receiver).
func (h *BinaryHeader) EnvelopeType() EnvelopeType {
	if h == nil {
		return EnvelopeTypeInvalid
	}
	return h.flags.Envelope()
}

// SRSID is the SRS id of the feature (0 on a nil receiver).
func (h *BinaryHeader) SRSID() int32 {
	if h == nil {
		return 0
	}
	return h.srsid
}

// Envelope is the bounding box of the feature, used for searching. If the EnvelopeType is EnvelopeTypeNone, then there isn't an envelope encoded
// and a search without an index will need to be performed. This is to save space.
func (h *BinaryHeader) Envelope() []float64 {
	if h == nil {
		return nil
	}
	return h.envelope
}
// IsGeometryEmpty tells us if the geometry should be considered empty.
func (h *BinaryHeader) IsGeometryEmpty() bool {
	if h != nil {
		return h.flags.IsEmpty()
	}
	// a nil header counts as an empty geometry
	return true
}
// IsStandardGeometry reports whether the geometry is a core/extended geometry
// type, as opposed to a user defined geometry type.
func (h *BinaryHeader) IsStandardGeometry() bool {
	if h != nil {
		return h.flags.IsStandard()
	}
	return true
}
// Size returns the size of the header in bytes.
func (h *BinaryHeader) Size() int {
	if h == nil {
		return 0
	}
	// 8 fixed bytes (magic 2 + version 1 + flags 1 + srsid 4) plus the envelope
	return 8 + len(h.envelope)*8
}
// StandardBinary is the binary encoding plus some metadata
// should be stored as a blob.
type StandardBinary struct {
	Header   *BinaryHeader // decoded GeoPackage binary header
	SRSID    int32         // SRS id, copied from the header for convenience
	Geometry geom.T        // the WKB-decoded geometry
}
// DecodeGeometry decodes a GeoPackage geometry blob: first the binary header,
// then the WKB-encoded geometry that follows it.
func DecodeGeometry(bytes []byte) (*StandardBinary, error) {
	header, err := decodeBinaryHeader(bytes)
	if err != nil {
		return nil, err
	}
	// the WKB payload starts right after the (variable-size) header
	geometry, err := wkb.Unmarshal(bytes[header.Size():], gpkgNaNHandling)
	if err != nil {
		return nil, err
	}
	result := StandardBinary{
		Header:   header,
		SRSID:    header.SRSID(),
		Geometry: geometry,
	}
	return &result, nil
}
package geopackage
import (
"context"
"database/sql"
"errors"
"fmt"
"log"
"maps"
"os"
"path"
"sync"
"github.com/PDOK/gokoala/config"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/geopackage/encoding"
d "github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/google/uuid"
"github.com/jmoiron/sqlx"
"github.com/mattn/go-sqlite3"
"github.com/qustavo/sqlhooks/v2"
"github.com/twpayne/go-geom"
"github.com/twpayne/go-geom/encoding/wkt"
)
const (
	// name under which the sqlite driver (with extensions) is registered
	sqliteDriverName = "sqlite3_with_extensions"
	// prefix for named query parameters, see https://jmoiron.github.io/sqlx/#namedParams
	sqlxNamedParamSymbol = ":"
)
// once guards driver registration: sql.Register panics when called twice for the same name.
var once sync.Once

// Load sqlite (with extensions) once.
//
// Extensions are by default expected in /usr/lib. For spatialite you can
// alternatively/optionally set SPATIALITE_LIBRARY_PATH.
func loadDriver() {
	once.Do(func() {
		// mod_spatialite is resolved relative to SPATIALITE_LIBRARY_PATH (or /usr/lib when unset)
		spatialite := path.Join(os.Getenv("SPATIALITE_LIBRARY_PATH"), "mod_spatialite")
		driver := &sqlite3.SQLiteDriver{Extensions: []string{spatialite}}
		sql.Register(sqliteDriverName, sqlhooks.Wrap(driver, NewSQLLogFromEnv())) // adds support for SQL logging
	})
}
// geoPackageBackend abstraction over different kinds of GeoPackages, e.g. local file or cloud-backed sqlite.
type geoPackageBackend interface {
	// getDB returns the database handle to the GeoPackage.
	getDB() *sqlx.DB
	// close releases the underlying database resources.
	close()
}
// GeoPackage features datasource backed by a (local or cloud-backed) GeoPackage.
type GeoPackage struct {
	common.DatasourceCommon

	backend           geoPackageBackend       // local file or cloud-backed sqlite
	preparedStmtCache *PreparedStatementCache // cache of prepared statements, keyed by query

	// threshold (number of features matching the bbox) above which the rtree
	// index is skipped in favor of a btree scan, see makeBboxQuery
	maxBBoxSizeToUseWithRTree int
}
// NewGeoPackage creates a GeoPackage datasource for the given collections, backed by
// either a local file or a cloud-backed sqlite database depending on the config.
// Note: transformOnTheFly must be false, on-the-fly reprojection isn't supported.
func NewGeoPackage(collections config.GeoSpatialCollections, gpkgConfig config.GeoPackage,
	transformOnTheFly bool, maxDecimals int, forceUTC bool) (*GeoPackage, error) {

	// register the sqlite driver (safe to call multiple times)
	loadDriver()

	if transformOnTheFly {
		return nil, errors.New("on the fly reprojection/transformation is currently not supported for GeoPackages")
	}
	g := &GeoPackage{
		DatasourceCommon: common.DatasourceCommon{
			TransformOnTheFly:        transformOnTheFly,
			MaxDecimals:              maxDecimals,
			ForceUTC:                 forceUTC,
			PropertiesByCollectionID: collections.FeaturePropertiesByID(),
		},
		preparedStmtCache: NewCache(),
	}
	warmUp := false
	// pick backend-specific settings; local and cloud configs are mutually exclusive
	switch {
	case gpkgConfig.Local != nil:
		g.backend = newLocalGeoPackage(gpkgConfig.Local)
		g.FidColumn = gpkgConfig.Local.Fid
		g.ExternalFidColumn = gpkgConfig.Local.ExternalFid
		g.QueryTimeout = gpkgConfig.Local.QueryTimeout.Duration
		g.maxBBoxSizeToUseWithRTree = gpkgConfig.Local.MaxBBoxSizeToUseWithRTree
	case gpkgConfig.Cloud != nil:
		g.backend = newCloudBackedGeoPackage(gpkgConfig.Cloud)
		g.FidColumn = gpkgConfig.Cloud.Fid
		g.ExternalFidColumn = gpkgConfig.Cloud.ExternalFid
		g.QueryTimeout = gpkgConfig.Cloud.QueryTimeout.Duration
		g.maxBBoxSizeToUseWithRTree = gpkgConfig.Cloud.MaxBBoxSizeToUseWithRTree
		warmUp = gpkgConfig.Cloud.Cache.WarmUp
	default:
		return nil, errors.New("unknown GeoPackage config encountered")
	}
	// read table metadata + property filters; readMetadata terminates on failure
	g.TableByCollectionID, g.PropertyFiltersByCollectionID = readMetadata(
		g.backend.getDB(), collections, g.FidColumn, g.ExternalFidColumn)

	if err := assertIndexesExist(collections, g.TableByCollectionID, g.backend.getDB(), g.FidColumn); err != nil {
		return nil, err
	}
	if warmUp {
		// perform warmup async since it can take a long time
		// NOTE(review): log.Fatal inside this goroutine terminates the whole process on warmup failure
		go func() {
			if err := warmUpFeatureTables(collections, g.TableByCollectionID, g.backend.getDB()); err != nil {
				log.Fatal(err)
			}
		}()
	}
	return g, nil
}
// Close closes the prepared statement cache and the underlying GeoPackage backend.
func (g *GeoPackage) Close() {
	g.preparedStmtCache.Close()
	g.backend.close()
}
// GetFeatureIDs returns the IDs of features in the given collection matching the
// given criteria, plus prev/next cursors for pagination.
func (g *GeoPackage) GetFeatureIDs(ctx context.Context, collection string, criteria ds.FeaturesCriteria) ([]int64, d.Cursors, error) {
	table, err := g.CollectionToTable(collection)
	if err != nil {
		return nil, d.Cursors{}, err
	}
	queryCtx, cancel := context.WithTimeout(ctx, g.QueryTimeout) // https://go.dev/doc/database/cancel-operations
	defer cancel()

	propConfig := g.PropertiesByCollectionID[collection]
	// onlyFIDs=true: select just the feature IDs (axis order -1 is irrelevant since no geometry is selected)
	stmt, query, queryArgs, err := g.makeFeaturesQuery(queryCtx, propConfig, table, true, -1, criteria) //nolint:sqlclosecheck // prepared statement is cached, will be closed when evicted from cache
	if err != nil {
		return nil, d.Cursors{}, fmt.Errorf("failed to create query '%s' error: %w", query, err)
	}
	rows, err := stmt.QueryxContext(queryCtx, queryArgs)
	if err != nil {
		return nil, d.Cursors{}, fmt.Errorf("failed to execute query '%s' error: %w", query, err)
	}
	defer rows.Close()

	featureIDs, prevNext, err := common.MapRowsToFeatureIDs(queryCtx, FromSqlxRows(rows))
	if err != nil {
		return nil, d.Cursors{}, err
	}
	if prevNext == nil {
		// no prev/next information available, return an empty result
		return nil, d.Cursors{}, nil
	}
	return featureIDs, d.NewCursors(*prevNext, criteria.Cursor.FiltersChecksum), queryCtx.Err()
}
// GetFeaturesByID returns a feature collection containing the features with the
// given (internal) feature IDs from the given collection.
func (g *GeoPackage) GetFeaturesByID(ctx context.Context, collection string, featureIDs []int64,
	axisOrder d.AxisOrder, profile d.Profile) (*d.FeatureCollection, error) {

	table, err := g.CollectionToTable(collection)
	if err != nil {
		return nil, err
	}
	queryCtx, cancel := context.WithTimeout(ctx, g.QueryTimeout) // https://go.dev/doc/database/cancel-operations
	defer cancel()

	propConfig := g.PropertiesByCollectionID[collection]
	selectClause := g.SelectColumns(table, axisOrder, selectGpkgGeometry, propConfig, false)

	// expand the :fids named param into an IN-clause with one bind var per feature ID
	fids := map[string]any{"fids": featureIDs}
	query, queryArgs, err := sqlx.Named(fmt.Sprintf("select %s from %s where %s in (:fids)",
		selectClause, table.Name, g.FidColumn), fids)
	if err != nil {
		return nil, fmt.Errorf("failed to make features query, error: %w", err)
	}
	query, queryArgs, err = sqlx.In(query, queryArgs...)
	if err != nil {
		return nil, fmt.Errorf("failed to make IN-clause, error: %w", err)
	}
	rows, err := g.backend.getDB().QueryxContext(queryCtx, g.backend.getDB().Rebind(query), queryArgs...)
	if err != nil {
		return nil, fmt.Errorf("failed to execute query '%s' error: %w", query, err)
	}
	defer rows.Close()

	fc := d.FeatureCollection{}
	fc.Features, _, err = common.MapRowsToFeatures(queryCtx, FromSqlxRows(rows),
		g.FidColumn, g.ExternalFidColumn, table.GeometryColumnName,
		propConfig, table.Schema, mapGpkgGeometry, profile.MapRelationUsingProfile,
		common.FormatOpts{MaxDecimals: g.MaxDecimals, ForceUTC: g.ForceUTC})
	if err != nil {
		return nil, err
	}
	fc.NumberReturned = len(fc.Features)
	return &fc, queryCtx.Err()
}
// GetFeatures returns a page of features from the given collection matching the
// given criteria, plus prev/next cursors for pagination.
func (g *GeoPackage) GetFeatures(ctx context.Context, collection string, criteria ds.FeaturesCriteria,
	axisOrder d.AxisOrder, profile d.Profile) (*d.FeatureCollection, d.Cursors, error) {

	table, err := g.CollectionToTable(collection)
	if err != nil {
		return nil, d.Cursors{}, err
	}
	queryCtx, cancel := context.WithTimeout(ctx, g.QueryTimeout) // https://go.dev/doc/database/cancel-operations
	defer cancel()

	propConfig := g.PropertiesByCollectionID[collection]
	// onlyFIDs=false: select full feature rows (including geometry)
	stmt, query, queryArgs, err := g.makeFeaturesQuery(queryCtx, propConfig, table, false, axisOrder, criteria) //nolint:sqlclosecheck // prepared statement is cached, will be closed when evicted from cache
	if err != nil {
		return nil, d.Cursors{}, fmt.Errorf("failed to create query '%s' error: %w", query, err)
	}
	rows, err := stmt.QueryxContext(queryCtx, queryArgs)
	if err != nil {
		return nil, d.Cursors{}, fmt.Errorf("failed to execute query '%s' error: %w", query, err)
	}
	defer rows.Close()

	var prevNext *d.PrevNextFID
	fc := d.FeatureCollection{}
	fc.Features, prevNext, err = common.MapRowsToFeatures(queryCtx, FromSqlxRows(rows),
		g.FidColumn, g.ExternalFidColumn, table.GeometryColumnName,
		propConfig, table.Schema, mapGpkgGeometry, profile.MapRelationUsingProfile,
		common.FormatOpts{MaxDecimals: g.MaxDecimals, ForceUTC: g.ForceUTC})
	if err != nil {
		return nil, d.Cursors{}, err
	}
	if prevNext == nil {
		// no prev/next information available, return an empty result
		return nil, d.Cursors{}, nil
	}
	fc.NumberReturned = len(fc.Features)
	return &fc, d.NewCursors(*prevNext, criteria.Cursor.FiltersChecksum), queryCtx.Err()
}
// GetFeature returns a single feature from the given collection, or nil (without
// error) when not found. The feature ID must be an int64 (internal fid) or a
// uuid.UUID (external fid), matching the collection's configuration.
func (g *GeoPackage) GetFeature(ctx context.Context, collection string, featureID any,
	_ d.SRID, axisOrder d.AxisOrder, profile d.Profile) (*d.Feature, error) {

	table, err := g.CollectionToTable(collection)
	if err != nil {
		return nil, err
	}
	queryCtx, cancel := context.WithTimeout(ctx, g.QueryTimeout) // https://go.dev/doc/database/cancel-operations
	defer cancel()

	// pick the fid column matching the type of the given feature ID
	var fidColumn string
	switch featureID.(type) {
	case int64:
		if g.ExternalFidColumn != "" {
			// Features should be retrieved by UUID
			log.Println("feature requested by int while external fid column is defined")
			return nil, nil
		}
		fidColumn = g.FidColumn
	case uuid.UUID:
		if g.ExternalFidColumn == "" {
			// Features should be retrieved by int64
			log.Println("feature requested by UUID while external fid column is not defined")
			return nil, nil
		}
		fidColumn = g.ExternalFidColumn
	default:
		// previously an unsupported ID type fell through with an empty fid
		// column, producing a nonsensical query; bail out early instead
		log.Printf("feature requested by unsupported ID type %T", featureID)
		return nil, nil
	}

	propConfig := g.PropertiesByCollectionID[collection]
	selectClause := g.SelectColumns(table, axisOrder, selectGpkgGeometry, propConfig, false)

	query := fmt.Sprintf(`select %s from "%s" where "%s" = :fid limit 1`, selectClause, table.Name, fidColumn)
	rows, err := g.backend.getDB().NamedQueryContext(queryCtx, query, map[string]any{"fid": featureID})
	if err != nil {
		return nil, fmt.Errorf("query '%s' failed: %w", query, err)
	}
	defer rows.Close()

	features, _, err := common.MapRowsToFeatures(queryCtx, FromSqlxRows(rows),
		g.FidColumn, g.ExternalFidColumn, table.GeometryColumnName,
		propConfig, table.Schema, mapGpkgGeometry, profile.MapRelationUsingProfile,
		common.FormatOpts{MaxDecimals: g.MaxDecimals, ForceUTC: g.ForceUTC})
	if err != nil {
		return nil, err
	}
	if len(features) != 1 {
		// not found
		return nil, nil
	}
	return features[0], queryCtx.Err()
}
// Build specific features queries based on the given options.
// Make sure to use SQL bind variables and return named params: https://jmoiron.github.io/sqlx/#namedParams
//
// When onlyFIDs is true only the feature IDs (plus prev/next IDs) are selected,
// otherwise full feature rows are selected. The resulting query is served from
// (and added to) the prepared statement cache.
func (g *GeoPackage) makeFeaturesQuery(ctx context.Context, propConfig *config.FeatureProperties, table *common.Table,
	onlyFIDs bool, axisOrder d.AxisOrder, criteria ds.FeaturesCriteria) (stmt *sqlx.NamedStmt, query string, queryArgs map[string]any, err error) {

	var selectClause string
	if onlyFIDs {
		selectClause = common.ColumnsToSQL([]string{g.FidColumn, d.PrevFid, d.NextFid})
	} else {
		selectClause = g.SelectColumns(table, axisOrder, selectGpkgGeometry, propConfig, true)
	}
	// make query: a bbox-filtered query when a bbox is given, the default cursor query otherwise
	if criteria.Bbox != nil {
		query, queryArgs, err = g.makeBboxQuery(table, selectClause, criteria)
		if err != nil {
			return
		}
	} else {
		query, queryArgs = g.makeDefaultQuery(table, selectClause, criteria)
	}
	// lookup prepared statement for given query, or create new one
	stmt, err = g.preparedStmtCache.Lookup(ctx, g.backend.getDB(), query)
	return
}
// makeDefaultQuery builds the cursor-paginated query used when no bbox filter is given.
// The "next" CTE selects :limit+1 features at/after the cursor fid, "prev" selects up to
// :limit features before it; lag/lead window functions then derive the prev/next fids
// needed to build pagination cursors. Returns the query plus its named parameters.
func (g *GeoPackage) makeDefaultQuery(table *common.Table, selectClause string, criteria ds.FeaturesCriteria) (string, map[string]any) {
	pfClause, pfNamedParams := common.PropertyFiltersToSQL(criteria.PropertyFilters, sqlxNamedParamSymbol)
	temporalClause, temporalNamedParams := common.TemporalCriteriaToSQL(criteria.TemporalCriteria, sqlxNamedParamSymbol)

	defaultQuery := fmt.Sprintf(`
with
next as (select * from "%[1]s" where "%[2]s" >= :fid %[3]s %[4]s order by %[2]s asc limit :limit + 1),
prev as (select * from "%[1]s" where "%[2]s" < :fid %[3]s %[4]s order by %[2]s desc limit :limit),
nextprev as (select * from next union all select * from prev),
nextprevfeat as (select *, lag("%[2]s", :limit) over (order by %[2]s) as %[6]s, lead("%[2]s", :limit) over (order by "%[2]s") as %[7]s from nextprev)
select %[5]s from nextprevfeat where "%[2]s" >= :fid %[3]s %[4]s limit :limit
`, table.Name, g.FidColumn, temporalClause, pfClause, selectClause, d.PrevFid, d.NextFid) // don't add user input here, use named params for user input!

	namedParams := map[string]any{
		"fid":   criteria.Cursor.FID,
		"limit": criteria.Limit,
	}
	maps.Copy(namedParams, pfNamedParams)
	maps.Copy(namedParams, temporalNamedParams)
	return defaultQuery, namedParams
}
// makeBboxQuery builds the cursor-paginated query used when a bbox filter is given.
//
// Strategy: the "bbox_size" CTE counts (up to a threshold) how many features fall in
// the bbox using the rtree. When the count stays below maxBBoxSizeToUseWithRTree the
// "*_rtree" branches run (joining against the rtree virtual table); otherwise the
// "*_btree" branches run (scanning min/max columns, optionally forcing a btree index).
// Only one branch of each pair produces rows thanks to the iif(...) limits.
func (g *GeoPackage) makeBboxQuery(table *common.Table, selectClause string, criteria ds.FeaturesCriteria) (string, map[string]any, error) {
	btreeIndexHint := fmt.Sprintf("indexed by \"%s_spatial_idx\"", table.Name)

	pfClause, pfNamedParams := common.PropertyFiltersToSQL(criteria.PropertyFilters, sqlxNamedParamSymbol)
	if pfClause != "" {
		// don't force btree index when using property filter, let SQLite decide
		// whether to use the BTree index or the property filter index
		btreeIndexHint = ""
	}
	temporalClause, temporalNamedParams := common.TemporalCriteriaToSQL(criteria.TemporalCriteria, sqlxNamedParamSymbol)

	bboxQuery := fmt.Sprintf(`
with
given_bbox as (select geomfromtext(:bboxWkt, :bboxSrid)),
bbox_size as (select iif(count(id) < %[3]d, 'small', 'big') as bbox_size
from (select id from rtree_%[1]s_%[4]s
where minx <= :maxx and maxx >= :minx and miny <= :maxy and maxy >= :miny
limit %[3]d)),
next_bbox_rtree as (select f.*
from "%[1]s" f inner join rtree_%[1]s_%[4]s rf on f."%[2]s" = rf.id
where rf.minx <= :maxx and rf.maxx >= :minx and rf.miny <= :maxy and rf.maxy >= :miny
and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
and f."%[2]s" >= :fid %[6]s %[7]s
order by f."%[2]s" asc
limit (select iif(bbox_size == 'small', :limit + 1, 0) from bbox_size)),
next_bbox_btree as (select f.*
from "%[1]s" f %[8]s
where f.minx <= :maxx and f.maxx >= :minx and f.miny <= :maxy and f.maxy >= :miny
and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
and f."%[2]s" >= :fid %[6]s %[7]s
order by f."%[2]s" asc
limit (select iif(bbox_size == 'big', :limit + 1, 0) from bbox_size)),
next as (select * from next_bbox_rtree union all select * from next_bbox_btree),
prev_bbox_rtree as (select f.*
from "%[1]s" f inner join rtree_%[1]s_%[4]s rf on f."%[2]s" = rf.id
where rf.minx <= :maxx and rf.maxx >= :minx and rf.miny <= :maxy and rf.maxy >= :miny
and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
and f."%[2]s" < :fid %[6]s %[7]s
order by f."%[2]s" desc
limit (select iif(bbox_size == 'small', :limit, 0) from bbox_size)),
prev_bbox_btree as (select f.*
from "%[1]s" f %[8]s
where f.minx <= :maxx and f.maxx >= :minx and f.miny <= :maxy and f.maxy >= :miny
and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
and f."%[2]s" < :fid %[6]s %[7]s
order by f."%[2]s" desc
limit (select iif(bbox_size == 'big', :limit, 0) from bbox_size)),
prev as (select * from prev_bbox_rtree union all select * from prev_bbox_btree),
nextprev as (select * from next union all select * from prev),
nextprevfeat as (select *, lag("%[2]s", :limit) over (order by "%[2]s") as %[9]s, lead("%[2]s", :limit) over (order by "%[2]s") as %[10]s from nextprev)
select %[5]s from nextprevfeat where "%[2]s" >= :fid %[6]s %[7]s limit :limit
`, table.Name, g.FidColumn, g.maxBBoxSizeToUseWithRTree, table.GeometryColumnName,
		selectClause, temporalClause, pfClause, btreeIndexHint, d.PrevFid, d.NextFid) // don't add user input here, use named params for user input!

	// the bbox is passed as WKT + SRID so spatialite can evaluate st_intersects
	bboxAsWKT, err := wkt.Marshal(criteria.Bbox.Polygon())
	if err != nil {
		return "", nil, err
	}
	namedParams := map[string]any{
		"fid":       criteria.Cursor.FID,
		"limit":     criteria.Limit,
		"bboxWkt":   bboxAsWKT,
		d.MaxxField: criteria.Bbox.Max(0),
		d.MinxField: criteria.Bbox.Min(0),
		d.MaxyField: criteria.Bbox.Max(1),
		d.MinyField: criteria.Bbox.Min(1),
		"bboxSrid":  criteria.InputSRID}
	maps.Copy(namedParams, pfNamedParams)
	maps.Copy(namedParams, temporalNamedParams)
	return bboxQuery, namedParams, nil
}
// mapGpkgGeometry GeoPackage specific way to read geometries into a geom.T.
func mapGpkgGeometry(columnValue any) (geom.T, error) {
	rawGeom, ok := columnValue.([]byte)
	if !ok {
		return nil, errors.New("failed to cast GeoPackage geom to bytes")
	}
	decoded, err := encoding.DecodeGeometry(rawGeom)
	if err != nil {
		return nil, err
	}
	// missing or empty geometries map to nil
	if decoded == nil || decoded.Geometry.Empty() {
		return nil, nil
	}
	return decoded.Geometry, nil
}
// selectGpkgGeometry GeoPackage specific way to select geometry and take axis order into account.
func selectGpkgGeometry(axisOrder d.AxisOrder, table *common.Table) string {
	column := table.GeometryColumnName
	if column == "" {
		return ""
	}
	if axisOrder != d.AxisOrderYX {
		return fmt.Sprintf(", \"%s\"", column)
	}
	// GeoPackage geometries are stored in WKB format and WKB is always XY.
	// So swap coordinates when needed. This requires casting to a SpatiaLite geometry first, executing
	// the swap and then casting back to a GeoPackage geometry.
	return fmt.Sprintf(", asgpb(swapcoords(castautomagic(\"%[1]s\"))) as \"%[1]s\"", column)
}
package geopackage
import (
"errors"
"fmt"
"log"
"regexp"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
d "github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/jmoiron/sqlx"
)
// newlineRegex matches one or more CR/LF characters, used to reject multi-line values.
var newlineRegex = regexp.MustCompile(`[\r\n]+`)
// readMetadata reads metadata such as available feature tables, the schema of each table,
// available filters, etc. from the GeoPackage. Terminates on failure.
func readMetadata(db *sqlx.DB, collections config.GeoSpatialCollections, fidColumn, externalFidColumn string) (
	tableByCollectionID map[string]*common.Table,
	propertyFiltersByCollectionID map[string]ds.PropertyFiltersWithAllowedValues) {

	// log driver/spatialite versions, which also acts as a connectivity check
	metadata, err := readDriverMetadata(db)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(metadata)

	// map feature tables to collections
	tableByCollectionID, err = readGeoPackageTables(collections, db, fidColumn, externalFidColumn)
	if err != nil {
		log.Fatal(err)
	}
	// determine the property filters (with allowed values) per collection
	propertyFiltersByCollectionID, err = readPropertyFiltersWithAllowedValues(tableByCollectionID, collections, db)
	if err != nil {
		log.Fatal(err)
	}
	return
}
// Read metadata about gpkg and sqlite driver.
// Returns a human-readable summary of geopackage/sqlite/spatialite versions;
// failing to read the user_version pragma is tolerated (reported as "unknown").
func readDriverMetadata(db *sqlx.DB) (string, error) {
	type pragma struct {
		UserVersion string `db:"user_version"`
	}
	type metadata struct {
		Sqlite     string `db:"sqlite"`
		Spatialite string `db:"spatialite"`
		Arch       string `db:"arch"`
	}

	var m metadata
	err := db.QueryRowx(`
select sqlite_version() as sqlite,
spatialite_version() as spatialite,
spatialite_target_cpu() as arch`).StructScan(&m)
	if err != nil {
		return "", fmt.Errorf("failed to connect with GeoPackage: %w", err)
	}

	var gpkgVersion pragma
	// best effort: some GeoPackages may not expose a user_version
	_ = db.QueryRowx(`pragma user_version`).StructScan(&gpkgVersion)
	if gpkgVersion.UserVersion == "" {
		gpkgVersion.UserVersion = "unknown"
	}

	return fmt.Sprintf("geopackage version: %s, sqlite version: %s, spatialite version: %s on %s",
		gpkgVersion.UserVersion, m.Sqlite, m.Spatialite, m.Arch), nil
}
// Read "gpkg_contents" table. This table contains metadata about feature tables. The result is a mapping from
// collection ID -> feature table metadata. We match each feature table to the collection ID by looking at the
// 'table_name' column. Also, in case there's no exact match between 'collection ID' and 'table_name' we use
// the explicitly configured table name (from the YAML config).
func readGeoPackageTables(collections config.GeoSpatialCollections, db *sqlx.DB,
	fidColumn, externalFidColumn string) (map[string]*common.Table, error) {

	query := `
select
c.table_name, c.data_type, coalesce(gc.column_name, ''), coalesce(gc.geometry_type_name, '')
from
gpkg_contents c left join gpkg_geometry_columns gc on c.table_name == gc.table_name
where
c.data_type = '%s' or c.data_type = '%s'`

	// see https://docs.ogc.org/is/12-128r19/12-128r19.html#r14 for supported data types in GeoPackages.
	rows, err := db.Queryx(fmt.Sprintf(query, geospatial.Features, geospatial.Attributes))
	if err != nil {
		return nil, fmt.Errorf("failed to retrieve gpkg_contents using query: %v\n, error: %w", query, err)
	}
	defer rows.Close()

	result := make(map[string]*common.Table, 10)
	for rows.Next() {
		table, err := readGeoPackageTable(rows)
		if err != nil {
			return nil, err
		}
		// match the table to zero or more configured collections, either by
		// collection ID or by explicitly configured table name
		hasCollection := false
		for _, collection := range collections {
			if table.Name == collection.ID || collection.HasTableName(table.Name) {
				result[collection.ID] = &table
				hasCollection = true
			}
		}
		if !hasCollection {
			log.Printf("Warning: table %s is present in GeoPackage but not configured as a collection", table.Name)
		}
	}
	// check for errors encountered during iteration. Note: the previous version
	// performed this check *before* the rows.Next() loop, where it was ineffective.
	if err := rows.Err(); err != nil {
		return nil, err
	}
	if len(result) == 0 {
		return nil, errors.New("no records found in gpkg_contents and/or gpkg_geometry_columns")
	}
	for _, table := range result {
		table.Schema, err = readSchema(db, *table, fidColumn, externalFidColumn, collections)
		if err != nil {
			return nil, fmt.Errorf("failed to read schema for table %s, error: %w", table.Name, err)
		}
	}
	common.ValidateUniqueness(result)
	return result, nil
}
// readGeoPackageTable maps a single gpkg_contents record to a common.Table and validates it.
func readGeoPackageTable(rows *sqlx.Rows) (common.Table, error) {
	var result common.Table
	if err := rows.Scan(&result.Name, &result.Type, &result.GeometryColumnName, &result.GeometryType); err != nil {
		return result, fmt.Errorf("failed to read gpkg_contents record, error: %w", err)
	}
	switch {
	case result.Name == "":
		return result, errors.New("table name is blank")
	case result.Type == geospatial.Features && (result.GeometryColumnName == "" || result.GeometryType == ""):
		// feature tables must have an accompanying gpkg_geometry_columns record
		return result, errors.New("data type of table is 'features' but table has no geometry defined")
	}
	return result, nil
}
// readPropertyFiltersWithAllowedValues collects, per collection, all configured property
// filters. Allowed values are either taken statically from the YAML config or derived by
// selecting distinct values from the corresponding feature table.
func readPropertyFiltersWithAllowedValues(featTableByCollection map[string]*common.Table,
	collections config.GeoSpatialCollections, db *sqlx.DB) (map[string]ds.PropertyFiltersWithAllowedValues, error) {

	result := make(map[string]ds.PropertyFiltersWithAllowedValues)
	for _, collection := range collections {
		if collection.Features == nil {
			continue
		}
		result[collection.ID] = make(map[string]ds.PropertyFilterWithAllowedValues)
		featTable := featTableByCollection[collection.ID]

		for _, pf := range collection.Features.Filters.Properties {
			// the result should contain ALL configured property filters, with or without allowed values.
			// when available, allowed values can be either static (from YAML config) or derived from the geopackage
			result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf}
			if pf.AllowedValues != nil {
				// static allowed values from config take precedence
				result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf, AllowedValues: pf.AllowedValues}
				continue
			}
			// NOTE(review): DeriveAllowedValuesFromDatasource and IndexRequired are dereferenced
			// without nil checks — presumably config defaulting guarantees non-nil; confirm.
			if *pf.DeriveAllowedValuesFromDatasource {
				if !*pf.IndexRequired {
					log.Printf("Warning: index is disabled for column %s, deriving allowed values "+
						"from may take a long time. Index on this column is recommended", pf.Name)
				}
				// select distinct values from given column
				query := fmt.Sprintf("select distinct ft.%[1]s from %[2]s ft order by ft.%[1]s", pf.Name, featTable.Name)
				var values []string
				err := db.Select(&values, query)
				if err != nil {
					return nil, fmt.Errorf("failed to derive allowed values using query: %v\n, error: %w", query, err)
				}
				// make sure values are valid
				for _, v := range values {
					if newlineRegex.MatchString(v) {
						return nil, fmt.Errorf("failed to derive allowed values, one value contains a "+
							"newline which isn't a valid (OpenAPI) enum value. The value is: %s", v)
					}
				}
				result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf, AllowedValues: values}
				continue
			}
		}
	}
	return result, nil
}
// readSchema reads the column definitions (name, type, nullability and — when the
// gpkg_data_columns schema extension is present — description) of the given table
// and turns them into a domain Schema.
func readSchema(db *sqlx.DB, table common.Table, fidColumn, externalFidColumn string,
	collections config.GeoSpatialCollections) (*d.Schema, error) {

	collectionNames := make([]string, 0, len(collections))
	for _, collection := range collections {
		collectionNames = append(collectionNames, collection.ID)
	}

	// if table "gpkg_data_columns" is included in geopackage, use its description field to supplement the schema.
	schemaExtension, err := hasSchemaExtension(db)
	if err != nil {
		return nil, err
	}
	var query string
	if schemaExtension {
		query = fmt.Sprintf("select a.name, a.type, a.\"notnull\", coalesce(b.description, '') "+
			"from pragma_table_info('%[1]s') a "+
			"left join gpkg_data_columns b on b.column_name = a.name and b.table_name='%[1]s'", table.Name)
	} else {
		query = fmt.Sprintf("select name, type, \"notnull\" from pragma_table_info('%s')", table.Name)
	}
	rows, err := db.Queryx(query)
	if err != nil {
		return nil, err
	}
	defer rows.Close()

	fields := make([]d.Field, 0)
	for rows.Next() {
		var colName, colType, colNotNull, colDescription string
		// description column is only present when the schema extension exists
		if schemaExtension {
			err = rows.Scan(&colName, &colType, &colNotNull, &colDescription)
		} else {
			err = rows.Scan(&colName, &colType, &colNotNull)
		}
		if err != nil {
			return nil, err
		}
		fields = append(fields, d.Field{
			Name:              colName,
			Type:              colType,
			Description:       colDescription,
			IsRequired:        colNotNull == "1", // pragma_table_info reports notnull as "1"/"0"
			IsPrimaryGeometry: colName == table.GeometryColumnName,
			FeatureRelation:   d.NewFeatureRelation(colName, externalFidColumn, collectionNames),
		})
	}
	schema, err := d.NewSchema(fields, fidColumn, externalFidColumn)
	if err != nil {
		return nil, err
	}
	return schema, nil
}
// hasSchemaExtension reports whether the GeoPackage contains the
// "gpkg_data_columns" table, i.e. whether the schema extension is present.
func hasSchemaExtension(db *sqlx.DB) (bool, error) {
	var exists bool
	if err := db.Get(&exists, "select exists (select 1 from sqlite_master where type='table' and name='gpkg_data_columns')"); err != nil {
		return false, err
	}
	return exists, nil
}
package geopackage
import "github.com/jmoiron/sqlx"
// SqlxRowsAdapter implements common.DatasourceRows.
// It's a thin wrapper that delegates every call to the underlying *sqlx.Rows.
type SqlxRowsAdapter struct {
	rows *sqlx.Rows
}

// FromSqlxRows wraps the given sqlx rows in a SqlxRowsAdapter.
func FromSqlxRows(rows *sqlx.Rows) *SqlxRowsAdapter {
	return &SqlxRowsAdapter{rows: rows}
}

// Columns returns the column names of the result set.
func (s *SqlxRowsAdapter) Columns() ([]string, error) {
	return s.rows.Columns()
}

// SliceScan scans the current row into a slice of values.
func (s *SqlxRowsAdapter) SliceScan() ([]any, error) {
	return s.rows.SliceScan()
}

// Next advances to the next row, returning false when there are no more rows.
func (s *SqlxRowsAdapter) Next() bool {
	return s.rows.Next()
}

// Err returns any error encountered during iteration.
func (s *SqlxRowsAdapter) Err() error {
	return s.rows.Err()
}

// Close closes the underlying rows; a close error is deliberately ignored.
func (s *SqlxRowsAdapter) Close() {
	_ = s.rows.Close()
}
package geopackage
import (
"context"
"fmt"
"log"
"os"
"strconv"
"strings"
"time"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
)
// contextKey is a package-local key type to avoid collisions with
// context values set by other packages.
type contextKey int

const (
	// sqlContextKey is the context key under which the query start time is stored.
	sqlContextKey contextKey = iota
)
// SQLLog query logging for debugging purposes.
type SQLLog struct {
	// LogSQL when true, each executed SQL query is logged including its duration.
	LogSQL bool
}
// NewSQLLogFromEnv build a SQLLog based on the `LOG_SQL` environment variable.
// An unset/empty variable disables SQL logging; a non-boolean value is fatal.
func NewSQLLogFromEnv() *SQLLog {
	logSQL := false
	if value := os.Getenv(common.EnvLogSQL); value != "" {
		parsed, err := strconv.ParseBool(value)
		if err != nil {
			log.Fatalf("invalid %s value provided, must be a boolean", common.EnvLogSQL)
		}
		logSQL = parsed
	}
	return &SQLLog{LogSQL: logSQL}
}
// Before callback prior to execution of the given SQL query.
// Stores the start time in the context so After can compute the query duration.
func (s *SQLLog) Before(ctx context.Context, _ string, _ ...any) (context.Context, error) {
	return context.WithValue(ctx, sqlContextKey, time.Now()), nil
}
// After callback once execution of the given SQL query is done.
// When SQL logging is enabled the query is logged (with bind vars substituted,
// best effort) together with the time spent executing it.
func (s *SQLLog) After(ctx context.Context, query string, args ...any) (context.Context, error) {
	// use the comma-ok form so a missing or mistyped context value doesn't panic;
	// Before should always have stored the start time, but be defensive
	start, ok := ctx.Value(sqlContextKey).(time.Time)
	if s.LogSQL {
		var timeSpent time.Duration
		if ok {
			timeSpent = time.Since(start)
		}
		query = replaceBindVars(query, args)
		log.Printf("\n--- SQL:\n%s\n--- SQL query took: %s\n", query, timeSpent)
	}
	return ctx, nil
}
// replaceBindVars replaces '?' bind vars with actual values to log a complete query (best effort).
//
// A single left-to-right pass is used instead of repeated strings.Replace calls:
// this avoids re-scanning already substituted text (which corrupted the output
// when an argument value itself contained a '?') and avoids O(n*m) copying.
func replaceBindVars(query string, args []any) string {
	var b strings.Builder
	b.Grow(len(query))
	remaining := query
	for _, arg := range args {
		before, after, found := strings.Cut(remaining, "?")
		if !found {
			break // more args than bind vars; keep the rest as-is
		}
		b.WriteString(before)
		fmt.Fprintf(&b, "%v", arg)
		remaining = after
	}
	b.WriteString(remaining)
	return b.String()
}
package geopackage
import (
"context"
"log"
lru "github.com/hashicorp/golang-lru/v2"
"github.com/jmoiron/sqlx"
)
// preparedStmtCacheSize is the max number of prepared statements kept per cache.
var preparedStmtCacheSize = 25

// PreparedStatementCache is thread safe.
type PreparedStatementCache struct {
	cache *lru.Cache[string, *sqlx.NamedStmt] // prepared statements keyed by their SQL query
}
// NewCache creates a new PreparedStatementCache that will evict least-recently used (LRU) statements.
func NewCache() *PreparedStatementCache {
	// close statements when they get evicted from the cache
	onEvict := func(_ string, stmt *sqlx.NamedStmt) {
		if stmt != nil {
			_ = stmt.Close()
		}
	}
	lruCache, _ := lru.NewWithEvict[string, *sqlx.NamedStmt](preparedStmtCacheSize, onEvict)
	return &PreparedStatementCache{cache: lruCache}
}
// Lookup gets a prepared statement from the cache for the given query, or creates a new one and adds it to the cache.
func (c *PreparedStatementCache) Lookup(ctx context.Context, db *sqlx.DB, query string) (*sqlx.NamedStmt, error) {
	if stmt, ok := c.cache.Get(query); ok {
		return stmt, nil
	}
	// cache miss: prepare the statement and store it for future lookups
	prepared, err := db.PrepareNamedContext(ctx, query)
	if err != nil {
		return nil, err
	}
	c.cache.Add(query, prepared)
	return prepared, nil
}
// Close purges the cache, and closes remaining prepared statements.
func (c *PreparedStatementCache) Close() {
	log.Printf("closing %d prepared statements", c.cache.Len())
	// Purge triggers the eviction callback, which closes each statement
	c.cache.Purge()
}
package geopackage
import (
"errors"
"fmt"
"log"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
"github.com/jmoiron/sqlx"
)
// warmUpFeatureTables executes a warmup query to speedup subsequent queries.
// This encompasses traversing index(es) to fill the local cache.
func warmUpFeatureTables(
	configuredCollections config.GeoSpatialCollections,
	tableByCollectionID map[string]*common.Table,
	db *sqlx.DB) error {

	for collID, table := range tableByCollectionID {
		if table == nil {
			return errors.New("given table can't be nil")
		}
		// only warm up tables belonging to a configured feature collection
		for _, coll := range configuredCollections {
			if coll.ID != collID || coll.Features == nil {
				continue
			}
			if err := warmUpFeatureTable(table.Name, db); err != nil {
				return err
			}
			break
		}
	}
	return nil
}
// warmUpFeatureTable runs a single warmup query against the given feature table.
func warmUpFeatureTable(tableName string, db *sqlx.DB) error {
	log.Printf("start warm-up of feature table '%s'", tableName)
	warmUpQuery := fmt.Sprintf(`
select minx,maxx,miny,maxy from %[1]s where minx <= 0 and maxx >= 0 and miny <= 0 and maxy >= 0
`, tableName)
	if _, err := db.Exec(warmUpQuery); err != nil {
		return fmt.Errorf("failed to warm-up feature table '%s': %w", tableName, err)
	}
	log.Printf("end warm-up of feature table '%s'", tableName)
	return nil
}
package postgres
import (
"context"
"errors"
"fmt"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
"github.com/jackc/pgx/v5/pgxpool"
)
// assertIndexesExist asserts required indexes in Postgres exists
//
//nolint:nestif
func assertIndexesExist(configuredCollections config.GeoSpatialCollections, tableByCollectionID map[string]*common.Table, db *pgxpool.Pool, spatialIndexRequired bool) error {
	for collID, table := range tableByCollectionID {
		if table == nil {
			return errors.New("given table can't be nil")
		}
		for _, coll := range configuredCollections {
			if coll.ID == collID && coll.Features != nil {
				// assert temporal columns are indexed if configured
				if coll.Metadata != nil && coll.Metadata.TemporalProperties != nil {
					temporalColumns := strings.Join([]string{coll.Metadata.TemporalProperties.StartDate, coll.Metadata.TemporalProperties.EndDate}, ",")
					if err := assertIndexExists(table.Name, db, temporalColumns, true, false); err != nil {
						return err
					}
				}
				// assert geometry column has GIST (rtree) index
				if spatialIndexRequired {
					if err := assertSpatialIndex(table.Name, db, table.GeometryColumnName); err != nil {
						return err
					}
				}
				// assert the column for each property filter is indexed
				for _, propertyFilter := range coll.Features.Filters.Properties {
					// a missing index is only fatal when 'indexRequired' is set for the filter
					if err := assertIndexExists(table.Name, db, propertyFilter.Name, false, true); err != nil && *propertyFilter.IndexRequired {
						return fmt.Errorf("%w. To disable this check set 'indexRequired' to 'false'", err)
					}
				}
				break
			}
		}
	}
	return nil
}
// assertSpatialIndex checks that a GIST index exists on the given geometry column
// of the given table by inspecting the Postgres catalog tables.
func assertSpatialIndex(tableName string, db *pgxpool.Pool, geometryColumn string) error {
	query := `
select count(*)
from pg_index idx
join pg_class tbl on tbl.oid = idx.indrelid
join pg_class idx_class on idx_class.oid = idx.indexrelid
join pg_am am on idx_class.relam = am.oid
join pg_attribute attr on attr.attrelid = tbl.oid
where tbl.relname = $1
and am.amname = 'gist'
and attr.attnum = any(idx.indkey)
and attr.attname = $2;`

	var indexCount int
	if err := db.QueryRow(context.Background(), query, tableName, geometryColumn).Scan(&indexCount); err != nil {
		return fmt.Errorf("failed to check spatial index on table '%s', column '%s': %w",
			tableName, geometryColumn, err)
	}
	if indexCount > 0 {
		return nil
	}
	return fmt.Errorf("missing required spatial index (GIST): no index exists on geometry column '%s' in table '%s'",
		geometryColumn, tableName)
}
// assertIndexExists asserts that an index exists on the given comma-separated column(s)
// in the given table. Matching depends on the flags:
//   - exact match (default): the indexed columns equal the given columns
//   - prefixMatch: the given columns are a prefix of the indexed columns
//   - containsMatch: the given columns appear anywhere in the indexed columns
func assertIndexExists(tableName string, db *pgxpool.Pool, columns string, prefixMatch bool, containsMatch bool) error {
	query := `
select string_agg(a.attname, ',' order by array_position(idx.indkey, a.attnum)) as indexed_columns
from pg_class c
join pg_index idx on c.oid = idx.indrelid
join pg_attribute a on a.attrelid = c.oid and a.attnum = any(idx.indkey)
where c.relname = $1
group by idx.indexrelid;`
	rows, err := db.Query(context.Background(), query, tableName)
	if err != nil {
		// fix: wrap the underlying error (was dropped before)
		return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
	}
	// fix: close right after the error check, not after iterating
	defer rows.Close()
	exists := false
	for rows.Next() {
		var indexedColumns string
		// fix: scan errors were silently ignored
		if err := rows.Scan(&indexedColumns); err != nil {
			return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
		}
		switch {
		case columns == indexedColumns:
			exists = true
		case prefixMatch && strings.HasPrefix(indexedColumns, columns):
			exists = true
		case containsMatch && strings.Contains(indexedColumns, columns):
			exists = true
		}
	}
	// fix: surface errors encountered during iteration
	if err := rows.Err(); err != nil {
		return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
	}
	if !exists {
		return fmt.Errorf("missing required index: no index exists on column(s) '%s' in table '%s'",
			columns, tableName)
	}
	return nil
}
package postgres
import (
"context"
"errors"
"fmt"
"log"
"regexp"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
d "github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/jackc/pgx/v5/pgxpool"
)
// newlineRegex matches one or more CR/LF characters, used to reject derived allowed values containing newlines.
var newlineRegex = regexp.MustCompile(`[\r\n]+`)
// readMetadata reads metadata such as available feature tables, the schema of each table,
// available filters, etc. from the Postgres database. Terminates on failure.
func readMetadata(db *pgxpool.Pool, collections config.GeoSpatialCollections, fidColumn, externalFidColumn, schemaName string) (
	tableByCollectionID map[string]*common.Table,
	propertyFiltersByCollectionID map[string]ds.PropertyFiltersWithAllowedValues) {
	// log PostgreSQL/PostGIS version info; also acts as a connectivity check
	metadata, err := readDriverMetadata(db)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(metadata)
	// map feature tables (from the geometry_columns view) to configured collections
	tableByCollectionID, err = readFeatureTables(collections, db, fidColumn, externalFidColumn, schemaName)
	if err != nil {
		log.Fatal(err)
	}
	// determine property filters, including allowed values (static from config, or derived from the database)
	propertyFiltersByCollectionID, err = readPropertyFiltersWithAllowedValues(tableByCollectionID, collections, db)
	if err != nil {
		log.Fatal(err)
	}
	return
}
// readDriverMetadata returns a human-readable string holding the
// PostgreSQL and PostGIS versions of the connected database.
func readDriverMetadata(db *pgxpool.Pool) (string, error) {
	var pgVersion, postGISVersion string
	row := db.QueryRow(context.Background(), `
select (select version()) as pg_version, (select PostGIS_Version()) as postgis_version;
`)
	err := row.Scan(&pgVersion, &postGISVersion)
	return fmt.Sprintf("postgresql version: '%s', postgis version: '%s'", pgVersion, postGISVersion), err
}
// Read "geometry_columns" view. This table contains metadata about PostGIS tables. The result is a mapping from
// collection ID -> feature table metadata. We match each feature table to the collection ID by looking at the
// 'f_table_name' column. Also, in case there's no exact match between 'collection ID' and 'f_table_name' we use
// the explicitly configured table name (from the YAML config).
func readFeatureTables(collections config.GeoSpatialCollections, db *pgxpool.Pool,
	fidColumn, externalFidColumn, schemaName string) (map[string]*common.Table, error) {
	query := `
select
f_table_name::text, '%s', f_geometry_column::text, type::text
from
geometry_columns
where
f_table_schema = $1`
	params := fmt.Sprintf(query, geospatial.Features) // Currently only features are supported, not 'attributes'.
	rows, err := db.Query(context.Background(), params, schemaName)
	if err != nil {
		return nil, fmt.Errorf("failed to retrieve geometry_columns using query: %v\n, error: %w", query, err)
	}
	defer rows.Close()
	result := make(map[string]*common.Table, 10)
	for rows.Next() {
		table := common.Table{}
		if err = rows.Scan(&table.Name, &table.Type, &table.GeometryColumnName, &table.GeometryType); err != nil {
			return nil, fmt.Errorf("failed to read geometry_columns record, error: %w", err)
		}
		if table.Name == "" {
			// fix: previously wrapped a nil error with %w
			return nil, errors.New("feature table name is blank")
		}
		// match the feature table to a configured collection, either by ID or by explicit table name
		hasCollection := false
		for _, collection := range collections {
			if table.Name == collection.ID {
				result[collection.ID] = &table
				hasCollection = true
			} else if collection.HasTableName(table.Name) {
				result[collection.ID] = &table
				hasCollection = true
			}
		}
		if !hasCollection {
			log.Printf("Warning: table %s is present in PostgreSQL but not configured as a collection", table.Name)
		}
	}
	// fix: check iteration errors AFTER the loop (was checked before iterating, which misses read errors)
	if err = rows.Err(); err != nil {
		return nil, err
	}
	if len(result) == 0 {
		return nil, errors.New("no records found in PostgreSQL geometry_columns view")
	}
	// read the column schema of each matched feature table
	for _, table := range result {
		table.Schema, err = readSchema(db, *table, fidColumn, externalFidColumn, schemaName, collections)
		if err != nil {
			return nil, fmt.Errorf("failed to read schema for table %s, error: %w", table.Name, err)
		}
	}
	common.ValidateUniqueness(result)
	return result, nil
}
// readPropertyFiltersWithAllowedValues returns, per collection, ALL configured property filters.
// Allowed values are attached when available: either static (from YAML config) or derived from the database.
func readPropertyFiltersWithAllowedValues(featTableByCollection map[string]*common.Table,
	collections config.GeoSpatialCollections, db *pgxpool.Pool) (map[string]ds.PropertyFiltersWithAllowedValues, error) {
	result := make(map[string]ds.PropertyFiltersWithAllowedValues)
	for _, collection := range collections {
		if collection.Features == nil {
			continue
		}
		result[collection.ID] = make(map[string]ds.PropertyFilterWithAllowedValues)
		featTable := featTableByCollection[collection.ID]
		for _, pf := range collection.Features.Filters.Properties {
			// the result should contain ALL configured property filters, with or without allowed values.
			// when available, allowed values can be either static (from YAML config) or derived from the datasource
			result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf}
			if pf.AllowedValues != nil {
				// static allowed values from config
				result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf, AllowedValues: pf.AllowedValues}
				continue
			}
			if *pf.DeriveAllowedValuesFromDatasource {
				// fix: guard against a collection without a matching feature table (previously panicked on nil)
				if featTable == nil {
					return nil, fmt.Errorf("no feature table found for collection '%s' to derive allowed values from", collection.ID)
				}
				if !*pf.IndexRequired {
					log.Printf("Warning: index is disabled for column %s, deriving allowed values "+
						"from may take a long time. Index on this column is recommended", pf.Name)
				}
				values, err := deriveAllowedValues(db, featTable.Name, pf.Name)
				if err != nil {
					return nil, err
				}
				result[collection.ID][pf.Name] = ds.PropertyFilterWithAllowedValues{PropertyFilter: pf, AllowedValues: values}
			}
		}
	}
	return result, nil
}

// deriveAllowedValues selects the distinct values of the given column and validates them as enum values.
func deriveAllowedValues(db *pgxpool.Pool, tableName, columnName string) ([]string, error) {
	// select distinct values from given column
	query := fmt.Sprintf("select distinct \"%[1]s\" from \"%[2]s\" order by \"%[1]s\"", columnName, tableName)
	rows, err := db.Query(context.Background(), query)
	if err != nil {
		return nil, fmt.Errorf("failed to derive allowed values using query: %v\n, error: %w", query, err)
	}
	// fix: rows were never closed before, leaking pooled connections
	defer rows.Close()
	var values []string
	for rows.Next() {
		rowValues, err := rows.Values()
		if err != nil {
			return nil, fmt.Errorf("failed to read: %w", err)
		}
		for _, v := range rowValues {
			values = append(values, fmt.Sprintf("%v", v))
		}
	}
	// fix: surface errors encountered during iteration
	if err := rows.Err(); err != nil {
		return nil, err
	}
	// make sure values are valid (OpenAPI enum values can't contain newlines)
	for _, v := range values {
		if newlineRegex.MatchString(v) {
			return nil, fmt.Errorf("failed to derive allowed values, one value contains a "+
				"newline which isn't a valid (OpenAPI) enum value. The value is: %s", v)
		}
	}
	return values, nil
}
// readSchema reads the column schema (name, type, nullability, description) of the given feature
// table from the Postgres system catalogs and turns it into a domain Schema.
func readSchema(db *pgxpool.Pool, table common.Table, fidColumn, externalFidColumn, schemaName string,
	collections config.GeoSpatialCollections) (*d.Schema, error) {
	// collection IDs are needed to detect feature relations by column name
	collectionNames := make([]string, 0, len(collections))
	for _, collection := range collections {
		collectionNames = append(collectionNames, collection.ID)
	}
	query := `
select
a.attname as column_name,
case
-- If the data type is a geometry, extract the specific type (Point, Polygon, etc)
when pg_catalog.format_type(a.atttypid, a.atttypmod) like 'geometry(%' then
substring(pg_catalog.format_type(a.atttypid, a.atttypmod) from 'geometry\(([^,)]+)')
-- Otherwise, return the standard data type
else
pg_catalog.format_type(a.atttypid, a.atttypmod)
end as data_type,
a.attnotnull as is_required,
coalesce(d.description, '') as column_description
from
pg_catalog.pg_attribute a
join
pg_catalog.pg_class c on a.attrelid = c.oid
join
pg_catalog.pg_namespace n on c.relnamespace = n.oid
left join
pg_catalog.pg_description d on d.objoid = a.attrelid and d.objsubid = a.attnum
where
n.nspname = $1
and c.relname = $2
and a.attnum > 0 -- Excludes system columns
and not a.attisdropped -- Excludes columns that have been dropped
order by
a.attnum;
`
	rows, err := db.Query(context.Background(), query, schemaName, table.Name)
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	fields := make([]d.Field, 0)
	for rows.Next() {
		var colName, colType, colDescription string
		var colNotNull bool
		if err = rows.Scan(&colName, &colType, &colNotNull, &colDescription); err != nil {
			return nil, err
		}
		fields = append(fields, d.Field{
			Name:              colName,
			Type:              colType,
			Description:       colDescription,
			IsRequired:        colNotNull,
			IsPrimaryGeometry: colName == table.GeometryColumnName,
			FeatureRelation:   d.NewFeatureRelation(colName, externalFidColumn, collectionNames),
		})
	}
	// fix: check for errors encountered during iteration (was missing)
	if err = rows.Err(); err != nil {
		return nil, err
	}
	schema, err := d.NewSchema(fields, fidColumn, externalFidColumn)
	if err != nil {
		return nil, err
	}
	return schema, nil
}
package postgres
import (
"context"
"errors"
"fmt"
"log"
"maps"
"github.com/PDOK/gokoala/config"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
d "github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/google/uuid"
"github.com/jackc/pgx/v5"
"github.com/jackc/pgx/v5/pgxpool"
"github.com/twpayne/go-geom"
"github.com/twpayne/go-geom/encoding/wkt"
pgxgeom "github.com/twpayne/pgx-geom"
pgxuuid "github.com/vgarvardt/pgx-google-uuid/v5"
)
const (
	// pgxNamedParamSymbol is the symbol pgx uses for named query parameters.
	// https://github.com/jackc/pgx/issues/387#issuecomment-1107666716
	pgxNamedParamSymbol = "@"
)

// Postgres is a features datasource backed by PostgreSQL/PostGIS.
type Postgres struct {
	common.DatasourceCommon

	db         *pgxpool.Pool // connection pool, opened read-only in NewPostgres
	schemaName string        // database schema holding the feature tables
}
// NewPostgres creates a Postgres datasource: it parses the connection config, opens a read-only
// connection pool, reads table/filter metadata and asserts that the required indexes exist.
// Note: only on-the-fly transformation is supported (transformOnTheFly must be true).
func NewPostgres(collections config.GeoSpatialCollections, pgConfig config.Postgres,
	transformOnTheFly bool, maxDecimals int, forceUTC bool) (*Postgres, error) {
	if !transformOnTheFly {
		return nil, errors.New("ahead-of-time transformed features are currently not " +
			"supported for postgresql, reprojection/transformation is always applied")
	}
	pgxConfig, err := pgxpool.ParseConfig(pgConfig.ConnectionString())
	if err != nil {
		return nil, fmt.Errorf("unable to parse database config: %w", err)
	}
	// enable SQL logging when appropriate environment variable (LOG_SQL=true) is set
	if sl := NewSQLLogFromEnv(); sl.LogSQL {
		pgxConfig.ConnConfig.Tracer = sl.Tracer
	}
	// set connection to read-only for safety since we (should) never write to Postgres.
	pgxConfig.ConnConfig.RuntimeParams["default_transaction_read_only"] = "on"
	pgxConfig.AfterConnect = func(ctx context.Context, conn *pgx.Conn) error {
		// add support for github.com/google/uuid <-> PostGIS conversions
		pgxuuid.Register(conn.TypeMap())
		// add support for Go <-> PostGIS conversions
		return pgxgeom.Register(ctx, conn)
	}
	ctx := context.Background()
	db, err := pgxpool.NewWithConfig(ctx, pgxConfig)
	if err != nil {
		return nil, fmt.Errorf("unable to create connection pool: %w", err)
	}
	log.Printf("connecting to database '%s' as user '%s' on server: %s",
		pgConfig.DatabaseName, pgConfig.User, pgConfig.Host)
	// fail fast when the database isn't reachable
	if err := db.Ping(ctx); err != nil {
		return nil, fmt.Errorf("unable to connect with database: %w", err)
	}
	pg := &Postgres{
		DatasourceCommon: common.DatasourceCommon{
			TransformOnTheFly:        transformOnTheFly,
			FidColumn:                pgConfig.Fid,
			ExternalFidColumn:        pgConfig.ExternalFid,
			QueryTimeout:             pgConfig.QueryTimeout.Duration,
			MaxDecimals:              maxDecimals,
			ForceUTC:                 forceUTC,
			PropertiesByCollectionID: collections.FeaturePropertiesByID(),
		},
		db:         db,
		schemaName: pgConfig.Schema,
	}
	// read metadata (terminates on failure) and verify required indexes exist
	pg.TableByCollectionID, pg.PropertyFiltersByCollectionID = readMetadata(
		db, collections, pg.FidColumn, pg.ExternalFidColumn, pg.schemaName)
	if err = assertIndexesExist(collections, pg.TableByCollectionID, db, *pgConfig.SpatialIndexRequired); err != nil {
		return nil, err
	}
	return pg, nil
}
// Close closes the underlying connection pool.
func (pg *Postgres) Close() {
	pg.db.Close()
}
// GetFeatureIDs is not implemented for Postgres, use GetFeatures instead (see error message).
func (pg *Postgres) GetFeatureIDs(_ context.Context, _ string, _ ds.FeaturesCriteria) ([]int64, d.Cursors, error) {
	return []int64{}, d.Cursors{}, errors.New("not implemented since the postgres datasource currently " +
		"only support on-the-fly transformation/reprojection, use GetFeatures() to get features in every supported CRS")
}
// GetFeaturesByID is not implemented for Postgres, use GetFeatures instead (see error message).
func (pg *Postgres) GetFeaturesByID(_ context.Context, _ string, _ []int64, _ d.AxisOrder, _ d.Profile) (*d.FeatureCollection, error) {
	return &d.FeatureCollection{}, errors.New("not implemented since the postgres datasource currently " +
		"only support on-the-fly transformation/reprojection, use GetFeatures() to get features in every supported CRS")
}
// GetFeatures returns a page of features from the given collection matching the given criteria,
// along with cursors for navigating to the previous/next page.
func (pg *Postgres) GetFeatures(ctx context.Context, collection string, criteria ds.FeaturesCriteria,
	axisOrder d.AxisOrder, profile d.Profile) (*d.FeatureCollection, d.Cursors, error) {
	table, err := pg.CollectionToTable(collection)
	if err != nil {
		return nil, d.Cursors{}, err
	}
	queryCtx, cancel := context.WithTimeout(ctx, pg.QueryTimeout) // https://go.dev/doc/database/cancel-operations
	defer cancel()
	propConfig := pg.PropertiesByCollectionID[collection]
	query, queryArgs, err := pg.makeFeaturesQuery(propConfig, table, false, axisOrder, criteria)
	if err != nil {
		return nil, d.Cursors{}, fmt.Errorf("failed to create query '%s' error: %w", query, err)
	}
	rows, err := pg.db.Query(queryCtx, query, queryArgs)
	if err != nil {
		return nil, d.Cursors{}, fmt.Errorf("failed to execute query '%s' error: %w", query, err)
	}
	defer rows.Close()
	var prevNext *d.PrevNextFID
	fc := d.FeatureCollection{}
	fc.Features, prevNext, err = common.MapRowsToFeatures(queryCtx, FromPgxRows(rows),
		pg.FidColumn, pg.ExternalFidColumn, table.GeometryColumnName,
		propConfig, table.Schema, mapPostGISGeometry, profile.MapRelationUsingProfile,
		common.FormatOpts{MaxDecimals: pg.MaxDecimals, ForceUTC: pg.ForceUTC})
	if err != nil {
		return nil, d.Cursors{}, err
	}
	// NOTE(review): nil prevNext presumably means an empty result set — confirm against MapRowsToFeatures
	if prevNext == nil {
		return nil, d.Cursors{}, nil
	}
	fc.NumberReturned = len(fc.Features)
	return &fc, d.NewCursors(*prevNext, criteria.Cursor.FiltersChecksum), queryCtx.Err()
}
// GetFeature retrieves a single feature by its feature id (fid) from the given collection.
// The fid is either an int64 (internal fid) or a uuid.UUID (external fid) depending on configuration.
// Returns nil without error when the feature isn't found or the fid type doesn't match the configuration.
func (pg *Postgres) GetFeature(ctx context.Context, collection string, featureID any,
	outputSRID d.SRID, axisOrder d.AxisOrder, profile d.Profile) (*d.Feature, error) {
	table, err := pg.CollectionToTable(collection)
	if err != nil {
		return nil, err
	}
	queryCtx, cancel := context.WithTimeout(ctx, pg.QueryTimeout) // https://go.dev/doc/database/cancel-operations
	defer cancel()
	var fidTypeCast string
	var fidColumn string
	switch featureID.(type) {
	case int64:
		if pg.ExternalFidColumn != "" {
			// Features should be retrieved by UUID
			log.Println("feature requested by int while external fid column is defined")
			return nil, nil
		}
		fidColumn = pg.FidColumn
		fidTypeCast = "::bigint" // always compare as 64-bits integer, regardless of numeric type in schema
	case uuid.UUID:
		if pg.ExternalFidColumn == "" {
			// Features should be retrieved by int64
			log.Println("feature requested by UUID while external fid column is not defined")
			return nil, nil
		}
		fidColumn = pg.ExternalFidColumn
	default:
		// fix: previously an unsupported fid type fell through with an empty fid column,
		// producing an invalid SQL query
		return nil, fmt.Errorf("unsupported feature id type: %T", featureID)
	}
	propConfig := pg.PropertiesByCollectionID[collection]
	selectClause := pg.SelectColumns(table, axisOrder, selectPostGISGeometry, propConfig, false)
	// TODO: find better place for this srid logic
	srid := outputSRID.GetOrDefault()
	if srid == d.UndefinedSRID || srid == d.WGS84SRID {
		srid = d.WGS84SRIDPostgis
	}
	query := fmt.Sprintf(`select %[1]s from "%[2]s" where "%[3]s"%[4]s = @fid%[4]s limit 1`,
		selectClause, table.Name, fidColumn, fidTypeCast)
	rows, err := pg.db.Query(queryCtx, query, pgx.NamedArgs{"fid": featureID, "outputSrid": srid})
	if err != nil {
		return nil, fmt.Errorf("query '%s' failed: %w", query, err)
	}
	// fix: removed duplicated error-check + defer rows.Close() that appeared twice in a row
	defer rows.Close()
	features, _, err := common.MapRowsToFeatures(queryCtx, FromPgxRows(rows),
		pg.FidColumn, pg.ExternalFidColumn, table.GeometryColumnName,
		propConfig, table.Schema, mapPostGISGeometry, profile.MapRelationUsingProfile,
		common.FormatOpts{MaxDecimals: pg.MaxDecimals, ForceUTC: pg.ForceUTC})
	if err != nil {
		return nil, err
	}
	if len(features) != 1 {
		return nil, nil
	}
	return features[0], queryCtx.Err()
}
// Build specific features queries based on the given options.
// Returns the query string plus the pgx named arguments to execute it with.
func (pg *Postgres) makeFeaturesQuery(propConfig *config.FeatureProperties, table *common.Table,
	onlyFIDs bool, axisOrder d.AxisOrder, criteria ds.FeaturesCriteria) (query string, queryArgs pgx.NamedArgs, err error) {
	var selectClause string
	if onlyFIDs {
		// only select the feature id plus prev/next fids (used for cursor-based pagination)
		selectClause = common.ColumnsToSQL([]string{pg.FidColumn, d.PrevFid, d.NextFid})
	} else {
		selectClause = pg.SelectColumns(table, axisOrder, selectPostGISGeometry, propConfig, true)
	}
	// TODO: find better place for this srid logic
	if criteria.InputSRID == d.UndefinedSRID || criteria.InputSRID == d.WGS84SRID {
		criteria.InputSRID = d.WGS84SRIDPostgis
	}
	if criteria.OutputSRID == d.UndefinedSRID || criteria.OutputSRID == d.WGS84SRID {
		criteria.OutputSRID = d.WGS84SRIDPostgis
	}
	return pg.makeQuery(table, selectClause, criteria)
}
// makeQuery builds the paginated features query (with property, temporal and bbox filters)
// and its named parameters. Pagination is cursor-based on the fid column: the CTEs select the
// next page ('next'), the previous page ('prev') and window functions compute the prev/next fids.
func (pg *Postgres) makeQuery(table *common.Table, selectClause string, criteria ds.FeaturesCriteria) (string, map[string]any, error) {
	pfClause, pfNamedParams := common.PropertyFiltersToSQL(criteria.PropertyFilters, pgxNamedParamSymbol)
	temporalClause, temporalNamedParams := common.TemporalCriteriaToSQL(criteria.TemporalCriteria, pgxNamedParamSymbol)
	var bboxClause string
	var bboxNamedParams map[string]any
	if criteria.Bbox != nil {
		var err error
		bboxClause, bboxNamedParams, err = bboxToSQL(criteria.Bbox, criteria.InputSRID, table.GeometryColumnName)
		if err != nil {
			return "", nil, err
		}
	}
	// %[1]s=table, %[2]s=fid column, %[3]s=temporal filter, %[4]s=property filters,
	// %[5]s=select clause, %[6]s/%[7]s=prev/next fid aliases, %[8]s=bbox filter
	query := fmt.Sprintf(`
with
next as (select * from "%[1]s" where "%[2]s" >= @fid %[3]s %[4]s %[8]s order by %[2]s asc limit @limit + 1),
prev as (select * from "%[1]s" where "%[2]s" < @fid %[3]s %[4]s %[8]s order by %[2]s desc limit @limit),
nextprev as (select * from next union all select * from prev),
nextprevfeat as (select *, lag("%[2]s", @limit) over (order by %[2]s) as %[6]s, lead("%[2]s", @limit) over (order by "%[2]s") as %[7]s from nextprev)
select %[5]s from nextprevfeat where "%[2]s" >= @fid %[3]s %[4]s limit @limit
`, table.Name, pg.FidColumn, temporalClause, pfClause, selectClause, d.PrevFid, d.NextFid, bboxClause)
	namedParams := map[string]any{
		"fid":        criteria.Cursor.FID,
		"limit":      criteria.Limit,
		"outputSrid": criteria.OutputSRID,
	}
	if criteria.Bbox != nil {
		maps.Copy(namedParams, bboxNamedParams)
	}
	maps.Copy(namedParams, pfNamedParams)
	maps.Copy(namedParams, temporalNamedParams)
	return query, namedParams, nil
}
// bboxToSQL turns the given bounding box into an 'st_intersects' SQL filter
// with named parameters. Returns empty results when no bbox is given.
func bboxToSQL(bbox *geom.Bounds, bboxSRID d.SRID, geomColumn string) (string, map[string]any, error) {
	if bbox == nil {
		return "", nil, nil
	}
	filter := fmt.Sprintf(`and
st_intersects(st_transform(%[1]s, @bboxSrid::int), st_geomfromtext(@bboxWkt::text, @bboxSrid::int))
`, geomColumn)
	marshalled, err := wkt.Marshal(bbox.Polygon())
	if err != nil {
		return "", nil, err
	}
	namedParams := map[string]any{
		"bboxWkt":  marshalled,
		"bboxSrid": bboxSRID,
	}
	return filter, namedParams, nil
}
// mapPostGISGeometry Postgres/PostGIS specific way to read geometries into a geom.T.
// Since we use 'pgx-geom' the driver already decoded the value, so a type assertion suffices.
func mapPostGISGeometry(columnValue any) (geom.T, error) {
	if geometry, ok := columnValue.(geom.T); ok {
		return geometry, nil
	}
	return nil, errors.New("failed to convert column value to geometry")
}
// selectPostGISGeometry Postgres/PostGIS specific way to select the geometry column,
// transformed to the requested output SRID and - when domain.AxisOrder demands it - flipped to YX.
func selectPostGISGeometry(axisOrder d.AxisOrder, table *common.Table) string {
	pattern := ", st_transform(\"%[1]s\", @outputSrid::int) as \"%[1]s\""
	if axisOrder == d.AxisOrderYX {
		pattern = ", st_flipcoordinates(st_transform(\"%[1]s\", @outputSrid::int)) as \"%[1]s\""
	}
	return fmt.Sprintf(pattern, table.GeometryColumnName)
}
package postgres
import "github.com/jackc/pgx/v5"
// PgxRowsAdapter implements common.DatasourceRows.
// It adapts pgx.Rows to the datasource-agnostic rows interface.
type PgxRowsAdapter struct {
	rows pgx.Rows // underlying pgx result set
}

// FromPgxRows wraps the given pgx.Rows in a PgxRowsAdapter.
func FromPgxRows(rows pgx.Rows) *PgxRowsAdapter {
	return &PgxRowsAdapter{rows: rows}
}
// Columns returns the column names of the result set.
// pgx.Rows does not have a Columns() method like sqlx.Rows,
// so we derive the names from the field descriptions instead.
func (p *PgxRowsAdapter) Columns() ([]string, error) {
	descriptions := p.rows.FieldDescriptions()
	names := make([]string, 0, len(descriptions))
	for _, description := range descriptions {
		names = append(names, description.Name)
	}
	return names, nil
}
// SliceScan returns all values of the current row as a slice.
func (p *PgxRowsAdapter) SliceScan() ([]any, error) {
	return p.rows.Values()
}

// Next advances to the next row, returns false when no rows remain.
func (p *PgxRowsAdapter) Next() bool {
	return p.rows.Next()
}

// Err returns any error encountered while iterating.
func (p *PgxRowsAdapter) Err() error {
	return p.rows.Err()
}

// Close closes the underlying pgx rows.
func (p *PgxRowsAdapter) Close() {
	p.rows.Close()
}
package postgres
import (
"context"
"fmt"
"log"
"os"
"strconv"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/common"
"github.com/jackc/pgx/v5/tracelog"
)
// stdoutLogger adapts a standard log.Logger to the tracelog.Logger interface.
type stdoutLogger struct {
	logger *log.Logger
}

// SQLLog query logging for debugging purposes.
type SQLLog struct {
	LogSQL bool               // true when SQL logging is enabled
	Tracer *tracelog.TraceLog // pgx tracer emitting the SQL log lines, nil when disabled
}
// NewSQLLogFromEnv builds a SQLLog based on the `LOG_SQL` environment variable.
// Terminates when the variable is set but doesn't hold a boolean.
func NewSQLLogFromEnv() *SQLLog {
	result := &SQLLog{}
	if envValue := os.Getenv(common.EnvLogSQL); envValue != "" {
		var err error
		result.LogSQL, err = strconv.ParseBool(envValue)
		if err != nil {
			log.Fatalf("invalid %s value provided, must be a boolean", common.EnvLogSQL)
		}
		result.Tracer = &tracelog.TraceLog{
			Logger:   &stdoutLogger{logger: log.New(os.Stdout, "POSTGRES: ", log.LstdFlags)},
			LogLevel: tracelog.LogLevelTrace, // Set to Trace to see all query details
		}
	}
	return result
}
// Log implements tracelog.Logger by writing the level, message and any extra data to stdout.
func (s *stdoutLogger) Log(_ context.Context, level tracelog.LogLevel, msg string, data map[string]any) {
	line := fmt.Sprintf("%s: %s", level, msg)
	if data != nil {
		line = fmt.Sprintf("%s %v", line, data)
	}
	s.logger.Println(line)
}
package domain
// -----
// "Attribute JSON" is NOT a formal standard. It's defined by PDOK as a way to return non-spatial data from certain collections
// alongside collections that do contain spatial data. Attribute JSON is modeled after GeoJSON but does NOT contain a geometry.
// -----
// AttributeCollection is a FeatureCollection with only attributes and NO geometries.
type AttributeCollection struct {
	// Features shadows the embedded FeatureCollection.Features for JSON marshalling
	Features []*Attribute `json:"features"`
	FeatureCollection
}

// Attribute is a Feature with only attributes and NO geometry.
type Attribute struct {
	Type       featureType       `json:"type"` // always marshalled as "Feature"
	Properties FeatureProperties `json:"properties"`
	// We expect ids to be auto-incrementing integers (which is the default in geopackages)
	// since we use it for cursor-based pagination.
	ID    string `json:"id"`
	Links []Link `json:"links,omitempty"`
}
// Keys of the Attribute properties.
func (f *Attribute) Keys() []string {
	return f.Properties.Keys()
}
package domain
import (
"bytes"
"encoding/base64"
"log"
"math/big"
neturl "net/url"
"strings"
)
const (
	PrevFid = "prevfid" // SQL alias for the previous feature id computed during pagination
	NextFid = "nextfid" // SQL alias for the next feature id computed during pagination

	// separator between the encoded fid and the encoded checksum in a cursor
	separator = '|'
)
// Cursors hold the next and previous cursor. Note that we use
// 'cursor-based pagination' as opposed to 'offset-based pagination'.
//
// The cursor is based on the fid (feature id) of the underlying feature table. This fid is required to be a unique
// and (auto)incrementing integer. The fid is not required to be contiguous, gaps in the fid sequence are allowed.
type Cursors struct {
	Prev EncodedCursor
	Next EncodedCursor

	HasPrev bool // true when a previous page exists
	HasNext bool // true when a next page exists
}

// EncodedCursor is a scrambled string representation of the fields defined in DecodedCursor.
type EncodedCursor string

// DecodedCursor the cursor values after decoding EncodedCursor.
type DecodedCursor struct {
	FiltersChecksum []byte // checksum of the query params acting as filters
	FID             int64  // feature id to start the page at
}

// PrevNextFID previous and next feature id (fid) to encode in cursor.
type PrevNextFID struct {
	Prev int64
	Next int64
}
// NewCursors creates Cursors based on the prev/next feature ids from the datasource
// and the provided filters (captured in a checksum).
func NewCursors(fid PrevNextFID, filtersChecksum []byte) Cursors {
	cursors := Cursors{
		Prev: encodeCursor(fid.Prev, filtersChecksum),
		Next: encodeCursor(fid.Next, filtersChecksum),
	}
	cursors.HasPrev = fid.Prev > 0
	cursors.HasNext = fid.Next > 0
	return cursors
}
// encodeCursor scrambles fid + filters checksum into an opaque cursor.
// Format of the cursor: <encoded fid><separator><encoded checksum>.
func encodeCursor(fid int64, filtersChecksum []byte) EncodedCursor {
	var cursor strings.Builder
	cursor.WriteString(base64.RawURLEncoding.EncodeToString(big.NewInt(fid).Bytes()))
	cursor.WriteRune(separator)
	cursor.WriteString(base64.RawURLEncoding.EncodeToString(filtersChecksum))
	return EncodedCursor(cursor.String())
}
// Decode turns encoded cursor into DecodedCursor and verifies that
// the checksum of query params that act as filters hasn't changed.
// Any malformed cursor silently falls back to the first page (fid 0).
func (c EncodedCursor) Decode(filtersChecksum []byte) DecodedCursor {
	value, err := neturl.QueryUnescape(string(c))
	if err != nil || value == "" {
		return DecodedCursor{filtersChecksum, 0}
	}
	// split first, then decode
	encoded := strings.Split(value, string(separator))
	if len(encoded) < 2 {
		log.Printf("cursor '%s' doesn't contain expected separator %c", value, separator)
		return DecodedCursor{filtersChecksum, 0}
	}
	decodedFid, fidErr := base64.RawURLEncoding.DecodeString(encoded[0])
	decodedChecksum, checksumErr := base64.RawURLEncoding.DecodeString(encoded[1])
	if fidErr != nil || checksumErr != nil {
		log.Printf("decoding cursor value '%s' failed, defaulting to first page", value)
		return DecodedCursor{filtersChecksum, 0}
	}
	// feature id
	fid := big.NewInt(0).SetBytes(decodedFid).Int64()
	if fid < 0 {
		log.Printf("negative feature ID detected: %d, defaulting to first page", fid)
		fid = 0
	}
	// checksum: a mismatch means the filters changed mid-pagination, so restart at page one
	if !bytes.Equal(decodedChecksum, filtersChecksum) {
		log.Printf("filters in query params have changed during pagination, resetting to first page")
		return DecodedCursor{filtersChecksum, 0}
	}
	return DecodedCursor{filtersChecksum, fid}
}
// String returns the cursor as a plain string.
func (c EncodedCursor) String() string {
	return string(c)
}
package domain
import (
"github.com/twpayne/go-geom"
"github.com/twpayne/go-geom/encoding/geojson"
)
// featureCollectionType allows the GeoJSON type to be automatically set during json marshalling.
type featureCollectionType struct{}

// MarshalJSON always emits the literal "FeatureCollection".
func (fc *featureCollectionType) MarshalJSON() ([]byte, error) {
	return []byte(`"FeatureCollection"`), nil
}

// featureType allows the type for Feature to be automatically set during json Marshalling.
type featureType struct{}

// MarshalJSON always emits the literal "Feature".
func (ft *featureType) MarshalJSON() ([]byte, error) {
	return []byte(`"Feature"`), nil
}
// FeatureCollection is a GeoJSON FeatureCollection with extras such as links
// Note: fields in this struct are sorted for optimal memory usage (field alignment).
type FeatureCollection struct {
	Type           featureCollectionType `json:"type"` // always marshalled as "FeatureCollection"
	Timestamp      string                `json:"timeStamp,omitempty"`
	Links          []Link                `json:"links,omitempty"`
	Features       []*Feature            `json:"features"`
	NumberReturned int                   `json:"numberReturned"`
}
// Feature is a GeoJSON Feature with extras such as links
// Note: fields in this struct are sorted for optimal memory usage (field alignment).
type Feature struct {
	Type       featureType       `json:"type"` // always marshalled as "Feature"
	Properties FeatureProperties `json:"properties"`
	// We support 'null' geometries, don't add an 'omitempty' tag here.
	Geometry *geojson.Geometry `json:"geometry"`
	// We expect feature ids to be auto-incrementing integers (which is the default in geopackages)
	// since we use it for cursor-based pagination.
	ID    string `json:"id"`
	Links []Link `json:"links,omitempty"`
}

// Keys of the Feature properties.
func (f *Feature) Keys() []string {
	return f.Properties.Keys()
}
// SetGeom sets the geometry of the Feature by encoding the provided geom.T with
// optional maximum decimal precision to GeoJSON. A nil geometry clears the field.
func (f *Feature) SetGeom(geometry geom.T, maxDecimals int) (err error) {
	if geometry == nil {
		f.Geometry = nil
		return nil
	}
	opts := make([]geojson.EncodeGeometryOption, 0, 1)
	if maxDecimals > 0 {
		opts = append(opts, geojson.EncodeGeometryWithMaxDecimalDigits(maxDecimals))
	}
	f.Geometry, err = geojson.Encode(geometry, opts...)
	return err
}
// Link according to RFC 8288, https://datatracker.ietf.org/doc/html/rfc8288
// Note: fields in this struct are sorted for optimal memory usage (field alignment).
type Link struct {
	Rel       string `json:"rel"`
	Title     string `json:"title,omitempty"`
	Type      string `json:"type,omitempty"`
	Href      string `json:"href"`
	Hreflang  string `json:"hreflang,omitempty"`
	Length    int64  `json:"length,omitempty"`
	Templated bool   `json:"templated,omitempty"`
}
package domain
import (
"fmt"
"net/url"
)
// featurePath is the URL template for a single feature: <base>/collections/<collection>/items/<fid>.
const featurePath = "%s/collections/%s/items/%s"

// ProfileName is the name of an OAF Part 5 profile.
type ProfileName string

// Profiles from OAF Part 5 as specified in https://docs.ogc.org/DRAFTS/23-058r1.html#rc_profile-parameter
const (
	RelAsKey  ProfileName = "rel-as-key"  // RelAsKey a feature reference in the response SHALL be represented by: The featureId
	RelAsURI  ProfileName = "rel-as-uri"  // RelAsURI a feature reference in the response SHALL be represented by: an HTTP(S) URI.
	RelAsLink ProfileName = "rel-as-link" // RelAsLink a feature reference in the response SHALL be represented by: an object with the property "href" and, optionally a "title"
)

// SupportedProfiles lists all profiles this server implements.
var SupportedProfiles = []ProfileName{
	RelAsKey, RelAsURI, RelAsLink,
}
// Profile from OAF Part 5, used to express relations between features.
type Profile struct {
	profileName ProfileName
	baseURL     string // base URL used to build feature URIs/links
	schema      Schema // schema used to look up feature relations
}

// NewProfile creates a Profile for the given profile name, base URL and schema.
func NewProfile(profileName ProfileName, baseURL url.URL, schema Schema) Profile {
	return Profile{
		profileName: profileName,
		baseURL:     baseURL.String(),
		schema:      schema,
	}
}
// MapRelationUsingProfile renames a feature-relation column and maps its value
// according to the active profile (rel-as-link, rel-as-key or rel-as-uri).
// Returns the new column name, the relation name and the (possibly rewritten) value.
func (p *Profile) MapRelationUsingProfile(columnName string, columnValue any, externalFidColumn string) (newColumnName, relationName string, newColumnValue any) {
	switch p.profileName {
	case RelAsLink:
		// represent the relation as an object with an "href" property pointing to the related feature
		relationName = newFeatureRelationName(columnName, externalFidColumn)
		featureRelation := p.schema.findFeatureRelation(relationName)
		newColumnName = relationName + ".href"
		if columnValue != nil && featureRelation != nil {
			newColumnValue = fmt.Sprintf(featurePath, p.baseURL, featureRelation.CollectionID, columnValue)
		}
	case RelAsKey:
		// represent the relation by the raw feature id
		relationName = newFeatureRelationName(columnName, externalFidColumn)
		newColumnName = relationName
		newColumnValue = columnValue
	case RelAsURI:
		// almost identical to rel-as-link except that there's no ".href" suffix (and potentially a title in the future)
		relationName = newFeatureRelationName(columnName, externalFidColumn)
		featureRelation := p.schema.findFeatureRelation(relationName)
		newColumnName = relationName
		if columnValue != nil && featureRelation != nil {
			newColumnValue = fmt.Sprintf(featurePath, p.baseURL, featureRelation.CollectionID, columnValue)
		}
	}
	return
}
package domain
import (
"slices"
"strings"
"github.com/PDOK/gokoala/internal/engine/util"
perfjson "github.com/goccy/go-json"
orderedmap "github.com/wk8/go-ordered-map/v2"
)
// FeatureProperties the properties of a GeoJSON Feature. Properties are either unordered
// (default, and have the best performance!) or ordered in a specific way as described in the config.
// Exactly one of the two fields is populated; 'unordered != nil' is used as the discriminator.
type FeatureProperties struct {
	unordered map[string]any
	ordered   orderedmap.OrderedMap[string, any]
}

// NewFeatureProperties creates empty FeatureProperties, ordered or unordered.
func NewFeatureProperties(order bool) FeatureProperties {
	return NewFeaturePropertiesWithData(order, make(map[string]any))
}

// NewFeaturePropertiesWithData creates FeatureProperties from the given data, ordered or unordered.
func NewFeaturePropertiesWithData(order bool, data map[string]any) FeatureProperties {
	if order {
		// properties are allowed to contain anything, including for example XML/GML
		ordered := *orderedmap.New[string, any](orderedmap.WithDisableHTMLEscape[string, any]())
		for k, v := range data {
			ordered.Set(k, v)
		}
		return FeatureProperties{ordered: ordered}
	}
	return FeatureProperties{unordered: data}
}
// MarshalJSON returns the JSON representation of either the ordered or unordered properties.
func (p *FeatureProperties) MarshalJSON() ([]byte, error) {
	if p.unordered == nil {
		return p.ordered.MarshalJSON()
	}
	// properties are allowed to contain anything, including for example XML/GML.
	return perfjson.MarshalWithOption(p.unordered, perfjson.DisableHTMLEscape())
}
// Value returns the value for the given key, or nil when the key is absent.
func (p *FeatureProperties) Value(key string) any {
	if p.unordered == nil {
		return p.ordered.Value(key)
	}
	return p.unordered[key]
}
// Delete removes the given key (and its value) from the properties.
func (p *FeatureProperties) Delete(key string) {
	if p.unordered == nil {
		p.ordered.Delete(key)
		return
	}
	delete(p.unordered, key)
}
// Set stores the given key/value pair, overwriting any existing value.
func (p *FeatureProperties) Set(key string, value any) {
	if p.unordered == nil {
		p.ordered.Set(key, value)
		return
	}
	p.unordered[key] = value
}
// SetRelation sets the given key/value pair and - for ordered properties only -
// repositions the key just before the first/shortest existing key that carries
// the given prefix (see moveKeyBeforePrefix).
func (p *FeatureProperties) SetRelation(key string, value any, existingKeyPrefix string) {
	p.Set(key, value)
	p.moveKeyBeforePrefix(key, existingKeyPrefix)
}
// Keys of the Feature properties.
//
// Note: In the future we might replace this with Go 1.23 iterators (range-over-func) however at the moment this
// isn't supported in Go templates: https://github.com/golang/go/pull/68329
func (p *FeatureProperties) Keys() []string {
	if p.unordered != nil {
		keys := util.Keys(p.unordered)
		slices.Sort(keys) // preserve alphabetical order
		return keys
	}
	keys := make([]string, 0, p.ordered.Len())
	for entry := p.ordered.Oldest(); entry != nil; entry = entry.Next() {
		keys = append(keys, entry.Key)
	}
	return keys
}
// moveKeyBeforePrefix best-effort algorithm to place the feature relation BEFORE the first shortest of any similarly named keys.
// For example, places "building.href" before "building_fk" or "building_fid".
func (p *FeatureProperties) moveKeyBeforePrefix(key string, keyPrefix string) {
	if p.unordered != nil {
		return // unordered properties have no position to maintain
	}
	// find the first, shortest key carrying the given prefix
	target := ""
	for entry := p.ordered.Oldest(); entry != nil; entry = entry.Next() {
		if !strings.HasPrefix(entry.Key, keyPrefix) {
			continue
		}
		if target == "" || len(entry.Key) < len(target) {
			target = entry.Key
		}
	}
	if target != "" {
		_ = p.ordered.MoveBefore(key, target) // best-effort: ignore failure to move
	}
}
package domain
import (
"log"
"regexp"
"sort"
"strings"
)
const regexRemoveSeparators = "[^a-z0-9]?"

// FeatureRelation a relation/reference from one feature to another in a different
// collection, according to OAF Part 5: https://docs.ogc.org/DRAFTS/23-058r1.html#rc_feature-references.
type FeatureRelation struct {
	Name         string // name of the relation as it appears in feature data and schema
	CollectionID string // referenced collection, empty when no matching collection was found
}

// NewFeatureRelation creates a FeatureRelation for the given column, or returns nil
// when the column doesn't denote a feature reference (see IsFeatureRelation).
func NewFeatureRelation(name, externalFidColumn string, collectionNames []string) *FeatureRelation {
	if !IsFeatureRelation(name, externalFidColumn) {
		return nil
	}
	relationName := newFeatureRelationName(name, externalFidColumn)
	return &FeatureRelation{
		Name:         relationName,
		CollectionID: findReferencedCollection(collectionNames, relationName),
	}
}

// newFeatureRelationName derive name of the feature relation.
//
// In the datasource we have fields named 'foobar_external_fid' or 'foobar_sometext_external_fid' containing UUID's to
// features in the 'foobar' collection. The field containing this relation will be named 'foobar' or 'foobar_sometext'.
// This name will appear in the feature data (GeoJSON) and the schema (JSON-Schema) to represent the feature relation.
func newFeatureRelationName(name string, externalFidColumn string) string {
	// QuoteMeta so a column name containing regex metacharacters is matched literally,
	// consistent with the plain substring check in IsFeatureRelation. The resulting
	// pattern is always valid, so MustCompile cannot panic. (Previously the Compile
	// error was discarded, which could leave a nil regex and panic on use.)
	regex := regexp.MustCompile(regexRemoveSeparators + regexp.QuoteMeta(externalFidColumn) + regexRemoveSeparators)
	return regex.ReplaceAllString(name, "")
}

// IsFeatureRelation "Algorithm" to determine feature reference:
//
// When externalFidColumn (e.g. 'external_fid') is part of the column name (e.g. 'foobar_external_fid' or
// 'foobar_sometext_external_fid') we treat the field as a reference to another feature in the 'foobar' collection.
//
// Meaning data sources should be pre-populated with a 'foobar_external_fid' field containing UUIDs of other features.
// Creating these fields in the data source is beyond the scope of this application.
func IsFeatureRelation(columnName string, externalFidColumn string) bool {
	if externalFidColumn == "" || columnName == externalFidColumn {
		return false
	}
	return strings.Contains(columnName, externalFidColumn)
}

// findReferencedCollection returns the collection the given relation name refers to,
// preferring an exact match, then the longest collection name that prefixes the
// relation name. Returns "" (and logs a warning) when nothing matches.
func findReferencedCollection(collectionNames []string, name string) string {
	if len(collectionNames) > 0 {
		// sort a copy (longest name first) so the caller's slice isn't reordered as a side effect
		sorted := append([]string(nil), collectionNames...)
		sort.Slice(sorted, func(i, j int) bool {
			return len(sorted[i]) > len(sorted[j])
		})
		// prefer exact matches first
		for _, collName := range sorted {
			if name == collName {
				return collName
			}
		}
		// then prefer fuzzy match (to support infix)
		for _, collName := range sorted {
			if strings.HasPrefix(name, collName) {
				return collName
			}
		}
	}
	log.Printf("Warning: could not find collection for feature reference '%s'", name)
	return ""
}
package domain
import (
"errors"
"fmt"
"log"
"slices"
"strings"
)
// JSON Schema string formats for temporal fields, as used in OAF Part 5.
const (
	formatDateOnly = "date"
	formatTimeOnly = "time"
	formatDateTime = "date-time"
)

// Names of bounding-box columns that may appear in a data source.
const (
	MinxField = "minx"
	MinyField = "miny"
	MaxxField = "maxx"
	MaxyField = "maxy"
)

// fieldsToSkip internal/non-public fields that are excluded from the schema (see NewSchema).
var fieldsToSkip = []string{
	MinxField,
	MinyField,
	MaxxField,
	MaxyField,
}

// Geometry type names (lowercase, no spaces) as they can appear in data source column types.
const (
	geometryType = "geometry"
	geometryCollectionType = "geometrycollection"
	pointType = "point"
	linestringType = "linestring"
	polygonType = "polygon"
	multipointType = "multipoint"
	multilinestringType = "multilinestring"
	multipolygonType = "multipolygon"
)

// geometryTypes all supported geometry types, used to detect geometry fields in a schema.
var geometryTypes = []string{
	geometryType,
	geometryCollectionType,
	pointType,
	linestringType,
	polygonType,
	multipointType,
	multilinestringType,
	multipolygonType,
}
// Schema derived from the data source schema.
// Describes the schema of a single collection (table in the data source).
type Schema struct {
	Fields []Field // the public fields/columns of the collection
}
// NewSchema creates a Schema from the given fields, validating that each field has a
// name and a type, excluding internal bbox fields, rejecting multiple geometry fields,
// and marking the (external) feature ID fields based on the given column names.
func NewSchema(fields []Field, fidColumn, externalFidColumn string) (*Schema, error) {
	result := make([]Field, 0, len(fields))
	geomCount := 0
	for _, field := range fields {
		if field.Name == "" {
			return nil, errors.New("empty field name found, field name is required")
		}
		if field.Type == "" {
			return nil, errors.New("empty field type found, field type is required")
		}
		// Don't include internal/non-public fields in schema
		if slices.Contains(fieldsToSkip, strings.ToLower(field.Name)) {
			continue
		}
		// Don't allow multiple geometries. OAF Part 5 does support multiple geometries, but GeoPackage and GeoJSON don't
		if slices.Contains(geometryTypes, strings.ToLower(field.Type)) {
			geomCount++
			if geomCount > 1 {
				return nil, errors.New("more than one geometry field found! Currently only a single geometry " +
					"per collection is supported (also a restriction of GeoJSON and GeoPackage)")
			}
		}
		field.IsFid = field.Name == fidColumn
		field.IsExternalFid = field.Name == externalFidColumn
		result = append(result, field)
	}
	schema := &Schema{Fields: result}
	if externalFidColumn != "" && !schema.HasExternalFid() {
		return nil, fmt.Errorf("external feature ID column '%s' configured but not found in schema", externalFidColumn)
	}
	return schema, nil
}
// IsDate convenience function to check if the given field is a Date.
func (s Schema) IsDate(field string) bool {
	return s.findField(field).ToTypeFormat().Format == formatDateOnly
}
// HasExternalFid convenience function to check if this schema defines an external feature ID.
func (s Schema) HasExternalFid() bool {
	return slices.ContainsFunc(s.Fields, func(f Field) bool {
		return f.IsExternalFid
	})
}
// findField returns the field with the given name, or a zero-value Field when not present.
func (s Schema) findField(name string) Field {
	for i := range s.Fields {
		if s.Fields[i].Name == name {
			return s.Fields[i]
		}
	}
	return Field{}
}
// findFeatureRelation returns the feature relation with the given name, or nil when not present.
func (s Schema) findFeatureRelation(name string) *FeatureRelation {
	for i := range s.Fields {
		rel := s.Fields[i].FeatureRelation
		if rel != nil && rel.Name == name {
			return rel
		}
	}
	return nil
}
// Field a field/column/property in the schema. Contains at least a name and data type.
type Field struct {
	FeatureRelation *FeatureRelation // set when this field references a feature in another collection, nil otherwise
	Name string // required
	Type string // required, can be data source specific
	Description string // optional
	IsRequired bool
	IsPrimaryGeometry bool
	IsPrimaryIntervalStart bool
	IsPrimaryIntervalEnd bool
	IsFid bool // true when this field matches the configured feature ID column (see NewSchema)
	IsExternalFid bool // true when this field matches the configured external feature ID column (see NewSchema)
}
// TypeFormat type and optional format according to JSON schema (https://json-schema.org/).
type TypeFormat struct {
	Type string // JSON Schema data type, e.g. "string", "integer", or a geometry type
	Format string // optional JSON Schema format, e.g. "date-time", "uuid", "geometry-point"
}

// ToTypeFormat converts the Field's data type (from SQLite or Postgres) to a valid JSON data type
// and optional format as specified in OAF Part 5.
// Unknown types fall back to "string" (with a logged warning).
func (f Field) ToTypeFormat() TypeFormat {
	// lowercase, no spaces
	normalizedType := strings.ReplaceAll(strings.ToLower(f.Type), " ", "")
	// sometimes data sources mention the length of fields within parenthesis, this is irrelevant.
	// also, SQLite accepts for example TEXT(5) but ignores the length: https://sqlite.org/datatype3.html#affinity_name_examples
	normalizedType = prefixBeforeParenthesis(normalizedType)
	switch normalizedType {
	case "boolean", "bool":
		return TypeFormat{Type: "boolean"}
	case "text", "char", "character", "charactervarying", "varchar", "nvarchar", "clob":
		return TypeFormat{Type: "string"}
	case "int", "integer", "tinyint", "smallint", "mediumint", "bigint", "int2", "int4", "int8":
		return TypeFormat{Type: "integer"}
	case "real", "float", "double", "doubleprecision", "numeric", "decimal":
		return TypeFormat{Type: "number", Format: "double"}
	case "uuid":
		// From OAF Part 5: Properties that represent a UUID SHOULD be represented as a string with format "uuid".
		return TypeFormat{Type: "string", Format: "uuid"}
	case "date":
		// From OAF Part 5: Each temporal property SHALL be a "string" literal with the appropriate format
		// (e.g., "date-time" or "date" for instances, depending on the temporal granularity).
		return TypeFormat{Type: "string", Format: formatDateOnly}
	case "time":
		// From OAF Part 5: Each temporal property SHALL be a "string" literal with the appropriate format
		// (e.g., "date-time" or "date" for instances, depending on the temporal granularity).
		return TypeFormat{Type: "string", Format: formatTimeOnly}
	case "datetime", "timestamp", "timestampwithtimezone", "timestampwithouttimezone":
		// From OAF Part 5: Each temporal property SHALL be a "string" literal with the appropriate format
		// (e.g., "date-time" or "date" for instances, depending on the temporal granularity).
		return TypeFormat{Type: "string", Format: formatDateTime}
	case geometryType, geometryCollectionType:
		// From OAF Part 5: the following special value is supported: "geometry-any" as the wildcard for any geometry type
		return TypeFormat{Type: normalizedType, Format: "geometry-any"}
	case pointType, linestringType, polygonType, multipointType, multilinestringType, multipolygonType:
		// From OAF Part 5: Each spatial property SHALL include a "format" member with a string value "geometry",
		// followed by a hyphen, followed by the name of the geometry type in lower case
		return TypeFormat{Type: normalizedType, Format: "geometry-" + normalizedType}
	default:
		// handle geometry types with additional Z and/or M dimensions e.g., LineStringZ, or PointZM.
		// OAF part 5 only supports simple 2D types, so advertise the 2D variant.
		for _, geomType := range geometryTypes {
			if strings.HasPrefix(normalizedType, geomType) {
				return TypeFormat{Type: normalizedType, Format: "geometry-" + geomType}
			}
		}
		log.Printf("Warning: unknown data type '%s' for field '%s', falling back to string", f.Type, f.Name)
		return TypeFormat{Type: "string"}
	}
}
// prefixBeforeParenthesis returns everything before the first "(", or the whole
// string when it contains no parenthesis.
func prefixBeforeParenthesis(s string) string {
	before, _, _ := strings.Cut(s, "(")
	return before
}
package domain
import (
"fmt"
"strconv"
"strings"
)
const (
	UndefinedSRID = 0
	WGS84SRID = 100000 // We use the SRID for CRS84 (WGS84) as defined in the GeoPackage, instead of EPSG:4326 (due to axis order). In time, we may need to read this value dynamically from the GeoPackage.
	WGS84SRIDPostgis = 4326 // PostGIS knows one SRID for WGS84
	CrsURIPrefix = "http://www.opengis.net/def/crs/"
	WGS84CodeOGC = "CRS84"
	WGS84CrsURI = CrsURIPrefix + "OGC/1.3/" + WGS84CodeOGC
	EPSGPrefix = "EPSG:"
)

// AxisOrder the order of axis for a certain CRS.
type AxisOrder int

const (
	AxisOrderXY AxisOrder = iota
	AxisOrderYX
)

// SRID Spatial Reference System Identifier: a unique value to unambiguously identify a spatial coordinate system.
// For example '28992' in https://www.opengis.net/def/crs/EPSG/0/28992
type SRID int

// GetOrDefault returns the SRID as an int, falling back to WGS84 for undefined (<= 0) values.
func (s SRID) GetOrDefault() int {
	if v := int(s); v > 0 {
		return v
	}
	return WGS84SRID
}

// EpsgToSrid parses an "EPSG:<code>" string into an SRID.
func EpsgToSrid(srs string) (SRID, error) {
	code, hasPrefix := strings.CutPrefix(srs, EPSGPrefix)
	if !hasPrefix {
		return -1, fmt.Errorf("expected SRS to start with '%s', got %s", EPSGPrefix, srs)
	}
	value, err := strconv.Atoi(code)
	if err != nil {
		return -1, fmt.Errorf("expected EPSG code to have numeric value, got %s", code)
	}
	return SRID(value), nil
}

// ContentCrs the coordinate reference system (represented as a URI) of the content/output to return.
type ContentCrs string

// ToLink returns link target conforming to RFC 8288.
func (c ContentCrs) ToLink() string {
	return "<" + string(c) + ">"
}

// IsWGS84 reports whether this CRS is the OGC CRS84 (WGS84) URI.
func (c ContentCrs) IsWGS84() bool {
	return c == ContentCrs(WGS84CrsURI)
}
package features
import (
"context"
"errors"
"fmt"
"log"
"net/http"
"strconv"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/go-chi/chi/v5"
"github.com/google/uuid"
)
// Feature endpoint serves a single Feature by ID
//
//nolint:cyclop
func (f *Features) Feature() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// reject requests that don't match the OpenAPI spec
		if err := f.engine.OpenAPI.ValidateRequest(r); err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		collectionID := chi.URLParam(r, "collectionId")
		collection, ok := f.configuredCollections[collectionID]
		if !ok {
			// BUG FIX: report the requested collection ID. 'collection' is the zero
			// value here, so the previous collection.ID always rendered as "".
			handleCollectionNotFound(w, collectionID)
			return
		}
		featureID, err := parseFeatureID(r)
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		// parse query parameters (crs, profile, etc.)
		url := featureURL{*f.engine.Config.BaseURL.URL,
			r.URL.Query(),
			f.schemas[collection.ID],
		}
		outputSRID, contentCrs, profile, err := url.parse()
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		w.Header().Add(engine.HeaderContentCrs, contentCrs.ToLink())
		// validation completed, now get the feature
		datasource := f.datasources[datasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collection.ID}]
		feat, err := datasource.GetFeature(r.Context(), collection.ID, featureID,
			outputSRID, f.axisOrderBySRID[outputSRID.GetOrDefault()], profile)
		if err != nil {
			handleFeatureQueryError(w, collection.ID, featureID, err)
			return
		}
		if feat == nil {
			handleFeatureNotFound(w, collection.ID, featureID)
			return
		}
		// render output in the negotiated format, depending on whether the collection
		// holds spatial features or non-spatial attributes
		format := f.engine.CN.NegotiateFormat(r)
		collectionType := f.collectionTypes.Get(collection.ID)
		switch collectionType {
		case geospatial.Features:
			switch format {
			case engine.FormatHTML:
				f.html.feature(w, r, collection, feat, collectionType.AvailableFormats())
			case engine.FormatGeoJSON, engine.FormatJSON:
				f.json.featureAsGeoJSON(w, r, collectionID, collection.Features, feat, url)
			case engine.FormatJSONFG:
				f.json.featureAsJSONFG(w, r, collectionID, collection.Features, feat, url, contentCrs)
			default:
				handleFormatNotSupported(w, format)
			}
		case geospatial.Attributes:
			switch format {
			case engine.FormatHTML:
				f.html.attribute(w, r, collection, feat, collectionType.AvailableFormats())
			case engine.FormatJSON:
				f.json.featureAsAttributeJSON(w, r, collectionID, feat, url)
			default:
				handleFormatNotSupported(w, format)
			}
		}
	}
}
func parseFeatureID(r *http.Request) (any, error) {
var featureID any
featureID, err := uuid.Parse(chi.URLParam(r, "featureId"))
if err != nil {
// fallback to numerical feature id
featureID, err = strconv.ParseInt(chi.URLParam(r, "featureId"), 10, 0)
if err != nil {
return nil, errors.New("feature ID must be a UUID or number")
}
}
return featureID, nil
}
// log error but send a generic message to the client to prevent possible information leakage from datasource.
func handleFeatureQueryError(w http.ResponseWriter, collectionID string, featureID any, err error) {
	msg := fmt.Sprintf("failed to retrieve feature %v in collection %s", featureID, collectionID)
	timedOut := errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded)
	if timedOut {
		// provide more context when user hits the query timeout
		msg += ": querying the feature took too long (timeout encountered). Try again, or contact support"
	}
	log.Printf("%s, error: %v\n", msg, err)
	engine.RenderProblem(engine.ProblemServerError, w, msg) // don't include sensitive information in details msg
}
// handleFeatureNotFound renders a 404 problem for a feature that doesn't exist.
func handleFeatureNotFound(w http.ResponseWriter, collectionID string, featureID any) {
	msg := fmt.Sprintf("the requested feature with id: %v does not exist in collection '%v'", featureID, collectionID)
	log.Println(msg)
	engine.RenderProblem(engine.ProblemNotFound, w, msg)
}
package features
import (
"context"
"errors"
"log"
"net/http"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/go-chi/chi/v5"
"github.com/twpayne/go-geom"
)
// errBBoxRequestDisallowed returned when a bbox filter is used on a collection of non-spatial items.
var errBBoxRequestDisallowed = errors.New("bbox is not supported for this collection since it does not " +
	"contain geospatial items (features), only non-geospatial items (attributes)")

// emptyFeatureCollection empty result used as fallback when a query yields no features.
// NOTE(review): this is a shared package-level instance; verify that callers never mutate it.
var emptyFeatureCollection = &domain.FeatureCollection{Features: make([]*domain.Feature, 0)}
// Features this endpoint serves a FeatureCollection with the given collectionId
//
// BEWARE: this is one of the most performance-sensitive pieces of code in the system.
// Try to do as much initialization work outside the hot path, only do essential
// operations inside this method.
func (f *Features) Features() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		// reject requests that don't match the OpenAPI spec
		if err := f.engine.OpenAPI.ValidateRequest(r); err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		collectionID := chi.URLParam(r, "collectionId")
		collection, ok := f.configuredCollections[collectionID]
		if !ok {
			handleCollectionNotFound(w, collectionID)
			return
		}
		// parse all query parameters (cursor, limit, crs, bbox, datetime, property filters, profile)
		url := featureCollectionURL{
			*f.engine.Config.BaseURL.URL,
			r.URL.Query(),
			f.engine.Config.OgcAPI.Features.Limit,
			f.configuredPropertyFilters[collection.ID],
			f.schemas[collection.ID],
			collection.HasDateTime(),
		}
		encodedCursor, limit, inputSRID, outputSRID, contentCrs, bbox,
			referenceDate, propertyFilters, profile, err := url.parse()
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		w.Header().Add(engine.HeaderContentCrs, contentCrs.ToLink())
		// select the datasource holding this collection in the requested output CRS
		datasource := f.datasources[datasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collection.ID}]
		collectionType := f.collectionTypes.Get(collection.ID)
		if !collectionType.IsSpatialRequestAllowed(bbox) {
			engine.RenderProblem(engine.ProblemBadRequest, w, errBBoxRequestDisallowed.Error())
			return
		}
		// validation completed, now get the features
		newCursor, fc, err := f.queryFeatures(r.Context(), datasource, inputSRID, outputSRID, bbox,
			encodedCursor.Decode(url.checksum()), limit, collection, referenceDate, propertyFilters, profile)
		if err != nil {
			handleFeaturesQueryError(w, collection.ID, err)
			return
		}
		// render output in the negotiated format, depending on whether the collection
		// holds spatial features or non-spatial attributes
		format := f.engine.CN.NegotiateFormat(r)
		switch collectionType {
		case geospatial.Features:
			switch format {
			case engine.FormatHTML:
				f.html.features(w, r, collection, newCursor, url, limit, &referenceDate,
					propertyFilters, f.configuredPropertyFilters[collection.ID],
					fc, collectionType.AvailableFormats())
			case engine.FormatGeoJSON, engine.FormatJSON:
				f.json.featuresAsGeoJSON(w, r, collection.ID, newCursor, url, collection.Features, fc)
			case engine.FormatJSONFG:
				f.json.featuresAsJSONFG(w, r, collection.ID, newCursor, url, collection.Features, fc, contentCrs)
			default:
				handleFormatNotSupported(w, format)
			}
		case geospatial.Attributes:
			switch format {
			case engine.FormatHTML:
				f.html.attributes(w, r, collection, newCursor, url, limit, &referenceDate,
					propertyFilters, f.configuredPropertyFilters[collection.ID],
					fc, collectionType.AvailableFormats())
			case engine.FormatJSON:
				f.json.featuresAsAttributeJSON(w, r, collection.ID, newCursor, url, fc)
			default:
				handleFormatNotSupported(w, format)
			}
		}
	}
}
// queryFeatures retrieves one page of features. Fast path: a single datasource query when
// input and output CRS are compatible or the datasource transforms on the fly. Slow path:
// first resolve matching feature IDs in the input CRS, then fetch those features in the
// output CRS from the corresponding datasource. Never returns a nil FeatureCollection.
func (f *Features) queryFeatures(ctx context.Context, datasource ds.Datasource, inputSRID, outputSRID domain.SRID,
	bbox *geom.Bounds, currentCursor domain.DecodedCursor, limit int, collection config.GeoSpatialCollection,
	referenceDate time.Time, propertyFilters map[string]string, profile domain.Profile) (domain.Cursors, *domain.FeatureCollection, error) {
	var newCursor domain.Cursors
	var fc *domain.FeatureCollection
	var err error
	if shouldQuerySingleDatasource(datasource, inputSRID, outputSRID, bbox) {
		// fast path
		fc, newCursor, err = datasource.GetFeatures(ctx, collection.ID, ds.FeaturesCriteria{
			Cursor:           currentCursor,
			Limit:            limit,
			InputSRID:        inputSRID,
			OutputSRID:       outputSRID,
			Bbox:             bbox,
			TemporalCriteria: createTemporalCriteria(collection, referenceDate),
			PropertyFilters:  propertyFilters,
			// Add filter, filter-lang
		}, f.axisOrderBySRID[outputSRID.GetOrDefault()], profile)
	} else {
		// slower path: get feature ids by input CRS (step 1), then the actual features in output CRS (step 2)
		var fids []int64
		datasource = f.datasources[datasourceKey{srid: inputSRID.GetOrDefault(), collectionID: collection.ID}]
		fids, newCursor, err = datasource.GetFeatureIDs(ctx, collection.ID, ds.FeaturesCriteria{
			Cursor:           currentCursor,
			Limit:            limit,
			InputSRID:        inputSRID,
			OutputSRID:       outputSRID,
			Bbox:             bbox,
			TemporalCriteria: createTemporalCriteria(collection, referenceDate),
			PropertyFilters:  propertyFilters,
			// Add filter, filter-lang
		})
		if err == nil && fids != nil {
			// this is step 2: get the actual features in output CRS by feature ID
			datasource = f.datasources[datasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collection.ID}]
			fc, err = datasource.GetFeaturesByID(ctx, collection.ID, fids, f.axisOrderBySRID[outputSRID.GetOrDefault()], profile)
		}
	}
	if fc == nil {
		// BUG FIX: return a fresh empty collection instead of the shared package-level
		// 'emptyFeatureCollection'. Callers mutate the result (e.g. set Timestamp and
		// Links before serialization), so handing out a shared instance causes a data
		// race and cross-request leakage under concurrent requests.
		fc = &domain.FeatureCollection{Features: make([]*domain.Feature, 0)}
	}
	return newCursor, fc, err
}
// shouldQuerySingleDatasource reports whether one datasource can serve this request,
// or whether a two-step query (by input SRID, then output SRID) is needed.
func shouldQuerySingleDatasource(datasource ds.Datasource, input domain.SRID, output domain.SRID, bbox *geom.Bounds) bool {
	if datasource != nil && datasource.SupportsOnTheFlyTransformation() {
		return true // for on-the-fly we can always use just one datasource
	}
	if bbox == nil {
		return true
	}
	// in the case of ahead-of-time transformed data sources, use a
	// single datasource only when input and output SRID are compatible.
	in, out := int(input), int(output)
	switch {
	case in == out:
		return true
	case in == domain.UndefinedSRID && out == domain.WGS84SRID:
		return true
	case in == domain.WGS84SRID && out == domain.UndefinedSRID:
		return true
	default:
		return false
	}
}
// createTemporalCriteria builds the temporal filter criteria for a collection with
// datetime support; returns the zero value for collections without.
func createTemporalCriteria(collection config.GeoSpatialCollection, referenceDate time.Time) ds.TemporalCriteria {
	if !collection.HasDateTime() {
		return ds.TemporalCriteria{}
	}
	return ds.TemporalCriteria{
		ReferenceDate:     referenceDate,
		StartDateProperty: collection.Metadata.TemporalProperties.StartDate,
		EndDateProperty:   collection.Metadata.TemporalProperties.EndDate,
	}
}
// log error but send a generic message to the client to prevent possible information leakage from datasource.
func handleFeaturesQueryError(w http.ResponseWriter, collectionID string, err error) {
	msg := "failed to retrieve feature collection " + collectionID
	timedOut := errors.Is(err, context.Canceled) || errors.Is(err, context.DeadlineExceeded)
	if timedOut {
		// provide more context when user hits the query timeout
		msg += ": querying the features took too long (timeout encountered). Simplify your request and try again, or contact support"
	}
	log.Printf("%s, error: %v\n", msg, err)
	engine.RenderProblem(engine.ProblemServerError, w, msg) // don't include sensitive information in details msg
}
package features
import (
"net/http"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
)
const (
	// collectionsCrumb base path segment used to build collection breadcrumb links.
	collectionsCrumb = "collections/"
)

var (
	// collectionsBreadcrumb root breadcrumb shared by all feature/attribute pages.
	collectionsBreadcrumb = []engine.Breadcrumb{
		{
			Name: "Collections",
			Path: "collections",
		},
	}
	// Template keys for the items (collection) page and the single item page.
	featuresKey = engine.NewTemplateKey(templatesDir + "features.go.html")
	featureKey = engine.NewTemplateKey(templatesDir + "feature.go.html")
)
// htmlFeatures renders HTML pages for feature collections and single features.
type htmlFeatures struct {
	engine *engine.Engine
}
// newHTMLFeatures creates the HTML renderer for feature pages and pre-parses its templates.
func newHTMLFeatures(e *engine.Engine) *htmlFeatures {
	e.ParseTemplate(featuresKey)
	e.ParseTemplate(featureKey)
	return &htmlFeatures{engine: e}
}
// featureCollectionPage enriched FeatureCollection for HTML representation.
type featureCollectionPage struct {
	domain.FeatureCollection
	CollectionID string
	Metadata *config.GeoSpatialCollectionMetadata
	Cursor domain.Cursors
	PrevLink string // URL of the previous page, based on the cursor
	NextLink string // URL of the next page, based on the cursor
	Limit int // page size
	ReferenceDate *time.Time // nil when no (non-zero) reference date applies
	MapSheetProperties *config.MapSheetDownloadProperties
	WebConfig *config.WebConfig
	ShowViewer bool // false for attribute (non-geospatial) pages, which have nothing to display on a map
	// Property filters as supplied by the user in the URL: filter name + value(s)
	PropertyFilters map[string]string
	// Property filters as specified in the (YAML) config, enriched with allowed values. Does not contain user supplied values
	ConfiguredPropertyFilters map[string]datasources.PropertyFilterWithAllowedValues
}
// featurePage enriched Feature for HTML representation.
type featurePage struct {
	domain.Feature
	CollectionID string
	FeatureID string
	Metadata *config.GeoSpatialCollectionMetadata
	MapSheetProperties *config.MapSheetDownloadProperties
	WebConfig *config.WebConfig
	ShowViewer bool // false for attribute (non-geospatial) pages, which have nothing to display on a map
}
// features renders the HTML page listing features in a collection.
func (hf *htmlFeatures) features(w http.ResponseWriter, r *http.Request,
	collection config.GeoSpatialCollection, cursor domain.Cursors,
	featuresURL featureCollectionURL, limit int, referenceDate *time.Time,
	propertyFilters map[string]string,
	configuredPropertyFilters datasources.PropertyFiltersWithAllowedValues,
	fc *domain.FeatureCollection, outputFormats []engine.OutputFormat) {
	breadcrumbs, pageContent := hf.toItemsPage(collection, referenceDate, fc, cursor,
		featuresURL, limit, propertyFilters, configuredPropertyFilters)
	lang := hf.engine.CN.NegotiateLanguage(w, r)
	key := engine.ExpandTemplateKey(featuresKey, lang)
	hf.engine.RenderAndServe(w, r, key, pageContent, breadcrumbs, outputFormats)
}
// attributes renders the HTML page listing non-spatial items in a collection.
func (hf *htmlFeatures) attributes(w http.ResponseWriter, r *http.Request, collection config.GeoSpatialCollection,
	cursor domain.Cursors, featuresURL featureCollectionURL, limit int, referenceDate *time.Time,
	propertyFilters map[string]string, configuredPropertyFilters datasources.PropertyFiltersWithAllowedValues,
	fc *domain.FeatureCollection, outputFormats []engine.OutputFormat) {
	breadcrumbs, pageContent := hf.toItemsPage(collection, referenceDate, fc, cursor,
		featuresURL, limit, propertyFilters, configuredPropertyFilters)
	pageContent.ShowViewer = false // since items have no geometry
	lang := hf.engine.CN.NegotiateLanguage(w, r)
	key := engine.ExpandTemplateKey(featuresKey, lang)
	hf.engine.RenderAndServe(w, r, key, pageContent, breadcrumbs, outputFormats)
}
// toItemsPage builds the breadcrumbs and page content for an items (features/attributes) page.
func (hf *htmlFeatures) toItemsPage(collection config.GeoSpatialCollection, referenceDate *time.Time,
	fc *domain.FeatureCollection, cursor domain.Cursors, featuresURL featureCollectionURL, limit int,
	propertyFilters map[string]string, configuredPropertyFilters datasources.PropertyFiltersWithAllowedValues) ([]engine.Breadcrumb, *featureCollectionPage) {
	breadcrumbs := collectionsBreadcrumb
	breadcrumbs = append(breadcrumbs, []engine.Breadcrumb{
		{
			Name: getCollectionTitle(collection.ID, collection.Metadata),
			Path: collectionsCrumb + collection.ID,
		},
		{
			Name: "Items",
			Path: collectionsCrumb + collection.ID + "/items",
		},
	}...)
	// BUG FIX: guard against a nil pointer before calling IsZero (previously a nil
	// referenceDate would panic). A zero reference date means "not set".
	if referenceDate != nil && referenceDate.IsZero() {
		referenceDate = nil
	}
	var mapSheetProps *config.MapSheetDownloadProperties
	var wc *config.WebConfig
	if collection.Features != nil {
		if collection.Features.MapSheetDownloads != nil {
			mapSheetProps = &collection.Features.MapSheetDownloads.Properties
		}
		wc = collection.Features.Web
	}
	// use field names (instead of positional initialization) so struct changes can't
	// silently shift values into the wrong field
	pageContent := &featureCollectionPage{
		FeatureCollection:         *fc,
		CollectionID:              collection.ID,
		Metadata:                  collection.Metadata,
		Cursor:                    cursor,
		PrevLink:                  featuresURL.toPrevNextURL(collection.ID, cursor.Prev, engine.FormatHTML),
		NextLink:                  featuresURL.toPrevNextURL(collection.ID, cursor.Next, engine.FormatHTML),
		Limit:                     limit,
		ReferenceDate:             referenceDate,
		MapSheetProperties:        mapSheetProps,
		WebConfig:                 wc,
		ShowViewer:                true,
		PropertyFilters:           propertyFilters,
		ConfiguredPropertyFilters: configuredPropertyFilters,
	}
	return breadcrumbs, pageContent
}
// feature renders the HTML page for a single feature.
func (hf *htmlFeatures) feature(w http.ResponseWriter, r *http.Request,
	collection config.GeoSpatialCollection, feat *domain.Feature, outputFormats []engine.OutputFormat) {
	breadcrumbs, pageContent := hf.toItemPage(collection, feat)
	lang := hf.engine.CN.NegotiateLanguage(w, r)
	key := engine.ExpandTemplateKey(featureKey, lang)
	hf.engine.RenderAndServe(w, r, key, pageContent, breadcrumbs, outputFormats)
}
// attribute renders the HTML page for a single non-spatial item.
func (hf *htmlFeatures) attribute(w http.ResponseWriter, r *http.Request,
	collection config.GeoSpatialCollection, feat *domain.Feature, outputFormats []engine.OutputFormat) {
	breadcrumbs, pageContent := hf.toItemPage(collection, feat)
	pageContent.ShowViewer = false // since items have no geometry
	lang := hf.engine.CN.NegotiateLanguage(w, r)
	key := engine.ExpandTemplateKey(featureKey, lang)
	hf.engine.RenderAndServe(w, r, key, pageContent, breadcrumbs, outputFormats)
}
// toItemPage builds the breadcrumbs and page content for a single item page.
func (hf *htmlFeatures) toItemPage(collection config.GeoSpatialCollection, feat *domain.Feature) ([]engine.Breadcrumb, *featurePage) {
	breadcrumbs := collectionsBreadcrumb
	breadcrumbs = append(breadcrumbs, []engine.Breadcrumb{
		{
			Name: getCollectionTitle(collection.ID, collection.Metadata),
			Path: collectionsCrumb + collection.ID,
		},
		{
			Name: "Items",
			Path: collectionsCrumb + collection.ID + "/items",
		},
		{
			Name: feat.ID,
			Path: collectionsCrumb + collection.ID + "/items/" + feat.ID,
		},
	}...)
	var mapSheetProps *config.MapSheetDownloadProperties
	var wc *config.WebConfig
	if collection.Features != nil {
		if collection.Features.MapSheetDownloads != nil {
			mapSheetProps = &collection.Features.MapSheetDownloads.Properties
		}
		wc = collection.Features.Web
	}
	// use field names (instead of positional initialization) for readability
	pageContent := &featurePage{
		Feature:            *feat,
		CollectionID:       collection.ID,
		FeatureID:          feat.ID,
		Metadata:           collection.Metadata,
		MapSheetProperties: mapSheetProps,
		WebConfig:          wc,
		ShowViewer:         true,
	}
	return breadcrumbs, pageContent
}
// getCollectionTitle returns the collection's configured title, falling back to its ID.
func getCollectionTitle(collectionID string, metadata *config.GeoSpatialCollectionMetadata) string {
	if metadata == nil || metadata.Title == nil {
		return collectionID
	}
	return *metadata.Title
}
package features
import (
"bytes"
stdjson "encoding/json"
"fmt"
"io"
"log"
"net/http"
"os"
"strconv"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
perfjson "github.com/goccy/go-json"
)
var (
	now = time.Now // allow mocking
	// disableJSONPerfOptimization escape hatch (via env variable) to disable the optimized JSON path.
	disableJSONPerfOptimization, _ = strconv.ParseBool(os.Getenv("DISABLE_JSON_PERF_OPTIMIZATION"))
)
// jsonFeatures renders (Geo)JSON output for feature endpoints.
type jsonFeatures struct {
	engine *engine.Engine
	validateResponse bool // when true, responses are validated against the OpenAPI spec (slower)
}
// newJSONFeatures creates the JSON/GeoJSON renderer for feature endpoints.
func newJSONFeatures(e *engine.Engine) *jsonFeatures {
	validate := *e.Config.OgcAPI.Features.ValidateResponses
	if validate {
		log.Println("JSON response validation is enabled (by default). When serving large feature collections " +
			"set 'validateResponses' to 'false' to improve performance")
	}
	return &jsonFeatures{engine: e, validateResponse: validate}
}
// GeoJSON.
func (jf *jsonFeatures) featuresAsGeoJSON(w http.ResponseWriter, r *http.Request, collectionID string, cursor domain.Cursors,
	featuresURL featureCollectionURL, configuredFC *config.CollectionEntryFeatures, fc *domain.FeatureCollection) {
	fc.Timestamp = now().Format(time.RFC3339)
	fc.Links = jf.createFeatureCollectionLinks(engine.FormatGeoJSON, collectionID, cursor, featuresURL)
	jf.createFeatureDownloadLinks(configuredFC, fc)
	if !jf.validateResponse {
		serveJSON(&fc, engine.MediaTypeGeoJSON, w)
		return
	}
	jf.serveAndValidateJSON(&fc, engine.MediaTypeGeoJSON, r, w)
}
// GeoJSON.
func (jf *jsonFeatures) featureAsGeoJSON(w http.ResponseWriter, r *http.Request, collectionID string,
	configuredFC *config.CollectionEntryFeatures, feat *domain.Feature, url featureURL) {
	feat.Links = jf.createFeatureLinks(engine.FormatGeoJSON, url, collectionID, feat.ID)
	if props := getMapSheetProperties(configuredFC); props != nil {
		downloadLink := domain.Link{
			Rel:   "enclosure",
			Title: "Download feature",
			Type:  props.MediaType.String(),
			Href:  fmt.Sprintf("%v", feat.Properties.Value(props.AssetURL)),
		}
		feat.Links = append(feat.Links, downloadLink)
	}
	if !jf.validateResponse {
		serveJSON(&feat, engine.MediaTypeGeoJSON, w)
		return
	}
	jf.serveAndValidateJSON(&feat, engine.MediaTypeGeoJSON, r, w)
}
// featuresAsAttributeJSON writes a feature collection from a non-spatial
// collection as plain JSON ("attribute JSON"): ID, links and properties,
// but no geometry.
//
// Features is always allocated (never nil) so an empty collection serializes
// as "features": [] instead of "features": null; pre-sizing with capacity
// also removes the original's redundant empty/non-empty branch.
func (jf *jsonFeatures) featuresAsAttributeJSON(w http.ResponseWriter, r *http.Request, collectionID string, cursor domain.Cursors,
	featuresURL featureCollectionURL, fc *domain.FeatureCollection) {

	fgFC := domain.AttributeCollection{}
	fgFC.Features = make([]*domain.Attribute, 0, len(fc.Features))
	for _, f := range fc.Features {
		fgFC.Features = append(fgFC.Features, &domain.Attribute{
			ID:         f.ID,
			Links:      f.Links,
			Properties: f.Properties,
		})
	}
	fgFC.NumberReturned = fc.NumberReturned
	fgFC.Timestamp = now().Format(time.RFC3339)
	fgFC.Links = jf.createFeatureCollectionLinks(engine.FormatJSON, collectionID, cursor, featuresURL)

	if jf.validateResponse {
		jf.serveAndValidateJSON(&fgFC, engine.MediaTypeJSON, r, w)
	} else {
		serveJSON(&fgFC, engine.MediaTypeJSON, w)
	}
}
// featureAsAttributeJSON writes a single feature from a non-spatial collection
// as plain JSON: ID, links and properties, but no geometry.
func (jf *jsonFeatures) featureAsAttributeJSON(w http.ResponseWriter, r *http.Request, collectionID string,
	f *domain.Feature, url featureURL) {

	attribute := domain.Attribute{
		ID:         f.ID,
		Links:      f.Links,
		Properties: f.Properties,
	}
	attribute.Links = jf.createFeatureLinks(engine.FormatJSON, url, collectionID, attribute.ID)

	if !jf.validateResponse {
		serveJSON(&attribute, engine.MediaTypeJSON, w)
		return
	}
	jf.serveAndValidateJSON(&attribute, engine.MediaTypeJSON, r, w)
}
// featuresAsJSONFG writes the given feature collection as JSON-FG
// (OGC Features & Geometries JSON), which carries the CRS in "coordRefSys"
// and puts non-WGS84 geometries in "place" instead of "geometry" (see setGeom).
//
// Features is always allocated (never nil) so an empty collection serializes
// as "features": [] instead of "features": null; pre-sizing with capacity
// also removes the original's redundant empty/non-empty branch.
func (jf *jsonFeatures) featuresAsJSONFG(w http.ResponseWriter, r *http.Request, collectionID string, cursor domain.Cursors,
	featuresURL featureCollectionURL, configuredFC *config.CollectionEntryFeatures, fc *domain.FeatureCollection, crs domain.ContentCrs) {

	fgFC := domain.JSONFGFeatureCollection{}
	fgFC.ConformsTo = []string{domain.ConformanceJSONFGCore}
	fgFC.CoordRefSys = string(crs)
	fgFC.Features = make([]*domain.JSONFGFeature, 0, len(fc.Features))
	for _, f := range fc.Features {
		fgF := domain.JSONFGFeature{
			ID:         f.ID,
			Links:      f.Links,
			Properties: f.Properties,
		}
		setGeom(crs, &fgF, f)
		fgFC.Features = append(fgFC.Features, &fgF)
	}
	fgFC.NumberReturned = fc.NumberReturned
	fgFC.Timestamp = now().Format(time.RFC3339)
	fgFC.Links = jf.createFeatureCollectionLinks(engine.FormatJSONFG, collectionID, cursor, featuresURL)
	jf.createJSONFGFeatureDownloadLinks(configuredFC, &fgFC)

	if jf.validateResponse {
		jf.serveAndValidateJSON(&fgFC, engine.MediaTypeJSONFG, r, w)
	} else {
		serveJSON(&fgFC, engine.MediaTypeJSONFG, w)
	}
}
// featureAsJSONFG writes a single feature as JSON-FG, adding self/alternate
// links and - for map sheet collections - an "enclosure" (download) link.
func (jf *jsonFeatures) featureAsJSONFG(w http.ResponseWriter, r *http.Request, collectionID string,
	configuredFC *config.CollectionEntryFeatures, f *domain.Feature, url featureURL, crs domain.ContentCrs) {

	fgF := domain.JSONFGFeature{
		ID:          f.ID,
		Links:       f.Links,
		ConformsTo:  []string{domain.ConformanceJSONFGCore},
		CoordRefSys: string(crs),
		Properties:  f.Properties,
	}
	setGeom(crs, &fgF, f)
	fgF.Links = jf.createFeatureLinks(engine.FormatJSONFG, url, collectionID, fgF.ID)
	if props := getMapSheetProperties(configuredFC); props != nil {
		downloadLink := domain.Link{
			Rel:   "enclosure",
			Title: "Download feature",
			Type:  props.MediaType.String(),
			Href:  fmt.Sprintf("%v", fgF.Properties.Value(props.AssetURL)),
		}
		fgF.Links = append(fgF.Links, downloadLink)
	}
	if !jf.validateResponse {
		serveJSON(&fgF, engine.MediaTypeJSONFG, w)
		return
	}
	jf.serveAndValidateJSON(&fgF, engine.MediaTypeJSONFG, r, w)
}
// createFeatureCollectionLinks assembles the links for a feature collection page:
// self + alternate format links, an HTML alternate, and next/prev pagination
// links when available.
//
// NOTE(review): the GeoJSON variants use engine.FormatJSON (not FormatGeoJSON)
// in their URLs - this mirrors the pre-existing behavior.
func (jf *jsonFeatures) createFeatureCollectionLinks(currentFormat string, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL) []domain.Link {

	links := make([]domain.Link, 0)
	add := func(rel, title, mediaType, href string) {
		links = append(links, domain.Link{Rel: rel, Title: title, Type: mediaType, Href: href})
	}
	switch currentFormat {
	case engine.FormatGeoJSON:
		add("self", "This document as GeoJSON", engine.MediaTypeGeoJSON, featuresURL.toSelfURL(collectionID, engine.FormatJSON))
		add("alternate", "This document as JSON-FG", engine.MediaTypeJSONFG, featuresURL.toSelfURL(collectionID, engine.FormatJSONFG))
	case engine.FormatJSONFG:
		add("self", "This document as JSON-FG", engine.MediaTypeJSONFG, featuresURL.toSelfURL(collectionID, engine.FormatJSONFG))
		add("alternate", "This document as GeoJSON", engine.MediaTypeGeoJSON, featuresURL.toSelfURL(collectionID, engine.FormatJSON))
	case engine.FormatJSON:
		add("self", "This document as JSON", engine.MediaTypeJSON, featuresURL.toSelfURL(collectionID, engine.FormatJSON))
	}
	add("alternate", "This document as HTML", engine.MediaTypeHTML, featuresURL.toSelfURL(collectionID, engine.FormatHTML))

	if cursor.HasNext {
		switch currentFormat {
		case engine.FormatGeoJSON:
			add("next", "Next page", engine.MediaTypeGeoJSON, featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatJSON))
		case engine.FormatJSONFG:
			add("next", "Next page", engine.MediaTypeJSONFG, featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatJSONFG))
		}
	}
	if cursor.HasPrev {
		switch currentFormat {
		case engine.FormatGeoJSON:
			add("prev", "Previous page", engine.MediaTypeGeoJSON, featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatJSON))
		case engine.FormatJSONFG:
			add("prev", "Previous page", engine.MediaTypeJSONFG, featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatJSONFG))
		}
	}
	return links
}
// createFeatureLinks assembles the links for a single feature page:
// self + alternate format links, an HTML alternate, and a link back to the
// parent collection.
func (jf *jsonFeatures) createFeatureLinks(currentFormat string, url featureURL,
	collectionID string, featureID string) []domain.Link {

	links := make([]domain.Link, 0)
	add := func(rel, title, mediaType, href string) {
		links = append(links, domain.Link{Rel: rel, Title: title, Type: mediaType, Href: href})
	}
	switch currentFormat {
	case engine.FormatGeoJSON:
		add("self", "This document as GeoJSON", engine.MediaTypeGeoJSON, url.toSelfURL(collectionID, featureID, engine.FormatJSON))
		add("alternate", "This document as JSON-FG", engine.MediaTypeJSONFG, url.toSelfURL(collectionID, featureID, engine.FormatJSONFG))
	case engine.FormatJSONFG:
		add("self", "This document as JSON-FG", engine.MediaTypeJSONFG, url.toSelfURL(collectionID, featureID, engine.FormatJSONFG))
		add("alternate", "This document as GeoJSON", engine.MediaTypeGeoJSON, url.toSelfURL(collectionID, featureID, engine.FormatJSON))
	case engine.FormatJSON:
		add("self", "This document as JSON", engine.MediaTypeJSON, url.toSelfURL(collectionID, featureID, engine.FormatJSON))
	}
	add("alternate", "This document as HTML", engine.MediaTypeHTML, url.toSelfURL(collectionID, featureID, engine.FormatHTML))
	add("collection", "The collection to which this feature belongs", engine.MediaTypeJSON, url.toCollectionURL(collectionID, engine.FormatJSON))
	return links
}
// createFeatureDownloadLinks replaces the links of every feature in the
// collection with a single "enclosure" (download) link when map sheet
// downloads are configured for this collection; no-op otherwise.
func (jf *jsonFeatures) createFeatureDownloadLinks(configuredFC *config.CollectionEntryFeatures, fc *domain.FeatureCollection) {
	props := getMapSheetProperties(configuredFC)
	if props == nil {
		return
	}
	for _, feature := range fc.Features {
		feature.Links = []domain.Link{{
			Rel:   "enclosure",
			Title: "Download feature",
			Type:  props.MediaType.String(),
			Href:  fmt.Sprintf("%v", feature.Properties.Value(props.AssetURL)),
		}}
	}
}
// createJSONFGFeatureDownloadLinks is the JSON-FG counterpart of
// createFeatureDownloadLinks: replaces each feature's links with a single
// "enclosure" (download) link when map sheet downloads are configured.
func (jf *jsonFeatures) createJSONFGFeatureDownloadLinks(configuredFC *config.CollectionEntryFeatures, fc *domain.JSONFGFeatureCollection) {
	props := getMapSheetProperties(configuredFC)
	if props == nil {
		return
	}
	for _, feature := range fc.Features {
		feature.Links = []domain.Link{{
			Rel:   "enclosure",
			Title: "Download feature",
			Type:  props.MediaType.String(),
			Href:  fmt.Sprintf("%v", feature.Properties.Value(props.AssetURL)),
		}}
	}
}
// serveAndValidateJSON serves JSON after performing OpenAPI response validation.
// The payload is first encoded into an in-memory buffer so the engine can
// validate it before anything reaches the client.
func (jf *jsonFeatures) serveAndValidateJSON(input any, contentType string, r *http.Request, w http.ResponseWriter) {
	var buf bytes.Buffer
	if err := getEncoder(&buf).Encode(input); err != nil {
		handleJSONEncodingFailure(err, w)
		return
	}
	jf.engine.Serve(w, r,
		engine.ServeValidation(false /* performed earlier */, jf.validateResponse),
		engine.ServeContentType(contentType),
		engine.ServeOutput(buf.Bytes()),
	)
}
// serveJSON serves JSON *WITHOUT* OpenAPI validation by writing directly to the response output stream.
func serveJSON(input any, contentType string, w http.ResponseWriter) {
	w.Header().Set(engine.HeaderContentType, contentType)
	err := getEncoder(w).Encode(input)
	if err != nil {
		handleJSONEncodingFailure(err, w)
	}
}
// jsonEncoder abstracts over the stdlib and goccy JSON encoders so
// getEncoder can return either implementation.
type jsonEncoder interface {
	Encode(input any) error
}
// getEncoder creates a jsonEncoder writing to w. Note escaping of '<', '>' and '&'
// is disabled (HTMLEscape is false). Especially the '&' is important since we use
// this character in the next/prev links.
func getEncoder(w io.Writer) jsonEncoder {
	if !disableJSONPerfOptimization {
		// use ~7% overall faster 3rd party JSON encoder (in case of issues switch back to stdlib using env variable)
		encoder := perfjson.NewEncoder(w)
		encoder.SetEscapeHTML(false)
		return encoder
	}
	// use Go stdlib JSON encoder
	encoder := stdjson.NewEncoder(w)
	encoder.SetEscapeHTML(false)
	return encoder
}
// handleJSONEncodingFailure logs the encoding error and renders a generic
// server-error problem response (no error details leak to the client).
func handleJSONEncodingFailure(err error, w http.ResponseWriter) {
	log.Printf("JSON encoding failed: %v", err)
	engine.RenderProblem(engine.ProblemServerError, w, "Failed to write JSON response")
}
// setGeom copies the feature geometry into the appropriate JSON-FG member:
// "geometry" for WGS84 output, "place" for any other CRS.
func setGeom(crs domain.ContentCrs, jsonfgFeature *domain.JSONFGFeature, feature *domain.Feature) {
	if !crs.IsWGS84() {
		jsonfgFeature.Place = feature.Geometry
		return
	}
	jsonfgFeature.Geometry = feature.Geometry
}
// getMapSheetProperties returns the map sheet download properties for the given
// collection, or nil when map sheet downloads aren't configured for it.
func getMapSheetProperties(configuredFC *config.CollectionEntryFeatures) *config.MapSheetDownloadProperties {
	if configuredFC == nil || configuredFC.MapSheetDownloads == nil {
		return nil
	}
	return &configuredFC.MapSheetDownloads.Properties
}
package features
import (
"fmt"
"log"
"net/http"
"sync"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/geopackage"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/postgres"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/PDOK/gokoala/internal/ogc/features/proj"
)
const (
	// templatesDir is the on-disk location of the OGC API Features templates.
	templatesDir = "internal/ogc/features/templates/"
)
// Features implements the OGC API Features building block, serving feature
// collections from one or more configured datasources.
type Features struct {
	engine *engine.Engine
	// datasources maps each collection+SRID combination to the datasource serving it
	datasources map[datasourceKey]ds.Datasource
	// axisOrderBySRID records per SRID whether coordinates are in XY or YX order
	axisOrderBySRID map[int]domain.AxisOrder
	// configuredCollections indexes the configured collections by collection ID
	configuredCollections map[string]config.GeoSpatialCollection
	// configuredPropertyFilters holds per-collection property filters incl. allowed values
	configuredPropertyFilters map[string]ds.PropertyFiltersWithAllowedValues
	// collectionTypes holds the collection type per collection, as reported by the datasources
	collectionTypes geospatial.CollectionTypes
	// schemas holds the pre-rendered schema per collection (see renderSchemas)
	schemas map[string]domain.Schema
	html    *htmlFeatures
	json    *jsonFeatures
}
// NewFeatures Bootstraps OGC API Features logic.
// It creates the datasources, derives metadata from them (axis order,
// collection types, schemas), rebuilds the OpenAPI spec with that metadata
// and finally registers the feature routes on the engine's router.
func NewFeatures(e *engine.Engine) *Features {
	// set up datasources and derive everything we need from them up front
	datasources := createDatasources(e)
	axisOrderBySRID := determineAxisOrder(datasources)
	configuredCollections := cacheConfiguredFeatureCollections(e)
	configuredPropertyFilters := configurePropertyFiltersWithAllowedValues(datasources, configuredCollections)
	collectionTypes := determineCollectionTypes(datasources)
	schemas := renderSchemas(e, datasources)
	// inject collection-specific filters/types/schemas into the OpenAPI spec
	rebuildOpenAPI(e, datasources, configuredPropertyFilters, collectionTypes, schemas)
	f := &Features{
		engine:                    e,
		datasources:               datasources,
		axisOrderBySRID:           axisOrderBySRID,
		configuredCollections:     configuredCollections,
		configuredPropertyFilters: configuredPropertyFilters,
		collectionTypes:           collectionTypes,
		schemas:                   schemas,
		html:                      newHTMLFeatures(e),
		json:                      newJSONFeatures(e),
	}
	// routes: collection items, a single item, and the collection schema
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/items", f.Features())
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/items/{featureId}", f.Feature())
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/schema", f.Schema())
	return f
}
// GetCollectionTypes returns the collection type per collection,
// as determined from the datasources during bootstrap.
func (f *Features) GetCollectionTypes() geospatial.CollectionTypes {
	return f.collectionTypes
}
// datasourceKey uniquely identifies the datasource to use: the combination
// of collection and SRID (projection) being served.
type datasourceKey struct {
	srid         int
	collectionID string
}
// datasourceConfig couples a configured datasource to the collections it serves
// and records whether coordinates need to be transformed on-the-fly.
type datasourceConfig struct {
	collections       config.GeoSpatialCollections
	ds                config.Datasource
	transformOnTheFly bool
}
// createDatasources instantiates the datasource (connection pool) for every
// configured collection+projection combination. A datasource config shared by
// multiple collections results in a single shared connection pool.
func createDatasources(e *engine.Engine) map[datasourceKey]ds.Datasource {
	configured := make(map[datasourceKey]*datasourceConfig, len(e.Config.OgcAPI.Features.Collections))
	// collection-specific datasources take precedence, so configure those first
	configureCollectionDatasources(e, configured)
	// then top-level (dataset-wide) datasources, only filling the remaining gaps
	configureTopLevelDatasources(e, configured)
	if len(configured) == 0 {
		log.Fatal("no datasource(s) configured for OGC API Features, check config")
	}
	// We now know which datasource each collection+projection wants. Instantiate
	// each distinct datasource config exactly once - multiple collections can
	// point at the same database and should share a single connection pool -
	// and associate the created datasource with every key referring to it.
	createdDatasources := make(map[config.Datasource]ds.Datasource)
	result := make(map[datasourceKey]ds.Datasource, len(configured))
	for key, cfg := range configured {
		if cfg == nil {
			continue
		}
		datasource, alreadyCreated := createdDatasources[cfg.ds]
		if !alreadyCreated {
			datasource = newDatasource(e, cfg.collections, cfg.ds, cfg.transformOnTheFly)
			createdDatasources[cfg.ds] = datasource
		}
		result[key] = datasource
	}
	return result
}
// determineAxisOrder determines, for each SRID used by the datasources, whether
// coordinates are in XY or YX order. Lookups run concurrently since each one
// shells out to the PROJ 'projinfo' tool. On lookup failure XY is assumed.
//
// Improvement over the previous version: the distinct SRIDs are collected
// first, so each SRID is resolved exactly once. Previously every datasource
// key whose SRID wasn't yet in the map spawned its own goroutine, causing
// duplicate 'projinfo' invocations when multiple collections shared an SRID.
// The SRID is also passed as an argument to the goroutine so correctness
// doesn't depend on per-iteration loop-variable semantics (Go 1.22+).
func determineAxisOrder(datasources map[datasourceKey]ds.Datasource) map[int]domain.AxisOrder {
	log.Println("start determining axis order for all configured CRSs")
	order := map[int]domain.AxisOrder{
		domain.WGS84SRID: domain.AxisOrderXY, // We know CRS84 is XY, see https://spatialreference.org/ref/ogc/CRS84/
	}
	// collect the set of SRIDs still to be resolved (dedupes across collections)
	srids := make(map[int]struct{})
	for key := range datasources {
		if _, known := order[key.srid]; !known {
			srids[key.srid] = struct{}{}
		}
	}
	var mu sync.Mutex // guards writes to 'order' from the goroutines below
	var wg sync.WaitGroup
	for srid := range srids {
		wg.Add(1)
		// use a goroutine per SRID to avoid blocking on GetAxisOrder()
		go func(srid int) {
			defer wg.Done()
			axisOrder, err := proj.GetAxisOrder(domain.SRID(srid))
			if err != nil {
				log.Printf("Warning: failed to determine whether EPSG:%d needs "+
					"swap of X/Y axis: %v. Defaulting to XY order.", srid, err)
				axisOrder = domain.AxisOrderXY
			}
			mu.Lock()
			order[srid] = axisOrder
			mu.Unlock()
		}(srid)
	}
	wg.Wait()
	log.Println("done determining axis order for all configured CRSs")
	return order
}
// determineCollectionTypes asks each datasource for the type of the
// collection(s) it serves; collections whose type can't be determined
// are deliberately skipped.
func determineCollectionTypes(datasources map[datasourceKey]ds.Datasource) geospatial.CollectionTypes {
	types := make(map[string]geospatial.CollectionType)
	for key, datasource := range datasources {
		if collectionType, err := datasource.GetCollectionType(key.collectionID); err == nil {
			types[key.collectionID] = collectionType
		}
	}
	return geospatial.NewCollectionTypes(types)
}
// cacheConfiguredFeatureCollections indexes the configured feature collections
// by collection ID for fast lookup.
func cacheConfiguredFeatureCollections(e *engine.Engine) map[string]config.GeoSpatialCollection {
	collections := e.Config.OgcAPI.Features.Collections
	byID := make(map[string]config.GeoSpatialCollection, len(collections))
	for _, collection := range collections {
		byID[collection.ID] = collection
	}
	return byID
}
// configurePropertyFiltersWithAllowedValues gathers, per collection, the
// property filters (including their allowed values) as reported by the
// datasources, and fails fast when a datasource doesn't return all filters
// configured for its collection.
func configurePropertyFiltersWithAllowedValues(datasources map[datasourceKey]ds.Datasource,
	collections map[string]config.GeoSpatialCollection) map[string]ds.PropertyFiltersWithAllowedValues {

	result := make(map[string]ds.PropertyFiltersWithAllowedValues)
	for key, datasource := range datasources {
		result[key.collectionID] = datasource.GetPropertyFiltersWithAllowedValues(key.collectionID)
	}
	// sanity check to make sure datasources return all configured property filters.
	for _, collection := range collections {
		if collection.Features == nil || collection.Features.Filters.Properties == nil {
			continue
		}
		expected := len(collection.Features.Filters.Properties)
		actual := len(result[collection.ID])
		if expected != actual {
			log.Fatalf("number of property filters received from datasource for collection '%s' does not "+
				"match the number of configured property filters. Expected filters: %d, got from datasource: %d",
				collection.ID, expected, actual)
		}
	}
	return result
}
// configureTopLevelDatasources configures top-level datasources - in one or multiple CRS's - which can be
// used by one or multiple collections (e.g., one GPKG that holds an entire dataset).
// Existing (collection-specific) entries in 'result' are never overwritten.
//
// Improvement over the previous version: domain.EpsgToSrid is loop-invariant
// with respect to the collections, so it's now resolved once per SRS instead
// of once per SRS per collection (which also means an invalid SRS is detected
// even when no collections are configured).
//
//nolint:cyclop
func configureTopLevelDatasources(e *engine.Engine, result map[datasourceKey]*datasourceConfig) {
	cfg := e.Config.OgcAPI.Features
	if cfg.Datasources == nil {
		return
	}
	// Ahead-of-time WGS84
	if cfg.Datasources.DefaultWGS84 != nil {
		var defaultDS *datasourceConfig
		for _, coll := range cfg.Collections {
			key := datasourceKey{srid: domain.WGS84SRID, collectionID: coll.ID}
			if result[key] == nil {
				if defaultDS == nil {
					// lazily created: only when at least one collection needs it
					defaultDS = &datasourceConfig{cfg.Collections, *cfg.Datasources.DefaultWGS84, false}
				}
				result[key] = defaultDS
			}
		}
	}
	// Ahead-of-time additional SRSs
	for _, additional := range cfg.Datasources.Additional {
		// resolve the SRID once; it doesn't vary per collection
		srid, err := domain.EpsgToSrid(additional.Srs)
		if err != nil {
			log.Fatal(err)
		}
		for _, coll := range cfg.Collections {
			key := datasourceKey{srid: srid.GetOrDefault(), collectionID: coll.ID}
			if result[key] == nil {
				result[key] = &datasourceConfig{cfg.Collections, additional.Datasource, false}
			}
		}
	}
	// On-the-fly SRSs -- add these as last since we prefer ahead-of-time projections
	for _, otf := range cfg.Datasources.OnTheFly {
		// resolve all SRIDs once; they don't vary per collection
		srids := make([]int, 0, len(otf.SupportedSrs))
		for _, srs := range otf.SupportedSrs {
			srid, err := domain.EpsgToSrid(srs.Srs)
			if err != nil {
				log.Fatal(err)
			}
			srids = append(srids, srid.GetOrDefault())
		}
		for _, coll := range cfg.Collections {
			// WGS84
			key := datasourceKey{srid: domain.WGS84SRID, collectionID: coll.ID}
			if result[key] == nil {
				result[key] = &datasourceConfig{cfg.Collections, otf.Datasource, true}
			}
			// All other configured SRSs
			for _, srid := range srids {
				key = datasourceKey{srid: srid, collectionID: coll.ID}
				if result[key] == nil {
					result[key] = &datasourceConfig{cfg.Collections, otf.Datasource, true}
				}
			}
		}
	}
}
// configureCollectionDatasources configures datasources - in one or multiple CRS's - which are specific
// to a certain collection (e.g., a separate GPKG per collection).
func configureCollectionDatasources(e *engine.Engine, result map[datasourceKey]*datasourceConfig) {
	cfg := e.Config.OgcAPI.Features
	for _, coll := range cfg.Collections {
		if coll.Features == nil || coll.Features.Datasources == nil {
			continue
		}
		collDatasources := coll.Features.Datasources
		// Ahead-of-time WGS84
		if collDatasources.DefaultWGS84 != nil {
			result[datasourceKey{srid: domain.WGS84SRID, collectionID: coll.ID}] =
				&datasourceConfig{cfg.Collections, *collDatasources.DefaultWGS84, false}
		}
		// Ahead-of-time additional SRSs
		for _, additional := range collDatasources.Additional {
			srid, err := domain.EpsgToSrid(additional.Srs)
			if err != nil {
				log.Fatal(err)
			}
			result[datasourceKey{srid: srid.GetOrDefault(), collectionID: coll.ID}] =
				&datasourceConfig{cfg.Collections, additional.Datasource, false}
		}
		// On-the-fly SRSs -- add these as last since we prefer ahead-of-time projections
		for _, otf := range collDatasources.OnTheFly {
			// WGS84
			key := datasourceKey{srid: domain.WGS84SRID, collectionID: coll.ID}
			if result[key] == nil {
				result[key] = &datasourceConfig{cfg.Collections, otf.Datasource, true}
			}
			// All other configured SRSs
			for _, srs := range otf.SupportedSrs {
				srid, err := domain.EpsgToSrid(srs.Srs)
				if err != nil {
					log.Fatal(err)
				}
				result[datasourceKey{srid: srid.GetOrDefault(), collectionID: coll.ID}] =
					&datasourceConfig{cfg.Collections, otf.Datasource, true}
			}
		}
	}
}
// newDatasource instantiates the datasource (GeoPackage or Postgres) described
// by dsConfig and registers its Close method as an engine shutdown hook.
// Any construction error - or an unrecognized datasource type - is fatal.
func newDatasource(e *engine.Engine, collections config.GeoSpatialCollections,
	dsConfig config.Datasource, transformOnTheFly bool) ds.Datasource {

	maxDecimals := e.Config.OgcAPI.Features.MaxDecimals
	forceUTC := e.Config.OgcAPI.Features.ForceUTC

	var (
		datasource ds.Datasource
		err        error
	)
	switch {
	case dsConfig.GeoPackage != nil:
		datasource, err = geopackage.NewGeoPackage(collections, *dsConfig.GeoPackage, transformOnTheFly, maxDecimals, forceUTC)
	case dsConfig.Postgres != nil:
		datasource, err = postgres.NewPostgres(collections, *dsConfig.Postgres, transformOnTheFly, maxDecimals, forceUTC)
	default:
		log.Fatal("got unknown datasource type")
	}
	if err != nil {
		log.Fatal(err)
	}
	e.RegisterShutdownHook(datasource.Close)
	return datasource
}
// handleCollectionNotFound logs and renders a "not found" problem response
// for a collection ID that isn't configured in this service.
func handleCollectionNotFound(w http.ResponseWriter, collectionID string) {
	msg := fmt.Sprintf("collection %s doesn't exist in this features service", collectionID)
	log.Println(msg)
	engine.RenderProblem(engine.ProblemNotFound, w, msg)
}
// handleFormatNotSupported logs and renders a "not acceptable" problem
// response for an output format this endpoint doesn't support.
func handleFormatNotSupported(w http.ResponseWriter, format string) {
	msg := fmt.Sprintf("format %s is not supported", format)
	log.Println(msg)
	engine.RenderProblem(engine.ProblemNotAcceptable, w, msg)
}
package features
import (
"fmt"
"log"
"slices"
"strings"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
)
// openAPIParams holds the collection-specific details that are injected into
// the OpenAPI spec when it's rebuilt for OGC API Features (see rebuildOpenAPI).
type openAPIParams struct {
	PropertyFiltersByCollection map[string][]OpenAPIPropertyFilter
	CollectionTypes             geospatial.CollectionTypes
	SchemasByCollection         map[string]domain.Schema
}
// OpenAPIPropertyFilter describes a single property filter - name, description,
// data type and (optionally) its allowed values - as exposed in the OpenAPI spec.
type OpenAPIPropertyFilter struct {
	Name          string
	Description   string
	DataType      string
	AllowedValues []string
}
// rebuildOpenAPI Rebuild OpenAPI spec for features with additional info from given parameters.
// Fatal when a configured property filter doesn't match the datasource schema.
func rebuildOpenAPI(e *engine.Engine,
	datasources map[datasourceKey]ds.Datasource,
	filters map[string]ds.PropertyFiltersWithAllowedValues,
	collectionTypes geospatial.CollectionTypes,
	schemas map[string]domain.Schema) {

	propertyFiltersByCollection, err := createPropertyFiltersByCollection(datasources, filters)
	if err != nil {
		log.Fatal(err)
	}
	params := openAPIParams{
		PropertyFiltersByCollection: propertyFiltersByCollection,
		CollectionTypes:             collectionTypes,
		SchemasByCollection:         schemas,
	}
	e.RebuildOpenAPI(params)
}
// createPropertyFiltersByCollection matches each collection's configured
// property filters against the fields of its datasource schema, producing the
// filters to expose in the OpenAPI spec. An error is returned when a filter
// refers to a column that doesn't exist in the datasource.
//
// Improvement over the previous version: a collection served in multiple
// projections appears under multiple datasource keys; its filters are now
// derived only once instead of once per projection (avoiding redundant
// GetSchema calls and sorts that produced the same result).
func createPropertyFiltersByCollection(datasources map[datasourceKey]ds.Datasource,
	filters map[string]ds.PropertyFiltersWithAllowedValues) (map[string][]OpenAPIPropertyFilter, error) {

	result := make(map[string][]OpenAPIPropertyFilter)
	for k, datasource := range datasources {
		if _, done := result[k.collectionID]; done {
			// already derived for this collection (other projection)
			continue
		}
		configuredPropertyFilters := filters[k.collectionID]
		if len(configuredPropertyFilters) == 0 {
			continue
		}
		featTable, err := datasource.GetSchema(k.collectionID)
		if err != nil {
			continue
		}
		propertyFilters := make([]OpenAPIPropertyFilter, 0, len(featTable.Fields))
		for _, fc := range configuredPropertyFilters {
			match := false
			for _, field := range featTable.Fields {
				if fc.Name == field.Name {
					// match found between property filter in config file and database column name
					propertyFilters = append(propertyFilters, OpenAPIPropertyFilter{
						Name:          field.Name,
						Description:   fc.Description,
						DataType:      field.ToTypeFormat().Type,
						AllowedValues: fc.AllowedValues,
					})
					match = true
					break
				}
			}
			if !match {
				return nil, fmt.Errorf("invalid property filter specified, "+
					"column '%s' doesn't exist in datasource attached to collection '%s'", fc.Name, k.collectionID)
			}
		}
		// stable (alphabetical) filter order in the OpenAPI spec
		slices.SortFunc(propertyFilters, func(a, b OpenAPIPropertyFilter) int {
			return strings.Compare(a.Name, b.Name)
		})
		result[k.collectionID] = propertyFilters
	}
	return result, nil
}
package proj
import (
"encoding/json"
"fmt"
"os/exec"
"strings"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
)
// projInfoTool is the PROJ command-line tool used to inspect CRS definitions.
const projInfoTool = "projinfo"

var (
	execCommand  = exec.Command  // Allow mocking
	execLookPath = exec.LookPath // Allow mocking
)
// Info output in PROJJSON format. Note: only relevant fields are mapped in this struct.
type Info struct {
	CoordinateSystem struct {
		// Axis lists the coordinate system axes in definition order;
		// the direction of the first axis decides XY vs YX (see GetAxisOrder).
		Axis []struct {
			Name         string `json:"name"`
			Abbreviation string `json:"abbreviation"`
			Direction    string `json:"direction"`
			Unit         string `json:"unit"`
		} `json:"axis"`
	} `json:"coordinate_system"` //nolint:tagliatelle
}
// GetAxisOrder return XY or YX axis order for the given SRID.
// It shells out to 'projinfo' to inspect the CRS definition.
func GetAxisOrder(srid domain.SRID) (domain.AxisOrder, error) {
	epsgCode := fmt.Sprintf("%s%d", domain.EPSGPrefix, srid)
	info, err := execProjInfo(epsgCode)
	if err != nil {
		return -1, err
	}
	// east/north == XY, north/east == YX.
	firstAxis := info.CoordinateSystem.Axis[0]
	if firstAxis.Direction == "north" {
		return domain.AxisOrderYX, nil
	}
	return domain.AxisOrderXY, nil
}
// execProjInfo runs 'projinfo' for the given EPSG code and parses its output
// in PROJJSON format (https://proj.org/en/stable/specifications/projjson.html).
// Returns an error when the tool is missing, fails, or yields no axis info.
func execProjInfo(epsgCode string) (*Info, error) {
	if _, err := execLookPath(projInfoTool); err != nil {
		return nil, fmt.Errorf("%s command not found in PATH: %w", projInfoTool, err)
	}
	cmd := execCommand(projInfoTool, epsgCode, "-o", "projjson", "--single-line", "-q")
	output, err := cmd.Output()
	if err != nil {
		return nil, fmt.Errorf("failed to execute %s command: %w", projInfoTool, err)
	}
	var projInfo Info
	payload := strings.TrimSpace(string(output))
	if err := json.Unmarshal([]byte(payload), &projInfo); err != nil {
		return nil, fmt.Errorf("failed to parse %s output: %w", projInfoTool, err)
	}
	if len(projInfo.CoordinateSystem.Axis) < 1 {
		return nil, fmt.Errorf("invalid %s output: axis not found", projInfoTool)
	}
	return &projInfo, nil
}
package features
import (
"log"
"net/http"
"slices"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
g "github.com/PDOK/gokoala/internal/ogc/common/geospatial"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/go-chi/chi/v5"
)
// Path suffix and template locations for the collection schema endpoint.
const schemasPath = "/schema"
const schemaHTML = templatesDir + "schema.go.html"
const schemaJSON = templatesDir + "schema.go.json"
// Schema endpoint serves a schema that describes the features in the collection, either as HTML
// or as JSON schema (https://json-schema.org/)
func (f *Features) Schema() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := f.engine.OpenAPI.ValidateRequest(r); err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		collectionID := chi.URLParam(r, "collectionId")
		collection, found := f.configuredCollections[collectionID]
		if !found {
			handleCollectionNotFound(w, collectionID)
			return
		}
		// schemas are pre-rendered during start-up (see renderSchemas);
		// here we only negotiate the format and serve the matching template
		var key engine.TemplateKey
		switch format := f.engine.CN.NegotiateFormat(r); format {
		case engine.FormatHTML:
			key = engine.NewTemplateKey(schemaHTML,
				engine.WithInstanceName(collection.ID),
				f.engine.WithNegotiatedLanguage(w, r))
		case engine.FormatJSON:
			key = engine.NewTemplateKey(schemaJSON,
				engine.WithInstanceName(collection.ID),
				f.engine.WithNegotiatedLanguage(w, r),
				engine.WithMediaTypeOverwrite(engine.MediaTypeJSONSchema)) // JSON format, but specific mediatype.
		default:
			handleFormatNotSupported(w, format)
			return
		}
		f.engine.Serve(w, r, engine.ServeTemplate(key))
	}
}
// schemaTemplateData is the data passed to the schema HTML/JSON templates:
// the (embedded) schema itself plus collection metadata for display purposes.
type schemaTemplateData struct {
	domain.Schema
	CollectionID          string
	CollectionTitle       string
	CollectionDescription *string
}
// renderSchemas pre-renders HTML and JSON schemas describing each feature collection.
// Collections whose schema can't be retrieved are logged and skipped.
//
// Fix: the shared breadcrumb prefix is now cloned (slices.Clone) before the
// per-collection crumbs are appended. Previously the code appended directly
// to the package-level collectionsBreadcrumb slice; if that slice ever has
// spare capacity, the appends of successive loop iterations would write into
// the same backing array, leaking breadcrumbs between collections.
func renderSchemas(e *engine.Engine, datasources map[datasourceKey]ds.Datasource) map[string]domain.Schema {
	schemasByCollection := make(map[string]domain.Schema)
	for _, collection := range e.Config.OgcAPI.Features.Collections {
		title, description := getCollectionTitleAndDesc(collection)
		breadcrumbs := append(slices.Clone(collectionsBreadcrumb), []engine.Breadcrumb{
			{
				Name: title,
				Path: collectionsCrumb + collection.ID,
			},
			{
				Name: "Schema",
				Path: collectionsCrumb + collection.ID + schemasPath,
			},
		}...)
		// the schema should be the same regardless of CRS, so we use WGS84 as it's the default and always present
		datasource := datasources[datasourceKey{srid: domain.WGS84SRID, collectionID: collection.ID}]
		schema, err := datasource.GetSchema(collection.ID)
		if err != nil {
			log.Printf("Failed to render OGC API Features part 5 Schema for collection %s: %v", collection.ID, err)
			continue
		}
		// expand the schema with details about temporal fields
		if collection.Metadata != nil && collection.Metadata.TemporalProperties != nil {
			for i := range schema.Fields {
				// OAF part 5: If the features have multiple temporal properties, the roles "primary-interval-start"
				// and "primary-interval-end" can be used to identify the primary temporal information of the features.
				if collection.Metadata.TemporalProperties.StartDate == schema.Fields[i].Name {
					schema.Fields[i].IsPrimaryIntervalStart = true
				} else if collection.Metadata.TemporalProperties.EndDate == schema.Fields[i].Name {
					schema.Fields[i].IsPrimaryIntervalEnd = true
				}
			}
		}
		if !requiresSpecificOrder(collection) {
			// stable field order
			slices.SortFunc(schema.Fields, func(a, b domain.Field) int {
				return strings.Compare(a.Name, b.Name)
			})
		}
		// pre-render the schema, catches issues early on during start-up.
		e.RenderTemplatesWithParams(g.CollectionsPath+"/"+collection.ID+schemasPath,
			schemaTemplateData{
				*schema,
				collection.ID,
				title,
				description,
			},
			breadcrumbs,
			engine.NewTemplateKey(schemaJSON,
				engine.WithInstanceName(collection.ID),
				engine.WithMediaTypeOverwrite(engine.MediaTypeJSONSchema),
			),
			engine.NewTemplateKey(schemaHTML,
				engine.WithInstanceName(collection.ID),
			),
		)
		schemasByCollection[collection.ID] = *schema
	}
	return schemasByCollection
}
// requiresSpecificOrder reports whether the feature properties of the given
// collection must keep their configured order (instead of alphabetical order).
func requiresSpecificOrder(collection config.GeoSpatialCollection) bool {
	features := collection.Features
	if features == nil || features.FeatureProperties == nil {
		return false
	}
	return features.PropertiesInSpecificOrder
}
// getCollectionTitleAndDesc returns the display title and optional description
// of the given collection, falling back to the collection ID as title.
func getCollectionTitleAndDesc(collection config.GeoSpatialCollection) (string, *string) {
	title := getCollectionTitle(collection.ID, collection.Metadata)
	var description *string
	if collection.Metadata != nil {
		description = collection.Metadata.Description
	}
	return title, description
}
package features
import (
"bytes"
"errors"
"fmt"
"hash/fnv"
"math"
"net/url"
"slices"
"sort"
"strconv"
"strings"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/features/datasources"
d "github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/twpayne/go-geom"
)
const (
	// names of the query parameters supported on the features endpoints
	cursorParam    = "cursor"
	limitParam     = "limit"
	crsParam       = "crs"
	dateTimeParam  = "datetime"
	bboxParam      = "bbox"
	bboxCrsParam   = "bbox-crs"
	filterParam    = "filter"
	filterCrsParam = "filter-crs"
	profileParam   = "profile"

	// maximum accepted length of a property filter value
	// (presumably enforced during property filter parsing — confirm there)
	propertyFilterMaxLength = 512
	// wildcard character used in property filter values
	propertyFilterWildcard = "*"
)
var (
	// don't include these params in checksum:
	// format and cursor don't change the filtered result set itself.
	checksumExcludedParams = []string{
		engine.FormatParam,
		cursorParam,
	}
	// request for /items should only accept these params (+ filters)
	// defined as a set for fast lookup.
	featuresKnownParams = map[string]struct{}{
		engine.FormatParam: {},
		limitParam:         {},
		cursorParam:        {},
		crsParam:           {},
		dateTimeParam:      {},
		bboxParam:          {},
		bboxCrsParam:       {},
		filterParam:        {},
		filterCrsParam:     {},
		profileParam:       {},
	}
	// request for /item/{id} should only accept these params
	// defined as a set for fast lookup.
	featureKnownParams = map[string]struct{}{
		engine.FormatParam: {},
		crsParam:           {},
		profileParam:       {},
	}
)
// URL to a page in a collection of features.
type featureCollectionURL struct {
	baseURL                   url.URL    // base URL onto which collection paths are joined
	params                    url.Values // raw query parameters of the incoming request
	limit                     config.Limit // default/max page size used by parseLimit
	configuredPropertyFilters map[string]datasources.PropertyFilterWithAllowedValues // property filters allowed for this collection
	schema                    d.Schema // collection schema, used when parsing the profile param
	supportsDatetime          bool     // whether the datetime param is supported for this collection
}
// parse the given URL to values required to delivery a set of Features.
// Each query parameter is parsed independently so all validation errors can be
// reported at once (joined into a single error at the end).
func (fc featureCollectionURL) parse() (encodedCursor d.EncodedCursor, limit int, inputSRID d.SRID, outputSRID d.SRID,
	contentCrs d.ContentCrs, bbox *geom.Bounds, referenceDate time.Time, propertyFilters map[string]string,
	profile d.Profile, err error) {
	// reject unknown query params up-front (OGC API Features req 7.6)
	err = fc.validateNoUnknownParams()
	if err != nil {
		return
	}
	encodedCursor = d.EncodedCursor(fc.params.Get(cursorParam))
	limit, limitErr := parseLimit(fc.params, fc.limit)
	outputSRID, outputSRIDErr := parseCrsToSRID(fc.params, crsParam)
	contentCrs = parseCrsToContentCrs(fc.params)
	propertyFilters, pfErr := parsePropertyFilters(fc.configuredPropertyFilters, fc.params)
	bbox, bboxSRID, bboxErr := parseBbox(fc.params)
	profile, profileErr := parseProfile(fc.params, fc.baseURL, fc.schema)
	referenceDate, referenceDateErr := parseDateTime(fc.params, fc.supportsDatetime)
	_, filterSRID, filterErr := parseFilter(fc.params)
	// bbox-crs and filter-crs must agree; the result is the single input SRID
	inputSRID, inputSRIDErr := consolidateSRIDs(bboxSRID, filterSRID)
	err = errors.Join(limitErr, outputSRIDErr, bboxErr, pfErr, profileErr, referenceDateErr, filterErr, inputSRIDErr)
	return
}
// Calculate checksum over the query parameters that have a "filtering effect" on
// the result set such as limit, bbox, property filters, CQL filters, etc. These query params
// aren't allowed to be changed during pagination. The checksum allows for the latter
// to be verified.
//
// Returns an empty slice when no relevant parameter values are present.
// NOTE(review): only parameter values (not names) are hashed — presumably
// sufficient here since values of different params rarely collide, but confirm.
func (fc featureCollectionURL) checksum() []byte {
	var valuesToHash bytes.Buffer
	sortedQueryParams := make([]string, 0, len(fc.params))
	for k := range fc.params {
		sortedQueryParams = append(sortedQueryParams, k)
	}
	sort.Strings(sortedQueryParams) // sort keys for a deterministic checksum

OUTER:
	for _, k := range sortedQueryParams {
		for _, skip := range checksumExcludedParams {
			if k == skip {
				continue OUTER
			}
		}
		// Sort a copy of the values belonging to this key. The original code
		// sorted fc.params[k] in place, mutating the request's query parameters
		// as a side effect of computing the checksum.
		paramValues := slices.Clone(fc.params[k])
		slices.Sort(paramValues)
		for _, s := range paramValues {
			valuesToHash.WriteString(s)
		}
	}
	bytesToHash := valuesToHash.Bytes()
	if len(bytesToHash) > 0 {
		hasher := fnv.New32a() // fast non-cryptographic hash
		_, _ = hasher.Write(bytesToHash)
		return hasher.Sum(nil)
	}
	return []byte{}
}
// toSelfURL returns the absolute URL of the current page of this feature collection,
// in the given output format.
func (fc featureCollectionURL) toSelfURL(collectionID string, format string) string {
	query := clone(fc.params)
	query.Set(engine.FormatParam, format)
	selfURL := fc.baseURL.JoinPath("collections", collectionID, "items")
	selfURL.RawQuery = query.Encode()
	return selfURL.String()
}
// toPrevNextURL returns the absolute URL of the previous/next page of this feature
// collection, identified by the given cursor, in the given output format.
func (fc featureCollectionURL) toPrevNextURL(collectionID string, cursor d.EncodedCursor, format string) string {
	query := clone(fc.params)
	query.Set(engine.FormatParam, format)
	query.Set(cursorParam, cursor.String())
	pageURL := fc.baseURL.JoinPath("collections", collectionID, "items")
	pageURL.RawQuery = query.Encode()
	return pageURL.String()
}
// implements req 7.6 (https://docs.ogc.org/is/17-069r4/17-069r4.html#query_parameters)
func (fc featureCollectionURL) validateNoUnknownParams() error {
	for param := range fc.params {
		_, known := featuresKnownParams[param]
		_, isPropertyFilter := fc.configuredPropertyFilters[param]
		if !known && !isPropertyFilter {
			return fmt.Errorf("unknown query parameter(s) found: %s", param)
		}
	}
	return nil
}
// URL to a specific Feature.
type featureURL struct {
	baseURL url.URL    // base URL onto which collection/feature paths are joined
	params  url.Values // raw query parameters of the incoming request
	schema  d.Schema   // collection schema, used when parsing the profile param
}
// parse the given URL to values required to delivery a specific Feature.
// Parses crs and profile independently and joins their errors.
func (f featureURL) parse() (srid d.SRID, contentCrs d.ContentCrs, profile d.Profile, err error) {
	// reject unknown query params up-front (OGC API Features req 7.6)
	err = f.validateNoUnknownParams()
	if err != nil {
		return
	}
	srid, crsErr := parseCrsToSRID(f.params, crsParam)
	contentCrs = parseCrsToContentCrs(f.params)
	profile, profileErr := parseProfile(f.params, f.baseURL, f.schema)
	err = errors.Join(crsErr, profileErr)
	return
}
// toSelfURL returns the absolute URL of this specific feature in the given output format.
func (f featureURL) toSelfURL(collectionID string, featureID string, format string) string {
	query := url.Values{}
	query.Set(engine.FormatParam, format)
	selfURL := f.baseURL.JoinPath("collections", collectionID, "items", featureID)
	selfURL.RawQuery = query.Encode()
	return selfURL.String()
}
// toCollectionURL returns the absolute URL of the collection containing this
// feature, in the given output format.
func (f featureURL) toCollectionURL(collectionID string, format string) string {
	query := url.Values{}
	query.Set(engine.FormatParam, format)
	collectionURL := f.baseURL.JoinPath("collections", collectionID)
	collectionURL.RawQuery = query.Encode()
	return collectionURL.String()
}
// implements req 7.6 (https://docs.ogc.org/is/17-069r4/17-069r4.html#query_parameters)
func (f featureURL) validateNoUnknownParams() error {
	for param := range f.params {
		if _, known := featureKnownParams[param]; known {
			continue
		}
		return fmt.Errorf("unknown query parameter(s) found: %s", param)
	}
	return nil
}
func clone(params url.Values) url.Values {
copyParams := url.Values{}
for k, v := range params {
copyParams[k] = v
}
return copyParams
}
// consolidateSRIDs determines the single input SRID from the bbox-crs and
// filter-crs parameters. When both are given they must be equal; when only one
// is given that one is used; otherwise the input SRID stays undefined.
func consolidateSRIDs(bboxSRID d.SRID, filterSRID d.SRID) (inputSRID d.SRID, err error) {
	if bboxSRID != d.UndefinedSRID && filterSRID != d.UndefinedSRID && bboxSRID != filterSRID {
		return 0, errors.New("bbox-crs and filter-crs need to be equal. " +
			"Can't use more than one CRS as input, but input and output CRS may differ")
	}
	// Pick whichever SRID is defined (when both are defined they're equal at this
	// point). The original always copied bboxSRID, which left inputSRID undefined
	// when solely filter-crs was provided.
	switch {
	case bboxSRID != d.UndefinedSRID:
		inputSRID = bboxSRID
	case filterSRID != d.UndefinedSRID:
		inputSRID = filterSRID
	}
	return inputSRID, err
}
// parseLimit returns the page size to use, starting from the configured default
// and honoring the limit query param, clamped to the configured maximum.
func parseLimit(params url.Values, limitCfg config.Limit) (int, error) {
	var err error
	limit := limitCfg.Default
	if raw := params.Get(limitParam); raw != "" {
		if limit, err = strconv.Atoi(raw); err != nil {
			err = errors.New("limit must be numeric")
		}
		// "If the value of the limit parameter is larger than the maximum value, this SHALL NOT result
		// in an error (instead use the maximum as the parameter value)."
		if limit > limitCfg.Max {
			limit = limitCfg.Max
		}
	}
	if limit < 0 {
		err = errors.New("limit can't be negative")
	}
	return limit, err
}
// parseBbox parses the bbox and bbox-crs query params into 2D bounds plus the
// SRID those bounds are expressed in. Returns (nil, UndefinedSRID, nil) when no
// bbox was requested.
func parseBbox(params url.Values) (*geom.Bounds, d.SRID, error) {
	// bbox-crs only makes sense in combination with an actual bbox
	if params.Get(bboxParam) == "" && params.Get(bboxCrsParam) != "" {
		return nil, d.UndefinedSRID, errors.New("bbox-crs can't be used without bbox parameter")
	}
	bboxSRID, err := parseCrsToSRID(params, bboxCrsParam)
	if err != nil {
		return nil, d.UndefinedSRID, err
	}
	if params.Get(bboxParam) == "" {
		return nil, d.UndefinedSRID, nil
	}
	// bbox must be exactly: minx,miny,maxx,maxy
	bboxValues := strings.Split(params.Get(bboxParam), ",")
	if len(bboxValues) != 4 {
		return nil, bboxSRID, errors.New("bbox should contain exactly 4 values " +
			"separated by commas: minx,miny,maxx,maxy")
	}
	bboxFloats := make([]float64, len(bboxValues))
	for i, v := range bboxValues {
		bboxFloats[i], err = strconv.ParseFloat(v, 64)
		if err != nil {
			return nil, bboxSRID, fmt.Errorf("failed to parse value %s in bbox, error: %w", v, err)
		}
	}
	bbox := geom.NewBounds(geom.XY).Set(bboxFloats...)
	// a degenerate (zero-area) bbox can never match any feature
	if surfaceArea(bbox) <= 0 {
		return nil, bboxSRID, errors.New("bbox has no surface area")
	}
	return bbox, bboxSRID, nil
}
// surfaceArea returns the absolute area spanned by the given 2D bounds.
func surfaceArea(bbox *geom.Bounds) float64 {
	// Use the same logic as bbox.Area() in https://github.com/go-spatial/geom to calculate surface area.
	// The bounds.Area() in github.com/twpayne/go-geom behaves differently and is not what we're looking for.
	return math.Abs((bbox.Max(1) - bbox.Min(1)) * (bbox.Max(0) - bbox.Min(0)))
}
// parseCrsToContentCrs returns the CRS to advertise for the response content,
// taken from the crs query param; defaults to WGS84 when absent.
func parseCrsToContentCrs(params url.Values) d.ContentCrs {
	if crs := params.Get(crsParam); crs != "" {
		return d.ContentCrs(crs)
	}
	return d.WGS84CrsURI
}
// parseCrsToSRID converts a CRS URI (e.g. http://www.opengis.net/def/crs/EPSG/0/28992)
// held in the given query param to its numeric SRID.
// Returns UndefinedSRID (and no error) when the param is absent.
func parseCrsToSRID(params url.Values, paramName string) (d.SRID, error) {
	param := params.Get(paramName)
	if param == "" {
		return d.UndefinedSRID, nil
	}
	param = strings.TrimSpace(param)
	if !strings.HasPrefix(param, d.CrsURIPrefix) {
		return d.UndefinedSRID, fmt.Errorf("%s param should start with %s, got: %s", paramName, d.CrsURIPrefix, param)
	}
	var srid d.SRID
	// the segment after the last slash holds the CRS code
	lastIndex := strings.LastIndex(param, "/")
	if lastIndex != -1 {
		crsCode := param[lastIndex+1:]
		if crsCode == d.WGS84CodeOGC {
			return d.WGS84SRID, nil // CRS84 is WGS84, just like EPSG:4326 (only axis order differs but SRID is the same)
		}
		val, err := strconv.Atoi(crsCode)
		if err != nil {
			return 0, fmt.Errorf("expected numerical CRS code, received: %s", crsCode)
		}
		srid = d.SRID(val)
	}
	return srid, nil
}
// Support simple filtering on properties: https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameters_for_filtering_on_feature_properties
// Only filters that are explicitly configured for the collection are honored;
// overly long values and wildcard values are rejected.
func parsePropertyFilters(configuredPropertyFilters map[string]datasources.PropertyFilterWithAllowedValues, params url.Values) (map[string]string, error) {
	result := make(map[string]string)
	for name := range configuredPropertyFilters {
		value := params.Get(name)
		if value == "" {
			continue
		}
		if len(value) > propertyFilterMaxLength {
			return nil, fmt.Errorf("property filter %s is too large, "+
				"value is limited to %d characters", name, propertyFilterMaxLength)
		}
		if strings.Contains(value, propertyFilterWildcard) {
			// if/when we choose to support wildcards in the future, make sure wildcards are
			// only allowed at the END (suffix) of the filter
			return nil, fmt.Errorf("property filter %s contains a wildcard (%s), "+
				"wildcard filtering is not allowed", name, propertyFilterWildcard)
		}
		result[name] = value
	}
	return result, nil
}
// Support filtering on datetime: https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameter_datetime
// Only single RFC 3339 instants are accepted; intervals (containing "/") are rejected.
func parseDateTime(params url.Values, datetimeSupported bool) (time.Time, error) {
	value := params.Get(dateTimeParam)
	if value == "" {
		return time.Time{}, nil
	}
	if !datetimeSupported {
		return time.Time{}, errors.New("datetime param is currently not supported for this collection")
	}
	if strings.Contains(value, "/") {
		return time.Time{}, fmt.Errorf("datetime param '%s' represents an interval, intervals are currently not supported", value)
	}
	return time.Parse(time.RFC3339, value)
}
// parseFilter reads the CQL filter and filter-crs params. An actual filter value
// currently always yields an error since CQL filtering isn't supported yet.
func parseFilter(params url.Values) (filter string, filterSRID d.SRID, err error) {
	filterSRID, _ = parseCrsToSRID(params, filterCrsParam)
	filter = params.Get(filterParam)
	if filter == "" {
		return filter, filterSRID, nil
	}
	return filter, filterSRID, errors.New("CQL filter param is currently not supported")
}
// parseProfile returns the requested (or default) profile; errors when the
// given profile name isn't one of the supported profiles.
func parseProfile(params url.Values, baseURL url.URL, schema d.Schema) (d.Profile, error) {
	profileName := d.RelAsLink // default when no profile param is given
	if params.Has(profileParam) {
		profileName = d.ProfileName(params.Get(profileParam))
		if !slices.Contains(d.SupportedProfiles, profileName) {
			return d.Profile{}, fmt.Errorf("profile %s is not supported, only supporting %s", profileName, d.SupportedProfiles)
		}
	}
	return d.NewProfile(profileName, baseURL, schema), nil
}
package geovolumes
import (
"errors"
"log"
"net/http"
"net/url"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/go-chi/chi/v5"
)
// ThreeDimensionalGeoVolumes implements the OGC API 3D GeoVolumes building block
// by proxying tileset/tile requests to a configured tileserver.
type ThreeDimensionalGeoVolumes struct {
	engine           *engine.Engine
	validateResponse bool // taken from config ValidateResponses; passed to ReverseProxyAndValidate
}
// NewThreeDimensionalGeoVolumes sets up the 3D GeoVolumes building block:
// validates the configured tileserver URL and registers all 3D Tiles and
// DTM/Quantized Mesh routes on the engine's router.
func NewThreeDimensionalGeoVolumes(e *engine.Engine) *ThreeDimensionalGeoVolumes {
	// fail fast on a malformed tileserver URL
	_, err := url.ParseRequestURI(e.Config.OgcAPI.GeoVolumes.TileServer.String())
	if err != nil {
		log.Fatalf("invalid tileserver url provided: %v", err)
	}
	geoVolumes := &ThreeDimensionalGeoVolumes{
		engine:           e,
		validateResponse: *e.Config.OgcAPI.GeoVolumes.ValidateResponses,
	}
	// 3D Tiles
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles", geoVolumes.Tileset("tileset.json"))
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	// DTM/Quantized Mesh
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh", geoVolumes.Tileset("layer.json"))
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	// path '/3dtiles' or '/quantized-mesh' is preferred but optional when requesting the actual tiles/tileset.
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	return geoVolumes
}
// Tileset serves tileset.json manifest in case of OGC 3D Tiles (= separate spec from OGC 3D GeoVolumes) requests or
// layer.json manifest in case of quantized mesh requests. Both requests will be proxied to the configured tileserver.
func (t *ThreeDimensionalGeoVolumes) Tileset(fileName string) http.HandlerFunc {
	if !strings.HasSuffix(fileName, ".json") {
		log.Fatalf("manifest should be a JSON file")
	}
	handler := func(w http.ResponseWriter, r *http.Request) {
		t.tileSet(w, r, fileName)
	}
	return handler
}
// ExplicitTileset serves OGC 3D Tiles manifest (= separate spec from OGC 3D GeoVolumes) or
// quantized mesh manifest. All requests will be proxied to the configured tileserver.
func (t *ThreeDimensionalGeoVolumes) ExplicitTileset() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		name := chi.URLParam(r, "explicitTileSet")
		if name == "" {
			engine.RenderProblem(engine.ProblemNotFound, w)
			return
		}
		t.tileSet(w, r, name+".json")
	}
}
// Tile reverse proxy to tileserver for actual 3D tiles (from OGC 3D Tiles, separate spec
// from OGC 3D GeoVolumes) or DTM Quantized Mesh tiles.
func (t *ThreeDimensionalGeoVolumes) Tile() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collectionID := chi.URLParam(r, "3dContainerId")
		collection, err := t.idToCollection(collectionID)
		if err != nil {
			engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
			return
		}
		// the path on the tileserver defaults to the collection ID, but can be overridden in config
		tileServerPath := collectionID
		if collection.GeoVolumes != nil && collection.GeoVolumes.TileServerPath != nil {
			tileServerPath = *collection.GeoVolumes.TileServerPath
		}
		tilePathPrefix := chi.URLParam(r, "tilePathPrefix") // optional
		tileMatrix := chi.URLParam(r, "tileMatrix")
		tileRow := chi.URLParam(r, "tileRow")
		tileColAndSuffix := chi.URLParam(r, "tileColAndSuffix")
		contentType := ""
		if collection.GeoVolumes != nil && collection.GeoVolumes.HasDTM() {
			// DTM has a specialized mediatype, although application/octet-stream will also work with Cesium
			contentType = engine.MediaTypeQuantizedMesh
		}
		path, _ := url.JoinPath("/", tileServerPath, tilePathPrefix, tileMatrix, tileRow, tileColAndSuffix)
		// prefer204=true: NOTE(review) presumably maps tileserver 404s to 204 for missing tiles — confirm in engine
		t.reverseProxy(w, r, path, true, contentType)
	}
}
// tileSet proxies a manifest (tileset.json / layer.json) request for the given
// collection to the configured tileserver.
func (t *ThreeDimensionalGeoVolumes) tileSet(w http.ResponseWriter, r *http.Request, tileSet string) {
	containerID := chi.URLParam(r, "3dContainerId")
	collection, err := t.idToCollection(containerID)
	if err != nil {
		engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
		return
	}
	// the path on the tileserver defaults to the collection ID, unless overridden in config
	serverPath := containerID
	if gv := collection.GeoVolumes; gv != nil && gv.TileServerPath != nil {
		serverPath = *gv.TileServerPath
	}
	targetPath, _ := url.JoinPath("/", serverPath, tileSet)
	t.reverseProxy(w, r, targetPath, false, "")
}
// reverseProxy forwards the request to the configured tileserver at the given path.
func (t *ThreeDimensionalGeoVolumes) reverseProxy(w http.ResponseWriter, r *http.Request, path string,
	prefer204 bool, contentTypeOverwrite string) {
	targetURL := t.engine.Config.OgcAPI.GeoVolumes.TileServer.String() + path
	target, err := url.Parse(targetURL)
	if err != nil {
		log.Printf("invalid target url, can't proxy tiles: %v", err)
		engine.RenderProblem(engine.ProblemServerError, w)
		return
	}
	t.engine.ReverseProxyAndValidate(w, r, target, prefer204, contentTypeOverwrite, t.validateResponse)
}
// idToCollection returns the GeoVolumes collection matching the given ID, or an
// error when no such collection is configured.
func (t *ThreeDimensionalGeoVolumes) idToCollection(cid string) (*config.GeoSpatialCollection, error) {
	for _, collection := range t.engine.Config.OgcAPI.GeoVolumes.Collections {
		if collection.ID != cid {
			continue
		}
		match := collection // return a pointer to a copy, not into the config slice
		return &match, nil
	}
	return nil, errors.New("no matching collection found")
}
package processes
import (
"net/http"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
)
// Processes implements the OGC API Processes building block by forwarding
// requests to an external processes server.
type Processes struct {
	engine *engine.Engine
}
// NewProcesses sets up the OGC API Processes building block: /jobs, /processes
// and /api requests are forwarded wholesale to the configured processes server.
func NewProcesses(e *engine.Engine) *Processes {
	processes := &Processes{engine: e}
	// wildcard routes: the full (sub)path is forwarded as-is
	e.Router.Handle("/jobs*", processes.forwarder(e.Config.OgcAPI.Processes.ProcessesServer))
	e.Router.Handle("/processes*", processes.forwarder(e.Config.OgcAPI.Processes.ProcessesServer))
	e.Router.Handle("/api*", processes.forwarder(e.Config.OgcAPI.Processes.ProcessesServer))
	return processes
}
// forwarder proxies the incoming request (path + query string) to the
// configured processes server.
func (p *Processes) forwarder(processServer config.URL) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		target := *processServer.URL
		target.Path = processServer.Path + r.URL.Path
		target.RawQuery = r.URL.RawQuery
		p.engine.ReverseProxy(w, r, &target, false, "")
	}
}
package ogc
import (
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/core"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/PDOK/gokoala/internal/ogc/features"
"github.com/PDOK/gokoala/internal/ogc/geovolumes"
"github.com/PDOK/gokoala/internal/ogc/processes"
"github.com/PDOK/gokoala/internal/ogc/styles"
"github.com/PDOK/gokoala/internal/ogc/tiles"
)
// SetupBuildingBlocks wires up all configured OGC API building blocks on the
// given engine. Only blocks present in the config are initialized; OGC Common
// Part 1 is always started.
func SetupBuildingBlocks(engine *engine.Engine) {
	// OGC 3D GeoVolumes API
	if engine.Config.OgcAPI.GeoVolumes != nil {
		geovolumes.NewThreeDimensionalGeoVolumes(engine)
	}
	// OGC Tiles API
	if engine.Config.OgcAPI.Tiles != nil {
		tiles.NewTiles(engine)
	}
	// OGC Styles API
	if engine.Config.OgcAPI.Styles != nil {
		styles.NewStyles(engine)
	}
	// OGC Features API; collection types feed the conformance declaration below
	collectionTypes := geospatial.NewCollectionTypes(nil)
	if engine.Config.OgcAPI.Features != nil {
		f := features.NewFeatures(engine)
		collectionTypes = f.GetCollectionTypes()
	}
	// OGC Processes API
	if engine.Config.OgcAPI.Processes != nil {
		processes.NewProcesses(engine)
	}
	// OGC Common Part 1, this will always be started
	core.NewCommonCore(engine, core.ExtraConformanceClasses{AttributesConformance: collectionTypes.HasAttributes()})
	// OGC Common part 2
	if engine.Config.HasCollections() {
		geospatial.NewCollections(engine, collectionTypes)
	}
}
package styles
import (
"log"
"net/http"
"net/url"
"slices"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/go-chi/chi/v5"
)
const (
	templatesDir        = "internal/ogc/styles/templates/"
	stylesPath          = "/styles"
	stylesCrumb         = "styles/"
	// separates the style ID from the projection suffix in style URLs, e.g. "mystyle__netherlandsrdnewquad"
	projectionDelimiter = "__"
)
var (
	// lowercase name of the first configured projection; set once in NewStyles and
	// used as fallback for style URLs without an explicit projection.
	defaultProjection = ""
	stylesBreadcrumbs = []engine.Breadcrumb{
		{
			Name: "Styles",
			Path: "styles",
		},
	}
)
// stylesTemplateData is the template input for the styles listing page.
type stylesTemplateData struct {
	// Projection used by default
	DefaultProjection string
	// All supported projections for this dataset
	SupportedProjections []config.SupportedSrs
	// All supported projections by GoKoala (for tiles)
	AllProjections map[string]any
}
// stylesMetadataTemplateData is the template input for a single style's metadata page.
type stylesMetadataTemplateData struct {
	// Metadata about this style
	Metadata config.Style
	// Projection used by this style
	Projection string
}
// Styles implements the OGC API Styles building block.
type Styles struct {
	engine                *engine.Engine
	// serves legend images from a local directory when Resources.Directory is configured; nil otherwise
	localResourcesHandler http.Handler
}
// NewStyles sets up the OGC API Styles building block: validates the style
// config, pre-renders all style (metadata) templates and registers the /styles
// routes on the engine's router.
func NewStyles(e *engine.Engine) *Styles {
	// default style must be the first entry in supported styles
	if e.Config.OgcAPI.Styles.Default != e.Config.OgcAPI.Styles.SupportedStyles[0].ID {
		log.Fatalf("default style must be first entry in supported styles. '%s' does not match '%s'",
			e.Config.OgcAPI.Styles.SupportedStyles[0].ID, e.Config.OgcAPI.Styles.Default)
	}
	allProjections := util.Cast(config.AllTileProjections)
	supportedProjections := e.Config.OgcAPI.Tiles.GetProjections()
	if len(supportedProjections) == 0 {
		log.Fatalf("failed to setup OGC API Styles, no supported projections (SRS) found in OGC API Tiles")
	}
	defaultProjection = strings.ToLower(config.AllTileProjections[supportedProjections[0].Srs])
	// pre-render templates, catches issues early on during start-up
	e.RenderTemplatesWithParams(stylesPath,
		&stylesTemplateData{defaultProjection, supportedProjections, allProjections},
		stylesBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"styles.go.json"),
		engine.NewTemplateKey(templatesDir+"styles.go.html"))
	renderStylesPerProjection(e, supportedProjections)
	styles := &Styles{
		engine: e,
	}
	e.Router.Get(stylesPath, styles.Styles())
	e.Router.Get(stylesPath+"/{style}", styles.Style())
	e.Router.Get(stylesPath+"/{style}/metadata", styles.Metadata())
	e.Router.Get(stylesPath+"/{style}/legend", styles.Legend())
	// serve legends from local disk when a resources directory is configured
	// (single guard; the original checked e.Config.Resources for nil twice)
	if res := e.Config.Resources; res != nil && res.Directory != nil && *res.Directory != "" {
		styles.localResourcesHandler = http.FileServer(http.Dir(*res.Directory))
	}
	return styles
}
// Styles serves the pre-rendered list of styles in the negotiated format and language.
func (s *Styles) Styles() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := s.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(
			templatesDir+"styles.go."+format, s.engine.WithNegotiatedLanguage(w, r))
		s.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// Style serves a single style: an HTML detail page, or the style definition
// itself in one of the supported style formats (falling back to Mapbox style
// format when the negotiated format isn't a supported style format).
func (s *Styles) Style() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		style, styleID := parseStyleParam(r)
		styleFormat := s.engine.CN.NegotiateFormat(r)
		var key engine.TemplateKey
		if styleFormat == engine.FormatHTML {
			key = engine.NewTemplateKey(
				templatesDir+"style.go.html", engine.WithInstanceName(style), s.engine.WithNegotiatedLanguage(w, r))
		} else {
			var instanceName string
			if slices.Contains(s.engine.CN.GetSupportedStyleFormats(), styleFormat) {
				instanceName = style + "." + styleFormat
			} else {
				// unsupported style format requested: fall back to Mapbox style
				styleFormat = engine.FormatMapboxStyle
				instanceName = style + "." + engine.FormatMapboxStyle
			}
			// style definitions live in the configured styles dir, keyed by bare style ID + format extension
			key = engine.TemplateKey{
				Name:         styleID + s.engine.CN.GetStyleFormatExtension(styleFormat),
				Directory:    s.engine.Config.OgcAPI.Styles.StylesDir,
				Format:       styleFormat,
				InstanceName: instanceName,
				Language:     s.engine.CN.NegotiateLanguage(w, r),
			}
		}
		s.engine.Serve(w, r, engine.ServeTemplate(key))
	}
}
// Metadata serves the pre-rendered metadata of a single style in the negotiated
// format and language.
func (s *Styles) Metadata() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		styleWithProjection, _ := parseStyleParam(r)
		format := s.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(
			templatesDir+"styleMetadata.go."+format,
			engine.WithInstanceName(styleWithProjection),
			s.engine.WithNegotiatedLanguage(w, r))
		s.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// Legend serves the legend image configured for a style, either from a local
// resources directory or by proxying to a remote resources URL.
func (s *Styles) Legend() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		_, styleID := parseStyleParam(r)
		// search matching legend when configured
		var legend string
		for _, supportedStyle := range s.engine.Config.OgcAPI.Styles.SupportedStyles {
			if supportedStyle.ID == styleID && supportedStyle.Legend != nil {
				legend = *supportedStyle.Legend
				break
			}
		}
		// legends can only be served when a resources location is configured
		if s.engine.Config.Resources == nil {
			engine.RenderProblem(engine.ProblemNotFound, w, "no legends configured")
			return
		}
		if legend == "" {
			engine.RenderProblem(engine.ProblemNotFound, w, "no legend configured for style "+styleID)
			return
		}
		// rewrite legend url to configured legend resource (png file).
		switch {
		case s.localResourcesHandler != nil:
			// serve from local disk via the file server set up in NewStyles
			r.URL.Path = legend
			s.localResourcesHandler.ServeHTTP(w, r)
		case s.engine.Config.Resources.URL != nil:
			// proxy to the remote resources location
			legendURL, _ := url.JoinPath(s.engine.Config.Resources.URL.String(), legend)
			target, _ := url.ParseRequestURI(legendURL)
			s.engine.ReverseProxy(w, r, target, false, "")
		default:
			engine.RenderProblem(engine.ProblemServerError, w, "legend not properly configured")
		}
	}
}
// parseStyleParam extracts the full style name (including projection suffix) and
// the bare style ID from the request.
// Previously, the API did not utilise separate styles per projection; whereas the current implementation
// advertises all possible combinations of available styles and available projections as separate styles.
// To ensure that the use of style URLs without projection remains possible for previously published APIs,
// URLs without an explicit projection are defaulted to the first configured projection.
func parseStyleParam(r *http.Request) (style string, styleID string) {
	style = chi.URLParam(r, "style")
	var hasProjection bool
	styleID, _, hasProjection = strings.Cut(style, projectionDelimiter)
	if !hasProjection {
		style += projectionDelimiter + defaultProjection
	}
	return style, styleID
}
// renderStylesPerProjection pre-renders metadata (JSON + HTML) and style
// definition templates for every combination of configured style and supported
// projection; catches template issues early during start-up.
func renderStylesPerProjection(e *engine.Engine, supportedProjections []config.SupportedSrs) {
	for _, style := range e.Config.OgcAPI.Styles.SupportedStyles {
		for _, supportedSrs := range supportedProjections {
			projection := config.AllTileProjections[supportedSrs.Srs]
			zoomLevelRange := supportedSrs.ZoomLevelRange
			// instance ID uniquely identifies a style+projection combination, e.g. "mystyle__netherlandsrdnewquad"
			styleInstanceID := style.ID + projectionDelimiter + strings.ToLower(projection)
			styleProjectionBreadcrumb := engine.Breadcrumb{
				Name: style.Title + " (" + projection + ")",
				Path: stylesCrumb + styleInstanceID,
			}
			data := &stylesMetadataTemplateData{style, projection}
			// Render metadata template (JSON)
			path := stylesPath + "/" + styleInstanceID + "/metadata"
			e.RenderTemplatesWithParams(path, data, nil,
				engine.NewTemplateKey(templatesDir+"styleMetadata.go.json", engine.WithInstanceName(styleInstanceID)))
			// Render metadata template (HTML)
			styleMetadataBreadcrumbs := stylesBreadcrumbs
			styleMetadataBreadcrumbs = append(styleMetadataBreadcrumbs, []engine.Breadcrumb{
				styleProjectionBreadcrumb,
				{
					Name: "Metadata",
					Path: stylesCrumb + styleInstanceID + "/metadata",
				},
			}...)
			e.RenderTemplatesWithParams(path, data, styleMetadataBreadcrumbs,
				engine.NewTemplateKey(templatesDir+"styleMetadata.go.html", engine.WithInstanceName(styleInstanceID)))
			// Add existing style definitions to rendered templates
			renderStylePerFormat(e, style, styleInstanceID, projection, zoomLevelRange, styleProjectionBreadcrumb)
		}
	}
}
// renderStylePerFormat pre-renders the style definition (per configured style
// format) plus the style's HTML detail page for one style+projection combination.
func renderStylePerFormat(e *engine.Engine, style config.Style, styleInstanceID string,
	projection string, zoomLevelRange config.ZoomLevelRange, styleProjectionBreadcrumb engine.Breadcrumb) {
	for _, styleFormat := range style.Formats {
		formatExtension := e.CN.GetStyleFormatExtension(styleFormat.Format)
		// style definitions are read from the configured styles dir on disk
		styleKey := engine.TemplateKey{
			Name:         style.ID + formatExtension,
			Directory:    e.Config.OgcAPI.Styles.StylesDir,
			Format:       styleFormat.Format,
			InstanceName: styleInstanceID + "." + styleFormat.Format,
		}
		path := stylesPath + "/" + styleInstanceID
		// Render template (JSON)
		e.RenderTemplatesWithParams(path, struct {
			Projection     string
			ZoomLevelRange config.ZoomLevelRange
		}{Projection: projection, ZoomLevelRange: zoomLevelRange}, nil, styleKey)
		// Render template (HTML)
		styleBreadCrumbs := stylesBreadcrumbs
		styleBreadCrumbs = append(styleBreadCrumbs, styleProjectionBreadcrumb)
		e.RenderTemplatesWithParams(path, style, styleBreadCrumbs,
			engine.NewTemplateKey(templatesDir+"style.go.html", engine.WithInstanceName(styleInstanceID)))
	}
}
package tiles
import (
"errors"
"fmt"
"log"
"net/http"
"net/url"
"os"
"strconv"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/engine/util"
g "github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/go-chi/chi/v5"
"gopkg.in/yaml.v3"
)
const (
	templatesDir            = "internal/ogc/tiles/templates/"
	tilesPath               = "/tiles"
	tilesLocalPath          = "tiles/"
	tileMatrixSetsPath      = "/tileMatrixSets"
	tileMatrixSetsLocalPath = "tileMatrixSets/"
	// default URL template for tiles on the tileserver
	defaultTilesTmpl        = "{tms}/{z}/{x}/{y}." + engine.FormatMVTAlternative
	collectionsCrumb        = "collections/"
	tilesCrumbTitle         = "Tiles"
	// directory holding the per-tile-matrix-set limits (YAML) files
	tmsLimitsDir            = "internal/ogc/tiles/tileMatrixSetLimits/"
)
// Breadcrumb trails used on the various tiles-related HTML pages.
var (
	tilesBreadcrumbs = []engine.Breadcrumb{
		{
			Name: tilesCrumbTitle,
			Path: "tiles",
		},
	}
	tileMatrixSetsBreadcrumbs = []engine.Breadcrumb{
		{
			Name: "Tile Matrix Sets",
			Path: "tileMatrixSets",
		},
	}
	collectionsBreadcrumb = []engine.Breadcrumb{
		{
			Name: "Collections",
			Path: "collections",
		},
	}
)
// templateData is the template input for tiles pages (dataset- or collection-level).
type templateData struct {
	// Tiles top-level or collection-level tiles config
	config.Tiles
	// BaseURL part of the url prefixing "/tiles"
	BaseURL string
	// All supported projections by GoKoala (for tiles)
	AllProjections map[string]any
}
// Tiles implements the OGC API Tiles building block.
type Tiles struct {
	engine              *engine.Engine
	// limits per tile matrix set ID; inner map is presumably keyed by tile matrix (zoom level) — confirm against readTileMatrixSetLimits
	tileMatrixSetLimits map[string]map[int]TileMatrixSetLimits
}
// TileMatrixSetLimits holds the valid column/row range of tiles within one tile matrix.
type TileMatrixSetLimits struct {
	MinCol int `yaml:"minCol" json:"minCol"`
	MaxCol int `yaml:"maxCol" json:"maxCol"`
	MinRow int `yaml:"minRow" json:"minRow"`
	MaxRow int `yaml:"maxRow" json:"maxRow"`
}
// NewTiles sets up the OGC API Tiles building block: loads tile matrix set
// limits, pre-renders templates and registers routes for tile matrix sets,
// dataset-level tiles and collection-level (geodata) tiles.
func NewTiles(e *engine.Engine) *Tiles {
	tiles := &Tiles{engine: e}
	// TileMatrixSetLimits
	supportedProjections := e.Config.OgcAPI.Tiles.GetProjections()
	tiles.tileMatrixSetLimits = readTileMatrixSetLimits(supportedProjections)
	// TileMatrixSets
	renderTileMatrixTemplates(e)
	e.Router.Get(tileMatrixSetsPath, tiles.TileMatrixSets())
	e.Router.Get(tileMatrixSetsPath+"/{tileMatrixSetId}", tiles.TileMatrixSet())
	// Top-level tiles (dataset tiles in OGC spec)
	if e.Config.OgcAPI.Tiles.DatasetTiles != nil {
		renderTilesTemplates(e, nil, templateData{
			*e.Config.OgcAPI.Tiles.DatasetTiles,
			e.Config.BaseURL.String(),
			util.Cast(config.AllTileProjections),
		})
		e.Router.Get(tilesPath, tiles.TilesetsList())
		e.Router.Get(tilesPath+"/{tileMatrixSetId}", tiles.Tileset())
		e.Router.Head(tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.Tile(*e.Config.OgcAPI.Tiles.DatasetTiles))
		e.Router.Get(tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.Tile(*e.Config.OgcAPI.Tiles.DatasetTiles))
	}
	// Collection-level tiles (geodata tiles in OGC spec)
	geoDataTiles := map[string]config.Tiles{}
	for _, coll := range e.Config.OgcAPI.Tiles.Collections {
		if coll.Tiles == nil {
			continue
		}
		// NOTE(review): &coll relies on per-iteration loop variables (Go 1.22+) — confirm go.mod version
		renderTilesTemplates(e, &coll, templateData{
			coll.Tiles.GeoDataTiles,
			e.Config.BaseURL.String() + g.CollectionsPath + "/" + coll.ID,
			util.Cast(config.AllTileProjections),
		})
		geoDataTiles[coll.ID] = coll.Tiles.GeoDataTiles
	}
	// only register collection tile routes when at least one collection has tiles
	if len(geoDataTiles) != 0 {
		e.Router.Get(g.CollectionsPath+"/{collectionId}"+tilesPath, tiles.TilesetsListForCollection())
		e.Router.Get(g.CollectionsPath+"/{collectionId}"+tilesPath+"/{tileMatrixSetId}", tiles.TilesetForCollection())
		e.Router.Head(g.CollectionsPath+"/{collectionId}"+tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.TileForCollection(geoDataTiles))
		e.Router.Get(g.CollectionsPath+"/{collectionId}"+tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.TileForCollection(geoDataTiles))
	}
	return tiles
}
// TileMatrixSets serves the list of all supported tile matrix sets.
func (t *Tiles) TileMatrixSets() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := t.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(templatesDir+"tileMatrixSets.go."+format,
			t.engine.WithNegotiatedLanguage(w, r))
		t.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// TileMatrixSet serves a single tile matrix set identified by its ID.
func (t *Tiles) TileMatrixSet() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tmsID := chi.URLParam(r, "tileMatrixSetId")
		format := t.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(templatesDir+tileMatrixSetsLocalPath+tmsID+".go."+format,
			t.engine.WithNegotiatedLanguage(w, r))
		t.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// TilesetsList serves the list of dataset-level tilesets.
func (t *Tiles) TilesetsList() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := t.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(templatesDir+"tiles.go."+format,
			t.engine.WithNegotiatedLanguage(w, r))
		t.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// TilesetsListForCollection serves the list of tilesets of a single collection.
func (t *Tiles) TilesetsListForCollection() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collID := chi.URLParam(r, "collectionId")
		format := t.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(templatesDir+"tiles.go."+format,
			engine.WithInstanceName(collID),
			t.engine.WithNegotiatedLanguage(w, r))
		t.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// Tileset serves metadata about a single (top-level) tileset, identified
// by the tileMatrixSetId path parameter.
func (t *Tiles) Tileset() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tmsID := chi.URLParam(r, "tileMatrixSetId")
		format := t.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(
			templatesDir+tilesLocalPath+tmsID+".go."+format,
			t.engine.WithNegotiatedLanguage(w, r))
		t.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// TilesetForCollection serves metadata about a single tileset belonging to a
// specific collection, identified by the collectionId and tileMatrixSetId
// path parameters.
func (t *Tiles) TilesetForCollection() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collID := chi.URLParam(r, "collectionId")
		tmsID := chi.URLParam(r, "tileMatrixSetId")
		format := t.engine.CN.NegotiateFormat(r)
		templateKey := engine.NewTemplateKey(
			templatesDir+tilesLocalPath+tmsID+".go."+format,
			engine.WithInstanceName(collID),
			t.engine.WithNegotiatedLanguage(w, r))
		t.engine.Serve(w, r, engine.ServeTemplate(templateKey))
	}
}
// Tile reverse proxy to configured tileserver/object storage. Assumes the backing resource is publicly accessible.
//
// Request handling: extract path params, validate format and tile coordinates
// against the configured tile matrix set limits, then proxy to the tileserver.
func (t *Tiles) Tile(tilesConfig config.Tiles) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tmsID := chi.URLParam(r, "tileMatrixSetId")
		matrix := chi.URLParam(r, "tileMatrix")
		row := chi.URLParam(r, "tileRow")
		col, err := getTileColumn(r, t.engine.CN.NegotiateFormat(r))
		if err != nil {
			engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error())
			return
		}
		// Convert z/row/col to integers so the limits can be validated.
		z, y, x, err := parseTileParams(matrix, row, col)
		if err != nil {
			// Strip the strconv prefix so users see a clean message.
			engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, strings.ReplaceAll(err.Error(), "strconv.Atoi: ", ""))
			return
		}
		if _, known := t.tileMatrixSetLimits[tmsID]; !known {
			err = fmt.Errorf("unknown tileMatrixSet '%s'", tmsID)
			engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error())
			return
		}
		if err = checkTileMatrixSetLimits(t.tileMatrixSetLimits, tmsID, z, y, x); err != nil {
			// Out-of-range tiles are reported as 404, per OGC API Tiles.
			engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
			return
		}
		target, err := createTilesURL(tmsID, matrix, col, row, tilesConfig)
		if err != nil {
			engine.RenderProblemAndLog(engine.ProblemServerError, w, err)
			return
		}
		t.engine.ReverseProxy(w, r, target, true, engine.MediaTypeMVT)
	}
}
// TileForCollection reverse proxy to configured tileserver/object storage for tiles within a given collection.
// Assumes the backing resource is publicly accessible.
//
// Same validation pipeline as Tile, with an extra lookup of the per-collection
// tiles configuration.
func (t *Tiles) TileForCollection(tilesConfigByCollection map[string]config.Tiles) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collID := chi.URLParam(r, "collectionId")
		tmsID := chi.URLParam(r, "tileMatrixSetId")
		matrix := chi.URLParam(r, "tileMatrix")
		row := chi.URLParam(r, "tileRow")
		col, err := getTileColumn(r, t.engine.CN.NegotiateFormat(r))
		if err != nil {
			engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error())
			return
		}
		// Convert z/row/col to integers so the limits can be validated.
		z, y, x, err := parseTileParams(matrix, row, col)
		if err != nil {
			// Strip the strconv prefix so users see a clean message.
			engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, strings.ReplaceAll(err.Error(), "strconv.Atoi: ", ""))
			return
		}
		if _, known := t.tileMatrixSetLimits[tmsID]; !known {
			err = fmt.Errorf("unknown tileMatrixSet '%s'", tmsID)
			engine.RenderProblemAndLog(engine.ProblemBadRequest, w, err, err.Error())
			return
		}
		if err = checkTileMatrixSetLimits(t.tileMatrixSetLimits, tmsID, z, y, x); err != nil {
			// Out-of-range tiles are reported as 404, per OGC API Tiles.
			engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
			return
		}
		tilesConfig, ok := tilesConfigByCollection[collID]
		if !ok {
			err = fmt.Errorf("no tiles available for collection: %s", collID)
			engine.RenderProblemAndLog(engine.ProblemNotFound, w, err, err.Error())
			return
		}
		target, err := createTilesURL(tmsID, matrix, col, row, tilesConfig)
		if err != nil {
			engine.RenderProblemAndLog(engine.ProblemServerError, w, err)
			return
		}
		t.engine.ReverseProxy(w, r, target, true, engine.MediaTypeMVT)
	}
}
func getTileColumn(r *http.Request, format string) (string, error) {
tileCol := chi.URLParam(r, "tileCol")
// We support content negotiation using Accept header and ?f= param, but also
// using the .pbf extension. This is for backwards compatibility.
if !strings.HasSuffix(tileCol, "."+engine.FormatMVTAlternative) {
// if no format is specified, default to mvt
if f := strings.Replace(format, engine.FormatJSON, engine.FormatMVT, 1); f != engine.FormatMVT && f != engine.FormatMVTAlternative {
return "", errors.New("specify tile format. Currently only Mapbox Vector Tiles (?f=mvt) tiles are supported")
}
} else {
tileCol = tileCol[:len(tileCol)-4] // remove .pbf extension
}
return tileCol, nil
}
// createTilesURL builds the upstream tileserver URL for a single tile by
// substituting {tms}/{z}/{x}/{y} into the configured (or default) URI template
// and appending the result to the configured tileserver base URL.
func createTilesURL(tileMatrixSetID string, tileMatrix string, tileCol string,
	tileRow string, tilesCfg config.Tiles) (*url.URL, error) {
	tilesTmpl := defaultTilesTmpl
	if tilesCfg.URITemplateTiles != nil {
		tilesTmpl = *tilesCfg.URITemplateTiles
	}
	// OGC spec is (default) z/row/col but tileserver is z/col/row (z/x/y)
	replacer := strings.NewReplacer("{tms}", tileMatrixSetID, "{z}", tileMatrix, "{x}", tileCol, "{y}", tileRow)
	// Fix: the JoinPath error was previously discarded with "_"; surface it so
	// a malformed template can't silently produce a bogus proxy target.
	path, err := url.JoinPath("/", replacer.Replace(tilesTmpl))
	if err != nil {
		return nil, fmt.Errorf("invalid tile path, can't proxy tiles: %w", err)
	}
	target, err := url.Parse(tilesCfg.TileServer.String() + path)
	if err != nil {
		return nil, fmt.Errorf("invalid target url, can't proxy tiles: %w", err)
	}
	return target, nil
}
// renderTileMatrixTemplates pre-renders the tile matrix sets overview page plus
// one metadata page per supported projection (JSON and HTML variants).
func renderTileMatrixTemplates(e *engine.Engine) {
	e.RenderTemplates(tileMatrixSetsPath,
		tileMatrixSetsBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"tileMatrixSets.go.json"),
		engine.NewTemplateKey(templatesDir+"tileMatrixSets.go.html"))
	for _, projection := range config.AllTileProjections {
		// Fix: copy the shared base breadcrumbs instead of appending to the
		// package-level slice directly. Appending to a shared slice can clobber
		// its backing array across iterations (slice aliasing) if it ever has
		// spare capacity.
		breadcrumbs := make([]engine.Breadcrumb, 0, len(tileMatrixSetsBreadcrumbs)+1)
		breadcrumbs = append(breadcrumbs, tileMatrixSetsBreadcrumbs...)
		breadcrumbs = append(breadcrumbs, engine.Breadcrumb{
			Name: projection,
			Path: tileMatrixSetsLocalPath + projection,
		})
		e.RenderTemplates(tileMatrixSetsPath+"/"+projection,
			breadcrumbs,
			engine.NewTemplateKey(templatesDir+tileMatrixSetsLocalPath+projection+".go.json"),
			engine.NewTemplateKey(templatesDir+tileMatrixSetsLocalPath+projection+".go.html"))
	}
}
// renderTilesTemplates pre-renders tiles metadata templates, either for the
// top-level tiles endpoint (collection == nil) or for a specific collection,
// followed by per-projection metadata (JSON, HTML and TileJSON variants).
func renderTilesTemplates(e *engine.Engine, collection *config.GeoSpatialCollection, data templateData) {
	var breadcrumbs []engine.Breadcrumb
	path := tilesPath
	collectionID := ""
	if collection != nil {
		collectionID = collection.ID
		path = g.CollectionsPath + "/" + collectionID + tilesPath
		// Fix: build onto a fresh slice instead of appending to the shared
		// package-level collectionsBreadcrumb, to avoid backing-array aliasing.
		breadcrumbs = append(breadcrumbs, collectionsBreadcrumb...)
		breadcrumbs = append(breadcrumbs,
			engine.Breadcrumb{
				Name: getCollectionTitle(collectionID, collection.Metadata),
				Path: collectionsCrumb + collectionID,
			},
			engine.Breadcrumb{
				Name: tilesCrumbTitle,
				Path: collectionsCrumb + collectionID + tilesPath,
			})
	} else {
		breadcrumbs = append(breadcrumbs, tilesBreadcrumbs...)
	}
	e.RenderTemplatesWithParams(path,
		data,
		breadcrumbs,
		engine.NewTemplateKey(templatesDir+"tiles.go.json", engine.WithInstanceName(collectionID)),
		engine.NewTemplateKey(templatesDir+"tiles.go.html", engine.WithInstanceName(collectionID)))
	// Now render metadata about tiles per projection/SRS.
	for _, projection := range config.AllTileProjections {
		path = tilesPath + "/" + projection
		// Fix: take a fresh copy per iteration so the per-projection appends
		// never share (and overwrite) the base slice's backing array.
		projectionBreadcrumbs := append([]engine.Breadcrumb{}, breadcrumbs...)
		if collection != nil {
			projectionBreadcrumbs = append(projectionBreadcrumbs, engine.Breadcrumb{
				Name: projection,
				Path: collectionsCrumb + collectionID + path,
			})
			path = g.CollectionsPath + "/" + collectionID + tilesPath + "/" + projection
		} else {
			projectionBreadcrumbs = append(projectionBreadcrumbs, engine.Breadcrumb{
				Name: projection,
				Path: tilesLocalPath + projection,
			})
		}
		e.RenderTemplatesWithParams(path,
			data,
			projectionBreadcrumbs,
			engine.NewTemplateKey(templatesDir+tilesLocalPath+projection+".go.json", engine.WithInstanceName(collectionID)),
			engine.NewTemplateKey(templatesDir+tilesLocalPath+projection+".go.html", engine.WithInstanceName(collectionID)))
		e.RenderTemplatesWithParams(path,
			data,
			projectionBreadcrumbs,
			engine.NewTemplateKey(templatesDir+tilesLocalPath+projection+".go.tilejson", engine.WithInstanceName(collectionID)))
	}
}
// getCollectionTitle returns the collection's metadata title when available,
// falling back to the collection ID otherwise.
func getCollectionTitle(collectionID string, metadata *config.GeoSpatialCollectionMetadata) string {
	if metadata == nil || metadata.Title == nil {
		return collectionID
	}
	return *metadata.Title
}
// readTileMatrixSetLimits loads, per supported projection, the tile matrix set
// limits from the corresponding YAML file in tmsLimitsDir and keeps only the
// zoom levels within the configured supported range. Exits the process
// (log.Fatalf) when a limits file is missing or malformed, since the server
// can't validate tile requests without them.
func readTileMatrixSetLimits(supportedProjections []config.SupportedSrs) map[string]map[int]TileMatrixSetLimits {
	tileMatrixSetLimits := make(map[string]map[int]TileMatrixSetLimits, len(supportedProjections))
	for _, supportedSrs := range supportedProjections {
		tileMatrixSetID := config.AllTileProjections[supportedSrs.Srs]
		limitsFile := tmsLimitsDir + tileMatrixSetID + ".yaml"
		yamlFile, err := os.ReadFile(limitsFile)
		if err != nil {
			// Fix: report the full path and the underlying error (the original
			// message omitted both, making failures hard to diagnose).
			log.Fatalf("unable to read file %s: %v", limitsFile, err)
		}
		tmsLimits := make(map[int]TileMatrixSetLimits)
		if err = yaml.Unmarshal(yamlFile, &tmsLimits); err != nil {
			log.Fatalf("cannot unmarshal yaml: %v", err)
		}
		// keep only the zoomlevels supported
		for tm := range tmsLimits {
			if tm < supportedSrs.ZoomLevelRange.Start || tm > supportedSrs.ZoomLevelRange.End {
				delete(tmsLimits, tm)
			}
		}
		tileMatrixSetLimits[tileMatrixSetID] = tmsLimits
	}
	return tileMatrixSetLimits
}
// parseTileParams converts the tileMatrix, tileRow and tileCol path parameters
// to integers. The returned error joins all individual conversion failures
// (nil when every parameter parses cleanly).
func parseTileParams(tileMatrix, tileRow, tileCol string) (int, int, int, error) {
	var (
		vals [3]int
		errs [3]error
	)
	for i, raw := range []string{tileMatrix, tileRow, tileCol} {
		vals[i], errs[i] = strconv.Atoi(raw)
	}
	return vals[0], vals[1], vals[2], errors.Join(errs[:]...)
}
// checkTileMatrixSetLimits validates that the requested tile coordinates fall
// within the limits configured for the given tile matrix set and zoom level.
// Returns nil when the tile is in range.
func checkTileMatrixSetLimits(tileMatrixSetLimits map[string]map[int]TileMatrixSetLimits,
	tileMatrixSetID string, tileMatrix, tileRow, tileCol int) error {
	limits, ok := tileMatrixSetLimits[tileMatrixSetID][tileMatrix]
	if !ok {
		// tileMatrix (zoom level) outside the supported range
		return fmt.Errorf("tileMatrix %d is out of range", tileMatrix)
	}
	rowOutOfRange := tileRow < limits.MinRow || tileRow > limits.MaxRow
	colOutOfRange := tileCol < limits.MinCol || tileCol > limits.MaxCol
	if rowOutOfRange || colOutOfRange {
		return fmt.Errorf("tileRow/tileCol %d/%d is out of range", tileRow, tileCol)
	}
	return nil
}