package main
import (
"log"
"net"
"os"
"strconv"
eng "github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/core"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/PDOK/gokoala/internal/ogc/features"
"github.com/PDOK/gokoala/internal/ogc/geovolumes"
"github.com/PDOK/gokoala/internal/ogc/processes"
"github.com/PDOK/gokoala/internal/ogc/styles"
"github.com/PDOK/gokoala/internal/ogc/tiles"
"github.com/urfave/cli/v2"
_ "go.uber.org/automaxprocs"
)
var (
	// cliFlags defines the command-line flags accepted by the GoKoala server.
	// Each flag can also be supplied through the environment variable listed
	// in its EnvVars field (flags take precedence over env vars).
	cliFlags = []cli.Flag{
		&cli.StringFlag{
			Name:     "host",
			Usage:    "bind host for OGC server",
			Value:    "0.0.0.0",
			Required: false,
			EnvVars:  []string{"HOST"},
		},
		&cli.IntFlag{
			Name:     "port",
			Usage:    "bind port for OGC server",
			Value:    8080,
			Required: false,
			EnvVars:  []string{"PORT"},
		},
		&cli.IntFlag{
			Name:     "debug-port",
			Usage:    "bind port for debug server (disabled by default), do not expose this port publicly",
			Value:    -1, // negative value means: debug server disabled
			Required: false,
			EnvVars:  []string{"DEBUG_PORT"},
		},
		&cli.IntFlag{
			Name:     "shutdown-delay",
			Usage:    "delay (in seconds) before initiating graceful shutdown (e.g. useful in k8s to allow ingress controller to update their endpoints list)",
			Value:    0,
			Required: false,
			EnvVars:  []string{"SHUTDOWN_DELAY"},
		},
		&cli.StringFlag{
			Name:     "config-file",
			Usage:    "reference to YAML configuration file",
			Required: true,
			EnvVars:  []string{"CONFIG_FILE"},
		},
		&cli.StringFlag{
			Name:     "openapi-file",
			Usage:    "reference to a (customized) OGC OpenAPI spec for the dynamic parts of your OGC API",
			Required: false,
			EnvVars:  []string{"OPENAPI_FILE"},
		},
		&cli.BoolFlag{
			Name:     "enable-trailing-slash",
			Usage:    "allow API calls to URLs with a trailing slash.",
			Value:    false, // to satisfy https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-48
			Required: false,
			EnvVars:  []string{"ALLOW_TRAILING_SLASH"},
		},
		&cli.BoolFlag{
			Name:     "enable-cors",
			Usage:    "enable Cross-Origin Resource Sharing (CORS) as required by OGC API specs. Disable if you handle CORS elsewhere.",
			Value:    false,
			Required: false,
			EnvVars:  []string{"ENABLE_CORS"},
		},
	}
)
// main starts the GoKoala CLI application: it parses flags/env vars,
// constructs the Engine from the given config and launches the OGC server.
func main() {
	app := cli.NewApp()
	app.Name = "GoKoala"
	app.Usage = "Cloud Native OGC APIs server, written in Go"
	app.Flags = cliFlags
	app.Action = func(c *cli.Context) error {
		log.Printf("%s - %s\n", app.Name, app.Usage)

		bindAddress := net.JoinHostPort(c.String("host"), strconv.Itoa(c.Int("port")))

		// Engine encapsulates shared non-OGC API specific logic
		engine, err := eng.NewEngine(
			c.String("config-file"),
			c.String("openapi-file"),
			c.Bool("enable-trailing-slash"),
			c.Bool("enable-cors"),
		)
		if err != nil {
			return err
		}
		// Each OGC API building block makes use of said Engine
		setupOGCBuildingBlocks(engine)

		return engine.Start(bindAddress, c.Int("debug-port"), c.Int("shutdown-delay"))
	}

	if err := app.Run(os.Args); err != nil {
		log.Fatal(err)
	}
}
// setupOGCBuildingBlocks wires up each OGC API building block that is
// enabled in the engine's configuration. OGC Common Part 1 is always
// started; all other blocks are opt-in via config.
func setupOGCBuildingBlocks(engine *eng.Engine) {
	// OGC Common Part 1, will always be started
	core.NewCommonCore(engine)

	// OGC Common part 2
	if engine.Config.HasCollections() {
		geospatial.NewCollections(engine)
	}

	ogc := engine.Config.OgcAPI

	// OGC 3D GeoVolumes API
	if ogc.GeoVolumes != nil {
		geovolumes.NewThreeDimensionalGeoVolumes(engine)
	}
	// OGC Tiles API
	if ogc.Tiles != nil {
		tiles.NewTiles(engine)
	}
	// OGC Styles API
	if ogc.Styles != nil {
		styles.NewStyles(engine)
	}
	// OGC Features API
	if ogc.Features != nil {
		features.NewFeatures(engine)
	}
	// OGC Processes API
	if ogc.Processes != nil {
		processes.NewProcesses(engine)
	}
}
package config
import (
"log"
"sort"
"dario.cat/mergo"
)
type GeoSpatialCollections []GeoSpatialCollection
// Unique lists all unique GeoSpatialCollections (no duplicate IDs),
// return results in alphabetic order
// Unique lists all unique GeoSpatialCollections (no duplicate IDs),
// return results in alphabetic order
func (g GeoSpatialCollections) Unique() []GeoSpatialCollection {
	byID := g.toMap()
	result := make([]GeoSpatialCollection, 0, len(byID))
	for _, collection := range byID {
		result = append(result, collection)
	}

	// prefer to sort by title when available, collection ID otherwise
	sortKey := func(c GeoSpatialCollection) string {
		if c.Metadata != nil && c.Metadata.Title != nil {
			return *c.Metadata.Title
		}
		return c.ID
	}
	sort.Slice(result, func(i, j int) bool {
		return sortKey(result[i]) < sortKey(result[j])
	})
	return result
}
// ContainsID check if given collection - by ID - exists
func (g GeoSpatialCollections) ContainsID(id string) bool {
	if _, found := g.toMap()[id]; found {
		return true
	}
	return false
}
// toMap indexes the collections by ID. When two collections share the same ID
// their contents are merged (fields already set on the earlier occurrence win).
func (g GeoSpatialCollections) toMap() map[string]GeoSpatialCollection {
	result := make(map[string]GeoSpatialCollection)
	for _, collection := range g {
		existing, duplicate := result[collection.ID]
		if !duplicate {
			result[collection.ID] = collection
			continue
		}
		// duplicate ID: merge the two entries into one
		if err := mergo.Merge(&existing, collection); err != nil {
			log.Fatalf("failed to merge 2 collections with the same name '%s': %v", collection.ID, err)
		}
		result[collection.ID] = existing
	}
	return result
}
//go:generate ../hack/generate-deepcopy.sh
package config
import (
"encoding/json"
"errors"
"fmt"
"math/rand"
"os"
"path/filepath"
"slices"
"strconv"
"strings"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/creasty/defaults"
"github.com/docker/go-units"
"github.com/go-playground/validator/v10"
"golang.org/x/text/language"
"gopkg.in/yaml.v3"
)
const (
	// CookieMaxAge is the max age (in seconds) of cookies set by GoKoala: 24 hours.
	CookieMaxAge = 60 * 60 * 24
)
// NewConfig reads the given YAML config file, required to start GoKoala.
// Environment variables referenced in the file are expanded before parsing.
// Returns the parsed+validated config, or an error when the file can't be
// read, parsed or validated.
func NewConfig(configFile string) (*Config, error) {
	yamlData, err := os.ReadFile(configFile)
	if err != nil {
		// fix: separator was missing between message and wrapped error
		return nil, fmt.Errorf("failed to read config file: %w", err)
	}

	// expand environment variables
	yamlData = []byte(os.ExpandEnv(string(yamlData)))

	// defaults and 'validate'-tag validation run in Config.UnmarshalYAML
	var config *Config
	err = yaml.Unmarshal(yamlData, &config)
	if err != nil {
		return nil, fmt.Errorf("failed to unmarshal config file, error: %w", err)
	}
	// local path checks can't run during unmarshalling since that may
	// happen on another machine, see validateLocalPaths
	err = validateLocalPaths(config)
	if err != nil {
		return nil, fmt.Errorf("validation error in config file, error: %w", err)
	}
	return config, nil
}
// UnmarshalYAML hooks into unmarshalling to set defaults and validate config
func (c *Config) UnmarshalYAML(unmarshal func(interface{}) error) error {
	// type alias without methods prevents infinite recursion into this UnmarshalYAML
	type cfg Config
	if err := unmarshal((*cfg)(c)); err != nil {
		return err
	}

	// init config: first apply defaults, then validate the result
	if err := setDefaults(c); err != nil {
		return err
	}
	if err := validate(c); err != nil {
		return err
	}
	return nil
}
// UnmarshalJSON parses JSON config. Delegates to the YAML unmarshaller
// (YAML is a superset of JSON) so the same defaults/validation logic applies.
func (c *Config) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}
// setDefaults applies the 'default' struct tags to the given config and
// performs additional custom default logic.
func setDefaults(config *Config) error {
	// process 'default' tags
	if err := defaults.Set(config); err != nil {
		return fmt.Errorf("failed to set default configuration: %w", err)
	}

	// custom default logic: default to Dutch only when no languages configured
	if len(config.AvailableLanguages) == 0 {
		config.AvailableLanguages = []Language{{language.Dutch}}
	}
	return nil
}
// validate checks the config against its 'validate' struct tags and runs
// additional custom validations. Returns an error describing all violations.
func validate(config *Config) error {
	// process 'validate' tags
	v := validator.New()
	err := v.Struct(config)
	if err != nil {
		// fix: use errors.As to detect the error type. The previous
		// errors.Is(err, ive) compared against a nil pointer target and
		// could never match, making this branch unreachable.
		var ive *validator.InvalidValidationError
		if errors.As(err, &ive) {
			return fmt.Errorf("failed to validate config: %w", err)
		}
		var errMessages []string
		var valErrs validator.ValidationErrors
		if errors.As(err, &valErrs) {
			for _, valErr := range valErrs {
				errMessages = append(errMessages, valErr.Error()+"\n")
			}
		}
		return fmt.Errorf("invalid config provided:\n%v", errMessages)
	}

	// custom validations
	if config.OgcAPI.Features != nil {
		return validateCollectionsTemporalConfig(config.OgcAPI.Features.Collections)
	}
	return nil
}
// validateCollectionsTemporalConfig verifies that every collection with
// TemporalProperties also configures Extent.Interval; reports all violations at once.
func validateCollectionsTemporalConfig(collections GeoSpatialCollections) error {
	var errMessages []string
	for _, collection := range collections {
		meta := collection.Metadata
		if meta == nil || meta.TemporalProperties == nil {
			continue
		}
		if meta.Extent == nil || meta.Extent.Interval == nil {
			errMessages = append(errMessages, fmt.Sprintf("validation failed for collection '%s'; "+
				"field 'Extent.Interval' is required with field 'TemporalProperties'\n", collection.ID))
		}
	}
	if len(errMessages) > 0 {
		return fmt.Errorf("invalid config provided:\n%v", errMessages)
	}
	return nil
}
// validateLocalPaths validates the existence of local paths.
// Not suitable for general validation while unmarshalling.
// Because that could happen on another machine.
func validateLocalPaths(config *Config) error {
	// Could use a deep dive and reflection.
	// But the settings with a path are not recursive and relatively limited in numbers.
	// GeoPackageCloudCache.Path is not verified. It will be created anyway in cloud_sqlite_vfs.createCacheDir during startup time.
	if resources := config.Resources; resources != nil && resources.Directory != nil &&
		*resources.Directory != "" && !isExistingLocalDir(*resources.Directory) {
		return errors.New("Config.Resources.Directory should be an existing directory: " + *resources.Directory)
	}
	if styles := config.OgcAPI.Styles; styles != nil && !isExistingLocalDir(styles.StylesDir) {
		return errors.New("Config.OgcAPI.Styles.StylesDir should be an existing directory: " + styles.StylesDir)
	}
	return nil
}
// isExistingLocalDir reports whether path refers to an existing directory on local disk.
func isExistingLocalDir(path string) bool {
	info, err := os.Stat(path)
	if err != nil {
		return false
	}
	return info.IsDir()
}
// +kubebuilder:object:generate=true
// Config is the root configuration of GoKoala, read from the YAML config file.
type Config struct {
	// Version of the API. When releasing a new version which contains backwards-incompatible changes, a new major version must be released.
	Version string `yaml:"version" json:"version" validate:"required,semver"`
	// Human friendly title of the API. Don't include "OGC API" in the title, this is added automatically.
	Title string `yaml:"title" json:"title" validate:"required"`
	// Shorted title / abbreviation describing the API.
	ServiceIdentifier string `yaml:"serviceIdentifier" json:"serviceIdentifier" validate:"required"`
	// Human friendly description of the API and dataset.
	Abstract string `yaml:"abstract" json:"abstract" validate:"required"`
	// Licensing term that apply to this API and dataset
	License License `yaml:"license" json:"license" validate:"required"`
	// The base URL - that's the part until the OGC API landing page - under which this API is served
	BaseURL URL `yaml:"baseUrl" json:"baseUrl" validate:"required"`
	// Optional reference to a catalog/portal/registry that lists all datasets, not just this one
	DatasetCatalogURL URL `yaml:"datasetCatalogUrl" json:"datasetCatalogUrl"`
	// The languages/translations to offer, valid options are Dutch (nl) and English (en). Dutch is the default.
	AvailableLanguages []Language `yaml:"availableLanguages" json:"availableLanguages"`
	// Define which OGC API building blocks this API supports
	OgcAPI OgcAPI `yaml:"ogcApi" json:"ogcApi" validate:"required"`
	// Reference to a PNG image to use as thumbnail on the landing page.
	// The full path is constructed by appending Resources + Thumbnail.
	// +optional
	Thumbnail *string `yaml:"thumbnail,omitempty" json:"thumbnail,omitempty"`
	// Keywords to make this API better discoverable
	// +optional
	Keywords []string `yaml:"keywords,omitempty" json:"keywords,omitempty"`
	// Moment in time when the dataset was last updated
	// +optional
	// +kubebuilder:validation:Type=string
	// +kubebuilder:validation:Format="date-time"
	LastUpdated *string `yaml:"lastUpdated,omitempty" json:"lastUpdated,omitempty" validate:"omitempty,datetime=2006-01-02T15:04:05Z"`
	// Who updated the dataset
	// +optional
	LastUpdatedBy string `yaml:"lastUpdatedBy,omitempty" json:"lastUpdatedBy,omitempty"`
	// Available support channels
	// +optional
	Support *Support `yaml:"support,omitempty" json:"support,omitempty"`
	// Key/value pairs to add extra information to the landing page
	// +optional
	DatasetDetails []DatasetDetail `yaml:"datasetDetails,omitempty" json:"datasetDetails,omitempty"`
	// Location where resources (e.g. thumbnails) specific to the given dataset are hosted
	// +optional
	Resources *Resources `yaml:"resources,omitempty" json:"resources,omitempty"`
}
// CookieMaxAge returns the max age (in seconds) for cookies set by GoKoala.
func (c *Config) CookieMaxAge() int {
	return CookieMaxAge
}
// HasCollections reports whether any OGC API building block in this config defines collections.
func (c *Config) HasCollections() bool {
	collections := c.AllCollections()
	return collections != nil
}
// AllCollections gathers the collections of all configured OGC API building
// blocks (GeoVolumes, Tiles, Features) into a single list; may contain duplicates.
func (c *Config) AllCollections() GeoSpatialCollections {
	var all GeoSpatialCollections
	if geoVolumes := c.OgcAPI.GeoVolumes; geoVolumes != nil {
		all = append(all, geoVolumes.Collections...)
	}
	if tiles := c.OgcAPI.Tiles; tiles != nil {
		all = append(all, tiles.Collections...)
	}
	if features := c.OgcAPI.Features; features != nil {
		all = append(all, features.Collections...)
	}
	return all
}
// +kubebuilder:object:generate=true
// Support describes where users can go with questions about this API.
type Support struct {
	// Name of the support organization
	Name string `yaml:"name" json:"name" validate:"required"`
	// URL to external support webpage
	// +kubebuilder:validation:Type=string
	URL URL `yaml:"url" json:"url" validate:"required"`
	// Email for support questions
	// +optional
	Email string `yaml:"email,omitempty" json:"email,omitempty" validate:"omitempty,email"`
}
// +kubebuilder:object:generate=true
// DatasetDetail is an arbitrary name/value pair shown on the landing page.
type DatasetDetail struct {
	// Arbitrary name to add extra information to the landing page
	Name string `yaml:"name" json:"name"`
	// Arbitrary value associated with the given name
	Value string `yaml:"value" json:"value"`
}
// +kubebuilder:object:generate=true
// Resources configures where dataset-specific resources (e.g. thumbnails) are hosted;
// at least one of URL or Directory must be set.
type Resources struct {
	// Location where resources (e.g. thumbnails) specific to the given dataset are hosted. This is optional if Directory is set
	// +optional
	URL *URL `yaml:"url,omitempty" json:"url,omitempty" validate:"required_without=Directory,omitempty"`
	// Location where resources (e.g. thumbnails) specific to the given dataset are hosted. This is optional if URL is set
	// +optional
	Directory *string `yaml:"directory,omitempty" json:"directory,omitempty" validate:"required_without=URL,omitempty,dirpath|filepath"`
}
// +kubebuilder:object:generate=true
// OgcAPI defines which OGC API building blocks this API supports; each nil field means "disabled".
type OgcAPI struct {
	// Enable when this API should offer OGC API 3D GeoVolumes. This includes OGC 3D Tiles.
	// +optional
	GeoVolumes *OgcAPI3dGeoVolumes `yaml:"3dgeovolumes,omitempty" json:"3dgeovolumes,omitempty"`
	// Enable when this API should offer OGC API Tiles. This also requires OGC API Styles.
	// +optional
	Tiles *OgcAPITiles `yaml:"tiles,omitempty" json:"tiles,omitempty" validate:"required_with=Styles"`
	// Enable when this API should offer OGC API Styles.
	// +optional
	Styles *OgcAPIStyles `yaml:"styles,omitempty" json:"styles,omitempty"`
	// Enable when this API should offer OGC API Features.
	// +optional
	Features *OgcAPIFeatures `yaml:"features,omitempty" json:"features,omitempty"`
	// Enable when this API should offer OGC API Processes.
	// +optional
	Processes *OgcAPIProcesses `yaml:"processes,omitempty" json:"processes,omitempty"`
}
// +kubebuilder:object:generate=true
// GeoSpatialCollection is a single collection, optionally carrying
// building-block-specific (GeoVolumes/Tiles/Features) settings.
type GeoSpatialCollection struct {
	// Unique ID of the collection
	ID string `yaml:"id" validate:"required" json:"id"`
	// Metadata describing the collection contents
	// +optional
	Metadata *GeoSpatialCollectionMetadata `yaml:"metadata,omitempty" json:"metadata,omitempty"`
	// 3D GeoVolumes specific to this collection
	// +optional
	GeoVolumes *CollectionEntry3dGeoVolumes `yaml:",inline" json:",inline"`
	// Tiles specific to this collection
	// +optional
	Tiles *CollectionEntryTiles `yaml:",inline" json:",inline"`
	// Features specific to this collection
	// +optional
	Features *CollectionEntryFeatures `yaml:",inline" json:",inline"`
}
// GeoSpatialCollectionJSON mirrors GeoSpatialCollection with embedded
// (instead of named) entry structs, since JSON inlining only works on
// embedded structs. Used by GeoSpatialCollection.MarshalJSON.
type GeoSpatialCollectionJSON struct {
	ID                           string                        `json:"id"`
	Metadata                     *GeoSpatialCollectionMetadata `json:"metadata,omitempty"`
	*CollectionEntry3dGeoVolumes `json:",inline"`
	*CollectionEntryTiles        `json:",inline"`
	*CollectionEntryFeatures     `json:",inline"`
}
// MarshalJSON custom because inlining only works on embedded structs.
// Value instead of pointer receiver because only that way it can be used for both.
func (c GeoSpatialCollection) MarshalJSON() ([]byte, error) {
	flattened := GeoSpatialCollectionJSON{
		ID:                          c.ID,
		Metadata:                    c.Metadata,
		CollectionEntry3dGeoVolumes: c.GeoVolumes,
		CollectionEntryTiles:        c.Tiles,
		CollectionEntryFeatures:     c.Features,
	}
	return json.Marshal(flattened)
}
// UnmarshalJSON parses JSON bytes into a GeoSpatialCollection. Delegates to
// the YAML unmarshaller since YAML is a superset of JSON.
func (c *GeoSpatialCollection) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, c)
}
// +kubebuilder:object:generate=true
// GeoSpatialCollectionMetadata describes the contents of a collection.
type GeoSpatialCollectionMetadata struct {
	// Human friendly title of this collection. When no title is specified the collection ID is used.
	// +optional
	Title *string `yaml:"title,omitempty" json:"title,omitempty"`
	// Describes the content of this collection
	Description *string `yaml:"description" json:"description" validate:"required"`
	// Reference to a PNG image to use as thumbnail on the collections.
	// The full path is constructed by appending Resources + Thumbnail.
	// +optional
	Thumbnail *string `yaml:"thumbnail,omitempty" json:"thumbnail,omitempty"`
	// Keywords to make this collection better discoverable
	// +optional
	Keywords []string `yaml:"keywords,omitempty" json:"keywords,omitempty"`
	// Moment in time when the collection was last updated
	//
	// +optional
	// +kubebuilder:validation:Type=string
	// +kubebuilder:validation:Format="date-time"
	LastUpdated *string `yaml:"lastUpdated,omitempty" json:"lastUpdated,omitempty" validate:"omitempty,datetime=2006-01-02T15:04:05Z"`
	// Who updated this collection
	// +optional
	LastUpdatedBy string `yaml:"lastUpdatedBy,omitempty" json:"lastUpdatedBy,omitempty"`
	// Fields in the datasource to be used in temporal queries
	// +optional
	TemporalProperties *TemporalProperties `yaml:"temporalProperties,omitempty" json:"temporalProperties,omitempty" validate:"omitempty,required_with=Extent.Interval"`
	// Extent of the collection, both geospatial and/or temporal
	// +optional
	Extent *Extent `yaml:"extent,omitempty" json:"extent,omitempty"`
	// The CRS identifier which the features are originally stored, meaning no CRS transformations are applied when features are retrieved in this CRS.
	// WGS84 is the default storage CRS.
	//
	// +kubebuilder:default="http://www.opengis.net/def/crs/OGC/1.3/CRS84"
	// +kubebuilder:validation:Pattern=`^http:\/\/www\.opengis\.net\/def\/crs\/.*$`
	// +optional
	StorageCrs *string `yaml:"storageCrs,omitempty" json:"storageCrs,omitempty" default:"http://www.opengis.net/def/crs/OGC/1.3/CRS84" validate:"startswith=http://www.opengis.net/def/crs"`
}
// +kubebuilder:object:generate=true
// CollectionEntry3dGeoVolumes holds 3D GeoVolumes settings for a collection;
// at least one of URITemplate3dTiles or URITemplateDTM must be configured.
type CollectionEntry3dGeoVolumes struct {
	// Optional basepath to 3D tiles on the tileserver. Defaults to the collection ID.
	// +optional
	TileServerPath *string `yaml:"tileServerPath,omitempty" json:"tileServerPath,omitempty"`
	// URI template for individual 3D tiles.
	// +optional
	URITemplate3dTiles *string `yaml:"uriTemplate3dTiles,omitempty" json:"uriTemplate3dTiles,omitempty" validate:"required_without_all=URITemplateDTM"`
	// Optional URI template for subtrees, only required when "implicit tiling" extension is used.
	// +optional
	URITemplateImplicitTilingSubtree *string `yaml:"uriTemplateImplicitTilingSubtree,omitempty" json:"uriTemplateImplicitTilingSubtree,omitempty"`
	// URI template for digital terrain model (DTM) in Quantized Mesh format, REQUIRED when you want to serve a DTM.
	// +optional
	URITemplateDTM *string `yaml:"uriTemplateDTM,omitempty" json:"uriTemplateDTM,omitempty" validate:"required_without_all=URITemplate3dTiles"`
	// Optional URL to 3D viewer to visualize the given collection of 3D Tiles.
	// +optional
	URL3DViewer *URL `yaml:"3dViewerUrl,omitempty" json:"3dViewerUrl,omitempty"`
}
// Has3DTiles reports whether a 3D Tiles URI template is configured for this collection.
func (gv *CollectionEntry3dGeoVolumes) Has3DTiles() bool {
	return gv.URITemplate3dTiles != nil
}
// HasDTM reports whether a digital terrain model (DTM) URI template is configured for this collection.
func (gv *CollectionEntry3dGeoVolumes) HasDTM() bool {
	return gv.URITemplateDTM != nil
}
// +kubebuilder:object:generate=true
// CollectionEntryTiles holds Tiles settings for a collection (currently none).
type CollectionEntryTiles struct {
	// placeholder
}
// +kubebuilder:object:generate=true
// CollectionEntryFeatures holds Features settings for a collection.
type CollectionEntryFeatures struct {
	// Optional way to explicitly map a collection ID to the underlying table in the datasource.
	// +optional
	TableName *string `yaml:"tableName,omitempty" json:"tableName,omitempty"`
	// Optional collection specific datasources. Mutually exclusive with top-level defined datasources.
	// +optional
	Datasources *Datasources `yaml:"datasources,omitempty" json:"datasources,omitempty"`
	// Filters available for this collection
	// +optional
	Filters FeatureFilters `yaml:"filters,omitempty" json:"filters,omitempty"`
}
// +kubebuilder:object:generate=true
// FeatureFilters configures the filters offered on a Features collection.
type FeatureFilters struct {
	// OAF Part 1: filter on feature properties
	// https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameters_for_filtering_on_feature_properties
	//
	// +optional
	Properties []PropertyFilter `yaml:"properties,omitempty" json:"properties,omitempty" validate:"dive"`

	// OAF Part 3: add config for complex/CQL filters here
	// <placeholder>
}
// +kubebuilder:object:generate=true
// OgcAPI3dGeoVolumes configures the OGC API 3D GeoVolumes building block.
type OgcAPI3dGeoVolumes struct {
	// Reference to the server (or object storage) hosting the 3D Tiles
	TileServer URL `yaml:"tileServer" json:"tileServer" validate:"required"`
	// Collections to be served as 3D GeoVolumes
	Collections GeoSpatialCollections `yaml:"collections" json:"collections"`
	// Whether JSON responses will be validated against the OpenAPI spec
	// since it has significant performance impact when dealing with large JSON payloads.
	//
	// +kubebuilder:default=true
	// +optional
	ValidateResponses *bool `yaml:"validateResponses,omitempty" json:"validateResponses,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49
}
// +kubebuilder:validation:Enum=raster;vector
// TilesType denotes the kind of tiles offered: raster or vector.
type TilesType string

const (
	TilesTypeRaster TilesType = "raster"
	TilesTypeVector TilesType = "vector"
)
// +kubebuilder:object:generate=true
// OgcAPITiles configures the OGC API Tiles building block.
type OgcAPITiles struct {
	// Reference to the server (or object storage) hosting the tiles
	TileServer URL `yaml:"tileServer" json:"tileServer" validate:"required"`
	// Could be 'vector' and/or 'raster' to indicate the types of tiles offered
	Types []TilesType `yaml:"types" json:"types" validate:"required"`
	// Specifies in what projections (SRS/CRS) the tiles are offered
	SupportedSrs []SupportedSrs `yaml:"supportedSrs" json:"supportedSrs" validate:"required,dive"`
	// Optional template to the vector tiles on the tileserver. Defaults to {tms}/{z}/{x}/{y}.pbf.
	// +optional
	URITemplateTiles *string `yaml:"uriTemplateTiles,omitempty" json:"uriTemplateTiles,omitempty"`
	// The collections to offer as tiles. When no collection is specified the tiles are hosted at the root of the API (/tiles endpoint).
	// +optional
	Collections GeoSpatialCollections `yaml:"collections,omitempty" json:"collections,omitempty"`
}
// +kubebuilder:object:generate=true
// OgcAPIStyles configures the OGC API Styles building block.
type OgcAPIStyles struct {
	// ID of the style to use as default
	Default string `yaml:"default" json:"default" validate:"required"`
	// Location on disk where the styles are hosted
	StylesDir string `yaml:"stylesDir" json:"stylesDir" validate:"required,dirpath|filepath"`
	// Styles exposed through this API
	SupportedStyles []Style `yaml:"supportedStyles" json:"supportedStyles" validate:"required,dive"`
}
// +kubebuilder:object:generate=true
// OgcAPIFeatures configures the OGC API Features building block.
type OgcAPIFeatures struct {
	// Basemap to use in embedded viewer on the HTML pages.
	// +kubebuilder:default="OSM"
	// +kubebuilder:validation:Enum=OSM;BRT
	// +optional
	Basemap string `yaml:"basemap,omitempty" json:"basemap,omitempty" default:"OSM" validate:"oneof=OSM BRT"`
	// Collections to be served as features through this API
	Collections GeoSpatialCollections `yaml:"collections" json:"collections" validate:"required,dive"`
	// Limits the amount of features to retrieve with a single call
	// +optional
	Limit Limit `yaml:"limit,omitempty" json:"limit,omitempty"`
	// One or more datasources to get the features from (geopackages, postgis, etc).
	// Optional since you can also define datasources at the collection level
	// +optional
	Datasources *Datasources `yaml:"datasources,omitempty" json:"datasources,omitempty"`
	// Whether GeoJSON/JSON-FG responses will be validated against the OpenAPI spec
	// since it has significant performance impact when dealing with large JSON payloads.
	//
	// +kubebuilder:default=true
	// +optional
	ValidateResponses *bool `yaml:"validateResponses,omitempty" json:"validateResponses,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49
}
// ProjectionsForCollections returns the additional-datasource projections
// (SRS/CRS) offered by this API; shorthand for ProjectionsForCollection("").
func (oaf *OgcAPIFeatures) ProjectionsForCollections() []string {
	return oaf.ProjectionsForCollection("")
}
// ProjectionsForCollection returns the unique projections (SRS/CRS) of the
// additional datasources configured for the given collection, plus those of
// the top-level datasources. Pass an empty collectionID to aggregate over
// ALL collections. Results are sorted alphabetically.
func (oaf *OgcAPIFeatures) ProjectionsForCollection(collectionID string) []string {
	uniqueSRSs := make(map[string]struct{})
	if oaf.Datasources != nil {
		for _, a := range oaf.Datasources.Additional {
			uniqueSRSs[a.Srs] = struct{}{}
		}
	}
	for _, coll := range oaf.Collections {
		if (coll.ID == collectionID || collectionID == "") && coll.Features != nil && coll.Features.Datasources != nil {
			for _, a := range coll.Features.Datasources.Additional {
				uniqueSRSs[a.Srs] = struct{}{}
			}
			// fix: only stop early when a specific collection was requested.
			// Previously the loop broke unconditionally, so with an empty
			// collectionID (aggregate-all mode) only the first collection
			// with additional datasources was taken into account.
			if coll.ID == collectionID {
				break
			}
		}
	}
	result := util.Keys(uniqueSRSs)
	slices.Sort(result)
	return result
}
// PropertyFiltersForCollection returns the property filters configured for the
// given collection, or an empty slice when the collection has none.
func (oaf *OgcAPIFeatures) PropertyFiltersForCollection(collectionID string) []PropertyFilter {
	for _, coll := range oaf.Collections {
		if coll.ID != collectionID || coll.Features == nil {
			continue
		}
		if filters := coll.Features.Filters.Properties; filters != nil {
			return filters
		}
	}
	return []PropertyFilter{}
}
// +kubebuilder:object:generate=true
// OgcAPIProcesses configures the OGC API Processes building block.
type OgcAPIProcesses struct {
	// Enable to advertise dismiss operations on the conformance page
	SupportsDismiss bool `yaml:"supportsDismiss" json:"supportsDismiss"`
	// Enable to advertise callback operations on the conformance page
	SupportsCallback bool `yaml:"supportsCallback" json:"supportsCallback"`
	// Reference to an external service implementing the process API. GoKoala acts only as a proxy for OGC API Processes.
	ProcessesServer URL `yaml:"processesServer" json:"processesServer" validate:"required"`
}
// +kubebuilder:object:generate=true
// Limit restricts the number of features returned in a single call.
type Limit struct {
	// Number of features to return by default.
	// +kubebuilder:default=10
	// +kubebuilder:validation:Minimum=2
	// +optional
	Default int `yaml:"default,omitempty" json:"default,omitempty" validate:"gt=1" default:"10"`
	// Max number of features to return. Should be larger than 100 since the HTML interface always offers a 100 limit option.
	// +kubebuilder:default=1000
	// +kubebuilder:validation:Minimum=100
	// +optional
	Max int `yaml:"max,omitempty" json:"max,omitempty" validate:"gte=100" default:"1000"`
}
// +kubebuilder:object:generate=true
// Datasources configures where features are read from, per projection.
type Datasources struct {
	// Features should always be available in WGS84 (according to spec).
	// This specifies the datasource to be used for features in the WGS84 projection
	DefaultWGS84 Datasource `yaml:"defaultWGS84" json:"defaultWGS84" validate:"required"`
	// One or more additional datasources for features in other projections. GoKoala doesn't do
	// any on-the-fly reprojection so additional datasources need to be reprojected ahead of time.
	// +optional
	Additional []AdditionalDatasource `yaml:"additional" json:"additional" validate:"dive"`
}
// +kubebuilder:object:generate=true
// Datasource is a single feature datasource; exactly one backend (GeoPackage or PostGIS) must be set.
type Datasource struct {
	// GeoPackage to get the features from.
	// +optional
	GeoPackage *GeoPackage `yaml:"geopackage,omitempty" json:"geopackage,omitempty" validate:"required_without_all=PostGIS"`
	// PostGIS database to get the features from (not implemented yet).
	// +optional
	PostGIS *PostGIS `yaml:"postgis,omitempty" json:"postgis,omitempty" validate:"required_without_all=GeoPackage"`

	// Add more datasources here such as Mongo, Elastic, etc
}
// +kubebuilder:object:generate=true
// AdditionalDatasource is a datasource holding features in a non-WGS84 projection.
type AdditionalDatasource struct {
	// Projection (SRS/CRS) used for the features in this datasource
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs" json:"srs" validate:"required,startswith=EPSG:"`
	// The additional datasource
	Datasource `yaml:",inline" json:",inline"`
}
// +kubebuilder:object:generate=true
// PostGIS datasource settings (not implemented yet).
type PostGIS struct {
	// placeholder
}
// +kubebuilder:object:generate=true
// GeoPackage datasource settings; exactly one of Local or Cloud must be set.
type GeoPackage struct {
	// Settings to read a GeoPackage from local disk
	// +optional
	Local *GeoPackageLocal `yaml:"local,omitempty" json:"local,omitempty" validate:"required_without_all=Cloud"`
	// Settings to read a GeoPackage as a Cloud-Backed SQLite database
	// +optional
	Cloud *GeoPackageCloud `yaml:"cloud,omitempty" json:"cloud,omitempty" validate:"required_without_all=Local"`
}
// +kubebuilder:object:generate=true
// GeoPackageCommon holds config shared between local and cloud GeoPackages.
type GeoPackageCommon struct {
	// Feature id column name
	// +kubebuilder:default="fid"
	// +optional
	Fid string `yaml:"fid,omitempty" json:"fid,omitempty" validate:"required" default:"fid"`
	// Optional timeout after which queries are canceled
	// +kubebuilder:default="15s"
	// +optional
	QueryTimeout Duration `yaml:"queryTimeout,omitempty" json:"queryTimeout,omitempty" validate:"required" default:"15s"`
	// When the number of features in a bbox stay within the given value use an RTree index, otherwise use a BTree index
	// +kubebuilder:default=30000
	// +optional
	MaxBBoxSizeToUseWithRTree int `yaml:"maxBBoxSizeToUseWithRTree,omitempty" json:"maxBBoxSizeToUseWithRTree,omitempty" validate:"required" default:"30000"`
	// ADVANCED SETTING. Sets the SQLite "cache_size" pragma which determines how many pages are cached in-memory.
	// See https://sqlite.org/pragma.html#pragma_cache_size for details.
	// Default in SQLite is 2000 pages, which equates to 2000KiB (2048000 bytes). Which is denoted as -2000.
	// +kubebuilder:default=-2000
	// +optional
	InMemoryCacheSize int `yaml:"inMemoryCacheSize,omitempty" json:"inMemoryCacheSize,omitempty" validate:"required" default:"-2000"`
}
// +kubebuilder:object:generate=true
// GeoPackageLocal configures a GeoPackage served from local disk.
type GeoPackageLocal struct {
	// GeoPackageCommon shared config between local and cloud GeoPackage
	GeoPackageCommon `yaml:",inline" json:",inline"`
	// Location of GeoPackage on disk.
	// You can place the GeoPackage here manually (out-of-band) or you can specify Download
	// and let the application download the GeoPackage for you and store it at this location.
	File string `yaml:"file" json:"file" validate:"required,omitempty,filepath"`
	// Optional initialization task to download a GeoPackage during startup. GeoPackage will be
	// downloaded to local disk and stored at the location specified in File.
	// +optional
	Download *GeoPackageDownload `yaml:"download,omitempty" json:"download,omitempty"`
}
// +kubebuilder:object:generate=true
// GeoPackageDownload configures downloading a GeoPackage during startup.
type GeoPackageDownload struct {
	// Location of GeoPackage on remote HTTP(S) URL. GeoPackage will be downloaded to local disk
	// during startup and stored at the location specified in "file".
	From URL `yaml:"from" json:"from" validate:"required"`
	// ADVANCED SETTING. Determines how many workers (goroutines) in parallel will download the specified GeoPackage.
	// Setting this to 1 will disable concurrent downloads.
	// +kubebuilder:default=4
	// +kubebuilder:validation:Minimum=1
	// +optional
	Parallelism int `yaml:"parallelism,omitempty" json:"parallelism,omitempty" validate:"required,gte=1" default:"4"`
	// ADVANCED SETTING. When true TLS certs are NOT validated, false otherwise. Only use true for your own self-signed certificates!
	// +kubebuilder:default=false
	// +optional
	TLSSkipVerify bool `yaml:"tlsSkipVerify,omitempty" json:"tlsSkipVerify,omitempty" default:"false"`
	// ADVANCED SETTING. HTTP request timeout when downloading (part of) GeoPackage.
	// +kubebuilder:default="2m"
	// +optional
	Timeout Duration `yaml:"timeout,omitempty" json:"timeout,omitempty" validate:"required" default:"2m"`
	// ADVANCED SETTING. Minimum delay to use when retrying HTTP request to download (part of) GeoPackage.
	// +kubebuilder:default="1s"
	// +optional
	RetryDelay Duration `yaml:"retryDelay,omitempty" json:"retryDelay,omitempty" validate:"required" default:"1s"`
	// ADVANCED SETTING. Maximum overall delay of the exponential backoff while retrying HTTP requests to download (part of) GeoPackage.
	// +kubebuilder:default="30s"
	// +optional
	RetryMaxDelay Duration `yaml:"retryMaxDelay,omitempty" json:"retryMaxDelay,omitempty" validate:"required" default:"30s"`
	// ADVANCED SETTING. Maximum number of retries when retrying HTTP requests to download (part of) GeoPackage.
	// +kubebuilder:default=5
	// +kubebuilder:validation:Minimum=1
	// +optional
	MaxRetries int `yaml:"maxRetries,omitempty" json:"maxRetries,omitempty" validate:"required,gte=1" default:"5"`
}
// +kubebuilder:object:generate=true
// GeoPackageCloud configures a GeoPackage read as a Cloud-Backed SQLite database.
type GeoPackageCloud struct {
	// GeoPackageCommon shared config between local and cloud GeoPackage
	GeoPackageCommon `yaml:",inline" json:",inline"`
	// Reference to the cloud storage (either azure or google at the moment).
	// For example 'azure?emulator=127.0.0.1:10000&sas=0' or 'google'
	Connection string `yaml:"connection" json:"connection" validate:"required"`
	// Username of the storage account, like devstoreaccount1 when using Azurite
	User string `yaml:"user" json:"user" validate:"required"`
	// Some kind of credential like a password or key to authenticate with the storage backend, e.g:
	// 'Eby8vdM02xNOcqFlqUwJPLlmEtlCDXJ1OUzFT50uSRZ6IFsuFq2UVErCz4I6tq/K1SZFPTOtr/KBHBeksoGMGw==' when using Azurite
	Auth string `yaml:"auth" json:"auth" validate:"required"`
	// Container/bucket on the storage account
	Container string `yaml:"container" json:"container" validate:"required"`
	// Filename of the GeoPackage
	File string `yaml:"file" json:"file" validate:"required"`
	// Local cache of fetched blocks from cloud storage
	// +optional
	Cache GeoPackageCloudCache `yaml:"cache,omitempty" json:"cache,omitempty"`
	// Only for debug purposes! When true all HTTP requests executed by sqlite to cloud object storage are logged to stdout
	// +kubebuilder:default=false
	// +optional
	LogHTTPRequests bool `yaml:"logHttpRequests,omitempty" json:"logHttpRequests,omitempty" default:"false"`
}
// CacheDir returns the directory in which blocks of the cloud-backed GeoPackage
// are cached. When an explicit cache path is configured, a random suffix is
// appended to the GeoPackage name; otherwise a fresh temp dir is created.
func (gc *GeoPackageCloud) CacheDir() (string, error) {
	baseName := strings.TrimSuffix(gc.File, filepath.Ext(gc.File))
	if gc.Cache.Path == nil {
		cacheDir, err := os.MkdirTemp("", baseName)
		if err != nil {
			return "", fmt.Errorf("failed to create tempdir to cache %s, error %w", baseName, err)
		}
		return cacheDir, nil
	}
	suffix := strconv.Itoa(rand.Intn(99999)) //nolint:gosec // random isn't used for security purposes
	return filepath.Join(*gc.Cache.Path, baseName+"-"+suffix), nil
}
// GeoPackageCloudCache configures the local block cache used for a
// cloud-backed GeoPackage.
// +kubebuilder:object:generate=true
type GeoPackageCloudCache struct {
	// Optional path to directory for caching cloud-backed GeoPackage blocks, when omitted a temp dir will be used.
	// +optional
	Path *string `yaml:"path,omitempty" json:"path,omitempty" validate:"omitempty,dirpath|filepath"`
	// Max size of the local cache. Accepts human-readable size such as 100Mb, 4Gb, 1Tb, etc. When omitted 1Gb is used.
	// +kubebuilder:default="1Gb"
	// +optional
	MaxSize string `yaml:"maxSize,omitempty" json:"maxSize,omitempty" default:"1Gb"`
	// When true a warm-up query is executed on startup which aims to fill the local cache. Does increase startup time.
	// +kubebuilder:default=false
	// +optional
	WarmUp bool `yaml:"warmUp,omitempty" json:"warmUp,omitempty" default:"false"`
}
// MaxSizeAsBytes returns the configured maximum cache size as a number of bytes,
// parsed from the human-readable MaxSize string (e.g. "1Gb").
func (cache *GeoPackageCloudCache) MaxSizeAsBytes() (int64, error) {
	return units.FromHumanSize(cache.MaxSize)
}
// PropertyFilter configures a filterable property/column of a feature table.
// +kubebuilder:object:generate=true
type PropertyFilter struct {
	// Needs to match with a column name in the feature table (in the configured datasource)
	Name string `yaml:"name" json:"name" validate:"required"`
	// Explains this property filter
	// +kubebuilder:default="Filter features by this property"
	// +optional
	Description string `yaml:"description,omitempty" json:"description,omitempty" default:"Filter features by this property"`
	// When true the property/column in the feature table needs to be indexed. Initialization will fail
	// when no index is present, when false the index check is skipped. For large tables an index is recommended!
	//
	// +kubebuilder:default=true
	// +optional
	IndexRequired *bool `yaml:"indexRequired,omitempty" json:"indexRequired,omitempty" default:"true"` // ptr due to https://github.com/creasty/defaults/issues/49
}
// SupportedSrs couples a projection (e.g. "EPSG:28992") to the zoom level
// range for which tiles are available in that projection.
// +kubebuilder:object:generate=true
type SupportedSrs struct {
	// Projection (SRS/CRS) used
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs" json:"srs" validate:"required,startswith=EPSG:"`
	// Available zoom levels
	ZoomLevelRange ZoomLevelRange `yaml:"zoomLevelRange" json:"zoomLevelRange" validate:"required"`
}
// ZoomLevelRange is an inclusive range of zoom levels; Start must not exceed End
// (enforced by the ltefield/gtefield validations).
// +kubebuilder:object:generate=true
type ZoomLevelRange struct {
	// Start zoom level
	// +kubebuilder:validation:Minimum=0
	Start int `yaml:"start" json:"start" validate:"gte=0,ltefield=End"`
	// End zoom level
	End int `yaml:"end" json:"end" validate:"required,gtefield=Start"`
}
// Extent describes the geospatial (and optionally temporal) extent of a collection.
// +kubebuilder:object:generate=true
type Extent struct {
	// Projection (SRS/CRS) to be used. When none is provided WGS84 (http://www.opengis.net/def/crs/OGC/1.3/CRS84) is used.
	// +optional
	// +kubebuilder:validation:Pattern=`^EPSG:\d+$`
	Srs string `yaml:"srs,omitempty" json:"srs,omitempty" validate:"omitempty,startswith=EPSG:"`
	// Geospatial extent
	Bbox []string `yaml:"bbox" json:"bbox"`
	// Temporal extent, expected to contain exactly two entries (start and end).
	// +optional
	// +kubebuilder:validation:MinItems=2
	// +kubebuilder:validation:MaxItems=2
	Interval []string `yaml:"interval,omitempty" json:"interval,omitempty" validate:"omitempty,len=2"`
}
// TemporalProperties names the datasource fields used for temporal filtering.
// +kubebuilder:object:generate=true
type TemporalProperties struct {
	// Name of field in datasource to be used in temporal queries as the start date
	StartDate string `yaml:"startDate" json:"startDate" validate:"required"`
	// Name of field in datasource to be used in temporal queries as the end date
	EndDate string `yaml:"endDate" json:"endDate" validate:"required"`
}
// License describes the license under which the data/API is offered.
// +kubebuilder:object:generate=true
type License struct {
	// Name of the license, e.g. MIT, CC0, etc
	Name string `yaml:"name" json:"name" validate:"required"`
	// URL to license text on the web
	URL URL `yaml:"url" json:"url" validate:"required"`
}
// Style holds the metadata of a single map style (OGC API Styles).
// +kubebuilder:object:generate=true
type Style struct {
	// Unique ID of this style
	ID string `yaml:"id" json:"id" validate:"required"`
	// Human-friendly name of this style
	Title string `yaml:"title" json:"title" validate:"required"`
	// Explains what is visualized by this style
	// +optional
	Description *string `yaml:"description,omitempty" json:"description,omitempty"`
	// Keywords to make this style better discoverable
	// +optional
	Keywords []string `yaml:"keywords,omitempty" json:"keywords,omitempty"`
	// Moment in time when the style was last updated, RFC 3339 formatted (UTC, e.g. 2006-01-02T15:04:05Z)
	// +optional
	// +kubebuilder:validation:Type=string
	// +kubebuilder:validation:Format="date-time"
	LastUpdated *string `yaml:"lastUpdated,omitempty" json:"lastUpdated,omitempty" validate:"omitempty,datetime=2006-01-02T15:04:05Z"`
	// Optional version of this style
	// +optional
	Version *string `yaml:"version,omitempty" json:"version,omitempty"`
	// Reference to a PNG image to use a thumbnail on the style metadata page.
	// The full path is constructed by appending Resources + Thumbnail.
	// +optional
	Thumbnail *string `yaml:"thumbnail,omitempty" json:"thumbnail,omitempty"`
	// This style is offered in the following formats
	Formats []StyleFormat `yaml:"formats" json:"formats" validate:"required,dive"`
}
// StyleFormat is a format in which a Style is offered; currently either
// "mapbox" (default) or "sld10".
// +kubebuilder:object:generate=true
type StyleFormat struct {
	// Name of the format
	// +kubebuilder:default="mapbox"
	// +optional
	Format string `yaml:"format,omitempty" json:"format,omitempty" default:"mapbox" validate:"required,oneof=mapbox sld10"`
}
package config
import (
"encoding/json"
"time"
"gopkg.in/yaml.v3"
)
// Duration Custom time.Duration compatible with YAML and JSON (un)marshalling and kubebuilder.
// (Already supported in yaml/v3 but not encoding/json.)
//
// +kubebuilder:validation:Type=string
// +kubebuilder:validation:Format=duration
type Duration struct {
	// Embedded so Duration inherits all time.Duration methods (String, Seconds, ...).
	time.Duration
}
// MarshalJSON turn duration tag into JSON.
// Value instead of pointer receiver because only that way it can be used for both.
func (d Duration) MarshalJSON() ([]byte, error) {
	// serialize as a human-readable string like "2m0s" (matching the YAML form)
	return json.Marshal(d.Duration.String())
}
// UnmarshalJSON parses a JSON duration string (e.g. "2m") into the embedded time.Duration.
// Delegates to yaml/v3 since it already knows how to decode durations and JSON is a subset of YAML.
func (d *Duration) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, &d.Duration)
}
// MarshalYAML turn duration tag into YAML.
// Value instead of pointer receiver because only that way it can be used for both.
func (d Duration) MarshalYAML() (interface{}, error) {
	// yaml/v3 natively serializes time.Duration, so hand over the raw value
	return d.Duration, nil
}
// UnmarshalYAML parses a YAML duration scalar (e.g. "30s") into the embedded time.Duration.
func (d *Duration) UnmarshalYAML(unmarshal func(any) error) error {
	return unmarshal(&d.Duration)
}
// DeepCopyInto copies the receiver into out. Duration contains no pointers, so a
// plain value copy is a complete deep copy. Does nothing when out is nil.
func (d *Duration) DeepCopyInto(out *Duration) {
	if out == nil {
		return
	}
	*out = *d
}
// DeepCopy returns a freshly allocated copy of the receiver, or nil for a nil receiver.
func (d *Duration) DeepCopy() *Duration {
	if d == nil {
		return nil
	}
	cp := new(Duration)
	d.DeepCopyInto(cp)
	return cp
}
package config
import (
"encoding/json"
"golang.org/x/text/language"
)
// Language represents a BCP 47 language tag.
// +kubebuilder:validation:Type=string
type Language struct {
	// Embedded so Language inherits all language.Tag methods (String, Base, ...).
	language.Tag
}
// MarshalJSON turn language tag into JSON.
// Value instead of pointer receiver because only that way it can be used for both.
func (l Language) MarshalJSON() ([]byte, error) {
	// serialize as the canonical BCP 47 string, e.g. "nl" or "en-US"
	return json.Marshal(l.Tag.String())
}
// UnmarshalJSON turns a JSON string into a Language by interpreting it as a BCP 47 tag.
// Note: language.Make is lenient and never fails; invalid input yields a best-effort tag.
func (l *Language) UnmarshalJSON(b []byte) error {
	var tag string
	if err := json.Unmarshal(b, &tag); err != nil {
		return err
	}
	l.Tag = language.Make(tag)
	return nil
}
// DeepCopyInto copy the receiver, write into out. in must be non-nil.
// NOTE(review): unlike Duration.DeepCopyInto there is no nil check on out here —
// confirm generated callers always pass a non-nil out.
func (l *Language) DeepCopyInto(out *Language) {
	*out = *l
}
// DeepCopy returns a freshly allocated copy of the receiver, or nil for a nil receiver.
func (l *Language) DeepCopy() *Language {
	if l == nil {
		return nil
	}
	cp := new(Language)
	l.DeepCopyInto(cp)
	return cp
}
package config
import (
"encoding/json"
"net/url"
"strings"
"gopkg.in/yaml.v3"
)
// URL Custom net.URL compatible with YAML and JSON (un)marshalling and kubebuilder.
// In addition, it also removes trailing slash if present, so we can easily
// append a longer path without having to worry about double slashes.
//
// Allow only http/https URLs or environment variables like ${FOOBAR}
// +kubebuilder:validation:Pattern=`^(https?://.+)|(\$\{.+\}.*)`
// +kubebuilder:validation:Type=string
type URL struct {
	// This is a pointer so the wrapper can directly be used in templates, e.g.: {{ .Config.BaseURL }}
	// Otherwise you would need .String() or template.URL(). (Might be a bug.)
	*url.URL
}
// UnmarshalYAML parses a string to URL and also removes trailing slash if present,
// so we can easily append a longer path without having to worry about double slashes.
func (u *URL) UnmarshalYAML(unmarshal func(any) error) error {
	var raw string
	if err := unmarshal(&raw); err != nil {
		return err
	}
	parsed, err := parseURL(raw)
	if err != nil {
		return err
	}
	if parsed != nil {
		u.URL = parsed
	}
	return nil
}
// MarshalJSON turns URL into JSON; a nil inner URL becomes the empty string.
// Value instead of pointer receiver because only that way it can be used for both.
func (u URL) MarshalJSON() ([]byte, error) {
	var s string
	if u.URL != nil {
		s = u.URL.String()
	}
	return json.Marshal(s)
}
// UnmarshalJSON parses a string to URL and also removes trailing slash if present,
// so we can easily append a longer path without having to worry about double slashes.
// Delegates to yaml/v3 (JSON is a subset of YAML), which in turn calls UnmarshalYAML above.
func (u *URL) UnmarshalJSON(b []byte) error {
	return yaml.Unmarshal(b, u)
}
// MarshalYAML turns URL into YAML; a nil inner URL becomes the empty string.
// Value instead of pointer receiver because only that way it can be used for both.
func (u URL) MarshalYAML() (interface{}, error) {
	if u.URL != nil {
		return u.URL.String(), nil
	}
	return "", nil
}
// DeepCopyInto copies the receiver into out, cloning the wrapped *url.URL so the
// copy does not alias the original (previously only the pointer was copied, so
// mutating the "deep" copy also mutated the source). Does nothing when out is nil.
func (u *URL) DeepCopyInto(out *URL) {
	if out == nil {
		return
	}
	*out = *u
	if u.URL != nil {
		// url.URL is a flat struct apart from User (*url.Userinfo), which is
		// immutable once constructed, so a value copy suffices as a deep copy.
		clone := *u.URL
		out.URL = &clone
	}
}
// DeepCopy copies the receiver into a newly allocated URL, or returns nil for a nil receiver.
func (u *URL) DeepCopy() *URL {
	if u == nil {
		return nil
	}
	cp := new(URL)
	u.DeepCopyInto(cp)
	return cp
}
// parseURL parses s as an absolute URL, first dropping a single trailing slash
// so longer paths can be appended later without producing double slashes.
func parseURL(s string) (*url.URL, error) {
	trimmed := strings.TrimSuffix(s, "/")
	return url.ParseRequestURI(trimmed)
}
package engine
import (
"log"
"net/http"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/elnormous/contenttype"
"golang.org/x/text/language"
)
// Query parameter names, media types and output format identifiers supported by the engine.
const (
	// query parameters used for content negotiation
	FormatParam   = "f"
	languageParam = "lang"

	// media types (Content-Type values)
	MediaTypeJSON          = "application/json"
	MediaTypeHTML          = "text/html"
	MediaTypeTileJSON      = "application/vnd.mapbox.tile+json"
	MediaTypeMVT           = "application/vnd.mapbox-vector-tile"
	MediaTypeMapboxStyle   = "application/vnd.mapbox.style+json"
	MediaTypeSLD           = "application/vnd.ogc.sld+xml;version=1.0"
	MediaTypeOpenAPI       = "application/vnd.oai.openapi+json;version=3.0"
	MediaTypeGeoJSON       = "application/geo+json"
	MediaTypeJSONFG        = "application/vnd.ogc.fg+json" // https://docs.ogc.org/per/21-017r1.html#toc17
	MediaTypeQuantizedMesh = "application/vnd.quantized-mesh"

	// values accepted by the ?f= query parameter
	FormatHTML           = "html"
	FormatJSON           = "json"
	FormatTileJSON       = "tilejson"
	FormatMVT            = "mvt"
	FormatMVTAlternative = "pbf"
	FormatMapboxStyle    = "mapbox"
	FormatSLD            = "sld10"
	FormatGeoJSON        = "geojson" // ?f=json should also work for geojson
	FormatJSONFG         = "jsonfg"
	FormatGzip           = "gzip"
)
var (
	// MediaTypeJSONFamily all JSON-based media types (used e.g. to decide JSON-style error rendering)
	MediaTypeJSONFamily = []string{MediaTypeTileJSON, MediaTypeMapboxStyle, MediaTypeGeoJSON, MediaTypeJSONFG}

	// OutputFormatDefault the default output format mapping (format id -> display name)
	OutputFormatDefault = map[string]string{FormatJSON: "JSON"}

	// OutputFormatFeatures output formats supported by the OGC API Features endpoints
	OutputFormatFeatures = map[string]string{FormatJSON: "GeoJSON", FormatJSONFG: "JSON-FG"}

	// CompressibleMediaTypes media types eligible for response compression (gzip)
	CompressibleMediaTypes = []string{
		MediaTypeJSON,
		MediaTypeGeoJSON,
		MediaTypeJSONFG,
		MediaTypeTileJSON,
		MediaTypeMapboxStyle,
		MediaTypeOpenAPI,
		MediaTypeHTML,
		// common web media types
		"text/css",
		"text/plain",
		"text/javascript",
		"application/javascript",
		"image/svg+xml",
	}

	// StyleFormatExtension maps a style format id to its file extension
	StyleFormatExtension = map[string]string{
		FormatMapboxStyle: ".json",
		FormatSLD:         ".sld",
	}
)
// ContentNegotiation performs format (media type) and language negotiation
// for incoming requests, based on query params, headers and cookies.
type ContentNegotiation struct {
	// media types the engine can serve, in order of preference
	availableMediaTypes []contenttype.MediaType
	// languages the engine can serve, in order of preference
	availableLanguages []language.Tag
	// lookup tables between media type strings and format identifiers (both directions)
	formatsByMediaType map[string]string
	mediaTypesByFormat map[string]string
}
// newContentNegotiation constructs a ContentNegotiation covering all media types
// served by the engine plus the given available languages.
func newContentNegotiation(availableLanguages []config.Language) *ContentNegotiation {
	// media types in order of preference
	orderedMediaTypes := []string{
		MediaTypeJSON,
		MediaTypeHTML,
		MediaTypeTileJSON,
		MediaTypeGeoJSON,
		MediaTypeJSONFG,
		MediaTypeMVT,
		MediaTypeMapboxStyle,
		MediaTypeSLD,
	}
	mediaTypes := make([]contenttype.MediaType, 0, len(orderedMediaTypes))
	for _, mt := range orderedMediaTypes {
		mediaTypes = append(mediaTypes, contenttype.NewMediaType(mt))
	}
	formatsByMediaType := map[string]string{
		MediaTypeJSON:        FormatJSON,
		MediaTypeHTML:        FormatHTML,
		MediaTypeTileJSON:    FormatTileJSON,
		MediaTypeGeoJSON:     FormatGeoJSON,
		MediaTypeJSONFG:      FormatJSONFG,
		MediaTypeMVT:         FormatMVT,
		MediaTypeMapboxStyle: FormatMapboxStyle,
		MediaTypeSLD:         FormatSLD,
	}
	tags := make([]language.Tag, 0, len(availableLanguages))
	for _, lang := range availableLanguages {
		tags = append(tags, lang.Tag)
	}
	return &ContentNegotiation{
		availableMediaTypes: mediaTypes,
		availableLanguages:  tags,
		formatsByMediaType:  formatsByMediaType,
		mediaTypesByFormat:  util.ReverseMap(formatsByMediaType),
	}
}
// GetSupportedStyleFormats returns the style formats the engine can serve (Mapbox and SLD 1.0).
func (cn *ContentNegotiation) GetSupportedStyleFormats() []string {
	return []string{FormatMapboxStyle, FormatSLD}
}
// GetStyleFormatExtension returns the file extension for the given style format,
// or the empty string when the format is unknown.
func (cn *ContentNegotiation) GetStyleFormatExtension(format string) string {
	return StyleFormatExtension[format] // map zero value "" when absent
}
// NegotiateFormat performs content negotiation, not idempotent (since it removes the ?f= param).
// Precedence: explicit ?f= query param, then Accept header, then JSON as default.
func (cn *ContentNegotiation) NegotiateFormat(req *http.Request) string {
	if format := cn.getFormatFromQueryParam(req); format != "" {
		return format
	}
	if format := cn.getFormatFromAcceptHeader(req); format != "" {
		return format
	}
	return FormatJSON // default
}
// NegotiateLanguage performs language negotiation, not idempotent (since it removes the ?lang= param).
// Precedence: ?lang= query param, then cookie, then Accept-Language header, then Dutch as default.
func (cn *ContentNegotiation) NegotiateLanguage(w http.ResponseWriter, req *http.Request) language.Tag {
	if lang := cn.getLanguageFromQueryParam(w, req); lang != language.Und {
		return lang
	}
	if lang := cn.getLanguageFromCookie(req); lang != language.Und {
		return lang
	}
	if lang := cn.getLanguageFromHeader(req); lang != language.Und {
		return lang
	}
	return language.Dutch // default
}
// formatToMediaType returns the media type for the given format id,
// or the empty string when the format is unknown.
func (cn *ContentNegotiation) formatToMediaType(format string) string {
	return cn.mediaTypesByFormat[format]
}
// getFormatFromQueryParam reads the ?f= query param and strips it from the
// request URL (to prepare for URL rewriting). Returns "" when absent.
func (cn *ContentNegotiation) getFormatFromQueryParam(req *http.Request) string {
	queryParams := req.URL.Query()
	requestedFormat := queryParams.Get(FormatParam)
	if requestedFormat != "" {
		// remove ?f= parameter, to prepare for rewrite
		queryParams.Del(FormatParam)
		req.URL.RawQuery = queryParams.Encode()
	}
	return requestedFormat
}
// getFormatFromAcceptHeader matches the request's Accept header against the
// available media types and returns the corresponding format id, or "" when
// the header is absent, unparsable, or matches no supported media type.
func (cn *ContentNegotiation) getFormatFromAcceptHeader(req *http.Request) string {
	accepted, _, err := contenttype.GetAcceptableMediaType(req, cn.availableMediaTypes)
	if err != nil {
		// a malformed Accept header should not fail the request; fall through to the default format
		log.Printf("Failed to parse Accept header: %v. Continuing\n", err)
		return ""
	}
	return cn.formatsByMediaType[accepted.String()]
}
// getLanguageFromQueryParam reads the ?lang= query param, matches it against the
// available languages, persists the choice in a cookie and strips the param from
// the request URL. Returns language.Und when absent or unparsable.
func (cn *ContentNegotiation) getLanguageFromQueryParam(w http.ResponseWriter, req *http.Request) language.Tag {
	var requestedLanguage = language.Und
	queryParams := req.URL.Query()
	if queryParams.Get(languageParam) != "" {
		lang := queryParams.Get(languageParam)
		accepted, _, err := language.ParseAcceptLanguage(lang)
		if err != nil {
			return requestedLanguage
		}
		// pick the best match among the configured languages
		m := language.NewMatcher(cn.availableLanguages)
		_, langIndex, _ := m.Match(accepted...)
		requestedLanguage = cn.availableLanguages[langIndex]
		// override for use in cookie
		lang = requestedLanguage.String()
		// set requested language in cookie
		setLanguageCookie(w, lang)
		// remove ?lang= parameter, to prepare for rewrite
		queryParams.Del(languageParam)
		req.URL.RawQuery = queryParams.Encode()
	}
	return requestedLanguage
}
// setLanguageCookie stores the negotiated language in a strict, secure cookie
// so subsequent requests keep the same language without a ?lang= param.
func setLanguageCookie(w http.ResponseWriter, lang string) {
	cookie := &http.Cookie{
		Name:     languageParam,
		Value:    lang,
		Path:     "/",
		MaxAge:   config.CookieMaxAge,
		SameSite: http.SameSiteStrictMode,
		Secure:   true,
	}
	http.SetCookie(w, cookie)
}
// getLanguageFromCookie reads the language cookie (if any) and matches its value
// against the available languages. Returns language.Und when the cookie is
// missing or its value cannot be parsed.
func (cn *ContentNegotiation) getLanguageFromCookie(req *http.Request) language.Tag {
	cookie, err := req.Cookie(languageParam)
	if err != nil {
		return language.Und
	}
	accepted, _, err := language.ParseAcceptLanguage(cookie.Value)
	if err != nil {
		return language.Und
	}
	matcher := language.NewMatcher(cn.availableLanguages)
	_, langIndex, _ := matcher.Match(accepted...)
	return cn.availableLanguages[langIndex]
}
// getLanguageFromHeader matches the Accept-Language header against the available
// languages. Returns language.Und when the header is absent or unparsable.
func (cn *ContentNegotiation) getLanguageFromHeader(req *http.Request) language.Tag {
	header := req.Header.Get(HeaderAcceptLanguage)
	if header == "" {
		return language.Und
	}
	accepted, _, err := language.ParseAcceptLanguage(header)
	if err != nil {
		// a malformed header should not fail the request; fall through to the default language
		log.Printf("Failed to parse Accept-Language header: %v. Continuing\n", err)
		return language.Und
	}
	matcher := language.NewMatcher(cn.availableLanguages)
	_, langIndex, _ := matcher.Match(accepted...)
	return cn.availableLanguages[langIndex]
}
package engine
import (
"context"
"crypto/tls"
"fmt"
"io"
"net/http"
"net/url"
"os"
"time"
"github.com/failsafe-go/failsafe-go/failsafehttp"
"golang.org/x/sync/errgroup"
)
// bufferSize size of the copy buffer used while writing downloaded bytes to disk.
const bufferSize = 1 * 1024 * 1024 // 1MiB

// Part piece of the file to download when HTTP Range Requests are supported
type Part struct {
	Start int64 // offset of the first byte of this part (inclusive)
	End   int64 // offset one past the last byte of this part (exclusive)
	Size  int64 // number of bytes in this part
}
// Download downloads file from the given URL and stores the result in the given output location.
// Will utilize multiple concurrent connections to increase transfer speed. The latter is only
// possible when the remote server supports HTTP Range Requests, otherwise it falls back
// to a regular/single connection download. Additionally, failed requests will be retried according
// to the given settings. Returns the time spent downloading.
func Download(url url.URL, outputFilepath string, parallelism int, tlsSkipVerify bool, timeout time.Duration,
	retryDelay time.Duration, retryMaxDelay time.Duration, maxRetries int) (*time.Duration, error) {

	client := createHTTPClient(tlsSkipVerify, timeout, retryDelay, retryMaxDelay, maxRetries)
	outputFile, err := os.OpenFile(outputFilepath, os.O_CREATE|os.O_RDWR, 0644)
	if err != nil {
		return nil, err
	}
	defer outputFile.Close()

	start := time.Now()
	// probe the server (HEAD request) for range-request support and file size
	supportRanges, contentLength, err := checkRemoteFile(url, client)
	if err != nil {
		return nil, err
	}
	if supportRanges && parallelism > 1 {
		err = downloadWithMultipleConnections(url, outputFile, contentLength, int64(parallelism), client)
	} else {
		err = downloadWithSingleConnection(url, outputFile, client)
	}
	if err != nil {
		return nil, err
	}
	// sanity check: downloaded size must match the advertised Content-Length
	err = assertFileValid(outputFile, contentLength)
	if err != nil {
		return nil, err
	}
	timeSpent := time.Since(start)
	return &timeSpent, err
}
// checkRemoteFile issues a HEAD request to determine whether the server supports
// HTTP Range Requests and what the size of the remote file is.
func checkRemoteFile(url url.URL, client *http.Client) (supportRanges bool, contentLength int64, err error) {
	res, err := client.Head(url.String())
	if err != nil {
		return false, 0, err
	}
	defer res.Body.Close()
	contentLength = res.ContentLength
	// Content-Length is -1 when unknown; a ranged (multi-connection) download
	// requires a known, positive size, hence "> 0" rather than "!= 0".
	supportRanges = res.Header.Get(HeaderAcceptRanges) == "bytes" && contentLength > 0
	return supportRanges, contentLength, nil
}
// downloadWithSingleConnection streams the whole remote file to the output file
// over one plain GET request (fallback when range requests aren't supported).
func downloadWithSingleConnection(url url.URL, outputFile *os.File, client *http.Client) error {
	res, err := client.Get(url.String())
	if err != nil {
		return err
	}
	defer res.Body.Close()
	// copy with an explicit (large) buffer to limit the number of write syscalls
	copyBuf := make([]byte, bufferSize)
	if _, err = io.CopyBuffer(outputFile, res.Body, copyBuf); err != nil {
		return err
	}
	return nil
}
func downloadWithMultipleConnections(url url.URL, outputFile *os.File, contentLength int64, parallelism int64, client *http.Client) error {
parts := make([]Part, parallelism)
partSize := contentLength / parallelism
remainder := contentLength % parallelism
wg, _ := errgroup.WithContext(context.Background())
for i, part := range parts {
start := int64(i) * partSize
end := start + partSize
if remainder != 0 && i == len(parts)-1 {
end += remainder
}
part = Part{start, end, partSize}
wg.Go(func() error {
return downloadPart(client, url, outputFile.Name(), part)
})
}
return wg.Wait()
}
// downloadPart downloads a single byte range of the remote file and writes it at
// the matching offset of the output file. The output file is (re)opened per part
// so every goroutine has its own file handle and seek position.
func downloadPart(client *http.Client, url url.URL, outputFilepath string, part Part) error {
	outputFile, err := os.OpenFile(outputFilepath, os.O_RDWR, 0664)
	if err != nil {
		return err
	}
	defer outputFile.Close()
	// position writes at the start offset of this part
	_, err = outputFile.Seek(part.Start, 0)
	if err != nil {
		return err
	}
	req, err := http.NewRequest(http.MethodGet, url.String(), nil)
	if err != nil {
		return err
	}
	// the Range header is inclusive on both ends, hence End-1
	req.Header.Set(HeaderRange, fmt.Sprintf("bytes=%d-%d", part.Start, part.End-1))
	res, err := client.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()
	// anything other than 206 Partial Content means the server didn't honor the range request
	if res.StatusCode != http.StatusPartialContent {
		return fmt.Errorf("server advertises HTTP Range Request support "+
			"but doesn't return status %d", http.StatusPartialContent)
	}
	buf := make([]byte, bufferSize)
	_, err = io.CopyBuffer(outputFile, res.Body, buf)
	return err
}
func assertFileValid(outputFile *os.File, contentLength int64) error {
fi, err := outputFile.Stat()
if err != nil {
return err
}
if fi.Size() != contentLength {
return fmt.Errorf("invalid file, content-length %d and file size %d mismatch", contentLength, fi.Size())
}
return nil
}
// createHTTPClient returns an HTTP client with the given timeout whose transport
// retries failed requests with exponential backoff (via failsafe-go), and which
// can optionally skip TLS certificate verification (for self-signed certs).
func createHTTPClient(tlsSkipVerify bool, timeout time.Duration, retryDelay time.Duration,
	retryMaxDelay time.Duration, maxRetries int) *http.Client {

	transport := &http.Transport{
		TLSClientConfig: &tls.Config{
			InsecureSkipVerify: tlsSkipVerify, //nolint:gosec // on purpose, default is false
		},
	}
	//nolint:bodyclose // false positive
	retryPolicy := failsafehttp.RetryPolicyBuilder().
		WithBackoff(retryDelay, retryMaxDelay). //nolint:bodyclose // false positive
		WithMaxRetries(maxRetries).             //nolint:bodyclose // false positive
		Build()                                 //nolint:bodyclose // false positive
	return &http.Client{
		Timeout:   timeout,
		Transport: failsafehttp.NewRoundTripper(transport, retryPolicy),
	}
}
package engine
import (
"bytes"
"compress/gzip"
"context"
"errors"
"fmt"
htmltemplate "html/template"
"io"
"log"
"net/http"
"net/http/httputil"
"net/url"
"os"
"os/signal"
"syscall"
texttemplate "text/template"
"time"
"github.com/PDOK/gokoala/config"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
)
// Template location, shutdown timeout and HTTP header names used by the engine.
const (
	templatesDir    = "internal/engine/templates/"
	shutdownTimeout = 5 * time.Second // max time to wait for in-flight requests during graceful shutdown

	HeaderLink            = "Link"
	HeaderAccept          = "Accept"
	HeaderAcceptLanguage  = "Accept-Language"
	HeaderAcceptRanges    = "Accept-Ranges"
	HeaderRange           = "Range"
	HeaderContentType     = "Content-Type"
	HeaderContentLength   = "Content-Length"
	HeaderContentCrs      = "Content-Crs"
	HeaderContentEncoding = "Content-Encoding"
	HeaderBaseURL         = "X-BaseUrl"
	HeaderRequestedWith   = "X-Requested-With"
	HeaderAPIVersion      = "API-Version"
)
// Engine encapsulates shared non-OGC API specific logic
type Engine struct {
	Config    *config.Config      // parsed YAML configuration
	OpenAPI   *OpenAPI            // combined OpenAPI spec used for request/response validation
	Templates *Templates          // parsed + rendered page templates
	CN        *ContentNegotiation // format/language negotiation
	Router    *chi.Mux            // main HTTP router

	// funcs executed (in registration order) during graceful shutdown
	shutdownHooks []func()
}
// NewEngine builds a new Engine from the given YAML configuration file.
func NewEngine(configFile string, openAPIFile string, enableTrailingSlash bool, enableCORS bool) (*Engine, error) {
	cfg, err := config.NewConfig(configFile)
	if err != nil {
		return nil, err
	}
	engine := NewEngineWithConfig(cfg, openAPIFile, enableTrailingSlash, enableCORS)
	return engine, nil
}
// NewEngineWithConfig builds a new Engine from an already-loaded configuration.
func NewEngineWithConfig(config *config.Config, openAPIFile string, enableTrailingSlash bool, enableCORS bool) *Engine {
	// initialize components in the same order as before (they may have side effects)
	cn := newContentNegotiation(config.AvailableLanguages)
	templates := newTemplates(config)
	openAPI := newOpenAPI(config, []string{openAPIFile}, nil)
	router := newRouter(config.Version, enableTrailingSlash, enableCORS)

	engine := &Engine{
		Config:    config,
		OpenAPI:   openAPI,
		Templates: templates,
		CN:        cn,
		Router:    router,
	}

	// Resources endpoint to serve static assets
	if config.Resources != nil {
		newResourcesEndpoint(engine)
	}
	// Health endpoint
	router.Get("/health", func(w http.ResponseWriter, _ *http.Request) {
		SafeWrite(w.Write, []byte("OK"))
	})
	return engine
}
// Start the engine by initializing all components and starting the server.
// The main server blocks until shutdown; the optional debug server (pprof)
// runs in a background goroutine bound to localhost only.
func (e *Engine) Start(address string, debugPort int, shutdownDelay int) error {
	// debug server (binds to localhost).
	if debugPort > 0 {
		go func() {
			debugAddress := fmt.Sprintf("localhost:%d", debugPort)
			debugRouter := chi.NewRouter()
			debugRouter.Use(middleware.Logger)
			debugRouter.Mount("/debug", middleware.Profiler())
			// no shutdown delay (0) for the debug server
			err := e.startServer("debug server", debugAddress, 0, debugRouter)
			if err != nil {
				log.Fatalf("debug server failed %v", err)
			}
		}()
	}
	// main server
	return e.startServer("main server", address, shutdownDelay, e.Router)
}
// startServer creates and starts an HTTP server, also takes care of graceful shutdown.
// Blocks until an interrupt/terminate signal arrives, then runs shutdown hooks,
// optionally waits shutdownDelay seconds and gracefully shuts the server down.
func (e *Engine) startServer(name string, address string, shutdownDelay int, router *chi.Mux) error {
	// create HTTP server
	server := http.Server{
		Addr:              address,
		Handler:           router,
		ReadTimeout:       15 * time.Second,
		ReadHeaderTimeout: 15 * time.Second,
	}

	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM, syscall.SIGQUIT)
	defer stop()

	go func() {
		// fixed format verb: was "%2s" (width-2 string), "%s" intended
		log.Printf("%s listening on http://%s", name, address)
		// ListenAndServe always returns a non-nil error. After Shutdown or
		// Close, the returned error is ErrServerClosed
		if err := server.ListenAndServe(); err != nil && !errors.Is(err, http.ErrServerClosed) {
			log.Fatalf("failed to shutdown %s: %v", name, err)
		}
	}()

	// listen for interrupt signal and then perform shutdown
	<-ctx.Done()
	stop()

	// execute shutdown hooks
	for _, shutdownHook := range e.shutdownHooks {
		shutdownHook()
	}
	if shutdownDelay > 0 {
		log.Printf("stop signal received, initiating shutdown of %s after %d seconds delay", name, shutdownDelay)
		time.Sleep(time.Duration(shutdownDelay) * time.Second)
	}
	log.Printf("shutting down %s gracefully", name)

	// shutdown with a max timeout.
	timeoutCtx, cancel := context.WithTimeout(context.Background(), shutdownTimeout)
	defer cancel()
	return server.Shutdown(timeoutCtx)
}
// RegisterShutdownHook register a func to execute during graceful shutdown, e.g. to clean up resources.
// Hooks run in registration order, before the HTTP server itself is shut down.
func (e *Engine) RegisterShutdownHook(fn func()) {
	e.shutdownHooks = append(e.shutdownHooks, fn)
}
// RebuildOpenAPI rebuild the full OpenAPI spec with the newly given parameters.
// Use only once during bootstrap for specific use cases! For example: when you want to expand a
// specific part of the OpenAPI spec with data outside the configuration file (e.g. from a database).
func (e *Engine) RebuildOpenAPI(openAPIParams any) {
	// reuse the extra OpenAPI files of the current spec, only the params change
	e.OpenAPI = newOpenAPI(e.Config, e.OpenAPI.extraOpenAPIFiles, openAPIParams)
}
// ParseTemplate parses both HTML and non-HTML templates depending on the format given in the TemplateKey and
// stores it in the engine for future rendering using RenderAndServePage.
func (e *Engine) ParseTemplate(key TemplateKey) {
	e.Templates.parseAndSaveTemplate(key)
}
// RenderTemplates renders both HTML and non-HTML templates depending on the format given in the TemplateKey.
// This method also performs OpenAPI validation of the rendered template, therefore we also need the URL path.
// The rendered templates are stored in the engine for future serving using ServePage.
// Any validation failure is fatal (this runs during startup, before serving traffic).
func (e *Engine) RenderTemplates(urlPath string, breadcrumbs []Breadcrumb, keys ...TemplateKey) {
	for _, key := range keys {
		e.Templates.renderAndSaveTemplate(key, breadcrumbs, nil)

		// we already perform OpenAPI validation here during startup to catch
		// issues early on, in addition to runtime OpenAPI response validation
		// all templates are created in all available languages, hence all are checked
		for lang := range e.Templates.localizers {
			key.Language = lang
			if err := e.validateStaticResponse(key, urlPath); err != nil {
				log.Fatal(err)
			}
		}
	}
}
// RenderTemplatesWithParams renders both HTML and non-HTML templates depending on the format given in the TemplateKey.
// This method does not perform OpenAPI validation of the rendered template (will be done during runtime).
func (e *Engine) RenderTemplatesWithParams(params any, breadcrumbs []Breadcrumb, keys ...TemplateKey) {
	for _, key := range keys {
		e.Templates.renderAndSaveTemplate(key, breadcrumbs, params)
	}
}
// RenderAndServePage renders an already parsed HTML or non-HTML template and renders it on-the-fly depending
// on the format in the given TemplateKey. The result isn't stored in the engine, it's served directly to the client.
//
// NOTE: only use this for dynamic pages that can't be pre-rendered and cached (e.g. with data from a backing store).
func (e *Engine) RenderAndServePage(w http.ResponseWriter, r *http.Request, key TemplateKey,
	params any, breadcrumbs []Breadcrumb) {

	// validate request
	if err := e.OpenAPI.ValidateRequest(r); err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemBadRequest, w, err.Error())
		return
	}

	// get template
	parsedTemplate, err := e.Templates.getParsedTemplate(key)
	if err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemServerError, w)
		return // FIX: was missing; falling through panicked on the nil template's type assertion below
	}

	// render output
	var output []byte
	if key.Format == FormatHTML {
		htmlTmpl := parsedTemplate.(*htmltemplate.Template)
		output = e.Templates.renderHTMLTemplate(htmlTmpl, r.URL, params, breadcrumbs, "")
	} else {
		jsonTmpl := parsedTemplate.(*texttemplate.Template)
		output = e.Templates.renderNonHTMLTemplate(jsonTmpl, params, key, "")
	}
	contentType := e.CN.formatToMediaType(key.Format)

	// validate response
	if err := e.OpenAPI.ValidateResponse(contentType, output, r); err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemServerError, w, err.Error())
		return
	}

	// return response output to client
	if contentType != "" {
		w.Header().Set(HeaderContentType, contentType)
	}
	SafeWrite(w.Write, output)
}
// ServePage serves a pre-rendered template while also validating both the
// incoming request and outgoing response against the OpenAPI spec.
func (e *Engine) ServePage(w http.ResponseWriter, r *http.Request, templateKey TemplateKey) {
	// request must conform to the OpenAPI spec
	if err := e.OpenAPI.ValidateRequest(r); err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemBadRequest, w, err.Error())
		return
	}

	// look up the pre-rendered output; absence maps to a 404 problem
	output, err := e.Templates.getRenderedTemplate(templateKey)
	if err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemNotFound, w)
		return
	}

	// response must also conform to the OpenAPI spec
	contentType := e.CN.formatToMediaType(templateKey.Format)
	if err = e.OpenAPI.ValidateResponse(contentType, output, r); err != nil {
		log.Printf("%v", err.Error())
		RenderProblem(ProblemServerError, w, err.Error())
		return
	}

	// hand the pre-rendered bytes to the client
	if contentType != "" {
		w.Header().Set(HeaderContentType, contentType)
	}
	SafeWrite(w.Write, output)
}
// ServeResponse serves the given response (arbitrary bytes) while optionally
// validating request and/or response against the OpenAPI spec.
func (e *Engine) ServeResponse(w http.ResponseWriter, r *http.Request,
	validateRequest bool, validateResponse bool, contentType string, response []byte) {

	if validateRequest {
		err := e.OpenAPI.ValidateRequest(r)
		if err != nil {
			log.Printf("%v", err.Error())
			RenderProblem(ProblemBadRequest, w, err.Error())
			return
		}
	}
	if validateResponse {
		err := e.OpenAPI.ValidateResponse(contentType, response, r)
		if err != nil {
			log.Printf("%v", err.Error())
			RenderProblem(ProblemServerError, w, err.Error())
			return
		}
	}

	// hand the response bytes to the client
	if contentType != "" {
		w.Header().Set(HeaderContentType, contentType)
	}
	SafeWrite(w.Write, response)
}
// ReverseProxy forwards given HTTP request to given target server, and optionally tweaks response.
//
//   - prefer204: rewrite upstream 404s to 204 No Content (OGC tiles convention, see ReverseProxyAndValidate)
//   - contentTypeOverwrite: when non-empty, forces the Content-Type header on the proxied response
//
// Response validation against the OpenAPI spec is skipped; use ReverseProxyAndValidate for that.
func (e *Engine) ReverseProxy(w http.ResponseWriter, r *http.Request, target *url.URL,
	prefer204 bool, contentTypeOverwrite string) {
	e.ReverseProxyAndValidate(w, r, target, prefer204, contentTypeOverwrite, false)
}
// ReverseProxyAndValidate forwards given HTTP request to given target server, and optionally tweaks and validates response
func (e *Engine) ReverseProxyAndValidate(w http.ResponseWriter, r *http.Request, target *url.URL,
	prefer204 bool, contentTypeOverwrite string, validateResponse bool) {
	// redirect the outgoing request to the target server
	rewrite := func(r *httputil.ProxyRequest) {
		r.Out.URL = target
		r.Out.Host = "" // Don't pass Host header (similar to Traefik's passHostHeader=false)
		r.SetXForwarded() // Set X-Forwarded-* headers.
		r.Out.Header.Set(HeaderBaseURL, e.Config.BaseURL.String())
	}
	// proxy failures surface as 502 Bad Gateway problems
	errorHandler := func(w http.ResponseWriter, _ *http.Request, err error) {
		log.Printf("failed to proxy request: %v", err)
		RenderProblem(ProblemBadGateway, w)
	}
	modifyResponse := func(proxyRes *http.Response) error {
		if prefer204 {
			// OGC spec: If the tile has no content due to lack of data in the area, but is within the data
			// resource its tile matrix sets and tile matrix sets limits, the HTTP response will use the status
			// code either 204 (indicating an empty tile with no content) or a 200
			if proxyRes.StatusCode == http.StatusNotFound {
				proxyRes.StatusCode = http.StatusNoContent
				removeBody(proxyRes)
			}
		}
		if contentTypeOverwrite != "" {
			proxyRes.Header.Set(HeaderContentType, contentTypeOverwrite)
		}
		// only JSON responses are validated; gzip-encoded bodies are decompressed first
		if contentType := proxyRes.Header.Get(HeaderContentType); contentType == MediaTypeJSON && validateResponse {
			var reader io.ReadCloser
			var err error
			if proxyRes.Header.Get(HeaderContentEncoding) == FormatGzip {
				reader, err = gzip.NewReader(proxyRes.Body)
				if err != nil {
					return err
				}
			} else {
				reader = proxyRes.Body
			}
			res, err := io.ReadAll(reader)
			if err != nil {
				return err
			}
			// NOTE(review): proxyRes.Body is fully consumed here and not restored;
			// ServeResponse writes the validated bytes itself instead. Confirm the
			// reverse proxy doesn't also attempt to copy the (now-drained) body.
			e.ServeResponse(w, r, false, true, contentType, res)
		}
		return nil
	}
	reverseProxy := &httputil.ReverseProxy{
		Rewrite: rewrite,
		ModifyResponse: modifyResponse,
		ErrorHandler: errorHandler,
	}
	reverseProxy.ServeHTTP(w, r)
}
// removeBody replaces the body of the given (proxied) response with an empty one
// and updates the entity headers accordingly.
func removeBody(proxyRes *http.Response) {
	proxyRes.Body = io.NopCloser(bytes.NewReader(nil))
	proxyRes.Header[HeaderContentLength] = []string{"0"}
	proxyRes.Header[HeaderContentType] = []string{}
}
// validateStaticResponse checks that the pre-rendered template identified by key,
// as served at urlPath, conforms to the OpenAPI spec. Returns a descriptive error
// when the template is missing or fails validation.
func (e *Engine) validateStaticResponse(key TemplateKey, urlPath string) error {
	// previously the error was silently ignored, validating a nil body instead
	template, err := e.Templates.getRenderedTemplate(key)
	if err != nil {
		return fmt.Errorf("no rendered template %s to validate against OpenAPI spec: %w", key.Name, err)
	}
	// build a fake GET request matching how the template is served
	serverURL := normalizeBaseURL(e.Config.BaseURL.String())
	req, err := http.NewRequest(http.MethodGet, serverURL+urlPath, nil)
	if err != nil {
		return fmt.Errorf("failed to construct request to validate %s "+
			"template against OpenAPI spec: %w", key.Name, err)
	}
	err = e.OpenAPI.ValidateResponse(e.CN.formatToMediaType(key.Format), template, req)
	if err != nil {
		return fmt.Errorf("validation of template %s failed: %w", key.Name, err)
	}
	return nil
}
// SafeWrite executes the given http.ResponseWriter.Write while logging errors
func SafeWrite(write func([]byte) (int, error), body []byte) {
_, err := write(body)
if err != nil {
log.Printf("failed to write response: %v", err)
}
}
package engine
import (
"github.com/BurntSushi/toml"
"github.com/PDOK/gokoala/config"
"github.com/nicksnyder/go-i18n/v2/i18n"
"golang.org/x/text/language"
)
// newLocalizers creates one localizer per configured language, each backed by
// the matching TOML message file under assets/i18n.
func newLocalizers(availableLanguages []config.Language) map[language.Tag]i18n.Localizer {
	result := make(map[language.Tag]i18n.Localizer, len(availableLanguages))
	for _, lang := range availableLanguages {
		bundle := i18n.NewBundle(lang.Tag)
		bundle.RegisterUnmarshalFunc("toml", toml.Unmarshal)
		bundle.MustLoadMessageFile("assets/i18n/active." + lang.String() + ".toml")
		result[lang.Tag] = *i18n.NewLocalizer(bundle, lang.String())
	}
	return result
}
package engine
import (
"bytes"
"context"
"encoding/json"
"errors"
"fmt"
"io"
"log"
"net/http"
"net/url"
"path/filepath"
"regexp"
"strings"
texttemplate "text/template"
gokoalaconfig "github.com/PDOK/gokoala/config"
orderedmap "github.com/wk8/go-ordered-map/v2"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/getkin/kin-openapi/openapi3"
"github.com/getkin/kin-openapi/openapi3filter"
"github.com/getkin/kin-openapi/routers"
"github.com/getkin/kin-openapi/routers/gorillamux"
)
const (
	// specPath is the on-disk location of the OpenAPI spec (partial) templates
	specPath = templatesDir + "openapi/"
	// individual spec templates, merged into one spec at startup (see mergeSpecs)
	preamble = specPath + "preamble.go.json"
	problems = specPath + "problems.go.json"
	commonCollections = specPath + "common-collections.go.json"
	featuresSpec = specPath + "features.go.json"
	tilesSpec = specPath + "tiles.go.json"
	stylesSpec = specPath + "styles.go.json"
	geoVolumesSpec = specPath + "3dgeovolumes.go.json"
	commonSpec = specPath + "common.go.json"
	// HTMLRegex loosely matches an HTML tag, used to check that responses contain HTML
	HTMLRegex = `<[/]?([a-zA-Z]+).*?>`
)
// OpenAPI holds the merged OpenAPI specification of this server and the
// machinery to validate requests/responses against it.
type OpenAPI struct {
	// parsed form of the merged spec
	spec *openapi3.T
	// SpecJSON is the pretty-printed JSON form of the merged spec, served to clients
	SpecJSON []byte
	config *gokoalaconfig.Config
	// router matches incoming requests to operations in the spec
	router routers.Router
	// user-provided spec files that may override the default specs
	extraOpenAPIFiles []string
}
// newOpenAPI builds the OpenAPI document for this server: it selects the spec
// templates matching the enabled OGC building blocks, merges them with any
// user-provided extra spec files, validates the result and prepares a router
// for request/response validation.
func newOpenAPI(config *gokoalaconfig.Config, extraOpenAPIFiles []string, openAPIParams any) *OpenAPI {
	setupRequestResponseValidation()
	ctx := context.Background()
	// order matters, see mergeSpecs for details.
	defaultOpenAPIFiles := []string{commonSpec}
	if config.AllCollections() != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, commonCollections)
	}
	if config.OgcAPI.Tiles != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, tilesSpec)
	}
	if config.OgcAPI.Features != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, featuresSpec)
	}
	if config.OgcAPI.Styles != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, stylesSpec)
	}
	if config.OgcAPI.GeoVolumes != nil {
		defaultOpenAPIFiles = append(defaultOpenAPIFiles, geoVolumesSpec)
	}
	// add preamble first
	openAPIFiles := []string{preamble}
	// add extra spec(s) thereafter, to allow it to override default openapi specs
	openAPIFiles = append(openAPIFiles, extraOpenAPIFiles...)
	openAPIFiles = append(openAPIFiles, defaultOpenAPIFiles...)
	resultSpec, resultSpecJSON := mergeSpecs(ctx, config, openAPIFiles, openAPIParams)
	validateSpec(ctx, resultSpec, resultSpecJSON)
	// normalize server URLs so request matching during validation works (see normalizeBaseURL)
	for _, server := range resultSpec.Servers {
		server.URL = normalizeBaseURL(server.URL)
	}
	return &OpenAPI{
		config: config,
		spec: resultSpec,
		SpecJSON: util.PrettyPrintJSON(resultSpecJSON, ""),
		router: newOpenAPIRouter(resultSpec),
		extraOpenAPIFiles: extraOpenAPIFiles,
	}
}
// setupRequestResponseValidation registers body decoders with the OpenAPI
// validation library, so HTML and JSON-family response bodies can be checked.
func setupRequestResponseValidation() {
	htmlRegex := regexp.MustCompile(HTMLRegex)
	// HTML bodies: accept anything that at least contains an HTML tag
	htmlDecoder := func(body io.Reader, _ http.Header, _ *openapi3.SchemaRef,
		_ openapi3filter.EncodingFn) (any, error) {
		data, err := io.ReadAll(body)
		if err != nil {
			return nil, errors.New("failed to read response body")
		}
		if !htmlRegex.Match(data) {
			return nil, errors.New("response doesn't contain HTML")
		}
		return string(data), nil
	}
	openapi3filter.RegisterBodyDecoder(MediaTypeHTML, htmlDecoder)

	// JSON-family bodies: must decode as valid JSON (numbers kept verbatim)
	jsonDecoder := func(body io.Reader, _ http.Header, _ *openapi3.SchemaRef,
		_ openapi3filter.EncodingFn) (any, error) {
		var value any
		dec := json.NewDecoder(body)
		dec.UseNumber()
		if err := dec.Decode(&value); err != nil {
			return nil, errors.New("response doesn't contain valid JSON")
		}
		return value, nil
	}
	for _, mediaType := range MediaTypeJSONFamily {
		openapi3filter.RegisterBodyDecoder(mediaType, jsonDecoder)
	}
}
// mergeSpecs merges the given OpenAPI specs.
//
// Order matters! We start with the preamble, it is highest in rank and there's no way to override it.
// Then the files are merged according to their given order. Files that are merged first
// have a higher chance of getting their changes in the final spec than files that follow later.
//
// The OpenAPI spec optionally provided through the CLI should be the second (after preamble) item in the
// `files` slice since it allows the user to override other/default specs.
func mergeSpecs(ctx context.Context, config *gokoalaconfig.Config, files []string, params any) (*openapi3.T, []byte) {
	loader := &openapi3.Loader{Context: ctx, IsExternalRefsAllowed: false}
	if len(files) < 1 {
		log.Fatalf("files can't be empty, at least OGC Common is expected")
	}
	var resultSpecJSON []byte
	var resultSpec *openapi3.T
	for _, file := range files {
		// empty entries (e.g. no extra spec supplied) are skipped
		if file == "" {
			continue
		}
		specJSON := renderOpenAPITemplate(config, file, params)
		var mergedJSON []byte
		if resultSpecJSON == nil {
			// first file: nothing to merge with yet
			mergedJSON = specJSON
		} else {
			var err error
			mergedJSON, err = util.MergeJSON(resultSpecJSON, specJSON, orderByOpenAPIConvention)
			if err != nil {
				log.Print(string(mergedJSON))
				log.Fatalf("failed to merge OpenAPI specs: %v", err)
			}
		}
		resultSpecJSON = mergedJSON
		// re-load after every merge so we fail fast on a broken intermediate spec
		resultSpec = loadSpec(loader, mergedJSON)
	}
	return resultSpec, resultSpecJSON
}
// orderByOpenAPIConvention orders the top-level keys of the given (OpenAPI)
// document by common convention: "openapi", "info", "servers", "paths" and
// "components" first, followed by any remaining keys.
func orderByOpenAPIConvention(output map[string]any) any {
	result := orderedmap.New[string, any]()
	// OpenAPI specs are commonly ordered according to the following sequence.
	desiredOrder := []string{"openapi", "info", "servers", "paths", "components"}
	for _, key := range desiredOrder {
		// direct lookup instead of scanning the whole map per desired key
		if value, ok := output[key]; ok {
			result.Set(key, value)
		}
	}
	// add remaining keys (in whatever order the map yields them)
	for k, v := range output {
		if _, ok := result.Get(k); !ok {
			result.Set(k, v)
		}
	}
	return result
}
// loadSpec parses the given (merged) JSON into an OpenAPI document, aborting
// the program (after logging the offending JSON) when it can't be loaded.
// The optional fileName is only used to enrich the error message.
func loadSpec(loader *openapi3.Loader, mergedJSON []byte, fileName ...string) *openapi3.T {
	resultSpec, err := loader.LoadFromData(mergedJSON)
	if err != nil {
		log.Print(string(mergedJSON))
		log.Fatalf("failed to load merged OpenAPI spec %s, due to %v", fileName, err)
	}
	return resultSpec
}
// validateSpec ensures the final merged OpenAPI spec is valid, aborting the
// program (after logging the raw spec) otherwise.
func validateSpec(ctx context.Context, finalSpec *openapi3.T, finalSpecRaw []byte) {
	// Validate OGC OpenAPI spec. Note: the examples provided in the official spec aren't valid.
	if err := finalSpec.Validate(ctx, openapi3.DisableExamplesValidation()); err != nil {
		log.Print(string(finalSpecRaw))
		log.Fatalf("invalid OpenAPI spec: %v", err)
	}
}
// newOpenAPIRouter creates a router that matches HTTP requests against the
// operations in the given spec; used for request/response validation.
// Aborts the program when the router can't be constructed.
func newOpenAPIRouter(doc *openapi3.T) routers.Router {
	openAPIRouter, err := gorillamux.NewRouter(doc)
	if err != nil {
		log.Fatalf("failed to setup OpenAPI router: %v", err)
	}
	return openAPIRouter
}
// renderOpenAPITemplate executes the given OpenAPI spec template with the
// config and params as data, returning the rendered JSON bytes. Aborts the
// program when rendering fails.
func renderOpenAPITemplate(config *gokoalaconfig.Config, fileName string, params any) []byte {
	file := filepath.Clean(fileName)
	// the problems template is an "include" template, so always parse it alongside the given file
	parsed := texttemplate.Must(
		texttemplate.New(filepath.Base(file)).
			Funcs(globalTemplateFuncs).
			ParseFiles(problems, file))
	var rendered bytes.Buffer
	if err := parsed.Execute(&rendered, &TemplateData{Config: config, Params: params}); err != nil {
		log.Fatalf("failed to render %s, error: %v", file, err)
	}
	return rendered.Bytes()
}
// ValidateRequest validates the given request against the OpenAPI spec.
// Returns nil when the request conforms, or when no matching route exists
// in the spec (validation is then skipped).
func (o *OpenAPI) ValidateRequest(r *http.Request) error {
	input, _ := o.getRequestValidationInput(r)
	if input == nil {
		return nil
	}
	err := openapi3filter.ValidateRequest(context.Background(), input)
	if err == nil {
		return nil
	}
	// Don't fail on maximum constraints because OGC has decided these are soft limits, for instance
	// in features: "If the value of the limit parameter is larger than the maximum value, this
	// SHALL NOT result in an error (instead use the maximum as the parameter value)."
	var schemaErr *openapi3.SchemaError
	if errors.As(err, &schemaErr) && schemaErr.SchemaField == "maximum" {
		return nil
	}
	return fmt.Errorf("request doesn't conform to OpenAPI spec: %w", err)
}
// ValidateResponse validates the given response body (assumed to be a 200 with
// the given content type) against the OpenAPI spec. Returns nil when the
// response conforms, or when the request matches no route in the spec.
func (o *OpenAPI) ValidateResponse(contentType string, body []byte, r *http.Request) error {
	input, _ := o.getRequestValidationInput(r)
	if input == nil {
		return nil
	}
	responseInput := &openapi3filter.ResponseValidationInput{
		RequestValidationInput: input,
		Status:                 200,
		Header:                 http.Header{HeaderContentType: []string{contentType}},
	}
	responseInput.SetBodyBytes(body)
	if err := openapi3filter.ValidateResponse(context.Background(), responseInput); err != nil {
		return fmt.Errorf("response doesn't conform to OpenAPI spec: %w", err)
	}
	return nil
}
// getRequestValidationInput matches the given request to a route in the
// OpenAPI spec and prepares validation input. Returns a nil input (plus the
// lookup error) when no matching route is found; callers then skip validation.
func (o *OpenAPI) getRequestValidationInput(r *http.Request) (*openapi3filter.RequestValidationInput, error) {
	route, pathParams, err := o.router.FindRoute(r)
	if err != nil {
		log.Printf("route not found in OpenAPI spec for url %s (host: %s), "+
			"skipping OpenAPI validation", r.URL, r.Host)
		return nil, err
	}
	opts := &openapi3filter.Options{SkipSettingDefaults: true}
	// report only the reason, keeps problem details compact
	opts.WithCustomSchemaErrorFunc(func(schemaErr *openapi3.SchemaError) string {
		return schemaErr.Reason
	})
	return &openapi3filter.RequestValidationInput{
		Request:    r,
		PathParams: pathParams,
		Route:      route,
		Options:    opts,
	}, nil
}
// normalizeBaseURL normalizes the given base URL so our OpenAPI validator is able to match
// requests against the OpenAPI spec. This involves:
//
//   - stripping the context root (path) from the base URL. If you use a context root we expect
//     you to have a proxy fronting GoKoala, therefore we also need to strip it from the base
//     URL used during OpenAPI validation
//
//   - replacing HTTPS scheme with HTTP. Since GoKoala doesn't support HTTPS we always perform
//     OpenAPI validation against HTTP requests. Note: it's possible to offer GoKoala over HTTPS, but you'll
//     need to take care of that in your proxy server (or loadbalancer/service mesh/etc) fronting GoKoala.
//
// An unparsable base URL is returned unchanged.
func normalizeBaseURL(baseURL string) string {
	serverURL, err := url.Parse(baseURL)
	if err != nil {
		// previously a parse failure caused a nil dereference; now we leave the URL as-is
		return baseURL
	}
	// rebuild the URL instead of naive string replacement, which broke on inputs
	// like "http://host/" (stripping the first "/" from "://")
	serverURL.Scheme = "http"
	serverURL.Path = ""
	return serverURL.String()
}
package engine
import (
"log"
"net/http"
"time"
"schneider.vip/problem"
)
const (
	// timestampKey is the name of the custom timestamp member added to each problem
	timestampKey = "timestamp"
	// generic messages, used instead of caller-supplied details so internals aren't leaked to clients
	defaultMessageServerErr = "An unexpected error has occurred, try again or contact support if the problem persists"
	defaultMessageBadGateway = "Failed to proxy request, try again or contact support if the problem persists"
)
// ProblemKind is the HTTP status code of a supported RFC 7807 problem.
type ProblemKind int
var Now = time.Now // allow mocking
// The following problems should be added to openapi/problems.go.json
var (
	ProblemBadRequest = ProblemKind(http.StatusBadRequest)
	ProblemNotFound = ProblemKind(http.StatusNotFound)
	ProblemNotAcceptable = ProblemKind(http.StatusNotAcceptable)
	ProblemServerError = ProblemKind(http.StatusInternalServerError)
	ProblemBadGateway = ProblemKind(http.StatusBadGateway)
)
// RenderProblem writes RFC 7807 (https://tools.ietf.org/html/rfc7807) problem to client.
// Only the listed problem kinds are supported since they should be advertised in the OpenAPI spec.
// Optionally a caller may add a details (single string) about the problem. Warning: Be sure to not
// include sensitive information in the details string!
func RenderProblem(kind ProblemKind, w http.ResponseWriter, details ...string) {
	p := problem.Of(int(kind))
	switch {
	case kind == ProblemServerError:
		// always use the generic message, never caller details, to avoid leaking internals
		p = p.Append(problem.Detail(defaultMessageServerErr))
	case kind == ProblemBadGateway:
		p = p.Append(problem.Detail(defaultMessageBadGateway))
	case len(details) > 0:
		p = p.Append(problem.Detail(details[0]))
	}
	p = p.Append(problem.Custom(timestampKey, Now().UTC().Format(time.RFC3339)))
	if _, err := p.WriteTo(w); err != nil {
		log.Printf("failed to write response: %v", err)
	}
}
package engine
import (
"log"
"net/http"
"net/url"
"strings"
"github.com/go-chi/chi/v5"
)
// newResourcesEndpoint serves static assets under /resources, either from a
// local directory or by reverse proxying to a remote URL (directory wins when
// both are configured).
func newResourcesEndpoint(e *Engine) {
	var resourcesDir, resourcesURL string
	if e.Config.Resources.Directory != nil {
		resourcesDir = *e.Config.Resources.Directory
	}
	if e.Config.Resources.URL != nil {
		resourcesURL = e.Config.Resources.URL.String()
	}
	switch {
	case resourcesDir != "":
		resourcesPath := strings.TrimSuffix(resourcesDir, "/resources")
		e.Router.Handle("/resources/*", http.FileServer(http.Dir(resourcesPath)))
	case resourcesURL != "":
		e.Router.Get("/resources/*", func(w http.ResponseWriter, r *http.Request) {
			resourcePath, _ := url.JoinPath("/", chi.URLParam(r, "*"))
			target, err := url.Parse(resourcesURL + resourcePath)
			if err != nil {
				log.Printf("invalid target url, can't proxy resources: %v", err)
				RenderProblem(ProblemServerError, w)
				return
			}
			e.ReverseProxy(w, r, target, true, "")
		})
	}
}
package engine
import (
"net/http"
"runtime/debug"
"time"
"github.com/go-chi/chi/v5"
"github.com/go-chi/chi/v5/middleware"
"github.com/go-chi/cors"
)
// newRouter creates the Chi router with the middleware stack for this server.
// Note: middleware order is significant, see the inline comments.
func newRouter(version string, enableTrailingSlash bool, enableCORS bool) *chi.Mux {
	router := chi.NewRouter()
	router.Use(middleware.RealIP) // should be first middleware
	router.Use(middleware.Logger) // log to console
	router.Use(problemRecoverer) // catch panics and turn into 500s
	router.Use(middleware.GetHead) // support HEAD requests https://docs.ogc.org/is/17-069r4/17-069r4.html#_http_1_1
	if enableTrailingSlash {
		router.Use(middleware.StripSlashes)
	}
	if enableCORS {
		// CORS as required by OGC API specs: read-only methods, any origin,
		// preflight responses cacheable for 24h
		router.Use(cors.Handler(cors.Options{
			AllowedOrigins: []string{"*"},
			AllowedMethods: []string{http.MethodGet, http.MethodHead, http.MethodOptions},
			AllowedHeaders: []string{HeaderRequestedWith},
			ExposedHeaders: []string{HeaderContentCrs, HeaderLink},
			AllowCredentials: false,
			MaxAge: int((time.Hour * 24).Seconds()),
		}))
	}
	// some GIS clients don't sent proper CORS preflight requests, still respond with OK for any OPTIONS request
	router.Use(optionsFallback)
	// add semver header, implements https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-57
	router.Use(middleware.SetHeader(HeaderAPIVersion, version))
	router.Use(middleware.Compress(5, CompressibleMediaTypes...)) // enable gzip responses
	return router
}
func optionsFallback(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
if r.Method == http.MethodOptions {
w.WriteHeader(http.StatusOK)
return
}
next.ServeHTTP(w, r)
})
}
// Custom middleware.Recoverer adapted from Chi (https://github.com/go-chi/chi/blob/master/middleware/recoverer.go)
// to return RFC-7807 Problem messages.
func problemRecoverer(next http.Handler) http.Handler {
	return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		defer func() {
			if rvr := recover(); rvr != nil {
				if rvr == http.ErrAbortHandler { //nolint:errorlint // already so in Chi
					// we don't recover http.ErrAbortHandler so the response
					// to the client is aborted, this should not be logged
					panic(rvr)
				}
				// log the panic with stack trace, using the request's log entry when available
				logEntry := middleware.GetLogEntry(r)
				if logEntry != nil {
					logEntry.Panic(rvr, debug.Stack())
				} else {
					middleware.PrintPrettyStack(rvr)
				}
				// don't write a problem into an upgraded (e.g. websocket) connection
				if r.Header.Get("Connection") != "Upgrade" {
					RenderProblem(ProblemServerError, w)
				}
			}
		}()
		next.ServeHTTP(w, r)
	})
}
package engine
import (
"bytes"
"fmt"
htmltemplate "html/template"
"log"
"net/url"
"path/filepath"
"strings"
texttemplate "text/template"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine/util"
sprig "github.com/go-task/slim-sprig"
gomarkdown "github.com/gomarkdown/markdown"
gomarkdownhtml "github.com/gomarkdown/markdown/html"
gomarkdownparser "github.com/gomarkdown/markdown/parser"
"github.com/nicksnyder/go-i18n/v2/i18n"
stripmd "github.com/writeas/go-strip-markdown/v2"
"golang.org/x/text/language"
)
const (
	// layoutFile is the shared HTML layout template wrapped around each HTML page template
	layoutFile = "layout.go.html"
)
var (
	// globalTemplateFuncs holds the template functions available to all templates,
	// populated once in init below.
	globalTemplateFuncs texttemplate.FuncMap
)
// init populates globalTemplateFuncs with our custom template functions
// plus the slim-sprig function library.
func init() {
	customFuncs := texttemplate.FuncMap{
		// custom template functions
		"markdown": markdown,
		"unmarkdown": unmarkdown,
	}
	sprigFuncs := sprig.FuncMap() // we also support https://github.com/go-task/slim-sprig functions
	globalTemplateFuncs = combineFuncMaps(customFuncs, sprigFuncs)
}
// TemplateKey unique key to register and lookup Go templates.
// Used as a map key, so all fields must remain comparable.
type TemplateKey struct {
	// Name of the template, the filename including extension
	Name string
	// Directory in which the template resides
	Directory string
	// Format the file format based on the filename extension, 'html' or 'json'
	Format string
	// Language of the contents of the template
	Language language.Tag
	// Optional. Only required when you want to render the same template multiple times (with different content).
	// By specifying an 'instance name' you can refer to a certain instance of a rendered template later on.
	InstanceName string
}
// TemplateData the data/variables passed as an argument into the template.
type TemplateData struct {
	// Config set during startup based on the given config file
	Config *config.Config
	// Params optional parameters not part of GoKoala's config file. You can use
	// this to provide extra data to a template at rendering time.
	Params any
	// Breadcrumb path to the page, in key-value pairs of name->path
	Breadcrumbs []Breadcrumb
	// Request URL; nil for templates pre-rendered at startup (see renderAndSaveTemplate)
	url *url.URL
}
// AvailableFormats returns the output formats available for the current page
func (td *TemplateData) AvailableFormats() map[string]string {
	// feature pages (".../items") offer more output formats than regular pages
	if td.url == nil || !strings.Contains(td.url.Path, "/items") {
		return OutputFormatDefault
	}
	return td.AvailableFormatsFeatures()
}
// AvailableFormatsFeatures convenience function; returns the output formats
// available for feature pages.
func (td *TemplateData) AvailableFormatsFeatures() map[string]string {
	return OutputFormatFeatures
}
// QueryString returns ?foo=a&bar=b style query string of the current page,
// with the format ('f') parameter set to the given format when non-empty.
func (td *TemplateData) QueryString(format string) string {
	if td.url == nil {
		return fmt.Sprintf("?%s=%s", FormatParam, format)
	}
	query := td.url.Query()
	if format != "" {
		query.Set(FormatParam, format)
	}
	return "?" + query.Encode()
}
// Breadcrumb is a single entry in the breadcrumb trail shown on HTML pages:
// a display name plus the relative path it links to.
type Breadcrumb struct {
	Name string
	Path string
}
// NewTemplateKey build TemplateKeys; uses no instance name and the default language.
func NewTemplateKey(path string) TemplateKey {
	return NewTemplateKeyWithName(path, "")
}
// NewTemplateKeyWithLanguage builds a TemplateKey for the given template path and language.
func NewTemplateKeyWithLanguage(path string, language language.Tag) TemplateKey {
	return NewTemplateKeyWithNameAndLanguage(path, "", language)
}
// NewTemplateKeyWithName build TemplateKey with InstanceName (see docs in struct).
// The language defaults to Dutch.
func NewTemplateKeyWithName(path string, instanceName string) TemplateKey {
	return NewTemplateKeyWithNameAndLanguage(path, instanceName, language.Dutch)
}
// NewTemplateKeyWithNameAndLanguage builds a TemplateKey for the given template
// path and language, optionally with an instance name (see TemplateKey docs).
func NewTemplateKeyWithNameAndLanguage(path string, instanceName string, lang language.Tag) TemplateKey {
	// param renamed from "language" to "lang": it shadowed the language package
	cleanPath := filepath.Clean(path)
	return TemplateKey{
		Name:      filepath.Base(cleanPath),
		Directory: filepath.Dir(cleanPath),
		// derive Format from cleanPath for consistency with Name/Directory
		Format:       strings.TrimPrefix(filepath.Ext(cleanPath), "."),
		Language:     lang,
		InstanceName: instanceName,
	}
}
// ExpandTemplateKey returns a copy of the given TemplateKey with its language replaced.
func ExpandTemplateKey(key TemplateKey, lang language.Tag) TemplateKey {
	result := key
	result.Language = lang
	return result
}
// Templates is the registry of all Go templates used by this server, both in
// parsed and (where possible) pre-rendered form.
type Templates struct {
	// ParsedTemplates templates loaded from disk and parsed to an in-memory Go representation.
	ParsedTemplates map[TemplateKey]any
	// RenderedTemplates templates parsed + rendered to their actual output format like JSON, HTMl, etc.
	// We prefer pre-rendered templates whenever possible. These are stored in this map.
	RenderedTemplates map[TemplateKey][]byte
	// config passed to every template at render time
	config *config.Config
	// localizers per supported language, used by the "i18n" template func
	localizers map[language.Tag]i18n.Localizer
}
// newTemplates constructs an empty template registry for the given config.
func newTemplates(config *config.Config) *Templates {
	return &Templates{
		ParsedTemplates:   make(map[TemplateKey]any),
		RenderedTemplates: make(map[TemplateKey][]byte),
		config:            config,
		localizers:        newLocalizers(config.AvailableLanguages),
	}
}
// getParsedTemplate returns the parsed template for the given key, or an error
// when no such template was registered.
func (t *Templates) getParsedTemplate(key TemplateKey) (any, error) {
	parsed, ok := t.ParsedTemplates[key]
	if !ok {
		return nil, fmt.Errorf("no parsed template with name %s", key.Name)
	}
	return parsed, nil
}
// getRenderedTemplate returns the pre-rendered output for the given key, or an
// error when no such rendered template exists.
func (t *Templates) getRenderedTemplate(key TemplateKey) ([]byte, error) {
	// local renamed from "RenderedTemplate": Go locals should not be capitalized
	rendered, ok := t.RenderedTemplates[key]
	if !ok {
		return nil, fmt.Errorf("no rendered template with name %s", key.Name)
	}
	return rendered, nil
}
// parseAndSaveTemplate parses the given template once per configured language
// and stores each parsed representation for later (on-request) rendering.
func (t *Templates) parseAndSaveTemplate(key TemplateKey) {
	for lang := range t.localizers {
		keyWithLang := ExpandTemplateKey(key, lang)
		var parsed any
		if key.Format == FormatHTML {
			_, parsed = t.parseHTMLTemplate(keyWithLang, lang)
		} else {
			_, parsed = t.parseNonHTMLTemplate(keyWithLang, lang)
		}
		t.ParsedTemplates[keyWithLang] = parsed
	}
}
// renderAndSaveTemplate parses and renders the given template once per
// configured language and stores the rendered output per language.
func (t *Templates) renderAndSaveTemplate(key TemplateKey, breadcrumbs []Breadcrumb, params any) {
	for lang := range t.localizers {
		var output []byte
		if key.Format == FormatHTML {
			file, parsed := t.parseHTMLTemplate(key, lang)
			output = t.renderHTMLTemplate(parsed, nil, params, breadcrumbs, file)
		} else {
			file, parsed := t.parseNonHTMLTemplate(key, lang)
			output = t.renderNonHTMLTemplate(parsed, params, key, file)
		}
		// Store rendered template per language
		langKey := key
		langKey.Language = lang
		t.RenderedTemplates[langKey] = output
	}
}
// parseHTMLTemplate parses the given HTML template together with the shared
// layout, using language-specific template funcs. Returns the cleaned file
// path and the parsed template; aborts the program on parse errors (Must).
func (t *Templates) parseHTMLTemplate(key TemplateKey, lang language.Tag) (string, *htmltemplate.Template) {
	file := filepath.Clean(filepath.Join(key.Directory, key.Name))
	parsed := htmltemplate.Must(
		htmltemplate.New(layoutFile).
			Funcs(t.createTemplateFuncs(lang)).
			ParseFiles(templatesDir+layoutFile, file))
	return file, parsed
}
// renderHTMLTemplate executes the given parsed HTML template and returns the
// output bytes; aborts the program when execution fails (broken template).
func (t *Templates) renderHTMLTemplate(parsed *htmltemplate.Template, url *url.URL,
	params any, breadcrumbs []Breadcrumb, file string) []byte {
	data := &TemplateData{
		Config:      t.config,
		Params:      params,
		Breadcrumbs: breadcrumbs,
		url:         url,
	}
	var rendered bytes.Buffer
	if err := parsed.Execute(&rendered, data); err != nil {
		log.Fatalf("failed to execute HTML template %s, error: %v", file, err)
	}
	return rendered.Bytes()
}
// parseNonHTMLTemplate parses the given non-HTML (e.g. JSON) template using
// language-specific template funcs. Returns the cleaned file path and the
// parsed template; aborts the program on parse errors (Must).
func (t *Templates) parseNonHTMLTemplate(key TemplateKey, lang language.Tag) (string, *texttemplate.Template) {
	file := filepath.Clean(filepath.Join(key.Directory, key.Name))
	parsed := texttemplate.Must(
		texttemplate.New(filepath.Base(file)).
			Funcs(t.createTemplateFuncs(lang)).
			Parse(util.ReadFile(file)))
	return file, parsed
}
// renderNonHTMLTemplate executes the given parsed template and returns the
// output bytes, pretty-printing any JSON(-derived) formats; aborts the program
// when execution fails (broken template).
func (t *Templates) renderNonHTMLTemplate(parsed *texttemplate.Template, params any, key TemplateKey, file string) []byte {
	var rendered bytes.Buffer
	if err := parsed.Execute(&rendered, &TemplateData{Config: t.config, Params: params}); err != nil {
		log.Fatalf("failed to execute template %s, error: %v", file, err)
	}
	output := rendered.Bytes()
	if strings.Contains(key.Format, FormatJSON) {
		// pretty print all JSON (or derivatives like TileJSON)
		output = util.PrettyPrintJSON(output, key.Name)
	}
	return output
}
// createTemplateFuncs returns the global template funcs extended with an
// "i18n" function bound to the localizer for the given language.
func (t *Templates) createTemplateFuncs(lang language.Tag) map[string]any {
	// create func just-in-time based on TemplateKey
	i18nFunc := func(messageID string) htmltemplate.HTML {
		localizer := t.localizers[lang]
		translated := localizer.MustLocalize(&i18n.LocalizeConfig{MessageID: messageID})
		return htmltemplate.HTML(translated) //nolint:gosec // since we trust our language files
	}
	return combineFuncMaps(globalTemplateFuncs, texttemplate.FuncMap{"i18n": i18nFunc})
}
// combineFuncMaps merges the given FuncMaps into a single new map; when the
// same key appears in multiple maps, the last one wins.
func combineFuncMaps(funcMaps ...map[string]any) map[string]any {
	combined := make(map[string]any)
	for _, fm := range funcMaps {
		for name, fn := range fm {
			combined[name] = fn
		}
	}
	return combined
}
// markdown turn Markdown into HTML
func markdown(s *string) htmltemplate.HTML {
	if s == nil {
		return ""
	}
	// always normalize newlines, this library only supports Unix LF newlines
	input := gomarkdown.NormalizeNewlines([]byte(*s))
	// parse Markdown into an AST tree using the common extension set
	parser := gomarkdownparser.NewWithExtensions(gomarkdownparser.CommonExtensions)
	doc := parser.Parse(input)
	// render to HTML: open links in new tabs, skip embedded raw HTML
	renderer := gomarkdownhtml.NewRenderer(gomarkdownhtml.RendererOptions{
		Flags: gomarkdownhtml.CommonFlags | gomarkdownhtml.HrefTargetBlank | gomarkdownhtml.SkipHTML,
	})
	return htmltemplate.HTML(gomarkdown.Render(doc, renderer)) //nolint:gosec
}
// unmarkdown remove Markdown, so we can use the given string in non-HTML (JSON) output
func unmarkdown(s *string) string {
	if s == nil {
		return ""
	}
	plain := stripmd.Strip(*s)
	// also collapse newlines into spaces to keep the result single-line
	return strings.ReplaceAll(plain, "\n", " ")
}
package util
import (
"bytes"
"compress/gzip"
"errors"
"io"
"io/fs"
"log"
"os"
)
// ReadFile read a plain or gzipped file and return contents as string.
// When a sibling "<filePath>.gz" exists that gzip variant is preferred.
// Aborts the program when the file can't be read.
func ReadFile(filePath string) string {
	gzipFile := filePath + ".gz"
	if _, err := os.Stat(gzipFile); !errors.Is(err, fs.ErrNotExist) {
		fileContents, err := readGzipContents(gzipFile)
		if err != nil {
			// include the underlying error, previously dropped, to aid debugging
			log.Fatalf("unable to decompress gzip file %s: %v", gzipFile, err)
		}
		return fileContents
	}
	fileContents, err := readPlainContents(filePath)
	if err != nil {
		log.Fatalf("unable to read file %s: %v", filePath, err)
	}
	return fileContents
}
// decompress gzip files, return contents as string
func readGzipContents(filePath string) (string, error) {
gzipFile, err := os.Open(filePath)
if err != nil {
return "", err
}
defer func(gzipFile *os.File) {
err := gzipFile.Close()
if err != nil {
log.Println("failed to close gzip file")
}
}(gzipFile)
gzipReader, err := gzip.NewReader(gzipFile)
if err != nil {
return "", err
}
defer func(gzipReader *gzip.Reader) {
err := gzipReader.Close()
if err != nil {
log.Println("failed to close gzip reader")
}
}(gzipReader)
var buffer bytes.Buffer
_, err = io.Copy(&buffer, gzipReader) //nolint:gosec
if err != nil {
return "", err
}
return buffer.String(), nil
}
// read file, return contents as string
func readPlainContents(filePath string) (string, error) {
file, err := os.Open(filePath)
if err != nil {
return "", err
}
defer func(file *os.File) {
err := file.Close()
if err != nil {
log.Println("failed to close file")
}
}(file)
var buffer bytes.Buffer
_, err = io.Copy(&buffer, file)
if err != nil {
return "", err
}
return buffer.String(), nil
}
package util
import (
"bytes"
"encoding/json"
"log"
"dario.cat/mergo"
)
// PrettyPrintJSON indents the given JSON content (using a single space);
// aborts the program (after logging the content) when it isn't valid JSON.
// The name is only used to enrich the error message.
func PrettyPrintJSON(content []byte, name string) []byte {
	var indented bytes.Buffer
	if err := json.Indent(&indented, content, "", " "); err != nil {
		log.Print(string(content))
		log.Fatalf("invalid json in %s: %v, see json output above", name, err)
	}
	return indented.Bytes()
}
// MergeJSON merges the two JSON byte slices. It returns an error if x1 or x2 cannot be JSON-unmarshalled,
// or the merged JSON is invalid.
//
// Optionally, an orderBy function can be provided to alter the key order in the resulting JSON
func MergeJSON(x1, x2 []byte, orderBy func(output map[string]any) any) ([]byte, error) {
	var j1, j2 map[string]any
	if err := json.Unmarshal(x1, &j1); err != nil {
		return nil, err
	}
	if err := json.Unmarshal(x2, &j2); err != nil {
		return nil, err
	}
	// j2 is merged into j1; existing keys in j1 win
	if err := mergo.Merge(&j1, &j2); err != nil {
		return nil, err
	}
	if orderBy == nil {
		return json.Marshal(j1)
	}
	return json.Marshal(orderBy(j1))
}
package util
// Keys returns the keys of the map m.
// The keys will be an indeterminate order.
func Keys[M ~map[K]V, K comparable, V any](m M) []K {
r := make([]K, 0, len(m))
for k := range m {
r = append(r, k)
}
return r
}
// ReverseMap returns a new map with the keys and values of the input swapped.
// When multiple keys share the same value, one of them wins (which one is
// indeterminate, following map iteration order).
func ReverseMap(input map[string]string) map[string]string {
	reversed := make(map[string]string, len(input))
	for key, value := range input {
		reversed[value] = key
	}
	return reversed
}
package core
import (
"net/http"
"github.com/PDOK/gokoala/internal/engine"
)
const (
	// templatesDir is the on-disk location of the core page templates
	templatesDir = "internal/ogc/common/core/templates/"
	// paths at which the OGC Common core endpoints are mounted
	rootPath = "/"
	apiPath = "/api"
	// alternativeAPIPath implements https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-17
	alternativeAPIPath = "/openapi.json"
	conformancePath = "/conformance"
)
// CommonCore implements the OGC API Common core endpoints: landing page,
// OpenAPI spec and the conformance declaration.
type CommonCore struct {
	engine *engine.Engine
}
// NewCommonCore pre-renders the core page templates and registers the
// landing page, OpenAPI and conformance routes (plus a static asset fallback)
// on the engine's router.
func NewCommonCore(e *engine.Engine) *CommonCore {
	conformanceBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "Conformance",
			Path: "conformance",
		},
	}
	apiBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "OpenAPI specification",
			Path: "api",
		},
	}
	// pre-render the JSON and HTML variants of each core page
	e.RenderTemplates(rootPath,
		nil,
		engine.NewTemplateKey(templatesDir+"landing-page.go.json"),
		engine.NewTemplateKey(templatesDir+"landing-page.go.html"))
	e.RenderTemplates(rootPath,
		apiBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"api.go.html"))
	e.RenderTemplates(conformancePath,
		conformanceBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"conformance.go.json"),
		engine.NewTemplateKey(templatesDir+"conformance.go.html"))
	core := &CommonCore{
		engine: e,
	}
	e.Router.Get(rootPath, core.LandingPage())
	e.Router.Get(apiPath, core.API())
	// implements https://gitdocumentatie.logius.nl/publicatie/api/adr/#api-17
	e.Router.Get(alternativeAPIPath, func(w http.ResponseWriter, r *http.Request) { core.apiAsJSON(w, r) })
	e.Router.Get(conformancePath, core.Conformance())
	// catch-all: serve static assets (CSS, JS, images) from the assets directory
	e.Router.Handle("/*", http.FileServer(http.Dir("assets")))
	return core
}
// LandingPage serves the OGC API landing page in the negotiated format and language.
func (c *CommonCore) LandingPage() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := c.engine.CN.NegotiateFormat(r)
		lang := c.engine.CN.NegotiateLanguage(w, r)
		c.engine.ServePage(w, r, engine.NewTemplateKeyWithLanguage(templatesDir+"landing-page.go."+format, lang))
	}
}
// API serves the OpenAPI specification as HTML or JSON depending on content
// negotiation; any other requested format yields a problem response.
func (c *CommonCore) API() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		switch c.engine.CN.NegotiateFormat(r) {
		case engine.FormatHTML:
			c.apiAsHTML(w, r)
		case engine.FormatJSON:
			c.apiAsJSON(w, r)
		default:
			engine.RenderProblem(engine.ProblemNotFound, w)
		}
	}
}
// apiAsHTML serves the HTML rendering of the OpenAPI specification in the negotiated language.
func (c *CommonCore) apiAsHTML(w http.ResponseWriter, r *http.Request) {
	lang := c.engine.CN.NegotiateLanguage(w, r)
	c.engine.ServePage(w, r, engine.NewTemplateKeyWithLanguage(templatesDir+"api.go.html", lang))
}
// apiAsJSON serves the raw OpenAPI specification (SpecJSON) with the OpenAPI media type.
// NOTE(review): the two boolean args presumably toggle caching/validation behavior in
// engine.ServeResponse — confirm against that method's signature.
func (c *CommonCore) apiAsJSON(w http.ResponseWriter, r *http.Request) {
	c.engine.ServeResponse(w, r, true, true, engine.MediaTypeOpenAPI, c.engine.OpenAPI.SpecJSON)
}
// Conformance serves the conformance declaration in the negotiated format and language.
func (c *CommonCore) Conformance() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := c.engine.CN.NegotiateFormat(r)
		lang := c.engine.CN.NegotiateLanguage(w, r)
		c.engine.ServePage(w, r, engine.NewTemplateKeyWithLanguage(templatesDir+"conformance.go."+format, lang))
	}
}
package geospatial
import (
"net/http"
"github.com/PDOK/gokoala/internal/engine"
"github.com/go-chi/chi/v5"
)
const (
	// CollectionsPath is the base path for all collection endpoints.
	CollectionsPath = "/collections"
	// templatesDir is the location of the pre-rendered collection templates.
	templatesDir = "internal/ogc/common/geospatial/templates/"
)

// Collections serves the collections overview and per-collection metadata pages.
type Collections struct {
	engine *engine.Engine
}
// NewCollections enables support for OGC APIs that organize data in the concept of collections.
// A collection, also known as a geospatial data resource, is a common way to organize data in various OGC APIs.
func NewCollections(e *engine.Engine) *Collections {
	if e.Config.HasCollections() {
		rootBreadcrumbs := []engine.Breadcrumb{{Name: "Collections", Path: "collections"}}
		e.RenderTemplates(CollectionsPath,
			rootBreadcrumbs,
			engine.NewTemplateKey(templatesDir+"collections.go.json"),
			engine.NewTemplateKey(templatesDir+"collections.go.html"))

		for _, coll := range e.Config.AllCollections().Unique() {
			// prefer the configured title, fall back to the collection ID
			displayName := coll.ID
			if coll.Metadata != nil && coll.Metadata.Title != nil {
				displayName = *coll.Metadata.Title
			}
			// clone the base breadcrumbs explicitly so each collection gets its own slice
			breadcrumbs := append([]engine.Breadcrumb{}, rootBreadcrumbs...)
			breadcrumbs = append(breadcrumbs, engine.Breadcrumb{
				Name: displayName,
				Path: "collections/" + coll.ID,
			})
			e.RenderTemplatesWithParams(coll,
				nil,
				engine.NewTemplateKeyWithName(templatesDir+"collection.go.json", coll.ID))
			e.RenderTemplatesWithParams(coll,
				breadcrumbs,
				engine.NewTemplateKeyWithName(templatesDir+"collection.go.html", coll.ID))
		}
	}

	instance := &Collections{engine: e}
	e.Router.Get(CollectionsPath, instance.Collections())
	e.Router.Get(CollectionsPath+"/{collectionId}", instance.Collection())
	return instance
}
// Collections returns list of collections
func (c *Collections) Collections() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := c.engine.CN.NegotiateFormat(r)
		lang := c.engine.CN.NegotiateLanguage(w, r)
		c.engine.ServePage(w, r, engine.NewTemplateKeyWithLanguage(templatesDir+"collections.go."+format, lang))
	}
}
// Collection provides METADATA about a specific collection. To get the CONTENTS of a collection each OGC API
// building block must provide a separate/specific endpoint.
//
// For example in:
//   - OGC API Features you would have: /collections/{collectionId}/items
//   - OGC API Tiles could have: /collections/{collectionId}/tiles
//   - OGC API Maps could have: /collections/{collectionId}/maps
//   - OGC API 3d GeoVolumes would have: /collections/{collectionId}/3dtiles
func (c *Collections) Collection() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		collectionID := chi.URLParam(r, "collectionId")
		format := c.engine.CN.NegotiateFormat(r)
		lang := c.engine.CN.NegotiateLanguage(w, r)
		c.engine.ServePage(w, r, engine.NewTemplateKeyWithNameAndLanguage(templatesDir+"collection.go."+format, collectionID, lang))
	}
}
package geopackage
import (
"errors"
"fmt"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/jmoiron/sqlx"
)
// assertIndexesExist asserts required indexes in the GeoPackage exist:
// a spatial b-tree index (optionally extended with temporal columns) per
// feature table, plus an index per configured property filter.
func assertIndexesExist(
	configuredCollections config.GeoSpatialCollections,
	featureTableByCollectionID map[string]*featureTable,
	db *sqlx.DB, fidColumn string) error {

	// index needs to contain these columns in the given order
	defaultSpatialBtreeColumns := strings.Join([]string{fidColumn, "minx", "maxx", "miny", "maxy"}, ",")
	for collID, table := range featureTableByCollectionID {
		if table == nil {
			return errors.New("given table can't be nil")
		}
		for _, coll := range configuredCollections {
			if coll.ID != collID || coll.Features == nil {
				continue
			}
			spatialBtreeColumns := defaultSpatialBtreeColumns
			// assert temporal columns are indexed if configured
			if coll.Metadata != nil && coll.Metadata.TemporalProperties != nil {
				temporalBtreeColumns := strings.Join([]string{coll.Metadata.TemporalProperties.StartDate, coll.Metadata.TemporalProperties.EndDate}, ",")
				spatialBtreeColumns = strings.Join([]string{defaultSpatialBtreeColumns, coll.Metadata.TemporalProperties.StartDate, coll.Metadata.TemporalProperties.EndDate}, ",")
				if err := assertIndexExists(table.TableName, db, temporalBtreeColumns, true); err != nil {
					return err
				}
			}
			// assert spatial b-tree index exists, this index substitutes the r-tree when querying large bounding boxes
			// if temporal columns are configured, they must be included in this index as well
			if err := assertIndexExists(table.TableName, db, spatialBtreeColumns, true); err != nil {
				return err
			}
			// assert the column for each property filter is indexed.
			for _, propertyFilter := range coll.Features.Filters.Properties {
				if err := assertIndexExists(table.TableName, db, propertyFilter.Name, false); err != nil {
					// BUGFIX: guard against nil IndexRequired before dereferencing
					// (previously *propertyFilter.IndexRequired could panic);
					// treat an unset value as "index required".
					if propertyFilter.IndexRequired == nil || *propertyFilter.IndexRequired {
						return fmt.Errorf("%w. To disable this check set 'indexRequired' to 'false'", err)
					}
				}
			}
			break
		}
	}
	return nil
}
// assertIndexExists checks whether an index exists on the given comma-separated
// columns (in that exact order) in the given table. With prefixMatch an index
// whose columns start with the requested columns also qualifies.
func assertIndexExists(tableName string, db *sqlx.DB, columns string, prefixMatch bool) error {
	// tableName comes from gpkg_contents (not user input), so Sprintf is acceptable here
	query := fmt.Sprintf(`
select group_concat(info.name) as indexed_columns
from pragma_index_list('%s') as list,
     pragma_index_info(list.name) as info
group by list.name`, tableName)
	rows, err := db.Queryx(query)
	if err != nil {
		return fmt.Errorf("failed to read indexes from table '%s'", tableName)
	}
	defer rows.Close() // close immediately when leaving (was previously deferred only after the scan loop)
	exists := false
	for rows.Next() {
		var indexedColumns string
		// BUGFIX: scan errors were silently ignored before
		if err := rows.Scan(&indexedColumns); err != nil {
			return fmt.Errorf("failed to read index columns from table '%s': %w", tableName, err)
		}
		if columns == indexedColumns {
			exists = true // index on expected columns
			break
		} else if prefixMatch && strings.HasPrefix(indexedColumns, columns) {
			exists = true // index with expected prefix columns
			break
		}
	}
	// BUGFIX: surface iteration errors, previously unchecked
	if err := rows.Err(); err != nil {
		return fmt.Errorf("failed to read indexes from table '%s': %w", tableName, err)
	}
	if !exists {
		return fmt.Errorf("missing required index: no index exists on column(s) '%s' in table '%s'",
			columns, tableName)
	}
	return nil
}
//go:build cgo && !darwin && !windows
package geopackage
import (
"fmt"
"log"
"github.com/PDOK/gokoala/config"
"github.com/google/uuid"
cloudsqlitevfs "github.com/PDOK/go-cloud-sqlite-vfs"
"github.com/jmoiron/sqlx"
)
// Cloud-Backed SQLite (CBS) GeoPackage in Azure or Google object storage
type cloudGeoPackage struct {
	db *sqlx.DB
	// cloudVFS is the virtual file system backing the remote GeoPackage; closed together with db.
	cloudVFS *cloudsqlitevfs.VFS
}
// newCloudBackedGeoPackage connects to a Cloud-Backed SQLite GeoPackage in
// object storage. Terminates the process (log.Fatalf) on any failure.
func newCloudBackedGeoPackage(gpkg *config.GeoPackageCloud) geoPackageBackend {
	cacheDir, err := gpkg.CacheDir()
	if err != nil {
		log.Fatalf("invalid cache dir, error: %v", err)
	}
	cacheSize, err := gpkg.Cache.MaxSizeAsBytes()
	if err != nil {
		log.Fatalf("invalid cache size provided, error: %v", err)
	}

	connectionDesc := fmt.Sprintf("Cloud-Backed GeoPackage '%s' in container '%s' on '%s'",
		gpkg.File, gpkg.Container, gpkg.Connection)
	log.Printf("connecting to %s\n", connectionDesc)

	// important: each geopackage must use a unique VFS name
	vfsName := uuid.New().String()
	vfs, err := cloudsqlitevfs.NewVFS(vfsName, gpkg.Connection, gpkg.User, gpkg.Auth,
		gpkg.Container, cacheDir, cacheSize, gpkg.LogHTTPRequests)
	if err != nil {
		log.Fatalf("failed to connect with %s, error: %v", connectionDesc, err)
	}
	log.Printf("connected to %s\n", connectionDesc)

	dsn := fmt.Sprintf("/%s/%s?vfs=%s&mode=ro&_cache_size=%d", gpkg.Container, gpkg.File, vfsName, gpkg.InMemoryCacheSize)
	db, err := sqlx.Open(sqliteDriverName, dsn)
	if err != nil {
		log.Fatalf("failed to open %s, error: %v", connectionDesc, err)
	}
	return &cloudGeoPackage{db: db, cloudVFS: &vfs}
}
// getDB returns the database handle of this Cloud-Backed GeoPackage.
func (g *cloudGeoPackage) getDB() *sqlx.DB {
	return g.db
}
// close closes the database connection first, then the cloud VFS (when present).
// Failures are logged, not returned.
func (g *cloudGeoPackage) close() {
	if err := g.db.Close(); err != nil {
		log.Printf("failed to close GeoPackage: %v", err)
	}
	if g.cloudVFS == nil {
		return
	}
	if err := g.cloudVFS.Close(); err != nil {
		log.Printf("failed to close Cloud-Backed GeoPackage: %v", err)
	}
}
package geopackage
import (
"fmt"
"log"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/jmoiron/sqlx"
)
// GeoPackage on local disk
type localGeoPackage struct {
	db *sqlx.DB
}
// newLocalGeoPackage opens a read-only GeoPackage from local disk, downloading
// it first when a download source is configured. Terminates on failure.
func newLocalGeoPackage(gpkg *config.GeoPackageLocal) geoPackageBackend {
	if gpkg.Download != nil {
		downloadGeoPackage(gpkg)
	}
	dsn := fmt.Sprintf("file:%s?mode=ro&_cache_size=%d", gpkg.File, gpkg.InMemoryCacheSize)
	db, err := sqlx.Open(sqliteDriverName, dsn)
	if err != nil {
		log.Fatalf("failed to open GeoPackage: %v", err)
	}
	log.Printf("connected to local GeoPackage: %s", gpkg.File)
	return &localGeoPackage{db: db}
}
// downloadGeoPackage downloads the GeoPackage from the configured URL to local
// disk. Terminates the process (log.Fatalf) when the download fails.
func downloadGeoPackage(gpkg *config.GeoPackageLocal) {
	url := *gpkg.Download.From.URL
	log.Printf("start download of GeoPackage: %s", url.String())
	downloadTime, err := engine.Download(url, gpkg.File, gpkg.Download.Parallelism, gpkg.Download.TLSSkipVerify,
		gpkg.Download.Timeout.Duration, gpkg.Download.RetryDelay.Duration, gpkg.Download.RetryMaxDelay.Duration, gpkg.Download.MaxRetries)
	if err != nil {
		log.Fatalf("failed to download GeoPackage: %v", err)
	}
	// fixed typo in log message: "succesfully" -> "successfully"
	log.Printf("successfully downloaded GeoPackage to %s in %s", gpkg.File, downloadTime.Round(time.Second))
}
// getDB returns the database handle of this local GeoPackage.
func (g *localGeoPackage) getDB() *sqlx.DB {
	return g.db
}
// close closes the underlying database; a failure is logged, not returned.
func (g *localGeoPackage) close() {
	if err := g.db.Close(); err != nil {
		log.Printf("failed to close GeoPackage: %v", err)
	}
}
package geopackage
import (
"context"
"database/sql"
"fmt"
"log"
"maps"
"os"
"path"
"sync"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine/util"
"github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/go-spatial/geom"
"github.com/go-spatial/geom/encoding/gpkg"
"github.com/go-spatial/geom/encoding/wkt"
"github.com/jmoiron/sqlx"
"github.com/mattn/go-sqlite3"
"github.com/qustavo/sqlhooks/v2"
_ "github.com/mattn/go-sqlite3" // import for side effect (= sqlite3 driver) only
)
const (
	// sqliteDriverName is the name under which the sqlite3 driver (with spatialite extension) is registered.
	sqliteDriverName = "sqlite3_with_extensions"
)

// once guards driver registration: sql.Register panics when the same driver name is registered twice.
var once sync.Once

// Load sqlite (with extensions) once.
//
// Extensions are by default expected in /usr/lib. For spatialite you can
// alternatively/optionally set SPATIALITE_LIBRARY_PATH.
func loadDriver() {
	once.Do(func() {
		spatialite := path.Join(os.Getenv("SPATIALITE_LIBRARY_PATH"), "mod_spatialite")
		driver := &sqlite3.SQLiteDriver{Extensions: []string{spatialite}}
		// wrap the driver with SQL hooks so queries can be logged (see datasources.SQLLog)
		sql.Register(sqliteDriverName, sqlhooks.Wrap(driver, datasources.NewSQLLogFromEnv()))
	})
}
// geoPackageBackend abstracts over local and cloud-backed GeoPackages.
type geoPackageBackend interface {
	getDB() *sqlx.DB
	close()
}

// featureTable maps a row from the gpkg_contents/gpkg_geometry_columns join,
// describing one feature table in the GeoPackage.
type featureTable struct {
	TableName          string    `db:"table_name"`
	DataType           string    `db:"data_type"` // always 'features'
	Identifier         string    `db:"identifier"`
	Description        string    `db:"description"`
	GeometryColumnName string    `db:"column_name"`
	GeometryType       string    `db:"geometry_type_name"`
	LastChange         time.Time `db:"last_change"`
	MinX               float64   `db:"min_x"` // bbox
	MinY               float64   `db:"min_y"` // bbox
	MaxX               float64   `db:"max_x"` // bbox
	MaxY               float64   `db:"max_y"` // bbox
	SRS                int64     `db:"srs_id"`

	// ColumnsWithDateType maps column name -> SQLite column type.
	// NOTE(review): field name spells "DateType" where "DataType" seems intended
	// (cf. the ColumnsWithDataType accessor) — renaming would touch other files.
	ColumnsWithDateType map[string]string
}

// ColumnsWithDataType returns the column name -> column type mapping of this feature table.
func (ft featureTable) ColumnsWithDataType() map[string]string {
	return ft.ColumnsWithDateType
}
// GeoPackage is a features datasource backed by a (local or cloud-backed) GeoPackage.
type GeoPackage struct {
	backend           geoPackageBackend
	preparedStmtCache *PreparedStatementCache

	// fidColumn is the feature-ID column used for cursor-based pagination.
	fidColumn                  string
	featureTableByCollectionID map[string]*featureTable
	queryTimeout               time.Duration
	// maxBBoxSizeToUseWithRTree: above this number of hits the r-tree is abandoned
	// in favor of the spatial b-tree index (see makeBboxQuery).
	maxBBoxSizeToUseWithRTree int
}
// NewGeoPackage creates a GeoPackage datasource backed by either a local file
// or a Cloud-Backed SQLite GeoPackage, depending on the given config.
// Terminates the process (log.Fatal) on any initialization failure.
func NewGeoPackage(collections config.GeoSpatialCollections, gpkgConfig config.GeoPackage) *GeoPackage {
	loadDriver()

	g := &GeoPackage{}
	g.preparedStmtCache = NewCache()
	warmUp := false
	switch {
	case gpkgConfig.Local != nil:
		g.backend = newLocalGeoPackage(gpkgConfig.Local)
		g.fidColumn = gpkgConfig.Local.Fid
		g.queryTimeout = gpkgConfig.Local.QueryTimeout.Duration
		g.maxBBoxSizeToUseWithRTree = gpkgConfig.Local.MaxBBoxSizeToUseWithRTree
	case gpkgConfig.Cloud != nil:
		g.backend = newCloudBackedGeoPackage(gpkgConfig.Cloud)
		g.fidColumn = gpkgConfig.Cloud.Fid
		g.queryTimeout = gpkgConfig.Cloud.QueryTimeout.Duration
		g.maxBBoxSizeToUseWithRTree = gpkgConfig.Cloud.MaxBBoxSizeToUseWithRTree
		warmUp = gpkgConfig.Cloud.Cache.WarmUp
	default:
		log.Fatal("unknown GeoPackage config encountered")
	}

	metadata, err := readDriverMetadata(g.backend.getDB())
	if err != nil {
		log.Fatalf("failed to connect with GeoPackage: %v", err)
	}
	log.Println(metadata)

	g.featureTableByCollectionID, err = readGpkgContents(collections, g.backend.getDB())
	if err != nil {
		log.Fatal(err)
	}
	if err = assertIndexesExist(collections, g.featureTableByCollectionID, g.backend.getDB(), g.fidColumn); err != nil {
		log.Fatal(err)
	}
	if warmUp {
		// perform warmup async since it can take a long time
		go func() {
			// BUGFIX: use a goroutine-local error variable; the goroutine previously
			// assigned to the enclosing 'err', racing with the returning caller.
			if warmUpErr := warmUpFeatureTables(collections, g.featureTableByCollectionID, g.backend.getDB()); warmUpErr != nil {
				log.Fatal(warmUpErr)
			}
		}()
	}
	return g
}
// Close releases all resources held by this GeoPackage:
// first the cached prepared statements, then the backend (local db or cloud VFS).
func (g *GeoPackage) Close() {
	g.preparedStmtCache.Close()
	g.backend.close()
}
// GetFeatureIDs returns the feature IDs matching the given criteria, plus the
// pagination cursors. Returns empty cursors when there are no results.
func (g *GeoPackage) GetFeatureIDs(ctx context.Context, collection string, criteria datasources.FeaturesCriteria) ([]int64, domain.Cursors, error) {
	ft, err := g.getFeatureTable(collection)
	if err != nil {
		return nil, domain.Cursors{}, err
	}

	// bound query duration, see https://go.dev/doc/database/cancel-operations
	queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout)
	defer cancel()

	stmt, query, queryArgs, err := g.makeFeaturesQuery(queryCtx, ft, true, criteria) //nolint:sqlclosecheck // prepared statement is cached, will be closed when evicted from cache
	if err != nil {
		return nil, domain.Cursors{}, fmt.Errorf("failed to create query '%s' error: %w", query, err)
	}
	rows, err := stmt.QueryxContext(queryCtx, queryArgs)
	if err != nil {
		return nil, domain.Cursors{}, fmt.Errorf("failed to execute query '%s' error: %w", query, err)
	}
	defer rows.Close()

	featureIDs, prevNext, err := domain.MapRowsToFeatureIDs(rows)
	if err != nil {
		return nil, domain.Cursors{}, err
	}
	if prevNext == nil {
		return nil, domain.Cursors{}, nil
	}
	return featureIDs, domain.NewCursors(*prevNext, criteria.Cursor.FiltersChecksum), nil
}
// GetFeaturesByID returns the features with the given IDs as a FeatureCollection.
func (g *GeoPackage) GetFeaturesByID(ctx context.Context, collection string, featureIDs []int64) (*domain.FeatureCollection, error) {
	ft, err := g.getFeatureTable(collection)
	if err != nil {
		return nil, err
	}

	// bound query duration, see https://go.dev/doc/database/cancel-operations
	queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout)
	defer cancel()

	// expand the feature IDs into an IN-clause with bind variables
	query, queryArgs, err := sqlx.Named(
		fmt.Sprintf("select * from %s where %s in (:fids)", ft.TableName, g.fidColumn),
		map[string]any{"fids": featureIDs})
	if err != nil {
		return nil, fmt.Errorf("failed to make features query, error: %w", err)
	}
	query, queryArgs, err = sqlx.In(query, queryArgs...)
	if err != nil {
		return nil, fmt.Errorf("failed to make IN-clause, error: %w", err)
	}

	db := g.backend.getDB()
	rows, err := db.QueryxContext(queryCtx, db.Rebind(query), queryArgs...)
	if err != nil {
		return nil, fmt.Errorf("failed to execute query '%s' error: %w", query, err)
	}
	defer rows.Close()

	result := domain.FeatureCollection{}
	result.Features, _, err = domain.MapRowsToFeatures(rows, g.fidColumn, ft.GeometryColumnName, readGpkgGeometry)
	if err != nil {
		return nil, err
	}
	result.NumberReturned = len(result.Features)
	return &result, nil
}
// GetFeatures returns the features matching the given criteria as a
// FeatureCollection, plus the pagination cursors.
func (g *GeoPackage) GetFeatures(ctx context.Context, collection string, criteria datasources.FeaturesCriteria) (*domain.FeatureCollection, domain.Cursors, error) {
	ft, err := g.getFeatureTable(collection)
	if err != nil {
		return nil, domain.Cursors{}, err
	}

	// bound query duration, see https://go.dev/doc/database/cancel-operations
	queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout)
	defer cancel()

	stmt, query, queryArgs, err := g.makeFeaturesQuery(queryCtx, ft, false, criteria) //nolint:sqlclosecheck // prepared statement is cached, will be closed when evicted from cache
	if err != nil {
		return nil, domain.Cursors{}, fmt.Errorf("failed to create query '%s' error: %w", query, err)
	}
	rows, err := stmt.QueryxContext(queryCtx, queryArgs)
	if err != nil {
		return nil, domain.Cursors{}, fmt.Errorf("failed to execute query '%s' error: %w", query, err)
	}
	defer rows.Close()

	var prevNext *domain.PrevNextFID
	result := domain.FeatureCollection{}
	result.Features, prevNext, err = domain.MapRowsToFeatures(rows, g.fidColumn, ft.GeometryColumnName, readGpkgGeometry)
	if err != nil {
		return nil, domain.Cursors{}, err
	}
	if prevNext == nil {
		return nil, domain.Cursors{}, nil
	}
	result.NumberReturned = len(result.Features)
	return &result, domain.NewCursors(*prevNext, criteria.Cursor.FiltersChecksum), nil
}
// GetFeature returns the single feature with the given ID,
// or nil (without error) when no such feature exists.
func (g *GeoPackage) GetFeature(ctx context.Context, collection string, featureID int64) (*domain.Feature, error) {
	ft, err := g.getFeatureTable(collection)
	if err != nil {
		return nil, err
	}

	// bound query duration, see https://go.dev/doc/database/cancel-operations
	queryCtx, cancel := context.WithTimeout(ctx, g.queryTimeout)
	defer cancel()

	query := fmt.Sprintf("select * from %s f where f.%s = :fid limit 1", ft.TableName, g.fidColumn)
	rows, err := g.backend.getDB().NamedQueryContext(queryCtx, query, map[string]any{"fid": featureID})
	if err != nil {
		return nil, fmt.Errorf("query '%s' failed: %w", query, err)
	}
	defer rows.Close()

	features, _, err := domain.MapRowsToFeatures(rows, g.fidColumn, ft.GeometryColumnName, readGpkgGeometry)
	if err != nil {
		return nil, err
	}
	// not found: nil result, no error
	if len(features) != 1 {
		return nil, nil
	}
	return features[0], nil
}
// GetFeatureTableMetadata returns metadata of the feature table backing the given collection.
func (g *GeoPackage) GetFeatureTableMetadata(collection string) (datasources.FeatureTableMetadata, error) {
	if table, found := g.featureTableByCollectionID[collection]; found {
		return table, nil
	}
	return nil, fmt.Errorf("no metadata for %s", collection)
}
// Build specific features queries based on the given options.
// Make sure to use SQL bind variables and return named params: https://jmoiron.github.io/sqlx/#namedParams
//
// Returns the (cached) prepared statement, the query string (also on error, for
// error reporting by callers) and its named parameters.
func (g *GeoPackage) makeFeaturesQuery(ctx context.Context, table *featureTable, onlyFIDs bool,
	criteria datasources.FeaturesCriteria) (stmt *sqlx.NamedStmt, query string, queryArgs map[string]any, err error) {
	// make query: a bbox-filtered query when a bounding box is given, the default (cursor-only) query otherwise
	if criteria.Bbox != nil {
		query, queryArgs, err = g.makeBboxQuery(table, onlyFIDs, criteria)
		if err != nil {
			return
		}
	} else {
		query, queryArgs = g.makeDefaultQuery(table, criteria)
	}

	// lookup prepared statement for given query, or create new one
	stmt, err = g.preparedStmtCache.Lookup(ctx, g.backend.getDB(), query)
	return
}
// makeDefaultQuery builds the cursor-paginated query used when no bounding box is given.
// The CTEs select one page starting at the cursor FID ('next'), peek backwards for the
// previous page ('prev'), and use lag/lead window functions to compute the prev/next
// cursor FIDs alongside the returned features.
func (g *GeoPackage) makeDefaultQuery(table *featureTable, criteria datasources.FeaturesCriteria) (string, map[string]any) {
	pfClause, pfNamedParams := propertyFiltersToSQL(criteria.PropertyFilters)
	temporalClause, temporalNamedParams := temporalCriteriaToSQL(criteria.TemporalCriteria)

	defaultQuery := fmt.Sprintf(`
with
    next as (select * from "%[1]s" where "%[2]s" >= :fid %[3]s %[4]s order by %[2]s asc limit :limit + 1),
    prev as (select * from "%[1]s" where "%[2]s" < :fid %[3]s %[4]s order by %[2]s desc limit :limit),
    nextprev as (select * from next union all select * from prev),
    nextprevfeat as (select *, lag("%[2]s", :limit) over (order by %[2]s) as prevfid, lead("%[2]s", :limit) over (order by "%[2]s") as nextfid from nextprev)
select * from nextprevfeat where "%[2]s" >= :fid %[3]s %[4]s limit :limit
`, table.TableName, g.fidColumn, temporalClause, pfClause) // don't add user input here, use named params for user input!

	namedParams := map[string]any{
		"fid":   criteria.Cursor.FID,
		"limit": criteria.Limit,
	}
	// merge in bind params contributed by the property-filter and temporal clauses
	maps.Copy(namedParams, pfNamedParams)
	maps.Copy(namedParams, temporalNamedParams)
	return defaultQuery, namedParams
}
// makeBboxQuery builds the cursor-paginated query for bounding-box searches.
//
// Strategy: first estimate whether the bbox matches few ('small') or many ('big')
// candidates by probing the r-tree up to maxBBoxSizeToUseWithRTree rows. Small
// bboxes are answered via the r-tree join; big bboxes via the spatial b-tree index
// instead (forced with an "indexed by" hint unless property filters are present,
// in which case SQLite gets to choose the index). Only one of the rtree/btree CTE
// pairs produces rows because of the iif(...) limits. The lag/lead window functions
// compute prev/next cursor FIDs, mirroring makeDefaultQuery.
func (g *GeoPackage) makeBboxQuery(table *featureTable, onlyFIDs bool, criteria datasources.FeaturesCriteria) (string, map[string]any, error) {
	selectClause := "*"
	if onlyFIDs {
		selectClause = "\"" + g.fidColumn + "\", prevfid, nextfid"
	}

	btreeIndexHint := fmt.Sprintf("indexed by \"%s_spatial_idx\"", table.TableName)
	pfClause, pfNamedParams := propertyFiltersToSQL(criteria.PropertyFilters)
	if pfClause != "" {
		// don't force btree index when using property filter, let SQLite decide
		// whether to use the BTree index or the property filter index
		btreeIndexHint = ""
	}
	temporalClause, temporalNamedParams := temporalCriteriaToSQL(criteria.TemporalCriteria)

	bboxQuery := fmt.Sprintf(`
with
     given_bbox as (select geomfromtext(:bboxWkt, :bboxSrid)),
     bbox_size as (select iif(count(id) < %[3]d, 'small', 'big') as bbox_size
                     from (select id from rtree_%[1]s_%[4]s
                            where minx <= :maxx and maxx >= :minx and miny <= :maxy and maxy >= :miny
                            limit %[3]d)),
     next_bbox_rtree as (select f.*
                           from "%[1]s" f inner join rtree_%[1]s_%[4]s rf on f."%[2]s" = rf.id
                          where rf.minx <= :maxx and rf.maxx >= :minx and rf.miny <= :maxy and rf.maxy >= :miny
                            and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
                            and f."%[2]s" >= :fid %[6]s %[7]s
                          order by f."%[2]s" asc
                          limit (select iif(bbox_size == 'small', :limit + 1, 0) from bbox_size)),
     next_bbox_btree as (select f.*
                           from "%[1]s" f %[8]s
                          where f.minx <= :maxx and f.maxx >= :minx and f.miny <= :maxy and f.maxy >= :miny
                            and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
                            and f."%[2]s" >= :fid %[6]s %[7]s
                          order by f."%[2]s" asc
                          limit (select iif(bbox_size == 'big', :limit + 1, 0) from bbox_size)),
     next as (select * from next_bbox_rtree union all select * from next_bbox_btree),
     prev_bbox_rtree as (select f.*
                           from "%[1]s" f inner join rtree_%[1]s_%[4]s rf on f."%[2]s" = rf.id
                          where rf.minx <= :maxx and rf.maxx >= :minx and rf.miny <= :maxy and rf.maxy >= :miny
                            and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
                            and f."%[2]s" < :fid %[6]s %[7]s
                          order by f."%[2]s" desc
                          limit (select iif(bbox_size == 'small', :limit, 0) from bbox_size)),
     prev_bbox_btree as (select f.*
                           from "%[1]s" f %[8]s
                          where f.minx <= :maxx and f.maxx >= :minx and f.miny <= :maxy and f.maxy >= :miny
                            and st_intersects((select * from given_bbox), castautomagic(f.%[4]s)) = 1
                            and f."%[2]s" < :fid %[6]s %[7]s
                          order by f."%[2]s" desc
                          limit (select iif(bbox_size == 'big', :limit, 0) from bbox_size)),
     prev as (select * from prev_bbox_rtree union all select * from prev_bbox_btree),
     nextprev as (select * from next union all select * from prev),
     nextprevfeat as (select *, lag("%[2]s", :limit) over (order by "%[2]s") as prevfid, lead("%[2]s", :limit) over (order by "%[2]s") as nextfid from nextprev)
select %[5]s from nextprevfeat where "%[2]s" >= :fid %[6]s %[7]s limit :limit
`, table.TableName, g.fidColumn, g.maxBBoxSizeToUseWithRTree, table.GeometryColumnName,
		selectClause, temporalClause, pfClause, btreeIndexHint) // don't add user input here, use named params for user input!

	bboxAsWKT, err := wkt.EncodeString(criteria.Bbox)
	if err != nil {
		return "", nil, err
	}
	namedParams := map[string]any{
		"fid":      criteria.Cursor.FID,
		"limit":    criteria.Limit,
		"bboxWkt":  bboxAsWKT,
		"maxx":     criteria.Bbox.MaxX(),
		"minx":     criteria.Bbox.MinX(),
		"maxy":     criteria.Bbox.MaxY(),
		"miny":     criteria.Bbox.MinY(),
		"bboxSrid": criteria.InputSRID}
	// merge in bind params contributed by the property-filter and temporal clauses
	maps.Copy(namedParams, pfNamedParams)
	maps.Copy(namedParams, temporalNamedParams)
	return bboxQuery, namedParams, nil
}
// getFeatureTable returns the feature table backing the given collection,
// or an error listing the available collections when it doesn't exist.
func (g *GeoPackage) getFeatureTable(collection string) (*featureTable, error) {
	if table, ok := g.featureTableByCollectionID[collection]; ok {
		return table, nil
	}
	return nil, fmt.Errorf("can't query collection '%s' since it doesn't exist in "+
		"geopackage, available in geopackage: %v", collection, util.Keys(g.featureTableByCollectionID))
}
// readGpkgGeometry decodes a GeoPackage binary geometry blob into a geometry.
func readGpkgGeometry(rawGeom []byte) (geom.Geometry, error) {
	decoded, err := gpkg.DecodeGeometry(rawGeom)
	if err != nil {
		return nil, err
	}
	return decoded.Geometry, nil
}
// propertyFiltersToSQL turns the given property filters into an SQL AND-clause
// with one named bind parameter (pf1, pf2, ...) per filter.
func propertyFiltersToSQL(pf map[string]string) (sql string, namedParams map[string]any) {
	namedParams = make(map[string]any)
	position := 0
	for column, value := range pf {
		position++
		param := fmt.Sprintf("pf%d", position)
		// column name in double quotes in case it is a reserved keyword
		// also: we don't currently support LIKE since wildcard searches don't use the index
		sql += fmt.Sprintf(" and \"%s\" = :%s", column, param)
		namedParams[param] = value
	}
	return sql, namedParams
}
// temporalCriteriaToSQL turns the given temporal criteria into an SQL AND-clause:
// the feature's start date must be on/before the reference date and its end date
// on/after it (or open-ended, i.e. null).
func temporalCriteriaToSQL(temporalCriteria datasources.TemporalCriteria) (sql string, namedParams map[string]any) {
	namedParams = make(map[string]any)
	if temporalCriteria.ReferenceDate.IsZero() {
		return sql, namedParams
	}
	namedParams["referenceDate"] = temporalCriteria.ReferenceDate
	sql = fmt.Sprintf(" and \"%[1]s\" <= :referenceDate and (\"%[2]s\" >= :referenceDate or \"%[2]s\" is null)",
		temporalCriteria.StartDateProperty, temporalCriteria.EndDateProperty)
	return sql, namedParams
}
package geopackage
import (
"errors"
"fmt"
"log"
"github.com/PDOK/gokoala/config"
"github.com/jmoiron/sqlx"
)
// Read metadata about gpkg and sqlite driver
//
// Also proves the connection works and the spatialite extension is loaded
// (spatialite_version() fails otherwise).
func readDriverMetadata(db *sqlx.DB) (string, error) {
	type pragma struct {
		UserVersion string `db:"user_version"`
	}
	type metadata struct {
		Sqlite     string `db:"sqlite"`
		Spatialite string `db:"spatialite"`
		Arch       string `db:"arch"`
	}

	var m metadata
	err := db.QueryRowx(`
select sqlite_version() as sqlite,
       spatialite_version() as spatialite,
       spatialite_target_cpu() as arch`).StructScan(&m)
	if err != nil {
		return "", err
	}

	var gpkgVersion pragma
	// best effort: the pragma may not be readable, fall back to "unknown" below
	_ = db.QueryRowx(`pragma user_version`).StructScan(&gpkgVersion)
	if gpkgVersion.UserVersion == "" {
		gpkgVersion.UserVersion = "unknown"
	}

	return fmt.Sprintf("geopackage version: %s, sqlite version: %s, spatialite version: %s on %s",
		gpkgVersion.UserVersion, m.Sqlite, m.Spatialite, m.Arch), nil
}
// Read gpkg_contents table. This table contains metadata about feature tables. The result is a mapping from
// collection ID -> feature table metadata. We match each feature table to the collection ID by looking at the
// 'identifier' column. Also in case there's no exact match between 'collection ID' and 'identifier' we use
// the explicitly configured table name.
func readGpkgContents(collections config.GeoSpatialCollections, db *sqlx.DB) (map[string]*featureTable, error) {
	query := `
select
	c.table_name, c.data_type, c.identifier, c.description, c.last_change,
	c.min_x, c.min_y, c.max_x, c.max_y, c.srs_id, gc.column_name, gc.geometry_type_name
from
	gpkg_contents c join gpkg_geometry_columns gc on c.table_name == gc.table_name
where
	c.data_type = 'features' and
	c.min_x is not null`

	rows, err := db.Queryx(query)
	if err != nil {
		return nil, fmt.Errorf("failed to retrieve gpkg_contents using query: %v\n, error: %w", query, err)
	}
	defer rows.Close()

	result := make(map[string]*featureTable, 10)
	for rows.Next() {
		row := featureTable{
			ColumnsWithDateType: make(map[string]string),
		}
		if err = rows.StructScan(&row); err != nil {
			return nil, fmt.Errorf("failed to read gpkg_contents record, error: %w", err)
		}
		if row.TableName == "" {
			// BUGFIX: previously wrapped a nil err with %w here
			return nil, errors.New("feature table name is blank")
		}
		if err = readFeatureTableInfo(db, row); err != nil {
			return nil, fmt.Errorf("failed to read feature table metadata, error: %w", err)
		}
		// match by identifier, or by explicitly configured table name
		for _, collection := range collections {
			if row.Identifier == collection.ID || hasMatchingTableName(collection, row) {
				result[collection.ID] = &row
			}
		}
	}
	if err = rows.Err(); err != nil {
		return nil, err
	}

	if len(result) == 0 {
		return nil, errors.New("no records found in gpkg_contents, can't serve features")
	}
	uniqueTables := make(map[string]struct{})
	for _, table := range result {
		uniqueTables[table.TableName] = struct{}{}
	}
	if len(uniqueTables) != len(result) {
		log.Printf("Warning: found %d unique table names for %d collections, "+
			"usually each collection is backed by its own unique table\n", len(uniqueTables), len(result))
	}
	return result, nil
}
// readFeatureTableInfo fills table.ColumnsWithDateType with the column name ->
// column type mapping of the given feature table. The table is passed by value,
// but the map inside it is shared, so mutations are visible to the caller.
func readFeatureTableInfo(db *sqlx.DB, table featureTable) error {
	rows, err := db.Queryx(fmt.Sprintf("select name, type from pragma_table_info('%s')", table.TableName))
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var colName, colType string
		if err = rows.Scan(&colName, &colType); err != nil {
			return err
		}
		table.ColumnsWithDateType[colName] = colType
	}
	// BUGFIX: surface iteration errors, previously unchecked
	return rows.Err()
}
// hasMatchingTableName reports whether the collection explicitly configures a
// feature table name that equals the identifier of the given feature table.
func hasMatchingTableName(collection config.GeoSpatialCollection, row featureTable) bool {
	if collection.Features == nil || collection.Features.TableName == nil {
		return false
	}
	return row.Identifier == *collection.Features.TableName
}
package geopackage
import (
"context"
"log"
lru "github.com/hashicorp/golang-lru/v2"
"github.com/jmoiron/sqlx"
)
// preparedStmtCacheSize is the max number of prepared statements kept;
// least-recently used statements are evicted (and closed).
var preparedStmtCacheSize = 25

// PreparedStatementCache is thread safe
type PreparedStatementCache struct {
	cache *lru.Cache[string, *sqlx.NamedStmt]
}
// NewCache creates a new PreparedStatementCache that will evict least-recently used (LRU) statements.
// Evicted statements are closed to free their database resources.
func NewCache() *PreparedStatementCache {
	onEvict := func(_ string, stmt *sqlx.NamedStmt) {
		if stmt != nil {
			_ = stmt.Close()
		}
	}
	c, _ := lru.NewWithEvict[string, *sqlx.NamedStmt](preparedStmtCacheSize, onEvict)
	return &PreparedStatementCache{cache: c}
}
// Lookup gets a prepared statement from the cache for the given query, or creates a new one and adds it to the cache
func (c *PreparedStatementCache) Lookup(ctx context.Context, db *sqlx.DB, query string) (*sqlx.NamedStmt, error) {
	if cached, ok := c.cache.Get(query); ok {
		return cached, nil
	}
	stmt, err := db.PrepareNamedContext(ctx, query)
	if err != nil {
		return nil, err
	}
	c.cache.Add(query, stmt)
	return stmt, nil
}
// Close purges the cache, and closes remaining prepared statements
func (c *PreparedStatementCache) Close() {
	remaining := c.cache.Len()
	log.Printf("closing %d prepared statements", remaining)
	c.cache.Purge() // triggers the eviction callback, which closes each statement
}
package geopackage
import (
"errors"
"fmt"
"log"
"github.com/PDOK/gokoala/config"
"github.com/jmoiron/sqlx"
)
// warmUpFeatureTables executes a warmup query to speedup subsequent queries.
// This encompasses traversing index(es) to fill the local cache.
func warmUpFeatureTables(
	configuredCollections config.GeoSpatialCollections,
	featureTableByCollectionID map[string]*featureTable,
	db *sqlx.DB) error {

	for collID, table := range featureTableByCollectionID {
		if table == nil {
			return errors.New("given table can't be nil")
		}
		for _, coll := range configuredCollections {
			if coll.ID != collID || coll.Features == nil {
				continue
			}
			if err := warmUpFeatureTable(table.TableName, db); err != nil {
				return err
			}
			break
		}
	}
	return nil
}
// warmUpFeatureTable runs a single bbox-style query against the given feature
// table so its index pages get pulled into the local cache.
func warmUpFeatureTable(tableName string, db *sqlx.DB) error {
	log.Printf("start warm-up of feature table '%s'", tableName)
	query := fmt.Sprintf(`
select minx,maxx,miny,maxy from %[1]s where minx <= 0 and maxx >= 0 and miny <= 0 and maxy >= 0
`, tableName)
	if _, err := db.Exec(query); err != nil {
		return fmt.Errorf("failed to warm-up feature table '%s': %w", tableName, err)
	}
	log.Printf("end warm-up of feature table '%s'", tableName)
	return nil
}
package postgis
import (
"context"
"log"
"github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
)
// PostGIS !!! Placeholder implementation, for future reference !!!
type PostGIS struct {
}

// NewPostGIS creates a new (stub) PostGIS datasource.
func NewPostGIS() *PostGIS {
	return &PostGIS{}
}

// Close is a no-op: the stub holds no resources.
func (PostGIS) Close() {
	// noop
}
// GetFeatureIDs is a stub: logs a notice and returns empty results.
func (pg PostGIS) GetFeatureIDs(_ context.Context, _ string, _ datasources.FeaturesCriteria) ([]int64, domain.Cursors, error) {
	log.Println("PostGIS support is not implemented yet, this just serves to demonstrate that we can support multiple types of datasources")
	return []int64{}, domain.Cursors{}, nil
}

// GetFeaturesByID is a stub: logs a notice and returns an empty collection.
func (pg PostGIS) GetFeaturesByID(_ context.Context, _ string, _ []int64) (*domain.FeatureCollection, error) {
	log.Println("PostGIS support is not implemented yet, this just serves to demonstrate that we can support multiple types of datasources")
	return &domain.FeatureCollection{}, nil
}

// GetFeatures is a stub: logs a notice and returns nil results.
func (pg PostGIS) GetFeatures(_ context.Context, _ string, _ datasources.FeaturesCriteria) (*domain.FeatureCollection, domain.Cursors, error) {
	log.Println("PostGIS support is not implemented yet, this just serves to demonstrate that we can support multiple types of datasources")
	return nil, domain.Cursors{}, nil
}

// GetFeature is a stub: logs a notice and returns nil.
func (pg PostGIS) GetFeature(_ context.Context, _ string, _ int64) (*domain.Feature, error) {
	log.Println("PostGIS support is not implemented yet, this just serves to demonstrate that we can support multiple types of datasources")
	return nil, nil
}

// GetFeatureTableMetadata is a stub: logs a notice and returns nil.
func (pg PostGIS) GetFeatureTableMetadata(_ string) (datasources.FeatureTableMetadata, error) {
	log.Println("PostGIS support is not implemented yet, this just serves to demonstrate that we can support multiple types of datasources")
	return nil, nil
}
package datasources
import (
"context"
"fmt"
"log"
"os"
"strconv"
"strings"
"time"
)
// contextKey is an unexported type for context keys, preventing collisions
// with values stored by other packages (see context.WithValue docs).
type contextKey int

const (
	// env var: set to a boolean to log every SQL query
	envLogSQL = "LOG_SQL"
	// env var: a time.Duration string (e.g. '5s'); slower queries are always logged
	envSlowQueryTime     = "SLOW_QUERY_TIME"
	defaultSlowQueryTime = 5 * time.Second
	// context key under which Before() stores the query start time
	sqlContextKey contextKey = iota
)

// SQLLog query logging for debugging purposes
type SQLLog struct {
	LogSQL        bool          // when true, every query is logged
	SlowQueryTime time.Duration // queries slower than this are logged regardless of LogSQL
}
// NewSQLLogFromEnv build a SQLLog from environment variables listed in this file
func NewSQLLogFromEnv() *SQLLog {
	logSQL := false
	if raw := os.Getenv(envLogSQL); raw != "" {
		parsed, err := strconv.ParseBool(raw)
		if err != nil {
			log.Fatalf("invalid %s value provided, must be a boolean", envLogSQL)
		}
		logSQL = parsed
	}
	slowQueryTime := defaultSlowQueryTime
	if raw := os.Getenv(envSlowQueryTime); raw != "" {
		parsed, err := time.ParseDuration(raw)
		if err != nil {
			log.Fatalf("invalid %s value provided, value such as '5s' expected", envSlowQueryTime)
		}
		slowQueryTime = parsed
	}
	return &SQLLog{LogSQL: logSQL, SlowQueryTime: slowQueryTime}
}
// Before callback prior to execution of the given SQL query.
// Stores the start time in the context so After can compute the query duration.
func (s *SQLLog) Before(ctx context.Context, _ string, _ ...any) (context.Context, error) {
	return context.WithValue(ctx, sqlContextKey, time.Now()), nil
}
// After callback once execution of the given SQL query is done.
// Logs the query (with bind vars substituted) when it exceeded SlowQueryTime
// or when SQL logging is enabled.
func (s *SQLLog) After(ctx context.Context, query string, args ...any) (context.Context, error) {
	// comma-ok assertion: the unchecked assertion panicked when Before wasn't
	// called for this context (no start time stored)
	start, ok := ctx.Value(sqlContextKey).(time.Time)
	if !ok {
		return ctx, nil
	}
	timeSpent := time.Since(start)
	if timeSpent > s.SlowQueryTime || s.LogSQL {
		query = replaceBindVars(query, args)
		log.Printf("\n--- SQL:\n%s\n--- SQL query took: %s\n", query, timeSpent)
	}
	return ctx, nil
}
// replaceBindVars replaces '?' bind vars in order to log a complete query
func replaceBindVars(query string, args []any) string {
	var sb strings.Builder
	remainder := query
	for _, arg := range args {
		pos := strings.IndexByte(remainder, '?')
		if pos < 0 {
			break // more args than placeholders: leave the rest untouched
		}
		sb.WriteString(remainder[:pos])
		fmt.Fprintf(&sb, "%v", arg)
		remainder = remainder[pos+1:]
	}
	sb.WriteString(remainder)
	return sb.String()
}
package domain
import (
"bytes"
"encoding/base64"
"log"
"math/big"
neturl "net/url"
"strings"
)
// separator between the feature-id part and the checksum part of an encoded cursor
const separator = '|'

// Cursors holds next and previous cursor. Note that we use
// 'cursor-based pagination' as opposed to 'offset-based pagination'
type Cursors struct {
	Prev EncodedCursor
	Next EncodedCursor

	HasPrev bool
	HasNext bool
}

// EncodedCursor is a scrambled string representation of the fields defined in DecodedCursor
type EncodedCursor string

// DecodedCursor the cursor values after decoding EncodedCursor
type DecodedCursor struct {
	FiltersChecksum []byte // checksum of the filter query params the cursor was created for
	FID             int64  // feature id to resume pagination from (0 = first page)
}

// PrevNextFID previous and next feature id (fid) to encode in cursor.
type PrevNextFID struct {
	Prev int64
	Next int64
}
// NewCursors create Cursors based on the prev/next feature ids from the datasource
// and the provided filters (captured in a hash).
func NewCursors(fid PrevNextFID, filtersChecksum []byte) Cursors {
	prev := encodeCursor(fid.Prev, filtersChecksum)
	next := encodeCursor(fid.Next, filtersChecksum)
	return Cursors{
		Prev:    prev,
		Next:    next,
		HasPrev: fid.Prev > 0, // fid 0 means: no previous page
		HasNext: fid.Next > 0, // fid 0 means: no next page
	}
}
// encodeCursor builds an opaque cursor string for the given feature id and
// filters checksum.
func encodeCursor(fid int64, filtersChecksum []byte) EncodedCursor {
	enc := base64.RawURLEncoding
	// format of the cursor: <encoded fid><separator><encoded checksum>
	var sb strings.Builder
	sb.WriteString(enc.EncodeToString(big.NewInt(fid).Bytes()))
	sb.WriteByte(separator)
	sb.WriteString(enc.EncodeToString(filtersChecksum))
	return EncodedCursor(sb.String())
}
// Decode turns encoded cursor into DecodedCursor and verifies that the
// checksum of query params that act as filters hasn't changed.
// On any decoding problem it falls back to the first page (FID 0) rather
// than failing the request.
func (c EncodedCursor) Decode(filtersChecksum []byte) DecodedCursor {
	value, err := neturl.QueryUnescape(string(c))
	if err != nil || value == "" {
		// empty or malformed cursor: silently start at the first page
		return DecodedCursor{filtersChecksum, 0}
	}
	// split first, then decode
	encoded := strings.Split(value, string(separator))
	if len(encoded) < 2 {
		log.Printf("cursor '%s' doesn't contain expected separator %c", value, separator)
		return DecodedCursor{filtersChecksum, 0}
	}
	decodedFid, fidErr := base64.RawURLEncoding.DecodeString(encoded[0])
	decodedChecksum, checksumErr := base64.RawURLEncoding.DecodeString(encoded[1])
	if fidErr != nil || checksumErr != nil {
		log.Printf("decoding cursor value '%s' failed, defaulting to first page", value)
		return DecodedCursor{filtersChecksum, 0}
	}
	// feature id
	fid := big.NewInt(0).SetBytes(decodedFid).Int64()
	if fid < 0 {
		log.Printf("negative feature ID detected: %d, defaulting to first page", fid)
		fid = 0
	}
	// checksum: if the filter query params changed mid-pagination the cursor is
	// no longer valid, so restart at the first page with the new checksum
	if !bytes.Equal(decodedChecksum, filtersChecksum) {
		log.Printf("filters in query params have changed during pagination, resetting to first page")
		return DecodedCursor{filtersChecksum, 0}
	}
	return DecodedCursor{filtersChecksum, fid}
}
// String returns the raw string form of the encoded cursor.
func (c EncodedCursor) String() string {
	return string(c)
}
package domain
import (
"github.com/go-spatial/geom/encoding/geojson"
)
// featureCollectionType allows the GeoJSON type to be automatically set during json marshalling
type featureCollectionType struct{}

// MarshalJSON always emits the fixed GeoJSON type name "FeatureCollection".
func (fc *featureCollectionType) MarshalJSON() ([]byte, error) {
	return []byte(`"FeatureCollection"`), nil
}

// FeatureCollection is a GeoJSON FeatureCollection with extras such as links
type FeatureCollection struct {
	Type           featureCollectionType `json:"type"`
	Timestamp      string                `json:"timeStamp,omitempty"`
	Links          []Link                `json:"links,omitempty"`
	Features       []*Feature            `json:"features"`
	NumberReturned int                   `json:"numberReturned"`
}

// Feature is a GeoJSON Feature with extras such as links
type Feature struct {
	geojson.Feature
	Links []Link `json:"links,omitempty"`

	// we overwrite ID since we want to make it a required attribute. We also expect feature ids to be
	// auto-incrementing integers (which is the default in geopackages) since we use it for cursor-based pagination.
	ID int64 `json:"id"`
}

// Link according to RFC 8288, https://datatracker.ietf.org/doc/html/rfc8288
type Link struct {
	Rel       string `json:"rel"`
	Title     string `json:"title,omitempty"`
	Type      string `json:"type,omitempty"`
	Href      string `json:"href"`
	Hreflang  string `json:"hreflang,omitempty"`
	Length    int64  `json:"length,omitempty"`
	Templated bool   `json:"templated,omitempty"`
}
package domain
import (
"github.com/go-spatial/geom"
)
const (
	// ConformanceJSONFGCore conformance class URI of the JSON-FG 'core' profile
	ConformanceJSONFGCore = "http://www.opengis.net/spec/json-fg-1/0.2/conf/core"
)

// featureType allows the type for Feature to be automatically set during json Marshalling
type featureType struct{}

// MarshalJSON always emits the fixed type name "Feature".
func (ft *featureType) MarshalJSON() ([]byte, error) {
	return []byte(`"Feature"`), nil
}

// JSONFGFeatureCollection is a feature collection according to the OGC
// Features and Geometries JSON (JSON-FG) spec.
type JSONFGFeatureCollection struct {
	Type           featureCollectionType `json:"type"`
	Timestamp      string                `json:"timeStamp,omitempty"`
	CoordRefSys    string                `json:"coordRefSys"`
	Links          []Link                `json:"links,omitempty"`
	ConformsTo     []string              `json:"conformsTo"`
	Features       []*JSONFGFeature      `json:"features"`
	NumberReturned int                   `json:"numberReturned"`
}

// JSONFGFeature is a single feature according to the JSON-FG spec; it carries
// its geometry in either 'place' (non-WGS84) or 'geometry' (WGS84), see setGeom.
type JSONFGFeature struct {
	Type featureType `json:"type"`
	Time any         `json:"time"`
	// we don't implement the JSON-FG "3D" conformance class. So Place only
	// supports simple/2D geometries, no 3D geometries like Polyhedron, Prism, etc.
	Place       geom.Geometry  `json:"place"`    // may only contain non-WGS84 geometries
	Geometry    geom.Geometry  `json:"geometry"` // may only contain WGS84 geometries
	Properties  map[string]any `json:"properties"`
	CoordRefSys string         `json:"coordRefSys,omitempty"`
	Links       []Link         `json:"links,omitempty"`
	ConformsTo  []string       `json:"conformsTo,omitempty"`
	// We expect feature ids to be auto-incrementing integers (which is the default in geopackages)
	// since we use it for cursor-based pagination.
	ID int64 `json:"id"`
}
package domain
import (
"fmt"
"time"
"github.com/go-spatial/geom"
"github.com/go-spatial/geom/encoding/geojson"
"github.com/jmoiron/sqlx"
)
// MapRowsToFeatureIDs datasource agnostic mapper from SQL rows set feature IDs, including prev/next feature ID.
// Expects each row to hold exactly 3 columns: feature id, previous feature id, next feature id
// (prev/next are only meaningful on the first row).
func MapRowsToFeatureIDs(rows *sqlx.Rows) (featureIDs []int64, prevNextID *PrevNextFID, err error) {
	firstRow := true
	for rows.Next() {
		var values []any
		if values, err = rows.SliceScan(); err != nil {
			return nil, nil, err
		}
		if len(values) != 3 {
			return nil, nil, fmt.Errorf("expected 3 columns containing the feature id, "+
				"the previous feature id and the next feature id. Got: %v", values)
		}
		// comma-ok assertion: return an error instead of panicking on unexpected column types
		featureID, ok := values[0].(int64)
		if !ok {
			return nil, nil, fmt.Errorf("expected feature id to be an int64, got: %T", values[0])
		}
		featureIDs = append(featureIDs, featureID)
		if firstRow {
			prev := int64(0)
			if values[1] != nil {
				prev = values[1].(int64)
			}
			next := int64(0)
			if values[2] != nil {
				next = values[2].(int64)
			}
			prevNextID = &PrevNextFID{Prev: prev, Next: next}
			firstRow = false
		}
	}
	// rows.Next() returns false on both exhaustion and error; surface iteration
	// errors instead of silently returning a truncated result
	if err = rows.Err(); err != nil {
		return nil, nil, err
	}
	return
}
// MapRowsToFeatures datasource agnostic mapper from SQL rows/result set to Features domain model.
// The fid and geometry columns are mapped to dedicated Feature fields (geometry decoded via
// geomMapper), all other columns become feature properties. Returns the features plus the
// prev/next feature ids taken from the first row.
func MapRowsToFeatures(rows *sqlx.Rows, fidColumn string, geomColumn string,
	geomMapper func([]byte) (geom.Geometry, error)) ([]*Feature, *PrevNextFID, error) {
	result := make([]*Feature, 0)
	columns, err := rows.Columns()
	if err != nil {
		return result, nil, err
	}

	firstRow := true
	var prevNextID *PrevNextFID
	for rows.Next() {
		var values []any
		if values, err = rows.SliceScan(); err != nil {
			return result, nil, err
		}

		feature := &Feature{Feature: geojson.Feature{Properties: make(map[string]any)}}
		np, err := mapColumnsToFeature(firstRow, feature, columns, values, fidColumn, geomColumn, geomMapper)
		if err != nil {
			return result, nil, err
		} else if firstRow {
			prevNextID = np
			firstRow = false
		}
		result = append(result, feature)
	}
	// rows.Next() returns false on both exhaustion and error; surface iteration
	// errors instead of silently returning a truncated result
	if err = rows.Err(); err != nil {
		return result, nil, err
	}
	return result, prevNextID, nil
}
// mapColumnsToFeature maps one SQL result row (parallel slices of column names and values)
// onto the given Feature: the fid column becomes the feature ID, the geometry column is
// decoded via geomMapper, bookkeeping columns (bbox/zoom/prevfid/nextfid) are handled
// specially, and every remaining column becomes a feature property. The returned
// PrevNextFID is only populated when firstRow is true.
//
//nolint:cyclop,funlen
func mapColumnsToFeature(firstRow bool, feature *Feature, columns []string, values []any,
	fidColumn string, geomColumn string, geomMapper func([]byte) (geom.Geometry, error)) (*PrevNextFID, error) {
	prevNextID := PrevNextFID{}
	for i, columnName := range columns {
		columnValue := values[i]
		switch columnName {
		case fidColumn:
			// NOTE(review): assumes the fid column always scans as int64 — panics otherwise; confirm datasource guarantees this
			feature.ID = columnValue.(int64)
		case geomColumn:
			if columnValue == nil {
				feature.Properties[columnName] = nil
				continue
			}
			rawGeom, ok := columnValue.([]byte)
			if !ok {
				return nil, fmt.Errorf("failed to read geometry from %s column in datasource", geomColumn)
			}
			mappedGeom, err := geomMapper(rawGeom)
			if err != nil {
				return nil, fmt.Errorf("failed to map/decode geometry from datasource, error: %w", err)
			}
			feature.Geometry = geojson.Geometry{Geometry: mappedGeom}
		case "minx", "miny", "maxx", "maxy", "min_zoom", "max_zoom":
			// Skip these columns used for bounding box and zoom filtering
			continue
		case "prevfid":
			// Only the first row in the result set contains the previous feature id
			if firstRow && columnValue != nil {
				prevNextID.Prev = columnValue.(int64)
			}
		case "nextfid":
			// Only the first row in the result set contains the next feature id
			if firstRow && columnValue != nil {
				prevNextID.Next = columnValue.(int64)
			}
		default:
			if columnValue == nil {
				feature.Properties[columnName] = nil
				continue
			}
			// Grab any non-nil, non-id, non-bounding box, & non-geometry column as a tag
			switch v := columnValue.(type) {
			case []uint8:
				// copy the bytes: the driver may reuse the underlying buffer for the next row
				asBytes := make([]byte, len(v))
				copy(asBytes, v)
				feature.Properties[columnName] = string(asBytes)
			case int64:
				feature.Properties[columnName] = v
			case float64:
				feature.Properties[columnName] = v
			case time.Time:
				feature.Properties[columnName] = v
			case string:
				feature.Properties[columnName] = v
			case bool:
				feature.Properties[columnName] = v
			default:
				return nil, fmt.Errorf("unexpected type for sqlite column data: %v: %T", columns[i], v)
			}
		}
	}
	return &prevNextID, nil
}
package features
import (
"net/http"
"strconv"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
)
const (
	// path prefix used when building breadcrumb links to collection pages
	collectionsCrumb = "collections/"
)

var (
	// root breadcrumb shared by all feature pages
	collectionsBreadcrumb = []engine.Breadcrumb{
		{
			Name: "Collections",
			Path: "collections",
		},
	}
	// template keys for the HTML representations of feature collections and single features
	featuresKey = engine.NewTemplateKey(templatesDir + "features.go.html")
	featureKey  = engine.NewTemplateKey(templatesDir + "feature.go.html")
)
// htmlFeatures renders the HTML representation of features and feature collections.
type htmlFeatures struct {
	engine *engine.Engine
}

// newHTMLFeatures parses the required templates up-front and returns the HTML renderer.
func newHTMLFeatures(e *engine.Engine) *htmlFeatures {
	e.ParseTemplate(featuresKey)
	e.ParseTemplate(featureKey)
	return &htmlFeatures{
		engine: e,
	}
}
// featureCollectionPage enriched FeatureCollection for HTML representation.
type featureCollectionPage struct {
	domain.FeatureCollection
	CollectionID    string
	Metadata        *config.GeoSpatialCollectionMetadata
	Cursor          domain.Cursors
	PrevLink        string // URL of the previous page (HTML format)
	NextLink        string // URL of the next page (HTML format)
	Limit           int
	ReferenceDate   *time.Time // nil when no datetime filter applies
	PropertyFilters map[string]string
}

// featurePage enriched Feature for HTML representation.
type featurePage struct {
	domain.Feature
	CollectionID string
	FeatureID    int64
	Metadata     *config.GeoSpatialCollectionMetadata
}
// features renders the HTML page for a collection's items, including
// breadcrumbs and prev/next page links.
func (hf *htmlFeatures) features(w http.ResponseWriter, r *http.Request, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL, limit int, referenceDate *time.Time,
	propertyFilters map[string]string, fc *domain.FeatureCollection) {

	collectionMetadata := collections[collectionID]
	breadcrumbs := append(collectionsBreadcrumb,
		engine.Breadcrumb{
			Name: getCollectionTitle(collectionID, collectionMetadata),
			Path: collectionsCrumb + collectionID,
		},
		engine.Breadcrumb{
			Name: "Items",
			Path: collectionsCrumb + collectionID + "/items",
		})
	// a zero reference date means "no datetime filter": hide it from the page
	if referenceDate.IsZero() {
		referenceDate = nil
	}
	pageContent := &featureCollectionPage{
		FeatureCollection: *fc,
		CollectionID:      collectionID,
		Metadata:          collectionMetadata,
		Cursor:            cursor,
		PrevLink:          featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatHTML),
		NextLink:          featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatHTML),
		Limit:             limit,
		ReferenceDate:     referenceDate,
		PropertyFilters:   propertyFilters,
	}
	lang := hf.engine.CN.NegotiateLanguage(w, r)
	hf.engine.RenderAndServePage(w, r, engine.ExpandTemplateKey(featuresKey, lang), pageContent, breadcrumbs)
}
// feature renders the HTML page for a single feature, including breadcrumbs.
func (hf *htmlFeatures) feature(w http.ResponseWriter, r *http.Request, collectionID string, feat *domain.Feature) {
	collectionMetadata := collections[collectionID]
	featureIDAsString := strconv.FormatInt(feat.ID, 10)
	breadcrumbs := append(collectionsBreadcrumb,
		engine.Breadcrumb{
			Name: getCollectionTitle(collectionID, collectionMetadata),
			Path: collectionsCrumb + collectionID,
		},
		engine.Breadcrumb{
			Name: "Items",
			Path: collectionsCrumb + collectionID + "/items",
		},
		engine.Breadcrumb{
			Name: featureIDAsString,
			Path: collectionsCrumb + collectionID + "/items/" + featureIDAsString,
		})
	pageContent := &featurePage{
		Feature:      *feat,
		CollectionID: collectionID,
		FeatureID:    feat.ID,
		Metadata:     collectionMetadata,
	}
	lang := hf.engine.CN.NegotiateLanguage(w, r)
	hf.engine.RenderAndServePage(w, r, engine.ExpandTemplateKey(featureKey, lang), pageContent, breadcrumbs)
}
// getCollectionTitle returns the configured title of a collection, falling
// back to the collection ID when no title is configured.
func getCollectionTitle(collectionID string, metadata *config.GeoSpatialCollectionMetadata) string {
	if metadata == nil || metadata.Title == nil {
		return collectionID
	}
	return *metadata.Title
}
package features
import (
"bytes"
stdjson "encoding/json"
"io"
"log"
"net/http"
"os"
"strconv"
"time"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
perfjson "github.com/goccy/go-json"
)
var (
	now = time.Now // allow mocking

	// escape hatch: set DISABLE_JSON_PERF_OPTIMIZATION=true to fall back to the stdlib JSON encoder
	disableJSONPerfOptimization, _ = strconv.ParseBool(os.Getenv("DISABLE_JSON_PERF_OPTIMIZATION"))
)

// jsonFeatures renders the GeoJSON and JSON-FG representations of features.
type jsonFeatures struct {
	engine           *engine.Engine
	validateResponse bool // when true, responses are validated against the OpenAPI spec before serving
}
// newJSONFeatures creates the JSON renderer; logs a performance hint when
// response validation is enabled.
func newJSONFeatures(e *engine.Engine) *jsonFeatures {
	validate := *e.Config.OgcAPI.Features.ValidateResponses
	if validate {
		log.Println("JSON response validation is enabled (by default). When serving large feature collections " +
			"set 'validateResponses' to 'false' to improve performance")
	}
	return &jsonFeatures{
		engine:           e,
		validateResponse: validate,
	}
}
// featuresAsGeoJSON serves the given FeatureCollection as GeoJSON, after
// stamping it with a timestamp and pagination/self links.
func (jf *jsonFeatures) featuresAsGeoJSON(w http.ResponseWriter, r *http.Request, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL, fc *domain.FeatureCollection) {

	fc.Timestamp = now().Format(time.RFC3339)
	fc.Links = jf.createFeatureCollectionLinks(engine.FormatGeoJSON, collectionID, cursor, featuresURL)
	if !jf.validateResponse {
		serveJSON(&fc, engine.MediaTypeGeoJSON, w)
		return
	}
	jf.serveAndValidateJSON(&fc, engine.MediaTypeGeoJSON, r, w)
}
// featureAsGeoJSON serves a single feature as GeoJSON, including its links.
func (jf *jsonFeatures) featureAsGeoJSON(w http.ResponseWriter, r *http.Request, collectionID string,
	feat *domain.Feature, url featureURL) {

	feat.Links = jf.createFeatureLinks(engine.FormatGeoJSON, url, collectionID, feat.ID)
	if !jf.validateResponse {
		serveJSON(&feat, engine.MediaTypeGeoJSON, w)
		return
	}
	jf.serveAndValidateJSON(&feat, engine.MediaTypeGeoJSON, r, w)
}
// featuresAsJSONFG converts the FeatureCollection to JSON-FG and serves it.
// Geometries end up in either 'place' or 'geometry' depending on the CRS (see setGeom).
func (jf *jsonFeatures) featuresAsJSONFG(w http.ResponseWriter, r *http.Request, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL, fc *domain.FeatureCollection, crs ContentCrs) {

	fgFC := domain.JSONFGFeatureCollection{
		ConformsTo:  []string{domain.ConformanceJSONFGCore},
		CoordRefSys: string(crs),
	}
	if len(fc.Features) == 0 {
		// explicit empty slice so JSON output contains "features": [] instead of null
		fgFC.Features = make([]*domain.JSONFGFeature, 0)
	} else {
		for _, feat := range fc.Features {
			fgFeat := &domain.JSONFGFeature{
				ID:         feat.ID,
				Links:      feat.Links,
				Properties: feat.Properties,
			}
			setGeom(crs, fgFeat, feat)
			fgFC.Features = append(fgFC.Features, fgFeat)
		}
	}
	fgFC.NumberReturned = fc.NumberReturned
	fgFC.Timestamp = now().Format(time.RFC3339)
	fgFC.Links = jf.createFeatureCollectionLinks(engine.FormatJSONFG, collectionID, cursor, featuresURL)

	if !jf.validateResponse {
		serveJSON(&fgFC, engine.MediaTypeJSONFG, w)
		return
	}
	jf.serveAndValidateJSON(&fgFC, engine.MediaTypeJSONFG, r, w)
}
// featureAsJSONFG converts a single feature to JSON-FG and serves it.
func (jf *jsonFeatures) featureAsJSONFG(w http.ResponseWriter, r *http.Request, collectionID string,
	f *domain.Feature, url featureURL, crs ContentCrs) {

	fgFeat := domain.JSONFGFeature{
		ID:          f.ID,
		Links:       f.Links,
		ConformsTo:  []string{domain.ConformanceJSONFGCore},
		CoordRefSys: string(crs),
		Properties:  f.Properties,
	}
	setGeom(crs, &fgFeat, f)
	fgFeat.Links = jf.createFeatureLinks(engine.FormatJSONFG, url, collectionID, fgFeat.ID)

	if !jf.validateResponse {
		serveJSON(&fgFeat, engine.MediaTypeJSONFG, w)
		return
	}
	jf.serveAndValidateJSON(&fgFeat, engine.MediaTypeJSONFG, r, w)
}
// createFeatureCollectionLinks builds the self/alternate/next/prev links for a
// feature collection response in the given format. Note: self links in GeoJSON
// deliberately use the plain JSON format suffix.
func (jf *jsonFeatures) createFeatureCollectionLinks(currentFormat string, collectionID string,
	cursor domain.Cursors, featuresURL featureCollectionURL) []domain.Link {

	links := make([]domain.Link, 0)
	add := func(rel, title, mediaType, href string) {
		links = append(links, domain.Link{Rel: rel, Title: title, Type: mediaType, Href: href})
	}

	switch currentFormat {
	case engine.FormatGeoJSON:
		add("self", "This document as GeoJSON", engine.MediaTypeGeoJSON,
			featuresURL.toSelfURL(collectionID, engine.FormatJSON))
		add("alternate", "This document as JSON-FG", engine.MediaTypeJSONFG,
			featuresURL.toSelfURL(collectionID, engine.FormatJSONFG))
	case engine.FormatJSONFG:
		add("self", "This document as JSON-FG", engine.MediaTypeJSONFG,
			featuresURL.toSelfURL(collectionID, engine.FormatJSONFG))
		add("alternate", "This document as GeoJSON", engine.MediaTypeGeoJSON,
			featuresURL.toSelfURL(collectionID, engine.FormatJSON))
	}
	add("alternate", "This document as HTML", engine.MediaTypeHTML,
		featuresURL.toSelfURL(collectionID, engine.FormatHTML))

	if cursor.HasNext {
		switch currentFormat {
		case engine.FormatGeoJSON:
			add("next", "Next page", engine.MediaTypeGeoJSON,
				featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatJSON))
		case engine.FormatJSONFG:
			add("next", "Next page", engine.MediaTypeJSONFG,
				featuresURL.toPrevNextURL(collectionID, cursor.Next, engine.FormatJSONFG))
		}
	}
	if cursor.HasPrev {
		switch currentFormat {
		case engine.FormatGeoJSON:
			add("prev", "Previous page", engine.MediaTypeGeoJSON,
				featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatJSON))
		case engine.FormatJSONFG:
			add("prev", "Previous page", engine.MediaTypeJSONFG,
				featuresURL.toPrevNextURL(collectionID, cursor.Prev, engine.FormatJSONFG))
		}
	}
	return links
}
// createFeatureLinks builds the self/alternate/collection links for a single
// feature response in the given format.
func (jf *jsonFeatures) createFeatureLinks(currentFormat string, url featureURL,
	collectionID string, featureID int64) []domain.Link {

	links := make([]domain.Link, 0)
	add := func(rel, title, mediaType, href string) {
		links = append(links, domain.Link{Rel: rel, Title: title, Type: mediaType, Href: href})
	}

	switch currentFormat {
	case engine.FormatGeoJSON:
		add("self", "This document as GeoJSON", engine.MediaTypeGeoJSON,
			url.toSelfURL(collectionID, featureID, engine.FormatJSON))
		add("alternate", "This document as JSON-FG", engine.MediaTypeJSONFG,
			url.toSelfURL(collectionID, featureID, engine.FormatJSONFG))
	case engine.FormatJSONFG:
		add("self", "This document as JSON-FG", engine.MediaTypeJSONFG,
			url.toSelfURL(collectionID, featureID, engine.FormatJSONFG))
		add("alternate", "This document as GeoJSON", engine.MediaTypeGeoJSON,
			url.toSelfURL(collectionID, featureID, engine.FormatJSON))
	}
	add("alternate", "This document as HTML", engine.MediaTypeHTML,
		url.toSelfURL(collectionID, featureID, engine.FormatHTML))
	add("collection", "The collection to which this feature belongs", engine.MediaTypeJSON,
		url.toCollectionURL(collectionID, engine.FormatJSON))
	return links
}
// serveAndValidateJSON serves JSON after performing OpenAPI response validation.
// Note: this requires first marshalling the result to JSON in-memory, since the
// validator needs the complete response body before it can be written to the client.
func (jf *jsonFeatures) serveAndValidateJSON(input any, contentType string, r *http.Request, w http.ResponseWriter) {
	json := &bytes.Buffer{}
	if err := getEncoder(json).Encode(input); err != nil {
		handleJSONEncodingFailure(err, w)
		return
	}
	// request validation was already performed earlier in the handler, hence 'false'
	jf.engine.ServeResponse(w, r, false /* performed earlier */, jf.validateResponse, contentType, json.Bytes())
}
// serveJSON serves JSON *WITHOUT* OpenAPI validation by writing directly to the response output stream
func serveJSON(input any, contentType string, w http.ResponseWriter) {
	w.Header().Set(engine.HeaderContentType, contentType)

	err := getEncoder(w).Encode(input)
	if err != nil {
		handleJSONEncodingFailure(err, w)
	}
}
// jsonEncoder abstracts over the stdlib and third-party JSON encoders (see getEncoder).
type jsonEncoder interface {
	Encode(input any) error
}
// getEncoder creates a JSON encoder for the given writer. Note escaping of
// '<', '>' and '&' is disabled (HTMLEscape is false). Especially the '&' is
// important since we use this character in the next/prev links.
func getEncoder(w io.Writer) jsonEncoder {
	if !disableJSONPerfOptimization {
		// use ~7% overall faster 3rd party JSON encoder (in case of issues switch back to stdlib using env variable)
		fastEncoder := perfjson.NewEncoder(w)
		fastEncoder.SetEscapeHTML(false)
		return fastEncoder
	}
	// use Go stdlib JSON encoder
	stdEncoder := stdjson.NewEncoder(w)
	stdEncoder.SetEscapeHTML(false)
	return stdEncoder
}
// handleJSONEncodingFailure logs the encoding error and renders a generic
// server-error problem response (avoids leaking encoder internals to clients).
func handleJSONEncodingFailure(err error, w http.ResponseWriter) {
	log.Printf("JSON encoding failed: %v", err)
	engine.RenderProblem(engine.ProblemServerError, w, "Failed to write JSON response")
}
// setGeom assigns the feature's geometry to the proper JSON-FG field:
// 'geometry' for WGS84 output, 'place' for any other CRS.
func setGeom(crs ContentCrs, jsonfgFeature *domain.JSONFGFeature, feature *domain.Feature) {
	if !crs.IsWGS84() {
		jsonfgFeature.Place = feature.Geometry
		return
	}
	jsonfgFeature.Geometry = feature.Geometry
}
package features
import (
"fmt"
"log"
"net/http"
"strconv"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/geopackage"
"github.com/PDOK/gokoala/internal/ogc/features/datasources/postgis"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/go-chi/chi/v5"
"github.com/go-spatial/geom"
)
const (
	templatesDir  = "internal/ogc/features/templates/"
	crsURIPrefix  = "http://www.opengis.net/def/crs/"
	undefinedSRID = 0
	wgs84SRID     = 100000 // We use the SRID for CRS84 (WGS84) as defined in the GeoPackage, instead of EPSG:4326 (due to axis order). In time, we may need to read this value dynamically from the GeoPackage.
	wgs84CodeOGC  = "CRS84"
	wgs84CrsURI   = crsURIPrefix + "OGC/1.3/" + wgs84CodeOGC
)

var (
	// collections is populated once in NewFeatures and read by the handlers afterwards
	collections map[string]*config.GeoSpatialCollectionMetadata

	// emptyFeatureCollection is a package-level shared value; NOTE(review):
	// handlers must not mutate it (the JSON writers set Timestamp/Links on the
	// collection they serve) — confirm call sites
	emptyFeatureCollection = &domain.FeatureCollection{Features: make([]*domain.Feature, 0)}
)

// DatasourceKey identifies a datasource by coordinate system (SRID) and collection.
type DatasourceKey struct {
	srid         int
	collectionID string
}

// DatasourceConfig couples a datasource with the collections it serves.
type DatasourceConfig struct {
	collections config.GeoSpatialCollections
	ds          config.Datasource
}

// Features provides the OGC API Features endpoints (feature collections and single features).
type Features struct {
	engine      *engine.Engine
	datasources map[DatasourceKey]ds.Datasource
	html        *htmlFeatures
	json        *jsonFeatures
}
// NewFeatures sets up the OGC API Features building block: caches collection
// metadata (note: populates the package-level 'collections' map), creates all
// configured datasources, rebuilds the OpenAPI spec for the features endpoints
// and registers the /items routes.
func NewFeatures(e *engine.Engine) *Features {
	collections = cacheCollectionsMetadata(e)
	datasources := createDatasources(e)
	rebuildOpenAPIForFeatures(e, datasources)
	f := &Features{
		engine:      e,
		datasources: datasources,
		html:        newHTMLFeatures(e),
		json:        newJSONFeatures(e),
	}
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/items", f.Features())
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/items/{featureId}", f.Feature())
	return f
}
// Features serve a FeatureCollection with the given collectionId.
// Depending on the requested input/output CRS and bbox, features are fetched in a single
// query (fast path) or in two steps: ids in input CRS, then features in output CRS.
//
//nolint:cyclop
func (f *Features) Features() http.HandlerFunc {
	cfg := f.engine.Config

	return func(w http.ResponseWriter, r *http.Request) {
		if err := f.engine.OpenAPI.ValidateRequest(r); err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		collectionID := chi.URLParam(r, "collectionId")
		if _, ok := collections[collectionID]; !ok {
			handleCollectionNotFound(w, collectionID)
			return
		}
		url := featureCollectionURL{*cfg.BaseURL.URL, r.URL.Query(), cfg.OgcAPI.Features.Limit,
			cfg.OgcAPI.Features.PropertyFiltersForCollection(collectionID), false}
		if collection := collections[collectionID]; collection != nil && collection.TemporalProperties != nil {
			url.supportsDatetime = true
		}
		encodedCursor, limit, inputSRID, outputSRID, contentCrs, bbox, referenceDate, propertyFilters, err := url.parse()
		// check the parse error BEFORE using any of its results (previously
		// temporalCriteria was built from referenceDate before the error check)
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		var temporalCriteria ds.TemporalCriteria
		if collection := collections[collectionID]; collection != nil && collection.TemporalProperties != nil {
			temporalCriteria = ds.TemporalCriteria{
				ReferenceDate:     referenceDate,
				StartDateProperty: collection.TemporalProperties.StartDate,
				EndDateProperty:   collection.TemporalProperties.EndDate}
		}
		w.Header().Add(engine.HeaderContentCrs, contentCrs.ToLink())

		var newCursor domain.Cursors
		var fc *domain.FeatureCollection
		if querySingleDatasource(inputSRID, outputSRID, bbox) {
			// fast path
			datasource := f.datasources[DatasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collectionID}]
			fc, newCursor, err = datasource.GetFeatures(r.Context(), collectionID, ds.FeaturesCriteria{
				Cursor:           encodedCursor.Decode(url.checksum()),
				Limit:            limit,
				InputSRID:        inputSRID.GetOrDefault(),
				OutputSRID:       outputSRID.GetOrDefault(),
				Bbox:             bbox,
				TemporalCriteria: temporalCriteria,
				PropertyFilters:  propertyFilters,
				// Add filter, filter-lang
			})
			if err != nil {
				handleFeatureCollectionError(w, collectionID, err)
				return
			}
		} else {
			// slower path: get feature ids by input CRS (step 1), then the actual features in output CRS (step 2)
			var fids []int64
			datasource := f.datasources[DatasourceKey{srid: inputSRID.GetOrDefault(), collectionID: collectionID}]
			fids, newCursor, err = datasource.GetFeatureIDs(r.Context(), collectionID, ds.FeaturesCriteria{
				Cursor:           encodedCursor.Decode(url.checksum()),
				Limit:            limit,
				InputSRID:        inputSRID.GetOrDefault(),
				OutputSRID:       outputSRID.GetOrDefault(),
				Bbox:             bbox,
				TemporalCriteria: temporalCriteria,
				PropertyFilters:  propertyFilters,
				// Add filter, filter-lang
			})
			if err == nil && fids != nil {
				datasource = f.datasources[DatasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collectionID}]
				fc, err = datasource.GetFeaturesByID(r.Context(), collectionID, fids)
			}
			if err != nil {
				handleFeatureCollectionError(w, collectionID, err)
				return
			}
		}
		if fc == nil {
			// use a FRESH empty collection per request: the JSON writers mutate
			// fc (Timestamp/Links), so handing out the shared package-level
			// emptyFeatureCollection would let concurrent requests race on it
			fc = &domain.FeatureCollection{Features: make([]*domain.Feature, 0)}
		}

		format := f.engine.CN.NegotiateFormat(r)
		switch format {
		case engine.FormatHTML:
			f.html.features(w, r, collectionID, newCursor, url, limit, &referenceDate, propertyFilters, fc)
		case engine.FormatGeoJSON, engine.FormatJSON:
			f.json.featuresAsGeoJSON(w, r, collectionID, newCursor, url, fc)
		case engine.FormatJSONFG:
			f.json.featuresAsJSONFG(w, r, collectionID, newCursor, url, fc, contentCrs)
		default:
			engine.RenderProblem(engine.ProblemNotAcceptable, w, fmt.Sprintf("format '%s' is not supported", format))
			return
		}
	}
}
// Feature serves a single Feature
func (f *Features) Feature() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if err := f.engine.OpenAPI.ValidateRequest(r); err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		collectionID := chi.URLParam(r, "collectionId")
		if _, ok := collections[collectionID]; !ok {
			handleCollectionNotFound(w, collectionID)
			return
		}
		// parse as int64 (not Atoi): feature ids are int64 throughout the domain
		// model and Atoi would reject ids > 2^31-1 on 32-bit platforms
		featureID, err := strconv.ParseInt(chi.URLParam(r, "featureId"), 10, 64)
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, "feature ID must be a number")
			return
		}
		url := featureURL{*f.engine.Config.BaseURL.URL, r.URL.Query()}
		outputSRID, contentCrs, err := url.parse()
		if err != nil {
			engine.RenderProblem(engine.ProblemBadRequest, w, err.Error())
			return
		}
		w.Header().Add(engine.HeaderContentCrs, contentCrs.ToLink())

		datasource := f.datasources[DatasourceKey{srid: outputSRID.GetOrDefault(), collectionID: collectionID}]
		feat, err := datasource.GetFeature(r.Context(), collectionID, featureID)
		if err != nil {
			// log error, but sent generic message to client to prevent possible information leakage from datasource
			msg := fmt.Sprintf("failed to retrieve feature %d in collection %s", featureID, collectionID)
			log.Printf("%s, error: %v\n", msg, err)
			engine.RenderProblem(engine.ProblemServerError, w, msg)
			return
		}
		if feat == nil {
			msg := fmt.Sprintf("the requested feature with id: %d does not exist in collection '%s'", featureID, collectionID)
			log.Println(msg)
			engine.RenderProblem(engine.ProblemNotFound, w, msg)
			return
		}

		format := f.engine.CN.NegotiateFormat(r)
		switch format {
		case engine.FormatHTML:
			f.html.feature(w, r, collectionID, feat)
		case engine.FormatGeoJSON, engine.FormatJSON:
			f.json.featureAsGeoJSON(w, r, collectionID, feat, url)
		case engine.FormatJSONFG:
			f.json.featureAsJSONFG(w, r, collectionID, feat, url, contentCrs)
		default:
			engine.RenderProblem(engine.ProblemNotAcceptable, w, fmt.Sprintf("format '%s' is not supported", format))
			return
		}
	}
}
// cacheCollectionsMetadata builds a lookup from collection ID to its metadata.
func cacheCollectionsMetadata(e *engine.Engine) map[string]*config.GeoSpatialCollectionMetadata {
	byID := make(map[string]*config.GeoSpatialCollectionMetadata)
	for _, coll := range e.Config.OgcAPI.Features.Collections {
		byID[coll.ID] = coll.Metadata
	}
	return byID
}
// createDatasources instantiates a datasource for every (SRID, collection)
// combination found in the config. Collection-specific datasources take
// precedence over top-level (dataset-wide) ones.
func createDatasources(e *engine.Engine) map[DatasourceKey]ds.Datasource {
	configured := make(map[DatasourceKey]*DatasourceConfig, len(e.Config.OgcAPI.Features.Collections))
	// configure collection specific datasources first
	configureCollectionDatasources(e, configured)
	// now configure top-level datasources, for the whole dataset. But only when
	// there's no collection specific datasource already configured
	configureTopLevelDatasources(e, configured)
	if len(configured) == 0 {
		log.Fatal("no datasource(s) configured for OGC API Features, check config")
	}

	created := make(map[DatasourceKey]ds.Datasource, len(configured))
	for key, dsConfig := range configured {
		if dsConfig != nil {
			created[key] = newDatasource(e, dsConfig.collections, dsConfig.ds)
		}
	}
	return created
}
// configureTopLevelDatasources registers the dataset-wide datasources for every
// collection that doesn't already have a collection-specific datasource. One
// shared DatasourceConfig is used per (default or additional) datasource, in
// line with how the WGS84 default is shared.
func configureTopLevelDatasources(e *engine.Engine, result map[DatasourceKey]*DatasourceConfig) {
	cfg := e.Config.OgcAPI.Features
	if cfg.Datasources == nil {
		return
	}
	var defaultDS *DatasourceConfig
	for _, coll := range cfg.Collections {
		key := DatasourceKey{srid: wgs84SRID, collectionID: coll.ID}
		if result[key] == nil {
			if defaultDS == nil {
				defaultDS = &DatasourceConfig{cfg.Collections, cfg.Datasources.DefaultWGS84}
			}
			result[key] = defaultDS
		}
	}
	for _, additional := range cfg.Datasources.Additional {
		// hoisted out of the collections loop: the SRS is the same for every collection
		srid, err := epsgToSrid(additional.Srs)
		if err != nil {
			log.Fatal(err)
		}
		additionalDS := &DatasourceConfig{cfg.Collections, additional.Datasource}
		for _, coll := range cfg.Collections {
			key := DatasourceKey{srid: srid, collectionID: coll.ID}
			if result[key] == nil {
				result[key] = additionalDS
			}
		}
	}
}
// configureCollectionDatasources registers datasources configured on individual
// collections: the mandatory WGS84 default plus any additional SRS-specific ones.
func configureCollectionDatasources(e *engine.Engine, result map[DatasourceKey]*DatasourceConfig) {
	cfg := e.Config.OgcAPI.Features
	for _, coll := range cfg.Collections {
		if coll.Features == nil || coll.Features.Datasources == nil {
			continue
		}
		result[DatasourceKey{srid: wgs84SRID, collectionID: coll.ID}] =
			&DatasourceConfig{cfg.Collections, coll.Features.Datasources.DefaultWGS84}
		for _, additional := range coll.Features.Datasources.Additional {
			srid, err := epsgToSrid(additional.Srs)
			if err != nil {
				log.Fatal(err)
			}
			result[DatasourceKey{srid: srid, collectionID: coll.ID}] =
				&DatasourceConfig{cfg.Collections, additional.Datasource}
		}
	}
}
// newDatasource creates a datasource (GeoPackage or PostGIS) from config and
// registers its Close method as a shutdown hook.
//
// Exactly one backend must be configured: previously a config with neither
// GeoPackage nor PostGIS set caused a nil-interface panic on datasource.Close;
// now we fail fast with a clear message.
func newDatasource(e *engine.Engine, coll config.GeoSpatialCollections, dsConfig config.Datasource) ds.Datasource {
	var datasource ds.Datasource
	switch {
	case dsConfig.GeoPackage != nil:
		datasource = geopackage.NewGeoPackage(coll, *dsConfig.GeoPackage)
	case dsConfig.PostGIS != nil:
		datasource = postgis.NewPostGIS()
	default:
		log.Fatal("no datasource configured, supported datasources are: GeoPackage, PostGIS")
	}
	e.RegisterShutdownHook(datasource.Close)
	return datasource
}
// epsgToSrid converts a spatial reference string like "EPSG:28992" to its
// numeric SRID (28992). Returns an error when the prefix is missing or the
// code isn't numeric.
func epsgToSrid(srs string) (int, error) {
	const prefix = "EPSG:"
	code, hasPrefix := strings.CutPrefix(srs, prefix)
	if !hasPrefix {
		return -1, fmt.Errorf("expected configured SRS to start with '%s', got %s", prefix, srs)
	}
	srid, convErr := strconv.Atoi(code)
	if convErr != nil {
		return -1, fmt.Errorf("expected EPSG code to have numeric value, got %s", code)
	}
	return srid, nil
}
func handleCollectionNotFound(w http.ResponseWriter, collectionID string) {
msg := fmt.Sprintf("collection %s doesn't exist in this features service", collectionID)
log.Println(msg)
engine.RenderProblem(engine.ProblemNotFound, w, msg)
}
// handleFeatureCollectionError logs the underlying error but sends only a
// generic message to the client, to prevent information leakage from the datasource.
func handleFeatureCollectionError(w http.ResponseWriter, collectionID string, err error) {
	clientMsg := fmt.Sprintf("failed to retrieve feature collection %s", collectionID)
	log.Printf("%s, error: %v\n", clientMsg, err)
	engine.RenderProblem(engine.ProblemServerError, w, clientMsg)
}
// querySingleDatasource reports whether the request can be answered from a single
// datasource: no bbox given, identical input/output SRIDs, or an undefined SRID
// paired with WGS84 (which are interchangeable here).
func querySingleDatasource(input SRID, output SRID, bbox *geom.Extent) bool {
	if bbox == nil {
		return true
	}
	in, out := int(input), int(output)
	switch {
	case in == out:
		return true
	case in == undefinedSRID && out == wgs84SRID:
		return true
	case in == wgs84SRID && out == undefinedSRID:
		return true
	default:
		return false
	}
}
package features
import (
"fmt"
"log"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
ds "github.com/PDOK/gokoala/internal/ogc/features/datasources"
)
// OpenAPIPropertyFilter describes one property filter (name, description, data
// type) used to enrich the generated OpenAPI spec for a collection.
type OpenAPIPropertyFilter struct {
	Name string // filterable property; matches a column in the collection's feature table
	Description string
	DataType string // OpenAPI data type, e.g. "string", "integer" or "number"
}
// rebuildOpenAPIForFeatures Rebuild OpenAPI spec with additional info from given datasources
func rebuildOpenAPIForFeatures(e *engine.Engine, datasources map[DatasourceKey]ds.Datasource) {
	filtersByCollection, err := createPropertyFiltersByCollection(e.Config.OgcAPI.Features, datasources)
	if err != nil {
		log.Fatal(err)
	}
	// anonymous struct: the template only cares about the field name
	params := struct {
		PropertyFiltersByCollection map[string][]OpenAPIPropertyFilter
	}{
		PropertyFiltersByCollection: filtersByCollection,
	}
	e.RebuildOpenAPI(params)
}
// createPropertyFiltersByCollection matches the property filters from the config
// against the actual feature-table columns of each collection's datasource and
// returns, per collection, the filters enriched with the column's OpenAPI data type.
// Returns an error when a configured filter references a non-existing column.
func createPropertyFiltersByCollection(config *config.OgcAPIFeatures,
	datasources map[DatasourceKey]ds.Datasource) (map[string][]OpenAPIPropertyFilter, error) {
	result := make(map[string][]OpenAPIPropertyFilter)
	for k, datasource := range datasources {
		filtersConfig := config.PropertyFiltersForCollection(k.collectionID)
		if len(filtersConfig) == 0 {
			continue
		}
		featTable, err := datasource.GetFeatureTableMetadata(k.collectionID)
		if err != nil {
			continue // best-effort: skip datasources without metadata for this collection
		}
		featTableColumns := featTable.ColumnsWithDataType()
		propertyFilters := make([]OpenAPIPropertyFilter, 0, len(filtersConfig))
		for _, fc := range filtersConfig {
			// direct map lookup instead of a linear scan over all columns
			dataType, ok := featTableColumns[fc.Name]
			if !ok {
				return nil, fmt.Errorf("invalid property filter specified, "+
					"column '%s' doesn't exist in datasource attached to collection '%s'", fc.Name, k.collectionID)
			}
			propertyFilters = append(propertyFilters, OpenAPIPropertyFilter{
				Name:        fc.Name,
				Description: fc.Description,
				DataType:    datasourceToOpenAPI(dataType),
			})
		}
		result[k.collectionID] = propertyFilters
	}
	return result, nil
}
// datasourceToOpenAPI translates a database column data type to the corresponding
// OpenAPI data type; unknown types fall back to "string".
func datasourceToOpenAPI(dataType string) string {
	switch strings.ToUpper(dataType) {
	case "INTEGER":
		return "integer"
	case "REAL", "NUMERIC":
		return "number"
	case "TEXT", "VARCHAR":
		return "string"
	default:
		return "string"
	}
}
package features
import (
"bytes"
"errors"
"fmt"
"hash/fnv"
"net/url"
"slices"
"sort"
"strconv"
"strings"
"time"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/features/domain"
"github.com/go-spatial/geom"
)
const (
	// names of the query parameters supported on OGC API Features endpoints
	cursorParam = "cursor"
	limitParam = "limit"
	crsParam = "crs"
	dateTimeParam = "datetime"
	bboxParam = "bbox"
	bboxCrsParam = "bbox-crs"
	filterParam = "filter"
	filterCrsParam = "filter-crs"
	propertyFilterMaxLength = 512 // max nr of characters allowed in a property filter value
	propertyFilterWildcard = "*" // wildcard character; currently rejected in filter values
)
var (
	checksumExcludedParams = []string{engine.FormatParam, cursorParam} // don't include these in checksum
)
// SRID Spatial Reference System Identifier: a unique value to unambiguously identify a spatial coordinate system.
// For example '28992' in https://www.opengis.net/def/crs/EPSG/0/28992
type SRID int

// GetOrDefault returns this SRID as an int, falling back to WGS84 when the
// SRID is unset (zero) or invalid (negative).
func (s SRID) GetOrDefault() int {
	if v := int(s); v > 0 {
		return v
	}
	return wgs84SRID
}
// ContentCrs the coordinate reference system (represented as a URI) of the content/output to return.
type ContentCrs string
// ToLink returns link target conforming to RFC 8288
func (c ContentCrs) ToLink() string {
return fmt.Sprintf("<%s>", c)
}
func (c ContentCrs) IsWGS84() bool {
return string(c) == wgs84CrsURI
}
// URL to a page in a collection of features
type featureCollectionURL struct {
	baseURL url.URL // base URL of this API; paths/query params are appended to it
	params url.Values // query parameters of the incoming request
	limit config.Limit // default and max page size from config (see parseLimit)
	configuredPropertyFilters []config.PropertyFilter // property filters configured for this collection
	supportsDatetime bool // whether the collection supports the datetime query param
}
// parse the given URL to values required to deliver a set of Features.
// All query params are parsed (each parse is independent) and their errors are
// joined, so the client gets all validation problems in one response.
func (fc featureCollectionURL) parse() (encodedCursor domain.EncodedCursor, limit int, inputSRID SRID, outputSRID SRID,
	contentCrs ContentCrs, bbox *geom.Extent, referenceDate time.Time, propertyFilters map[string]string, err error) {
	if err = fc.validateNoUnknownParams(); err != nil {
		return
	}
	encodedCursor = domain.EncodedCursor(fc.params.Get(cursorParam))
	contentCrs = parseCrsToContentCrs(fc.params)

	var limitErr, outputSRIDErr, pfErr, bboxErr, dateTimeErr, filterErr, inputSRIDErr error
	limit, limitErr = parseLimit(fc.params, fc.limit)
	outputSRID, outputSRIDErr = parseCrsToSRID(fc.params, crsParam)
	propertyFilters, pfErr = parsePropertyFilters(fc.configuredPropertyFilters, fc.params)
	var bboxSRID, filterSRID SRID
	bbox, bboxSRID, bboxErr = parseBbox(fc.params)
	referenceDate, dateTimeErr = parseDateTime(fc.params, fc.supportsDatetime)
	_, filterSRID, filterErr = parseFilter(fc.params)
	inputSRID, inputSRIDErr = consolidateSRIDs(bboxSRID, filterSRID)
	err = errors.Join(limitErr, outputSRIDErr, bboxErr, pfErr, dateTimeErr, filterErr, inputSRIDErr)
	return
}
// Calculate checksum over the query parameters that have a "filtering effect" on
// the result set such as limit, bbox, property filters, CQL filters, etc. These query params
// aren't allowed to be changed during pagination. The checksum allows for the latter
// to be verified
func (fc featureCollectionURL) checksum() []byte {
	keys := make([]string, 0, len(fc.params))
	for key := range fc.params {
		keys = append(keys, key)
	}
	sort.Strings(keys) // deterministic key order
	var valuesToHash bytes.Buffer
	for _, key := range keys {
		if slices.Contains(checksumExcludedParams, key) {
			continue // e.g. cursor/format may change between pages
		}
		values := fc.params[key]
		if values != nil {
			slices.Sort(values) // deterministic value order per key
		}
		for _, v := range values {
			valuesToHash.WriteString(v)
		}
	}
	if valuesToHash.Len() == 0 {
		return []byte{}
	}
	hasher := fnv.New32a() // fast non-cryptographic hash
	hasher.Write(valuesToHash.Bytes())
	return hasher.Sum(nil)
}
// toSelfURL returns the "self" link: the current items URL in the given format.
func (fc featureCollectionURL) toSelfURL(collectionID string, format string) string {
	params := clone(fc.params)
	params.Set(engine.FormatParam, format)
	selfURL := fc.baseURL.JoinPath("collections", collectionID, "items")
	selfURL.RawQuery = params.Encode()
	return selfURL.String()
}
// toPrevNextURL returns an items URL pointing at the page identified by the given cursor.
func (fc featureCollectionURL) toPrevNextURL(collectionID string, cursor domain.EncodedCursor, format string) string {
	params := clone(fc.params)
	params.Set(engine.FormatParam, format)
	params.Set(cursorParam, cursor.String())
	pageURL := fc.baseURL.JoinPath("collections", collectionID, "items")
	pageURL.RawQuery = params.Encode()
	return pageURL.String()
}
// implements req 7.6 (https://docs.ogc.org/is/17-069r4/17-069r4.html#query_parameters)
func (fc featureCollectionURL) validateNoUnknownParams() error {
	remaining := clone(fc.params)
	knownParams := []string{
		engine.FormatParam, limitParam, cursorParam, crsParam, dateTimeParam,
		bboxParam, bboxCrsParam, filterParam, filterCrsParam,
	}
	for _, name := range knownParams {
		remaining.Del(name)
	}
	for _, pf := range fc.configuredPropertyFilters {
		remaining.Del(pf.Name)
	}
	if len(remaining) > 0 {
		return fmt.Errorf("unknown query parameter(s) found: %v", remaining.Encode())
	}
	return nil
}
// URL to a specific Feature
type featureURL struct {
	baseURL url.URL // base URL of this API
	params url.Values // query parameters of the incoming request
}
// parse the given URL to values required to deliver a specific Feature:
// the output SRID and the content CRS header value.
func (f featureURL) parse() (srid SRID, contentCrs ContentCrs, err error) {
	if err = f.validateNoUnknownParams(); err != nil {
		return
	}
	contentCrs = parseCrsToContentCrs(f.params)
	srid, err = parseCrsToSRID(f.params, crsParam)
	return
}
// toSelfURL returns the "self" link for a single feature in the given format.
func (f featureURL) toSelfURL(collectionID string, featureID int64, format string) string {
	query := url.Values{}
	query.Set(engine.FormatParam, format)
	selfURL := f.baseURL.JoinPath("collections", collectionID, "items", strconv.FormatInt(featureID, 10))
	selfURL.RawQuery = query.Encode()
	return selfURL.String()
}
// toCollectionURL returns the link to the collection this feature belongs to.
func (f featureURL) toCollectionURL(collectionID string, format string) string {
	query := url.Values{}
	query.Set(engine.FormatParam, format)
	collURL := f.baseURL.JoinPath("collections", collectionID)
	collURL.RawQuery = query.Encode()
	return collURL.String()
}
// implements req 7.6 (https://docs.ogc.org/is/17-069r4/17-069r4.html#query_parameters)
func (f featureURL) validateNoUnknownParams() error {
	remaining := clone(f.params)
	for _, name := range []string{engine.FormatParam, crsParam} {
		remaining.Del(name)
	}
	if len(remaining) > 0 {
		return fmt.Errorf("unknown query parameter(s) found: %v", remaining.Encode())
	}
	return nil
}
func clone(params url.Values) url.Values {
copyParams := url.Values{}
for k, v := range params {
copyParams[k] = v
}
return copyParams
}
// consolidateSRIDs merges the bbox-crs and filter-crs SRIDs into a single input
// SRID. Both may be given, but then they must be equal: only one input CRS is
// supported (input and output CRS may still differ).
//
// Fix: previously, when only filter-crs was given (bbox-crs undefined), the
// undefined bbox SRID was returned and the filter SRID silently dropped;
// now whichever SRID is defined wins.
func consolidateSRIDs(bboxSRID SRID, filterSRID SRID) (inputSRID SRID, err error) {
	if bboxSRID != undefinedSRID && filterSRID != undefinedSRID && bboxSRID != filterSRID {
		return 0, errors.New("bbox-crs and filter-crs need to be equal. " +
			"Can't use more than one CRS as input, but input and output CRS may differ")
	}
	// use whichever is defined (when both are defined they're equal at this point)
	if bboxSRID != undefinedSRID {
		return bboxSRID, nil
	}
	if filterSRID != undefinedSRID {
		return filterSRID, nil
	}
	return inputSRID, err // neither defined: zero value
}
// parseLimit parses the limit query param, falling back to the configured
// default and capping at the configured maximum.
func parseLimit(params url.Values, limitCfg config.Limit) (int, error) {
	limit := limitCfg.Default
	var err error
	if rawLimit := params.Get(limitParam); rawLimit != "" {
		limit, err = strconv.Atoi(rawLimit)
		if err != nil {
			err = errors.New("limit must be numeric")
		}
		// "If the value of the limit parameter is larger than the maximum value, this SHALL NOT result
		// in an error (instead use the maximum as the parameter value)."
		if limit > limitCfg.Max {
			limit = limitCfg.Max
		}
	}
	if limit < 0 {
		err = errors.New("limit can't be negative")
	}
	return limit, err
}
// parseBbox parses the bbox + bbox-crs query params into an extent and its SRID.
// Returns a nil extent when no bbox was given.
func parseBbox(params url.Values) (*geom.Extent, SRID, error) {
	bboxSRID, err := parseCrsToSRID(params, bboxCrsParam)
	if err != nil {
		return nil, undefinedSRID, err
	}
	rawBbox := params.Get(bboxParam)
	if rawBbox == "" {
		return nil, undefinedSRID, nil
	}
	coords := strings.Split(rawBbox, ",")
	if len(coords) != 4 {
		return nil, bboxSRID, errors.New("bbox should contain exactly 4 values " +
			"separated by commas: minx,miny,maxx,maxy")
	}
	var extent geom.Extent
	for i, coord := range coords {
		value, parseErr := strconv.ParseFloat(coord, 64)
		if parseErr != nil {
			return nil, bboxSRID, fmt.Errorf("failed to parse value %s in bbox, error: %w", coord, parseErr)
		}
		extent[i] = value
	}
	return &extent, bboxSRID, nil
}
// parseCrsToContentCrs returns the requested output CRS URI, defaulting to WGS84.
func parseCrsToContentCrs(params url.Values) ContentCrs {
	if crs := params.Get(crsParam); crs != "" {
		return ContentCrs(crs)
	}
	return wgs84CrsURI
}
// parseCrsToSRID extracts the numeric SRID from a CRS URI query param,
// e.g. .../def/crs/EPSG/0/28992 yields 28992. Returns undefinedSRID when the
// param is absent.
func parseCrsToSRID(params url.Values, paramName string) (SRID, error) {
	crs := params.Get(paramName)
	if crs == "" {
		return undefinedSRID, nil
	}
	crs = strings.TrimSpace(crs)
	if !strings.HasPrefix(crs, crsURIPrefix) {
		return undefinedSRID, fmt.Errorf("%s param should start with %s, got: %s", paramName, crsURIPrefix, crs)
	}
	slash := strings.LastIndex(crs, "/")
	if slash == -1 {
		return SRID(0), nil
	}
	crsCode := crs[slash+1:]
	if crsCode == wgs84CodeOGC {
		return wgs84SRID, nil // CRS84 is WGS84, just like EPSG:4326 (only axis order differs but SRID is the same)
	}
	code, err := strconv.Atoi(crsCode)
	if err != nil {
		return 0, fmt.Errorf("expected numerical CRS code, received: %s", crsCode)
	}
	return SRID(code), nil
}
// Support simple filtering on properties: https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameters_for_filtering_on_feature_properties
func parsePropertyFilters(configuredPropertyFilters []config.PropertyFilter, params url.Values) (map[string]string, error) {
	result := make(map[string]string)
	for _, configured := range configuredPropertyFilters {
		value := params.Get(configured.Name)
		if value == "" {
			continue
		}
		if len(value) > propertyFilterMaxLength {
			return nil, fmt.Errorf("property filter %s is too large, "+
				"value is limited to %d characters", configured.Name, propertyFilterMaxLength)
		}
		if strings.Contains(value, propertyFilterWildcard) {
			// if/when we choose to support wildcards in the future, make sure wildcards are
			// only allowed at the END (suffix) of the filter
			return nil, fmt.Errorf("property filter %s contains a wildcard (%s), "+
				"wildcard filtering is not allowed", configured.Name, propertyFilterWildcard)
		}
		result[configured.Name] = value
	}
	return result, nil
}
// Support filtering on datetime: https://docs.ogc.org/is/17-069r4/17-069r4.html#_parameter_datetime
func parseDateTime(params url.Values, datetimeSupported bool) (time.Time, error) {
	datetime := params.Get(dateTimeParam)
	if datetime == "" {
		return time.Time{}, nil
	}
	if !datetimeSupported {
		return time.Time{}, errors.New("datetime param is currently not supported for this collection")
	}
	if strings.Contains(datetime, "/") {
		return time.Time{}, fmt.Errorf("datetime param '%s' represents an interval, intervals are currently not supported", datetime)
	}
	return time.Parse(time.RFC3339, datetime)
}
// parseFilter parses the CQL filter + filter-crs query params. A non-empty
// filter currently always yields an error since CQL isn't supported yet.
func parseFilter(params url.Values) (string, SRID, error) {
	filter := params.Get(filterParam)
	filterSRID, _ := parseCrsToSRID(params, filterCrsParam)
	var err error
	if filter != "" {
		err = errors.New("CQL filter param is currently not supported")
	}
	return filter, filterSRID, err
}
package geovolumes
import (
"errors"
"log"
"net/http"
"net/url"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/go-chi/chi/v5"
)
// ThreeDimensionalGeoVolumes serves OGC 3D GeoVolumes by reverse proxying
// 3D Tiles / quantized mesh requests to the configured tileserver.
type ThreeDimensionalGeoVolumes struct {
	engine *engine.Engine
	validateResponse bool // passed to ReverseProxyAndValidate; taken from config's ValidateResponses
}
// NewThreeDimensionalGeoVolumes validates the configured tileserver URL (fatal
// when invalid) and registers all 3D Tiles and DTM/quantized mesh routes.
func NewThreeDimensionalGeoVolumes(e *engine.Engine) *ThreeDimensionalGeoVolumes {
	_, err := url.ParseRequestURI(e.Config.OgcAPI.GeoVolumes.TileServer.String())
	if err != nil {
		log.Fatalf("invalid tileserver url provided: %v", err)
	}
	geoVolumes := &ThreeDimensionalGeoVolumes{
		engine: e,
		// NOTE(review): dereference assumes ValidateResponses is always non-nil — confirm config defaulting
		validateResponse: *e.Config.OgcAPI.GeoVolumes.ValidateResponses,
	}
	// 3D Tiles
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles", geoVolumes.Tileset("tileset.json"))
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/3dtiles/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	// DTM/Quantized Mesh
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh", geoVolumes.Tileset("layer.json"))
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/quantized-mesh/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	// path '/3dtiles' or '/quantized-mesh' is preferred but optional when requesting the actual tiles/tileset.
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{explicitTileSet}.json", geoVolumes.ExplicitTileset())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{3dContainerId}/{tilePathPrefix}/{tileMatrix}/{tileRow}/{tileColAndSuffix}", geoVolumes.Tile())
	return geoVolumes
}
// Tileset serves tileset.json manifest in case of OGC 3D Tiles (= separate spec from OGC 3D GeoVolumes) requests or
// layer.json manifest in case of quantized mesh requests. Both requests will be proxied to the configured tileserver.
func (t *ThreeDimensionalGeoVolumes) Tileset(fileName string) http.HandlerFunc {
	// programmer error, checked once at startup
	if !strings.HasSuffix(fileName, ".json") {
		log.Fatalf("manifest should be a JSON file")
	}
	handler := func(w http.ResponseWriter, r *http.Request) {
		t.tileSet(w, r, fileName)
	}
	return handler
}
// ExplicitTileset serves OGC 3D Tiles manifest (= separate spec from OGC 3D GeoVolumes) or
// quantized mesh manifest. All requests will be proxied to the configured tileserver.
func (t *ThreeDimensionalGeoVolumes) ExplicitTileset() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		name := chi.URLParam(r, "explicitTileSet")
		if name == "" {
			engine.RenderProblem(engine.ProblemNotFound, w)
			return
		}
		t.tileSet(w, r, name+".json")
	}
}
// Tile reverse proxy to tileserver for actual 3D tiles (from OGC 3D Tiles, separate spec
// from OGC 3D GeoVolumes) or DTM Quantized Mesh tiles
func (t *ThreeDimensionalGeoVolumes) Tile() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		containerID := chi.URLParam(r, "3dContainerId")
		collection, err := t.idToCollection(containerID)
		if err != nil {
			engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
			return
		}
		tileServerPath := containerID
		if collection.GeoVolumes != nil && collection.GeoVolumes.TileServerPath != nil {
			tileServerPath = *collection.GeoVolumes.TileServerPath
		}
		contentType := ""
		if collection.GeoVolumes != nil && collection.GeoVolumes.HasDTM() {
			// DTM has a specialized mediatype, although application/octet-stream will also work with Cesium
			contentType = engine.MediaTypeQuantizedMesh
		}
		tilePath, _ := url.JoinPath("/",
			tileServerPath,
			chi.URLParam(r, "tilePathPrefix"), // optional
			chi.URLParam(r, "tileMatrix"),
			chi.URLParam(r, "tileRow"),
			chi.URLParam(r, "tileColAndSuffix"))
		t.reverseProxy(w, r, tilePath, true, contentType)
	}
}
// tileSet proxies a manifest (tileset.json / layer.json) request to the tileserver.
func (t *ThreeDimensionalGeoVolumes) tileSet(w http.ResponseWriter, r *http.Request, tileSet string) {
	containerID := chi.URLParam(r, "3dContainerId")
	collection, err := t.idToCollection(containerID)
	if err != nil {
		engine.RenderProblem(engine.ProblemNotFound, w, err.Error())
		return
	}
	tileServerPath := containerID
	if collection.GeoVolumes != nil && collection.GeoVolumes.TileServerPath != nil {
		tileServerPath = *collection.GeoVolumes.TileServerPath
	}
	manifestPath, _ := url.JoinPath("/", tileServerPath, tileSet)
	t.reverseProxy(w, r, manifestPath, false, "")
}
// reverseProxy forwards the request to the configured tileserver at the given path.
func (t *ThreeDimensionalGeoVolumes) reverseProxy(w http.ResponseWriter, r *http.Request, path string,
	prefer204 bool, contentTypeOverwrite string) {
	tileServer := t.engine.Config.OgcAPI.GeoVolumes.TileServer.String()
	target, err := url.Parse(tileServer + path)
	if err != nil {
		log.Printf("invalid target url, can't proxy tiles: %v", err)
		engine.RenderProblem(engine.ProblemServerError, w)
		return
	}
	t.engine.ReverseProxyAndValidate(w, r, target, prefer204, contentTypeOverwrite, t.validateResponse)
}
// idToCollection finds the GeoVolumes collection with the given ID, or errors
// when no collection matches.
func (t *ThreeDimensionalGeoVolumes) idToCollection(cid string) (*config.GeoSpatialCollection, error) {
	for i := range t.engine.Config.OgcAPI.GeoVolumes.Collections {
		candidate := t.engine.Config.OgcAPI.GeoVolumes.Collections[i] // copy, so the pointer doesn't alias the config
		if candidate.ID == cid {
			return &candidate, nil
		}
	}
	return nil, errors.New("no matching collection found")
}
package processes
import (
"net/http"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
)
// Processes forwards OGC API Processes requests to an external processes server.
type Processes struct {
	engine *engine.Engine
}
// NewProcesses routes the OGC API Processes paths (/jobs, /processes, /api)
// to the configured processes server.
func NewProcesses(e *engine.Engine) *Processes {
	p := &Processes{engine: e}
	// one shared (stateless) forwarding handler for all three path prefixes
	forward := p.forwarder(e.Config.OgcAPI.Processes.ProcessesServer)
	e.Router.Handle("/jobs*", forward)
	e.Router.Handle("/processes*", forward)
	e.Router.Handle("/api*", forward)
	return p
}
// forwarder reverse proxies the incoming request (path + query string appended)
// to the given processes server.
func (p *Processes) forwarder(processServer config.URL) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		target := *processServer.URL // copy, so we don't mutate the configured URL
		target.Path = processServer.URL.Path + r.URL.Path
		target.RawQuery = r.URL.RawQuery
		p.engine.ReverseProxy(w, r, &target, false, "")
	}
}
package styles
import (
"log"
"net/http"
"slices"
"strings"
"github.com/PDOK/gokoala/config"
"github.com/PDOK/gokoala/internal/engine"
"github.com/go-chi/chi/v5"
)
const (
	templatesDir = "internal/ogc/styles/templates/" // location of the Go templates for styles pages
	stylesPath = "/styles"
	stylesCrumb = "styles/"
	projectionDelimiter = "__" // separates style ID from projection in style instance IDs
)
var (
	// defaultProjection is set in NewStyles from the first configured tile SRS;
	// used when a style URL doesn't carry an explicit projection.
	defaultProjection = ""
)
// Styles serves the OGC API Styles endpoints.
type Styles struct {
	engine *engine.Engine
}
// NewStyles pre-renders all style templates (per style, per projection, per
// format), sets the fallback projection and registers the styles routes.
// Fatal when the configured default style isn't the first supported style.
func NewStyles(e *engine.Engine) *Styles {
	// default style must be the first entry in supportedstyles
	if e.Config.OgcAPI.Styles.Default != e.Config.OgcAPI.Styles.SupportedStyles[0].ID {
		log.Fatalf("default style must be first entry in supported styles. '%s' does not match '%s'",
			e.Config.OgcAPI.Styles.SupportedStyles[0].ID, e.Config.OgcAPI.Styles.Default)
	}
	stylesBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "Styles",
			Path: "styles",
		},
	}
	// render the styles overview page (JSON + HTML)
	e.RenderTemplates(stylesPath,
		stylesBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"styles.go.json"),
		engine.NewTemplateKey(templatesDir+"styles.go.html"))
	// map of supported SRS -> tile matrix set / projection name
	projections := map[string]string{"EPSG:28992": "NetherlandsRDNewQuad", "EPSG:3035": "EuropeanETRS89_LAEAQuad", "EPSG:3857": "WebMercatorQuad"}
	// fallback projection for style URLs without an explicit projection: first configured SRS
	defaultProjection = strings.ToLower(projections[e.Config.OgcAPI.Tiles.SupportedSrs[0].Srs])
	// render one style "instance" per (style, projection) combination
	for _, style := range e.Config.OgcAPI.Styles.SupportedStyles {
		for _, supportedSrs := range e.Config.OgcAPI.Tiles.SupportedSrs {
			projection := projections[supportedSrs.Srs]
			zoomLevelRange := supportedSrs.ZoomLevelRange
			styleInstanceID := style.ID + projectionDelimiter + strings.ToLower(projection)
			// Render metadata templates
			e.RenderTemplatesWithParams(struct {
				Metadata config.Style
				Projection string
			}{Metadata: style, Projection: projection},
				nil,
				engine.NewTemplateKeyWithName(templatesDir+"styleMetadata.go.json", styleInstanceID))
			styleMetadataBreadcrumbs := stylesBreadcrumbs
			styleMetadataBreadcrumbs = append(styleMetadataBreadcrumbs, []engine.Breadcrumb{
				{
					Name: style.Title + " (" + projection + ")",
					Path: stylesCrumb + styleInstanceID,
				},
				{
					Name: "Metadata",
					Path: stylesCrumb + styleInstanceID + "/metadata",
				},
			}...)
			e.RenderTemplatesWithParams(struct {
				Metadata config.Style
				Projection string
			}{Metadata: style, Projection: projection},
				styleMetadataBreadcrumbs,
				engine.NewTemplateKeyWithName(templatesDir+"styleMetadata.go.html", styleInstanceID))
			// Add existing style definitions to rendered templates
			for _, styleFormat := range style.Formats {
				formatExtension := e.CN.GetStyleFormatExtension(styleFormat.Format)
				styleKey := engine.TemplateKey{
					Name: style.ID + formatExtension,
					Directory: e.Config.OgcAPI.Styles.StylesDir,
					Format: styleFormat.Format,
					InstanceName: styleInstanceID + "." + styleFormat.Format,
				}
				// render the style definition itself with projection-specific params
				e.RenderTemplatesWithParams(struct {
					Projection string
					ZoomLevelRange config.ZoomLevelRange
				}{Projection: projection, ZoomLevelRange: zoomLevelRange}, nil, styleKey)
				styleBreadCrumbs := stylesBreadcrumbs
				styleBreadCrumbs = append(styleBreadCrumbs, []engine.Breadcrumb{
					{
						Name: style.Title + " (" + projection + ")",
						Path: stylesCrumb + styleInstanceID,
					},
				}...)
				// render the HTML page for this style instance
				e.RenderTemplatesWithParams(style,
					styleBreadCrumbs,
					engine.NewTemplateKeyWithName(templatesDir+"style.go.html", styleInstanceID))
			}
		}
	}
	styles := &Styles{
		engine: e,
	}
	e.Router.Get(stylesPath, styles.Styles())
	e.Router.Get(stylesPath+"/{style}", styles.Style())
	e.Router.Get(stylesPath+"/{style}/metadata", styles.StyleMetadata())
	return styles
}
// Styles serves the list of styles in the negotiated format and language.
func (s *Styles) Styles() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := s.engine.CN.NegotiateFormat(r)
		language := s.engine.CN.NegotiateLanguage(w, r)
		s.engine.ServePage(w, r, engine.NewTemplateKeyWithLanguage(templatesDir+"styles.go."+format, language))
	}
}
// Style serves a single style (HTML page or style definition) in the
// negotiated format, falling back to Mapbox style for unsupported formats.
func (s *Styles) Style() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		style := chi.URLParam(r, "style")
		styleID := strings.Split(style, projectionDelimiter)[0]
		// Previously, the API did not utilise separate styles per projection; whereas the current implementation
		// advertises all possible combinations of available styles and available projections as separate styles.
		// To ensure that the use of style URLs without projection remains possible for previously published APIs,
		// URLs without an explicit projection are defaulted to the first configured projection.
		if style == styleID {
			style += projectionDelimiter + defaultProjection
		}
		styleFormat := s.engine.CN.NegotiateFormat(r)
		if styleFormat == engine.FormatHTML {
			s.engine.ServePage(w, r, engine.NewTemplateKeyWithNameAndLanguage(
				templatesDir+"style.go.html", style, s.engine.CN.NegotiateLanguage(w, r)))
			return
		}
		// unsupported style formats fall back to Mapbox style
		if !slices.Contains(s.engine.CN.GetSupportedStyleFormats(), styleFormat) {
			styleFormat = engine.FormatMapboxStyle
		}
		s.engine.ServePage(w, r, engine.TemplateKey{
			Name:         styleID + s.engine.CN.GetStyleFormatExtension(styleFormat),
			Directory:    s.engine.Config.OgcAPI.Styles.StylesDir,
			Format:       styleFormat,
			InstanceName: style + "." + styleFormat,
			Language:     s.engine.CN.NegotiateLanguage(w, r),
		})
	}
}
// StyleMetadata serves metadata about a single style in the negotiated format.
func (s *Styles) StyleMetadata() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		style := chi.URLParam(r, "style")
		styleID := strings.Split(style, projectionDelimiter)[0]
		// Previously, the API did not utilise separate styles per projection; whereas the current implementation
		// advertises all possible combinations of available styles and available projections as separate styles.
		// To ensure that the use of style URLs without projection remains possible for previously published APIs,
		// URLs without an explicit projection are defaulted to the first configured projection.
		if style == styleID {
			style += projectionDelimiter + defaultProjection
		}
		format := s.engine.CN.NegotiateFormat(r)
		language := s.engine.CN.NegotiateLanguage(w, r)
		s.engine.ServePage(w, r, engine.NewTemplateKeyWithNameAndLanguage(
			templatesDir+"styleMetadata.go."+format, style, language))
	}
}
package tiles
import (
"log"
"net/http"
"net/url"
"strings"
"github.com/PDOK/gokoala/internal/engine"
"github.com/PDOK/gokoala/internal/ogc/common/geospatial"
"github.com/go-chi/chi/v5"
)
const (
	templatesDir = "internal/ogc/tiles/templates/" // location of the Go templates for tiles pages
	tilesPath = "/tiles"
	tilesLocalPath = "tiles/"
	tileMatrixSetsPath = "/tileMatrixSets"
	tileMatrixSetsLocalPath = "tileMatrixSets/"
	defaultTilesTmpl = "{tms}/{z}/{x}/{y}." + engine.FormatMVTAlternative // default tile URL template
)
// Tiles serves the OGC API Tiles endpoints, proxying actual tiles to a tileserver.
type Tiles struct {
	engine *engine.Engine
}
// NewTiles pre-renders the tiles and tile matrix set templates (incl. the three
// supported projections), validates the tileserver URL (fatal when invalid)
// and registers the tiles routes.
func NewTiles(e *engine.Engine) *Tiles {
	tilesBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "Tiles",
			Path: "tiles",
		},
	}
	tileMatrixSetsBreadcrumbs := []engine.Breadcrumb{
		{
			Name: "Tile Matrix Sets",
			Path: "tileMatrixSets",
		},
	}
	// overview pages (JSON + HTML)
	e.RenderTemplates(tilesPath,
		tilesBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"tiles.go.json"),
		engine.NewTemplateKey(templatesDir+"tiles.go.html"))
	e.RenderTemplates(tileMatrixSetsPath,
		tileMatrixSetsBreadcrumbs,
		engine.NewTemplateKey(templatesDir+"tileMatrixSets.go.json"),
		engine.NewTemplateKey(templatesDir+"tileMatrixSets.go.html"))
	// per-projection pages
	renderTemplatesForSrs(e, "EuropeanETRS89_LAEAQuad", tilesBreadcrumbs, tileMatrixSetsBreadcrumbs)
	renderTemplatesForSrs(e, "NetherlandsRDNewQuad", tilesBreadcrumbs, tileMatrixSetsBreadcrumbs)
	renderTemplatesForSrs(e, "WebMercatorQuad", tilesBreadcrumbs, tileMatrixSetsBreadcrumbs)
	_, err := url.ParseRequestURI(e.Config.OgcAPI.Tiles.TileServer.String())
	if err != nil {
		log.Fatalf("invalid tileserver url provided: %v", err)
	}
	tiles := &Tiles{
		engine: e,
	}
	e.Router.Get(tileMatrixSetsPath, tiles.TileMatrixSets())
	e.Router.Get(tileMatrixSetsPath+"/{tileMatrixSetId}", tiles.TileMatrixSet())
	e.Router.Get(tilesPath, tiles.TilesetsList())
	e.Router.Get(tilesPath+"/{tileMatrixSetId}", tiles.Tileset())
	e.Router.Head(tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.Tile())
	e.Router.Get(tilesPath+"/{tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}", tiles.Tile())
	e.Router.Get(geospatial.CollectionsPath+"/{collectionId}/tiles", tiles.TilesCollection())
	return tiles
}
// renderTemplatesForSrs pre-renders the tile matrix set and tileset templates
// (JSON, HTML and TileJSON variants) for the given SRS/projection.
func renderTemplatesForSrs(e *engine.Engine, srs string, tilesBreadcrumbs []engine.Breadcrumb, tileMatrixSetsBreadcrumbs []engine.Breadcrumb) {
	tilesSrsBreadcrumbs := append(tilesBreadcrumbs, engine.Breadcrumb{
		Name: srs,
		Path: tilesLocalPath + srs,
	})
	tileMatrixSetsSrsBreadcrumbs := append(tileMatrixSetsBreadcrumbs, engine.Breadcrumb{
		Name: srs,
		Path: tileMatrixSetsLocalPath + srs,
	})
	e.RenderTemplates(tileMatrixSetsPath+"/"+srs,
		tileMatrixSetsSrsBreadcrumbs,
		engine.NewTemplateKey(templatesDir+tileMatrixSetsLocalPath+srs+".go.json"),
		engine.NewTemplateKey(templatesDir+tileMatrixSetsLocalPath+srs+".go.html"))
	e.RenderTemplates(tilesPath+"/"+srs,
		tilesSrsBreadcrumbs,
		engine.NewTemplateKey(templatesDir+tilesLocalPath+srs+".go.json"),
		engine.NewTemplateKey(templatesDir+tilesLocalPath+srs+".go.html"))
	e.RenderTemplates(tilesPath+"/"+srs,
		tilesSrsBreadcrumbs,
		engine.NewTemplateKey(templatesDir+tilesLocalPath+srs+".go.tilejson"))
}
// TileMatrixSets serves the page listing all supported tile matrix sets,
// in the format and language negotiated from the request.
func (t *Tiles) TileMatrixSets() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := t.engine.CN.NegotiateFormat(r)
		language := t.engine.CN.NegotiateLanguage(w, r)
		key := engine.NewTemplateKeyWithLanguage(templatesDir+"tileMatrixSets.go."+format, language)
		t.engine.ServePage(w, r, key)
	}
}
// TileMatrixSet serves the details page for a single tile matrix set,
// identified by the {tileMatrixSetId} path parameter.
func (t *Tiles) TileMatrixSet() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId")
		format := t.engine.CN.NegotiateFormat(r)
		language := t.engine.CN.NegotiateLanguage(w, r)
		key := engine.NewTemplateKeyWithLanguage(templatesDir+tileMatrixSetsLocalPath+tileMatrixSetID+".go."+format, language)
		t.engine.ServePage(w, r, key)
	}
}
// TilesetsList serves the overview page listing all available tilesets,
// in the format and language negotiated from the request.
func (t *Tiles) TilesetsList() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		format := t.engine.CN.NegotiateFormat(r)
		language := t.engine.CN.NegotiateLanguage(w, r)
		key := engine.NewTemplateKeyWithLanguage(templatesDir+"tiles.go."+format, language)
		t.engine.ServePage(w, r, key)
	}
}
// Tileset serves the details page for a single tileset, identified by the
// {tileMatrixSetId} path parameter.
func (t *Tiles) Tileset() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId")
		format := t.engine.CN.NegotiateFormat(r)
		language := t.engine.CN.NegotiateLanguage(w, r)
		key := engine.NewTemplateKeyWithLanguage(templatesDir+tilesLocalPath+tileMatrixSetID+".go."+format, language)
		t.engine.ServePage(w, r, key)
	}
}
// Tile reverse proxy to Azure Blob, assumes blob bucket/container is public.
//
// The handler resolves the {tileMatrixSetId}/{tileMatrix}/{tileRow}/{tileCol}
// path parameters into a tileserver URL (z/x/y order) and proxies the request.
// Only Mapbox Vector Tiles are supported; the format may be selected via
// content negotiation or a legacy ".pbf" extension on the tile column.
func (t *Tiles) Tile() http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		tileMatrixSetID := chi.URLParam(r, "tileMatrixSetId")
		tileMatrix := chi.URLParam(r, "tileMatrix")
		tileRow := chi.URLParam(r, "tileRow")
		tileCol := chi.URLParam(r, "tileCol")

		// We support content negotiation using Accept header and ?f= param, but also
		// using the .pbf extension. This is for backwards compatibility.
		if strings.HasSuffix(tileCol, ".pbf") {
			tileCol = strings.TrimSuffix(tileCol, ".pbf")
		} else {
			// if no format is specified, default to mvt (JSON negotiation maps to MVT)
			format := strings.Replace(t.engine.CN.NegotiateFormat(r), engine.FormatJSON, engine.FormatMVT, 1)
			if format != engine.FormatMVT && format != engine.FormatMVTAlternative {
				engine.RenderProblem(engine.ProblemBadRequest, w, "Specify tile format. Currently only Mapbox Vector Tiles (?f=mvt) tiles are supported")
				return
			}
		}

		// ogc spec is (default) z/row/col but tileserver is z/col/row (z/x/y)
		replacer := strings.NewReplacer("{tms}", tileMatrixSetID, "{z}", tileMatrix, "{x}", tileCol, "{y}", tileRow)
		tilesTmpl := defaultTilesTmpl
		if t.engine.Config.OgcAPI.Tiles.URITemplateTiles != nil {
			tilesTmpl = *t.engine.Config.OgcAPI.Tiles.URITemplateTiles
		}
		// Don't ignore the JoinPath error: a malformed URI template would
		// otherwise silently yield a broken proxy path.
		path, err := url.JoinPath("/", replacer.Replace(tilesTmpl))
		if err != nil {
			log.Printf("invalid tile path, can't proxy tiles: %v", err)
			engine.RenderProblem(engine.ProblemServerError, w)
			return
		}
		target, err := url.Parse(t.engine.Config.OgcAPI.Tiles.TileServer.String() + path)
		if err != nil {
			log.Printf("invalid target url, can't proxy tiles: %v", err)
			engine.RenderProblem(engine.ProblemServerError, w)
			return
		}
		t.engine.ReverseProxy(w, r, target, true, engine.MediaTypeMVT)
	}
}
// TilesCollection is a placeholder handler for per-collection tiles.
func (t *Tiles) TilesCollection(_ ...any) http.HandlerFunc {
	return func(_ http.ResponseWriter, r *http.Request) {
		// TODO: not implemented, since we don't (yet) support tile collections
		log.Printf("TODO: return tiles for collection %s", chi.URLParam(r, "collectionId"))
	}
}