初始
This commit is contained in:
49
api/v1/common/tool/excel/excel.go
Normal file
49
api/v1/common/tool/excel/excel.go
Normal file
@ -0,0 +1,49 @@
|
||||
package excel
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/tiger1103/gfast/v3/api/v1/common/tool"
|
||||
"github.com/xuri/excelize/v2"
|
||||
)
|
||||
|
||||
type Sheet struct {
|
||||
Name string `json:"name"`
|
||||
Rows [][]string `json:"rows"`
|
||||
}
|
||||
|
||||
func ReadXlsx(xlsx string) (err error, sheet []Sheet) {
|
||||
if !tool.PathExists(xlsx) {
|
||||
return errors.New("文件不存在:" + xlsx), sheet
|
||||
}
|
||||
f, err := excelize.OpenFile(xlsx)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
return err, sheet
|
||||
}
|
||||
defer func() {
|
||||
// 关闭工作簿
|
||||
if err := f.Close(); err != nil {
|
||||
fmt.Println(err)
|
||||
}
|
||||
}()
|
||||
list := f.GetSheetList()
|
||||
// 获取 Sheet1 上所有单元格
|
||||
for _, sheetName := range list {
|
||||
result, err := f.GetRows(sheetName)
|
||||
if err != nil {
|
||||
fmt.Println(err)
|
||||
continue
|
||||
}
|
||||
sheet = append(sheet, Sheet{sheetName, result})
|
||||
//rows = append(rows, result...)
|
||||
}
|
||||
|
||||
//for _, row := range rows {
|
||||
// for _, colCell := range row {
|
||||
// fmt.Print(colCell, "\t")
|
||||
// }
|
||||
// fmt.Println()
|
||||
//}
|
||||
return nil, sheet
|
||||
}
|
25
api/v1/common/tool/proj/proj.go
Normal file
25
api/v1/common/tool/proj/proj.go
Normal file
@ -0,0 +1,25 @@
|
||||
package proj
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"github.com/dop251/goja"
|
||||
)
|
||||
|
||||
//go:embed proj4.js
|
||||
var proj4 string
|
||||
|
||||
var CGCS2000_to_WGS84 func(degrees int, cscs2000 [][]string) string
|
||||
var WGS84_to_CGCS2000 func(degrees int, wgs84 [][]string) string
|
||||
|
||||
func InitProj() {
|
||||
vm := goja.New()
|
||||
vm.RunString(proj4)
|
||||
vm.ExportTo(vm.Get("CGCS2000_to_WGS84"), &CGCS2000_to_WGS84)
|
||||
vm.ExportTo(vm.Get("WGS84_to_CGCS2000"), &WGS84_to_CGCS2000)
|
||||
//var ss [][]string
|
||||
//ss = append(ss, []string{
|
||||
// "106.545463204423", "23.467020901621", "805.6832",
|
||||
//})
|
||||
//s := WGS84_to_CGCS2000(108, ss)
|
||||
//fmt.Println(s)
|
||||
}
|
1994
api/v1/common/tool/proj/proj4.js
Normal file
1994
api/v1/common/tool/proj/proj4.js
Normal file
File diff suppressed because it is too large
Load Diff
2
api/v1/common/tool/shp/.hound.yml
Normal file
2
api/v1/common/tool/shp/.hound.yml
Normal file
@ -0,0 +1,2 @@
|
||||
go:
|
||||
enabled: true
|
19
api/v1/common/tool/shp/.travis.yml
Normal file
19
api/v1/common/tool/shp/.travis.yml
Normal file
@ -0,0 +1,19 @@
|
||||
language: go
|
||||
sudo: false
|
||||
|
||||
go:
|
||||
- 1.8.x
|
||||
- 1.9.x
|
||||
- master
|
||||
|
||||
os:
|
||||
- linux
|
||||
|
||||
before_install:
|
||||
- go get -t -v ./...
|
||||
|
||||
script:
|
||||
- go test -race -coverprofile=coverage.txt -covermode=atomic
|
||||
|
||||
after_success:
|
||||
- bash <(curl -s https://codecov.io/bash)
|
21
api/v1/common/tool/shp/LICENSE
Normal file
21
api/v1/common/tool/shp/LICENSE
Normal file
@ -0,0 +1,21 @@
|
||||
The MIT License (MIT)
|
||||
|
||||
Copyright (c) 2014 Jonas Palm
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
of this software and associated documentation files (the "Software"), to deal
|
||||
in the Software without restriction, including without limitation the rights
|
||||
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
copies of the Software, and to permit persons to whom the Software is
|
||||
furnished to do so, subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
SOFTWARE.
|
87
api/v1/common/tool/shp/README.md
Normal file
87
api/v1/common/tool/shp/README.md
Normal file
@ -0,0 +1,87 @@
|
||||
go-shp
|
||||
======
|
||||
|
||||
[](https://travis-ci.org/jonas-p/go-shp)
|
||||
[](https://ci.appveyor.com/project/fawick/go-shp)
|
||||
[](https://goreportcard.com/report/github.com/jonas-p/go-shp)
|
||||
[](https://codecov.io/gh/jonas-p/go-shp)
|
||||
|
||||
Go library for reading and writing ESRI Shapefiles. This is a pure Golang implementation based on the ESRI Shapefile technical description.
|
||||
|
||||
### Usage
|
||||
#### Installation
|
||||
|
||||
go get github.com/jonas-p/go-shp
|
||||
|
||||
#### Importing
|
||||
|
||||
```go
|
||||
import "github.com/jonas-p/go-shp"
|
||||
```
|
||||
|
||||
### Examples
|
||||
#### Reading a shapefile
|
||||
|
||||
```go
|
||||
// open a shapefile for reading
|
||||
shape, err := shp.Open("points.shp")
|
||||
if err != nil { log.Fatal(err) }
|
||||
defer shape.Close()
|
||||
|
||||
// fields from the attribute table (DBF)
|
||||
fields := shape.Fields()
|
||||
|
||||
// loop through all features in the shapefile
|
||||
for shape.Next() {
|
||||
n, p := shape.Shape()
|
||||
|
||||
// print feature
|
||||
fmt.Println(reflect.TypeOf(p).Elem(), p.BBox())
|
||||
|
||||
// print attributes
|
||||
for k, f := range fields {
|
||||
val := shape.ReadAttribute(n, k)
|
||||
fmt.Printf("\t%v: %v\n", f, val)
|
||||
}
|
||||
fmt.Println()
|
||||
}
|
||||
```
|
||||
|
||||
#### Creating a shapefile
|
||||
|
||||
```go
|
||||
// points to write
|
||||
points := []shp.Point{
|
||||
shp.Point{10.0, 10.0},
|
||||
shp.Point{10.0, 15.0},
|
||||
shp.Point{15.0, 15.0},
|
||||
shp.Point{15.0, 10.0},
|
||||
}
|
||||
|
||||
// fields to write
|
||||
fields := []shp.Field{
|
||||
// String attribute field with length 25
|
||||
shp.StringField("NAME", 25),
|
||||
}
|
||||
|
||||
// create and open a shapefile for writing points
|
||||
shape, err := shp.Create("points.shp", shp.POINT)
|
||||
if err != nil { log.Fatal(err) }
|
||||
defer shape.Close()
|
||||
|
||||
// setup fields for attributes
|
||||
shape.SetFields(fields)
|
||||
|
||||
// write points and attributes
|
||||
for n, point := range points {
|
||||
shape.Write(&point)
|
||||
|
||||
// write attribute for object n for field 0 (NAME)
|
||||
shape.WriteAttribute(n, 0, "Point " + strconv.Itoa(n + 1))
|
||||
}
|
||||
```
|
||||
|
||||
### Resources
|
||||
|
||||
- [Documentation on godoc.org](http://godoc.org/github.com/jonas-p/go-shp)
|
||||
- [ESRI Shapefile Technical Description](http://www.esri.com/library/whitepapers/pdfs/shapefile.pdf)
|
26
api/v1/common/tool/shp/appveyor.yml
Normal file
26
api/v1/common/tool/shp/appveyor.yml
Normal file
@ -0,0 +1,26 @@
|
||||
clone_folder: c:\go-shp
|
||||
|
||||
environment:
|
||||
GOPATH: c:\gopath
|
||||
|
||||
branches:
|
||||
only:
|
||||
- master
|
||||
|
||||
init:
|
||||
- ps: >-
|
||||
$app = Get-WmiObject -Class Win32_Product -Filter "Vendor = 'http://golang.org'"
|
||||
|
||||
if ($app) {
|
||||
$app.Uninstall()
|
||||
}
|
||||
|
||||
install:
|
||||
- rmdir c:\go /s /q
|
||||
- appveyor DownloadFile https://storage.googleapis.com/golang/go1.9.windows-amd64.msi
|
||||
- msiexec /i go1.9.windows-amd64.msi /q
|
||||
- go version
|
||||
- go env
|
||||
|
||||
build_script:
|
||||
- go test ./...
|
27
api/v1/common/tool/shp/errreader.go
Normal file
27
api/v1/common/tool/shp/errreader.go
Normal file
@ -0,0 +1,27 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
)
|
||||
|
||||
// errReader is a helper to perform multiple successive read from another reader
|
||||
// and do the error checking only once afterwards. It will not perform any new
|
||||
// reads in case there was an error encountered earlier.
|
||||
type errReader struct {
|
||||
io.Reader
|
||||
e error
|
||||
n int64
|
||||
}
|
||||
|
||||
func (er *errReader) Read(p []byte) (n int, err error) {
|
||||
if er.e != nil {
|
||||
return 0, fmt.Errorf("unable to read after previous error: %v", er.e)
|
||||
}
|
||||
n, err = er.Reader.Read(p)
|
||||
if n < len(p) && err != nil {
|
||||
er.e = err
|
||||
}
|
||||
er.n += int64(n)
|
||||
return n, er.e
|
||||
}
|
253
api/v1/common/tool/shp/reader.go
Normal file
253
api/v1/common/tool/shp/reader.go
Normal file
@ -0,0 +1,253 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Reader provides a interface for reading Shapefiles. Calls
|
||||
// to the Next method will iterate through the objects in the
|
||||
// Shapefile. After a call to Next the object will be available
|
||||
// through the Shape method.
|
||||
type Reader struct {
|
||||
GeometryType ShapeType
|
||||
bbox Box
|
||||
err error
|
||||
|
||||
shp readSeekCloser
|
||||
shape Shape
|
||||
num int32
|
||||
filename string
|
||||
filelength int64
|
||||
|
||||
dbf readSeekCloser
|
||||
dbfFields []Field
|
||||
dbfNumRecords int32
|
||||
dbfHeaderLength int16
|
||||
dbfRecordLength int16
|
||||
}
|
||||
|
||||
type readSeekCloser interface {
|
||||
io.Reader
|
||||
io.Seeker
|
||||
io.Closer
|
||||
}
|
||||
|
||||
// Open opens a Shapefile for reading.
|
||||
func Open(filename string) (*Reader, error) {
|
||||
ext := filepath.Ext(filename)
|
||||
if strings.ToLower(ext) != ".shp" {
|
||||
return nil, fmt.Errorf("Invalid file extension: %s", filename)
|
||||
}
|
||||
shp, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
s := &Reader{filename: strings.TrimSuffix(filename, ext), shp: shp}
|
||||
return s, s.readHeaders()
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the shapefile.
|
||||
func (r *Reader) BBox() Box {
|
||||
return r.bbox
|
||||
}
|
||||
|
||||
// Read and parse headers in the Shapefile. This will
|
||||
// fill out GeometryType, filelength and bbox.
|
||||
func (r *Reader) readHeaders() error {
|
||||
er := &errReader{Reader: r.shp}
|
||||
// don't trust the the filelength in the header
|
||||
r.filelength, _ = r.shp.Seek(0, io.SeekEnd)
|
||||
|
||||
var filelength int32
|
||||
r.shp.Seek(24, 0)
|
||||
// file length
|
||||
binary.Read(er, binary.BigEndian, &filelength)
|
||||
r.shp.Seek(32, 0)
|
||||
binary.Read(er, binary.LittleEndian, &r.GeometryType)
|
||||
r.bbox.MinX = readFloat64(er)
|
||||
r.bbox.MinY = readFloat64(er)
|
||||
r.bbox.MaxX = readFloat64(er)
|
||||
r.bbox.MaxY = readFloat64(er)
|
||||
r.shp.Seek(100, 0)
|
||||
return er.e
|
||||
}
|
||||
|
||||
func readFloat64(r io.Reader) float64 {
|
||||
var bits uint64
|
||||
binary.Read(r, binary.LittleEndian, &bits)
|
||||
return math.Float64frombits(bits)
|
||||
}
|
||||
|
||||
// Close closes the Shapefile.
|
||||
func (r *Reader) Close() error {
|
||||
if r.err == nil {
|
||||
r.err = r.shp.Close()
|
||||
if r.dbf != nil {
|
||||
r.dbf.Close()
|
||||
}
|
||||
}
|
||||
return r.err
|
||||
}
|
||||
|
||||
// Shape returns the most recent feature that was read by
|
||||
// a call to Next. It returns two values, the int is the
|
||||
// object index starting from zero in the shapefile which
|
||||
// can be used as row in ReadAttribute, and the Shape is the object.
|
||||
func (r *Reader) Shape() (int, Shape) {
|
||||
return int(r.num) - 1, r.shape
|
||||
}
|
||||
|
||||
// Attribute returns value of the n-th attribute of the most recent feature
|
||||
// that was read by a call to Next.
|
||||
func (r *Reader) Attribute(n int) string {
|
||||
return r.ReadAttribute(int(r.num)-1, n)
|
||||
}
|
||||
|
||||
// newShape creates a new shape with a given type.
|
||||
func newShape(shapetype ShapeType) (Shape, error) {
|
||||
switch shapetype {
|
||||
case NULL:
|
||||
return new(Null), nil
|
||||
case POINT:
|
||||
return new(Point), nil
|
||||
case POLYLINE:
|
||||
return new(PolyLine), nil
|
||||
case POLYGON:
|
||||
return new(Polygon), nil
|
||||
case MULTIPOINT:
|
||||
return new(MultiPoint), nil
|
||||
case POINTZ:
|
||||
return new(PointZ), nil
|
||||
case POLYLINEZ:
|
||||
return new(PolyLineZ), nil
|
||||
case POLYGONZ:
|
||||
return new(PolygonZ), nil
|
||||
case MULTIPOINTZ:
|
||||
return new(MultiPointZ), nil
|
||||
case POINTM:
|
||||
return new(PointM), nil
|
||||
case POLYLINEM:
|
||||
return new(PolyLineM), nil
|
||||
case POLYGONM:
|
||||
return new(PolygonM), nil
|
||||
case MULTIPOINTM:
|
||||
return new(MultiPointM), nil
|
||||
case MULTIPATCH:
|
||||
return new(MultiPatch), nil
|
||||
default:
|
||||
return nil, fmt.Errorf("Unsupported shape type: %v", shapetype)
|
||||
}
|
||||
}
|
||||
|
||||
// Next reads in the next Shape in the Shapefile, which
|
||||
// will then be available through the Shape method. It
|
||||
// returns false when the reader has reached the end of the
|
||||
// file or encounters an error.
|
||||
func (r *Reader) Next() bool {
|
||||
cur, _ := r.shp.Seek(0, io.SeekCurrent)
|
||||
if cur >= r.filelength {
|
||||
return false
|
||||
}
|
||||
|
||||
var size int32
|
||||
var shapetype ShapeType
|
||||
er := &errReader{Reader: r.shp}
|
||||
binary.Read(er, binary.BigEndian, &r.num)
|
||||
binary.Read(er, binary.BigEndian, &size)
|
||||
binary.Read(er, binary.LittleEndian, &shapetype)
|
||||
if er.e != nil {
|
||||
if er.e != io.EOF {
|
||||
r.err = fmt.Errorf("Error when reading metadata of next shape: %v", er.e)
|
||||
} else {
|
||||
r.err = io.EOF
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
var err error
|
||||
r.shape, err = newShape(shapetype)
|
||||
if err != nil {
|
||||
r.err = fmt.Errorf("Error decoding shape type: %v", err)
|
||||
return false
|
||||
}
|
||||
r.shape.read(er)
|
||||
if er.e != nil {
|
||||
r.err = fmt.Errorf("Error while reading next shape: %v", er.e)
|
||||
return false
|
||||
}
|
||||
|
||||
// move to next object
|
||||
r.shp.Seek(int64(size)*2+cur+8, 0)
|
||||
return true
|
||||
}
|
||||
|
||||
// Opens DBF file using r.filename + "dbf". This method
|
||||
// will parse the header and fill out all dbf* values int
|
||||
// the f object.
|
||||
func (r *Reader) openDbf() (err error) {
|
||||
if r.dbf != nil {
|
||||
return
|
||||
}
|
||||
|
||||
r.dbf, err = os.Open(r.filename + ".dbf")
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
// read header
|
||||
r.dbf.Seek(4, io.SeekStart)
|
||||
binary.Read(r.dbf, binary.LittleEndian, &r.dbfNumRecords)
|
||||
binary.Read(r.dbf, binary.LittleEndian, &r.dbfHeaderLength)
|
||||
binary.Read(r.dbf, binary.LittleEndian, &r.dbfRecordLength)
|
||||
|
||||
r.dbf.Seek(20, io.SeekCurrent) // skip padding
|
||||
numFields := int(math.Floor(float64(r.dbfHeaderLength-33) / 32.0))
|
||||
r.dbfFields = make([]Field, numFields)
|
||||
binary.Read(r.dbf, binary.LittleEndian, &r.dbfFields)
|
||||
return
|
||||
}
|
||||
|
||||
// Fields returns a slice of Fields that are present in the
|
||||
// DBF table.
|
||||
func (r *Reader) Fields() []Field {
|
||||
err := r.openDbf()
|
||||
fmt.Println(err)
|
||||
if err != nil {
|
||||
return nil
|
||||
} // make sure we have dbf file to read from
|
||||
return r.dbfFields
|
||||
}
|
||||
|
||||
// Err returns the last non-EOF error encountered.
|
||||
func (r *Reader) Err() error {
|
||||
if r.err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return r.err
|
||||
}
|
||||
|
||||
// AttributeCount returns number of records in the DBF table.
|
||||
func (r *Reader) AttributeCount() int {
|
||||
r.openDbf() // make sure we have a dbf file to read from
|
||||
return int(r.dbfNumRecords)
|
||||
}
|
||||
|
||||
// ReadAttribute returns the attribute value at row for field in
|
||||
// the DBF table as a string. Both values starts at 0.
|
||||
func (r *Reader) ReadAttribute(row int, field int) string {
|
||||
r.openDbf() // make sure we have a dbf file to read from
|
||||
seekTo := 1 + int64(r.dbfHeaderLength) + (int64(row) * int64(r.dbfRecordLength))
|
||||
for n := 0; n < field; n++ {
|
||||
seekTo += int64(r.dbfFields[n].Size)
|
||||
}
|
||||
r.dbf.Seek(seekTo, io.SeekStart)
|
||||
buf := make([]byte, r.dbfFields[field].Size)
|
||||
r.dbf.Read(buf)
|
||||
return strings.Trim(string(buf[:]), " ")
|
||||
}
|
527
api/v1/common/tool/shp/reader_test.go
Normal file
527
api/v1/common/tool/shp/reader_test.go
Normal file
@ -0,0 +1,527 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func pointsEqual(a, b []float64) bool {
|
||||
if len(a) != len(b) {
|
||||
return false
|
||||
}
|
||||
for k, v := range a {
|
||||
if v != b[k] {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
func getShapesFromFile(prefix string, t *testing.T) (shapes []Shape) {
|
||||
filename := prefix + ".shp"
|
||||
file, err := Open(filename)
|
||||
if err != nil {
|
||||
t.Fatal("Failed to open shapefile: " + filename + " (" + err.Error() + ")")
|
||||
}
|
||||
defer file.Close()
|
||||
|
||||
for file.Next() {
|
||||
_, shape := file.Shape()
|
||||
shapes = append(shapes, shape)
|
||||
}
|
||||
if file.Err() != nil {
|
||||
t.Errorf("Error while getting shapes for %s: %v", prefix, file.Err())
|
||||
}
|
||||
|
||||
return shapes
|
||||
}
|
||||
|
||||
type shapeGetterFunc func(string, *testing.T) []Shape
|
||||
|
||||
type identityTestFunc func(*testing.T, [][]float64, []Shape)
|
||||
|
||||
func testPoint(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*Point)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
if !pointsEqual([]float64{p.X, p.Y}, points[n]) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPolyLine(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*PolyLine)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPolygon(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*Polygon)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testMultiPoint(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*MultiPoint)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPointZ(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*PointZ)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
if !pointsEqual([]float64{p.X, p.Y, p.Z}, points[n]) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPolyLineZ(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*PolyLineZ)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPolygonZ(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*PolygonZ)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testMultiPointZ(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*MultiPointZ)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPointM(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*PointM)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
if !pointsEqual([]float64{p.X, p.Y, p.M}, points[n]) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPolyLineM(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*PolyLineM)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testPolygonM(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*PolygonM)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testMultiPointM(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*MultiPointM)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y, p.MArray[k]}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testMultiPatch(t *testing.T, points [][]float64, shapes []Shape) {
|
||||
for n, s := range shapes {
|
||||
p, ok := s.(*MultiPatch)
|
||||
if !ok {
|
||||
t.Fatal("Failed to type assert.")
|
||||
}
|
||||
for k, point := range p.Points {
|
||||
if !pointsEqual(points[n*3+k], []float64{point.X, point.Y, p.ZArray[k]}) {
|
||||
t.Error("Points did not match.")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func testshapeIdentity(t *testing.T, prefix string, getter shapeGetterFunc) {
|
||||
shapes := getter(prefix, t)
|
||||
d := dataForReadTests[prefix]
|
||||
if len(shapes) != d.count {
|
||||
t.Errorf("Number of shapes for %s read was wrong. Wanted %d, got %d.", prefix, d.count, len(shapes))
|
||||
}
|
||||
d.tester(t, d.points, shapes)
|
||||
}
|
||||
|
||||
func TestReadBBox(t *testing.T) {
|
||||
tests := []struct {
|
||||
filename string
|
||||
want Box
|
||||
}{
|
||||
{"test_files/multipatch.shp", Box{0, 0, 10, 10}},
|
||||
{"test_files/multipoint.shp", Box{0, 5, 10, 10}},
|
||||
{"test_files/multipointm.shp", Box{0, 5, 10, 10}},
|
||||
{"test_files/multipointz.shp", Box{0, 5, 10, 10}},
|
||||
{"test_files/point.shp", Box{0, 5, 10, 10}},
|
||||
{"test_files/pointm.shp", Box{0, 5, 10, 10}},
|
||||
{"test_files/pointz.shp", Box{0, 5, 10, 10}},
|
||||
{"test_files/polygon.shp", Box{0, 0, 5, 5}},
|
||||
{"test_files/polygonm.shp", Box{0, 0, 5, 5}},
|
||||
{"test_files/polygonz.shp", Box{0, 0, 5, 5}},
|
||||
{"test_files/polyline.shp", Box{0, 0, 25, 25}},
|
||||
{"test_files/polylinem.shp", Box{0, 0, 25, 25}},
|
||||
{"test_files/polylinez.shp", Box{0, 0, 25, 25}},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
r, err := Open(tt.filename)
|
||||
if err != nil {
|
||||
t.Fatalf("%v", err)
|
||||
}
|
||||
if got := r.BBox().MinX; got != tt.want.MinX {
|
||||
t.Errorf("got MinX = %v, want %v", got, tt.want.MinX)
|
||||
}
|
||||
if got := r.BBox().MinY; got != tt.want.MinY {
|
||||
t.Errorf("got MinY = %v, want %v", got, tt.want.MinY)
|
||||
}
|
||||
if got := r.BBox().MaxX; got != tt.want.MaxX {
|
||||
t.Errorf("got MaxX = %v, want %v", got, tt.want.MaxX)
|
||||
}
|
||||
if got := r.BBox().MaxY; got != tt.want.MaxY {
|
||||
t.Errorf("got MaxY = %v, want %v", got, tt.want.MaxY)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
type testCaseData struct {
|
||||
points [][]float64
|
||||
tester identityTestFunc
|
||||
count int
|
||||
}
|
||||
|
||||
var dataForReadTests = map[string]testCaseData{
|
||||
"test_files/polygonm": {
|
||||
points: [][]float64{
|
||||
{0, 0, 0},
|
||||
{0, 5, 5},
|
||||
{5, 5, 10},
|
||||
{5, 0, 15},
|
||||
{0, 0, 0},
|
||||
},
|
||||
tester: testPolygonM,
|
||||
count: 1,
|
||||
},
|
||||
"test_files/multipointm": {
|
||||
points: [][]float64{
|
||||
{10, 10, 100},
|
||||
{5, 5, 50},
|
||||
{0, 10, 75},
|
||||
},
|
||||
tester: testMultiPointM,
|
||||
count: 1,
|
||||
},
|
||||
"test_files/multipatch": {
|
||||
points: [][]float64{
|
||||
{0, 0, 0},
|
||||
{10, 0, 0},
|
||||
{10, 10, 0},
|
||||
{0, 10, 0},
|
||||
{0, 0, 0},
|
||||
{0, 10, 0},
|
||||
{0, 10, 10},
|
||||
{0, 0, 10},
|
||||
{0, 0, 0},
|
||||
{0, 10, 0},
|
||||
{10, 0, 0},
|
||||
{10, 0, 10},
|
||||
{10, 10, 10},
|
||||
{10, 10, 0},
|
||||
{10, 0, 0},
|
||||
{0, 0, 0},
|
||||
{0, 0, 10},
|
||||
{10, 0, 10},
|
||||
{10, 0, 0},
|
||||
{0, 0, 0},
|
||||
{10, 10, 0},
|
||||
{10, 10, 10},
|
||||
{0, 10, 10},
|
||||
{0, 10, 0},
|
||||
{10, 10, 0},
|
||||
{0, 0, 10},
|
||||
{0, 10, 10},
|
||||
{10, 10, 10},
|
||||
{10, 0, 10},
|
||||
{0, 0, 10},
|
||||
},
|
||||
tester: testMultiPatch,
|
||||
count: 1,
|
||||
},
|
||||
"test_files/point": {
|
||||
points: [][]float64{
|
||||
{10, 10},
|
||||
{5, 5},
|
||||
{0, 10},
|
||||
},
|
||||
tester: testPoint,
|
||||
count: 3,
|
||||
},
|
||||
"test_files/polyline": {
|
||||
points: [][]float64{
|
||||
{0, 0},
|
||||
{5, 5},
|
||||
{10, 10},
|
||||
{15, 15},
|
||||
{20, 20},
|
||||
{25, 25},
|
||||
},
|
||||
tester: testPolyLine,
|
||||
count: 2,
|
||||
},
|
||||
"test_files/polygon": {
|
||||
points: [][]float64{
|
||||
{0, 0},
|
||||
{0, 5},
|
||||
{5, 5},
|
||||
{5, 0},
|
||||
{0, 0},
|
||||
},
|
||||
tester: testPolygon,
|
||||
count: 1,
|
||||
},
|
||||
"test_files/multipoint": {
|
||||
points: [][]float64{
|
||||
{10, 10},
|
||||
{5, 5},
|
||||
{0, 10},
|
||||
},
|
||||
tester: testMultiPoint,
|
||||
count: 1,
|
||||
},
|
||||
"test_files/pointz": {
|
||||
points: [][]float64{
|
||||
{10, 10, 100},
|
||||
{5, 5, 50},
|
||||
{0, 10, 75},
|
||||
},
|
||||
tester: testPointZ,
|
||||
count: 3,
|
||||
},
|
||||
"test_files/polylinez": {
|
||||
points: [][]float64{
|
||||
{0, 0, 0},
|
||||
{5, 5, 5},
|
||||
{10, 10, 10},
|
||||
{15, 15, 15},
|
||||
{20, 20, 20},
|
||||
{25, 25, 25},
|
||||
},
|
||||
tester: testPolyLineZ,
|
||||
count: 2,
|
||||
},
|
||||
"test_files/polygonz": {
|
||||
points: [][]float64{
|
||||
{0, 0, 0},
|
||||
{0, 5, 5},
|
||||
{5, 5, 10},
|
||||
{5, 0, 15},
|
||||
{0, 0, 0},
|
||||
},
|
||||
tester: testPolygonZ,
|
||||
count: 1,
|
||||
},
|
||||
"test_files/multipointz": {
|
||||
points: [][]float64{
|
||||
{10, 10, 100},
|
||||
{5, 5, 50},
|
||||
{0, 10, 75},
|
||||
},
|
||||
tester: testMultiPointZ,
|
||||
count: 1,
|
||||
},
|
||||
"test_files/pointm": {
|
||||
points: [][]float64{
|
||||
{10, 10, 100},
|
||||
{5, 5, 50},
|
||||
{0, 10, 75},
|
||||
},
|
||||
tester: testPointM,
|
||||
count: 3,
|
||||
},
|
||||
"test_files/polylinem": {
|
||||
points: [][]float64{
|
||||
{0, 0, 0},
|
||||
{5, 5, 5},
|
||||
{10, 10, 10},
|
||||
{15, 15, 15},
|
||||
{20, 20, 20},
|
||||
{25, 25, 25},
|
||||
},
|
||||
tester: testPolyLineM,
|
||||
count: 2,
|
||||
},
|
||||
}
|
||||
|
||||
func TestReadPoint(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/point", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPolyLine(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/polyline", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPolygon(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/polygon", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadMultiPoint(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/multipoint", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPointZ(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/pointz", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPolyLineZ(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/polylinez", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPolygonZ(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/polygonz", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadMultiPointZ(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/multipointz", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPointM(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/pointm", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPolyLineM(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/polylinem", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadPolygonM(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/polygonm", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadMultiPointM(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/multipointm", getShapesFromFile)
|
||||
}
|
||||
|
||||
func TestReadMultiPatch(t *testing.T) {
|
||||
testshapeIdentity(t, "test_files/multipatch", getShapesFromFile)
|
||||
}
|
||||
|
||||
func newReadSeekCloser(b []byte) readSeekCloser {
|
||||
return struct {
|
||||
io.Closer
|
||||
io.ReadSeeker
|
||||
}{
|
||||
ioutil.NopCloser(nil),
|
||||
bytes.NewReader(b),
|
||||
}
|
||||
}
|
||||
|
||||
func TestReadInvalidShapeType(t *testing.T) {
|
||||
record := []byte{
|
||||
0, 0, 0, 0,
|
||||
0, 0, 0, 0,
|
||||
255, 255, 255, 255, // shape type
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
r interface {
|
||||
Next() bool
|
||||
Err() error
|
||||
}
|
||||
name string
|
||||
}{
|
||||
{&Reader{shp: newReadSeekCloser(record), filelength: int64(len(record))}, "reader"},
|
||||
{&seqReader{shp: newReadSeekCloser(record), filelength: int64(len(record))}, "seqReader"},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
if test.r.Next() {
|
||||
t.Fatal("read unsupported shape type without stopping")
|
||||
}
|
||||
if test.r.Err() == nil {
|
||||
t.Fatal("read unsupported shape type without error")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
235
api/v1/common/tool/shp/sequentialreader.go
Normal file
235
api/v1/common/tool/shp/sequentialreader.go
Normal file
@ -0,0 +1,235 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"math"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// SequentialReader is the interface that allows reading shapes and attributes one after another. It also embeds io.Closer.
|
||||
type SequentialReader interface {
|
||||
// Close() frees the resources allocated by the SequentialReader.
|
||||
io.Closer
|
||||
|
||||
// Next() tries to advance the reading by one shape and one attribute row
|
||||
// and returns true if the read operation could be performed without any
|
||||
// error.
|
||||
Next() bool
|
||||
|
||||
// Shape returns the index and the last read shape. If the SequentialReader
|
||||
// encountered any errors, nil is returned for the Shape.
|
||||
Shape() (int, Shape)
|
||||
|
||||
// Attribute returns the value of the n-th attribute in the current row. If
|
||||
// the SequentialReader encountered any errors, the empty string is
|
||||
// returned.
|
||||
Attribute(n int) string
|
||||
|
||||
// Fields returns the fields of the database. If the SequentialReader
|
||||
// encountered any errors, nil is returned.
|
||||
Fields() []Field
|
||||
|
||||
// Err returns the last non-EOF error encountered.
|
||||
Err() error
|
||||
}
|
||||
|
||||
// Attributes returns all attributes of the shape that sr was last advanced to.
|
||||
func Attributes(sr SequentialReader) []string {
|
||||
if sr.Err() != nil {
|
||||
return nil
|
||||
}
|
||||
s := make([]string, len(sr.Fields()))
|
||||
for i := range s {
|
||||
s[i] = sr.Attribute(i)
|
||||
}
|
||||
return s
|
||||
}
|
||||
|
||||
// AttributeCount returns the number of fields of the database.
|
||||
func AttributeCount(sr SequentialReader) int {
|
||||
return len(sr.Fields())
|
||||
}
|
||||
|
||||
// seqReader implements SequentialReader based on external io.ReadCloser
|
||||
// instances
|
||||
type seqReader struct {
|
||||
shp, dbf io.ReadCloser
|
||||
err error
|
||||
|
||||
geometryType ShapeType
|
||||
bbox Box
|
||||
|
||||
shape Shape
|
||||
num int32
|
||||
filelength int64
|
||||
|
||||
dbfFields []Field
|
||||
dbfNumRecords int32
|
||||
dbfHeaderLength int16
|
||||
dbfRecordLength int16
|
||||
dbfRow []byte
|
||||
}
|
||||
|
||||
// Read and parse headers in the Shapefile. This will fill out GeometryType,
|
||||
// filelength and bbox.
|
||||
func (sr *seqReader) readHeaders() {
|
||||
// contrary to Reader.readHeaders we cannot seek with the ReadCloser, so we
|
||||
// need to trust the filelength in the header
|
||||
|
||||
er := &errReader{Reader: sr.shp}
|
||||
// shp headers
|
||||
io.CopyN(ioutil.Discard, er, 24)
|
||||
var l int32
|
||||
binary.Read(er, binary.BigEndian, &l)
|
||||
sr.filelength = int64(l) * 2
|
||||
io.CopyN(ioutil.Discard, er, 4)
|
||||
binary.Read(er, binary.LittleEndian, &sr.geometryType)
|
||||
sr.bbox.MinX = readFloat64(er)
|
||||
sr.bbox.MinY = readFloat64(er)
|
||||
sr.bbox.MaxX = readFloat64(er)
|
||||
sr.bbox.MaxY = readFloat64(er)
|
||||
io.CopyN(ioutil.Discard, er, 32) // skip four float64: Zmin, Zmax, Mmin, Max
|
||||
if er.e != nil {
|
||||
sr.err = fmt.Errorf("Error when reading SHP header: %v", er.e)
|
||||
return
|
||||
}
|
||||
|
||||
// dbf header
|
||||
er = &errReader{Reader: sr.dbf}
|
||||
if sr.dbf == nil {
|
||||
return
|
||||
}
|
||||
io.CopyN(ioutil.Discard, er, 4)
|
||||
binary.Read(er, binary.LittleEndian, &sr.dbfNumRecords)
|
||||
binary.Read(er, binary.LittleEndian, &sr.dbfHeaderLength)
|
||||
binary.Read(er, binary.LittleEndian, &sr.dbfRecordLength)
|
||||
io.CopyN(ioutil.Discard, er, 20) // skip padding
|
||||
numFields := int(math.Floor(float64(sr.dbfHeaderLength-33) / 32.0))
|
||||
sr.dbfFields = make([]Field, numFields)
|
||||
binary.Read(er, binary.LittleEndian, &sr.dbfFields)
|
||||
buf := make([]byte, 1)
|
||||
er.Read(buf[:])
|
||||
if er.e != nil {
|
||||
sr.err = fmt.Errorf("Error when reading DBF header: %v", er.e)
|
||||
return
|
||||
}
|
||||
if buf[0] != 0x0d {
|
||||
sr.err = fmt.Errorf("Field descriptor array terminator not found")
|
||||
return
|
||||
}
|
||||
sr.dbfRow = make([]byte, sr.dbfRecordLength)
|
||||
}
|
||||
|
||||
// Next implements a method of interface SequentialReader for seqReader.
|
||||
func (sr *seqReader) Next() bool {
|
||||
if sr.err != nil {
|
||||
return false
|
||||
}
|
||||
var num, size int32
|
||||
var shapetype ShapeType
|
||||
|
||||
// read shape
|
||||
er := &errReader{Reader: sr.shp}
|
||||
binary.Read(er, binary.BigEndian, &num)
|
||||
binary.Read(er, binary.BigEndian, &size)
|
||||
binary.Read(er, binary.LittleEndian, &shapetype)
|
||||
|
||||
if er.e != nil {
|
||||
if er.e != io.EOF {
|
||||
sr.err = fmt.Errorf("Error when reading shapefile header: %v", er.e)
|
||||
} else {
|
||||
sr.err = io.EOF
|
||||
}
|
||||
return false
|
||||
}
|
||||
sr.num = num
|
||||
var err error
|
||||
sr.shape, err = newShape(shapetype)
|
||||
if err != nil {
|
||||
sr.err = fmt.Errorf("Error decoding shape type: %v", err)
|
||||
return false
|
||||
}
|
||||
sr.shape.read(er)
|
||||
switch {
|
||||
case er.e == io.EOF:
|
||||
// io.EOF means end-of-file was reached gracefully after all
|
||||
// shape-internal reads succeeded, so it's not a reason stop
|
||||
// iterating over all shapes.
|
||||
er.e = nil
|
||||
case er.e != nil:
|
||||
sr.err = fmt.Errorf("Error while reading next shape: %v", er.e)
|
||||
return false
|
||||
}
|
||||
skipBytes := int64(size)*2 + 8 - er.n
|
||||
_, ce := io.CopyN(ioutil.Discard, er, skipBytes)
|
||||
if er.e != nil {
|
||||
sr.err = er.e
|
||||
return false
|
||||
}
|
||||
if ce != nil {
|
||||
sr.err = fmt.Errorf("Error when discarding bytes on sequential read: %v", ce)
|
||||
return false
|
||||
}
|
||||
if _, err := io.ReadFull(sr.dbf, sr.dbfRow); err != nil {
|
||||
sr.err = fmt.Errorf("Error when reading DBF row: %v", err)
|
||||
return false
|
||||
}
|
||||
if sr.dbfRow[0] != 0x20 && sr.dbfRow[0] != 0x2a {
|
||||
sr.err = fmt.Errorf("Attribute row %d starts with incorrect deletion indicator", num)
|
||||
}
|
||||
return sr.err == nil
|
||||
}
|
||||
|
||||
// Shape implements a method of interface SequentialReader for seqReader.
|
||||
func (sr *seqReader) Shape() (int, Shape) {
|
||||
return int(sr.num) - 1, sr.shape
|
||||
}
|
||||
|
||||
// Attribute implements a method of interface SequentialReader for seqReader.
|
||||
func (sr *seqReader) Attribute(n int) string {
|
||||
if sr.err != nil {
|
||||
return ""
|
||||
}
|
||||
start := 1
|
||||
f := 0
|
||||
for ; f < n; f++ {
|
||||
start += int(sr.dbfFields[f].Size)
|
||||
}
|
||||
s := string(sr.dbfRow[start : start+int(sr.dbfFields[f].Size)])
|
||||
return strings.Trim(s, " ")
|
||||
}
|
||||
|
||||
// Err returns the first non-EOF error that was encountered.
|
||||
func (sr *seqReader) Err() error {
|
||||
if sr.err == io.EOF {
|
||||
return nil
|
||||
}
|
||||
return sr.err
|
||||
}
|
||||
|
||||
// Close closes the seqReader and free all the allocated resources.
|
||||
func (sr *seqReader) Close() error {
|
||||
if err := sr.shp.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
if err := sr.dbf.Close(); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Fields returns a slice of the fields that are present in the DBF table.
|
||||
func (sr *seqReader) Fields() []Field {
|
||||
return sr.dbfFields
|
||||
}
|
||||
|
||||
// SequentialReaderFromExt returns a new SequentialReader that interprets shp
|
||||
// as a source of shapes whose attributes can be retrieved from dbf.
|
||||
func SequentialReaderFromExt(shp, dbf io.ReadCloser) SequentialReader {
|
||||
sr := &seqReader{shp: shp, dbf: dbf}
|
||||
sr.readHeaders()
|
||||
return sr
|
||||
}
|
43
api/v1/common/tool/shp/sequentialreader_test.go
Normal file
43
api/v1/common/tool/shp/sequentialreader_test.go
Normal file
@ -0,0 +1,43 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func openFile(name string, t *testing.T) *os.File {
|
||||
f, err := os.Open(name)
|
||||
if err != nil {
|
||||
t.Fatalf("Failed to open %s: %v", name, err)
|
||||
}
|
||||
return f
|
||||
}
|
||||
|
||||
func getShapesSequentially(prefix string, t *testing.T) (shapes []Shape) {
|
||||
shp := openFile(prefix+".shp", t)
|
||||
dbf := openFile(prefix+".dbf", t)
|
||||
|
||||
sr := SequentialReaderFromExt(shp, dbf)
|
||||
if err := sr.Err(); err != nil {
|
||||
t.Fatalf("Error when iterating over the shapefile header: %v", err)
|
||||
}
|
||||
for sr.Next() {
|
||||
_, shape := sr.Shape()
|
||||
shapes = append(shapes, shape)
|
||||
}
|
||||
if err := sr.Err(); err != nil {
|
||||
t.Errorf("Error when iterating over the shapes: %v", err)
|
||||
}
|
||||
|
||||
if err := sr.Close(); err != nil {
|
||||
t.Errorf("Could not close sequential reader: %v", err)
|
||||
}
|
||||
return shapes
|
||||
}
|
||||
|
||||
func TestSequentialReader(t *testing.T) {
|
||||
for prefix := range dataForReadTests {
|
||||
t.Logf("Testing sequential read for %s", prefix)
|
||||
testshapeIdentity(t, prefix, getShapesSequentially)
|
||||
}
|
||||
}
|
612
api/v1/common/tool/shp/shapefile.go
Normal file
612
api/v1/common/tool/shp/shapefile.go
Normal file
@ -0,0 +1,612 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"io"
|
||||
"strings"
|
||||
)
|
||||
|
||||
//go:generate stringer -type=ShapeType
|
||||
|
||||
// ShapeType is a identifier for the the type of shapes.
|
||||
type ShapeType int32
|
||||
|
||||
// These are the possible shape types.
|
||||
const (
|
||||
NULL ShapeType = 0
|
||||
POINT ShapeType = 1
|
||||
POLYLINE ShapeType = 3
|
||||
POLYGON ShapeType = 5
|
||||
MULTIPOINT ShapeType = 8
|
||||
POINTZ ShapeType = 11
|
||||
POLYLINEZ ShapeType = 13
|
||||
POLYGONZ ShapeType = 15
|
||||
MULTIPOINTZ ShapeType = 18
|
||||
POINTM ShapeType = 21
|
||||
POLYLINEM ShapeType = 23
|
||||
POLYGONM ShapeType = 25
|
||||
MULTIPOINTM ShapeType = 28
|
||||
MULTIPATCH ShapeType = 31
|
||||
)
|
||||
|
||||
// Box structure made up from four coordinates. This type
|
||||
// is used to represent bounding boxes
|
||||
type Box struct {
|
||||
MinX, MinY, MaxX, MaxY float64
|
||||
}
|
||||
|
||||
// Extend extends the box with coordinates from the provided
|
||||
// box. This method calls Box.ExtendWithPoint twice with
|
||||
// {MinX, MinY} and {MaxX, MaxY}
|
||||
func (b *Box) Extend(box Box) {
|
||||
b.ExtendWithPoint(Point{box.MinX, box.MinY})
|
||||
b.ExtendWithPoint(Point{box.MaxX, box.MaxY})
|
||||
}
|
||||
|
||||
// ExtendWithPoint extends box with coordinates from point
|
||||
// if they are outside the range of the current box.
|
||||
func (b *Box) ExtendWithPoint(p Point) {
|
||||
if p.X < b.MinX {
|
||||
b.MinX = p.X
|
||||
}
|
||||
if p.Y < b.MinY {
|
||||
b.MinY = p.Y
|
||||
}
|
||||
if p.X > b.MaxX {
|
||||
b.MaxX = p.X
|
||||
}
|
||||
if p.Y > b.MaxY {
|
||||
b.MaxY = p.Y
|
||||
}
|
||||
}
|
||||
|
||||
// BBoxFromPoints returns the bounding box calculated
|
||||
// from points.
|
||||
func BBoxFromPoints(points []Point) (box Box) {
|
||||
for k, p := range points {
|
||||
if k == 0 {
|
||||
box = Box{p.X, p.Y, p.X, p.Y}
|
||||
} else {
|
||||
box.ExtendWithPoint(p)
|
||||
}
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
// Shape interface
|
||||
type Shape interface {
|
||||
BBox() Box
|
||||
|
||||
read(io.Reader)
|
||||
write(io.Writer)
|
||||
}
|
||||
|
||||
// Null is an empty shape.
|
||||
type Null struct {
|
||||
}
|
||||
|
||||
// BBox Returns an empty BBox at the geometry origin.
|
||||
func (n Null) BBox() Box {
|
||||
return Box{0.0, 0.0, 0.0, 0.0}
|
||||
}
|
||||
|
||||
func (n *Null) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, n)
|
||||
}
|
||||
|
||||
func (n *Null) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, n)
|
||||
}
|
||||
|
||||
// Point is the shape that consists of single a geometry point.
|
||||
type Point struct {
|
||||
X, Y float64
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the Point feature, i.e. an empty area at
|
||||
// the point location itself.
|
||||
func (p Point) BBox() Box {
|
||||
return Box{p.X, p.Y, p.X, p.Y}
|
||||
}
|
||||
|
||||
func (p *Point) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, p)
|
||||
}
|
||||
|
||||
func (p *Point) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p)
|
||||
}
|
||||
|
||||
func flatten(points [][]Point) []Point {
|
||||
n, i := 0, 0
|
||||
for _, v := range points {
|
||||
n += len(v)
|
||||
}
|
||||
r := make([]Point, n)
|
||||
for _, v := range points {
|
||||
for _, p := range v {
|
||||
r[i] = p
|
||||
i++
|
||||
}
|
||||
}
|
||||
return r
|
||||
}
|
||||
|
||||
// PolyLine is a shape type that consists of an ordered set of vertices that
|
||||
// consists of one or more parts. A part is a connected sequence of two ore
|
||||
// more points. Parts may or may not be connected to another and may or may not
|
||||
// intersect each other.
|
||||
type PolyLine struct {
|
||||
Box
|
||||
NumParts int32
|
||||
NumPoints int32
|
||||
Parts []int32
|
||||
Points []Point
|
||||
}
|
||||
|
||||
// NewPolyLine returns a pointer a new PolyLine created
|
||||
// with the provided points. The inner slice should be
|
||||
// the points that the parent part consists of.
|
||||
func NewPolyLine(parts [][]Point) *PolyLine {
|
||||
points := flatten(parts)
|
||||
|
||||
p := &PolyLine{}
|
||||
p.NumParts = int32(len(parts))
|
||||
p.NumPoints = int32(len(points))
|
||||
p.Parts = make([]int32, len(parts))
|
||||
var marker int32
|
||||
for i, part := range parts {
|
||||
p.Parts[i] = marker
|
||||
marker += int32(len(part))
|
||||
}
|
||||
p.Points = points
|
||||
p.Box = p.BBox()
|
||||
|
||||
return p
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the PolyLine feature
|
||||
func (p PolyLine) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *PolyLine) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumParts)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Parts = make([]int32, p.NumParts)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Parts)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
}
|
||||
|
||||
func (p *PolyLine) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumParts)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Parts)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
}
|
||||
|
||||
// Polygon is identical to the PolyLine struct. However the parts must form
|
||||
// rings that may not intersect.
|
||||
type Polygon PolyLine
|
||||
|
||||
// BBox returns the bounding box of the Polygon feature
|
||||
func (p Polygon) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *Polygon) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumParts)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Parts = make([]int32, p.NumParts)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Parts)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
}
|
||||
|
||||
func (p *Polygon) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumParts)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Parts)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
}
|
||||
|
||||
// MultiPoint is the shape that consists of multiple points.
|
||||
type MultiPoint struct {
|
||||
Box Box
|
||||
NumPoints int32
|
||||
Points []Point
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the MultiPoint feature
|
||||
func (p MultiPoint) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *MultiPoint) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
}
|
||||
|
||||
func (p *MultiPoint) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
}
|
||||
|
||||
// PointZ is a triplet of double precision coordinates plus a measure.
|
||||
type PointZ struct {
|
||||
X float64
|
||||
Y float64
|
||||
Z float64
|
||||
M float64
|
||||
}
|
||||
|
||||
// BBox eturns the bounding box of the PointZ feature which is an zero-sized area
|
||||
// at the X and Y coordinates of the feature.
|
||||
func (p PointZ) BBox() Box {
|
||||
return Box{p.X, p.Y, p.X, p.Y}
|
||||
}
|
||||
|
||||
func (p *PointZ) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, p)
|
||||
}
|
||||
|
||||
func (p *PointZ) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p)
|
||||
}
|
||||
|
||||
// PolyLineZ is a shape which consists of one or more parts. A part is a
|
||||
// connected sequence of two or more points. Parts may or may not be connected
|
||||
// and may or may not intersect one another.
|
||||
type PolyLineZ struct {
|
||||
Box Box
|
||||
NumParts int32
|
||||
NumPoints int32
|
||||
Parts []int32
|
||||
Points []Point
|
||||
ZRange [2]float64
|
||||
ZArray []float64
|
||||
MRange [2]float64
|
||||
MArray []float64
|
||||
}
|
||||
|
||||
// BBox eturns the bounding box of the PolyLineZ feature.
|
||||
func (p PolyLineZ) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *PolyLineZ) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumParts)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Parts = make([]int32, p.NumParts)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
p.ZArray = make([]float64, p.NumPoints)
|
||||
p.MArray = make([]float64, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Parts)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZArray)
|
||||
binary.Read(file, binary.LittleEndian, &p.MRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.MArray)
|
||||
}
|
||||
|
||||
func (p *PolyLineZ) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumParts)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Parts)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
binary.Write(file, binary.LittleEndian, p.ZRange)
|
||||
binary.Write(file, binary.LittleEndian, p.ZArray)
|
||||
binary.Write(file, binary.LittleEndian, p.MRange)
|
||||
binary.Write(file, binary.LittleEndian, p.MArray)
|
||||
}
|
||||
|
||||
// PolygonZ structure is identical to the PolyLineZ structure.
|
||||
type PolygonZ PolyLineZ
|
||||
|
||||
// BBox returns the bounding box of the PolygonZ feature
|
||||
func (p PolygonZ) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *PolygonZ) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumParts)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Parts = make([]int32, p.NumParts)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
p.ZArray = make([]float64, p.NumPoints)
|
||||
p.MArray = make([]float64, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Parts)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZArray)
|
||||
binary.Read(file, binary.LittleEndian, &p.MRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.MArray)
|
||||
}
|
||||
|
||||
func (p *PolygonZ) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumParts)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Parts)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
binary.Write(file, binary.LittleEndian, p.ZRange)
|
||||
binary.Write(file, binary.LittleEndian, p.ZArray)
|
||||
binary.Write(file, binary.LittleEndian, p.MRange)
|
||||
binary.Write(file, binary.LittleEndian, p.MArray)
|
||||
}
|
||||
|
||||
// MultiPointZ consists of one ore more PointZ.
|
||||
type MultiPointZ struct {
|
||||
Box Box
|
||||
NumPoints int32
|
||||
Points []Point
|
||||
ZRange [2]float64
|
||||
ZArray []float64
|
||||
MRange [2]float64
|
||||
MArray []float64
|
||||
}
|
||||
|
||||
// BBox eturns the bounding box of the MultiPointZ feature.
|
||||
func (p MultiPointZ) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *MultiPointZ) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
p.ZArray = make([]float64, p.NumPoints)
|
||||
p.MArray = make([]float64, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZArray)
|
||||
binary.Read(file, binary.LittleEndian, &p.MRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.MArray)
|
||||
}
|
||||
|
||||
func (p *MultiPointZ) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
binary.Write(file, binary.LittleEndian, p.ZRange)
|
||||
binary.Write(file, binary.LittleEndian, p.ZArray)
|
||||
binary.Write(file, binary.LittleEndian, p.MRange)
|
||||
binary.Write(file, binary.LittleEndian, p.MArray)
|
||||
}
|
||||
|
||||
// PointM is a point with a measure.
|
||||
type PointM struct {
|
||||
X float64
|
||||
Y float64
|
||||
M float64
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the PointM feature which is a zero-sized
|
||||
// area at the X- and Y-coordinates of the point.
|
||||
func (p PointM) BBox() Box {
|
||||
return Box{p.X, p.Y, p.X, p.Y}
|
||||
}
|
||||
|
||||
func (p *PointM) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, p)
|
||||
}
|
||||
|
||||
func (p *PointM) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p)
|
||||
}
|
||||
|
||||
// PolyLineM is the polyline in which each point also has a measure.
|
||||
type PolyLineM struct {
|
||||
Box Box
|
||||
NumParts int32
|
||||
NumPoints int32
|
||||
Parts []int32
|
||||
Points []Point
|
||||
MRange [2]float64
|
||||
MArray []float64
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the PolyLineM feature.
|
||||
func (p PolyLineM) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *PolyLineM) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumParts)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Parts = make([]int32, p.NumParts)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
p.MArray = make([]float64, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Parts)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
binary.Read(file, binary.LittleEndian, &p.MRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.MArray)
|
||||
}
|
||||
|
||||
func (p *PolyLineM) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumParts)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Parts)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
binary.Write(file, binary.LittleEndian, p.MRange)
|
||||
binary.Write(file, binary.LittleEndian, p.MArray)
|
||||
}
|
||||
|
||||
// PolygonM structure is identical to the PolyLineZ structure.
|
||||
type PolygonM PolyLineZ
|
||||
|
||||
// BBox returns the bounding box of the PolygonM feature.
|
||||
func (p PolygonM) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *PolygonM) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumParts)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Parts = make([]int32, p.NumParts)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
p.MArray = make([]float64, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Parts)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
binary.Read(file, binary.LittleEndian, &p.MRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.MArray)
|
||||
}
|
||||
|
||||
func (p *PolygonM) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumParts)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Parts)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
binary.Write(file, binary.LittleEndian, p.MRange)
|
||||
binary.Write(file, binary.LittleEndian, p.MArray)
|
||||
}
|
||||
|
||||
// MultiPointM is the collection of multiple points with measures.
|
||||
type MultiPointM struct {
|
||||
Box Box
|
||||
NumPoints int32
|
||||
Points []Point
|
||||
MRange [2]float64
|
||||
MArray []float64
|
||||
}
|
||||
|
||||
// BBox eturns the bounding box of the MultiPointM feature
|
||||
func (p MultiPointM) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *MultiPointM) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
p.MArray = make([]float64, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
binary.Read(file, binary.LittleEndian, &p.MRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.MArray)
|
||||
}
|
||||
|
||||
func (p *MultiPointM) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
binary.Write(file, binary.LittleEndian, p.MRange)
|
||||
binary.Write(file, binary.LittleEndian, p.MArray)
|
||||
}
|
||||
|
||||
// MultiPatch consists of a number of surface patches. Each surface patch
// describes a surface. The surface patches of a MultiPatch are referred to as
// its parts, and the type of part controls how the order of vertices of a
// MultiPatch part is interpreted.
|
||||
type MultiPatch struct {
|
||||
Box Box
|
||||
NumParts int32
|
||||
NumPoints int32
|
||||
Parts []int32
|
||||
PartTypes []int32
|
||||
Points []Point
|
||||
ZRange [2]float64
|
||||
ZArray []float64
|
||||
MRange [2]float64
|
||||
MArray []float64
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the MultiPatch feature
|
||||
func (p MultiPatch) BBox() Box {
|
||||
return BBoxFromPoints(p.Points)
|
||||
}
|
||||
|
||||
func (p *MultiPatch) read(file io.Reader) {
|
||||
binary.Read(file, binary.LittleEndian, &p.Box)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumParts)
|
||||
binary.Read(file, binary.LittleEndian, &p.NumPoints)
|
||||
p.Parts = make([]int32, p.NumParts)
|
||||
p.PartTypes = make([]int32, p.NumParts)
|
||||
p.Points = make([]Point, p.NumPoints)
|
||||
p.ZArray = make([]float64, p.NumPoints)
|
||||
p.MArray = make([]float64, p.NumPoints)
|
||||
binary.Read(file, binary.LittleEndian, &p.Parts)
|
||||
binary.Read(file, binary.LittleEndian, &p.PartTypes)
|
||||
binary.Read(file, binary.LittleEndian, &p.Points)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.ZArray)
|
||||
binary.Read(file, binary.LittleEndian, &p.MRange)
|
||||
binary.Read(file, binary.LittleEndian, &p.MArray)
|
||||
}
|
||||
|
||||
func (p *MultiPatch) write(file io.Writer) {
|
||||
binary.Write(file, binary.LittleEndian, p.Box)
|
||||
binary.Write(file, binary.LittleEndian, p.NumParts)
|
||||
binary.Write(file, binary.LittleEndian, p.NumPoints)
|
||||
binary.Write(file, binary.LittleEndian, p.Parts)
|
||||
binary.Write(file, binary.LittleEndian, p.PartTypes)
|
||||
binary.Write(file, binary.LittleEndian, p.Points)
|
||||
binary.Write(file, binary.LittleEndian, p.ZRange)
|
||||
binary.Write(file, binary.LittleEndian, p.ZArray)
|
||||
binary.Write(file, binary.LittleEndian, p.MRange)
|
||||
binary.Write(file, binary.LittleEndian, p.MArray)
|
||||
}
|
||||
|
||||
// Field is the representation of a field object in the DBF file.
|
||||
type Field struct {
|
||||
Name [11]byte
|
||||
Fieldtype byte
|
||||
Addr [4]byte // not used
|
||||
Size uint8
|
||||
Precision uint8
|
||||
Padding [14]byte
|
||||
}
|
||||
|
||||
// String returns a string representation of the Field. Currently
// this only returns the field name.
|
||||
func (f Field) String() string {
|
||||
return strings.TrimRight(string(f.Name[:]), "\x00")
|
||||
}
|
||||
|
||||
// StringField returns a Field that can be used in SetFields to initialize the
|
||||
// DBF file.
|
||||
func StringField(name string, length uint8) Field {
|
||||
// TODO: Error checking
|
||||
field := Field{Fieldtype: 'C', Size: length}
|
||||
copy(field.Name[:], []byte(name))
|
||||
return field
|
||||
}
|
||||
|
||||
// NumberField returns a Field that can be used in SetFields to initialize the
|
||||
// DBF file.
|
||||
func NumberField(name string, length uint8) Field {
|
||||
field := Field{Fieldtype: 'N', Size: length}
|
||||
copy(field.Name[:], []byte(name))
|
||||
return field
|
||||
}
|
||||
|
||||
// FloatField returns a Field that can be used in SetFields to initialize the
|
||||
// DBF file. Used to store floating points with precision in the DBF.
|
||||
func FloatField(name string, length uint8, precision uint8) Field {
|
||||
field := Field{Fieldtype: 'F', Size: length, Precision: precision}
|
||||
copy(field.Name[:], []byte(name))
|
||||
return field
|
||||
}
|
||||
|
||||
// DateField returns a Field that can be used in SetFields to initialize the
|
||||
// DBF file. Used to store Date strings formatted as YYYYMMDD. Data wise this
|
||||
// is the same as a StringField with length 8.
|
||||
func DateField(name string) Field {
|
||||
field := Field{Fieldtype: 'D', Size: 8}
|
||||
copy(field.Name[:], []byte(name))
|
||||
return field
|
||||
}
|
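A minimal sketch (not part of this commit) of how the field helpers above compose a DBF schema; the column names and sizes are arbitrary examples, and the resulting slice is what gets passed to Writer.SetFields before any attributes are written:

fields := []shp.Field{
	shp.StringField("NAME", 25),   // 'C' column, up to 25 characters
	shp.NumberField("POP", 10),    // 'N' column, up to 10 digits
	shp.FloatField("AREA", 12, 4), // 'F' column, 12 wide, 4 decimal places
	shp.DateField("FOUNDED"),      // 'D' column, fixed-width YYYYMMDD string
}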
22
api/v1/common/tool/shp/shapefile_test.go
Normal file
@ -0,0 +1,22 @@
|
||||
package shp
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestBoxExtend(t *testing.T) {
|
||||
a := Box{-124.763068, 45.543541, -116.915989, 49.002494}
|
||||
b := Box{-92.888114, 42.49192, -86.805415, 47.080621}
|
||||
a.Extend(b)
|
||||
c := Box{-124.763068, 42.49192, -86.805415, 49.002494}
|
||||
if a.MinX != c.MinX {
|
||||
t.Errorf("a.MinX = %v, want %v", a.MinX, c.MinX)
|
||||
}
|
||||
if a.MinY != c.MinY {
|
||||
t.Errorf("a.MinY = %v, want %v", a.MinY, c.MinY)
|
||||
}
|
||||
if a.MaxX != c.MaxX {
|
||||
t.Errorf("a.MaxX = %v, want %v", a.MaxX, c.MaxX)
|
||||
}
|
||||
if a.MaxY != c.MaxY {
|
||||
t.Errorf("a.MaxY = %v, want %v", a.MaxY, c.MaxY)
|
||||
}
|
||||
}
|
31
api/v1/common/tool/shp/shapetype_string.go
Normal file
@ -0,0 +1,31 @@
|
||||
// Code generated by "stringer -type=ShapeType"; DO NOT EDIT.
|
||||
|
||||
package shp
|
||||
|
||||
import "strconv"
|
||||
|
||||
const _ShapeType_name = "NULLPOINTPOLYLINEPOLYGONMULTIPOINTPOINTZPOLYLINEZPOLYGONZMULTIPOINTZPOINTMPOLYLINEMPOLYGONMMULTIPOINTMMULTIPATCH"
|
||||
|
||||
var _ShapeType_map = map[ShapeType]string{
|
||||
0: _ShapeType_name[0:4],
|
||||
1: _ShapeType_name[4:9],
|
||||
3: _ShapeType_name[9:17],
|
||||
5: _ShapeType_name[17:24],
|
||||
8: _ShapeType_name[24:34],
|
||||
11: _ShapeType_name[34:40],
|
||||
13: _ShapeType_name[40:49],
|
||||
15: _ShapeType_name[49:57],
|
||||
18: _ShapeType_name[57:68],
|
||||
21: _ShapeType_name[68:74],
|
||||
23: _ShapeType_name[74:83],
|
||||
25: _ShapeType_name[83:91],
|
||||
28: _ShapeType_name[91:102],
|
||||
31: _ShapeType_name[102:112],
|
||||
}
|
||||
|
||||
func (i ShapeType) String() string {
|
||||
if str, ok := _ShapeType_map[i]; ok {
|
||||
return str
|
||||
}
|
||||
return "ShapeType(" + strconv.FormatInt(int64(i), 10) + ")"
|
||||
}
|
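For illustration only (not in the commit), the generated String method lets a ShapeType value print as its symbolic name, with a numeric fallback for unknown values; the POLYGONZ constant is assumed to be defined elsewhere in this package:

t := shp.POLYGONZ
fmt.Println(t.String())        // "POLYGONZ"
fmt.Println(shp.ShapeType(99)) // "ShapeType(99)" via the fallback branch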
BIN
api/v1/common/tool/shp/test_files/multipatch.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipatch.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipatch.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipoint.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipoint.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipoint.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipointm.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipointm.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipointm.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipointz.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipointz.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/multipointz.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/point.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/point.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/point.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/pointm.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/pointm.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/pointm.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/pointz.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/pointz.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/pointz.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygon.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygon.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygon.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygonm.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygonm.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygonm.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygonz.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygonz.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polygonz.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polyline.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polyline.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polyline.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polylinem.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polylinem.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polylinem.shx
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polylinez.dbf
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polylinez.shp
Normal file
Binary file not shown.
BIN
api/v1/common/tool/shp/test_files/polylinez.shx
Normal file
Binary file not shown.
345
api/v1/common/tool/shp/writer.go
Normal file
@ -0,0 +1,345 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"encoding/binary"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"math"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// Writer is the type that is used to write a new shapefile.
|
||||
type Writer struct {
|
||||
filename string
|
||||
shp writeSeekCloser
|
||||
shx writeSeekCloser
|
||||
GeometryType ShapeType
|
||||
num int32
|
||||
bbox Box
|
||||
|
||||
dbf writeSeekCloser
|
||||
dbfFields []Field
|
||||
dbfHeaderLength int16
|
||||
dbfRecordLength int16
|
||||
}
|
||||
|
||||
type writeSeekCloser interface {
|
||||
io.Writer
|
||||
io.Seeker
|
||||
io.Closer
|
||||
}
|
||||
|
||||
// Create returns a pointer to a new Writer and the first error that was
// encountered. In case an error occurred the returned Writer pointer will be nil.
// This also creates a corresponding SHX file. It is important to use Close()
// when done because that method writes all the headers for each file (SHP, SHX
// and DBF).
// If filename does not already end in ".shp", it will be treated as the basename
// for the file and the ".shp" extension will be appended to that name.
|
||||
func Create(filename string, t ShapeType) (*Writer, error) {
|
||||
if strings.HasSuffix(strings.ToLower(filename), ".shp") {
|
||||
filename = filename[0 : len(filename)-4]
|
||||
}
|
||||
shp, err := os.Create(filename + ".shp")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
shx, err := os.Create(filename + ".shx")
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
shp.Seek(100, io.SeekStart)
|
||||
shx.Seek(100, io.SeekStart)
|
||||
w := &Writer{
|
||||
filename: filename,
|
||||
shp: shp,
|
||||
shx: shx,
|
||||
GeometryType: t,
|
||||
}
|
||||
return w, nil
|
||||
}
|
||||
|
||||
// Append returns a Writer pointer that will append to the given shapefile and
// the first error that was encountered during creation of that Writer. The
// shapefile must have a valid index file.
|
||||
func Append(filename string) (*Writer, error) {
|
||||
shp, err := os.OpenFile(filename, os.O_RDWR, 0666)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
ext := filepath.Ext(filename)
|
||||
basename := filename[:len(filename)-len(ext)]
|
||||
w := &Writer{
|
||||
filename: basename,
|
||||
shp: shp,
|
||||
}
|
||||
_, err = shp.Seek(32, io.SeekStart)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek to SHP geometry type: %v", err)
|
||||
}
|
||||
err = binary.Read(shp, binary.LittleEndian, &w.GeometryType)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot read geometry type: %v", err)
|
||||
}
|
||||
er := &errReader{Reader: shp}
|
||||
w.bbox.MinX = readFloat64(er)
|
||||
w.bbox.MinY = readFloat64(er)
|
||||
w.bbox.MaxX = readFloat64(er)
|
||||
w.bbox.MaxY = readFloat64(er)
|
||||
if er.e != nil {
|
||||
return nil, fmt.Errorf("cannot read bounding box: %v", er.e)
|
||||
}
|
||||
|
||||
shx, err := os.OpenFile(basename+".shx", os.O_RDWR, 0666)
|
||||
if os.IsNotExist(err) {
|
||||
// TODO allow index file to not exist, in that case just
|
||||
// read through all the shapes and create it on the fly
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot open shapefile index: %v", err)
|
||||
}
|
||||
_, err = shx.Seek(-8, io.SeekEnd)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek to last shape index: %v", err)
|
||||
}
|
||||
var offset int32
|
||||
err = binary.Read(shx, binary.BigEndian, &offset)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot read last shape index: %v", err)
|
||||
}
|
||||
offset = offset * 2
|
||||
_, err = shp.Seek(int64(offset), io.SeekStart)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek to last shape: %v", err)
|
||||
}
|
||||
err = binary.Read(shp, binary.BigEndian, &w.num)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot read number of last shape: %v", err)
|
||||
}
|
||||
_, err = shp.Seek(0, io.SeekEnd)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek to SHP end: %v", err)
|
||||
}
|
||||
_, err = shx.Seek(0, io.SeekEnd)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek to SHX end: %v", err)
|
||||
}
|
||||
w.shx = shx
|
||||
|
||||
dbf, err := os.Open(basename + ".dbf")
|
||||
if os.IsNotExist(err) {
|
||||
return w, nil // it's okay if the DBF does not exist
|
||||
}
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot open DBF: %v", err)
|
||||
}
|
||||
|
||||
_, err = dbf.Seek(8, io.SeekStart)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek in DBF: %v", err)
|
||||
}
|
||||
err = binary.Read(dbf, binary.LittleEndian, &w.dbfHeaderLength)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot read header length from DBF: %v", err)
|
||||
}
|
||||
err = binary.Read(dbf, binary.LittleEndian, &w.dbfRecordLength)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot read record length from DBF: %v", err)
|
||||
}
|
||||
|
||||
_, err = dbf.Seek(20, io.SeekCurrent) // skip padding
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek in DBF: %v", err)
|
||||
}
|
||||
numFields := int(math.Floor(float64(w.dbfHeaderLength-33) / 32.0))
|
||||
w.dbfFields = make([]Field, numFields)
|
||||
err = binary.Read(dbf, binary.LittleEndian, &w.dbfFields)
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot read number of fields from DBF: %v", err)
|
||||
}
|
||||
_, err = dbf.Seek(0, io.SeekEnd) // jump to the end of the DBF
|
||||
if err != nil {
|
||||
return nil, fmt.Errorf("cannot seek to DBF end: %v", err)
|
||||
}
|
||||
w.dbf = dbf
|
||||
|
||||
return w, nil
|
||||
}
|
||||
|
||||
// Write writes the shape to the shapefile. This also creates
// a record in the SHX file and DBF file (if it is
// initialized). It returns the index of the written object,
// which can be used in WriteAttribute.
|
||||
func (w *Writer) Write(shape Shape) int32 {
|
||||
// extend the bounding box to include the new shape
|
||||
if w.num == 0 {
|
||||
w.bbox = shape.BBox()
|
||||
} else {
|
||||
w.bbox.Extend(shape.BBox())
|
||||
}
|
||||
|
||||
w.num++
|
||||
binary.Write(w.shp, binary.BigEndian, w.num)
|
||||
w.shp.Seek(4, io.SeekCurrent)
|
||||
start, _ := w.shp.Seek(0, io.SeekCurrent)
|
||||
binary.Write(w.shp, binary.LittleEndian, w.GeometryType)
|
||||
shape.write(w.shp)
|
||||
finish, _ := w.shp.Seek(0, io.SeekCurrent)
|
||||
length := int32(math.Floor((float64(finish) - float64(start)) / 2.0))
|
||||
w.shp.Seek(start-4, io.SeekStart)
|
||||
binary.Write(w.shp, binary.BigEndian, length)
|
||||
w.shp.Seek(finish, io.SeekStart)
|
||||
|
||||
// write shx
|
||||
binary.Write(w.shx, binary.BigEndian, int32((start-8)/2))
|
||||
binary.Write(w.shx, binary.BigEndian, length)
|
||||
|
||||
// write empty record to dbf
|
||||
if w.dbf != nil {
|
||||
w.writeEmptyRecord()
|
||||
}
|
||||
|
||||
return w.num - 1
|
||||
}
|
||||
|
||||
// Close closes the Writer. This must be used at the end of
|
||||
// the transaction because it writes the correct headers
|
||||
// to the SHP/SHX and DBF files before closing.
|
||||
func (w *Writer) Close() {
|
||||
w.writeHeader(w.shx)
|
||||
w.writeHeader(w.shp)
|
||||
w.shp.Close()
|
||||
w.shx.Close()
|
||||
|
||||
if w.dbf == nil {
|
||||
w.SetFields([]Field{})
|
||||
}
|
||||
w.writeDbfHeader(w.dbf)
|
||||
w.dbf.Close()
|
||||
}
|
||||
|
||||
// writeHeader writes SHP/SHX headers to ws.
|
||||
func (w *Writer) writeHeader(ws io.WriteSeeker) {
|
||||
filelength, _ := ws.Seek(0, io.SeekEnd)
|
||||
if filelength == 0 {
|
||||
filelength = 100
|
||||
}
|
||||
ws.Seek(0, io.SeekStart)
|
||||
// file code
|
||||
binary.Write(ws, binary.BigEndian, []int32{9994, 0, 0, 0, 0, 0})
|
||||
// file length
|
||||
binary.Write(ws, binary.BigEndian, int32(filelength/2))
|
||||
// version and shape type
|
||||
binary.Write(ws, binary.LittleEndian, []int32{1000, int32(w.GeometryType)})
|
||||
// bounding box
|
||||
binary.Write(ws, binary.LittleEndian, w.bbox)
|
||||
// elevation, measure
|
||||
binary.Write(ws, binary.LittleEndian, []float64{0.0, 0.0, 0.0, 0.0})
|
||||
}
|
||||
|
||||
// writeDbfHeader writes a DBF header to ws.
|
||||
func (w *Writer) writeDbfHeader(ws io.WriteSeeker) {
|
||||
ws.Seek(0, 0)
|
||||
// version, year (YEAR-1900), month, day
|
||||
binary.Write(ws, binary.LittleEndian, []byte{3, 24, 5, 3})
|
||||
// number of records
|
||||
binary.Write(ws, binary.LittleEndian, w.num)
|
||||
// header length, record length
|
||||
binary.Write(ws, binary.LittleEndian, []int16{w.dbfHeaderLength, w.dbfRecordLength})
|
||||
// padding
|
||||
binary.Write(ws, binary.LittleEndian, make([]byte, 20))
|
||||
|
||||
for _, field := range w.dbfFields {
|
||||
binary.Write(ws, binary.LittleEndian, field)
|
||||
}
|
||||
|
||||
// end with return
|
||||
ws.Write([]byte("\r"))
|
||||
}
|
||||
|
||||
// SetFields sets field values in the DBF. This initializes the DBF file and
|
||||
// should be used prior to writing any attributes.
|
||||
func (w *Writer) SetFields(fields []Field) error {
|
||||
if w.dbf != nil {
|
||||
return errors.New("Cannot set fields in existing dbf")
|
||||
}
|
||||
|
||||
var err error
|
||||
w.dbf, err = os.Create(w.filename + ".dbf")
|
||||
if err != nil {
|
||||
return fmt.Errorf("Failed to open %s.dbf: %v", w.filename, err)
|
||||
}
|
||||
w.dbfFields = fields
|
||||
|
||||
// calculate record length
|
||||
w.dbfRecordLength = int16(1)
|
||||
for _, field := range w.dbfFields {
|
||||
w.dbfRecordLength += int16(field.Size)
|
||||
}
|
||||
|
||||
// header length
|
||||
w.dbfHeaderLength = int16(len(w.dbfFields)*32 + 33)
|
||||
|
||||
// fill header space with empty bytes for now
|
||||
buf := make([]byte, w.dbfHeaderLength)
|
||||
binary.Write(w.dbf, binary.LittleEndian, buf)
|
||||
|
||||
// write empty records
|
||||
for n := int32(0); n < w.num; n++ {
|
||||
w.writeEmptyRecord()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Writes an empty record to the end of the DBF. This
|
||||
// works by seeking to the end of the file and writing
|
||||
// dbfRecordLength number of bytes. The first byte is a
|
||||
// space that indicates a new record.
|
||||
func (w *Writer) writeEmptyRecord() {
|
||||
w.dbf.Seek(0, io.SeekEnd)
|
||||
buf := make([]byte, w.dbfRecordLength)
|
||||
buf[0] = ' '
|
||||
binary.Write(w.dbf, binary.LittleEndian, buf)
|
||||
}
|
||||
|
||||
// WriteAttribute writes value for field into the given row in the DBF. Row
|
||||
// number should be the same as the order the Shape was written to the
|
||||
// Shapefile. The field value corresponds to the field in the slice used in
|
||||
// SetFields.
|
||||
func (w *Writer) WriteAttribute(row int, field int, value interface{}) error {
|
||||
var buf []byte
|
||||
switch v := value.(type) {
|
||||
case int:
|
||||
buf = []byte(strconv.Itoa(v))
|
||||
case float64:
|
||||
precision := w.dbfFields[field].Precision
|
||||
buf = []byte(strconv.FormatFloat(v, 'f', int(precision), 64))
|
||||
case string:
|
||||
buf = []byte(v)
|
||||
default:
|
||||
return fmt.Errorf("Unsupported value type: %T", v)
|
||||
}
|
||||
|
||||
if w.dbf == nil {
|
||||
return errors.New("Initialize DBF by using SetFields first")
|
||||
}
|
||||
if sz := int(w.dbfFields[field].Size); len(buf) > sz {
|
||||
return fmt.Errorf("Unable to write field %v: %q exceeds field length %v", field, buf, sz)
|
||||
}
|
||||
|
||||
seekTo := 1 + int64(w.dbfHeaderLength) + (int64(row) * int64(w.dbfRecordLength))
|
||||
for n := 0; n < field; n++ {
|
||||
seekTo += int64(w.dbfFields[n].Size)
|
||||
}
|
||||
w.dbf.Seek(seekTo, io.SeekStart)
|
||||
return binary.Write(w.dbf, binary.LittleEndian, buf)
|
||||
}
|
||||
|
||||
// BBox returns the bounding box of the Writer.
|
||||
func (w *Writer) BBox() Box {
|
||||
return w.bbox
|
||||
}
|
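A hedged end-to-end sketch of the Writer API defined above (not part of this commit). The import path is assumed from this repository's module layout, the file name is arbitrary, and the Point type with its X/Y fields and the POINT constant are assumed from earlier parts of this package:

package main

import (
	"fmt"

	"github.com/tiger1103/gfast/v3/api/v1/common/tool/shp"
)

func main() {
	// create points.shp and points.shx, leaving room for the headers
	w, err := shp.Create("points.shp", shp.POINT)
	if err != nil {
		panic(err)
	}
	// initialize the DBF with a single text column before writing attributes
	if err := w.SetFields([]shp.Field{shp.StringField("NAME", 25)}); err != nil {
		panic(err)
	}
	// Write returns the row index that WriteAttribute expects
	row := w.Write(&shp.Point{X: 106.55, Y: 23.47})
	if err := w.WriteAttribute(int(row), 0, "sample"); err != nil {
		fmt.Println(err)
	}
	// Close rewrites the SHP/SHX/DBF headers with the final counts and bbox
	w.Close()
}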
209
api/v1/common/tool/shp/writer_test.go
Normal file
@ -0,0 +1,209 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
"os"
|
||||
"reflect"
|
||||
"testing"
|
||||
)
|
||||
|
||||
var filenamePrefix = "test_files/write_"
|
||||
|
||||
func removeShapefile(filename string) {
|
||||
os.Remove(filename + ".shp")
|
||||
os.Remove(filename + ".shx")
|
||||
os.Remove(filename + ".dbf")
|
||||
}
|
||||
|
||||
func pointsToFloats(points []Point) [][]float64 {
|
||||
floats := make([][]float64, len(points))
|
||||
for k, v := range points {
|
||||
floats[k] = make([]float64, 2)
|
||||
floats[k][0] = v.X
|
||||
floats[k][1] = v.Y
|
||||
}
|
||||
return floats
|
||||
}
|
||||
|
||||
func TestAppend(t *testing.T) {
|
||||
filename := filenamePrefix + "point"
|
||||
defer removeShapefile(filename)
|
||||
points := [][]float64{
|
||||
{0.0, 0.0},
|
||||
{5.0, 5.0},
|
||||
{10.0, 10.0},
|
||||
}
|
||||
|
||||
shape, err := Create(filename+".shp", POINT)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
for _, p := range points {
|
||||
shape.Write(&Point{p[0], p[1]})
|
||||
}
|
||||
wantNum := shape.num
|
||||
shape.Close()
|
||||
|
||||
newPoints := [][]float64{
|
||||
{15.0, 15.0},
|
||||
{20.0, 20.0},
|
||||
{25.0, 25.0},
|
||||
}
|
||||
shape, err = Append(filename + ".shp")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
if shape.GeometryType != POINT {
|
||||
t.Fatalf("wanted geo type %d, got %d", POINT, shape.GeometryType)
|
||||
}
|
||||
if shape.num != wantNum {
|
||||
t.Fatalf("wrong 'num', wanted type %d, got %d", wantNum, shape.num)
|
||||
}
|
||||
|
||||
for _, p := range newPoints {
|
||||
shape.Write(&Point{p[0], p[1]})
|
||||
}
|
||||
|
||||
points = append(points, newPoints...)
|
||||
|
||||
shapes := getShapesFromFile(filename, t)
|
||||
if len(shapes) != len(points) {
|
||||
t.Error("Number of shapes read was wrong")
|
||||
}
|
||||
testPoint(t, points, shapes)
|
||||
}
|
||||
|
||||
func TestWritePoint(t *testing.T) {
|
||||
filename := filenamePrefix + "point"
|
||||
defer removeShapefile(filename)
|
||||
|
||||
points := [][]float64{
|
||||
{0.0, 0.0},
|
||||
{5.0, 5.0},
|
||||
{10.0, 10.0},
|
||||
}
|
||||
|
||||
shape, err := Create(filename+".shp", POINT)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
for _, p := range points {
|
||||
shape.Write(&Point{p[0], p[1]})
|
||||
}
|
||||
shape.Close()
|
||||
|
||||
shapes := getShapesFromFile(filename, t)
|
||||
if len(shapes) != len(points) {
|
||||
t.Error("Number of shapes read was wrong")
|
||||
}
|
||||
testPoint(t, points, shapes)
|
||||
}
|
||||
|
||||
func TestWritePolyLine(t *testing.T) {
|
||||
filename := filenamePrefix + "polyline"
|
||||
defer removeShapefile(filename)
|
||||
|
||||
points := [][]Point{
|
||||
{Point{0.0, 0.0}, Point{5.0, 5.0}},
|
||||
{Point{10.0, 10.0}, Point{15.0, 15.0}},
|
||||
}
|
||||
|
||||
shape, err := Create(filename+".shp", POLYLINE)
|
||||
if err != nil {
|
||||
t.Log(shape, err)
|
||||
}
|
||||
|
||||
l := NewPolyLine(points)
|
||||
|
||||
lWant := &PolyLine{
|
||||
Box: Box{MinX: 0, MinY: 0, MaxX: 15, MaxY: 15},
|
||||
NumParts: 2,
|
||||
NumPoints: 4,
|
||||
Parts: []int32{0, 2},
|
||||
Points: []Point{{X: 0, Y: 0},
|
||||
{X: 5, Y: 5},
|
||||
{X: 10, Y: 10},
|
||||
{X: 15, Y: 15},
|
||||
},
|
||||
}
|
||||
if !reflect.DeepEqual(l, lWant) {
|
||||
t.Errorf("incorrect NewLine: have: %+v; want: %+v", l, lWant)
|
||||
}
|
||||
|
||||
shape.Write(l)
|
||||
shape.Close()
|
||||
|
||||
shapes := getShapesFromFile(filename, t)
|
||||
if len(shapes) != 1 {
|
||||
t.Error("Number of shapes read was wrong")
|
||||
}
|
||||
testPolyLine(t, pointsToFloats(flatten(points)), shapes)
|
||||
}
|
||||
|
||||
type seekTracker struct {
|
||||
io.Writer
|
||||
offset int64
|
||||
}
|
||||
|
||||
func (s *seekTracker) Seek(offset int64, whence int) (int64, error) {
|
||||
s.offset = offset
|
||||
return s.offset, nil
|
||||
}
|
||||
|
||||
func (s *seekTracker) Close() error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func TestWriteAttribute(t *testing.T) {
|
||||
buf := new(bytes.Buffer)
|
||||
s := &seekTracker{Writer: buf}
|
||||
w := Writer{
|
||||
dbf: s,
|
||||
dbfFields: []Field{
|
||||
StringField("A_STRING", 6),
|
||||
FloatField("A_FLOAT", 8, 4),
|
||||
NumberField("AN_INT", 4),
|
||||
},
|
||||
dbfRecordLength: 100,
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
row int
|
||||
field int
|
||||
data interface{}
|
||||
wantOffset int64
|
||||
wantData string
|
||||
}{
|
||||
{"string-0", 0, 0, "test", 1, "test"},
|
||||
{"string-0-overflow-1", 0, 0, "overflo", 0, ""},
|
||||
{"string-0-overflow-n", 0, 0, "overflowing", 0, ""},
|
||||
{"string-3", 3, 0, "things", 301, "things"},
|
||||
{"float-0", 0, 1, 123.44, 7, "123.4400"},
|
||||
{"float-0-overflow-1", 0, 1, 1234.0, 0, ""},
|
||||
{"float-0-overflow-n", 0, 1, 123456789.0, 0, ""},
|
||||
{"int-0", 0, 2, 4242, 15, "4242"},
|
||||
{"int-0-overflow-1", 0, 2, 42424, 0, ""},
|
||||
{"int-0-overflow-n", 0, 2, 42424343, 0, ""},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.name, func(t *testing.T) {
|
||||
buf.Reset()
|
||||
s.offset = 0
|
||||
|
||||
err := w.WriteAttribute(test.row, test.field, test.data)
|
||||
|
||||
if buf.String() != test.wantData {
|
||||
t.Errorf("got data: %v, want: %v", buf.String(), test.wantData)
|
||||
}
|
||||
if s.offset != test.wantOffset {
|
||||
t.Errorf("got seek offset: %v, want: %v", s.offset, test.wantOffset)
|
||||
}
|
||||
if err == nil && test.wantData == "" {
|
||||
t.Error("got no data and no error")
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
151
api/v1/common/tool/shp/zipreader.go
Normal file
@ -0,0 +1,151 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"fmt"
|
||||
"io"
|
||||
"path"
|
||||
"strings"
|
||||
)
|
||||
|
||||
// ZipReader provides an interface for reading Shapefiles that are compressed in a ZIP archive.
|
||||
type ZipReader struct {
|
||||
sr SequentialReader
|
||||
z *zip.ReadCloser
|
||||
}
|
||||
|
||||
// openFromZIP is a convenience function for opening the file called name that is
// compressed in z for reading.
|
||||
func openFromZIP(z *zip.ReadCloser, name string) (io.ReadCloser, error) {
|
||||
for _, f := range z.File {
|
||||
if f.Name == name {
|
||||
return f.Open()
|
||||
|
||||
}
|
||||
}
|
||||
return nil, fmt.Errorf("No such file in archive: %s", name)
|
||||
}
|
||||
|
||||
// OpenZip opens a ZIP file that contains a single shapefile.
|
||||
func OpenZip(zipFilePath string) (*ZipReader, error) {
|
||||
z, err := zip.OpenReader(zipFilePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
zr := &ZipReader{
|
||||
z: z,
|
||||
}
|
||||
shapeFiles := shapesInZip(z)
|
||||
if len(shapeFiles) == 0 {
|
||||
return nil, fmt.Errorf("archive does not contain a .shp file")
|
||||
}
|
||||
if len(shapeFiles) > 1 {
|
||||
return nil, fmt.Errorf("archive does contain multiple .shp files")
|
||||
}
|
||||
|
||||
shp, err := openFromZIP(zr.z, shapeFiles[0].Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
withoutExt := strings.TrimSuffix(shapeFiles[0].Name, ".shp")
|
||||
// dbf is optional, so no error checking here
|
||||
dbf, _ := openFromZIP(zr.z, withoutExt+".dbf")
|
||||
zr.sr = SequentialReaderFromExt(shp, dbf)
|
||||
return zr, nil
|
||||
}
|
||||
|
||||
// ShapesInZip returns a string slice with the names (i.e. relative paths in the
// archive file tree) of all shapes that are in the ZIP archive at zipFilePath.
|
||||
func ShapesInZip(zipFilePath string) ([]string, error) {
|
||||
var names []string
|
||||
z, err := zip.OpenReader(zipFilePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
shapeFiles := shapesInZip(z)
|
||||
for i := range shapeFiles {
|
||||
names = append(names, shapeFiles[i].Name)
|
||||
}
|
||||
return names, nil
|
||||
}
|
||||
|
||||
func shapesInZip(z *zip.ReadCloser) []*zip.File {
|
||||
var shapeFiles []*zip.File
|
||||
for _, f := range z.File {
|
||||
if strings.HasSuffix(f.Name, ".shp") {
|
||||
shapeFiles = append(shapeFiles, f)
|
||||
}
|
||||
}
|
||||
return shapeFiles
|
||||
}
|
||||
|
||||
// OpenShapeFromZip opens a shape file that is contained in a ZIP archive. The
// parameter name is the name of the shape file.
|
||||
// The name of the shapefile must be a relative path: it must not start with a
|
||||
// drive letter (e.g. C:) or leading slash, and only forward slashes are
|
||||
// allowed. These rules are the same as in
|
||||
// https://golang.org/pkg/archive/zip/#FileHeader.
|
||||
func OpenShapeFromZip(zipFilePath string, name string) (*ZipReader, error) {
|
||||
z, err := zip.OpenReader(zipFilePath)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
zr := &ZipReader{
|
||||
z: z,
|
||||
}
|
||||
|
||||
shp, err := openFromZIP(zr.z, name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
// dbf is optional, so no error checking here
|
||||
prefix := strings.TrimSuffix(name, path.Ext(name))
|
||||
dbf, _ := openFromZIP(zr.z, prefix+".dbf")
|
||||
zr.sr = SequentialReaderFromExt(shp, dbf)
|
||||
return zr, nil
|
||||
}
|
||||
|
||||
// Close closes the ZipReader and frees the allocated resources.
|
||||
func (zr *ZipReader) Close() error {
|
||||
s := ""
|
||||
err := zr.sr.Close()
|
||||
if err != nil {
|
||||
s += err.Error() + ". "
|
||||
}
|
||||
err = zr.z.Close()
|
||||
if err != nil {
|
||||
s += err.Error() + ". "
|
||||
}
|
||||
if s != "" {
|
||||
return fmt.Errorf(s)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// Next reads the next shape in the shapefile and the next row in the DBF. Call
|
||||
// Shape() and Attribute() to access the values.
|
||||
func (zr *ZipReader) Next() bool {
|
||||
return zr.sr.Next()
|
||||
}
|
||||
|
||||
// Shape returns the shape that was last read as well as the current index.
|
||||
func (zr *ZipReader) Shape() (int, Shape) {
|
||||
return zr.sr.Shape()
|
||||
}
|
||||
|
||||
// Attribute returns the n-th field of the last row that was read. If there
|
||||
// were any errors before, the empty string is returned.
|
||||
func (zr *ZipReader) Attribute(n int) string {
|
||||
return zr.sr.Attribute(n)
|
||||
}
|
||||
|
||||
// Fields returns a slice of Fields that are present in the
|
||||
// DBF table.
|
||||
func (zr *ZipReader) Fields() []Field {
|
||||
return zr.sr.Fields()
|
||||
}
|
||||
|
||||
// Err returns the last non-EOF error that was encountered by this ZipReader.
|
||||
func (zr *ZipReader) Err() error {
|
||||
return zr.sr.Err()
|
||||
}
|
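A minimal reading sketch (not part of the commit) for the ZipReader defined above; "data.zip" is a placeholder archive containing exactly one .shp plus its optional .dbf:

zr, err := shp.OpenZip("data.zip")
if err != nil {
	panic(err)
}
defer zr.Close()

fields := zr.Fields()
for zr.Next() {
	n, shape := zr.Shape()
	fmt.Println(n, shape.BBox())
	for i := range fields {
		fmt.Printf("  %s = %s\n", fields[i].String(), zr.Attribute(i))
	}
}
if err := zr.Err(); err != nil {
	panic(err)
}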
236
api/v1/common/tool/shp/zipreader_test.go
Normal file
@ -0,0 +1,236 @@
|
||||
package shp
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func compressFileToZIP(zw *zip.Writer, src, tgt string, t *testing.T) {
|
||||
r, err := os.Open(src)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not open for compression %s: %v", src, err)
|
||||
}
|
||||
w, err := zw.Create(tgt)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not start to compress %s: %v", tgt, err)
|
||||
}
|
||||
_, err = io.Copy(w, r)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not compress contents for %s: %v", tgt, err)
|
||||
}
|
||||
}
|
||||
|
||||
// createTempZIP packs the SHP, SHX, and DBF into a ZIP in a temporary
|
||||
// directory
|
||||
func createTempZIP(prefix string, t *testing.T) (dir, filename string) {
|
||||
dir, err := ioutil.TempDir("", "go-shp-test")
|
||||
if err != nil {
|
||||
t.Fatalf("Could not create temporary directory: %v", err)
|
||||
}
|
||||
base := filepath.Base(prefix)
|
||||
zipName := base + ".zip"
|
||||
w, err := os.Create(filepath.Join(dir, zipName))
|
||||
if err != nil {
|
||||
t.Fatalf("Could not create temporary zip file: %v", err)
|
||||
}
|
||||
zw := zip.NewWriter(w)
|
||||
for _, suffix := range []string{".shp", ".shx", ".dbf"} {
|
||||
compressFileToZIP(zw, prefix+suffix, base+suffix, t)
|
||||
}
|
||||
if err := zw.Close(); err != nil {
|
||||
t.Fatalf("Could not close the written zip: %v", err)
|
||||
}
|
||||
return dir, zipName
|
||||
}
|
||||
|
||||
func getShapesZipped(prefix string, t *testing.T) (shapes []Shape) {
|
||||
dir, filename := createTempZIP(prefix, t)
|
||||
defer os.RemoveAll(dir)
|
||||
zr, err := OpenZip(filepath.Join(dir, filename))
|
||||
if err != nil {
|
||||
t.Errorf("Error when opening zip file: %v", err)
|
||||
}
|
||||
for zr.Next() {
|
||||
_, shape := zr.Shape()
|
||||
shapes = append(shapes, shape)
|
||||
}
|
||||
if err := zr.Err(); err != nil {
|
||||
t.Errorf("Error when iterating over the shapes: %v", err)
|
||||
}
|
||||
|
||||
if err := zr.Close(); err != nil {
|
||||
t.Errorf("Could not close zipreader: %v", err)
|
||||
}
|
||||
return shapes
|
||||
}
|
||||
|
||||
func TestZipReader(t *testing.T) {
|
||||
for prefix := range dataForReadTests {
|
||||
t.Logf("Testing zipped reading for %s", prefix)
|
||||
testshapeIdentity(t, prefix, getShapesZipped)
|
||||
}
|
||||
}
|
||||
|
||||
func unzipToTempDir(t *testing.T, p string) string {
|
||||
td, err := ioutil.TempDir("", "")
|
||||
if err != nil {
|
||||
t.Fatalf("%v", err)
|
||||
}
|
||||
zip, err := zip.OpenReader(p)
|
||||
if err != nil {
|
||||
t.Fatalf("%v", err)
|
||||
}
|
||||
defer zip.Close()
|
||||
for _, f := range zip.File {
|
||||
_, fn := path.Split(f.Name)
|
||||
pn := filepath.Join(td, fn)
|
||||
t.Logf("Uncompress: %s -> %s", f.Name, pn)
|
||||
w, err := os.Create(pn)
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot unzip %s: %v", p, err)
|
||||
}
|
||||
defer w.Close()
|
||||
r, err := f.Open()
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot unzip %s: %v", p, err)
|
||||
}
|
||||
defer r.Close()
|
||||
_, err = io.Copy(w, r)
|
||||
if err != nil {
|
||||
t.Fatalf("Cannot unzip %s: %v", p, err)
|
||||
}
|
||||
}
|
||||
return td
|
||||
}
|
||||
|
||||
// TestZipReaderAttribute reads the same shapefile twice, first directly from
// the SHP with a Reader, and, second, from a zip. It compares the fields as
// well as the shapes and the attributes. For this test, the Shapes are
// considered to be equal if their bounding boxes are equal.
|
||||
func TestZipReaderAttribute(t *testing.T) {
|
||||
b := "ne_110m_admin_0_countries"
|
||||
skipOrDownloadNaturalEarth(t, b+".zip")
|
||||
d := unzipToTempDir(t, b+".zip")
|
||||
defer os.RemoveAll(d)
|
||||
lr, err := Open(filepath.Join(d, b+".shp"))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer lr.Close()
|
||||
zr, err := OpenZip(b + ".zip")
|
||||
if os.IsNotExist(err) {
|
||||
t.Skipf("Skipping test, as Natural Earth dataset wasn't found")
|
||||
}
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer zr.Close()
|
||||
fsl := lr.Fields()
|
||||
fsz := zr.Fields()
|
||||
if len(fsl) != len(fsz) {
|
||||
t.Fatalf("Number of attributes do not match: Wanted %d, got %d", len(fsl), len(fsz))
|
||||
}
|
||||
for i := range fsl {
|
||||
if fsl[i] != fsz[i] {
|
||||
t.Fatalf("Attribute %d (%s) does not match (%s)", i, fsl[i], fsz[i])
|
||||
}
|
||||
}
|
||||
for zr.Next() && lr.Next() {
|
||||
ln, ls := lr.Shape()
|
||||
zn, zs := zr.Shape()
|
||||
if ln != zn {
|
||||
t.Fatalf("Sequence number wrong: Wanted %d, got %d", ln, zn)
|
||||
}
|
||||
if ls.BBox() != zs.BBox() {
|
||||
t.Fatalf("Bounding boxes for shape #%d do not match", ln+1)
|
||||
}
|
||||
for i := range fsl {
|
||||
la := lr.Attribute(i)
|
||||
za := zr.Attribute(i)
|
||||
if la != za {
|
||||
t.Fatalf("Shape %d: Attribute %d (%s) are unequal: '%s' vs '%s'",
|
||||
ln+1, i, fsl[i].String(), la, za)
|
||||
}
|
||||
}
|
||||
}
|
||||
if lr.Err() != nil {
|
||||
t.Logf("Reader error: %v / ZipReader error: %v", lr.Err(), zr.Err())
|
||||
t.FailNow()
|
||||
}
|
||||
}
|
||||
|
||||
func skipOrDownloadNaturalEarth(t *testing.T, p string) {
|
||||
if _, err := os.Stat(p); os.IsNotExist(err) {
|
||||
dl := false
|
||||
for _, a := range os.Args {
|
||||
if a == "download" {
|
||||
dl = true
|
||||
break
|
||||
}
|
||||
}
|
||||
u := "http://www.naturalearthdata.com/http//www.naturalearthdata.com/download/110m/cultural/ne_110m_admin_0_countries.zip"
|
||||
if !dl {
|
||||
t.Skipf("Skipped, as %s does not exist. Consider calling tests with '-args download` "+
|
||||
"or download manually from '%s'", p, u)
|
||||
} else {
|
||||
t.Logf("Downloading %s", u)
|
||||
w, err := os.Create(p)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not create %q: %v", p, err)
|
||||
}
|
||||
defer w.Close()
|
||||
resp, err := http.Get(u)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not download %q: %v", u, err)
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
_, err = io.Copy(w, resp.Body)
|
||||
if err != nil {
|
||||
t.Fatalf("Could not download %q: %v", u, err)
|
||||
}
|
||||
t.Logf("Download complete")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestNaturalEarthZip(t *testing.T) {
|
||||
type metaShape struct {
|
||||
Attributes map[string]string
|
||||
Shape
|
||||
}
|
||||
p := "ne_110m_admin_0_countries.zip"
|
||||
skipOrDownloadNaturalEarth(t, p)
|
||||
zr, err := OpenZip(p)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
defer zr.Close()
|
||||
|
||||
fs := zr.Fields()
|
||||
if len(fs) != 63 {
|
||||
t.Fatalf("Expected 63 columns in Natural Earth dataset, got %d", len(fs))
|
||||
}
|
||||
var metas []metaShape
|
||||
for zr.Next() {
|
||||
m := metaShape{
|
||||
Attributes: make(map[string]string),
|
||||
}
|
||||
_, m.Shape = zr.Shape()
|
||||
for n := range fs {
|
||||
m.Attributes[fs[n].String()] = zr.Attribute(n)
|
||||
}
|
||||
metas = append(metas, m)
|
||||
}
|
||||
if zr.Err() != nil {
|
||||
t.Fatal(zr.Err())
|
||||
}
|
||||
for _, m := range metas {
|
||||
t.Log(m.Attributes["name"])
|
||||
}
|
||||
}
|
308
api/v1/common/tool/tool.go
Normal file
@ -0,0 +1,308 @@
|
||||
package tool
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"crypto/md5"
|
||||
"encoding/base64"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"math/rand"
|
||||
"net/http"
|
||||
"os"
|
||||
"os/exec"
|
||||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"sort"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
/*
PathExists reports whether a file or directory exists.
If os.Stat returns a nil error, the file or directory exists.
If the error satisfies os.IsNotExist(), it does not exist.
For any other error type, existence cannot be determined.
*/
|
||||
func PathExists(path string) bool {
|
||||
|
||||
_, err := os.Stat(path)
|
||||
if err == nil {
|
||||
return true
|
||||
}
|
||||
if os.IsNotExist(err) {
|
||||
return false
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
/*
IsContainStr reports whether item appears in the given string slice.
*/
|
||||
func IsContainStr(items []string, item string) bool {
|
||||
for _, eachItem := range items {
|
||||
if eachItem == item {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// CreateDir recursively creates the directory via os.MkdirAll if it does not already exist.
|
||||
func CreateDir(dirPath string) {
|
||||
if !isExist(dirPath) {
|
||||
fmt.Println("路径不存在,创建路径", dirPath)
|
||||
_ = os.MkdirAll(dirPath, os.ModePerm)
|
||||
}
|
||||
}
|
||||
|
||||
// isExist reports whether the given file or directory exists (true means it exists).
|
||||
func isExist(path string) bool {
|
||||
_, err := os.Stat(path) // os.Stat returns the file info
|
||||
if err != nil {
|
||||
if os.IsExist(err) {
|
||||
return true
|
||||
}
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
/* GetUuid returns a UUID-like random string (lower-case MD5 of a random string). */
|
||||
func GetUuid() string {
|
||||
str := GetRandstring(32)
|
||||
time.Sleep(time.Nanosecond)
|
||||
return strings.ToLower(Md5V(str))
|
||||
}
|
||||
|
||||
// GetRandstring builds a random alphanumeric string of length lenNum by concatenation.
|
||||
func GetRandstring(lenNum int) string {
|
||||
var CHARS = []string{"a", "b", "c", "d", "e", "f", "g", "h", "i", "j", "k", "l", "m", "n", "o", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z",
|
||||
"A", "B", "C", "D", "E", "F", "G", "H", "I", "J", "K", "L", "M", "N", "O", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z",
|
||||
"1", "2", "3", "4", "5", "6", "7", "8", "9", "0"}
|
||||
str := strings.Builder{}
|
||||
length := len(CHARS)
|
||||
for i := 0; i < lenNum; i++ {
|
||||
l := CHARS[rand.Intn(length)]
|
||||
str.WriteString(l)
|
||||
}
|
||||
return str.String()
|
||||
|
||||
}
|
||||
|
||||
// GetCurrentAbPathByExecutable returns the absolute directory of the currently running executable.
|
||||
func GetCurrentAbPathByExecutable() string {
|
||||
exePath, err := os.Executable()
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
res, _ := filepath.EvalSymlinks(filepath.Dir(exePath))
|
||||
return res
|
||||
}
|
||||
|
||||
/* Struct2Map converts a struct to a map, lower-casing the first letter of each field name. */
|
||||
func Struct2Map(obj interface{}) map[string]interface{} {
|
||||
t := reflect.TypeOf(obj)
|
||||
v := reflect.ValueOf(obj)
|
||||
var data = make(map[string]interface{})
|
||||
for i := 0; i < t.NumField(); i++ {
|
||||
data[strings.ToLower(string(t.Field(i).Name[0]))+t.Field(i).Name[1:]] = v.Field(i).Interface()
|
||||
//data[t.Field(i).Name] = v.Field(i).Interface()
|
||||
}
|
||||
return data
|
||||
}
|
||||
func PortInUse(port int) int {
|
||||
checkStatement := fmt.Sprintf("lsof -i:%d ", port)
|
||||
output, _ := exec.Command("sh", "-c", checkStatement).CombinedOutput()
|
||||
fmt.Println(output)
|
||||
if len(output) > 0 {
|
||||
return 1
|
||||
}
|
||||
return -1
|
||||
}
|
||||
|
||||
/* portInUse checks whether a port is in use. */
// It takes the port number to query and returns the PID of the process
// using that port, or -1 if no matching process is found.
|
||||
func portInUse(portNumber int) int {
|
||||
res := -1
|
||||
var outBytes bytes.Buffer
|
||||
sysType := runtime.GOOS
|
||||
fmt.Println(sysType)
|
||||
var cmdStr = ""
|
||||
if sysType == "linux" {
|
||||
cmdStr = fmt.Sprintf("lsof -i:%d ", portNumber)
|
||||
}
|
||||
//
|
||||
if sysType == "windows" {
|
||||
cmdStr = fmt.Sprintf("netstat -ano -p tcp | findstr %d", portNumber)
|
||||
}
|
||||
//checkStatement := fmt.Sprintf("lsof -i:%d ", portNumber)
|
||||
fmt.Println(cmdStr)
|
||||
cmd := exec.Command("cmd", "/c", cmdStr)
|
||||
cmd.Stdout = &outBytes
|
||||
cmd.Run()
|
||||
resStr := outBytes.String()
|
||||
fmt.Println(resStr)
|
||||
r := regexp.MustCompile(`\s\d+\s`).FindAllString(resStr, -1)
|
||||
if len(r) > 0 {
|
||||
pid, err := strconv.Atoi(strings.TrimSpace(r[0]))
|
||||
if err != nil {
|
||||
res = -1
|
||||
} else {
|
||||
res = pid
|
||||
}
|
||||
}
|
||||
return res
|
||||
}
|
||||
|
||||
func GetUnUsePort(port int) int {
|
||||
isInUse := PortInUse(port)
|
||||
if isInUse != -1 {
|
||||
fmt.Println("端口:" + strconv.Itoa(port) + " 被占用")
|
||||
port++
|
||||
port = GetUnUsePort(port)
|
||||
return port
|
||||
} else {
|
||||
return port
|
||||
}
|
||||
}
|
||||
|
||||
// WeekIntervalTime returns the start and end time of a week: week=0 is the current week, -1 the previous week, 1 the next week, and so on.
|
||||
func WeekIntervalTime(week int) (startTime, endTime string) {
|
||||
now := time.Now()
|
||||
offset := int(time.Monday - now.Weekday())
|
||||
// special-case Sunday: time.Sunday == 0, so on Sunday the offset becomes positive
|
||||
if offset > 0 {
|
||||
offset = -6
|
||||
}
|
||||
|
||||
year, month, day := now.Date()
|
||||
thisWeek := time.Date(year, month, day, 0, 0, 0, 0, time.Local)
|
||||
startTime = thisWeek.AddDate(0, 0, offset+7*week).Format("2006-01-02") + " 00:00:00"
|
||||
endTime = thisWeek.AddDate(0, 0, offset+6+7*week).Format("2006-01-02") + " 23:59:59"
|
||||
|
||||
return startTime, endTime
|
||||
}
|
||||
|
||||
// GetCurrentTime returns the current system time formatted as "2006-01-02 15:04:05".
|
||||
func GetCurrentTime() string {
|
||||
return time.Now().Format("2006-01-02 15:04:05")
|
||||
}
|
||||
|
||||
/* RemoveFile deletes the file or folder at path if it exists. */
|
||||
func RemoveFile(path string) {
|
||||
if isExist(path) {
|
||||
err := os.Remove(path)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Md5V returns the hex-encoded MD5 hash of str.
|
||||
func Md5V(str string) string {
|
||||
h := md5.New()
|
||||
h.Write([]byte(str))
|
||||
return hex.EncodeToString(h.Sum(nil))
|
||||
}
|
||||
|
||||
// DownloadFile downloads url to a local file. It streams to disk while downloading instead of loading the whole file into memory.
|
||||
func DownloadFile(filepath string, url string) error {
|
||||
|
||||
// Get the data
|
||||
resp, err := http.Get(url)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer resp.Body.Close()
|
||||
// Create the file
|
||||
out, err := os.Create(filepath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer out.Close()
|
||||
|
||||
// Write the body to file
|
||||
_, err = io.Copy(out, resp.Body)
|
||||
return err
|
||||
}
|
||||
|
||||
// Generic sorting helpers.
// To sort a slice of structs, the wrapper must implement the Len(), Swap() and Less() methods.
|
||||
type body_wrapper struct {
|
||||
Bodys []interface{}
|
||||
by func(p, q *interface{}) bool // used by the internal Less() method
|
||||
}
|
||||
type SortBodyBy func(p, q *interface{}) bool // comparison function type
|
||||
|
||||
// Len returns the number of elements.
|
||||
func (acw body_wrapper) Len() int {
|
||||
return len(acw.Bodys)
|
||||
}
|
||||
|
||||
// Swap exchanges the elements at i and j.
|
||||
func (acw body_wrapper) Swap(i, j int) {
|
||||
acw.Bodys[i], acw.Bodys[j] = acw.Bodys[j], acw.Bodys[i]
|
||||
}
|
||||
|
||||
// Less compares two elements using the externally supplied by function.
|
||||
func (acw body_wrapper) Less(i, j int) bool {
|
||||
return acw.by(&acw.Bodys[i], &acw.Bodys[j])
|
||||
}
|
||||
|
||||
// SortBody sorts bodys by a caller-defined field; see the function passed in SortBodyByCreateTime for an example.
|
||||
func SortBody(bodys []interface{}, by SortBodyBy) {
|
||||
sort.Sort(body_wrapper{bodys, by})
|
||||
}
|
||||
|
||||
// LocalTime is a time.Time that marshals to JSON as "2006-01-02 15:04:05".
|
||||
type LocalTime time.Time
|
||||
|
||||
func (t *LocalTime) MarshalJSON() ([]byte, error) {
|
||||
tTime := time.Time(*t)
|
||||
return []byte(fmt.Sprintf("\"%v\"", tTime.Format("2006-01-02 15:04:05"))), nil
|
||||
}
|
||||
|
||||
// WriteFile decodes a base64-encoded image and saves it under path, returning any error and the generated filename.
|
||||
func WriteFile(path string, base64_image_content string) (error, string) {
|
||||
|
||||
//b, _ := regexp.MatchString(`^data:\s*image\/(\w+);base64,`, base64_image_content)
|
||||
//if !b {
|
||||
// return errors.New(""), ""
|
||||
//}
|
||||
base64_image_content = "data:image/png;base64," + base64_image_content
|
||||
re, _ := regexp.Compile(`^data:\s*image\/(\w+);base64,`)
|
||||
allData := re.FindAllSubmatch([]byte(base64_image_content), 2)
|
||||
fileType := string(allData[0][1]) // extract the extension, e.g. png or jpeg
|
||||
|
||||
base64Str := re.ReplaceAllString(base64_image_content, "")
|
||||
|
||||
//date := time.Now().Format("2006-01-02")
|
||||
//if ok := IsFileExist(path + "/" + date); !ok {
|
||||
// os.Mkdir(path+"/"+date, 0666)
|
||||
//}
|
||||
|
||||
curFileStr := strconv.FormatInt(time.Now().UnixNano(), 10)
|
||||
|
||||
r := rand.New(rand.NewSource(time.Now().UnixNano()))
|
||||
n := r.Intn(99999)
|
||||
var filename = curFileStr + strconv.Itoa(n) + "." + fileType
|
||||
var file = path + "/" + filename
|
||||
byte, _ := base64.StdEncoding.DecodeString(base64Str)
|
||||
|
||||
err := ioutil.WriteFile(file, byte, 0666)
|
||||
if err != nil {
|
||||
log.Println(err)
|
||||
}
|
||||
return err, filename
|
||||
}
|
||||
|
||||
func UploadsFile() {
|
||||
|
||||
}
|
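A short usage sketch (illustrative only, not in the commit) for a few of the helpers above; the URL and file paths are placeholders:

start, end := tool.WeekIntervalTime(0) // start and end of the current week
fmt.Println(start, end)

id := tool.GetUuid() // 32-character lower-case hex string
fmt.Println(id, tool.GetCurrentTime())

// stream a remote file to disk without buffering it all in memory
if err := tool.DownloadFile("/tmp/data.zip", "https://example.com/data.zip"); err != nil {
	fmt.Println("download failed:", err)
}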
26
api/v1/common/tool/turf/turf.go
Normal file
@ -0,0 +1,26 @@
|
||||
package turf
|
||||
|
||||
import (
|
||||
_ "embed"
|
||||
"github.com/dop251/goja"
|
||||
)
|
||||
|
||||
//go:embed turf.min.js
|
||||
var turf_min string
|
||||
|
||||
//go:embed turf.js
|
||||
var turf string
|
||||
var Tin func(wgs84 [][]string) [][]string
|
||||
var BooleanPointInPolygon func(point []float64, polygon [][]float64) bool
|
||||
|
||||
//// initialization
|
||||
//func init() {
|
||||
// InitTurfjs()
|
||||
//}
|
||||
|
||||
func InitTurfjs() {
|
||||
vm := goja.New()
|
||||
vm.RunString(turf_min + turf)
|
||||
vm.ExportTo(vm.Get("Tin"), &Tin)
|
||||
vm.ExportTo(vm.Get("BooleanPointInPolygon"), &BooleanPointInPolygon)
|
||||
}
|
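A hedged sketch (not in the commit) of calling the goja-exported functions after InitTurfjs; the coordinates are arbitrary, the polygon must be a closed ring as turf.polygon requires, and the return format of Tin follows the JavaScript wrapper shown in turf.js below:

turf.InitTurfjs()

ring := [][]float64{{106.0, 23.0}, {107.0, 23.0}, {107.0, 24.0}, {106.0, 24.0}, {106.0, 23.0}}
inside := turf.BooleanPointInPolygon([]float64{106.5, 23.5}, ring)
fmt.Println(inside) // true: the point lies inside the ring

// Tin expects rows of "lng lat height" strings and returns triangle vertices
tri := turf.Tin([][]string{
	{"106.545", "23.467", "805.6"},
	{"106.550", "23.470", "810.2"},
	{"106.548", "23.472", "807.9"},
})
fmt.Println(len(tri))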
26
api/v1/common/tool/turf/turf.js
Normal file
@ -0,0 +1,26 @@
|
||||
function Tin(points=[]) {
|
||||
let arr = []
|
||||
points.forEach(p=>{
|
||||
arr.push(turf.point( [parseFloat(p[0]), parseFloat(p[1])]))
|
||||
})
|
||||
var tin = turf.tin(turf.featureCollection(arr));
|
||||
let polylines=[]
|
||||
tin.features.forEach((feature, index) => {
|
||||
feature.geometry.coordinates.forEach((coordinate,) => {
|
||||
polylines.push([
|
||||
coordinate[0],
|
||||
coordinate[1],
|
||||
coordinate[2],
|
||||
])
|
||||
})
|
||||
})
|
||||
return polylines
}
|
||||
|
||||
function BooleanPointInPolygon(point,polygon=[]) {
|
||||
var pt = turf.point([point[0],point[1]]);
|
||||
var poly = turf.polygon([polygon]);
|
||||
var scaledPoly = turf.transformScale(poly, 2); // note: scaledPoly is computed but not used below
|
||||
return turf.booleanPointInPolygon(pt, poly)
|
||||
}
|
91
api/v1/common/tool/turf/turf.min.js
vendored
Normal file
File diff suppressed because one or more lines are too long
52
api/v1/common/tool/xyz2wgs84.go
Normal file
@ -0,0 +1,52 @@
|
||||
package tool
|
||||
|
||||
import (
|
||||
"math"
|
||||
)
|
||||
|
||||
var a = float64(6378137)
|
||||
|
||||
var b = 6356752.3142
|
||||
|
||||
var asqr = a * a
|
||||
|
||||
var bsqr = b * b
|
||||
|
||||
var e = math.Sqrt((asqr - bsqr) / asqr)
|
||||
|
||||
var eprime = math.Sqrt((asqr - bsqr) / bsqr)
|
||||
|
||||
func Xyz2Wgs84(X, Y, Z float64) (lng, lat, height float64) {
|
||||
var p = math.Sqrt(X*X + Y*Y)
|
||||
var theta = math.Atan((Z * a) / (p * b))
|
||||
var sintheta = math.Sin(theta)
|
||||
var costheta = math.Cos(theta)
|
||||
var num = Z + eprime*eprime*b*sintheta*sintheta*sintheta
|
||||
var denom = p - e*e*a*costheta*costheta*costheta
|
||||
//Now calculate LLA
|
||||
var latitude = math.Atan(num / denom)
|
||||
var longitude = math.Atan(Y / X)
|
||||
var N = getN(latitude)
|
||||
var altitude = (p / math.Cos(latitude)) - N
|
||||
|
||||
if X < 0 && Y < 0 {
|
||||
longitude = longitude - math.Pi
|
||||
}
|
||||
|
||||
if X < 0 && Y > 0 {
|
||||
longitude = longitude + math.Pi
|
||||
}
|
||||
return radiansToDegrees(longitude), radiansToDegrees(latitude), altitude
|
||||
}
|
||||
|
||||
func getN(latitude float64) float64 {
|
||||
var sinlatitude = math.Sin(latitude)
|
||||
var denom = math.Sqrt(1 - e*e*sinlatitude*sinlatitude)
|
||||
var N = a / denom
|
||||
return N
|
||||
}
|
||||
func radiansToDegrees(radians float64) float64 {
|
||||
return radians * 180 / math.Pi
|
||||
}
|
||||
|
||||
//106.54959740614493 23.47200769358978
|
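An illustrative call (not in the commit) of the ECEF-to-WGS84 conversion above; the Cartesian inputs are placeholder values in metres and the printed result is not asserted:

// X, Y, Z are earth-centred, earth-fixed coordinates in metres
lng, lat, h := tool.Xyz2Wgs84(-1488683.0, 5619510.0, 2524778.0)
fmt.Printf("lng=%.6f lat=%.6f height=%.2f\n", lng, lat, h)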
113
api/v1/common/tool/zip/zip.go
Normal file
@ -0,0 +1,113 @@
|
||||
package zip
|
||||
|
||||
import (
|
||||
"archive/zip"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"path"
|
||||
)
|
||||
|
||||
func ZipFiles(filename string, files []string) error {
|
||||
fmt.Println("start zip file......")
|
||||
// create the output zip file
|
||||
newZipFile, err := os.Create(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer newZipFile.Close()
|
||||
// create an empty zip archive, i.e. open the zip file ready for writing
|
||||
zipWriter := zip.NewWriter(newZipFile)
|
||||
defer zipWriter.Close()
|
||||
// Add files to zip
|
||||
for _, file := range files {
|
||||
if err = AddFileToZip(zipWriter, file); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func AddFileToZip(zipWriter *zip.Writer, filename string) error {
|
||||
// open the file to be compressed
|
||||
fileToZip, err := os.Open(filename)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer fileToZip.Close()
|
||||
// get the file's FileInfo
|
||||
info, err := fileToZip.Stat()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// FileInfoHeader returns a Header with some fields filled in from the FileInfo, i.e. it converts the FileInfo into zip header metadata
|
||||
header, err := zip.FileInfoHeader(info)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
header.Name = filename
|
||||
/*
Predefined compression methods.
The archive/zip package defines two: one stores the file in the zip without compressing it, the other compresses the file before writing it. Store is the store-only (no compression) mode used here.
Store   uint16 = 0 // store the file only
Deflate uint16 = 8 // compress the file
*/
|
||||
header.Method = zip.Store
|
||||
// create the header entry inside the archive
|
||||
writer, err := zipWriter.CreateHeader(header)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// copy source to destination: io.Copy streams fileToZip into writer through the default 32 KB buffer, so the whole file is never loaded into memory at once and large files are handled safely
|
||||
_, err = io.Copy(writer, fileToZip)
|
||||
return err
|
||||
}
|
||||
|
||||
// Decompressor extracts the archive at zipFilePath into targetDir, writing each entry as filename plus the entry's original extension.
|
||||
func Decompressor(zipFilePath string, targetDir string, filename string) error {
|
||||
reader, err := zip.OpenReader(zipFilePath)
|
||||
if nil != err {
|
||||
fmt.Println(err)
|
||||
return err
|
||||
}
|
||||
defer reader.Close()
|
||||
|
||||
_ = os.MkdirAll(targetDir, 0777)
|
||||
names := []string{}
|
||||
for _, f := range reader.File {
|
||||
err := func() error {
|
||||
if f.FileInfo().IsDir() {
|
||||
_ = os.MkdirAll(path.Join(targetDir, f.Name), f.Mode())
|
||||
return nil
|
||||
}
|
||||
suffix := path.Ext(f.Name)
|
||||
//fmt.Println(f.Name)
|
||||
//fmt.Println(path.Join(targetDir, f.Name))
|
||||
writeFile, err := os.OpenFile(path.Join(targetDir, filename+suffix), os.O_WRONLY|os.O_CREATE, f.Mode())
|
||||
if nil != err {
|
||||
return err
|
||||
}
|
||||
defer writeFile.Close()
|
||||
|
||||
readFile, err := f.Open()
|
||||
if nil != err {
|
||||
return err
|
||||
}
|
||||
defer readFile.Close()
|
||||
|
||||
n, err := io.Copy(writeFile, readFile)
|
||||
if nil != err {
|
||||
return err
|
||||
}
|
||||
if false {
|
||||
names = append(names, f.Name)
|
||||
fmt.Printf("解压文件: %s 大小: %v", f.Name, n)
|
||||
}
|
||||
return nil
|
||||
}()
|
||||
if nil != err {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
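A brief usage sketch (not part of the commit) for the two helpers above; the paths are placeholders, and the package would need an import alias from calling code because it shares its name with the standard archive/zip:

// pack three shapefile components into one archive, stored without compression
if err := zip.ZipFiles("out/export.zip", []string{"out/data.shp", "out/data.shx", "out/data.dbf"}); err != nil {
	log.Fatal(err)
}

// unpack it again, renaming every entry to "data" plus its original extension
if err := zip.Decompressor("out/export.zip", "out/unpacked", "data"); err != nil {
	log.Fatal(err)
}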