mirror of https://github.com/dsoprea/go-exif.git
Compare commits
45 Commits
Author | SHA1 | Date |
---|---|---|
| 6579e82b73 | |
| d21ac8e2de | |
| dae547f073 | |
| de6cb6e380 | |
| f65986e5d0 | |
| 1cd83a4e24 | |
| 1dda18f2dc | |
| cf5cd88aa5 | |
| 0a0262e4b8 | |
| fe1174ce18 | |
| 0e3dba6a88 | |
| 517e30a4d3 | |
| a6301f85c8 | |
| 8213cfabc6 | |
| 120bcdb2a5 | |
| cb1753e83a | |
| dca55bf8ca | |
| a1cb4443b2 | |
| d154f10435 | |
| d42f8ce9cd | |
| 4ec6f89b9f | |
| 443fda923a | |
| 52fa399645 | |
| 3089244e86 | |
| 0d9ceeb35b | |
| db167117f4 | |
| 4382f1769b | |
| 12dc66d902 | |
| 64f5c6ad03 | |
| fe4ea0ee06 | |
| 325de3c5bb | |
| b3f4f3b4b7 | |
| de21411905 | |
| 089aa48c91 | |
| 74a1612f20 | |
| 33ee3a8313 | |
| 3a80916d1f | |
| 9393e7afd4 | |
| 46b1a0cd17 | |
| 08f1b67089 | |
| 796ddda06f | |
| 4675ba7529 | |
| 73fc50e0d0 | |
| f93c06e0d6 | |
| 0758712be3 | |
|
@@ -1,12 +1,9 @@
language: go
go:
- master
- stable
- "1.14"
- "1.13"
- "1.12"
env:
- GO111MODULE=on
- "1.19"
- "1.18"
- "1.17"
install:
- go get -t ./...
script:
|
||||
|
|
25  README.md
|
@@ -1,7 +1,7 @@
[](https://travis-ci.org/dsoprea/go-exif)
[](https://coveralls.io/github/dsoprea/go-exif?branch=master)
[](https://goreportcard.com/report/github.com/dsoprea/go-exif)
[](https://godoc.org/github.com/dsoprea/go-exif)
[](https://app.travis-ci.com/dsoprea/go-exif)
[](https://codecov.io/gh/dsoprea/go-exif)
[](https://goreportcard.com/report/github.com/dsoprea/go-exif/v3)
[](https://godoc.org/github.com/dsoprea/go-exif/v3)

# Overview

@@ -13,10 +13,16 @@ This package provides native Go functionality to parse an existing EXIF block, u
To get the project and dependencies:

```
$ go get -t github.com/dsoprea/go-exif/v3
$ git clone https://github.com/dsoprea/go-exif.git
$ cd v3
```

# Requirements

- Go >= 1.17: Due to a breakage with "go test", we only officially support 1.17 for testing/CI reasons. It may still work in earlier versions if such a need is critically required, however.

# Scope

This project is concerned only with parsing and encoding raw EXIF data. It does

@@ -93,8 +99,8 @@ store a flat list of tags in an image for simplified, proprietary usage.
There is a runnable reading/dumping tool included:

```
$ go get github.com/dsoprea/go-exif/v3/command/exif-read-tool
$ exif-read-tool -filepath "<media file-path>"
$ go install github.com/dsoprea/go-exif/v3/command/exif-read-tool@latest
$ exif-read-tool --filepath "<media file-path>"
```

Example output:

@@ -113,7 +119,7 @@ IFD-PATH=[IFD] ID=(0x0132) NAME=[DateTime] COUNT=(20) TYPE=[ASCII] VALUE=[2017:1
You can also print the raw, parsed data as JSON:

```
$ exif-read-tool -filepath "<media file-path>" -json
$ exif-read-tool --filepath "<media file-path>" -json
```

Example output:

@@ -196,6 +202,9 @@ providing their non-trivial time or image corpus to test go-exif:
- [philip-firstorder](https://github.com/philip-firstorder) (200K images)
- [matchstick](https://github.com/matchstick) (102K images)

In addition to these, it has been tested on my own collection, north of 560K
images.

[search-and-extract-exif]: https://godoc.org/github.com/dsoprea/go-exif/v3#SearchAndExtractExif
[search-file-and-extract-exif]: https://godoc.org/github.com/dsoprea/go-exif/v3#SearchFileAndExtractExif
[jpeg-set-exif]: https://godoc.org/github.com/dsoprea/go-jpeg-image-structure#example-SegmentList-SetExif
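For orientation, a minimal sketch of the v3 read path this README references: `SearchFileAndExtractExif` is the function linked above, and `GetFlatExifData` appears in the exif-read-tool change later in this comparison. The field names printed at the end are assumptions about the flat-tag entries, not something this diff shows.

```go
package main

import (
	"fmt"
	"os"

	exif "github.com/dsoprea/go-exif/v3"
)

func main() {
	// Extract the raw EXIF blob from any supported media file.
	rawExif, err := exif.SearchFileAndExtractExif(os.Args[1])
	if err != nil {
		fmt.Fprintf(os.Stderr, "no EXIF data: %v\n", err)
		os.Exit(1)
	}

	// Flatten the IFD tree into a simple list of tags.
	entries, _, err := exif.GetFlatExifData(rawExif, nil)
	if err != nil {
		panic(err)
	}

	for _, entry := range entries {
		// TagName/Formatted are assumed field names on the returned entries.
		fmt.Printf("%s = %s\n", entry.TagName, entry.Formatted)
	}
}
```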
|
||||
|
|
|
@@ -1055,23 +1055,35 @@ func (ifd *Ifd) GpsInfo() (gi *GpsInfo, err error) {
altitudeRefTags, foundAltitudeRef := ifd.EntriesByTagId[TagAltitudeRefId]

if foundAltitude == true && foundAltitudeRef == true {
altitudePhrase, err := altitudeTags[0].Format()
log.PanicIf(err)

ifdEnumerateLogger.Debugf(nil, "Altitude is [%s].", altitudePhrase)

altitudeValue, err := altitudeTags[0].Value()
log.PanicIf(err)

altitudeRefPhrase, err := altitudeRefTags[0].Format()
log.PanicIf(err)

ifdEnumerateLogger.Debugf(nil, "Altitude-reference is [%s].", altitudeRefPhrase)

altitudeRefValue, err := altitudeRefTags[0].Value()
log.PanicIf(err)

altitudeRaw := altitudeValue.([]exifcommon.Rational)
altitude := int(altitudeRaw[0].Numerator / altitudeRaw[0].Denominator)
if altitudeRaw[0].Denominator > 0 {
altitude := int(altitudeRaw[0].Numerator / altitudeRaw[0].Denominator)

if altitudeRefValue.([]byte)[0] == 1 {
altitude *= -1
if altitudeRefValue.([]byte)[0] == 1 {
altitude *= -1
}

gi.Altitude = altitude
}

gi.Altitude = altitude
}

// Parse time.
// Parse timestamp from separate date and time tags.

timestampTags, foundTimestamp := ifd.EntriesByTagId[TagTimestampId]
datestampTags, foundDatestamp := ifd.EntriesByTagId[TagDatestampId]

@@ -1080,7 +1092,13 @@ func (ifd *Ifd) GpsInfo() (gi *GpsInfo, err error) {
datestampValue, err := datestampTags[0].Value()
log.PanicIf(err)

dateParts := strings.Split(datestampValue.(string), ":")
datePhrase := datestampValue.(string)
ifdEnumerateLogger.Debugf(nil, "Date tag value is [%s].", datePhrase)

// Normalize the separators.
datePhrase = strings.ReplaceAll(datePhrase, "-", ":")

dateParts := strings.Split(datePhrase, ":")

year, err1 := strconv.ParseUint(dateParts[0], 10, 16)
month, err2 := strconv.ParseUint(dateParts[1], 10, 8)

@@ -1090,6 +1108,11 @@ func (ifd *Ifd) GpsInfo() (gi *GpsInfo, err error) {
timestampValue, err := timestampTags[0].Value()
log.PanicIf(err)

timePhrase, err := timestampTags[0].Format()
log.PanicIf(err)

ifdEnumerateLogger.Debugf(nil, "Time tag value is [%s].", timePhrase)

timestampRaw := timestampValue.([]exifcommon.Rational)

hour := int(timestampRaw[0].Numerator / timestampRaw[0].Denominator)

@@ -34,6 +34,10 @@ func ParseExifFullTimestamp(fullTimestampPhrase string) (timestamp time.Time, er
parts := strings.Split(fullTimestampPhrase, " ")
datestampValue, timestampValue := parts[0], parts[1]

// Normalize the separators.
datestampValue = strings.ReplaceAll(datestampValue, "-", ":")
timestampValue = strings.ReplaceAll(timestampValue, "-", ":")

dateParts := strings.Split(datestampValue, ":")

year, err := strconv.ParseUint(dateParts[0], 10, 16)
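The first hunk above guards the GPS altitude computation against a zero denominator and negates the value when the altitude-reference byte is 1 (below sea level). A distilled, illustrative sketch of that logic using the `exifcommon.Rational` type; the helper name is not part of the library:

```go
package example

import (
	exifcommon "github.com/dsoprea/go-exif/v3/common"
)

// altitudeFromGps mirrors the guarded altitude logic in the hunk above.
func altitudeFromGps(altitudeRaw []exifcommon.Rational, altitudeRef []byte) (altitude int, ok bool) {
	if len(altitudeRaw) == 0 || altitudeRaw[0].Denominator == 0 {
		// Previously a zero denominator caused a divide-by-zero panic.
		return 0, false
	}

	altitude = int(altitudeRaw[0].Numerator / altitudeRaw[0].Denominator)

	// A GPSAltitudeRef byte of 1 means the altitude is below sea level.
	if len(altitudeRef) > 0 && altitudeRef[0] == 1 {
		altitude *= -1
	}

	return altitude, true
}
```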
|
||||
|
|
Binary file not shown.
Binary file not shown.
|
@@ -23,8 +23,8 @@ import (
"github.com/dsoprea/go-logging"
"github.com/jessevdk/go-flags"

"github.com/dsoprea/go-exif/v2"
"github.com/dsoprea/go-exif/v2/common"
"github.com/dsoprea/go-exif/v3"
"github.com/dsoprea/go-exif/v3/common"
)

const (

@@ -55,6 +55,8 @@ type parameters struct {
IsVerbose bool `short:"v" long:"verbose" description:"Print logging"`
ThumbnailOutputFilepath string `short:"t" long:"thumbnail-output-filepath" description:"File-path to write thumbnail to (if present)"`
DoNotPrintTags bool `short:"n" long:"no-tags" description:"Do not actually print tags. Good for auditing the logs or merely checking the EXIF structure for errors."`
SkipBlocks int `short:"s" long:"skip" description:"Skip this many EXIF blocks before returning"`
DoUniversalTagSearch bool `short:"u" long:"universal-tags" description:"If tags not found in known mapped IFDs, fallback to trying all IFDs."`
}

var (

@@ -92,7 +94,7 @@ func main() {
data, err := ioutil.ReadAll(f)
log.PanicIf(err)

rawExif, err := exif.SearchAndExtractExif(data)
rawExif, err := exif.SearchAndExtractExifN(data, arguments.SkipBlocks)
if err != nil {
if err == exif.ErrNoExif {
fmt.Printf("No EXIF data.\n")

@@ -106,8 +108,14 @@ func main() {

// Run the parse.

entries, _, err := exif.GetFlatExifData(rawExif, nil)
log.PanicIf(err)
entries, _, err := exif.GetFlatExifDataUniversalSearch(rawExif, nil, arguments.DoUniversalTagSearch)
if err != nil {
if arguments.SkipBlocks > 0 {
mainLogger.Warningf(nil, "Encountered an error. This might be related to the request to skip EXIF blocks.")
}

log.Panic(err)
}

// Write the thumbnail is requested and present.
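Outside of the tool, the same pair of new calls can be used directly. A hedged sketch; the skip count and fallback flag are illustrative values, not defaults mandated by the library:

```go
package main

import (
	"fmt"
	"os"

	exif "github.com/dsoprea/go-exif/v3"
	"github.com/dsoprea/go-logging"
)

func main() {
	data, err := os.ReadFile(os.Args[1])
	log.PanicIf(err)

	// Skip zero blocks, i.e. use the first EXIF block found; a non-zero count
	// is the forensics case served by the new --skip option above.
	rawExif, err := exif.SearchAndExtractExifN(data, 0)
	log.PanicIf(err)

	// true enables the universal tag search added with --universal-tags.
	entries, _, err := exif.GetFlatExifDataUniversalSearch(rawExif, nil, true)
	log.PanicIf(err)

	fmt.Printf("(%d) tags\n", len(entries))
}
```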
|
||||
|
||||
|
|
|
@@ -13,7 +13,7 @@ import (

"github.com/dsoprea/go-logging"

"github.com/dsoprea/go-exif/v2/common"
"github.com/dsoprea/go-exif/v3/common"
)

func TestMain(t *testing.T) {
|
||||
|
|
|
@@ -2,6 +2,8 @@ package exifcommon

import (
"bytes"
"errors"
"math"

"encoding/binary"

@@ -12,6 +14,10 @@ var (
parserLogger = log.NewLogger("exifcommon.parser")
)

var (
ErrParseFail = errors.New("parse failure")
)

// Parser knows how to parse all well-defined, encoded EXIF types.
type Parser struct {
}

@@ -56,7 +62,18 @@ func (p *Parser) ParseAscii(data []byte, unitCount uint32) (value string, err er

if len(data) == 0 || data[count-1] != 0 {
s := string(data[:count])
parserLogger.Warningf(nil, "ascii not terminated with nul as expected: [%v]", s)
parserLogger.Warningf(nil, "ASCII not terminated with NUL as expected: [%v]", s)

for i, c := range s {
if c > 127 {
// Binary

t := s[:i]
parserLogger.Warningf(nil, "ASCII also had binary characters. Truncating: [%v]->[%s]", s, t)

return t, nil
}
}

return s, nil
}

@@ -135,6 +152,50 @@ func (p *Parser) ParseLongs(data []byte, unitCount uint32, byteOrder binary.Byte
return value, nil
}

// ParseFloats knows how to encode an encoded list of floats.
func (p *Parser) ParseFloats(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []float32, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

count := int(unitCount)

if len(data) != (TypeFloat.Size() * count) {
log.Panic(ErrNotEnoughData)
}

value = make([]float32, count)
for i := 0; i < count; i++ {
value[i] = math.Float32frombits(byteOrder.Uint32(data[i*4 : (i+1)*4]))
}

return value, nil
}

// ParseDoubles knows how to encode an encoded list of doubles.
func (p *Parser) ParseDoubles(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []float64, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

count := int(unitCount)

if len(data) != (TypeDouble.Size() * count) {
log.Panic(ErrNotEnoughData)
}

value = make([]float64, count)
for i := 0; i < count; i++ {
value[i] = math.Float64frombits(byteOrder.Uint64(data[i*8 : (i+1)*8]))
}

return value, nil
}

// ParseRationals knows how to parse an encoded list of unsigned rationals.
func (p *Parser) ParseRationals(data []byte, unitCount uint32, byteOrder binary.ByteOrder) (value []Rational, err error) {
defer func() {
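A hedged sketch of the new float parser in isolation; the four input bytes are the big-endian float32 encoding of pi used by this change-set's tests:

```go
package main

import (
	"encoding/binary"
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v3/common"
	"github.com/dsoprea/go-logging"
)

func main() {
	p := new(exifcommon.Parser)

	// 0x40490fdb is pi encoded as a big-endian IEEE-754 float32.
	encoded := []byte{0x40, 0x49, 0x0f, 0xdb}

	values, err := p.ParseFloats(encoded, 1, binary.BigEndian)
	log.PanicIf(err)

	fmt.Println(values) // approximately [3.1415927]
}
```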
|
||||
|
|
|
@ -2,6 +2,7 @@ package exifcommon
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"math"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
|
@ -170,6 +171,78 @@ func TestParser_ParseLongs__Multiple(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestParser_ParseFloats__Single(t *testing.T) {
|
||||
p := new(Parser)
|
||||
|
||||
encoded := []byte{0x40, 0x49, 0x0f, 0xdb}
|
||||
|
||||
value, err := p.ParseFloats(encoded, 1, TestDefaultByteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
expectedResult := []float32{3.14159265}
|
||||
|
||||
for i, v := range value {
|
||||
if v < expectedResult[i] ||
|
||||
v >= math.Nextafter32(expectedResult[i], expectedResult[i]+1) {
|
||||
t.Fatalf("Encoding not correct (1): %v", value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParser_ParseFloats__Multiple(t *testing.T) {
|
||||
p := new(Parser)
|
||||
|
||||
encoded := []byte{0x40, 0x49, 0x0f, 0xdb, 0x40, 0x2d, 0xf8, 0x54}
|
||||
|
||||
value, err := p.ParseFloats(encoded, 2, TestDefaultByteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
expectedResult := []float32{3.14159265, 2.71828182}
|
||||
|
||||
for i, v := range value {
|
||||
if v < expectedResult[i] ||
|
||||
v >= math.Nextafter32(expectedResult[i], expectedResult[i]+1) {
|
||||
t.Fatalf("Encoding not correct (1): %v", value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParser_ParseDoubles__Single(t *testing.T) {
|
||||
p := new(Parser)
|
||||
|
||||
encoded := []byte{0x40, 0x09, 0x21, 0xfb, 0x53, 0xc8, 0xd4, 0xf1}
|
||||
|
||||
value, err := p.ParseDoubles(encoded, 1, TestDefaultByteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
expectedResult := []float64{3.14159265}
|
||||
for i, v := range value {
|
||||
if v < expectedResult[i] ||
|
||||
v >= math.Nextafter(expectedResult[i], expectedResult[i]+1) {
|
||||
t.Fatalf("Encoding not correct (1): %v", value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParser_ParseDoubles__Multiple(t *testing.T) {
|
||||
p := new(Parser)
|
||||
|
||||
encoded := []byte{0x40, 0x09, 0x21, 0xfb, 0x53, 0xc8, 0xd4, 0xf1,
|
||||
0x40, 0x05, 0xbf, 0x0a, 0x89, 0xf1, 0xb0, 0xdd}
|
||||
|
||||
value, err := p.ParseDoubles(encoded, 2, TestDefaultByteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
expectedResult := []float64{3.14159265, 2.71828182}
|
||||
|
||||
for i, v := range value {
|
||||
if v < expectedResult[i] ||
|
||||
v >= math.Nextafter(expectedResult[i], expectedResult[i]+1) {
|
||||
t.Fatalf("Encoding not correct: %v", value)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestParser_ParseRationals__Single(t *testing.T) {
|
||||
p := new(Parser)
|
||||
|
||||
|
|
|
@@ -6,6 +6,7 @@ import (
"reflect"
"strconv"
"strings"
"unicode"

"encoding/binary"

@@ -63,6 +64,12 @@ const (
// TypeSignedRational describes an encoded list of signed rationals.
TypeSignedRational TagTypePrimitive = 10

// TypeFloat describes an encoded list of floats
TypeFloat TagTypePrimitive = 11

// TypeDouble describes an encoded list of doubles.
TypeDouble TagTypePrimitive = 12

// TypeAsciiNoNul is just a pseudo-type, for our own purposes.
TypeAsciiNoNul TagTypePrimitive = 0xf0
)

@@ -74,23 +81,19 @@ func (typeType TagTypePrimitive) String() string {

// Size returns the size of one atomic unit of the type.
func (tagType TagTypePrimitive) Size() int {
if tagType == TypeByte {
switch tagType {
case TypeByte, TypeAscii, TypeAsciiNoNul:
return 1
} else if tagType == TypeAscii || tagType == TypeAsciiNoNul {
return 1
} else if tagType == TypeShort {
case TypeShort:
return 2
} else if tagType == TypeLong {
case TypeLong, TypeSignedLong, TypeFloat:
return 4
} else if tagType == TypeRational {
case TypeRational, TypeSignedRational, TypeDouble:
return 8
} else if tagType == TypeSignedLong {
return 4
} else if tagType == TypeSignedRational {
return 8
} else {
log.Panicf("can not determine tag-value size for type (%d): [%s]", tagType, TypeNames[tagType])

default:
log.Panicf("can not determine tag-value size for type (%d): [%s]",
tagType,
TypeNames[tagType])
// Never called.
return 0
}

@@ -109,6 +112,8 @@ func (tagType TagTypePrimitive) IsValid() bool {
tagType == TypeRational ||
tagType == TypeSignedLong ||
tagType == TypeSignedRational ||
tagType == TypeFloat ||
tagType == TypeDouble ||
tagType == TypeUndefined
}

@@ -123,6 +128,8 @@ var (
TypeUndefined: "UNDEFINED",
TypeSignedLong: "SLONG",
TypeSignedRational: "SRATIONAL",
TypeFloat: "FLOAT",
TypeDouble: "DOUBLE",

TypeAsciiNoNul: "_ASCII_NO_NUL",
}

@@ -148,6 +155,19 @@ type SignedRational struct {
Denominator int32
}

func isPrintableText(s string) bool {
for _, c := range s {
// unicode.IsPrint() returns false for newline characters.
if c == 0x0d || c == 0x0a {
continue
} else if unicode.IsPrint(rune(c)) == false {
return false
}
}

return true
}

// Format returns a stringified value for the given encoding. Automatically
// parses. Automatically calculates count based on type size. This function
// also supports undefined-type values (the ones that we support, anyway) by

@@ -166,37 +186,36 @@ func FormatFromType(value interface{}, justFirst bool) (phrase string, err error
case []byte:
return DumpBytesToString(t), nil
case string:
for i, c := range t {
if c == 0 {
t = t[:i]
break
}
}

if isPrintableText(t) == false {
phrase = fmt.Sprintf("string with binary data (%d bytes)", len(t))
return phrase, nil
}

return t, nil
case []uint16:
if len(t) == 0 {
case []uint16, []uint32, []int32, []float64, []float32:
val := reflect.ValueOf(t)

if val.Len() == 0 {
return "", nil
}

if justFirst == true {
var valueSuffix string
if len(t) > 1 {
if val.Len() > 1 {
valueSuffix = "..."
}

return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
return fmt.Sprintf("%v%s", val.Index(0), valueSuffix), nil
}

return fmt.Sprintf("%v", t), nil
case []uint32:
if len(t) == 0 {
return "", nil
}

if justFirst == true {
var valueSuffix string
if len(t) > 1 {
valueSuffix = "..."
}

return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
}

return fmt.Sprintf("%v", t), nil
return fmt.Sprintf("%v", val), nil
case []Rational:
if len(t) == 0 {
return "", nil

@@ -221,21 +240,6 @@ func FormatFromType(value interface{}, justFirst bool) (phrase string, err error
}

return fmt.Sprintf("%v", parts), nil
case []int32:
if len(t) == 0 {
return "", nil
}

if justFirst == true {
var valueSuffix string
if len(t) > 1 {
valueSuffix = "..."
}

return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
}

return fmt.Sprintf("%v", t), nil
case []SignedRational:
if len(t) == 0 {
return "", nil

@@ -261,8 +265,14 @@ func FormatFromType(value interface{}, justFirst bool) (phrase string, err error

return fmt.Sprintf("%v", parts), nil
case fmt.Stringer:
s := t.String()
if isPrintableText(s) == false {
phrase = fmt.Sprintf("stringable with binary data (%d bytes)", len(s))
return phrase, nil
}

// An undefined value that is documented (or that we otherwise support).
return t.String(), nil
return s, nil
default:
// Affects only "unknown" values, in general.
log.Panicf("type can not be formatted into string: %v", reflect.TypeOf(value).Name())

@@ -323,6 +333,16 @@ func FormatFromBytes(rawBytes []byte, tagType TagTypePrimitive, justFirst bool,

value, err = parser.ParseLongs(rawBytes, unitCount, byteOrder)
log.PanicIf(err)
case TypeFloat:
var err error

value, err = parser.ParseFloats(rawBytes, unitCount, byteOrder)
log.PanicIf(err)
case TypeDouble:
var err error

value, err = parser.ParseDoubles(rawBytes, unitCount, byteOrder)
log.PanicIf(err)
case TypeRational:
var err error

@@ -407,6 +427,16 @@ func TranslateStringToType(tagType TagTypePrimitive, valueString string) (value
log.PanicIf(err)

return int32(n), nil
} else if tagType == TypeFloat {
n, err := strconv.ParseFloat(valueString, 32)
log.PanicIf(err)

return float32(n), nil
} else if tagType == TypeDouble {
n, err := strconv.ParseFloat(valueString, 64)
log.PanicIf(err)

return float64(n), nil
} else if tagType == TypeSignedRational {
parts := strings.SplitN(valueString, "/", 2)
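A hedged sketch exercising the two new type branches above: formatting encoded DOUBLE bytes and translating a string back into a FLOAT. The byte values match the test vectors added with this change:

```go
package main

import (
	"encoding/binary"
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v3/common"
	"github.com/dsoprea/go-logging"
)

func main() {
	// Two big-endian float64 values (pi and e).
	raw := []byte{
		0x40, 0x09, 0x21, 0xfb, 0x53, 0xc8, 0xd4, 0xf1,
		0x40, 0x05, 0xbf, 0x0a, 0x89, 0xf1, 0xb0, 0xdd,
	}

	phrase, err := exifcommon.FormatFromBytes(raw, exifcommon.TypeDouble, false, binary.BigEndian)
	log.PanicIf(err)
	fmt.Println(phrase) // "[3.14159265 2.71828182]"

	value, err := exifcommon.TranslateStringToType(exifcommon.TypeFloat, "3.14159265")
	log.PanicIf(err)
	fmt.Printf("%T %v\n", value, value) // a float32 value
}
```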
|
||||
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
package exifcommon
|
||||
|
||||
import (
|
||||
"math"
|
||||
"testing"
|
||||
|
||||
"github.com/dsoprea/go-logging"
|
||||
|
@ -54,6 +55,18 @@ func TestTypeSignedRational_String(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestTypeFloat_String(t *testing.T) {
|
||||
if TypeFloat.String() != "FLOAT" {
|
||||
t.Fatalf("Type name not correct (float): [%s]", TypeFloat.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestTypeDouble_String(t *testing.T) {
|
||||
if TypeDouble.String() != "DOUBLE" {
|
||||
t.Fatalf("Type name not correct (double): [%s]", TypeDouble.String())
|
||||
}
|
||||
}
|
||||
|
||||
func TestTypeByte_Size(t *testing.T) {
|
||||
if TypeByte.Size() != 1 {
|
||||
t.Fatalf("Type size not correct (byte): (%d)", TypeByte.Size())
|
||||
|
@ -102,6 +115,18 @@ func TestTypeSignedRational_Size(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestTypeFloat_Size(t *testing.T) {
|
||||
if TypeFloat.Size() != 4 {
|
||||
t.Fatalf("Type size not correct (float): (%d)", TypeFloat.Size())
|
||||
}
|
||||
}
|
||||
|
||||
func TestTypeDouble_Size(t *testing.T) {
|
||||
if TypeDouble.Size() != 8 {
|
||||
t.Fatalf("Type size not correct (double): (%d)", TypeDouble.Size())
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormat__Byte(t *testing.T) {
|
||||
r := []byte{1, 2, 3, 4, 5, 6, 7, 8}
|
||||
|
||||
|
@ -157,6 +182,30 @@ func TestFormat__Long(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestFormat__Float(t *testing.T) {
|
||||
r := []byte{0x3f, 0x80, 0x00, 0x00,
|
||||
0x40, 0x00, 0x00, 0x00}
|
||||
|
||||
s, err := FormatFromBytes(r, TypeFloat, false, TestDefaultByteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
if s != "[1 2]" {
|
||||
t.Fatalf("Format output not correct (floats): [%s]", s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormat__Double(t *testing.T) {
|
||||
r := []byte{0x40, 0x09, 0x21, 0xfb, 0x53, 0xc8, 0xd4, 0xf1,
|
||||
0x40, 0x05, 0xbf, 0x0a, 0x89, 0xf1, 0xb0, 0xdd}
|
||||
|
||||
s, err := FormatFromBytes(r, TypeDouble, false, TestDefaultByteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
if s != "[3.14159265 2.71828182]" {
|
||||
t.Fatalf("Format output not correct (doubles): [%s]", s)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFormat__Rational(t *testing.T) {
|
||||
r := []byte{
|
||||
0, 0, 0, 1, 0, 0, 0, 2,
|
||||
|
@ -261,6 +310,26 @@ func TestTranslateStringToType__TypeLong(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestTranslateStringToType__TypeFloat(t *testing.T) {
|
||||
v, err := TranslateStringToType(TypeFloat, "3.14159265")
|
||||
log.PanicIf(err)
|
||||
|
||||
expected := float32(3.14159265)
|
||||
if v.(float32) < expected || v.(float32) >= math.Nextafter32(expected, expected+1) {
|
||||
t.Fatalf("Translation of string to type not correct (float32): %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTranslateStringToType__TypeDouble(t *testing.T) {
|
||||
v, err := TranslateStringToType(TypeDouble, "3.14159265")
|
||||
log.PanicIf(err)
|
||||
|
||||
expected := float64(3.14159265)
|
||||
if v.(float64) < expected || v.(float64) >= math.Nextafter(expected, expected+1) {
|
||||
t.Fatalf("Translation of string to type not correct (double): %v", v)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTranslateStringToType__TypeRational(t *testing.T) {
|
||||
v, err := TranslateStringToType(TypeRational, "11/22")
|
||||
log.PanicIf(err)
|
||||
|
@ -342,3 +411,27 @@ func TestTranslateStringToType__InvalidType(t *testing.T) {
|
|||
// log.Panicf("from-string encoding for type not supported; this shouldn't happen: [%s]", tagType.String())
|
||||
// return nil, nil
|
||||
// }
|
||||
|
||||
func TestIsPrintableText_letters(t *testing.T) {
|
||||
if isPrintableText("abc") != true {
|
||||
t.Fatalf("Printable text interpreted as nonprintable.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsPrintableText_space(t *testing.T) {
|
||||
if isPrintableText(" ") != true {
|
||||
t.Fatalf("Printable text interpreted as nonprintable.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsPrintableText_newlines(t *testing.T) {
|
||||
if isPrintableText("\r\n") != true {
|
||||
t.Fatalf("Printable text interpreted as nonprintable.")
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsPrintableText_punctuationAndSymbols(t *testing.T) {
|
||||
if isPrintableText(",:-/$©") != true {
|
||||
t.Fatalf("Printable text interpreted as nonprintable.")
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -97,6 +97,10 @@ func ParseExifFullTimestamp(fullTimestampPhrase string) (timestamp time.Time, er
parts := strings.Split(fullTimestampPhrase, " ")
datestampValue, timestampValue := parts[0], parts[1]

// Normalize the separators.
datestampValue = strings.ReplaceAll(datestampValue, "-", ":")
timestampValue = strings.ReplaceAll(timestampValue, "-", ":")

dateParts := strings.Split(datestampValue, ":")

year, err := strconv.ParseUint(dateParts[0], 10, 16)
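The same separator normalization lands in this second copy of the timestamp parser, so dash-separated datestamps are also accepted. A hedged usage sketch, assuming the function shown here is the exported `exifcommon.ParseExifFullTimestamp`; the input string is illustrative:

```go
package main

import (
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v3/common"
	"github.com/dsoprea/go-logging"
)

func main() {
	// Both "2018:11:30 13:01:49" and the dash-separated form below should now
	// parse to the same timestamp.
	timestamp, err := exifcommon.ParseExifFullTimestamp("2018-11-30 13:01:49")
	log.PanicIf(err)

	fmt.Println(timestamp.UTC())
}
```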
|
||||
|
|
|
@@ -315,6 +315,40 @@ func (vc *ValueContext) ReadLongs() (value []uint32, err error) {
return value, nil
}

// ReadFloats parses the list of encoded, floats from the value-context.
func (vc *ValueContext) ReadFloats() (value []float32, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

rawValue, err := vc.readRawEncoded()
log.PanicIf(err)

value, err = parser.ParseFloats(rawValue, vc.unitCount, vc.byteOrder)
log.PanicIf(err)

return value, nil
}

// ReadDoubles parses the list of encoded, doubles from the value-context.
func (vc *ValueContext) ReadDoubles() (value []float64, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

rawValue, err := vc.readRawEncoded()
log.PanicIf(err)

value, err = parser.ParseDoubles(rawValue, vc.unitCount, vc.byteOrder)
log.PanicIf(err)

return value, nil
}

// ReadRationals parses the list of encoded, unsigned rationals from the value-
// context.
func (vc *ValueContext) ReadRationals() (value []Rational, err error) {

@@ -405,6 +439,12 @@ func (vc *ValueContext) Values() (values interface{}, err error) {
} else if vc.tagType == TypeSignedRational {
values, err = vc.ReadSignedRationals()
log.PanicIf(err)
} else if vc.tagType == TypeFloat {
values, err = vc.ReadFloats()
log.PanicIf(err)
} else if vc.tagType == TypeDouble {
values, err = vc.ReadDoubles()
log.PanicIf(err)
} else if vc.tagType == TypeUndefined {
log.Panicf("will not parse undefined-type value")
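A hedged sketch of reading a FLOAT tag through the new accessor. The `NewValueContext` arguments mirror the test added later in this change-set; the IFD path and tag ID are purely illustrative:

```go
package main

import (
	"encoding/binary"
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v3/common"
	"github.com/dsoprea/go-logging"
	"github.com/dsoprea/go-utility/v2/filesystem"
)

func main() {
	// Four padding bytes followed by two big-endian float32 values (pi and e),
	// matching the layout used by the accompanying test.
	addressableData := []byte{0, 0, 0, 0, 0x40, 0x49, 0x0f, 0xdb, 0x40, 0x2d, 0xf8, 0x54}
	sb := rifs.NewSeekableBufferWithBytes(addressableData)

	vc := exifcommon.NewValueContext(
		"aa/bb",            // IFD path (illustrative)
		0x1234,             // tag ID (illustrative)
		2,                  // unit count
		4,                  // value offset
		[]byte{0, 0, 0, 4}, // raw value-offset bytes
		sb,
		exifcommon.TypeFloat,
		binary.BigEndian)

	values, err := vc.ReadFloats()
	log.PanicIf(err)

	fmt.Println(values) // approximately [3.1415927 2.7182817]
}
```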
|
||||
|
||||
|
|
|
@ -2,13 +2,14 @@ package exifcommon
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"math"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
"io/ioutil"
|
||||
|
||||
"github.com/dsoprea/go-logging"
|
||||
"github.com/dsoprea/go-utility/filesystem"
|
||||
"github.com/dsoprea/go-utility/v2/filesystem"
|
||||
)
|
||||
|
||||
func TestNewValueContext(t *testing.T) {
|
||||
|
@ -950,6 +951,72 @@ func TestValueContext_ReadLongs(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestValueContext_ReadFloats(t *testing.T) {
|
||||
unitCount := uint32(2)
|
||||
|
||||
rawValueOffset := []byte{0, 0, 0, 4}
|
||||
valueOffset := uint32(4)
|
||||
|
||||
data := []byte{0x40, 0x49, 0x0f, 0xdb, 0x40, 0x2d, 0xf8, 0x54}
|
||||
|
||||
addressableData := []byte{0, 0, 0, 0}
|
||||
addressableData = append(addressableData, data...)
|
||||
sb := rifs.NewSeekableBufferWithBytes(addressableData)
|
||||
|
||||
vc := NewValueContext(
|
||||
"aa/bb",
|
||||
0x1234,
|
||||
unitCount,
|
||||
valueOffset,
|
||||
rawValueOffset,
|
||||
sb,
|
||||
TypeFloat,
|
||||
TestDefaultByteOrder)
|
||||
|
||||
value, err := vc.ReadFloats()
|
||||
log.PanicIf(err)
|
||||
|
||||
expectedResult := []float32{3.14159265, 2.71828182}
|
||||
for i, v := range value {
|
||||
if v < expectedResult[i] || v >= math.Nextafter32(expectedResult[i], expectedResult[i]+1) {
|
||||
t.Fatalf("ReadFloats expecting %v, received %v", expectedResult[i], v)
|
||||
}
|
||||
}
|
||||
}
|
||||
func TestValueContext_ReadDoubles(t *testing.T) {
|
||||
unitCount := uint32(2)
|
||||
|
||||
rawValueOffset := []byte{0, 0, 0, 4}
|
||||
valueOffset := uint32(4)
|
||||
|
||||
data := []byte{0x40, 0x09, 0x21, 0xfb, 0x53, 0xc8, 0xd4, 0xf1,
|
||||
0x40, 0x05, 0xbf, 0x0a, 0x89, 0xf1, 0xb0, 0xdd}
|
||||
|
||||
addressableData := []byte{0, 0, 0, 0}
|
||||
addressableData = append(addressableData, data...)
|
||||
sb := rifs.NewSeekableBufferWithBytes(addressableData)
|
||||
|
||||
vc := NewValueContext(
|
||||
"aa/bb",
|
||||
0x1234,
|
||||
unitCount,
|
||||
valueOffset,
|
||||
rawValueOffset,
|
||||
sb,
|
||||
TypeDouble,
|
||||
TestDefaultByteOrder)
|
||||
|
||||
value, err := vc.ReadDoubles()
|
||||
log.PanicIf(err)
|
||||
|
||||
expectedResult := []float64{3.14159265, 2.71828182}
|
||||
for i, v := range value {
|
||||
if v < expectedResult[i] || v >= math.Nextafter(expectedResult[i], expectedResult[i]+1) {
|
||||
t.Fatalf("ReadDoubles expecting %v, received %v", expectedResult[i], v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestValueContext_ReadRationals(t *testing.T) {
|
||||
unitCount := uint32(2)
|
||||
|
||||
|
|
|
@@ -2,6 +2,7 @@ package exifcommon

import (
"bytes"
"math"
"reflect"
"time"

@@ -113,6 +114,44 @@ func (ve *ValueEncoder) encodeLongs(value []uint32) (ed EncodedData, err error)
return ed, nil
}

func (ve *ValueEncoder) encodeFloats(value []float32) (ed EncodedData, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

ed.UnitCount = uint32(len(value))
ed.Encoded = make([]byte, ed.UnitCount*4)

for i := uint32(0); i < ed.UnitCount; i++ {
ve.byteOrder.PutUint32(ed.Encoded[i*4:(i+1)*4], math.Float32bits(value[i]))
}

ed.Type = TypeFloat

return ed, nil
}

func (ve *ValueEncoder) encodeDoubles(value []float64) (ed EncodedData, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

ed.UnitCount = uint32(len(value))
ed.Encoded = make([]byte, ed.UnitCount*8)

for i := uint32(0); i < ed.UnitCount; i++ {
ve.byteOrder.PutUint64(ed.Encoded[i*8:(i+1)*8], math.Float64bits(value[i]))
}

ed.Type = TypeDouble

return ed, nil
}

func (ve *ValueEncoder) encodeRationals(value []Rational) (ed EncodedData, err error) {
defer func() {
if state := recover(); state != nil {

@@ -190,33 +229,38 @@ func (ve *ValueEncoder) Encode(value interface{}) (ed EncodedData, err error) {
}
}()

switch value.(type) {
switch t := value.(type) {
case []byte:
ed, err = ve.encodeBytes(value.([]byte))
ed, err = ve.encodeBytes(t)
log.PanicIf(err)
case string:
ed, err = ve.encodeAscii(value.(string))
ed, err = ve.encodeAscii(t)
log.PanicIf(err)
case []uint16:
ed, err = ve.encodeShorts(value.([]uint16))
ed, err = ve.encodeShorts(t)
log.PanicIf(err)
case []uint32:
ed, err = ve.encodeLongs(value.([]uint32))
ed, err = ve.encodeLongs(t)
log.PanicIf(err)
case []float32:
ed, err = ve.encodeFloats(t)
log.PanicIf(err)
case []float64:
ed, err = ve.encodeDoubles(t)
log.PanicIf(err)
case []Rational:
ed, err = ve.encodeRationals(value.([]Rational))
ed, err = ve.encodeRationals(t)
log.PanicIf(err)
case []int32:
ed, err = ve.encodeSignedLongs(value.([]int32))
ed, err = ve.encodeSignedLongs(t)
log.PanicIf(err)
case []SignedRational:
ed, err = ve.encodeSignedRationals(value.([]SignedRational))
ed, err = ve.encodeSignedRationals(t)
log.PanicIf(err)
case time.Time:
// For convenience, if the user doesn't want to deal with translation
// semantics with timestamps.

t := value.(time.Time)
s := ExifFullTimestampString(t)

ed, err = ve.encodeAscii(s)
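A hedged sketch of encoding a float slice through the dispatcher shown above; the values are illustrative, and the resulting bytes correspond to the test vectors added elsewhere in this change:

```go
package main

import (
	"encoding/binary"
	"fmt"

	exifcommon "github.com/dsoprea/go-exif/v3/common"
	"github.com/dsoprea/go-logging"
)

func main() {
	ve := exifcommon.NewValueEncoder(binary.BigEndian)

	// Encode dispatches []float32 to encodeFloats (TypeFloat), per the switch above.
	ed, err := ve.Encode([]float32{3.14159265, 2.71828182})
	log.PanicIf(err)

	fmt.Printf("type=%s count=%d bytes=% x\n", ed.Type, ed.UnitCount, ed.Encoded)
}
```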
|
||||
|
|
|
@ -2,6 +2,7 @@ package exifcommon
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"math"
|
||||
"reflect"
|
||||
"testing"
|
||||
"time"
|
||||
|
@ -173,6 +174,80 @@ func TestValueEncoder_encodeLongs__Cycle(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestValueEncoder_encodeFloats__Cycle(t *testing.T) {
|
||||
byteOrder := TestDefaultByteOrder
|
||||
ve := NewValueEncoder(byteOrder)
|
||||
|
||||
original := []float32{3.14159265, 2.71828182, 51.0, 68.0, 85.0}
|
||||
|
||||
ed, err := ve.encodeFloats(original)
|
||||
log.PanicIf(err)
|
||||
|
||||
if ed.Type != TypeFloat {
|
||||
t.Fatalf("IFD type not expected.")
|
||||
}
|
||||
|
||||
expected := []byte{
|
||||
0x40, 0x49, 0x0f, 0xdb,
|
||||
0x40, 0x2d, 0xf8, 0x54,
|
||||
0x42, 0x4c, 0x00, 0x00,
|
||||
0x42, 0x88, 0x00, 0x00,
|
||||
0x42, 0xaa, 0x00, 0x00,
|
||||
}
|
||||
|
||||
if bytes.Equal(ed.Encoded, expected) != true {
|
||||
t.Fatalf("Data not encoded correctly.")
|
||||
} else if ed.UnitCount != 5 {
|
||||
t.Fatalf("Unit-count not correct.")
|
||||
}
|
||||
|
||||
recovered, err := parser.ParseFloats(ed.Encoded, ed.UnitCount, byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
for i, v := range recovered {
|
||||
if v < original[i] || v >= math.Nextafter32(original[i], original[i]+1) {
|
||||
t.Fatalf("ReadFloats expecting %v, received %v", original[i], v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestValueEncoder_encodeDoubles__Cycle(t *testing.T) {
|
||||
byteOrder := TestDefaultByteOrder
|
||||
ve := NewValueEncoder(byteOrder)
|
||||
|
||||
original := []float64{3.14159265, 2.71828182, 954877.1230695, 68.0, 85.0}
|
||||
|
||||
ed, err := ve.encodeDoubles(original)
|
||||
log.PanicIf(err)
|
||||
|
||||
if ed.Type != TypeDouble {
|
||||
t.Fatalf("IFD type not expected.")
|
||||
}
|
||||
|
||||
expected := []byte{
|
||||
0x40, 0x09, 0x21, 0xfb, 0x53, 0xc8, 0xd4, 0xf1,
|
||||
0x40, 0x05, 0xbf, 0x0a, 0x89, 0xf1, 0xb0, 0xdd,
|
||||
0x41, 0x2d, 0x23, 0xfa, 0x3f, 0x02, 0xf7, 0x2b,
|
||||
0x40, 0x51, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x40, 0x55, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
}
|
||||
|
||||
if reflect.DeepEqual(ed.Encoded, expected) != true {
|
||||
t.Fatalf("Data not encoded correctly.")
|
||||
} else if ed.UnitCount != 5 {
|
||||
t.Fatalf("Unit-count not correct.")
|
||||
}
|
||||
|
||||
recovered, err := parser.ParseDoubles(ed.Encoded, ed.UnitCount, byteOrder)
|
||||
log.PanicIf(err)
|
||||
|
||||
for i, v := range recovered {
|
||||
if v < original[i] || v >= math.Nextafter(original[i], original[i]+1) {
|
||||
t.Fatalf("ReadDoubles expecting %v, received %v", original[i], v)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestValueEncoder_encodeRationals__Cycle(t *testing.T) {
|
||||
byteOrder := TestDefaultByteOrder
|
||||
ve := NewValueEncoder(byteOrder)
|
||||
|
@ -431,6 +506,64 @@ func TestValueEncoder_Encode__Long(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestValueEncoder_Encode__Float(t *testing.T) {
|
||||
byteOrder := TestDefaultByteOrder
|
||||
ve := NewValueEncoder(byteOrder)
|
||||
|
||||
original := []float32{3.14159265, 2.71828182, 51.0, 68.0, 85.0}
|
||||
|
||||
ed, err := ve.Encode(original)
|
||||
log.PanicIf(err)
|
||||
|
||||
if ed.Type != TypeFloat {
|
||||
t.Fatalf("IFD type not expected.")
|
||||
}
|
||||
|
||||
expected := []byte{
|
||||
0x40, 0x49, 0x0f, 0xdb,
|
||||
0x40, 0x2d, 0xf8, 0x54,
|
||||
0x42, 0x4c, 0x00, 0x00,
|
||||
0x42, 0x88, 0x00, 0x00,
|
||||
0x42, 0xaa, 0x00, 0x00,
|
||||
}
|
||||
|
||||
if bytes.Equal(ed.Encoded, expected) != true {
|
||||
t.Fatalf("Data not encoded correctly.")
|
||||
} else if ed.UnitCount != 5 {
|
||||
t.Fatalf("Unit-count not correct.")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestValueEncoder_Encode__Double(t *testing.T) {
|
||||
byteOrder := TestDefaultByteOrder
|
||||
ve := NewValueEncoder(byteOrder)
|
||||
|
||||
original := []float64{3.14159265, 2.71828182, 954877.1230695, 68.0, 85.0}
|
||||
|
||||
ed, err := ve.Encode(original)
|
||||
log.PanicIf(err)
|
||||
|
||||
if ed.Type != TypeDouble {
|
||||
t.Fatalf("IFD type not expected.")
|
||||
}
|
||||
|
||||
expected := []byte{
|
||||
0x40, 0x09, 0x21, 0xfb, 0x53, 0xc8, 0xd4, 0xf1,
|
||||
0x40, 0x05, 0xbf, 0x0a, 0x89, 0xf1, 0xb0, 0xdd,
|
||||
0x41, 0x2d, 0x23, 0xfa, 0x3f, 0x02, 0xf7, 0x2b,
|
||||
0x40, 0x51, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
0x40, 0x55, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00,
|
||||
}
|
||||
|
||||
if bytes.Equal(ed.Encoded, expected) != true {
|
||||
t.Fatalf("Data not encoded correctly.")
|
||||
} else if ed.UnitCount != 5 {
|
||||
t.Fatalf("Unit-count not correct.")
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestValueEncoder_Encode__Rational(t *testing.T) {
|
||||
byteOrder := TestDefaultByteOrder
|
||||
ve := NewValueEncoder(byteOrder)
|
||||
|
|
|
@@ -4,7 +4,7 @@ import (
"io"

"github.com/dsoprea/go-logging"
"github.com/dsoprea/go-utility/filesystem"
"github.com/dsoprea/go-utility/v2/filesystem"
)

type ExifBlobSeeker interface {
|
||||
|
|
87  v3/exif.go
|
@@ -13,7 +13,7 @@ import (

"github.com/dsoprea/go-logging"

"github.com/dsoprea/go-exif/v2/common"
"github.com/dsoprea/go-exif/v3/common"
)

const (

@@ -70,10 +70,58 @@ func SearchAndExtractExif(data []byte) (rawExif []byte, err error) {
return rawExif, nil
}

// SearchAndExtractExifWithReader searches for an EXIF blob using an
// `io.Reader`. We can't know how much long the EXIF data is without parsing it,
// so this will likely grab up a lot of the image-data, too.
func SearchAndExtractExifWithReader(r io.Reader) (rawExif []byte, err error) {
// SearchAndExtractExifN searches for an EXIF blob in the byte-slice, but skips
// the given number of EXIF blocks first. This is a forensics tool that helps
// identify multiple EXIF blocks in a file.
func SearchAndExtractExifN(data []byte, n int) (rawExif []byte, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

skips := 0
totalDiscarded := 0
for {
b := bytes.NewBuffer(data)

var discarded int

rawExif, discarded, err = searchAndExtractExifWithReaderWithDiscarded(b)
if err != nil {
if err == ErrNoExif {
return nil, err
}

log.Panic(err)
}

exifLogger.Debugf(nil, "Read EXIF block (%d).", skips)

totalDiscarded += discarded

if skips >= n {
exifLogger.Debugf(nil, "Reached requested EXIF block (%d).", n)
break
}

nextOffset := discarded + 1
exifLogger.Debugf(nil, "Skipping EXIF block (%d) by seeking to position (%d).", skips, nextOffset)

data = data[nextOffset:]
skips++
}

exifLogger.Debugf(nil, "Found EXIF blob (%d) bytes from initial position.", totalDiscarded)
return rawExif, nil
}

// searchAndExtractExifWithReaderWithDiscarded searches for an EXIF blob using
// an `io.Reader`. We can't know how much long the EXIF data is without parsing
// it, so this will likely grab up a lot of the image-data, too.
//
// This function returned the count of preceding bytes.
func searchAndExtractExifWithReaderWithDiscarded(r io.Reader) (rawExif []byte, discarded int, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))

@@ -85,13 +133,12 @@ func SearchAndExtractExifWithReader(r io.Reader) (rawExif []byte, err error) {
// least, again, with JPEGs).

br := bufio.NewReader(r)
discarded := 0

for {
window, err := br.Peek(ExifSignatureLength)
if err != nil {
if err == io.EOF {
return nil, ErrNoExif
return nil, 0, ErrNoExif
}

log.Panic(err)

@@ -122,6 +169,30 @@ func SearchAndExtractExifWithReader(r io.Reader) (rawExif []byte, err error) {
rawExif, err = ioutil.ReadAll(br)
log.PanicIf(err)

return rawExif, discarded, nil
}

// RELEASE(dustin): We should replace the implementation of SearchAndExtractExifWithReader with searchAndExtractExifWithReaderWithDiscarded and drop the latter.

// SearchAndExtractExifWithReader searches for an EXIF blob using an
// `io.Reader`. We can't know how much long the EXIF data is without parsing it,
// so this will likely grab up a lot of the image-data, too.
func SearchAndExtractExifWithReader(r io.Reader) (rawExif []byte, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

rawExif, _, err = searchAndExtractExifWithReaderWithDiscarded(r)
if err != nil {
if err == ErrNoExif {
return nil, err
}

log.Panic(err)
}

return rawExif, nil
}

@@ -179,9 +250,11 @@ func ParseExifHeader(data []byte) (eh ExifHeader, err error) {
}

if bytes.Equal(data[:4], ExifBigEndianSignature[:]) == true {
exifLogger.Debugf(nil, "Byte-order is big-endian.")
eh.ByteOrder = binary.BigEndian
} else if bytes.Equal(data[:4], ExifLittleEndianSignature[:]) == true {
eh.ByteOrder = binary.LittleEndian
exifLogger.Debugf(nil, "Byte-order is little-endian.")
} else {
return eh, ErrNoExif
}
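A hedged sketch of the new forensics entry point described above; the file path and skip count are illustrative:

```go
package main

import (
	"fmt"
	"os"

	exif "github.com/dsoprea/go-exif/v3"
	"github.com/dsoprea/go-logging"
)

func main() {
	data, err := os.ReadFile("image.jpg")
	log.PanicIf(err)

	// Skip one EXIF block and return the second, if present; ErrNoExif is
	// returned when fewer blocks exist than requested.
	rawExif, err := exif.SearchAndExtractExifN(data, 1)
	if err == exif.ErrNoExif {
		fmt.Println("fewer than two EXIF blocks")
		return
	}
	log.PanicIf(err)

	fmt.Printf("second EXIF block is (%d) bytes\n", len(rawExif))
}
```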
|
||||
|
|
|
@@ -13,7 +13,7 @@ import (

"github.com/dsoprea/go-logging"

"github.com/dsoprea/go-exif/v2/common"
"github.com/dsoprea/go-exif/v3/common"
)

func TestVisit(t *testing.T) {
|
||||
|
|
20  v3/go.mod
|
@@ -1,16 +1,18 @@
module github.com/dsoprea/go-exif/v2
module github.com/dsoprea/go-exif/v3

go 1.13
go 1.12

// Development only
// replace github.com/dsoprea/go-logging => ../../go-logging
// replace github.com/dsoprea/go-utility => ../../go-utility
// replace github.com/dsoprea/go-utility/v2 => ../../go-utility/v2

require (
github.com/dsoprea/go-logging v0.0.0-20200517223158-a10564966e9d
github.com/dsoprea/go-utility v0.0.0-20200711062821-fab8125e9bdf
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d
github.com/jessevdk/go-flags v1.4.0
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 // indirect
gopkg.in/yaml.v2 v2.3.0
github.com/dsoprea/go-logging v0.0.0-20200710184922-b02d349568dd
github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349
github.com/go-errors/errors v1.4.2 // indirect
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551
github.com/jessevdk/go-flags v1.5.0
golang.org/x/net v0.0.0-20221002022538-bcab6841153b // indirect
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec // indirect
gopkg.in/yaml.v2 v2.4.0
)
|
||||
|
|
43  v3/go.sum
|
@ -1,39 +1,46 @@
|
|||
github.com/dsoprea/go-exif/v2 v2.0.0-20200321225314-640175a69fe4/go.mod h1:Lm2lMM2zx8p4a34ZemkaUV95AnMl4ZvLbCUbwOvLC2E=
|
||||
github.com/dsoprea/go-logging v0.0.0-20190624164917-c4f10aab7696 h1:VGFnZAcLwPpt1sHlAxml+pGLZz9A2s+K/s1YNhPC91Y=
|
||||
github.com/dsoprea/go-exif/v3 v3.0.0-20200717053412-08f1b6708903/go.mod h1:0nsO1ce0mh5czxGeLo4+OCZ/C6Eo6ZlMWsz7rH/Gxv8=
|
||||
github.com/dsoprea/go-logging v0.0.0-20190624164917-c4f10aab7696/go.mod h1:Nm/x2ZUNRW6Fe5C3LxdY1PyZY5wmDv/s5dkPJ/VB3iA=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502191043-ec333ec7635f h1:XM9MVftaUNA4CcjV97+4bSy7u9Ns04DEYbZkswUrRtc=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502191043-ec333ec7635f/go.mod h1:7I+3Pe2o/YSU88W0hWlm9S22W7XI1JFNJ86U0zPKMf8=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502201358-170ff607885f h1:FonKAuW3PmNtqk9tOR+Z7bnyQHytmnZBCmm5z1PQMss=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200502201358-170ff607885f/go.mod h1:7I+3Pe2o/YSU88W0hWlm9S22W7XI1JFNJ86U0zPKMf8=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200517223158-a10564966e9d h1:F/7L5wr/fP/SKeO5HuMlNEX9Ipyx2MbH2rV9G4zJRpk=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200517223158-a10564966e9d/go.mod h1:7I+3Pe2o/YSU88W0hWlm9S22W7XI1JFNJ86U0zPKMf8=
|
||||
github.com/dsoprea/go-utility v0.0.0-20200512094054-1abbbc781176 h1:CfXezFYb2STGOd1+n1HshvE191zVx+QX3A1nML5xxME=
|
||||
github.com/dsoprea/go-utility v0.0.0-20200512094054-1abbbc781176/go.mod h1:95+K3z2L0mqsVYd6yveIv1lmtT3tcQQ3dVakPySffW8=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200710184922-b02d349568dd h1:l+vLbuxptsC6VQyQsfD7NnEC8BZuFpz45PgY+pH8YTg=
|
||||
github.com/dsoprea/go-logging v0.0.0-20200710184922-b02d349568dd/go.mod h1:7I+3Pe2o/YSU88W0hWlm9S22W7XI1JFNJ86U0zPKMf8=
|
||||
github.com/dsoprea/go-utility v0.0.0-20200711062821-fab8125e9bdf h1:/w4QxepU4AHh3AuO6/g8y/YIIHH5+aKP3Bj8sg5cqhU=
|
||||
github.com/dsoprea/go-utility v0.0.0-20200711062821-fab8125e9bdf/go.mod h1:95+K3z2L0mqsVYd6yveIv1lmtT3tcQQ3dVakPySffW8=
|
||||
github.com/go-errors/errors v1.0.1 h1:LUHzmkK3GUKUrL/1gfBUxAHzcev3apQlezX/+O7ma6w=
|
||||
github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349 h1:DilThiXje0z+3UQ5YjYiSRRzVdtamFpvBQXKwMglWqw=
|
||||
github.com/dsoprea/go-utility/v2 v2.0.0-20221003172846-a3e1774ef349/go.mod h1:4GC5sXji84i/p+irqghpPFZBF8tRN/Q7+700G0/DLe8=
|
||||
github.com/go-errors/errors v1.0.1/go.mod h1:f4zRHt4oKfwPJE5k8C9vpYG+aDHdBFUsgrm6/TyX73Q=
|
||||
github.com/go-errors/errors v1.0.2 h1:xMxH9j2fNg/L4hLn/4y3M0IUsn0M6Wbu/Uh9QlOfBh4=
|
||||
github.com/go-errors/errors v1.0.2/go.mod h1:psDX2osz5VnTOnFWbDeWwS7yejl+uV3FEWEp4lssFEs=
|
||||
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec h1:lJwO/92dFXWeXOZdoGXgptLmNLwynMSHUmU6besqtiw=
|
||||
github.com/go-errors/errors v1.1.1/go.mod h1:psDX2osz5VnTOnFWbDeWwS7yejl+uV3FEWEp4lssFEs=
|
||||
github.com/go-errors/errors v1.4.2 h1:J6MZopCL4uSllY1OfXM374weqZFFItUbrImctkmUxIA=
|
||||
github.com/go-errors/errors v1.4.2/go.mod h1:sIVyrIiJhuEF+Pj9Ebtd6P/rEYROXFi3BopGUQ5a5Og=
|
||||
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d h1:C/hKUcHT483btRbeGkrRjJz+Zbcj8audldIi9tRJDCc=
|
||||
github.com/golang/geo v0.0.0-20200319012246-673a6f80352d/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
|
||||
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551 h1:gtexQ/VGyN+VVFRXSFiguSNcXmS6rkKT+X7FdIrTtfo=
|
||||
github.com/golang/geo v0.0.0-20210211234256-740aa86cb551/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
|
||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jessevdk/go-flags v1.5.0 h1:1jKYvbxEjfUl0fmqTCOfonvskHHXMjBySTLW4y9LFvc=
|
||||
github.com/jessevdk/go-flags v1.5.0/go.mod h1:Fw0T6WPc1dYxT4mKEZRfG5kJhaTDP9pj1c2EWnYs/m4=
|
||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553 h1:efeOvDhwQ29Dj3SdAV/MJf8oukgn+8D8WgaCaRMchF8=
|
||||
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200320220750-118fecf932d8/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5 h1:WQ8q63x+f/zpC8Ac1s9wLElVoHhm32p6tudrU72n1QA=
|
||||
golang.org/x/net v0.0.0-20200501053045-e0ff5e5a1de5/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120 h1:EZ3cVSzKOlJxAd8e8YAJ7no8nNypTxexh/YE/xW3ZEY=
|
||||
golang.org/x/net v0.0.0-20200513185701-a91f0712d120/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||
golang.org/x/net v0.0.0-20221002022538-bcab6841153b h1:6e93nYa3hNqAvLr0pD4PN1fFS+gKzp2zAXqrnTCstqU=
|
||||
golang.org/x/net v0.0.0-20221002022538-bcab6841153b/go.mod h1:YDH+HFinaLZZlnHAfSS6ZXJJ9M9t4Dl22yv3iI2vPwk=
|
||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210320140829-1e4c9ba3b0c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec h1:BkDtF2Ih9xZ7le9ndzTA7KJow28VbQW3odyk/8drmuI=
|
||||
golang.org/x/sys v0.0.0-20220928140112-f11e5e49a4ec/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
|
||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
|
||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
|
||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
|
||||
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
"github.com/dsoprea/go-logging"
|
||||
"github.com/golang/geo/s2"
|
||||
|
||||
"github.com/dsoprea/go-exif/v2/common"
|
||||
"github.com/dsoprea/go-exif/v3/common"
|
||||
)
|
||||
|
||||
var (
|
||||
|
|
|
@ -7,7 +7,7 @@ import (
|
|||
|
||||
"github.com/dsoprea/go-logging"
|
||||
|
||||
"github.com/dsoprea/go-exif/v2/common"
|
||||
"github.com/dsoprea/go-exif/v3/common"
|
||||
)
|
||||
|
||||
func TestNewGpsDegreesFromRationals(t *testing.T) {
|
||||
|
|
|
@ -14,8 +14,8 @@ import (
|
|||
|
||||
"github.com/dsoprea/go-logging"
|
||||
|
||||
"github.com/dsoprea/go-exif/v2/common"
|
||||
"github.com/dsoprea/go-exif/v2/undefined"
|
||||
"github.com/dsoprea/go-exif/v3/common"
|
||||
"github.com/dsoprea/go-exif/v3/undefined"
|
||||
)
|
||||
|
||||
var (
|
||||
|
|
|
@ -9,7 +9,7 @@ import (
|
|||
|
||||
"github.com/dsoprea/go-logging"
|
||||
|
||||
"github.com/dsoprea/go-exif/v2/common"
|
||||
"github.com/dsoprea/go-exif/v3/common"
|
||||
)
|
||||
|
||||
const (
|
||||
|
|
|
@ -8,7 +8,7 @@ import (
|
|||
|
||||
"github.com/dsoprea/go-logging"
|
||||
|
||||
"github.com/dsoprea/go-exif/v2/common"
|
||||
"github.com/dsoprea/go-exif/v3/common"
|
||||
)
|
||||
|
||||
func Test_ByteWriter_writeAsBytes_uint8(t *testing.T) {
|
||||
|
|
|
@ -9,8 +9,8 @@ import (
|
|||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/dsoprea/go-exif/v2/common"
|
||||
"github.com/dsoprea/go-exif/v2/undefined"
|
||||
"github.com/dsoprea/go-exif/v3/common"
|
||||
"github.com/dsoprea/go-exif/v3/undefined"
|
||||
"github.com/dsoprea/go-logging"
|
||||
)
|
||||
|
||||
|
|
|
@ -13,8 +13,8 @@ import (
|
|||
|
||||
"github.com/dsoprea/go-logging"
|
||||
|
||||
"github.com/dsoprea/go-exif/v2/common"
|
||||
"github.com/dsoprea/go-exif/v2/undefined"
|
||||
"github.com/dsoprea/go-exif/v3/common"
|
||||
"github.com/dsoprea/go-exif/v3/undefined"
|
||||
)
|
||||
|
||||
var (
|
||||
|
@ -163,6 +163,8 @@ type IfdEnumerate struct {
|
|||
tagIndex *TagIndex
|
||||
ifdMapping *exifcommon.IfdMapping
|
||||
furthestOffset uint32
|
||||
|
||||
visitedIfdOffsets map[uint32]struct{}
|
||||
}
|
||||
|
||||
// NewIfdEnumerate returns a new instance of IfdEnumerate.
|
||||
|
@ -172,6 +174,8 @@ func NewIfdEnumerate(ifdMapping *exifcommon.IfdMapping, tagIndex *TagIndex, ebs
|
|||
byteOrder: byteOrder,
|
||||
ifdMapping: ifdMapping,
|
||||
tagIndex: tagIndex,
|
||||
|
||||
visitedIfdOffsets: make(map[uint32]struct{}),
|
||||
}
|
||||
}
|
||||
|
||||
|
@@ -225,15 +229,60 @@ func (ie *IfdEnumerate) parseTag(ii *exifcommon.IfdIdentity, tagPosition int, bp
 	valueOffset, rawValueOffset, err := bp.getUint32()
 	log.PanicIf(err)
 
 	// Check whether the embedded type indicator is valid.
 
 	if tagType.IsValid() == false {
 		// Technically, we have the type on-file in the tags-index, but
 		// if the type stored alongside the data disagrees with it,
 		// which it apparently does, all bets are off.
-		log.Panic(ErrTagTypeNotValid)
+		ifdEnumerateLogger.Warningf(nil,
+			"Tag (0x%04x) in IFD [%s] at position (%d) has invalid type (0x%04x) and will be skipped.",
+			tagId, ii, tagPosition, int(tagType))
+
+		ite = &IfdTagEntry{
+			tagId:   tagId,
+			tagType: tagType,
+		}
+
+		return ite, ErrTagTypeNotValid
 	}
 
+	// Check whether the embedded type is listed among the supported types for
+	// the registered tag. If not, skip processing the tag.
+
+	it, err := ie.tagIndex.Get(ii, tagId)
+	if err != nil {
+		if log.Is(err, ErrTagNotFound) == true {
+			ifdEnumerateLogger.Warningf(nil, "Tag (0x%04x) is not known and will be skipped.", tagId)
+
+			ite = &IfdTagEntry{
+				tagId: tagId,
+			}
+
+			return ite, ErrTagNotFound
+		}
+
+		log.Panic(err)
+	}
+
+	// If we're trying to be as forgiving as possible then use whatever type was
+	// reported in the format. Otherwise, only accept a type that's expected for
+	// this tag.
+	if ie.tagIndex.UniversalSearch() == false && it.DoesSupportType(tagType) == false {
+		// The type in the stream disagrees with the type that this tag is
+		// expected to have. This can present issues with how we handle the
+		// special-case tags (e.g. thumbnails, GPS, etc..) when those tags
+		// suddenly have data that we no longer manipulate correctly/
+		// accurately.
+		ifdEnumerateLogger.Warningf(nil,
+			"Tag (0x%04x) in IFD [%s] at position (%d) has unsupported type (0x%02x) and will be skipped.",
+			tagId, ii, tagPosition, int(tagType))
+
+		return nil, ErrTagTypeNotValid
+	}
+
 	// Construct tag struct.
 
 	rs, err := ie.ebs.GetReadSeeker(0)
 	log.PanicIf(err)
@@ -363,7 +412,7 @@ func (ie *IfdEnumerate) tagPostParse(ite *IfdTagEntry, med *MiscellaneousExifDat
 	// tag should ever be repeated, and b) all but one had an incorrect
 	// type and caused parsing/conversion woes. So, this is a quick fix
 	// for those scenarios.
-	if it.DoesSupportType(tagType) == false {
+	if ie.tagIndex.UniversalSearch() == false && it.DoesSupportType(tagType) == false {
 		ifdEnumerateLogger.Warningf(nil,
 			"Skipping tag [%s] (0x%04x) [%s] with an unexpected type: %v ∉ %v",
 			ii.UnindexedString(), tagId, it.Name,

@@ -397,11 +446,9 @@ func (ie *IfdEnumerate) parseIfd(ii *exifcommon.IfdIdentity, bp *byteParser, vis
 	for i := 0; i < int(tagCount); i++ {
 		ite, err := ie.parseTag(ii, i, bp)
 		if err != nil {
-			if log.Is(err, ErrTagTypeNotValid) == true {
-				// Technically, we have the type on-file in the tags-index, but
-				// if the type stored alongside the data disagrees with it,
-				// which it apparently does, all bets are off.
-				ifdEnumerateLogger.Warningf(nil, "Tag (0x%04x) in IFD [%s] at position (%d) has invalid type (%d) and will be skipped.", ite.tagId, ii, i, ite.tagType)
+			if log.Is(err, ErrTagNotFound) == true || log.Is(err, ErrTagTypeNotValid) == true {
+				// These tags should've been fully logged in parseTag(). The
+				// ITE returned is nil so we can't print anything about them, now.
 				continue
 			}
@@ -486,27 +533,43 @@ func (ie *IfdEnumerate) parseIfd(ii *exifcommon.IfdIdentity, bp *byteParser, vis
 	if enumeratorThumbnailOffset != nil && enumeratorThumbnailSize != nil {
 		thumbnailData, err = ie.parseThumbnail(enumeratorThumbnailOffset, enumeratorThumbnailSize)
-		log.PanicIf(err)
+		if err != nil {
+			ifdEnumerateLogger.Errorf(
+				nil, err,
+				"We tried to bump our furthest-offset counter but there was an issue first seeking past the thumbnail.")
+		} else {
+			// In this case, the value is always an offset.
+			offset := enumeratorThumbnailOffset.getValueOffset()
 
-		// In this case, the value is always an offset.
-		offset := enumeratorThumbnailOffset.getValueOffset()
+			// This this case, the value is always a length.
+			length := enumeratorThumbnailSize.getValueOffset()
 
-		// This this case, the value is always a length.
-		length := enumeratorThumbnailSize.getValueOffset()
+			ifdEnumerateLogger.Debugf(nil, "Found thumbnail in IFD [%s]. Its offset is (%d) and is (%d) bytes.", ii, offset, length)
 
-		ifdEnumerateLogger.Debugf(nil, "Found thumbnail in IFD [%s]. Its offset is (%d) and is (%d) bytes.", ii, offset, length)
+			furthestOffset := offset + length
 
-		furthestOffset := offset + length
-
-		if furthestOffset > ie.furthestOffset {
-			ie.furthestOffset = furthestOffset
+			if furthestOffset > ie.furthestOffset {
+				ie.furthestOffset = furthestOffset
+			}
 		}
 	}
 
 	nextIfdOffset, _, err = bp.getUint32()
 	log.PanicIf(err)
 
-	ifdEnumerateLogger.Debugf(nil, "Next IFD at offset: (%08x)", nextIfdOffset)
+	_, alreadyVisited := ie.visitedIfdOffsets[nextIfdOffset]
+
+	if alreadyVisited == true {
+		ifdEnumerateLogger.Warningf(nil, "IFD at offset (0x%08x) has been linked-to more than once. There might be a cycle in the IFD chain. Not reparsing.", nextIfdOffset)
+		nextIfdOffset = 0
+	}
+
+	if nextIfdOffset != 0 {
+		ie.visitedIfdOffsets[nextIfdOffset] = struct{}{}
+		ifdEnumerateLogger.Debugf(nil, "[%s] Next IFD at offset: (0x%08x)", ii.String(), nextIfdOffset)
+	} else {
+		ifdEnumerateLogger.Debugf(nil, "[%s] IFD chain has terminated.", ii.String())
+	}
 
 	return nextIfdOffset, entries, thumbnailData, nil
 }
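The `visitedIfdOffsets` map added above is what breaks infinite loops in malformed files whose next-IFD pointer links back to an earlier offset. As a standalone illustration of the same guard, not code from this repository (the offsets and the `nextOffset` map below are made up), the idea is just a visited-set over offsets:

```
package main

import "fmt"

// followIfdChain walks a linked list of IFD offsets, refusing to visit any
// offset twice. nextOffset stands in for reading the "next IFD" pointer out
// of a stream; it is a plain map here so the sketch stays runnable.
func followIfdChain(first uint32, nextOffset map[uint32]uint32) []uint32 {
    visited := map[uint32]struct{}{}
    chain := []uint32{}

    for offset := first; offset != 0; offset = nextOffset[offset] {
        if _, seen := visited[offset]; seen {
            // A repeated offset means the chain loops back on itself;
            // stop instead of enumerating forever.
            fmt.Printf("cycle detected at offset (0x%08x); stopping\n", offset)
            break
        }

        visited[offset] = struct{}{}
        chain = append(chain, offset)
    }

    return chain
}

func main() {
    // 0x08 -> 0x20 -> 0x08 ...: a malformed chain that links back to itself.
    next := map[uint32]uint32{0x08: 0x20, 0x20: 0x08}
    fmt.Println(followIfdChain(0x08, next)) // [8 32]
}
```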
@@ -715,6 +778,14 @@ func (ifd *Ifd) ByteOrder() binary.ByteOrder {
 	return ifd.byteOrder
 }
 
+// NextIfd returns the Ifd struct for the next IFD in the chain.
+func (ifd *Ifd) NextIfd() *Ifd {
+
+	// TODO(dustin): Add test
+
+	return ifd.nextIfd
+}
+
 // ChildWithIfdPath returns an `Ifd` struct for the given child of the current
 // IFD.
 func (ifd *Ifd) ChildWithIfdPath(iiChild *exifcommon.IfdIdentity) (childIfd *Ifd, err error) {
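`NextIfd()` exposes the next-IFD link that was previously internal. A minimal usage sketch, not part of this diff, assuming the usual v3 `Collect()` entry point and an image path passed on the command line:

```
package main

import (
    "fmt"
    "os"

    "github.com/dsoprea/go-logging"

    "github.com/dsoprea/go-exif/v3"
    "github.com/dsoprea/go-exif/v3/common"
)

func main() {
    rawExif, err := exif.SearchFileAndExtractExif(os.Args[1])
    log.PanicIf(err)

    im, err := exifcommon.NewIfdMappingWithStandard()
    log.PanicIf(err)

    ti := exif.NewTagIndex()

    _, index, err := exif.Collect(im, ti, rawExif)
    log.PanicIf(err)

    // Walk the root chain (IFD0 -> IFD1 -> ...) using the new accessor.
    for ifd := index.RootIfd; ifd != nil; ifd = ifd.NextIfd() {
        fmt.Println(ifd)
    }
}
```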
@@ -1037,8 +1108,8 @@ func (ifd *Ifd) GpsInfo() (gi *GpsInfo, err error) {
 
 	gi = new(GpsInfo)
 
-	if ifd.ifdIdentity.UnindexedString() != exifcommon.IfdGpsInfoStandardIfdIdentity.UnindexedString() {
-		log.Panicf("GPS can only be read on GPS IFD: [%s] != [%s]", ifd.ifdIdentity.UnindexedString(), exifcommon.IfdGpsInfoStandardIfdIdentity.UnindexedString())
+	if ifd.ifdIdentity.Equals(exifcommon.IfdGpsInfoStandardIfdIdentity) == false {
+		log.Panicf("GPS can only be read on GPS IFD: [%s]", ifd.ifdIdentity.UnindexedString())
 	}
 
 	if tags, found := ifd.entriesByTagId[TagGpsVersionId]; found == false {
@@ -1119,23 +1190,35 @@ func (ifd *Ifd) GpsInfo() (gi *GpsInfo, err error) {
 	altitudeRefTags, foundAltitudeRef := ifd.entriesByTagId[TagAltitudeRefId]
 
 	if foundAltitude == true && foundAltitudeRef == true {
+		altitudePhrase, err := altitudeTags[0].Format()
+		log.PanicIf(err)
+
+		ifdEnumerateLogger.Debugf(nil, "Altitude is [%s].", altitudePhrase)
+
 		altitudeValue, err := altitudeTags[0].Value()
 		log.PanicIf(err)
 
+		altitudeRefPhrase, err := altitudeRefTags[0].Format()
+		log.PanicIf(err)
+
+		ifdEnumerateLogger.Debugf(nil, "Altitude-reference is [%s].", altitudeRefPhrase)
+
 		altitudeRefValue, err := altitudeRefTags[0].Value()
 		log.PanicIf(err)
 
 		altitudeRaw := altitudeValue.([]exifcommon.Rational)
-		altitude := int(altitudeRaw[0].Numerator / altitudeRaw[0].Denominator)
+		if altitudeRaw[0].Denominator > 0 {
+			altitude := int(altitudeRaw[0].Numerator / altitudeRaw[0].Denominator)
 
-		if altitudeRefValue.([]byte)[0] == 1 {
-			altitude *= -1
+			if altitudeRefValue.([]byte)[0] == 1 {
+				altitude *= -1
+			}
+
+			gi.Altitude = altitude
 		}
-
-		gi.Altitude = altitude
 	}
 
-	// Parse time.
+	// Parse timestamp from separate date and time tags.
 
 	timestampTags, foundTimestamp := ifd.entriesByTagId[TagTimestampId]
 	datestampTags, foundDatestamp := ifd.entriesByTagId[TagDatestampId]

@@ -1144,7 +1227,13 @@ func (ifd *Ifd) GpsInfo() (gi *GpsInfo, err error) {
 		datestampValue, err := datestampTags[0].Value()
 		log.PanicIf(err)
 
-		dateParts := strings.Split(datestampValue.(string), ":")
+		datePhrase := datestampValue.(string)
+		ifdEnumerateLogger.Debugf(nil, "Date tag value is [%s].", datePhrase)
+
+		// Normalize the separators.
+		datePhrase = strings.ReplaceAll(datePhrase, "-", ":")
+
+		dateParts := strings.Split(datePhrase, ":")
 
 		year, err1 := strconv.ParseUint(dateParts[0], 10, 16)
 		month, err2 := strconv.ParseUint(dateParts[1], 10, 8)

@@ -1154,6 +1243,11 @@ func (ifd *Ifd) GpsInfo() (gi *GpsInfo, err error) {
 		timestampValue, err := timestampTags[0].Value()
 		log.PanicIf(err)
 
+		timePhrase, err := timestampTags[0].Format()
+		log.PanicIf(err)
+
+		ifdEnumerateLogger.Debugf(nil, "Time tag value is [%s].", timePhrase)
+
 		timestampRaw := timestampValue.([]exifcommon.Rational)
 
 		hour := int(timestampRaw[0].Numerator / timestampRaw[0].Denominator)
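Continuing the sketch given earlier after the `NextIfd()` hunk: the GPS changes in these hunks (the `Equals()` identity check, the zero-denominator guard for altitude, and the tolerant datestamp separators) are all internal to `Ifd.GpsInfo()`; callers just resolve the GPS child IFD and call it. Names outside this diff (for example the `GpsInfo.Timestamp` field) are assumed from the v3 API:

```
    // GpsInfo() returns an error unless it is called on the GPS IFD itself
    // (see the identity check above), so resolve that child IFD first.
    gpsIfd, err := index.RootIfd.ChildWithIfdPath(exifcommon.IfdGpsInfoStandardIfdIdentity)
    log.PanicIf(err)

    gi, err := gpsIfd.GpsInfo()
    log.PanicIf(err)

    // With this diff, Altitude stays at zero when the altitude rational has a
    // zero denominator, and datestamps using "-" separators parse cleanly.
    fmt.Printf("altitude=(%d) timestamp=[%s]\n", gi.Altitude, gi.Timestamp)
```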
@@ -11,7 +11,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestIfdTagEntry_RawBytes_RealData(t *testing.T) {

@@ -8,8 +8,8 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
-	"github.com/dsoprea/go-exif/v2/undefined"
+	"github.com/dsoprea/go-exif/v3/common"
+	"github.com/dsoprea/go-exif/v3/undefined"
 )
 
 var (

@@ -5,9 +5,9 @@ import (
 	"testing"
 
 	"github.com/dsoprea/go-logging"
-	"github.com/dsoprea/go-utility/filesystem"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestIfdTagEntry_RawBytes_Allocated(t *testing.T) {
v3/tags.go (74 changes)
@@ -2,11 +2,12 @@ package exif
 
 import (
 	"fmt"
+	"sync"
 
 	"github.com/dsoprea/go-logging"
 	"gopkg.in/yaml.v2"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 const (

@@ -177,6 +178,10 @@ func (it *IndexedTag) DoesSupportType(tagType exifcommon.TagTypePrimitive) bool
 type TagIndex struct {
 	tagsByIfd  map[string]map[uint16]*IndexedTag
 	tagsByIfdR map[string]map[string]*IndexedTag
+
+	mutex sync.Mutex
+
+	doUniversalSearch bool
 }
 
 // NewTagIndex returns a new TagIndex struct.

@@ -189,6 +194,16 @@ func NewTagIndex() *TagIndex {
 	return ti
 }
 
+// SetUniversalSearch enables a fallback to matching tags under *any* IFD.
+func (ti *TagIndex) SetUniversalSearch(flag bool) {
+	ti.doUniversalSearch = flag
+}
+
+// UniversalSearch enables a fallback to matching tags under *any* IFD.
+func (ti *TagIndex) UniversalSearch() bool {
+	return ti.doUniversalSearch
+}
+
 // Add registers a new tag to be recognized during the parse.
 func (ti *TagIndex) Add(it *IndexedTag) (err error) {
 	defer func() {

@@ -197,6 +212,9 @@ func (ti *TagIndex) Add(it *IndexedTag) (err error) {
 		}
 	}()
 
+	ti.mutex.Lock()
+	defer ti.mutex.Unlock()
+
 	// Store by ID.
 
 	family, found := ti.tagsByIfd[it.IfdPath]

@@ -228,9 +246,7 @@ func (ti *TagIndex) Add(it *IndexedTag) (err error) {
 	return nil
 }
 
-// Get returns information about the non-IFD tag given a tag ID. `ifdPath` must
-// not be fully-qualified.
-func (ti *TagIndex) Get(ii *exifcommon.IfdIdentity, id uint16) (it *IndexedTag, err error) {
+func (ti *TagIndex) getOne(ifdPath string, id uint16) (it *IndexedTag, err error) {
 	defer func() {
 		if state := recover(); state != nil {
 			err = log.Wrap(state.(error))

@@ -242,7 +258,8 @@ func (ti *TagIndex) Get(ii *exifcommon.IfdIdentity, id uint16) (it *IndexedTag,
 		log.PanicIf(err)
 	}
 
-	ifdPath := ii.UnindexedString()
+	ti.mutex.Lock()
+	defer ti.mutex.Unlock()
 
 	family, found := ti.tagsByIfd[ifdPath]
 	if found == false {
@@ -257,6 +274,53 @@ func (ti *TagIndex) Get(ii *exifcommon.IfdIdentity, id uint16) (it *IndexedTag,
 	return it, nil
 }
 
+// Get returns information about the non-IFD tag given a tag ID. `ifdPath` must
+// not be fully-qualified.
+func (ti *TagIndex) Get(ii *exifcommon.IfdIdentity, id uint16) (it *IndexedTag, err error) {
+	defer func() {
+		if state := recover(); state != nil {
+			err = log.Wrap(state.(error))
+		}
+	}()
+
+	ifdPath := ii.UnindexedString()
+
+	it, err = ti.getOne(ifdPath, id)
+	if err == nil {
+		return it, nil
+	} else if err != ErrTagNotFound {
+		log.Panic(err)
+	}
+
+	if ti.doUniversalSearch == false {
+		return nil, ErrTagNotFound
+	}
+
+	// We've been told to fallback to look for the tag in other IFDs.
+
+	skipIfdPath := ii.UnindexedString()
+
+	for currentIfdPath, _ := range ti.tagsByIfd {
+		if currentIfdPath == skipIfdPath {
+			// Skip the primary IFD, which has already been checked.
+			continue
+		}
+
+		it, err = ti.getOne(currentIfdPath, id)
+		if err == nil {
+			tagsLogger.Warningf(nil,
+				"Found tag (0x%02x) in the wrong IFD: [%s] != [%s]",
+				id, currentIfdPath, ifdPath)
+
+			return it, nil
+		} else if err != ErrTagNotFound {
+			log.Panic(err)
+		}
+	}
+
+	return nil, ErrTagNotFound
+}
+
 var (
 	// tagGuessDefaultIfdIdentities describes which IFDs we'll look for a given
 	// tag-ID in, if it's not found where it's supposed to be. We suppose that
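A small self-contained sketch of the new universal-search behaviour. It is not part of the diff and assumes the exported `LoadStandardTags()` helper; 0x9004 (DateTimeDigitized) is registered under IFD/Exif in the tag data, so asking the GPS IFD for it normally fails:

```
package main

import (
    "fmt"

    "github.com/dsoprea/go-logging"

    "github.com/dsoprea/go-exif/v3"
    "github.com/dsoprea/go-exif/v3/common"
)

func main() {
    ti := exif.NewTagIndex()

    // Load the standard catalog explicitly so the sketch does not depend on
    // any lazy loading inside Get().
    err := exif.LoadStandardTags(ti)
    log.PanicIf(err)

    // Strict behaviour: the GPS IFD does not define 0x9004.
    _, err = ti.Get(exifcommon.IfdGpsInfoStandardIfdIdentity, 0x9004)
    fmt.Println(err)

    // With the fallback enabled, Get() scans the other IFDs' tables and
    // returns the IFD/Exif definition (logging a mismatch warning).
    ti.SetUniversalSearch(true)

    it, err := ti.Get(exifcommon.IfdGpsInfoStandardIfdIdentity, 0x9004)
    log.PanicIf(err)

    fmt.Println(it.Name)
}
```

Leaving the flag off keeps the old strict behaviour, which is why it defaults to off.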
@@ -59,6 +59,15 @@ IFD/Exif:
 - id: 0x9004
   name: DateTimeDigitized
   type_name: ASCII
+- id: 0x9010
+  name: OffsetTime
+  type_name: ASCII
+- id: 0x9011
+  name: OffsetTimeOriginal
+  type_name: ASCII
+- id: 0x9012
+  name: OffsetTimeDigitized
+  type_name: ASCII
 - id: 0x9101
   name: ComponentsConfiguration
   type_name: UNDEFINED
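Once registered, the new `OffsetTime*` tags come back through the existing flat API like any other ASCII tag. A short sketch, assuming `rawExif` already holds an EXIF blob (e.g. from `SearchFileAndExtractExif()`) and the imports from the earlier sketches:

```
    exifTags, _, err := exif.GetFlatExifData(rawExif, nil)
    log.PanicIf(err)

    for _, et := range exifTags {
        // 0x9010, 0x9011, and 0x9012 are OffsetTime, OffsetTimeOriginal, and
        // OffsetTimeDigitized from the additions above.
        if et.TagId == 0x9010 || et.TagId == 0x9011 || et.TagId == 0x9012 {
            fmt.Printf("%+v\n", et)
        }
    }
```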
@@ -909,6 +918,36 @@ IFD:
 - id: 0xc74e
   name: OpcodeList3
   type_name: UNDEFINED
+# This tag may be used to specify the size of raster pixel spacing in the
+# model space units, when the raster space can be embedded in the model space
+# coordinate system without rotation, and consists of the following 3 values:
+#   ModelPixelScaleTag = (ScaleX, ScaleY, ScaleZ)
+# where ScaleX and ScaleY give the horizontal and vertical spacing of raster
+# pixels. The ScaleZ is primarily used to map the pixel value of a digital
+# elevation model into the correct Z-scale, and so for most other purposes
+# this value should be zero (since most model spaces are 2-D, with Z=0).
+# Source: http://geotiff.maptools.org/spec/geotiff2.6.html#2.6.1
+- id: 0x830e
+  name: ModelPixelScaleTag
+  type_name: DOUBLE
+# This tag stores raster->model tiepoint pairs in the order
+#   ModelTiepointTag = (...,I,J,K, X,Y,Z...),
+# where (I,J,K) is the point at location (I,J) in raster space with
+# pixel-value K, and (X,Y,Z) is a vector in model space. In most cases the
+# model space is only two-dimensional, in which case both K and Z should be
+# set to zero; this third dimension is provided in anticipation of future
+# support for 3D digital elevation models and vertical coordinate systems.
+# Source: http://geotiff.maptools.org/spec/geotiff2.6.html#2.6.1
+- id: 0x8482
+  name: ModelTiepointTag
+  type_name: DOUBLE
+# This tag may be used to specify the transformation matrix between the
+# raster space (and its dependent pixel-value space) and the (possibly 3D)
+# model space.
+# Source: http://geotiff.maptools.org/spec/geotiff2.6.html#2.6.1
+- id: 0x85d8
+  name: ModelTransformationTag
+  type_name: DOUBLE
 IFD/Exif/Iop:
 - id: 0x0001
   name: InteroperabilityIndex
@@ -0,0 +1,33 @@
+package exif
+
+import (
+	"testing"
+
+	"github.com/dsoprea/go-logging"
+)
+
+func TestGeotiffTags(t *testing.T) {
+	testImageFilepath := getTestGeotiffFilepath()
+
+	// Returns a slice starting with the EXIF data and going to the end of the
+	// image.
+	rawExif, err := SearchFileAndExtractExif(testImageFilepath)
+	log.PanicIf(err)
+
+	exifTags, _, err := GetFlatExifData(rawExif, nil)
+	log.PanicIf(err)
+
+	exifTagsIDMap := make(map[uint16]int)
+
+	for _, e := range exifTags {
+		exifTagsIDMap[e.TagId] = 1
+	}
+
+	if exifTagsIDMap[0x830e] == 0 {
+		t.Fatal("Missing ModelPixelScaleTag.")
+	}
+
+	if exifTagsIDMap[0x8482] == 0 {
+		t.Fatal("Missing ModelTiepointTag.")
+	}
+}
@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestIndexedTag_String(t *testing.T) {

@@ -9,7 +9,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 var (

@@ -180,3 +180,9 @@ func getTestGpsImageFilepath() string {
 	testGpsImageFilepath := path.Join(assetsPath, "gps.jpg")
 	return testGpsImageFilepath
 }
+
+func getTestGeotiffFilepath() string {
+	assetsPath := exifcommon.GetTestAssetsPath()
+	testGeotiffFilepath := path.Join(assetsPath, "geotiff_example.tif")
+	return testGeotiffFilepath
+}
@@ -5,7 +5,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 // Encode encodes the given encodeable undefined value to bytes.

@@ -8,7 +8,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type Tag8828Oecf struct {

@@ -6,9 +6,9 @@ import (
 	"testing"
 
 	"github.com/dsoprea/go-logging"
-	"github.com/dsoprea/go-utility/filesystem"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTag8828Oecf_String(t *testing.T) {

@@ -5,7 +5,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type Tag9000ExifVersion struct {

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTag9000ExifVersion_String(t *testing.T) {

@@ -8,7 +8,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 const (

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTagExif9101ComponentsConfiguration_String(t *testing.T) {

@@ -9,7 +9,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type Tag927CMakerNote struct {

@@ -6,9 +6,9 @@ import (
 	"testing"
 
 	"github.com/dsoprea/go-logging"
-	"github.com/dsoprea/go-utility/filesystem"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTag927CMakerNote_String(t *testing.T) {

@@ -8,7 +8,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 var (

@@ -6,9 +6,9 @@ import (
 	"testing"
 
 	"github.com/dsoprea/go-logging"
-	"github.com/dsoprea/go-utility/filesystem"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTag9286UserComment_String(t *testing.T) {

@@ -5,7 +5,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type TagA000FlashpixVersion struct {

@@ -6,9 +6,9 @@ import (
 	"testing"
 
 	"github.com/dsoprea/go-logging"
-	"github.com/dsoprea/go-utility/filesystem"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTagA000FlashpixVersion_String(t *testing.T) {

@@ -8,7 +8,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type TagA20CSpatialFrequencyResponse struct {

@@ -6,9 +6,9 @@ import (
 	"testing"
 
 	"github.com/dsoprea/go-logging"
-	"github.com/dsoprea/go-utility/filesystem"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTagA20CSpatialFrequencyResponse_String(t *testing.T) {

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type TagExifA300FileSource uint32

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTagExifA300FileSource_String(t *testing.T) {

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type TagExifA301SceneType uint32

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTagExifA301SceneType_String(t *testing.T) {

@@ -8,7 +8,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type TagA302CfaPattern struct {

@@ -6,9 +6,9 @@ import (
 	"testing"
 
 	"github.com/dsoprea/go-logging"
-	"github.com/dsoprea/go-utility/filesystem"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTagA302CfaPattern_String(t *testing.T) {

@@ -5,7 +5,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type Tag0002InteropVersion struct {

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTag0002InteropVersion_String(t *testing.T) {

@@ -5,7 +5,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type Tag001BGPSProcessingMethod struct {

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTag001BGPSProcessingMethod_String(t *testing.T) {

@@ -5,7 +5,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 type Tag001CGPSAreaInformation struct {

@@ -7,7 +7,7 @@ import (
 
 	"github.com/dsoprea/go-logging"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 func TestTag001CGPSAreaInformation_String(t *testing.T) {

@@ -5,7 +5,7 @@ import (
 
 	"encoding/binary"
 
-	"github.com/dsoprea/go-exif/v2/common"
+	"github.com/dsoprea/go-exif/v3/common"
 )
 
 const (

@@ -2,12 +2,14 @@ package exif
 
 import (
 	"fmt"
+	"io"
 	"math"
 
 	"github.com/dsoprea/go-logging"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 
-	"github.com/dsoprea/go-exif/v2/common"
-	"github.com/dsoprea/go-exif/v2/undefined"
+	"github.com/dsoprea/go-exif/v3/common"
+	"github.com/dsoprea/go-exif/v3/undefined"
 )
 
 var (
@@ -76,7 +78,69 @@ func GetFlatExifData(exifData []byte, so *ScanOptions) (exifTags []ExifTag, med
 		}
 	}()
 
-	eh, err := ParseExifHeader(exifData)
+	sb := rifs.NewSeekableBufferWithBytes(exifData)
+
+	exifTags, med, err = getFlatExifDataUniversalSearchWithReadSeeker(sb, so, false)
 	log.PanicIf(err)
 
+	return exifTags, med, nil
+}
+
+// RELEASE(dustin): GetFlatExifDataUniversalSearch is a kludge to allow univeral tag searching in a backwards-compatible manner. For the next release, undo this and simply add the flag to GetFlatExifData.
+
+// GetFlatExifDataUniversalSearch returns a simple, flat representation of all
+// tags.
+func GetFlatExifDataUniversalSearch(exifData []byte, so *ScanOptions, doUniversalSearch bool) (exifTags []ExifTag, med *MiscellaneousExifData, err error) {
+	defer func() {
+		if state := recover(); state != nil {
+			err = log.Wrap(state.(error))
+		}
+	}()
+
+	sb := rifs.NewSeekableBufferWithBytes(exifData)
+
+	exifTags, med, err = getFlatExifDataUniversalSearchWithReadSeeker(sb, so, doUniversalSearch)
+	log.PanicIf(err)
+
+	return exifTags, med, nil
+}
+
+// RELEASE(dustin): GetFlatExifDataUniversalSearchWithReadSeeker is a kludge to allow using a ReadSeeker in a backwards-compatible manner. For the next release, drop this and refactor GetFlatExifDataUniversalSearch to take a ReadSeeker.
+
+// GetFlatExifDataUniversalSearchWithReadSeeker returns a simple, flat
+// representation of all tags given a ReadSeeker.
+func GetFlatExifDataUniversalSearchWithReadSeeker(rs io.ReadSeeker, so *ScanOptions, doUniversalSearch bool) (exifTags []ExifTag, med *MiscellaneousExifData, err error) {
+	defer func() {
+		if state := recover(); state != nil {
+			err = log.Wrap(state.(error))
+		}
+	}()
+
+	exifTags, med, err = getFlatExifDataUniversalSearchWithReadSeeker(rs, so, doUniversalSearch)
+	log.PanicIf(err)
+
+	return exifTags, med, nil
+}
+
+// getFlatExifDataUniversalSearchWithReadSeeker returns a simple, flat
+// representation of all tags given a ReadSeeker.
+func getFlatExifDataUniversalSearchWithReadSeeker(rs io.ReadSeeker, so *ScanOptions, doUniversalSearch bool) (exifTags []ExifTag, med *MiscellaneousExifData, err error) {
+	defer func() {
+		if state := recover(); state != nil {
+			err = log.Wrap(state.(error))
+		}
+	}()
+
+	headerData := make([]byte, ExifSignatureLength)
+	if _, err = io.ReadFull(rs, headerData); err != nil {
+		if err == io.EOF {
+			return nil, nil, err
+		}
+
+		log.Panic(err)
+	}
+
+	eh, err := ParseExifHeader(headerData)
+	log.PanicIf(err)
 
 	im, err := exifcommon.NewIfdMappingWithStandard()

@@ -84,7 +148,11 @@ func GetFlatExifData(exifData []byte, so *ScanOptions) (exifTags []ExifTag, med
 
 	ti := NewTagIndex()
 
-	ebs := NewExifReadSeekerWithBytes(exifData)
+	if doUniversalSearch == true {
+		ti.SetUniversalSearch(true)
+	}
+
+	ebs := NewExifReadSeeker(rs)
 	ie := NewIfdEnumerate(im, ti, ebs, eh.ByteOrder)
 
 	exifTags = make([]ExifTag, 0)
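A usage sketch for the new entry points, not part of the diff. It assumes `rawExif` holds an EXIF blob as in the earlier sketches and that `rifs` is the `github.com/dsoprea/go-utility/v2/filesystem` package; passing `true` as the last argument tolerates tags stored under the wrong IFD.

```
    // Byte-slice form, with the universal-search fallback switched on.
    exifTags, _, err := exif.GetFlatExifDataUniversalSearch(rawExif, nil, true)
    log.PanicIf(err)

    fmt.Printf("(%d) tags\n", len(exifTags))

    // ReadSeeker form; any io.ReadSeeker positioned at the start of the EXIF
    // blob works, e.g. a seekable buffer over the same bytes.
    sb := rifs.NewSeekableBufferWithBytes(rawExif)

    exifTags, _, err = exif.GetFlatExifDataUniversalSearchWithReadSeeker(sb, nil, true)
    log.PanicIf(err)

    fmt.Printf("(%d) tags\n", len(exifTags))
```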
@@ -107,6 +175,12 @@ func GetFlatExifData(exifData []byte, so *ScanOptions) (exifTags []ExifTag, med
 		if err != nil {
 			if err == exifcommon.ErrUnhandledUndefinedTypedTag {
 				value = exifundefined.UnparseableUnknownTagValuePlaceholder
+			} else if log.Is(err, exifcommon.ErrParseFail) == true {
+				utilityLogger.Warningf(nil,
+					"Could not parse value for tag [%s] (%04x) [%s].",
+					ite.IfdPath(), ite.TagId(), ite.TagName())
+
+				return nil
 			} else {
 				log.Panic(err)
 			}
@@ -1,7 +1,11 @@
 package exif
 
 import (
+	"os"
 	"testing"
 
 	"github.com/dsoprea/go-logging/v2"
+	"github.com/dsoprea/go-utility/v2/filesystem"
 )
 
 func TestGpsDegreesEquals_Equals(t *testing.T) {

@@ -51,3 +55,46 @@ func TestGpsDegreesEquals_NotEqual_Position(t *testing.T) {
 		t.Fatalf("GpsDegrees structs were equal but not supposed to be.")
 	}
 }
+
+func TestGetFlatExifData(t *testing.T) {
+	testExifData := getTestExifData()
+
+	exifTags, _, err := GetFlatExifData(testExifData, nil)
+	log.PanicIf(err)
+
+	if len(exifTags) != 59 {
+		t.Fatalf("Tag count not correct: (%d)", len(exifTags))
+	}
+}
+
+func TestGetFlatExifDataUniversalSearch(t *testing.T) {
+	testExifData := getTestExifData()
+
+	exifTags, _, err := GetFlatExifDataUniversalSearch(testExifData, nil, false)
+	log.PanicIf(err)
+
+	if len(exifTags) != 59 {
+		t.Fatalf("Tag count not correct: (%d)", len(exifTags))
+	}
+}
+
+func TestGetFlatExifDataUniversalSearchWithReadSeeker(t *testing.T) {
+	testImageFilepath := getTestImageFilepath()
+
+	f, err := os.Open(testImageFilepath)
+	log.PanicIf(err)
+
+	defer f.Close()
+
+	rawExif, err := SearchAndExtractExifWithReader(f)
+	log.PanicIf(err)
+
+	sb := rifs.NewSeekableBufferWithBytes(rawExif)
+
+	exifTags, _, err := GetFlatExifDataUniversalSearchWithReadSeeker(sb, nil, false)
+	log.PanicIf(err)
+
+	if len(exifTags) != 59 {
+		t.Fatalf("Tag count not correct: (%d)", len(exifTags))
+	}
+}