mirror of https://github.com/dsoprea/go-exif.git
Revert "Report-card fixes"
This reverts commit eb5bd13c98
.
Introduced some new issues, here. This is being debugged on a separate
branch.
Fixes #41
dustin/master
parent
82fe0e20b9
commit
591504aef0
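The diff below restores the pre-cleanup style across the v2 code: explicit element types in composite literals, `else` branches after returns, the redundant range variable, and the original comment wording. As a minimal sketch of the main pattern being reverted (the IfdTagIdAndIndex struct here is trimmed to the fields visible in the test hunks below and is only illustrative, not the library's actual definition), the two literal spellings are equivalent:

package main

import (
    "fmt"
    "reflect"
)

// IfdTagIdAndIndex is reproduced here only for illustration; its fields are
// inferred from the test diff below and may not match the real struct exactly.
type IfdTagIdAndIndex struct {
    Name  string
    TagId uint16
    Index int
}

func main() {
    // Simplified form (element type elided), as written by the report-card fixes.
    simplified := []IfdTagIdAndIndex{
        {Name: "IFD", TagId: 0, Index: 0},
        {Name: "Exif", TagId: 0x8769, Index: 0},
    }

    // Explicit form, as restored by this revert.
    explicit := []IfdTagIdAndIndex{
        IfdTagIdAndIndex{Name: "IFD", TagId: 0, Index: 0},
        IfdTagIdAndIndex{Name: "Exif", TagId: 0x8769, Index: 0},
    }

    // The two spellings produce identical values; this part of the revert is purely stylistic.
    fmt.Println(reflect.DeepEqual(simplified, explicit)) // true
}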
@@ -155,9 +155,9 @@ func TestIfdMapping_ResolvePath__Regular(t *testing.T) {
    log.PanicIf(err)

    expected := []IfdTagIdAndIndex{
        {Name: "IFD", TagId: 0, Index: 0},
        {Name: "Exif", TagId: 0x8769, Index: 0},
        {Name: "Iop", TagId: 0xa005, Index: 0},
        IfdTagIdAndIndex{Name: "IFD", TagId: 0, Index: 0},
        IfdTagIdAndIndex{Name: "Exif", TagId: 0x8769, Index: 0},
        IfdTagIdAndIndex{Name: "Iop", TagId: 0xa005, Index: 0},
    }

    if reflect.DeepEqual(lineage, expected) != true {

@@ -175,9 +175,9 @@ func TestIfdMapping_ResolvePath__WithIndices(t *testing.T) {
    log.PanicIf(err)

    expected := []IfdTagIdAndIndex{
        {Name: "IFD", TagId: 0, Index: 0},
        {Name: "Exif", TagId: 0x8769, Index: 1},
        {Name: "Iop", TagId: 0xa005, Index: 0},
        IfdTagIdAndIndex{Name: "IFD", TagId: 0, Index: 0},
        IfdTagIdAndIndex{Name: "Exif", TagId: 0x8769, Index: 1},
        IfdTagIdAndIndex{Name: "Iop", TagId: 0xa005, Index: 0},
    }

    if reflect.DeepEqual(lineage, expected) != true {

@@ -201,9 +201,9 @@ func TestIfdMapping_ResolvePath__Miss(t *testing.T) {

func TestIfdMapping_FqPathPhraseFromLineage(t *testing.T) {
    lineage := []IfdTagIdAndIndex{
        {Name: "IFD", Index: 0},
        {Name: "Exif", Index: 1},
        {Name: "Iop", Index: 0},
        IfdTagIdAndIndex{Name: "IFD", Index: 0},
        IfdTagIdAndIndex{Name: "Exif", Index: 1},
        IfdTagIdAndIndex{Name: "Iop", Index: 0},
    }

    im := NewIfdMapping()

@@ -216,9 +216,9 @@ func TestIfdMapping_FqPathPhraseFromLineage(t *testing.T) {

func TestIfdMapping_PathPhraseFromLineage(t *testing.T) {
    lineage := []IfdTagIdAndIndex{
        {Name: "IFD", Index: 0},
        {Name: "Exif", Index: 1},
        {Name: "Iop", Index: 0},
        IfdTagIdAndIndex{Name: "IFD", Index: 0},
        IfdTagIdAndIndex{Name: "Exif", Index: 1},
        IfdTagIdAndIndex{Name: "Iop", Index: 0},
    }

    im := NewIfdMapping()

@@ -234,7 +234,6 @@ func TestIfdMapping_NewIfdMappingWithStandard(t *testing.T) {
    imWithout := NewIfdMapping()

    err := LoadStandardIfds(imWithout)
    log.PanicIf(err)

    outputWith, err := imWith.DumpLineages()
    log.PanicIf(err)

@@ -16,7 +16,6 @@ var (
type Parser struct {
}

// ParseBytesknows how to parse a byte-type value.
func (p *Parser) ParseBytes(data []byte, unitCount uint32) (value []uint8, err error) {
    defer func() {
        if state := recover(); state != nil {

@@ -59,12 +58,12 @@ func (p *Parser) ParseAscii(data []byte, unitCount uint32) (value string, err er
    parserLogger.Warningf(nil, "ascii not terminated with nul as expected: [%v]", s)

        return s, nil
    } else {
        // Auto-strip the NUL from the end. It serves no purpose outside of
        // encoding semantics.

        return string(data[:count-1]), nil
    }

    // Auto-strip the NUL from the end. It serves no purpose outside of
    // encoding semantics.

    return string(data[:count-1]), nil
}

// ParseAsciiNoNul returns a string without any consideration for a trailing NUL

@@ -182,7 +182,7 @@ func TestParser_ParseRationals__Single(t *testing.T) {
    log.PanicIf(err)

    expected := []Rational{
        {Numerator: 1, Denominator: 2},
        Rational{Numerator: 1, Denominator: 2},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -198,7 +198,7 @@ func TestParser_ParseRationals__Single(t *testing.T) {
    log.PanicIf(err)

    expected = []Rational{
        {Numerator: 1, Denominator: 2},
        Rational{Numerator: 1, Denominator: 2},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -218,8 +218,8 @@ func TestParser_ParseRationals__Multiple(t *testing.T) {
    log.PanicIf(err)

    expected := []Rational{
        {Numerator: 1, Denominator: 2},
        {Numerator: 3, Denominator: 4},
        Rational{Numerator: 1, Denominator: 2},
        Rational{Numerator: 3, Denominator: 4},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -274,7 +274,7 @@ func TestParser_ParseSignedRationals__Single(t *testing.T) {
    log.PanicIf(err)

    expected := []SignedRational{
        {Numerator: 1, Denominator: 2},
        SignedRational{Numerator: 1, Denominator: 2},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -290,7 +290,7 @@ func TestParser_ParseSignedRationals__Single(t *testing.T) {
    log.PanicIf(err)

    expected = []SignedRational{
        {Numerator: 1, Denominator: 2},
        SignedRational{Numerator: 1, Denominator: 2},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -310,8 +310,8 @@ func TestParser_ParseSignedRationals__Multiple(t *testing.T) {
    log.PanicIf(err)

    expected := []SignedRational{
        {Numerator: 1, Denominator: 2},
        {Numerator: 3, Denominator: 4},
        SignedRational{Numerator: 1, Denominator: 2},
        SignedRational{Numerator: 3, Denominator: 4},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -1,151 +1,151 @@
package exifcommon

import (
    "errors"
    "fmt"
    "reflect"
    "strconv"
    "strings"
    "errors"
    "fmt"
    "reflect"
    "strconv"
    "strings"

    "encoding/binary"
    "encoding/binary"

    "github.com/dsoprea/go-logging"
    "github.com/dsoprea/go-logging"
)

var (
    typeLogger = log.NewLogger("exif.type")
    typeLogger = log.NewLogger("exif.type")
)

var (
    // ErrNotEnoughData is used when there isn't enough data to accommodate what
    // we're trying to parse (sizeof(type) * unit_count).
    ErrNotEnoughData = errors.New("not enough data for type")
    // ErrNotEnoughData is used when there isn't enough data to accomodate what
    // we're trying to parse (sizeof(type) * unit_count).
    ErrNotEnoughData = errors.New("not enough data for type")

    // ErrWrongType is used when we try to parse anything other than the
    // current type.
    ErrWrongType = errors.New("wrong type, can not parse")
    // ErrWrongType is used when we try to parse anything other than the
    // current type.
    ErrWrongType = errors.New("wrong type, can not parse")

    // ErrUnhandledUndefinedTypedTag is used when we try to parse a tag that's
    // recorded as an "unknown" type but not a documented tag (therefore
    // leaving us not knowning how to read it).
    ErrUnhandledUndefinedTypedTag = errors.New("not a standard unknown-typed tag")
    // ErrUnhandledUndefinedTypedTag is used when we try to parse a tag that's
    // recorded as an "unknown" type but not a documented tag (therefore
    // leaving us not knowning how to read it).
    ErrUnhandledUndefinedTypedTag = errors.New("not a standard unknown-typed tag")
)

// TagTypePrimitive is a type-alias that let's us easily lookup type properties.
type TagTypePrimitive uint16

const (
    // TypeByte describes an encoded list of bytes.
    TypeByte TagTypePrimitive = 1
    // TypeByte describes an encoded list of bytes.
    TypeByte TagTypePrimitive = 1

    // TypeAscii describes an encoded list of characters that is terminated
    // with a NUL in its encoded form.
    TypeAscii TagTypePrimitive = 2
    // TypeAscii describes an encoded list of characters that is terminated
    // with a NUL in its encoded form.
    TypeAscii TagTypePrimitive = 2

    // TypeShort describes an encoded list of shorts.
    TypeShort TagTypePrimitive = 3
    // TypeShort describes an encoded list of shorts.
    TypeShort TagTypePrimitive = 3

    // TypeLong describes an encoded list of longs.
    TypeLong TagTypePrimitive = 4
    // TypeLong describes an encoded list of longs.
    TypeLong TagTypePrimitive = 4

    // TypeRational describes an encoded list of rationals.
    TypeRational TagTypePrimitive = 5
    // TypeRational describes an encoded list of rationals.
    TypeRational TagTypePrimitive = 5

    // TypeUndefined describes an encoded value that has a complex/non-clearcut
    // interpretation.
    TypeUndefined TagTypePrimitive = 7
    // TypeUndefined describes an encoded value that has a complex/non-clearcut
    // interpretation.
    TypeUndefined TagTypePrimitive = 7

    // We've seen type-8, but have no documentation on it.
    // We've seen type-8, but have no documentation on it.

    // TypeSignedLong describes an encoded list of signed longs.
    TypeSignedLong TagTypePrimitive = 9
    // TypeSignedLong describes an encoded list of signed longs.
    TypeSignedLong TagTypePrimitive = 9

    // TypeSignedRational describes an encoded list of signed rationals.
    TypeSignedRational TagTypePrimitive = 10
    // TypeSignedRational describes an encoded list of signed rationals.
    TypeSignedRational TagTypePrimitive = 10

    // TypeAsciiNoNul is just a pseudo-type, for our own purposes.
    TypeAsciiNoNul TagTypePrimitive = 0xf0
    // TypeAsciiNoNul is just a pseudo-type, for our own purposes.
    TypeAsciiNoNul TagTypePrimitive = 0xf0
)

// String returns the name of the type
func (typeType TagTypePrimitive) String() string {
    return TypeNames[typeType]
    return TypeNames[typeType]
}

// Size returns the size of one atomic unit of the type.
func (tagType TagTypePrimitive) Size() int {
    if tagType == TypeByte {
        return 1
    } else if tagType == TypeAscii || tagType == TypeAsciiNoNul {
        return 1
    } else if tagType == TypeShort {
        return 2
    } else if tagType == TypeLong {
        return 4
    } else if tagType == TypeRational {
        return 8
    } else if tagType == TypeSignedLong {
        return 4
    } else if tagType == TypeSignedRational {
        return 8
    } else {
        log.Panicf("can not determine tag-value size for type (%d): [%s]", tagType, TypeNames[tagType])
    if tagType == TypeByte {
        return 1
    } else if tagType == TypeAscii || tagType == TypeAsciiNoNul {
        return 1
    } else if tagType == TypeShort {
        return 2
    } else if tagType == TypeLong {
        return 4
    } else if tagType == TypeRational {
        return 8
    } else if tagType == TypeSignedLong {
        return 4
    } else if tagType == TypeSignedRational {
        return 8
    } else {
        log.Panicf("can not determine tag-value size for type (%d): [%s]", tagType, TypeNames[tagType])

        // Never called.
        return 0
    }
        // Never called.
        return 0
    }
}

// IsValid returns true if tagType is a valid type.
func (tagType TagTypePrimitive) IsValid() bool {

    // TODO(dustin): Add test
    // TODO(dustin): Add test

    return tagType == TypeByte ||
        tagType == TypeAscii ||
        tagType == TypeAsciiNoNul ||
        tagType == TypeShort ||
        tagType == TypeLong ||
        tagType == TypeRational ||
        tagType == TypeSignedLong ||
        tagType == TypeSignedRational ||
        tagType == TypeUndefined
    return tagType == TypeByte ||
        tagType == TypeAscii ||
        tagType == TypeAsciiNoNul ||
        tagType == TypeShort ||
        tagType == TypeLong ||
        tagType == TypeRational ||
        tagType == TypeSignedLong ||
        tagType == TypeSignedRational ||
        tagType == TypeUndefined
}

var (
    // TODO(dustin): Rename TypeNames() to typeNames() and add getter.
    TypeNames = map[TagTypePrimitive]string{
        TypeByte: "BYTE",
        TypeAscii: "ASCII",
        TypeShort: "SHORT",
        TypeLong: "LONG",
        TypeRational: "RATIONAL",
        TypeUndefined: "UNDEFINED",
        TypeSignedLong: "SLONG",
        TypeSignedRational: "SRATIONAL",
    // TODO(dustin): Rename TypeNames() to typeNames() and add getter.
    TypeNames = map[TagTypePrimitive]string{
        TypeByte: "BYTE",
        TypeAscii: "ASCII",
        TypeShort: "SHORT",
        TypeLong: "LONG",
        TypeRational: "RATIONAL",
        TypeUndefined: "UNDEFINED",
        TypeSignedLong: "SLONG",
        TypeSignedRational: "SRATIONAL",

        TypeAsciiNoNul: "_ASCII_NO_NUL",
    }
        TypeAsciiNoNul: "_ASCII_NO_NUL",
    }

    typeNamesR = map[string]TagTypePrimitive{}
    typeNamesR = map[string]TagTypePrimitive{}
)

// Rational describes an unsigned rational value.
type Rational struct {
    // Numerator is the numerator of the rational value.
    Numerator uint32
    // Numerator is the numerator of the rational value.
    Numerator uint32

    // Denominator is the numerator of the rational value.
    Denominator uint32
    // Denominator is the numerator of the rational value.
    Denominator uint32
}

// SignedRational describes a signed rational value.
type SignedRational struct {
    // Numerator is the numerator of the rational value.
    Numerator int32
    // Numerator is the numerator of the rational value.
    Numerator int32

    // Denominator is the numerator of the rational value.
    Denominator int32
    // Denominator is the numerator of the rational value.
    Denominator int32
}

// Format returns a stringified value for the given encoding. Automatically

@@ -154,202 +154,202 @@ type SignedRational struct {
// way of the String() method that they all require. We can't be more specific
// because we're a base package and we can't refer to it.
func FormatFromType(value interface{}, justFirst bool) (phrase string, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    // TODO(dustin): !! Add test
    // TODO(dustin): !! Add test

    switch t := value.(type) {
    case []byte:
        return DumpBytesToString(t), nil
    case string:
        return t, nil
    case []uint16:
        if len(t) == 0 {
            return "", nil
        }
    switch t := value.(type) {
    case []byte:
        return DumpBytesToString(t), nil
    case string:
        return t, nil
    case []uint16:
        if len(t) == 0 {
            return "", nil
        }

        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }
        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }

            return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
        }
            return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
        }

        return fmt.Sprintf("%v", t), nil
    case []uint32:
        if len(t) == 0 {
            return "", nil
        }
        return fmt.Sprintf("%v", t), nil
    case []uint32:
        if len(t) == 0 {
            return "", nil
        }

        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }
        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }

            return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
        }
            return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
        }

        return fmt.Sprintf("%v", t), nil
    case []Rational:
        if len(t) == 0 {
            return "", nil
        }
        return fmt.Sprintf("%v", t), nil
    case []Rational:
        if len(t) == 0 {
            return "", nil
        }

        parts := make([]string, len(t))
        for i, r := range t {
            parts[i] = fmt.Sprintf("%d/%d", r.Numerator, r.Denominator)
        parts := make([]string, len(t))
        for i, r := range t {
            parts[i] = fmt.Sprintf("%d/%d", r.Numerator, r.Denominator)

            if justFirst == true {
                break
            }
        }
            if justFirst == true {
                break
            }
        }

        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }
        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }

            return fmt.Sprintf("%v%s", parts[0], valueSuffix), nil
        }
            return fmt.Sprintf("%v%s", parts[0], valueSuffix), nil
        }

        return fmt.Sprintf("%v", parts), nil
    case []int32:
        if len(t) == 0 {
            return "", nil
        }
        return fmt.Sprintf("%v", parts), nil
    case []int32:
        if len(t) == 0 {
            return "", nil
        }

        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }
        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }

            return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
        }
            return fmt.Sprintf("%v%s", t[0], valueSuffix), nil
        }

        return fmt.Sprintf("%v", t), nil
    case []SignedRational:
        if len(t) == 0 {
            return "", nil
        }
        return fmt.Sprintf("%v", t), nil
    case []SignedRational:
        if len(t) == 0 {
            return "", nil
        }

        parts := make([]string, len(t))
        for i, r := range t {
            parts[i] = fmt.Sprintf("%d/%d", r.Numerator, r.Denominator)
        parts := make([]string, len(t))
        for i, r := range t {
            parts[i] = fmt.Sprintf("%d/%d", r.Numerator, r.Denominator)

            if justFirst == true {
                break
            }
        }
            if justFirst == true {
                break
            }
        }

        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }
        if justFirst == true {
            var valueSuffix string
            if len(t) > 1 {
                valueSuffix = "..."
            }

            return fmt.Sprintf("%v%s", parts[0], valueSuffix), nil
        }
            return fmt.Sprintf("%v%s", parts[0], valueSuffix), nil
        }

        return fmt.Sprintf("%v", parts), nil
    case fmt.Stringer:
        // An undefined value that is documented (or that we otherwise support).
        return t.String(), nil
    default:
        // Affects only "unknown" values, in general.
        log.Panicf("type can not be formatted into string: %v", reflect.TypeOf(value).Name())
        return fmt.Sprintf("%v", parts), nil
    case fmt.Stringer:
        // An undefined value that is documented (or that we otherwise support).
        return t.String(), nil
    default:
        // Affects only "unknown" values, in general.
        log.Panicf("type can not be formatted into string: %v", reflect.TypeOf(value).Name())

        // Never called.
        return "", nil
    }
        // Never called.
        return "", nil
    }
}

// Format returns a stringified value for the given encoding. Automatically
// parses. Automatically calculates count based on type size.
func FormatFromBytes(rawBytes []byte, tagType TagTypePrimitive, justFirst bool, byteOrder binary.ByteOrder) (phrase string, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    // TODO(dustin): !! Add test
    // TODO(dustin): !! Add test

    typeSize := tagType.Size()
    typeSize := tagType.Size()

    if len(rawBytes)%typeSize != 0 {
        log.Panicf("byte-count (%d) does not align for [%s] type with a size of (%d) bytes", len(rawBytes), TypeNames[tagType], typeSize)
    }
    if len(rawBytes)%typeSize != 0 {
        log.Panicf("byte-count (%d) does not align for [%s] type with a size of (%d) bytes", len(rawBytes), TypeNames[tagType], typeSize)
    }

    // unitCount is the calculated unit-count. This should equal the original
    // value from the tag (pre-resolution).
    unitCount := uint32(len(rawBytes) / typeSize)
    // unitCount is the calculated unit-count. This should equal the original
    // value from the tag (pre-resolution).
    unitCount := uint32(len(rawBytes) / typeSize)

    // Truncate the items if it's not bytes or a string and we just want the first.
    // Truncate the items if it's not bytes or a string and we just want the first.

    var value interface{}
    var value interface{}

    switch tagType {
    case TypeByte:
        var err error
    switch tagType {
    case TypeByte:
        var err error

        value, err = parser.ParseBytes(rawBytes, unitCount)
        log.PanicIf(err)
    case TypeAscii:
        var err error
        value, err = parser.ParseBytes(rawBytes, unitCount)
        log.PanicIf(err)
    case TypeAscii:
        var err error

        value, err = parser.ParseAscii(rawBytes, unitCount)
        log.PanicIf(err)
    case TypeAsciiNoNul:
        var err error
        value, err = parser.ParseAscii(rawBytes, unitCount)
        log.PanicIf(err)
    case TypeAsciiNoNul:
        var err error

        value, err = parser.ParseAsciiNoNul(rawBytes, unitCount)
        log.PanicIf(err)
    case TypeShort:
        var err error
        value, err = parser.ParseAsciiNoNul(rawBytes, unitCount)
        log.PanicIf(err)
    case TypeShort:
        var err error

        value, err = parser.ParseShorts(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeLong:
        var err error
        value, err = parser.ParseShorts(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeLong:
        var err error

        value, err = parser.ParseLongs(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeRational:
        var err error
        value, err = parser.ParseLongs(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeRational:
        var err error

        value, err = parser.ParseRationals(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeSignedLong:
        var err error
        value, err = parser.ParseRationals(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeSignedLong:
        var err error

        value, err = parser.ParseSignedLongs(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeSignedRational:
        var err error
        value, err = parser.ParseSignedLongs(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    case TypeSignedRational:
        var err error

        value, err = parser.ParseSignedRationals(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    default:
        // Affects only "unknown" values, in general.
        log.Panicf("value of type [%s] can not be formatted into string", tagType.String())
        value, err = parser.ParseSignedRationals(rawBytes, unitCount, byteOrder)
        log.PanicIf(err)
    default:
        // Affects only "unknown" values, in general.
        log.Panicf("value of type [%s] can not be formatted into string", tagType.String())

        // Never called.
        return "", nil
    }
        // Never called.
        return "", nil
    }

    phrase, err = FormatFromType(value, justFirst)
    log.PanicIf(err)
    phrase, err = FormatFromType(value, justFirst)
    log.PanicIf(err)

    return phrase, nil
    return phrase, nil
}

// TranslateStringToType converts user-provided strings to properly-typed

@@ -357,96 +357,96 @@ func FormatFromBytes(rawBytes []byte, tagType TagTypePrimitive, justFirst bool,
// number. If a list needs to be processed, it is the caller's responsibility to
// split it (according to whichever convention has been established).
func TranslateStringToType(tagType TagTypePrimitive, valueString string) (value interface{}, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    if tagType == TypeUndefined {
        // The caller should just call String() on the decoded type.
        log.Panicf("undefined-type values are not supported")
    }
    if tagType == TypeUndefined {
        // The caller should just call String() on the decoded type.
        log.Panicf("undefined-type values are not supported")
    }

    if tagType == TypeByte {
        wide, err := strconv.ParseInt(valueString, 16, 8)
        log.PanicIf(err)
    if tagType == TypeByte {
        wide, err := strconv.ParseInt(valueString, 16, 8)
        log.PanicIf(err)

        return byte(wide), nil
    } else if tagType == TypeAscii || tagType == TypeAsciiNoNul {
        // Whether or not we're putting an NUL on the end is only relevant for
        // byte-level encoding. This function really just supports a user
        // interface.
        return byte(wide), nil
    } else if tagType == TypeAscii || tagType == TypeAsciiNoNul {
        // Whether or not we're putting an NUL on the end is only relevant for
        // byte-level encoding. This function really just supports a user
        // interface.

        return valueString, nil
    } else if tagType == TypeShort {
        n, err := strconv.ParseUint(valueString, 10, 16)
        log.PanicIf(err)
        return valueString, nil
    } else if tagType == TypeShort {
        n, err := strconv.ParseUint(valueString, 10, 16)
        log.PanicIf(err)

        return uint16(n), nil
    } else if tagType == TypeLong {
        n, err := strconv.ParseUint(valueString, 10, 32)
        log.PanicIf(err)
        return uint16(n), nil
    } else if tagType == TypeLong {
        n, err := strconv.ParseUint(valueString, 10, 32)
        log.PanicIf(err)

        return uint32(n), nil
    } else if tagType == TypeRational {
        parts := strings.SplitN(valueString, "/", 2)
        return uint32(n), nil
    } else if tagType == TypeRational {
        parts := strings.SplitN(valueString, "/", 2)

        numerator, err := strconv.ParseUint(parts[0], 10, 32)
        log.PanicIf(err)
        numerator, err := strconv.ParseUint(parts[0], 10, 32)
        log.PanicIf(err)

        denominator, err := strconv.ParseUint(parts[1], 10, 32)
        log.PanicIf(err)
        denominator, err := strconv.ParseUint(parts[1], 10, 32)
        log.PanicIf(err)

        return Rational{
            Numerator: uint32(numerator),
            Denominator: uint32(denominator),
        }, nil
    } else if tagType == TypeSignedLong {
        n, err := strconv.ParseInt(valueString, 10, 32)
        log.PanicIf(err)
        return Rational{
            Numerator: uint32(numerator),
            Denominator: uint32(denominator),
        }, nil
    } else if tagType == TypeSignedLong {
        n, err := strconv.ParseInt(valueString, 10, 32)
        log.PanicIf(err)

        return int32(n), nil
    } else if tagType == TypeSignedRational {
        parts := strings.SplitN(valueString, "/", 2)
        return int32(n), nil
    } else if tagType == TypeSignedRational {
        parts := strings.SplitN(valueString, "/", 2)

        numerator, err := strconv.ParseInt(parts[0], 10, 32)
        log.PanicIf(err)
        numerator, err := strconv.ParseInt(parts[0], 10, 32)
        log.PanicIf(err)

        denominator, err := strconv.ParseInt(parts[1], 10, 32)
        log.PanicIf(err)
        denominator, err := strconv.ParseInt(parts[1], 10, 32)
        log.PanicIf(err)

        return SignedRational{
            Numerator: int32(numerator),
            Denominator: int32(denominator),
        }, nil
    }
        return SignedRational{
            Numerator: int32(numerator),
            Denominator: int32(denominator),
        }, nil
    }

    log.Panicf("from-string encoding for type not supported; this shouldn't happen: [%s]", tagType.String())
    return nil, nil
    log.Panicf("from-string encoding for type not supported; this shouldn't happen: [%s]", tagType.String())
    return nil, nil
}

// GetTypeByName returns the `TagTypePrimitive` for the given type name.
// Returns (0) if not valid.
func GetTypeByName(typeName string) (tagType TagTypePrimitive, found bool) {
    tagType, found = typeNamesR[typeName]
    return tagType, found
    tagType, found = typeNamesR[typeName]
    return tagType, found
}

// BasicTag describes a single tag for any purpose.
type BasicTag struct {
    // FqIfdPath is the fully-qualified IFD-path.
    FqIfdPath string
    // FqIfdPath is the fully-qualified IFD-path.
    FqIfdPath string

    // IfdPath is the unindexed IFD-path.
    IfdPath string
    // IfdPath is the unindexed IFD-path.
    IfdPath string

    // TagId is the tag-ID.
    TagId uint16
    // TagId is the tag-ID.
    TagId uint16
}

func init() {
    for typeId, typeName := range TypeNames {
        typeNamesR[typeName] = typeId
    }
    for typeId, typeName := range TypeNames {
        typeNamesR[typeName] = typeId
    }
}

@@ -1,82 +1,82 @@
package exifcommon

import (
    "bytes"
    "fmt"
    "time"
    "bytes"
    "fmt"
    "time"

    "github.com/dsoprea/go-logging"
    "github.com/dsoprea/go-logging"
)

// DumpBytes prints a list of hex-encoded bytes.
func DumpBytes(data []byte) {
    fmt.Printf("DUMP: ")
    for _, x := range data {
        fmt.Printf("%02x ", x)
    }
    fmt.Printf("DUMP: ")
    for _, x := range data {
        fmt.Printf("%02x ", x)
    }

    fmt.Printf("\n")
    fmt.Printf("\n")
}

// DumpBytesClause prints a list like DumpBytes(), but encapsulated in
// "[]byte { ... }".
func DumpBytesClause(data []byte) {
    fmt.Printf("DUMP: ")
    fmt.Printf("DUMP: ")

    fmt.Printf("[]byte { ")
    fmt.Printf("[]byte { ")

    for i, x := range data {
        fmt.Printf("0x%02x", x)
    for i, x := range data {
        fmt.Printf("0x%02x", x)

        if i < len(data)-1 {
            fmt.Printf(", ")
        }
    }
        if i < len(data)-1 {
            fmt.Printf(", ")
        }
    }

    fmt.Printf(" }\n")
    fmt.Printf(" }\n")
}

// DumpBytesToString returns a stringified list of hex-encoded bytes.
func DumpBytesToString(data []byte) string {
    b := new(bytes.Buffer)
    b := new(bytes.Buffer)

    for i, x := range data {
        _, err := b.WriteString(fmt.Sprintf("%02x", x))
        log.PanicIf(err)
    for i, x := range data {
        _, err := b.WriteString(fmt.Sprintf("%02x", x))
        log.PanicIf(err)

        if i < len(data)-1 {
            _, err := b.WriteRune(' ')
            log.PanicIf(err)
        }
    }
        if i < len(data)-1 {
            _, err := b.WriteRune(' ')
            log.PanicIf(err)
        }
    }

    return b.String()
    return b.String()
}

// DumpBytesClauseToString returns a comma-separated list of hex-encoded bytes.
func DumpBytesClauseToString(data []byte) string {
    b := new(bytes.Buffer)
    b := new(bytes.Buffer)

    for i, x := range data {
        _, err := b.WriteString(fmt.Sprintf("0x%02x", x))
        log.PanicIf(err)
    for i, x := range data {
        _, err := b.WriteString(fmt.Sprintf("0x%02x", x))
        log.PanicIf(err)

        if i < len(data)-1 {
            _, err := b.WriteString(", ")
            log.PanicIf(err)
        }
    }
        if i < len(data)-1 {
            _, err := b.WriteString(", ")
            log.PanicIf(err)
        }
    }

    return b.String()
    return b.String()
}

// ExifFullTimestampString produces a string like "2018:11:30 13:01:49" from a
// `time.Time` struct. It will attempt to convert to UTC first.
func ExifFullTimestampString(t time.Time) (fullTimestampPhrase string) {

    // RELEASE(dustin): Dump this for the next release. It duplicates the same function now in exifcommon.
    // RELEASE(dustin): Dump this for the next release. It duplicates the same function now in exifcommon.

    t = t.UTC()
    t = t.UTC()

    return fmt.Sprintf("%04d:%02d:%02d %02d:%02d:%02d", t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second())
    return fmt.Sprintf("%04d:%02d:%02d %02d:%02d:%02d", t.Year(), t.Month(), t.Day(), t.Hour(), t.Minute(), t.Second())
}

@@ -13,8 +13,6 @@ var (
)

var (
    // ErrNotFarValue indicates that an offset-based lookup was attempted for a
    // non-offset-based (embedded) value.
    ErrNotFarValue = errors.New("not a far value")
)

@@ -39,7 +37,7 @@ type ValueContext struct {

// TODO(dustin): We can update newValueContext() to derive `valueOffset` itself (from `rawValueOffset`).

// NewValueContext returns a new ValueContext struct.
// newValueContext returns a new ValueContext struct.
func NewValueContext(ifdPath string, tagId uint16, unitCount, valueOffset uint32, rawValueOffset, addressableData []byte, tagType TagTypePrimitive, byteOrder binary.ByteOrder) *ValueContext {
    return &ValueContext{
        unitCount: unitCount,

@@ -150,9 +148,9 @@ func (vc *ValueContext) readRawEncoded() (rawBytes []byte, err error) {
    if vc.isEmbedded() == true {
        byteLength := unitSizeRaw * vc.unitCount
        return vc.rawValueOffset[:byteLength], nil
    } else {
        return vc.addressableData[vc.valueOffset : vc.valueOffset+vc.unitCount*unitSizeRaw], nil
    }

    return vc.addressableData[vc.valueOffset : vc.valueOffset+vc.unitCount*unitSizeRaw], nil
}

// GetFarOffset returns the offset if the value is not embedded [within the

@@ -396,14 +394,16 @@ func (vc *ValueContext) Values() (values interface{}, err error) {
    } else if vc.tagType == TypeUndefined {
        log.Panicf("will not parse undefined-type value")

        // Never called.
        return nil, nil
    } else {
        log.Panicf("value of type [%s] is unparseable", vc.tagType)

        // Never called.
        return nil, nil
    }

    log.Panicf("value of type [%s] is unparseable", vc.tagType)

    // Never called.
    return nil, nil
    return values, nil
}

func init() {

@@ -621,8 +621,8 @@ func TestValueContext_ReadRationals(t *testing.T) {
    log.PanicIf(err)

    expected := []Rational{
        {Numerator: 1, Denominator: 2},
        {Numerator: 3, Denominator: 4},
        Rational{Numerator: 1, Denominator: 2},
        Rational{Numerator: 3, Denominator: 4},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -670,8 +670,8 @@ func TestValueContext_ReadSignedRationals(t *testing.T) {
    log.PanicIf(err)

    expected := []SignedRational{
        {Numerator: 1, Denominator: 2},
        {Numerator: 3, Denominator: 4},
        SignedRational{Numerator: 1, Denominator: 2},
        SignedRational{Numerator: 3, Denominator: 4},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -799,8 +799,8 @@ func TestValueContext_Values__Rational(t *testing.T) {
    log.PanicIf(err)

    expected := []Rational{
        {Numerator: 1, Denominator: 2},
        {Numerator: 3, Denominator: 4},
        Rational{Numerator: 1, Denominator: 2},
        Rational{Numerator: 3, Denominator: 4},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -848,8 +848,8 @@ func TestValueContext_Values__SignedRational(t *testing.T) {
    log.PanicIf(err)

    expected := []SignedRational{
        {Numerator: 1, Denominator: 2},
        {Numerator: 3, Denominator: 4},
        SignedRational{Numerator: 1, Denominator: 2},
        SignedRational{Numerator: 3, Denominator: 4},
    }

    if reflect.DeepEqual(value, expected) != true {

@@ -1,57 +1,55 @@
package exifcommon

import (
    "bytes"
    "reflect"
    "time"
    "bytes"
    "reflect"
    "time"

    "encoding/binary"
    "encoding/binary"

    "github.com/dsoprea/go-logging"
    "github.com/dsoprea/go-logging"
)

var (
    typeEncodeLogger = log.NewLogger("exif.type_encode")
    typeEncodeLogger = log.NewLogger("exif.type_encode")
)

// EncodedData encapsulates the compound output of an encoding operation.
type EncodedData struct {
    Type TagTypePrimitive
    Encoded []byte
    Type TagTypePrimitive
    Encoded []byte

    // TODO(dustin): Is this really necessary? We might have this just to correlate to the incoming stream format (raw bytes and a unit-count both for incoming and outgoing).
    UnitCount uint32
    // TODO(dustin): Is this really necessary? We might have this just to correlate to the incoming stream format (raw bytes and a unit-count both for incoming and outgoing).
    UnitCount uint32
}

// ValueEncoder knows how to encode values of every type to bytes.
type ValueEncoder struct {
    byteOrder binary.ByteOrder
    byteOrder binary.ByteOrder
}

// NewValueEncoder returns a new ValueEncoder.
func NewValueEncoder(byteOrder binary.ByteOrder) *ValueEncoder {
    return &ValueEncoder{
        byteOrder: byteOrder,
    }
    return &ValueEncoder{
        byteOrder: byteOrder,
    }
}

func (ve *ValueEncoder) encodeBytes(value []uint8) (ed EncodedData, err error) {
    ed.Type = TypeByte
    ed.Encoded = []byte(value)
    ed.UnitCount = uint32(len(value))
    ed.Type = TypeByte
    ed.Encoded = []byte(value)
    ed.UnitCount = uint32(len(value))

    return ed, nil
    return ed, nil
}

func (ve *ValueEncoder) encodeAscii(value string) (ed EncodedData, err error) {
    ed.Type = TypeAscii
    ed.Type = TypeAscii

    ed.Encoded = []byte(value)
    ed.Encoded = append(ed.Encoded, 0)
    ed.Encoded = []byte(value)
    ed.Encoded = append(ed.Encoded, 0)

    ed.UnitCount = uint32(len(ed.Encoded))
    ed.UnitCount = uint32(len(ed.Encoded))

    return ed, nil
    return ed, nil
}

// encodeAsciiNoNul returns a string encoded as a byte-string without a trailing

@@ -68,162 +66,162 @@ func (ve *ValueEncoder) encodeAscii(value string) (ed EncodedData, err error) {
// no-nul parser.
//
func (ve *ValueEncoder) encodeAsciiNoNul(value string) (ed EncodedData, err error) {
    ed.Type = TypeAsciiNoNul
    ed.Encoded = []byte(value)
    ed.UnitCount = uint32(len(ed.Encoded))
    ed.Type = TypeAsciiNoNul
    ed.Encoded = []byte(value)
    ed.UnitCount = uint32(len(ed.Encoded))

    return ed, nil
    return ed, nil
}

func (ve *ValueEncoder) encodeShorts(value []uint16) (ed EncodedData, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    ed.UnitCount = uint32(len(value))
    ed.Encoded = make([]byte, ed.UnitCount*2)
    ed.UnitCount = uint32(len(value))
    ed.Encoded = make([]byte, ed.UnitCount*2)

    for i := uint32(0); i < ed.UnitCount; i++ {
        ve.byteOrder.PutUint16(ed.Encoded[i*2:(i+1)*2], value[i])
    }
    for i := uint32(0); i < ed.UnitCount; i++ {
        ve.byteOrder.PutUint16(ed.Encoded[i*2:(i+1)*2], value[i])
    }

    ed.Type = TypeShort
    ed.Type = TypeShort

    return ed, nil
    return ed, nil
}

func (ve *ValueEncoder) encodeLongs(value []uint32) (ed EncodedData, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    ed.UnitCount = uint32(len(value))
    ed.Encoded = make([]byte, ed.UnitCount*4)
    ed.UnitCount = uint32(len(value))
    ed.Encoded = make([]byte, ed.UnitCount*4)

    for i := uint32(0); i < ed.UnitCount; i++ {
        ve.byteOrder.PutUint32(ed.Encoded[i*4:(i+1)*4], value[i])
    }
    for i := uint32(0); i < ed.UnitCount; i++ {
        ve.byteOrder.PutUint32(ed.Encoded[i*4:(i+1)*4], value[i])
    }

    ed.Type = TypeLong
    ed.Type = TypeLong

    return ed, nil
    return ed, nil
}

func (ve *ValueEncoder) encodeRationals(value []Rational) (ed EncodedData, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    ed.UnitCount = uint32(len(value))
    ed.Encoded = make([]byte, ed.UnitCount*8)
    ed.UnitCount = uint32(len(value))
    ed.Encoded = make([]byte, ed.UnitCount*8)

    for i := uint32(0); i < ed.UnitCount; i++ {
        ve.byteOrder.PutUint32(ed.Encoded[i*8+0:i*8+4], value[i].Numerator)
        ve.byteOrder.PutUint32(ed.Encoded[i*8+4:i*8+8], value[i].Denominator)
    }
    for i := uint32(0); i < ed.UnitCount; i++ {
        ve.byteOrder.PutUint32(ed.Encoded[i*8+0:i*8+4], value[i].Numerator)
        ve.byteOrder.PutUint32(ed.Encoded[i*8+4:i*8+8], value[i].Denominator)
    }

    ed.Type = TypeRational
    ed.Type = TypeRational

    return ed, nil
    return ed, nil
}

func (ve *ValueEncoder) encodeSignedLongs(value []int32) (ed EncodedData, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    ed.UnitCount = uint32(len(value))
    ed.UnitCount = uint32(len(value))

    b := bytes.NewBuffer(make([]byte, 0, 8*ed.UnitCount))
    b := bytes.NewBuffer(make([]byte, 0, 8*ed.UnitCount))

    for i := uint32(0); i < ed.UnitCount; i++ {
        err := binary.Write(b, ve.byteOrder, value[i])
        log.PanicIf(err)
    }
    for i := uint32(0); i < ed.UnitCount; i++ {
        err := binary.Write(b, ve.byteOrder, value[i])
        log.PanicIf(err)
    }

    ed.Type = TypeSignedLong
    ed.Encoded = b.Bytes()
    ed.Type = TypeSignedLong
    ed.Encoded = b.Bytes()

    return ed, nil
    return ed, nil
}

func (ve *ValueEncoder) encodeSignedRationals(value []SignedRational) (ed EncodedData, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    ed.UnitCount = uint32(len(value))
    ed.UnitCount = uint32(len(value))

    b := bytes.NewBuffer(make([]byte, 0, 8*ed.UnitCount))
    b := bytes.NewBuffer(make([]byte, 0, 8*ed.UnitCount))

    for i := uint32(0); i < ed.UnitCount; i++ {
        err := binary.Write(b, ve.byteOrder, value[i].Numerator)
        log.PanicIf(err)
    for i := uint32(0); i < ed.UnitCount; i++ {
        err := binary.Write(b, ve.byteOrder, value[i].Numerator)
        log.PanicIf(err)

        err = binary.Write(b, ve.byteOrder, value[i].Denominator)
        log.PanicIf(err)
    }
        err = binary.Write(b, ve.byteOrder, value[i].Denominator)
        log.PanicIf(err)
    }

    ed.Type = TypeSignedRational
    ed.Encoded = b.Bytes()
    ed.Type = TypeSignedRational
    ed.Encoded = b.Bytes()

    return ed, nil
    return ed, nil
}

// Encode returns bytes for the given value, infering type from the actual
// value. This does not support `TypeAsciiNoNull` (all strings are encoded as
// `TypeAscii`).
func (ve *ValueEncoder) Encode(value interface{}) (ed EncodedData, err error) {
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()
    defer func() {
        if state := recover(); state != nil {
            err = log.Wrap(state.(error))
        }
    }()

    switch value.(type) {
    case []byte:
        ed, err = ve.encodeBytes(value.([]byte))
        log.PanicIf(err)
    case string:
        ed, err = ve.encodeAscii(value.(string))
        log.PanicIf(err)
    case []uint16:
        ed, err = ve.encodeShorts(value.([]uint16))
        log.PanicIf(err)
    case []uint32:
        ed, err = ve.encodeLongs(value.([]uint32))
        log.PanicIf(err)
    case []Rational:
        ed, err = ve.encodeRationals(value.([]Rational))
        log.PanicIf(err)
    case []int32:
        ed, err = ve.encodeSignedLongs(value.([]int32))
        log.PanicIf(err)
    case []SignedRational:
        ed, err = ve.encodeSignedRationals(value.([]SignedRational))
        log.PanicIf(err)
    case time.Time:
        // For convenience, if the user doesn't want to deal with translation
        // semantics with timestamps.
    switch value.(type) {
    case []byte:
        ed, err = ve.encodeBytes(value.([]byte))
        log.PanicIf(err)
    case string:
        ed, err = ve.encodeAscii(value.(string))
        log.PanicIf(err)
    case []uint16:
        ed, err = ve.encodeShorts(value.([]uint16))
        log.PanicIf(err)
    case []uint32:
        ed, err = ve.encodeLongs(value.([]uint32))
        log.PanicIf(err)
    case []Rational:
        ed, err = ve.encodeRationals(value.([]Rational))
        log.PanicIf(err)
    case []int32:
        ed, err = ve.encodeSignedLongs(value.([]int32))
        log.PanicIf(err)
    case []SignedRational:
        ed, err = ve.encodeSignedRationals(value.([]SignedRational))
        log.PanicIf(err)
    case time.Time:
        // For convenience, if the user doesn't want to deal with translation
        // semantics with timestamps.

        t := value.(time.Time)
        s := ExifFullTimestampString(t)
        t := value.(time.Time)
        s := ExifFullTimestampString(t)

        ed, err = ve.encodeAscii(s)
        log.PanicIf(err)
    default:
        log.Panicf("value not encodable: [%s] [%v]", reflect.TypeOf(value), value)
    }
        ed, err = ve.encodeAscii(s)
        log.PanicIf(err)
    default:
        log.Panicf("value not encodable: [%s] [%v]", reflect.TypeOf(value), value)
    }

    return ed, nil
    return ed, nil
}

[Diff for one file suppressed because it is too large.]

v2/error.go (10 changed lines):

@@ -1,14 +1,10 @@
package exif

import (
    "errors"
    "errors"
)

var (
    // ErrTagNotFound indicates that the tag was not found.
    ErrTagNotFound = errors.New("tag not found")

    // ErrTagNotKnown indicates that the tag is not registered with us as a
    // known tag.
    ErrTagNotKnown = errors.New("tag is not known")
    ErrTagNotFound = errors.New("tag not found")
    ErrTagNotStandard = errors.New("tag not a standard tag")
)

@@ -35,7 +35,6 @@ var (
    mainLogger = log.NewLogger("main.main")
)

// IfdEntry is a JSON model for representing a single tag.
type IfdEntry struct {
    IfdPath string `json:"ifd_path"`
    FqIfdPath string `json:"fq_ifd_path"`

@@ -81,9 +81,9 @@ func TestVisit(t *testing.T) {
    if log.Is(err, ErrTagNotFound) {
        fmt.Printf("Unknown tag: [%s] (%04x)\n", ii.String(), tagId)
        return nil
    } else {
        log.Panic(err)
    }

    log.Panic(err)
    }

    valueString, err := ite.FormatFirst()

@@ -280,7 +280,7 @@ func TestCollect(t *testing.T) {

    actualIfdPaths := make([]string, len(lookup))
    i := 0
    for ifdPath := range lookup {
    for ifdPath, _ := range lookup {
        actualIfdPaths[i] = ifdPath
        i++
    }

@@ -72,9 +72,9 @@ func (d GpsDegrees) Decimal() float64 {

    if d.Orientation == 'S' || d.Orientation == 'W' {
        return -decimal
    } else {
        return decimal
    }

    return decimal
}

// Raw returns a Rational struct that can be used to *write* coordinates. In

@@ -954,7 +954,7 @@ func (ib *IfdBuilder) AddChildIb(childIb *IfdBuilder) (err error) {
        log.Panicf("Child IFD does not have the same byte-order: [%s] != [%s]", childIb.byteOrder, ib.byteOrder)
    }

    // Since no standard IFDs supports occurring more than once, check that a
    // Since no standard IFDs supports occuring more than once, check that a
    // tag of this type has not been previously added. Note that we just search
    // the current IFD and *not every* IFD.
    for _, bt := range childIb.tags {

@@ -1932,11 +1932,10 @@ func ExampleIfd_Thumbnail() {
    _, index, err := Collect(im, ti, rawExif)
    log.PanicIf(err)

    // This returns the raw bytes that you will be looking for, but there's no
    // use for them at this point in the example.
    _, err = index.RootIfd.NextIfd.Thumbnail()
    thumbnailData, err := index.RootIfd.NextIfd.Thumbnail()
    log.PanicIf(err)

    thumbnailData = thumbnailData
    // Output:
}

@@ -1983,12 +1982,10 @@ func ExampleBuilderTag_SetValue() {
    // Encode.

    ibe := NewIfdByteEncoder()

    // This returns the raw bytes that you will be looking for, but there's no
    // use for them at this point in the example.
    _, err = ibe.EncodeToExif(rootIb)
    updatedExif, err := ibe.EncodeToExif(rootIb)
    log.PanicIf(err)

    updatedExif = updatedExif
    // Output:
}

@@ -715,7 +715,7 @@ func (ifd *Ifd) FindTagWithName(tagName string) (results []*IfdTagEntry, err err

    it, err := ifd.tagIndex.GetWithName(ifd.ifdIdentity, tagName)
    if log.Is(err, ErrTagNotFound) == true {
        log.Panic(ErrTagNotKnown)
        log.Panic(ErrTagNotStandard)
    } else if err != nil {
        log.Panic(err)
    }

@@ -1482,7 +1482,7 @@ func FindIfdFromRootIfd(rootIfd *Ifd, ifdPath string) (ifd *Ifd, err error) {
        thisIfd = thisIfd.NextIfd
    }

    for _, itii := range lineage {
    for i, itii := range lineage {
        var hit *Ifd
        for _, childIfd := range thisIfd.Children {
            if childIfd.ifdIdentity.TagId() == itii.TagId {

@ -79,7 +79,6 @@ func TestIfd_FindTagWithId_Hit(t *testing.T) {
|
|||
|
||||
ifd := index.RootIfd
|
||||
results, err := ifd.FindTagWithId(0x011b)
|
||||
log.PanicIf(err)
|
||||
|
||||
if len(results) != 1 {
|
||||
t.Fatalf("Exactly one result was not found: (%d)", len(results))
|
||||
|
@ -131,9 +130,7 @@ func TestIfd_FindTagWithName_Hit(t *testing.T) {
|
|||
log.PanicIf(err)
|
||||
|
||||
ifd := index.RootIfd
|
||||
|
||||
results, err := ifd.FindTagWithName("YResolution")
|
||||
log.PanicIf(err)
|
||||
|
||||
if len(results) != 1 {
|
||||
t.Fatalf("Exactly one result was not found: (%d)", len(results))
|
||||
|
@ -189,7 +186,7 @@ func TestIfd_FindTagWithName_NonStandard(t *testing.T) {
|
|||
_, err = ifd.FindTagWithName("GeorgeNotAtHome")
|
||||
if err == nil {
|
||||
t.Fatalf("Expected error for not-found tag.")
|
||||
} else if log.Is(err, ErrTagNotKnown) == false {
|
||||
} else if log.Is(err, ErrTagNotStandard) == false {
|
||||
log.Panic(err)
|
||||
}
|
||||
}
|
||||
|
@ -361,106 +358,106 @@ func TestIfd_EnumerateTagsRecursively(t *testing.T) {
|
|||
log.PanicIf(err)
|
||||
|
||||
expected := [][2]interface{}{
|
||||
{"IFD", 0x010f},
|
||||
{"IFD", 0x0110},
|
||||
{"IFD", 0x0112},
|
||||
{"IFD", 0x011a},
|
||||
{"IFD", 0x011b},
|
||||
{"IFD", 0x0128},
|
||||
{"IFD", 0x0132},
|
||||
{"IFD", 0x013b},
|
||||
{"IFD", 0x0213},
|
||||
{"IFD", 0x8298},
|
||||
{"IFD/Exif", 0x829a},
|
||||
{"IFD/Exif", 0x829d},
|
||||
{"IFD/Exif", 0x8822},
|
||||
{"IFD/Exif", 0x8827},
|
||||
{"IFD/Exif", 0x8830},
|
||||
{"IFD/Exif", 0x8832},
|
||||
{"IFD/Exif", 0x9000},
|
||||
{"IFD/Exif", 0x9003},
|
||||
{"IFD/Exif", 0x9004},
|
||||
{"IFD/Exif", 0x9101},
|
||||
{"IFD/Exif", 0x9201},
|
||||
{"IFD/Exif", 0x9202},
|
||||
{"IFD/Exif", 0x9204},
|
||||
{"IFD/Exif", 0x9207},
|
||||
{"IFD/Exif", 0x9209},
|
||||
{"IFD/Exif", 0x920a},
|
||||
{"IFD/Exif", 0x927c},
|
||||
{"IFD/Exif", 0x9286},
|
||||
{"IFD/Exif", 0x9290},
|
||||
{"IFD/Exif", 0x9291},
|
||||
{"IFD/Exif", 0x9292},
|
||||
{"IFD/Exif", 0xa000},
|
||||
{"IFD/Exif", 0xa001},
|
||||
{"IFD/Exif", 0xa002},
|
||||
{"IFD/Exif", 0xa003},
|
||||
{"IFD/Exif/Iop", 0x0001},
|
||||
{"IFD/Exif/Iop", 0x0002},
|
||||
{"IFD/Exif", 0xa20e},
|
||||
{"IFD/Exif", 0xa20f},
|
||||
{"IFD/Exif", 0xa210},
|
||||
{"IFD/Exif", 0xa401},
|
||||
{"IFD/Exif", 0xa402},
|
||||
{"IFD/Exif", 0xa403},
|
||||
{"IFD/Exif", 0xa406},
|
||||
{"IFD/Exif", 0xa430},
|
||||
{"IFD/Exif", 0xa431},
|
||||
{"IFD/Exif", 0xa432},
|
||||
{"IFD/Exif", 0xa434},
|
||||
{"IFD/Exif", 0xa435},
|
||||
{"IFD/GPSInfo", 0x0000},
|
||||
{"IFD", 0x010f},
|
||||
{"IFD", 0x0110},
|
||||
{"IFD", 0x0112},
|
||||
{"IFD", 0x011a},
|
||||
{"IFD", 0x011b},
|
||||
{"IFD", 0x0128},
|
||||
{"IFD", 0x0132},
|
||||
{"IFD", 0x013b},
|
||||
{"IFD", 0x0213},
|
||||
{"IFD", 0x8298},
|
||||
{"IFD/Exif", 0x829a},
|
||||
{"IFD/Exif", 0x829d},
|
||||
{"IFD/Exif", 0x8822},
|
||||
{"IFD/Exif", 0x8827},
|
||||
{"IFD/Exif", 0x8830},
|
||||
{"IFD/Exif", 0x8832},
|
||||
{"IFD/Exif", 0x9000},
|
||||
{"IFD/Exif", 0x9003},
|
||||
{"IFD/Exif", 0x9004},
|
||||
{"IFD/Exif", 0x9101},
|
||||
{"IFD/Exif", 0x9201},
|
||||
{"IFD/Exif", 0x9202},
|
||||
{"IFD/Exif", 0x9204},
|
||||
{"IFD/Exif", 0x9207},
|
||||
{"IFD/Exif", 0x9209},
|
||||
{"IFD/Exif", 0x920a},
|
||||
{"IFD/Exif", 0x927c},
|
||||
{"IFD/Exif", 0x9286},
|
||||
{"IFD/Exif", 0x9290},
|
||||
{"IFD/Exif", 0x9291},
|
||||
{"IFD/Exif", 0x9292},
|
||||
{"IFD/Exif", 0xa000},
|
||||
{"IFD/Exif", 0xa001},
|
||||
{"IFD/Exif", 0xa002},
|
||||
{"IFD/Exif", 0xa003},
|
||||
{"IFD/Exif/Iop", 0x0001},
|
||||
{"IFD/Exif/Iop", 0x0002},
|
||||
{"IFD/Exif", 0xa20e},
|
||||
{"IFD/Exif", 0xa20f},
|
||||
{"IFD/Exif", 0xa210},
|
||||
{"IFD/Exif", 0xa401},
|
||||
{"IFD/Exif", 0xa402},
|
||||
{"IFD/Exif", 0xa403},
|
||||
{"IFD/Exif", 0xa406},
|
||||
{"IFD/Exif", 0xa430},
|
||||
{"IFD/Exif", 0xa431},
|
||||
{"IFD/Exif", 0xa432},
|
||||
{"IFD/Exif", 0xa434},
|
||||
{"IFD/Exif", 0xa435},
|
||||
{"IFD/GPSInfo", 0x0000},
|
||||
[2]interface{}{"IFD", 0x010f},
|
||||
[2]interface{}{"IFD", 0x0110},
|
||||
[2]interface{}{"IFD", 0x0112},
|
||||
[2]interface{}{"IFD", 0x011a},
|
||||
[2]interface{}{"IFD", 0x011b},
|
||||
[2]interface{}{"IFD", 0x0128},
|
||||
[2]interface{}{"IFD", 0x0132},
|
||||
[2]interface{}{"IFD", 0x013b},
|
||||
[2]interface{}{"IFD", 0x0213},
|
||||
[2]interface{}{"IFD", 0x8298},
|
||||
[2]interface{}{"IFD/Exif", 0x829a},
|
||||
[2]interface{}{"IFD/Exif", 0x829d},
|
||||
[2]interface{}{"IFD/Exif", 0x8822},
|
||||
[2]interface{}{"IFD/Exif", 0x8827},
|
||||
[2]interface{}{"IFD/Exif", 0x8830},
|
||||
[2]interface{}{"IFD/Exif", 0x8832},
|
||||
[2]interface{}{"IFD/Exif", 0x9000},
|
||||
[2]interface{}{"IFD/Exif", 0x9003},
|
||||
[2]interface{}{"IFD/Exif", 0x9004},
|
||||
[2]interface{}{"IFD/Exif", 0x9101},
|
||||
[2]interface{}{"IFD/Exif", 0x9201},
|
||||
[2]interface{}{"IFD/Exif", 0x9202},
|
||||
[2]interface{}{"IFD/Exif", 0x9204},
|
||||
[2]interface{}{"IFD/Exif", 0x9207},
|
||||
[2]interface{}{"IFD/Exif", 0x9209},
|
||||
[2]interface{}{"IFD/Exif", 0x920a},
|
||||
[2]interface{}{"IFD/Exif", 0x927c},
|
||||
[2]interface{}{"IFD/Exif", 0x9286},
|
||||
[2]interface{}{"IFD/Exif", 0x9290},
|
||||
[2]interface{}{"IFD/Exif", 0x9291},
|
||||
[2]interface{}{"IFD/Exif", 0x9292},
|
||||
[2]interface{}{"IFD/Exif", 0xa000},
|
||||
[2]interface{}{"IFD/Exif", 0xa001},
|
||||
[2]interface{}{"IFD/Exif", 0xa002},
|
||||
[2]interface{}{"IFD/Exif", 0xa003},
|
||||
[2]interface{}{"IFD/Exif/Iop", 0x0001},
|
||||
[2]interface{}{"IFD/Exif/Iop", 0x0002},
|
||||
[2]interface{}{"IFD/Exif", 0xa20e},
|
||||
[2]interface{}{"IFD/Exif", 0xa20f},
|
||||
[2]interface{}{"IFD/Exif", 0xa210},
|
||||
[2]interface{}{"IFD/Exif", 0xa401},
|
||||
[2]interface{}{"IFD/Exif", 0xa402},
|
||||
[2]interface{}{"IFD/Exif", 0xa403},
|
||||
[2]interface{}{"IFD/Exif", 0xa406},
|
||||
[2]interface{}{"IFD/Exif", 0xa430},
|
||||
[2]interface{}{"IFD/Exif", 0xa431},
|
||||
[2]interface{}{"IFD/Exif", 0xa432},
|
||||
[2]interface{}{"IFD/Exif", 0xa434},
|
||||
[2]interface{}{"IFD/Exif", 0xa435},
|
||||
[2]interface{}{"IFD/GPSInfo", 0x0000},
|
||||
[2]interface{}{"IFD", 0x010f},
|
||||
[2]interface{}{"IFD", 0x0110},
|
||||
[2]interface{}{"IFD", 0x0112},
|
||||
[2]interface{}{"IFD", 0x011a},
|
||||
[2]interface{}{"IFD", 0x011b},
|
||||
[2]interface{}{"IFD", 0x0128},
|
||||
[2]interface{}{"IFD", 0x0132},
|
||||
[2]interface{}{"IFD", 0x013b},
|
||||
[2]interface{}{"IFD", 0x0213},
|
||||
[2]interface{}{"IFD", 0x8298},
|
||||
[2]interface{}{"IFD/Exif", 0x829a},
|
||||
[2]interface{}{"IFD/Exif", 0x829d},
|
||||
[2]interface{}{"IFD/Exif", 0x8822},
|
||||
[2]interface{}{"IFD/Exif", 0x8827},
|
||||
[2]interface{}{"IFD/Exif", 0x8830},
|
||||
[2]interface{}{"IFD/Exif", 0x8832},
|
||||
[2]interface{}{"IFD/Exif", 0x9000},
|
||||
[2]interface{}{"IFD/Exif", 0x9003},
|
||||
[2]interface{}{"IFD/Exif", 0x9004},
|
||||
[2]interface{}{"IFD/Exif", 0x9101},
|
||||
[2]interface{}{"IFD/Exif", 0x9201},
|
||||
[2]interface{}{"IFD/Exif", 0x9202},
|
||||
[2]interface{}{"IFD/Exif", 0x9204},
|
||||
[2]interface{}{"IFD/Exif", 0x9207},
|
||||
[2]interface{}{"IFD/Exif", 0x9209},
|
||||
[2]interface{}{"IFD/Exif", 0x920a},
|
||||
[2]interface{}{"IFD/Exif", 0x927c},
|
||||
[2]interface{}{"IFD/Exif", 0x9286},
|
||||
[2]interface{}{"IFD/Exif", 0x9290},
|
||||
[2]interface{}{"IFD/Exif", 0x9291},
|
||||
[2]interface{}{"IFD/Exif", 0x9292},
|
||||
[2]interface{}{"IFD/Exif", 0xa000},
|
||||
[2]interface{}{"IFD/Exif", 0xa001},
|
||||
[2]interface{}{"IFD/Exif", 0xa002},
|
||||
[2]interface{}{"IFD/Exif", 0xa003},
|
||||
[2]interface{}{"IFD/Exif/Iop", 0x0001},
|
||||
[2]interface{}{"IFD/Exif/Iop", 0x0002},
|
||||
[2]interface{}{"IFD/Exif", 0xa20e},
|
||||
[2]interface{}{"IFD/Exif", 0xa20f},
|
||||
[2]interface{}{"IFD/Exif", 0xa210},
|
||||
[2]interface{}{"IFD/Exif", 0xa401},
|
||||
[2]interface{}{"IFD/Exif", 0xa402},
|
||||
[2]interface{}{"IFD/Exif", 0xa403},
|
||||
[2]interface{}{"IFD/Exif", 0xa406},
|
||||
[2]interface{}{"IFD/Exif", 0xa430},
|
||||
[2]interface{}{"IFD/Exif", 0xa431},
|
||||
[2]interface{}{"IFD/Exif", 0xa432},
|
||||
[2]interface{}{"IFD/Exif", 0xa434},
|
||||
[2]interface{}{"IFD/Exif", 0xa435},
|
||||
[2]interface{}{"IFD/GPSInfo", 0x0000},
|
||||
}

if reflect.DeepEqual(collected, expected) != true {
@ -36,7 +36,7 @@ type IfdTagEntry struct {
// child IFD. Includes indices.
childFqIfdPath string

// TODO(dustin): !! IB's host the child-IBs directly in the tag, but that's not the case here. Refactor to accommodate it for a consistent experience.
// TODO(dustin): !! IB's host the child-IBs directly in the tag, but that's not the case here. Refactor to accomodate it for a consistent experience.

ifdIdentity *exifcommon.IfdIdentity
@ -132,7 +132,7 @@ func (ite *IfdTagEntry) getValueOffset() uint32 {
return ite.valueOffset
}

// GetRawBytes renders a specific list of bytes from the value in this tag.
// RawBytes renders a specific list of bytes from the value in this tag.
func (ite *IfdTagEntry) GetRawBytes() (rawBytes []byte, err error) {
defer func() {
if state := recover(); state != nil {
@ -1,6 +1,6 @@
// Package exif parses raw EXIF information given a block of raw EXIF data. It
// can also construct new EXIF information, and provides tools for doing so.
// This package is not involved with the parsing of particular file-formats.
// exif parses raw EXIF information given a block of raw EXIF data. It can also
// construct new EXIF information, and provides tools for doing so. This package
// is not involved with the parsing of particular file-formats.
//
// The EXIF data must first be extracted and then provided to us. Conversely,
// when constructing new EXIF data, the caller is responsible for packaging
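As a rough usage sketch for the package documented above (not part of this diff; the image path is hypothetical), extraction followed by a flat tag dump might look like this. `SearchAndExtractExif` and `GetFlatExifData` are the v2 helpers referenced elsewhere in this commit.

```go
package main

import (
	"fmt"
	"io/ioutil"
	"log"

	"github.com/dsoprea/go-exif/v2"
)

func main() {
	// Hypothetical image path; the caller is responsible for reading the file.
	data, err := ioutil.ReadFile("photo.jpg")
	if err != nil {
		log.Fatal(err)
	}

	// Locate the raw EXIF block inside the file bytes.
	rawExif, err := exif.SearchAndExtractExif(data)
	if err != nil {
		log.Fatal(err)
	}

	// Flatten every tag in every IFD into a simple list.
	tags, err := exif.GetFlatExifData(rawExif)
	if err != nil {
		log.Fatal(err)
	}

	for _, tag := range tags {
		fmt.Printf("%s 0x%04x %s = %s\n", tag.IfdPath, tag.TagId, tag.TagName, tag.Formatted)
	}
}
```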
@ -60,7 +60,7 @@ var (
tagsWithoutAlignment = map[uint16]struct{}{
// The thumbnail offset is stored as a long, but its data is a binary
// blob (not a slice of longs).
ThumbnailOffsetTagId: {},
ThumbnailOffsetTagId: struct{}{},
}
)
@ -111,7 +111,8 @@ func (it *IndexedTag) Is(ifdPath string, id uint16) bool {
return it.Id == id && it.IfdPath == ifdPath
}

// GetEncodingType returns the largest type that this tag's value can occupy.
// WidestSupportedType returns the largest type that this tag's value can
// occupy
func (it *IndexedTag) GetEncodingType(value interface{}) exifcommon.TagTypePrimitive {
// For convenience, we handle encoding a `time.Time` directly.
if IsTime(value) == true {
@ -152,9 +153,9 @@ func (it *IndexedTag) GetEncodingType(value interface{}) exifcommon.TagTypePrimi

if _, ok := value.(exifcommon.SignedRational); ok == true {
return exifcommon.TypeSignedRational
} else {
return exifcommon.TypeRational
}

return exifcommon.TypeRational
}

log.Panicf("WidestSupportedType() case is not handled for tag [%s] (0x%04x): %v", it.IfdPath, it.Id, it.SupportedTypes)
@ -13,7 +13,7 @@ import (
)

var (
testExifData []byte
testExifData []byte = nil
)

func getExifSimpleTestIb() *IfdBuilder {
@ -129,7 +129,7 @@ func validateExifSimpleTestIb(exifData []byte, t *testing.T) {
t.Fatalf("Next-IFD pointer is non-nil.")
}

// Verify the values by using the actual, original types (this is awesome).
// Verify the values by using the actual, orginal types (this is awesome).

expected := []struct {
tagId uint16
@ -8,7 +8,6 @@ import (
"github.com/dsoprea/go-exif/v2/common"
)

// Encode encodes the given encodeable undefined value to bytes.
func Encode(value EncodeableValue, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
defer func() {
if state := recover(); state != nil {
@ -29,7 +28,7 @@ func Encode(value EncodeableValue, byteOrder binary.ByteOrder) (encoded []byte,
return encoded, unitCount, nil
}

// Decode constructs a value from raw encoded bytes
// UndefinedValue knows how to resolve the value for most unknown-type tags.
func Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
defer func() {
if state := recover(); state != nil {
@ -28,7 +28,7 @@ func TestCodec8828Oecf_Encode(t *testing.T) {
Columns: 2,
Rows: 22,
ColumnNames: []string{"aa", "bb"},
Values: []exifcommon.SignedRational{{11, 22}},
Values: []exifcommon.SignedRational{exifcommon.SignedRational{11, 22}},
}

codec := Codec8828Oecf{}
@ -79,7 +79,7 @@ func TestCodec8828Oecf_Decode(t *testing.T) {
Columns: 2,
Rows: 22,
ColumnNames: []string{"aa", "bb"},
Values: []exifcommon.SignedRational{{11, 22}},
Values: []exifcommon.SignedRational{exifcommon.SignedRational{11, 22}},
}

if reflect.DeepEqual(value, expectedValue) != true {
@ -34,14 +34,14 @@ var (
}

TagUndefinedType_9101_ComponentsConfiguration_Configurations = map[int][]byte{
TagUndefinedType_9101_ComponentsConfiguration_RGB: {
TagUndefinedType_9101_ComponentsConfiguration_RGB: []byte{
TagUndefinedType_9101_ComponentsConfiguration_Channel_R,
TagUndefinedType_9101_ComponentsConfiguration_Channel_G,
TagUndefinedType_9101_ComponentsConfiguration_Channel_B,
0,
},

TagUndefinedType_9101_ComponentsConfiguration_YCBCR: {
TagUndefinedType_9101_ComponentsConfiguration_YCBCR: []byte{
TagUndefinedType_9101_ComponentsConfiguration_Channel_Y,
TagUndefinedType_9101_ComponentsConfiguration_Channel_Cb,
TagUndefinedType_9101_ComponentsConfiguration_Channel_Cr,
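A hedged illustration of how the configuration table above is typically consumed (hypothetical snippet, not part of the diff; it assumes these exported names are importable from the v2 `undefined` package):

```go
package main

import (
	"fmt"

	exifundefined "github.com/dsoprea/go-exif/v2/undefined"
)

func main() {
	// Look up the raw byte layout that encodes a YCbCr component ordering
	// for the ComponentsConfiguration (0x9101) undefined-type tag.
	config := exifundefined.TagUndefinedType_9101_ComponentsConfiguration_Configurations[exifundefined.TagUndefinedType_9101_ComponentsConfiguration_YCBCR]

	fmt.Printf("0x9101 component-configuration bytes: %v\n", config)
}
```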
@ -1,114 +1,114 @@
package exifundefined

import (
"fmt"
"strings"

"crypto/sha1"
"encoding/binary"

"github.com/dsoprea/go-logging"

"github.com/dsoprea/go-exif/v2/common"
)

type Tag927CMakerNote struct {
MakerNoteType []byte
MakerNoteBytes []byte
}

func (Tag927CMakerNote) EncoderName() string {
return "Codec927CMakerNote"
}

func (mn Tag927CMakerNote) String() string {
parts := make([]string, len(mn.MakerNoteType))

for i, c := range mn.MakerNoteType {
parts[i] = fmt.Sprintf("%02x", c)
}

h := sha1.New()

_, err := h.Write(mn.MakerNoteBytes)
log.PanicIf(err)

digest := h.Sum(nil)

return fmt.Sprintf("MakerNote<TYPE-ID=[%s] LEN=(%d) SHA1=[%020x]>", strings.Join(parts, " "), len(mn.MakerNoteBytes), digest)
}

type Codec927CMakerNote struct {
}

func (Codec927CMakerNote) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

mn, ok := value.(Tag927CMakerNote)
if ok == false {
log.Panicf("can only encode a Tag927CMakerNote")
}

// TODO(dustin): Confirm this size against the specification.

return mn.MakerNoteBytes, uint32(len(mn.MakerNoteBytes)), nil
}

func (Codec927CMakerNote) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

// MakerNote
// TODO(dustin): !! This is the Wild Wild West. This very well might be a child IFD, but any and all OEM's define their own formats. If we're going to be writing changes and this is complete EXIF (which may not have the first eight bytes), it might be fine. However, if these are just IFDs they'll be relative to the main EXIF, this will invalidate the MakerNote data for IFDs and any other implementations that use offsets unless we can interpret them all. It be best to return to this later and just exclude this from being written for now, though means a loss of a wealth of image metadata.
// -> We can also just blindly try to interpret as an IFD and just validate that it's looks good (maybe it will even have a 'next ifd' pointer that we can validate is 0x0).

valueContext.SetUndefinedValueType(exifcommon.TypeByte)

valueBytes, err := valueContext.ReadBytes()
log.PanicIf(err)

// TODO(dustin): Doesn't work, but here as an example.
// ie := NewIfdEnumerate(valueBytes, byteOrder)

// // TODO(dustin): !! Validate types (might have proprietary types, but it might be worth splitting the list between valid and not valid; maybe fail if a certain proportion are invalid, or maybe aren't less then a certain small integer)?
// ii, err := ie.Collect(0x0)

// for _, entry := range ii.RootIfd.Entries {
// fmt.Printf("ENTRY: 0x%02x %d\n", entry.TagId, entry.TagType)
// }

var makerNoteType []byte
if len(valueBytes) >= 20 {
makerNoteType = valueBytes[:20]
} else {
makerNoteType = valueBytes
}

mn := Tag927CMakerNote{
MakerNoteType: makerNoteType,

// MakerNoteBytes has the whole length of bytes. There's always
// the chance that the first 20 bytes includes actual data.
MakerNoteBytes: valueBytes,
}

return mn, nil
}

func init() {
registerEncoder(
Tag927CMakerNote{},
Codec927CMakerNote{})

registerDecoder(
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
0x927c,
Codec927CMakerNote{})
}
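A hedged usage sketch for the codec restored above (not part of this diff; the payload bytes are invented for illustration):

```go
package main

import (
	"encoding/binary"
	"fmt"
	"log"

	exifundefined "github.com/dsoprea/go-exif/v2/undefined"
)

func main() {
	// Fabricated maker-note payload, purely for illustration.
	payload := []byte{0x01, 0x02, 0x03, 0x04}

	mn := exifundefined.Tag927CMakerNote{
		MakerNoteType:  payload,
		MakerNoteBytes: payload,
	}

	// The codec passes the raw maker-note bytes straight through.
	encoded, unitCount, err := exifundefined.Codec927CMakerNote{}.Encode(mn, binary.BigEndian)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Printf("%s: %d unit(s), % x\n", mn.EncoderName(), unitCount, encoded)
}
```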
@ -1,142 +1,142 @@
package exifundefined

import (
"bytes"
"fmt"

"encoding/binary"

"github.com/dsoprea/go-logging"

"github.com/dsoprea/go-exif/v2/common"
)

var (
exif9286Logger = log.NewLogger("exifundefined.exif_9286_user_comment")
)

const (
TagUndefinedType_9286_UserComment_Encoding_ASCII = iota
TagUndefinedType_9286_UserComment_Encoding_JIS = iota
TagUndefinedType_9286_UserComment_Encoding_UNICODE = iota
TagUndefinedType_9286_UserComment_Encoding_UNDEFINED = iota
)

var (
TagUndefinedType_9286_UserComment_Encoding_Names = map[int]string{
TagUndefinedType_9286_UserComment_Encoding_ASCII: "ASCII",
TagUndefinedType_9286_UserComment_Encoding_JIS: "JIS",
TagUndefinedType_9286_UserComment_Encoding_UNICODE: "UNICODE",
TagUndefinedType_9286_UserComment_Encoding_UNDEFINED: "UNDEFINED",
}

TagUndefinedType_9286_UserComment_Encodings = map[int][]byte{
TagUndefinedType_9286_UserComment_Encoding_ASCII: {'A', 'S', 'C', 'I', 'I', 0, 0, 0},
TagUndefinedType_9286_UserComment_Encoding_JIS: {'J', 'I', 'S', 0, 0, 0, 0, 0},
TagUndefinedType_9286_UserComment_Encoding_UNICODE: {'U', 'n', 'i', 'c', 'o', 'd', 'e', 0},
TagUndefinedType_9286_UserComment_Encoding_UNDEFINED: {0, 0, 0, 0, 0, 0, 0, 0},
TagUndefinedType_9286_UserComment_Encoding_ASCII: []byte{'A', 'S', 'C', 'I', 'I', 0, 0, 0},
TagUndefinedType_9286_UserComment_Encoding_JIS: []byte{'J', 'I', 'S', 0, 0, 0, 0, 0},
TagUndefinedType_9286_UserComment_Encoding_UNICODE: []byte{'U', 'n', 'i', 'c', 'o', 'd', 'e', 0},
TagUndefinedType_9286_UserComment_Encoding_UNDEFINED: []byte{0, 0, 0, 0, 0, 0, 0, 0},
}
)

type Tag9286UserComment struct {
EncodingType int
EncodingBytes []byte
}

func (Tag9286UserComment) EncoderName() string {
return "Codec9286UserComment"
}

func (uc Tag9286UserComment) String() string {
var valuePhrase string

if uc.EncodingType == TagUndefinedType_9286_UserComment_Encoding_ASCII {
return fmt.Sprintf("[ASCII] %s", string(uc.EncodingBytes))
} else {
if len(uc.EncodingBytes) <= 8 {
valuePhrase = fmt.Sprintf("%v", uc.EncodingBytes)
} else {
valuePhrase = fmt.Sprintf("%v...", uc.EncodingBytes[:8])
}
}

return fmt.Sprintf("UserComment<SIZE=(%d) ENCODING=[%s] V=%v LEN=(%d)>", len(uc.EncodingBytes), TagUndefinedType_9286_UserComment_Encoding_Names[uc.EncodingType], valuePhrase, len(uc.EncodingBytes))
}

type Codec9286UserComment struct {
}

func (Codec9286UserComment) Encode(value interface{}, byteOrder binary.ByteOrder) (encoded []byte, unitCount uint32, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

uc, ok := value.(Tag9286UserComment)
if ok == false {
log.Panicf("can only encode a Tag9286UserComment")
}

encodingTypeBytes, found := TagUndefinedType_9286_UserComment_Encodings[uc.EncodingType]
if found == false {
log.Panicf("encoding-type not valid for unknown-type tag 9286 (UserComment): (%d)", uc.EncodingType)
}

encoded = make([]byte, len(uc.EncodingBytes)+8)

copy(encoded[:8], encodingTypeBytes)
copy(encoded[8:], uc.EncodingBytes)

// TODO(dustin): Confirm this size against the specification.

return encoded, uint32(len(encoded)), nil
}

func (Codec9286UserComment) Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

valueContext.SetUndefinedValueType(exifcommon.TypeByte)

valueBytes, err := valueContext.ReadBytes()
log.PanicIf(err)

if len(valueBytes) < 8 {
return nil, ErrUnparseableValue
}

unknownUc := Tag9286UserComment{
EncodingType: TagUndefinedType_9286_UserComment_Encoding_UNDEFINED,
EncodingBytes: []byte{},
}

encoding := valueBytes[:8]
for encodingIndex, encodingBytes := range TagUndefinedType_9286_UserComment_Encodings {
if bytes.Compare(encoding, encodingBytes) == 0 {
uc := Tag9286UserComment{
EncodingType: encodingIndex,
EncodingBytes: valueBytes[8:],
}

return uc, nil
}
}

exif9286Logger.Warningf(nil, "User-comment encoding not valid. Returning 'unknown' type (the default).")
return unknownUc, nil
}

func init() {
registerEncoder(
Tag9286UserComment{},
Codec9286UserComment{})

registerDecoder(
exifcommon.IfdExifStandardIfdIdentity.UnindexedString(),
0x9286,
Codec9286UserComment{})
}
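Similarly, a hedged sketch of encoding a user comment through the codec above (the comment text is made up; it assumes the exported names shown here are importable from the v2 `undefined` package):

```go
package main

import (
	"encoding/binary"
	"fmt"
	"log"

	exifundefined "github.com/dsoprea/go-exif/v2/undefined"
)

func main() {
	// An ASCII user comment; the codec prepends the eight-byte encoding header.
	uc := exifundefined.Tag9286UserComment{
		EncodingType:  exifundefined.TagUndefinedType_9286_UserComment_Encoding_ASCII,
		EncodingBytes: []byte("hello, world"),
	}

	encoded, unitCount, err := exifundefined.Codec9286UserComment{}.Encode(uc, binary.BigEndian)
	if err != nil {
		log.Fatal(err)
	}

	// Expect the "ASCII" header bytes followed by the comment text.
	fmt.Printf("%s -> %d byte(s): % x\n", uc, unitCount, encoded)
}
```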
@ -16,8 +16,8 @@ func TestTagA20CSpatialFrequencyResponse_String(t *testing.T) {
Rows: 9,
ColumnNames: []string{"column1", "column2"},
Values: []exifcommon.Rational{
{1, 2},
{3, 4},
exifcommon.Rational{1, 2},
exifcommon.Rational{3, 4},
},
}

@ -33,8 +33,8 @@ func TestCodecA20CSpatialFrequencyResponse_Encode(t *testing.T) {
Rows: 9,
ColumnNames: []string{"column1", "column2"},
Values: []exifcommon.Rational{
{1, 2},
{3, 4},
exifcommon.Rational{1, 2},
exifcommon.Rational{3, 4},
},
}

@ -67,8 +67,8 @@ func TestCodecA20CSpatialFrequencyResponse_Decode(t *testing.T) {
Rows: 9,
ColumnNames: []string{"column1", "column2"},
Values: []exifcommon.Rational{
{1, 2},
{3, 4},
exifcommon.Rational{1, 2},
exifcommon.Rational{3, 4},
},
}
@ -4,8 +4,6 @@ import (
"github.com/dsoprea/go-logging"
)

// UndefinedTagHandle defines one undefined-type tag with a corresponding
// decoder.
type UndefinedTagHandle struct {
IfdPath string
TagId uint16
@ -37,7 +37,7 @@ type EncodeableValue interface {
String() string
}

// UndefinedValueDecoder knows how to decode an undefined-type tag's value from
// UndefinedValueEncoder knows how to decode an undefined-type tag's value from
// bytes.
type UndefinedValueDecoder interface {
Decode(valueContext *exifcommon.ValueContext) (value EncodeableValue, err error)
v2/utility.go
@ -1,234 +1,234 @@
package exif

import (
"fmt"
"math"
"reflect"
"strconv"
"strings"
"time"

"github.com/dsoprea/go-logging"

"github.com/dsoprea/go-exif/v2/common"
"github.com/dsoprea/go-exif/v2/undefined"
)

var (
utilityLogger = log.NewLogger("exif.utility")
)

var (
timeType = reflect.TypeOf(time.Time{})
)

// ParseExifFullTimestamp parses dates like "2018:11:30 13:01:49" into a UTC
// `time.Time` struct.
func ParseExifFullTimestamp(fullTimestampPhrase string) (timestamp time.Time, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

parts := strings.Split(fullTimestampPhrase, " ")
datestampValue, timestampValue := parts[0], parts[1]

dateParts := strings.Split(datestampValue, ":")

year, err := strconv.ParseUint(dateParts[0], 10, 16)
if err != nil {
log.Panicf("could not parse year")
}

month, err := strconv.ParseUint(dateParts[1], 10, 8)
if err != nil {
log.Panicf("could not parse month")
}

day, err := strconv.ParseUint(dateParts[2], 10, 8)
if err != nil {
log.Panicf("could not parse day")
}

timeParts := strings.Split(timestampValue, ":")

hour, err := strconv.ParseUint(timeParts[0], 10, 8)
if err != nil {
log.Panicf("could not parse hour")
}

minute, err := strconv.ParseUint(timeParts[1], 10, 8)
if err != nil {
log.Panicf("could not parse minute")
}

second, err := strconv.ParseUint(timeParts[2], 10, 8)
if err != nil {
log.Panicf("could not parse second")
}

timestamp = time.Date(int(year), time.Month(month), int(day), int(hour), int(minute), int(second), 0, time.UTC)
return timestamp, nil
}

// ExifFullTimestampString produces a string like "2018:11:30 13:01:49" from a
// `time.Time` struct. It will attempt to convert to UTC first.
func ExifFullTimestampString(t time.Time) (fullTimestampPhrase string) {

// RELEASE(dustin): Dump this for the next release. It duplicates the same function now in exifcommon.

return exifcommon.ExifFullTimestampString(t)
}

// ExifTag is one simple representation of a tag in a flat list of all of them.
type ExifTag struct {
// IfdPath is the fully-qualified IFD path (even though it is not named as
// such).
IfdPath string `json:"ifd_path"`

// TagId is the tag-ID.
TagId uint16 `json:"id"`

// TagName is the tag-name. This is never empty.
TagName string `json:"name"`

// UnitCount is the recorded number of units constution of the value.
UnitCount uint32 `json:"unit_count"`

// TagTypeId is the type-ID.
TagTypeId exifcommon.TagTypePrimitive `json:"type_id"`

// TagTypeName is the type name.
TagTypeName string `json:"type_name"`

// Value is the decoded value.
Value interface{} `json:"value"`

// ValueBytes is the raw, encoded value.
ValueBytes []byte `json:"value_bytes"`

// Formatted is the human representation of the first value (tag values are
// always an array).
FormattedFirst string `json:"formatted_first"`

// Formatted is the human representation of the complete value.
Formatted string `json:"formatted"`

// ChildIfdPath is the name of the child IFD this tag represents (if it
// represents any). Otherwise, this is empty.
ChildIfdPath string `json:"child_ifd_path"`
}

// String returns a string representation.
func (et ExifTag) String() string {
return fmt.Sprintf(
"ExifTag<"+
"IFD-PATH=[%s] "+
"TAG-ID=(0x%02x) "+
"TAG-NAME=[%s] "+
"TAG-TYPE=[%s] "+
"VALUE=[%v] "+
"VALUE-BYTES=(%d) "+
"CHILD-IFD-PATH=[%s]",
et.IfdPath, et.TagId, et.TagName, et.TagTypeName, et.FormattedFirst,
len(et.ValueBytes), et.ChildIfdPath)
}

// RELEASE(dustin): In the next release, add an options struct to Scan() and GetFlatExifData(), and put the MiscellaneousExifData in the return.

// GetFlatExifData returns a simple, flat representation of all tags.
func GetFlatExifData(exifData []byte) (exifTags []ExifTag, err error) {
defer func() {
if state := recover(); state != nil {
err = log.Wrap(state.(error))
}
}()

eh, err := ParseExifHeader(exifData)
log.PanicIf(err)

im := NewIfdMappingWithStandard()
ti := NewTagIndex()

ie := NewIfdEnumerate(im, ti, exifData, eh.ByteOrder)

exifTags = make([]ExifTag, 0)

visitor := func(fqIfdPath string, ifdIndex int, ite *IfdTagEntry) (err error) {
// This encodes down to base64. Since this an example tool and we do not
// expect to ever decode the output, we are not worried about
// specifically base64-encoding it in order to have a measure of
// control.
valueBytes, err := ite.GetRawBytes()
if err != nil {
if err == exifundefined.ErrUnparseableValue {
return nil
}

log.Panic(err)
}

value, err := ite.Value()
if err != nil {
if err == exifcommon.ErrUnhandledUndefinedTypedTag {
value = exifundefined.UnparseableUnknownTagValuePlaceholder
} else {
log.Panic(err)
}
}

et := ExifTag{
IfdPath: fqIfdPath,
TagId: ite.TagId(),
TagName: ite.TagName(),
UnitCount: ite.UnitCount(),
TagTypeId: ite.TagType(),
TagTypeName: ite.TagType().String(),
Value: value,
ValueBytes: valueBytes,
ChildIfdPath: ite.ChildIfdPath(),
}

et.Formatted, err = ite.Format()
log.PanicIf(err)

et.FormattedFirst, err = ite.FormatFirst()
log.PanicIf(err)

exifTags = append(exifTags, et)

return nil
}

_, err = ie.Scan(exifcommon.IfdStandardIfdIdentity, eh.FirstIfdOffset, visitor)
log.PanicIf(err)

return exifTags, nil
}

// GpsDegreesEquals returns true if the two `GpsDegrees` are identical.
func GpsDegreesEquals(gi1, gi2 GpsDegrees) bool {
if gi2.Orientation != gi1.Orientation {
return false
}

degreesRightBound := math.Nextafter(gi1.Degrees, gi1.Degrees+1)
minutesRightBound := math.Nextafter(gi1.Minutes, gi1.Minutes+1)
secondsRightBound := math.Nextafter(gi1.Seconds, gi1.Seconds+1)

if gi2.Degrees < gi1.Degrees || gi2.Degrees >= degreesRightBound {
return false
} else if gi2.Minutes < gi1.Minutes || gi2.Minutes >= minutesRightBound {
return false
} else if gi2.Seconds < gi1.Seconds || gi2.Seconds >= secondsRightBound {
return false
}

return true
}

// IsTime returns true if the value is a `time.Time`.
func IsTime(v interface{}) bool {
return reflect.TypeOf(v) == timeType
}
@ -1,106 +1,106 @@
package exif

import (
"fmt"
"testing"
"time"

"github.com/dsoprea/go-logging"
)

func TestParseExifFullTimestamp(t *testing.T) {
timestamp, err := ParseExifFullTimestamp("2018:11:30 13:01:49")
log.PanicIf(err)

actual := timestamp.Format(time.RFC3339)
expected := "2018-11-30T13:01:49Z"

if actual != expected {
t.Fatalf("time not formatted correctly: [%s] != [%s]", actual, expected)
}
}

func TestExifFullTimestampString(t *testing.T) {
originalPhrase := "2018:11:30 13:01:49"

timestamp, err := ParseExifFullTimestamp(originalPhrase)
log.PanicIf(err)

restoredPhrase := ExifFullTimestampString(timestamp)
if restoredPhrase != originalPhrase {
t.Fatalf("Final phrase [%s] does not equal original phrase [%s]", restoredPhrase, originalPhrase)
}
}

func ExampleParseExifFullTimestamp() {
originalPhrase := "2018:11:30 13:01:49"

timestamp, err := ParseExifFullTimestamp(originalPhrase)
log.PanicIf(err)

fmt.Printf("To Go timestamp: [%s]\n", timestamp.Format(time.RFC3339))

// Output:
// To Go timestamp: [2018-11-30T13:01:49Z]
}

func ExampleExifFullTimestampString() {
originalPhrase := "2018:11:30 13:01:49"

timestamp, err := ParseExifFullTimestamp(originalPhrase)
log.PanicIf(err)

restoredPhrase := ExifFullTimestampString(timestamp)
fmt.Printf("To EXIF timestamp: [%s]\n", restoredPhrase)

// Output:
// To EXIF timestamp: [2018:11:30 13:01:49]
}

func TestGpsDegreesEquals_Equals(t *testing.T) {
gi := GpsDegrees{
Orientation: 'A',
Degrees: 11.0,
Minutes: 22.0,
Seconds: 33.0,
}

r := GpsDegreesEquals(gi, gi)
if r != true {
t.Fatalf("GpsDegrees structs were not equal as expected.")
}
}

func TestGpsDegreesEquals_NotEqual_Orientation(t *testing.T) {
gi1 := GpsDegrees{
Orientation: 'A',
Degrees: 11.0,
Minutes: 22.0,
Seconds: 33.0,
}

gi2 := gi1
gi2.Orientation = 'B'

r := GpsDegreesEquals(gi1, gi2)
if r != false {
t.Fatalf("GpsDegrees structs were equal but not supposed to be.")
}
}

func TestGpsDegreesEquals_NotEqual_Position(t *testing.T) {
gi1 := GpsDegrees{
Orientation: 'A',
Degrees: 11.0,
Minutes: 22.0,
Seconds: 33.0,
}

gi2 := gi1
gi2.Minutes = 22.5

r := GpsDegreesEquals(gi1, gi2)
if r != false {
t.Fatalf("GpsDegrees structs were equal but not supposed to be.")
}
}