Merge pull request #59 from oschwald/greg/prefix-length
Add LookupNetwork method
horgh authored Aug 28, 2019
2 parents 2905694 + 37f6024 commit 78d62e5
Showing 11 changed files with 515 additions and 268 deletions.
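The headline change, the new LookupNetwork method on Reader, lives in reader.go, one of the files whose hunk is not rendered below. The following is a rough usage sketch only: the signature assumed here (returning the containing *net.IPNet and an ok flag alongside the error) is taken from the library's documentation for this method, and the database path and struct tags are illustrative.

package main

import (
	"fmt"
	"log"
	"net"

	"github.com/oschwald/maxminddb-golang"
)

func main() {
	db, err := maxminddb.Open("GeoLite2-City.mmdb") // illustrative path
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	var record struct {
		Country struct {
			ISOCode string `maxminddb:"iso_code"`
		} `maxminddb:"country"`
	}

	// LookupNetwork behaves like Lookup but also reports the network the
	// record belongs to and whether a record was found at all.
	network, ok, err := db.LookupNetwork(net.ParseIP("81.2.69.142"), &record)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(network, ok, record.Country.ISOCode)
}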
30 changes: 30 additions & 0 deletions .golangci.toml
@@ -0,0 +1,30 @@
[run]
deadline = "10m"
tests = true

[linters]
disable-all = true
enable = [
"deadcode",
"depguard",
"errcheck",
"goconst",
"gocyclo",
"gocritic",
"gofmt",
"golint",
"gosec",
"gosimple",
"ineffassign",
"maligned",
"misspell",
"nakedret",
"staticcheck",
"structcheck",
"typecheck",
"unconvert",
"unparam",
"varcheck",
"vet",
"vetshadow",
]
15 changes: 10 additions & 5 deletions .travis.yml
@@ -17,12 +17,15 @@ matrix:
allow_failures:
- go: tip

before_install:
- "if [[ $TRAVIS_GO_VERSION == 1.11 ]]; then go get -u golang.org/x/lint/golint; fi"

install:
- go get -v -t ./...

before_script:
- |
if [[ $TRAVIS_GO_VERSION == 1.12 && $(arch) != 'ppc64le' ]]; then
curl -sfL https://install.goreleaser.com/github.com/golangci/golangci-lint.sh | sh -s -- -b $(go env GOPATH)/bin
fi
script:
- |
if [ $(arch) == "ppc64le" ]; then
@@ -36,7 +39,9 @@ script:
else
go test -race -v -tags appengine
fi
- "if [[ $TRAVIS_GO_VERSION == 1.11 ]]; then go vet ./...; fi"
- "if [[ $TRAVIS_GO_VERSION == 1.11 ]]; then golint .; fi"
- |
if [[ $TRAVIS_GO_VERSION == 1.12 && $(arch) != 'ppc64le' ]]; then
golangci-lint run
fi
sudo: false
83 changes: 32 additions & 51 deletions decoder.go
@@ -27,8 +27,10 @@ const (
_Uint64
_Uint128
_Slice
_Container
_Marker
// We don't use the next two. They are placeholders. See the spec
// for more details.
_Container // nolint: deadcode, varcheck
_Marker // nolint: deadcode, varcheck
_Bool
_Float32
)
@@ -159,10 +161,8 @@ func (d *decoder) unmarshalBool(size uint, offset uint, result reflect.Value) (u
if size > 1 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (bool size of %v)", size)
}
value, newOffset, err := d.decodeBool(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeBool(size, offset)

switch result.Kind() {
case reflect.Bool:
result.SetBool(value)
@@ -207,10 +207,8 @@ func (d *decoder) indirect(result reflect.Value) reflect.Value {
var sliceType = reflect.TypeOf([]byte{})

func (d *decoder) unmarshalBytes(size uint, offset uint, result reflect.Value) (uint, error) {
value, newOffset, err := d.decodeBytes(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeBytes(size, offset)

switch result.Kind() {
case reflect.Slice:
if result.Type() == sliceType {
@@ -230,10 +228,7 @@ func (d *decoder) unmarshalFloat32(size uint, offset uint, result reflect.Value)
if size != 4 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (float32 size of %v)", size)
}
value, newOffset, err := d.decodeFloat32(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeFloat32(size, offset)

switch result.Kind() {
case reflect.Float32, reflect.Float64:
@@ -253,10 +248,8 @@ func (d *decoder) unmarshalFloat64(size uint, offset uint, result reflect.Value)
if size != 8 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (float 64 size of %v)", size)
}
value, newOffset, err := d.decodeFloat64(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeFloat64(size, offset)

switch result.Kind() {
case reflect.Float32, reflect.Float64:
if result.OverflowFloat(value) {
@@ -277,10 +270,7 @@ func (d *decoder) unmarshalInt32(size uint, offset uint, result reflect.Value) (
if size > 4 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (int32 size of %v)", size)
}
value, newOffset, err := d.decodeInt(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeInt(size, offset)

switch result.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
@@ -360,11 +350,8 @@ func (d *decoder) unmarshalSlice(
}

func (d *decoder) unmarshalString(size uint, offset uint, result reflect.Value) (uint, error) {
value, newOffset, err := d.decodeString(size, offset)
value, newOffset := d.decodeString(size, offset)

if err != nil {
return 0, err
}
switch result.Kind() {
case reflect.String:
result.SetString(value)
@@ -384,10 +371,7 @@ func (d *decoder) unmarshalUint(size uint, offset uint, result reflect.Value, ui
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (uint%v size of %v)", uintType, size)
}

value, newOffset, err := d.decodeUint(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeUint(size, offset)

switch result.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
@@ -416,10 +400,7 @@ func (d *decoder) unmarshalUint128(size uint, offset uint, result reflect.Value)
if size > 16 {
return 0, newInvalidDatabaseError("the MaxMind DB file's data section contains bad data (uint128 size of %v)", size)
}
value, newOffset, err := d.decodeUint128(size, offset)
if err != nil {
return 0, err
}
value, newOffset := d.decodeUint128(size, offset)

switch result.Kind() {
case reflect.Struct:
@@ -436,36 +417,36 @@ func (d *decoder) unmarshalUint128(size uint, offset uint, result reflect.Value)
return newOffset, newUnmarshalTypeError(value, result.Type())
}

func (d *decoder) decodeBool(size uint, offset uint) (bool, uint, error) {
return size != 0, offset, nil
func (d *decoder) decodeBool(size uint, offset uint) (bool, uint) {
return size != 0, offset
}

func (d *decoder) decodeBytes(size uint, offset uint) ([]byte, uint, error) {
func (d *decoder) decodeBytes(size uint, offset uint) ([]byte, uint) {
newOffset := offset + size
bytes := make([]byte, size)
copy(bytes, d.buffer[offset:newOffset])
return bytes, newOffset, nil
return bytes, newOffset
}

func (d *decoder) decodeFloat64(size uint, offset uint) (float64, uint, error) {
func (d *decoder) decodeFloat64(size uint, offset uint) (float64, uint) {
newOffset := offset + size
bits := binary.BigEndian.Uint64(d.buffer[offset:newOffset])
return math.Float64frombits(bits), newOffset, nil
return math.Float64frombits(bits), newOffset
}

func (d *decoder) decodeFloat32(size uint, offset uint) (float32, uint, error) {
func (d *decoder) decodeFloat32(size uint, offset uint) (float32, uint) {
newOffset := offset + size
bits := binary.BigEndian.Uint32(d.buffer[offset:newOffset])
return math.Float32frombits(bits), newOffset, nil
return math.Float32frombits(bits), newOffset
}

func (d *decoder) decodeInt(size uint, offset uint) (int, uint, error) {
func (d *decoder) decodeInt(size uint, offset uint) (int, uint) {
newOffset := offset + size
var val int32
for _, b := range d.buffer[offset:newOffset] {
val = (val << 8) | int32(b)
}
return int(val), newOffset, nil
return int(val), newOffset
}

func (d *decoder) decodeMap(
@@ -511,7 +492,7 @@ func (d *decoder) decodePointer(
if pointerSize == 4 {
prefix = 0
} else {
prefix = uint(size & 0x7)
prefix = size & 0x7
}
unpacked := uintFromBytes(prefix, pointerBytes)

@@ -549,9 +530,9 @@ func (d *decoder) decodeSlice(
return offset, nil
}

func (d *decoder) decodeString(size uint, offset uint) (string, uint, error) {
func (d *decoder) decodeString(size uint, offset uint) (string, uint) {
newOffset := offset + size
return string(d.buffer[offset:newOffset]), newOffset, nil
return string(d.buffer[offset:newOffset]), newOffset
}

type fieldsType struct {
@@ -638,23 +619,23 @@ func (d *decoder) decodeStruct(
return offset, nil
}

func (d *decoder) decodeUint(size uint, offset uint) (uint64, uint, error) {
func (d *decoder) decodeUint(size uint, offset uint) (uint64, uint) {
newOffset := offset + size
bytes := d.buffer[offset:newOffset]

var val uint64
for _, b := range bytes {
val = (val << 8) | uint64(b)
}
return val, newOffset, nil
return val, newOffset
}

func (d *decoder) decodeUint128(size uint, offset uint) (*big.Int, uint, error) {
func (d *decoder) decodeUint128(size uint, offset uint) (*big.Int, uint) {
newOffset := offset + size
val := new(big.Int)
val.SetBytes(d.buffer[offset:newOffset])

return val, newOffset, nil
return val, newOffset
}

func uintFromBytes(prefix uint, uintBytes []byte) uint {
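The decoder.go changes above drop the error return from the fixed-size helpers (decodeBool, decodeBytes, decodeFloat32, decodeFloat64, decodeInt, decodeString, decodeUint, decodeUint128): once the caller has validated the size, decoding is a plain byte fold that cannot fail. A minimal standalone sketch of that fold, mirroring the loop in decodeUint (not part of the package):

package main

import "fmt"

// bigEndianUint folds big-endian bytes into a uint64, most significant
// byte first, exactly as decodeUint does once the size check has passed.
func bigEndianUint(b []byte) uint64 {
	var val uint64
	for _, x := range b {
		val = (val << 8) | uint64(x)
	}
	return val
}

func main() {
	fmt.Println(bigEndianUint([]byte{0x01, 0x00}))       // 256
	fmt.Println(bigEndianUint([]byte{0xDE, 0xAD, 0xBE})) // 14593470
}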
15 changes: 8 additions & 7 deletions decoder_test.go
@@ -9,6 +9,7 @@ import (
"testing"

"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)

func TestBool(t *testing.T) {
@@ -69,9 +70,9 @@ func TestInt32(t *testing.T) {

func TestMap(t *testing.T) {
maps := map[string]interface{}{
"e0": map[string]interface{}{},
"e142656e43466f6f": map[string]interface{}{"en": "Foo"},
"e242656e43466f6f427a6843e4baba": map[string]interface{}{"en": "Foo", "zh": "人"},
"e0": map[string]interface{}{},
"e142656e43466f6f": map[string]interface{}{"en": "Foo"},
"e242656e43466f6f427a6843e4baba": map[string]interface{}{"en": "Foo", "zh": "人"},
"e1446e616d65e242656e43466f6f427a6843e4baba": map[string]interface{}{"name": map[string]interface{}{"en": "Foo", "zh": "人"}},
"e1496c616e677561676573020442656e427a68": map[string]interface{}{"languages": []interface{}{"en", "zh"}},
}
@@ -205,7 +206,7 @@ func validateDecoding(t *testing.T, tests map[string]interface{}) {

var result interface{}
_, err := d.decode(0, reflect.ValueOf(&result), 0)
assert.Nil(t, err)
assert.NoError(t, err)

if !reflect.DeepEqual(result, expected) {
// A big case statement would produce nicer errors
@@ -215,8 +216,8 @@ func validateDecoding(t *testing.T, tests map[string]interface{}) {
}

func TestPointers(t *testing.T) {
bytes, err := ioutil.ReadFile("test-data/test-data/maps-with-pointers.raw")
assert.Nil(t, err)
bytes, err := ioutil.ReadFile(testFile("maps-with-pointers.raw"))
require.NoError(t, err)
d := decoder{bytes}

expected := map[uint]map[string]string{
@@ -231,7 +232,7 @@ func TestPointers(t *testing.T) {
for offset, expectedValue := range expected {
var actual map[string]string
_, err := d.decode(offset, reflect.ValueOf(&actual), 0)
assert.Nil(t, err)
assert.NoError(t, err)
if !reflect.DeepEqual(actual, expectedValue) {
t.Errorf("Decode for pointer at %d failed", offset)
}
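The test updates above replace assert.Nil(t, err) with the clearer assert.NoError, and use require.NoError where a failure should abort the test immediately, such as when the fixture file cannot be read. A small self-contained illustration of the difference, using a literal fixture path rather than the testFile helper used above:

package example

import (
	"io/ioutil"
	"testing"

	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func TestFixtureLoads(t *testing.T) {
	// require.NoError stops the test here if err is non-nil, so the checks
	// below never run against missing data.
	data, err := ioutil.ReadFile("test-data/test-data/maps-with-pointers.raw")
	require.NoError(t, err)

	// assert.* records a failure but lets the test keep running, which suits
	// independent, non-fatal checks.
	assert.NotEmpty(t, data)
}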
42 changes: 42 additions & 0 deletions node.go
@@ -0,0 +1,42 @@
package maxminddb

type nodeReader interface {
readLeft(uint) uint
readRight(uint) uint
}

type nodeReader24 struct {
buffer []byte
}

func (n nodeReader24) readLeft(nodeNumber uint) uint {
return (uint(n.buffer[nodeNumber]) << 16) | (uint(n.buffer[nodeNumber+1]) << 8) | uint(n.buffer[nodeNumber+2])
}

func (n nodeReader24) readRight(nodeNumber uint) uint {
return (uint(n.buffer[nodeNumber+3]) << 16) | (uint(n.buffer[nodeNumber+4]) << 8) | uint(n.buffer[nodeNumber+5])
}

type nodeReader28 struct {
buffer []byte
}

func (n nodeReader28) readLeft(nodeNumber uint) uint {
return ((uint(n.buffer[nodeNumber+3]) & 0xF0) << 20) | (uint(n.buffer[nodeNumber]) << 16) | (uint(n.buffer[nodeNumber+1]) << 8) | uint(n.buffer[nodeNumber+2])
}

func (n nodeReader28) readRight(nodeNumber uint) uint {
return ((uint(n.buffer[nodeNumber+3]) & 0x0F) << 24) | (uint(n.buffer[nodeNumber+4]) << 16) | (uint(n.buffer[nodeNumber+5]) << 8) | uint(n.buffer[nodeNumber+6])
}

type nodeReader32 struct {
buffer []byte
}

func (n nodeReader32) readLeft(nodeNumber uint) uint {
return (uint(n.buffer[nodeNumber]) << 24) | (uint(n.buffer[nodeNumber+1]) << 16) | (uint(n.buffer[nodeNumber+2]) << 8) | uint(n.buffer[nodeNumber+3])
}

func (n nodeReader32) readRight(nodeNumber uint) uint {
return (uint(n.buffer[nodeNumber+4]) << 24) | (uint(n.buffer[nodeNumber+5]) << 16) | (uint(n.buffer[nodeNumber+6]) << 8) | uint(n.buffer[nodeNumber+7])
}
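node.go packs two record pointers into each search-tree node, and the 28-bit layout is the least obvious one: the middle byte of the 7-byte node is split, with its high nibble going to the left record and its low nibble to the right. A standalone sketch mirroring nodeReader28 above on a sample node (the byte values are made up for illustration):

package main

import "fmt"

// readLeft28 and readRight28 repeat the nodeReader28 logic: a 28-bit node
// occupies 7 bytes, and byte 3 is shared between the two records.
func readLeft28(buf []byte, node uint) uint {
	return ((uint(buf[node+3]) & 0xF0) << 20) |
		(uint(buf[node]) << 16) |
		(uint(buf[node+1]) << 8) |
		uint(buf[node+2])
}

func readRight28(buf []byte, node uint) uint {
	return ((uint(buf[node+3]) & 0x0F) << 24) |
		(uint(buf[node+4]) << 16) |
		(uint(buf[node+5]) << 8) |
		uint(buf[node+6])
}

func main() {
	// Left record: bytes 0x01 0x02 0x03 plus the high nibble 0xA of byte 3.
	// Right record: bytes 0x04 0x05 0x06 plus the low nibble 0xB of byte 3.
	node := []byte{0x01, 0x02, 0x03, 0xAB, 0x04, 0x05, 0x06}
	fmt.Printf("left  = 0x%07X\n", readLeft28(node, 0))  // 0x0A010203
	fmt.Printf("right = 0x%07X\n", readRight28(node, 0)) // 0x0B040506
}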