diff --git a/.bzrignore b/.bzrignore
deleted file mode 100644
index 340cde711..000000000
--- a/.bzrignore
+++ /dev/null
@@ -1,2 +0,0 @@
-_*
-[856].out
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 000000000..45b38cf13
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,45 @@
+language: go
+
+go_import_path: gopkg.in/mgo.v2
+
+addons:
+ apt:
+ packages:
+
+env:
+ global:
+ - BUCKET=https://niemeyer.s3.amazonaws.com
+ matrix:
+ - GO=1.4.1 MONGODB=x86_64-2.2.7
+ - GO=1.4.1 MONGODB=x86_64-2.4.14
+ - GO=1.4.1 MONGODB=x86_64-2.6.11
+ - GO=1.4.1 MONGODB=x86_64-3.0.9
+ - GO=1.4.1 MONGODB=x86_64-3.2.3-nojournal
+ - GO=1.5.3 MONGODB=x86_64-3.0.9
+ - GO=1.6 MONGODB=x86_64-3.0.9
+
+install:
+ - eval "$(gimme $GO)"
+
+ - wget $BUCKET/mongodb-linux-$MONGODB.tgz
+ - tar xzvf mongodb-linux-$MONGODB.tgz
+ - export PATH=$PWD/mongodb-linux-$MONGODB/bin:$PATH
+
+ - wget $BUCKET/daemontools.tar.gz
+ - tar xzvf daemontools.tar.gz
+ - export PATH=$PWD/daemontools:$PATH
+
+ - go get gopkg.in/check.v1
+ - go get gopkg.in/yaml.v2
+ - go get gopkg.in/tomb.v2
+
+before_script:
+ - export NOIPV6=1
+ - make startdb
+
+script:
+ - (cd bson && go test -check.v)
+ - go test -check.v -fast
+ - (cd txn && go test -check.v)
+
+# vim:sw=4:ts=4:et
diff --git a/Makefile b/Makefile
index 51bee7322..d1027d450 100644
--- a/Makefile
+++ b/Makefile
@@ -1,5 +1,5 @@
startdb:
- @testdb/setup.sh start
+ @harness/setup.sh start
stopdb:
- @testdb/setup.sh stop
+ @harness/setup.sh stop
diff --git a/README.md b/README.md
new file mode 100644
index 000000000..f4e452c04
--- /dev/null
+++ b/README.md
@@ -0,0 +1,4 @@
+The MongoDB driver for Go
+-------------------------
+
+Please go to [http://labix.org/mgo](http://labix.org/mgo) for all project details.
diff --git a/auth.go b/auth.go
index 7f3ba8c30..dc26e52f5 100644
--- a/auth.go
+++ b/auth.go
@@ -28,11 +28,14 @@ package mgo
import (
"crypto/md5"
+ "crypto/sha1"
"encoding/hex"
"errors"
"fmt"
- "labix.org/v2/mgo/bson"
"sync"
+
+ "gopkg.in/mgo.v2/bson"
+ "gopkg.in/mgo.v2/internal/scram"
)
type authCmd struct {
@@ -157,6 +160,9 @@ func (socket *mongoSocket) resetNonce() {
func (socket *mongoSocket) Login(cred Credential) error {
socket.Lock()
+ if cred.Mechanism == "" && socket.serverInfo.MaxWireVersion >= 3 {
+ cred.Mechanism = "SCRAM-SHA-1"
+ }
for _, sockCred := range socket.creds {
if sockCred == cred {
debugf("Socket %p to %s: login: db=%q user=%q (already logged in)", socket, socket.addr, cred.Source, cred.Username)
@@ -176,12 +182,12 @@ func (socket *mongoSocket) Login(cred Credential) error {
var err error
switch cred.Mechanism {
- case "", "MONGO-CR":
+ case "", "MONGODB-CR", "MONGO-CR": // Name changed to MONGODB-CR in SERVER-8501.
err = socket.loginClassic(cred)
case "PLAIN":
err = socket.loginPlain(cred)
- case "MONGO-X509":
- err = fmt.Errorf("unsupported authentication mechanism: %s", cred.Mechanism)
+ case "MONGODB-X509":
+ err = socket.loginX509(cred)
default:
// Try SASL for everything else, if it is available.
err = socket.loginSASL(cred)
@@ -229,6 +235,27 @@ func (socket *mongoSocket) loginClassic(cred Credential) error {
})
}
+type authX509Cmd struct {
+ Authenticate int
+ User string
+ Mechanism string
+}
+
+func (socket *mongoSocket) loginX509(cred Credential) error {
+ cmd := authX509Cmd{Authenticate: 1, User: cred.Username, Mechanism: "MONGODB-X509"}
+ res := authResult{}
+ return socket.loginRun(cred.Source, &cmd, &res, func() error {
+ if !res.Ok {
+ return errors.New(res.ErrMsg)
+ }
+ socket.Lock()
+ socket.dropAuth(cred.Source)
+ socket.creds = append(socket.creds, cred)
+ socket.Unlock()
+ return nil
+ })
+}
+
func (socket *mongoSocket) loginPlain(cred Credential) error {
cmd := saslCmd{Start: 1, Mechanism: "PLAIN", Payload: []byte("\x00" + cred.Username + "\x00" + cred.Password)}
res := authResult{}
@@ -245,7 +272,16 @@ func (socket *mongoSocket) loginPlain(cred Credential) error {
}
func (socket *mongoSocket) loginSASL(cred Credential) error {
- sasl, err := saslNew(cred, socket.Server().Addr)
+ var sasl saslStepper
+ var err error
+ if cred.Mechanism == "SCRAM-SHA-1" {
+ // SCRAM is handled without external libraries.
+ sasl = saslNewScram(cred)
+ } else if len(cred.ServiceHost) > 0 {
+ sasl, err = saslNew(cred, cred.ServiceHost)
+ } else {
+ sasl, err = saslNew(cred, socket.Server().Addr)
+ }
if err != nil {
return err
}
@@ -317,6 +353,25 @@ func (socket *mongoSocket) loginSASL(cred Credential) error {
return nil
}
+func saslNewScram(cred Credential) *saslScram {
+ credsum := md5.New()
+ credsum.Write([]byte(cred.Username + ":mongo:" + cred.Password))
+ client := scram.NewClient(sha1.New, cred.Username, hex.EncodeToString(credsum.Sum(nil)))
+ return &saslScram{cred: cred, client: client}
+}
+
+type saslScram struct {
+ cred Credential
+ client *scram.Client
+}
+
+func (s *saslScram) Close() {}
+
+func (s *saslScram) Step(serverData []byte) (clientData []byte, done bool, err error) {
+ more := s.client.Step(serverData)
+ return s.client.Out(), !more, s.client.Err()
+}
+
func (socket *mongoSocket) loginRun(db string, query, result interface{}, f func() error) error {
var mutex sync.Mutex
var replyErr error
diff --git a/auth_test.go b/auth_test.go
index 07080ca4a..995273475 100644
--- a/auth_test.go
+++ b/auth_test.go
@@ -27,13 +27,19 @@
package mgo_test
import (
+ "crypto/tls"
"flag"
"fmt"
- "labix.org/v2/mgo"
- . "launchpad.net/gocheck"
+ "io/ioutil"
+ "net"
"net/url"
+ "os"
+ "runtime"
"sync"
"time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
)
func (s *S) TestAuthLoginDatabase(c *C) {
@@ -50,7 +56,7 @@ func (s *S) TestAuthLoginDatabase(c *C) {
admindb := session.DB("admin")
err = admindb.Login("root", "wrong")
- c.Assert(err, ErrorMatches, "auth fail(s|ed)")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
err = admindb.Login("root", "rapadura")
c.Assert(err, IsNil)
@@ -76,7 +82,7 @@ func (s *S) TestAuthLoginSession(c *C) {
Password: "wrong",
}
err = session.Login(&cred)
- c.Assert(err, ErrorMatches, "auth fail(s|ed)")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
cred.Password = "rapadura"
@@ -157,7 +163,7 @@ func (s *S) TestAuthUpsertUserErrors(c *C) {
c.Assert(err, ErrorMatches, "user has both Password/PasswordHash and UserSource set")
err = mydb.UpsertUser(&mgo.User{Username: "user", Password: "pass", OtherDBRoles: map[string][]mgo.Role{"db": nil}})
- c.Assert(err, ErrorMatches, "user with OtherDBRoles is only supported in admin database")
+ c.Assert(err, ErrorMatches, "user with OtherDBRoles is only supported in the admin or \\$external databases")
}
func (s *S) TestAuthUpsertUser(c *C) {
@@ -238,7 +244,7 @@ func (s *S) TestAuthUpsertUser(c *C) {
// Can't login directly into the database using UserSource, though.
err = myotherdb.Login("myrwuser", "mypass")
- c.Assert(err, ErrorMatches, "auth fail(s|ed)")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
}
func (s *S) TestAuthUpsertUserOtherDBRoles(c *C) {
@@ -383,7 +389,7 @@ func (s *S) TestAuthAddUserReplaces(c *C) {
admindb.Logout()
err = mydb.Login("myuser", "myoldpass")
- c.Assert(err, ErrorMatches, "auth fail(s|ed)")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
err = mydb.Login("myuser", "mynewpass")
c.Assert(err, IsNil)
@@ -406,9 +412,11 @@ func (s *S) TestAuthRemoveUser(c *C) {
c.Assert(err, IsNil)
err = mydb.RemoveUser("myuser")
c.Assert(err, IsNil)
+ err = mydb.RemoveUser("myuser")
+ c.Assert(err, Equals, mgo.ErrNotFound)
err = mydb.Login("myuser", "mypass")
- c.Assert(err, ErrorMatches, "auth fail(s|ed)")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
}
func (s *S) TestAuthLoginTwiceDoesNothing(c *C) {
@@ -568,7 +576,7 @@ func (s *S) TestAuthLoginCachingWithNewSession(c *C) {
coll := session.DB("mydb").C("mycoll")
err = coll.Insert(M{"n": 1})
- c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized for .*")
+ c.Assert(err, ErrorMatches, "unauthorized|need to login|not authorized .*")
}
func (s *S) TestAuthLoginCachingAcrossPool(c *C) {
@@ -726,7 +734,7 @@ func (s *S) TestAuthURLWrongCredentials(c *C) {
if session != nil {
session.Close()
}
- c.Assert(err, ErrorMatches, "auth fail(s|ed)")
+ c.Assert(err, ErrorMatches, "auth fail(s|ed)|.*Authentication failed.")
c.Assert(session, IsNil)
}
@@ -837,6 +845,124 @@ func (s *S) TestAuthDirectWithLogin(c *C) {
}
}
+func (s *S) TestAuthScramSha1Cred(c *C) {
+ if !s.versionAtLeast(2, 7, 7) {
+ c.Skip("SCRAM-SHA-1 tests depend on 2.7.7")
+ }
+ cred := &mgo.Credential{
+ Username: "root",
+ Password: "rapadura",
+ Mechanism: "SCRAM-SHA-1",
+ Source: "admin",
+ }
+ host := "localhost:40002"
+ c.Logf("Connecting to %s...", host)
+ session, err := mgo.Dial(host)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ mycoll := session.DB("admin").C("mycoll")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = mycoll.Find(nil).One(nil)
+ c.Assert(err, ErrorMatches, "unauthorized|not authorized .*")
+
+ c.Logf("Authenticating...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = mycoll.Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestAuthScramSha1URL(c *C) {
+ if !s.versionAtLeast(2, 7, 7) {
+ c.Skip("SCRAM-SHA-1 tests depend on 2.7.7")
+ }
+ host := "localhost:40002"
+ c.Logf("Connecting to %s...", host)
+ session, err := mgo.Dial(fmt.Sprintf("root:rapadura@%s?authMechanism=SCRAM-SHA-1", host))
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ mycoll := session.DB("admin").C("mycoll")
+
+ c.Logf("Connected! Testing the need for authentication...")
+ err = mycoll.Find(nil).One(nil)
+ c.Assert(err, Equals, mgo.ErrNotFound)
+}
+
+func (s *S) TestAuthX509Cred(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ binfo, err := session.BuildInfo()
+ c.Assert(err, IsNil)
+ if binfo.OpenSSLVersion == "" {
+ c.Skip("server does not support SSL")
+ }
+
+ clientCertPEM, err := ioutil.ReadFile("harness/certs/client.pem")
+ c.Assert(err, IsNil)
+
+ clientCert, err := tls.X509KeyPair(clientCertPEM, clientCertPEM)
+ c.Assert(err, IsNil)
+
+ tlsConfig := &tls.Config{
+ // Isolating tests to client certs, don't care about server validation.
+ InsecureSkipVerify: true,
+ Certificates: []tls.Certificate{clientCert},
+ }
+
+ var host = "localhost:40003"
+ c.Logf("Connecting to %s...", host)
+ session, err = mgo.DialWithInfo(&mgo.DialInfo{
+ Addrs: []string{host},
+ DialServer: func(addr *mgo.ServerAddr) (net.Conn, error) {
+ return tls.Dial("tcp", addr.String(), tlsConfig)
+ },
+ })
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ err = session.Login(&mgo.Credential{Username: "root", Password: "rapadura"})
+ c.Assert(err, IsNil)
+
+ // This needs to be kept in sync with client.pem
+ x509Subject := "CN=localhost,OU=Client,O=MGO,L=MGO,ST=MGO,C=GO"
+
+ externalDB := session.DB("$external")
+ var x509User mgo.User = mgo.User{
+ Username: x509Subject,
+ OtherDBRoles: map[string][]mgo.Role{"admin": []mgo.Role{mgo.RoleRoot}},
+ }
+ err = externalDB.UpsertUser(&x509User)
+ c.Assert(err, IsNil)
+
+ session.LogoutAll()
+
+ c.Logf("Connected! Ensuring authentication is required...")
+ names, err := session.DatabaseNames()
+ c.Assert(err, ErrorMatches, "not authorized .*")
+
+ cred := &mgo.Credential{
+ Username: x509Subject,
+ Mechanism: "MONGODB-X509",
+ Source: "$external",
+ }
+
+ c.Logf("Authenticating...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ names, err = session.DatabaseNames()
+ c.Assert(err, IsNil)
+ c.Assert(len(names) > 0, Equals, true)
+}
+
var (
plainFlag = flag.String("plain", "", "Host to test PLAIN authentication against (depends on custom environment)")
plainUser = "einstein"
@@ -848,9 +974,9 @@ func (s *S) TestAuthPlainCred(c *C) {
c.Skip("no -plain")
}
cred := &mgo.Credential{
- Username: plainUser,
- Password: plainPass,
- Source: "$external",
+ Username: plainUser,
+ Password: plainPass,
+ Source: "$external",
Mechanism: "PLAIN",
}
c.Logf("Connecting to %s...", *plainFlag)
@@ -890,11 +1016,38 @@ func (s *S) TestAuthPlainURL(c *C) {
var (
kerberosFlag = flag.Bool("kerberos", false, "Test Kerberos authentication (depends on custom environment)")
- kerberosHost = "mmscustmongo.10gen.me"
- kerberosUser = "mmsagent/mmscustagent.10gen.me@10GEN.ME"
+ kerberosHost = "ldaptest.10gen.cc"
+ kerberosUser = "drivers@LDAPTEST.10GEN.CC"
+
+ winKerberosPasswordEnv = "MGO_KERBEROS_PASSWORD"
)
-func (s *S) TestAuthKerberosCred(c *C) {
+// Kerberos has its own suite because it talks to a remote server
+// that is prepared to authenticate against a kerberos deployment.
+type KerberosSuite struct{}
+
+var _ = Suite(&KerberosSuite{})
+
+func (kerberosSuite *KerberosSuite) SetUpSuite(c *C) {
+ mgo.SetDebug(true)
+ mgo.SetStats(true)
+}
+
+func (kerberosSuite *KerberosSuite) TearDownSuite(c *C) {
+ mgo.SetDebug(false)
+ mgo.SetStats(false)
+}
+
+func (kerberosSuite *KerberosSuite) SetUpTest(c *C) {
+ mgo.SetLogger((*cLogger)(c))
+ mgo.ResetStats()
+}
+
+func (kerberosSuite *KerberosSuite) TearDownTest(c *C) {
+ mgo.SetLogger(nil)
+}
+
+func (kerberosSuite *KerberosSuite) TestAuthKerberosCred(c *C) {
if !*kerberosFlag {
c.Skip("no -kerberos")
}
@@ -902,34 +1055,126 @@ func (s *S) TestAuthKerberosCred(c *C) {
Username: kerberosUser,
Mechanism: "GSSAPI",
}
+ windowsAppendPasswordToCredential(cred)
c.Logf("Connecting to %s...", kerberosHost)
session, err := mgo.Dial(kerberosHost)
c.Assert(err, IsNil)
defer session.Close()
c.Logf("Connected! Testing the need for authentication...")
- names, err := session.DatabaseNames()
- c.Assert(err, ErrorMatches, "unauthorized")
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, ErrorMatches, ".*authorized.*")
c.Logf("Authenticating...")
err = session.Login(cred)
c.Assert(err, IsNil)
c.Logf("Authenticated!")
- names, err = session.DatabaseNames()
+ n, err = session.DB("kerberos").C("test").Find(M{}).Count()
c.Assert(err, IsNil)
- c.Assert(len(names) > 0, Equals, true)
+ c.Assert(n, Equals, 1)
}
-func (s *S) TestAuthKerberosURL(c *C) {
+func (kerberosSuite *KerberosSuite) TestAuthKerberosURL(c *C) {
if !*kerberosFlag {
c.Skip("no -kerberos")
}
c.Logf("Connecting to %s...", kerberosHost)
- session, err := mgo.Dial(url.QueryEscape(kerberosUser) + "@" + kerberosHost + "?authMechanism=GSSAPI")
+ connectUri := url.QueryEscape(kerberosUser) + "@" + kerberosHost + "?authMechanism=GSSAPI"
+ if runtime.GOOS == "windows" {
+ connectUri = url.QueryEscape(kerberosUser) + ":" + url.QueryEscape(getWindowsKerberosPassword()) + "@" + kerberosHost + "?authMechanism=GSSAPI"
+ }
+ session, err := mgo.Dial(connectUri)
c.Assert(err, IsNil)
defer session.Close()
- names, err := session.DatabaseNames()
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
c.Assert(err, IsNil)
- c.Assert(len(names) > 0, Equals, true)
+ c.Assert(n, Equals, 1)
+}
+
+func (kerberosSuite *KerberosSuite) TestAuthKerberosServiceName(c *C) {
+ if !*kerberosFlag {
+ c.Skip("no -kerberos")
+ }
+
+ wrongServiceName := "wrong"
+ rightServiceName := "mongodb"
+
+ cred := &mgo.Credential{
+ Username: kerberosUser,
+ Mechanism: "GSSAPI",
+ Service: wrongServiceName,
+ }
+ windowsAppendPasswordToCredential(cred)
+
+ c.Logf("Connecting to %s...", kerberosHost)
+ session, err := mgo.Dial(kerberosHost)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ c.Logf("Authenticating with incorrect service name...")
+ err = session.Login(cred)
+ c.Assert(err, ErrorMatches, ".*@LDAPTEST.10GEN.CC not found.*")
+
+ cred.Service = rightServiceName
+ c.Logf("Authenticating with correct service name...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+}
+
+func (kerberosSuite *KerberosSuite) TestAuthKerberosServiceHost(c *C) {
+ if !*kerberosFlag {
+ c.Skip("no -kerberos")
+ }
+
+ wrongServiceHost := "eggs.bacon.tk"
+ rightServiceHost := kerberosHost
+
+ cred := &mgo.Credential{
+ Username: kerberosUser,
+ Mechanism: "GSSAPI",
+ ServiceHost: wrongServiceHost,
+ }
+ windowsAppendPasswordToCredential(cred)
+
+ c.Logf("Connecting to %s...", kerberosHost)
+ session, err := mgo.Dial(kerberosHost)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ c.Logf("Authenticating with incorrect service host...")
+ err = session.Login(cred)
+ c.Assert(err, ErrorMatches, ".*@LDAPTEST.10GEN.CC not found.*")
+
+ cred.ServiceHost = rightServiceHost
+ c.Logf("Authenticating with correct service host...")
+ err = session.Login(cred)
+ c.Assert(err, IsNil)
+ c.Logf("Authenticated!")
+
+ n, err := session.DB("kerberos").C("test").Find(M{}).Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, 1)
+}
+
+// No kinit on SSPI-style Kerberos, so we need to provide a password. In order
+// to avoid inlining password, require it to be set as an environment variable,
+// for instance: `SET MGO_KERBEROS_PASSWORD=this_isnt_the_password`
+func getWindowsKerberosPassword() string {
+ pw := os.Getenv(winKerberosPasswordEnv)
+ if pw == "" {
+ panic(fmt.Sprintf("Need to set %v environment variable to run Kerberos tests on Windows", winKerberosPasswordEnv))
+ }
+ return pw
+}
+
+func windowsAppendPasswordToCredential(cred *mgo.Credential) {
+ if runtime.GOOS == "windows" {
+ cred.Password = getWindowsKerberosPassword()
+ }
}
diff --git a/bson/bson.go b/bson/bson.go
index 3ebfd8438..7fb7f8cae 100644
--- a/bson/bson.go
+++ b/bson/bson.go
@@ -33,10 +33,12 @@
package bson
import (
+ "bytes"
"crypto/md5"
"crypto/rand"
"encoding/binary"
"encoding/hex"
+ "encoding/json"
"errors"
"fmt"
"io"
@@ -117,7 +119,7 @@ type M map[string]interface{}
// using a map is generally more comfortable. See bson.M and bson.RawD.
type D []DocElem
-// See the D type.
+// DocElem is an element of the bson.D document representation.
type DocElem struct {
Name string
Value interface{}
@@ -171,7 +173,7 @@ type ObjectId string
func ObjectIdHex(s string) ObjectId {
d, err := hex.DecodeString(s)
if err != nil || len(d) != 12 {
- panic(fmt.Sprintf("Invalid input to ObjectIdHex: %q", s))
+ panic(fmt.Sprintf("invalid input to ObjectIdHex: %q", s))
}
return ObjectId(d)
}
@@ -188,15 +190,25 @@ func IsObjectIdHex(s string) bool {
// objectIdCounter is atomically incremented when generating a new ObjectId
// using NewObjectId() function. It's used as a counter part of an id.
-var objectIdCounter uint32 = 0
+var objectIdCounter uint32 = readRandomUint32()
+
+// readRandomUint32 returns a random objectIdCounter.
+func readRandomUint32() uint32 {
+ var b [4]byte
+ _, err := io.ReadFull(rand.Reader, b[:])
+ if err != nil {
+ panic(fmt.Errorf("cannot read random object id: %v", err))
+ }
+ return uint32((uint32(b[0]) << 0) | (uint32(b[1]) << 8) | (uint32(b[2]) << 16) | (uint32(b[3]) << 24))
+}
// machineId stores machine id generated once and used in subsequent calls
// to NewObjectId function.
var machineId = readMachineId()
+var processId = os.Getpid()
-// readMachineId generates machine id and puts it into the machineId global
-// variable. If this function fails to get the hostname, it will cause
-// a runtime error.
+// readMachineId generates and returns a machine id.
+// If this function fails to get the hostname it will cause a runtime error.
func readMachineId() []byte {
var sum [3]byte
id := sum[:]
@@ -224,9 +236,8 @@ func NewObjectId() ObjectId {
b[5] = machineId[1]
b[6] = machineId[2]
// Pid, 2 bytes, specs don't specify endianness, but we use big endian.
- pid := os.Getpid()
- b[7] = byte(pid >> 8)
- b[8] = byte(pid)
+ b[7] = byte(processId >> 8)
+ b[8] = byte(processId)
// Increment, 3 bytes, big endian
i := atomic.AddUint32(&objectIdCounter, 1)
b[9] = byte(i >> 16)
@@ -262,15 +273,60 @@ func (id ObjectId) MarshalJSON() ([]byte, error) {
return []byte(fmt.Sprintf(`"%x"`, string(id))), nil
}
+var nullBytes = []byte("null")
+
// UnmarshalJSON turns *bson.ObjectId into a json.Unmarshaller.
func (id *ObjectId) UnmarshalJSON(data []byte) error {
+ if len(data) > 0 && (data[0] == '{' || data[0] == 'O') {
+ var v struct {
+ Id json.RawMessage `json:"$oid"`
+ Func struct {
+ Id json.RawMessage
+ } `json:"$oidFunc"`
+ }
+ err := jdec(data, &v)
+ if err == nil {
+ if len(v.Id) > 0 {
+ data = []byte(v.Id)
+ } else {
+ data = []byte(v.Func.Id)
+ }
+ }
+ }
+ if len(data) == 2 && data[0] == '"' && data[1] == '"' || bytes.Equal(data, nullBytes) {
+ *id = ""
+ return nil
+ }
if len(data) != 26 || data[0] != '"' || data[25] != '"' {
- return errors.New(fmt.Sprintf("Invalid ObjectId in JSON: %s", string(data)))
+ return errors.New(fmt.Sprintf("invalid ObjectId in JSON: %s", string(data)))
}
var buf [12]byte
_, err := hex.Decode(buf[:], data[1:25])
if err != nil {
- return errors.New(fmt.Sprintf("Invalid ObjectId in JSON: %s (%s)", string(data), err))
+ return errors.New(fmt.Sprintf("invalid ObjectId in JSON: %s (%s)", string(data), err))
+ }
+ *id = ObjectId(string(buf[:]))
+ return nil
+}
+
+// MarshalText turns bson.ObjectId into an encoding.TextMarshaler.
+func (id ObjectId) MarshalText() ([]byte, error) {
+ return []byte(fmt.Sprintf("%x", string(id))), nil
+}
+
+// UnmarshalText turns *bson.ObjectId into an encoding.TextUnmarshaler.
+func (id *ObjectId) UnmarshalText(data []byte) error {
+ if len(data) == 1 && data[0] == ' ' || len(data) == 0 {
+ *id = ""
+ return nil
+ }
+ if len(data) != 24 {
+ return fmt.Errorf("invalid ObjectId: %s", data)
+ }
+ var buf [12]byte
+ _, err := hex.Decode(buf[:], data[:])
+ if err != nil {
+ return fmt.Errorf("invalid ObjectId: %s (%s)", data, err)
}
*id = ObjectId(string(buf[:]))
return nil
@@ -285,7 +341,7 @@ func (id ObjectId) Valid() bool {
// Calling this function with an invalid id will cause a runtime panic.
func (id ObjectId) byteSlice(start, end int) []byte {
if len(id) != 12 {
- panic(fmt.Sprintf("Invalid ObjectId: %q", string(id)))
+ panic(fmt.Sprintf("invalid ObjectId: %q", string(id)))
}
return []byte(string(id)[start:end])
}
@@ -386,6 +442,15 @@ type JavaScript struct {
Scope interface{}
}
+// DBPointer refers to a document id in a namespace.
+//
+// This type is deprecated in the BSON specification and should not be used
+// except for backwards compatibility with ancient applications.
+type DBPointer struct {
+ Namespace string
+ Id ObjectId
+}
+
const initialBufferSize = 64
func handleErr(err *error) {
@@ -405,7 +470,8 @@ func handleErr(err *error) {
}
// Marshal serializes the in value, which may be a map or a struct value.
-// In the case of struct values, only exported fields will be serialized.
+// In the case of struct values, only exported fields will be serialized,
+// and the order of serialized fields will match that of the struct itself.
// The lowercased field name is used as the key for each exported field,
// but this behavior may be changed using the respective field tag.
// The tag may also contain flags to tweak the marshalling behavior for
@@ -448,6 +514,7 @@ func Marshal(in interface{}) (out []byte, err error) {
// Unmarshal deserializes data from in into the out value. The out value
// must be a map, a pointer to a struct, or a pointer to a bson.D value.
+// In the case of struct values, only exported fields will be deserialized.
// The lowercased field name is used as the key for each exported field,
// but this behavior may be changed using the respective field tag.
// The tag may also contain flags to tweak the marshalling behavior for
@@ -481,10 +548,17 @@ func Marshal(in interface{}) (out []byte, err error) {
//
// Pointer values are initialized when necessary.
func Unmarshal(in []byte, out interface{}) (err error) {
+ if raw, ok := out.(*Raw); ok {
+ raw.Kind = 3
+ raw.Data = in
+ return nil
+ }
defer handleErr(&err)
v := reflect.ValueOf(out)
switch v.Kind() {
- case reflect.Map, reflect.Ptr:
+ case reflect.Ptr:
+ fallthrough
+ case reflect.Map:
d := newDecoder(in)
d.readDocTo(v)
case reflect.Struct:
@@ -570,7 +644,7 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
inlineMap := -1
for i := 0; i != n; i++ {
field := st.Field(i)
- if field.PkgPath != "" {
+ if field.PkgPath != "" && !field.Anonymous {
continue // Private field
}
@@ -584,24 +658,6 @@ func getStructInfo(st reflect.Type) (*structInfo, error) {
continue
}
- // XXX Drop this after a few releases.
- if s := strings.Index(tag, "/"); s >= 0 {
- recommend := tag[:s]
- for _, c := range tag[s+1:] {
- switch c {
- case 'c':
- recommend += ",omitempty"
- case 's':
- recommend += ",minsize"
- default:
- msg := fmt.Sprintf("Unsupported flag %q in tag %q of type %s", string([]byte{uint8(c)}), tag, st)
- panic(externalPanic(msg))
- }
- }
- msg := fmt.Sprintf("Replace tag %q in field %s of type %s by %q", tag, field.Name, st, recommend)
- panic(externalPanic(msg))
- }
-
inline := false
fields := strings.Split(tag, ",")
if len(fields) > 1 {
diff --git a/bson/bson_test.go b/bson/bson_test.go
index 1263e97a1..37451f9fd 100644
--- a/bson/bson_test.go
+++ b/bson/bson_test.go
@@ -29,14 +29,19 @@ package bson_test
import (
"encoding/binary"
+ "encoding/hex"
"encoding/json"
+ "encoding/xml"
"errors"
- "labix.org/v2/mgo/bson"
- . "launchpad.net/gocheck"
"net/url"
"reflect"
+ "strings"
"testing"
"time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2/bson"
+ "gopkg.in/yaml.v2"
)
func TestAll(t *testing.T) {
@@ -66,10 +71,10 @@ func makeZeroDoc(value interface{}) (zero interface{}) {
case reflect.Ptr:
pv := reflect.New(v.Type().Elem())
zero = pv.Interface()
- case reflect.Slice:
+ case reflect.Slice, reflect.Int, reflect.Int64, reflect.Struct:
zero = reflect.New(t).Interface()
default:
- panic("unsupported doc type")
+ panic("unsupported doc type: " + t.Name())
}
return zero
}
@@ -138,6 +143,8 @@ var allItems = []testItemType{
"\x06_\x00"},
{bson.M{"_": bson.ObjectId("0123456789ab")},
"\x07_\x000123456789ab"},
+ {bson.M{"_": bson.DBPointer{"testnamespace", bson.ObjectId("0123456789ab")}},
+ "\x0C_\x00\x0e\x00\x00\x00testnamespace\x000123456789ab"},
{bson.M{"_": false},
"\x08_\x00\x00"},
{bson.M{"_": true},
@@ -543,6 +550,10 @@ var unmarshalItems = []testItemType{
// Decode old binary without length. According to the spec, this shouldn't happen.
{bson.M{"_": []byte("old")},
"\x05_\x00\x03\x00\x00\x00\x02old"},
+
+ // Decode a doc within a doc in to a slice within a doc; shouldn't error
+ {&struct{ Foo []string }{},
+ "\x03\x66\x6f\x6f\x00\x05\x00\x00\x00\x00"},
}
func (s *S) TestUnmarshalOneWayItems(c *C) {
@@ -579,8 +590,12 @@ var marshalErrorItems = []testItemType{
"Can't marshal complex128 in a BSON document"},
{&structWithDupKeys{},
"Duplicated key 'name' in struct bson_test.structWithDupKeys"},
- {bson.Raw{0x0A, []byte{}},
- "Attempted to unmarshal Raw kind 10 as a document"},
+ {bson.Raw{0xA, []byte{}},
+ "Attempted to marshal Raw kind 10 as a document"},
+ {bson.Raw{0x3, []byte{}},
+ "Attempted to marshal empty Raw document"},
+ {bson.M{"w": bson.Raw{0x3, []byte{}}},
+ "Attempted to marshal empty Raw document"},
{&inlineCantPtr{&struct{ A, B int }{1, 2}},
"Option ,inline needs a struct value or map field"},
{&inlineDupName{1, struct{ A, B int }{2, 3}},
@@ -632,6 +647,10 @@ var unmarshalErrorItems = []unmarshalErrorType{
{123,
"\x10name\x00\x08\x00\x00\x00",
"Unmarshal needs a map or a pointer to a struct."},
+
+ {nil,
+ "\x08\x62\x00\x02",
+ "encoded boolean must be 1 or 0, found 2"},
}
func (s *S) TestUnmarshalErrorItems(c *C) {
@@ -684,7 +703,7 @@ func (s *S) TestUnmarshalRawErrorItems(c *C) {
}
var corruptedData = []string{
- "\x04\x00\x00\x00\x00", // Shorter than minimum
+ "\x04\x00\x00\x00\x00", // Document shorter than minimum
"\x06\x00\x00\x00\x00", // Not enough data
"\x05\x00\x00", // Broken length
"\x05\x00\x00\x00\xff", // Corrupted termination
@@ -701,6 +720,15 @@ var corruptedData = []string{
// String with corrupted end.
wrapInDoc("\x02\x00\x03\x00\x00\x00yo\xFF"),
+
+ // String with negative length (issue #116).
+ "\x0c\x00\x00\x00\x02x\x00\xff\xff\xff\xff\x00",
+
+ // String with zero length (must include trailing '\x00')
+ "\x0c\x00\x00\x00\x02x\x00\x00\x00\x00\x00\x00",
+
+ // Binary with negative length.
+ "\r\x00\x00\x00\x05x\x00\xff\xff\xff\xff\x00\x00",
}
func (s *S) TestUnmarshalMapDocumentTooShort(c *C) {
@@ -976,6 +1004,9 @@ type condTime struct {
type condStruct struct {
V struct{ A []int } ",omitempty"
}
+type condRaw struct {
+ V bson.Raw ",omitempty"
+}
type shortInt struct {
V int64 ",minsize"
@@ -1022,6 +1053,62 @@ type inlineDupMap struct {
type inlineBadKeyMap struct {
M map[int]int ",inline"
}
+type inlineUnexported struct {
+ M map[string]interface{} ",inline"
+ unexported ",inline"
+}
+type unexported struct {
+ A int
+}
+
+type getterSetterD bson.D
+
+func (s getterSetterD) GetBSON() (interface{}, error) {
+ if len(s) == 0 {
+ return bson.D{}, nil
+ }
+ return bson.D(s[:len(s)-1]), nil
+}
+
+func (s *getterSetterD) SetBSON(raw bson.Raw) error {
+ var doc bson.D
+ err := raw.Unmarshal(&doc)
+ doc = append(doc, bson.DocElem{"suffix", true})
+ *s = getterSetterD(doc)
+ return err
+}
+
+type getterSetterInt int
+
+func (i getterSetterInt) GetBSON() (interface{}, error) {
+ return bson.D{{"a", int(i)}}, nil
+}
+
+func (i *getterSetterInt) SetBSON(raw bson.Raw) error {
+ var doc struct{ A int }
+ err := raw.Unmarshal(&doc)
+ *i = getterSetterInt(doc.A)
+ return err
+}
+
+type ifaceType interface {
+ Hello()
+}
+
+type ifaceSlice []ifaceType
+
+func (s *ifaceSlice) SetBSON(raw bson.Raw) error {
+ var ns []int
+ if err := raw.Unmarshal(&ns); err != nil {
+ return err
+ }
+ *s = make(ifaceSlice, ns[0])
+ return nil
+}
+
+func (s ifaceSlice) GetBSON() (interface{}, error) {
+ return []int{len(s)}, nil
+}
type (
MyString string
@@ -1040,6 +1127,8 @@ var (
int64ptr = &int64var
intvar = int(42)
intptr = &intvar
+
+ gsintvar = getterSetterInt(42)
)
func parseURL(s string) *url.URL {
@@ -1178,6 +1267,9 @@ var twoWayCrossItems = []crossTypeItem{
{&condStruct{struct{ A []int }{[]int{1}}}, bson.M{"v": bson.M{"a": []interface{}{1}}}},
{&condStruct{struct{ A []int }{}}, bson.M{}},
+ {&condRaw{bson.Raw{Kind: 0x0A, Data: []byte{}}}, bson.M{"v": nil}},
+ {&condRaw{bson.Raw{Kind: 0x00}}, bson.M{}},
+
{&namedCondStr{"yo"}, map[string]string{"myv": "yo"}},
{&namedCondStr{}, map[string]string{}},
@@ -1199,6 +1291,10 @@ var twoWayCrossItems = []crossTypeItem{
{&inlineMapInt{A: 1, M: map[string]int{"b": 2}}, map[string]int{"a": 1, "b": 2}},
{&inlineMapInt{A: 1, M: nil}, map[string]int{"a": 1}},
{&inlineMapMyM{A: 1, M: MyM{"b": MyM{"c": 3}}}, map[string]interface{}{"a": 1, "b": map[string]interface{}{"c": 3}}},
+ {&inlineUnexported{M: map[string]interface{}{"b": 1}, unexported: unexported{A: 2}}, map[string]interface{}{"b": 1, "a": 2}},
+
+ // []byte <=> Binary
+ {&struct{ B []byte }{[]byte("abc")}, map[string]bson.Binary{"b": bson.Binary{Data: []byte("abc")}}},
// []byte <=> MyBytes
{&struct{ B MyBytes }{[]byte("abc")}, map[string]string{"b": "abc"}},
@@ -1214,6 +1310,7 @@ var twoWayCrossItems = []crossTypeItem{
// arrays
{&struct{ V [2]int }{[...]int{1, 2}}, map[string][2]int{"v": [2]int{1, 2}}},
+ {&struct{ V [2]byte }{[...]byte{1, 2}}, map[string][2]byte{"v": [2]byte{1, 2}}},
// zero time
{&struct{ V time.Time }{}, map[string]interface{}{"v": time.Time{}}},
@@ -1225,6 +1322,7 @@ var twoWayCrossItems = []crossTypeItem{
// bson.D <=> []DocElem
{&bson.D{{"a", bson.D{{"b", 1}, {"c", 2}}}}, &bson.D{{"a", bson.D{{"b", 1}, {"c", 2}}}}},
{&bson.D{{"a", bson.D{{"b", 1}, {"c", 2}}}}, &MyD{{"a", MyD{{"b", 1}, {"c", 2}}}}},
+ {&struct{ V MyD }{MyD{{"a", 1}}}, &bson.D{{"v", bson.D{{"a", 1}}}}},
// bson.RawD <=> []RawDocElem
{&bson.RawD{{"a", bson.Raw{0x08, []byte{0x01}}}}, &bson.RawD{{"a", bson.Raw{0x08, []byte{0x01}}}}},
@@ -1236,6 +1334,18 @@ var twoWayCrossItems = []crossTypeItem{
// bson.M <=> map[MyString]
{bson.M{"a": bson.M{"b": 1, "c": 2}}, map[MyString]interface{}{"a": map[MyString]interface{}{"b": 1, "c": 2}}},
+
+ // json.Number <=> int64, float64
+ {&struct{ N json.Number }{"5"}, map[string]interface{}{"n": int64(5)}},
+ {&struct{ N json.Number }{"5.05"}, map[string]interface{}{"n": 5.05}},
+ {&struct{ N json.Number }{"9223372036854776000"}, map[string]interface{}{"n": float64(1 << 63)}},
+
+ // bson.D <=> non-struct getter/setter
+ {&bson.D{{"a", 1}}, &getterSetterD{{"a", 1}, {"suffix", true}}},
+ {&bson.D{{"a", 42}}, &gsintvar},
+
+ // Interface slice setter.
+ {&struct{ V ifaceSlice }{ifaceSlice{nil, nil, nil}}, bson.M{"v": []interface{}{3}}},
}
// Same thing, but only one way (obj1 => obj2).
@@ -1251,6 +1361,14 @@ var oneWayCrossItems = []crossTypeItem{
// Would get decoded into a int32 too in the opposite direction.
{&shortIface{int64(1) << 30}, map[string]interface{}{"v": 1 << 30}},
+
+ // Ensure omitempty on struct with private fields works properly.
+ {&struct {
+ V struct{ v time.Time } ",omitempty"
+ }{}, map[string]interface{}{}},
+
+ // Attempt to marshal slice into RawD (issue #120).
+ {bson.M{"x": []int{1, 2, 3}}, &struct{ X bson.RawD }{}},
}
func testCrossPair(c *C, dump interface{}, load interface{}) {
@@ -1401,31 +1519,241 @@ func (s *S) TestNewObjectIdWithTime(c *C) {
// ObjectId JSON marshalling.
type jsonType struct {
- Id *bson.ObjectId
-}
+ Id bson.ObjectId
+}
+
+var jsonIdTests = []struct {
+ value jsonType
+ json string
+ marshal bool
+ unmarshal bool
+ error string
+}{{
+ value: jsonType{Id: bson.ObjectIdHex("4d88e15b60f486e428412dc9")},
+ json: `{"Id":"4d88e15b60f486e428412dc9"}`,
+ marshal: true,
+ unmarshal: true,
+}, {
+ value: jsonType{},
+ json: `{"Id":""}`,
+ marshal: true,
+ unmarshal: true,
+}, {
+ value: jsonType{},
+ json: `{"Id":null}`,
+ marshal: false,
+ unmarshal: true,
+}, {
+ json: `{"Id":"4d88e15b60f486e428412dc9A"}`,
+ error: `invalid ObjectId in JSON: "4d88e15b60f486e428412dc9A"`,
+ marshal: false,
+ unmarshal: true,
+}, {
+ json: `{"Id":"4d88e15b60f486e428412dcZ"}`,
+ error: `invalid ObjectId in JSON: "4d88e15b60f486e428412dcZ" .*`,
+ marshal: false,
+ unmarshal: true,
+}}
func (s *S) TestObjectIdJSONMarshaling(c *C) {
- id := bson.ObjectIdHex("4d88e15b60f486e428412dc9")
- v := jsonType{Id: &id}
- data, err := json.Marshal(&v)
- c.Assert(err, IsNil)
- c.Assert(string(data), Equals, `{"Id":"4d88e15b60f486e428412dc9"}`)
+ for _, test := range jsonIdTests {
+ if test.marshal {
+ data, err := json.Marshal(&test.value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, test.json)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+
+ if test.unmarshal {
+ var value jsonType
+ err := json.Unmarshal([]byte(test.json), &value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(value, DeepEquals, test.value)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+ }
}
-func (s *S) TestObjectIdJSONUnmarshaling(c *C) {
- data := []byte(`{"Id":"4d88e15b60f486e428412dc9"}`)
- v := jsonType{}
- err := json.Unmarshal(data, &v)
- c.Assert(err, IsNil)
- c.Assert(*v.Id, Equals, bson.ObjectIdHex("4d88e15b60f486e428412dc9"))
+// --------------------------------------------------------------------------
+// Spec tests
+
+type specTest struct {
+ Description string
+ Documents []struct {
+ Decoded map[string]interface{}
+ Encoded string
+ DecodeOnly bool `yaml:"decodeOnly"`
+ Error interface{}
+ }
}
-func (s *S) TestObjectIdJSONUnmarshalingError(c *C) {
- v := jsonType{}
- err := json.Unmarshal([]byte(`{"Id":"4d88e15b60f486e428412dc9A"}`), &v)
- c.Assert(err, ErrorMatches, `Invalid ObjectId in JSON: "4d88e15b60f486e428412dc9A"`)
- err = json.Unmarshal([]byte(`{"Id":"4d88e15b60f486e428412dcZ"}`), &v)
- c.Assert(err, ErrorMatches, `Invalid ObjectId in JSON: "4d88e15b60f486e428412dcZ" .*`)
+func (s *S) TestSpecTests(c *C) {
+ for _, data := range specTests {
+ var test specTest
+ err := yaml.Unmarshal([]byte(data), &test)
+ c.Assert(err, IsNil)
+
+ c.Logf("Running spec test set %q", test.Description)
+
+ for _, doc := range test.Documents {
+ if doc.Error != nil {
+ continue
+ }
+ c.Logf("Ensuring %q decodes as %v", doc.Encoded, doc.Decoded)
+ var decoded map[string]interface{}
+ encoded, err := hex.DecodeString(doc.Encoded)
+ c.Assert(err, IsNil)
+ err = bson.Unmarshal(encoded, &decoded)
+ c.Assert(err, IsNil)
+ c.Assert(decoded, DeepEquals, doc.Decoded)
+ }
+
+ for _, doc := range test.Documents {
+ if doc.DecodeOnly || doc.Error != nil {
+ continue
+ }
+ c.Logf("Ensuring %v encodes as %q", doc.Decoded, doc.Encoded)
+ encoded, err := bson.Marshal(doc.Decoded)
+ c.Assert(err, IsNil)
+ c.Assert(strings.ToUpper(hex.EncodeToString(encoded)), Equals, doc.Encoded)
+ }
+
+ for _, doc := range test.Documents {
+ if doc.Error == nil {
+ continue
+ }
+ c.Logf("Ensuring %q errors when decoded: %s", doc.Encoded, doc.Error)
+ var decoded map[string]interface{}
+ encoded, err := hex.DecodeString(doc.Encoded)
+ c.Assert(err, IsNil)
+ err = bson.Unmarshal(encoded, &decoded)
+ c.Assert(err, NotNil)
+ c.Logf("Failed with: %v", err)
+ }
+ }
+}
+
+// --------------------------------------------------------------------------
+// ObjectId Text encoding.TextUnmarshaler.
+
+var textIdTests = []struct {
+ value bson.ObjectId
+ text string
+ marshal bool
+ unmarshal bool
+ error string
+}{{
+ value: bson.ObjectIdHex("4d88e15b60f486e428412dc9"),
+ text: "4d88e15b60f486e428412dc9",
+ marshal: true,
+ unmarshal: true,
+}, {
+ text: "",
+ marshal: true,
+ unmarshal: true,
+}, {
+ text: "4d88e15b60f486e428412dc9A",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dc9A`,
+}, {
+ text: "4d88e15b60f486e428412dcZ",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dcZ .*`,
+}}
+
+func (s *S) TestObjectIdTextMarshaling(c *C) {
+ for _, test := range textIdTests {
+ if test.marshal {
+ data, err := test.value.MarshalText()
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, test.text)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+
+ if test.unmarshal {
+ err := test.value.UnmarshalText([]byte(test.text))
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ if test.value != "" {
+ value := bson.ObjectIdHex(test.text)
+ c.Assert(value, DeepEquals, test.value)
+ }
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+ }
+}
+
+// --------------------------------------------------------------------------
+// ObjectId XML marshalling.
+
+type xmlType struct {
+ Id bson.ObjectId
+}
+
+var xmlIdTests = []struct {
+ value xmlType
+ xml string
+ marshal bool
+ unmarshal bool
+ error string
+}{{
+ value: xmlType{Id: bson.ObjectIdHex("4d88e15b60f486e428412dc9")},
+ xml: "4d88e15b60f486e428412dc9",
+ marshal: true,
+ unmarshal: true,
+}, {
+ value: xmlType{},
+ xml: "",
+ marshal: true,
+ unmarshal: true,
+}, {
+ xml: "4d88e15b60f486e428412dc9A",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dc9A`,
+}, {
+ xml: "4d88e15b60f486e428412dcZ",
+ marshal: false,
+ unmarshal: true,
+ error: `invalid ObjectId: 4d88e15b60f486e428412dcZ .*`,
+}}
+
+func (s *S) TestObjectIdXMLMarshaling(c *C) {
+ for _, test := range xmlIdTests {
+ if test.marshal {
+ data, err := xml.Marshal(&test.value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(string(data), Equals, test.xml)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+
+ if test.unmarshal {
+ var value xmlType
+ err := xml.Unmarshal([]byte(test.xml), &value)
+ if test.error == "" {
+ c.Assert(err, IsNil)
+ c.Assert(value, DeepEquals, test.value)
+ } else {
+ c.Assert(err, ErrorMatches, test.error)
+ }
+ }
+ }
}
// --------------------------------------------------------------------------
@@ -1435,14 +1763,21 @@ type BenchT struct {
A, B, C, D, E, F string
}
-func BenchmarkUnmarhsalStruct(b *testing.B) {
+type BenchRawT struct {
+ A string
+ B int
+ C bson.M
+ D []float64
+}
+
+func (s *S) BenchmarkUnmarhsalStruct(c *C) {
v := BenchT{A: "A", D: "D", E: "E"}
data, err := bson.Marshal(&v)
if err != nil {
panic(err)
}
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
err = bson.Unmarshal(data, &v)
}
if err != nil {
@@ -1450,17 +1785,48 @@ func BenchmarkUnmarhsalStruct(b *testing.B) {
}
}
-func BenchmarkUnmarhsalMap(b *testing.B) {
+func (s *S) BenchmarkUnmarhsalMap(c *C) {
m := bson.M{"a": "a", "d": "d", "e": "e"}
data, err := bson.Marshal(&m)
if err != nil {
panic(err)
}
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
err = bson.Unmarshal(data, &m)
}
if err != nil {
panic(err)
}
}
+
+func (s *S) BenchmarkUnmarshalRaw(c *C) {
+ var err error
+ m := BenchRawT{
+ A: "test_string",
+ B: 123,
+ C: bson.M{
+ "subdoc_int": 12312,
+ "subdoc_doc": bson.M{"1": 1},
+ },
+ D: []float64{0.0, 1.3333, -99.9997, 3.1415},
+ }
+ data, err := bson.Marshal(&m)
+ if err != nil {
+ panic(err)
+ }
+ raw := bson.Raw{}
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ err = bson.Unmarshal(data, &raw)
+ }
+ if err != nil {
+ panic(err)
+ }
+}
+
+func (s *S) BenchmarkNewObjectId(c *C) {
+ for i := 0; i < c.N; i++ {
+ bson.NewObjectId()
+ }
+}
diff --git a/bson/decimal.go b/bson/decimal.go
new file mode 100644
index 000000000..3d2f70020
--- /dev/null
+++ b/bson/decimal.go
@@ -0,0 +1,310 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package bson
+
+import (
+ "fmt"
+ "strconv"
+ "strings"
+)
+
+// Decimal128 holds decimal128 BSON values.
+type Decimal128 struct {
+ h, l uint64
+}
+
+func (d Decimal128) String() string {
+ var pos int // positive sign
+ var e int // exponent
+ var h, l uint64 // significand high/low
+
+ if d.h>>63&1 == 0 {
+ pos = 1
+ }
+
+ switch d.h >> 58 & (1<<5 - 1) {
+ case 0x1F:
+ return "NaN"
+ case 0x1E:
+ return "-Inf"[pos:]
+ }
+
+ l = d.l
+ if d.h>>61&3 == 3 {
+ // Bits: 1*sign 2*ignored 14*exponent 111*significand.
+ // Implicit 0b100 prefix in significand.
+ e = int(d.h>>47&(1<<14-1)) - 6176
+ //h = 4<<47 | d.h&(1<<47-1)
+ // Spec says all of these values are out of range.
+ h, l = 0, 0
+ } else {
+ // Bits: 1*sign 14*exponent 113*significand
+ e = int(d.h>>49&(1<<14-1)) - 6176
+ h = d.h & (1<<49 - 1)
+ }
+
+ // Would be handled by the logic below, but that's trivial and common.
+ if h == 0 && l == 0 && e == 0 {
+ return "-0"[pos:]
+ }
+
+ var repr [48]byte // Loop 5 times over 9 digits plus dot, negative sign, and leading zero.
+ var last = len(repr)
+ var i = len(repr)
+ var dot = len(repr) + e
+ var rem uint32
+Loop:
+ for d9 := 0; d9 < 5; d9++ {
+ h, l, rem = divmod(h, l, 1e9)
+ for d1 := 0; d1 < 9; d1++ {
+ // Handle "-0.0", "0.00123400", "-1.00E-6", "1.050E+3", etc.
+ if i < len(repr) && (dot == i || l == 0 && h == 0 && rem > 0 && rem < 10 && (dot < i-6 || e > 0)) {
+ e += len(repr) - i
+ i--
+ repr[i] = '.'
+ last = i - 1
+ dot = len(repr) // Unmark.
+ }
+ c := '0' + byte(rem%10)
+ rem /= 10
+ i--
+ repr[i] = c
+ // Handle "0E+3", "1E+3", etc.
+ if l == 0 && h == 0 && rem == 0 && i == len(repr)-1 && (dot < i-5 || e > 0) {
+ last = i
+ break Loop
+ }
+ if c != '0' {
+ last = i
+ }
+ // Break early. Works without it, but why.
+ if dot > i && l == 0 && h == 0 && rem == 0 {
+ break Loop
+ }
+ }
+ }
+ repr[last-1] = '-'
+ last--
+
+ if e > 0 {
+ return string(repr[last+pos:]) + "E+" + strconv.Itoa(e)
+ }
+ if e < 0 {
+ return string(repr[last+pos:]) + "E" + strconv.Itoa(e)
+ }
+ return string(repr[last+pos:])
+}
+
+func divmod(h, l uint64, div uint32) (qh, ql uint64, rem uint32) {
+ div64 := uint64(div)
+ a := h >> 32
+ aq := a / div64
+ ar := a % div64
+ b := ar<<32 + h&(1<<32-1)
+ bq := b / div64
+ br := b % div64
+ c := br<<32 + l>>32
+ cq := c / div64
+ cr := c % div64
+ d := cr<<32 + l&(1<<32-1)
+ dq := d / div64
+ dr := d % div64
+ return (aq<<32 | bq), (cq<<32 | dq), uint32(dr)
+}
+
+var dNaN = Decimal128{0x1F << 58, 0}
+var dPosInf = Decimal128{0x1E << 58, 0}
+var dNegInf = Decimal128{0x3E << 58, 0}
+
+func dErr(s string) (Decimal128, error) {
+ return dNaN, fmt.Errorf("cannot parse %q as a decimal128", s)
+}
+
+func ParseDecimal128(s string) (Decimal128, error) {
+ orig := s
+ if s == "" {
+ return dErr(orig)
+ }
+ neg := s[0] == '-'
+ if neg || s[0] == '+' {
+ s = s[1:]
+ }
+
+ if (len(s) == 3 || len(s) == 8) && (s[0] == 'N' || s[0] == 'n' || s[0] == 'I' || s[0] == 'i') {
+ if s == "NaN" || s == "nan" || strings.EqualFold(s, "nan") {
+ return dNaN, nil
+ }
+ if s == "Inf" || s == "inf" || strings.EqualFold(s, "inf") || strings.EqualFold(s, "infinity") {
+ if neg {
+ return dNegInf, nil
+ }
+ return dPosInf, nil
+ }
+ return dErr(orig)
+ }
+
+ var h, l uint64
+ var e int
+
+ var add, ovr uint32
+ var mul uint32 = 1
+ var dot = -1
+ var digits = 0
+ var i = 0
+ for i < len(s) {
+ c := s[i]
+ if mul == 1e9 {
+ h, l, ovr = muladd(h, l, mul, add)
+ mul, add = 1, 0
+ if ovr > 0 || h&((1<<15-1)<<49) > 0 {
+ return dErr(orig)
+ }
+ }
+ if c >= '0' && c <= '9' {
+ i++
+ if c > '0' || digits > 0 {
+ digits++
+ }
+ if digits > 34 {
+ if c == '0' {
+ // Exact rounding.
+ e++
+ continue
+ }
+ return dErr(orig)
+ }
+ mul *= 10
+ add *= 10
+ add += uint32(c - '0')
+ continue
+ }
+ if c == '.' {
+ i++
+ if dot >= 0 || i == 1 && len(s) == 1 {
+ return dErr(orig)
+ }
+ if i == len(s) {
+ break
+ }
+ if s[i] < '0' || s[i] > '9' || e > 0 {
+ return dErr(orig)
+ }
+ dot = i
+ continue
+ }
+ break
+ }
+ if i == 0 {
+ return dErr(orig)
+ }
+ if mul > 1 {
+ h, l, ovr = muladd(h, l, mul, add)
+ if ovr > 0 || h&((1<<15-1)<<49) > 0 {
+ return dErr(orig)
+ }
+ }
+ if dot >= 0 {
+ e += dot - i
+ }
+ if i+1 < len(s) && (s[i] == 'E' || s[i] == 'e') {
+ i++
+ eneg := s[i] == '-'
+ if eneg || s[i] == '+' {
+ i++
+ if i == len(s) {
+ return dErr(orig)
+ }
+ }
+ n := 0
+ for i < len(s) && n < 1e4 {
+ c := s[i]
+ i++
+ if c < '0' || c > '9' {
+ return dErr(orig)
+ }
+ n *= 10
+ n += int(c - '0')
+ }
+ if eneg {
+ n = -n
+ }
+ e += n
+ for e < -6176 {
+ // Subnormal.
+ var div uint32 = 1
+ for div < 1e9 && e < -6176 {
+ div *= 10
+ e++
+ }
+ var rem uint32
+ h, l, rem = divmod(h, l, div)
+ if rem > 0 {
+ return dErr(orig)
+ }
+ }
+ for e > 6111 {
+ // Clamped.
+ var mul uint32 = 1
+ for mul < 1e9 && e > 6111 {
+ mul *= 10
+ e--
+ }
+ h, l, ovr = muladd(h, l, mul, 0)
+ if ovr > 0 || h&((1<<15-1)<<49) > 0 {
+ return dErr(orig)
+ }
+ }
+ if e < -6176 || e > 6111 {
+ return dErr(orig)
+ }
+ }
+
+ if i < len(s) {
+ return dErr(orig)
+ }
+
+ h |= uint64(e+6176) & uint64(1<<14-1) << 49
+ if neg {
+ h |= 1 << 63
+ }
+ return Decimal128{h, l}, nil
+}
+
+func muladd(h, l uint64, mul uint32, add uint32) (resh, resl uint64, overflow uint32) {
+ mul64 := uint64(mul)
+ a := mul64 * (l & (1<<32 - 1))
+ b := a>>32 + mul64*(l>>32)
+ c := b>>32 + mul64*(h&(1<<32-1))
+ d := c>>32 + mul64*(h>>32)
+
+ a = a&(1<<32-1) + uint64(add)
+ b = b&(1<<32-1) + a>>32
+ c = c&(1<<32-1) + b>>32
+ d = d&(1<<32-1) + c>>32
+
+ return (d<<32 | c&(1<<32-1)), (b<<32 | a&(1<<32-1)), uint32(d >> 32)
+}
diff --git a/bson/decimal_test.go b/bson/decimal_test.go
new file mode 100644
index 000000000..a29728094
--- /dev/null
+++ b/bson/decimal_test.go
@@ -0,0 +1,4109 @@
+// BSON library for Go
+//
+// Copyright (c) 2010-2012 - Gustavo Niemeyer
+//
+// All rights reserved.
+//
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions are met:
+//
+// 1. Redistributions of source code must retain the above copyright notice, this
+// list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright notice,
+// this list of conditions and the following disclaimer in the documentation
+// and/or other materials provided with the distribution.
+//
+// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
+// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
+// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
+// DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
+// ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
+// (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
+// LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
+// ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
+// SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+package bson_test
+
+import (
+ "encoding/hex"
+ "encoding/json"
+ "fmt"
+ "regexp"
+ "strings"
+
+ "gopkg.in/mgo.v2/bson"
+
+ . "gopkg.in/check.v1"
+)
+
+// --------------------------------------------------------------------------
+// Decimal tests
+
+type decimalTests struct {
+ Valid []struct {
+ Description string `json:"description"`
+ BSON string `json:"bson"`
+ CanonicalBSON string `json:"canonical_bson"`
+ ExtJSON string `json:"extjson"`
+ CanonicalExtJSON string `json:"canonical_extjson"`
+ Lossy bool `json:"lossy"`
+ } `json:"valid"`
+
+ ParseErrors []struct {
+ Description string `json:"description"`
+ String string `json:"string"`
+ } `json:"parseErrors"`
+}
+
+func extJSONRepr(s string) string {
+ var value struct {
+ D struct {
+ Repr string `json:"$numberDecimal"`
+ } `json:"d"`
+ }
+ err := json.Unmarshal([]byte(s), &value)
+ if err != nil {
+ panic(err)
+ }
+ return value.D.Repr
+}
+
+func (s *S) TestDecimalTests(c *C) {
+ // These also conform to the spec and are used by Go elsewhere.
+ // (e.g. math/big won't parse "Infinity").
+ goStr := func(s string) string {
+ switch s {
+ case "Infinity":
+ return "Inf"
+ case "-Infinity":
+ return "-Inf"
+ }
+ return s
+ }
+
+ for _, testEntry := range decimalTestsJSON {
+ testFile := testEntry.file
+
+ var tests decimalTests
+ err := json.Unmarshal([]byte(testEntry.json), &tests)
+ c.Assert(err, IsNil)
+
+ for _, test := range tests.Valid {
+ c.Logf("Running %s test: %s", testFile, test.Description)
+
+ test.BSON = strings.ToLower(test.BSON)
+
+ // Unmarshal value from BSON data.
+ bsonData, err := hex.DecodeString(test.BSON)
+ var bsonValue struct{ D interface{} }
+ err = bson.Unmarshal(bsonData, &bsonValue)
+ c.Assert(err, IsNil)
+ dec128, ok := bsonValue.D.(bson.Decimal128)
+ c.Assert(ok, Equals, true)
+
+ // Extract ExtJSON representations (canonical and not).
+ extjRepr := extJSONRepr(test.ExtJSON)
+ cextjRepr := extjRepr
+ if test.CanonicalExtJSON != "" {
+ cextjRepr = extJSONRepr(test.CanonicalExtJSON)
+ }
+
+ wantRepr := goStr(cextjRepr)
+
+ // Generate canonical representation.
+ c.Assert(dec128.String(), Equals, wantRepr)
+
+ // Parse original canonical representation.
+ parsed, err := bson.ParseDecimal128(cextjRepr)
+ c.Assert(err, IsNil)
+ c.Assert(parsed.String(), Equals, wantRepr)
+
+ // Parse non-canonical representation.
+ parsed, err = bson.ParseDecimal128(extjRepr)
+ c.Assert(err, IsNil)
+ c.Assert(parsed.String(), Equals, wantRepr)
+
+ // Parse Go canonical representation (Inf vs. Infinity).
+ parsed, err = bson.ParseDecimal128(wantRepr)
+ c.Assert(err, IsNil)
+ c.Assert(parsed.String(), Equals, wantRepr)
+
+ // Marshal original value back into BSON data.
+ data, err := bson.Marshal(bsonValue)
+ c.Assert(err, IsNil)
+ c.Assert(hex.EncodeToString(data), Equals, test.BSON)
+
+ if test.Lossy {
+ continue
+ }
+
+ // Marshal the parsed canonical representation.
+ var parsedValue struct{ D interface{} }
+ parsedValue.D = parsed
+ data, err = bson.Marshal(parsedValue)
+ c.Assert(err, IsNil)
+ c.Assert(hex.EncodeToString(data), Equals, test.BSON)
+ }
+
+ for _, test := range tests.ParseErrors {
+ c.Logf("Running %s parse error test: %s (string %q)", testFile, test.Description, test.String)
+
+ _, err := bson.ParseDecimal128(test.String)
+ quoted := regexp.QuoteMeta(fmt.Sprintf("%q", test.String))
+ c.Assert(err, ErrorMatches, `cannot parse `+quoted+` as a decimal128`)
+ }
+ }
+}
+
+const decBenchNum = "9.999999999999999999999999999999999E+6144"
+
+func (s *S) BenchmarkDecimal128String(c *C) {
+ d, err := bson.ParseDecimal128(decBenchNum)
+ c.Assert(err, IsNil)
+ c.Assert(d.String(), Equals, decBenchNum)
+
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ d.String()
+ }
+}
+
+func (s *S) BenchmarkDecimal128Parse(c *C) {
+ var err error
+ c.ResetTimer()
+ for i := 0; i < c.N; i++ {
+ _, err = bson.ParseDecimal128(decBenchNum)
+ }
+ if err != nil {
+ panic(err)
+ }
+}
+
+var decimalTestsJSON = []struct{ file, json string }{
+ {"decimal128-1.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "Special - Canonical NaN",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "Special - Negative NaN",
+ "bson": "18000000136400000000000000000000000000000000FC00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Negative NaN",
+ "bson": "18000000136400000000000000000000000000000000FC00",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Canonical SNaN",
+ "bson": "180000001364000000000000000000000000000000007E00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Negative SNaN",
+ "bson": "18000000136400000000000000000000000000000000FE00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - NaN with a payload",
+ "bson": "180000001364001200000000000000000000000000007E00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Canonical Positive Infinity",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Special - Canonical Negative Infinity",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Special - Invalid representation treated as 0",
+ "bson": "180000001364000000000000000000000000000000106C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Invalid representation treated as -0",
+ "bson": "18000000136400DCBA9876543210DEADBEEF00000010EC00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Special - Invalid representation treated as 0E3",
+ "bson": "18000000136400FFFFFFFFFFFFFFFFFFFFFFFFFFFF116C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}",
+ "lossy": true
+ },
+ {
+ "description": "Regular - Adjusted Exponent Limit",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CF22F00",
+ "extjson": "{\"d\": { \"$numberDecimal\": \"0.000001234567890123456789012345678901234\" }}"
+ },
+ {
+ "description": "Regular - Smallest",
+ "bson": "18000000136400D204000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001234\"}}"
+ },
+ {
+ "description": "Regular - Smallest with Trailing Zeros",
+ "bson": "1800000013640040EF5A07000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00123400000\"}}"
+ },
+ {
+ "description": "Regular - 0.1",
+ "bson": "1800000013640001000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1\"}}"
+ },
+ {
+ "description": "Regular - 0.1234567890123456789012345678901234",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CFC2F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "Regular - 0",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "Regular - -0",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "Regular - -0.0",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "Regular - 2",
+ "bson": "180000001364000200000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2\"}}"
+ },
+ {
+ "description": "Regular - 2.000",
+ "bson": "18000000136400D0070000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2.000\"}}"
+ },
+ {
+ "description": "Regular - Largest",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "Scientific - Tiniest",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09ED010000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9.999999999999999999999999999999999E-6143\"}}"
+ },
+ {
+ "description": "Scientific - Tiny",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "Scientific - Negative Tiny",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "Scientific - Adjusted Exponent Limit",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CF02F00",
+ "extjson": "{\"d\": { \"$numberDecimal\": \"1.234567890123456789012345678901234E-7\" }}"
+ },
+ {
+ "description": "Scientific - Fractional",
+ "bson": "1800000013640064000000000000000000000000002CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00E-8\"}}"
+ },
+ {
+ "description": "Scientific - 0 with Exponent",
+ "bson": "180000001364000000000000000000000000000000205F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6000\"}}"
+ },
+ {
+ "description": "Scientific - 0 with Negative Exponent",
+ "bson": "1800000013640000000000000000000000000000007A2B00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-611\"}}"
+ },
+ {
+ "description": "Scientific - No Decimal with Signed Exponent",
+ "bson": "180000001364000100000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+3\"}}"
+ },
+ {
+ "description": "Scientific - Trailing Zero",
+ "bson": "180000001364001A04000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.050E+4\"}}"
+ },
+ {
+ "description": "Scientific - With Decimal",
+ "bson": "180000001364006900000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.05E+3\"}}"
+ },
+ {
+ "description": "Scientific - Full",
+ "bson": "18000000136400FFFFFFFFFFFFFFFFFFFFFFFFFFFF403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"5192296858534827628530496329220095\"}}"
+ },
+ {
+ "description": "Scientific - Large",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "Scientific - Largest",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09EDFF5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9.999999999999999999999999999999999E+6144\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Exponent Normalization",
+ "bson": "1800000013640064000000000000000000000000002CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-100E-10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00E-8\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Unsigned Positive Exponent",
+ "bson": "180000001364000100000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+3\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Lowercase Exponent Identifier",
+ "bson": "180000001364000100000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+3\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Long Significand with Exponent",
+ "bson": "1800000013640079D9E0F9763ADA429D0200000000583000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12345689012345789012345E+12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.2345689012345789012345E+34\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Positive Sign",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+1234567890123456789012345678901234\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - Long Decimal String",
+ "bson": "180000001364000100000000000000000000000000722800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-999\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - nan",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"nan\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - nAn",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"nAn\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - +infinity",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - infinity",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - infiniTY",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"infiniTY\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - inf",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"inf\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - inF",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"inF\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -infinity",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -infiniTy",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-infiniTy\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -Inf",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -inf",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-inf\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Non-Canonical Parsing - -inF",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-inF\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "Rounded Subnormal number",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E-6177\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "Clamped",
+ "bson": "180000001364000a00000000000000000000000000fe5f00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6112\"}}"
+ },
+ {
+ "description": "Exact rounding",
+ "bson": "18000000136400000000000a5bc138938d44c64d31cc3700",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+999\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-2.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[decq021] Normality",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C40B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "[decq823] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400010000800000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483649\"}}"
+ },
+ {
+ "description": "[decq822] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400000000800000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483648\"}}"
+ },
+ {
+ "description": "[decq821] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FFFFFF7F0000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483647\"}}"
+ },
+ {
+ "description": "[decq820] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FEFFFF7F0000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-2147483646\"}}"
+ },
+ {
+ "description": "[decq152] fold-downs (more below)",
+ "bson": "18000000136400393000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-12345\"}}"
+ },
+ {
+ "description": "[decq154] fold-downs (more below)",
+ "bson": "18000000136400D20400000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1234\"}}"
+ },
+ {
+ "description": "[decq006] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-750\"}}"
+ },
+ {
+ "description": "[decq164] fold-downs (more below)",
+ "bson": "1800000013640039300000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-123.45\"}}"
+ },
+ {
+ "description": "[decq156] fold-downs (more below)",
+ "bson": "180000001364007B0000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-123\"}}"
+ },
+ {
+ "description": "[decq008] derivative canonical plain strings",
+ "bson": "18000000136400EE020000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-75.0\"}}"
+ },
+ {
+ "description": "[decq158] fold-downs (more below)",
+ "bson": "180000001364000C0000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-12\"}}"
+ },
+ {
+ "description": "[decq122] Nmax and similar",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09EDFFDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.999999999999999999999999999999999E+6144\"}}"
+ },
+ {
+ "description": "[decq002] (mostly derived from the Strawman 4 document and examples)",
+ "bson": "18000000136400EE020000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-7.50\"}}"
+ },
+ {
+ "description": "[decq004] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000042B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-7.50E+3\"}}"
+ },
+ {
+ "description": "[decq018] derivative canonical plain strings",
+ "bson": "18000000136400EE020000000000000000000000002EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-7.50E-7\"}}"
+ },
+ {
+ "description": "[decq125] Nmax and similar",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CFEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.234567890123456789012345678901234E+6144\"}}"
+ },
+ {
+ "description": "[decq131] fold-downs (more below)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq162] fold-downs (more below)",
+ "bson": "180000001364007B000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.23\"}}"
+ },
+ {
+ "description": "[decq176] Nmin and below",
+ "bson": "18000000136400010000000A5BC138938D44C64D31008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000001E-6143\"}}"
+ },
+ {
+ "description": "[decq174] Nmin and below",
+ "bson": "18000000136400000000000A5BC138938D44C64D31008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000000E-6143\"}}"
+ },
+ {
+ "description": "[decq133] fold-downs (more below)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq160] fold-downs (more below)",
+ "bson": "18000000136400010000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1\"}}"
+ },
+ {
+ "description": "[decq172] Nmin and below",
+ "bson": "180000001364000100000000000000000000000000428000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6143\"}}"
+ },
+ {
+ "description": "[decq010] derivative canonical plain strings",
+ "bson": "18000000136400EE020000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.750\"}}"
+ },
+ {
+ "description": "[decq012] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0750\"}}"
+ },
+ {
+ "description": "[decq014] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000034B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000750\"}}"
+ },
+ {
+ "description": "[decq016] derivative canonical plain strings",
+ "bson": "18000000136400EE0200000000000000000000000030B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000750\"}}"
+ },
+ {
+ "description": "[decq404] zeros",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq424] negative zeros",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq407] zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[decq427] negative zeros",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[decq409] zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[decq428] negative zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[decq700] Selected DPD codes",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[decq406] zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[decq426] negative zeros",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[decq410] zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[decq431] negative zeros",
+ "bson": "18000000136400000000000000000000000000000046B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+3\"}}"
+ },
+ {
+ "description": "[decq419] clamped zeros...",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq432] negative zeros",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq405] zeros",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq425] negative zeros",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq508] Specials",
+ "bson": "180000001364000000000000000000000000000000007800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"Infinity\"}}"
+ },
+ {
+ "description": "[decq528] Specials",
+ "bson": "18000000136400000000000000000000000000000000F800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-Infinity\"}}"
+ },
+ {
+ "description": "[decq541] Specials",
+ "bson": "180000001364000000000000000000000000000000007C00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"NaN\"}}"
+ },
+ {
+ "description": "[decq074] Nmin and below",
+ "bson": "18000000136400000000000A5BC138938D44C64D31000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E-6143\"}}"
+ },
+ {
+ "description": "[decq602] fold-down full sequence",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq604] fold-down full sequence",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E+6143\"}}"
+ },
+ {
+ "description": "[decq606] fold-down full sequence",
+ "bson": "1800000013640000000080264B91C02220BE377E00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000000E+6142\"}}"
+ },
+ {
+ "description": "[decq608] fold-down full sequence",
+ "bson": "1800000013640000000040EAED7446D09C2C9F0C00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000E+6141\"}}"
+ },
+ {
+ "description": "[decq610] fold-down full sequence",
+ "bson": "18000000136400000000A0CA17726DAE0F1E430100FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000E+6140\"}}"
+ },
+ {
+ "description": "[decq612] fold-down full sequence",
+ "bson": "18000000136400000000106102253E5ECE4F200000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000E+6139\"}}"
+ },
+ {
+ "description": "[decq614] fold-down full sequence",
+ "bson": "18000000136400000000E83C80D09F3C2E3B030000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000E+6138\"}}"
+ },
+ {
+ "description": "[decq616] fold-down full sequence",
+ "bson": "18000000136400000000E4D20CC8DCD2B752000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000E+6137\"}}"
+ },
+ {
+ "description": "[decq618] fold-down full sequence",
+ "bson": "180000001364000000004A48011416954508000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000E+6136\"}}"
+ },
+ {
+ "description": "[decq620] fold-down full sequence",
+ "bson": "18000000136400000000A1EDCCCE1BC2D300000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000E+6135\"}}"
+ },
+ {
+ "description": "[decq622] fold-down full sequence",
+ "bson": "18000000136400000080F64AE1C7022D1500000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000E+6134\"}}"
+ },
+ {
+ "description": "[decq624] fold-down full sequence",
+ "bson": "18000000136400000040B2BAC9E0191E0200000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000E+6133\"}}"
+ },
+ {
+ "description": "[decq626] fold-down full sequence",
+ "bson": "180000001364000000A0DEC5ADC935360000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000E+6132\"}}"
+ },
+ {
+ "description": "[decq628] fold-down full sequence",
+ "bson": "18000000136400000010632D5EC76B050000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000E+6131\"}}"
+ },
+ {
+ "description": "[decq630] fold-down full sequence",
+ "bson": "180000001364000000E8890423C78A000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000E+6130\"}}"
+ },
+ {
+ "description": "[decq632] fold-down full sequence",
+ "bson": "18000000136400000064A7B3B6E00D000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000E+6129\"}}"
+ },
+ {
+ "description": "[decq634] fold-down full sequence",
+ "bson": "1800000013640000008A5D78456301000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000E+6128\"}}"
+ },
+ {
+ "description": "[decq636] fold-down full sequence",
+ "bson": "180000001364000000C16FF2862300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000E+6127\"}}"
+ },
+ {
+ "description": "[decq638] fold-down full sequence",
+ "bson": "180000001364000080C6A47E8D0300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000E+6126\"}}"
+ },
+ {
+ "description": "[decq640] fold-down full sequence",
+ "bson": "1800000013640000407A10F35A0000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000E+6125\"}}"
+ },
+ {
+ "description": "[decq642] fold-down full sequence",
+ "bson": "1800000013640000A0724E18090000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000E+6124\"}}"
+ },
+ {
+ "description": "[decq644] fold-down full sequence",
+ "bson": "180000001364000010A5D4E8000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000E+6123\"}}"
+ },
+ {
+ "description": "[decq646] fold-down full sequence",
+ "bson": "1800000013640000E8764817000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000E+6122\"}}"
+ },
+ {
+ "description": "[decq648] fold-down full sequence",
+ "bson": "1800000013640000E40B5402000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000E+6121\"}}"
+ },
+ {
+ "description": "[decq650] fold-down full sequence",
+ "bson": "1800000013640000CA9A3B00000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000E+6120\"}}"
+ },
+ {
+ "description": "[decq652] fold-down full sequence",
+ "bson": "1800000013640000E1F50500000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000E+6119\"}}"
+ },
+ {
+ "description": "[decq654] fold-down full sequence",
+ "bson": "180000001364008096980000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000E+6118\"}}"
+ },
+ {
+ "description": "[decq656] fold-down full sequence",
+ "bson": "1800000013640040420F0000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000E+6117\"}}"
+ },
+ {
+ "description": "[decq658] fold-down full sequence",
+ "bson": "18000000136400A086010000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000E+6116\"}}"
+ },
+ {
+ "description": "[decq660] fold-down full sequence",
+ "bson": "180000001364001027000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000E+6115\"}}"
+ },
+ {
+ "description": "[decq662] fold-down full sequence",
+ "bson": "18000000136400E803000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000E+6114\"}}"
+ },
+ {
+ "description": "[decq664] fold-down full sequence",
+ "bson": "180000001364006400000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+6113\"}}"
+ },
+ {
+ "description": "[decq666] fold-down full sequence",
+ "bson": "180000001364000A00000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6112\"}}"
+ },
+ {
+ "description": "[decq060] fold-downs (more below)",
+ "bson": "180000001364000100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1\"}}"
+ },
+ {
+ "description": "[decq670] fold-down full sequence",
+ "bson": "180000001364000100000000000000000000000000FC5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6110\"}}"
+ },
+ {
+ "description": "[decq668] fold-down full sequence",
+ "bson": "180000001364000100000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6111\"}}"
+ },
+ {
+ "description": "[decq072] Nmin and below",
+ "bson": "180000001364000100000000000000000000000000420000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6143\"}}"
+ },
+ {
+ "description": "[decq076] Nmin and below",
+ "bson": "18000000136400010000000A5BC138938D44C64D31000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000001E-6143\"}}"
+ },
+ {
+ "description": "[decq036] fold-downs (more below)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq062] fold-downs (more below)",
+ "bson": "180000001364007B000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23\"}}"
+ },
+ {
+ "description": "[decq034] Nmax and similar",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3CFE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.234567890123456789012345678901234E+6144\"}}"
+ },
+ {
+ "description": "[decq441] exponent lengths",
+ "bson": "180000001364000700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7\"}}"
+ },
+ {
+ "description": "[decq449] exponent lengths",
+ "bson": "1800000013640007000000000000000000000000001E5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+5999\"}}"
+ },
+ {
+ "description": "[decq447] exponent lengths",
+ "bson": "1800000013640007000000000000000000000000000E3800",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+999\"}}"
+ },
+ {
+ "description": "[decq445] exponent lengths",
+ "bson": "180000001364000700000000000000000000000000063100",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+99\"}}"
+ },
+ {
+ "description": "[decq443] exponent lengths",
+ "bson": "180000001364000700000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+9\"}}"
+ },
+ {
+ "description": "[decq842] VG testcase",
+ "bson": "180000001364000000FED83F4E7C9FE4E269E38A5BCD1700",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7.049000000000010795488000000000000E-3097\"}}"
+ },
+ {
+ "description": "[decq841] VG testcase",
+ "bson": "180000001364000000203B9DB5056F000000000000002400",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"8.000000000000000000E-1550\"}}"
+ },
+ {
+ "description": "[decq840] VG testcase",
+ "bson": "180000001364003C17258419D710C42F0000000000002400",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"8.81125000000001349436E-1548\"}}"
+ },
+ {
+ "description": "[decq701] Selected DPD codes",
+ "bson": "180000001364000900000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9\"}}"
+ },
+ {
+ "description": "[decq032] Nmax and similar",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09EDFF5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9.999999999999999999999999999999999E+6144\"}}"
+ },
+ {
+ "description": "[decq702] Selected DPD codes",
+ "bson": "180000001364000A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10\"}}"
+ },
+ {
+ "description": "[decq057] fold-downs (more below)",
+ "bson": "180000001364000C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12\"}}"
+ },
+ {
+ "description": "[decq703] Selected DPD codes",
+ "bson": "180000001364001300000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"19\"}}"
+ },
+ {
+ "description": "[decq704] Selected DPD codes",
+ "bson": "180000001364001400000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"20\"}}"
+ },
+ {
+ "description": "[decq705] Selected DPD codes",
+ "bson": "180000001364001D00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"29\"}}"
+ },
+ {
+ "description": "[decq706] Selected DPD codes",
+ "bson": "180000001364001E00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"30\"}}"
+ },
+ {
+ "description": "[decq707] Selected DPD codes",
+ "bson": "180000001364002700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"39\"}}"
+ },
+ {
+ "description": "[decq708] Selected DPD codes",
+ "bson": "180000001364002800000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"40\"}}"
+ },
+ {
+ "description": "[decq709] Selected DPD codes",
+ "bson": "180000001364003100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"49\"}}"
+ },
+ {
+ "description": "[decq710] Selected DPD codes",
+ "bson": "180000001364003200000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"50\"}}"
+ },
+ {
+ "description": "[decq711] Selected DPD codes",
+ "bson": "180000001364003B00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"59\"}}"
+ },
+ {
+ "description": "[decq712] Selected DPD codes",
+ "bson": "180000001364003C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"60\"}}"
+ },
+ {
+ "description": "[decq713] Selected DPD codes",
+ "bson": "180000001364004500000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"69\"}}"
+ },
+ {
+ "description": "[decq714] Selected DPD codes",
+ "bson": "180000001364004600000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"70\"}}"
+ },
+ {
+ "description": "[decq715] Selected DPD codes",
+ "bson": "180000001364004700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"71\"}}"
+ },
+ {
+ "description": "[decq716] Selected DPD codes",
+ "bson": "180000001364004800000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"72\"}}"
+ },
+ {
+ "description": "[decq717] Selected DPD codes",
+ "bson": "180000001364004900000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"73\"}}"
+ },
+ {
+ "description": "[decq718] Selected DPD codes",
+ "bson": "180000001364004A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"74\"}}"
+ },
+ {
+ "description": "[decq719] Selected DPD codes",
+ "bson": "180000001364004B00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"75\"}}"
+ },
+ {
+ "description": "[decq720] Selected DPD codes",
+ "bson": "180000001364004C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"76\"}}"
+ },
+ {
+ "description": "[decq721] Selected DPD codes",
+ "bson": "180000001364004D00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"77\"}}"
+ },
+ {
+ "description": "[decq722] Selected DPD codes",
+ "bson": "180000001364004E00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"78\"}}"
+ },
+ {
+ "description": "[decq723] Selected DPD codes",
+ "bson": "180000001364004F00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"79\"}}"
+ },
+ {
+ "description": "[decq056] fold-downs (more below)",
+ "bson": "180000001364007B00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123\"}}"
+ },
+ {
+ "description": "[decq064] fold-downs (more below)",
+ "bson": "1800000013640039300000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123.45\"}}"
+ },
+ {
+ "description": "[decq732] Selected DPD codes",
+ "bson": "180000001364000802000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"520\"}}"
+ },
+ {
+ "description": "[decq733] Selected DPD codes",
+ "bson": "180000001364000902000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"521\"}}"
+ },
+ {
+ "description": "[decq740] DPD: one of each of the huffman groups",
+ "bson": "180000001364000903000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"777\"}}"
+ },
+ {
+ "description": "[decq741] DPD: one of each of the huffman groups",
+ "bson": "180000001364000A03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"778\"}}"
+ },
+ {
+ "description": "[decq742] DPD: one of each of the huffman groups",
+ "bson": "180000001364001303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"787\"}}"
+ },
+ {
+ "description": "[decq746] DPD: one of each of the huffman groups",
+ "bson": "180000001364001F03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"799\"}}"
+ },
+ {
+ "description": "[decq743] DPD: one of each of the huffman groups",
+ "bson": "180000001364006D03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"877\"}}"
+ },
+ {
+ "description": "[decq753] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364007803000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"888\"}}"
+ },
+ {
+ "description": "[decq754] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364007903000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"889\"}}"
+ },
+ {
+ "description": "[decq760] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364008203000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"898\"}}"
+ },
+ {
+ "description": "[decq764] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "180000001364008303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"899\"}}"
+ },
+ {
+ "description": "[decq745] DPD: one of each of the huffman groups",
+ "bson": "18000000136400D303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"979\"}}"
+ },
+ {
+ "description": "[decq770] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400DC03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"988\"}}"
+ },
+ {
+ "description": "[decq774] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400DD03000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"989\"}}"
+ },
+ {
+ "description": "[decq730] Selected DPD codes",
+ "bson": "18000000136400E203000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"994\"}}"
+ },
+ {
+ "description": "[decq731] Selected DPD codes",
+ "bson": "18000000136400E303000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"995\"}}"
+ },
+ {
+ "description": "[decq744] DPD: one of each of the huffman groups",
+ "bson": "18000000136400E503000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"997\"}}"
+ },
+ {
+ "description": "[decq780] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400E603000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"998\"}}"
+ },
+ {
+ "description": "[decq787] DPD all-highs cases (includes the 24 redundant codes)",
+ "bson": "18000000136400E703000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"999\"}}"
+ },
+ {
+ "description": "[decq053] fold-downs (more below)",
+ "bson": "18000000136400D204000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1234\"}}"
+ },
+ {
+ "description": "[decq052] fold-downs (more below)",
+ "bson": "180000001364003930000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12345\"}}"
+ },
+ {
+ "description": "[decq792] Miscellaneous (testers' queries, etc.)",
+ "bson": "180000001364003075000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"30000\"}}"
+ },
+ {
+ "description": "[decq793] Miscellaneous (testers' queries, etc.)",
+ "bson": "1800000013640090940D0000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"890000\"}}"
+ },
+ {
+ "description": "[decq824] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FEFFFF7F00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483646\"}}"
+ },
+ {
+ "description": "[decq825] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FFFFFF7F00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483647\"}}"
+ },
+ {
+ "description": "[decq826] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000000008000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483648\"}}"
+ },
+ {
+ "description": "[decq827] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000100008000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2147483649\"}}"
+ },
+ {
+ "description": "[decq828] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FEFFFFFF00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967294\"}}"
+ },
+ {
+ "description": "[decq829] values around [u]int32 edges (zeros done earlier)",
+ "bson": "18000000136400FFFFFFFF00000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967295\"}}"
+ },
+ {
+ "description": "[decq830] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000000000001000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967296\"}}"
+ },
+ {
+ "description": "[decq831] values around [u]int32 edges (zeros done earlier)",
+ "bson": "180000001364000100000001000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4294967297\"}}"
+ },
+ {
+ "description": "[decq022] Normality",
+ "bson": "18000000136400C7711CC7B548F377DC80A131C836403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1111111111111111111111111111111111\"}}"
+ },
+ {
+ "description": "[decq020] Normality",
+ "bson": "18000000136400F2AF967ED05C82DE3297FF6FDE3C403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1234567890123456789012345678901234\"}}"
+ },
+ {
+ "description": "[decq550] Specials",
+ "bson": "18000000136400FFFFFFFF638E8D37C087ADBE09ED413000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"9999999999999999999999999999999999\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-3.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[basx066] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE0000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-00345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-345678.5432\"}}"
+ },
+ {
+ "description": "[basx065] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE0000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-345678.5432\"}}"
+ },
+ {
+ "description": "[basx064] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE0000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-345678.5432\"}}"
+ },
+ {
+ "description": "[basx041] strings without E cannot generate E in result",
+ "bson": "180000001364004C0000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-76\"}}"
+ },
+ {
+ "description": "[basx027] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000F270000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.999\"}}"
+ },
+ {
+ "description": "[basx026] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364009F230000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.119\"}}"
+ },
+ {
+ "description": "[basx025] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364008F030000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.11\"}}"
+ },
+ {
+ "description": "[basx024] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364005B000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.1\"}}"
+ },
+ {
+ "description": "[dqbsr531] negatives (Rounded)",
+ "bson": "1800000013640099761CC7B548F377DC80A131C836FEAF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.1111111111111111111111111111123450\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.111111111111111111111111111112345\"}}"
+ },
+ {
+ "description": "[basx022] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000A000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.0\"}}"
+ },
+ {
+ "description": "[basx021] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400010000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1\"}}"
+ },
+ {
+ "description": "[basx601] Zeros",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx622] Zeros",
+ "bson": "1800000013640000000000000000000000000000002EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-9\"}}"
+ },
+ {
+ "description": "[basx602] Zeros",
+ "bson": "180000001364000000000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8\"}}"
+ },
+ {
+ "description": "[basx621] Zeros",
+ "bson": "18000000136400000000000000000000000000000030B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-8\"}}"
+ },
+ {
+ "description": "[basx603] Zeros",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx620] Zeros",
+ "bson": "18000000136400000000000000000000000000000032B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-7\"}}"
+ },
+ {
+ "description": "[basx604] Zeros",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx619] Zeros",
+ "bson": "18000000136400000000000000000000000000000034B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000\"}}"
+ },
+ {
+ "description": "[basx605] Zeros",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx618] Zeros",
+ "bson": "18000000136400000000000000000000000000000036B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000\"}}"
+ },
+ {
+ "description": "[basx680] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"000000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx606] Zeros",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx617] Zeros",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx681] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx686] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+00000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx687] Zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-00000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx019] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-00.00\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[basx607] Zeros",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx616] Zeros",
+ "bson": "1800000013640000000000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000\"}}"
+ },
+ {
+ "description": "[basx682] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx155] Numbers with E",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000e+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx130] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx290] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx131] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx291] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000036B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000\"}}"
+ },
+ {
+ "description": "[basx132] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx292] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000034B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000\"}}"
+ },
+ {
+ "description": "[basx133] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx293] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000032B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-7\"}}"
+ },
+ {
+ "description": "[basx608] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx615] Zeros",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[basx683] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"000.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx630] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx670] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx631] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx671] Zeros",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx134] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx294] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx632] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx672] Zeros",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx135] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx295] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000036B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000\"}}"
+ },
+ {
+ "description": "[basx633] Zeros",
+ "bson": "180000001364000000000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+1\"}}"
+ },
+ {
+ "description": "[basx673] Zeros",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx136] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx674] Zeros",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx634] Zeros",
+ "bson": "180000001364000000000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2\"}}"
+ },
+ {
+ "description": "[basx137] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx635] Zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[basx675] Zeros",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx636] Zeros",
+ "bson": "180000001364000000000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+4\"}}"
+ },
+ {
+ "description": "[basx676] Zeros",
+ "bson": "180000001364000000000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8\"}}"
+ },
+ {
+ "description": "[basx637] Zeros",
+ "bson": "1800000013640000000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+5\"}}"
+ },
+ {
+ "description": "[basx677] Zeros",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx638] Zeros",
+ "bson": "1800000013640000000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6\"}}"
+ },
+ {
+ "description": "[basx678] Zeros",
+ "bson": "1800000013640000000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-10\"}}"
+ },
+ {
+ "description": "[basx149] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"000E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx639] Zeros",
+ "bson": "1800000013640000000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+7\"}}"
+ },
+ {
+ "description": "[basx679] Zeros",
+ "bson": "1800000013640000000000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00E-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-11\"}}"
+ },
+ {
+ "description": "[basx063] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+00345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx018] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "[basx609] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx614] Zeros",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "[basx684] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx640] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx660] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx641] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx661] Zeros",
+ "bson": "1800000013640000000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00\"}}"
+ },
+ {
+ "description": "[basx296] some more negative zeros [systematic tests below]",
+ "bson": "1800000013640000000000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000\"}}"
+ },
+ {
+ "description": "[basx642] Zeros",
+ "bson": "180000001364000000000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+1\"}}"
+ },
+ {
+ "description": "[basx662] Zeros",
+ "bson": "1800000013640000000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000\"}}"
+ },
+ {
+ "description": "[basx297] some more negative zeros [systematic tests below]",
+ "bson": "18000000136400000000000000000000000000000038B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0000\"}}"
+ },
+ {
+ "description": "[basx643] Zeros",
+ "bson": "180000001364000000000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2\"}}"
+ },
+ {
+ "description": "[basx663] Zeros",
+ "bson": "180000001364000000000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000\"}}"
+ },
+ {
+ "description": "[basx644] Zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[basx664] Zeros",
+ "bson": "180000001364000000000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000\"}}"
+ },
+ {
+ "description": "[basx645] Zeros",
+ "bson": "180000001364000000000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+4\"}}"
+ },
+ {
+ "description": "[basx665] Zeros",
+ "bson": "180000001364000000000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000\"}}"
+ },
+ {
+ "description": "[basx646] Zeros",
+ "bson": "1800000013640000000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+5\"}}"
+ },
+ {
+ "description": "[basx666] Zeros",
+ "bson": "180000001364000000000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-7\"}}"
+ },
+ {
+ "description": "[basx647] Zeros",
+ "bson": "1800000013640000000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6\"}}"
+ },
+ {
+ "description": "[basx667] Zeros",
+ "bson": "180000001364000000000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8\"}}"
+ },
+ {
+ "description": "[basx648] Zeros",
+ "bson": "1800000013640000000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+7\"}}"
+ },
+ {
+ "description": "[basx668] Zeros",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx160] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx161] Numbers with E",
+ "bson": "1800000013640000000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00E-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-9\"}}"
+ },
+ {
+ "description": "[basx649] Zeros",
+ "bson": "180000001364000000000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+8\"}}"
+ },
+ {
+ "description": "[basx669] Zeros",
+ "bson": "1800000013640000000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0E-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-10\"}}"
+ },
+ {
+ "description": "[basx062] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx001] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx017] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx611] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx613] Zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx685] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx688] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx689] Zeros",
+ "bson": "18000000136400000000000000000000000000000040B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0\"}}"
+ },
+ {
+ "description": "[basx650] Zeros",
+ "bson": "180000001364000000000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0\"}}"
+ },
+ {
+ "description": "[basx651] Zeros",
+ "bson": "180000001364000000000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+1\"}}"
+ },
+ {
+ "description": "[basx298] some more negative zeros [systematic tests below]",
+ "bson": "1800000013640000000000000000000000000000003CB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00\"}}"
+ },
+ {
+ "description": "[basx652] Zeros",
+ "bson": "180000001364000000000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+2\"}}"
+ },
+ {
+ "description": "[basx299] some more negative zeros [systematic tests below]",
+ "bson": "1800000013640000000000000000000000000000003AB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000\"}}"
+ },
+ {
+ "description": "[basx653] Zeros",
+ "bson": "180000001364000000000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+3\"}}"
+ },
+ {
+ "description": "[basx654] Zeros",
+ "bson": "180000001364000000000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+4\"}}"
+ },
+ {
+ "description": "[basx655] Zeros",
+ "bson": "1800000013640000000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+5\"}}"
+ },
+ {
+ "description": "[basx656] Zeros",
+ "bson": "1800000013640000000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6\"}}"
+ },
+ {
+ "description": "[basx657] Zeros",
+ "bson": "1800000013640000000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+7\"}}"
+ },
+ {
+ "description": "[basx658] Zeros",
+ "bson": "180000001364000000000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+8\"}}"
+ },
+ {
+ "description": "[basx138] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx139] Numbers with E",
+ "bson": "18000000136400000000000000000000000000000052B000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+9\"}}"
+ },
+ {
+ "description": "[basx144] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx154] Numbers with E",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx659] Zeros",
+ "bson": "180000001364000000000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+9\"}}"
+ },
+ {
+ "description": "[basx042] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+12.76\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx143] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+1E+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx061] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+345678.5432\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx036] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000203000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000000123456789\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23456789E-8\"}}"
+ },
+ {
+ "description": "[basx035] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000223000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000123456789\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23456789E-7\"}}"
+ },
+ {
+ "description": "[basx034] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000243000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000123456789\"}}"
+ },
+ {
+ "description": "[basx053] strings without E cannot generate E in result",
+ "bson": "180000001364003200000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000050\"}}"
+ },
+ {
+ "description": "[basx033] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640015CD5B0700000000000000000000263000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000123456789\"}}"
+ },
+ {
+ "description": "[basx016] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000C000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.012\"}}"
+ },
+ {
+ "description": "[basx015] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364007B000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.123\"}}"
+ },
+ {
+ "description": "[basx037] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640078DF0D8648700000000000000000223000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.123456789012344\"}}"
+ },
+ {
+ "description": "[basx038] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640079DF0D8648700000000000000000223000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.123456789012345\"}}"
+ },
+ {
+ "description": "[basx250] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx257] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx256] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx258] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx251] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000103000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-21\"}}"
+ },
+ {
+ "description": "[basx263] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000603000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+19\"}}"
+ },
+ {
+ "description": "[basx255] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001265\"}}"
+ },
+ {
+ "description": "[basx259] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx254] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0001265\"}}"
+ },
+ {
+ "description": "[basx260] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx253] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00001265\"}}"
+ },
+ {
+ "description": "[basx261] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx252] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000283000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-9\"}}"
+ },
+ {
+ "description": "[basx262] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+7\"}}"
+ },
+ {
+ "description": "[basx159] Numbers with E",
+ "bson": "1800000013640049000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.73e-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7.3E-8\"}}"
+ },
+ {
+ "description": "[basx004] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640064000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00\"}}"
+ },
+ {
+ "description": "[basx003] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000A000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0\"}}"
+ },
+ {
+ "description": "[basx002] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1\"}}"
+ },
+ {
+ "description": "[basx148] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx153] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx141] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx146] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx151] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx142] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+90\"}}"
+ },
+ {
+ "description": "[basx147] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e+90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+90\"}}"
+ },
+ {
+ "description": "[basx152] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+90\"}}"
+ },
+ {
+ "description": "[basx140] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx150] Numbers with E",
+ "bson": "180000001364000100000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+9\"}}"
+ },
+ {
+ "description": "[basx014] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400D2040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.234\"}}"
+ },
+ {
+ "description": "[basx170] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx177] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx176] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx178] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx171] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000123000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-20\"}}"
+ },
+ {
+ "description": "[basx183] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000623000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+20\"}}"
+ },
+ {
+ "description": "[basx175] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx179] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx174] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001265\"}}"
+ },
+ {
+ "description": "[basx180] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx173] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0001265\"}}"
+ },
+ {
+ "description": "[basx181] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx172] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-8\"}}"
+ },
+ {
+ "description": "[basx182] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+8\"}}"
+ },
+ {
+ "description": "[basx157] Numbers with E",
+ "bson": "180000001364000400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"4E+9\"}}"
+ },
+ {
+ "description": "[basx067] examples",
+ "bson": "180000001364000500000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000005\"}}"
+ },
+ {
+ "description": "[basx069] examples",
+ "bson": "180000001364000500000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-7\"}}"
+ },
+ {
+ "description": "[basx385] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7\"}}"
+ },
+ {
+ "description": "[basx365] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000543000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+10\"}}"
+ },
+ {
+ "description": "[basx405] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-10\"}}"
+ },
+ {
+ "description": "[basx363] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000563000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E11\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+11\"}}"
+ },
+ {
+ "description": "[basx407] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-11\"}}"
+ },
+ {
+ "description": "[basx361] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000583000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+12\"}}"
+ },
+ {
+ "description": "[basx409] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000283000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-12\"}}"
+ },
+ {
+ "description": "[basx411] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000263000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-13\"}}"
+ },
+ {
+ "description": "[basx383] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+1\"}}"
+ },
+ {
+ "description": "[basx387] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.7\"}}"
+ },
+ {
+ "description": "[basx381] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+2\"}}"
+ },
+ {
+ "description": "[basx389] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.07\"}}"
+ },
+ {
+ "description": "[basx379] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+3\"}}"
+ },
+ {
+ "description": "[basx391] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.007\"}}"
+ },
+ {
+ "description": "[basx377] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+4\"}}"
+ },
+ {
+ "description": "[basx393] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0007\"}}"
+ },
+ {
+ "description": "[basx375] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+5\"}}"
+ },
+ {
+ "description": "[basx395] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00007\"}}"
+ },
+ {
+ "description": "[basx373] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+6\"}}"
+ },
+ {
+ "description": "[basx397] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000007\"}}"
+ },
+ {
+ "description": "[basx371] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+7\"}}"
+ },
+ {
+ "description": "[basx399] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-7\"}}"
+ },
+ {
+ "description": "[basx369] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+8\"}}"
+ },
+ {
+ "description": "[basx401] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-8\"}}"
+ },
+ {
+ "description": "[basx367] Engineering notation tests",
+ "bson": "180000001364000700000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"7E+9\"}}"
+ },
+ {
+ "description": "[basx403] Engineering notation tests",
+ "bson": "1800000013640007000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"7E-9\"}}"
+ },
+ {
+ "description": "[basx007] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640064000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.0\"}}"
+ },
+ {
+ "description": "[basx005] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364000A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10\"}}"
+ },
+ {
+ "description": "[basx165] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx163] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx325] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"10\"}}"
+ },
+ {
+ "description": "[basx305] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000543000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+11\"}}"
+ },
+ {
+ "description": "[basx345] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-10\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-9\"}}"
+ },
+ {
+ "description": "[basx303] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000563000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e11\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+12\"}}"
+ },
+ {
+ "description": "[basx347] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000002A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-11\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-10\"}}"
+ },
+ {
+ "description": "[basx301] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000583000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+13\"}}"
+ },
+ {
+ "description": "[basx349] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000283000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-12\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-11\"}}"
+ },
+ {
+ "description": "[basx351] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000263000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-13\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-12\"}}"
+ },
+ {
+ "description": "[basx323] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+2\"}}"
+ },
+ {
+ "description": "[basx327] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0\"}}"
+ },
+ {
+ "description": "[basx321] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+3\"}}"
+ },
+ {
+ "description": "[basx329] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.10\"}}"
+ },
+ {
+ "description": "[basx319] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+4\"}}"
+ },
+ {
+ "description": "[basx331] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.010\"}}"
+ },
+ {
+ "description": "[basx317] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+5\"}}"
+ },
+ {
+ "description": "[basx333] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0010\"}}"
+ },
+ {
+ "description": "[basx315] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000004A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6\"}}"
+ },
+ {
+ "description": "[basx335] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00010\"}}"
+ },
+ {
+ "description": "[basx313] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+7\"}}"
+ },
+ {
+ "description": "[basx337] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-6\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000010\"}}"
+ },
+ {
+ "description": "[basx311] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+8\"}}"
+ },
+ {
+ "description": "[basx339] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000010\"}}"
+ },
+ {
+ "description": "[basx309] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+9\"}}"
+ },
+ {
+ "description": "[basx341] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-7\"}}"
+ },
+ {
+ "description": "[basx164] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e+90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+91\"}}"
+ },
+ {
+ "description": "[basx162] Numbers with E",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx307] Engineering notation tests",
+ "bson": "180000001364000A00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+10\"}}"
+ },
+ {
+ "description": "[basx343] Engineering notation tests",
+ "bson": "180000001364000A000000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10e-9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-8\"}}"
+ },
+ {
+ "description": "[basx008] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640065000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.1\"}}"
+ },
+ {
+ "description": "[basx009] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640068000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.4\"}}"
+ },
+ {
+ "description": "[basx010] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640069000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.5\"}}"
+ },
+ {
+ "description": "[basx011] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364006A000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.6\"}}"
+ },
+ {
+ "description": "[basx012] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364006D000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"10.9\"}}"
+ },
+ {
+ "description": "[basx013] conform to rules and exponent will be in permitted range).",
+ "bson": "180000001364006E000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"11.0\"}}"
+ },
+ {
+ "description": "[basx040] strings without E cannot generate E in result",
+ "bson": "180000001364000C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12\"}}"
+ },
+ {
+ "description": "[basx190] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx197] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx196] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx198] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx191] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000143000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-19\"}}"
+ },
+ {
+ "description": "[basx203] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000643000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+21\"}}"
+ },
+ {
+ "description": "[basx195] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx199] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx194] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx200] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx193] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.001265\"}}"
+ },
+ {
+ "description": "[basx201] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+5\"}}"
+ },
+ {
+ "description": "[basx192] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000002C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-7\"}}"
+ },
+ {
+ "description": "[basx202] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000004C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+9\"}}"
+ },
+ {
+ "description": "[basx044] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"012.76\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx042] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx046] strings without E cannot generate E in result",
+ "bson": "180000001364001100000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"17.\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"17\"}}"
+ },
+ {
+ "description": "[basx049] strings without E cannot generate E in result",
+ "bson": "180000001364002C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0044\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"44\"}}"
+ },
+ {
+ "description": "[basx048] strings without E cannot generate E in result",
+ "bson": "180000001364002C00000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"044\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"44\"}}"
+ },
+ {
+ "description": "[basx158] Numbers with E",
+ "bson": "180000001364002C00000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"44E+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"4.4E+10\"}}"
+ },
+ {
+ "description": "[basx068] examples",
+ "bson": "180000001364003200000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"50E-7\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000050\"}}"
+ },
+ {
+ "description": "[basx169] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100e+009\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+11\"}}"
+ },
+ {
+ "description": "[basx167] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100e+09\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+11\"}}"
+ },
+ {
+ "description": "[basx168] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000F43000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100E+90\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+92\"}}"
+ },
+ {
+ "description": "[basx166] Numbers with E",
+ "bson": "180000001364006400000000000000000000000000523000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"100e+9\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+11\"}}"
+ },
+ {
+ "description": "[basx210] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx217] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx216] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx218] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx211] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000163000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-18\"}}"
+ },
+ {
+ "description": "[basx223] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000663000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+22\"}}"
+ },
+ {
+ "description": "[basx215] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx219] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx214] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx220] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+5\"}}"
+ },
+ {
+ "description": "[basx213] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.01265\"}}"
+ },
+ {
+ "description": "[basx221] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+6\"}}"
+ },
+ {
+ "description": "[basx212] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000002E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000001265\"}}"
+ },
+ {
+ "description": "[basx222] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000004E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+10\"}}"
+ },
+ {
+ "description": "[basx006] conform to rules and exponent will be in permitted range).",
+ "bson": "18000000136400E803000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1000\"}}"
+ },
+ {
+ "description": "[basx230] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx237] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1265\"}}"
+ },
+ {
+ "description": "[basx236] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"126.5\"}}"
+ },
+ {
+ "description": "[basx238] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000423000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+1\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+4\"}}"
+ },
+ {
+ "description": "[basx231] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000183000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E-17\"}}"
+ },
+ {
+ "description": "[basx243] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000683000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+20\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+23\"}}"
+ },
+ {
+ "description": "[basx235] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.65\"}}"
+ },
+ {
+ "description": "[basx239] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000443000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+2\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+5\"}}"
+ },
+ {
+ "description": "[basx234] Numbers with E",
+ "bson": "18000000136400F1040000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265\"}}"
+ },
+ {
+ "description": "[basx240] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000463000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+3\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+6\"}}"
+ },
+ {
+ "description": "[basx233] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1265\"}}"
+ },
+ {
+ "description": "[basx241] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000483000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+4\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+7\"}}"
+ },
+ {
+ "description": "[basx232] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E-8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00001265\"}}"
+ },
+ {
+ "description": "[basx242] Numbers with E",
+ "bson": "18000000136400F104000000000000000000000000503000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1265E+8\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.265E+11\"}}"
+ },
+ {
+ "description": "[basx060] strings without E cannot generate E in result",
+ "bson": "18000000136400185C0ACE00000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.5432\"}}"
+ },
+ {
+ "description": "[basx059] strings without E cannot generate E in result",
+ "bson": "18000000136400F198670C08000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0345678.54321\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.54321\"}}"
+ },
+ {
+ "description": "[basx058] strings without E cannot generate E in result",
+ "bson": "180000001364006AF90B7C50000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"345678.543210\"}}"
+ },
+ {
+ "description": "[basx057] strings without E cannot generate E in result",
+ "bson": "180000001364006A19562522020000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"2345678.543210\"}}"
+ },
+ {
+ "description": "[basx056] strings without E cannot generate E in result",
+ "bson": "180000001364006AB9C8733A0B0000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"12345678.543210\"}}"
+ },
+ {
+ "description": "[basx031] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640040AF0D8648700000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123456789.000000\"}}"
+ },
+ {
+ "description": "[basx030] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640080910F8648700000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123456789.123456\"}}"
+ },
+ {
+ "description": "[basx032] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640080910F8648700000000000000000403000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"123456789123456\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-4.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[basx023] conform to rules and exponent will be in permitted range).",
+ "bson": "1800000013640001000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.1\"}}"
+ },
+
+ {
+ "description": "[basx045] strings without E cannot generate E in result",
+ "bson": "1800000013640003000000000000000000000000003A3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+0.003\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.003\"}}"
+ },
+ {
+ "description": "[basx610] Zeros",
+ "bson": "1800000013640000000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0\"}}"
+ },
+ {
+ "description": "[basx612] Zeros",
+ "bson": "1800000013640000000000000000000000000000003EB000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-.0\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.0\"}}"
+ },
+ {
+ "description": "[basx043] strings without E cannot generate E in result",
+ "bson": "18000000136400FC040000000000000000000000003C3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"+12.76\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"12.76\"}}"
+ },
+ {
+ "description": "[basx055] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000303000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000005\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-8\"}}"
+ },
+ {
+ "description": "[basx054] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000323000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0000005\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"5E-7\"}}"
+ },
+ {
+ "description": "[basx052] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000343000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000005\"}}"
+ },
+ {
+ "description": "[basx051] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000363000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"00.00005\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00005\"}}"
+ },
+ {
+ "description": "[basx050] strings without E cannot generate E in result",
+ "bson": "180000001364000500000000000000000000000000383000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.0005\"}}"
+ },
+ {
+ "description": "[basx047] strings without E cannot generate E in result",
+ "bson": "1800000013640005000000000000000000000000003E3000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".5\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.5\"}}"
+ },
+ {
+ "description": "[dqbsr431] check rounding modes heeded (Rounded)",
+ "bson": "1800000013640099761CC7B548F377DC80A131C836FE2F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.1111111111111111111111111111123450\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.111111111111111111111111111112345\"}}"
+ },
+ {
+ "description": "OK2",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FC2F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \".100000000000000000000000000000000000000000000000000000000000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0.1000000000000000000000000000000000\"}}"
+ }
+ ],
+ "parseErrors": [
+ {
+ "description": "[basx564] Near-specials (Conversion_syntax)",
+ "string": "Infi"
+ },
+ {
+ "description": "[basx565] Near-specials (Conversion_syntax)",
+ "string": "Infin"
+ },
+ {
+ "description": "[basx566] Near-specials (Conversion_syntax)",
+ "string": "Infini"
+ },
+ {
+ "description": "[basx567] Near-specials (Conversion_syntax)",
+ "string": "Infinit"
+ },
+ {
+ "description": "[basx568] Near-specials (Conversion_syntax)",
+ "string": "-Infinit"
+ },
+ {
+ "description": "[basx590] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".Infinity"
+ },
+ {
+ "description": "[basx562] Near-specials (Conversion_syntax)",
+ "string": "NaNq"
+ },
+ {
+ "description": "[basx563] Near-specials (Conversion_syntax)",
+ "string": "NaNs"
+ },
+ {
+ "description": "[dqbas939] overflow results at different rounding modes (Overflow & Inexact & Rounded)",
+ "string": "-7e10000"
+ },
+ {
+ "description": "[dqbsr534] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234650"
+ },
+ {
+ "description": "[dqbsr535] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234551"
+ },
+ {
+ "description": "[dqbsr533] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234550"
+ },
+ {
+ "description": "[dqbsr532] negatives (Rounded & Inexact)",
+ "string": "-1.11111111111111111111111111111234549"
+ },
+ {
+ "description": "[dqbsr432] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234549"
+ },
+ {
+ "description": "[dqbsr433] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234550"
+ },
+ {
+ "description": "[dqbsr435] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234551"
+ },
+ {
+ "description": "[dqbsr434] check rounding modes heeded (Rounded & Inexact)",
+ "string": "1.11111111111111111111111111111234650"
+ },
+ {
+ "description": "[dqbas938] overflow results at different rounding modes (Overflow & Inexact & Rounded)",
+ "string": "7e10000"
+ },
+ {
+ "description": "Inexact rounding#1",
+ "string": "100000000000000000000000000000000000000000000000000000000001"
+ },
+ {
+ "description": "Inexact rounding#2",
+ "string": "1E-6177"
+ }
+ ]
+}
+`},
+
+ {"decimal128-5.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "valid": [
+ {
+ "description": "[decq035] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.23E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq037] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq077] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.100000000000000000000000000000000E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq078] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq079] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000000000000000000000000000010E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq080] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq081] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000020000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.00000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6175\"}}"
+ },
+ {
+ "description": "[decq082] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000020000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6175\"}}"
+ },
+ {
+ "description": "[decq083] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0.000000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "[decq084] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "[decq090] underflows cannot be tested for simple copies, check edge cases (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1E-6176\"}}"
+ },
+ {
+ "description": "[decq100] underflows cannot be tested for simple copies, check edge cases (Subnormal)",
+ "bson": "18000000136400FFFFFFFF095BC138938D44C64D31000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"999999999999999999999999999999999e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"9.99999999999999999999999999999999E-6144\"}}"
+ },
+ {
+ "description": "[decq130] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000807F1BCF85B27059C8A43CFEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.23E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.230000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq132] fold-downs (more below) (Clamped)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq177] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.100000000000000000000000000000000E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq178] Nmin and below (Subnormal)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.00000000000000000000000000000000E-6144\"}}"
+ },
+ {
+ "description": "[decq179] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000000000000000000000000000010E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq180] Nmin and below (Subnormal)",
+ "bson": "180000001364000A00000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1.0E-6175\"}}"
+ },
+ {
+ "description": "[decq181] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000028000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.00000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6175\"}}"
+ },
+ {
+ "description": "[decq182] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000028000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6175\"}}"
+ },
+ {
+ "description": "[decq183] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0.000000000000000000000000000000001E-6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "[decq184] Nmin and below (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "[decq190] underflow edge cases (Subnormal)",
+ "bson": "180000001364000100000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-1e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-1E-6176\"}}"
+ },
+ {
+ "description": "[decq200] underflow edge cases (Subnormal)",
+ "bson": "18000000136400FFFFFFFF095BC138938D44C64D31008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-999999999999999999999999999999999e-6176\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-9.99999999999999999999999999999999E-6144\"}}"
+ },
+ {
+ "description": "[decq400] zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq401] zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000000000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6177\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E-6176\"}}"
+ },
+ {
+ "description": "[decq414] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq416] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq418] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"0E+6111\"}}"
+ },
+ {
+ "description": "[decq420] negative zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq421] negative zeros (Clamped)",
+ "bson": "180000001364000000000000000000000000000000008000",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6177\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E-6176\"}}"
+ },
+ {
+ "description": "[decq434] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq436] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq438] clamped zeros... (Clamped)",
+ "bson": "180000001364000000000000000000000000000000FEDF00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+8000\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"-0E+6111\"}}"
+ },
+ {
+ "description": "[decq601] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000000A5BC138938D44C64D31FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6144\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000000E+6144\"}}"
+ },
+ {
+ "description": "[decq603] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000000081EFAC855B416D2DEE04FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6143\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000000E+6143\"}}"
+ },
+ {
+ "description": "[decq605] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000000080264B91C02220BE377E00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6142\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000000E+6142\"}}"
+ },
+ {
+ "description": "[decq607] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000000040EAED7446D09C2C9F0C00FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6141\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000000E+6141\"}}"
+ },
+ {
+ "description": "[decq609] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000A0CA17726DAE0F1E430100FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6140\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000000E+6140\"}}"
+ },
+ {
+ "description": "[decq611] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000106102253E5ECE4F200000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6139\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000000E+6139\"}}"
+ },
+ {
+ "description": "[decq613] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000E83C80D09F3C2E3B030000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6138\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000000E+6138\"}}"
+ },
+ {
+ "description": "[decq615] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000E4D20CC8DCD2B752000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6137\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000000E+6137\"}}"
+ },
+ {
+ "description": "[decq617] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000004A48011416954508000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6136\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000000E+6136\"}}"
+ },
+ {
+ "description": "[decq619] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000000A1EDCCCE1BC2D300000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6135\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000000E+6135\"}}"
+ },
+ {
+ "description": "[decq621] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000080F64AE1C7022D1500000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6134\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000000E+6134\"}}"
+ },
+ {
+ "description": "[decq623] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000040B2BAC9E0191E0200000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6133\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000000E+6133\"}}"
+ },
+ {
+ "description": "[decq625] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000A0DEC5ADC935360000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6132\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000000E+6132\"}}"
+ },
+ {
+ "description": "[decq627] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000010632D5EC76B050000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6131\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000000E+6131\"}}"
+ },
+ {
+ "description": "[decq629] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000E8890423C78A000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6130\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000000E+6130\"}}"
+ },
+ {
+ "description": "[decq631] fold-down full sequence (Clamped)",
+ "bson": "18000000136400000064A7B3B6E00D000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6129\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000000E+6129\"}}"
+ },
+ {
+ "description": "[decq633] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000008A5D78456301000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6128\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000000E+6128\"}}"
+ },
+ {
+ "description": "[decq635] fold-down full sequence (Clamped)",
+ "bson": "180000001364000000C16FF2862300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6127\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000000E+6127\"}}"
+ },
+ {
+ "description": "[decq637] fold-down full sequence (Clamped)",
+ "bson": "180000001364000080C6A47E8D0300000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6126\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000000E+6126\"}}"
+ },
+ {
+ "description": "[decq639] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000407A10F35A0000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6125\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000000E+6125\"}}"
+ },
+ {
+ "description": "[decq641] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000A0724E18090000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6124\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000000E+6124\"}}"
+ },
+ {
+ "description": "[decq643] fold-down full sequence (Clamped)",
+ "bson": "180000001364000010A5D4E8000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6123\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000000E+6123\"}}"
+ },
+ {
+ "description": "[decq645] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000E8764817000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6122\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000000E+6122\"}}"
+ },
+ {
+ "description": "[decq647] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000E40B5402000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6121\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000000E+6121\"}}"
+ },
+ {
+ "description": "[decq649] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000CA9A3B00000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6120\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000000E+6120\"}}"
+ },
+ {
+ "description": "[decq651] fold-down full sequence (Clamped)",
+ "bson": "1800000013640000E1F50500000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6119\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000000E+6119\"}}"
+ },
+ {
+ "description": "[decq653] fold-down full sequence (Clamped)",
+ "bson": "180000001364008096980000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6118\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000000E+6118\"}}"
+ },
+ {
+ "description": "[decq655] fold-down full sequence (Clamped)",
+ "bson": "1800000013640040420F0000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6117\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000000E+6117\"}}"
+ },
+ {
+ "description": "[decq657] fold-down full sequence (Clamped)",
+ "bson": "18000000136400A086010000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6116\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00000E+6116\"}}"
+ },
+ {
+ "description": "[decq659] fold-down full sequence (Clamped)",
+ "bson": "180000001364001027000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6115\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0000E+6115\"}}"
+ },
+ {
+ "description": "[decq661] fold-down full sequence (Clamped)",
+ "bson": "18000000136400E803000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6114\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.000E+6114\"}}"
+ },
+ {
+ "description": "[decq663] fold-down full sequence (Clamped)",
+ "bson": "180000001364006400000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6113\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.00E+6113\"}}"
+ },
+ {
+ "description": "[decq665] fold-down full sequence (Clamped)",
+ "bson": "180000001364000A00000000000000000000000000FE5F00",
+ "extjson": "{\"d\" : {\"$numberDecimal\" : \"1E+6112\"}}",
+ "canonical_extjson": "{\"d\" : {\"$numberDecimal\" : \"1.0E+6112\"}}"
+ }
+ ]
+}
+`},
+
+ {"decimal128-6.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "parseErrors": [
+ {
+ "description": "Incomplete Exponent",
+ "string": "1e"
+ },
+ {
+ "description": "Exponent at the beginning",
+ "string": "E01"
+ },
+ {
+ "description": "Just a decimal place",
+ "string": "."
+ },
+ {
+ "description": "2 decimal places",
+ "string": "..3"
+ },
+ {
+ "description": "2 decimal places",
+ "string": ".13.3"
+ },
+ {
+ "description": "2 decimal places",
+ "string": "1..3"
+ },
+ {
+ "description": "2 decimal places",
+ "string": "1.3.4"
+ },
+ {
+ "description": "2 decimal places",
+ "string": "1.34."
+ },
+ {
+ "description": "Decimal with no digits",
+ "string": ".e"
+ },
+ {
+ "description": "2 signs",
+ "string": "+-32.4"
+ },
+ {
+ "description": "2 signs",
+ "string": "-+32.4"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "--32.4"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "-32.-4"
+ },
+ {
+ "description": "End in negative sign",
+ "string": "32.0-"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "32.4E--21"
+ },
+ {
+ "description": "2 negative signs",
+ "string": "32.4E-2-1"
+ },
+ {
+ "description": "2 signs",
+ "string": "32.4E+-21"
+ },
+ {
+ "description": "Empty string",
+ "string": ""
+ },
+ {
+ "description": "leading white space positive number",
+ "string": " 1"
+ },
+ {
+ "description": "leading white space negative number",
+ "string": " -1"
+ },
+ {
+ "description": "trailing white space",
+ "string": "1 "
+ },
+ {
+ "description": "Invalid",
+ "string": "E"
+ },
+ {
+ "description": "Invalid",
+ "string": "invalid"
+ },
+ {
+ "description": "Invalid",
+ "string": "i"
+ },
+ {
+ "description": "Invalid",
+ "string": "in"
+ },
+ {
+ "description": "Invalid",
+ "string": "-in"
+ },
+ {
+ "description": "Invalid",
+ "string": "Na"
+ },
+ {
+ "description": "Invalid",
+ "string": "-Na"
+ },
+ {
+ "description": "Invalid",
+ "string": "1.23abc"
+ },
+ {
+ "description": "Invalid",
+ "string": "1.23abcE+02"
+ },
+ {
+ "description": "Invalid",
+ "string": "1.23E+0aabs2"
+ }
+ ]
+}
+`},
+
+ {"decimal128-7.json", `
+{
+ "description": "Decimal128",
+ "bson_type": "0x13",
+ "test_key": "d",
+ "parseErrors": [
+ {
+ "description": "[basx572] Near-specials (Conversion_syntax)",
+ "string": "-9Inf"
+ },
+ {
+ "description": "[basx516] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "-1-"
+ },
+ {
+ "description": "[basx533] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "0000.."
+ },
+ {
+ "description": "[basx534] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ".0000."
+ },
+ {
+ "description": "[basx535] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "00..00"
+ },
+ {
+ "description": "[basx569] Near-specials (Conversion_syntax)",
+ "string": "0Inf"
+ },
+ {
+ "description": "[basx571] Near-specials (Conversion_syntax)",
+ "string": "-0Inf"
+ },
+ {
+ "description": "[basx575] Near-specials (Conversion_syntax)",
+ "string": "0sNaN"
+ },
+ {
+ "description": "[basx503] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "++1"
+ },
+ {
+ "description": "[basx504] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "--1"
+ },
+ {
+ "description": "[basx505] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "-+1"
+ },
+ {
+ "description": "[basx506] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "+-1"
+ },
+ {
+ "description": "[basx510] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": " +1"
+ },
+ {
+ "description": "[basx513] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": " + 1"
+ },
+ {
+ "description": "[basx514] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": " - 1"
+ },
+ {
+ "description": "[basx501] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "."
+ },
+ {
+ "description": "[basx502] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ".."
+ },
+ {
+ "description": "[basx519] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ""
+ },
+ {
+ "description": "[basx525] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "e100"
+ },
+ {
+ "description": "[basx549] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "e+1"
+ },
+ {
+ "description": "[basx577] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".e+1"
+ },
+ {
+ "description": "[basx578] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.e+1"
+ },
+ {
+ "description": "[basx581] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "E+1"
+ },
+ {
+ "description": "[basx582] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".E+1"
+ },
+ {
+ "description": "[basx583] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.E+1"
+ },
+ {
+ "description": "[basx579] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.e+"
+ },
+ {
+ "description": "[basx580] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.e"
+ },
+ {
+ "description": "[basx584] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.E+"
+ },
+ {
+ "description": "[basx585] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.E"
+ },
+ {
+ "description": "[basx589] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.Inf"
+ },
+ {
+ "description": "[basx586] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": ".NaN"
+ },
+ {
+ "description": "[basx587] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "-.NaN"
+ },
+ {
+ "description": "[basx545] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "ONE"
+ },
+ {
+ "description": "[basx561] Near-specials (Conversion_syntax)",
+ "string": "qNaN"
+ },
+ {
+ "description": "[basx573] Near-specials (Conversion_syntax)",
+ "string": "-sNa"
+ },
+ {
+ "description": "[basx588] some baddies with dots and Es and dots and specials (Conversion_syntax)",
+ "string": "+.sNaN"
+ },
+ {
+ "description": "[basx544] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "ten"
+ },
+ {
+ "description": "[basx527] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "u0b65"
+ },
+ {
+ "description": "[basx526] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "u0e5a"
+ },
+ {
+ "description": "[basx515] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "x"
+ },
+ {
+ "description": "[basx574] Near-specials (Conversion_syntax)",
+ "string": "xNaN"
+ },
+ {
+ "description": "[basx530] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": ".123.5"
+ },
+ {
+ "description": "[basx500] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1..2"
+ },
+ {
+ "description": "[basx542] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e1.0"
+ },
+ {
+ "description": "[basx553] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E+1.2.3"
+ },
+ {
+ "description": "[basx543] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e123e"
+ },
+ {
+ "description": "[basx552] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E+1.2"
+ },
+ {
+ "description": "[basx546] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e.1"
+ },
+ {
+ "description": "[basx547] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e1."
+ },
+ {
+ "description": "[basx554] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E++1"
+ },
+ {
+ "description": "[basx555] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E--1"
+ },
+ {
+ "description": "[basx556] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E+-1"
+ },
+ {
+ "description": "[basx557] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E-+1"
+ },
+ {
+ "description": "[basx558] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E'1"
+ },
+ {
+ "description": "[basx559] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E\"1"
+ },
+ {
+ "description": "[basx520] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1e-"
+ },
+ {
+ "description": "[basx560] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1E"
+ },
+ {
+ "description": "[basx548] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1ee"
+ },
+ {
+ "description": "[basx551] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1.2.1"
+ },
+ {
+ "description": "[basx550] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1.23.4"
+ },
+ {
+ "description": "[basx529] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "1.34.5"
+ },
+ {
+ "description": "[basx531] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "01.35."
+ },
+ {
+ "description": "[basx532] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "01.35-"
+ },
+ {
+ "description": "[basx518] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "3+"
+ },
+ {
+ "description": "[basx521] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "7e99999a"
+ },
+ {
+ "description": "[basx570] Near-specials (Conversion_syntax)",
+ "string": "9Inf"
+ },
+ {
+ "description": "[basx512] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12 "
+ },
+ {
+ "description": "[basx517] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12-"
+ },
+ {
+ "description": "[basx507] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12e"
+ },
+ {
+ "description": "[basx508] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12e++"
+ },
+ {
+ "description": "[basx509] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "12f4"
+ },
+ {
+ "description": "[basx536] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e*123"
+ },
+ {
+ "description": "[basx537] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e123-"
+ },
+ {
+ "description": "[basx540] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e1*23"
+ },
+ {
+ "description": "[basx538] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e+12+"
+ },
+ {
+ "description": "[basx539] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111e1-3-"
+ },
+ {
+ "description": "[basx541] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "111E1e+3"
+ },
+ {
+ "description": "[basx528] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "123,65"
+ },
+ {
+ "description": "[basx523] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "7e12356789012x"
+ },
+ {
+ "description": "[basx522] The 'baddies' tests from DiagBigDecimal, plus some new ones (Conversion_syntax)",
+ "string": "7e123567890x"
+ }
+ ]
+}
+`},
+}
diff --git a/bson/decode.go b/bson/decode.go
index 1ec034ea6..7c2d8416a 100644
--- a/bson/decode.go
+++ b/bson/decode.go
@@ -1,18 +1,18 @@
// BSON library for Go
-//
+//
// Copyright (c) 2010-2012 - Gustavo Niemeyer
-//
+//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are met:
-//
+// modification, are permitted provided that the following conditions are met:
+//
// 1. Redistributions of source code must retain the above copyright notice, this
-// list of conditions and the following disclaimer.
+// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
-// and/or other materials provided with the distribution.
-//
+// and/or other materials provided with the distribution.
+//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
@@ -32,6 +32,7 @@ import (
"math"
"net/url"
"reflect"
+ "strconv"
"sync"
"time"
)
@@ -72,35 +73,39 @@ const (
setterAddr
)
-var setterStyle map[reflect.Type]int
+var setterStyles map[reflect.Type]int
var setterIface reflect.Type
var setterMutex sync.RWMutex
func init() {
var iface Setter
setterIface = reflect.TypeOf(&iface).Elem()
- setterStyle = make(map[reflect.Type]int)
+ setterStyles = make(map[reflect.Type]int)
}
-func getSetter(outt reflect.Type, out reflect.Value) Setter {
+func setterStyle(outt reflect.Type) int {
setterMutex.RLock()
- style := setterStyle[outt]
+ style := setterStyles[outt]
setterMutex.RUnlock()
- if style == setterNone {
- return nil
- }
if style == setterUnknown {
setterMutex.Lock()
defer setterMutex.Unlock()
if outt.Implements(setterIface) {
- setterStyle[outt] = setterType
+ setterStyles[outt] = setterType
} else if reflect.PtrTo(outt).Implements(setterIface) {
- setterStyle[outt] = setterAddr
+ setterStyles[outt] = setterAddr
} else {
- setterStyle[outt] = setterNone
- return nil
+ setterStyles[outt] = setterNone
}
- style = setterStyle[outt]
+ style = setterStyles[outt]
+ }
+ return style
+}
+
+func getSetter(outt reflect.Type, out reflect.Value) Setter {
+ style := setterStyle(outt)
+ if style == setterNone {
+ return nil
}
if style == setterAddr {
if !out.CanAddr() {
@@ -320,6 +325,10 @@ func (d *decoder) readArrayDocTo(out reflect.Value) {
func (d *decoder) readSliceDoc(t reflect.Type) interface{} {
tmp := make([]reflect.Value, 0, 8)
elemType := t.Elem()
+ if elemType == typeRawDocElem {
+ d.dropElem(0x04)
+ return reflect.Zero(t).Interface()
+ }
end := int(d.readInt32())
end += d.i - 4
@@ -432,21 +441,31 @@ func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
start := d.i
- if kind == '\x03' {
- // Special case for documents. Delegate to readDocTo().
- switch out.Kind() {
+ if kind == 0x03 {
+ // Delegate unmarshaling of documents.
+ outt := out.Type()
+ outk := out.Kind()
+ switch outk {
case reflect.Interface, reflect.Ptr, reflect.Struct, reflect.Map:
d.readDocTo(out)
- default:
- switch out.Interface().(type) {
- case D:
- out.Set(d.readDocElems(out.Type()))
- case RawD:
- out.Set(d.readRawDocElems(out.Type()))
+ return true
+ }
+ if setterStyle(outt) != setterNone {
+ d.readDocTo(out)
+ return true
+ }
+ if outk == reflect.Slice {
+ switch outt.Elem() {
+ case typeDocElem:
+ out.Set(d.readDocElems(outt))
+ case typeRawDocElem:
+ out.Set(d.readRawDocElems(outt))
default:
d.readDocTo(blackHole)
}
+ return true
}
+ d.readDocTo(blackHole)
return true
}
@@ -461,6 +480,11 @@ func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
panic("Can't happen. Handled above.")
case 0x04: // Array
outt := out.Type()
+ if setterStyle(outt) != setterNone {
+ // Skip the value so its data is handed to the setter below.
+ d.dropElem(kind)
+ break
+ }
for outt.Kind() == reflect.Ptr {
outt = outt.Elem()
}
@@ -498,6 +522,8 @@ func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
in = nil
case 0x0B: // RegEx
in = d.readRegEx()
+ case 0x0C:
+ in = DBPointer{Namespace: d.readStr(), Id: ObjectId(d.readBytes(12))}
case 0x0D: // JavaScript without scope
in = JavaScript{Code: d.readStr()}
case 0x0E: // Symbol
@@ -513,6 +539,11 @@ func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
in = MongoTimestamp(d.readInt64())
case 0x12: // Int64
in = d.readInt64()
+ case 0x13: // Decimal128
+ in = Decimal128{
+ l: uint64(d.readInt64()),
+ h: uint64(d.readInt64()),
+ }
case 0x7F: // Max key
in = MaxKey
case 0xFF: // Min key
@@ -594,6 +625,16 @@ func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
out.SetString(string(b))
return true
}
+ case reflect.Int, reflect.Int64:
+ if outt == typeJSONNumber {
+ out.SetString(strconv.FormatInt(inv.Int(), 10))
+ return true
+ }
+ case reflect.Float64:
+ if outt == typeJSONNumber {
+ out.SetString(strconv.FormatFloat(inv.Float(), 'f', -1, 64))
+ return true
+ }
}
case reflect.Slice, reflect.Array:
// Remember, array (0x04) slices are built with the correct
@@ -632,7 +673,7 @@ func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
}
return true
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
- panic("Can't happen. No uint types in BSON?")
+ panic("can't happen: no uint types in BSON (!?)")
}
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
switch inv.Kind() {
@@ -693,6 +734,12 @@ func (d *decoder) readElemTo(out reflect.Value, kind byte) (good bool) {
out.Set(reflect.ValueOf(u).Elem())
return true
}
+ if outt == typeBinary {
+ if b, ok := in.([]byte); ok {
+ out.Set(reflect.ValueOf(Binary{Data: b}))
+ return true
+ }
+ }
}
return false
@@ -746,10 +793,14 @@ func (d *decoder) readCStr() string {
}
func (d *decoder) readBool() bool {
- if d.readByte() == 1 {
+ b := d.readByte()
+ if b == 0 {
+ return false
+ }
+ if b == 1 {
return true
}
- return false
+ panic(fmt.Sprintf("encoded boolean must be 1 or 0, found %d", b))
}
func (d *decoder) readFloat64() float64 {
@@ -786,9 +837,12 @@ func (d *decoder) readByte() byte {
}
func (d *decoder) readBytes(length int32) []byte {
+ if length < 0 {
+ corrupted()
+ }
start := d.i
d.i += int(length)
- if d.i > len(d.in) {
+ if d.i < start || d.i > len(d.in) {
corrupted()
}
return d.in[start : start+int(length)]
diff --git a/bson/encode.go b/bson/encode.go
index 6ba383a23..add39e865 100644
--- a/bson/encode.go
+++ b/bson/encode.go
@@ -1,18 +1,18 @@
// BSON library for Go
-//
+//
// Copyright (c) 2010-2012 - Gustavo Niemeyer
-//
+//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions are met:
-//
+// modification, are permitted provided that the following conditions are met:
+//
// 1. Redistributions of source code must retain the above copyright notice, this
-// list of conditions and the following disclaimer.
+// list of conditions and the following disclaimer.
// 2. Redistributions in binary form must reproduce the above copyright notice,
// this list of conditions and the following disclaimer in the documentation
-// and/or other materials provided with the distribution.
-//
+// and/or other materials provided with the distribution.
+//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
// ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
// WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
@@ -28,6 +28,7 @@
package bson
import (
+ "encoding/json"
"fmt"
"math"
"net/url"
@@ -42,6 +43,7 @@ import (
var (
typeBinary = reflect.TypeOf(Binary{})
typeObjectId = reflect.TypeOf(ObjectId(""))
+ typeDBPointer = reflect.TypeOf(DBPointer{"", ObjectId("")})
typeSymbol = reflect.TypeOf(Symbol(""))
typeMongoTimestamp = reflect.TypeOf(MongoTimestamp(0))
typeOrderKey = reflect.TypeOf(MinKey)
@@ -51,6 +53,7 @@ var (
typeURL = reflect.TypeOf(url.URL{})
typeTime = reflect.TypeOf(time.Time{})
typeString = reflect.TypeOf("")
+ typeJSONNumber = reflect.TypeOf(json.Number(""))
)
const itoaCacheSize = 32
@@ -98,7 +101,10 @@ func (e *encoder) addDoc(v reflect.Value) {
if v.Type() == typeRaw {
raw := v.Interface().(Raw)
if raw.Kind != 0x03 && raw.Kind != 0x00 {
- panic("Attempted to unmarshal Raw kind " + strconv.Itoa(int(raw.Kind)) + " as a document")
+ panic("Attempted to marshal Raw kind " + strconv.Itoa(int(raw.Kind)) + " as a document")
+ }
+ if len(raw.Data) == 0 {
+ panic("Attempted to marshal empty Raw document")
}
e.addBytes(raw.Data...)
return
@@ -177,10 +183,14 @@ func isZero(v reflect.Value) bool {
case reflect.Bool:
return !v.Bool()
case reflect.Struct:
- if v.Type() == typeTime {
+ vt := v.Type()
+ if vt == typeTime {
return v.Interface().(time.Time).IsZero()
}
- for i := v.NumField()-1; i >= 0; i-- {
+ for i := 0; i < v.NumField(); i++ {
+ if vt.Field(i).PkgPath != "" && !vt.Field(i).Anonymous {
+ continue // Private field
+ }
if !isZero(v.Field(i)) {
return false
}
@@ -205,7 +215,7 @@ func (e *encoder) addSlice(v reflect.Value) {
return
}
l := v.Len()
- et := v.Type().Elem()
+ et := v.Type().Elem()
if et == typeDocElem {
for i := 0; i < l; i++ {
elem := v.Index(i).Interface().(DocElem)
@@ -237,7 +247,7 @@ func (e *encoder) addElemName(kind byte, name string) {
func (e *encoder) addElem(name string, v reflect.Value, minSize bool) {
if !v.IsValid() {
- e.addElemName('\x0A', name)
+ e.addElemName(0x0A, name)
return
}
@@ -266,59 +276,70 @@ func (e *encoder) addElem(name string, v reflect.Value, minSize bool) {
panic("ObjectIDs must be exactly 12 bytes long (got " +
strconv.Itoa(len(s)) + ")")
}
- e.addElemName('\x07', name)
+ e.addElemName(0x07, name)
e.addBytes([]byte(s)...)
case typeSymbol:
- e.addElemName('\x0E', name)
+ e.addElemName(0x0E, name)
e.addStr(s)
+ case typeJSONNumber:
+ n := v.Interface().(json.Number)
+ if i, err := n.Int64(); err == nil {
+ e.addElemName(0x12, name)
+ e.addInt64(i)
+ } else if f, err := n.Float64(); err == nil {
+ e.addElemName(0x01, name)
+ e.addFloat64(f)
+ } else {
+ panic("failed to convert json.Number to a number: " + s)
+ }
default:
- e.addElemName('\x02', name)
+ e.addElemName(0x02, name)
e.addStr(s)
}
case reflect.Float32, reflect.Float64:
- e.addElemName('\x01', name)
- e.addInt64(int64(math.Float64bits(v.Float())))
+ e.addElemName(0x01, name)
+ e.addFloat64(v.Float())
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
u := v.Uint()
if int64(u) < 0 {
panic("BSON has no uint64 type, and value is too large to fit correctly in an int64")
} else if u <= math.MaxInt32 && (minSize || v.Kind() <= reflect.Uint32) {
- e.addElemName('\x10', name)
+ e.addElemName(0x10, name)
e.addInt32(int32(u))
} else {
- e.addElemName('\x12', name)
+ e.addElemName(0x12, name)
e.addInt64(int64(u))
}
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
switch v.Type() {
case typeMongoTimestamp:
- e.addElemName('\x11', name)
+ e.addElemName(0x11, name)
e.addInt64(v.Int())
case typeOrderKey:
if v.Int() == int64(MaxKey) {
- e.addElemName('\x7F', name)
+ e.addElemName(0x7F, name)
} else {
- e.addElemName('\xFF', name)
+ e.addElemName(0xFF, name)
}
default:
i := v.Int()
if (minSize || v.Type().Kind() != reflect.Int64) && i >= math.MinInt32 && i <= math.MaxInt32 {
// It fits into an int32, encode as such.
- e.addElemName('\x10', name)
+ e.addElemName(0x10, name)
e.addInt32(int32(i))
} else {
- e.addElemName('\x12', name)
+ e.addElemName(0x12, name)
e.addInt64(i)
}
}
case reflect.Bool:
- e.addElemName('\x08', name)
+ e.addElemName(0x08, name)
if v.Bool() {
e.addBytes(1)
} else {
@@ -326,30 +347,40 @@ func (e *encoder) addElem(name string, v reflect.Value, minSize bool) {
}
case reflect.Map:
- e.addElemName('\x03', name)
+ e.addElemName(0x03, name)
e.addDoc(v)
case reflect.Slice:
vt := v.Type()
et := vt.Elem()
if et.Kind() == reflect.Uint8 {
- e.addElemName('\x05', name)
- e.addBinary('\x00', v.Bytes())
+ e.addElemName(0x05, name)
+ e.addBinary(0x00, v.Bytes())
} else if et == typeDocElem || et == typeRawDocElem {
- e.addElemName('\x03', name)
+ e.addElemName(0x03, name)
e.addDoc(v)
} else {
- e.addElemName('\x04', name)
+ e.addElemName(0x04, name)
e.addDoc(v)
}
case reflect.Array:
et := v.Type().Elem()
if et.Kind() == reflect.Uint8 {
- e.addElemName('\x05', name)
- e.addBinary('\x00', v.Slice(0, v.Len()).Interface().([]byte))
+ e.addElemName(0x05, name)
+ if v.CanAddr() {
+ e.addBinary(0x00, v.Slice(0, v.Len()).Interface().([]byte))
+ } else {
+ n := v.Len()
+ e.addInt32(int32(n))
+ e.addBytes(0x00)
+ for i := 0; i < n; i++ {
+ el := v.Index(i)
+ e.addBytes(byte(el.Uint()))
+ }
+ }
} else {
- e.addElemName('\x04', name)
+ e.addElemName(0x04, name)
e.addDoc(v)
}
@@ -361,24 +392,41 @@ func (e *encoder) addElem(name string, v reflect.Value, minSize bool) {
if kind == 0x00 {
kind = 0x03
}
+ if len(s.Data) == 0 && kind != 0x06 && kind != 0x0A && kind != 0xFF && kind != 0x7F {
+ panic("Attempted to marshal empty Raw document")
+ }
e.addElemName(kind, name)
e.addBytes(s.Data...)
case Binary:
- e.addElemName('\x05', name)
+ e.addElemName(0x05, name)
e.addBinary(s.Kind, s.Data)
+ case Decimal128:
+ e.addElemName(0x13, name)
+ e.addInt64(int64(s.l))
+ e.addInt64(int64(s.h))
+
+ case DBPointer:
+ e.addElemName(0x0C, name)
+ e.addStr(s.Namespace)
+ if len(s.Id) != 12 {
+ panic("ObjectIDs must be exactly 12 bytes long (got " +
+ strconv.Itoa(len(s.Id)) + ")")
+ }
+ e.addBytes([]byte(s.Id)...)
+
case RegEx:
- e.addElemName('\x0B', name)
+ e.addElemName(0x0B, name)
e.addCStr(s.Pattern)
e.addCStr(s.Options)
case JavaScript:
if s.Scope == nil {
- e.addElemName('\x0D', name)
+ e.addElemName(0x0D, name)
e.addStr(s.Code)
} else {
- e.addElemName('\x0F', name)
+ e.addElemName(0x0F, name)
start := e.reserveInt32()
e.addStr(s.Code)
e.addDoc(reflect.ValueOf(s.Scope))
@@ -387,18 +435,18 @@ func (e *encoder) addElem(name string, v reflect.Value, minSize bool) {
case time.Time:
// MongoDB handles timestamps as milliseconds.
- e.addElemName('\x09', name)
- e.addInt64(s.Unix() * 1000 + int64(s.Nanosecond() / 1e6))
+ e.addElemName(0x09, name)
+ e.addInt64(s.Unix()*1000 + int64(s.Nanosecond()/1e6))
case url.URL:
- e.addElemName('\x02', name)
+ e.addElemName(0x02, name)
e.addStr(s.String())
case undefined:
- e.addElemName('\x06', name)
+ e.addElemName(0x06, name)
default:
- e.addElemName('\x03', name)
+ e.addElemName(0x03, name)
e.addDoc(v)
}
@@ -457,6 +505,10 @@ func (e *encoder) addInt64(v int64) {
byte(u>>32), byte(u>>40), byte(u>>48), byte(u>>56))
}
+func (e *encoder) addFloat64(v float64) {
+ e.addInt64(int64(math.Float64bits(v)))
+}
+
func (e *encoder) addBytes(v ...byte) {
e.out = append(e.out, v...)
}
diff --git a/bson/json.go b/bson/json.go
new file mode 100644
index 000000000..09df8260a
--- /dev/null
+++ b/bson/json.go
@@ -0,0 +1,380 @@
+package bson
+
+import (
+ "bytes"
+ "encoding/base64"
+ "fmt"
+ "gopkg.in/mgo.v2/internal/json"
+ "strconv"
+ "time"
+)
+
+// UnmarshalJSON unmarshals a JSON value that may hold non-standard
+// syntax as defined in BSON's extended JSON specification.
+func UnmarshalJSON(data []byte, value interface{}) error {
+ d := json.NewDecoder(bytes.NewBuffer(data))
+ d.Extend(&jsonExt)
+ return d.Decode(value)
+}
+
+// MarshalJSON marshals a JSON value that may hold non-standard
+// syntax as defined in BSON's extended JSON specification.
+func MarshalJSON(value interface{}) ([]byte, error) {
+ var buf bytes.Buffer
+ e := json.NewEncoder(&buf)
+ e.Extend(&jsonExt)
+ err := e.Encode(value)
+ if err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+// jdec is used internally by the JSON decoding functions
+// so they may unmarshal functions without getting into endless
+// recursion due to keyed objects.
+func jdec(data []byte, value interface{}) error {
+ d := json.NewDecoder(bytes.NewBuffer(data))
+ d.Extend(&funcExt)
+ return d.Decode(value)
+}
+
+var jsonExt json.Extension
+var funcExt json.Extension
+
+// TODO
+// - Shell regular expressions ("/regexp/opts")
+
+func init() {
+ jsonExt.DecodeUnquotedKeys(true)
+ jsonExt.DecodeTrailingCommas(true)
+
+ funcExt.DecodeFunc("BinData", "$binaryFunc", "$type", "$binary")
+ jsonExt.DecodeKeyed("$binary", jdecBinary)
+ jsonExt.DecodeKeyed("$binaryFunc", jdecBinary)
+ jsonExt.EncodeType([]byte(nil), jencBinarySlice)
+ jsonExt.EncodeType(Binary{}, jencBinaryType)
+
+ funcExt.DecodeFunc("ISODate", "$dateFunc", "S")
+ funcExt.DecodeFunc("new Date", "$dateFunc", "S")
+ jsonExt.DecodeKeyed("$date", jdecDate)
+ jsonExt.DecodeKeyed("$dateFunc", jdecDate)
+ jsonExt.EncodeType(time.Time{}, jencDate)
+
+ funcExt.DecodeFunc("Timestamp", "$timestamp", "t", "i")
+ jsonExt.DecodeKeyed("$timestamp", jdecTimestamp)
+ jsonExt.EncodeType(MongoTimestamp(0), jencTimestamp)
+
+ funcExt.DecodeConst("undefined", Undefined)
+
+ jsonExt.DecodeKeyed("$regex", jdecRegEx)
+ jsonExt.EncodeType(RegEx{}, jencRegEx)
+
+ funcExt.DecodeFunc("ObjectId", "$oidFunc", "Id")
+ jsonExt.DecodeKeyed("$oid", jdecObjectId)
+ jsonExt.DecodeKeyed("$oidFunc", jdecObjectId)
+ jsonExt.EncodeType(ObjectId(""), jencObjectId)
+
+ funcExt.DecodeFunc("DBRef", "$dbrefFunc", "$ref", "$id")
+ jsonExt.DecodeKeyed("$dbrefFunc", jdecDBRef)
+
+ funcExt.DecodeFunc("NumberLong", "$numberLongFunc", "N")
+ jsonExt.DecodeKeyed("$numberLong", jdecNumberLong)
+ jsonExt.DecodeKeyed("$numberLongFunc", jdecNumberLong)
+ jsonExt.EncodeType(int64(0), jencNumberLong)
+ jsonExt.EncodeType(int(0), jencInt)
+
+ funcExt.DecodeConst("MinKey", MinKey)
+ funcExt.DecodeConst("MaxKey", MaxKey)
+ jsonExt.DecodeKeyed("$minKey", jdecMinKey)
+ jsonExt.DecodeKeyed("$maxKey", jdecMaxKey)
+ jsonExt.EncodeType(orderKey(0), jencMinMaxKey)
+
+ jsonExt.DecodeKeyed("$undefined", jdecUndefined)
+ jsonExt.EncodeType(Undefined, jencUndefined)
+
+ jsonExt.Extend(&funcExt)
+}
+
+func fbytes(format string, args ...interface{}) []byte {
+ var buf bytes.Buffer
+ fmt.Fprintf(&buf, format, args...)
+ return buf.Bytes()
+}
+
+func jdecBinary(data []byte) (interface{}, error) {
+ var v struct {
+ Binary []byte `json:"$binary"`
+ Type string `json:"$type"`
+ Func struct {
+ Binary []byte `json:"$binary"`
+ Type int64 `json:"$type"`
+ } `json:"$binaryFunc"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+
+ var binData []byte
+ var binKind int64
+ if v.Type == "" && v.Binary == nil {
+ binData = v.Func.Binary
+ binKind = v.Func.Type
+ } else if v.Type == "" {
+ return v.Binary, nil
+ } else {
+ binData = v.Binary
+ binKind, err = strconv.ParseInt(v.Type, 0, 64)
+ if err != nil {
+ binKind = -1
+ }
+ }
+
+ if binKind == 0 {
+ return binData, nil
+ }
+ if binKind < 0 || binKind > 255 {
+ return nil, fmt.Errorf("invalid type in binary object: %s", data)
+ }
+
+ return Binary{Kind: byte(binKind), Data: binData}, nil
+}
+
+func jencBinarySlice(v interface{}) ([]byte, error) {
+ in := v.([]byte)
+ out := make([]byte, base64.StdEncoding.EncodedLen(len(in)))
+ base64.StdEncoding.Encode(out, in)
+ return fbytes(`{"$binary":"%s","$type":"0x0"}`, out), nil
+}
+
+func jencBinaryType(v interface{}) ([]byte, error) {
+ in := v.(Binary)
+ out := make([]byte, base64.StdEncoding.EncodedLen(len(in.Data)))
+ base64.StdEncoding.Encode(out, in.Data)
+ return fbytes(`{"$binary":"%s","$type":"0x%x"}`, out, in.Kind), nil
+}
+
+const jdateFormat = "2006-01-02T15:04:05.999Z"
+
+func jdecDate(data []byte) (interface{}, error) {
+ var v struct {
+ S string `json:"$date"`
+ Func struct {
+ S string
+ } `json:"$dateFunc"`
+ }
+ _ = jdec(data, &v)
+ if v.S == "" {
+ v.S = v.Func.S
+ }
+ if v.S != "" {
+ for _, format := range []string{jdateFormat, "2006-01-02"} {
+ t, err := time.Parse(format, v.S)
+ if err == nil {
+ return t, nil
+ }
+ }
+ return nil, fmt.Errorf("cannot parse date: %q", v.S)
+ }
+
+ var vn struct {
+ Date struct {
+ N int64 `json:"$numberLong,string"`
+ } `json:"$date"`
+ Func struct {
+ S int64
+ } `json:"$dateFunc"`
+ }
+ err := jdec(data, &vn)
+ if err != nil {
+ return nil, fmt.Errorf("cannot parse date: %q", data)
+ }
+ n := vn.Date.N
+ if n == 0 {
+ n = vn.Func.S
+ }
+ return time.Unix(n/1000, n%1000*1e6).UTC(), nil
+}
+
+func jencDate(v interface{}) ([]byte, error) {
+ t := v.(time.Time)
+ return fbytes(`{"$date":%q}`, t.Format(jdateFormat)), nil
+}
+
+func jdecTimestamp(data []byte) (interface{}, error) {
+ var v struct {
+ Func struct {
+ T int32 `json:"t"`
+ I int32 `json:"i"`
+ } `json:"$timestamp"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ return MongoTimestamp(uint64(v.Func.T)<<32 | uint64(uint32(v.Func.I))), nil
+}
+
+func jencTimestamp(v interface{}) ([]byte, error) {
+ ts := uint64(v.(MongoTimestamp))
+ return fbytes(`{"$timestamp":{"t":%d,"i":%d}}`, ts>>32, uint32(ts)), nil
+}
+
+func jdecRegEx(data []byte) (interface{}, error) {
+ var v struct {
+ Regex string `json:"$regex"`
+ Options string `json:"$options"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ return RegEx{v.Regex, v.Options}, nil
+}
+
+func jencRegEx(v interface{}) ([]byte, error) {
+ re := v.(RegEx)
+ type regex struct {
+ Regex string `json:"$regex"`
+ Options string `json:"$options"`
+ }
+ return json.Marshal(regex{re.Pattern, re.Options})
+}
+
+func jdecObjectId(data []byte) (interface{}, error) {
+ var v struct {
+ Id string `json:"$oid"`
+ Func struct {
+ Id string
+ } `json:"$oidFunc"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if v.Id == "" {
+ v.Id = v.Func.Id
+ }
+ return ObjectIdHex(v.Id), nil
+}
+
+func jencObjectId(v interface{}) ([]byte, error) {
+ return fbytes(`{"$oid":"%s"}`, v.(ObjectId).Hex()), nil
+}
+
+func jdecDBRef(data []byte) (interface{}, error) {
+ // TODO Support unmarshaling $ref and $id into the input value.
+ var v struct {
+ Obj map[string]interface{} `json:"$dbrefFunc"`
+ }
+ // TODO Fix this. Must not be required.
+ v.Obj = make(map[string]interface{})
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ return v.Obj, nil
+}
+
+func jdecNumberLong(data []byte) (interface{}, error) {
+ var v struct {
+ N int64 `json:"$numberLong,string"`
+ Func struct {
+ N int64 `json:",string"`
+ } `json:"$numberLongFunc"`
+ }
+ var vn struct {
+ N int64 `json:"$numberLong"`
+ Func struct {
+ N int64
+ } `json:"$numberLongFunc"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ err = jdec(data, &vn)
+ v.N = vn.N
+ v.Func.N = vn.Func.N
+ }
+ if err != nil {
+ return nil, err
+ }
+ if v.N != 0 {
+ return v.N, nil
+ }
+ return v.Func.N, nil
+}
+
+func jencNumberLong(v interface{}) ([]byte, error) {
+ n := v.(int64)
+ f := `{"$numberLong":"%d"}`
+ if n <= 1<<53 {
+ f = `{"$numberLong":%d}`
+ }
+ return fbytes(f, n), nil
+}
+
+func jencInt(v interface{}) ([]byte, error) {
+ n := v.(int)
+ f := `{"$numberLong":"%d"}`
+ if int64(n) <= 1<<53 {
+ f = `%d`
+ }
+ return fbytes(f, n), nil
+}
+
+func jdecMinKey(data []byte) (interface{}, error) {
+ var v struct {
+ N int64 `json:"$minKey"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if v.N != 1 {
+ return nil, fmt.Errorf("invalid $minKey object: %s", data)
+ }
+ return MinKey, nil
+}
+
+func jdecMaxKey(data []byte) (interface{}, error) {
+ var v struct {
+ N int64 `json:"$maxKey"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if v.N != 1 {
+ return nil, fmt.Errorf("invalid $maxKey object: %s", data)
+ }
+ return MaxKey, nil
+}
+
+func jencMinMaxKey(v interface{}) ([]byte, error) {
+ switch v.(orderKey) {
+ case MinKey:
+ return []byte(`{"$minKey":1}`), nil
+ case MaxKey:
+ return []byte(`{"$maxKey":1}`), nil
+ }
+ panic(fmt.Sprintf("invalid $minKey/$maxKey value: %d", v))
+}
+
+func jdecUndefined(data []byte) (interface{}, error) {
+ var v struct {
+ B bool `json:"$undefined"`
+ }
+ err := jdec(data, &v)
+ if err != nil {
+ return nil, err
+ }
+ if !v.B {
+ return nil, fmt.Errorf("invalid $undefined object: %s", data)
+ }
+ return Undefined, nil
+}
+
+func jencUndefined(v interface{}) ([]byte, error) {
+ return []byte(`{"$undefined":true}`), nil
+}
diff --git a/bson/json_test.go b/bson/json_test.go
new file mode 100644
index 000000000..866f51c34
--- /dev/null
+++ b/bson/json_test.go
@@ -0,0 +1,184 @@
+package bson_test
+
+import (
+ "gopkg.in/mgo.v2/bson"
+
+ . "gopkg.in/check.v1"
+ "reflect"
+ "strings"
+ "time"
+)
+
+type jsonTest struct {
+ a interface{} // value encoded into JSON (optional)
+	b string      // JSON expected as output of a, and used as input to c
+	c interface{} // Value expected from decoding b, defaults to a
+ e string // error string, if decoding (b) should fail
+}
+
+var jsonTests = []jsonTest{
+ // $binary
+ {
+ a: []byte("foo"),
+ b: `{"$binary":"Zm9v","$type":"0x0"}`,
+ }, {
+ a: bson.Binary{Kind: 2, Data: []byte("foo")},
+ b: `{"$binary":"Zm9v","$type":"0x2"}`,
+ }, {
+ b: `BinData(2,"Zm9v")`,
+ c: bson.Binary{Kind: 2, Data: []byte("foo")},
+ },
+
+ // $date
+ {
+ a: time.Date(2016, 5, 15, 1, 2, 3, 4000000, time.UTC),
+ b: `{"$date":"2016-05-15T01:02:03.004Z"}`,
+ }, {
+ b: `{"$date": {"$numberLong": "1002"}}`,
+ c: time.Date(1970, 1, 1, 0, 0, 1, 2e6, time.UTC),
+ }, {
+ b: `ISODate("2016-05-15T01:02:03.004Z")`,
+ c: time.Date(2016, 5, 15, 1, 2, 3, 4000000, time.UTC),
+ }, {
+ b: `new Date(1000)`,
+ c: time.Date(1970, 1, 1, 0, 0, 1, 0, time.UTC),
+ }, {
+ b: `new Date("2016-05-15")`,
+ c: time.Date(2016, 5, 15, 0, 0, 0, 0, time.UTC),
+ },
+
+ // $timestamp
+ {
+ a: bson.MongoTimestamp(4294967298),
+ b: `{"$timestamp":{"t":1,"i":2}}`,
+ }, {
+ b: `Timestamp(1, 2)`,
+ c: bson.MongoTimestamp(4294967298),
+ },
+
+ // $regex
+ {
+ a: bson.RegEx{"pattern", "options"},
+ b: `{"$regex":"pattern","$options":"options"}`,
+ },
+
+ // $oid
+ {
+ a: bson.ObjectIdHex("0123456789abcdef01234567"),
+ b: `{"$oid":"0123456789abcdef01234567"}`,
+ }, {
+ b: `ObjectId("0123456789abcdef01234567")`,
+ c: bson.ObjectIdHex("0123456789abcdef01234567"),
+ },
+
+ // $ref (no special type)
+ {
+ b: `DBRef("name", "id")`,
+ c: map[string]interface{}{"$ref": "name", "$id": "id"},
+ },
+
+ // $numberLong
+ {
+ a: 123,
+ b: `123`,
+ }, {
+ a: int64(9007199254740992),
+ b: `{"$numberLong":9007199254740992}`,
+ }, {
+ a: int64(1<<53 + 1),
+ b: `{"$numberLong":"9007199254740993"}`,
+ }, {
+ a: 1<<53 + 1,
+ b: `{"$numberLong":"9007199254740993"}`,
+ c: int64(9007199254740993),
+ }, {
+ b: `NumberLong(9007199254740992)`,
+ c: int64(1 << 53),
+ }, {
+ b: `NumberLong("9007199254740993")`,
+ c: int64(1<<53 + 1),
+ },
+
+ // $minKey, $maxKey
+ {
+ a: bson.MinKey,
+ b: `{"$minKey":1}`,
+ }, {
+ a: bson.MaxKey,
+ b: `{"$maxKey":1}`,
+ }, {
+ b: `MinKey`,
+ c: bson.MinKey,
+ }, {
+ b: `MaxKey`,
+ c: bson.MaxKey,
+ }, {
+ b: `{"$minKey":0}`,
+ e: `invalid $minKey object: {"$minKey":0}`,
+ }, {
+ b: `{"$maxKey":0}`,
+ e: `invalid $maxKey object: {"$maxKey":0}`,
+ },
+
+ {
+ a: bson.Undefined,
+ b: `{"$undefined":true}`,
+ }, {
+ b: `undefined`,
+ c: bson.Undefined,
+ }, {
+ b: `{"v": undefined}`,
+ c: struct{ V interface{} }{bson.Undefined},
+ },
+
+ // Unquoted keys and trailing commas
+ {
+ b: `{$foo: ["bar",],}`,
+ c: map[string]interface{}{"$foo": []interface{}{"bar"}},
+ },
+}
+
+func (s *S) TestJSON(c *C) {
+ for i, item := range jsonTests {
+ c.Logf("------------ (#%d)", i)
+ c.Logf("A: %#v", item.a)
+ c.Logf("B: %#v", item.b)
+
+ if item.c == nil {
+ item.c = item.a
+ } else {
+ c.Logf("C: %#v", item.c)
+ }
+ if item.e != "" {
+ c.Logf("E: %s", item.e)
+ }
+
+ if item.a != nil {
+ data, err := bson.MarshalJSON(item.a)
+ c.Assert(err, IsNil)
+ c.Logf("Dumped: %#v", string(data))
+ c.Assert(strings.TrimSuffix(string(data), "\n"), Equals, item.b)
+ }
+
+ var zero interface{}
+ if item.c == nil {
+ zero = &struct{}{}
+ } else {
+ zero = reflect.New(reflect.TypeOf(item.c)).Interface()
+ }
+ err := bson.UnmarshalJSON([]byte(item.b), zero)
+ if item.e != "" {
+ c.Assert(err, NotNil)
+ c.Assert(err.Error(), Equals, item.e)
+ continue
+ }
+ c.Assert(err, IsNil)
+ zerov := reflect.ValueOf(zero)
+ value := zerov.Interface()
+ if zerov.Kind() == reflect.Ptr {
+ value = zerov.Elem().Interface()
+ }
+ c.Logf("Loaded: %#v", value)
+ c.Assert(value, DeepEquals, item.c)
+ }
+}
diff --git a/bson/specdata/update.sh b/bson/specdata/update.sh
new file mode 100755
index 000000000..1efd3d3b6
--- /dev/null
+++ b/bson/specdata/update.sh
@@ -0,0 +1,27 @@
+#!/bin/sh
+
+set -e
+
+if [ ! -d specifications ]; then
+ git clone -b bson git@github.com:jyemin/specifications
+fi
+
+TESTFILE="../specdata_test.go"
+
+cat <<END > $TESTFILE
+package bson_test
+
+var specTests = []string{
+END
+
+for file in specifications/source/bson/tests/*.yml; do
+ (
+ echo '`'
+ cat $file
+ echo -n '`,'
+ ) >> $TESTFILE
+done
+
+echo '}' >> $TESTFILE
+
+gofmt -w $TESTFILE
diff --git a/bson/specdata_test.go b/bson/specdata_test.go
new file mode 100644
index 000000000..513f9b209
--- /dev/null
+++ b/bson/specdata_test.go
@@ -0,0 +1,241 @@
+package bson_test
+
+var specTests = []string{
+ `
+---
+description: "Array type"
+documents:
+ -
+ decoded:
+ a : []
+ encoded: 0D000000046100050000000000
+ -
+ decoded:
+ a: [10]
+ encoded: 140000000461000C0000001030000A0000000000
+ -
+ # Decode an array that uses an empty string as the key
+ decodeOnly : true
+ decoded:
+ a: [10]
+ encoded: 130000000461000B00000010000A0000000000
+ -
+ # Decode an array that uses a non-numeric string as the key
+ decodeOnly : true
+ decoded:
+ a: [10]
+ encoded: 150000000461000D000000106162000A0000000000
+
+
+`, `
+---
+description: "Boolean type"
+documents:
+ -
+ encoded: "090000000862000100"
+ decoded: { "b" : true }
+ -
+ encoded: "090000000862000000"
+ decoded: { "b" : false }
+
+
+ `, `
+---
+description: "Corrupted BSON"
+documents:
+ -
+ encoded: "09000000016600"
+ error: "truncated double"
+ -
+ encoded: "09000000026600"
+ error: "truncated string"
+ -
+ encoded: "09000000036600"
+ error: "truncated document"
+ -
+ encoded: "09000000046600"
+ error: "truncated array"
+ -
+ encoded: "09000000056600"
+ error: "truncated binary"
+ -
+ encoded: "09000000076600"
+ error: "truncated objectid"
+ -
+ encoded: "09000000086600"
+ error: "truncated boolean"
+ -
+ encoded: "09000000096600"
+ error: "truncated date"
+ -
+ encoded: "090000000b6600"
+ error: "truncated regex"
+ -
+ encoded: "090000000c6600"
+ error: "truncated db pointer"
+ -
+ encoded: "0C0000000d6600"
+ error: "truncated javascript"
+ -
+ encoded: "0C0000000e6600"
+ error: "truncated symbol"
+ -
+ encoded: "0C0000000f6600"
+ error: "truncated javascript with scope"
+ -
+ encoded: "0C000000106600"
+ error: "truncated int32"
+ -
+ encoded: "0C000000116600"
+ error: "truncated timestamp"
+ -
+ encoded: "0C000000126600"
+ error: "truncated int64"
+ -
+ encoded: "0400000000"
+ error: basic
+ -
+ encoded: "0500000001"
+ error: basic
+ -
+ encoded: "05000000"
+ error: basic
+ -
+ encoded: "0700000002610078563412"
+ error: basic
+ -
+ encoded: "090000001061000500"
+ error: basic
+ -
+ encoded: "00000000000000000000"
+ error: basic
+ -
+ encoded: "1300000002666f6f00040000006261720000"
+ error: "basic"
+ -
+ encoded: "1800000003666f6f000f0000001062617200ffffff7f0000"
+ error: basic
+ -
+ encoded: "1500000003666f6f000c0000000862617200010000"
+ error: basic
+ -
+ encoded: "1c00000003666f6f001200000002626172000500000062617a000000"
+ error: basic
+ -
+ encoded: "1000000002610004000000616263ff00"
+ error: string is not null-terminated
+ -
+ encoded: "0c0000000200000000000000"
+ error: bad_string_length
+ -
+ encoded: "120000000200ffffffff666f6f6261720000"
+ error: bad_string_length
+ -
+ encoded: "0c0000000e00000000000000"
+ error: bad_string_length
+ -
+ encoded: "120000000e00ffffffff666f6f6261720000"
+ error: bad_string_length
+ -
+ encoded: "180000000c00fa5bd841d6585d9900"
+ error: ""
+ -
+ encoded: "1e0000000c00ffffffff666f6f626172005259b56afa5bd841d6585d9900"
+ error: bad_string_length
+ -
+ encoded: "0c0000000d00000000000000"
+ error: bad_string_length
+ -
+ encoded: "0c0000000d00ffffffff0000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f001500000000000000000c000000020001000000000000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f0015000000ffffffff000c000000020001000000000000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f001500000001000000000c000000020000000000000000"
+ error: bad_string_length
+ -
+ encoded: "1c0000000f001500000001000000000c0000000200ffffffff000000"
+ error: bad_string_length
+ -
+ encoded: "0E00000008616263646566676869707172737475"
+ error: "Run-on CString"
+ -
+ encoded: "0100000000"
+ error: "An object size that's too small to even include the object size, but is correctly encoded, along with a correct EOO (and no data)"
+ -
+ encoded: "1a0000000e74657374000c00000068656c6c6f20776f726c6400000500000000"
+ error: "One object, but with object size listed smaller than it is in the data"
+ -
+ encoded: "05000000"
+ error: "One object, missing the EOO at the end"
+ -
+ encoded: "0500000001"
+ error: "One object, sized correctly, with a spot for an EOO, but the EOO is 0x01"
+ -
+ encoded: "05000000ff"
+ error: "One object, sized correctly, with a spot for an EOO, but the EOO is 0xff"
+ -
+ encoded: "0500000070"
+ error: "One object, sized correctly, with a spot for an EOO, but the EOO is 0x70"
+ -
+ encoded: "07000000000000"
+ error: "Invalid BSON type low range"
+ -
+ encoded: "07000000800000"
+ error: "Invalid BSON type high range"
+ -
+ encoded: "090000000862000200"
+ error: "Invalid boolean value of 2"
+ -
+ encoded: "09000000086200ff00"
+ error: "Invalid boolean value of -1"
+ `, `
+---
+description: "Int32 type"
+documents:
+ -
+ decoded:
+ i: -2147483648
+ encoded: 0C0000001069000000008000
+ -
+ decoded:
+ i: 2147483647
+ encoded: 0C000000106900FFFFFF7F00
+ -
+ decoded:
+ i: -1
+ encoded: 0C000000106900FFFFFFFF00
+ -
+ decoded:
+ i: 0
+ encoded: 0C0000001069000000000000
+ -
+ decoded:
+ i: 1
+ encoded: 0C0000001069000100000000
+
+`, `
+---
+description: "String type"
+documents:
+ -
+ decoded:
+ s : ""
+ encoded: 0D000000027300010000000000
+ -
+ decoded:
+ s: "a"
+ encoded: 0E00000002730002000000610000
+ -
+ decoded:
+ s: "This is a string"
+ encoded: 1D0000000273001100000054686973206973206120737472696E670000
+ -
+ decoded:
+ s: "κόσμε"
+ encoded: 180000000273000C000000CEBAE1BDB9CF83CEBCCEB50000
+`}
diff --git a/bulk.go b/bulk.go
index 5a9d37b70..072a5206a 100644
--- a/bulk.go
+++ b/bulk.go
@@ -1,50 +1,130 @@
package mgo
+import (
+ "bytes"
+ "sort"
+
+ "gopkg.in/mgo.v2/bson"
+)
+
// Bulk represents an operation that can be prepared with several
// orthogonal changes before being delivered to the server.
//
-// WARNING: This API is still experimental.
+// MongoDB servers older than version 2.6 do not have proper support for bulk
+// operations, so the driver attempts to map its API as much as possible into
+// the functionality that works. In particular, in those releases updates and
+// removals are sent individually, and inserts are sent in bulk but have
+// suboptimal error reporting compared to more recent versions of the server.
+// See the documentation of BulkErrorCase for details on that.
//
// Relevant documentation:
//
// http://blog.mongodb.org/post/84922794768/mongodbs-new-bulk-api
//
type Bulk struct {
- c *Collection
- ordered bool
- inserts []interface{}
+ c *Collection
+ opcount int
+ actions []bulkAction
+ ordered bool
}
-// BulkError holds an error returned from running a Bulk operation.
-//
-// TODO: This is private for the moment, until we understand exactly how
-// to report these multi-errors in a useful and convenient way.
-type bulkError struct {
- err error
+type bulkOp int
+
+const (
+ bulkInsert bulkOp = iota + 1
+ bulkUpdate
+ bulkUpdateAll
+ bulkRemove
+)
+
+type bulkAction struct {
+ op bulkOp
+ docs []interface{}
+ idxs []int
}
+type bulkUpdateOp []interface{}
+type bulkDeleteOp []interface{}
+
// BulkResult holds the results for a bulk operation.
type BulkResult struct {
+ Matched int
+ Modified int // Available only for MongoDB 2.6+
+
// Be conservative while we understand exactly how to report these
// results in a useful and convenient way, and also how to emulate
// them with prior servers.
private bool
}
-func (e *bulkError) Error() string {
- return e.err.Error()
+// BulkError holds an error returned from running a Bulk operation.
+// Individual errors may be obtained and inspected via the Cases method.
+type BulkError struct {
+ ecases []BulkErrorCase
}
-// Bulk returns a value to prepare the execution of a bulk operation.
+func (e *BulkError) Error() string {
+ if len(e.ecases) == 0 {
+ return "invalid BulkError instance: no errors"
+ }
+ if len(e.ecases) == 1 {
+ return e.ecases[0].Err.Error()
+ }
+ msgs := make([]string, 0, len(e.ecases))
+ seen := make(map[string]bool)
+ for _, ecase := range e.ecases {
+ msg := ecase.Err.Error()
+ if !seen[msg] {
+ seen[msg] = true
+ msgs = append(msgs, msg)
+ }
+ }
+ if len(msgs) == 1 {
+ return msgs[0]
+ }
+ var buf bytes.Buffer
+ buf.WriteString("multiple errors in bulk operation:\n")
+ for _, msg := range msgs {
+ buf.WriteString(" - ")
+ buf.WriteString(msg)
+ buf.WriteByte('\n')
+ }
+ return buf.String()
+}
+
+type bulkErrorCases []BulkErrorCase
+
+func (slice bulkErrorCases) Len() int { return len(slice) }
+func (slice bulkErrorCases) Less(i, j int) bool { return slice[i].Index < slice[j].Index }
+func (slice bulkErrorCases) Swap(i, j int) { slice[i], slice[j] = slice[j], slice[i] }
+
+// BulkErrorCase holds an individual error found while attempting a single change
+// within a bulk operation, and the position in which it was enqueued.
//
-// WARNING: This API is still experimental.
+// MongoDB servers older than version 2.6 do not have proper support for bulk
+// operations, so the driver attempts to map its API as much as possible into
+// the functionality that works. In particular, only the last error is reported
+// for bulk inserts and without any positional information, so the Index
+// field is set to -1 in these cases.
+type BulkErrorCase struct {
+ Index int // Position of operation that failed, or -1 if unknown.
+ Err error
+}
+
+// Cases returns all individual errors found while attempting the requested changes.
//
+// See the documentation of BulkErrorCase for limitations in older MongoDB releases.
+func (e *BulkError) Cases() []BulkErrorCase {
+ return e.ecases
+}
+
+// Bulk returns a value to prepare the execution of a bulk operation.
func (c *Collection) Bulk() *Bulk {
return &Bulk{c: c, ordered: true}
}
// Unordered puts the bulk operation in unordered mode.
-//
+//
+// In unordered mode the individual operations may be sent
// out of order, which means latter operations may proceed
// even if prior ones have failed.
@@ -52,20 +132,220 @@ func (b *Bulk) Unordered() {
b.ordered = false
}
+func (b *Bulk) action(op bulkOp, opcount int) *bulkAction {
+ var action *bulkAction
+ if len(b.actions) > 0 && b.actions[len(b.actions)-1].op == op {
+ action = &b.actions[len(b.actions)-1]
+ } else if !b.ordered {
+ for i := range b.actions {
+ if b.actions[i].op == op {
+ action = &b.actions[i]
+ break
+ }
+ }
+ }
+ if action == nil {
+ b.actions = append(b.actions, bulkAction{op: op})
+ action = &b.actions[len(b.actions)-1]
+ }
+ for i := 0; i < opcount; i++ {
+ action.idxs = append(action.idxs, b.opcount)
+ b.opcount++
+ }
+ return action
+}
+
// Insert queues up the provided documents for insertion.
func (b *Bulk) Insert(docs ...interface{}) {
- b.inserts = append(b.inserts, docs...)
+ action := b.action(bulkInsert, len(docs))
+ action.docs = append(action.docs, docs...)
+}
+
+// Remove queues up the provided selectors for removing matching documents.
+// Each selector will remove only a single matching document.
+func (b *Bulk) Remove(selectors ...interface{}) {
+ action := b.action(bulkRemove, len(selectors))
+ for _, selector := range selectors {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &deleteOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Flags: 1,
+ Limit: 1,
+ })
+ }
+}
+
+// RemoveAll queues up the provided selectors for removing all matching documents.
+// Each selector will remove all matching documents.
+func (b *Bulk) RemoveAll(selectors ...interface{}) {
+ action := b.action(bulkRemove, len(selectors))
+ for _, selector := range selectors {
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &deleteOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Flags: 0,
+ Limit: 0,
+ })
+ }
+}
+
+// Update queues up the provided pairs of updating instructions.
+// The first element of each pair selects which documents must be
+// updated, and the second element defines how to update it.
+// Each pair matches exactly one document for updating at most.
+func (b *Bulk) Update(pairs ...interface{}) {
+ if len(pairs)%2 != 0 {
+ panic("Bulk.Update requires an even number of parameters")
+ }
+ action := b.action(bulkUpdate, len(pairs)/2)
+ for i := 0; i < len(pairs); i += 2 {
+ selector := pairs[i]
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &updateOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Update: pairs[i+1],
+ })
+ }
+}
+
+// UpdateAll queues up the provided pairs of updating instructions.
+// The first element of each pair selects which documents must be
+// updated, and the second element defines how to update it.
+// Each pair updates all documents matching the selector.
+func (b *Bulk) UpdateAll(pairs ...interface{}) {
+ if len(pairs)%2 != 0 {
+ panic("Bulk.UpdateAll requires an even number of parameters")
+ }
+ action := b.action(bulkUpdate, len(pairs)/2)
+ for i := 0; i < len(pairs); i += 2 {
+ selector := pairs[i]
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &updateOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Update: pairs[i+1],
+ Flags: 2,
+ Multi: true,
+ })
+ }
+}
+
+// Upsert queues up the provided pairs of upserting instructions.
+// The first element of each pair selects which documents must be
+// updated, and the second element defines how to update it.
+// Each pair matches exactly one document for updating at most.
+func (b *Bulk) Upsert(pairs ...interface{}) {
+ if len(pairs)%2 != 0 {
+ panic("Bulk.Update requires an even number of parameters")
+ }
+ action := b.action(bulkUpdate, len(pairs)/2)
+ for i := 0; i < len(pairs); i += 2 {
+ selector := pairs[i]
+ if selector == nil {
+ selector = bson.D{}
+ }
+ action.docs = append(action.docs, &updateOp{
+ Collection: b.c.FullName,
+ Selector: selector,
+ Update: pairs[i+1],
+ Flags: 1,
+ Upsert: true,
+ })
+ }
}
// Run runs all the operations queued up.
+//
+// If an error is reported on an unordered bulk operation, the error value may
+// be an aggregation of all issues observed. As an exception to that, Insert
+// operations running on MongoDB versions prior to 2.6 will report the last
+// error only due to a limitation in the wire protocol.
func (b *Bulk) Run() (*BulkResult, error) {
- op := &insertOp{b.c.FullName, b.inserts, 0}
+ var result BulkResult
+ var berr BulkError
+ var failed bool
+ for i := range b.actions {
+ action := &b.actions[i]
+ var ok bool
+ switch action.op {
+ case bulkInsert:
+ ok = b.runInsert(action, &result, &berr)
+ case bulkUpdate:
+ ok = b.runUpdate(action, &result, &berr)
+ case bulkRemove:
+ ok = b.runRemove(action, &result, &berr)
+ default:
+ panic("unknown bulk operation")
+ }
+ if !ok {
+ failed = true
+ if b.ordered {
+ break
+ }
+ }
+ }
+ if failed {
+ sort.Sort(bulkErrorCases(berr.ecases))
+ return nil, &berr
+ }
+ return &result, nil
+}
+
+func (b *Bulk) runInsert(action *bulkAction, result *BulkResult, berr *BulkError) bool {
+ op := &insertOp{b.c.FullName, action.docs, 0}
if !b.ordered {
op.flags = 1 // ContinueOnError
}
- _, err := b.c.writeQuery(op)
- if err != nil {
- return nil, &bulkError{err}
+ lerr, err := b.c.writeOp(op, b.ordered)
+ return b.checkSuccess(action, berr, lerr, err)
+}
+
+func (b *Bulk) runUpdate(action *bulkAction, result *BulkResult, berr *BulkError) bool {
+ lerr, err := b.c.writeOp(bulkUpdateOp(action.docs), b.ordered)
+ if lerr != nil {
+ result.Matched += lerr.N
+ result.Modified += lerr.modified
+ }
+ return b.checkSuccess(action, berr, lerr, err)
+}
+
+func (b *Bulk) runRemove(action *bulkAction, result *BulkResult, berr *BulkError) bool {
+ lerr, err := b.c.writeOp(bulkDeleteOp(action.docs), b.ordered)
+ if lerr != nil {
+ result.Matched += lerr.N
+ result.Modified += lerr.modified
+ }
+ return b.checkSuccess(action, berr, lerr, err)
+}
+
+func (b *Bulk) checkSuccess(action *bulkAction, berr *BulkError, lerr *LastError, err error) bool {
+ if lerr != nil && len(lerr.ecases) > 0 {
+ for i := 0; i < len(lerr.ecases); i++ {
+ // Map back from the local error index into the visible one.
+ ecase := lerr.ecases[i]
+ idx := ecase.Index
+ if idx >= 0 {
+ idx = action.idxs[idx]
+ }
+ berr.ecases = append(berr.ecases, BulkErrorCase{idx, ecase.Err})
+ }
+ return false
+ } else if err != nil {
+ for i := 0; i < len(action.idxs); i++ {
+ berr.ecases = append(berr.ecases, BulkErrorCase{action.idxs[i], err})
+ }
+ return false
}
- return &BulkResult{}, nil
+ return true
}
diff --git a/bulk_test.go b/bulk_test.go
index f8abca80a..cb280bbfa 100644
--- a/bulk_test.go
+++ b/bulk_test.go
@@ -1,6 +1,6 @@
// mgo - MongoDB driver for Go
//
-// Copyright (c) 2010-2014 - Gustavo Niemeyer
+// Copyright (c) 2010-2015 - Gustavo Niemeyer
//
// All rights reserved.
//
@@ -27,8 +27,8 @@
package mgo_test
import (
- "labix.org/v2/mgo"
- . "launchpad.net/gocheck"
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
)
func (s *S) TestBulkInsert(c *C) {
@@ -44,7 +44,7 @@ func (s *S) TestBulkInsert(c *C) {
c.Assert(err, IsNil)
c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
- type doc struct{ N int }
+ type doc struct{ N int }
var res []doc
err = coll.Find(nil).Sort("n").All(&res)
c.Assert(err, IsNil)
@@ -58,11 +58,14 @@ func (s *S) TestBulkInsertError(c *C) {
coll := session.DB("mydb").C("mycoll")
bulk := coll.Bulk()
- bulk.Insert(M{"_id": 1}, M{"_id": 2}, M{"_id": 2}, M{"n": 3})
+ bulk.Insert(M{"_id": 1}, M{"_id": 2}, M{"_id": 2}, M{"_id": 3})
_, err = bulk.Run()
c.Assert(err, ErrorMatches, ".*duplicate key.*")
+ c.Assert(mgo.IsDup(err), Equals, true)
- type doc struct{ N int `_id` }
+ type doc struct {
+ N int `_id`
+ }
var res []doc
err = coll.Find(nil).Sort("_id").All(&res)
c.Assert(err, IsNil)
@@ -81,9 +84,421 @@ func (s *S) TestBulkInsertErrorUnordered(c *C) {
_, err = bulk.Run()
c.Assert(err, ErrorMatches, ".*duplicate key.*")
- type doc struct{ N int `_id` }
+ type doc struct {
+ N int `_id`
+ }
var res []doc
err = coll.Find(nil).Sort("_id").All(&res)
c.Assert(err, IsNil)
c.Assert(res, DeepEquals, []doc{{1}, {2}, {3}})
}
+
+func (s *S) TestBulkInsertErrorUnorderedSplitBatch(c *C) {
+ // The server has a batch limit of 1000 documents when using write commands.
+ // This artificial limit did not exist with the old wire protocol, so to
+ // avoid compatibility issues the implementation internally split batches
+ // into the proper size and delivers them one by one. This test ensures that
+ // the behavior of unordered (that is, continue on error) remains correct
+ // when errors happen and there are batches left.
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+ bulk := coll.Bulk()
+ bulk.Unordered()
+
+ const total = 4096
+ type doc struct {
+ Id int `_id`
+ }
+ docs := make([]interface{}, total)
+ for i := 0; i < total; i++ {
+ docs[i] = doc{i}
+ }
+ docs[1] = doc{0}
+ bulk.Insert(docs...)
+ _, err = bulk.Run()
+ c.Assert(err, ErrorMatches, ".*duplicate key.*")
+
+ n, err := coll.Count()
+ c.Assert(err, IsNil)
+ c.Assert(n, Equals, total-1)
+
+ var res doc
+ err = coll.FindId(1500).One(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res.Id, Equals, 1500)
+}
+
+func (s *S) TestBulkErrorString(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ // If it's just the same string multiple times, join it into a single message.
+ bulk := coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"_id": 1}, M{"_id": 2}, M{"_id": 2})
+ _, err = bulk.Run()
+ c.Assert(err, ErrorMatches, ".*duplicate key.*")
+ c.Assert(err, Not(ErrorMatches), ".*duplicate key.*duplicate key")
+ c.Assert(mgo.IsDup(err), Equals, true)
+
+ // With matching errors but different messages, present them all.
+ bulk = coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"_id": "dupone"}, M{"_id": "dupone"}, M{"_id": "duptwo"}, M{"_id": "duptwo"})
+ _, err = bulk.Run()
+ if s.versionAtLeast(2, 6) {
+ c.Assert(err, ErrorMatches, "multiple errors in bulk operation:\n( - .*duplicate.*\n){2}$")
+ c.Assert(err, ErrorMatches, "(?s).*dupone.*")
+ c.Assert(err, ErrorMatches, "(?s).*duptwo.*")
+ } else {
+ // Wire protocol query doesn't return all errors.
+ c.Assert(err, ErrorMatches, ".*duplicate.*")
+ }
+ c.Assert(mgo.IsDup(err), Equals, true)
+
+ // With mixed errors, present them all.
+ bulk = coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"_id": 1}, M{"_id": []int{2}})
+ _, err = bulk.Run()
+ if s.versionAtLeast(2, 6) {
+ c.Assert(err, ErrorMatches, "multiple errors in bulk operation:\n - .*duplicate.*\n - .*array.*\n$")
+ } else {
+ // Wire protocol query doesn't return all errors.
+ c.Assert(err, ErrorMatches, ".*array.*")
+ }
+ c.Assert(mgo.IsDup(err), Equals, false)
+}
+
+func (s *S) TestBulkErrorCases_2_6(c *C) {
+ if !s.versionAtLeast(2, 6) {
+ c.Skip("2.4- has poor bulk reporting")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ bulk := coll.Bulk()
+ bulk.Unordered()
+
+ // There's a limit of 1000 operations per command, so
+ // this forces the more complex indexing logic to act.
+ for i := 0; i < 1010; i++ {
+ switch i {
+ case 3, 14:
+ bulk.Insert(M{"_id": "dupone"})
+ case 5, 106:
+ bulk.Update(M{"_id": i - 1}, M{"$set": M{"_id": 4}})
+ case 7, 1008:
+ bulk.Insert(M{"_id": "duptwo"})
+ default:
+ bulk.Insert(M{"_id": i})
+ }
+ }
+
+ _, err = bulk.Run()
+ ecases := err.(*mgo.BulkError).Cases()
+
+ c.Check(ecases[0].Err, ErrorMatches, ".*duplicate.*dupone.*")
+ c.Check(ecases[0].Index, Equals, 14)
+ c.Check(ecases[1].Err, ErrorMatches, ".*update.*_id.*")
+ c.Check(ecases[1].Index, Equals, 106)
+ c.Check(ecases[2].Err, ErrorMatches, ".*duplicate.*duptwo.*")
+ c.Check(ecases[2].Index, Equals, 1008)
+}
+
+func (s *S) TestBulkErrorCases_2_4(c *C) {
+ if s.versionAtLeast(2, 6) {
+ c.Skip("2.6+ has better reporting")
+ }
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ bulk := coll.Bulk()
+ bulk.Unordered()
+
+ // There's a limit of 1000 operations per command, so
+ // this forces the more complex indexing logic to act.
+ for i := 0; i < 1010; i++ {
+ switch i {
+ case 3, 14:
+ bulk.Insert(M{"_id": "dupone"})
+ case 5:
+ bulk.Update(M{"_id": i - 1}, M{"$set": M{"n": 4}})
+ case 106:
+ bulk.Update(M{"_id": i - 1}, M{"$bogus": M{"n": 4}})
+ case 7, 1008:
+ bulk.Insert(M{"_id": "duptwo"})
+ default:
+ bulk.Insert(M{"_id": i})
+ }
+ }
+
+ _, err = bulk.Run()
+ ecases := err.(*mgo.BulkError).Cases()
+
+ c.Check(ecases[0].Err, ErrorMatches, ".*duplicate.*duptwo.*")
+ c.Check(ecases[0].Index, Equals, -1)
+ c.Check(ecases[1].Err, ErrorMatches, `.*\$bogus.*`)
+ c.Check(ecases[1].Index, Equals, 106)
+}
+
+func (s *S) TestBulkErrorCasesOrdered(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ bulk := coll.Bulk()
+
+ // There's a limit of 1000 operations per command, so
+ // this forces the more complex indexing logic to act.
+ for i := 0; i < 20; i++ {
+ switch i {
+ case 3, 14:
+ bulk.Insert(M{"_id": "dupone"})
+ case 7, 17:
+ bulk.Insert(M{"_id": "duptwo"})
+ default:
+ bulk.Insert(M{"_id": i})
+ }
+ }
+
+ _, err = bulk.Run()
+ ecases := err.(*mgo.BulkError).Cases()
+
+ c.Check(ecases[0].Err, ErrorMatches, ".*duplicate.*dupone.*")
+ if s.versionAtLeast(2, 6) {
+ c.Check(ecases[0].Index, Equals, 14)
+ } else {
+ c.Check(ecases[0].Index, Equals, -1)
+ }
+ c.Check(ecases, HasLen, 1)
+}
+
+func (s *S) TestBulkUpdate(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Update(M{"n": 1}, M{"$set": M{"n": 1}})
+ bulk.Update(M{"n": 2}, M{"$set": M{"n": 20}})
+ bulk.Update(M{"n": 5}, M{"$set": M{"n": 50}}) // Won't match.
+ bulk.Update(M{"n": 1}, M{"$set": M{"n": 10}}, M{"n": 3}, M{"$set": M{"n": 30}})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 4)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(r.Modified, Equals, 3)
+ }
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{10}, {20}, {30}})
+}
+
+func (s *S) TestBulkUpdateError(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Update(
+ M{"n": 1}, M{"$set": M{"n": 10}},
+ M{"n": 2}, M{"$set": M{"n": 20, "_id": 20}},
+ M{"n": 3}, M{"$set": M{"n": 30}},
+ )
+ r, err := bulk.Run()
+ c.Assert(err, ErrorMatches, ".*_id.*")
+ c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{2}, {3}, {10}})
+}
+
+func (s *S) TestBulkUpdateErrorUnordered(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Unordered()
+ bulk.Update(
+ M{"n": 1}, M{"$set": M{"n": 10}},
+ M{"n": 2}, M{"$set": M{"n": 20, "_id": 20}},
+ M{"n": 3}, M{"$set": M{"n": 30}},
+ )
+ r, err := bulk.Run()
+ c.Assert(err, ErrorMatches, ".*_id.*")
+ c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{2}, {10}, {30}})
+}
+
+func (s *S) TestBulkUpdateAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.UpdateAll(M{"n": 1}, M{"$set": M{"n": 10}})
+ bulk.UpdateAll(M{"n": 2}, M{"$set": M{"n": 2}}) // Won't change.
+ bulk.UpdateAll(M{"n": 5}, M{"$set": M{"n": 50}}) // Won't match.
+ bulk.UpdateAll(M{}, M{"$inc": M{"n": 1}}, M{"n": 11}, M{"$set": M{"n": 5}})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 6)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(r.Modified, Equals, 5)
+ }
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{3}, {4}, {5}})
+}
+
+func (s *S) TestBulkMixedUnordered(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ // Abuse undefined behavior to ensure the desired implementation is in place.
+ bulk := coll.Bulk()
+ bulk.Unordered()
+ bulk.Insert(M{"n": 1})
+ bulk.Update(M{"n": 2}, M{"$inc": M{"n": 1}})
+ bulk.Insert(M{"n": 2})
+ bulk.Update(M{"n": 3}, M{"$inc": M{"n": 1}})
+ bulk.Update(M{"n": 1}, M{"$inc": M{"n": 1}})
+ bulk.Insert(M{"n": 3})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 3)
+ if s.versionAtLeast(2, 6) {
+ c.Assert(r.Modified, Equals, 3)
+ }
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{2}, {3}, {4}})
+}
+
+func (s *S) TestBulkUpsert(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Upsert(M{"n": 2}, M{"$set": M{"n": 20}})
+ bulk.Upsert(M{"n": 4}, M{"$set": M{"n": 40}}, M{"n": 3}, M{"$set": M{"n": 30}})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r, FitsTypeOf, &mgo.BulkResult{})
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{1}, {20}, {30}, {40}})
+}
+
+func (s *S) TestBulkRemove(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3}, M{"n": 4}, M{"n": 4})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.Remove(M{"n": 1})
+ bulk.Remove(M{"n": 2}, M{"n": 4})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 3)
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{3}, {4}})
+}
+
+func (s *S) TestBulkRemoveAll(c *C) {
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ coll := session.DB("mydb").C("mycoll")
+
+ err = coll.Insert(M{"n": 1}, M{"n": 2}, M{"n": 3}, M{"n": 4}, M{"n": 4})
+ c.Assert(err, IsNil)
+
+ bulk := coll.Bulk()
+ bulk.RemoveAll(M{"n": 1})
+ bulk.RemoveAll(M{"n": 2}, M{"n": 4})
+ r, err := bulk.Run()
+ c.Assert(err, IsNil)
+ c.Assert(r.Matched, Equals, 4)
+
+ type doc struct{ N int }
+ var res []doc
+ err = coll.Find(nil).Sort("n").All(&res)
+ c.Assert(err, IsNil)
+ c.Assert(res, DeepEquals, []doc{{3}})
+}
diff --git a/cluster.go b/cluster.go
index b4ed5bf3e..c3bf8b013 100644
--- a/cluster.go
+++ b/cluster.go
@@ -28,10 +28,14 @@ package mgo
import (
"errors"
- "labix.org/v2/mgo/bson"
+ "fmt"
"net"
+ "strconv"
+ "strings"
"sync"
"time"
+
+ "gopkg.in/mgo.v2/bson"
)
// ---------------------------------------------------------------------------
@@ -53,18 +57,20 @@ type mongoCluster struct {
direct bool
failFast bool
syncCount uint
+ setName string
cachedIndex map[string]bool
sync chan bool
dial dialer
}
-func newCluster(userSeeds []string, direct, failFast bool, dial dialer) *mongoCluster {
+func newCluster(userSeeds []string, direct, failFast bool, dial dialer, setName string) *mongoCluster {
cluster := &mongoCluster{
userSeeds: userSeeds,
references: 1,
direct: direct,
failFast: failFast,
dial: dial,
+ setName: setName,
}
cluster.serverSynced.L = cluster.RWMutex.RLocker()
cluster.sync = make(chan bool, 1)
@@ -123,13 +129,15 @@ func (cluster *mongoCluster) removeServer(server *mongoServer) {
}
type isMasterResult struct {
- IsMaster bool
- Secondary bool
- Primary string
- Hosts []string
- Passives []string
- Tags bson.D
- Msg string
+ IsMaster bool
+ Secondary bool
+ Primary string
+ Hosts []string
+ Passives []string
+ Tags bson.D
+ Msg string
+ SetName string `bson:"setName"`
+ MaxWireVersion int `bson:"maxWireVersion"`
}
func (cluster *mongoCluster) isMaster(socket *mongoSocket, result *isMasterResult) error {
@@ -196,26 +204,34 @@ func (cluster *mongoCluster) syncServer(server *mongoServer) (info *mongoServerI
break
}
+ if cluster.setName != "" && result.SetName != cluster.setName {
+ logf("SYNC Server %s is not a member of replica set %q", addr, cluster.setName)
+ return nil, nil, fmt.Errorf("server %s is not a member of replica set %q", addr, cluster.setName)
+ }
+
if result.IsMaster {
debugf("SYNC %s is a master.", addr)
- // Made an incorrect assumption above, so fix stats.
- stats.conn(-1, false)
- stats.conn(+1, true)
+ if !server.info.Master {
+ // Made an incorrect assumption above, so fix stats.
+ stats.conn(-1, false)
+ stats.conn(+1, true)
+ }
} else if result.Secondary {
debugf("SYNC %s is a slave.", addr)
} else if cluster.direct {
logf("SYNC %s in unknown state. Pretending it's a slave due to direct connection.", addr)
} else {
logf("SYNC %s is neither a master nor a slave.", addr)
- // Made an incorrect assumption above, so fix stats.
- stats.conn(-1, false)
+ // Let stats track it as whatever was known before.
return nil, nil, errors.New(addr + " is not a master nor slave")
}
info = &mongoServerInfo{
- Master: result.IsMaster,
- Mongos: result.Msg == "isdbgrid",
- Tags: result.Tags,
+ Master: result.IsMaster,
+ Mongos: result.Msg == "isdbgrid",
+ Tags: result.Tags,
+ SetName: result.SetName,
+ MaxWireVersion: result.MaxWireVersion,
}
hosts = make([]string, 0, 1+len(result.Hosts)+len(result.Passives))
@@ -394,10 +410,58 @@ func (cluster *mongoCluster) server(addr string, tcpaddr *net.TCPAddr) *mongoSer
}
func resolveAddr(addr string) (*net.TCPAddr, error) {
- tcpaddr, err := net.ResolveTCPAddr("tcp", addr)
- if err != nil {
- log("SYNC Failed to resolve ", addr, ": ", err.Error())
- return nil, err
+ // Simple cases that do not need actual resolution. Works with IPv4 and v6.
+ if host, port, err := net.SplitHostPort(addr); err == nil {
+ if port, _ := strconv.Atoi(port); port > 0 {
+ zone := ""
+ if i := strings.LastIndex(host, "%"); i >= 0 {
+ zone = host[i+1:]
+ host = host[:i]
+ }
+ ip := net.ParseIP(host)
+ if ip != nil {
+ return &net.TCPAddr{IP: ip, Port: port, Zone: zone}, nil
+ }
+ }
+ }
+
+ // Attempt to resolve IPv4 and v6 concurrently.
+ addrChan := make(chan *net.TCPAddr, 2)
+ for _, network := range []string{"udp4", "udp6"} {
+ network := network
+ go func() {
+ // The unfortunate UDP dialing hack allows having a timeout on address resolution.
+ conn, err := net.DialTimeout(network, addr, 10*time.Second)
+ if err != nil {
+ addrChan <- nil
+ } else {
+ addrChan <- (*net.TCPAddr)(conn.RemoteAddr().(*net.UDPAddr))
+ conn.Close()
+ }
+ }()
+ }
+
+ // Wait for the result of IPv4 and v6 resolution. Use IPv4 if available.
+ tcpaddr := <-addrChan
+ if tcpaddr == nil || len(tcpaddr.IP) != 4 {
+ var timeout <-chan time.Time
+ if tcpaddr != nil {
+ // Don't wait too long if an IPv6 address is known.
+ timeout = time.After(50 * time.Millisecond)
+ }
+ select {
+ case <-timeout:
+ case tcpaddr2 := <-addrChan:
+ if tcpaddr == nil || tcpaddr2 != nil {
+ // It's an IPv4 address or the only known address. Use it.
+ tcpaddr = tcpaddr2
+ }
+ }
+ }
+
+ if tcpaddr == nil {
+ log("SYNC Failed to resolve server address: ", addr)
+ return nil, errors.New("failed to resolve server address: " + addr)
}
if tcpaddr.String() != addr {
debug("SYNC Address ", addr, " resolved as ", tcpaddr.String())
@@ -495,8 +559,8 @@ func (cluster *mongoCluster) syncServersIteration(direct bool) {
}
cluster.Lock()
- ml := cluster.masters.Len()
- logf("SYNC Synchronization completed: %d master(s) and %d slave(s) alive.", ml, cluster.servers.Len()-ml)
+ mastersLen := cluster.masters.Len()
+ logf("SYNC Synchronization completed: %d master(s) and %d slave(s) alive.", mastersLen, cluster.servers.Len()-mastersLen)
// Update dynamic seeds, but only if we have any good servers. Otherwise,
// leave them alone for better chances of a successful sync in the future.
@@ -511,22 +575,23 @@ func (cluster *mongoCluster) syncServersIteration(direct bool) {
cluster.Unlock()
}
-var socketsPerServer = 4096
-
// AcquireSocket returns a socket to a server in the cluster. If slaveOk is
// true, it will attempt to return a socket to a slave server. If it is
// false, the socket will necessarily be to a master server.
-func (cluster *mongoCluster) AcquireSocket(slaveOk bool, syncTimeout time.Duration, socketTimeout time.Duration, serverTags []bson.D) (s *mongoSocket, err error) {
+func (cluster *mongoCluster) AcquireSocket(mode Mode, slaveOk bool, syncTimeout time.Duration, socketTimeout time.Duration, serverTags []bson.D, poolLimit int) (s *mongoSocket, err error) {
var started time.Time
var syncCount uint
warnedLimit := false
for {
cluster.RLock()
for {
- ml := cluster.masters.Len()
- sl := cluster.servers.Len()
- debugf("Cluster has %d known masters and %d known slaves.", ml, sl-ml)
- if ml > 0 || slaveOk && sl > 0 {
+ mastersLen := cluster.masters.Len()
+ slavesLen := cluster.servers.Len() - mastersLen
+ debugf("Cluster has %d known masters and %d known slaves.", mastersLen, slavesLen)
+ if mastersLen > 0 && !(slaveOk && mode == Secondary) || slavesLen > 0 && slaveOk {
+ break
+ }
+ if mastersLen > 0 && mode == Secondary && cluster.masters.HasMongos() {
break
}
if started.IsZero() {
@@ -546,9 +611,9 @@ func (cluster *mongoCluster) AcquireSocket(slaveOk bool, syncTimeout time.Durati
var server *mongoServer
if slaveOk {
- server = cluster.servers.BestFit(serverTags)
+ server = cluster.servers.BestFit(mode, serverTags)
} else {
- server = cluster.masters.BestFit(nil)
+ server = cluster.masters.BestFit(mode, nil)
}
cluster.RUnlock()
@@ -558,12 +623,13 @@ func (cluster *mongoCluster) AcquireSocket(slaveOk bool, syncTimeout time.Durati
continue
}
- s, abended, err := server.AcquireSocket(socketsPerServer, socketTimeout)
- if err == errSocketLimit {
+ s, abended, err := server.AcquireSocket(poolLimit, socketTimeout)
+ if err == errPoolLimit {
if !warnedLimit {
+ warnedLimit = true
log("WARNING: Per-server connection limit reached.")
}
- time.Sleep(1e8)
+ time.Sleep(100 * time.Millisecond)
continue
}
if err != nil {
@@ -578,7 +644,7 @@ func (cluster *mongoCluster) AcquireSocket(slaveOk bool, syncTimeout time.Durati
logf("Cannot confirm server %s as master (%v)", server.Addr, err)
s.Release()
cluster.syncServers()
- time.Sleep(1e8)
+ time.Sleep(100 * time.Millisecond)
continue
}
}
diff --git a/cluster_test.go b/cluster_test.go
index d6d2810b7..54ec86762 100644
--- a/cluster_test.go
+++ b/cluster_test.go
@@ -29,13 +29,14 @@ package mgo_test
import (
"fmt"
"io"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- . "launchpad.net/gocheck"
"net"
"strings"
"sync"
"time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
)
func (s *S) TestNewSession(c *C) {
@@ -157,7 +158,7 @@ func (s *S) TestCloneSession(c *C) {
c.Assert(stats.ReceivedDocs, Equals, 1)
}
-func (s *S) TestSetModeStrong(c *C) {
+func (s *S) TestModeStrong(c *C) {
session, err := mgo.Dial("localhost:40012")
c.Assert(err, IsNil)
defer session.Close()
@@ -194,7 +195,7 @@ func (s *S) TestSetModeStrong(c *C) {
c.Assert(stats.SocketsInUse, Equals, 0)
}
-func (s *S) TestSetModeMonotonic(c *C) {
+func (s *S) TestModeMonotonic(c *C) {
// Must necessarily connect to a slave, otherwise the
// master connection will be available first.
session, err := mgo.Dial("localhost:40012")
@@ -205,20 +206,19 @@ func (s *S) TestSetModeMonotonic(c *C) {
c.Assert(session.Mode(), Equals, mgo.Monotonic)
- result := M{}
+ var result struct{ IsMaster bool }
cmd := session.DB("admin").C("$cmd")
err = cmd.Find(M{"ismaster": 1}).One(&result)
c.Assert(err, IsNil)
- c.Assert(result["ismaster"], Equals, false)
+ c.Assert(result.IsMaster, Equals, false)
coll := session.DB("mydb").C("mycoll")
err = coll.Insert(M{"a": 1})
c.Assert(err, IsNil)
- result = M{}
err = cmd.Find(M{"ismaster": 1}).One(&result)
c.Assert(err, IsNil)
- c.Assert(result["ismaster"], Equals, true)
+ c.Assert(result.IsMaster, Equals, true)
// Wait since the sync also uses sockets.
for len(session.LiveServers()) != 3 {
@@ -237,7 +237,7 @@ func (s *S) TestSetModeMonotonic(c *C) {
c.Assert(stats.SocketsInUse, Equals, 0)
}
-func (s *S) TestSetModeMonotonicAfterStrong(c *C) {
+func (s *S) TestModeMonotonicAfterStrong(c *C) {
// Test that a strong session shifting to a monotonic
// one preserves the socket untouched.
@@ -270,7 +270,7 @@ func (s *S) TestSetModeMonotonicAfterStrong(c *C) {
c.Assert(result["ismaster"], Equals, true)
}
-func (s *S) TestSetModeStrongAfterMonotonic(c *C) {
+func (s *S) TestModeStrongAfterMonotonic(c *C) {
// Test that shifting from Monotonic to Strong while
// using a slave socket will keep the socket reserved
// until the master socket is necessary, so that no
@@ -310,7 +310,7 @@ func (s *S) TestSetModeStrongAfterMonotonic(c *C) {
c.Assert(result["ismaster"], Equals, true)
}
-func (s *S) TestSetModeMonotonicWriteOnIteration(c *C) {
+func (s *S) TestModeMonotonicWriteOnIteration(c *C) {
// Must necessarily connect to a slave, otherwise the
// master connection will be available first.
session, err := mgo.Dial("localhost:40012")
@@ -355,7 +355,7 @@ func (s *S) TestSetModeMonotonicWriteOnIteration(c *C) {
c.Assert(i, Equals, len(ns))
}
-func (s *S) TestSetModeEventual(c *C) {
+func (s *S) TestModeEventual(c *C) {
// Must necessarily connect to a slave, otherwise the
// master connection will be available first.
session, err := mgo.Dial("localhost:40012")
@@ -392,7 +392,7 @@ func (s *S) TestSetModeEventual(c *C) {
c.Assert(stats.SocketsInUse, Equals, 0)
}
-func (s *S) TestSetModeEventualAfterStrong(c *C) {
+func (s *S) TestModeEventualAfterStrong(c *C) {
// Test that a strong session shifting to an eventual
// one preserves the socket untouched.
@@ -430,7 +430,7 @@ func (s *S) TestSetModeEventualAfterStrong(c *C) {
c.Assert(stats.SocketsInUse, Equals, 0)
}
-func (s *S) TestPrimaryShutdownStrong(c *C) {
+func (s *S) TestModeStrongFallover(c *C) {
if *fast {
c.Skip("-fast")
}
@@ -471,7 +471,7 @@ func (s *S) TestPrimaryShutdownStrong(c *C) {
c.Assert(err, IsNil)
}
-func (s *S) TestPrimaryHiccup(c *C) {
+func (s *S) TestModePrimaryHiccup(c *C) {
if *fast {
c.Skip("-fast")
}
@@ -522,7 +522,7 @@ func (s *S) TestPrimaryHiccup(c *C) {
c.Assert(err, IsNil)
}
-func (s *S) TestPrimaryShutdownMonotonic(c *C) {
+func (s *S) TestModeMonotonicFallover(c *C) {
if *fast {
c.Skip("-fast")
}
@@ -565,7 +565,7 @@ func (s *S) TestPrimaryShutdownMonotonic(c *C) {
c.Assert(result.Host, Not(Equals), host)
}
-func (s *S) TestPrimaryShutdownMonotonicWithSlave(c *C) {
+func (s *S) TestModeMonotonicWithSlaveFallover(c *C) {
if *fast {
c.Skip("-fast")
}
@@ -644,7 +644,7 @@ func (s *S) TestPrimaryShutdownMonotonicWithSlave(c *C) {
c.Assert(ssresult.Host, Not(Equals), master)
}
-func (s *S) TestPrimaryShutdownEventual(c *C) {
+func (s *S) TestModeEventualFallover(c *C) {
if *fast {
c.Skip("-fast")
}
@@ -681,6 +681,192 @@ func (s *S) TestPrimaryShutdownEventual(c *C) {
c.Assert(result.Host, Not(Equals), master)
}
+func (s *S) TestModeSecondaryJustPrimary(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Secondary, true)
+
+ err = session.Ping()
+ c.Assert(err, ErrorMatches, "no reachable servers")
+}
+
+func (s *S) TestModeSecondaryPreferredJustPrimary(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.SecondaryPreferred, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestModeSecondaryPreferredFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ // Ensure secondaries are available for being picked up.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ session.SetMode(mgo.SecondaryPreferred, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Not(Equals), "rs1a")
+ secondary := result.Host
+
+ // Should connect to the primary when needed.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // Wait a bit for this to be synchronized to slaves.
+ time.Sleep(3 * time.Second)
+
+ // Kill the primary.
+ s.Stop("localhost:40011")
+
+ // It can still talk to the selected secondary.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Host, Equals, secondary)
+
+ // But cannot speak to the primary until reset.
+ coll = session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, Equals, io.EOF)
+
+ session.Refresh()
+
+ // Can still talk to a secondary.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Not(Equals), "rs1a")
+
+ s.StartAll()
+
+ // Should now be able to talk to the primary again.
+ coll = session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+}
+
+func (s *S) TestModePrimaryPreferredFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.PrimaryPreferred, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Equals, "rs1a")
+
+ // Kill the primary.
+ s.Stop("localhost:40011")
+
+ // Should now fail as there was a primary socket in use already.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, Equals, io.EOF)
+
+ // Refresh so the reserved primary socket goes away.
+ session.Refresh()
+
+ // Should be able to talk to the secondary.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+
+ s.StartAll()
+
+ // Should wait for the new primary to become available.
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ // And should use the new primary in general, as it is preferred.
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Equals, "rs1a")
+}
+
+func (s *S) TestModePrimaryFallover(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetSyncTimeout(3 * time.Second)
+
+ session.SetMode(mgo.Primary, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Equals, "rs1a")
+
+ // Kill the primary.
+ s.Stop("localhost:40011")
+
+ session.Refresh()
+
+ err = session.Ping()
+ c.Assert(err, ErrorMatches, "no reachable servers")
+}
+
+func (s *S) TestModeSecondary(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Secondary, true)
+
+ result := &struct{ Host string }{}
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(supvName(result.Host), Not(Equals), "rs1a")
+ secondary := result.Host
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ err = session.Run("serverStatus", result)
+ c.Assert(err, IsNil)
+ c.Assert(result.Host, Equals, secondary)
+}
+
func (s *S) TestPreserveSocketCountOnSync(c *C) {
if *fast {
c.Skip("-fast")
@@ -691,9 +877,9 @@ func (s *S) TestPreserveSocketCountOnSync(c *C) {
defer session.Close()
stats := mgo.GetStats()
- for stats.MasterConns+stats.SlaveConns != 3 {
+ for stats.SocketsAlive != 3 {
+ c.Logf("Waiting for all connections to be established (sockets alive currently %d)...", stats.SocketsAlive)
stats = mgo.GetStats()
- c.Log("Waiting for all connections to be established...")
time.Sleep(5e8)
}
@@ -906,6 +1092,69 @@ func (s *S) TestSocketTimeoutOnInactiveSocket(c *C) {
c.Assert(session.Ping(), IsNil)
}
+func (s *S) TestDialWithReplicaSetName(c *C) {
+ seedLists := [][]string{
+ // rs1 primary and rs2 primary
+ []string{"localhost:40011", "localhost:40021"},
+ // rs1 primary and rs2 secondary
+ []string{"localhost:40011", "localhost:40022"},
+ // rs1 secondary and rs2 primary
+ []string{"localhost:40012", "localhost:40021"},
+ // rs1 secondary and rs2 secondary
+ []string{"localhost:40012", "localhost:40022"},
+ }
+
+ rs2Members := []string{":40021", ":40022", ":40023"}
+
+ verifySyncedServers := func(session *mgo.Session, numServers int) {
+ // wait for the server(s) to be synced
+ for len(session.LiveServers()) != numServers {
+ c.Log("Waiting for cluster sync to finish...")
+ time.Sleep(5e8)
+ }
+
+ // ensure none of the rs2 set members are communicated with
+ for _, addr := range session.LiveServers() {
+ for _, rs2Member := range rs2Members {
+ c.Assert(strings.HasSuffix(addr, rs2Member), Equals, false)
+ }
+ }
+ }
+
+ // only communication with rs1 members is expected
+ for _, seedList := range seedLists {
+ info := mgo.DialInfo{
+ Addrs: seedList,
+ Timeout: 5 * time.Second,
+ ReplicaSetName: "rs1",
+ }
+
+ session, err := mgo.DialWithInfo(&info)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 3)
+ session.Close()
+
+ info.Direct = true
+ session, err = mgo.DialWithInfo(&info)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 1)
+ session.Close()
+
+ connectionUrl := fmt.Sprintf("mongodb://%v/?replicaSet=rs1", strings.Join(seedList, ","))
+ session, err = mgo.Dial(connectionUrl)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 3)
+ session.Close()
+
+ connectionUrl += "&connect=direct"
+ session, err = mgo.Dial(connectionUrl)
+ c.Assert(err, IsNil)
+ verifySyncedServers(session, 1)
+ session.Close()
+ }
+
+}
+
func (s *S) TestDirect(c *C) {
session, err := mgo.Dial("localhost:40012?connect=direct")
c.Assert(err, IsNil)
@@ -991,25 +1240,42 @@ func (s *S) TestFailFast(c *C) {
c.Assert(started.After(time.Now().Add(-time.Second)), Equals, true)
}
-type OpCounters struct {
- Insert int
- Query int
- Update int
- Delete int
- GetMore int
- Command int
+func (s *S) countQueries(c *C, server string) (n int) {
+ defer func() { c.Logf("Queries for %q: %d", server, n) }()
+ session, err := mgo.Dial(server + "?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetMode(mgo.Monotonic, true)
+ var result struct {
+ OpCounters struct {
+ Query int
+ }
+ Metrics struct {
+ Commands struct{ Find struct{ Total int } }
+ }
+ }
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ if s.versionAtLeast(3, 2) {
+ return result.Metrics.Commands.Find.Total
+ }
+ return result.OpCounters.Query
}
-func getOpCounters(server string) (c *OpCounters, err error) {
+func (s *S) countCommands(c *C, server, commandName string) (n int) {
+ defer func() { c.Logf("Queries for %q: %d", server, n) }()
session, err := mgo.Dial(server + "?connect=direct")
- if err != nil {
- return nil, err
- }
+ c.Assert(err, IsNil)
defer session.Close()
session.SetMode(mgo.Monotonic, true)
- result := struct{ OpCounters }{}
+ var result struct {
+ Metrics struct {
+ Commands map[string]struct{ Total int }
+ }
+ }
err = session.Run("serverStatus", &result)
- return &result.OpCounters, err
+ c.Assert(err, IsNil)
+ return result.Metrics.Commands[commandName].Total
}
func (s *S) TestMonotonicSlaveOkFlagWithMongos(c *C) {
@@ -1028,50 +1294,160 @@ func (s *S) TestMonotonicSlaveOkFlagWithMongos(c *C) {
master := ssresult.Host
c.Assert(imresult.IsMaster, Equals, true, Commentf("%s is not the master", master))
- // Collect op counters for everyone.
- opc21a, err := getOpCounters("localhost:40021")
- c.Assert(err, IsNil)
- opc22a, err := getOpCounters("localhost:40022")
- c.Assert(err, IsNil)
- opc23a, err := getOpCounters("localhost:40023")
- c.Assert(err, IsNil)
-
- // Do a SlaveOk query through MongoS
+ // Ensure mongos is aware about the current topology.
+ s.Stop(":40201")
+ s.StartAll()
mongos, err := mgo.Dial("localhost:40202")
c.Assert(err, IsNil)
defer mongos.Close()
+ // Insert some data as otherwise 3.2+ doesn't seem to run the query at all.
+ err = mongos.DB("mydb").C("mycoll").Insert(bson.M{"n": 1})
+ c.Assert(err, IsNil)
+
+ // Wait until all servers see the data.
+ for _, addr := range []string{"localhost:40021", "localhost:40022", "localhost:40023"} {
+ session, err := mgo.Dial(addr + "?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetMode(mgo.Monotonic, true)
+ for i := 300; i >= 0; i-- {
+ n, err := session.DB("mydb").C("mycoll").Find(nil).Count()
+ c.Assert(err, IsNil)
+ if n == 1 {
+ break
+ }
+ if i == 0 {
+ c.Fatalf("Inserted data never reached " + addr)
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ }
+
+ // Collect op counters for everyone.
+ q21a := s.countQueries(c, "localhost:40021")
+ q22a := s.countQueries(c, "localhost:40022")
+ q23a := s.countQueries(c, "localhost:40023")
+
+ // Do a SlaveOk query through MongoS
+
mongos.SetMode(mgo.Monotonic, true)
coll := mongos.DB("mydb").C("mycoll")
- result := &struct{}{}
+ var result struct{ N int }
for i := 0; i != 5; i++ {
- err := coll.Find(nil).One(result)
- c.Assert(err, Equals, mgo.ErrNotFound)
+ err = coll.Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
}
// Collect op counters for everyone again.
- opc21b, err := getOpCounters("localhost:40021")
+ q21b := s.countQueries(c, "localhost:40021")
+ q22b := s.countQueries(c, "localhost:40022")
+ q23b := s.countQueries(c, "localhost:40023")
+
+ var masterDelta, slaveDelta int
+ switch hostPort(master) {
+ case "40021":
+ masterDelta = q21b - q21a
+ slaveDelta = (q22b - q22a) + (q23b - q23a)
+ case "40022":
+ masterDelta = q22b - q22a
+ slaveDelta = (q21b - q21a) + (q23b - q23a)
+ case "40023":
+ masterDelta = q23b - q23a
+ slaveDelta = (q21b - q21a) + (q22b - q22a)
+ default:
+ c.Fatal("Uh?")
+ }
+
+ c.Check(masterDelta, Equals, 0) // Just the counting itself.
+ c.Check(slaveDelta, Equals, 5) // The counting for both, plus 5 queries above.
+}
+
+func (s *S) TestSecondaryModeWithMongos(c *C) {
+ session, err := mgo.Dial("localhost:40021")
c.Assert(err, IsNil)
- opc22b, err := getOpCounters("localhost:40022")
+ defer session.Close()
+
+ ssresult := &struct{ Host string }{}
+ imresult := &struct{ IsMaster bool }{}
+
+ // Figure the master while still using the strong session.
+ err = session.Run("serverStatus", ssresult)
+ c.Assert(err, IsNil)
+ err = session.Run("isMaster", imresult)
+ c.Assert(err, IsNil)
+ master := ssresult.Host
+ c.Assert(imresult.IsMaster, Equals, true, Commentf("%s is not the master", master))
+
+ // Ensure mongos is aware about the current topology.
+ s.Stop(":40201")
+ s.StartAll()
+
+ mongos, err := mgo.Dial("localhost:40202")
c.Assert(err, IsNil)
- opc23b, err := getOpCounters("localhost:40023")
+ defer mongos.Close()
+
+ mongos.SetSyncTimeout(5 * time.Second)
+
+ // Insert some data as otherwise 3.2+ doesn't seem to run the query at all.
+ err = mongos.DB("mydb").C("mycoll").Insert(bson.M{"n": 1})
c.Assert(err, IsNil)
- masterPort := master[strings.Index(master, ":")+1:]
+ // Wait until all servers see the data.
+ for _, addr := range []string{"localhost:40021", "localhost:40022", "localhost:40023"} {
+ session, err := mgo.Dial(addr + "?connect=direct")
+ c.Assert(err, IsNil)
+ defer session.Close()
+ session.SetMode(mgo.Monotonic, true)
+ for i := 300; i >= 0; i-- {
+ n, err := session.DB("mydb").C("mycoll").Find(nil).Count()
+ c.Assert(err, IsNil)
+ if n == 1 {
+ break
+ }
+ if i == 0 {
+ c.Fatalf("Inserted data never reached " + addr)
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ }
+
+ // Collect op counters for everyone.
+ q21a := s.countQueries(c, "localhost:40021")
+ q22a := s.countQueries(c, "localhost:40022")
+ q23a := s.countQueries(c, "localhost:40023")
+
+ // Do a Secondary query through MongoS
+
+ mongos.SetMode(mgo.Secondary, true)
+
+ coll := mongos.DB("mydb").C("mycoll")
+ var result struct{ N int }
+ for i := 0; i != 5; i++ {
+ err = coll.Find(nil).One(&result)
+ c.Assert(err, IsNil)
+ c.Assert(result.N, Equals, 1)
+ }
+
+ // Collect op counters for everyone again.
+ q21b := s.countQueries(c, "localhost:40021")
+ q22b := s.countQueries(c, "localhost:40022")
+ q23b := s.countQueries(c, "localhost:40023")
var masterDelta, slaveDelta int
- switch masterPort {
+ switch hostPort(master) {
case "40021":
- masterDelta = opc21b.Query - opc21a.Query
- slaveDelta = (opc22b.Query - opc22a.Query) + (opc23b.Query - opc23a.Query)
+ masterDelta = q21b - q21a
+ slaveDelta = (q22b - q22a) + (q23b - q23a)
case "40022":
- masterDelta = opc22b.Query - opc22a.Query
- slaveDelta = (opc21b.Query - opc21a.Query) + (opc23b.Query - opc23a.Query)
+ masterDelta = q22b - q22a
+ slaveDelta = (q21b - q21a) + (q23b - q23a)
case "40023":
- masterDelta = opc23b.Query - opc23a.Query
- slaveDelta = (opc21b.Query - opc21a.Query) + (opc22b.Query - opc22a.Query)
+ masterDelta = q23b - q23a
+ slaveDelta = (q21b - q21a) + (q22b - q22a)
default:
c.Fatal("Uh?")
}
@@ -1080,6 +1456,28 @@ func (s *S) TestMonotonicSlaveOkFlagWithMongos(c *C) {
c.Check(slaveDelta, Equals, 5) // The counting for both, plus 5 queries above.
}
+func (s *S) TestSecondaryModeWithMongosInsert(c *C) {
+ if *fast {
+ c.Skip("-fast")
+ }
+
+ session, err := mgo.Dial("localhost:40202")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ session.SetMode(mgo.Secondary, true)
+ session.SetSyncTimeout(4 * time.Second)
+
+ coll := session.DB("mydb").C("mycoll")
+ err = coll.Insert(M{"a": 1})
+ c.Assert(err, IsNil)
+
+ var result struct{ A int }
+ coll.Find(nil).One(&result)
+ c.Assert(result.A, Equals, 1)
+}
+
+
func (s *S) TestRemovalOfClusterMember(c *C) {
if *fast {
c.Skip("-fast")
@@ -1112,17 +1510,28 @@ func (s *S) TestRemovalOfClusterMember(c *C) {
slaveAddr := result.Me
defer func() {
+ config := map[string]string{
+ "40021": `{_id: 1, host: "127.0.0.1:40021", priority: 1, tags: {rs2: "a"}}`,
+ "40022": `{_id: 2, host: "127.0.0.1:40022", priority: 0, tags: {rs2: "b"}}`,
+ "40023": `{_id: 3, host: "127.0.0.1:40023", priority: 0, tags: {rs2: "c"}}`,
+ }
master.Refresh()
- master.Run(bson.D{{"$eval", `rs.add("` + slaveAddr + `")`}}, nil)
+ master.Run(bson.D{{"$eval", `rs.add(` + config[hostPort(slaveAddr)] + `)`}}, nil)
master.Close()
slave.Close()
+
+ // Ensure suite syncs up with the changes before next test.
+ s.Stop(":40201")
+ s.StartAll()
+ time.Sleep(8 * time.Second)
+ // TODO Find a better way to find out when mongos is fully aware that all
+ // servers are up. Without that follow up tests that depend on mongos will
+ // break due to their expectation of things being in a working state.
}()
c.Logf("========== Removing slave: %s ==========", slaveAddr)
master.Run(bson.D{{"$eval", `rs.remove("` + slaveAddr + `")`}}, nil)
- err = master.Ping()
- c.Assert(err, Equals, io.EOF)
master.Refresh()
@@ -1150,33 +1559,68 @@ func (s *S) TestRemovalOfClusterMember(c *C) {
c.Log("========== Test succeeded. ==========")
}
-func (s *S) TestSocketLimit(c *C) {
+func (s *S) TestPoolLimitSimple(c *C) {
+ for test := 0; test < 2; test++ {
+ var session *mgo.Session
+ var err error
+ if test == 0 {
+ session, err = mgo.Dial("localhost:40001")
+ c.Assert(err, IsNil)
+ session.SetPoolLimit(1)
+ } else {
+ session, err = mgo.Dial("localhost:40001?maxPoolSize=1")
+ c.Assert(err, IsNil)
+ }
+ defer session.Close()
+
+ // Put one socket in use.
+ c.Assert(session.Ping(), IsNil)
+
+ done := make(chan time.Duration)
+
+ // Now block trying to get another one due to the pool limit.
+ go func() {
+ copy := session.Copy()
+ defer copy.Close()
+ started := time.Now()
+ c.Check(copy.Ping(), IsNil)
+ done <- time.Now().Sub(started)
+ }()
+
+ time.Sleep(300 * time.Millisecond)
+
+ // Put the one socket back in the pool, freeing it for the copy.
+ session.Refresh()
+ delay := <-done
+ c.Assert(delay > 300*time.Millisecond, Equals, true, Commentf("Delay: %s", delay))
+ }
+}
+
+func (s *S) TestPoolLimitMany(c *C) {
if *fast {
c.Skip("-fast")
}
- const socketLimit = 64
- restore := mgo.HackSocketsPerServer(socketLimit)
- defer restore()
session, err := mgo.Dial("localhost:40011")
c.Assert(err, IsNil)
defer session.Close()
stats := mgo.GetStats()
- for stats.MasterConns+stats.SlaveConns != 3 {
+ for stats.SocketsAlive != 3 {
+ c.Logf("Waiting for all connections to be established (sockets alive currently %d)...", stats.SocketsAlive)
stats = mgo.GetStats()
- c.Log("Waiting for all connections to be established...")
time.Sleep(5e8)
}
- c.Assert(stats.SocketsAlive, Equals, 3)
+
+ const poolLimit = 64
+ session.SetPoolLimit(poolLimit)
// Consume the whole limit for the master.
var master []*mgo.Session
- for i := 0; i < socketLimit; i++ {
+ for i := 0; i < poolLimit; i++ {
s := session.Copy()
defer s.Close()
- err := s.Ping()
- c.Assert(err, IsNil)
+ c.Assert(s.Ping(), IsNil)
master = append(master, s)
}
@@ -1186,7 +1630,7 @@ func (s *S) TestSocketLimit(c *C) {
master[0].Refresh()
}()
- // Now a single ping must block, since it would need another
+ // Then, a single ping must block, since it would need another
// connection to the master, over the limit. Once the goroutine
// above releases its socket, it should move on.
session.Ping()
@@ -1362,7 +1806,7 @@ func (s *S) TestPrimaryShutdownOnAuthShard(c *C) {
}
func (s *S) TestNearestSecondary(c *C) {
- defer mgo.HackPingDelay(3 * time.Second)()
+ defer mgo.HackPingDelay(300 * time.Millisecond)()
rs1a := "127.0.0.1:40011"
rs1b := "127.0.0.1:40012"
@@ -1424,6 +1868,68 @@ func (s *S) TestNearestSecondary(c *C) {
}
}
+func (s *S) TestNearestServer(c *C) {
+ defer mgo.HackPingDelay(300 * time.Millisecond)()
+
+ rs1a := "127.0.0.1:40011"
+ rs1b := "127.0.0.1:40012"
+ rs1c := "127.0.0.1:40013"
+
+ session, err := mgo.Dial(rs1a)
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ s.Freeze(rs1a)
+ s.Freeze(rs1b)
+
+ // Extra delay to ensure the first two servers get penalized.
+ time.Sleep(500 * time.Millisecond)
+
+ // Release them.
+ s.Thaw(rs1a)
+ s.Thaw(rs1b)
+
+ // Wait for everyone to come up.
+ for len(session.LiveServers()) != 3 {
+ c.Log("Waiting for all servers to be alive...")
+ time.Sleep(100 * time.Millisecond)
+ }
+
+ session.SetMode(mgo.Nearest, true)
+ var result struct{ Host string }
+
+ // See which server picks the line, several times to avoid chance.
+ for i := 0; i < 10; i++ {
+ session.Refresh()
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, hostPort(rs1c))
+ }
+
+ if *fast {
+ // Don't hold back for several seconds.
+ return
+ }
+
+ // Now hold the two secondaries for long enough to penalize them.
+ s.Freeze(rs1b)
+ s.Freeze(rs1c)
+ time.Sleep(5 * time.Second)
+ s.Thaw(rs1b)
+ s.Thaw(rs1c)
+
+ // Wait for the ping to be processed.
+ time.Sleep(500 * time.Millisecond)
+
+ // Repeating the test should now pick the primary server consistently.
+ for i := 0; i < 10; i++ {
+ session.Refresh()
+ err = session.Run("serverStatus", &result)
+ c.Assert(err, IsNil)
+ c.Assert(hostPort(result.Host), Equals, hostPort(rs1a))
+ }
+}
+
func (s *S) TestConnectCloseConcurrency(c *C) {
restore := mgo.HackPingDelay(500 * time.Millisecond)
defer restore()
@@ -1501,12 +2007,9 @@ func (s *S) TestSelectServersWithMongos(c *C) {
}
// Collect op counters for everyone.
- opc21a, err := getOpCounters("localhost:40021")
- c.Assert(err, IsNil)
- opc22a, err := getOpCounters("localhost:40022")
- c.Assert(err, IsNil)
- opc23a, err := getOpCounters("localhost:40023")
- c.Assert(err, IsNil)
+ q21a := s.countQueries(c, "localhost:40021")
+ q22a := s.countQueries(c, "localhost:40022")
+ q23a := s.countQueries(c, "localhost:40023")
// Do a SlaveOk query through MongoS
mongos, err := mgo.Dial("localhost:40202")
@@ -1533,27 +2036,55 @@ func (s *S) TestSelectServersWithMongos(c *C) {
}
// Collect op counters for everyone again.
- opc21b, err := getOpCounters("localhost:40021")
- c.Assert(err, IsNil)
- opc22b, err := getOpCounters("localhost:40022")
- c.Assert(err, IsNil)
- opc23b, err := getOpCounters("localhost:40023")
- c.Assert(err, IsNil)
+ q21b := s.countQueries(c, "localhost:40021")
+ q22b := s.countQueries(c, "localhost:40022")
+ q23b := s.countQueries(c, "localhost:40023")
switch hostPort(master) {
case "40021":
- c.Check(opc21b.Query-opc21a.Query, Equals, 0)
- c.Check(opc22b.Query-opc22a.Query, Equals, 5)
- c.Check(opc23b.Query-opc23a.Query, Equals, 7)
+ c.Check(q21b-q21a, Equals, 0)
+ c.Check(q22b-q22a, Equals, 5)
+ c.Check(q23b-q23a, Equals, 7)
case "40022":
- c.Check(opc21b.Query-opc21a.Query, Equals, 5)
- c.Check(opc22b.Query-opc22a.Query, Equals, 0)
- c.Check(opc23b.Query-opc23a.Query, Equals, 7)
+ c.Check(q21b-q21a, Equals, 5)
+ c.Check(q22b-q22a, Equals, 0)
+ c.Check(q23b-q23a, Equals, 7)
case "40023":
- c.Check(opc21b.Query-opc21a.Query, Equals, 5)
- c.Check(opc22b.Query-opc22a.Query, Equals, 7)
- c.Check(opc23b.Query-opc23a.Query, Equals, 0)
+ c.Check(q21b-q21a, Equals, 5)
+ c.Check(q22b-q22a, Equals, 7)
+ c.Check(q23b-q23a, Equals, 0)
default:
c.Fatal("Uh?")
}
}
+
+func (s *S) TestDoNotFallbackToMonotonic(c *C) {
+ // There was a bug at some point that some functions were
+ // falling back to Monotonic mode. This test ensures all listIndexes
+ // commands go to the primary, as should happen since the session is
+ // in Strong mode.
+ if !s.versionAtLeast(3, 0) {
+ c.Skip("command-counting logic depends on 3.0+")
+ }
+
+ session, err := mgo.Dial("localhost:40012")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ for i := 0; i < 15; i++ {
+ q11a := s.countCommands(c, "localhost:40011", "listIndexes")
+ q12a := s.countCommands(c, "localhost:40012", "listIndexes")
+ q13a := s.countCommands(c, "localhost:40013", "listIndexes")
+
+ _, err := session.DB("local").C("system.indexes").Indexes()
+ c.Assert(err, IsNil)
+
+ q11b := s.countCommands(c, "localhost:40011", "listIndexes")
+ q12b := s.countCommands(c, "localhost:40012", "listIndexes")
+ q13b := s.countCommands(c, "localhost:40013", "listIndexes")
+
+ c.Assert(q11b, Equals, q11a+1)
+ c.Assert(q12b, Equals, q12a)
+ c.Assert(q13b, Equals, q13a)
+ }
+}
diff --git a/dbtest/dbserver.go b/dbtest/dbserver.go
new file mode 100644
index 000000000..16b7b5841
--- /dev/null
+++ b/dbtest/dbserver.go
@@ -0,0 +1,196 @@
+package dbtest
+
+import (
+ "bytes"
+ "fmt"
+ "net"
+ "os"
+ "os/exec"
+ "strconv"
+ "time"
+
+ "gopkg.in/mgo.v2"
+ "gopkg.in/tomb.v2"
+)
+
+// DBServer controls a MongoDB server process to be used within test suites.
+//
+// The test server is started when Session is called the first time and should
+// remain running for the duration of all tests, with the Wipe method being
+// called between tests (before each of them) to clear stored data. After all tests
+// are done, the Stop method should be called to stop the test server.
+//
+// Before the DBServer is used the SetPath method must be called to define
+// the location for the database files to be stored.
+type DBServer struct {
+ session *mgo.Session
+ output bytes.Buffer
+ server *exec.Cmd
+ dbpath string
+ host string
+ tomb tomb.Tomb
+}
+
+// SetPath defines the path to the directory where the database files will be
+// stored if it is started. The directory path itself is not created or removed
+// by the test helper.
+func (dbs *DBServer) SetPath(dbpath string) {
+ dbs.dbpath = dbpath
+}
+
+func (dbs *DBServer) start() {
+ if dbs.server != nil {
+ panic("DBServer already started")
+ }
+ if dbs.dbpath == "" {
+ panic("DBServer.SetPath must be called before using the server")
+ }
+ mgo.SetStats(true)
+ l, err := net.Listen("tcp", "127.0.0.1:0")
+ if err != nil {
+ panic("unable to listen on a local address: " + err.Error())
+ }
+ addr := l.Addr().(*net.TCPAddr)
+ l.Close()
+ dbs.host = addr.String()
+
+ args := []string{
+ "--dbpath", dbs.dbpath,
+ "--bind_ip", "127.0.0.1",
+ "--port", strconv.Itoa(addr.Port),
+ "--nssize", "1",
+ "--noprealloc",
+ "--smallfiles",
+ "--nojournal",
+ }
+ dbs.tomb = tomb.Tomb{}
+ dbs.server = exec.Command("mongod", args...)
+ dbs.server.Stdout = &dbs.output
+ dbs.server.Stderr = &dbs.output
+ err = dbs.server.Start()
+ if err != nil {
+ panic(err)
+ }
+ dbs.tomb.Go(dbs.monitor)
+ dbs.Wipe()
+}
+
+func (dbs *DBServer) monitor() error {
+ dbs.server.Process.Wait()
+ if dbs.tomb.Alive() {
+ // Present some debugging information.
+ fmt.Fprintf(os.Stderr, "---- mongod process died unexpectedly:\n")
+ fmt.Fprintf(os.Stderr, "%s", dbs.output.Bytes())
+ fmt.Fprintf(os.Stderr, "---- mongod processes running right now:\n")
+ cmd := exec.Command("/bin/sh", "-c", "ps auxw | grep mongod")
+ cmd.Stdout = os.Stderr
+ cmd.Stderr = os.Stderr
+ cmd.Run()
+ fmt.Fprintf(os.Stderr, "----------------------------------------\n")
+
+ panic("mongod process died unexpectedly")
+ }
+ return nil
+}
+
+// Stop stops the test server process, if it is running.
+//
+// It's okay to call Stop multiple times. After the test server is
+// stopped it cannot be restarted.
+//
+// All database sessions must be closed before or while the Stop method
+// is running. Otherwise Stop will panic after a timeout informing that
+// there is a session leak.
+func (dbs *DBServer) Stop() {
+ if dbs.session != nil {
+ dbs.checkSessions()
+ if dbs.session != nil {
+ dbs.session.Close()
+ dbs.session = nil
+ }
+ }
+ if dbs.server != nil {
+ dbs.tomb.Kill(nil)
+ dbs.server.Process.Signal(os.Interrupt)
+ select {
+ case <-dbs.tomb.Dead():
+ case <-time.After(5 * time.Second):
+ panic("timeout waiting for mongod process to die")
+ }
+ dbs.server = nil
+ }
+}
+
+// Session returns a new session to the server. The returned session
+// must be closed after the test is done with it.
+//
+// The first Session obtained from a DBServer will start it.
+func (dbs *DBServer) Session() *mgo.Session {
+ if dbs.server == nil {
+ dbs.start()
+ }
+ if dbs.session == nil {
+ mgo.ResetStats()
+ var err error
+ dbs.session, err = mgo.Dial(dbs.host + "/test")
+ if err != nil {
+ panic(err)
+ }
+ }
+ return dbs.session.Copy()
+}
+
+// checkSessions ensures all mgo sessions opened were properly closed.
+// For slightly faster tests, it may be disabled setting the
+// environmnet variable CHECK_SESSIONS to 0.
+func (dbs *DBServer) checkSessions() {
+ if check := os.Getenv("CHECK_SESSIONS"); check == "0" || dbs.server == nil || dbs.session == nil {
+ return
+ }
+ dbs.session.Close()
+ dbs.session = nil
+ for i := 0; i < 100; i++ {
+ stats := mgo.GetStats()
+ if stats.SocketsInUse == 0 && stats.SocketsAlive == 0 {
+ return
+ }
+ time.Sleep(100 * time.Millisecond)
+ }
+ panic("There are mgo sessions still alive.")
+}
+
+// Wipe drops all created databases and their data.
+//
+// The MongoDB server remains running if it was previously running,
+// or stopped if it was previously stopped.
+//
+// All database sessions must be closed before or while the Wipe method
+// is running. Otherwise Wipe will panic after a timeout informing that
+// there is a session leak.
+func (dbs *DBServer) Wipe() {
+ if dbs.server == nil || dbs.session == nil {
+ return
+ }
+ dbs.checkSessions()
+ sessionUnset := dbs.session == nil
+ session := dbs.Session()
+ defer session.Close()
+ if sessionUnset {
+ dbs.session.Close()
+ dbs.session = nil
+ }
+ names, err := session.DatabaseNames()
+ if err != nil {
+ panic(err)
+ }
+ for _, name := range names {
+ switch name {
+ case "admin", "local", "config":
+ default:
+ err = session.DB(name).DropDatabase()
+ if err != nil {
+ panic(err)
+ }
+ }
+ }
+}
diff --git a/dbtest/dbserver_test.go b/dbtest/dbserver_test.go
new file mode 100644
index 000000000..79812fde3
--- /dev/null
+++ b/dbtest/dbserver_test.go
@@ -0,0 +1,108 @@
+package dbtest_test
+
+import (
+ "os"
+ "testing"
+ "time"
+
+ . "gopkg.in/check.v1"
+
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/dbtest"
+)
+
+type M map[string]interface{}
+
+func TestAll(t *testing.T) {
+ TestingT(t)
+}
+
+type S struct {
+ oldCheckSessions string
+}
+
+var _ = Suite(&S{})
+
+func (s *S) SetUpTest(c *C) {
+ s.oldCheckSessions = os.Getenv("CHECK_SESSIONS")
+ os.Setenv("CHECK_SESSIONS", "")
+}
+
+func (s *S) TearDownTest(c *C) {
+ os.Setenv("CHECK_SESSIONS", s.oldCheckSessions)
+}
+
+func (s *S) TestWipeData(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ session := server.Session()
+ err := session.DB("mydb").C("mycoll").Insert(M{"a": 1})
+ session.Close()
+ c.Assert(err, IsNil)
+
+ server.Wipe()
+
+ session = server.Session()
+ names, err := session.DatabaseNames()
+ session.Close()
+ c.Assert(err, IsNil)
+ for _, name := range names {
+ if name != "local" && name != "admin" {
+ c.Fatalf("Wipe should have removed this database: %s", name)
+ }
+ }
+}
+
+func (s *S) TestStop(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ // Server should not be running.
+ process := server.ProcessTest()
+ c.Assert(process, IsNil)
+
+ session := server.Session()
+ addr := session.LiveServers()[0]
+ session.Close()
+
+ // Server should be running now.
+ process = server.ProcessTest()
+ p, err := os.FindProcess(process.Pid)
+ c.Assert(err, IsNil)
+ p.Release()
+
+ server.Stop()
+
+ // Server should not be running anymore.
+ session, err = mgo.DialWithTimeout(addr, 500*time.Millisecond)
+ if session != nil {
+ session.Close()
+ c.Fatalf("Stop did not stop the server")
+ }
+}
+
+func (s *S) TestCheckSessions(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ session := server.Session()
+ defer session.Close()
+ c.Assert(server.Wipe, PanicMatches, "There are mgo sessions still alive.")
+}
+
+func (s *S) TestCheckSessionsDisabled(c *C) {
+ var server dbtest.DBServer
+ server.SetPath(c.MkDir())
+ defer server.Stop()
+
+ os.Setenv("CHECK_SESSIONS", "0")
+
+ // Should not panic, although it looks to Wipe like this session will leak.
+ session := server.Session()
+ defer session.Close()
+ server.Wipe()
+}
diff --git a/dbtest/export_test.go b/dbtest/export_test.go
new file mode 100644
index 000000000..65f1cb023
--- /dev/null
+++ b/dbtest/export_test.go
@@ -0,0 +1,12 @@
+package dbtest
+
+import (
+ "os"
+)
+
+func (dbs *DBServer) ProcessTest() *os.Process {
+ if dbs.server == nil {
+ return nil
+ }
+ return dbs.server.Process
+}
diff --git a/doc.go b/doc.go
index 9316c5554..859fd9b8d 100644
--- a/doc.go
+++ b/doc.go
@@ -20,7 +20,7 @@
//
// New sessions are typically created by calling session.Copy on the
// initial session obtained at dial time. These new sessions will share
-// the same cluster information and connection cache, and may be easily
+// the same cluster information and connection pool, and may be easily
// handed into other methods and functions for organizing logic.
// Every session created must have its Close method called at the end
// of its life time, so its resources may be put back in the pool or
diff --git a/export_test.go b/export_test.go
index b6bfcbc73..690f84d38 100644
--- a/export_test.go
+++ b/export_test.go
@@ -4,15 +4,6 @@ import (
"time"
)
-func HackSocketsPerServer(newLimit int) (restore func()) {
- oldLimit := newLimit
- restore = func() {
- socketsPerServer = oldLimit
- }
- socketsPerServer = newLimit
- return
-}
-
func HackPingDelay(newDelay time.Duration) (restore func()) {
globalMutex.Lock()
defer globalMutex.Unlock()
diff --git a/gridfs.go b/gridfs.go
index 312f8fb02..421472095 100644
--- a/gridfs.go
+++ b/gridfs.go
@@ -32,10 +32,11 @@ import (
"errors"
"hash"
"io"
- "labix.org/v2/mgo/bson"
"os"
"sync"
"time"
+
+ "gopkg.in/mgo.v2/bson"
)
type GridFS struct {
@@ -130,7 +131,7 @@ func finalizeFile(file *GridFile) {
// }
// file, err := db.GridFS("fs").Create("myfile.txt")
// check(err)
-// n, err := file.Write([]byte("Hello world!")
+// n, err := file.Write([]byte("Hello world!"))
// check(err)
// err = file.Close()
// check(err)
@@ -153,7 +154,7 @@ func (gfs *GridFS) Create(name string) (file *GridFile, err error) {
file = gfs.newFile()
file.mode = gfsWriting
file.wsum = md5.New()
- file.doc = gfsFile{Id: bson.NewObjectId(), ChunkSize: 256 * 1024, Filename: name}
+ file.doc = gfsFile{Id: bson.NewObjectId(), ChunkSize: 255 * 1024, Filename: name}
return
}
@@ -358,7 +359,7 @@ func (file *GridFile) assertMode(mode gfsFileMode) {
// SetChunkSize sets size of saved chunks. Once the file is written to, it
// will be split in blocks of that size and each block saved into an
-// independent chunk document. The default chunk size is 256kb.
+// independent chunk document. The default chunk size is 255kb.
//
// It is a runtime error to call this function once the file has started
// being written to.
@@ -480,6 +481,17 @@ func (file *GridFile) UploadDate() time.Time {
return file.doc.UploadDate
}
+// SetUploadDate changes the file upload time.
+//
+// It is a runtime error to call this function when the file is not open
+// for writing.
+func (file *GridFile) SetUploadDate(t time.Time) {
+ file.assertMode(gfsWriting)
+ file.m.Lock()
+ file.doc.UploadDate = t
+ file.m.Unlock()
+}
+
// Close flushes any pending changes in case the file is being written
// to, waits for any background operations to finish, and closes the file.
//
@@ -509,15 +521,24 @@ func (file *GridFile) completeWrite() {
debugf("GridFile %p: waiting for %d pending chunks to complete file write", file, file.wpending)
file.c.Wait()
}
+ if file.err == nil {
+ hexsum := hex.EncodeToString(file.wsum.Sum(nil))
+ if file.doc.UploadDate.IsZero() {
+ file.doc.UploadDate = bson.Now()
+ }
+ file.doc.MD5 = hexsum
+ file.err = file.gfs.Files.Insert(file.doc)
+ }
if file.err != nil {
file.gfs.Chunks.RemoveAll(bson.D{{"files_id", file.doc.Id}})
- return
}
- hexsum := hex.EncodeToString(file.wsum.Sum(nil))
- file.doc.UploadDate = bson.Now()
- file.doc.MD5 = hexsum
- file.err = file.gfs.Files.Insert(file.doc)
- file.gfs.Chunks.EnsureIndexKey("files_id", "n")
+ if file.err == nil {
+ index := Index{
+ Key: []string{"files_id", "n"},
+ Unique: true,
+ }
+ file.err = file.gfs.Chunks.EnsureIndex(index)
+ }
}
// Abort cancels an in-progress write, preventing the file from being
@@ -650,6 +671,14 @@ func (file *GridFile) Seek(offset int64, whence int) (pos int64, err error) {
if offset > file.doc.Length {
return file.offset, errors.New("seek past end of file")
}
+ if offset == file.doc.Length {
+ // If we're seeking to the end of the file,
+ // no need to read anything. This enables
+ // a client to find the size of the file using only the
+ // io.ReadSeeker interface with low overhead.
+ file.offset = offset
+ return file.offset, nil
+ }
chunk := int(offset / int64(file.doc.ChunkSize))
if chunk+1 == file.chunk && offset >= file.offset {
file.rbuf = file.rbuf[int(offset-file.offset):]
@@ -669,7 +698,7 @@ func (file *GridFile) Seek(offset int64, whence int) (pos int64, err error) {
// Read reads into b the next available data from the file and
// returns the number of bytes written and an error in case
// something wrong happened. At the end of the file, n will
-// be zero and err will be set to os.EOF.
+// be zero and err will be set to io.EOF.
//
// The parameters and behavior of this function turn the file
// into an io.Reader.
diff --git a/gridfs_test.go b/gridfs_test.go
index fbdd5b0de..5a6ed5559 100644
--- a/gridfs_test.go
+++ b/gridfs_test.go
@@ -28,11 +28,12 @@ package mgo_test
import (
"io"
- "labix.org/v2/mgo"
- "labix.org/v2/mgo/bson"
- . "launchpad.net/gocheck"
"os"
"time"
+
+ . "gopkg.in/check.v1"
+ "gopkg.in/mgo.v2"
+ "gopkg.in/mgo.v2/bson"
)
func (s *S) TestGridFSCreate(c *C) {
@@ -75,7 +76,7 @@ func (s *S) TestGridFSCreate(c *C) {
expected := M{
"_id": "",
"length": 9,
- "chunkSize": 262144,
+ "chunkSize": 255 * 1024,
"uploadDate": "",
"md5": "1e50210a0202497fb79bc38b6ade6c34",
}
@@ -172,7 +173,7 @@ func (s *S) TestGridFSFileDetails(c *C) {
expected := M{
"_id": "myid",
"length": 9,
- "chunkSize": 262144,
+ "chunkSize": 255 * 1024,
"uploadDate": "",
"md5": "1e50210a0202497fb79bc38b6ade6c34",
"filename": "myfile2.txt",
@@ -182,6 +183,34 @@ func (s *S) TestGridFSFileDetails(c *C) {
c.Assert(result, DeepEquals, expected)
}
+func (s *S) TestGridFSSetUploadDate(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("")
+ c.Assert(err, IsNil)
+
+ t := time.Date(2014, 1, 1, 1, 1, 1, 0, time.Local)
+ file.SetUploadDate(t)
+
+ err = file.Close()
+ c.Assert(err, IsNil)
+
+ // Check the file information.
+ result := M{}
+ err = db.C("fs.files").Find(nil).One(result)
+ c.Assert(err, IsNil)
+
+ ud := result["uploadDate"].(time.Time)
+ if !ud.Equal(t) {
+ c.Fatalf("want upload date %s, got %s", t, ud)
+ }
+}
+
func (s *S) TestGridFSCreateWithChunking(c *C) {
session, err := mgo.Dial("localhost:40011")
c.Assert(err, IsNil)
@@ -300,6 +329,34 @@ func (s *S) TestGridFSAbort(c *C) {
c.Assert(count, Equals, 0)
}
+func (s *S) TestGridFSCloseConflict(c *C) {
+ session, err := mgo.Dial("localhost:40011")
+ c.Assert(err, IsNil)
+ defer session.Close()
+
+ db := session.DB("mydb")
+
+ db.C("fs.files").EnsureIndex(mgo.Index{Key: []string{"filename"}, Unique: true})
+
+ // For a closing-time conflict
+ err = db.C("fs.files").Insert(M{"filename": "foo.txt"})
+ c.Assert(err, IsNil)
+
+ gfs := db.GridFS("fs")
+ file, err := gfs.Create("foo.txt")
+ c.Assert(err, IsNil)
+
+ _, err = file.Write([]byte("some data"))
+ c.Assert(err, IsNil)
+
+ err = file.Close()
+ c.Assert(mgo.IsDup(err), Equals, true)
+
+ count, err := db.C("fs.chunks").Count()
+ c.Assert(err, IsNil)
+ c.Assert(count, Equals, 0)
+}
+
func (s *S) TestGridFSOpenNotFound(c *C) {
session, err := mgo.Dial("localhost:40011")
c.Assert(err, IsNil)
@@ -484,6 +541,13 @@ func (s *S) TestGridFSSeek(c *C) {
c.Assert(err, IsNil)
c.Assert(b, DeepEquals, []byte("nopqr"))
+ o, err = file.Seek(0, os.SEEK_END)
+ c.Assert(err, IsNil)
+ c.Assert(o, Equals, int64(22))
+ n, err = file.Read(b)
+ c.Assert(err, Equals, io.EOF)
+ c.Assert(n, Equals, 0)
+
o, err = file.Seek(-10, os.SEEK_END)
c.Assert(err, IsNil)
c.Assert(o, Equals, int64(12))
diff --git a/harness/certs/client.crt b/harness/certs/client.crt
new file mode 100644
index 000000000..6143d9254
--- /dev/null
+++ b/harness/certs/client.crt
@@ -0,0 +1,20 @@
+-----BEGIN CERTIFICATE-----
+MIIDLjCCAhYCAQcwDQYJKoZIhvcNAQELBQAwXDELMAkGA1UEBhMCR08xDDAKBgNV
+BAgMA01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNl
+cnZlcjESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTE1MDkyOTA4NDAzMFoYDzIxMTUw
+OTA1MDg0MDMwWjBcMQswCQYDVQQGEwJHTzEMMAoGA1UECAwDTUdPMQwwCgYDVQQH
+DANNR08xDDAKBgNVBAoMA01HTzEPMA0GA1UECwwGQ2xpZW50MRIwEAYDVQQDDAls
+b2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC0UiQhmT+H
+4IIqrn8SMESDzvcl3rwImwUoRIHlmXkovCIZCbvBCJ1nAu6X5zIN89EPPOjfNrgZ
+616wPgVV/YEQXp+D7+jTAsE5s8JepRXFdecResmvh/+0i2DSuI4QFsuyVAPM1O0I
+AQ5EKgr0weZZmsX6lhPD4uYehV4DxDE0i/8aTAlDoNgRCAJrYFMharRTDdY7bQzd
+7ZYab/pK/3DSmOKxl/AFJ8Enmcj9w1bsvy0fgAgoGEBnBru80PRFpFiqk72TJkXO
+Hx7zcYFpegtKPbAreTCModaCnjP//fskCp4XJrkfH5+01NeeX/r1OfEbjgE/wzzx
+l8NaWnPCmxNfAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFwYpje3dCLDOIHYjd+5
+CpFOEb+bJsS4ryqm/NblTjIhCLo58hNpMsBqdJHRbHAFRCOE8fvY8yiWtdHeFZcW
+DgVRAXfHONLtN7faZaZQnhy/YzOhLfC/8dUMB0gQA8KXhBCPZqQmexE28AfkEO47
+PwICAxIWINfjm5VnFMkA3b7bDNLHon/pev2m7HqVQ3pRUJQNK3XgFOdDgRrnuXpR
+OKAfHORHVGTh1gf1DVwc0oM+0gnkSiJ1VG0n5pE3zhZ24fmZxu6JQ6X515W7APQI
+/nKVH+f1Fo+ustyTNLt8Bwxi1XmwT7IXwnkVSE9Ff6VejppXRF01V0aaWsa3kU3r
+z3A=
+-----END CERTIFICATE-----
diff --git a/harness/certs/client.key b/harness/certs/client.key
new file mode 100644
index 000000000..892db714f
--- /dev/null
+++ b/harness/certs/client.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEogIBAAKCAQEAtFIkIZk/h+CCKq5/EjBEg873Jd68CJsFKESB5Zl5KLwiGQm7
+wQidZwLul+cyDfPRDzzo3za4GetesD4FVf2BEF6fg+/o0wLBObPCXqUVxXXnEXrJ
+r4f/tItg0riOEBbLslQDzNTtCAEORCoK9MHmWZrF+pYTw+LmHoVeA8QxNIv/GkwJ
+Q6DYEQgCa2BTIWq0Uw3WO20M3e2WGm/6Sv9w0pjisZfwBSfBJ5nI/cNW7L8tH4AI
+KBhAZwa7vND0RaRYqpO9kyZFzh8e83GBaXoLSj2wK3kwjKHWgp4z//37JAqeFya5
+Hx+ftNTXnl/69TnxG44BP8M88ZfDWlpzwpsTXwIDAQABAoIBADzCjOAxZkHfuZyu
+La0wTHXpkEfXdJ6ltagq5WY7P6MlOYwcRoK152vlhgXzZl9jL6ely4YjRwec0swq
+KdwezpV4fOGVPmuTuw45bx47HEnr/49ZQ4p9FgF9EYQPofbz53FQc/NaMACJcogv
+bn+osniw+VMFrOVNmGLiZ5p3Smk8zfXE7GRHO8CL5hpWLWO/aK236yytbfWOjM2f
+Pr76ICb26TPRNzYaYUEThU6DtgdLU8pLnJ6QKKaDsjn+zqQzRa+Nvc0c0K8gvWwA
+Afq7t0325+uMSwfpLgCOFldcaZQ5uvteJ0CAVRq1MvStnSHBmMzPlgS+NzsDm6lp
+QH5+rIkCgYEA5j3jrWsv7TueTNbk8Hr/Zwywc+fA2Ex0pBURBHlHyc6ahSXWSCqo
+DtvRGX0GDoK1lCfaIf1qb/DLlGaoHpkEeqcNhXQ+hHs+bZAxfbfBY9+ikit5ZTtl
+QN1tIlhaiyLDnwhkpi/hMw1tiouxJUf84Io61z0sCL4hyZSPCpjn0H0CgYEAyH6F
+Mwl+bCD3VDL/Dr5WSoOr2B/M3bF5SfvdStwy2IPcDJ716je1Ud/2qFCnKGgqvWhJ
++HU15c7CjAWo7/pXq2/pEMD8fDKTYww4Hr4p6duEA7DpbOGkwcUX8u3eknxUWT9F
+jOSbTCvAxuDOC1K3AElyMxVVTNUrFFe8M84R9gsCgYBXmb6RkdG3WlKde7m5gaLB
+K4PLZabq5RQQBe/mmtpkfxYtiLrh1FEC7kG9h+MRDExX5V3KRugDVUOv3+shUSjy
+HbM4ToUm1NloyE78PTj4bfMl2CKlEJcyucy3H5S7kWuKi5/31wnA6d/+sa2huKUP
+Lai7kgu5+9VRJBPUfV7d5QKBgCnhk/13TDtWH5QtGu5/gBMMskbxTaA5xHZZ8H4E
+xXJJCRxx0Dje7jduK145itF8AQGT2W/XPC0HJciOHh4TE2EyfWMMjTF8dyFHmimB
+28uIGWmT+Q7Pi9UWUMxkOAwtgIksGGE4F+CvexOQPjpLSwL6VKqrGCh2lwsm0J+Z
+ulLFAoGAKlC93c6XEj1A31c1+usdEhUe9BrmTqtSYLYpDNpeMLdZ3VctrAZuOQPZ
+4A4gkkQkqqwZGBYYSEqwqiLU6MsBdHPPZ9u3JXLLOQuh1xGeaKylvHj7qx6iT0Xo
+I+FkJ6/3JeMgOina/+wlzD4oyQpqR4Mnh+TuLkDfQTgY+Lg0WPk=
+-----END RSA PRIVATE KEY-----
diff --git a/harness/certs/client.pem b/harness/certs/client.pem
new file mode 100644
index 000000000..93aed3556
--- /dev/null
+++ b/harness/certs/client.pem
@@ -0,0 +1,57 @@
+To regenerate the key:
+
+ openssl req -newkey rsa:2048 -new -x509 -days 36500 -nodes -out server.crt -keyout server.key
+ cat server.key server.crt > server.pem
+ openssl genrsa -out client.key 2048
+ openssl req -key client.key -new -out client.req
+ openssl x509 -req -in client.req -CA server.crt -CAkey server.key -days 36500 -CAserial file.srl -out client.crt
+ cat client.key client.crt > client.pem
+
+-----BEGIN RSA PRIVATE KEY-----
+MIIEogIBAAKCAQEAtFIkIZk/h+CCKq5/EjBEg873Jd68CJsFKESB5Zl5KLwiGQm7
+wQidZwLul+cyDfPRDzzo3za4GetesD4FVf2BEF6fg+/o0wLBObPCXqUVxXXnEXrJ
+r4f/tItg0riOEBbLslQDzNTtCAEORCoK9MHmWZrF+pYTw+LmHoVeA8QxNIv/GkwJ
+Q6DYEQgCa2BTIWq0Uw3WO20M3e2WGm/6Sv9w0pjisZfwBSfBJ5nI/cNW7L8tH4AI
+KBhAZwa7vND0RaRYqpO9kyZFzh8e83GBaXoLSj2wK3kwjKHWgp4z//37JAqeFya5
+Hx+ftNTXnl/69TnxG44BP8M88ZfDWlpzwpsTXwIDAQABAoIBADzCjOAxZkHfuZyu
+La0wTHXpkEfXdJ6ltagq5WY7P6MlOYwcRoK152vlhgXzZl9jL6ely4YjRwec0swq
+KdwezpV4fOGVPmuTuw45bx47HEnr/49ZQ4p9FgF9EYQPofbz53FQc/NaMACJcogv
+bn+osniw+VMFrOVNmGLiZ5p3Smk8zfXE7GRHO8CL5hpWLWO/aK236yytbfWOjM2f
+Pr76ICb26TPRNzYaYUEThU6DtgdLU8pLnJ6QKKaDsjn+zqQzRa+Nvc0c0K8gvWwA
+Afq7t0325+uMSwfpLgCOFldcaZQ5uvteJ0CAVRq1MvStnSHBmMzPlgS+NzsDm6lp
+QH5+rIkCgYEA5j3jrWsv7TueTNbk8Hr/Zwywc+fA2Ex0pBURBHlHyc6ahSXWSCqo
+DtvRGX0GDoK1lCfaIf1qb/DLlGaoHpkEeqcNhXQ+hHs+bZAxfbfBY9+ikit5ZTtl
+QN1tIlhaiyLDnwhkpi/hMw1tiouxJUf84Io61z0sCL4hyZSPCpjn0H0CgYEAyH6F
+Mwl+bCD3VDL/Dr5WSoOr2B/M3bF5SfvdStwy2IPcDJ716je1Ud/2qFCnKGgqvWhJ
++HU15c7CjAWo7/pXq2/pEMD8fDKTYww4Hr4p6duEA7DpbOGkwcUX8u3eknxUWT9F
+jOSbTCvAxuDOC1K3AElyMxVVTNUrFFe8M84R9gsCgYBXmb6RkdG3WlKde7m5gaLB
+K4PLZabq5RQQBe/mmtpkfxYtiLrh1FEC7kG9h+MRDExX5V3KRugDVUOv3+shUSjy
+HbM4ToUm1NloyE78PTj4bfMl2CKlEJcyucy3H5S7kWuKi5/31wnA6d/+sa2huKUP
+Lai7kgu5+9VRJBPUfV7d5QKBgCnhk/13TDtWH5QtGu5/gBMMskbxTaA5xHZZ8H4E
+xXJJCRxx0Dje7jduK145itF8AQGT2W/XPC0HJciOHh4TE2EyfWMMjTF8dyFHmimB
+28uIGWmT+Q7Pi9UWUMxkOAwtgIksGGE4F+CvexOQPjpLSwL6VKqrGCh2lwsm0J+Z
+ulLFAoGAKlC93c6XEj1A31c1+usdEhUe9BrmTqtSYLYpDNpeMLdZ3VctrAZuOQPZ
+4A4gkkQkqqwZGBYYSEqwqiLU6MsBdHPPZ9u3JXLLOQuh1xGeaKylvHj7qx6iT0Xo
+I+FkJ6/3JeMgOina/+wlzD4oyQpqR4Mnh+TuLkDfQTgY+Lg0WPk=
+-----END RSA PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDLjCCAhYCAQcwDQYJKoZIhvcNAQELBQAwXDELMAkGA1UEBhMCR08xDDAKBgNV
+BAgMA01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNl
+cnZlcjESMBAGA1UEAwwJbG9jYWxob3N0MCAXDTE1MDkyOTA4NDAzMFoYDzIxMTUw
+OTA1MDg0MDMwWjBcMQswCQYDVQQGEwJHTzEMMAoGA1UECAwDTUdPMQwwCgYDVQQH
+DANNR08xDDAKBgNVBAoMA01HTzEPMA0GA1UECwwGQ2xpZW50MRIwEAYDVQQDDAls
+b2NhbGhvc3QwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC0UiQhmT+H
+4IIqrn8SMESDzvcl3rwImwUoRIHlmXkovCIZCbvBCJ1nAu6X5zIN89EPPOjfNrgZ
+616wPgVV/YEQXp+D7+jTAsE5s8JepRXFdecResmvh/+0i2DSuI4QFsuyVAPM1O0I
+AQ5EKgr0weZZmsX6lhPD4uYehV4DxDE0i/8aTAlDoNgRCAJrYFMharRTDdY7bQzd
+7ZYab/pK/3DSmOKxl/AFJ8Enmcj9w1bsvy0fgAgoGEBnBru80PRFpFiqk72TJkXO
+Hx7zcYFpegtKPbAreTCModaCnjP//fskCp4XJrkfH5+01NeeX/r1OfEbjgE/wzzx
+l8NaWnPCmxNfAgMBAAEwDQYJKoZIhvcNAQELBQADggEBAFwYpje3dCLDOIHYjd+5
+CpFOEb+bJsS4ryqm/NblTjIhCLo58hNpMsBqdJHRbHAFRCOE8fvY8yiWtdHeFZcW
+DgVRAXfHONLtN7faZaZQnhy/YzOhLfC/8dUMB0gQA8KXhBCPZqQmexE28AfkEO47
+PwICAxIWINfjm5VnFMkA3b7bDNLHon/pev2m7HqVQ3pRUJQNK3XgFOdDgRrnuXpR
+OKAfHORHVGTh1gf1DVwc0oM+0gnkSiJ1VG0n5pE3zhZ24fmZxu6JQ6X515W7APQI
+/nKVH+f1Fo+ustyTNLt8Bwxi1XmwT7IXwnkVSE9Ff6VejppXRF01V0aaWsa3kU3r
+z3A=
+-----END CERTIFICATE-----
+
diff --git a/harness/certs/client.req b/harness/certs/client.req
new file mode 100644
index 000000000..e44feb4e8
--- /dev/null
+++ b/harness/certs/client.req
@@ -0,0 +1,17 @@
+-----BEGIN CERTIFICATE REQUEST-----
+MIICoTCCAYkCAQAwXDELMAkGA1UEBhMCR08xDDAKBgNVBAgMA01HTzEMMAoGA1UE
+BwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBkNsaWVudDESMBAGA1UEAwwJ
+bG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAtFIkIZk/
+h+CCKq5/EjBEg873Jd68CJsFKESB5Zl5KLwiGQm7wQidZwLul+cyDfPRDzzo3za4
+GetesD4FVf2BEF6fg+/o0wLBObPCXqUVxXXnEXrJr4f/tItg0riOEBbLslQDzNTt
+CAEORCoK9MHmWZrF+pYTw+LmHoVeA8QxNIv/GkwJQ6DYEQgCa2BTIWq0Uw3WO20M
+3e2WGm/6Sv9w0pjisZfwBSfBJ5nI/cNW7L8tH4AIKBhAZwa7vND0RaRYqpO9kyZF
+zh8e83GBaXoLSj2wK3kwjKHWgp4z//37JAqeFya5Hx+ftNTXnl/69TnxG44BP8M8
+8ZfDWlpzwpsTXwIDAQABoAAwDQYJKoZIhvcNAQELBQADggEBAKbOFblIscxlXalV
+sEGNm2oz380RN2QoLhN6nKtAiv0jWm6iKhdAhOIQIeaRPhUP3cyi8bcBvLdMeQ3d
+ZYIByB55/R0VSP1vs4qkXJCQegHcpMpyuIzsMV8p3Q4lxzGKyKtPA6Bb5c49p8Sk
+ncD+LL4ymrMEia4cBPsHL9hhFOm4gqDacbU8+ETLTpuoSvUZiw7OwngqhE2r+kMv
+KDweq5TOPeb+ftKzQKrrfB+XVdBoTKYw6CwARpogbc0/7mvottVcJ/0yAgC1fBbM
+vupkohkXwKfjxKl6nKNL3R2GkzHQOh91hglAx5zyybKQn2YMM328Vk4X6csBg+pg
+tb1s0MA=
+-----END CERTIFICATE REQUEST-----
diff --git a/harness/certs/server.crt b/harness/certs/server.crt
new file mode 100644
index 000000000..4515f5592
--- /dev/null
+++ b/harness/certs/server.crt
@@ -0,0 +1,22 @@
+-----BEGIN CERTIFICATE-----
+MIIDjTCCAnWgAwIBAgIJAMW+wDfcdzC+MA0GCSqGSIb3DQEBCwUAMFwxCzAJBgNV
+BAYTAkdPMQwwCgYDVQQIDANNR08xDDAKBgNVBAcMA01HTzEMMAoGA1UECgwDTUdP
+MQ8wDQYDVQQLDAZTZXJ2ZXIxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xNTA5Mjkw
+ODM0MTBaGA8yMTE1MDkwNTA4MzQxMFowXDELMAkGA1UEBhMCR08xDDAKBgNVBAgM
+A01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNlcnZl
+cjESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEA/T5W1vTsAF+2gTXP1JKygjM7T/2BXHiJc6DRKVjlshTtPYuC3rpTddDm
+6d86d17LWEo+T2bCT4MzZJhSGAun9peFvehdElRMr57xs7j5V1QYjwadMTBkLQuK
+IAg6cISN1KPUzpUTUKsWIsbx97sA0t0wiEPifROb7nfSMIVQsdz/c9LlY2UNYI+5
+GiU88iDGg2wrdsa3U+l2G2KSx/9uE3c5iFki6bdequLiWmBZ6rxfoaLe4gk1INji
+fKssNsn2i3uJ4i4Tmr3PUc4kxx0mMKuWK3HdlQsMqtpq++HQmHSvsPrbgcjl9HyP
+JiHDsoJ+4O5bbtcE51oQbLh1bZAhYwIDAQABo1AwTjAdBgNVHQ4EFgQUhku/u9Kd
+OAc1L0OR649vCCuQT+0wHwYDVR0jBBgwFoAUhku/u9KdOAc1L0OR649vCCuQT+0w
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAw7Bgw3hlWXWSZjLhnSOu
+2mW/UJ2Sj31unHngmgtXwW/04cyzoULb+qmzPe/Z06QMgGIsku1jFBcu0JabQtUG
+TyalpfW77tfnvz238CYdImYwE9ZcIGuZGfhs6ySFN9XpW43B8YM7R8wTNPvOcSPw
+nfjqU6kueN4TTspQg9cKhDss5DcMTIdgJgLbITXhIsrCu6GlKOgtX3HrdMGpQX7s
+UoMXtZVG8pK32vxKWGTZ6DPqESeKjjq74NbYnB3H5U/kDU2dt7LF90C/Umdr9y+C
+W2OJb1WBrf6RTcbt8D6d7P9kOfLPOtyn/cbaA/pfXBMQMHqr7XNXzjnaNU+jB7hL
+yQ==
+-----END CERTIFICATE-----
diff --git a/harness/certs/server.key b/harness/certs/server.key
new file mode 100644
index 000000000..082d093e9
--- /dev/null
+++ b/harness/certs/server.key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQD9PlbW9OwAX7aB
+Nc/UkrKCMztP/YFceIlzoNEpWOWyFO09i4LeulN10Obp3zp3XstYSj5PZsJPgzNk
+mFIYC6f2l4W96F0SVEyvnvGzuPlXVBiPBp0xMGQtC4ogCDpwhI3Uo9TOlRNQqxYi
+xvH3uwDS3TCIQ+J9E5vud9IwhVCx3P9z0uVjZQ1gj7kaJTzyIMaDbCt2xrdT6XYb
+YpLH/24TdzmIWSLpt16q4uJaYFnqvF+hot7iCTUg2OJ8qyw2yfaLe4niLhOavc9R
+ziTHHSYwq5Yrcd2VCwyq2mr74dCYdK+w+tuByOX0fI8mIcOygn7g7ltu1wTnWhBs
+uHVtkCFjAgMBAAECggEASRAfRc1L+Z+jrAu2doIMdnwJdL6S//bW0UFolyFKw+I9
+wC/sBg6D3c3zkS4SVDZJPKPO7mGbVg1oWnGH3eAfCYoV0ACmOY+QwGp/GXcYmRVu
+MHWcDIEFpelaZHt7QNM9iEfsMd3YwMFblZUIYozVZADk66uKQMPTjS2Muur7qRSi
+wuVfSmsVZ5afH3B1Tr96BbmPsHrXLjvNpjO44k2wrnnSPQjUL7+YiZPvtnNW8Fby
+yuo2uoAyjg3+68PYZftOvvNneMsv1uyGlUs6Bk+DVWaqofIztWFdFZyXbHnK2PTk
+eGQt5EsL+RwIck5eoqd5vSE+KyzhhydL0zcpngVQoQKBgQD/Yelvholbz5NQtSy3
+ZoiW1y7hL1BKzvVNHuAMKJ5WOnj5szhjhKxt/wZ+hk0qcAmlV9WAPbf4izbEwPRC
+tnMBQzf1uBxqqbLL6WZ4YAyGrcX3UrT7GXsGfVT4zJjz7oYSw8aPircecw5V4exB
+xa4NF+ki8IycXSkHwvW2R56fRwKBgQD92xpxXtte/rUnmENbQmr0aKg7JEfMoih6
+MdX+f6mfgjMmqj+L4jPTI8/ql8HEy13SQS1534aDSHO+nBqBK5aHUCRMIgSLnTP9
+Xyx9Ngg03SZIkPfykqxQmnZgWkTPMhYS+K1Ao9FGVs8W5jVi7veyAdhHptAcxhP3
+IuxvrxVTBQKBgQCluMPiu0snaOwP04HRAZhhSgIB3tIbuXE1OnPpb/JPwmH+p25Q
+Jig+uN9d+4jXoRyhTv4c2fAoOS6xPwVCxWKbzyLhMTg/fx+ncy4rryhxvRJaDDGl
+QEO1Ul9xlFMs9/vI8YJIY5uxBrimwpStmbn4hSukoLSeQ1X802bfglpMwQKBgD8z
+GTY4Y20XBIrDAaHquy32EEwJEEcF6AXj+l7N8bDgfVOW9xMgUb6zH8RL29Xeu5Do
+4SWCXL66fvZpbr/R1jwB28eIgJExpgvicfUKSqi+lhVi4hfmJDg8/FOopZDf61b1
+ykxZfHSCkDQnRAtJaylKBEpyYUWImtfgPfTgJfLxAoGAc8A/Tl2h/DsdTA+cA5d7
+1e0l64m13ObruSWRczyru4hy8Yq6E/K2rOFw8cYCcFpy24NqNlk+2iXPLRpWm2zt
+9R497zAPvhK/bfPXjvm0j/VjB44lvRTC9hby/RRMHy9UJk4o/UQaD+1IodxZovvk
+SruEA1+5bfBRMW0P+h7Qfe4=
+-----END PRIVATE KEY-----
diff --git a/harness/certs/server.pem b/harness/certs/server.pem
new file mode 100644
index 000000000..487b92d66
--- /dev/null
+++ b/harness/certs/server.pem
@@ -0,0 +1,50 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQD9PlbW9OwAX7aB
+Nc/UkrKCMztP/YFceIlzoNEpWOWyFO09i4LeulN10Obp3zp3XstYSj5PZsJPgzNk
+mFIYC6f2l4W96F0SVEyvnvGzuPlXVBiPBp0xMGQtC4ogCDpwhI3Uo9TOlRNQqxYi
+xvH3uwDS3TCIQ+J9E5vud9IwhVCx3P9z0uVjZQ1gj7kaJTzyIMaDbCt2xrdT6XYb
+YpLH/24TdzmIWSLpt16q4uJaYFnqvF+hot7iCTUg2OJ8qyw2yfaLe4niLhOavc9R
+ziTHHSYwq5Yrcd2VCwyq2mr74dCYdK+w+tuByOX0fI8mIcOygn7g7ltu1wTnWhBs
+uHVtkCFjAgMBAAECggEASRAfRc1L+Z+jrAu2doIMdnwJdL6S//bW0UFolyFKw+I9
+wC/sBg6D3c3zkS4SVDZJPKPO7mGbVg1oWnGH3eAfCYoV0ACmOY+QwGp/GXcYmRVu
+MHWcDIEFpelaZHt7QNM9iEfsMd3YwMFblZUIYozVZADk66uKQMPTjS2Muur7qRSi
+wuVfSmsVZ5afH3B1Tr96BbmPsHrXLjvNpjO44k2wrnnSPQjUL7+YiZPvtnNW8Fby
+yuo2uoAyjg3+68PYZftOvvNneMsv1uyGlUs6Bk+DVWaqofIztWFdFZyXbHnK2PTk
+eGQt5EsL+RwIck5eoqd5vSE+KyzhhydL0zcpngVQoQKBgQD/Yelvholbz5NQtSy3
+ZoiW1y7hL1BKzvVNHuAMKJ5WOnj5szhjhKxt/wZ+hk0qcAmlV9WAPbf4izbEwPRC
+tnMBQzf1uBxqqbLL6WZ4YAyGrcX3UrT7GXsGfVT4zJjz7oYSw8aPircecw5V4exB
+xa4NF+ki8IycXSkHwvW2R56fRwKBgQD92xpxXtte/rUnmENbQmr0aKg7JEfMoih6
+MdX+f6mfgjMmqj+L4jPTI8/ql8HEy13SQS1534aDSHO+nBqBK5aHUCRMIgSLnTP9
+Xyx9Ngg03SZIkPfykqxQmnZgWkTPMhYS+K1Ao9FGVs8W5jVi7veyAdhHptAcxhP3
+IuxvrxVTBQKBgQCluMPiu0snaOwP04HRAZhhSgIB3tIbuXE1OnPpb/JPwmH+p25Q
+Jig+uN9d+4jXoRyhTv4c2fAoOS6xPwVCxWKbzyLhMTg/fx+ncy4rryhxvRJaDDGl
+QEO1Ul9xlFMs9/vI8YJIY5uxBrimwpStmbn4hSukoLSeQ1X802bfglpMwQKBgD8z
+GTY4Y20XBIrDAaHquy32EEwJEEcF6AXj+l7N8bDgfVOW9xMgUb6zH8RL29Xeu5Do
+4SWCXL66fvZpbr/R1jwB28eIgJExpgvicfUKSqi+lhVi4hfmJDg8/FOopZDf61b1
+ykxZfHSCkDQnRAtJaylKBEpyYUWImtfgPfTgJfLxAoGAc8A/Tl2h/DsdTA+cA5d7
+1e0l64m13ObruSWRczyru4hy8Yq6E/K2rOFw8cYCcFpy24NqNlk+2iXPLRpWm2zt
+9R497zAPvhK/bfPXjvm0j/VjB44lvRTC9hby/RRMHy9UJk4o/UQaD+1IodxZovvk
+SruEA1+5bfBRMW0P+h7Qfe4=
+-----END PRIVATE KEY-----
+-----BEGIN CERTIFICATE-----
+MIIDjTCCAnWgAwIBAgIJAMW+wDfcdzC+MA0GCSqGSIb3DQEBCwUAMFwxCzAJBgNV
+BAYTAkdPMQwwCgYDVQQIDANNR08xDDAKBgNVBAcMA01HTzEMMAoGA1UECgwDTUdP
+MQ8wDQYDVQQLDAZTZXJ2ZXIxEjAQBgNVBAMMCWxvY2FsaG9zdDAgFw0xNTA5Mjkw
+ODM0MTBaGA8yMTE1MDkwNTA4MzQxMFowXDELMAkGA1UEBhMCR08xDDAKBgNVBAgM
+A01HTzEMMAoGA1UEBwwDTUdPMQwwCgYDVQQKDANNR08xDzANBgNVBAsMBlNlcnZl
+cjESMBAGA1UEAwwJbG9jYWxob3N0MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB
+CgKCAQEA/T5W1vTsAF+2gTXP1JKygjM7T/2BXHiJc6DRKVjlshTtPYuC3rpTddDm
+6d86d17LWEo+T2bCT4MzZJhSGAun9peFvehdElRMr57xs7j5V1QYjwadMTBkLQuK
+IAg6cISN1KPUzpUTUKsWIsbx97sA0t0wiEPifROb7nfSMIVQsdz/c9LlY2UNYI+5
+GiU88iDGg2wrdsa3U+l2G2KSx/9uE3c5iFki6bdequLiWmBZ6rxfoaLe4gk1INji
+fKssNsn2i3uJ4i4Tmr3PUc4kxx0mMKuWK3HdlQsMqtpq++HQmHSvsPrbgcjl9HyP
+JiHDsoJ+4O5bbtcE51oQbLh1bZAhYwIDAQABo1AwTjAdBgNVHQ4EFgQUhku/u9Kd
+OAc1L0OR649vCCuQT+0wHwYDVR0jBBgwFoAUhku/u9KdOAc1L0OR649vCCuQT+0w
+DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAw7Bgw3hlWXWSZjLhnSOu
+2mW/UJ2Sj31unHngmgtXwW/04cyzoULb+qmzPe/Z06QMgGIsku1jFBcu0JabQtUG
+TyalpfW77tfnvz238CYdImYwE9ZcIGuZGfhs6ySFN9XpW43B8YM7R8wTNPvOcSPw
+nfjqU6kueN4TTspQg9cKhDss5DcMTIdgJgLbITXhIsrCu6GlKOgtX3HrdMGpQX7s
+UoMXtZVG8pK32vxKWGTZ6DPqESeKjjq74NbYnB3H5U/kDU2dt7LF90C/Umdr9y+C
+W2OJb1WBrf6RTcbt8D6d7P9kOfLPOtyn/cbaA/pfXBMQMHqr7XNXzjnaNU+jB7hL
+yQ==
+-----END CERTIFICATE-----
diff --git a/harness/daemons/.env b/harness/daemons/.env
new file mode 100644
index 000000000..96ee89e94
--- /dev/null
+++ b/harness/daemons/.env
@@ -0,0 +1,57 @@
+
+set -e
+
+MONGOVERSION=$(mongod --version | sed -n 's/.*v\([0-9]\+\.[0-9]\+\)\..*/\1/p')
+MONGOMAJOR=$(echo $MONGOVERSION | sed 's/\([0-9]\+\)\..*/\1/')
+MONGOMINOR=$(echo $MONGOVERSION | sed 's/[0-9]\+\.\([0-9]\+\)/\1/')
+
+versionAtLeast() {
+ TESTMAJOR="$1"
+ TESTMINOR="$2"
+ if [ "$MONGOMAJOR" -gt "$TESTMAJOR" ]; then
+ return 0
+ fi
+ if [ "$MONGOMAJOR" -lt "$TESTMAJOR" ]; then
+ return 100
+ fi
+ if [ "$MONGOMINOR" -ge "$TESTMINOR" ]; then
+ return 0
+ fi
+ return 100
+}
+
+COMMONDOPTSNOIP="
+ --nohttpinterface
+ --noprealloc
+ --nojournal
+ --smallfiles
+ --nssize=1
+ --oplogSize=1
+ --dbpath ./db
+ "
+COMMONDOPTS="
+ $COMMONDOPTSNOIP
+ --bind_ip=127.0.0.1
+ "
+COMMONCOPTS="
+ $COMMONDOPTS
+ "
+COMMONSOPTS="
+ --chunkSize 1
+ --bind_ip=127.0.0.1
+ "
+
+if versionAtLeast 3 2; then
+ # 3.2 doesn't like --nojournal on config servers.
+ #COMMONCOPTS="$(echo "$COMMONCOPTS" | sed '/--nojournal/d')"
+ # Using a hacked version of MongoDB 3.2 for now.
+
+ # Go back to MMAPv1 so it's not super sluggish. :-(
+ COMMONDOPTSNOIP="--storageEngine=mmapv1 $COMMONDOPTSNOIP"
+ COMMONDOPTS="--storageEngine=mmapv1 $COMMONDOPTS"
+ COMMONCOPTS="--storageEngine=mmapv1 $COMMONCOPTS"
+fi
+
+if [ "$TRAVIS" = true ]; then
+ set -x
+fi
diff --git a/harness/daemons/cfg1/db/.empty b/harness/daemons/cfg1/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/cfg1/db/journal/tempLatencyTest b/harness/daemons/cfg1/db/journal/tempLatencyTest
new file mode 100644
index 000000000..52972ec9e
Binary files /dev/null and b/harness/daemons/cfg1/db/journal/tempLatencyTest differ
diff --git a/harness/daemons/cfg1/db/mongod.lock b/harness/daemons/cfg1/db/mongod.lock
new file mode 100755
index 000000000..e69de29bb
diff --git a/harness/daemons/cfg1/log/run b/harness/daemons/cfg1/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/cfg1/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/cfg1/run b/harness/daemons/cfg1/run
new file mode 100755
index 000000000..ad6bddd04
--- /dev/null
+++ b/harness/daemons/cfg1/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONCOPTS \
+ --port 40101 \
+ --configsvr
+
diff --git a/harness/daemons/cfg2/db/.empty b/harness/daemons/cfg2/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/cfg2/log/run b/harness/daemons/cfg2/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/cfg2/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/cfg2/run b/harness/daemons/cfg2/run
new file mode 100755
index 000000000..07d159ef5
--- /dev/null
+++ b/harness/daemons/cfg2/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONCOPTS \
+ --port 40102 \
+ --configsvr
+
diff --git a/harness/daemons/cfg3/db/.empty b/harness/daemons/cfg3/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/cfg3/log/run b/harness/daemons/cfg3/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/cfg3/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/cfg3/run b/harness/daemons/cfg3/run
new file mode 100755
index 000000000..bd812fa3e
--- /dev/null
+++ b/harness/daemons/cfg3/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONCOPTS \
+ --port 40103 \
+ --configsvr \
+ --auth \
+ --keyFile=../../certs/keyfile
diff --git a/harness/daemons/db1/db/.empty b/harness/daemons/db1/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/db1/log/run b/harness/daemons/db1/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/db1/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/db1/run b/harness/daemons/db1/run
new file mode 100755
index 000000000..b6636d195
--- /dev/null
+++ b/harness/daemons/db1/run
@@ -0,0 +1,15 @@
+#!/bin/sh
+
+. ../.env
+
+if [ x$NOIPV6 = x1 ]; then
+ BINDIP="127.0.0.1"
+else
+ BINDIP="127.0.0.1,::1"
+fi
+
+exec mongod $COMMONDOPTSNOIP \
+ --shardsvr \
+ --bind_ip=$BINDIP \
+ --port 40001 \
+ --ipv6
diff --git a/harness/daemons/db2/db/.empty b/harness/daemons/db2/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/db2/log/run b/harness/daemons/db2/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/db2/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/db2/run b/harness/daemons/db2/run
new file mode 100755
index 000000000..5c7b1aa50
--- /dev/null
+++ b/harness/daemons/db2/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --port 40002 \
+ --auth
diff --git a/harness/daemons/db3/db/.empty b/harness/daemons/db3/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/db3/log/run b/harness/daemons/db3/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/db3/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/db3/run b/harness/daemons/db3/run
new file mode 100755
index 000000000..539da5fb2
--- /dev/null
+++ b/harness/daemons/db3/run
@@ -0,0 +1,12 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --port 40003 \
+ --auth \
+ --sslMode preferSSL \
+ --sslCAFile ../../certs/server.pem \
+ --sslPEMKeyFile ../../certs/server.pem
+
diff --git a/harness/daemons/rs1a/db/.empty b/harness/daemons/rs1a/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs1a/log/run b/harness/daemons/rs1a/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs1a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs1a/run b/harness/daemons/rs1a/run
new file mode 100755
index 000000000..9de773041
--- /dev/null
+++ b/harness/daemons/rs1a/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs1 \
+ --port 40011
diff --git a/harness/daemons/rs1b/db/.empty b/harness/daemons/rs1b/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs1b/log/run b/harness/daemons/rs1b/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs1b/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs1b/run b/harness/daemons/rs1b/run
new file mode 100755
index 000000000..dae593e12
--- /dev/null
+++ b/harness/daemons/rs1b/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs1 \
+ --port 40012
diff --git a/harness/daemons/rs1c/db/.empty b/harness/daemons/rs1c/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs1c/log/run b/harness/daemons/rs1c/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs1c/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs1c/run b/harness/daemons/rs1c/run
new file mode 100755
index 000000000..c28cdc35d
--- /dev/null
+++ b/harness/daemons/rs1c/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs1 \
+ --port 40013
diff --git a/harness/daemons/rs2a/db/.empty b/harness/daemons/rs2a/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs2a/log/run b/harness/daemons/rs2a/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs2a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs2a/run b/harness/daemons/rs2a/run
new file mode 100755
index 000000000..2c77ab1ab
--- /dev/null
+++ b/harness/daemons/rs2a/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs2 \
+ --port 40021
diff --git a/harness/daemons/rs2b/db/.empty b/harness/daemons/rs2b/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs2b/log/run b/harness/daemons/rs2b/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs2b/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs2b/run b/harness/daemons/rs2b/run
new file mode 100755
index 000000000..57bcfce15
--- /dev/null
+++ b/harness/daemons/rs2b/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs2 \
+ --port 40022
diff --git a/harness/daemons/rs2c/db/.empty b/harness/daemons/rs2c/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs2c/log/run b/harness/daemons/rs2c/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs2c/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs2c/run b/harness/daemons/rs2c/run
new file mode 100755
index 000000000..a71222705
--- /dev/null
+++ b/harness/daemons/rs2c/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs2 \
+ --port 40023
diff --git a/harness/daemons/rs3a/db/.empty b/harness/daemons/rs3a/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs3a/log/run b/harness/daemons/rs3a/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs3a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs3a/run b/harness/daemons/rs3a/run
new file mode 100755
index 000000000..002fbaf8e
--- /dev/null
+++ b/harness/daemons/rs3a/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs3 \
+ --port 40031 \
+ --keyFile=../../certs/keyfile
diff --git a/harness/daemons/rs3b/db/.empty b/harness/daemons/rs3b/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs3b/log/run b/harness/daemons/rs3b/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs3b/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs3b/run b/harness/daemons/rs3b/run
new file mode 100755
index 000000000..69825843e
--- /dev/null
+++ b/harness/daemons/rs3b/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs3 \
+ --port 40032 \
+ --keyFile=../../certs/keyfile
diff --git a/harness/daemons/rs3c/db/.empty b/harness/daemons/rs3c/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs3c/log/run b/harness/daemons/rs3c/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs3c/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs3c/run b/harness/daemons/rs3c/run
new file mode 100755
index 000000000..97b32c927
--- /dev/null
+++ b/harness/daemons/rs3c/run
@@ -0,0 +1,9 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs3 \
+ --port 40033 \
+ --keyFile=../../certs/keyfile
diff --git a/harness/daemons/rs4a/db/.empty b/harness/daemons/rs4a/db/.empty
new file mode 100644
index 000000000..e69de29bb
diff --git a/harness/daemons/rs4a/log/run b/harness/daemons/rs4a/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/rs4a/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/rs4a/run b/harness/daemons/rs4a/run
new file mode 100755
index 000000000..c2f2d5563
--- /dev/null
+++ b/harness/daemons/rs4a/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongod $COMMONDOPTS \
+ --shardsvr \
+ --replSet rs4 \
+ --port 40041
diff --git a/harness/daemons/s1/log/run b/harness/daemons/s1/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/s1/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/s1/run b/harness/daemons/s1/run
new file mode 100755
index 000000000..0e31d2c94
--- /dev/null
+++ b/harness/daemons/s1/run
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongos $COMMONSOPTS \
+ --port 40201 \
+ --configdb 127.0.0.1:40101
diff --git a/harness/daemons/s2/log/run b/harness/daemons/s2/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/s2/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/s2/run b/harness/daemons/s2/run
new file mode 100755
index 000000000..3b5c67d58
--- /dev/null
+++ b/harness/daemons/s2/run
@@ -0,0 +1,7 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongos $COMMONSOPTS \
+ --port 40202 \
+ --configdb 127.0.0.1:40102
diff --git a/harness/daemons/s3/log/run b/harness/daemons/s3/log/run
new file mode 100755
index 000000000..e9d4404ba
--- /dev/null
+++ b/harness/daemons/s3/log/run
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec cat - > log.txt
diff --git a/harness/daemons/s3/run b/harness/daemons/s3/run
new file mode 100755
index 000000000..fde6e479b
--- /dev/null
+++ b/harness/daemons/s3/run
@@ -0,0 +1,8 @@
+#!/bin/sh
+
+. ../.env
+
+exec mongos $COMMONSOPTS \
+ --port 40203 \
+ --configdb 127.0.0.1:40103 \
+ --keyFile=../../certs/keyfile
diff --git a/testdb/dropall.js b/harness/mongojs/dropall.js
similarity index 65%
rename from testdb/dropall.js
rename to harness/mongojs/dropall.js
index ca1289263..7fa39d112 100644
--- a/testdb/dropall.js
+++ b/harness/mongojs/dropall.js
@@ -1,13 +1,19 @@
var ports = [40001, 40002, 40011, 40012, 40013, 40021, 40022, 40023, 40041, 40101, 40102, 40103, 40201, 40202, 40203]
var auth = [40002, 40103, 40203, 40031]
+var db1 = new Mongo("localhost:40001")
+
+if (db1.getDB("admin").serverBuildInfo().OpenSSLVersion) {
+ ports.push(40003)
+ auth.push(40003)
+}
for (var i in ports) {
var port = ports[i]
var server = "localhost:" + port
var mongo = new Mongo("localhost:" + port)
var admin = mongo.getDB("admin")
-
+
for (var j in auth) {
if (auth[j] == port) {
admin.auth("root", "rapadura")
@@ -25,11 +31,20 @@ for (var i in ports) {
}
}
var result = admin.runCommand({"listDatabases": 1})
- // Why is the command returning undefined!?
- while (typeof result.databases == "undefined") {
- print("dropall.js: listing databases of :" + port + " got:", result)
+ for (var j = 0; j != 100; j++) {
+ if (typeof result.databases != "undefined" || notMaster(result)) {
+ break
+ }
result = admin.runCommand({"listDatabases": 1})
}
+ if (notMaster(result)) {
+ continue
+ }
+ if (typeof result.databases == "undefined") {
+ print("Could not list databases. Command result:")
+ print(JSON.stringify(result))
+ quit(12)
+ }
var dbs = result.databases
for (var j = 0; j != dbs.length; j++) {
var db = dbs[j]
@@ -44,4 +59,8 @@ for (var i in ports) {
}
}
+function notMaster(result) {
+ return typeof result.errmsg != "undefined" && (result.errmsg.indexOf("not master") >= 0 || result.errmsg.indexOf("no master found"))
+}
+
// vim:ts=4:sw=4:et
diff --git a/testdb/init.js b/harness/mongojs/init.js
similarity index 78%
rename from testdb/init.js
rename to harness/mongojs/init.js
index 02a6c61c8..ceb75a5e4 100644
--- a/testdb/init.js
+++ b/harness/mongojs/init.js
@@ -32,6 +32,10 @@ for (var i = 0; i != 60; i++) {
sleep(1000)
}
+function hasSSL() {
+ return Boolean(db1.serverBuildInfo().OpenSSLVersion)
+}
+
rs1a.runCommand({replSetInitiate: rs1cfg})
rs2a.runCommand({replSetInitiate: rs2cfg})
rs3a.runCommand({replSetInitiate: rs3cfg})
@@ -50,15 +54,34 @@ function configShards() {
function configAuth() {
var addrs = ["127.0.0.1:40002", "127.0.0.1:40203", "127.0.0.1:40031"]
+ if (hasSSL()) {
+ addrs.push("127.0.0.1:40003")
+ }
for (var i in addrs) {
+ print("Configuring auth for", addrs[i])
var db = new Mongo(addrs[i]).getDB("admin")
var v = db.serverBuildInfo().versionArray
+ var timedOut = false
if (v < [2, 5]) {
db.addUser("root", "rapadura")
} else {
- db.createUser({user: "root", pwd: "rapadura", roles: ["root"]})
+ try {
+ db.createUser({user: "root", pwd: "rapadura", roles: ["root"]})
+ } catch (err) {
+ // 3.2 consistently fails replication of creds on 40031 (config server)
+ print("createUser command returned an error: " + err)
+ if (String(err).indexOf("timed out") >= 0) {
+ timedOut = true;
+ }
+ }
+ }
+ for (var i = 0; i < 60; i++) {
+ var ok = db.auth("root", "rapadura")
+ if (ok || !timedOut) {
+ break
+ }
+ sleep(1000);
}
- db.auth("root", "rapadura")
if (v >= [2, 6]) {
db.createUser({user: "reader", pwd: "rapadura", roles: ["readAnyDatabase"]})
} else if (v >= [2, 4]) {
@@ -72,14 +95,21 @@ function configAuth() {
function countHealthy(rs) {
var status = rs.runCommand({replSetGetStatus: 1})
var count = 0
+ var primary = 0
if (typeof status.members != "undefined") {
for (var i = 0; i != status.members.length; i++) {
var m = status.members[i]
if (m.health == 1 && (m.state == 1 || m.state == 2)) {
count += 1
+ if (m.state == 1) {
+ primary = 1
+ }
}
}
}
+ if (primary == 0) {
+ count = 0
+ }
return count
}
@@ -89,7 +119,6 @@ for (var i = 0; i != 60; i++) {
var count = countHealthy(rs1a) + countHealthy(rs2a) + countHealthy(rs3a)
print("Replica sets have", count, "healthy nodes.")
if (count == totalRSMembers) {
- sleep(2000)
configShards()
configAuth()
quit(0)
diff --git a/testdb/wait.js b/harness/mongojs/wait.js
similarity index 90%
rename from testdb/wait.js
rename to harness/mongojs/wait.js
index de0d66075..2735d0e56 100644
--- a/testdb/wait.js
+++ b/harness/mongojs/wait.js
@@ -32,20 +32,27 @@ for (var i = 0; i != 60; i++) {
function countHealthy(rs) {
var status = rs.runCommand({replSetGetStatus: 1})
var count = 0
+ var primary = 0
if (typeof status.members != "undefined") {
for (var i = 0; i != status.members.length; i++) {
var m = status.members[i]
if (m.health == 1 && (m.state == 1 || m.state == 2)) {
count += 1
+ if (m.state == 1) {
+ primary = 1
+ }
}
}
}
+ if (primary == 0) {
+ count = 0
+ }
return count
}
var totalRSMembers = rs1cfg.members.length + rs2cfg.members.length + rs3cfg.members.length
-for (var i = 0; i != 60; i++) {
+for (var i = 0; i != 90; i++) {
var count = countHealthy(rs1a) + countHealthy(rs2a) + countHealthy(rs3a)
print("Replica sets have", count, "healthy nodes.")
if (count == totalRSMembers) {
@@ -56,3 +63,5 @@ for (var i = 0; i != 60; i++) {
print("Replica sets didn't sync up properly.")
quit(12)
+
+// vim:ts=4:sw=4:et
diff --git a/harness/setup.sh b/harness/setup.sh
new file mode 100755
index 000000000..e5db78a78
--- /dev/null
+++ b/harness/setup.sh
@@ -0,0 +1,96 @@
+#!/bin/sh -e
+
+LINE="---------------"
+
+start() {
+ if [ -d _harness ]; then
+ echo "Daemon setup already in place, stop it first."
+ exit 1
+ fi
+ mkdir -p _harness
+ cd _harness
+ cp -a ../harness/daemons .
+ cp -a ../harness/certs .
+ echo keyfile > certs/keyfile
+ chmod 600 certs/keyfile
+ if ! mongod --help | grep -q -- --ssl; then
+ rm -rf daemons/db3
+ fi
+ COUNT=$(ls daemons | wc -l)
+ echo "Running daemons..."
+ svscan daemons &
+ SVSCANPID=$!
+ echo $SVSCANPID > svscan.pid
+ if ! kill -0 $SVSCANPID; then
+ echo "Cannot execute svscan."
+ exit 1
+ fi
+ echo "Starting $COUNT processes..."
+ for i in $(seq 30); do
+ UP=$(svstat daemons/* | grep ' up ' | grep -v ' [0-3] seconds' | wc -l)
+ echo "$UP processes up..."
+ if [ x$COUNT = x$UP ]; then
+ echo "Running setup.js with mongo..."
+ mongo --nodb ../harness/mongojs/init.js
+ exit 0
+ fi
+ sleep 1
+ done
+ echo "Failed to start processes. svstat _harness/daemons/* output:"
+ echo $LINE
+ svstat daemons/*
+ echo $LINE
+ for DAEMON in daemons/*; do
+ if $(svstat $DAEMON | grep ' up ' | grep ' [0-3] seconds' > /dev/null); then
+ echo "Logs for _harness/$DAEMON:"
+ echo $LINE
+ cat $DAEMON/log/log.txt
+ echo $LINE
+ fi
+ done
+ exit 1
+}
+
+stop() {
+ if [ -d _harness ]; then
+ cd _harness
+ if [ -f svscan.pid ]; then
+ kill -9 $(cat svscan.pid) 2> /dev/null || true
+ svc -dx daemons/* daemons/*/log > /dev/null 2>&1 || true
+ COUNT=$(ls daemons | wc -l)
+ echo "Shutting down $COUNT processes..."
+ while true; do
+ DOWN=$(svstat daemons/* | grep 'supervise not running' | wc -l)
+ echo "$DOWN processes down..."
+ if [ x$DOWN = x$COUNT ]; then
+ break
+ fi
+ sleep 1
+ done
+ rm svscan.pid
+ echo "Done."
+ fi
+ cd ..
+ rm -rf _harness
+ fi
+}
+
+
+if [ ! -f suite_test.go ]; then
+ echo "This script must be run from within the source directory."
+ exit 1
+fi
+
+case "$1" in
+
+ start)
+ start $2
+ ;;
+
+ stop)
+ stop $2
+ ;;
+
+esac
+
+# vim:ts=4:sw=4:et
diff --git a/internal/json/LICENSE b/internal/json/LICENSE
new file mode 100644
index 000000000..744875676
--- /dev/null
+++ b/internal/json/LICENSE
@@ -0,0 +1,27 @@
+Copyright (c) 2012 The Go Authors. All rights reserved.
+
+Redistribution and use in source and binary forms, with or without
+modification, are permitted provided that the following conditions are
+met:
+
+ * Redistributions of source code must retain the above copyright
+notice, this list of conditions and the following disclaimer.
+ * Redistributions in binary form must reproduce the above
+copyright notice, this list of conditions and the following disclaimer
+in the documentation and/or other materials provided with the
+distribution.
+ * Neither the name of Google Inc. nor the names of its
+contributors may be used to endorse or promote products derived from
+this software without specific prior written permission.
+
+THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
diff --git a/internal/json/bench_test.go b/internal/json/bench_test.go
new file mode 100644
index 000000000..cd7380b1e
--- /dev/null
+++ b/internal/json/bench_test.go
@@ -0,0 +1,223 @@
+// Copyright 2011 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Large data benchmark.
+// The JSON data is a summary of agl's changes in the
+// go, webkit, and chromium open source projects.
+// We benchmark converting between the JSON form
+// and in-memory data structures.
+
+package json
+
+import (
+ "bytes"
+ "compress/gzip"
+ "io/ioutil"
+ "os"
+ "strings"
+ "testing"
+)
+
+type codeResponse struct {
+ Tree *codeNode `json:"tree"`
+ Username string `json:"username"`
+}
+
+type codeNode struct {
+ Name string `json:"name"`
+ Kids []*codeNode `json:"kids"`
+ CLWeight float64 `json:"cl_weight"`
+ Touches int `json:"touches"`
+ MinT int64 `json:"min_t"`
+ MaxT int64 `json:"max_t"`
+ MeanT int64 `json:"mean_t"`
+}
+
+var codeJSON []byte
+var codeStruct codeResponse
+
+func codeInit() {
+ f, err := os.Open("testdata/code.json.gz")
+ if err != nil {
+ panic(err)
+ }
+ defer f.Close()
+ gz, err := gzip.NewReader(f)
+ if err != nil {
+ panic(err)
+ }
+ data, err := ioutil.ReadAll(gz)
+ if err != nil {
+ panic(err)
+ }
+
+ codeJSON = data
+
+ if err := Unmarshal(codeJSON, &codeStruct); err != nil {
+ panic("unmarshal code.json: " + err.Error())
+ }
+
+ if data, err = Marshal(&codeStruct); err != nil {
+ panic("marshal code.json: " + err.Error())
+ }
+
+ if !bytes.Equal(data, codeJSON) {
+ println("different lengths", len(data), len(codeJSON))
+ for i := 0; i < len(data) && i < len(codeJSON); i++ {
+ if data[i] != codeJSON[i] {
+ println("re-marshal: changed at byte", i)
+ println("orig: ", string(codeJSON[i-10:i+10]))
+ println("new: ", string(data[i-10:i+10]))
+ break
+ }
+ }
+ panic("re-marshal code.json: different result")
+ }
+}
+
+func BenchmarkCodeEncoder(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ enc := NewEncoder(ioutil.Discard)
+ for i := 0; i < b.N; i++ {
+ if err := enc.Encode(&codeStruct); err != nil {
+ b.Fatal("Encode:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkCodeMarshal(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ for i := 0; i < b.N; i++ {
+ if _, err := Marshal(&codeStruct); err != nil {
+ b.Fatal("Marshal:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkCodeDecoder(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ var buf bytes.Buffer
+ dec := NewDecoder(&buf)
+ var r codeResponse
+ for i := 0; i < b.N; i++ {
+ buf.Write(codeJSON)
+ // hide EOF
+ buf.WriteByte('\n')
+ buf.WriteByte('\n')
+ buf.WriteByte('\n')
+ if err := dec.Decode(&r); err != nil {
+ b.Fatal("Decode:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkDecoderStream(b *testing.B) {
+ b.StopTimer()
+ var buf bytes.Buffer
+ dec := NewDecoder(&buf)
+ buf.WriteString(`"` + strings.Repeat("x", 1000000) + `"` + "\n\n\n")
+ var x interface{}
+ if err := dec.Decode(&x); err != nil {
+ b.Fatal("Decode:", err)
+ }
+ ones := strings.Repeat(" 1\n", 300000) + "\n\n\n"
+ b.StartTimer()
+ for i := 0; i < b.N; i++ {
+ if i%300000 == 0 {
+ buf.WriteString(ones)
+ }
+ x = nil
+ if err := dec.Decode(&x); err != nil || x != 1.0 {
+ b.Fatalf("Decode: %v after %d", err, i)
+ }
+ }
+}
+
+func BenchmarkCodeUnmarshal(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ for i := 0; i < b.N; i++ {
+ var r codeResponse
+ if err := Unmarshal(codeJSON, &r); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+ b.SetBytes(int64(len(codeJSON)))
+}
+
+func BenchmarkCodeUnmarshalReuse(b *testing.B) {
+ if codeJSON == nil {
+ b.StopTimer()
+ codeInit()
+ b.StartTimer()
+ }
+ var r codeResponse
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(codeJSON, &r); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkUnmarshalString(b *testing.B) {
+ data := []byte(`"hello, world"`)
+ var s string
+
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(data, &s); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkUnmarshalFloat64(b *testing.B) {
+ var f float64
+ data := []byte(`3.14`)
+
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(data, &f); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkUnmarshalInt64(b *testing.B) {
+ var x int64
+ data := []byte(`3`)
+
+ for i := 0; i < b.N; i++ {
+ if err := Unmarshal(data, &x); err != nil {
+ b.Fatal("Unmarshal:", err)
+ }
+ }
+}
+
+func BenchmarkIssue10335(b *testing.B) {
+ b.ReportAllocs()
+ var s struct{}
+ j := []byte(`{"a":{ }}`)
+ for n := 0; n < b.N; n++ {
+ if err := Unmarshal(j, &s); err != nil {
+ b.Fatal(err)
+ }
+ }
+}
diff --git a/internal/json/decode.go b/internal/json/decode.go
new file mode 100644
index 000000000..ce7c7d249
--- /dev/null
+++ b/internal/json/decode.go
@@ -0,0 +1,1685 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Represents JSON data structure using native Go types: booleans, floats,
+// strings, arrays, and maps.
+
+package json
+
+import (
+ "bytes"
+ "encoding"
+ "encoding/base64"
+ "errors"
+ "fmt"
+ "reflect"
+ "runtime"
+ "strconv"
+ "unicode"
+ "unicode/utf16"
+ "unicode/utf8"
+)
+
+// Unmarshal parses the JSON-encoded data and stores the result
+// in the value pointed to by v.
+//
+// Unmarshal uses the inverse of the encodings that
+// Marshal uses, allocating maps, slices, and pointers as necessary,
+// with the following additional rules:
+//
+// To unmarshal JSON into a pointer, Unmarshal first handles the case of
+// the JSON being the JSON literal null. In that case, Unmarshal sets
+// the pointer to nil. Otherwise, Unmarshal unmarshals the JSON into
+// the value pointed at by the pointer. If the pointer is nil, Unmarshal
+// allocates a new value for it to point to.
+//
+// To unmarshal JSON into a struct, Unmarshal matches incoming object
+// keys to the keys used by Marshal (either the struct field name or its tag),
+// preferring an exact match but also accepting a case-insensitive match.
+// Unmarshal will only set exported fields of the struct.
+//
+// To unmarshal JSON into an interface value,
+// Unmarshal stores one of these in the interface value:
+//
+// bool, for JSON booleans
+// float64, for JSON numbers
+// string, for JSON strings
+// []interface{}, for JSON arrays
+// map[string]interface{}, for JSON objects
+// nil for JSON null
+//
+// To unmarshal a JSON array into a slice, Unmarshal resets the slice length
+// to zero and then appends each element to the slice.
+// As a special case, to unmarshal an empty JSON array into a slice,
+// Unmarshal replaces the slice with a new empty slice.
+//
+// To unmarshal a JSON array into a Go array, Unmarshal decodes
+// JSON array elements into corresponding Go array elements.
+// If the Go array is smaller than the JSON array,
+// the additional JSON array elements are discarded.
+// If the JSON array is smaller than the Go array,
+// the additional Go array elements are set to zero values.
+//
+// To unmarshal a JSON object into a map, Unmarshal first establishes a map to
+// use. If the map is nil, Unmarshal allocates a new map. Otherwise Unmarshal
+// reuses the existing map, keeping existing entries. Unmarshal then stores key-
+// value pairs from the JSON object into the map. The map's key type must
+// either be a string or implement encoding.TextUnmarshaler.
+//
+// If a JSON value is not appropriate for a given target type,
+// or if a JSON number overflows the target type, Unmarshal
+// skips that field and completes the unmarshaling as best it can.
+// If no more serious errors are encountered, Unmarshal returns
+// an UnmarshalTypeError describing the earliest such error.
+//
+// The JSON null value unmarshals into an interface, map, pointer, or slice
+// by setting that Go value to nil. Because null is often used in JSON to mean
+// ``not present,'' unmarshaling a JSON null into any other Go type has no effect
+// on the value and produces no error.
+//
+// When unmarshaling quoted strings, invalid UTF-8 or
+// invalid UTF-16 surrogate pairs are not treated as an error.
+// Instead, they are replaced by the Unicode replacement
+// character U+FFFD.
+//
+func Unmarshal(data []byte, v interface{}) error {
+ // Check for well-formedness.
+ // Avoids filling out half a data structure
+ // before discovering a JSON syntax error.
+ var d decodeState
+ err := checkValid(data, &d.scan)
+ if err != nil {
+ return err
+ }
+
+ d.init(data)
+ return d.unmarshal(v)
+}
+
+// Unmarshaler is the interface implemented by types
+// that can unmarshal a JSON description of themselves.
+// The input can be assumed to be a valid encoding of
+// a JSON value. UnmarshalJSON must copy the JSON data
+// if it wishes to retain the data after returning.
+type Unmarshaler interface {
+ UnmarshalJSON([]byte) error
+}
+
+// An UnmarshalTypeError describes a JSON value that was
+// not appropriate for a value of a specific Go type.
+type UnmarshalTypeError struct {
+ Value string // description of JSON value - "bool", "array", "number -5"
+ Type reflect.Type // type of Go value it could not be assigned to
+ Offset int64 // error occurred after reading Offset bytes
+}
+
+func (e *UnmarshalTypeError) Error() string {
+ return "json: cannot unmarshal " + e.Value + " into Go value of type " + e.Type.String()
+}
+
+// An UnmarshalFieldError describes a JSON object key that
+// led to an unexported (and therefore unwritable) struct field.
+// (No longer used; kept for compatibility.)
+type UnmarshalFieldError struct {
+ Key string
+ Type reflect.Type
+ Field reflect.StructField
+}
+
+func (e *UnmarshalFieldError) Error() string {
+ return "json: cannot unmarshal object key " + strconv.Quote(e.Key) + " into unexported field " + e.Field.Name + " of type " + e.Type.String()
+}
+
+// An InvalidUnmarshalError describes an invalid argument passed to Unmarshal.
+// (The argument to Unmarshal must be a non-nil pointer.)
+type InvalidUnmarshalError struct {
+ Type reflect.Type
+}
+
+func (e *InvalidUnmarshalError) Error() string {
+ if e.Type == nil {
+ return "json: Unmarshal(nil)"
+ }
+
+ if e.Type.Kind() != reflect.Ptr {
+ return "json: Unmarshal(non-pointer " + e.Type.String() + ")"
+ }
+ return "json: Unmarshal(nil " + e.Type.String() + ")"
+}
+
+func (d *decodeState) unmarshal(v interface{}) (err error) {
+ defer func() {
+ if r := recover(); r != nil {
+ if _, ok := r.(runtime.Error); ok {
+ panic(r)
+ }
+ err = r.(error)
+ }
+ }()
+
+ rv := reflect.ValueOf(v)
+ if rv.Kind() != reflect.Ptr || rv.IsNil() {
+ return &InvalidUnmarshalError{reflect.TypeOf(v)}
+ }
+
+ d.scan.reset()
+ // We decode rv not rv.Elem because the Unmarshaler interface
+ // test must be applied at the top level of the value.
+ d.value(rv)
+ return d.savedError
+}
+
+// A Number represents a JSON number literal.
+type Number string
+
+// String returns the literal text of the number.
+func (n Number) String() string { return string(n) }
+
+// Float64 returns the number as a float64.
+func (n Number) Float64() (float64, error) {
+ return strconv.ParseFloat(string(n), 64)
+}
+
+// Int64 returns the number as an int64.
+func (n Number) Int64() (int64, error) {
+ return strconv.ParseInt(string(n), 10, 64)
+}
+
+// isValidNumber reports whether s is a valid JSON number literal.
+func isValidNumber(s string) bool {
+ // This function implements the JSON numbers grammar.
+ // See https://tools.ietf.org/html/rfc7159#section-6
+ // and http://json.org/number.gif
+
+ if s == "" {
+ return false
+ }
+
+ // Optional -
+ if s[0] == '-' {
+ s = s[1:]
+ if s == "" {
+ return false
+ }
+ }
+
+ // Digits
+ switch {
+ default:
+ return false
+
+ case s[0] == '0':
+ s = s[1:]
+
+ case '1' <= s[0] && s[0] <= '9':
+ s = s[1:]
+ for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
+ s = s[1:]
+ }
+ }
+
+ // . followed by 1 or more digits.
+ if len(s) >= 2 && s[0] == '.' && '0' <= s[1] && s[1] <= '9' {
+ s = s[2:]
+ for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
+ s = s[1:]
+ }
+ }
+
+ // e or E followed by an optional - or + and
+ // 1 or more digits.
+ if len(s) >= 2 && (s[0] == 'e' || s[0] == 'E') {
+ s = s[1:]
+ if s[0] == '+' || s[0] == '-' {
+ s = s[1:]
+ if s == "" {
+ return false
+ }
+ }
+ for len(s) > 0 && '0' <= s[0] && s[0] <= '9' {
+ s = s[1:]
+ }
+ }
+
+ // Make sure we are at the end.
+ return s == ""
+}
+
+// decodeState represents the state while decoding a JSON value.
+type decodeState struct {
+ data []byte
+ off int // read offset in data
+ scan scanner
+ nextscan scanner // for calls to nextValue
+ savedError error
+ useNumber bool
+ ext Extension
+}
+
+// errPhase is used for errors that should not happen unless
+// there is a bug in the JSON decoder or something is editing
+// the data slice while the decoder executes.
+var errPhase = errors.New("JSON decoder out of sync - data changing underfoot?")
+
+func (d *decodeState) init(data []byte) *decodeState {
+ d.data = data
+ d.off = 0
+ d.savedError = nil
+ return d
+}
+
+// error aborts the decoding by panicking with err.
+func (d *decodeState) error(err error) {
+ panic(err)
+}
+
+// saveError saves the first err it is called with,
+// for reporting at the end of the unmarshal.
+func (d *decodeState) saveError(err error) {
+ if d.savedError == nil {
+ d.savedError = err
+ }
+}
+
+// next cuts off and returns the next full JSON value in d.data[d.off:].
+// The next value is known to be an object or array, not a literal.
+func (d *decodeState) next() []byte {
+ c := d.data[d.off]
+ item, rest, err := nextValue(d.data[d.off:], &d.nextscan)
+ if err != nil {
+ d.error(err)
+ }
+ d.off = len(d.data) - len(rest)
+
+ // Our scanner has seen the opening brace/bracket
+ // and thinks we're still in the middle of the object.
+ // invent a closing brace/bracket to get it out.
+ if c == '{' {
+ d.scan.step(&d.scan, '}')
+ } else if c == '[' {
+ d.scan.step(&d.scan, ']')
+ } else {
+ // Was inside a function name. Get out of it.
+ d.scan.step(&d.scan, '(')
+ d.scan.step(&d.scan, ')')
+ }
+
+ return item
+}
+
+// scanWhile processes bytes in d.data[d.off:] until it
+// receives a scan code not equal to op.
+// It updates d.off and returns the new scan code.
+func (d *decodeState) scanWhile(op int) int {
+ var newOp int
+ for {
+ if d.off >= len(d.data) {
+ newOp = d.scan.eof()
+ d.off = len(d.data) + 1 // mark processed EOF with len+1
+ } else {
+ c := d.data[d.off]
+ d.off++
+ newOp = d.scan.step(&d.scan, c)
+ }
+ if newOp != op {
+ break
+ }
+ }
+ return newOp
+}
+
+// value decodes a JSON value from d.data[d.off:] into the value.
+// it updates d.off to point past the decoded value.
+func (d *decodeState) value(v reflect.Value) {
+ if !v.IsValid() {
+ _, rest, err := nextValue(d.data[d.off:], &d.nextscan)
+ if err != nil {
+ d.error(err)
+ }
+ d.off = len(d.data) - len(rest)
+
+ // d.scan thinks we're still at the beginning of the item.
+ // Feed in an empty string - the shortest, simplest value -
+ // so that it knows we got to the end of the value.
+ if d.scan.redo {
+ // rewind.
+ d.scan.redo = false
+ d.scan.step = stateBeginValue
+ }
+ d.scan.step(&d.scan, '"')
+ d.scan.step(&d.scan, '"')
+
+ n := len(d.scan.parseState)
+ if n > 0 && d.scan.parseState[n-1] == parseObjectKey {
+ // d.scan thinks we just read an object key; finish the object
+ d.scan.step(&d.scan, ':')
+ d.scan.step(&d.scan, '"')
+ d.scan.step(&d.scan, '"')
+ d.scan.step(&d.scan, '}')
+ }
+
+ return
+ }
+
+ switch op := d.scanWhile(scanSkipSpace); op {
+ default:
+ d.error(errPhase)
+
+ case scanBeginArray:
+ d.array(v)
+
+ case scanBeginObject:
+ d.object(v)
+
+ case scanBeginLiteral:
+ d.literal(v)
+
+ case scanBeginName:
+ d.name(v)
+ }
+}
+
+type unquotedValue struct{}
+
+// valueQuoted is like value but decodes a
+// quoted string literal or literal null into an interface value.
+// If it finds anything other than a quoted string literal or null,
+// valueQuoted returns unquotedValue{}.
+func (d *decodeState) valueQuoted() interface{} {
+ switch op := d.scanWhile(scanSkipSpace); op {
+ default:
+ d.error(errPhase)
+
+ case scanBeginArray:
+ d.array(reflect.Value{})
+
+ case scanBeginObject:
+ d.object(reflect.Value{})
+
+ case scanBeginName:
+ switch v := d.nameInterface().(type) {
+ case nil, string:
+ return v
+ }
+
+ case scanBeginLiteral:
+ switch v := d.literalInterface().(type) {
+ case nil, string:
+ return v
+ }
+ }
+ return unquotedValue{}
+}
+
+// indirect walks down v allocating pointers as needed,
+// until it gets to a non-pointer.
+// if it encounters an Unmarshaler, indirect stops and returns that.
+// if decodingNull is true, indirect stops at the last pointer so it can be set to nil.
+func (d *decodeState) indirect(v reflect.Value, decodingNull bool) (Unmarshaler, encoding.TextUnmarshaler, reflect.Value) {
+ // If v is a named type and is addressable,
+ // start with its address, so that if the type has pointer methods,
+ // we find them.
+ if v.Kind() != reflect.Ptr && v.Type().Name() != "" && v.CanAddr() {
+ v = v.Addr()
+ }
+ for {
+ // Load value from interface, but only if the result will be
+ // usefully addressable.
+ if v.Kind() == reflect.Interface && !v.IsNil() {
+ e := v.Elem()
+ if e.Kind() == reflect.Ptr && !e.IsNil() && (!decodingNull || e.Elem().Kind() == reflect.Ptr) {
+ v = e
+ continue
+ }
+ }
+
+ if v.Kind() != reflect.Ptr {
+ break
+ }
+
+ if v.Elem().Kind() != reflect.Ptr && decodingNull && v.CanSet() {
+ break
+ }
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ if v.Type().NumMethod() > 0 {
+ if u, ok := v.Interface().(Unmarshaler); ok {
+ return u, nil, v
+ }
+ if u, ok := v.Interface().(encoding.TextUnmarshaler); ok {
+ return nil, u, v
+ }
+ }
+ v = v.Elem()
+ }
+ return nil, nil, v
+}
+
+// array consumes an array from d.data[d.off-1:], decoding into the value v.
+// the first byte of the array ('[') has been read already.
+func (d *decodeState) array(v reflect.Value) {
+ // Check for unmarshaler.
+ u, ut, pv := d.indirect(v, false)
+ if u != nil {
+ d.off--
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+ if ut != nil {
+ d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)})
+ d.off--
+ d.next()
+ return
+ }
+
+ v = pv
+
+ // Check type of target.
+ switch v.Kind() {
+ case reflect.Interface:
+ if v.NumMethod() == 0 {
+ // Decoding into nil interface? Switch to non-reflect code.
+ v.Set(reflect.ValueOf(d.arrayInterface()))
+ return
+ }
+ // Otherwise it's invalid.
+ fallthrough
+ default:
+ d.saveError(&UnmarshalTypeError{"array", v.Type(), int64(d.off)})
+ d.off--
+ d.next()
+ return
+ case reflect.Array:
+ case reflect.Slice:
+ break
+ }
+
+ i := 0
+ for {
+ // Look ahead for ] - can only happen on first iteration.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndArray {
+ break
+ }
+
+ // Back up so d.value can have the byte we just read.
+ d.off--
+ d.scan.undo(op)
+
+ // Get element of array, growing if necessary.
+ if v.Kind() == reflect.Slice {
+ // Grow slice if necessary
+ if i >= v.Cap() {
+ newcap := v.Cap() + v.Cap()/2
+ if newcap < 4 {
+ newcap = 4
+ }
+ newv := reflect.MakeSlice(v.Type(), v.Len(), newcap)
+ reflect.Copy(newv, v)
+ v.Set(newv)
+ }
+ if i >= v.Len() {
+ v.SetLen(i + 1)
+ }
+ }
+
+ if i < v.Len() {
+ // Decode into element.
+ d.value(v.Index(i))
+ } else {
+ // Ran out of fixed array: skip.
+ d.value(reflect.Value{})
+ }
+ i++
+
+ // Next token must be , or ].
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndArray {
+ break
+ }
+ if op != scanArrayValue {
+ d.error(errPhase)
+ }
+ }
+
+ if i < v.Len() {
+ if v.Kind() == reflect.Array {
+ // Array. Zero the rest.
+ z := reflect.Zero(v.Type().Elem())
+ for ; i < v.Len(); i++ {
+ v.Index(i).Set(z)
+ }
+ } else {
+ v.SetLen(i)
+ }
+ }
+ if i == 0 && v.Kind() == reflect.Slice {
+ v.Set(reflect.MakeSlice(v.Type(), 0, 0))
+ }
+}
+
+var nullLiteral = []byte("null")
+var textUnmarshalerType = reflect.TypeOf(new(encoding.TextUnmarshaler)).Elem()
+
+// object consumes an object from d.data[d.off-1:], decoding into the value v.
+// the first byte ('{') of the object has been read already.
+func (d *decodeState) object(v reflect.Value) {
+ // Check for unmarshaler.
+ u, ut, pv := d.indirect(v, false)
+ if d.storeKeyed(pv) {
+ return
+ }
+ if u != nil {
+ d.off--
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+ if ut != nil {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+ v = pv
+
+ // Decoding into nil interface? Switch to non-reflect code.
+ if v.Kind() == reflect.Interface && v.NumMethod() == 0 {
+ v.Set(reflect.ValueOf(d.objectInterface()))
+ return
+ }
+
+ // Check type of target:
+ // struct or
+ // map[string]T or map[encoding.TextUnmarshaler]T
+ switch v.Kind() {
+ case reflect.Map:
+ // Map key must either have string kind or be an encoding.TextUnmarshaler.
+ t := v.Type()
+ if t.Key().Kind() != reflect.String &&
+ !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+ if v.IsNil() {
+ v.Set(reflect.MakeMap(t))
+ }
+ case reflect.Struct:
+
+ default:
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+
+ var mapElem reflect.Value
+
+ empty := true
+ for {
+ // Read opening " of string key or closing }.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndObject {
+ if !empty && !d.ext.trailingCommas {
+ d.syntaxError("beginning of object key string")
+ }
+ break
+ }
+ empty = false
+ if op == scanBeginName {
+ if !d.ext.unquotedKeys {
+ d.syntaxError("beginning of object key string")
+ }
+ } else if op != scanBeginLiteral {
+ d.error(errPhase)
+ }
+ unquotedKey := op == scanBeginName
+
+ // Read key.
+ start := d.off - 1
+ op = d.scanWhile(scanContinue)
+ item := d.data[start : d.off-1]
+ var key []byte
+ if unquotedKey {
+ key = item
+ // TODO Fix code below to quote item when necessary.
+ } else {
+ var ok bool
+ key, ok = unquoteBytes(item)
+ if !ok {
+ d.error(errPhase)
+ }
+ }
+
+ // Figure out field corresponding to key.
+ var subv reflect.Value
+ destring := false // whether the value is wrapped in a string to be decoded first
+
+ if v.Kind() == reflect.Map {
+ elemType := v.Type().Elem()
+ if !mapElem.IsValid() {
+ mapElem = reflect.New(elemType).Elem()
+ } else {
+ mapElem.Set(reflect.Zero(elemType))
+ }
+ subv = mapElem
+ } else {
+ var f *field
+ fields := cachedTypeFields(v.Type())
+ for i := range fields {
+ ff := &fields[i]
+ if bytes.Equal(ff.nameBytes, key) {
+ f = ff
+ break
+ }
+ if f == nil && ff.equalFold(ff.nameBytes, key) {
+ f = ff
+ }
+ }
+ if f != nil {
+ subv = v
+ destring = f.quoted
+ for _, i := range f.index {
+ if subv.Kind() == reflect.Ptr {
+ if subv.IsNil() {
+ subv.Set(reflect.New(subv.Type().Elem()))
+ }
+ subv = subv.Elem()
+ }
+ subv = subv.Field(i)
+ }
+ }
+ }
+
+ // Read : before value.
+ if op == scanSkipSpace {
+ op = d.scanWhile(scanSkipSpace)
+ }
+ if op != scanObjectKey {
+ d.error(errPhase)
+ }
+
+ // Read value.
+ if destring {
+ switch qv := d.valueQuoted().(type) {
+ case nil:
+ d.literalStore(nullLiteral, subv, false)
+ case string:
+ d.literalStore([]byte(qv), subv, true)
+ default:
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type()))
+ }
+ } else {
+ d.value(subv)
+ }
+
+ // Write value back to map;
+ // if using struct, subv points into struct already.
+ if v.Kind() == reflect.Map {
+ kt := v.Type().Key()
+ var kv reflect.Value
+ switch {
+ case kt.Kind() == reflect.String:
+ kv = reflect.ValueOf(key).Convert(v.Type().Key())
+ case reflect.PtrTo(kt).Implements(textUnmarshalerType):
+ kv = reflect.New(v.Type().Key())
+ d.literalStore(item, kv, true)
+ kv = kv.Elem()
+ default:
+ panic("json: Unexpected key type") // should never occur
+ }
+ v.SetMapIndex(kv, subv)
+ }
+
+ // Next token must be , or }.
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndObject {
+ break
+ }
+ if op != scanObjectValue {
+ d.error(errPhase)
+ }
+ }
+}
+
+// isNull returns whether there's a null literal at the provided offset.
+func (d *decodeState) isNull(off int) bool {
+ if off+4 >= len(d.data) || d.data[off] != 'n' || d.data[off+1] != 'u' || d.data[off+2] != 'l' || d.data[off+3] != 'l' {
+ return false
+ }
+ d.nextscan.reset()
+ for i, c := range d.data[off:] {
+ if i > 4 {
+ return false
+ }
+ switch d.nextscan.step(&d.nextscan, c) {
+ case scanContinue, scanBeginName:
+ continue
+ }
+ break
+ }
+ return true
+}
+
+// name consumes a const or function from d.data[d.off-1:], decoding into the value v.
+// the first byte of the function name has been read already.
+func (d *decodeState) name(v reflect.Value) {
+ if d.isNull(d.off-1) {
+ d.literal(v)
+ return
+ }
+
+ // Check for unmarshaler.
+ u, ut, pv := d.indirect(v, false)
+ if d.storeKeyed(pv) {
+ return
+ }
+ if u != nil {
+ d.off--
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+ if ut != nil {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over function in input
+ return
+ }
+ v = pv
+
+ // Decoding into nil interface? Switch to non-reflect code.
+ if v.Kind() == reflect.Interface && v.NumMethod() == 0 {
+ out := d.nameInterface()
+ if out == nil {
+ v.Set(reflect.Zero(v.Type()))
+ } else {
+ v.Set(reflect.ValueOf(out))
+ }
+ return
+ }
+
+ nameStart := d.off - 1
+
+ op := d.scanWhile(scanContinue)
+
+ name := d.data[nameStart : d.off-1]
+ if op != scanParam {
+ // Back up so the byte just read is consumed next.
+ d.off--
+ d.scan.undo(op)
+ if l, ok := d.convertLiteral(name); ok {
+ d.storeValue(v, l)
+ return
+ }
+ d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)})
+ }
+
+ funcName := string(name)
+ funcData := d.ext.funcs[funcName]
+ if funcData.key == "" {
+ d.error(fmt.Errorf("json: unknown function %q", funcName))
+ }
+
+ // Check type of target:
+ // struct or
+ // map[string]T or map[encoding.TextUnmarshaler]T
+ switch v.Kind() {
+ case reflect.Map:
+ // Map key must either have string kind or be an encoding.TextUnmarshaler.
+ t := v.Type()
+ if t.Key().Kind() != reflect.String &&
+ !reflect.PtrTo(t.Key()).Implements(textUnmarshalerType) {
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+ if v.IsNil() {
+ v.Set(reflect.MakeMap(t))
+ }
+ case reflect.Struct:
+
+ default:
+ d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
+ d.off--
+ d.next() // skip over { } in input
+ return
+ }
+
+ // TODO Fix case of func field as map.
+ //topv := v
+
+ // Figure out field corresponding to function.
+ key := []byte(funcData.key)
+ if v.Kind() == reflect.Map {
+ elemType := v.Type().Elem()
+ v = reflect.New(elemType).Elem()
+ } else {
+ var f *field
+ fields := cachedTypeFields(v.Type())
+ for i := range fields {
+ ff := &fields[i]
+ if bytes.Equal(ff.nameBytes, key) {
+ f = ff
+ break
+ }
+ if f == nil && ff.equalFold(ff.nameBytes, key) {
+ f = ff
+ }
+ }
+ if f != nil {
+ for _, i := range f.index {
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ v = v.Elem()
+ }
+ v = v.Field(i)
+ }
+ if v.Kind() == reflect.Ptr {
+ if v.IsNil() {
+ v.Set(reflect.New(v.Type().Elem()))
+ }
+ v = v.Elem()
+ }
+ }
+ }
+
+ // Check for unmarshaler on func field itself.
+ u, ut, pv = d.indirect(v, false)
+ if u != nil {
+ d.off = nameStart
+ err := u.UnmarshalJSON(d.next())
+ if err != nil {
+ d.error(err)
+ }
+ return
+ }
+
+ var mapElem reflect.Value
+
+ // Parse function arguments.
+ for i := 0; ; i++ {
+ // closing ) - can only happen on first iteration.
+ op := d.scanWhile(scanSkipSpace)
+ if op == scanEndParams {
+ break
+ }
+
+ // Back up so d.value can have the byte we just read.
+ d.off--
+ d.scan.undo(op)
+
+ if i >= len(funcData.args) {
+ d.error(fmt.Errorf("json: too many arguments for function %s", funcName))
+ }
+ key := []byte(funcData.args[i])
+
+ // Figure out field corresponding to key.
+ var subv reflect.Value
+ destring := false // whether the value is wrapped in a string to be decoded first
+
+ if v.Kind() == reflect.Map {
+ elemType := v.Type().Elem()
+ if !mapElem.IsValid() {
+ mapElem = reflect.New(elemType).Elem()
+ } else {
+ mapElem.Set(reflect.Zero(elemType))
+ }
+ subv = mapElem
+ } else {
+ var f *field
+ fields := cachedTypeFields(v.Type())
+ for i := range fields {
+ ff := &fields[i]
+ if bytes.Equal(ff.nameBytes, key) {
+ f = ff
+ break
+ }
+ if f == nil && ff.equalFold(ff.nameBytes, key) {
+ f = ff
+ }
+ }
+ if f != nil {
+ subv = v
+ destring = f.quoted
+ for _, i := range f.index {
+ if subv.Kind() == reflect.Ptr {
+ if subv.IsNil() {
+ subv.Set(reflect.New(subv.Type().Elem()))
+ }
+ subv = subv.Elem()
+ }
+ subv = subv.Field(i)
+ }
+ }
+ }
+
+ // Read value.
+ if destring {
+ switch qv := d.valueQuoted().(type) {
+ case nil:
+ d.literalStore(nullLiteral, subv, false)
+ case string:
+ d.literalStore([]byte(qv), subv, true)
+ default:
+ d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal unquoted value into %v", subv.Type()))
+ }
+ } else {
+ d.value(subv)
+ }
+
+ // Write value back to map;
+ // if using struct, subv points into struct already.
+ if v.Kind() == reflect.Map {
+ kt := v.Type().Key()
+ var kv reflect.Value
+ switch {
+ case kt.Kind() == reflect.String:
+ kv = reflect.ValueOf(key).Convert(v.Type().Key())
+ case reflect.PtrTo(kt).Implements(textUnmarshalerType):
+ kv = reflect.New(v.Type().Key())
+ d.literalStore(key, kv, true)
+ kv = kv.Elem()
+ default:
+ panic("json: Unexpected key type") // should never occur
+ }
+ v.SetMapIndex(kv, subv)
+ }
+
+ // Next token must be , or ).
+ op = d.scanWhile(scanSkipSpace)
+ if op == scanEndParams {
+ break
+ }
+ if op != scanParam {
+ d.error(errPhase)
+ }
+ }
+}
+
// keyed attempts to decode an object or function using a keyed doc extension,
// and returns the value and true on success, or nil and false otherwise.
//
// It peeks at the first key of the document with a private scanner
// (d.nextscan) so the main scanner's state is untouched when no keyed
// extension matches; only on a match does it consume input via d.next.
func (d *decodeState) keyed() (interface{}, bool) {
	if len(d.ext.keyed) == 0 {
		return nil, false
	}

	unquote := false

	// Look-ahead first key to check for a keyed document extension.
	d.nextscan.reset()
	var start, end int
	for i, c := range d.data[d.off-1:] {
		switch op := d.nextscan.step(&d.nextscan, c); op {
		case scanSkipSpace, scanContinue, scanBeginObject:
			continue
		case scanBeginLiteral, scanBeginName:
			// A literal key ("$ref") is quoted; a name key (Func(...)
			// syntax) is not, so remember which form we saw.
			unquote = op == scanBeginLiteral
			start = i
			continue
		}
		end = i
		break
	}

	// Raw bytes of the first key, relative to the look-ahead window.
	name := d.data[d.off-1+start : d.off-1+end]

	var key []byte
	var ok bool
	if unquote {
		key, ok = unquoteBytes(name)
		if !ok {
			d.error(errPhase)
		}
	} else {
		// Unquoted name: translate the function name to its document key.
		funcData, ok := d.ext.funcs[string(name)]
		if !ok {
			return nil, false
		}
		key = []byte(funcData.key)
	}

	decode, ok := d.ext.keyed[string(key)]
	if !ok {
		return nil, false
	}

	// Hand the whole document (as raw bytes) to the extension decoder.
	d.off--
	out, err := decode(d.next())
	if err != nil {
		d.error(err)
	}
	return out, true
}
+
+func (d *decodeState) storeKeyed(v reflect.Value) bool {
+ keyed, ok := d.keyed()
+ if !ok {
+ return false
+ }
+ d.storeValue(v, keyed)
+ return true
+}
+
// Canonical literal byte forms, reused by storeValue so extension-produced
// constants can be routed through literalStore without re-allocating.
var (
	trueBytes = []byte("true")
	falseBytes = []byte("false")
	nullBytes = []byte("null")
)
+
// storeValue stores a Go value produced by an extension decoder into v,
// converting it when the types differ, and records an UnmarshalTypeError
// when no assignment or conversion is possible.
func (d *decodeState) storeValue(v reflect.Value, from interface{}) {
	// Route the simple constants through literalStore so they get the
	// full set of target kinds (pointer, interface, map, slice) handled.
	switch from {
	case nil:
		d.literalStore(nullBytes, v, false)
		return
	case true:
		d.literalStore(trueBytes, v, false)
		return
	case false:
		d.literalStore(falseBytes, v, false)
		return
	}
	fromv := reflect.ValueOf(from)
	// Walk down non-nil pointers on both sides before assigning.
	for fromv.Kind() == reflect.Ptr && !fromv.IsNil() {
		fromv = fromv.Elem()
	}
	fromt := fromv.Type()
	for v.Kind() == reflect.Ptr && !v.IsNil() {
		v = v.Elem()
	}
	vt := v.Type()
	if fromt.AssignableTo(vt) {
		v.Set(fromv)
	} else if fromt.ConvertibleTo(vt) {
		v.Set(fromv.Convert(vt))
	} else {
		d.saveError(&UnmarshalTypeError{"object", v.Type(), int64(d.off)})
	}
}
+
+func (d *decodeState) convertLiteral(name []byte) (interface{}, bool) {
+ if len(name) == 0 {
+ return nil, false
+ }
+ switch name[0] {
+ case 't':
+ if bytes.Equal(name, trueBytes) {
+ return true, true
+ }
+ case 'f':
+ if bytes.Equal(name, falseBytes) {
+ return false, true
+ }
+ case 'n':
+ if bytes.Equal(name, nullBytes) {
+ return nil, true
+ }
+ }
+ if l, ok := d.ext.consts[string(name)]; ok {
+ return l, true
+ }
+ return nil, false
+}
+
// literal consumes a literal from d.data[d.off-1:], decoding into the value v.
// The first byte of the literal has been read already
// (that's how the caller knows it's a literal).
func (d *decodeState) literal(v reflect.Value) {
	// All bytes inside literal return scanContinue op code.
	start := d.off - 1
	op := d.scanWhile(scanContinue)

	// Scan read one byte too far; back up.
	d.off--
	d.scan.undo(op)

	// fromQuoted=false: this literal came straight from the input stream,
	// not from unwrapping a ",string" tagged field.
	d.literalStore(d.data[start:d.off], v, false)
}
+
+// convertNumber converts the number literal s to a float64 or a Number
+// depending on the setting of d.useNumber.
+func (d *decodeState) convertNumber(s string) (interface{}, error) {
+ if d.useNumber {
+ return Number(s), nil
+ }
+ f, err := strconv.ParseFloat(s, 64)
+ if err != nil {
+ return nil, &UnmarshalTypeError{"number " + s, reflect.TypeOf(0.0), int64(d.off)}
+ }
+ return f, nil
+}
+
+var numberType = reflect.TypeOf(Number(""))
+
// literalStore decodes a literal stored in item into v.
//
// fromQuoted indicates whether this literal came from unwrapping a
// string from the ",string" struct tag option. this is used only to
// produce more helpful error messages.
func (d *decodeState) literalStore(item []byte, v reflect.Value, fromQuoted bool) {
	// Check for unmarshaler.
	if len(item) == 0 {
		// Empty string given.
		d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
		return
	}
	// Only allocate through pointer chains when the literal is not null.
	wantptr := item[0] == 'n' // null
	u, ut, pv := d.indirect(v, wantptr)
	if u != nil {
		err := u.UnmarshalJSON(item)
		if err != nil {
			d.error(err)
		}
		return
	}
	if ut != nil {
		// encoding.TextUnmarshaler target: the literal must be a JSON string.
		if item[0] != '"' {
			if fromQuoted {
				d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
			} else {
				d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
			}
			return
		}
		s, ok := unquoteBytes(item)
		if !ok {
			if fromQuoted {
				d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
			} else {
				d.error(errPhase)
			}
		}
		err := ut.UnmarshalText(s)
		if err != nil {
			d.error(err)
		}
		return
	}

	v = pv

	// Dispatch on the first byte of the literal: n, t/f, ", or a digit/sign.
	switch c := item[0]; c {
	case 'n': // null
		switch v.Kind() {
		case reflect.Interface, reflect.Ptr, reflect.Map, reflect.Slice:
			v.Set(reflect.Zero(v.Type()))
			// otherwise, ignore null for primitives/string
		}
	case 't', 'f': // true, false
		value := c == 't'
		switch v.Kind() {
		default:
			if fromQuoted {
				d.saveError(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
			} else {
				d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)})
			}
		case reflect.Bool:
			v.SetBool(value)
		case reflect.Interface:
			// Only a plain interface{} can absorb a bool.
			if v.NumMethod() == 0 {
				v.Set(reflect.ValueOf(value))
			} else {
				d.saveError(&UnmarshalTypeError{"bool", v.Type(), int64(d.off)})
			}
		}

	case '"': // string
		s, ok := unquoteBytes(item)
		if !ok {
			if fromQuoted {
				d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
			} else {
				d.error(errPhase)
			}
		}
		switch v.Kind() {
		default:
			d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
		case reflect.Slice:
			// A JSON string stored into []byte is base64-decoded.
			if v.Type().Elem().Kind() != reflect.Uint8 {
				d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
				break
			}
			b := make([]byte, base64.StdEncoding.DecodedLen(len(s)))
			n, err := base64.StdEncoding.Decode(b, s)
			if err != nil {
				d.saveError(err)
				break
			}
			v.SetBytes(b[:n])
		case reflect.String:
			v.SetString(string(s))
		case reflect.Interface:
			if v.NumMethod() == 0 {
				v.Set(reflect.ValueOf(string(s)))
			} else {
				d.saveError(&UnmarshalTypeError{"string", v.Type(), int64(d.off)})
			}
		}

	default: // number
		if c != '-' && (c < '0' || c > '9') {
			if fromQuoted {
				d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
			} else {
				d.error(errPhase)
			}
		}
		s := string(item)
		switch v.Kind() {
		default:
			// A Number target keeps the raw digits; validate after storing
			// so the error message can show the offending literal.
			if v.Kind() == reflect.String && v.Type() == numberType {
				v.SetString(s)
				if !isValidNumber(s) {
					d.error(fmt.Errorf("json: invalid number literal, trying to unmarshal %q into Number", item))
				}
				break
			}
			if fromQuoted {
				d.error(fmt.Errorf("json: invalid use of ,string struct tag, trying to unmarshal %q into %v", item, v.Type()))
			} else {
				d.error(&UnmarshalTypeError{"number", v.Type(), int64(d.off)})
			}
		case reflect.Interface:
			n, err := d.convertNumber(s)
			if err != nil {
				d.saveError(err)
				break
			}
			if v.NumMethod() != 0 {
				d.saveError(&UnmarshalTypeError{"number", v.Type(), int64(d.off)})
				break
			}
			v.Set(reflect.ValueOf(n))

		case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
			n, err := strconv.ParseInt(s, 10, 64)
			if err != nil || v.OverflowInt(n) {
				d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
				break
			}
			v.SetInt(n)

		case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64, reflect.Uintptr:
			n, err := strconv.ParseUint(s, 10, 64)
			if err != nil || v.OverflowUint(n) {
				d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
				break
			}
			v.SetUint(n)

		case reflect.Float32, reflect.Float64:
			n, err := strconv.ParseFloat(s, v.Type().Bits())
			if err != nil || v.OverflowFloat(n) {
				d.saveError(&UnmarshalTypeError{"number " + s, v.Type(), int64(d.off)})
				break
			}
			v.SetFloat(n)
		}
	}
}
+
+// The xxxInterface routines build up a value to be stored
+// in an empty interface. They are not strictly necessary,
+// but they avoid the weight of reflection in this common case.
+
+// valueInterface is like value but returns interface{}
+func (d *decodeState) valueInterface() interface{} {
+ switch d.scanWhile(scanSkipSpace) {
+ default:
+ d.error(errPhase)
+ panic("unreachable")
+ case scanBeginArray:
+ return d.arrayInterface()
+ case scanBeginObject:
+ return d.objectInterface()
+ case scanBeginLiteral:
+ return d.literalInterface()
+ case scanBeginName:
+ return d.nameInterface()
+ }
+}
+
+func (d *decodeState) syntaxError(expected string) {
+ msg := fmt.Sprintf("invalid character '%c' looking for %s", d.data[d.off-1], expected)
+ d.error(&SyntaxError{msg, int64(d.off)})
+}
+
// arrayInterface is like array but returns []interface{}.
func (d *decodeState) arrayInterface() []interface{} {
	var v = make([]interface{}, 0)
	for {
		// Look ahead for ] - can only happen on first iteration.
		op := d.scanWhile(scanSkipSpace)
		if op == scanEndArray {
			// A ] seen at the top of a later iteration means the input had
			// a trailing comma, which is only legal with the extension on.
			if len(v) > 0 && !d.ext.trailingCommas {
				d.syntaxError("beginning of value")
			}
			break
		}

		// Back up so d.value can have the byte we just read.
		d.off--
		d.scan.undo(op)

		v = append(v, d.valueInterface())

		// Next token must be , or ].
		op = d.scanWhile(scanSkipSpace)
		if op == scanEndArray {
			break
		}
		if op != scanArrayValue {
			d.error(errPhase)
		}
	}
	return v
}
+
// objectInterface is like object but returns map[string]interface{}.
func (d *decodeState) objectInterface() interface{} {
	// Give keyed doc extensions first refusal on the whole document.
	v, ok := d.keyed()
	if ok {
		return v
	}

	m := make(map[string]interface{})
	for {
		// Read opening " of string key or closing }.
		op := d.scanWhile(scanSkipSpace)
		if op == scanEndObject {
			// A } at the top of a later iteration means a trailing comma,
			// legal only with the trailingCommas extension enabled.
			if len(m) > 0 && !d.ext.trailingCommas {
				d.syntaxError("beginning of object key string")
			}
			break
		}
		if op == scanBeginName {
			// Bare (unquoted) keys require the unquotedKeys extension.
			if !d.ext.unquotedKeys {
				d.syntaxError("beginning of object key string")
			}
		} else if op != scanBeginLiteral {
			d.error(errPhase)
		}
		unquotedKey := op == scanBeginName

		// Read string key.
		start := d.off - 1
		op = d.scanWhile(scanContinue)
		item := d.data[start : d.off-1]
		var key string
		if unquotedKey {
			key = string(item)
		} else {
			var ok bool
			key, ok = unquote(item)
			if !ok {
				d.error(errPhase)
			}
		}

		// Read : before value.
		if op == scanSkipSpace {
			op = d.scanWhile(scanSkipSpace)
		}
		if op != scanObjectKey {
			d.error(errPhase)
		}

		// Read value.
		m[key] = d.valueInterface()

		// Next token must be , or }.
		op = d.scanWhile(scanSkipSpace)
		if op == scanEndObject {
			break
		}
		if op != scanObjectValue {
			d.error(errPhase)
		}
	}
	return m
}
+
// literalInterface is like literal but returns an interface value.
func (d *decodeState) literalInterface() interface{} {
	// All bytes inside literal return scanContinue op code.
	start := d.off - 1
	op := d.scanWhile(scanContinue)

	// Scan read one byte too far; back up.
	d.off--
	d.scan.undo(op)
	item := d.data[start:d.off]

	// The first byte identifies the literal kind.
	switch c := item[0]; c {
	case 'n': // null
		return nil

	case 't', 'f': // true, false
		return c == 't'

	case '"': // string
		s, ok := unquote(item)
		if !ok {
			d.error(errPhase)
		}
		return s

	default: // number
		if c != '-' && (c < '0' || c > '9') {
			d.error(errPhase)
		}
		n, err := d.convertNumber(string(item))
		if err != nil {
			d.saveError(err)
		}
		return n
	}
}
+
// nameInterface is like function but returns map[string]interface{}.
// A function call Func(a, b) decodes to {funcData.key: {arg0: a, arg1: b}};
// a bare name decodes via the registered constants.
func (d *decodeState) nameInterface() interface{} {
	// Give keyed doc extensions first refusal on the whole document.
	v, ok := d.keyed()
	if ok {
		return v
	}

	nameStart := d.off - 1

	op := d.scanWhile(scanContinue)

	name := d.data[nameStart : d.off-1]
	if op != scanParam {
		// Not a call: treat the bare name as a registered constant.
		// Back up so the byte just read is consumed next.
		d.off--
		d.scan.undo(op)
		if l, ok := d.convertLiteral(name); ok {
			return l
		}
		d.error(&SyntaxError{fmt.Sprintf("json: unknown constant %q", name), int64(d.off)})
	}

	funcName := string(name)
	funcData := d.ext.funcs[funcName]
	if funcData.key == "" {
		d.error(fmt.Errorf("json: unknown function %q", funcName))
	}

	// Decode each positional argument under its registered argument name.
	m := make(map[string]interface{})
	for i := 0; ; i++ {
		// Look ahead for ) - can only happen on first iteration.
		op := d.scanWhile(scanSkipSpace)
		if op == scanEndParams {
			break
		}

		// Back up so d.value can have the byte we just read.
		d.off--
		d.scan.undo(op)

		if i >= len(funcData.args) {
			d.error(fmt.Errorf("json: too many arguments for function %s", funcName))
		}
		m[funcData.args[i]] = d.valueInterface()

		// Next token must be , or ).
		op = d.scanWhile(scanSkipSpace)
		if op == scanEndParams {
			break
		}
		if op != scanParam {
			d.error(errPhase)
		}
	}
	return map[string]interface{}{funcData.key: m}
}
+
// getu4 decodes \uXXXX from the beginning of s, returning the hex value,
// or it returns -1 if s does not begin with a well-formed escape.
func getu4(s []byte) rune {
	if len(s) < 6 || s[0] != '\\' || s[1] != 'u' {
		return -1
	}
	// Decode the four hex digits by hand (as later upstream encoding/json
	// does): strconv.ParseUint would force a string allocation for every
	// escape sequence in the input.
	var r rune
	for _, c := range s[2:6] {
		switch {
		case '0' <= c && c <= '9':
			c = c - '0'
		case 'a' <= c && c <= 'f':
			c = c - 'a' + 10
		case 'A' <= c && c <= 'F':
			c = c - 'A' + 10
		default:
			return -1
		}
		r = r*16 + rune(c)
	}
	return r
}
+
+// unquote converts a quoted JSON string literal s into an actual string t.
+// The rules are different than for Go, so cannot use strconv.Unquote.
+func unquote(s []byte) (t string, ok bool) {
+ s, ok = unquoteBytes(s)
+ t = string(s)
+ return
+}
+
// unquoteBytes strips the surrounding quotes from s and resolves every
// escape sequence, returning the decoded bytes and whether s was a valid
// quoted string. When no escaping or invalid UTF-8 is present the returned
// slice aliases s directly (no copy).
func unquoteBytes(s []byte) (t []byte, ok bool) {
	if len(s) < 2 || s[0] != '"' || s[len(s)-1] != '"' {
		return
	}
	s = s[1 : len(s)-1]

	// Check for unusual characters. If there are none,
	// then no unquoting is needed, so return a slice of the
	// original bytes.
	r := 0
	for r < len(s) {
		c := s[r]
		if c == '\\' || c == '"' || c < ' ' {
			break
		}
		if c < utf8.RuneSelf {
			r++
			continue
		}
		rr, size := utf8.DecodeRune(s[r:])
		if rr == utf8.RuneError && size == 1 {
			break
		}
		r += size
	}
	if r == len(s) {
		return s, true
	}

	// Slow path: copy the clean prefix, then decode the remainder
	// escape by escape into a fresh buffer b (write cursor w).
	b := make([]byte, len(s)+2*utf8.UTFMax)
	w := copy(b, s[0:r])
	for r < len(s) {
		// Out of room? Can only happen if s is full of
		// malformed UTF-8 and we're replacing each
		// byte with RuneError.
		if w >= len(b)-2*utf8.UTFMax {
			nb := make([]byte, (len(b)+utf8.UTFMax)*2)
			copy(nb, b[0:w])
			b = nb
		}
		switch c := s[r]; {
		case c == '\\':
			r++
			if r >= len(s) {
				return
			}
			switch s[r] {
			default:
				return
			case '"', '\\', '/', '\'':
				b[w] = s[r]
				r++
				w++
			case 'b':
				b[w] = '\b'
				r++
				w++
			case 'f':
				b[w] = '\f'
				r++
				w++
			case 'n':
				b[w] = '\n'
				r++
				w++
			case 'r':
				b[w] = '\r'
				r++
				w++
			case 't':
				b[w] = '\t'
				r++
				w++
			case 'u':
				// Rewind to the backslash so getu4 sees the full \uXXXX.
				r--
				rr := getu4(s[r:])
				if rr < 0 {
					return
				}
				r += 6
				if utf16.IsSurrogate(rr) {
					rr1 := getu4(s[r:])
					if dec := utf16.DecodeRune(rr, rr1); dec != unicode.ReplacementChar {
						// A valid pair; consume.
						r += 6
						w += utf8.EncodeRune(b[w:], dec)
						break
					}
					// Invalid surrogate; fall back to replacement rune.
					rr = unicode.ReplacementChar
				}
				w += utf8.EncodeRune(b[w:], rr)
			}

		// Quote, control characters are invalid.
		case c == '"', c < ' ':
			return

		// ASCII
		case c < utf8.RuneSelf:
			b[w] = c
			r++
			w++

		// Coerce to well-formed UTF-8.
		default:
			rr, size := utf8.DecodeRune(s[r:])
			r += size
			w += utf8.EncodeRune(b[w:], rr)
		}
	}
	return b[0:w], true
}
diff --git a/internal/json/decode_test.go b/internal/json/decode_test.go
new file mode 100644
index 000000000..30e46ca44
--- /dev/null
+++ b/internal/json/decode_test.go
@@ -0,0 +1,1512 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+package json
+
+import (
+ "bytes"
+ "encoding"
+ "errors"
+ "fmt"
+ "image"
+ "net"
+ "reflect"
+ "strings"
+ "testing"
+ "time"
+)
+
// T exercises basic field matching, including the "-" ignore tag on Z.
type T struct {
	X string
	Y int
	Z int `json:"-"`
}

// U exercises field renaming through a json tag.
type U struct {
	Alphabet string `json:"alpha"`
}

// V exercises decoding into interface{}, a sized integer, and Number.
type V struct {
	F1 interface{}
	F2 int32
	F3 Number
}
+
// ifaceNumAsFloat64/ifaceNumAsNumber are used to test unmarshaling with and
// without UseNumber
var ifaceNumAsFloat64 = map[string]interface{}{
	"k1": float64(1),
	"k2": "s",
	"k3": []interface{}{float64(1), float64(2.0), float64(3e-3)},
	"k4": map[string]interface{}{"kk1": "s", "kk2": float64(2)},
}

var ifaceNumAsNumber = map[string]interface{}{
	"k1": Number("1"),
	"k2": "s",
	"k3": []interface{}{Number("1"), Number("2.0"), Number("3e-3")},
	"k4": map[string]interface{}{"kk1": "s", "kk2": Number("2")},
}

// tx has only an unexported field, which Unmarshal must leave untouched.
type tx struct {
	x int
}
+
// A type that can unmarshal itself.

type unmarshaler struct {
	T bool
}

func (u *unmarshaler) UnmarshalJSON(b []byte) error {
	*u = unmarshaler{true} // All we need to see that UnmarshalJSON is called.
	return nil
}

// ustruct embeds an unmarshaler as a struct field.
type ustruct struct {
	M unmarshaler
}

// unmarshalerText round-trips through "A:B" text form.
type unmarshalerText struct {
	A, B string
}

// needed for re-marshaling tests
func (u unmarshalerText) MarshalText() ([]byte, error) {
	return []byte(u.A + ":" + u.B), nil
}

func (u *unmarshalerText) UnmarshalText(b []byte) error {
	pos := bytes.Index(b, []byte(":"))
	if pos == -1 {
		return errors.New("missing separator")
	}
	u.A, u.B = string(b[:pos]), string(b[pos+1:])
	return nil
}

// Compile-time check that unmarshalerText implements the interface.
var _ encoding.TextUnmarshaler = (*unmarshalerText)(nil)

type ustructText struct {
	M unmarshalerText
}
+
// Shared fixtures for the unmarshaler/TextUnmarshaler entries of
// unmarshalTests: decode targets and their expected decoded forms.
var (
	um0, um1 unmarshaler // target2 of unmarshaling
	ump = &um1
	umtrue = unmarshaler{true}
	umslice = []unmarshaler{{true}}
	umslicep = new([]unmarshaler)
	umstruct = ustruct{unmarshaler{true}}

	um0T, um1T unmarshalerText // target2 of unmarshaling
	umpType = &um1T
	umtrueXY = unmarshalerText{"x", "y"}
	umsliceXY = []unmarshalerText{{"x", "y"}}
	umslicepType = new([]unmarshalerText)
	umstructType = new(ustructText)
	umstructXY = ustructText{unmarshalerText{"x", "y"}}

	ummapType = map[unmarshalerText]bool{}
	ummapXY = map[unmarshalerText]bool{unmarshalerText{"x", "y"}: true}
)
+
// Test data structures for anonymous fields.

type Point struct {
	Z int
}

type Top struct {
	Level0 int
	Embed0
	*Embed0a
	*Embed0b `json:"e,omitempty"` // treated as named
	Embed0c `json:"-"` // ignored
	Loop
	Embed0p // has Point with X, Y, used
	Embed0q // has Point with Z, used
	embed // contains exported field
}

type Embed0 struct {
	Level1a int // overridden by Embed0a's Level1a with json tag
	Level1b int // used because Embed0a's Level1b is renamed
	Level1c int // used because Embed0a's Level1c is ignored
	Level1d int // annihilated by Embed0a's Level1d
	Level1e int `json:"x"` // annihilated by Embed0a.Level1e
}

type Embed0a struct {
	Level1a int `json:"Level1a,omitempty"`
	Level1b int `json:"LEVEL1B,omitempty"`
	Level1c int `json:"-"`
	Level1d int // annihilated by Embed0's Level1d
	Level1f int `json:"x"` // annihilated by Embed0's Level1e
}

// Embed0b/Embed0c share Embed0's fields but are embedded under a
// name ("e") and an ignore tag respectively in Top.
type Embed0b Embed0

type Embed0c Embed0

type Embed0p struct {
	image.Point
}

type Embed0q struct {
	Point
}

type embed struct {
	Q int
}

// Loop embeds a pointer to itself to test cyclic embedding.
type Loop struct {
	Loop1 int `json:",omitempty"`
	Loop2 int `json:",omitempty"`
	*Loop
}

// From reflect test:
// The X in S6 and S7 annihilate, but they also block the X in S8.S9.
type S5 struct {
	S6
	S7
	S8
}

type S6 struct {
	X int
}

type S7 S6

type S8 struct {
	S9
}

type S9 struct {
	X int
	Y int
}

// From reflect test:
// The X in S11.S6 and S12.S6 annihilate, but they also block the X in S13.S8.S9.
type S10 struct {
	S11
	S12
	S13
}

type S11 struct {
	S6
}

type S12 struct {
	S6
}

type S13 struct {
	S8
}

// unmarshalTest is one row of unmarshalTests: decode in into a fresh
// copy of *ptr, expecting value out and error err.
type unmarshalTest struct {
	in string
	ptr interface{}
	out interface{}
	err error
	useNumber bool
}

type Ambig struct {
	// Given "hello", the first match should win.
	First int `json:"HELLO"`
	Second int `json:"Hello"`
}

type XYZ struct {
	X interface{}
	Y interface{}
	Z interface{}
}
+
// sliceAddr returns the address of a copy of x, so test tables can take
// pointers to slice literals.
func sliceAddr(x []int) *[]int {
	return &x
}

// mapAddr returns the address of a copy of x, so test tables can take
// pointers to map literals.
func mapAddr(x map[string]int) *map[string]int {
	return &x
}
+
// unmarshalTests drives TestUnmarshal: each entry decodes in into a fresh
// copy of *ptr and compares the result against out and the error against err.
var unmarshalTests = []unmarshalTest{
	// basic types
	{in: `true`, ptr: new(bool), out: true},
	{in: `1`, ptr: new(int), out: 1},
	{in: `1.2`, ptr: new(float64), out: 1.2},
	{in: `-5`, ptr: new(int16), out: int16(-5)},
	{in: `2`, ptr: new(Number), out: Number("2"), useNumber: true},
	{in: `2`, ptr: new(Number), out: Number("2")},
	{in: `2`, ptr: new(interface{}), out: float64(2.0)},
	{in: `2`, ptr: new(interface{}), out: Number("2"), useNumber: true},
	{in: `"a\u1234"`, ptr: new(string), out: "a\u1234"},
	{in: `"http:\/\/"`, ptr: new(string), out: "http://"},
	{in: `"g-clef: \uD834\uDD1E"`, ptr: new(string), out: "g-clef: \U0001D11E"},
	{in: `"invalid: \uD834x\uDD1E"`, ptr: new(string), out: "invalid: \uFFFDx\uFFFD"},
	{in: "null", ptr: new(interface{}), out: nil},
	{in: `{"X": [1,2,3], "Y": 4}`, ptr: new(T), out: T{Y: 4}, err: &UnmarshalTypeError{"array", reflect.TypeOf(""), 7}},
	{in: `{"x": 1}`, ptr: new(tx), out: tx{}},
	{in: `{"F1":1,"F2":2,"F3":3}`, ptr: new(V), out: V{F1: float64(1), F2: int32(2), F3: Number("3")}},
	{in: `{"F1":1,"F2":2,"F3":3}`, ptr: new(V), out: V{F1: Number("1"), F2: int32(2), F3: Number("3")}, useNumber: true},
	{in: `{"k1":1,"k2":"s","k3":[1,2.0,3e-3],"k4":{"kk1":"s","kk2":2}}`, ptr: new(interface{}), out: ifaceNumAsFloat64},
	{in: `{"k1":1,"k2":"s","k3":[1,2.0,3e-3],"k4":{"kk1":"s","kk2":2}}`, ptr: new(interface{}), out: ifaceNumAsNumber, useNumber: true},

	// raw values with whitespace
	{in: "\n true ", ptr: new(bool), out: true},
	{in: "\t 1 ", ptr: new(int), out: 1},
	{in: "\r 1.2 ", ptr: new(float64), out: 1.2},
	{in: "\t -5 \n", ptr: new(int16), out: int16(-5)},
	{in: "\t \"a\\u1234\" \n", ptr: new(string), out: "a\u1234"},

	// Z has a "-" tag.
	{in: `{"Y": 1, "Z": 2}`, ptr: new(T), out: T{Y: 1}},

	{in: `{"alpha": "abc", "alphabet": "xyz"}`, ptr: new(U), out: U{Alphabet: "abc"}},
	{in: `{"alpha": "abc"}`, ptr: new(U), out: U{Alphabet: "abc"}},
	{in: `{"alphabet": "xyz"}`, ptr: new(U), out: U{}},

	// syntax errors
	{in: `{"X": "foo", "Y"}`, err: &SyntaxError{"invalid character '}' after object key", 17}},
	{in: `[1, 2, 3+]`, err: &SyntaxError{"invalid character '+' after array element", 9}},
	{in: `{"X":12x}`, err: &SyntaxError{"invalid character 'x' after object key:value pair", 8}, useNumber: true},

	// raw value errors
	{in: "\x01 42", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
	{in: " 42 \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 5}},
	{in: "\x01 true", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
	{in: " false \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 8}},
	{in: "\x01 1.2", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
	{in: " 3.4 \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 6}},
	{in: "\x01 \"string\"", err: &SyntaxError{"invalid character '\\x01' looking for beginning of value", 1}},
	{in: " \"string\" \x01", err: &SyntaxError{"invalid character '\\x01' after top-level value", 11}},

	// array tests
	{in: `[1, 2, 3]`, ptr: new([3]int), out: [3]int{1, 2, 3}},
	{in: `[1, 2, 3]`, ptr: new([1]int), out: [1]int{1}},
	{in: `[1, 2, 3]`, ptr: new([5]int), out: [5]int{1, 2, 3, 0, 0}},

	// empty array to interface test
	{in: `[]`, ptr: new([]interface{}), out: []interface{}{}},
	{in: `null`, ptr: new([]interface{}), out: []interface{}(nil)},
	{in: `{"T":[]}`, ptr: new(map[string]interface{}), out: map[string]interface{}{"T": []interface{}{}}},
	{in: `{"T":null}`, ptr: new(map[string]interface{}), out: map[string]interface{}{"T": interface{}(nil)}},

	// composite tests
	{in: allValueIndent, ptr: new(All), out: allValue},
	{in: allValueCompact, ptr: new(All), out: allValue},
	{in: allValueIndent, ptr: new(*All), out: &allValue},
	{in: allValueCompact, ptr: new(*All), out: &allValue},
	{in: pallValueIndent, ptr: new(All), out: pallValue},
	{in: pallValueCompact, ptr: new(All), out: pallValue},
	{in: pallValueIndent, ptr: new(*All), out: &pallValue},
	{in: pallValueCompact, ptr: new(*All), out: &pallValue},

	// unmarshal interface test
	{in: `{"T":false}`, ptr: &um0, out: umtrue}, // use "false" so test will fail if custom unmarshaler is not called
	{in: `{"T":false}`, ptr: &ump, out: &umtrue},
	{in: `[{"T":false}]`, ptr: &umslice, out: umslice},
	{in: `[{"T":false}]`, ptr: &umslicep, out: &umslice},
	{in: `{"M":{"T":"x:y"}}`, ptr: &umstruct, out: umstruct},

	// UnmarshalText interface test
	{in: `"x:y"`, ptr: &um0T, out: umtrueXY},
	{in: `"x:y"`, ptr: &umpType, out: &umtrueXY},
	{in: `["x:y"]`, ptr: &umsliceXY, out: umsliceXY},
	{in: `["x:y"]`, ptr: &umslicepType, out: &umsliceXY},
	{in: `{"M":"x:y"}`, ptr: umstructType, out: umstructXY},

	// Map keys can be encoding.TextUnmarshalers
	{in: `{"x:y":true}`, ptr: &ummapType, out: ummapXY},
	// If multiple values for the same key exists, only the most recent value is used.
	{in: `{"x:y":false,"x:y":true}`, ptr: &ummapType, out: ummapXY},

	// Overwriting of data.
	// This is different from package xml, but it's what we've always done.
	// Now documented and tested.
	{in: `[2]`, ptr: sliceAddr([]int{1}), out: []int{2}},
	{in: `{"key": 2}`, ptr: mapAddr(map[string]int{"old": 0, "key": 1}), out: map[string]int{"key": 2}},

	{
		in: `{
			"Level0": 1,
			"Level1b": 2,
			"Level1c": 3,
			"x": 4,
			"Level1a": 5,
			"LEVEL1B": 6,
			"e": {
				"Level1a": 8,
				"Level1b": 9,
				"Level1c": 10,
				"Level1d": 11,
				"x": 12
			},
			"Loop1": 13,
			"Loop2": 14,
			"X": 15,
			"Y": 16,
			"Z": 17,
			"Q": 18
		}`,
		ptr: new(Top),
		out: Top{
			Level0: 1,
			Embed0: Embed0{
				Level1b: 2,
				Level1c: 3,
			},
			Embed0a: &Embed0a{
				Level1a: 5,
				Level1b: 6,
			},
			Embed0b: &Embed0b{
				Level1a: 8,
				Level1b: 9,
				Level1c: 10,
				Level1d: 11,
				Level1e: 12,
			},
			Loop: Loop{
				Loop1: 13,
				Loop2: 14,
			},
			Embed0p: Embed0p{
				Point: image.Point{X: 15, Y: 16},
			},
			Embed0q: Embed0q{
				Point: Point{Z: 17},
			},
			embed: embed{
				Q: 18,
			},
		},
	},
	{
		in: `{"hello": 1}`,
		ptr: new(Ambig),
		out: Ambig{First: 1},
	},

	{
		in: `{"X": 1,"Y":2}`,
		ptr: new(S5),
		out: S5{S8: S8{S9: S9{Y: 2}}},
	},
	{
		in: `{"X": 1,"Y":2}`,
		ptr: new(S10),
		out: S10{S13: S13{S8: S8{S9: S9{Y: 2}}}},
	},

	// invalid UTF-8 is coerced to valid UTF-8.
	{
		in: "\"hello\xffworld\"",
		ptr: new(string),
		out: "hello\ufffdworld",
	},
	{
		in: "\"hello\xc2\xc2world\"",
		ptr: new(string),
		out: "hello\ufffd\ufffdworld",
	},
	{
		in: "\"hello\xc2\xffworld\"",
		ptr: new(string),
		out: "hello\ufffd\ufffdworld",
	},
	{
		in: "\"hello\\ud800world\"",
		ptr: new(string),
		out: "hello\ufffdworld",
	},
	{
		in: "\"hello\\ud800\\ud800world\"",
		ptr: new(string),
		out: "hello\ufffd\ufffdworld",
	},
	{
		in: "\"hello\\ud800\\ud800world\"",
		ptr: new(string),
		out: "hello\ufffd\ufffdworld",
	},
	{
		in: "\"hello\xed\xa0\x80\xed\xb0\x80world\"",
		ptr: new(string),
		out: "hello\ufffd\ufffd\ufffd\ufffd\ufffd\ufffdworld",
	},

	// Used to be issue 8305, but time.Time implements encoding.TextUnmarshaler so this works now.
	{
		in: `{"2009-11-10T23:00:00Z": "hello world"}`,
		ptr: &map[time.Time]string{},
		out: map[time.Time]string{time.Date(2009, 11, 10, 23, 0, 0, 0, time.UTC): "hello world"},
	},

	// issue 8305
	{
		in: `{"2009-11-10T23:00:00Z": "hello world"}`,
		ptr: &map[Point]string{},
		err: &UnmarshalTypeError{"object", reflect.TypeOf(map[Point]string{}), 1},
	},
	{
		in: `{"asdf": "hello world"}`,
		ptr: &map[unmarshaler]string{},
		err: &UnmarshalTypeError{"object", reflect.TypeOf(map[unmarshaler]string{}), 1},
	},
}
+
// TestMarshal checks that the composite fixtures (allValue/pallValue)
// marshal to their expected compact encodings.
func TestMarshal(t *testing.T) {
	b, err := Marshal(allValue)
	if err != nil {
		t.Fatalf("Marshal allValue: %v", err)
	}
	if string(b) != allValueCompact {
		t.Errorf("Marshal allValueCompact")
		diff(t, b, []byte(allValueCompact))
		return
	}

	b, err = Marshal(pallValue)
	if err != nil {
		t.Fatalf("Marshal pallValue: %v", err)
	}
	if string(b) != pallValueCompact {
		t.Errorf("Marshal pallValueCompact")
		diff(t, b, []byte(pallValueCompact))
		return
	}
}
+
// badUTF8 pairs strings containing invalid UTF-8 with their expected
// JSON encodings (bad bytes replaced by \ufffd).
var badUTF8 = []struct {
	in, out string
}{
	{"hello\xffworld", `"hello\ufffdworld"`},
	{"", `""`},
	{"\xff", `"\ufffd"`},
	{"\xff\xff", `"\ufffd\ufffd"`},
	{"a\xffb", `"a\ufffdb"`},
	{"\xe6\x97\xa5\xe6\x9c\xac\xff\xaa\x9e", `"日本\ufffd\ufffd\ufffd"`},
}
+
+func TestMarshalBadUTF8(t *testing.T) {
+ for _, tt := range badUTF8 {
+ b, err := Marshal(tt.in)
+ if string(b) != tt.out || err != nil {
+ t.Errorf("Marshal(%q) = %#q, %v, want %#q, nil", tt.in, b, err, tt.out)
+ }
+ }
+}
+
+func TestMarshalNumberZeroVal(t *testing.T) {
+ var n Number
+ out, err := Marshal(n)
+ if err != nil {
+ t.Fatal(err)
+ }
+ outStr := string(out)
+ if outStr != "0" {
+ t.Fatalf("Invalid zero val for Number: %q", outStr)
+ }
+}
+
// TestMarshalEmbeds checks field ordering and visibility rules when
// marshaling a struct with many levels of anonymous embedding.
func TestMarshalEmbeds(t *testing.T) {
	top := &Top{
		Level0: 1,
		Embed0: Embed0{
			Level1b: 2,
			Level1c: 3,
		},
		Embed0a: &Embed0a{
			Level1a: 5,
			Level1b: 6,
		},
		Embed0b: &Embed0b{
			Level1a: 8,
			Level1b: 9,
			Level1c: 10,
			Level1d: 11,
			Level1e: 12,
		},
		Loop: Loop{
			Loop1: 13,
			Loop2: 14,
		},
		Embed0p: Embed0p{
			Point: image.Point{X: 15, Y: 16},
		},
		Embed0q: Embed0q{
			Point: Point{Z: 17},
		},
		embed: embed{
			Q: 18,
		},
	}
	b, err := Marshal(top)
	if err != nil {
		t.Fatal(err)
	}
	want := "{\"Level0\":1,\"Level1b\":2,\"Level1c\":3,\"Level1a\":5,\"LEVEL1B\":6,\"e\":{\"Level1a\":8,\"Level1b\":9,\"Level1c\":10,\"Level1d\":11,\"x\":12},\"Loop1\":13,\"Loop2\":14,\"X\":15,\"Y\":16,\"Z\":17,\"Q\":18}"
	if string(b) != want {
		t.Errorf("Wrong marshal result.\n got: %q\nwant: %q", b, want)
	}
}
+
// TestUnmarshal runs the unmarshalTests table: each input is validity
// checked, decoded into a fresh target, compared against the expected
// value/error, and (when successful) round-tripped through Marshal.
func TestUnmarshal(t *testing.T) {
	for i, tt := range unmarshalTests {
		var scan scanner
		in := []byte(tt.in)
		if err := checkValid(in, &scan); err != nil {
			if !reflect.DeepEqual(err, tt.err) {
				t.Errorf("#%d: checkValid: %#v", i, err)
				continue
			}
		}
		// Entries without a target only test checkValid above.
		if tt.ptr == nil {
			continue
		}

		// v = new(right-type)
		v := reflect.New(reflect.TypeOf(tt.ptr).Elem())
		dec := NewDecoder(bytes.NewReader(in))
		if tt.useNumber {
			dec.UseNumber()
		}
		if err := dec.Decode(v.Interface()); !reflect.DeepEqual(err, tt.err) {
			t.Errorf("#%d: %v, want %v", i, err, tt.err)
			continue
		} else if err != nil {
			// Expected error matched; nothing more to compare.
			continue
		}
		if !reflect.DeepEqual(v.Elem().Interface(), tt.out) {
			t.Errorf("#%d: mismatch\nhave: %#+v\nwant: %#+v", i, v.Elem().Interface(), tt.out)
			data, _ := Marshal(v.Elem().Interface())
			println(string(data))
			data, _ = Marshal(tt.out)
			println(string(data))
			continue
		}

		// Check round trip.
		if tt.err == nil {
			enc, err := Marshal(v.Interface())
			if err != nil {
				t.Errorf("#%d: error re-marshaling: %v", i, err)
				continue
			}
			vv := reflect.New(reflect.TypeOf(tt.ptr).Elem())
			dec = NewDecoder(bytes.NewReader(enc))
			if tt.useNumber {
				dec.UseNumber()
			}
			if err := dec.Decode(vv.Interface()); err != nil {
				t.Errorf("#%d: error re-unmarshaling %#q: %v", i, enc, err)
				continue
			}
			if !reflect.DeepEqual(v.Elem().Interface(), vv.Elem().Interface()) {
				t.Errorf("#%d: mismatch\nhave: %#+v\nwant: %#+v", i, v.Elem().Interface(), vv.Elem().Interface())
				t.Errorf(" In: %q", strings.Map(noSpace, string(in)))
				t.Errorf("Marshal: %q", strings.Map(noSpace, string(enc)))
				continue
			}
		}
	}
}
+
+func TestUnmarshalMarshal(t *testing.T) {
+ initBig()
+ var v interface{}
+ if err := Unmarshal(jsonBig, &v); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ b, err := Marshal(v)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ if !bytes.Equal(jsonBig, b) {
+ t.Errorf("Marshal jsonBig")
+ diff(t, b, jsonBig)
+ return
+ }
+}
+
+var numberTests = []struct {
+ in string
+ i int64
+ intErr string
+ f float64
+ floatErr string
+}{
+ {in: "-1.23e1", intErr: "strconv.ParseInt: parsing \"-1.23e1\": invalid syntax", f: -1.23e1},
+ {in: "-12", i: -12, f: -12.0},
+ {in: "1e1000", intErr: "strconv.ParseInt: parsing \"1e1000\": invalid syntax", floatErr: "strconv.ParseFloat: parsing \"1e1000\": value out of range"},
+}
+
+// Independent of Decode, basic coverage of the accessors in Number
+func TestNumberAccessors(t *testing.T) {
+ for _, tt := range numberTests {
+ n := Number(tt.in)
+ if s := n.String(); s != tt.in {
+ t.Errorf("Number(%q).String() is %q", tt.in, s)
+ }
+ if i, err := n.Int64(); err == nil && tt.intErr == "" && i != tt.i {
+ t.Errorf("Number(%q).Int64() is %d", tt.in, i)
+ } else if (err == nil && tt.intErr != "") || (err != nil && err.Error() != tt.intErr) {
+ t.Errorf("Number(%q).Int64() wanted error %q but got: %v", tt.in, tt.intErr, err)
+ }
+ if f, err := n.Float64(); err == nil && tt.floatErr == "" && f != tt.f {
+ t.Errorf("Number(%q).Float64() is %g", tt.in, f)
+ } else if (err == nil && tt.floatErr != "") || (err != nil && err.Error() != tt.floatErr) {
+ t.Errorf("Number(%q).Float64() wanted error %q but got: %v", tt.in, tt.floatErr, err)
+ }
+ }
+}
+
+func TestLargeByteSlice(t *testing.T) {
+ s0 := make([]byte, 2000)
+ for i := range s0 {
+ s0[i] = byte(i)
+ }
+ b, err := Marshal(s0)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+ var s1 []byte
+ if err := Unmarshal(b, &s1); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if !bytes.Equal(s0, s1) {
+ t.Errorf("Marshal large byte slice")
+ diff(t, s0, s1)
+ }
+}
+
+type Xint struct {
+ X int
+}
+
+func TestUnmarshalInterface(t *testing.T) {
+ var xint Xint
+ var i interface{} = &xint
+ if err := Unmarshal([]byte(`{"X":1}`), &i); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if xint.X != 1 {
+ t.Fatalf("Did not write to xint")
+ }
+}
+
+func TestUnmarshalPtrPtr(t *testing.T) {
+ var xint Xint
+ pxint := &xint
+ if err := Unmarshal([]byte(`{"X":1}`), &pxint); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if xint.X != 1 {
+ t.Fatalf("Did not write to xint")
+ }
+}
+
+func TestEscape(t *testing.T) {
+	const input = `"foobar"<html>` + " [\u2028 \u2029]"
+ const expected = `"\"foobar\"\u003chtml\u003e [\u2028 \u2029]"`
+ b, err := Marshal(input)
+ if err != nil {
+ t.Fatalf("Marshal error: %v", err)
+ }
+ if s := string(b); s != expected {
+ t.Errorf("Encoding of [%s]:\n got [%s]\nwant [%s]", input, s, expected)
+ }
+}
+
+// WrongString is a struct that's misusing the ,string modifier.
+type WrongString struct {
+ Message string `json:"result,string"`
+}
+
+type wrongStringTest struct {
+ in, err string
+}
+
+var wrongStringTests = []wrongStringTest{
+ {`{"result":"x"}`, `json: invalid use of ,string struct tag, trying to unmarshal "x" into string`},
+ {`{"result":"foo"}`, `json: invalid use of ,string struct tag, trying to unmarshal "foo" into string`},
+ {`{"result":"123"}`, `json: invalid use of ,string struct tag, trying to unmarshal "123" into string`},
+ {`{"result":123}`, `json: invalid use of ,string struct tag, trying to unmarshal unquoted value into string`},
+}
+
+// If people misuse the ,string modifier, the error message should be
+// helpful, telling the user that they're doing it wrong.
+func TestErrorMessageFromMisusedString(t *testing.T) {
+ for n, tt := range wrongStringTests {
+ r := strings.NewReader(tt.in)
+ var s WrongString
+ err := NewDecoder(r).Decode(&s)
+ got := fmt.Sprintf("%v", err)
+ if got != tt.err {
+ t.Errorf("%d. got err = %q, want %q", n, got, tt.err)
+ }
+ }
+}
+
+func noSpace(c rune) rune {
+ if isSpace(byte(c)) { //only used for ascii
+ return -1
+ }
+ return c
+}
+
+type All struct {
+ Bool bool
+ Int int
+ Int8 int8
+ Int16 int16
+ Int32 int32
+ Int64 int64
+ Uint uint
+ Uint8 uint8
+ Uint16 uint16
+ Uint32 uint32
+ Uint64 uint64
+ Uintptr uintptr
+ Float32 float32
+ Float64 float64
+
+ Foo string `json:"bar"`
+ Foo2 string `json:"bar2,dummyopt"`
+
+ IntStr int64 `json:",string"`
+
+ PBool *bool
+ PInt *int
+ PInt8 *int8
+ PInt16 *int16
+ PInt32 *int32
+ PInt64 *int64
+ PUint *uint
+ PUint8 *uint8
+ PUint16 *uint16
+ PUint32 *uint32
+ PUint64 *uint64
+ PUintptr *uintptr
+ PFloat32 *float32
+ PFloat64 *float64
+
+ String string
+ PString *string
+
+ Map map[string]Small
+ MapP map[string]*Small
+ PMap *map[string]Small
+ PMapP *map[string]*Small
+
+ EmptyMap map[string]Small
+ NilMap map[string]Small
+
+ Slice []Small
+ SliceP []*Small
+ PSlice *[]Small
+ PSliceP *[]*Small
+
+ EmptySlice []Small
+ NilSlice []Small
+
+ StringSlice []string
+ ByteSlice []byte
+
+ Small Small
+ PSmall *Small
+ PPSmall **Small
+
+ Interface interface{}
+ PInterface *interface{}
+
+ unexported int
+}
+
+type Small struct {
+ Tag string
+}
+
+var allValue = All{
+ Bool: true,
+ Int: 2,
+ Int8: 3,
+ Int16: 4,
+ Int32: 5,
+ Int64: 6,
+ Uint: 7,
+ Uint8: 8,
+ Uint16: 9,
+ Uint32: 10,
+ Uint64: 11,
+ Uintptr: 12,
+ Float32: 14.1,
+ Float64: 15.1,
+ Foo: "foo",
+ Foo2: "foo2",
+ IntStr: 42,
+ String: "16",
+ Map: map[string]Small{
+ "17": {Tag: "tag17"},
+ "18": {Tag: "tag18"},
+ },
+ MapP: map[string]*Small{
+ "19": {Tag: "tag19"},
+ "20": nil,
+ },
+ EmptyMap: map[string]Small{},
+ Slice: []Small{{Tag: "tag20"}, {Tag: "tag21"}},
+ SliceP: []*Small{{Tag: "tag22"}, nil, {Tag: "tag23"}},
+ EmptySlice: []Small{},
+ StringSlice: []string{"str24", "str25", "str26"},
+ ByteSlice: []byte{27, 28, 29},
+ Small: Small{Tag: "tag30"},
+ PSmall: &Small{Tag: "tag31"},
+ Interface: 5.2,
+}
+
+var pallValue = All{
+ PBool: &allValue.Bool,
+ PInt: &allValue.Int,
+ PInt8: &allValue.Int8,
+ PInt16: &allValue.Int16,
+ PInt32: &allValue.Int32,
+ PInt64: &allValue.Int64,
+ PUint: &allValue.Uint,
+ PUint8: &allValue.Uint8,
+ PUint16: &allValue.Uint16,
+ PUint32: &allValue.Uint32,
+ PUint64: &allValue.Uint64,
+ PUintptr: &allValue.Uintptr,
+ PFloat32: &allValue.Float32,
+ PFloat64: &allValue.Float64,
+ PString: &allValue.String,
+ PMap: &allValue.Map,
+ PMapP: &allValue.MapP,
+ PSlice: &allValue.Slice,
+ PSliceP: &allValue.SliceP,
+ PPSmall: &allValue.PSmall,
+ PInterface: &allValue.Interface,
+}
+
+var allValueIndent = `{
+ "Bool": true,
+ "Int": 2,
+ "Int8": 3,
+ "Int16": 4,
+ "Int32": 5,
+ "Int64": 6,
+ "Uint": 7,
+ "Uint8": 8,
+ "Uint16": 9,
+ "Uint32": 10,
+ "Uint64": 11,
+ "Uintptr": 12,
+ "Float32": 14.1,
+ "Float64": 15.1,
+ "bar": "foo",
+ "bar2": "foo2",
+ "IntStr": "42",
+ "PBool": null,
+ "PInt": null,
+ "PInt8": null,
+ "PInt16": null,
+ "PInt32": null,
+ "PInt64": null,
+ "PUint": null,
+ "PUint8": null,
+ "PUint16": null,
+ "PUint32": null,
+ "PUint64": null,
+ "PUintptr": null,
+ "PFloat32": null,
+ "PFloat64": null,
+ "String": "16",
+ "PString": null,
+ "Map": {
+ "17": {
+ "Tag": "tag17"
+ },
+ "18": {
+ "Tag": "tag18"
+ }
+ },
+ "MapP": {
+ "19": {
+ "Tag": "tag19"
+ },
+ "20": null
+ },
+ "PMap": null,
+ "PMapP": null,
+ "EmptyMap": {},
+ "NilMap": null,
+ "Slice": [
+ {
+ "Tag": "tag20"
+ },
+ {
+ "Tag": "tag21"
+ }
+ ],
+ "SliceP": [
+ {
+ "Tag": "tag22"
+ },
+ null,
+ {
+ "Tag": "tag23"
+ }
+ ],
+ "PSlice": null,
+ "PSliceP": null,
+ "EmptySlice": [],
+ "NilSlice": null,
+ "StringSlice": [
+ "str24",
+ "str25",
+ "str26"
+ ],
+ "ByteSlice": "Gxwd",
+ "Small": {
+ "Tag": "tag30"
+ },
+ "PSmall": {
+ "Tag": "tag31"
+ },
+ "PPSmall": null,
+ "Interface": 5.2,
+ "PInterface": null
+}`
+
+var allValueCompact = strings.Map(noSpace, allValueIndent)
+
+var pallValueIndent = `{
+ "Bool": false,
+ "Int": 0,
+ "Int8": 0,
+ "Int16": 0,
+ "Int32": 0,
+ "Int64": 0,
+ "Uint": 0,
+ "Uint8": 0,
+ "Uint16": 0,
+ "Uint32": 0,
+ "Uint64": 0,
+ "Uintptr": 0,
+ "Float32": 0,
+ "Float64": 0,
+ "bar": "",
+ "bar2": "",
+ "IntStr": "0",
+ "PBool": true,
+ "PInt": 2,
+ "PInt8": 3,
+ "PInt16": 4,
+ "PInt32": 5,
+ "PInt64": 6,
+ "PUint": 7,
+ "PUint8": 8,
+ "PUint16": 9,
+ "PUint32": 10,
+ "PUint64": 11,
+ "PUintptr": 12,
+ "PFloat32": 14.1,
+ "PFloat64": 15.1,
+ "String": "",
+ "PString": "16",
+ "Map": null,
+ "MapP": null,
+ "PMap": {
+ "17": {
+ "Tag": "tag17"
+ },
+ "18": {
+ "Tag": "tag18"
+ }
+ },
+ "PMapP": {
+ "19": {
+ "Tag": "tag19"
+ },
+ "20": null
+ },
+ "EmptyMap": null,
+ "NilMap": null,
+ "Slice": null,
+ "SliceP": null,
+ "PSlice": [
+ {
+ "Tag": "tag20"
+ },
+ {
+ "Tag": "tag21"
+ }
+ ],
+ "PSliceP": [
+ {
+ "Tag": "tag22"
+ },
+ null,
+ {
+ "Tag": "tag23"
+ }
+ ],
+ "EmptySlice": null,
+ "NilSlice": null,
+ "StringSlice": null,
+ "ByteSlice": null,
+ "Small": {
+ "Tag": ""
+ },
+ "PSmall": null,
+ "PPSmall": {
+ "Tag": "tag31"
+ },
+ "Interface": null,
+ "PInterface": 5.2
+}`
+
+var pallValueCompact = strings.Map(noSpace, pallValueIndent)
+
+func TestRefUnmarshal(t *testing.T) {
+ type S struct {
+ // Ref is defined in encode_test.go.
+ R0 Ref
+ R1 *Ref
+ R2 RefText
+ R3 *RefText
+ }
+ want := S{
+ R0: 12,
+ R1: new(Ref),
+ R2: 13,
+ R3: new(RefText),
+ }
+ *want.R1 = 12
+ *want.R3 = 13
+
+ var got S
+ if err := Unmarshal([]byte(`{"R0":"ref","R1":"ref","R2":"ref","R3":"ref"}`), &got); err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if !reflect.DeepEqual(got, want) {
+ t.Errorf("got %+v, want %+v", got, want)
+ }
+}
+
+// Test that the empty string doesn't panic decoding when ,string is specified
+// Issue 3450
+func TestEmptyString(t *testing.T) {
+ type T2 struct {
+ Number1 int `json:",string"`
+ Number2 int `json:",string"`
+ }
+ data := `{"Number1":"1", "Number2":""}`
+ dec := NewDecoder(strings.NewReader(data))
+ var t2 T2
+ err := dec.Decode(&t2)
+ if err == nil {
+ t.Fatal("Decode: did not return error")
+ }
+ if t2.Number1 != 1 {
+ t.Fatal("Decode: did not set Number1")
+ }
+}
+
+// Test that a null for ,string is not replaced with the previous quoted string (issue 7046).
+// It should also not be an error (issue 2540, issue 8587).
+func TestNullString(t *testing.T) {
+ type T struct {
+ A int `json:",string"`
+ B int `json:",string"`
+ C *int `json:",string"`
+ }
+ data := []byte(`{"A": "1", "B": null, "C": null}`)
+ var s T
+ s.B = 1
+ s.C = new(int)
+ *s.C = 2
+ err := Unmarshal(data, &s)
+ if err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+ if s.B != 1 || s.C != nil {
+ t.Fatalf("after Unmarshal, s.B=%d, s.C=%p, want 1, nil", s.B, s.C)
+ }
+}
+
+func intp(x int) *int {
+ p := new(int)
+ *p = x
+ return p
+}
+
+func intpp(x *int) **int {
+ pp := new(*int)
+ *pp = x
+ return pp
+}
+
+var interfaceSetTests = []struct {
+ pre interface{}
+ json string
+ post interface{}
+}{
+ {"foo", `"bar"`, "bar"},
+ {"foo", `2`, 2.0},
+ {"foo", `true`, true},
+ {"foo", `null`, nil},
+
+ {nil, `null`, nil},
+ {new(int), `null`, nil},
+ {(*int)(nil), `null`, nil},
+ {new(*int), `null`, new(*int)},
+ {(**int)(nil), `null`, nil},
+ {intp(1), `null`, nil},
+ {intpp(nil), `null`, intpp(nil)},
+ {intpp(intp(1)), `null`, intpp(nil)},
+}
+
+func TestInterfaceSet(t *testing.T) {
+ for _, tt := range interfaceSetTests {
+ b := struct{ X interface{} }{tt.pre}
+ blob := `{"X":` + tt.json + `}`
+ if err := Unmarshal([]byte(blob), &b); err != nil {
+ t.Errorf("Unmarshal %#q: %v", blob, err)
+ continue
+ }
+ if !reflect.DeepEqual(b.X, tt.post) {
+ t.Errorf("Unmarshal %#q into %#v: X=%#v, want %#v", blob, tt.pre, b.X, tt.post)
+ }
+ }
+}
+
+// JSON null values should be ignored for primitives and string values instead of resulting in an error.
+// Issue 2540
+func TestUnmarshalNulls(t *testing.T) {
+ jsonData := []byte(`{
+ "Bool" : null,
+ "Int" : null,
+ "Int8" : null,
+ "Int16" : null,
+ "Int32" : null,
+ "Int64" : null,
+ "Uint" : null,
+ "Uint8" : null,
+ "Uint16" : null,
+ "Uint32" : null,
+ "Uint64" : null,
+ "Float32" : null,
+ "Float64" : null,
+ "String" : null}`)
+
+ nulls := All{
+ Bool: true,
+ Int: 2,
+ Int8: 3,
+ Int16: 4,
+ Int32: 5,
+ Int64: 6,
+ Uint: 7,
+ Uint8: 8,
+ Uint16: 9,
+ Uint32: 10,
+ Uint64: 11,
+ Float32: 12.1,
+ Float64: 13.1,
+ String: "14"}
+
+ err := Unmarshal(jsonData, &nulls)
+ if err != nil {
+ t.Errorf("Unmarshal of null values failed: %v", err)
+ }
+ if !nulls.Bool || nulls.Int != 2 || nulls.Int8 != 3 || nulls.Int16 != 4 || nulls.Int32 != 5 || nulls.Int64 != 6 ||
+ nulls.Uint != 7 || nulls.Uint8 != 8 || nulls.Uint16 != 9 || nulls.Uint32 != 10 || nulls.Uint64 != 11 ||
+ nulls.Float32 != 12.1 || nulls.Float64 != 13.1 || nulls.String != "14" {
+
+ t.Errorf("Unmarshal of null values affected primitives")
+ }
+}
+
+func TestStringKind(t *testing.T) {
+ type stringKind string
+
+ var m1, m2 map[stringKind]int
+ m1 = map[stringKind]int{
+ "foo": 42,
+ }
+
+ data, err := Marshal(m1)
+ if err != nil {
+ t.Errorf("Unexpected error marshaling: %v", err)
+ }
+
+ err = Unmarshal(data, &m2)
+ if err != nil {
+ t.Errorf("Unexpected error unmarshaling: %v", err)
+ }
+
+ if !reflect.DeepEqual(m1, m2) {
+ t.Error("Items should be equal after encoding and then decoding")
+ }
+}
+
+// Custom types with []byte as underlying type could not be marshalled
+// and then unmarshalled.
+// Issue 8962.
+func TestByteKind(t *testing.T) {
+ type byteKind []byte
+
+ a := byteKind("hello")
+
+ data, err := Marshal(a)
+ if err != nil {
+ t.Error(err)
+ }
+ var b byteKind
+ err = Unmarshal(data, &b)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !reflect.DeepEqual(a, b) {
+ t.Errorf("expected %v == %v", a, b)
+ }
+}
+
+// The fix for issue 8962 introduced a regression.
+// Issue 12921.
+func TestSliceOfCustomByte(t *testing.T) {
+ type Uint8 uint8
+
+ a := []Uint8("hello")
+
+ data, err := Marshal(a)
+ if err != nil {
+ t.Fatal(err)
+ }
+ var b []Uint8
+ err = Unmarshal(data, &b)
+ if err != nil {
+ t.Fatal(err)
+ }
+ if !reflect.DeepEqual(a, b) {
+ t.Fatalf("expected %v == %v", a, b)
+ }
+}
+
+var decodeTypeErrorTests = []struct {
+ dest interface{}
+ src string
+}{
+ {new(string), `{"user": "name"}`}, // issue 4628.
+ {new(error), `{}`}, // issue 4222
+ {new(error), `[]`},
+ {new(error), `""`},
+ {new(error), `123`},
+ {new(error), `true`},
+}
+
+func TestUnmarshalTypeError(t *testing.T) {
+ for _, item := range decodeTypeErrorTests {
+ err := Unmarshal([]byte(item.src), item.dest)
+ if _, ok := err.(*UnmarshalTypeError); !ok {
+ t.Errorf("expected type error for Unmarshal(%q, type %T): got %T",
+ item.src, item.dest, err)
+ }
+ }
+}
+
+var unmarshalSyntaxTests = []string{
+ "tru",
+ "fals",
+ "nul",
+ "123e",
+ `"hello`,
+ `[1,2,3`,
+ `{"key":1`,
+ `{"key":1,`,
+}
+
+func TestUnmarshalSyntax(t *testing.T) {
+ var x interface{}
+ for _, src := range unmarshalSyntaxTests {
+ err := Unmarshal([]byte(src), &x)
+ if _, ok := err.(*SyntaxError); !ok {
+ t.Errorf("expected syntax error for Unmarshal(%q): got %T", src, err)
+ }
+ }
+}
+
+// Test handling of unexported fields that should be ignored.
+// Issue 4660
+type unexportedFields struct {
+ Name string
+ m map[string]interface{} `json:"-"`
+ m2 map[string]interface{} `json:"abcd"`
+}
+
+func TestUnmarshalUnexported(t *testing.T) {
+ input := `{"Name": "Bob", "m": {"x": 123}, "m2": {"y": 456}, "abcd": {"z": 789}}`
+ want := &unexportedFields{Name: "Bob"}
+
+ out := &unexportedFields{}
+ err := Unmarshal([]byte(input), out)
+ if err != nil {
+ t.Errorf("got error %v, expected nil", err)
+ }
+ if !reflect.DeepEqual(out, want) {
+ t.Errorf("got %q, want %q", out, want)
+ }
+}
+
+// Time3339 is a time.Time which encodes to and from JSON
+// as an RFC 3339 time in UTC.
+type Time3339 time.Time
+
+func (t *Time3339) UnmarshalJSON(b []byte) error {
+ if len(b) < 2 || b[0] != '"' || b[len(b)-1] != '"' {
+ return fmt.Errorf("types: failed to unmarshal non-string value %q as an RFC 3339 time", b)
+ }
+ tm, err := time.Parse(time.RFC3339, string(b[1:len(b)-1]))
+ if err != nil {
+ return err
+ }
+ *t = Time3339(tm)
+ return nil
+}
+
+func TestUnmarshalJSONLiteralError(t *testing.T) {
+ var t3 Time3339
+ err := Unmarshal([]byte(`"0000-00-00T00:00:00Z"`), &t3)
+ if err == nil {
+ t.Fatalf("expected error; got time %v", time.Time(t3))
+ }
+ if !strings.Contains(err.Error(), "range") {
+ t.Errorf("got err = %v; want out of range error", err)
+ }
+}
+
+// Test that extra object elements in an array do not result in a
+// "data changing underfoot" error.
+// Issue 3717
+func TestSkipArrayObjects(t *testing.T) {
+ json := `[{}]`
+ var dest [0]interface{}
+
+ err := Unmarshal([]byte(json), &dest)
+ if err != nil {
+ t.Errorf("got error %q, want nil", err)
+ }
+}
+
+// Test semantics of pre-filled struct fields and pre-filled map fields.
+// Issue 4900.
+func TestPrefilled(t *testing.T) {
+ ptrToMap := func(m map[string]interface{}) *map[string]interface{} { return &m }
+
+ // Values here change, cannot reuse table across runs.
+ var prefillTests = []struct {
+ in string
+ ptr interface{}
+ out interface{}
+ }{
+ {
+ in: `{"X": 1, "Y": 2}`,
+ ptr: &XYZ{X: float32(3), Y: int16(4), Z: 1.5},
+ out: &XYZ{X: float64(1), Y: float64(2), Z: 1.5},
+ },
+ {
+ in: `{"X": 1, "Y": 2}`,
+ ptr: ptrToMap(map[string]interface{}{"X": float32(3), "Y": int16(4), "Z": 1.5}),
+ out: ptrToMap(map[string]interface{}{"X": float64(1), "Y": float64(2), "Z": 1.5}),
+ },
+ }
+
+ for _, tt := range prefillTests {
+ ptrstr := fmt.Sprintf("%v", tt.ptr)
+ err := Unmarshal([]byte(tt.in), tt.ptr) // tt.ptr edited here
+ if err != nil {
+ t.Errorf("Unmarshal: %v", err)
+ }
+ if !reflect.DeepEqual(tt.ptr, tt.out) {
+ t.Errorf("Unmarshal(%#q, %s): have %v, want %v", tt.in, ptrstr, tt.ptr, tt.out)
+ }
+ }
+}
+
+var invalidUnmarshalTests = []struct {
+ v interface{}
+ want string
+}{
+ {nil, "json: Unmarshal(nil)"},
+ {struct{}{}, "json: Unmarshal(non-pointer struct {})"},
+ {(*int)(nil), "json: Unmarshal(nil *int)"},
+}
+
+func TestInvalidUnmarshal(t *testing.T) {
+ buf := []byte(`{"a":"1"}`)
+ for _, tt := range invalidUnmarshalTests {
+ err := Unmarshal(buf, tt.v)
+ if err == nil {
+ t.Errorf("Unmarshal expecting error, got nil")
+ continue
+ }
+ if got := err.Error(); got != tt.want {
+ t.Errorf("Unmarshal = %q; want %q", got, tt.want)
+ }
+ }
+}
+
+var invalidUnmarshalTextTests = []struct {
+ v interface{}
+ want string
+}{
+ {nil, "json: Unmarshal(nil)"},
+ {struct{}{}, "json: Unmarshal(non-pointer struct {})"},
+ {(*int)(nil), "json: Unmarshal(nil *int)"},
+ {new(net.IP), "json: cannot unmarshal string into Go value of type *net.IP"},
+}
+
+func TestInvalidUnmarshalText(t *testing.T) {
+ buf := []byte(`123`)
+ for _, tt := range invalidUnmarshalTextTests {
+ err := Unmarshal(buf, tt.v)
+ if err == nil {
+ t.Errorf("Unmarshal expecting error, got nil")
+ continue
+ }
+ if got := err.Error(); got != tt.want {
+ t.Errorf("Unmarshal = %q; want %q", got, tt.want)
+ }
+ }
+}
+
+// Test that string option is ignored for invalid types.
+// Issue 9812.
+func TestInvalidStringOption(t *testing.T) {
+ num := 0
+ item := struct {
+ T time.Time `json:",string"`
+ M map[string]string `json:",string"`
+ S []string `json:",string"`
+ A [1]string `json:",string"`
+ I interface{} `json:",string"`
+ P *int `json:",string"`
+ }{M: make(map[string]string), S: make([]string, 0), I: num, P: &num}
+
+ data, err := Marshal(item)
+ if err != nil {
+ t.Fatalf("Marshal: %v", err)
+ }
+
+ err = Unmarshal(data, &item)
+ if err != nil {
+ t.Fatalf("Unmarshal: %v", err)
+ }
+}
diff --git a/internal/json/encode.go b/internal/json/encode.go
new file mode 100644
index 000000000..67a0f0062
--- /dev/null
+++ b/internal/json/encode.go
@@ -0,0 +1,1256 @@
+// Copyright 2010 The Go Authors. All rights reserved.
+// Use of this source code is governed by a BSD-style
+// license that can be found in the LICENSE file.
+
+// Package json implements encoding and decoding of JSON as defined in
+// RFC 4627. The mapping between JSON and Go values is described
+// in the documentation for the Marshal and Unmarshal functions.
+//
+// See "JSON and Go" for an introduction to this package:
+// https://golang.org/doc/articles/json_and_go.html
+package json
+
+import (
+ "bytes"
+ "encoding"
+ "encoding/base64"
+ "fmt"
+ "math"
+ "reflect"
+ "runtime"
+ "sort"
+ "strconv"
+ "strings"
+ "sync"
+ "unicode"
+ "unicode/utf8"
+)
+
+// Marshal returns the JSON encoding of v.
+//
+// Marshal traverses the value v recursively.
+// If an encountered value implements the Marshaler interface
+// and is not a nil pointer, Marshal calls its MarshalJSON method
+// to produce JSON. If no MarshalJSON method is present but the
+// value implements encoding.TextMarshaler instead, Marshal calls
+// its MarshalText method.
+// The nil pointer exception is not strictly necessary
+// but mimics a similar, necessary exception in the behavior of
+// UnmarshalJSON.
+//
+// Otherwise, Marshal uses the following type-dependent default encodings:
+//
+// Boolean values encode as JSON booleans.
+//
+// Floating point, integer, and Number values encode as JSON numbers.
+//
+// String values encode as JSON strings coerced to valid UTF-8,
+// replacing invalid bytes with the Unicode replacement rune.
+// The angle brackets "<" and ">" are escaped to "\u003c" and "\u003e"
+// to keep some browsers from misinterpreting JSON output as HTML.
+// Ampersand "&" is also escaped to "\u0026" for the same reason.
+// This escaping can be disabled using an Encoder with DisableHTMLEscaping.
+//
+// Array and slice values encode as JSON arrays, except that
+// []byte encodes as a base64-encoded string, and a nil slice
+// encodes as the null JSON value.
+//
+// Struct values encode as JSON objects. Each exported struct field
+// becomes a member of the object unless
+// - the field's tag is "-", or
+// - the field is empty and its tag specifies the "omitempty" option.
+// The empty values are false, 0, any
+// nil pointer or interface value, and any array, slice, map, or string of
+// length zero. The object's default key string is the struct field name
+// but can be specified in the struct field's tag value. The "json" key in
+// the struct field's tag value is the key name, followed by an optional comma
+// and options. Examples:
+//
+// // Field is ignored by this package.
+// Field int `json:"-"`
+//
+// // Field appears in JSON as key "myName".
+// Field int `json:"myName"`
+//
+// // Field appears in JSON as key "myName" and
+// // the field is omitted from the object if its value is empty,
+// // as defined above.
+// Field int `json:"myName,omitempty"`
+//
+// // Field appears in JSON as key "Field" (the default), but
+// // the field is skipped if empty.
+// // Note the leading comma.
+// Field int `json:",omitempty"`
+//
+// The "string" option signals that a field is stored as JSON inside a
+// JSON-encoded string. It applies only to fields of string, floating point,
+// integer, or boolean types. This extra level of encoding is sometimes used
+// when communicating with JavaScript programs:
+//
+// Int64String int64 `json:",string"`
+//
+// The key name will be used if it's a non-empty string consisting of
+// only Unicode letters, digits, dollar signs, percent signs, hyphens,
+// underscores and slashes.
+//
+// Anonymous struct fields are usually marshaled as if their inner exported fields
+// were fields in the outer struct, subject to the usual Go visibility rules amended
+// as described in the next paragraph.
+// An anonymous struct field with a name given in its JSON tag is treated as
+// having that name, rather than being anonymous.
+// An anonymous struct field of interface type is treated the same as having
+// that type as its name, rather than being anonymous.
+//
+// The Go visibility rules for struct fields are amended for JSON when
+// deciding which field to marshal or unmarshal. If there are
+// multiple fields at the same level, and that level is the least
+// nested (and would therefore be the nesting level selected by the
+// usual Go rules), the following extra rules apply:
+//
+// 1) Of those fields, if any are JSON-tagged, only tagged fields are considered,
+// even if there are multiple untagged fields that would otherwise conflict.
+// 2) If there is exactly one field (tagged or not according to the first rule), that is selected.
+// 3) Otherwise there are multiple fields, and all are ignored; no error occurs.
+//
+// Handling of anonymous struct fields is new in Go 1.1.
+// Prior to Go 1.1, anonymous struct fields were ignored. To force ignoring of
+// an anonymous struct field in both current and earlier versions, give the field
+// a JSON tag of "-".
+//
+// Map values encode as JSON objects. The map's key type must either be a string
+// or implement encoding.TextMarshaler. The map keys are used as JSON object
+// keys, subject to the UTF-8 coercion described for string values above.
+//
+// Pointer values encode as the value pointed to.
+// A nil pointer encodes as the null JSON value.
+//
+// Interface values encode as the value contained in the interface.
+// A nil interface value encodes as the null JSON value.
+//
+// Channel, complex, and function values cannot be encoded in JSON.
+// Attempting to encode such a value causes Marshal to return
+// an UnsupportedTypeError.
+//
+// JSON cannot represent cyclic data structures and Marshal does not
+// handle them. Passing cyclic structures to Marshal will result in
+// an infinite recursion.
+//
+func Marshal(v interface{}) ([]byte, error) {
+ e := &encodeState{}
+ err := e.marshal(v, encOpts{escapeHTML: true})
+ if err != nil {
+ return nil, err
+ }
+ return e.Bytes(), nil
+}
+
+// MarshalIndent is like Marshal but applies Indent to format the output.
+func MarshalIndent(v interface{}, prefix, indent string) ([]byte, error) {
+ b, err := Marshal(v)
+ if err != nil {
+ return nil, err
+ }
+ var buf bytes.Buffer
+ err = Indent(&buf, b, prefix, indent)
+ if err != nil {
+ return nil, err
+ }
+ return buf.Bytes(), nil
+}
+
+// HTMLEscape appends to dst the JSON-encoded src with <, >, &, U+2028 and U+2029
+// characters inside string literals changed to \u003c, \u003e, \u0026, \u2028, \u2029
+// so that the JSON will be safe to embed inside HTML