mirror of https://github.com/jackc/pgx.git
parent 26bb780063
commit e1215d9b19

conn.go (2 changed lines)

@@ -861,6 +861,8 @@ func (c *Conn) sendPreparedQuery(ps *PreparedStatement, arguments ...interface{}
			err = arg.Encode(wbuf, oid)
		case string:
			err = encodeText(wbuf, arguments[i])
		case []byte:
			err = encodeBytea(wbuf, arguments[i])
		default:
			if v := reflect.ValueOf(arguments[i]); v.Kind() == reflect.Ptr {
				if v.IsNil() {
doc.go (8 changed lines)

@@ -181,6 +181,14 @@ Conn.PgTypes.

See example_custom_type_test.go for an example of a custom type for the
PostgreSQL point type.

[]byte Mapping

[]byte passed as arguments to Query, QueryRow, and Exec are passed unmodified
to PostgreSQL. In like manner, a *[]byte passed to Scan will be filled with
the raw bytes returned by PostgreSQL. This can be especially useful for reading
varchar, text, json, and jsonb values directly into a []byte and avoiding the
type conversion from string.

TLS

The pgx ConnConfig struct has a TLSConfig field. If this field is
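Illustration, not part of the commit: a minimal sketch of the []byte mapping that the doc.go addition above describes, assuming the Exec/QueryRow/Scan signatures pgx uses elsewhere in this diff; the example package, the byteSliceRoundTrip helper, and the docs(body json) table are hypothetical.

package example

import (
	"fmt"

	"github.com/jackc/pgx"
)

// byteSliceRoundTrip is a hypothetical helper sketching the []byte mapping:
// a []byte argument is passed to PostgreSQL unmodified, and a *[]byte
// destination receives the raw bytes of the returned json value without a
// detour through string. It assumes conn is already connected and that a
// docs(body json) table already exists.
func byteSliceRoundTrip(conn *pgx.Conn) error {
	doc := []byte(`{"foo":"bar"}`)

	// The []byte argument is sent as-is.
	if _, err := conn.Exec(`insert into docs(body) values($1)`, doc); err != nil {
		return err
	}

	// Scanning into a *[]byte skips decoding and copies the raw bytes.
	var body []byte
	if err := conn.QueryRow(`select body from docs`).Scan(&body); err != nil {
		return err
	}

	fmt.Printf("raw json: %s\n", body)
	return nil
}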
query.go (11 changed lines)

@@ -11,10 +11,9 @@ import (

// Row is a convenience wrapper over Rows that is returned by QueryRow.
type Row Rows

// Scan reads the values from the row into dest values positionally. dest can
// include pointers to core types and the Scanner interface. If no rows were
// found it returns ErrNoRows. If multiple rows are returned it ignores all but
// the first.
// Scan works the same as (*Rows Scan) with the following exceptions. If no
// rows were found it returns ErrNoRows. If multiple rows are returned it
// ignores all but the first.
func (r *Row) Scan(dest ...interface{}) (err error) {
	rows := (*Rows)(r)

@@ -216,7 +215,9 @@ func (rows *Rows) nextColumn() (*ValueReader, bool) {
}

// Scan reads the values from the current row into dest values positionally.
// dest can include pointers to core types and the Scanner interface.
// dest can include pointers to core types, values implementing the Scanner
// interface, and []byte. []byte will skip the decoding process and directly
// copy the raw bytes received from PostgreSQL.
func (rows *Rows) Scan(dest ...interface{}) (err error) {
	if len(rows.fields) != len(dest) {
		err = fmt.Errorf("Scan received wrong number of arguments, got %d but expected %d", len(dest), len(rows.fields))
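Also for illustration only: a rough sketch of the revised Rows.Scan contract, reading a text column straight into []byte so the decode-to-string step is skipped. The readNamesRaw helper, the people table, and the surrounding Query/Next/Err/Close usage are illustrative assumptions, not part of the diff.

package example

import (
	"fmt"

	"github.com/jackc/pgx"
)

// readNamesRaw is a hypothetical helper: each row's text column is scanned
// into a []byte destination, so Rows.Scan copies the raw bytes received from
// PostgreSQL instead of decoding them into a string first.
func readNamesRaw(conn *pgx.Conn) error {
	rows, err := conn.Query(`select name from people`)
	if err != nil {
		return err
	}
	defer rows.Close()

	for rows.Next() {
		var name []byte // raw bytes, no string conversion
		if err := rows.Scan(&name); err != nil {
			return err
		}
		fmt.Printf("%s\n", name)
	}
	return rows.Err()
}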
@@ -431,6 +431,7 @@ func TestQueryRowCoreByteSlice(t *testing.T) {
	}{
		{"select $1::text", "Jack", []byte("Jack")},
		{"select $1::text", []byte("Jack"), []byte("Jack")},
		{"select $1::int4", int32(239023409), []byte{14, 63, 53, 49}},
		{"select $1::varchar", []byte("Jack"), []byte("Jack")},
		{"select $1::bytea", []byte{0, 15, 255, 17}, []byte{0, 15, 255, 17}},
	}

@@ -451,6 +452,30 @@ func TestQueryRowCoreByteSlice(t *testing.T) {
	}
}

func TestQueryRowByteSliceArgument(t *testing.T) {
	t.Parallel()

	conn := mustConnect(t, *defaultConnConfig)
	defer closeConn(t, conn)

	sql := "select $1::int4"
	queryArg := []byte{14, 63, 53, 49}
	expected := int32(239023409)

	var actual int32

	err := conn.QueryRow(sql, queryArg).Scan(&actual)
	if err != nil {
		t.Errorf("Unexpected failure: %v (sql -> %v)", err, sql)
	}

	if expected != actual {
		t.Errorf("Expected %v, got %v (sql -> %v)", expected, actual, sql)
	}

	ensureConnValid(t, conn)
}

func TestQueryRowUnknownType(t *testing.T) {
	t.Parallel()
@@ -436,6 +436,80 @@ func TestConnQueryRowUnknownType(t *testing.T) {
	ensureConnValid(t, db)
}

func TestConnQueryJSONIntoByteSlice(t *testing.T) {
	db := openDB(t)
	defer closeDB(t, db)

	_, err := db.Exec(`
		create temporary table docs(
			body json not null
		);

		insert into docs(body) values('{"foo":"bar"}');
	`)
	if err != nil {
		t.Fatalf("db.Exec unexpectedly failed: %v", err)
	}

	sql := `select * from docs`
	expected := []byte(`{"foo":"bar"}`)
	var actual []byte

	err = db.QueryRow(sql).Scan(&actual)
	if err != nil {
		t.Errorf("Unexpected failure: %v (sql -> %v)", err, sql)
	}

	if bytes.Compare(actual, expected) != 0 {
		t.Errorf(`Expected "%v", got "%v" (sql -> %v)`, string(expected), string(actual), sql)
	}

	_, err = db.Exec(`drop table docs`)
	if err != nil {
		t.Fatalf("db.Exec unexpectedly failed: %v", err)
	}

	ensureConnValid(t, db)
}

func TestConnExecInsertByteSliceIntoJSON(t *testing.T) {
	db := openDB(t)
	defer closeDB(t, db)

	_, err := db.Exec(`
		create temporary table docs(
			body json not null
		);
	`)
	if err != nil {
		t.Fatalf("db.Exec unexpectedly failed: %v", err)
	}

	expected := []byte(`{"foo":"bar"}`)

	_, err = db.Exec(`insert into docs(body) values($1)`, expected)
	if err != nil {
		t.Fatalf("db.Exec unexpectedly failed: %v", err)
	}

	var actual []byte
	err = db.QueryRow(`select body from docs`).Scan(&actual)
	if err != nil {
		t.Fatalf("db.QueryRow unexpectedly failed: %v", err)
	}

	if bytes.Compare(actual, expected) != 0 {
		t.Errorf(`Expected "%v", got "%v"`, string(expected), string(actual))
	}

	_, err = db.Exec(`drop table docs`)
	if err != nil {
		t.Fatalf("db.Exec unexpectedly failed: %v", err)
	}

	ensureConnValid(t, db)
}

func TestTransactionLifeCycle(t *testing.T) {
	db := openDB(t)
	defer closeDB(t, db)