23 commits
3aa0c34  Close the prepared stmt if we don't use the LRU (evanphx, Mar 6, 2014)
986829b  expose sql.Tx.Exec on jet.Tx (mna, Mar 9, 2015)
966e68a  Merge pull request #1 from splicers/mna-expose-tx-exec (kytrinyx, Mar 9, 2015)
c3a9966  add nil check (mna, Mar 9, 2015)
93978c8  Merge pull request #2 from splicers/mna-expose-tx-exec (mattetti, Mar 9, 2015)
dbc95b5  Handle some special cases (Nivl, Dec 16, 2016)
42a2207  Merge pull request #3 from splicers/Nivl-patch-1 (nhocki, Dec 16, 2016)
be6033d  Handle IP/Ip (Nivl, Jan 16, 2017)
099e501  Merge pull request #4 from splicers/Nivl-patch-1 (Nivl, Jan 17, 2017)
99d2163  Handle URL/Url (Nivl, Feb 6, 2017)
f93d0c3  Merge pull request #5 from splicers/Nivl-patch-2 (Nivl, Feb 6, 2017)
1cb5931  Add `OpenFunc()` function (nhocki, Jan 24, 2019)
b54be38  Merge pull request #7 from splice/nh-dd-tracer (nhocki, Jan 24, 2019)
014d65c  Add `QueryContext` methods for Jet objects (nhocki, Jan 30, 2019)
6dc985a  Merge pull request #8 from splice/nh-query-context (nhocki, Jan 30, 2019)
1765802  Tweaking the mapping of complex type to support setting to blank (Aug 18, 2021)
e0bc340  Merge pull request #9 from splice/jfb/ch148415/blank-existing-fields (jfbramlett, Aug 18, 2021)
6ce4b78  Adding additional test and logic to account for how we do this in the… (Aug 18, 2021)
f5b0cf4  Merge pull request #10 from splice/jfb/ch148415/fix-issue-with-interf… (jfbramlett, Aug 18, 2021)
5a986e7  Adding configurable LRU cachesize + ability to disable use of prepare… (jfbramlett, Jan 22, 2023)
9cc29de  Updating .gitignore to ignore .idea dir (jfbramlett, Jan 22, 2023)
66689b3  Adding the ability to get the cache size (jfbramlett, Jan 23, 2023)
c4fcf2b  Making use of prepared stmts optional (Jan 23, 2023)
2 changes: 2 additions & 0 deletions .gitignore
@@ -25,3 +25,5 @@ cpu.out
 
 *.sublime-project
 *.sublime-workspace
+
+.idea
37 changes: 27 additions & 10 deletions db.go
@@ -1,6 +1,7 @@
 package jet
 
 import (
+	"context"
 	"database/sql"
 )
 
@@ -18,22 +19,29 @@ type Db struct {
 	// Defaults to SnakeCaseConverter.
 	ColumnConverter ColumnConverter
 
-	driver string
-	source string
-	lru    *lru
+	driver            string
+	source            string
+	lru               *lru
+	skipPreparedStmts bool
 }
 
 // Open opens a new database connection.
-func Open(driverName, dataSourceName string) (*Db, error) {
-	db, err := sql.Open(driverName, dataSourceName)
+func Open(driverName, dataSourceName string, usePreparedStmts bool, preparedStmtCacheSize int) (*Db, error) {
+	return OpenFunc(driverName, dataSourceName, sql.Open, usePreparedStmts, preparedStmtCacheSize)
+}
+
+// OpenFunc opens a new database connection by using the passed `fn`.
+func OpenFunc(driverName, dataSourceName string, fn func(string, string) (*sql.DB, error), usePreparedStmts bool, preparedStmtCacheSize int) (*Db, error) {
+	db, err := fn(driverName, dataSourceName)
 	if err != nil {
 		return nil, err
 	}
 	j := &Db{
-		ColumnConverter: SnakeCaseConverter, // default
-		driver:          driverName,
-		source:          dataSourceName,
-		lru:             newLru(),
+		ColumnConverter:   SnakeCaseConverter, // default
+		driver:            driverName,
+		source:            dataSourceName,
+		lru:               newLru(preparedStmtCacheSize),
+		skipPreparedStmts: usePreparedStmts,
 	}
 	j.DB = db
 
@@ -64,5 +72,14 @@ func (db *Db) Begin() (*Tx, error) {
 
 // Query creates a prepared query that can be run with Rows or Run.
 func (db *Db) Query(query string, args ...interface{}) Runnable {
-	return newQuery(db, db, query, args...)
+	return db.QueryContext(context.Background(), query, args...)
 }
+
+// QueryContext creates a prepared query that can be run with Rows or Run.
+func (db *Db) QueryContext(ctx context.Context, query string, args ...interface{}) Runnable {
+	return newQuery(ctx, db.skipPreparedStmts, db, db, query, args...)
+}
+
+func (db *Db) CacheSize() int {
+	return db.lru.size()
+}
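A minimal caller-side sketch of how the reworked `Open`, the new `QueryContext`, and `CacheSize` might be used. The import paths, driver name, DSN, query, and the 200-statement cache size are placeholders, not part of this PR; note that the diff stores `usePreparedStmts` into `skipPreparedStmts`, so the exact boolean semantics follow from how `newQuery` reads that field.

```go
package main

import (
	"context"
	"fmt"
	"log"
	"time"

	_ "github.com/lib/pq"
	"github.com/splice/jet"
)

func main() {
	// Open now takes the prepared-statement flag and the LRU cache size directly.
	db, err := jet.Open("postgres", "postgres://localhost/app?sslmode=disable", true, 200)
	if err != nil {
		log.Fatal(err)
	}

	// QueryContext threads a context (deadline, cancellation, tracing) through
	// to newQuery; Query now simply delegates with context.Background().
	ctx, cancel := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancel()
	if err := db.QueryContext(ctx, "UPDATE users SET active = true WHERE id = $1", 1).Run(); err != nil {
		log.Fatal(err)
	}

	// CacheSize reports how many prepared statements the LRU currently holds.
	fmt.Println("cached statements:", db.CacheSize())
}
```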
8 changes: 6 additions & 2 deletions lru.go
@@ -19,9 +19,9 @@ type lruItem struct {
 	stmt *sql.Stmt
 }
 
-func newLru() *lru {
+func newLru(maxItems int) *lru {
 	return &lru{
-		maxItems: 500,
+		maxItems: maxItems,
 		keys:     make(map[string]*list.Element),
 		list:     list.New(),
 	}
@@ -79,6 +79,10 @@ func (c *lru) clean() {
 	}
 }
 
+func (c *lru) size() int {
+	return c.list.Len()
+}
+
 // makeKey hashes the key to save some bytes
 func makeKey(k string) string {
 	buffer := sha1.New()
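A package-internal, test-style sketch of the constructor change (hypothetical, since `newLru` and `size` are unexported): the cache cap is now supplied by the caller instead of the previous hard-coded 500, and `size()` reports the number of cached statements.

```go
package jet

import "testing"

// TestLruCapSketch is a hypothetical illustration, not part of this PR: the
// cap is caller-supplied and size() reports the current entry count.
func TestLruCapSketch(t *testing.T) {
	cache := newLru(100) // arbitrary cap; previously fixed at 500
	if got := cache.size(); got != 0 {
		t.Fatalf("expected an empty cache, got %d entries", got)
	}
}
```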
37 changes: 37 additions & 0 deletions mapper.go
@@ -20,9 +20,34 @@ func (m *mapper) unpack(keys []string, values []interface{}, out interface{}) er
 	return m.unpackValue(keys, values, val)
 }
 
+func isNil(val interface{}) bool {
+	if val == nil {
+		return true
+	}
+	if reflect.ValueOf(val).IsZero() {
+		return true
+	}
+	if reflect.ValueOf(val).Kind() == reflect.Ptr {
+		if reflect.ValueOf(val).Elem().Kind() == reflect.Struct || reflect.ValueOf(val).Elem().Kind() == reflect.Interface {
+			return reflect.ValueOf(val).Elem().IsNil()
+		}
+	}
+
+	return false
+}
+
 func (m *mapper) unpackValue(keys []string, values []interface{}, out reflect.Value) error {
 	switch out.Interface().(type) {
 	case ComplexValue:
+		if isNil(values[0]) {
+			if out.IsZero() {
+				return nil
+			}
+			if out.CanSet() {
+				out.Set(reflect.Zero(out.Type()))
+				return nil
+			}
+		}
 		if out.IsNil() {
 			out.Set(reflect.New(out.Type().Elem()))
 		}
@@ -82,6 +107,18 @@ func (m *mapper) unpackStruct(keys []string, values []interface{}, out reflect.V
 			convKey = m.conv.ColumnToFieldName(k)
 		}
 		field := out.FieldByName(convKey)
+
+		// If the field is not found it can mean that we don't want it or that
+		// we have special case like UserID, UUID, userUUID
+		// So fix the name and try again
+		if !field.IsValid() {
+			convKey = strings.Replace(convKey, "Uuid", "UUID", -1)
+			convKey = strings.Replace(convKey, "Id", "ID", -1)
+			convKey = strings.Replace(convKey, "Ip", "IP", -1)
+			convKey = strings.Replace(convKey, "Url", "URL", -1)
+			field = out.FieldByName(convKey)
+		}
+
 		if field.IsValid() {
 			m.unpackValue(nil, values[i:i+1], field)
 		}
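A standalone sketch of the field-name fallback above (`normalize` is an illustrative helper, not part of the library): the snake_case converter yields keys like `UserId` or `AssetUrl`, which only match Go-style fields such as `UserID` or `AssetURL` after the replacements.

```go
package main

import (
	"fmt"
	"strings"
)

// normalize mirrors the fallback in unpackStruct: rewrite Uuid/Id/Ip/Url to
// their all-caps forms so FieldByName can be retried with a Go-style name.
func normalize(key string) string {
	key = strings.Replace(key, "Uuid", "UUID", -1)
	key = strings.Replace(key, "Id", "ID", -1)
	key = strings.Replace(key, "Ip", "IP", -1)
	key = strings.Replace(key, "Url", "URL", -1)
	return key
}

func main() {
	fmt.Println(normalize("UserId"))   // UserID
	fmt.Println(normalize("UserUuid")) // UserUUID
	fmt.Println(normalize("HostIp"))   // HostIP
	fmt.Println(normalize("AssetUrl")) // AssetURL
}
```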
173 changes: 171 additions & 2 deletions mapper_test.go
@@ -73,8 +73,13 @@ func (c *custom) Decode(v interface{}) error {
 	}
 	s, ok := v.(string)
 	if ok {
-		c.a = string(s[0])
-		c.b = string(s[1])
+		if len(s) > 1 {
+			c.a = string(s[0])
+			c.b = string(s[1])
+		} else {
+			c.a = ""
+			c.b = ""
+		}
 	}
 	return nil
 }
@@ -181,6 +186,170 @@ func TestUnpackStruct(t *testing.T) {
}
}

func TestUnpackStructExistingValueToEmpty(t *testing.T) {
keys := []string{"m"}
vals := []interface{}{
"",
}
mppr := &mapper{
conv: SnakeCaseConverter,
}

var v struct {
M plainCustom
}
v.M = "abc"

err := mppr.unpack(keys, vals, &v)
if err != nil {
t.Fatal(err)
}
if v.M != "" {
t.Fatal(v.M)
}
}

func TestUnpackStructEmptyToEmpty(t *testing.T) {
keys := []string{"m"}
vals := []interface{}{
"",
}
mppr := &mapper{
conv: SnakeCaseConverter,
}

var v struct {
M plainCustom
}

err := mppr.unpack(keys, vals, &v)
if err != nil {
t.Fatal(err)
}
if v.M != "" {
t.Fatal(v.M)
}
}

func TestUnpackStructExistingValueToNil(t *testing.T) {
keys := []string{"j"}
vals := []interface{}{
nil,
}
mppr := &mapper{
conv: SnakeCaseConverter,
}

var v struct {
J *custom
}
v.J = &custom{a: "a", b: "b"}

err := mppr.unpack(keys, vals, &v)
if err != nil {
t.Fatal(err)
}
if v.J != nil {
t.Fatal(v.J)
}
}

func TestUnpackStructExistingValueNonPtrToEmpty(t *testing.T) {
keys := []string{"j"}
vals := []interface{}{
"",
}
mppr := &mapper{
conv: SnakeCaseConverter,
}

var v struct {
J custom
}
v.J = custom{a: "a", b: "b"}

err := mppr.unpack(keys, vals, &v)
if err != nil {
t.Fatal(err)
}
if v.J.a != "" || v.J.b != "" {
t.Fatal(v.J)
}
}

func TestUnpackStructComplexExistingValueToEmpty(t *testing.T) {
keys := []string{"j"}
vals := []interface{}{
"",
}
mppr := &mapper{
conv: SnakeCaseConverter,
}

var v struct {
J *custom
}
v.J = &custom{a: "a", b: "b"}

err := mppr.unpack(keys, vals, &v)
if err != nil {
t.Fatal(err)
}
if v.J != nil {
t.Fatal(v.J)
}
}


func TestUnpackStructNilLikeDBQuery(t *testing.T) {
keys := []string{"j"}
vals := make([]interface{}, 0, len(keys))
for i := 0; i < cap(vals); i++ {
vals = append(vals, new(interface{}))
}
mppr := &mapper{
conv: SnakeCaseConverter,
}

var v struct {
J *custom
}
v.J = &custom{
a: "a", b: "b",
}

err := mppr.unpack(keys, vals, &v)
if err != nil {
t.Fatal(err)
}
if v.J != nil {
t.Fatal(v.J)
}
}

func TestUnpackStructNilComplexToNil(t *testing.T) {
keys := []string{"j"}
vals := []interface{}{
nil,
}
mppr := &mapper{
conv: SnakeCaseConverter,
}

var v struct {
J *custom
}

err := mppr.unpack(keys, vals, &v)
if err != nil {
t.Fatal(err)
}
if v.J != nil {
t.Fatal(v.J)
}
}


func TestUnpackMap(t *testing.T) {
keys := []string{"ab_c", "c_d", "e"}
vals := []interface{}{int64(9), "hello", "unsettable"}