From b7dc09e7ce9bf1d0a7658123d38edc6a9539ef35 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Tue, 9 Dec 2014 10:20:24 +0100 Subject: [PATCH 01/82] Use COM_STMT_SEND_LONG_DATA to optimize inserting long BLOB values for MySQL --- .../async/db/mysql/MySQLConnection.scala | 2 +- .../db/mysql/binary/BinaryRowEncoder.scala | 29 ++++++++- .../mysql/binary/encoder/BinaryEncoder.scala | 6 ++ .../binary/encoder/ByteArrayEncoder.scala | 7 ++- .../mysql/binary/encoder/ByteBufEncoder.scala | 5 ++ .../binary/encoder/ByteBufferEncoder.scala | 7 ++- .../mysql/codec/MySQLConnectionHandler.scala | 19 ++++-- .../db/mysql/codec/MySQLOneToOneEncoder.scala | 8 ++- .../PreparedStatement.scala} | 5 +- .../mysql/encoder/SendLongDataEncoder.scala | 21 +++++++ .../mysql/message/client/ClientMessage.scala | 14 ++--- .../message/client/SendLongDataMessage.scala | 10 +++ .../async/db/mysql/BinaryColumnsSpec.scala | 62 ++++++++++++------- 13 files changed, 151 insertions(+), 44 deletions(-) rename mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/{message/client/PreparedStatementMessage.scala => codec/PreparedStatement.scala} (76%) create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala index a48e8739..210cebfc 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala @@ -236,7 +236,7 @@ class MySQLConnection( } val promise = Promise[QueryResult] this.setQueryPromise(promise) - this.connectionHandler.write(new PreparedStatementMessage(query, values)) + this.connectionHandler.sendPreparedStatement(query, values) promise.future } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala index 310f80ca..1d660dce 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala @@ -104,7 +104,34 @@ class BinaryRowEncoder( charset : Charset ) { private def encode(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any): Unit = { val encoder = encoderFor(value) parameterTypesBuffer.writeShort(encoder.encodesTo) - encoder.encode(value, parameterValuesBuffer) + if (!encoder.isLong(value)) + encoder.encode(value, parameterValuesBuffer) + } + + def isLong( maybeValue : Any ) : Boolean = { + if ( maybeValue == null || maybeValue == None ) { + false + } else { + val value = maybeValue match { + case Some(v) => v + case _ => maybeValue + } + val encoder = encoderFor(value) + encoder.isLong(value) + } + } + + def encodeLong( maybeValue: Any ) : ByteBuf = { + if ( maybeValue == null || maybeValue == None ) { + throw new UnsupportedOperationException("Cannot encode NULL as long value") + } else { + val value = maybeValue match { + case Some(v) => v + case _ => maybeValue + } + val encoder = encoderFor(value) + encoder.encodeLong(value) + } } private def encoderFor( v : Any ) : BinaryEncoder = { diff --git 
a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala index bb504ce6..c4f87687 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala @@ -20,6 +20,12 @@ import io.netty.buffer.ByteBuf trait BinaryEncoder { + val LONG_THRESHOLD = 1023 + + def isLong( value : Any ) : Boolean = false + + def encodeLong( value : Any ) : ByteBuf = throw new UnsupportedOperationException() + def encode( value : Any, buffer : ByteBuf ) def encodesTo : Int diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala index 260f22a4..07d9d3e2 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala @@ -17,11 +17,16 @@ package com.github.mauricio.async.db.mysql.binary.encoder -import io.netty.buffer.ByteBuf +import io.netty.buffer.{Unpooled, ByteBuf} import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper import com.github.mauricio.async.db.mysql.column.ColumnTypes object ByteArrayEncoder extends BinaryEncoder { + + override def isLong(value: Any): Boolean = value.asInstanceOf[Array[Byte]].length > LONG_THRESHOLD + + override def encodeLong(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[Array[Byte]]) + def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[Array[Byte]] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala index 62b62560..4ba79072 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala @@ -5,6 +5,11 @@ import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper import io.netty.buffer.ByteBuf object ByteBufEncoder extends BinaryEncoder { + + override def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuf].readableBytes() > LONG_THRESHOLD + + override def encodeLong(value: Any): ByteBuf = value.asInstanceOf[ByteBuf] + def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[ByteBuf] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala index 329709ad..a562c84d 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala @@ -4,9 +4,14 @@ import java.nio.ByteBuffer import com.github.mauricio.async.db.mysql.column.ColumnTypes import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper -import io.netty.buffer.ByteBuf +import io.netty.buffer.{Unpooled, ByteBuf} object ByteBufferEncoder extends BinaryEncoder { + + override def isLong(value: Any): Boolean = 
value.asInstanceOf[ByteBuffer].remaining() > LONG_THRESHOLD + + override def encodeLong(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[ByteBuffer]) + def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[ByteBuffer] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index 27ff04da..4d7d3498 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -58,7 +58,7 @@ class MySQLConnectionHandler( private final val binaryRowDecoder = new BinaryRowDecoder() private var currentPreparedStatementHolder : PreparedStatementHolder = null - private var currentPreparedStatement : PreparedStatementMessage = null + private var currentPreparedStatement : PreparedStatement = null private var currentQuery : MutableResultSet[ColumnDefinitionMessage] = null private var currentContext: ChannelHandlerContext = null @@ -185,20 +185,21 @@ class MySQLConnectionHandler( writeAndHandleError(message) } - def write( message : PreparedStatementMessage ) { + def sendPreparedStatement( query: String, values: Seq[Any] ) { + val preparedStatement = new PreparedStatement(query, values) this.currentColumns.clear() this.currentParameters.clear() - this.currentPreparedStatement = message + this.currentPreparedStatement = preparedStatement - this.parsedStatements.get(message.statement) match { + this.parsedStatements.get(preparedStatement.statement) match { case Some( item ) => { - this.executePreparedStatement(item.statementId, item.columns.size, message.values, item.parameters) + this.executePreparedStatement(item.statementId, item.columns.size, preparedStatement.values, item.parameters) } case None => { decoder.preparedStatementPrepareStarted() - writeAndHandleError( new PreparedStatementPrepareMessage(message.statement) ) + writeAndHandleError( new PreparedStatementPrepareMessage(preparedStatement.statement) ) } } } @@ -234,6 +235,12 @@ class MySQLConnectionHandler( decoder.preparedStatementExecuteStarted(columnsCount, parameters.size) this.currentColumns.clear() this.currentParameters.clear() + + values.zipWithIndex.foreach { case (value, index) => + if (encoder.rowEncoder.isLong(value)) + writeAndHandleError(new SendLongDataMessage(statementId, value, index)) + } + writeAndHandleError(new PreparedStatementExecuteMessage( statementId, values, parameters )) } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala index 074a8b6a..667b2e4e 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala @@ -36,10 +36,12 @@ class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) exten import MySQLOneToOneEncoder.log + final val rowEncoder = new BinaryRowEncoder(charset) + private final val handshakeResponseEncoder = new HandshakeResponseEncoder(charset, charsetMapper) private final val queryEncoder = new QueryMessageEncoder(charset) - private final val rowEncoder = new BinaryRowEncoder(charset) private final val prepareEncoder = new PreparedStatementPrepareEncoder(charset) + 
private final val sendLongDataEncoder = new SendLongDataEncoder(rowEncoder) private final val executeEncoder = new PreparedStatementExecuteEncoder(rowEncoder) private final val authenticationSwitchEncoder = new AuthenticationSwitchResponseEncoder(charset) @@ -67,6 +69,10 @@ class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) exten sequence = 0 this.prepareEncoder } + case ClientMessage.PreparedStatementSendLongData => { + sequence = 0 + this.sendLongDataEncoder + } case ClientMessage.AuthSwitchResponse => { sequence += 1 this.authenticationSwitchEncoder diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/PreparedStatement.scala similarity index 76% rename from mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementMessage.scala rename to mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/PreparedStatement.scala index 0e52dad6..08fb0d9f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/PreparedStatement.scala @@ -14,7 +14,6 @@ * under the License. */ -package com.github.mauricio.async.db.mysql.message.client +package com.github.mauricio.async.db.mysql.codec -case class PreparedStatementMessage ( statement : String, values : Seq[Any]) - extends ClientMessage( ClientMessage.PreparedStatement ) \ No newline at end of file +case class PreparedStatement ( statement : String, values : Seq[Any]) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala new file mode 100644 index 00000000..b2bd2353 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala @@ -0,0 +1,21 @@ +package com.github.mauricio.async.db.mysql.encoder + +import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder +import com.github.mauricio.async.db.mysql.message.client.{ClientMessage, SendLongDataMessage} +import com.github.mauricio.async.db.util.ByteBufferUtils +import io.netty.buffer.{Unpooled, ByteBuf} + +class SendLongDataEncoder( rowEncoder : BinaryRowEncoder ) extends MessageEncoder { + + def encode(message: ClientMessage): ByteBuf = { + val m = message.asInstanceOf[SendLongDataMessage] + + val buffer = ByteBufferUtils.packetBuffer() + buffer.writeByte(m.kind) + buffer.writeBytes(m.statementId) + buffer.writeShort(m.paramId) + + Unpooled.wrappedBuffer(buffer, rowEncoder.encodeLong(m.value)) + } + +} \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala index 72d0be13..2a2a1b1f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/ClientMessage.scala @@ -20,13 +20,13 @@ import com.github.mauricio.async.db.KindedMessage object ClientMessage { - final val ClientProtocolVersion = 0x09 - final val Quit = 0x01 - final val Query = 0x03 - final val PreparedStatementPrepare = 0x16 - final val PreparedStatementExecute = 0x17 - final val 
PreparedStatement = 0x18 - final val AuthSwitchResponse = 0xfe + final val ClientProtocolVersion = 0x09 // COM_STATISTICS + final val Quit = 0x01 // COM_QUIT + final val Query = 0x03 // COM_QUERY + final val PreparedStatementPrepare = 0x16 // COM_STMT_PREPARE + final val PreparedStatementExecute = 0x17 // COM_STMT_EXECUTE + final val PreparedStatementSendLongData = 0x18 // COM_STMT_SEND_LONG_DATA + final val AuthSwitchResponse = 0xfe // AuthSwitchRequest } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala new file mode 100644 index 00000000..cf213614 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala @@ -0,0 +1,10 @@ +package com.github.mauricio.async.db.mysql.message.client + +case class SendLongDataMessage ( + statementId : Array[Byte], + value : Any, + paramId : Int ) + extends ClientMessage( ClientMessage.PreparedStatementSendLongData ) { + + override def toString = "SendLongDataMessage(statementId=" + statementId + ",paramId=" + paramId + ",value.getClass=" + value.getClass.getName +")" +} \ No newline at end of file diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala index 5ff25f99..6c7c1313 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala @@ -100,32 +100,48 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { "support BLOB type" in { - val create = - """CREATE TEMPORARY TABLE POSTS ( - | id INT NOT NULL AUTO_INCREMENT, - | blob_column BLOB(20), - | primary key (id)) - """.stripMargin - - val insert = "INSERT INTO POSTS (blob_column) VALUES (?)" - val select = "SELECT * FROM POSTS" val bytes = (1 to 10).map(_.toByte).toArray - withConnection { - connection => - executeQuery(connection, create) - executePreparedStatement(connection, insert, bytes) - executePreparedStatement(connection, insert, ByteBuffer.wrap(bytes)) - executePreparedStatement(connection, insert, Unpooled.copiedBuffer(bytes)) - - val Some(rows) = executeQuery(connection, select).rows - rows foreach { - row => - row("blob_column") === bytes - } - rows.size === 3 - } + testBlob(bytes) + + } + + "support BLOB type with large values" in { + + val bytes = (1 to 2100).map(_.toByte).toArray + + testBlob(bytes) + + } + + } + def testBlob(bytes: Array[Byte]) = { + val create = + """CREATE TEMPORARY TABLE POSTS ( + | id INT NOT NULL, + | blob_column BLOB, + | primary key (id)) + """.stripMargin + + val insert = "INSERT INTO POSTS (id,blob_column) VALUES (?,?)" + val select = "SELECT id,blob_column FROM POSTS ORDER BY id" + + withConnection { + connection => + executeQuery(connection, create) + executePreparedStatement(connection, insert, 1, Some(bytes)) + executePreparedStatement(connection, insert, 2, ByteBuffer.wrap(bytes)) + executePreparedStatement(connection, insert, 3, Unpooled.wrappedBuffer(bytes)) + + val Some(rows) = executeQuery(connection, select).rows + rows(0)("id") === 1 + rows(0)("blob_column") === bytes + rows(1)("id") === 2 + rows(1)("blob_column") === bytes + rows(2)("id") === 3 + rows(2)("blob_column") === bytes + rows.size === 3 } } From 1527900db40ba6a79b022e37d0145bcc217e41de 
Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Tue, 9 Dec 2014 11:23:18 +0100 Subject: [PATCH 02/82] Remove accidentally committed file --- .../async/db/mysql/blob/LargeBlobSpec.scala | 99 ------------------- 1 file changed, 99 deletions(-) delete mode 100644 mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/blob/LargeBlobSpec.scala diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/blob/LargeBlobSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/blob/LargeBlobSpec.scala deleted file mode 100644 index c33d07db..00000000 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/blob/LargeBlobSpec.scala +++ /dev/null @@ -1,99 +0,0 @@ -package com.github.mauricio.async.db.mysql.blob - -import java.io.{BufferedOutputStream, File, FileInputStream, FileOutputStream} -import java.nio.ByteBuffer - -import com.github.mauricio.async.db.mysql.ConnectionHelper -import io.netty.buffer.Unpooled -import org.specs2.mutable.{After, Specification} -import org.specs2.specification.Scope - -class LargeBlobSpec extends Specification with ConnectionHelper { - - val create = """CREATE TEMPORARY TABLE t ( - | id BIGINT NOT NULL AUTO_INCREMENT, - | theblob LONGBLOB NOT NULL, - | PRIMARY KEY (id) - |);""".stripMargin - - val preparedInsert = "INSERT INTO t (theblob) VALUES (?)" - // val select = "SELECT theblob FROM t WHERE ID=?" - - "connection" should { - - "handle large BLOBs from InputStream" in new BlobFile { - - withConnection { - connection => - executeQuery(connection, create) - - val stream = new FileInputStream(blobFile) - executePreparedStatement(connection, preparedInsert, stream) - } - - } - - "handle BLOBs from ByteBuffer" in new BlobBuffer { - - val preparedInsert = "INSERT INTO t (theblob) VALUES (?)" - // val select = "SELECT theblob FROM t WHERE ID=?" - - withConnection { - connection => - executeQuery(connection, create) - - executePreparedStatement(connection, preparedInsert, blobBuffer) - } - - } - - "handle BLOBs from ByteBuf" in new BlobBuf { - - val preparedInsert = "INSERT INTO t (theblob) VALUES (?)" - // val select = "SELECT theblob FROM t WHERE ID=?"
- - withConnection { - connection => - executeQuery(connection, create) - - executePreparedStatement(connection, preparedInsert, blobBuf) - } - - } - - } - -} - -trait BlobFile extends After { - lazy val blobFile = { - val file = File.createTempFile("blob1", null) - val bos = new BufferedOutputStream(new FileOutputStream(file)) - 0 to ((16 * 1024 * 1024)-1) foreach { n => bos.write(n & 128) } - bos.close() - file - } - - // lazy val outFile = File.createTempFile("blob2", null) - - def after = { - blobFile.delete() - // outFile.delete() - } -} - -trait BlobBuffer extends Scope { - lazy val blobBuffer = { - val array = new Array[Byte](1024) - 0 to (1024-1) foreach { n => array(n) = (n & 128).asInstanceOf[Byte] } - ByteBuffer.wrap(array) - } -} - -trait BlobBuf extends Scope { - lazy val blobBuf = { - val array = new Array[Byte](1024) - 0 to (1024-1) foreach { n => array(n) = (n & 128).asInstanceOf[Byte] } - Unpooled.copiedBuffer(array) - } -} \ No newline at end of file From 8657281c815d20829734851d45e5fd763097ad03 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Tue, 9 Dec 2014 15:38:22 +0100 Subject: [PATCH 03/82] Constant in companion object --- .../async/db/mysql/binary/encoder/BinaryEncoder.scala | 6 +++++- .../async/db/mysql/binary/encoder/ByteArrayEncoder.scala | 2 +- .../async/db/mysql/binary/encoder/ByteBufEncoder.scala | 2 +- .../async/db/mysql/binary/encoder/ByteBufferEncoder.scala | 2 +- 4 files changed, 8 insertions(+), 4 deletions(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala index c4f87687..de774371 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala @@ -18,10 +18,14 @@ package com.github.mauricio.async.db.mysql.binary.encoder import io.netty.buffer.ByteBuf -trait BinaryEncoder { +object BinaryEncoder { val LONG_THRESHOLD = 1023 +} + +trait BinaryEncoder { + def isLong( value : Any ) : Boolean = false def encodeLong( value : Any ) : ByteBuf = throw new UnsupportedOperationException() diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala index 07d9d3e2..5d1693c3 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala @@ -23,7 +23,7 @@ import com.github.mauricio.async.db.mysql.column.ColumnTypes object ByteArrayEncoder extends BinaryEncoder { - override def isLong(value: Any): Boolean = value.asInstanceOf[Array[Byte]].length > LONG_THRESHOLD + override def isLong(value: Any): Boolean = value.asInstanceOf[Array[Byte]].length > BinaryEncoder.LONG_THRESHOLD override def encodeLong(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[Array[Byte]]) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala index 4ba79072..3b6b5d21 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala +++ 
b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala @@ -6,7 +6,7 @@ import io.netty.buffer.ByteBuf object ByteBufEncoder extends BinaryEncoder { - override def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuf].readableBytes() > LONG_THRESHOLD + override def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuf].readableBytes() > BinaryEncoder.LONG_THRESHOLD override def encodeLong(value: Any): ByteBuf = value.asInstanceOf[ByteBuf] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala index a562c84d..6a436539 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala @@ -8,7 +8,7 @@ import io.netty.buffer.{Unpooled, ByteBuf} object ByteBufferEncoder extends BinaryEncoder { - override def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuffer].remaining() > LONG_THRESHOLD + override def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuffer].remaining() > BinaryEncoder.LONG_THRESHOLD override def encodeLong(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[ByteBuffer]) From f4ae78f385556f2b4826c87f04c5b5f44459e353 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Tue, 9 Dec 2014 16:46:38 +0100 Subject: [PATCH 04/82] Refactor binary encoding --- .../db/mysql/binary/BinaryRowEncoder.scala | 74 +------------------ .../mysql/codec/MySQLConnectionHandler.scala | 16 +++- .../db/mysql/codec/MySQLOneToOneEncoder.scala | 5 +- .../PreparedStatementExecuteEncoder.scala | 44 ++++++++++- .../mysql/encoder/SendLongDataEncoder.scala | 15 +++- ...PreparedStatementExecuteEncoderSpec.scala} | 9 ++- 6 files changed, 80 insertions(+), 83 deletions(-) rename mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/{binary/BinaryRowEncoderSpec.scala => encoder/PreparedStatementExecuteEncoderSpec.scala} (80%) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala index 1d660dce..8792f08f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala @@ -16,15 +16,13 @@ package com.github.mauricio.async.db.mysql.binary -import io.netty.buffer.{Unpooled, ByteBuf} +import io.netty.buffer.ByteBuf import java.nio.ByteBuffer import java.nio.charset.Charset import com.github.mauricio.async.db.mysql.binary.encoder._ import com.github.mauricio.async.db.util._ import org.joda.time._ import scala.Some -import com.github.mauricio.async.db.mysql.column.ColumnTypes -import java.nio.ByteOrder object BinaryRowEncoder { final val log = Log.get[BinaryRowEncoder] @@ -66,75 +64,7 @@ class BinaryRowEncoder( charset : Charset ) { classOf[java.lang.Boolean] -> BooleanEncoder ) - def encode( values : Seq[Any] ) : ByteBuf = { - - val nullBitsCount = (values.size + 7) / 8 - val nullBits = new Array[Byte](nullBitsCount) - val bitMapBuffer = ByteBufferUtils.mysqlBuffer(1 + nullBitsCount) - val parameterTypesBuffer = ByteBufferUtils.mysqlBuffer(values.size * 2) - val parameterValuesBuffer = ByteBufferUtils.mysqlBuffer() - - - var index = 
0 - - while ( index < values.length ) { - val value = values(index) - if ( value == null || value == None ) { - nullBits(index / 8) = (nullBits(index / 8) | (1 << (index & 7))).asInstanceOf[Byte] - parameterTypesBuffer.writeShort(ColumnTypes.FIELD_TYPE_NULL) - } else { - value match { - case Some(v) => encode(parameterTypesBuffer, parameterValuesBuffer, v) - case _ => encode(parameterTypesBuffer, parameterValuesBuffer, value) - } - } - index += 1 - } - - bitMapBuffer.writeBytes(nullBits) - if ( values.size > 0 ) { - bitMapBuffer.writeByte(1) - } else { - bitMapBuffer.writeByte(0) - } - - Unpooled.wrappedBuffer( bitMapBuffer, parameterTypesBuffer, parameterValuesBuffer ) - } - - private def encode(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any): Unit = { - val encoder = encoderFor(value) - parameterTypesBuffer.writeShort(encoder.encodesTo) - if (!encoder.isLong(value)) - encoder.encode(value, parameterValuesBuffer) - } - - def isLong( maybeValue : Any ) : Boolean = { - if ( maybeValue == null || maybeValue == None ) { - false - } else { - val value = maybeValue match { - case Some(v) => v - case _ => maybeValue - } - val encoder = encoderFor(value) - encoder.isLong(value) - } - } - - def encodeLong( maybeValue: Any ) : ByteBuf = { - if ( maybeValue == null || maybeValue == None ) { - throw new UnsupportedOperationException("Cannot encode NULL as long value") - } else { - val value = maybeValue match { - case Some(v) => v - case _ => maybeValue - } - val encoder = encoderFor(value) - encoder.encodeLong(value) - } - } - - private def encoderFor( v : Any ) : BinaryEncoder = { + def encoderFor( v : Any ) : BinaryEncoder = { this.encoders.get(v.getClass) match { case Some(encoder) => encoder diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index 4d7d3498..b5886891 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -237,13 +237,25 @@ class MySQLConnectionHandler( this.currentParameters.clear() values.zipWithIndex.foreach { case (value, index) => - if (encoder.rowEncoder.isLong(value)) - writeAndHandleError(new SendLongDataMessage(statementId, value, index)) + if (isLong(value)) + writeAndHandleError(new SendLongDataMessage( statementId, value, index )) } writeAndHandleError(new PreparedStatementExecuteMessage( statementId, values, parameters )) } + private def isLong( maybeValue : Any ) : Boolean = { + if ( maybeValue == null || maybeValue == None ) { + false + } else { + val value = maybeValue match { + case Some(v) => v + case _ => maybeValue + } + encoder.isLong(value) + } + } + private def onPreparedStatementPrepareResponse( message : PreparedStatementPrepareResponse ) { this.currentPreparedStatementHolder = new PreparedStatementHolder( this.currentPreparedStatement.statement, message) } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala index 667b2e4e..60254338 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala @@ -36,10 +36,9 @@ class 
MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) exten import MySQLOneToOneEncoder.log - final val rowEncoder = new BinaryRowEncoder(charset) - private final val handshakeResponseEncoder = new HandshakeResponseEncoder(charset, charsetMapper) private final val queryEncoder = new QueryMessageEncoder(charset) + private final val rowEncoder = new BinaryRowEncoder(charset) private final val prepareEncoder = new PreparedStatementPrepareEncoder(charset) private final val sendLongDataEncoder = new SendLongDataEncoder(rowEncoder) private final val executeEncoder = new PreparedStatementExecuteEncoder(rowEncoder) @@ -47,6 +46,8 @@ class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) exten private var sequence = 1 + def isLong(value: Any): Boolean = rowEncoder.encoderFor(value).isLong(value) + def encode(ctx: ChannelHandlerContext, msg: Any, out: java.util.List[Object]): Unit = { msg match { diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala index e21b15f6..6dbfce5c 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala @@ -17,6 +17,7 @@ package com.github.mauricio.async.db.mysql.encoder import io.netty.buffer.{ByteBuf, Unpooled} +import com.github.mauricio.async.db.mysql.column.ColumnTypes import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder import com.github.mauricio.async.db.mysql.message.client.{PreparedStatementExecuteMessage, ClientMessage} import com.github.mauricio.async.db.util.ByteBufferUtils @@ -35,10 +36,49 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M if ( m.parameters.isEmpty ) { buffer } else { - val parametersBuffer = rowEncoder.encode(m.values) - Unpooled.wrappedBuffer(buffer, parametersBuffer) + Unpooled.wrappedBuffer(buffer, encode(m.values)) } } + private[encoder] def encode( values : Seq[Any] ) : ByteBuf = { + val nullBitsCount = (values.size + 7) / 8 + val nullBits = new Array[Byte](nullBitsCount) + val bitMapBuffer = ByteBufferUtils.mysqlBuffer(1 + nullBitsCount) + val parameterTypesBuffer = ByteBufferUtils.mysqlBuffer(values.size * 2) + val parameterValuesBuffer = ByteBufferUtils.mysqlBuffer() + + var index = 0 + + while ( index < values.length ) { + val value = values(index) + if ( value == null || value == None ) { + nullBits(index / 8) = (nullBits(index / 8) | (1 << (index & 7))).asInstanceOf[Byte] + parameterTypesBuffer.writeShort(ColumnTypes.FIELD_TYPE_NULL) + } else { + value match { + case Some(v) => encode(parameterTypesBuffer, parameterValuesBuffer, v) + case _ => encode(parameterTypesBuffer, parameterValuesBuffer, value) + } + } + index += 1 + } + + bitMapBuffer.writeBytes(nullBits) + if ( values.size > 0 ) { + bitMapBuffer.writeByte(1) + } else { + bitMapBuffer.writeByte(0) + } + + Unpooled.wrappedBuffer( bitMapBuffer, parameterTypesBuffer, parameterValuesBuffer ) + } + + private def encode(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any): Unit = { + val encoder = rowEncoder.encoderFor(value) + parameterTypesBuffer.writeShort(encoder.encodesTo) + if (!encoder.isLong(value)) + encoder.encode(value, parameterValuesBuffer) + } + } \ No newline at end of file diff --git 
a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala index b2bd2353..bfce510f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala @@ -15,7 +15,20 @@ class SendLongDataEncoder( rowEncoder : BinaryRowEncoder ) extends MessageEncode buffer.writeBytes(m.statementId) buffer.writeShort(m.paramId) - Unpooled.wrappedBuffer(buffer, rowEncoder.encodeLong(m.value)) + Unpooled.wrappedBuffer(buffer, encodeLong(m.value)) + } + + private def encodeLong( maybeValue: Any ) : ByteBuf = { + if ( maybeValue == null || maybeValue == None ) { + throw new UnsupportedOperationException("Cannot encode NULL as long value") + } else { + val value = maybeValue match { + case Some(v) => v + case _ => maybeValue + } + val encoder = rowEncoder.encoderFor(value) + encoder.encodeLong(value) + } } } \ No newline at end of file diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoderSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala similarity index 80% rename from mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoderSpec.scala rename to mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala index 78bce249..dd0203b0 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoderSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala @@ -14,14 +14,15 @@ * under the License. 
*/ -package com.github.mauricio.async.db.mysql.binary +package com.github.mauricio.async.db.mysql.encoder -import org.specs2.mutable.Specification +import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder import io.netty.util.CharsetUtil +import org.specs2.mutable.Specification -class BinaryRowEncoderSpec extends Specification { +class PreparedStatementExecuteEncoderSpec extends Specification { - val encoder = new BinaryRowEncoder(CharsetUtil.UTF_8) + val encoder = new PreparedStatementExecuteEncoder(new BinaryRowEncoder(CharsetUtil.UTF_8)) "binary row encoder" should { From 4ff4189374c255a750b1b58a5480c9b93bec6426 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Tue, 9 Dec 2014 17:23:47 +0100 Subject: [PATCH 05/82] Avoid overloading confusion --- .../encoder/PreparedStatementExecuteEncoder.scala | 10 +++++----- .../async/db/mysql/encoder/SendLongDataEncoder.scala | 4 ++-- .../encoder/PreparedStatementExecuteEncoderSpec.scala | 8 ++++---- 3 files changed, 11 insertions(+), 11 deletions(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala index 6dbfce5c..6bcc5a50 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala @@ -36,12 +36,12 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M if ( m.parameters.isEmpty ) { buffer } else { - Unpooled.wrappedBuffer(buffer, encode(m.values)) + Unpooled.wrappedBuffer(buffer, encodeValues(m.values)) } } - private[encoder] def encode( values : Seq[Any] ) : ByteBuf = { + private[encoder] def encodeValues( values : Seq[Any] ) : ByteBuf = { val nullBitsCount = (values.size + 7) / 8 val nullBits = new Array[Byte](nullBitsCount) val bitMapBuffer = ByteBufferUtils.mysqlBuffer(1 + nullBitsCount) @@ -57,8 +57,8 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M parameterTypesBuffer.writeShort(ColumnTypes.FIELD_TYPE_NULL) } else { value match { - case Some(v) => encode(parameterTypesBuffer, parameterValuesBuffer, v) - case _ => encode(parameterTypesBuffer, parameterValuesBuffer, value) + case Some(v) => encodeValue(parameterTypesBuffer, parameterValuesBuffer, v) + case _ => encodeValue(parameterTypesBuffer, parameterValuesBuffer, value) } } index += 1 @@ -74,7 +74,7 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M Unpooled.wrappedBuffer( bitMapBuffer, parameterTypesBuffer, parameterValuesBuffer ) } - private def encode(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any): Unit = { + private def encodeValue(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any): Unit = { val encoder = rowEncoder.encoderFor(value) parameterTypesBuffer.writeShort(encoder.encodesTo) if (!encoder.isLong(value)) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala index bfce510f..5cbacbca 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala @@ -15,10 +15,10 
@@ class SendLongDataEncoder( rowEncoder : BinaryRowEncoder ) extends MessageEncode buffer.writeBytes(m.statementId) buffer.writeShort(m.paramId) - Unpooled.wrappedBuffer(buffer, encodeLong(m.value)) + Unpooled.wrappedBuffer(buffer, encodeValue(m.value)) } - private def encodeLong( maybeValue: Any ) : ByteBuf = { + private def encodeValue( maybeValue: Any ) : ByteBuf = { if ( maybeValue == null || maybeValue == None ) { throw new UnsupportedOperationException("Cannot encode NULL as long value") } else { diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala index dd0203b0..d62b929e 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala @@ -27,16 +27,16 @@ class PreparedStatementExecuteEncoderSpec extends Specification { "binary row encoder" should { "encode Some(value) like value" in { - val actual = encoder.encode(List(Some(1l), Some("foo"))) - val expected = encoder.encode(List(1l, "foo")) + val actual = encoder.encodeValues(List(Some(1l), Some("foo"))) + val expected = encoder.encodeValues(List(1l, "foo")) actual mustEqual expected } "encode None as null" in { - val actual = encoder.encode(List(None)) - val expected = encoder.encode(List(null)) + val actual = encoder.encodeValues(List(None)) + val expected = encoder.encodeValues(List(null)) actual mustEqual expected } From b96b48f2b97ddb03bd43059e6e9730c4f756c458 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Tue, 9 Dec 2014 18:52:39 +0100 Subject: [PATCH 06/82] Separate protocol encoder for MySQL SendLongData --- .../mysql/codec/MySQLConnectionHandler.scala | 4 +- .../db/mysql/codec/MySQLOneToOneEncoder.scala | 78 ++++++++----------- .../db/mysql/codec/SendLongDataEncoder.scala | 59 ++++++++++++++ .../mysql/encoder/SendLongDataEncoder.scala | 34 -------- .../message/client/SendLongDataMessage.scala | 4 +- 5 files changed, 96 insertions(+), 83 deletions(-) create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala delete mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index b5886891..7e599681 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -52,6 +52,7 @@ class MySQLConnectionHandler( private final val connectionPromise = Promise[MySQLConnectionHandler] private final val decoder = new MySQLFrameDecoder(configuration.charset, connectionId) private final val encoder = new MySQLOneToOneEncoder(configuration.charset, charsetMapper) + private final val sendLongDataEncoder = new SendLongDataEncoder(configuration.charset) private final val currentParameters = new ArrayBuffer[ColumnDefinitionMessage]() private final val currentColumns = new ArrayBuffer[ColumnDefinitionMessage]() private final val parsedStatements = new HashMap[String,PreparedStatementHolder]() @@ -70,6 +71,7 
@@ class MySQLConnectionHandler( channel.pipeline.addLast( decoder, encoder, + sendLongDataEncoder, MySQLConnectionHandler.this) } @@ -252,7 +254,7 @@ class MySQLConnectionHandler( case Some(v) => v case _ => maybeValue } - encoder.isLong(value) + sendLongDataEncoder.isLong(value) } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala index 60254338..f666cbc8 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLOneToOneEncoder.scala @@ -32,7 +32,8 @@ object MySQLOneToOneEncoder { val log = Log.get[MySQLOneToOneEncoder] } -class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) extends MessageToMessageEncoder[Any] { +class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) + extends MessageToMessageEncoder[ClientMessage](classOf[ClientMessage]) { import MySQLOneToOneEncoder.log @@ -40,61 +41,48 @@ class MySQLOneToOneEncoder(charset: Charset, charsetMapper: CharsetMapper) exten private final val queryEncoder = new QueryMessageEncoder(charset) private final val rowEncoder = new BinaryRowEncoder(charset) private final val prepareEncoder = new PreparedStatementPrepareEncoder(charset) - private final val sendLongDataEncoder = new SendLongDataEncoder(rowEncoder) private final val executeEncoder = new PreparedStatementExecuteEncoder(rowEncoder) private final val authenticationSwitchEncoder = new AuthenticationSwitchResponseEncoder(charset) private var sequence = 1 - def isLong(value: Any): Boolean = rowEncoder.encoderFor(value).isLong(value) - - def encode(ctx: ChannelHandlerContext, msg: Any, out: java.util.List[Object]): Unit = { - - msg match { - case message: ClientMessage => { - val encoder = (message.kind: @switch) match { - case ClientMessage.ClientProtocolVersion => this.handshakeResponseEncoder - case ClientMessage.Quit => { - sequence = 0 - QuitMessageEncoder - } - case ClientMessage.Query => { - sequence = 0 - this.queryEncoder - } - case ClientMessage.PreparedStatementExecute => { - sequence = 0 - this.executeEncoder - } - case ClientMessage.PreparedStatementPrepare => { - sequence = 0 - this.prepareEncoder - } - case ClientMessage.PreparedStatementSendLongData => { - sequence = 0 - this.sendLongDataEncoder - } - case ClientMessage.AuthSwitchResponse => { - sequence += 1 - this.authenticationSwitchEncoder - } - case _ => throw new EncoderNotAvailableException(message) - } - - val result = encoder.encode(message) + def encode(ctx: ChannelHandlerContext, message: ClientMessage, out: java.util.List[Object]): Unit = { + val encoder = (message.kind: @switch) match { + case ClientMessage.ClientProtocolVersion => this.handshakeResponseEncoder + case ClientMessage.Quit => { + sequence = 0 + QuitMessageEncoder + } + case ClientMessage.Query => { + sequence = 0 + this.queryEncoder + } + case ClientMessage.PreparedStatementExecute => { + sequence = 0 + this.executeEncoder + } + case ClientMessage.PreparedStatementPrepare => { + sequence = 0 + this.prepareEncoder + } + case ClientMessage.AuthSwitchResponse => { + sequence += 1 + this.authenticationSwitchEncoder + } + case _ => throw new EncoderNotAvailableException(message) + } - ByteBufferUtils.writePacketLength(result, sequence) + val result: ByteBuf = encoder.encode(message) - sequence += 1 + 
ByteBufferUtils.writePacketLength(result, sequence) - if ( log.isTraceEnabled ) { - log.trace(s"Writing message ${message.getClass.getName} - \n${BufferDumper.dumpAsHex(result)}") - } + sequence += 1 - out.add(result) - } + if ( log.isTraceEnabled ) { + log.trace(s"Writing message ${message.getClass.getName} - \n${BufferDumper.dumpAsHex(result)}") } + out.add(result) } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala new file mode 100644 index 00000000..b1c836cc --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala @@ -0,0 +1,59 @@ +package com.github.mauricio.async.db.mysql.codec + +import java.nio.charset.Charset + +import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder +import com.github.mauricio.async.db.mysql.message.client.{ClientMessage, SendLongDataMessage} +import com.github.mauricio.async.db.util.{Log, ByteBufferUtils} +import io.netty.buffer.{Unpooled, ByteBuf} +import io.netty.channel.ChannelHandlerContext +import io.netty.handler.codec.MessageToMessageEncoder + +object SendLongDataEncoder { + val log = Log.get[SendLongDataEncoder] +} + +class SendLongDataEncoder(charset: Charset) + extends MessageToMessageEncoder[SendLongDataMessage](classOf[SendLongDataMessage]) { + + import com.github.mauricio.async.db.mysql.codec.SendLongDataEncoder.log + + private final val rowEncoder = new BinaryRowEncoder(charset) + + def isLong(value: Any): Boolean = rowEncoder.encoderFor(value).isLong(value) + + def encode(ctx: ChannelHandlerContext, message: SendLongDataMessage, out: java.util.List[Object]): Unit = { + val result: ByteBuf = encode(message) + + ByteBufferUtils.writePacketLength(result, 0) + + if ( log.isTraceEnabled ) { + log.trace(s"Writing message ${message.toString}") + } + + out.add(result) + } + + private def encode(message: SendLongDataMessage): ByteBuf = { + val buffer = ByteBufferUtils.packetBuffer() + buffer.writeByte(ClientMessage.PreparedStatementSendLongData) + buffer.writeBytes(message.statementId) + buffer.writeShort(message.paramId) + + Unpooled.wrappedBuffer(buffer, encodeValue(message.value)) + } + + private def encodeValue(maybeValue: Any) : ByteBuf = { + if ( maybeValue == null || maybeValue == None ) { + throw new UnsupportedOperationException("Cannot encode NULL as long value") + } else { + val value = maybeValue match { + case Some(v) => v + case _ => maybeValue + } + val encoder = rowEncoder.encoderFor(value) + encoder.encodeLong(value) + } + } + +} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala deleted file mode 100644 index 5cbacbca..00000000 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/SendLongDataEncoder.scala +++ /dev/null @@ -1,34 +0,0 @@ -package com.github.mauricio.async.db.mysql.encoder - -import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder -import com.github.mauricio.async.db.mysql.message.client.{ClientMessage, SendLongDataMessage} -import com.github.mauricio.async.db.util.ByteBufferUtils -import io.netty.buffer.{Unpooled, ByteBuf} - -class SendLongDataEncoder( rowEncoder : BinaryRowEncoder ) extends MessageEncoder { - - def encode(message: ClientMessage): ByteBuf = { - val m = message.asInstanceOf[SendLongDataMessage] - - val 
buffer = ByteBufferUtils.packetBuffer() - buffer.writeByte(m.kind) - buffer.writeBytes(m.statementId) - buffer.writeShort(m.paramId) - - Unpooled.wrappedBuffer(buffer, encodeValue(m.value)) - } - - private def encodeValue( maybeValue: Any ) : ByteBuf = { - if ( maybeValue == null || maybeValue == None ) { - throw new UnsupportedOperationException("Cannot encode NULL as long value") - } else { - val value = maybeValue match { - case Some(v) => v - case _ => maybeValue - } - val encoder = rowEncoder.encoderFor(value) - encoder.encodeLong(value) - } - } - -} \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala index cf213614..af09980a 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala @@ -3,8 +3,6 @@ package com.github.mauricio.async.db.mysql.message.client case class SendLongDataMessage ( statementId : Array[Byte], value : Any, - paramId : Int ) - extends ClientMessage( ClientMessage.PreparedStatementSendLongData ) { - + paramId : Int ) { override def toString = "SendLongDataMessage(statementId=" + statementId + ",paramId=" + paramId + ",value.getClass=" + value.getClass.getName +")" } \ No newline at end of file From c2cffc55aa108412f45aa36f5518d4d7dfffb905 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 10 Dec 2014 10:30:54 +0100 Subject: [PATCH 07/82] Remove unnecessary null / None check --- .../async/db/mysql/codec/SendLongDataEncoder.scala | 14 +++++--------- 1 file changed, 5 insertions(+), 9 deletions(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala index b1c836cc..61d5d595 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala @@ -44,16 +44,12 @@ class SendLongDataEncoder(charset: Charset) } private def encodeValue(maybeValue: Any) : ByteBuf = { - if ( maybeValue == null || maybeValue == None ) { - throw new UnsupportedOperationException("Cannot encode NULL as long value") - } else { - val value = maybeValue match { - case Some(v) => v - case _ => maybeValue - } - val encoder = rowEncoder.encoderFor(value) - encoder.encodeLong(value) + val value = maybeValue match { + case Some(v) => v + case _ => maybeValue } + val encoder = rowEncoder.encoderFor(value) + encoder.encodeLong(value) } } From 58532a263b6333452d7b66b6695eb9ce031a245b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 10 Dec 2014 11:41:02 +0100 Subject: [PATCH 08/82] Blob encoder --- .../mysql/binary/encoder/BinaryEncoder.scala | 10 ------ .../binary/encoder/ByteArrayEncoder.scala | 8 ++--- .../mysql/binary/encoder/ByteBufEncoder.scala | 4 --- .../binary/encoder/ByteBufferEncoder.scala | 6 +--- .../db/mysql/blob/encoder/BlobEncoder.scala | 31 +++++++++++++++++++ .../mysql/blob/encoder/ByteArrayEncoder.scala | 11 +++++++ .../mysql/blob/encoder/ByteBufEncoder.scala | 11 +++++++ .../blob/encoder/ByteBufferEncoder.scala | 13 ++++++++ .../mysql/codec/MySQLConnectionHandler.scala | 7 +++-- 
.../db/mysql/codec/SendLongDataEncoder.scala | 13 +++----- .../PreparedStatementExecuteEncoder.scala | 12 +++---- .../PreparedStatementExecuteMessage.scala | 1 + .../PreparedStatementExecuteEncoderSpec.scala | 8 ++--- 13 files changed, 89 insertions(+), 46 deletions(-) create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala index de774371..bb504ce6 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/BinaryEncoder.scala @@ -18,18 +18,8 @@ package com.github.mauricio.async.db.mysql.binary.encoder import io.netty.buffer.ByteBuf -object BinaryEncoder { - - val LONG_THRESHOLD = 1023 - -} - trait BinaryEncoder { - def isLong( value : Any ) : Boolean = false - - def encodeLong( value : Any ) : ByteBuf = throw new UnsupportedOperationException() - def encode( value : Any, buffer : ByteBuf ) def encodesTo : Int diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala index 5d1693c3..3a6c7ce6 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala @@ -17,16 +17,12 @@ package com.github.mauricio.async.db.mysql.binary.encoder -import io.netty.buffer.{Unpooled, ByteBuf} -import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper import com.github.mauricio.async.db.mysql.column.ColumnTypes +import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper +import io.netty.buffer.ByteBuf object ByteArrayEncoder extends BinaryEncoder { - override def isLong(value: Any): Boolean = value.asInstanceOf[Array[Byte]].length > BinaryEncoder.LONG_THRESHOLD - - override def encodeLong(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[Array[Byte]]) - def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[Array[Byte]] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala index 3b6b5d21..e7156943 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala @@ -6,10 +6,6 @@ import io.netty.buffer.ByteBuf object ByteBufEncoder extends BinaryEncoder { - override def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuf].readableBytes() > BinaryEncoder.LONG_THRESHOLD - - override def encodeLong(value: Any): ByteBuf = value.asInstanceOf[ByteBuf] - def encode(value: Any, buffer: ByteBuf) { val bytes = 
value.asInstanceOf[ByteBuf] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala index 6a436539..06012f6b 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala @@ -4,14 +4,10 @@ import java.nio.ByteBuffer import com.github.mauricio.async.db.mysql.column.ColumnTypes import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper -import io.netty.buffer.{Unpooled, ByteBuf} +import io.netty.buffer.ByteBuf object ByteBufferEncoder extends BinaryEncoder { - override def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuffer].remaining() > BinaryEncoder.LONG_THRESHOLD - - override def encodeLong(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[ByteBuffer]) - def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[ByteBuffer] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala new file mode 100644 index 00000000..2475c0e9 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala @@ -0,0 +1,31 @@ +package com.github.mauricio.async.db.mysql.blob.encoder + +import java.nio.ByteBuffer + +import io.netty.buffer.ByteBuf + +object BlobEncoder { + + val LONG_THRESHOLD = 1023 + + def encoderFor( v : Any ) : Option[BlobEncoder] = { + + v match { + case v : Array[Byte] => Some(ByteArrayEncoder) + case v : ByteBuffer => Some(ByteBufferEncoder) + case v : ByteBuf => Some(ByteBufEncoder) + + case _ => None + } + + } + +} + +trait BlobEncoder { + + def isLong(value: Any): Boolean + + def encode(value: Any): ByteBuf + +} \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala new file mode 100644 index 00000000..c20b8c35 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala @@ -0,0 +1,11 @@ +package com.github.mauricio.async.db.mysql.blob.encoder + +import io.netty.buffer.{ByteBuf, Unpooled} + +object ByteArrayEncoder extends BlobEncoder { + + def isLong(value: Any): Boolean = value.asInstanceOf[Array[Byte]].length > BlobEncoder.LONG_THRESHOLD + + def encode(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[Array[Byte]]) + +} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala new file mode 100644 index 00000000..086c93e2 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala @@ -0,0 +1,11 @@ +package com.github.mauricio.async.db.mysql.blob.encoder + +import io.netty.buffer.ByteBuf + +object ByteBufEncoder extends BlobEncoder { + + def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuf].readableBytes() > BlobEncoder.LONG_THRESHOLD + + def encode(value: Any): ByteBuf = value.asInstanceOf[ByteBuf] + +} diff --git 
a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala new file mode 100644 index 00000000..566104b8 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala @@ -0,0 +1,13 @@ +package com.github.mauricio.async.db.mysql.blob.encoder + +import java.nio.ByteBuffer + +import io.netty.buffer.{ByteBuf, Unpooled} + +object ByteBufferEncoder extends BlobEncoder { + + def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuffer].remaining() > BlobEncoder.LONG_THRESHOLD + + def encode(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[ByteBuffer]) + +} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index 7e599681..f25e326c 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -52,7 +52,7 @@ class MySQLConnectionHandler( private final val connectionPromise = Promise[MySQLConnectionHandler] private final val decoder = new MySQLFrameDecoder(configuration.charset, connectionId) private final val encoder = new MySQLOneToOneEncoder(configuration.charset, charsetMapper) - private final val sendLongDataEncoder = new SendLongDataEncoder(configuration.charset) + private final val sendLongDataEncoder = new SendLongDataEncoder() private final val currentParameters = new ArrayBuffer[ColumnDefinitionMessage]() private final val currentColumns = new ArrayBuffer[ColumnDefinitionMessage]() private final val parsedStatements = new HashMap[String,PreparedStatementHolder]() @@ -238,12 +238,15 @@ class MySQLConnectionHandler( this.currentColumns.clear() this.currentParameters.clear() + var nonBlobIndices: Set[Int] = Set() values.zipWithIndex.foreach { case (value, index) => if (isLong(value)) writeAndHandleError(new SendLongDataMessage( statementId, value, index )) + else + nonBlobIndices += index } - writeAndHandleError(new PreparedStatementExecuteMessage( statementId, values, parameters )) + writeAndHandleError(new PreparedStatementExecuteMessage( statementId, values, nonBlobIndices, parameters)) } private def isLong( maybeValue : Any ) : Boolean = { diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala index 61d5d595..294917fe 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala @@ -1,8 +1,6 @@ package com.github.mauricio.async.db.mysql.codec -import java.nio.charset.Charset - -import com.github.mauricio.async.db.mysql.binary.BinaryRowEncoder +import com.github.mauricio.async.db.mysql.blob.encoder.BlobEncoder import com.github.mauricio.async.db.mysql.message.client.{ClientMessage, SendLongDataMessage} import com.github.mauricio.async.db.util.{Log, ByteBufferUtils} import io.netty.buffer.{Unpooled, ByteBuf} @@ -13,14 +11,12 @@ object SendLongDataEncoder { val log = Log.get[SendLongDataEncoder] } -class SendLongDataEncoder(charset: Charset) +class SendLongDataEncoder extends 
MessageToMessageEncoder[SendLongDataMessage](classOf[SendLongDataMessage]) { import com.github.mauricio.async.db.mysql.codec.SendLongDataEncoder.log - private final val rowEncoder = new BinaryRowEncoder(charset) - - def isLong(value: Any): Boolean = rowEncoder.encoderFor(value).isLong(value) + def isLong(value: Any): Boolean = BlobEncoder.encoderFor(value).map(_.isLong(value)).getOrElse(false) def encode(ctx: ChannelHandlerContext, message: SendLongDataMessage, out: java.util.List[Object]): Unit = { val result: ByteBuf = encode(message) @@ -48,8 +44,7 @@ class SendLongDataEncoder(charset: Charset) case Some(v) => v case _ => maybeValue } - val encoder = rowEncoder.encoderFor(value) - encoder.encodeLong(value) + BlobEncoder.encoderFor(value).get.encode(value) } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala index 6bcc5a50..c52658c9 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoder.scala @@ -36,12 +36,12 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M if ( m.parameters.isEmpty ) { buffer } else { - Unpooled.wrappedBuffer(buffer, encodeValues(m.values)) + Unpooled.wrappedBuffer(buffer, encodeValues(m.values, m.valuesToInclude)) } } - private[encoder] def encodeValues( values : Seq[Any] ) : ByteBuf = { + private[encoder] def encodeValues( values : Seq[Any], valuesToInclude: Set[Int] ) : ByteBuf = { val nullBitsCount = (values.size + 7) / 8 val nullBits = new Array[Byte](nullBitsCount) val bitMapBuffer = ByteBufferUtils.mysqlBuffer(1 + nullBitsCount) @@ -57,8 +57,8 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M parameterTypesBuffer.writeShort(ColumnTypes.FIELD_TYPE_NULL) } else { value match { - case Some(v) => encodeValue(parameterTypesBuffer, parameterValuesBuffer, v) - case _ => encodeValue(parameterTypesBuffer, parameterValuesBuffer, value) + case Some(v) => encodeValue(parameterTypesBuffer, parameterValuesBuffer, v, valuesToInclude(index)) + case _ => encodeValue(parameterTypesBuffer, parameterValuesBuffer, value, valuesToInclude(index)) } } index += 1 @@ -74,10 +74,10 @@ class PreparedStatementExecuteEncoder( rowEncoder : BinaryRowEncoder ) extends M Unpooled.wrappedBuffer( bitMapBuffer, parameterTypesBuffer, parameterValuesBuffer ) } - private def encodeValue(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any): Unit = { + private def encodeValue(parameterTypesBuffer: ByteBuf, parameterValuesBuffer: ByteBuf, value: Any, includeValue: Boolean) : Unit = { val encoder = rowEncoder.encoderFor(value) parameterTypesBuffer.writeShort(encoder.encodesTo) - if (!encoder.isLong(value)) + if (includeValue) encoder.encode(value, parameterValuesBuffer) } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala index 805ef51e..f87ddede 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala +++ 
b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/PreparedStatementExecuteMessage.scala @@ -21,5 +21,6 @@ import com.github.mauricio.async.db.mysql.message.server.ColumnDefinitionMessage case class PreparedStatementExecuteMessage ( statementId : Array[Byte], values : Seq[Any], + valuesToInclude : Set[Int], parameters : Seq[ColumnDefinitionMessage] ) extends ClientMessage( ClientMessage.PreparedStatementExecute ) \ No newline at end of file diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala index d62b929e..427dde17 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/encoder/PreparedStatementExecuteEncoderSpec.scala @@ -27,16 +27,16 @@ class PreparedStatementExecuteEncoderSpec extends Specification { "binary row encoder" should { "encode Some(value) like value" in { - val actual = encoder.encodeValues(List(Some(1l), Some("foo"))) - val expected = encoder.encodeValues(List(1l, "foo")) + val actual = encoder.encodeValues(List(Some(1l), Some("foo")), Set(0, 1)) + val expected = encoder.encodeValues(List(1l, "foo"), Set(0, 1)) actual mustEqual expected } "encode None as null" in { - val actual = encoder.encodeValues(List(None)) - val expected = encoder.encodeValues(List(null)) + val actual = encoder.encodeValues(List(None), Set(0)) + val expected = encoder.encodeValues(List(null), Set(0)) actual mustEqual expected } From a737d934cecfb00c8567d4aaf78e8b17132d5d4c Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 10 Dec 2014 12:00:09 +0100 Subject: [PATCH 09/82] Revert insignificant changes --- .../async/db/mysql/binary/encoder/ByteArrayEncoder.scala | 5 ++--- .../async/db/mysql/binary/encoder/ByteBufEncoder.scala | 1 - .../async/db/mysql/binary/encoder/ByteBufferEncoder.scala | 1 - 3 files changed, 2 insertions(+), 5 deletions(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala index 3a6c7ce6..260f22a4 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteArrayEncoder.scala @@ -17,12 +17,11 @@ package com.github.mauricio.async.db.mysql.binary.encoder -import com.github.mauricio.async.db.mysql.column.ColumnTypes -import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper import io.netty.buffer.ByteBuf +import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper +import com.github.mauricio.async.db.mysql.column.ColumnTypes object ByteArrayEncoder extends BinaryEncoder { - def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[Array[Byte]] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala index e7156943..62b62560 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala +++ 
b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufEncoder.scala @@ -5,7 +5,6 @@ import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper import io.netty.buffer.ByteBuf object ByteBufEncoder extends BinaryEncoder { - def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[ByteBuf] diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala index 06012f6b..329709ad 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/ByteBufferEncoder.scala @@ -7,7 +7,6 @@ import com.github.mauricio.async.db.util.ChannelWrapper.bufferToWrapper import io.netty.buffer.ByteBuf object ByteBufferEncoder extends BinaryEncoder { - def encode(value: Any, buffer: ByteBuf) { val bytes = value.asInstanceOf[ByteBuffer] From 5f31084ca64f56d2c1af46de73ecdf1270e91298 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 10 Dec 2014 13:40:54 +0100 Subject: [PATCH 10/82] Document max size of a BLOB for MySQL --- mysql-async/README.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/mysql-async/README.md b/mysql-async/README.md index adff299e..b83613ed 100644 --- a/mysql-async/README.md +++ b/mysql-async/README.md @@ -92,5 +92,7 @@ Array[Byte] | blob java.nio.ByteBuffer | blob io.netty.buffer.ByteBuf | blob +The maximum size of a blob is 2^24-8 bytes (almost 16 MiB). + You don't have to match exact values when sending parameters for your prepared statements, MySQL is usually smart enough to understand that if you have sent an Int to `smallint` column it has to truncate the 4 bytes into 2. From 5337e8a9fd02e27fe5e1643eb8190b56e6623f76 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 10 Dec 2014 14:06:12 +0100 Subject: [PATCH 11/82] Use correct size --- mysql-async/README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mysql-async/README.md b/mysql-async/README.md index b83613ed..3a152286 100644 --- a/mysql-async/README.md +++ b/mysql-async/README.md @@ -92,7 +92,7 @@ Array[Byte] | blob java.nio.ByteBuffer | blob io.netty.buffer.ByteBuf | blob -The maximum size of a blob is 2^24-8 bytes (almost 16 MiB). +The maximum size of a blob is 2^24-9 bytes (almost 16 MiB). You don't have to match exact values when sending parameters for your prepared statements, MySQL is usually smart enough to understand that if you have sent an Int to `smallint` column it has to truncate the 4 bytes into 2. 
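For reference, 2^24-9 works out to 16,777,207 bytes, just under the 16 MiB MySQL packet ceiling the README alludes to. A minimal usage sketch of the three blob parameter types listed in the table above, assuming this project's Connection.sendPreparedStatement(query, values) API and reusing the POSTS (id, blob_column) table from the driver's own BinaryColumnsSpec; the inserts are sequenced because a single connection handles one statement at a time:

import java.nio.ByteBuffer

import scala.concurrent.ExecutionContext.Implicits.global
import scala.concurrent.Future

import com.github.mauricio.async.db.{Connection, QueryResult}
import io.netty.buffer.Unpooled

// Illustrative only: insert the same payload through each supported blob parameter type.
def insertBlobs(connection: Connection, bytes: Array[Byte]): Future[QueryResult] = {
  val insert = "INSERT INTO POSTS (id,blob_column) VALUES (?,?)"
  for {
    _ <- connection.sendPreparedStatement(insert, List(1, bytes))                         // Array[Byte]
    _ <- connection.sendPreparedStatement(insert, List(2, ByteBuffer.wrap(bytes)))        // java.nio.ByteBuffer
    r <- connection.sendPreparedStatement(insert, List(3, Unpooled.wrappedBuffer(bytes))) // io.netty.buffer.ByteBuf
  } yield r
}
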
From 6c715874de371c3695e672cf06d5c9a277ac0261 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 10 Dec 2014 14:56:40 +0100 Subject: [PATCH 12/82] Remove unnecessary toString --- .../async/db/mysql/message/client/SendLongDataMessage.scala | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala index af09980a..6b1ef7e2 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala @@ -3,6 +3,4 @@ package com.github.mauricio.async.db.mysql.message.client case class SendLongDataMessage ( statementId : Array[Byte], value : Any, - paramId : Int ) { - override def toString = "SendLongDataMessage(statementId=" + statementId + ",paramId=" + paramId + ",value.getClass=" + value.getClass.getName +")" -} \ No newline at end of file + paramId : Int ) \ No newline at end of file From 2908af5453cc9f437bca5fe2df345d1380532c48 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 10 Dec 2014 15:20:05 +0100 Subject: [PATCH 13/82] Refactor sending long data --- .../db/mysql/blob/encoder/BlobEncoder.scala | 31 ----------- .../mysql/blob/encoder/ByteArrayEncoder.scala | 11 ---- .../mysql/blob/encoder/ByteBufEncoder.scala | 11 ---- .../blob/encoder/ByteBufferEncoder.scala | 13 ----- .../mysql/codec/MySQLConnectionHandler.scala | 55 +++++++++++++------ .../db/mysql/codec/SendLongDataEncoder.scala | 38 +++++-------- .../message/client/SendLongDataMessage.scala | 4 +- 7 files changed, 55 insertions(+), 108 deletions(-) delete mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala delete mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala delete mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala delete mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala deleted file mode 100644 index 2475c0e9..00000000 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/BlobEncoder.scala +++ /dev/null @@ -1,31 +0,0 @@ -package com.github.mauricio.async.db.mysql.blob.encoder - -import java.nio.ByteBuffer - -import io.netty.buffer.ByteBuf - -object BlobEncoder { - - val LONG_THRESHOLD = 1023 - - def encoderFor( v : Any ) : Option[BlobEncoder] = { - - v match { - case v : Array[Byte] => Some(ByteArrayEncoder) - case v : ByteBuffer => Some(ByteBufferEncoder) - case v : ByteBuf => Some(ByteBufEncoder) - - case _ => None - } - - } - -} - -trait BlobEncoder { - - def isLong(value: Any): Boolean - - def encode(value: Any): ByteBuf - -} \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala deleted file mode 100644 index c20b8c35..00000000 --- 
a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteArrayEncoder.scala +++ /dev/null @@ -1,11 +0,0 @@ -package com.github.mauricio.async.db.mysql.blob.encoder - -import io.netty.buffer.{ByteBuf, Unpooled} - -object ByteArrayEncoder extends BlobEncoder { - - def isLong(value: Any): Boolean = value.asInstanceOf[Array[Byte]].length > BlobEncoder.LONG_THRESHOLD - - def encode(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[Array[Byte]]) - -} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala deleted file mode 100644 index 086c93e2..00000000 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufEncoder.scala +++ /dev/null @@ -1,11 +0,0 @@ -package com.github.mauricio.async.db.mysql.blob.encoder - -import io.netty.buffer.ByteBuf - -object ByteBufEncoder extends BlobEncoder { - - def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuf].readableBytes() > BlobEncoder.LONG_THRESHOLD - - def encode(value: Any): ByteBuf = value.asInstanceOf[ByteBuf] - -} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala deleted file mode 100644 index 566104b8..00000000 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/blob/encoder/ByteBufferEncoder.scala +++ /dev/null @@ -1,13 +0,0 @@ -package com.github.mauricio.async.db.mysql.blob.encoder - -import java.nio.ByteBuffer - -import io.netty.buffer.{ByteBuf, Unpooled} - -object ByteBufferEncoder extends BlobEncoder { - - def isLong(value: Any): Boolean = value.asInstanceOf[ByteBuffer].remaining() > BlobEncoder.LONG_THRESHOLD - - def encode(value: Any): ByteBuf = Unpooled.wrappedBuffer(value.asInstanceOf[ByteBuffer]) - -} diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index f25e326c..aec8d24c 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -16,6 +16,8 @@ package com.github.mauricio.async.db.mysql.codec +import java.nio.ByteBuffer + import com.github.mauricio.async.db.Configuration import com.github.mauricio.async.db.general.MutableResultSet import com.github.mauricio.async.db.mysql.binary.BinaryRowDecoder @@ -25,7 +27,7 @@ import com.github.mauricio.async.db.mysql.util.CharsetMapper import com.github.mauricio.async.db.util.ChannelFutureTransformer.toFuture import com.github.mauricio.async.db.util._ import io.netty.bootstrap.Bootstrap -import io.netty.buffer.ByteBufAllocator +import io.netty.buffer.{Unpooled, ByteBuf, ByteBufAllocator} import io.netty.channel._ import io.netty.channel.socket.nio.NioSocketChannel import io.netty.handler.codec.CodecException @@ -238,29 +240,48 @@ class MySQLConnectionHandler( this.currentColumns.clear() this.currentParameters.clear() - var nonBlobIndices: Set[Int] = Set() - values.zipWithIndex.foreach { case (value, index) => - if (isLong(value)) - writeAndHandleError(new SendLongDataMessage( statementId, value, index )) - else - nonBlobIndices += index + var nonLongIndices: Set[Int] = Set() + 
values.zipWithIndex.foreach { + case (Some(value), index) if isLong(value) => + sendLongParameter(statementId, index, value) + + case (value, index) if isLong(value) => + sendLongParameter(statementId, index, value) + + case (_, index) => + nonLongIndices += index } - writeAndHandleError(new PreparedStatementExecuteMessage( statementId, values, nonBlobIndices, parameters)) + writeAndHandleError(new PreparedStatementExecuteMessage(statementId, values, nonLongIndices, parameters)) } - private def isLong( maybeValue : Any ) : Boolean = { - if ( maybeValue == null || maybeValue == None ) { - false - } else { - val value = maybeValue match { - case Some(v) => v - case _ => maybeValue - } - sendLongDataEncoder.isLong(value) + private def isLong(value: Any): Boolean = { + value match { + case v : Array[Byte] => v.length > SendLongDataEncoder.LONG_THRESHOLD + case v : ByteBuffer => v.remaining() > SendLongDataEncoder.LONG_THRESHOLD + case v : ByteBuf => v.readableBytes() > SendLongDataEncoder.LONG_THRESHOLD + + case _ => false } } + private def sendLongParameter(statementId: Array[Byte], index: Int, longValue: Any) { + longValue match { + case v : Array[Byte] => + sendBuffer(Unpooled.wrappedBuffer(v), statementId, index) + + case v : ByteBuffer => + sendBuffer(Unpooled.wrappedBuffer(v), statementId, index) + + case v : ByteBuf => + sendBuffer(v, statementId, index) + } + } + + private def sendBuffer(buffer: ByteBuf, statementId: Array[Byte], paramId: Int) { + writeAndHandleError(new SendLongDataMessage(statementId, buffer, paramId)) + } + private def onPreparedStatementPrepareResponse( message : PreparedStatementPrepareResponse ) { this.currentPreparedStatementHolder = new PreparedStatementHolder( this.currentPreparedStatement.statement, message) } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala index 294917fe..ce51140f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala @@ -1,14 +1,15 @@ package com.github.mauricio.async.db.mysql.codec -import com.github.mauricio.async.db.mysql.blob.encoder.BlobEncoder import com.github.mauricio.async.db.mysql.message.client.{ClientMessage, SendLongDataMessage} -import com.github.mauricio.async.db.util.{Log, ByteBufferUtils} -import io.netty.buffer.{Unpooled, ByteBuf} +import com.github.mauricio.async.db.util.{ByteBufferUtils, Log} +import io.netty.buffer.Unpooled import io.netty.channel.ChannelHandlerContext import io.netty.handler.codec.MessageToMessageEncoder object SendLongDataEncoder { val log = Log.get[SendLongDataEncoder] + + val LONG_THRESHOLD = 1023 } class SendLongDataEncoder @@ -16,35 +17,24 @@ class SendLongDataEncoder import com.github.mauricio.async.db.mysql.codec.SendLongDataEncoder.log - def isLong(value: Any): Boolean = BlobEncoder.encoderFor(value).map(_.isLong(value)).getOrElse(false) - def encode(ctx: ChannelHandlerContext, message: SendLongDataMessage, out: java.util.List[Object]): Unit = { - val result: ByteBuf = encode(message) - - ByteBufferUtils.writePacketLength(result, 0) - if ( log.isTraceEnabled ) { log.trace(s"Writing message ${message.toString}") } - out.add(result) - } + val sequence = 0 - private def encode(message: SendLongDataMessage): ByteBuf = { - val buffer = ByteBufferUtils.packetBuffer() - 
buffer.writeByte(ClientMessage.PreparedStatementSendLongData) - buffer.writeBytes(message.statementId) - buffer.writeShort(message.paramId) + val headerBuffer = ByteBufferUtils.mysqlBuffer(3 + 1 + 1 + 4 + 2) + ByteBufferUtils.write3BytesInt(headerBuffer, 1 + 4 + 2 + message.value.readableBytes()) + headerBuffer.writeByte(sequence) - Unpooled.wrappedBuffer(buffer, encodeValue(message.value)) - } + headerBuffer.writeByte(ClientMessage.PreparedStatementSendLongData) + headerBuffer.writeBytes(message.statementId) + headerBuffer.writeShort(message.paramId) - private def encodeValue(maybeValue: Any) : ByteBuf = { - val value = maybeValue match { - case Some(v) => v - case _ => maybeValue - } - BlobEncoder.encoderFor(value).get.encode(value) + val result = Unpooled.wrappedBuffer(headerBuffer, message.value) + + out.add(result) } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala index 6b1ef7e2..db66db1f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/message/client/SendLongDataMessage.scala @@ -1,6 +1,8 @@ package com.github.mauricio.async.db.mysql.message.client +import io.netty.buffer.ByteBuf + case class SendLongDataMessage ( statementId : Array[Byte], - value : Any, + value : ByteBuf, paramId : Int ) \ No newline at end of file From 3da528049061a52d06cbe7b6a91e078480000a64 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Thu, 11 Dec 2014 09:55:05 +0100 Subject: [PATCH 14/82] Set BLOB via ScatteringByteChannel (first attempt) --- mysql-async/README.md | 4 +- .../db/mysql/binary/BinaryRowEncoder.scala | 3 + .../binary/encoder/DummyBlobEncoder.scala | 14 ++++ .../mysql/codec/MySQLConnectionHandler.scala | 20 +++++ .../db/mysql/codec/SendLongDataEncoder.scala | 4 + .../async/db/mysql/BinaryColumnsSpec.scala | 73 ++++++++++++++----- 6 files changed, 99 insertions(+), 19 deletions(-) create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala diff --git a/mysql-async/README.md b/mysql-async/README.md index 3a152286..b979fc2f 100644 --- a/mysql-async/README.md +++ b/mysql-async/README.md @@ -91,8 +91,10 @@ String | string Array[Byte] | blob java.nio.ByteBuffer | blob io.netty.buffer.ByteBuf | blob +java.nio.channels.ScatteringByteChannel | blob -The maximum size of a blob is 2^24-9 bytes (almost 16 MiB). +The maximum size of a blob you set via Array[Byte], ByteBuffer or ByteBuf is 2^24-9 bytes (almost 16 MiB). +Blobs set via a Channel can be larger, the Channel is read until EOF and then closed. You don't have to match exact values when sending parameters for your prepared statements, MySQL is usually smart enough to understand that if you have sent an Int to `smallint` column it has to truncate the 4 bytes into 2. 
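A sketch of what the new README entry allows, assuming the same Connection API and POSTS table as the earlier example. A FileChannel obtained from a FileInputStream is a ScatteringByteChannel, and the driver reads it until EOF and then closes it, so the payload is not limited to a single packet:

import java.io.FileInputStream

import scala.concurrent.Future

import com.github.mauricio.async.db.{Connection, QueryResult}

// Illustrative only: stream a file's contents into a blob column via a ScatteringByteChannel.
def insertBlobFromFile(connection: Connection, path: String): Future[QueryResult] = {
  val channel = new FileInputStream(path).getChannel // FileChannel implements ScatteringByteChannel
  connection.sendPreparedStatement("INSERT INTO POSTS (id,blob_column) VALUES (?,?)", List(1, channel))
}
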
diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala index 8792f08f..9f38fd05 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala @@ -16,6 +16,8 @@ package com.github.mauricio.async.db.mysql.binary +import java.nio.channels.ScatteringByteChannel + import io.netty.buffer.ByteBuf import java.nio.ByteBuffer import java.nio.charset.Charset @@ -88,6 +90,7 @@ class BinaryRowEncoder( charset : Charset ) { case v : java.util.Date => JavaDateEncoder case v : ByteBuffer => ByteBufferEncoder case v : ByteBuf => ByteBufEncoder + case v : ScatteringByteChannel => DummyBlobEncoder } } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala new file mode 100644 index 00000000..6bfa86d4 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala @@ -0,0 +1,14 @@ +package com.github.mauricio.async.db.mysql.binary.encoder + +import com.github.mauricio.async.db.mysql.column.ColumnTypes +import io.netty.buffer.ByteBuf + +object DummyBlobEncoder extends BinaryEncoder { + + def encode(value: Any, buffer: ByteBuf): Unit = { + throw new UnsupportedOperationException() + } + + def encodesTo: Int = ColumnTypes.FIELD_TYPE_BLOB + + } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index aec8d24c..ad7d3ac1 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -17,6 +17,7 @@ package com.github.mauricio.async.db.mysql.codec import java.nio.ByteBuffer +import java.nio.channels.ScatteringByteChannel import com.github.mauricio.async.db.Configuration import com.github.mauricio.async.db.general.MutableResultSet @@ -260,6 +261,7 @@ class MySQLConnectionHandler( case v : Array[Byte] => v.length > SendLongDataEncoder.LONG_THRESHOLD case v : ByteBuffer => v.remaining() > SendLongDataEncoder.LONG_THRESHOLD case v : ByteBuf => v.readableBytes() > SendLongDataEncoder.LONG_THRESHOLD + case _ : ScatteringByteChannel => true case _ => false } @@ -275,9 +277,27 @@ class MySQLConnectionHandler( case v : ByteBuf => sendBuffer(v, statementId, index) + + case channel : ScatteringByteChannel => + sendChannel(channel, statementId, index) } } + // TODO this is blocking + private def sendChannel(channel: ScatteringByteChannel, statementId: Array[Byte], paramId: Int) { + var bytesWritten = 0 + do { + val dataBuffer = Unpooled.directBuffer(SendLongDataEncoder.INITIAL_BUFFER_SIZE, SendLongDataEncoder.MAX_BUFFER_SIZE) + do { + bytesWritten = dataBuffer.writeBytes(channel, SendLongDataEncoder.MAX_BUFFER_SIZE) + } while (bytesWritten == 0) + if (bytesWritten > 0) { + sendBuffer(dataBuffer, statementId, paramId) + } + } while (bytesWritten > -1) + channel.close() + } + private def sendBuffer(buffer: ByteBuf, statementId: Array[Byte], paramId: Int) { writeAndHandleError(new SendLongDataMessage(statementId, buffer, paramId)) } 
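The blocking read loop in sendChannel above boils down to the following standalone sketch: keep filling Netty buffers from the channel until EOF and ship every non-empty chunk, each of which becomes one SendLongDataMessage. The 1 KiB / 1 MiB figures mirror the INITIAL_BUFFER_SIZE / MAX_BUFFER_SIZE constants added to SendLongDataEncoder below:

import java.nio.channels.ScatteringByteChannel

import io.netty.buffer.{ByteBuf, Unpooled}

// Simplified, still-blocking version of the chunked read: drain the channel until EOF,
// handing each non-empty buffer to `send`.
def readInChunks(channel: ScatteringByteChannel)(send: ByteBuf => Unit): Unit = {
  val initialSize = 1024        // SendLongDataEncoder.INITIAL_BUFFER_SIZE
  val maxSize     = 1024 * 1024 // SendLongDataEncoder.MAX_BUFFER_SIZE
  var bytesRead = 0
  do {
    val chunk = Unpooled.directBuffer(initialSize, maxSize)
    do {
      bytesRead = chunk.writeBytes(channel, maxSize) // returns -1 once the channel hits EOF
    } while (bytesRead == 0)
    if (bytesRead > 0) send(chunk) // in the handler this is sendBuffer(chunk, statementId, paramId)
  } while (bytesRead > -1)
  channel.close()
}
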
diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala index ce51140f..af02131b 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala @@ -10,6 +10,10 @@ object SendLongDataEncoder { val log = Log.get[SendLongDataEncoder] val LONG_THRESHOLD = 1023 + + val INITIAL_BUFFER_SIZE = 1024 // 1 KiB + + val MAX_BUFFER_SIZE = 1024*1024 // 1 MiB } class SendLongDataEncoder diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala index 6c7c1313..b39f9356 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala @@ -1,6 +1,8 @@ package com.github.mauricio.async.db.mysql -import org.specs2.mutable.Specification +import java.io.{FileInputStream, FileOutputStream, BufferedOutputStream, File} + +import org.specs2.mutable.{After, Specification} import java.util.UUID import java.nio.ByteBuffer import io.netty.buffer.Unpooled @@ -9,6 +11,17 @@ import com.github.mauricio.async.db.RowData class BinaryColumnsSpec extends Specification with ConnectionHelper { + val createBlobTable = + """CREATE TEMPORARY TABLE POSTS ( + | id INT NOT NULL, + | blob_column LONGBLOB, + | primary key (id)) + """.stripMargin + + val insertIntoBlobTable = "INSERT INTO POSTS (id,blob_column) VALUES (?,?)" + + val selectFromBlobTable = "SELECT id,blob_column FROM POSTS ORDER BY id" + "connection" should { "correctly load fields as byte arrays" in { @@ -106,7 +119,7 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { } - "support BLOB type with large values" in { + "support BLOB type with long values" in { val bytes = (1 to 2100).map(_.toByte).toArray @@ -114,27 +127,35 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { } - } + "support BLOB type with ScatteringByteChannel input" in new BlobFile { - def testBlob(bytes: Array[Byte]) = { - val create = - """CREATE TEMPORARY TABLE POSTS ( - | id INT NOT NULL, - | blob_column BLOB, - | primary key (id)) - """.stripMargin + withConnection { + connection => + executeQuery(connection, createBlobTable) + + val channel = new FileInputStream(blobFile).getChannel + executePreparedStatement(connection, insertIntoBlobTable, 1, channel) + + val Some(rows) = executeQuery(connection, selectFromBlobTable).rows + rows(0)("id") === 1 + val retrievedBlob = rows(0)("blob_column").asInstanceOf[Array[Byte]] + retrievedBlob.length === BlobSize + 0 to retrievedBlob.length-1 foreach { n => retrievedBlob(n) === n.toByte } + } - val insert = "INSERT INTO POSTS (id,blob_column) VALUES (?,?)" - val select = "SELECT id,blob_column FROM POSTS ORDER BY id" + } + } + + def testBlob(bytes: Array[Byte]) = { withConnection { connection => - executeQuery(connection, create) - executePreparedStatement(connection, insert, 1, Some(bytes)) - executePreparedStatement(connection, insert, 2, ByteBuffer.wrap(bytes)) - executePreparedStatement(connection, insert, 3, Unpooled.wrappedBuffer(bytes)) + executeQuery(connection, createBlobTable) + executePreparedStatement(connection, insertIntoBlobTable, 1, Some(bytes)) + 
executePreparedStatement(connection, insertIntoBlobTable, 2, ByteBuffer.wrap(bytes)) + executePreparedStatement(connection, insertIntoBlobTable, 3, Unpooled.wrappedBuffer(bytes)) - val Some(rows) = executeQuery(connection, select).rows + val Some(rows) = executeQuery(connection, selectFromBlobTable).rows rows(0)("id") === 1 rows(0)("blob_column") === bytes rows(1)("id") === 2 @@ -149,4 +170,20 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { def compareBytes( row : RowData, column : String, expected : String ) = row(column) === expected.getBytes(CharsetUtil.UTF_8) -} \ No newline at end of file +} + +trait BlobFile extends After { + val BlobSize = (16 * 1024 * 1024)-9 + + lazy val blobFile = { + val file = File.createTempFile("blob", null) + val bos = new BufferedOutputStream(new FileOutputStream(file)) + 0 to BlobSize-1 foreach { n => bos.write(n.toByte) } + bos.close() + file + } + + def after = { + blobFile.delete() + } +} From 7f0f2495b946013a1e6556b8927df85b6a12666d Mon Sep 17 00:00:00 2001 From: nyavro Date: Fri, 12 Dec 2014 12:52:15 +0700 Subject: [PATCH 15/82] #102 Provided stored procedure tests --- .../async/db/mysql/StoredProceduresSpec.scala | 69 +++++++++++++++++++ 1 file changed, 69 insertions(+) create mode 100644 mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala new file mode 100644 index 00000000..3c1e66f0 --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala @@ -0,0 +1,69 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.github.mauricio.async.db.mysql + +import com.github.mauricio.async.db.ResultSet +import com.github.mauricio.async.db.util.FutureUtils._ +import org.specs2.mutable.Specification + +class StoredProceduresSpec extends Specification with ConnectionHelper { + + "connection" should { + + "be able to execute create stored procedure" in { + withConnection { + connection => + val future = for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists helloWorld;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE helloWorld(OUT param1 VARCHAR(20)) + BEGIN + SELECT 'hello' INTO param1; + END + """ + ) + ) yield create + awaitFuture(future).statusMessage === "" + } + } + + "be able to call stored procedure" in { + withConnection { + connection => + val future = for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists constTest;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE constTest(OUT param INT) + BEGIN + SELECT 125 INTO param; + END + """ + ); + call <- connection.sendQuery("CALL constTest(@arg)"); + arg <- connection.sendQuery("SELECT @arg") + ) yield arg + val result: Option[ResultSet] = awaitFuture(future).rows + result.isDefined === true + val rows = result.get + rows.size === 1 + rows(0)(rows.columnNames.head) == 125 + } + } + } +} \ No newline at end of file From 2ca73cd992f364efa77b4da1dadd218f8b82f132 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Fri, 12 Dec 2014 10:51:43 +0100 Subject: [PATCH 16/82] Set BLOB via ScatteringByteChannel (avoid blocking) --- .../mysql/codec/MySQLConnectionHandler.scala | 68 +++++++++++-------- 1 file changed, 40 insertions(+), 28 deletions(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index ad7d3ac1..9cf213d9 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -190,7 +190,7 @@ class MySQLConnectionHandler( writeAndHandleError(message) } - def sendPreparedStatement( query: String, values: Seq[Any] ) { + def sendPreparedStatement( query: String, values: Seq[Any] ): Future[ChannelFuture] = { val preparedStatement = new PreparedStatement(query, values) this.currentColumns.clear() @@ -236,24 +236,33 @@ class MySQLConnectionHandler( } } - private def executePreparedStatement( statementId : Array[Byte], columnsCount : Int, values : Seq[Any], parameters : Seq[ColumnDefinitionMessage] ) { + private def executePreparedStatement( statementId : Array[Byte], columnsCount : Int, values : Seq[Any], parameters : Seq[ColumnDefinitionMessage] ): Future[ChannelFuture] = { decoder.preparedStatementExecuteStarted(columnsCount, parameters.size) this.currentColumns.clear() this.currentParameters.clear() - var nonLongIndices: Set[Int] = Set() - values.zipWithIndex.foreach { - case (Some(value), index) if isLong(value) => - sendLongParameter(statementId, index, value) - - case (value, index) if isLong(value) => - sendLongParameter(statementId, index, value) - - case (_, index) => - nonLongIndices += index + val (nonLongIndicesOpt, longValuesOpt) = values.zipWithIndex.map { + case (Some(value), index) if isLong(value) => (None, Some(index, value)) + case (value, index) if isLong(value) => (None, Some(index, value)) + case (_, index) => (Some(index), None) + }.unzip + val nonLongIndices: 
Seq[Int] = nonLongIndicesOpt.flatten + val longValues: Seq[(Int, Any)] = longValuesOpt.flatten + + if (longValues.nonEmpty) { + val (firstIndex, firstValue) = longValues.head + var channelFuture: Future[ChannelFuture] = sendLongParameter(statementId, firstIndex, firstValue) + longValues.tail foreach { case (index, value) => + channelFuture = channelFuture.flatMap { _ => + sendLongParameter(statementId, index, value) + } + } + channelFuture flatMap { _ => + writeAndHandleError(new PreparedStatementExecuteMessage(statementId, values, nonLongIndices.toSet, parameters)) + } + } else { + writeAndHandleError(new PreparedStatementExecuteMessage(statementId, values, nonLongIndices.toSet, parameters)) } - - writeAndHandleError(new PreparedStatementExecuteMessage(statementId, values, nonLongIndices, parameters)) } private def isLong(value: Any): Boolean = { @@ -267,7 +276,7 @@ class MySQLConnectionHandler( } } - private def sendLongParameter(statementId: Array[Byte], index: Int, longValue: Any) { + private def sendLongParameter(statementId: Array[Byte], index: Int, longValue: Any): Future[ChannelFuture] = { longValue match { case v : Array[Byte] => sendBuffer(Unpooled.wrappedBuffer(v), statementId, index) @@ -283,22 +292,25 @@ class MySQLConnectionHandler( } } - // TODO this is blocking - private def sendChannel(channel: ScatteringByteChannel, statementId: Array[Byte], paramId: Int) { - var bytesWritten = 0 - do { - val dataBuffer = Unpooled.directBuffer(SendLongDataEncoder.INITIAL_BUFFER_SIZE, SendLongDataEncoder.MAX_BUFFER_SIZE) + private def sendChannel(channel: ScatteringByteChannel, statementId: Array[Byte], paramId: Int): Future[ChannelFuture] = { + Future { + var bytesWritten = 0 + var channelFuture: ChannelFuture = null do { - bytesWritten = dataBuffer.writeBytes(channel, SendLongDataEncoder.MAX_BUFFER_SIZE) - } while (bytesWritten == 0) - if (bytesWritten > 0) { - sendBuffer(dataBuffer, statementId, paramId) - } - } while (bytesWritten > -1) - channel.close() + val dataBuffer = Unpooled.directBuffer(SendLongDataEncoder.INITIAL_BUFFER_SIZE, SendLongDataEncoder.MAX_BUFFER_SIZE) + do { + bytesWritten = dataBuffer.writeBytes(channel, SendLongDataEncoder.MAX_BUFFER_SIZE) + } while (bytesWritten == 0) + if (bytesWritten > 0) { + channelFuture = sendBuffer(dataBuffer, statementId, paramId) + } + } while (bytesWritten > -1) + channel.close() + channelFuture + } } - private def sendBuffer(buffer: ByteBuf, statementId: Array[Byte], paramId: Int) { + private def sendBuffer(buffer: ByteBuf, statementId: Array[Byte], paramId: Int): ChannelFuture = { writeAndHandleError(new SendLongDataMessage(statementId, buffer, paramId)) } From 42b5a11dd951d6fc5ec18e8d553088553a488d54 Mon Sep 17 00:00:00 2001 From: nyavro Date: Fri, 12 Dec 2014 21:45:11 +0600 Subject: [PATCH 17/82] #102 Test passing input parameters into stored procedure --- .../async/db/mysql/StoredProceduresSpec.scala | 26 ++++++++++++++++++- 1 file changed, 25 insertions(+), 1 deletion(-) diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala index 3c1e66f0..9aa4f66b 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala @@ -62,7 +62,31 @@ class StoredProceduresSpec extends Specification with ConnectionHelper { result.isDefined === true val rows = result.get rows.size === 1 
- rows(0)(rows.columnNames.head) == 125 + rows(0)(rows.columnNames.head) === 125 + } + } + + "be able to call stored procedure with input parameter" in { + withConnection { + connection => + val future = for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists addTest;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE addTest(IN a INT, IN b INT, OUT sum INT) + BEGIN + SELECT a+b INTO sum; + END + """ + ); + call <- connection.sendQuery("CALL addTest(132, 245, @sm)"); + res <- connection.sendQuery("SELECT @sm") + ) yield res + val result: Option[ResultSet] = awaitFuture(future).rows + result.isDefined === true + val rows = result.get + rows.size === 1 + rows(0)(rows.columnNames.head) === 377 } } } From 39a7b52f4f5cc477151c79e38777ecadcbe7ca13 Mon Sep 17 00:00:00 2001 From: nyavro Date: Tue, 16 Dec 2014 11:23:45 +0600 Subject: [PATCH 18/82] #102 remove procedure test --- .../async/db/mysql/StoredProceduresSpec.scala | 39 +++++++++++++++++++ 1 file changed, 39 insertions(+) diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala index 9aa4f66b..3d68563b 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala @@ -89,5 +89,44 @@ class StoredProceduresSpec extends Specification with ConnectionHelper { rows(0)(rows.columnNames.head) === 377 } } + + "be able to remove stored procedure" in { + withConnection { + connection => + val createResult: Option[ResultSet] = awaitFuture( + for( + drop <- connection.sendQuery("DROP PROCEDURE IF exists remTest;"); + create <- connection.sendQuery( + """ + CREATE PROCEDURE remTest(OUT cnst INT) + BEGIN + SELECT 987 INTO cnst; + END + """ + ); + routine <- connection.sendQuery( + """ + SELECT routine_name FROM INFORMATION_SCHEMA.ROUTINES WHERE routine_name="remTest" + """ + ) + ) yield routine + ).rows + createResult.isDefined === true + createResult.get.size === 1 + createResult.get(0)("routine_name") === "remTest" + val removeResult: Option[ResultSet] = awaitFuture( + for( + drop <- connection.sendQuery("DROP PROCEDURE remTest;"); + routine <- connection.sendQuery( + """ + SELECT routine_name FROM INFORMATION_SCHEMA.ROUTINES WHERE routine_name="remTest" + """ + ) + ) yield routine + ).rows + removeResult.isDefined === true + removeResult.get.isEmpty === true + } + } } } \ No newline at end of file From 117f760ded1e6347a389eb6f6fae313681f0f8c3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Wed, 17 Dec 2014 11:40:48 +0100 Subject: [PATCH 19/82] Remove channel support for now --- mysql-async/README.md | 4 +- .../db/mysql/binary/BinaryRowEncoder.scala | 9 +-- .../binary/encoder/DummyBlobEncoder.scala | 14 ---- .../mysql/codec/MySQLConnectionHandler.scala | 31 +------- .../db/mysql/codec/SendLongDataEncoder.scala | 4 - .../async/db/mysql/BinaryColumnsSpec.scala | 73 +++++-------------- 6 files changed, 25 insertions(+), 110 deletions(-) delete mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala diff --git a/mysql-async/README.md b/mysql-async/README.md index b979fc2f..3a152286 100644 --- a/mysql-async/README.md +++ b/mysql-async/README.md @@ -91,10 +91,8 @@ String | string Array[Byte] | blob java.nio.ByteBuffer | blob io.netty.buffer.ByteBuf | blob -java.nio.channels.ScatteringByteChannel | 
blob -The maximum size of a blob you set via Array[Byte], ByteBuffer or ByteBuf is 2^24-9 bytes (almost 16 MiB). -Blobs set via a Channel can be larger, the Channel is read until EOF and then closed. +The maximum size of a blob is 2^24-9 bytes (almost 16 MiB). You don't have to match exact values when sending parameters for your prepared statements, MySQL is usually smart enough to understand that if you have sent an Int to `smallint` column it has to truncate the 4 bytes into 2. diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala index 9f38fd05..aff0b36f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowEncoder.scala @@ -16,15 +16,13 @@ package com.github.mauricio.async.db.mysql.binary -import java.nio.channels.ScatteringByteChannel - -import io.netty.buffer.ByteBuf import java.nio.ByteBuffer import java.nio.charset.Charset + import com.github.mauricio.async.db.mysql.binary.encoder._ import com.github.mauricio.async.db.util._ +import io.netty.buffer.ByteBuf import org.joda.time._ -import scala.Some object BinaryRowEncoder { final val log = Log.get[BinaryRowEncoder] @@ -32,8 +30,6 @@ object BinaryRowEncoder { class BinaryRowEncoder( charset : Charset ) { - import BinaryRowEncoder.log - private final val stringEncoder = new StringEncoder(charset) private final val encoders = Map[Class[_],BinaryEncoder]( classOf[String] -> this.stringEncoder, @@ -90,7 +86,6 @@ class BinaryRowEncoder( charset : Charset ) { case v : java.util.Date => JavaDateEncoder case v : ByteBuffer => ByteBufferEncoder case v : ByteBuf => ByteBufEncoder - case v : ScatteringByteChannel => DummyBlobEncoder } } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala deleted file mode 100644 index 6bfa86d4..00000000 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/encoder/DummyBlobEncoder.scala +++ /dev/null @@ -1,14 +0,0 @@ -package com.github.mauricio.async.db.mysql.binary.encoder - -import com.github.mauricio.async.db.mysql.column.ColumnTypes -import io.netty.buffer.ByteBuf - -object DummyBlobEncoder extends BinaryEncoder { - - def encode(value: Any, buffer: ByteBuf): Unit = { - throw new UnsupportedOperationException() - } - - def encodesTo: Int = ColumnTypes.FIELD_TYPE_BLOB - - } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index 9cf213d9..6ce93145 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -16,10 +16,11 @@ package com.github.mauricio.async.db.mysql.codec +import java.net.InetSocketAddress import java.nio.ByteBuffer -import java.nio.channels.ScatteringByteChannel import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.exceptions.DatabaseException import com.github.mauricio.async.db.general.MutableResultSet import com.github.mauricio.async.db.mysql.binary.BinaryRowDecoder import 
com.github.mauricio.async.db.mysql.message.client._ @@ -28,16 +29,14 @@ import com.github.mauricio.async.db.mysql.util.CharsetMapper import com.github.mauricio.async.db.util.ChannelFutureTransformer.toFuture import com.github.mauricio.async.db.util._ import io.netty.bootstrap.Bootstrap -import io.netty.buffer.{Unpooled, ByteBuf, ByteBufAllocator} +import io.netty.buffer.{ByteBuf, ByteBufAllocator, Unpooled} import io.netty.channel._ import io.netty.channel.socket.nio.NioSocketChannel import io.netty.handler.codec.CodecException -import java.net.InetSocketAddress -import scala.Some + import scala.annotation.switch import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.concurrent._ -import com.github.mauricio.async.db.exceptions.DatabaseException class MySQLConnectionHandler( configuration: Configuration, @@ -270,7 +269,6 @@ class MySQLConnectionHandler( case v : Array[Byte] => v.length > SendLongDataEncoder.LONG_THRESHOLD case v : ByteBuffer => v.remaining() > SendLongDataEncoder.LONG_THRESHOLD case v : ByteBuf => v.readableBytes() > SendLongDataEncoder.LONG_THRESHOLD - case _ : ScatteringByteChannel => true case _ => false } @@ -286,27 +284,6 @@ class MySQLConnectionHandler( case v : ByteBuf => sendBuffer(v, statementId, index) - - case channel : ScatteringByteChannel => - sendChannel(channel, statementId, index) - } - } - - private def sendChannel(channel: ScatteringByteChannel, statementId: Array[Byte], paramId: Int): Future[ChannelFuture] = { - Future { - var bytesWritten = 0 - var channelFuture: ChannelFuture = null - do { - val dataBuffer = Unpooled.directBuffer(SendLongDataEncoder.INITIAL_BUFFER_SIZE, SendLongDataEncoder.MAX_BUFFER_SIZE) - do { - bytesWritten = dataBuffer.writeBytes(channel, SendLongDataEncoder.MAX_BUFFER_SIZE) - } while (bytesWritten == 0) - if (bytesWritten > 0) { - channelFuture = sendBuffer(dataBuffer, statementId, paramId) - } - } while (bytesWritten > -1) - channel.close() - channelFuture } } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala index af02131b..ce51140f 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/SendLongDataEncoder.scala @@ -10,10 +10,6 @@ object SendLongDataEncoder { val log = Log.get[SendLongDataEncoder] val LONG_THRESHOLD = 1023 - - val INITIAL_BUFFER_SIZE = 1024 // 1 KiB - - val MAX_BUFFER_SIZE = 1024*1024 // 1 MiB } class SendLongDataEncoder diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala index b39f9356..6c7c1313 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BinaryColumnsSpec.scala @@ -1,8 +1,6 @@ package com.github.mauricio.async.db.mysql -import java.io.{FileInputStream, FileOutputStream, BufferedOutputStream, File} - -import org.specs2.mutable.{After, Specification} +import org.specs2.mutable.Specification import java.util.UUID import java.nio.ByteBuffer import io.netty.buffer.Unpooled @@ -11,17 +9,6 @@ import com.github.mauricio.async.db.RowData class BinaryColumnsSpec extends Specification with ConnectionHelper { - val createBlobTable = - """CREATE TEMPORARY TABLE POSTS ( - | id INT 
NOT NULL, - | blob_column LONGBLOB, - | primary key (id)) - """.stripMargin - - val insertIntoBlobTable = "INSERT INTO POSTS (id,blob_column) VALUES (?,?)" - - val selectFromBlobTable = "SELECT id,blob_column FROM POSTS ORDER BY id" - "connection" should { "correctly load fields as byte arrays" in { @@ -119,7 +106,7 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { } - "support BLOB type with long values" in { + "support BLOB type with large values" in { val bytes = (1 to 2100).map(_.toByte).toArray @@ -127,35 +114,27 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { } - "support BLOB type with ScatteringByteChannel input" in new BlobFile { - - withConnection { - connection => - executeQuery(connection, createBlobTable) - - val channel = new FileInputStream(blobFile).getChannel - executePreparedStatement(connection, insertIntoBlobTable, 1, channel) - - val Some(rows) = executeQuery(connection, selectFromBlobTable).rows - rows(0)("id") === 1 - val retrievedBlob = rows(0)("blob_column").asInstanceOf[Array[Byte]] - retrievedBlob.length === BlobSize - 0 to retrievedBlob.length-1 foreach { n => retrievedBlob(n) === n.toByte } - } - - } - } def testBlob(bytes: Array[Byte]) = { + val create = + """CREATE TEMPORARY TABLE POSTS ( + | id INT NOT NULL, + | blob_column BLOB, + | primary key (id)) + """.stripMargin + + val insert = "INSERT INTO POSTS (id,blob_column) VALUES (?,?)" + val select = "SELECT id,blob_column FROM POSTS ORDER BY id" + withConnection { connection => - executeQuery(connection, createBlobTable) - executePreparedStatement(connection, insertIntoBlobTable, 1, Some(bytes)) - executePreparedStatement(connection, insertIntoBlobTable, 2, ByteBuffer.wrap(bytes)) - executePreparedStatement(connection, insertIntoBlobTable, 3, Unpooled.wrappedBuffer(bytes)) + executeQuery(connection, create) + executePreparedStatement(connection, insert, 1, Some(bytes)) + executePreparedStatement(connection, insert, 2, ByteBuffer.wrap(bytes)) + executePreparedStatement(connection, insert, 3, Unpooled.wrappedBuffer(bytes)) - val Some(rows) = executeQuery(connection, selectFromBlobTable).rows + val Some(rows) = executeQuery(connection, select).rows rows(0)("id") === 1 rows(0)("blob_column") === bytes rows(1)("id") === 2 @@ -170,20 +149,4 @@ class BinaryColumnsSpec extends Specification with ConnectionHelper { def compareBytes( row : RowData, column : String, expected : String ) = row(column) === expected.getBytes(CharsetUtil.UTF_8) -} - -trait BlobFile extends After { - val BlobSize = (16 * 1024 * 1024)-9 - - lazy val blobFile = { - val file = File.createTempFile("blob", null) - val bos = new BufferedOutputStream(new FileOutputStream(file)) - 0 to BlobSize-1 foreach { n => bos.write(n.toByte) } - bos.close() - file - } - - def after = { - blobFile.delete() - } -} +} \ No newline at end of file From 1f74c57a8f9c72822e34bedf45dedc5ae08001d5 Mon Sep 17 00:00:00 2001 From: haski Date: Sun, 4 Jan 2015 12:32:19 +0200 Subject: [PATCH 20/82] making sure connection is back in pool before result returns --- .../mauricio/async/db/pool/AsyncObjectPool.scala | 10 ++++++---- 1 file changed, 6 insertions(+), 4 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala index 39179737..a288eea5 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala +++ 
b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala @@ -16,7 +16,7 @@ package com.github.mauricio.async.db.pool -import scala.concurrent.Future +import scala.concurrent.{ExecutionContext, Future} /** * @@ -70,10 +70,12 @@ trait AsyncObjectPool[T] { * @return f wrapped with take and giveBack */ - def use[A](f : T => Future[A])(implicit executionContext : scala.concurrent.ExecutionContext) : Future[A] = + def use[A](f: (T) => Future[A])(implicit executionContext: ExecutionContext): Future[A] = take.flatMap { item => - f(item).andThen { case _ => - giveBack(item) + f(item) recoverWith { + case error => giveBack(item).flatMap(_ => Future.failed(error)) + } flatMap { res => + giveBack(item).map { _ => res } } } From 2be41d52d6de45031750f096c7c1b1227b4912c3 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sun, 4 Jan 2015 10:52:01 -0300 Subject: [PATCH 21/82] Updating changelog to close 0.2.16 --- CHANGELOG.md | 7 ++++++- 1 file changed, 6 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index d05f17d5..8f7a65f8 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,9 +24,14 @@ # Changelog -## 0.2.16 - in progress +## 0.2.16 - 2015-01-04 * Add support to byte arrays for PostgreSQL 8 and older - @SattaiLanfear - #21; +* Make sure connections are returned to the pool before the result is returned to the user - @haski - #119; +* Support to `SEND_LONG_DATA` to MySQL - @mst-appear - #115; +* Support for `ByteBuffer` and `ByteBuf` for binary data - @mst-appear - #113 #112; +* Fixed encoding backslashes in PostgreSQL arrays - @dylex - #110; +* Included `escape` encoding method for bytes in PostgreSQL - @SattaiLanfear - #107; ## 0.2.15 - 2014-09-12 From d479047eb35a1b5021c90f4435ea1b1647a7f086 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sun, 4 Jan 2015 11:00:19 -0300 Subject: [PATCH 22/82] Upgrading netty --- project/Build.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/project/Build.scala b/project/Build.scala index dff7c993..172ed90d 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -45,7 +45,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.16-SNAPSHOT" + val commonVersion = "0.2.16" val projectScalaVersion = "2.11.0" val specs2Dependency = "org.specs2" %% "specs2" % "2.3.11" % "test" @@ -55,7 +55,7 @@ object Configuration { "org.slf4j" % "slf4j-api" % "1.7.5", "joda-time" % "joda-time" % "2.3", "org.joda" % "joda-convert" % "1.5", - "io.netty" % "netty-all" % "4.0.23.Final", + "io.netty" % "netty-all" % "4.0.25.Final", "org.javassist" % "javassist" % "3.18.1-GA", specs2Dependency, logbackDependency From 598a2658fbd62065abe076d8abffbb82996ae716 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sun, 4 Jan 2015 11:12:26 -0300 Subject: [PATCH 23/82] Starting v0.2.17 cycle --- CHANGELOG.md | 5 ++++- README.markdown | 4 ++-- project/Build.scala | 2 +- 3 files changed, 7 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 8f7a65f8..c80603d2 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -3,7 +3,8 @@ **Table of Contents** - [Changelog](#changelog) - - [0.2.16 - in progress](#0216---in-progress) + - [0.2.17 - in progresss](#0217---in-progresss) + - [0.2.16 - 2015-01-04](#0216---2015-01-04) - [0.2.15 - 2014-09-12](#0215---2014-09-12) - [0.2.14 - 2014-08-30](#0214---2014-08-30) - [0.2.13 - 2014-04-07](#0213---2014-04-07) @@ -24,6 +25,8 @@ # Changelog +## 0.2.17 - in progresss + ## 0.2.16 - 2015-01-04 * Add support to byte arrays 
for PostgreSQL 8 and older - @SattaiLanfear - #21; diff --git a/README.markdown b/README.markdown index 342425b9..0d90697a 100644 --- a/README.markdown +++ b/README.markdown @@ -62,7 +62,7 @@ Or Maven: com.github.mauricio postgresql-async_2.11 - 0.2.15 + 0.2.16 ``` @@ -78,7 +78,7 @@ Or Maven: com.github.mauricio mysql-async_2.11 - 0.2.15 + 0.2.16 ``` diff --git a/project/Build.scala b/project/Build.scala index 172ed90d..adf61daa 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -45,7 +45,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.16" + val commonVersion = "0.2.17-SNAPSHOT" val projectScalaVersion = "2.11.0" val specs2Dependency = "org.specs2" %% "specs2" % "2.3.11" % "test" From 90e00736f950124384309f9ba5f78a4848d9dca3 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Mikael=20St=C3=A5ldal?= Date: Mon, 12 Jan 2015 19:01:48 +0100 Subject: [PATCH 24/82] Do not assume MySQL root user without password --- .../mauricio/async/db/mysql/MySQLConnectionSpec.scala | 8 ++++---- script/prepare_build.sh | 1 + 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala index aebf18dd..5e5500fa 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/MySQLConnectionSpec.scala @@ -30,8 +30,8 @@ class MySQLConnectionSpec extends Specification { database = Some("mysql_async_tests") ) - val rootConfiguration = new Configuration( - "root", + val configurationWithoutPassword = new Configuration( + "mysql_async_nopw", "localhost", port = 3306, password = None, @@ -39,7 +39,7 @@ class MySQLConnectionSpec extends Specification { ) val configurationWithoutDatabase = new Configuration( - "root", + "mysql_async_nopw", "localhost", port = 3306, password = None, @@ -69,7 +69,7 @@ class MySQLConnectionSpec extends Specification { withNonConnectedConnection({ connection => awaitFuture(connection.connect) === connection - }) (rootConfiguration) + }) (configurationWithoutPassword) } "connect to a MySQL instance without a database" in { diff --git a/script/prepare_build.sh b/script/prepare_build.sh index 9992e442..a3a4e183 100755 --- a/script/prepare_build.sh +++ b/script/prepare_build.sh @@ -5,6 +5,7 @@ mysql -u root -e 'create database mysql_async_tests;' mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async'@'localhost' IDENTIFIED BY 'root' WITH GRANT OPTION"; mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async_old'@'localhost' WITH GRANT OPTION"; mysql -u root -e "UPDATE mysql.user SET Password = OLD_PASSWORD('do_not_use_this'), plugin = 'mysql_old_password' where User = 'mysql_async_old'; flush privileges;"; +mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async_nopw'@'localhost' WITH GRANT OPTION"; echo "preparing postgresql configs" From 68f58f0f60de9e8b62c86a2d39a0ebdf8c644e1f Mon Sep 17 00:00:00 2001 From: The Gitter Badger Date: Wed, 14 Jan 2015 18:57:38 +0000 Subject: [PATCH 25/82] Added Gitter badge --- README.md | 3 +++ 1 file changed, 3 insertions(+) create mode 100644 README.md diff --git a/README.md b/README.md new file mode 100644 index 00000000..4fedd098 --- /dev/null +++ b/README.md @@ -0,0 +1,3 @@ +# postgresql-async + 
+[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mauricio/postgresql-async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) \ No newline at end of file From 4a9a9fd938bd73828babe8fd39cc8316adfb3d9c Mon Sep 17 00:00:00 2001 From: Dragisa Krsmanovic Date: Wed, 28 Jan 2015 17:27:58 -0800 Subject: [PATCH 26/82] Add support for UUID --- .../async/db/column/UUIDEncoderDecoder.scala | 25 ++++++++ .../db/postgresql/column/ColumnTypes.scala | 1 + .../PostgreSQLColumnDecoderRegistry.scala | 4 +- .../PostgreSQLColumnEncoderRegistry.scala | 2 + .../db/postgresql/PreparedStatementSpec.scala | 57 ++++++++++++++++++- 5 files changed, 87 insertions(+), 2 deletions(-) create mode 100644 db-async-common/src/main/scala/com/github/mauricio/async/db/column/UUIDEncoderDecoder.scala diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/UUIDEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/UUIDEncoderDecoder.scala new file mode 100644 index 00000000..11987835 --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/UUIDEncoderDecoder.scala @@ -0,0 +1,25 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
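For callers, the new mapping means a `java.util.UUID` can be bound directly and a `uuid` column comes back as a `UUID`. A condensed sketch of the round trip, reusing the same helpers and `uuids (my_id uuid)` table as the spec below (so not a standalone program):

```scala
import java.util.UUID

// Bind a UUID parameter, then read the decoded value back from the row.
val uuid = UUID.randomUUID()
executePreparedStatement(handler, "INSERT INTO uuids (my_id) VALUES (?)", Array(uuid))
val loaded = executePreparedStatement(handler, "SELECT my_id FROM uuids")
  .rows.get.head("my_id").asInstanceOf[UUID]
// loaded == uuid
```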
+ */ + +package com.github.mauricio.async.db.column + +import java.util.UUID + +object UUIDEncoderDecoder extends ColumnEncoderDecoder { + + override def decode(value: String): UUID = UUID.fromString(value) + +} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala index 7f15b0f6..29c6b736 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala @@ -63,6 +63,7 @@ object ColumnTypes { final val MoneyArray = 791 final val NameArray = 1003 + final val UUID = 2950 final val UUIDArray = 2951 final val XMLArray = 143 diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala index 734c0902..606bb442 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala @@ -45,6 +45,7 @@ class PostgreSQLColumnDecoderRegistry( charset : Charset = CharsetUtil.UTF_8 ) e private final val timeArrayDecoder = new ArrayDecoder(TimeEncoderDecoder.Instance) private final val timeWithTimestampArrayDecoder = new ArrayDecoder(TimeWithTimezoneEncoderDecoder) private final val intervalArrayDecoder = new ArrayDecoder(PostgreSQLIntervalEncoderDecoder) + private final val uuidArrayDecoder = new ArrayDecoder(UUIDEncoderDecoder) override def decode(kind: ColumnData, value: ByteBuf, charset: Charset): Any = { decoderFor(kind.dataType).decode(kind, value, charset) @@ -108,7 +109,8 @@ class PostgreSQLColumnDecoderRegistry( charset : Charset = CharsetUtil.UTF_8 ) e case MoneyArray => this.stringArrayDecoder case NameArray => this.stringArrayDecoder - case UUIDArray => this.stringArrayDecoder + case UUID => UUIDEncoderDecoder + case UUIDArray => this.uuidArrayDecoder case XMLArray => this.stringArrayDecoder case ByteA => ByteArrayEncoderDecoder diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala index 5afb5aa8..24641336 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala @@ -52,6 +52,8 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { classOf[BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), classOf[java.math.BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.util.UUID] -> (UUIDEncoderDecoder -> ColumnTypes.UUID), + classOf[LocalDate] -> ( DateEncoderDecoder -> ColumnTypes.Date ), classOf[LocalDateTime] -> (TimestampEncoderDecoder.Instance -> ColumnTypes.Timestamp), classOf[DateTime] -> (TimestampWithTimezoneEncoderDecoder -> ColumnTypes.TimestampWithTimezone), diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala 
b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala index 20c645cc..6fd7d9a6 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala @@ -20,7 +20,7 @@ import org.specs2.mutable.Specification import org.joda.time.LocalDate import com.github.mauricio.async.db.util.Log import com.github.mauricio.async.db.exceptions.InsufficientParametersException -import java.util.Date +import java.util.UUID import com.github.mauricio.async.db.postgresql.exceptions.GenericDatabaseException class PreparedStatementSpec extends Specification with DatabaseTestHelper { @@ -282,6 +282,61 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { } } + "support UUID" in { + if ( System.getenv("TRAVIS") == null ) { + withHandler { + handler => + val create = """create temp table uuids + |( + |id bigserial primary key, + |my_id uuid + |);""".stripMargin + + val insert = "INSERT INTO uuids (my_id) VALUES (?) RETURNING id" + val select = "SELECT * FROM uuids" + + val uuid = UUID.randomUUID() + + executeDdl(handler, create) + executePreparedStatement(handler, insert, Array(uuid) ) + val result = executePreparedStatement(handler, select).rows.get + + result(0)("my_id").asInstanceOf[UUID] === uuid + } + success + } else { + pending + } + } + + "support UUID array" in { + if ( System.getenv("TRAVIS") == null ) { + withHandler { + handler => + val create = """create temp table uuids + |( + |id bigserial primary key, + |my_id uuid[] + |);""".stripMargin + + val insert = "INSERT INTO uuids (my_id) VALUES (?) RETURNING id" + val select = "SELECT * FROM uuids" + + val uuid1 = UUID.randomUUID() + val uuid2 = UUID.randomUUID() + + executeDdl(handler, create) + executePreparedStatement(handler, insert, Array(Array(uuid1, uuid2)) ) + val result = executePreparedStatement(handler, select).rows.get + + result(0)("my_id").asInstanceOf[Seq[UUID]] === Seq(uuid1, uuid2) + } + success + } else { + pending + } + } + } } From 16f9f18d606e5251599d025e4770935251d2c8fa Mon Sep 17 00:00:00 2001 From: Dylan Simon Date: Wed, 11 Feb 2015 12:57:03 -0500 Subject: [PATCH 27/82] Ignore errors generated by AsyncObjectPool.giveBack Possible fix for #132, untested. 
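The intent, sketched outside the library: complete the caller's future only after the object has gone back to the pool, and complete it with the outcome of the user's function, so a failure inside `giveBack` can no longer replace that outcome. A generic rendition of the pattern the patch below applies to `use`:

```scala
import scala.concurrent.{ExecutionContext, Future, Promise}

// Run f on a pooled item, return the item, and only then surface f's result.
// Whatever giveBack does (succeed or fail), the caller sees f's outcome.
def withItem[T, A](take: Future[T], giveBack: T => Future[Unit])(f: T => Future[A])
                  (implicit ec: ExecutionContext): Future[A] =
  take.flatMap { item =>
    val p = Promise[A]()
    f(item).onComplete { outcome =>
      giveBack(item).onComplete(_ => p.complete(outcome))
    }
    p.future
  }
```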
--- .../mauricio/async/db/pool/AsyncObjectPool.scala | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala index a288eea5..4e9050ba 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala @@ -16,7 +16,7 @@ package com.github.mauricio.async.db.pool -import scala.concurrent.{ExecutionContext, Future} +import scala.concurrent.{ExecutionContext, Future, Promise} /** * @@ -72,11 +72,13 @@ trait AsyncObjectPool[T] { def use[A](f: (T) => Future[A])(implicit executionContext: ExecutionContext): Future[A] = take.flatMap { item => - f(item) recoverWith { - case error => giveBack(item).flatMap(_ => Future.failed(error)) - } flatMap { res => - giveBack(item).map { _ => res } + val p = Promise[A]() + f(item).onComplete { r => + giveBack(item).onComplete { _ => + p.complete(r) + } } + p.future } } From efaacba482ada65b9ebbdc218aba4a92d4b1036e Mon Sep 17 00:00:00 2001 From: nkgm Date: Thu, 19 Mar 2015 11:08:01 +0200 Subject: [PATCH 28/82] Add LISTEN/NOTIFY to README --- README.markdown | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/README.markdown b/README.markdown index 0d90697a..e1e54004 100644 --- a/README.markdown +++ b/README.markdown @@ -17,6 +17,7 @@ - [Prepared statements](#prepared-statements) - [Transactions](#transactions) - [Example usage (for PostgreSQL, but it looks almost the same on MySQL)](#example-usage-for-postgresql-but-it-looks-almost-the-same-on-mysql) + - [LISTEN/NOTIFY support (PostgreSQL only)](#listennotify-support-postgresql-only) - [Contributing](#contributing) - [Licence](#licence) @@ -269,6 +270,21 @@ disconnect and the connection is closed. You can also use the `ConnectionPool` provided by the driver to simplify working with database connections in your app. Check the blog post above for more details and the project's ScalaDocs. +## LISTEN/NOTIFY support (PostgreSQL only) + +LISTEN/NOTIFY is a PostgreSQL-specific feature for database-wide publish-subscribe scenarios. You can listen to database +notifications as such: + +```scala + val connection: Connection = ... 
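+    // Any other session can publish to this channel with the NOTIFY command,
+    // optionally carrying a payload, e.g. connection.sendQuery("NOTIFY my_channel, 'hello'").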
+ + connection.sendQuery("LISTEN my_channel") + connection.registerNotifyListener { + message => + println(s"channel: ${message.channel}, payload: ${message.payload}") + } +``` + ## Contributing Contributing to the project is simple, fork it on Github, hack on what you're insterested in seeing done or at the From 29c23628af05cf24b4f14d548e2c9c0e0b520321 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sun, 3 May 2015 17:58:28 -0300 Subject: [PATCH 29/82] Include transaction specs for both MySQL and PostgreSQL --- .gitignore | 1 + Procfile | 2 +- .../async/db/mysql/TransactionSpec.scala | 54 +++++++++++++++++-- .../postgresql/pool/ConnectionPoolSpec.scala | 32 +++++++++++ script/prepare_build.sh | 4 +- 5 files changed, 87 insertions(+), 6 deletions(-) diff --git a/.gitignore b/.gitignore index 0122f1ca..1aaf8978 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,4 @@ +databases/* out/* generate_bundles.rb .cache diff --git a/Procfile b/Procfile index 6c1b0717..1288bcfe 100644 --- a/Procfile +++ b/Procfile @@ -1,2 +1,2 @@ -postgresql: postgres -D /Users/mauricio/databases/postgresql +postgresql: postgres -D databases/postgresql mysql: mysqld --log-warnings --console \ No newline at end of file diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala index 0312f0d5..0ef2f86b 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala @@ -1,14 +1,28 @@ package com.github.mauricio.async.db.mysql +import java.util.UUID +import java.util.concurrent.TimeUnit + import org.specs2.mutable.Specification import com.github.mauricio.async.db.util.FutureUtils.awaitFuture import com.github.mauricio.async.db.mysql.exceptions.MySQLException import com.github.mauricio.async.db.Connection +import scala.concurrent.duration.Duration +import scala.concurrent.{Await, Future} +import scala.util.{Success, Failure} + +object TransactionSpec { + + val BrokenInsert = """INSERT INTO users (id, name) VALUES (1, 'MaurĂ­cio AragĂ£o')""" + val InsertUser = """INSERT INTO users (name) VALUES (?)""" + val TransactionInsert = "insert into transaction_test (id) values (?)" + +} + class TransactionSpec extends Specification with ConnectionHelper { - val brokenInsert = """INSERT INTO users (id, name) VALUES (1, 'MaurĂ­cio AragĂ£o')""" - val insertUser = """INSERT INTO users (name) VALUES (?)""" + import TransactionSpec._ "connection in transaction" should { @@ -42,7 +56,7 @@ class TransactionSpec extends Specification with ConnectionHelper { val future = connection.inTransaction { c => - c.sendQuery(this.insert).flatMap(r => c.sendQuery(brokenInsert)) + c.sendQuery(this.insert).flatMap(r => c.sendQuery(BrokenInsert)) } try { @@ -77,7 +91,7 @@ class TransactionSpec extends Specification with ConnectionHelper { val future = pool.inTransaction { c => connection = c - c.sendQuery(this.brokenInsert) + c.sendQuery(BrokenInsert) } try { @@ -97,6 +111,38 @@ class TransactionSpec extends Specification with ConnectionHelper { } + "runs commands for a transaction in a single connection" in { + + val id = UUID.randomUUID().toString + + withPool { + pool => + val operations = pool.inTransaction { + connection => + connection.sendPreparedStatement(TransactionInsert, List(id)).flatMap { + result => + connection.sendPreparedStatement(TransactionInsert, List(id)).map { + failure => + 
List(result, failure) + } + } + } + + Await.ready(operations, Duration(5, TimeUnit.SECONDS)) + + operations.value.get match { + case Success(e) => failure("should not have executed") + case Failure(e) => { + e.asInstanceOf[MySQLException].errorMessage.errorCode === 1062 + executePreparedStatement(pool, "select * from transaction_test where id = ?", id).rows.get.size === 0 + success("ok") + } + } + + } + + } + } } diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala index 02295b16..dc9053d7 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala @@ -16,12 +16,20 @@ package com.github.mauricio.async.db.postgresql.pool +import java.util.UUID + import com.github.mauricio.async.db.pool.{ConnectionPool, PoolConfiguration} import com.github.mauricio.async.db.postgresql.{PostgreSQLConnection, DatabaseTestHelper} import org.specs2.mutable.Specification +object ConnectionPoolSpec { + val Insert = "insert into transaction_test (id) values (?)" +} + class ConnectionPoolSpec extends Specification with DatabaseTestHelper { + import ConnectionPoolSpec.Insert + "pool" should { "give you a connection when sending statements" in { @@ -51,6 +59,30 @@ class ConnectionPoolSpec extends Specification with DatabaseTestHelper { } } + "runs commands for a transaction in a single connection" in { + + val id = UUID.randomUUID().toString + + withPool { + pool => + val operations = pool.inTransaction { + connection => + connection.sendPreparedStatement(Insert, List(id)).flatMap { + result => + connection.sendPreparedStatement(Insert, List(id)).map { + failure => + List(result, failure) + } + } + } + + val resultSets = await(operations) + + resultSets.size mustEqual(2) + } + + } + } def withPool[R]( fn : (ConnectionPool[PostgreSQLConnection]) => R ) : R = { diff --git a/script/prepare_build.sh b/script/prepare_build.sh index a3a4e183..96aa8345 100755 --- a/script/prepare_build.sh +++ b/script/prepare_build.sh @@ -2,6 +2,7 @@ echo "Preparing MySQL configs" mysql -u root -e 'create database mysql_async_tests;' +mysql -u root -e "create table mysql_async_tests.transaction_test (id varchar(255) not null, primary key (id))" mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async'@'localhost' IDENTIFIED BY 'root' WITH GRANT OPTION"; mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async_old'@'localhost' WITH GRANT OPTION"; mysql -u root -e "UPDATE mysql.user SET Password = OLD_PASSWORD('do_not_use_this'), plugin = 'mysql_old_password' where User = 'mysql_async_old'; flush privileges;"; @@ -12,10 +13,11 @@ echo "preparing postgresql configs" psql -c 'create database netty_driver_test;' -U postgres psql -c 'create database netty_driver_time_test;' -U postgres psql -c "alter database netty_driver_time_test set timezone to 'GMT'" -U postgres +psql -c "create table transaction_test ( id varchar(255) not null, constraint id_unique primary key (id))" -U postgres netty_driver_test psql -c "CREATE USER postgres_md5 WITH PASSWORD 'postgres_md5'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_md5;" -U postgres psql -c "CREATE USER postgres_cleartext WITH PASSWORD 'postgres_cleartext'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_cleartext;" -U postgres psql -c 
"CREATE USER postgres_kerberos WITH PASSWORD 'postgres_kerberos'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_kerberos;" -U postgres -psql -d "netty_driver_test" -c "CREATE TYPE example_mood AS ENUM ('sad', 'ok', 'happy');" +psql -d "netty_driver_test" -c "CREATE TYPE example_mood AS ENUM ('sad', 'ok', 'happy');" -U postgres sudo chmod 777 /etc/postgresql/9.1/main/pg_hba.conf From edabfa955368d02e4ccc97c1cf2eb9c7a6df9645 Mon Sep 17 00:00:00 2001 From: haski Date: Wed, 24 Jun 2015 12:07:34 +0300 Subject: [PATCH 30/82] 1. fix connection leak by returning an error on inactive channel inside the future 2. catch exceptions on user function in the object pool 3. add request timeout for queries --- .../mauricio/async/db/Configuration.scala | 11 +++---- .../async/db/pool/AsyncObjectPool.scala | 17 ++++++++-- .../async/db/mysql/MySQLConnection.scala | 31 ++++++++++++------- .../mysql/codec/MySQLConnectionHandler.scala | 19 +++++++++--- 4 files changed, 53 insertions(+), 25 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala index b9d3041f..e5fb7d6a 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala @@ -17,11 +17,10 @@ package com.github.mauricio.async.db import java.nio.charset.Charset -import scala.Predef._ -import scala.{None, Option, Int} + +import io.netty.buffer.{AbstractByteBufAllocator, PooledByteBufAllocator} import io.netty.util.CharsetUtil -import io.netty.buffer.AbstractByteBufAllocator -import io.netty.buffer.PooledByteBufAllocator + import scala.concurrent.duration._ object Configuration { @@ -55,5 +54,5 @@ case class Configuration(username: String, maximumMessageSize: Int = 16777216, allocator: AbstractByteBufAllocator = PooledByteBufAllocator.DEFAULT, connectTimeout: Duration = 5.seconds, - testTimeout: Duration = 5.seconds - ) + testTimeout: Duration = 5.seconds, + requestTimeout: Duration = 5.seconds) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala index 4e9050ba..3e4345a8 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/AsyncObjectPool.scala @@ -73,11 +73,22 @@ trait AsyncObjectPool[T] { def use[A](f: (T) => Future[A])(implicit executionContext: ExecutionContext): Future[A] = take.flatMap { item => val p = Promise[A]() - f(item).onComplete { r => - giveBack(item).onComplete { _ => - p.complete(r) + try { + f(item).onComplete { r => + giveBack(item).onComplete { _ => + p.complete(r) + } } + } catch { + // calling f might throw exception. + // in that case the item will be removed from the pool if identified as invalid by the factory. + // the error returned to the user is the original error thrown by f. 
+ case error: Throwable => + giveBack(item).onComplete { _ => + p.failure(error) + } } + p.future } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala index 210cebfc..bb0f905c 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala @@ -16,21 +16,22 @@ package com.github.mauricio.async.db.mysql +import java.util.concurrent.TimeoutException +import java.util.concurrent.atomic.{AtomicLong, AtomicReference} + import com.github.mauricio.async.db._ import com.github.mauricio.async.db.exceptions._ -import com.github.mauricio.async.db.mysql.codec.{MySQLHandlerDelegate, MySQLConnectionHandler} +import com.github.mauricio.async.db.mysql.codec.{MySQLConnectionHandler, MySQLHandlerDelegate} import com.github.mauricio.async.db.mysql.exceptions.MySQLException import com.github.mauricio.async.db.mysql.message.client._ import com.github.mauricio.async.db.mysql.message.server._ import com.github.mauricio.async.db.mysql.util.CharsetMapper import com.github.mauricio.async.db.util.ChannelFutureTransformer.toFuture import com.github.mauricio.async.db.util._ -import java.util.concurrent.atomic.{AtomicLong,AtomicReference} -import scala.concurrent.{ExecutionContext, Promise, Future} -import io.netty.channel.{EventLoopGroup, ChannelHandlerContext} -import scala.util.Failure -import scala.Some -import scala.util.Success +import io.netty.channel.{ChannelHandlerContext, EventLoopGroup} + +import scala.concurrent.{ExecutionContext, Future, Promise} +import scala.util.{Failure, Success} object MySQLConnection { final val Counter = new AtomicLong() @@ -185,18 +186,24 @@ class MySQLConnection( def sendQuery(query: String): Future[QueryResult] = { this.validateIsReadyForQuery() - val promise = Promise[QueryResult] + val promise = Promise[QueryResult]() this.setQueryPromise(promise) this.connectionHandler.write(new QueryMessage(query)) + addTimeout(promise) + promise.future } - private def failQueryPromise(t: Throwable) { + private def addTimeout(promise: Promise[QueryResult]): Unit = { + this.connectionHandler.schedule( + promise.tryFailure(new TimeoutException(s"response took too long to return(${configuration.requestTimeout})")), + configuration.requestTimeout) + } + private def failQueryPromise(t: Throwable) { this.clearQueryPromise.foreach { _.tryFailure(t) } - } private def succeedQueryPromise(queryResult: QueryResult) { @@ -234,9 +241,11 @@ class MySQLConnection( if ( values.length != totalParameters ) { throw new InsufficientParametersException(totalParameters, values) } - val promise = Promise[QueryResult] + val promise = Promise[QueryResult]() this.setQueryPromise(promise) this.connectionHandler.sendPreparedStatement(query, values) + addTimeout(promise) + promise.future } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala index 6ce93145..792aff77 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/MySQLConnectionHandler.scala @@ -18,6 +18,7 @@ package com.github.mauricio.async.db.mysql.codec import java.net.InetSocketAddress import java.nio.ByteBuffer +import 
java.util.concurrent.TimeUnit import com.github.mauricio.async.db.Configuration import com.github.mauricio.async.db.exceptions.DatabaseException @@ -37,6 +38,7 @@ import io.netty.handler.codec.CodecException import scala.annotation.switch import scala.collection.mutable.{ArrayBuffer, HashMap} import scala.concurrent._ +import scala.concurrent.duration.Duration class MySQLConnectionHandler( configuration: Configuration, @@ -319,17 +321,18 @@ class MySQLConnectionHandler( } private def writeAndHandleError( message : Any ) : ChannelFuture = { - if ( this.currentContext.channel().isActive ) { - val future = this.currentContext.writeAndFlush(message) + val res = this.currentContext.writeAndFlush(message) - future.onFailure { + res.onFailure { case e : Throwable => handleException(e) } - future + res } else { - throw new DatabaseException("This channel is not active and can't take messages") + val error = new DatabaseException("This channel is not active and can't take messages") + handleException(error) + this.currentContext.channel().newFailedFuture(error) } } @@ -351,4 +354,10 @@ class MySQLConnectionHandler( } } + def schedule(block: => Unit, duration: Duration): Unit = { + this.currentContext.channel().eventLoop().schedule(new Runnable { + override def run(): Unit = block + }, duration.toMillis, TimeUnit.MILLISECONDS) + } + } From 37e5fc5dad1a55922360e30ec462d1294d788ca8 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Thu, 25 Jun 2015 09:52:38 -0300 Subject: [PATCH 31/82] Fix PostgreSQL pool spec --- .../async/db/postgresql/pool/ConnectionPoolSpec.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala index dc9053d7..b71ebe65 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala @@ -19,6 +19,7 @@ package com.github.mauricio.async.db.postgresql.pool import java.util.UUID import com.github.mauricio.async.db.pool.{ConnectionPool, PoolConfiguration} +import com.github.mauricio.async.db.postgresql.exceptions.GenericDatabaseException import com.github.mauricio.async.db.postgresql.{PostgreSQLConnection, DatabaseTestHelper} import org.specs2.mutable.Specification @@ -76,9 +77,8 @@ class ConnectionPoolSpec extends Specification with DatabaseTestHelper { } } - val resultSets = await(operations) + await(operations) must throwA[GenericDatabaseException] - resultSets.size mustEqual(2) } } From 7a0fc6c234ba215961b9c27b2c912ac81a38bbe4 Mon Sep 17 00:00:00 2001 From: Chris Kahn Date: Tue, 7 Jul 2015 11:35:52 -0400 Subject: [PATCH 32/82] TimeAndDateSpec should test LocalTime printing/encoding using a prepared statement --- .../github/mauricio/async/db/postgresql/TimeAndDateSpec.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala index e671a5b4..88fee1dd 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala @@ -35,7 +35,7 @@ class TimeAndDateSpec 
extends Specification with DatabaseTestHelper { )""" executeDdl(handler, create) - executeQuery(handler, "INSERT INTO messages (moment) VALUES ('04:05:06')") + executePreparedStatement(handler, "INSERT INTO messages (moment) VALUES (?)", Array[Any](new LocalTime(4, 5, 6))) val rows = executePreparedStatement(handler, "select * from messages").rows.get @@ -60,7 +60,7 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { )""" executeDdl(handler, create) - executeQuery(handler, "INSERT INTO messages (moment) VALUES ('04:05:06.134')") + executePreparedStatement(handler, "INSERT INTO messages (moment) VALUES (?)", Array[Any](new LocalTime(4, 5, 6, 134))) val rows = executePreparedStatement(handler, "select * from messages").rows.get From 8d2050075fe2cae8ab18a06cf6e36e93024d0136 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Fri, 10 Jul 2015 22:15:03 -0300 Subject: [PATCH 33/82] Don't use a formatter with an optional value when producing times - fixes #142 --- .../async/db/column/TimeEncoderDecoder.scala | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala index 9a801775..a7d0c879 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/TimeEncoderDecoder.scala @@ -33,14 +33,16 @@ class TimeEncoderDecoder extends ColumnEncoderDecoder { .appendOptional(optional) .toFormatter + final private val printer = new DateTimeFormatterBuilder() + .appendPattern("HH:mm:ss.SSSSSS") + .toFormatter + def formatter = format - override def decode(value: String): LocalTime = { + override def decode(value: String): LocalTime = format.parseLocalTime(value) - } - override def encode(value: Any): String = { - this.format.print(value.asInstanceOf[LocalTime]) - } + override def encode(value: Any): String = + this.printer.print(value.asInstanceOf[LocalTime]) } From c201c41277164da1ad2c57478aaf5be4ce87339f Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Fri, 10 Jul 2015 22:44:35 -0300 Subject: [PATCH 34/82] Add test for processing LocalDateTime objects --- .../async/db/postgresql/TimeAndDateSpec.scala | 16 ++++++++++++++++ 1 file changed, 16 insertions(+) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala index 88fee1dd..67e7b877 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala @@ -200,6 +200,22 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { } + "handle sending a LocalDateTime and return a LocalDateTime for a timestamp without timezone column" in { + + withTimeHandler { + conn => + val date1 = new LocalDateTime(2190319) + + await(conn.sendPreparedStatement("CREATE TEMP TABLE TEST(T TIMESTAMP)")) + await(conn.sendPreparedStatement("INSERT INTO TEST(T) VALUES(?)", Seq(date1))) + val result = await(conn.sendPreparedStatement("SELECT T FROM TEST")) + val date2 = result.rows.get.head(0) + + date2 === date1 + } + + } + "handle sending a date with timezone and retrieving the date with the same time zone" in { withTimeHandler { From 
c06eb58fa729207c137a2c406505481216356137 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Mon, 13 Jul 2015 09:57:59 -0300 Subject: [PATCH 35/82] Closing 0.2.17 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index adf61daa..d35aaa34 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -45,7 +45,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.17-SNAPSHOT" + val commonVersion = "0.2.17" val projectScalaVersion = "2.11.0" val specs2Dependency = "org.specs2" %% "specs2" % "2.3.11" % "test" From 19d7269be72a6a81b620ccaef353559716e9bc69 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Fri, 24 Jul 2015 10:09:33 -0300 Subject: [PATCH 36/82] Removing timeout implementation for MySQL, client code should be responsible for deciding a timeout on it's futures --- .../com/github/mauricio/async/db/Configuration.scala | 3 +-- .../mauricio/async/db/mysql/MySQLConnection.scala | 9 --------- project/Build.scala | 12 ++++++------ 3 files changed, 7 insertions(+), 17 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala index e5fb7d6a..111852c2 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala @@ -54,5 +54,4 @@ case class Configuration(username: String, maximumMessageSize: Int = 16777216, allocator: AbstractByteBufAllocator = PooledByteBufAllocator.DEFAULT, connectTimeout: Duration = 5.seconds, - testTimeout: Duration = 5.seconds, - requestTimeout: Duration = 5.seconds) + testTimeout: Duration = 5.seconds) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala index bb0f905c..042f6bf6 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala @@ -16,7 +16,6 @@ package com.github.mauricio.async.db.mysql -import java.util.concurrent.TimeoutException import java.util.concurrent.atomic.{AtomicLong, AtomicReference} import com.github.mauricio.async.db._ @@ -189,17 +188,10 @@ class MySQLConnection( val promise = Promise[QueryResult]() this.setQueryPromise(promise) this.connectionHandler.write(new QueryMessage(query)) - addTimeout(promise) promise.future } - private def addTimeout(promise: Promise[QueryResult]): Unit = { - this.connectionHandler.schedule( - promise.tryFailure(new TimeoutException(s"response took too long to return(${configuration.requestTimeout})")), - configuration.requestTimeout) - } - private def failQueryPromise(t: Throwable) { this.clearQueryPromise.foreach { _.tryFailure(t) @@ -244,7 +236,6 @@ class MySQLConnection( val promise = Promise[QueryResult]() this.setQueryPromise(promise) this.connectionHandler.sendPreparedStatement(query, values) - addTimeout(promise) promise.future } diff --git a/project/Build.scala b/project/Build.scala index d35aaa34..dc1c2c8b 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -45,18 +45,18 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.17" - val projectScalaVersion = "2.11.0" + val commonVersion = "0.2.18-SNAPSHOT" + val projectScalaVersion = "2.11.7" val 
specs2Dependency = "org.specs2" %% "specs2" % "2.3.11" % "test" - val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.0.13" % "test" + val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.3" % "test" val commonDependencies = Seq( - "org.slf4j" % "slf4j-api" % "1.7.5", + "org.slf4j" % "slf4j-api" % "1.7.12", "joda-time" % "joda-time" % "2.3", "org.joda" % "joda-convert" % "1.5", - "io.netty" % "netty-all" % "4.0.25.Final", - "org.javassist" % "javassist" % "3.18.1-GA", + "io.netty" % "netty-all" % "4.0.29.Final", + "org.javassist" % "javassist" % "3.20.0-GA", specs2Dependency, logbackDependency ) From 77226d8d474945341659ab80ae7cdc0b3cc43fbf Mon Sep 17 00:00:00 2001 From: lifey Date: Tue, 28 Jul 2015 09:52:13 +0300 Subject: [PATCH 37/82] Add timeout support for queries fixes on timeout scheduler fixes on timeout scheduler fixes on timeout scheduler styling issues Improve unit test --- .../mauricio/async/db/Configuration.scala | 3 +- .../ConnectionTimeoutedException.scala | 6 ++ .../async/db/pool/TimeoutScheduler.scala | 37 +++++++++ .../async/db/pool/DummyTimeoutScheduler.scala | 29 +++++++ .../async/db/pool/TimeoutSchedulerSpec.scala | 71 ++++++++++++++++ .../async/db/mysql/MySQLConnection.scala | 9 ++- .../mysql/pool/MySQLConnectionFactory.scala | 7 +- .../async/db/mysql/ConnectionHelper.scala | 13 +++ .../async/db/mysql/QueryTimeoutSpec.scala | 80 +++++++++++++++++++ .../db/postgresql/PostgreSQLConnection.scala | 11 ++- .../pool/PostgreSQLConnectionFactory.scala | 4 + 11 files changed, 259 insertions(+), 11 deletions(-) create mode 100644 db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/ConnectionTimeoutedException.scala create mode 100644 db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala create mode 100644 db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala create mode 100644 db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala create mode 100644 mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala index 111852c2..089a634b 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala @@ -54,4 +54,5 @@ case class Configuration(username: String, maximumMessageSize: Int = 16777216, allocator: AbstractByteBufAllocator = PooledByteBufAllocator.DEFAULT, connectTimeout: Duration = 5.seconds, - testTimeout: Duration = 5.seconds) + testTimeout: Duration = 5.seconds, + queryTimeout: Duration = Duration.Inf) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/ConnectionTimeoutedException.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/ConnectionTimeoutedException.scala new file mode 100644 index 00000000..7e02c17c --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/ConnectionTimeoutedException.scala @@ -0,0 +1,6 @@ +package com.github.mauricio.async.db.exceptions + +import com.github.mauricio.async.db.Connection + +class ConnectionTimeoutedException( val connection : Connection ) + extends DatabaseException( "The connection %s has a timeouted query and is being closed".format(connection) ) \ No newline at end of 
file diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala new file mode 100644 index 00000000..b2c2616b --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala @@ -0,0 +1,37 @@ +package com.github.mauricio.async.db.pool + +import java.util.concurrent.atomic.AtomicBoolean +import java.util.concurrent.{TimeUnit, TimeoutException, ScheduledFuture} +import com.github.mauricio.async.db.util.NettyUtils +import scala.concurrent.{ExecutionContext, Promise} +import scala.concurrent.duration.Duration + +trait TimeoutScheduler { + implicit val internalPool: ExecutionContext + def onTimeout // implementors should decide here what they want to do when a timeout occur + private var isTimeoutedBool = new AtomicBoolean(false); + def isTimeouted = isTimeoutedBool.get // We need this property as isClosed takes time to complete and + // we don't want the connection to be used again. + + def addTimeout[A](promise: Promise[A], duration: Duration) : Option[ScheduledFuture[_]] = { + if (duration != Duration.Inf) { + val scheduledFuture = schedule( + { + if (promise.tryFailure(new TimeoutException(s"Operation is timeouted after it took too long to return (${duration})"))) { + isTimeoutedBool.set(true) + onTimeout + } + }, + duration) + promise.future.onComplete(x => scheduledFuture.cancel(false)) + + return Some(scheduledFuture) + } + return None + } + + def schedule(block: => Unit, duration: Duration) : ScheduledFuture[_] = + NettyUtils.DefaultEventLoopGroup.schedule(new Runnable { + override def run(): Unit = block + }, duration.toMillis, TimeUnit.MILLISECONDS) +} diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala new file mode 100644 index 00000000..302ac321 --- /dev/null +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala @@ -0,0 +1,29 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
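On the configuration side, the scheduler is driven by the new `queryTimeout` setting; a sketch of enabling it (connection parameters here mirror the MySQL test setup, and the default `Duration.Inf` keeps the old behaviour):

```scala
import com.github.mauricio.async.db.Configuration
import scala.concurrent.duration._

// Queries running longer than queryTimeout fail their future with a
// TimeoutException, the connection is flagged as timed out, and the pool's
// factory refuses to hand it out again.
val configuration = new Configuration(
  "mysql_async",
  "localhost",
  port = 3306,
  password = Some("root"),
  database = Some("mysql_async_tests"),
  queryTimeout = 5.seconds
)
```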
+ */ + +package com.github.mauricio.async.db.pool + +import java.util.concurrent.atomic.AtomicInteger +import com.github.mauricio.async.db.util.ExecutorServiceUtils +/** + * Implementation of TimeoutScheduler used for testing + */ +class DummyTimeoutScheduler extends TimeoutScheduler { + implicit val internalPool = ExecutorServiceUtils.CachedExecutionContext + private val timeOuts = new AtomicInteger + override def onTimeout = timeOuts.incrementAndGet + def timeoutCount = timeOuts.get() +} diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala new file mode 100644 index 00000000..46f20866 --- /dev/null +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala @@ -0,0 +1,71 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.github.mauricio.async.db.pool + +import java.util.concurrent.{ScheduledFuture, TimeoutException} +import com.github.mauricio.async.db.util.{ByteBufferUtils, ExecutorServiceUtils} +import org.specs2.mutable.SpecificationWithJUnit +import scala.concurrent.duration._ +import scala.concurrent.{Future, Promise} + +/** + * Tests for TimeoutScheduler + */ +class TimeoutSchedulerSpec extends SpecificationWithJUnit { + + val TIMEOUT_DID_NOT_PASS = "timeout did not pass" + + + "test timeout did not pass" in { + val timeoutScheduler = new DummyTimeoutScheduler() + val promise = Promise[String]() + val scheduledFuture = timeoutScheduler.addTimeout(promise,Duration(1000, MILLISECONDS)) + Thread.sleep(100); + promise.isCompleted === false + promise.success(TIMEOUT_DID_NOT_PASS) + Thread.sleep(1500) + promise.future.value.get.get === TIMEOUT_DID_NOT_PASS + scheduledFuture.get.isCancelled === true + timeoutScheduler.timeoutCount === 0 + } + + "test timeout passed" in { + val timeoutMillis = 100 + val promise = Promise[String]() + val timeoutScheduler = new DummyTimeoutScheduler() + val scheduledFuture = timeoutScheduler.addTimeout(promise,Duration(timeoutMillis, MILLISECONDS)) + Thread.sleep(1000) + promise.isCompleted === true + scheduledFuture.get.isCancelled === false + promise.trySuccess(TIMEOUT_DID_NOT_PASS) + timeoutScheduler.timeoutCount === 1 + promise.future.value.get.get must throwA[TimeoutException](message = s"Operation is timeouted after it took too long to return \\(${timeoutMillis} milliseconds\\)") + } + + + "test no timeout" in { + val timeoutScheduler = new DummyTimeoutScheduler() + val promise = Promise[String]() + val scheduledFuture = timeoutScheduler.addTimeout(promise,Duration.Inf) + Thread.sleep(1000) + scheduledFuture === None + promise.isCompleted === false + promise.success(TIMEOUT_DID_NOT_PASS) + promise.future.value.get.get === TIMEOUT_DID_NOT_PASS + timeoutScheduler.timeoutCount === 0 + } +} + diff --git 
a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala index 042f6bf6..f4e7cc60 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala @@ -25,6 +25,7 @@ import com.github.mauricio.async.db.mysql.exceptions.MySQLException import com.github.mauricio.async.db.mysql.message.client._ import com.github.mauricio.async.db.mysql.message.server._ import com.github.mauricio.async.db.mysql.util.CharsetMapper +import com.github.mauricio.async.db.pool.TimeoutScheduler import com.github.mauricio.async.db.util.ChannelFutureTransformer.toFuture import com.github.mauricio.async.db.util._ import io.netty.channel.{ChannelHandlerContext, EventLoopGroup} @@ -46,6 +47,7 @@ class MySQLConnection( ) extends MySQLHandlerDelegate with Connection + with TimeoutScheduler { import MySQLConnection.log @@ -56,7 +58,7 @@ class MySQLConnection( private final val connectionCount = MySQLConnection.Counter.incrementAndGet() private final val connectionId = s"[mysql-connection-$connectionCount]" - private implicit val internalPool = executionContext + override implicit val internalPool = executionContext private final val connectionHandler = new MySQLConnectionHandler( configuration, @@ -188,7 +190,7 @@ class MySQLConnection( val promise = Promise[QueryResult]() this.setQueryPromise(promise) this.connectionHandler.write(new QueryMessage(query)) - + addTimeout(promise, configuration.queryTimeout) promise.future } @@ -224,6 +226,7 @@ class MySQLConnection( } def disconnect: Future[Connection] = this.close + override def onTimeout = disconnect def isConnected: Boolean = this.connectionHandler.isConnected @@ -236,7 +239,7 @@ class MySQLConnection( val promise = Promise[QueryResult]() this.setQueryPromise(promise) this.connectionHandler.sendPreparedStatement(query, values) - + addTimeout(promise,configuration.queryTimeout) promise.future } diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala index 83791366..273e76af 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/pool/MySQLConnectionFactory.scala @@ -21,9 +21,8 @@ import com.github.mauricio.async.db.pool.ObjectFactory import com.github.mauricio.async.db.mysql.MySQLConnection import scala.util.Try import scala.concurrent.Await -import scala.concurrent.duration._ import com.github.mauricio.async.db.util.Log -import com.github.mauricio.async.db.exceptions.{ConnectionStillRunningQueryException, ConnectionNotConnectedException} +import com.github.mauricio.async.db.exceptions.{ConnectionTimeoutedException, ConnectionStillRunningQueryException, ConnectionNotConnectedException} object MySQLConnectionFactory { final val log = Log.get[MySQLConnectionFactory] @@ -90,7 +89,9 @@ class MySQLConnectionFactory( configuration : Configuration ) extends ObjectFact */ def validate(item: MySQLConnection): Try[MySQLConnection] = { Try{ - + if ( item.isTimeouted ) { + throw new ConnectionTimeoutedException(item) + } if ( !item.isConnected ) { throw new ConnectionNotConnectedException(item) } diff --git 
a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala index 771fe1e3..8ace95e7 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/ConnectionHelper.scala @@ -115,6 +115,19 @@ trait ConnectionHelper { } + def withConfigurablePool[T]( configuration : Configuration )( fn : (ConnectionPool[MySQLConnection]) => T ) : T = { + + val factory = new MySQLConnectionFactory(configuration) + val pool = new ConnectionPool[MySQLConnection](factory, PoolConfiguration.Default) + + try { + fn(pool) + } finally { + awaitFuture( pool.close ) + } + + } + def withConnection[T]( fn : (MySQLConnection) => T ) : T = withConfigurableConnection(this.defaultConfiguration)(fn) diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala new file mode 100644 index 00000000..09324c40 --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala @@ -0,0 +1,80 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.mysql + +import java.util.concurrent.TimeoutException +import com.github.mauricio.async.db.Configuration +import org.specs2.execute.{AsResult, Success, ResultExecution} +import org.specs2.mutable.Specification +import scala.concurrent.Await +import scala.concurrent.duration._ + +class QueryTimeoutSpec extends Specification with ConnectionHelper { + implicit def unitAsResult: AsResult[Unit] = new AsResult[Unit] { + def asResult(r: =>Unit) = + ResultExecution.execute(r)(_ => Success()) + } + "Simple query with 1 nanosec timeout" in { + withConfigurablePool(shortTimeoutConfiguration) { + pool => { + val connection = Await.result(pool.take, Duration(10,SECONDS)) + connection.isTimeouted === false + connection.isConnected === true + val queryResultFuture = connection.sendQuery("select sleep(1)") + Await.result(queryResultFuture, Duration(10,SECONDS)) must throwA[TimeoutException]() + connection.isTimeouted === true + Await.ready(pool.giveBack(connection), Duration(10,SECONDS)) + pool.availables.count(_ == connection) === 0 // connection removed from pool + // we do not know when the connection will be closed. 
+ } + } + } + + "Simple query with 5 sec timeout" in { + withConfigurablePool(longTimeoutConfiguration) { + pool => { + val connection = Await.result(pool.take, Duration(10,SECONDS)) + connection.isTimeouted === false + connection.isConnected === true + val queryResultFuture = connection.sendQuery("select sleep(1)") + Await.result(queryResultFuture, Duration(10,SECONDS)).rows.get.size === 1 + connection.isTimeouted === false + connection.isConnected === true + Await.ready(pool.giveBack(connection), Duration(10,SECONDS)) + pool.availables.count(_ == connection) === 1 // connection returned to pool + } + } + } + + def shortTimeoutConfiguration = new Configuration( + "mysql_async", + "localhost", + port = 3306, + password = Some("root"), + database = Some("mysql_async_tests"), + queryTimeout = Duration(1,NANOSECONDS) + ) + + def longTimeoutConfiguration = new Configuration( + "mysql_async", + "localhost", + port = 3306, + password = Some("root"), + database = Some("mysql_async_tests"), + queryTimeout = Duration(5,SECONDS) + ) +} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala index 45d53901..3cf5c2b3 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala @@ -20,6 +20,7 @@ import com.github.mauricio.async.db.QueryResult import com.github.mauricio.async.db.column.{ColumnEncoderRegistry, ColumnDecoderRegistry} import com.github.mauricio.async.db.exceptions.{InsufficientParametersException, ConnectionStillRunningQueryException} import com.github.mauricio.async.db.general.MutableResultSet +import com.github.mauricio.async.db.pool.TimeoutScheduler import com.github.mauricio.async.db.postgresql.codec.{PostgreSQLConnectionDelegate, PostgreSQLConnectionHandler} import com.github.mauricio.async.db.postgresql.column.{PostgreSQLColumnDecoderRegistry, PostgreSQLColumnEncoderRegistry} import com.github.mauricio.async.db.postgresql.exceptions._ @@ -48,7 +49,8 @@ class PostgreSQLConnection executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext ) extends PostgreSQLConnectionDelegate - with Connection { + with Connection + with TimeoutScheduler { import PostgreSQLConnection._ @@ -63,7 +65,7 @@ class PostgreSQLConnection private final val currentCount = Counter.incrementAndGet() private final val preparedStatementsCounter = new AtomicInteger() - private final implicit val internalExecutionContext = executionContext + override implicit val internalPool = executionContext private val parameterStatus = new scala.collection.mutable.HashMap[String, String]() private val parsedStatements = new scala.collection.mutable.HashMap[String, PreparedStatementHolder]() @@ -91,6 +93,7 @@ class PostgreSQLConnection } override def disconnect: Future[Connection] = this.connectionHandler.disconnect.map( c => this ) + override def onTimeout = disconnect override def isConnected: Boolean = this.connectionHandler.isConnected @@ -103,7 +106,7 @@ class PostgreSQLConnection this.setQueryPromise(promise) write(new QueryMessage(query)) - + addTimeout(promise,configuration.queryTimeout) promise.future } @@ -130,7 +133,7 @@ class PostgreSQLConnection holder.prepared = true new PreparedStatementOpeningMessage(holder.statementId, holder.realQuery, values, this.encoderRegistry) }) - + 
addTimeout(promise,configuration.queryTimeout) promise.future } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala index 62bcfd1a..ae3c5255 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/pool/PostgreSQLConnectionFactory.scala @@ -17,6 +17,7 @@ package com.github.mauricio.async.db.postgresql.pool import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.exceptions.ConnectionTimeoutedException import com.github.mauricio.async.db.pool.ObjectFactory import com.github.mauricio.async.db.postgresql.PostgreSQLConnection import com.github.mauricio.async.db.util.Log @@ -69,6 +70,9 @@ class PostgreSQLConnectionFactory( def validate( item : PostgreSQLConnection ) : Try[PostgreSQLConnection] = { Try { + if ( item.isTimeouted ) { + throw new ConnectionTimeoutedException(item) + } if ( !item.isConnected || item.hasRecentError ) { throw new ClosedChannelException() } From 01d7c70442b78368bfafab018b4fe155166a5838 Mon Sep 17 00:00:00 2001 From: lifey Date: Tue, 28 Jul 2015 23:49:24 +0300 Subject: [PATCH 38/82] fix PartitionedAsyncObjectPoolSpec factory can't return the same int twice to two different partitions --- .../async/db/pool/PartitionedAsyncObjectPoolSpec.scala | 9 +++++---- 1 file changed, 5 insertions(+), 4 deletions(-) diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPoolSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPoolSpec.scala index 3b84755d..51d58fb0 100644 --- a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPoolSpec.scala +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/PartitionedAsyncObjectPoolSpec.scala @@ -1,5 +1,7 @@ package com.github.mauricio.async.db.pool +import java.util.concurrent.atomic.AtomicInteger + import org.specs2.mutable.Specification import scala.util.Try import scala.concurrent.Await @@ -17,17 +19,16 @@ class PartitionedAsyncObjectPoolSpec extends SpecificationWithJUnit { val config = PoolConfiguration(100, Long.MaxValue, 100, Int.MaxValue) - + private var current = new AtomicInteger val factory = new ObjectFactory[Int] { var reject = Set[Int]() var failCreate = false - private var current = 0 + def create = if (failCreate) throw new IllegalStateException else { - current += 1 - current + current.incrementAndGet() } def destroy(item: Int) = {} def validate(item: Int) = From 2c18a425c7b1d2d87d3b6530a0c1d48bfeb192cb Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Tue, 4 Aug 2015 19:03:30 -0300 Subject: [PATCH 39/82] Using option for the timeout, letting connections declare their event loops and execution contexts --- .../mauricio/async/db/Configuration.scala | 11 ++-- .../async/db/pool/TimeoutScheduler.scala | 50 ++++++++++++++----- .../async/db/pool/DummyTimeoutScheduler.scala | 5 +- .../async/db/pool/TimeoutSchedulerSpec.scala | 8 ++- .../async/db/mysql/MySQLConnection.scala | 6 +-- .../async/db/mysql/QueryTimeoutSpec.scala | 4 +- .../db/postgresql/PostgreSQLConnection.scala | 4 +- .../postgresql/PostgreSQLConnectionSpec.scala | 10 ++-- 8 files changed, 63 insertions(+), 35 deletions(-) diff --git 
a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala index 089a634b..841999e1 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala @@ -18,7 +18,7 @@ package com.github.mauricio.async.db import java.nio.charset.Charset -import io.netty.buffer.{AbstractByteBufAllocator, PooledByteBufAllocator} +import io.netty.buffer.{ByteBufAllocator, PooledByteBufAllocator} import io.netty.util.CharsetUtil import scala.concurrent.duration._ @@ -43,6 +43,11 @@ object Configuration { * OOM or eternal loop attacks the client could have, defaults to 16 MB. You can set this * to any value you would like but again, make sure you know what you are doing if you do * change it. + * @param allocator the netty buffer allocator to be used + * @param connectTimeout the timeout for connecting to servers + * @param testTimeout the timeout for connection tests performed by pools + * @param queryTimeout the optional query timeout + * */ case class Configuration(username: String, @@ -52,7 +57,7 @@ case class Configuration(username: String, database: Option[String] = None, charset: Charset = Configuration.DefaultCharset, maximumMessageSize: Int = 16777216, - allocator: AbstractByteBufAllocator = PooledByteBufAllocator.DEFAULT, + allocator: ByteBufAllocator = PooledByteBufAllocator.DEFAULT, connectTimeout: Duration = 5.seconds, testTimeout: Duration = 5.seconds, - queryTimeout: Duration = Duration.Inf) + queryTimeout: Option[Duration] = None) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala index b2c2616b..d97a9ca1 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/TimeoutScheduler.scala @@ -2,20 +2,47 @@ package com.github.mauricio.async.db.pool import java.util.concurrent.atomic.AtomicBoolean import java.util.concurrent.{TimeUnit, TimeoutException, ScheduledFuture} -import com.github.mauricio.async.db.util.NettyUtils +import io.netty.channel.EventLoopGroup import scala.concurrent.{ExecutionContext, Promise} import scala.concurrent.duration.Duration trait TimeoutScheduler { - implicit val internalPool: ExecutionContext + + private var isTimeoutedBool = new AtomicBoolean(false) + + /** + * + * The event loop group to be used for scheduling. + * + * @return + */ + + def eventLoopGroup : EventLoopGroup + + /** + * Implementors should decide here what they want to do when a timeout occur + */ + def onTimeout // implementors should decide here what they want to do when a timeout occur - private var isTimeoutedBool = new AtomicBoolean(false); - def isTimeouted = isTimeoutedBool.get // We need this property as isClosed takes time to complete and - // we don't want the connection to be used again. - def addTimeout[A](promise: Promise[A], duration: Duration) : Option[ScheduledFuture[_]] = { - if (duration != Duration.Inf) { - val scheduledFuture = schedule( + /** + * + * We need this property as isClosed takes time to complete and + * we don't want the connection to be used again. 
+ * + * @return + */ + + def isTimeouted : Boolean = + isTimeoutedBool.get + + def addTimeout[A]( + promise: Promise[A], + durationOption: Option[Duration]) + (implicit executionContext : ExecutionContext) : Option[ScheduledFuture[_]] = { + durationOption.map { + duration => + val scheduledFuture = schedule( { if (promise.tryFailure(new TimeoutException(s"Operation is timeouted after it took too long to return (${duration})"))) { isTimeoutedBool.set(true) @@ -23,15 +50,14 @@ trait TimeoutScheduler { } }, duration) - promise.future.onComplete(x => scheduledFuture.cancel(false)) + promise.future.onComplete(x => scheduledFuture.cancel(false)) - return Some(scheduledFuture) + scheduledFuture } - return None } def schedule(block: => Unit, duration: Duration) : ScheduledFuture[_] = - NettyUtils.DefaultEventLoopGroup.schedule(new Runnable { + eventLoopGroup.schedule(new Runnable { override def run(): Unit = block }, duration.toMillis, TimeUnit.MILLISECONDS) } diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala index 302ac321..6935259e 100644 --- a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/DummyTimeoutScheduler.scala @@ -17,7 +17,9 @@ package com.github.mauricio.async.db.pool import java.util.concurrent.atomic.AtomicInteger -import com.github.mauricio.async.db.util.ExecutorServiceUtils +import com.github.mauricio.async.db.util.{NettyUtils, ExecutorServiceUtils} +import io.netty.channel.EventLoopGroup + /** * Implementation of TimeoutScheduler used for testing */ @@ -26,4 +28,5 @@ class DummyTimeoutScheduler extends TimeoutScheduler { private val timeOuts = new AtomicInteger override def onTimeout = timeOuts.incrementAndGet def timeoutCount = timeOuts.get() + def eventLoopGroup : EventLoopGroup = NettyUtils.DefaultEventLoopGroup } diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala index 46f20866..acc952e7 100644 --- a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala @@ -28,11 +28,10 @@ class TimeoutSchedulerSpec extends SpecificationWithJUnit { val TIMEOUT_DID_NOT_PASS = "timeout did not pass" - "test timeout did not pass" in { val timeoutScheduler = new DummyTimeoutScheduler() val promise = Promise[String]() - val scheduledFuture = timeoutScheduler.addTimeout(promise,Duration(1000, MILLISECONDS)) + val scheduledFuture = timeoutScheduler.addTimeout(promise,Some(Duration(1000, MILLISECONDS))) Thread.sleep(100); promise.isCompleted === false promise.success(TIMEOUT_DID_NOT_PASS) @@ -46,7 +45,7 @@ class TimeoutSchedulerSpec extends SpecificationWithJUnit { val timeoutMillis = 100 val promise = Promise[String]() val timeoutScheduler = new DummyTimeoutScheduler() - val scheduledFuture = timeoutScheduler.addTimeout(promise,Duration(timeoutMillis, MILLISECONDS)) + val scheduledFuture = timeoutScheduler.addTimeout(promise,Some(Duration(timeoutMillis, MILLISECONDS))) Thread.sleep(1000) promise.isCompleted === true scheduledFuture.get.isCancelled === false @@ -55,11 +54,10 @@ class TimeoutSchedulerSpec extends SpecificationWithJUnit { 
promise.future.value.get.get must throwA[TimeoutException](message = s"Operation is timeouted after it took too long to return \\(${timeoutMillis} milliseconds\\)") } - "test no timeout" in { val timeoutScheduler = new DummyTimeoutScheduler() val promise = Promise[String]() - val scheduledFuture = timeoutScheduler.addTimeout(promise,Duration.Inf) + val scheduledFuture = timeoutScheduler.addTimeout(promise,None) Thread.sleep(1000) scheduledFuture === None promise.isCompleted === false diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala index f4e7cc60..cb4a85b0 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/MySQLConnection.scala @@ -43,7 +43,7 @@ class MySQLConnection( configuration: Configuration, charsetMapper: CharsetMapper = CharsetMapper.Instance, group : EventLoopGroup = NettyUtils.DefaultEventLoopGroup, - executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext + implicit val executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext ) extends MySQLHandlerDelegate with Connection @@ -55,10 +55,8 @@ class MySQLConnection( // validate that this charset is supported charsetMapper.toInt(configuration.charset) - private final val connectionCount = MySQLConnection.Counter.incrementAndGet() private final val connectionId = s"[mysql-connection-$connectionCount]" - override implicit val internalPool = executionContext private final val connectionHandler = new MySQLConnectionHandler( configuration, @@ -80,6 +78,8 @@ class MySQLConnection( def lastException : Throwable = this._lastException def count : Long = this.connectionCount + override def eventLoopGroup : EventLoopGroup = group + def connect: Future[Connection] = { this.connectionHandler.connect.onFailure { case e => this.connectionPromise.tryFailure(e) diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala index 09324c40..65827432 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/QueryTimeoutSpec.scala @@ -66,7 +66,7 @@ class QueryTimeoutSpec extends Specification with ConnectionHelper { port = 3306, password = Some("root"), database = Some("mysql_async_tests"), - queryTimeout = Duration(1,NANOSECONDS) + queryTimeout = Some(Duration(1,NANOSECONDS)) ) def longTimeoutConfiguration = new Configuration( @@ -75,6 +75,6 @@ class QueryTimeoutSpec extends Specification with ConnectionHelper { port = 3306, password = Some("root"), database = Some("mysql_async_tests"), - queryTimeout = Duration(5,SECONDS) + queryTimeout = Some(Duration(5,SECONDS)) ) } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala index 3cf5c2b3..8c58076b 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala @@ -46,7 +46,7 @@ class PostgreSQLConnection encoderRegistry: ColumnEncoderRegistry = 
PostgreSQLColumnEncoderRegistry.Instance, decoderRegistry: ColumnDecoderRegistry = PostgreSQLColumnDecoderRegistry.Instance, group : EventLoopGroup = NettyUtils.DefaultEventLoopGroup, - executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext + implicit val executionContext : ExecutionContext = ExecutorServiceUtils.CachedExecutionContext ) extends PostgreSQLConnectionDelegate with Connection @@ -65,7 +65,6 @@ class PostgreSQLConnection private final val currentCount = Counter.incrementAndGet() private final val preparedStatementsCounter = new AtomicInteger() - override implicit val internalPool = executionContext private val parameterStatus = new scala.collection.mutable.HashMap[String, String]() private val parsedStatements = new scala.collection.mutable.HashMap[String, PreparedStatementHolder]() @@ -82,6 +81,7 @@ class PostgreSQLConnection private var queryResult: Option[QueryResult] = None + override def eventLoopGroup : EventLoopGroup = group def isReadyForQuery: Boolean = this.queryPromise.isEmpty def connect: Future[Connection] = { diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala index c9876721..ac297226 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala @@ -285,16 +285,12 @@ class PostgreSQLConnectionSpec extends Specification with DatabaseTestHelper { try { withHandler(configuration, { handler => - executeQuery(handler, "SELECT 0") - throw new IllegalStateException("should not have come here") + val result = executeQuery(handler, "SELECT 0") + throw new IllegalStateException("should not have arrived") }) } catch { - case e: GenericDatabaseException => { + case e: GenericDatabaseException => e.errorMessage.fields(InformationMessage.Routine) === "auth_failed" - } - case e: Exception => { - throw new IllegalStateException("should not have come here") - } } } From 7588e3c405ddc7911f446daa6ac1f968c64f1dc8 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Tue, 4 Aug 2015 19:07:54 -0300 Subject: [PATCH 40/82] Updating SBT and JDK versions --- .travis.yml | 10 ++++++++-- project/build.properties | 2 +- 2 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 53e614cf..2c1a7a84 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,13 +1,19 @@ language: scala scala: - 2.10.4 - - 2.11.0 + - 2.11.7 jdk: - oraclejdk7 - - openjdk7 + - oraclejdk8 services: - postgresql - mysql +cache: + directories: + - vendor/bundle + - $HOME/.m2 + - $HOME/.ivy2 + - $HOME/.sbt before_script: - ./script/prepare_build.sh diff --git a/project/build.properties b/project/build.properties index 8ac605a3..d638b4f3 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version=0.13.2 +sbt.version = 0.13.8 \ No newline at end of file From ed4f06a205417c86f72bce368b3ee463f8b4544e Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 8 Aug 2015 08:46:34 -0300 Subject: [PATCH 41/82] Closing 0.2.18 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index dc1c2c8b..d0c3d454 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -45,7 +45,7 @@ object ProjectBuild extends Build { object Configuration { - 
val commonVersion = "0.2.18-SNAPSHOT" + val commonVersion = "0.2.18" val projectScalaVersion = "2.11.7" val specs2Dependency = "org.specs2" %% "specs2" % "2.3.11" % "test" From 2f93fd1e1905ad8cca4039224c442013354bf2f2 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 8 Aug 2015 08:51:02 -0300 Subject: [PATCH 42/82] Starting next development cycle --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index d0c3d454..bb361625 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -45,7 +45,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.18" + val commonVersion = "0.2.19-SNAPSHOT" val projectScalaVersion = "2.11.7" val specs2Dependency = "org.specs2" %% "specs2" % "2.3.11" % "test" From 14e232ffd2be55b61296117d93b5f2270d54aebf Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 8 Aug 2015 09:38:15 -0300 Subject: [PATCH 43/82] Updated changelog --- CHANGELOG.md | 9 ++++++++- 1 file changed, 8 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index c80603d2..6ab0d079 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,7 +25,14 @@ # Changelog -## 0.2.17 - in progresss +## 0.2.18 - 2015-08-08 + +* Timeouts implemented queries for MySQL and PostgreSQL - @lifey - #147 + +## 0.2.17 - 2015-07-13 + +* Fixed pool leak issue - @haski +* Fixed date time formatting issue - #142 ## 0.2.16 - 2015-01-04 From 654a377596cc2f7694b0940d04a2cca1ece95f0b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maur=C3=ADcio=20Linhares?= Date: Sun, 9 Aug 2015 10:40:37 -0300 Subject: [PATCH 44/82] Updating to latest version --- README.markdown | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/README.markdown b/README.markdown index e1e54004..42f549d8 100644 --- a/README.markdown +++ b/README.markdown @@ -54,7 +54,7 @@ You can view the project's [CHANGELOG here](CHANGELOG.md). And if you're in a hurry, you can include them in your build like this, if you're using PostgreSQL: ```scala -"com.github.mauricio" %% "postgresql-async" % "0.2.15" +"com.github.mauricio" %% "postgresql-async" % "0.2.18" ``` Or Maven: @@ -63,14 +63,14 @@ Or Maven: com.github.mauricio postgresql-async_2.11 - 0.2.16 + 0.2.18 ``` And if you're into MySQL: ```scala -"com.github.mauricio" %% "mysql-async" % "0.2.15" +"com.github.mauricio" %% "mysql-async" % "0.2.18" ``` Or Maven: @@ -79,7 +79,7 @@ Or Maven: com.github.mauricio mysql-async_2.11 - 0.2.16 + 0.2.18 ``` From 1c9b98d0ee3ec18a3fc68278c5dab175775e5ad8 Mon Sep 17 00:00:00 2001 From: Anton Zherdev Date: Mon, 10 Aug 2015 20:32:23 +1200 Subject: [PATCH 45/82] array row data refactoring. Avoiding copy data by columns. 
--- .../async/db/general/ArrayRowData.scala | 17 +---------------- .../async/db/general/MutableResultSet.scala | 10 ++-------- .../db/mysql/binary/BinaryRowDecoder.scala | 4 ++-- 3 files changed, 5 insertions(+), 26 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala index c232a12a..fe582481 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/ArrayRowData.scala @@ -17,14 +17,10 @@ package com.github.mauricio.async.db.general import com.github.mauricio.async.db.RowData -import scala.collection.mutable -class ArrayRowData( columnCount : Int, row : Int, val mapping : Map[String, Int] ) - extends RowData +class ArrayRowData(row : Int, val mapping : Map[String, Int], val columns : Array[Any]) extends RowData { - private val columns = new Array[Any](columnCount) - /** * * Returns a column value by it's position in the originating query. @@ -51,16 +47,5 @@ class ArrayRowData( columnCount : Int, row : Int, val mapping : Map[String, Int] */ def rowNumber: Int = row - /** - * - * Sets a value to a column in this collection. - * - * @param i - * @param x - */ - - def update(i: Int, x: Any) = columns(i) = x - def length: Int = columns.length - } diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala index 0422a4cf..603e7602 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala @@ -39,14 +39,8 @@ class MutableResultSet[T <: ColumnData]( override def apply(idx: Int): RowData = this.rows(idx) - def addRow( row : Seq[Any] ) { - val realRow = new ArrayRowData( columnTypes.size, this.rows.size, this.columnMapping ) - var x = 0 - while ( x < row.size ) { - realRow(x) = row(x) - x += 1 - } - this.rows += realRow + def addRow(row : Array[Any] ) { + this.rows += new ArrayRowData(this.rows.size, this.columnMapping, row) } } \ No newline at end of file diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala index 0f59ca5e..22c6cee5 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/binary/BinaryRowDecoder.scala @@ -31,7 +31,7 @@ class BinaryRowDecoder { //import BinaryRowDecoder._ - def decode(buffer: ByteBuf, columns: Seq[ColumnDefinitionMessage]): IndexedSeq[Any] = { + def decode(buffer: ByteBuf, columns: Seq[ColumnDefinitionMessage]): Array[Any] = { //log.debug("columns are {} - {}", buffer.readableBytes(), columns) //log.debug( "decoding row\n{}", MySQLHelper.dumpAsHex(buffer)) @@ -79,7 +79,7 @@ class BinaryRowDecoder { throw new BufferNotFullyConsumedException(buffer) } - row + row.toArray } } \ No newline at end of file From f39a2daf028899f9cf52d978dbe33f0d75476c29 Mon Sep 17 00:00:00 2001 From: Anton Zherdev Date: Mon, 10 Aug 2015 20:39:12 +1200 Subject: [PATCH 46/82] Fix package name for the test and removing unused imports --- .../db/postgresql/PostgreSQLConnectionSpec.scala | 15 
+++++++-------- 1 file changed, 7 insertions(+), 8 deletions(-) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala index ac297226..2843e95e 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala @@ -14,23 +14,22 @@ * under the License. */ -package com.github.mauricio.postgresql +package com.github.mauricio.async.db.postgresql import java.nio.ByteBuffer -import com.github.mauricio.async.db.column.{TimestampEncoderDecoder, TimeEncoderDecoder, DateEncoderDecoder} +import com.github.mauricio.async.db.column.{DateEncoderDecoder, TimeEncoderDecoder, TimestampEncoderDecoder} import com.github.mauricio.async.db.exceptions.UnsupportedAuthenticationMethodException -import com.github.mauricio.async.db.postgresql.exceptions.{QueryMustNotBeNullOrEmptyException, GenericDatabaseException} +import com.github.mauricio.async.db.postgresql.exceptions.{GenericDatabaseException, QueryMustNotBeNullOrEmptyException} import com.github.mauricio.async.db.postgresql.messages.backend.InformationMessage -import com.github.mauricio.async.db.postgresql.{PostgreSQLConnection, DatabaseTestHelper} import com.github.mauricio.async.db.util.Log -import com.github.mauricio.async.db.{Configuration, QueryResult, Connection} +import com.github.mauricio.async.db.{Configuration, Connection, QueryResult} import io.netty.buffer.Unpooled -import concurrent.{Future, Await} +import org.joda.time.LocalDateTime import org.specs2.mutable.Specification -import scala.concurrent.ExecutionContext.Implicits.global + import scala.concurrent.duration._ -import org.joda.time.LocalDateTime +import scala.concurrent.{Await, Future} object PostgreSQLConnectionSpec { val log = Log.get[PostgreSQLConnectionSpec] From e5e725f82b68d9317c34f240d9a57dd4a3aa7d2b Mon Sep 17 00:00:00 2001 From: Indraneel Mukherjee Date: Wed, 26 Aug 2015 13:44:48 -0700 Subject: [PATCH 47/82] Added types field to MutableResultSet. This will allow inferring the types of RowData fields, which can be useful in different situations, e.g. 
converting the results to json format --- .../github/mauricio/async/db/general/MutableResultSet.scala | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala index 603e7602..00cc712b 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/general/MutableResultSet.scala @@ -31,10 +31,12 @@ class MutableResultSet[T <: ColumnData]( private val columnMapping: Map[String, Int] = this.columnTypes.indices.map( index => ( this.columnTypes(index).name, index ) ).toMap - + val columnNames : IndexedSeq[String] = this.columnTypes.map(c => c.name) + val types : IndexedSeq[Int] = this.columnTypes.map(c => c.dataType) + override def length: Int = this.rows.length override def apply(idx: Int): RowData = this.rows(idx) @@ -43,4 +45,4 @@ class MutableResultSet[T <: ColumnData]( this.rows += new ArrayRowData(this.rows.size, this.columnMapping, row) } -} \ No newline at end of file +} From 95a6a55d30398b8085fe21a2d2d9281e4e0e1348 Mon Sep 17 00:00:00 2001 From: "Flavio W. Brasil" Date: Fri, 27 Nov 2015 09:00:00 -0800 Subject: [PATCH 48/82] Update README.markdown --- README.markdown | 1 + 1 file changed, 1 insertion(+) diff --git a/README.markdown b/README.markdown index 42f549d8..73302b6c 100644 --- a/README.markdown +++ b/README.markdown @@ -48,6 +48,7 @@ You can view the project's [CHANGELOG here](CHANGELOG.md). the driver into a vert.x application; * [dbmapper](https://github.com/njeuk/dbmapper) - enables SQL queries with automatic mapping from the database table to the Scala class and a mechanism to create a Table Date Gateway model with very little boiler plate code; +* [Quill](http://getquill.io) - A compile-time language integrated query library for Scala. ## Include them as dependencies From 577b6a87842b34a13ce38bab21055045d0694fea Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?=E2=98=83=20pitr?= Date: Mon, 14 Dec 2015 16:48:49 -0500 Subject: [PATCH 49/82] Switch to LIFO for object pool strategy Switch to use a stack datastructure to store connections. Current implementation uses FIFO strategy, which prevents unused connections from being closed because object pool goes through all connections. With LIFO strategy, unused connections will stay at the bottom of the stack and will be cleaned up during `testObjects` call. Additionally, declare `waitQueue` as `Queue` for clarity. 
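
To make the reasoning above concrete, here is a small self-contained sketch (not the pool implementation itself) of why a stack-based checkout lets idle connections age out, while a FIFO rotation keeps refreshing all of them:

```scala
import scala.collection.mutable

// Each pooled object remembers when it was last returned to the pool.
case class Pooled(id: Int, lastUse: Long)

val pool = mutable.Stack[Pooled]()
(1 to 5).foreach(i => pool.push(Pooled(i, lastUse = 0L)))

// LIFO: under light load, every checkout takes the most recently returned
// object from the top of the stack, so the ones near the bottom keep their
// old timestamps instead of being rotated through.
val borrowed = pool.pop()
pool.push(borrowed.copy(lastUse = System.currentTimeMillis()))

// A periodic validation pass (`testObjects` in the real pool) can then spot
// and close whatever has been idle longer than the configured maximum.
val maxIdleMillis = 60L * 1000
val now = System.currentTimeMillis()
val idle = pool.count(p => now - p.lastUse > maxIdleMillis)
println(s"$idle connections would be evicted by the cleanup task")
```
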
--- .../db/pool/SingleThreadedAsyncObjectPool.scala | 16 ++++++++-------- 1 file changed, 8 insertions(+), 8 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala index 84387cb0..2b2e28d9 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala @@ -19,7 +19,7 @@ package com.github.mauricio.async.db.pool import com.github.mauricio.async.db.util.{Log, Worker} import java.util.concurrent.atomic.AtomicLong import java.util.{TimerTask, Timer} -import scala.collection.mutable.ArrayBuffer +import scala.collection.mutable.{ArrayBuffer, Queue, Stack} import scala.concurrent.{Promise, Future} import scala.util.{Failure, Success} @@ -49,9 +49,9 @@ class SingleThreadedAsyncObjectPool[T]( import SingleThreadedAsyncObjectPool.{Counter, log} private val mainPool = Worker() - private val poolables = new ArrayBuffer[PoolableHolder[T]](configuration.maxObjects) + private var poolables = new Stack[PoolableHolder[T]]() private val checkouts = new ArrayBuffer[T](configuration.maxObjects) - private val waitQueue = new ArrayBuffer[Promise[T]](configuration.maxQueueSize) + private val waitQueue = new Queue[Promise[T]]() private val timer = new Timer("async-object-pool-timer-" + Counter.incrementAndGet(), true) timer.scheduleAtFixedRate(new TimerTask { def run() { @@ -150,10 +150,10 @@ class SingleThreadedAsyncObjectPool[T]( */ private def addBack(item: T, promise: Promise[AsyncObjectPool[T]]) { - this.poolables += new PoolableHolder[T](item) + this.poolables.push(new PoolableHolder[T](item)) - if (!this.waitQueue.isEmpty) { - this.checkout(this.waitQueue.remove(0)) + if (this.waitQueue.nonEmpty) { + this.checkout(this.waitQueue.dequeue()) } promise.success(this) @@ -205,7 +205,7 @@ class SingleThreadedAsyncObjectPool[T]( case e: Exception => promise.failure(e) } } else { - val item = this.poolables.remove(0).item + val item = this.poolables.pop().item this.checkouts += item promise.success(item) } @@ -241,7 +241,7 @@ class SingleThreadedAsyncObjectPool[T]( } } } - this.poolables --= removals + this.poolables = this.poolables.diff(removals) } private class PoolableHolder[T](val item: T) { From 761ed938bae8558014f051387010ef4a93ac0a80 Mon Sep 17 00:00:00 2001 From: xuwei-k <6b656e6a69@gmail.com> Date: Tue, 26 Jan 2016 05:02:10 +0900 Subject: [PATCH 50/82] disable publish settings of root project --- project/Build.scala | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index bb361625..4b9c879e 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -10,7 +10,11 @@ object ProjectBuild extends Build { lazy val root = Project( id = "db-async-base", base = file("."), - settings = Configuration.baseSettings, + settings = Configuration.baseSettings ++ Seq( + publish := (), + publishLocal := (), + publishArtifact := false + ), aggregate = Seq(common, postgresql, mysql) ) From b96aaf163e6ce757e722e95763a9dbc6f90211d5 Mon Sep 17 00:00:00 2001 From: Joern Bernhardt Date: Thu, 28 Jan 2016 01:48:42 +0100 Subject: [PATCH 51/82] Add test to show issue with numeric columns Signed-off-by: Joern Bernhardt --- .../async/db/postgresql/NumericSpec.scala | 57 +++++++++++++++++++ 1 file changed, 57 insertions(+) create mode 100644 
postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala new file mode 100644 index 00000000..26c13f4d --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala @@ -0,0 +1,57 @@ +package com.github.mauricio.async.db.postgresql + +import org.specs2.mutable.Specification + +class NumericSpec extends Specification with DatabaseTestHelper { + + "when processing numeric columns" should { + + "support first update of num column with floating" in { + + withHandler { + handler => + executeDdl(handler, "CREATE TEMP TABLE numeric_test (id BIGSERIAL, numcol NUMERIC)") + + val id = executePreparedStatement(handler, "INSERT INTO numeric_test DEFAULT VALUES RETURNING id").rows.get(0)("id") + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](123.123, id)) + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](1234, id)) + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](123.123, id)) + + id === 1 + } + + } + + "support first update of num column with integer (fails currently)" in { + + withHandler { + handler => + executeDdl(handler, "CREATE TEMP TABLE numeric_test (id BIGSERIAL, numcol NUMERIC)") + + val id = executePreparedStatement(handler, "INSERT INTO numeric_test DEFAULT VALUES RETURNING id").rows.get(0)("id") + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? WHERE id = ?", Array[Any](1234, id)) + executePreparedStatement(handler, "UPDATE numeric_test SET numcol = ? 
WHERE id = ?", Array[Any](123.123, id)) + + id === 1 + } + + } + + "support using first update with queries instead of prepared statements" in { + + withHandler { + handler => + executeDdl(handler, "CREATE TEMP TABLE numeric_test (id BIGSERIAL, numcol NUMERIC)") + + val id = executeQuery(handler, "INSERT INTO numeric_test DEFAULT VALUES RETURNING id").rows.get(0)("id") + executeQuery(handler, s"UPDATE numeric_test SET numcol = 1234 WHERE id = $id") + executeQuery(handler, s"UPDATE numeric_test SET numcol = 123.123 WHERE id = $id") + + id === 1 + } + + } + + } + +} From 5cf8b666e3e49762cbddfbd26534d08f0201cfde Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 5 Mar 2016 13:28:31 -0500 Subject: [PATCH 52/82] Upgrading some of the dependent libraries --- .gitignore | 1 + Procfile | 2 +- project/Build.scala | 14 +++++++------- 3 files changed, 9 insertions(+), 8 deletions(-) diff --git a/.gitignore b/.gitignore index 1aaf8978..c83ec207 100644 --- a/.gitignore +++ b/.gitignore @@ -20,3 +20,4 @@ mysql-async/target/* *.iml .project .vagrant/* +vendor/* diff --git a/Procfile b/Procfile index 1288bcfe..13e2e8fd 100644 --- a/Procfile +++ b/Procfile @@ -1,2 +1,2 @@ -postgresql: postgres -D databases/postgresql +postgresql: postgres -D vendor/postgresql mysql: mysqld --log-warnings --console \ No newline at end of file diff --git a/project/Build.scala b/project/Build.scala index 4b9c879e..aad5b051 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -52,14 +52,14 @@ object Configuration { val commonVersion = "0.2.19-SNAPSHOT" val projectScalaVersion = "2.11.7" - val specs2Dependency = "org.specs2" %% "specs2" % "2.3.11" % "test" - val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.3" % "test" + val specs2Dependency = "org.specs2" %% "specs2" % "2.5" % "test" + val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.6" % "test" val commonDependencies = Seq( - "org.slf4j" % "slf4j-api" % "1.7.12", - "joda-time" % "joda-time" % "2.3", - "org.joda" % "joda-convert" % "1.5", - "io.netty" % "netty-all" % "4.0.29.Final", + "org.slf4j" % "slf4j-api" % "1.7.18", + "joda-time" % "joda-time" % "2.9.2", + "org.joda" % "joda-convert" % "1.8.1", + "io.netty" % "netty-all" % "4.0.34.Final", "org.javassist" % "javassist" % "3.20.0-GA", specs2Dependency, logbackDependency @@ -78,7 +78,7 @@ object Configuration { :+ "-feature" , scalacOptions in doc := Seq("-doc-external-doc:scala=http://www.scala-lang.org/archives/downloads/distrib/files/nightly/docs/library/"), - crossScalaVersions := Seq(projectScalaVersion, "2.10.4"), + crossScalaVersions := Seq(projectScalaVersion, "2.10.6"), javacOptions := Seq("-source", "1.6", "-target", "1.6", "-encoding", "UTF8"), organization := "com.github.mauricio", version := commonVersion, From 90e4194adb2d02b229fcccdc754513e486518bb7 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 5 Mar 2016 16:44:35 -0500 Subject: [PATCH 53/82] Fixing issue with NUMERIC columns and mixing integer and floating point numbers - fixes #169 --- .../PostgreSQLColumnEncoderRegistry.scala | 20 +++++++------- .../async/db/postgresql/NumericSpec.scala | 2 +- .../async/db/postgresql/TimeAndDateSpec.scala | 27 +++++++++++++++---- 3 files changed, 33 insertions(+), 16 deletions(-) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala 
b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala index 24641336..5292839c 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala @@ -31,23 +31,23 @@ object PostgreSQLColumnEncoderRegistry { class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { private val classesSequence_ : List[(Class[_], (ColumnEncoder, Int))] = List( - classOf[Int] -> (IntegerEncoderDecoder -> ColumnTypes.Integer), - classOf[java.lang.Integer] -> (IntegerEncoderDecoder -> ColumnTypes.Integer), + classOf[Int] -> (IntegerEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Integer] -> (IntegerEncoderDecoder -> ColumnTypes.Numeric), - classOf[java.lang.Short] -> (ShortEncoderDecoder -> ColumnTypes.Smallint), - classOf[Short] -> (ShortEncoderDecoder -> ColumnTypes.Smallint), + classOf[java.lang.Short] -> (ShortEncoderDecoder -> ColumnTypes.Numeric), + classOf[Short] -> (ShortEncoderDecoder -> ColumnTypes.Numeric), - classOf[Long] -> (LongEncoderDecoder -> ColumnTypes.Bigserial), - classOf[java.lang.Long] -> (LongEncoderDecoder -> ColumnTypes.Bigserial), + classOf[Long] -> (LongEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Long] -> (LongEncoderDecoder -> ColumnTypes.Numeric), classOf[String] -> (StringEncoderDecoder -> ColumnTypes.Varchar), classOf[java.lang.String] -> (StringEncoderDecoder -> ColumnTypes.Varchar), - classOf[Float] -> (FloatEncoderDecoder -> ColumnTypes.Real), - classOf[java.lang.Float] -> (FloatEncoderDecoder -> ColumnTypes.Real), + classOf[Float] -> (FloatEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Float] -> (FloatEncoderDecoder -> ColumnTypes.Numeric), - classOf[Double] -> (DoubleEncoderDecoder -> ColumnTypes.Double), - classOf[java.lang.Double] -> (DoubleEncoderDecoder -> ColumnTypes.Double), + classOf[Double] -> (DoubleEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.lang.Double] -> (DoubleEncoderDecoder -> ColumnTypes.Numeric), classOf[BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), classOf[java.math.BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala index 26c13f4d..ad38a64e 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/NumericSpec.scala @@ -22,7 +22,7 @@ class NumericSpec extends Specification with DatabaseTestHelper { } - "support first update of num column with integer (fails currently)" in { + "support first update of num column with integer" in { withHandler { handler => diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala index 67e7b877..03703f21 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/TimeAndDateSpec.scala @@ -188,14 +188,31 @@ class TimeAndDateSpec extends Specification with DatabaseTestHelper { withTimeHandler { conn => - val 
date1 = new DateTime(2190319) + val date = new DateTime(2190319) - await(conn.sendPreparedStatement("CREATE TEMP TABLE TEST(T TIMESTAMP)")) - await(conn.sendPreparedStatement("INSERT INTO TEST(T) VALUES(?)", Seq(date1))) - val result = await(conn.sendPreparedStatement("SELECT T FROM TEST")) + executePreparedStatement(conn, "CREATE TEMP TABLE TEST(T TIMESTAMP)") + executePreparedStatement(conn, "INSERT INTO TEST(T) VALUES(?)", Array(date)) + val result = executePreparedStatement(conn, "SELECT T FROM TEST") val date2 = result.rows.get.head(0) + date2 === date.toDateTime(DateTimeZone.UTC).toLocalDateTime + } + + } + + "supports sending a local date and later a date time object for the same field" in { + + withTimeHandler { + conn => + val date = new LocalDate(2016, 3, 5) + + executePreparedStatement(conn, "CREATE TEMP TABLE TEST(T TIMESTAMP)") + executePreparedStatement(conn, "INSERT INTO TEST(T) VALUES(?)", Array(date)) + val result = executePreparedStatement(conn, "SELECT T FROM TEST WHERE T = ?", Array(date)) + result.rows.get.size === 1 - date2 === date1.toDateTime(DateTimeZone.UTC).toLocalDateTime + val dateTime = new LocalDateTime(2016, 3, 5, 0, 0, 0, 0) + val dateTimeResult = executePreparedStatement(conn, "SELECT T FROM TEST WHERE T = ?", Array(dateTime)) + dateTimeResult.rows.get.size === 1 } } From d42a303874b6d3ad840027fbe4aec4d0fb3c8396 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 5 Mar 2016 17:04:42 -0500 Subject: [PATCH 54/82] Include null test for PostgreSQL dates --- .../db/postgresql/PreparedStatementSpec.scala | 34 +++++++++++++++++++ project/plugins.sbt | 2 ++ 2 files changed, 36 insertions(+) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala index 6fd7d9a6..660c1411 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PreparedStatementSpec.scala @@ -40,6 +40,7 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { val messagesInsertReverted = s"INSERT INTO messages $filler (moment,content) VALUES (?,?) RETURNING id" val messagesUpdate = "UPDATE messages SET content = ?, moment = ? WHERE id = ?" val messagesSelectOne = "SELECT id, content, moment FROM messages WHERE id = ?" + val messagesSelectByMoment = "SELECT id, content, moment FROM messages WHERE moment = ?" val messagesSelectAll = "SELECT id, content, moment FROM messages" val messagesSelectEscaped = "SELECT id, content, moment FROM messages WHERE content LIKE '%??%' AND id > ?" 
@@ -163,7 +164,40 @@ class PreparedStatementSpec extends Specification with DatabaseTestHelper { rows(1)("id") === 2 rows(1)("content") === secondContent rows(1)("moment") === date + } + } + + "supports sending null first and then an actual value for the fields" in { + withHandler { + handler => + + val firstContent = "Some Moment" + val secondContent = "Some Other Moment" + val date = LocalDate.now() + + executeDdl(handler, this.messagesCreate) + executePreparedStatement(handler, this.messagesInsert, Array(firstContent, null)) + executePreparedStatement(handler, this.messagesInsert, Array(secondContent, date)) + + val rows = executePreparedStatement(handler, this.messagesSelectByMoment, Array(null)).rows.get + rows.size === 0 + + /* + PostgreSQL does not know how to handle NULL parameters for a query in a prepared statement, + you have to use IS NULL if you want to make use of it. + + rows.length === 1 + + rows(0)("id") === 1 + rows(0)("content") === firstContent + rows(0)("moment") === null + */ + val rowsWithoutNull = executePreparedStatement(handler, this.messagesSelectByMoment, Array(date)).rows.get + rowsWithoutNull.size === 1 + rowsWithoutNull(0)("id") === 2 + rowsWithoutNull(0)("content") === secondContent + rowsWithoutNull(0)("moment") === date } } diff --git a/project/plugins.sbt b/project/plugins.sbt index 1e87e1c8..4528f2d6 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -3,3 +3,5 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.5.0") addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0") addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.3") + +resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases" From 3408711d24b590c08e863e9dd70f54e96e24083a Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 5 Mar 2016 17:13:46 -0500 Subject: [PATCH 55/82] Change specs2 dependency to specs2 core only --- project/Build.scala | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index aad5b051..ed2024bb 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -51,8 +51,9 @@ object Configuration { val commonVersion = "0.2.19-SNAPSHOT" val projectScalaVersion = "2.11.7" + val specs2Version = "2.5" - val specs2Dependency = "org.specs2" %% "specs2" % "2.5" % "test" + val specs2Dependency = "org.specs2" %% "specs2-core" % specs2Version % "test" val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.6" % "test" val commonDependencies = Seq( From c3747b5ab8a98de2ecdd45fae1704117ce081173 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Sat, 5 Mar 2016 17:38:52 -0500 Subject: [PATCH 56/82] Include BitSpec from @narigo --- .../mauricio/async/db/mysql/BitSpec.scala | 83 +++++++++++++++++++ .../async/db/postgresql/BitSpec.scala | 83 +++++++++++++++++++ project/Build.scala | 2 + 3 files changed, 168 insertions(+) create mode 100644 mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BitSpec.scala create mode 100644 postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/BitSpec.scala diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BitSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BitSpec.scala new file mode 100644 index 00000000..ade3e6ce --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/BitSpec.scala @@ -0,0 +1,83 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you 
under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.mysql + +import org.specs2.mutable.Specification + +class BitSpec extends Specification with ConnectionHelper { + + "when processing bit columns" should { + + "result in binary data" in { + + withConnection { + connection => + val create = """CREATE TEMPORARY TABLE binary_test + ( + id INT NOT NULL AUTO_INCREMENT, + some_bit BIT(1) NOT NULL, + PRIMARY KEY (id) + )""" + + executeQuery(connection, create) + executePreparedStatement(connection, + "INSERT INTO binary_test (some_bit) VALUES (B'0'),(B'1')") + + val rows = executePreparedStatement(connection, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + } + + } + + "result in binary data in BIT(2) column" in { + + withConnection { + connection => + val create = """CREATE TEMPORARY TABLE binary_test + ( + id INT NOT NULL AUTO_INCREMENT, + some_bit BIT(2) NOT NULL, + CONSTRAINT bigserial_column_pkey PRIMARY KEY (id) + )""" + + executeQuery(connection, create) + executePreparedStatement(connection, + "INSERT INTO binary_test (some_bit) VALUES (B'00'),(B'01'),(B'10'),(B'11')") + + val rows = executePreparedStatement(connection, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + val bit2 = rows(2)("some_bit") + val bit3 = rows(3)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + bit2 === Array(2) + bit3 === Array(3) + } + + } + + } + +} \ No newline at end of file diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/BitSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/BitSpec.scala new file mode 100644 index 00000000..8c17f9af --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/BitSpec.scala @@ -0,0 +1,83 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. 
+ */ + +package com.github.mauricio.async.db.postgresql + +import org.specs2.mutable.Specification + +class BitSpec extends Specification with DatabaseTestHelper { + + "when processing bit columns" should { + + "result in binary data" in { + + withHandler { + handler => + val create = """CREATE TEMP TABLE binary_test + ( + id bigserial NOT NULL, + some_bit BYTEA NOT NULL, + CONSTRAINT bigserial_column_pkey PRIMARY KEY (id) + )""" + + executeDdl(handler, create) + executePreparedStatement(handler, + "INSERT INTO binary_test (some_bit) VALUES (E'\\\\000'),(E'\\\\001')") + + val rows = executePreparedStatement(handler, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + } + + } + + "result in binary data in BIT(2) column" in { + + withHandler { + handler => + val create = """CREATE TEMP TABLE binary_test + ( + id bigserial NOT NULL, + some_bit BYTEA NOT NULL, + CONSTRAINT bigserial_column_pkey PRIMARY KEY (id) + )""" + + executeDdl(handler, create) + executePreparedStatement(handler, + "INSERT INTO binary_test (some_bit) VALUES (E'\\\\000'),(E'\\\\001'),(E'\\\\002'),(E'\\\\003')") + + val rows = executePreparedStatement(handler, "select * from binary_test").rows.get + + val bit0 = rows(0)("some_bit") + val bit1 = rows(1)("some_bit") + val bit2 = rows(2)("some_bit") + val bit3 = rows(3)("some_bit") + + bit0 === Array(0) + bit1 === Array(1) + bit2 === Array(2) + bit3 === Array(3) + } + + } + + } + +} \ No newline at end of file diff --git a/project/Build.scala b/project/Build.scala index ed2024bb..a820fb76 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -54,6 +54,7 @@ object Configuration { val specs2Version = "2.5" val specs2Dependency = "org.specs2" %% "specs2-core" % specs2Version % "test" + val specs2JunitDependency = "org.specs2" %% "specs2-junit" % specs2Version % "test" val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.6" % "test" val commonDependencies = Seq( @@ -63,6 +64,7 @@ object Configuration { "io.netty" % "netty-all" % "4.0.34.Final", "org.javassist" % "javassist" % "3.20.0-GA", specs2Dependency, + specs2JunitDependency, logbackDependency ) From 0f9a587ec3a9cd372f0b37f069848201236d2acf Mon Sep 17 00:00:00 2001 From: Alex Dupre Date: Mon, 7 Mar 2016 09:13:09 +0100 Subject: [PATCH 57/82] Add SSL support.. SSL is disabled by default to avoid POLA violations. It is possible to enable and control SSL behavior via url parameters: - `sslmode=` enable ssl (prefer/require/verify-ca/verify-full [recommended]) - `sslrootcert=` specifies trusted certificates (JDK cacert if missing) Client certificate authentication is not implemented, due to lack of time and interest, but it should be easy to add. 
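
For illustration only, a minimal sketch of both ways to turn this on. The classes and URL parameters come from this change, but the host, database name and certificate path below are placeholders:

```scala
import java.io.File
import com.github.mauricio.async.db.SSLConfiguration
import com.github.mauricio.async.db.SSLConfiguration.Mode
import com.github.mauricio.async.db.postgresql.util.URLParser

// 1) Enable SSL through the connection URL.
val fromUrl = URLParser.parse(
  "jdbc:postgresql://db.example.com:5432/my_database?sslmode=verify-full&sslrootcert=/path/to/server.crt")

// 2) Or set it programmatically on the parsed Configuration (a case class, so copy works).
val fromCode = URLParser.parse("jdbc:postgresql://db.example.com:5432/my_database")
  .copy(ssl = SSLConfiguration(mode = Mode.VerifyFull, rootCert = Some(new File("/path/to/server.crt"))))
```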
--- .../mauricio/async/db/Configuration.scala | 2 + .../mauricio/async/db/SSLConfiguration.scala | 31 ++++++++ .../db/postgresql/codec/MessageDecoder.scala | 12 ++- .../db/postgresql/codec/MessageEncoder.scala | 3 +- .../codec/PostgreSQLConnectionHandler.scala | 59 ++++++++++++++- .../encoders/SSLMessageEncoder.scala | 16 ++++ .../encoders/StartupMessageEncoder.scala | 6 +- .../messages/backend/SSLResponseMessage.scala | 3 + .../messages/backend/ServerMessage.scala | 1 - .../frontend/InitialClientMessage.scala | 3 + .../messages/frontend/SSLRequestMessage.scala | 5 ++ .../messages/frontend/StartupMessage.scala | 4 +- .../async/db/postgresql/util/ParserURL.scala | 25 +++++-- .../async/db/postgresql/util/URLParser.scala | 10 +-- .../db/postgresql/DatabaseTestHelper.scala | 14 +++- .../db/postgresql/MessageDecoderSpec.scala | 2 +- .../PostgreSQLSSLConnectionSpec.scala | 51 +++++++++++++ .../db/postgresql/util/URLParserSpec.scala | 20 ++++- script/prepare_build.sh | 43 +++++++---- script/server.crt | 75 +++++++++++++++++++ script/server.key | 27 +++++++ 21 files changed, 364 insertions(+), 48 deletions(-) create mode 100644 db-async-common/src/main/scala/com/github/mauricio/async/db/SSLConfiguration.scala create mode 100644 postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/SSLMessageEncoder.scala create mode 100644 postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/SSLResponseMessage.scala create mode 100644 postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/InitialClientMessage.scala create mode 100644 postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/SSLRequestMessage.scala create mode 100644 postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLSSLConnectionSpec.scala create mode 100644 script/server.crt create mode 100644 script/server.key diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala index 841999e1..b032ac02 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala @@ -37,6 +37,7 @@ object Configuration { * @param port database port, defaults to 5432 * @param password password, defaults to no password * @param database database name, defaults to no database + * @param ssl ssl configuration * @param charset charset for the connection, defaults to UTF-8, make sure you know what you are doing if you * change this * @param maximumMessageSize the maximum size a message from the server could possibly have, this limits possible @@ -55,6 +56,7 @@ case class Configuration(username: String, port: Int = 5432, password: Option[String] = None, database: Option[String] = None, + ssl: SSLConfiguration = SSLConfiguration(), charset: Charset = Configuration.DefaultCharset, maximumMessageSize: Int = 16777216, allocator: ByteBufAllocator = PooledByteBufAllocator.DEFAULT, diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/SSLConfiguration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/SSLConfiguration.scala new file mode 100644 index 00000000..9ae657fe --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/SSLConfiguration.scala @@ -0,0 +1,31 @@ +package com.github.mauricio.async.db + +import java.io.File + +import 
SSLConfiguration.Mode + +/** + * + * Contains the SSL configuration necessary to connect to a database. + * + * @param mode whether and with what priority a SSL connection will be negotiated, default disabled + * @param rootCert path to PEM encoded trusted root certificates, None to use internal JDK cacerts, defaults to None + * + */ +case class SSLConfiguration(mode: Mode.Value = Mode.Disable, rootCert: Option[java.io.File] = None) + +object SSLConfiguration { + + object Mode extends Enumeration { + val Disable = Value("disable") // only try a non-SSL connection + val Prefer = Value("prefer") // first try an SSL connection; if that fails, try a non-SSL connection + val Require = Value("require") // only try an SSL connection, but don't verify Certificate Authority + val VerifyCA = Value("verify-ca") // only try an SSL connection, and verify that the server certificate is issued by a trusted certificate authority (CA) + val VerifyFull = Value("verify-full") // only try an SSL connection, verify that the server certificate is issued by a trusted CA and that the server host name matches that in the certificate + } + + def apply(properties: Map[String, String]): SSLConfiguration = SSLConfiguration( + mode = Mode.withName(properties.get("sslmode").getOrElse("disable")), + rootCert = properties.get("sslrootcert").map(new File(_)) + ) +} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala index 8a3d9fa5..5f210f72 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageDecoder.scala @@ -17,7 +17,7 @@ package com.github.mauricio.async.db.postgresql.codec import com.github.mauricio.async.db.postgresql.exceptions.{MessageTooLongException} -import com.github.mauricio.async.db.postgresql.messages.backend.ServerMessage +import com.github.mauricio.async.db.postgresql.messages.backend.{ServerMessage, SSLResponseMessage} import com.github.mauricio.async.db.postgresql.parsers.{AuthenticationStartupParser, MessageParsersRegistry} import com.github.mauricio.async.db.util.{BufferDumper, Log} import java.nio.charset.Charset @@ -31,15 +31,21 @@ object MessageDecoder { val DefaultMaximumSize = 16777216 } -class MessageDecoder(charset: Charset, maximumMessageSize : Int = MessageDecoder.DefaultMaximumSize) extends ByteToMessageDecoder { +class MessageDecoder(sslEnabled: Boolean, charset: Charset, maximumMessageSize : Int = MessageDecoder.DefaultMaximumSize) extends ByteToMessageDecoder { import MessageDecoder.log private val parser = new MessageParsersRegistry(charset) + private var sslChecked = false + override def decode(ctx: ChannelHandlerContext, b: ByteBuf, out: java.util.List[Object]): Unit = { - if (b.readableBytes() >= 5) { + if (sslEnabled & !sslChecked) { + val code = b.readByte() + sslChecked = true + out.add(new SSLResponseMessage(code == 'S')) + } else if (b.readableBytes() >= 5) { b.markReaderIndex() diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala index 5cf5d480..30195a11 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala +++ 
b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/MessageEncoder.scala @@ -44,12 +44,13 @@ class MessageEncoder(charset: Charset, encoderRegistry: ColumnEncoderRegistry) e override def encode(ctx: ChannelHandlerContext, msg: AnyRef, out: java.util.List[Object]) = { val buffer = msg match { + case SSLRequestMessage => SSLMessageEncoder.encode() + case message: StartupMessage => startupEncoder.encode(message) case message: ClientMessage => { val encoder = (message.kind: @switch) match { case ServerMessage.Close => CloseMessageEncoder case ServerMessage.Execute => this.executeEncoder case ServerMessage.Parse => this.openEncoder - case ServerMessage.Startup => this.startupEncoder case ServerMessage.Query => this.queryEncoder case ServerMessage.PasswordMessage => this.credentialEncoder case _ => throw new EncoderNotAvailableException(message) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala index b53821ee..733cc5d1 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/codec/PostgreSQLConnectionHandler.scala @@ -17,6 +17,7 @@ package com.github.mauricio.async.db.postgresql.codec import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.SSLConfiguration.Mode import com.github.mauricio.async.db.column.{ColumnDecoderRegistry, ColumnEncoderRegistry} import com.github.mauricio.async.db.postgresql.exceptions._ import com.github.mauricio.async.db.postgresql.messages.backend._ @@ -38,6 +39,12 @@ import com.github.mauricio.async.db.postgresql.messages.backend.RowDescriptionMe import com.github.mauricio.async.db.postgresql.messages.backend.ParameterStatusMessage import io.netty.channel.socket.nio.NioSocketChannel import io.netty.handler.codec.CodecException +import io.netty.handler.ssl.{SslContextBuilder, SslHandler} +import io.netty.handler.ssl.util.InsecureTrustManagerFactory +import io.netty.util.concurrent.FutureListener +import javax.net.ssl.{SSLParameters, TrustManagerFactory} +import java.security.KeyStore +import java.io.FileInputStream object PostgreSQLConnectionHandler { final val log = Log.get[PostgreSQLConnectionHandler] @@ -79,7 +86,7 @@ class PostgreSQLConnectionHandler override def initChannel(ch: channel.Channel): Unit = { ch.pipeline.addLast( - new MessageDecoder(configuration.charset, configuration.maximumMessageSize), + new MessageDecoder(configuration.ssl.mode != Mode.Disable, configuration.charset, configuration.maximumMessageSize), new MessageEncoder(configuration.charset, encoderRegistry), PostgreSQLConnectionHandler.this) } @@ -120,13 +127,61 @@ class PostgreSQLConnectionHandler } override def channelActive(ctx: ChannelHandlerContext): Unit = { - ctx.writeAndFlush(new StartupMessage(this.properties)) + if (configuration.ssl.mode == Mode.Disable) + ctx.writeAndFlush(new StartupMessage(this.properties)) + else + ctx.writeAndFlush(SSLRequestMessage) } override def channelRead0(ctx: ChannelHandlerContext, msg: Object): Unit = { msg match { + case SSLResponseMessage(supported) => + if (supported) { + val ctxBuilder = SslContextBuilder.forClient() + if (configuration.ssl.mode >= Mode.VerifyCA) { + configuration.ssl.rootCert.fold { + val tmf = 
TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm()) + val ks = KeyStore.getInstance(KeyStore.getDefaultType()) + val cacerts = new FileInputStream(System.getProperty("java.home") + "/lib/security/cacerts") + try { + ks.load(cacerts, "changeit".toCharArray) + } finally { + cacerts.close() + } + tmf.init(ks) + ctxBuilder.trustManager(tmf) + } { path => + ctxBuilder.trustManager(path) + } + } else { + ctxBuilder.trustManager(InsecureTrustManagerFactory.INSTANCE) + } + val sslContext = ctxBuilder.build() + val sslEngine = sslContext.newEngine(ctx.alloc(), configuration.host, configuration.port) + if (configuration.ssl.mode >= Mode.VerifyFull) { + val sslParams = sslEngine.getSSLParameters() + sslParams.setEndpointIdentificationAlgorithm("HTTPS") + sslEngine.setSSLParameters(sslParams) + } + val handler = new SslHandler(sslEngine) + ctx.pipeline().addFirst(handler) + handler.handshakeFuture.addListener(new FutureListener[channel.Channel]() { + def operationComplete(future: io.netty.util.concurrent.Future[channel.Channel]) { + if (future.isSuccess()) { + ctx.writeAndFlush(new StartupMessage(properties)) + } else { + connectionDelegate.onError(future.cause()) + } + } + }) + } else if (configuration.ssl.mode < Mode.Require) { + ctx.writeAndFlush(new StartupMessage(properties)) + } else { + connectionDelegate.onError(new IllegalArgumentException("SSL is not supported on server")) + } + case m: ServerMessage => { (m.kind : @switch) match { diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/SSLMessageEncoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/SSLMessageEncoder.scala new file mode 100644 index 00000000..aeec7435 --- /dev/null +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/SSLMessageEncoder.scala @@ -0,0 +1,16 @@ +package com.github.mauricio.async.db.postgresql.encoders + +import io.netty.buffer.ByteBuf +import io.netty.buffer.Unpooled + +object SSLMessageEncoder { + + def encode(): ByteBuf = { + val buffer = Unpooled.buffer() + buffer.writeInt(8) + buffer.writeShort(1234) + buffer.writeShort(5679) + buffer + } + +} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala index b8c97843..206fd2d3 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/encoders/StartupMessageEncoder.scala @@ -21,13 +21,11 @@ import com.github.mauricio.async.db.util.ByteBufferUtils import java.nio.charset.Charset import io.netty.buffer.{Unpooled, ByteBuf} -class StartupMessageEncoder(charset: Charset) extends Encoder { +class StartupMessageEncoder(charset: Charset) { //private val log = Log.getByName("StartupMessageEncoder") - override def encode(message: ClientMessage): ByteBuf = { - - val startup = message.asInstanceOf[StartupMessage] + def encode(startup: StartupMessage): ByteBuf = { val buffer = Unpooled.buffer() buffer.writeInt(0) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/SSLResponseMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/SSLResponseMessage.scala new file mode 100644 index 00000000..905ab688 --- /dev/null +++ 
b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/SSLResponseMessage.scala @@ -0,0 +1,3 @@ +package com.github.mauricio.async.db.postgresql.messages.backend + +case class SSLResponseMessage(supported: Boolean) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala index c413ef4e..1fa5b9a2 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/backend/ServerMessage.scala @@ -43,7 +43,6 @@ object ServerMessage { final val Query = 'Q' final val RowDescription = 'T' final val ReadyForQuery = 'Z' - final val Startup = '0' final val Sync = 'S' } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/InitialClientMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/InitialClientMessage.scala new file mode 100644 index 00000000..228c5e65 --- /dev/null +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/InitialClientMessage.scala @@ -0,0 +1,3 @@ +package com.github.mauricio.async.db.postgresql.messages.frontend + +trait InitialClientMessage diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/SSLRequestMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/SSLRequestMessage.scala new file mode 100644 index 00000000..c3bf84ff --- /dev/null +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/SSLRequestMessage.scala @@ -0,0 +1,5 @@ +package com.github.mauricio.async.db.postgresql.messages.frontend + +import com.github.mauricio.async.db.postgresql.messages.backend.ServerMessage + +object SSLRequestMessage extends InitialClientMessage diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala index e4bb34c4..bb53390f 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/messages/frontend/StartupMessage.scala @@ -16,6 +16,4 @@ package com.github.mauricio.async.db.postgresql.messages.frontend -import com.github.mauricio.async.db.postgresql.messages.backend.ServerMessage - -class StartupMessage(val parameters: List[(String, Any)]) extends ClientMessage(ServerMessage.Startup) \ No newline at end of file +class StartupMessage(val parameters: List[(String, Any)]) extends InitialClientMessage diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala index ce5fa180..8172877e 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala @@ -16,28 +16,37 @@ object ParserURL { val PGPORT = "port" val PGDBNAME = "database" val PGHOST = "host" - val PGUSERNAME = 
"username" + val PGUSERNAME = "user" val PGPASSWORD = "password" val DEFAULT_PORT = "5432" - private val pgurl1 = """(jdbc:postgresql):(?://([^/:]*|\[.+\])(?::(\d+))?)?(?:/([^/?]*))?(?:\?user=(.*)&password=(.*))?""".r - private val pgurl2 = """(postgres|postgresql)://(.*):(.*)@(.*):(\d+)/(.*)""".r + private val pgurl1 = """(jdbc:postgresql):(?://([^/:]*|\[.+\])(?::(\d+))?)?(?:/([^/?]*))?(?:\?(.*))?""".r + private val pgurl2 = """(postgres|postgresql)://(.*):(.*)@(.*):(\d+)/([^/?]*)(?:\?(.*))?""".r def parse(connectionURL: String): Map[String, String] = { val properties: Map[String, String] = Map() + def parseOptions(optionsStr: String): Map[String, String] = + optionsStr.split("&").map { o => + o.span(_ != '=') match { + case (name, value) => name -> value.drop(1) + } + }.toMap + connectionURL match { - case pgurl1(protocol, server, port, dbname, username, password) => { + case pgurl1(protocol, server, port, dbname, params) => { var result = properties if (server != null) result += (PGHOST -> unwrapIpv6address(server)) if (dbname != null && dbname.nonEmpty) result += (PGDBNAME -> dbname) - if(port != null) result += (PGPORT -> port) - if(username != null) result = (result + (PGUSERNAME -> username) + (PGPASSWORD -> password)) + if (port != null) result += (PGPORT -> port) + if (params != null) result ++= parseOptions(params) result } - case pgurl2(protocol, username, password, server, port, dbname) => { - properties + (PGHOST -> unwrapIpv6address(server)) + (PGPORT -> port) + (PGDBNAME -> dbname) + (PGUSERNAME -> username) + (PGPASSWORD -> password) + case pgurl2(protocol, username, password, server, port, dbname, params) => { + var result = properties + (PGHOST -> unwrapIpv6address(server)) + (PGPORT -> port) + (PGDBNAME -> dbname) + (PGUSERNAME -> username) + (PGPASSWORD -> password) + if (params != null) result ++= parseOptions(params) + result } case _ => { logger.warn(s"Connection url '$connectionURL' could not be parsed.") diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala index f39f24ac..debcb6d9 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala @@ -16,14 +16,11 @@ package com.github.mauricio.async.db.postgresql.util -import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.{Configuration, SSLConfiguration} import java.nio.charset.Charset object URLParser { - private val Username = "username" - private val Password = "password" - import Configuration.Default def parse(url: String, @@ -35,11 +32,12 @@ object URLParser { val port = properties.get(ParserURL.PGPORT).getOrElse(ParserURL.DEFAULT_PORT).toInt new Configuration( - username = properties.get(Username).getOrElse(Default.username), - password = properties.get(Password), + username = properties.get(ParserURL.PGUSERNAME).getOrElse(Default.username), + password = properties.get(ParserURL.PGPASSWORD), database = properties.get(ParserURL.PGDBNAME), host = properties.getOrElse(ParserURL.PGHOST, Default.host), port = port, + ssl = SSLConfiguration(properties), charset = charset ) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala index 
40b35549..2659d372 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/DatabaseTestHelper.scala @@ -18,10 +18,12 @@ package com.github.mauricio.async.db.postgresql import com.github.mauricio.async.db.util.Log import com.github.mauricio.async.db.{Connection, Configuration} +import java.io.File import java.util.concurrent.{TimeoutException, TimeUnit} -import scala.Some import scala.concurrent.duration._ import scala.concurrent.{Future, Await} +import com.github.mauricio.async.db.SSLConfiguration +import com.github.mauricio.async.db.SSLConfiguration.Mode object DatabaseTestHelper { val log = Log.get[DatabaseTestHelper] @@ -54,6 +56,16 @@ trait DatabaseTestHelper { withHandler(this.timeTestConfiguration, fn) } + def withSSLHandler[T](mode: SSLConfiguration.Mode.Value, host: String = "localhost", rootCert: Option[File] = Some(new File("script/server.crt")))(fn: (PostgreSQLConnection) => T): T = { + val config = new Configuration( + host = host, + port = databasePort, + username = "postgres", + database = databaseName, + ssl = SSLConfiguration(mode = mode, rootCert = rootCert)) + withHandler(config, fn) + } + def withHandler[T](configuration: Configuration, fn: (PostgreSQLConnection) => T): T = { val handler = new PostgreSQLConnection(configuration) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala index 14f0bed2..a033e3ee 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/MessageDecoderSpec.scala @@ -27,7 +27,7 @@ import java.util class MessageDecoderSpec extends Specification { - val decoder = new MessageDecoder(CharsetUtil.UTF_8) + val decoder = new MessageDecoder(false, CharsetUtil.UTF_8) "message decoder" should { diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLSSLConnectionSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLSSLConnectionSpec.scala new file mode 100644 index 00000000..2e38adbb --- /dev/null +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLSSLConnectionSpec.scala @@ -0,0 +1,51 @@ +package com.github.mauricio.async.db.postgresql + +import org.specs2.mutable.Specification +import com.github.mauricio.async.db.SSLConfiguration.Mode +import javax.net.ssl.SSLHandshakeException + +class PostgreSQLSSLConnectionSpec extends Specification with DatabaseTestHelper { + + "ssl handler" should { + + "connect to the database in ssl without verifying CA" in { + + withSSLHandler(Mode.Require, "127.0.0.1", None) { handler => + handler.isReadyForQuery must beTrue + } + + } + + "connect to the database in ssl verifying CA" in { + + withSSLHandler(Mode.VerifyCA, "127.0.0.1") { handler => + handler.isReadyForQuery must beTrue + } + + } + + "connect to the database in ssl verifying CA and hostname" in { + + withSSLHandler(Mode.VerifyFull) { handler => + handler.isReadyForQuery must beTrue + } + + } + + "throws exception when CA verification fails" in { + + withSSLHandler(Mode.VerifyCA, rootCert = None) { handler => + } must throwA[SSLHandshakeException] + + } + + "throws exception when hostname verification fails" in { + + 
withSSLHandler(Mode.VerifyFull, "127.0.0.1") { handler => + } must throwA[SSLHandshakeException] + + } + + } + +} diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala index 1e542f52..d0df6eaa 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala @@ -18,6 +18,8 @@ package com.github.mauricio.async.db.postgresql.util import org.specs2.mutable.Specification import com.github.mauricio.async.db.Configuration +import com.github.mauricio.async.db.SSLConfiguration +import com.github.mauricio.async.db.SSLConfiguration.Mode class URLParserSpec extends Specification { @@ -68,8 +70,20 @@ class URLParserSpec extends Specification { configuration.port === 9987 } - "create a connection from a heroku like URL using 'postgres' protocol" in { - val connectionUri = "postgres://john:doe@128.567.54.90:9987/my_database" + "create a connection with SSL enabled" in { + val connectionUri = "jdbc:postgresql://128.567.54.90:9987/my_database?sslmode=verify-full" + + val configuration = URLParser.parse(connectionUri) + configuration.username === Configuration.Default.username + configuration.password === None + configuration.database === Some("my_database") + configuration.host === "128.567.54.90" + configuration.port === 9987 + configuration.ssl.mode === Mode.VerifyFull + } + + "create a connection with SSL enabled and root CA from a heroku like URL using 'postgresql' protocol" in { + val connectionUri = "postgresql://john:doe@128.567.54.90:9987/my_database?sslmode=verify-ca&sslrootcert=server.crt" val configuration = URLParser.parse(connectionUri) configuration.username === "john" @@ -77,6 +91,8 @@ class URLParserSpec extends Specification { configuration.database === Some("my_database") configuration.host === "128.567.54.90" configuration.port === 9987 + configuration.ssl.mode === Mode.VerifyCA + configuration.ssl.rootCert.map(_.getPath) === Some("server.crt") } "create a connection with the available fields and named server" in { diff --git a/script/prepare_build.sh b/script/prepare_build.sh index 96aa8345..068ab389 100755 --- a/script/prepare_build.sh +++ b/script/prepare_build.sh @@ -1,5 +1,7 @@ #!/usr/bin/env sh +SCRIPTDIR=`dirname $0` + echo "Preparing MySQL configs" mysql -u root -e 'create database mysql_async_tests;' mysql -u root -e "create table mysql_async_tests.transaction_test (id varchar(255) not null, primary key (id))" @@ -10,26 +12,35 @@ mysql -u root -e "GRANT ALL PRIVILEGES ON *.* TO 'mysql_async_nopw'@'localhost' echo "preparing postgresql configs" -psql -c 'create database netty_driver_test;' -U postgres -psql -c 'create database netty_driver_time_test;' -U postgres -psql -c "alter database netty_driver_time_test set timezone to 'GMT'" -U postgres -psql -c "create table transaction_test ( id varchar(255) not null, constraint id_unique primary key (id))" -U postgres netty_driver_test -psql -c "CREATE USER postgres_md5 WITH PASSWORD 'postgres_md5'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_md5;" -U postgres -psql -c "CREATE USER postgres_cleartext WITH PASSWORD 'postgres_cleartext'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_cleartext;" -U postgres -psql -c "CREATE USER postgres_kerberos WITH PASSWORD 'postgres_kerberos'; GRANT ALL PRIVILEGES ON 
DATABASE netty_driver_test to postgres_kerberos;" -U postgres -psql -d "netty_driver_test" -c "CREATE TYPE example_mood AS ENUM ('sad', 'ok', 'happy');" -U postgres +PGUSER=postgres +PGCONF=/etc/postgresql/9.1/main +PGDATA=/var/ramfs/postgresql/9.1/main + +psql -d "postgres" -c 'create database netty_driver_test;' -U $PGUSER +psql -d "postgres" -c 'create database netty_driver_time_test;' -U $PGUSER +psql -d "postgres" -c "alter database netty_driver_time_test set timezone to 'GMT'" -U $PGUSER +psql -d "netty_driver_test" -c "create table transaction_test ( id varchar(255) not null, constraint id_unique primary key (id))" -U $PGUSER +psql -d "postgres" -c "CREATE USER postgres_md5 WITH PASSWORD 'postgres_md5'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_md5;" -U $PGUSER +psql -d "postgres" -c "CREATE USER postgres_cleartext WITH PASSWORD 'postgres_cleartext'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_cleartext;" -U $PGUSER +psql -d "postgres" -c "CREATE USER postgres_kerberos WITH PASSWORD 'postgres_kerberos'; GRANT ALL PRIVILEGES ON DATABASE netty_driver_test to postgres_kerberos;" -U $PGUSER +psql -d "netty_driver_test" -c "CREATE TYPE example_mood AS ENUM ('sad', 'ok', 'happy');" -U $PGUSER -sudo chmod 777 /etc/postgresql/9.1/main/pg_hba.conf +sudo chmod 666 $PGCONF/pg_hba.conf echo "pg_hba.conf goes as follows" -cat "/etc/postgresql/9.1/main/pg_hba.conf" +cat "$PGCONF/pg_hba.conf" -sudo echo "host all postgres 127.0.0.1/32 trust" > /etc/postgresql/9.1/main/pg_hba.conf -sudo echo "host all postgres_md5 127.0.0.1/32 md5" >> /etc/postgresql/9.1/main/pg_hba.conf -sudo echo "host all postgres_cleartext 127.0.0.1/32 password" >> /etc/postgresql/9.1/main/pg_hba.conf -sudo echo "host all postgres_kerberos 127.0.0.1/32 krb5" >> /etc/postgresql/9.1/main/pg_hba.conf +sudo echo "local all all trust" > $PGCONF/pg_hba.conf +sudo echo "host all postgres 127.0.0.1/32 trust" >> $PGCONF/pg_hba.conf +sudo echo "host all postgres_md5 127.0.0.1/32 md5" >> $PGCONF/pg_hba.conf +sudo echo "host all postgres_cleartext 127.0.0.1/32 password" >> $PGCONF/pg_hba.conf +sudo echo "host all postgres_kerberos 127.0.0.1/32 krb5" >> $PGCONF/pg_hba.conf echo "pg_hba.conf is now like" -cat "/etc/postgresql/9.1/main/pg_hba.conf" +cat "$PGCONF/pg_hba.conf" + +sudo chmod 600 $PGCONF/pg_hba.conf + +sudo cp -f $SCRIPTDIR/server.crt $SCRIPTDIR/server.key $PGDATA -sudo /etc/init.d/postgresql restart \ No newline at end of file +sudo /etc/init.d/postgresql restart diff --git a/script/server.crt b/script/server.crt new file mode 100644 index 00000000..aeef86f2 --- /dev/null +++ b/script/server.crt @@ -0,0 +1,75 @@ +Certificate: + Data: + Version: 3 (0x2) + Serial Number: 9913731310682600948 (0x8994a61a13e775f4) + Signature Algorithm: sha1WithRSAEncryption + Issuer: CN=localhost + Validity + Not Before: Mar 6 08:12:28 2016 GMT + Not After : Apr 5 08:12:28 2016 GMT + Subject: CN=localhost + Subject Public Key Info: + Public Key Algorithm: rsaEncryption + Public-Key: (2048 bit) + Modulus: + 00:ce:26:60:f9:0d:0f:f1:d6:ed:3e:79:91:55:6a: + 18:63:23:96:f2:60:50:3d:e3:dd:72:e8:c2:54:17: + 50:be:f0:9c:32:95:39:75:b1:04:a7:bb:f5:10:a4: + eb:d0:10:e2:17:45:d3:f9:35:8e:b4:8f:14:97:8f: + 27:93:d7:20:05:e2:dc:68:64:bc:fd:f2:19:17:94: + e8:2f:a6:b2:54:3f:df:3e:e7:8f:f1:52:15:7a:30: + 81:4d:bb:6f:22:8c:ca:e1:cb:6a:72:6d:fa:89:50: + e7:ee:07:d1:84:8a:71:07:dc:3f:6f:1f:db:10:e9: + 93:ad:01:c5:2b:51:ce:58:ef:12:95:00:16:e8:d4: + 46:07:35:ee:10:47:c4:f7:ff:47:17:52:a5:bb:5c: + 
cb:3c:f6:6b:c8:e7:d9:7c:18:39:a1:8f:e0:45:82: + 88:b5:27:f3:58:cb:ba:30:c0:8a:77:5b:00:bf:09: + 10:b1:ad:aa:f4:1b:2c:a1:f9:a5:59:57:c8:ef:de: + 54:ad:35:af:67:7e:29:bc:9a:2a:d2:f0:b1:9c:34: + 3c:bc:64:c9:4c:93:2c:7d:29:f4:1a:ac:f3:44:42: + a4:c9:06:1e:a4:73:e6:aa:67:d0:e4:02:02:ba:51: + 1e:97:44:b8:4b:4e:55:cd:e6:24:49:08:ac:9b:09: + 19:31 + Exponent: 65537 (0x10001) + X509v3 extensions: + X509v3 Subject Key Identifier: + 2E:20:4D:E1:12:2A:B0:6F:52:7F:62:90:D4:78:7B:E3:7D:D5:60:10 + X509v3 Authority Key Identifier: + keyid:2E:20:4D:E1:12:2A:B0:6F:52:7F:62:90:D4:78:7B:E3:7D:D5:60:10 + + X509v3 Basic Constraints: + CA:TRUE + Signature Algorithm: sha1WithRSAEncryption + 9b:e8:50:8b:86:0f:bf:22:c6:b4:ef:3e:c9:a2:55:fb:69:fc: + ae:93:7b:5e:6a:b6:ed:5b:27:c2:9e:36:d6:f1:f1:0f:67:65: + 87:de:05:21:6e:0e:f4:df:ac:72:61:47:f8:fd:16:9b:3d:54: + ef:21:cf:b7:31:ba:bf:c9:1b:2c:a0:f9:f1:6b:45:5a:98:25: + b9:01:99:cf:e1:79:c5:6a:20:ce:ca:ca:3f:6d:56:f3:65:51: + 31:98:01:b9:96:99:04:9c:ab:ae:fb:3f:f8:ad:60:66:77:54: + b2:81:e3:7c:6b:c4:36:ae:ae:5c:c6:1a:09:5c:d6:13:da:2b: + ba:ef:3f:3e:b2:13:f2:51:15:c5:1b:9c:22:be:b4:55:9b:15: + 70:60:3d:98:6e:ef:53:4c:c7:20:60:3f:17:f3:cc:76:47:96: + 27:05:84:0e:db:21:e1:76:b7:9c:38:35:19:ef:52:d4:fc:bd: + ec:95:2e:eb:4b:5b:0b:c8:86:d7:23:c2:76:14:f3:93:6f:c0: + a9:b6:ca:f8:47:3e:9d:af:11:5d:73:79:68:70:26:f9:fd:39: + 60:c1:c3:c7:a9:fc:48:b5:c0:e6:b4:2e:07:de:6a:ca:ed:04: + 67:31:b8:0b:d0:48:fd:3b:4c:12:8a:34:5c:18:3f:38:85:f2: + 1c:96:39:50 +-----BEGIN CERTIFICATE----- +MIIC+zCCAeOgAwIBAgIJAImUphoT53X0MA0GCSqGSIb3DQEBBQUAMBQxEjAQBgNV +BAMMCWxvY2FsaG9zdDAeFw0xNjAzMDYwODEyMjhaFw0xNjA0MDUwODEyMjhaMBQx +EjAQBgNVBAMMCWxvY2FsaG9zdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAM4mYPkND/HW7T55kVVqGGMjlvJgUD3j3XLowlQXUL7wnDKVOXWxBKe79RCk +69AQ4hdF0/k1jrSPFJePJ5PXIAXi3GhkvP3yGReU6C+mslQ/3z7nj/FSFXowgU27 +byKMyuHLanJt+olQ5+4H0YSKcQfcP28f2xDpk60BxStRzljvEpUAFujURgc17hBH +xPf/RxdSpbtcyzz2a8jn2XwYOaGP4EWCiLUn81jLujDAindbAL8JELGtqvQbLKH5 +pVlXyO/eVK01r2d+KbyaKtLwsZw0PLxkyUyTLH0p9Bqs80RCpMkGHqRz5qpn0OQC +ArpRHpdEuEtOVc3mJEkIrJsJGTECAwEAAaNQME4wHQYDVR0OBBYEFC4gTeESKrBv +Un9ikNR4e+N91WAQMB8GA1UdIwQYMBaAFC4gTeESKrBvUn9ikNR4e+N91WAQMAwG +A1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAJvoUIuGD78ixrTvPsmiVftp +/K6Te15qtu1bJ8KeNtbx8Q9nZYfeBSFuDvTfrHJhR/j9Fps9VO8hz7cxur/JGyyg ++fFrRVqYJbkBmc/hecVqIM7Kyj9tVvNlUTGYAbmWmQScq677P/itYGZ3VLKB43xr +xDaurlzGGglc1hPaK7rvPz6yE/JRFcUbnCK+tFWbFXBgPZhu71NMxyBgPxfzzHZH +licFhA7bIeF2t5w4NRnvUtT8veyVLutLWwvIhtcjwnYU85NvwKm2yvhHPp2vEV1z +eWhwJvn9OWDBw8ep/Ei1wOa0LgfeasrtBGcxuAvQSP07TBKKNFwYPziF8hyWOVA= +-----END CERTIFICATE----- diff --git a/script/server.key b/script/server.key new file mode 100644 index 00000000..0e226429 --- /dev/null +++ b/script/server.key @@ -0,0 +1,27 @@ +-----BEGIN RSA PRIVATE KEY----- +MIIEowIBAAKCAQEAziZg+Q0P8dbtPnmRVWoYYyOW8mBQPePdcujCVBdQvvCcMpU5 +dbEEp7v1EKTr0BDiF0XT+TWOtI8Ul48nk9cgBeLcaGS8/fIZF5ToL6ayVD/fPueP +8VIVejCBTbtvIozK4ctqcm36iVDn7gfRhIpxB9w/bx/bEOmTrQHFK1HOWO8SlQAW +6NRGBzXuEEfE9/9HF1Klu1zLPPZryOfZfBg5oY/gRYKItSfzWMu6MMCKd1sAvwkQ +sa2q9BssofmlWVfI795UrTWvZ34pvJoq0vCxnDQ8vGTJTJMsfSn0GqzzREKkyQYe +pHPmqmfQ5AICulEel0S4S05VzeYkSQismwkZMQIDAQABAoIBAH80v3Hu1X/tl8eN +TFjgdtv2Ahbdx6XpDaTya7doC7NG1ZuA6UvuR2kZWkdC/SAOyvSBaiPFIKHaCGLd +OxbHEEORkV/5iYVJ9qHOiNeejTvfjepLCU9nz0ju1VsZ5aH0LtzVoIGry4UgH32J +5YdbxhOLnLj9dzggabe/9+KbQDEveGTzkIvSJ1nbts7c8IRp6t/1nBz54BhawUjJ +IbaEbCH/mEmiCOUP914SCAUEfmgbMhdx8dc4V9nyxK+bulF3WIEpVZU1zj5Rpyni +P8gQ1geI64Erd8oa4DJ5C77eLuKKk0JBCkgh5x3hiAxuvN0zxHxW2Q75c6x9uDr5 
+DXi20GECgYEA+NRW6heYBJw7Lt7+cQCRG5/WFOX9TmmK9EAidVPULWO4NN4wLZxa +exW/epg8w1Y+u+BHOzFq9idJaHsoLZCmoNWMkZsP+AzeEkklee6wgur3/Zs1HqHZ +1VA3EmvOecz++3o69zcjd0nzgk9ADhjA2dAahKTnn5RESD1dFBWU2+sCgYEA1Bcv +PiQe6ce86FlSPr0TBFvIJl2dfjrQijL3dhZMo+1Y5VTShGBoAQKfBhJITSbsmaEz +UQ/4rBMyTN9bwvSwsDpQZw/Y0YKiSQIOr4J0jyotY5RN2AH3AlCX8CrhoOmBaLUd +n2SGx5keodnXn1/GPkuGPIa7xnGib/gdL2AaZFMCgYBV5AX0XByPStZrAXJW01lD +bdLZ9+GOFYRvd0vtr/gHiupk5WU/+T6KSiGEUdR3oOeatnogBpjjSwBd3lUqFUpP +LieNgzbp6pclPLaA9lFbf3wGwHJ/lmK47S11YF0vUgGaEMEV4KSPYql5i52SwByh +kuH0c2+4d9dyECx26FQv7QKBgQDBtX83oWP+n6hhCpu8o5IH7BAtQlmDHhKz9oLf +/tP28OO9abBwqWC0c4Fs2SviE4gLdRjak9zKxSmu3l3//N6XxlsDFo0wJcE1L0Tc +dikhTSNxjNVgUcMaASQUfgXfowXH7YvltboH+UjqCH4QmTgGU5KCG4jLYaQ74gA9 +8eeI8wKBgDfclcMsJnY6FpFoR0Ub9VOrdbKtD9nXSxhTSFKjrp4JM7SBN3u6NPJK +FgKZyQxd1bX/RBioN1prrZ3rbg+9awc65KhyfwtNxiurCBZhYObhKJv7lZyjNgsT +EALMKvB+fdpMtPZOVtUl0MbHEBblrJ+oy4TPT/kvMuCudF/5arcZ +-----END RSA PRIVATE KEY----- From efeb246ae1eefa4e020e993e02a058cf6edcacb2 Mon Sep 17 00:00:00 2001 From: Guilherme Campos Date: Tue, 15 Mar 2016 14:20:14 -0300 Subject: [PATCH 58/82] Rename LICENCE.txt to LICENSE.txt fixing filename typo --- LICENCE.txt => LICENSE.txt | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) rename LICENCE.txt => LICENSE.txt (99%) diff --git a/LICENCE.txt b/LICENSE.txt similarity index 99% rename from LICENCE.txt rename to LICENSE.txt index 61ca0ac4..fc389d02 100644 --- a/LICENCE.txt +++ b/LICENSE.txt @@ -199,4 +199,4 @@ distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file + limitations under the License. From a43118ffcd8196055d1cb1a17d696e09b1a516e7 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Thu, 17 Mar 2016 22:08:13 -0400 Subject: [PATCH 59/82] Wrapping up 0.2.19 --- CHANGELOG.md | 5 +++++ README.markdown | 8 ++++---- README.md | 3 --- project/Build.scala | 2 +- 4 files changed, 10 insertions(+), 8 deletions(-) delete mode 100644 README.md diff --git a/CHANGELOG.md b/CHANGELOG.md index 6ab0d079..5baf54f7 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -25,6 +25,11 @@ # Changelog +## 0.2.19 - 2016-03-17 + +* Always use `NUMERIC` when handling numbers in prepared statements in PostgreSQL; +* SSL support for PostgreSQL - @alexdupre - #85; + ## 0.2.18 - 2015-08-08 * Timeouts implemented queries for MySQL and PostgreSQL - @lifey - #147 diff --git a/README.markdown b/README.markdown index 73302b6c..b5fb56c8 100644 --- a/README.markdown +++ b/README.markdown @@ -55,7 +55,7 @@ You can view the project's [CHANGELOG here](CHANGELOG.md). 
And if you're in a hurry, you can include them in your build like this, if you're using PostgreSQL: ```scala -"com.github.mauricio" %% "postgresql-async" % "0.2.18" +"com.github.mauricio" %% "postgresql-async" % "0.2.19" ``` Or Maven: @@ -64,14 +64,14 @@ Or Maven: com.github.mauricio postgresql-async_2.11 - 0.2.18 + 0.2.19 ``` And if you're into MySQL: ```scala -"com.github.mauricio" %% "mysql-async" % "0.2.18" +"com.github.mauricio" %% "mysql-async" % "0.2.19" ``` Or Maven: @@ -80,7 +80,7 @@ Or Maven: com.github.mauricio mysql-async_2.11 - 0.2.18 + 0.2.19 ``` diff --git a/README.md b/README.md deleted file mode 100644 index 4fedd098..00000000 --- a/README.md +++ /dev/null @@ -1,3 +0,0 @@ -# postgresql-async - -[![Gitter](https://badges.gitter.im/Join%20Chat.svg)](https://gitter.im/mauricio/postgresql-async?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge) \ No newline at end of file diff --git a/project/Build.scala b/project/Build.scala index a820fb76..386c4f5e 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -49,7 +49,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.19-SNAPSHOT" + val commonVersion = "0.2.19" val projectScalaVersion = "2.11.7" val specs2Version = "2.5" From 11e23e0eab0b50df61f3e693564fbf8deff1ef6a Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Thu, 17 Mar 2016 22:10:15 -0400 Subject: [PATCH 60/82] Updated readme and changelog --- CHANGELOG.md | 5 +++-- README.markdown | 3 +-- 2 files changed, 4 insertions(+), 4 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 5baf54f7..bafc831d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,9 +1,10 @@ -**Table of Contents** - [Changelog](#changelog) - - [0.2.17 - in progresss](#0217---in-progresss) + - [0.2.19 - 2016-03-17](#0219---2016-03-17) + - [0.2.18 - 2015-08-08](#0218---2015-08-08) + - [0.2.17 - 2015-07-13](#0217---2015-07-13) - [0.2.16 - 2015-01-04](#0216---2015-01-04) - [0.2.15 - 2014-09-12](#0215---2014-09-12) - [0.2.14 - 2014-08-30](#0214---2014-08-30) diff --git a/README.markdown b/README.markdown index b5fb56c8..b87583a2 100644 --- a/README.markdown +++ b/README.markdown @@ -1,8 +1,7 @@ -**Table of Contents** -- postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala - 2.10 - 2.11 +- [[![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala 2.10 and 2.11](#!build-statushttpstravis-ciorgmauriciopostgresql-asyncpnghttpstravis-ciorgmauriciopostgresql-async-postgresql-async-&-mysql-async---async-netty-based-database-drivers-for-mysql-and-postgresql-written-in-scala-210-and-211) - [Abstractions and integrations](#abstractions-and-integrations) - [Include them as dependencies](#include-them-as-dependencies) - [Database connections and encodings](#database-connections-and-encodings) From 5e24cb0902860972a952c4b4d164e73568fe1d93 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Thu, 17 Mar 2016 22:11:54 -0400 Subject: [PATCH 61/82] Kicking off next cycle --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index 386c4f5e..13c8df4b 100644 --- a/project/Build.scala +++ 
b/project/Build.scala @@ -49,7 +49,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.19" + val commonVersion = "0.2.20-SNAPSHOT" val projectScalaVersion = "2.11.7" val specs2Version = "2.5" From d6180b518b9c399c7392c2cac96d14bc5d3d8c7f Mon Sep 17 00:00:00 2001 From: Stephen Couchman Date: Mon, 29 Feb 2016 05:48:17 -0500 Subject: [PATCH 62/82] Added check to SingleThreadedAsyncObjectPool to ensure returned objects came from that pool. Fixed not destroying invalidated objects during test cycle. Fixed exception on multiple close attempts in SingleThreadedAsyncObjectPool to make consistent with simultaneous request execution path. Added generic spec for testing an AsyncObjectPool implementation, and applied it to SingleThreadedAsyncObjectPool to guard against the above problems reappearing. Added mock capabilities back for specs2. --- .../pool/SingleThreadedAsyncObjectPool.scala | 72 ++++-- .../db/pool/AbstractAsyncObjectPoolSpec.scala | 228 ++++++++++++++++++ project/Build.scala | 2 + 3 files changed, 277 insertions(+), 25 deletions(-) create mode 100644 db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala index 2b2e28d9..49f60593 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala @@ -16,11 +16,14 @@ package com.github.mauricio.async.db.pool +import java.util.concurrent.RejectedExecutionException + import com.github.mauricio.async.db.util.{Log, Worker} import java.util.concurrent.atomic.AtomicLong -import java.util.{TimerTask, Timer} +import java.util.{Timer, TimerTask} + import scala.collection.mutable.{ArrayBuffer, Queue, Stack} -import scala.concurrent.{Promise, Future} +import scala.concurrent.{Future, Promise} import scala.util.{Failure, Success} object SingleThreadedAsyncObjectPool { @@ -93,15 +96,30 @@ class SingleThreadedAsyncObjectPool[T]( def giveBack(item: T): Future[AsyncObjectPool[T]] = { val promise = Promise[AsyncObjectPool[T]]() this.mainPool.action { - this.checkouts -= item - this.factory.validate(item) match { - case Success(item) => { - this.addBack(item, promise) + // Ensure it came from this pool + val idx = this.checkouts.indexOf(item) + if(idx >= 0) { + this.checkouts.remove(idx) + this.factory.validate(item) match { + case Success(item) => { + this.addBack(item, promise) + } + case Failure(e) => { + this.factory.destroy(item) + promise.failure(e) + } } - case Failure(e) => { - this.checkouts -= item - this.factory.destroy(item) - promise.failure(e) + } else { + // It's already a failure but lets doublecheck why + val isFromOurPool = (item match { + case x: AnyRef => this.poolables.find(holder => x eq holder.item.asInstanceOf[AnyRef]) + case _ => this.poolables.find(holder => item == holder.item) + }).isDefined + + if(isFromOurPool) { + promise.failure(new IllegalStateException("This item has already been returned")) + } else { + promise.failure(new IllegalArgumentException("The returned item did not come from this pool.")) } } } @@ -112,25 +130,28 @@ class SingleThreadedAsyncObjectPool[T]( def isFull: Boolean = this.poolables.isEmpty && this.checkouts.size == configuration.maxObjects def close: 
Future[AsyncObjectPool[T]] = { - val promise = Promise[AsyncObjectPool[T]]() - - this.mainPool.action { - if (!this.closed) { - try { - this.timer.cancel() - this.mainPool.shutdown - this.closed = true - (this.poolables.map(i => i.item) ++ this.checkouts).foreach(item => factory.destroy(item)) + try { + val promise = Promise[AsyncObjectPool[T]]() + this.mainPool.action { + if (!this.closed) { + try { + this.timer.cancel() + this.mainPool.shutdown + this.closed = true + (this.poolables.map(i => i.item) ++ this.checkouts).foreach(item => factory.destroy(item)) + promise.success(this) + } catch { + case e: Exception => promise.failure(e) + } + } else { promise.success(this) - } catch { - case e: Exception => promise.failure(e) } - } else { - promise.success(this) } + promise.future + } catch { + case e: RejectedExecutionException if this.closed => + Future.successful(this) } - - promise.future } def availables: Traversable[T] = this.poolables.map(item => item.item) @@ -238,6 +259,7 @@ class SingleThreadedAsyncObjectPool[T]( case Failure(e) => { log.error("Failed to validate object", e) removals += poolable + factory.destroy(poolable.item) } } } diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala new file mode 100644 index 00000000..34ca0662 --- /dev/null +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala @@ -0,0 +1,228 @@ +package com.github.mauricio.async.db.pool + +import com.github.mauricio.async.db.pool.AbstractAsyncObjectPoolSpec.Widget +import org.mockito.Mockito.reset +import org.specs2.mock.Mockito +import org.specs2.mutable.Specification + +import scala.concurrent.{Await, Future} +import scala.util.Failure + +import scala.reflect.runtime.universe.TypeTag +import scala.util.Try +import scala.concurrent.duration.{Duration, SECONDS} + +/** + * This spec is designed abstract to allow testing of any implementation of AsyncObjectPool, against the common + * requirements the interface expects. + * + * @tparam T the AsyncObjectPool being tested. 
+ */ +abstract class AbstractAsyncObjectPoolSpec[T <: AsyncObjectPool[Widget]](implicit tag: TypeTag[T]) + extends Specification + with Mockito { + + import AbstractAsyncObjectPoolSpec._ + + protected def pool(factory: ObjectFactory[Widget] = new TestWidgetFactory, conf: PoolConfiguration = PoolConfiguration.Default): T + + // Evaluates to the type of AsyncObjectPool + s"the ${tag.tpe.erasure} variant of AsyncObjectPool" should { + + "successfully retrieve and return a Widget" in { + val p = pool() + val widget = Await.result(p.take, Duration.Inf) + + widget must not beNull + + val thePool = Await.result(p.giveBack(widget), Duration.Inf) + thePool must be(p) + } + + "reject Widgets that did not come from it" in { + val p = pool() + + Await.result(p.giveBack(Widget(null)), Duration.Inf) must throwAn[IllegalArgumentException] + } + + "scale contents" >> { + sequential + + val factory = spy(new TestWidgetFactory) + + val p = pool( + factory = factory, + conf = PoolConfiguration( + maxObjects = 5, + maxIdle = 2, + maxQueueSize = 5, + validationInterval = 2000 + )) + + + + var taken = Seq.empty[Widget] + "can take up to maxObjects" in { + taken = Await.result(Future.sequence(for (i <- 1 to 5) yield p.take), Duration.Inf) + + taken must have size 5 + taken.head must not beNull; + taken(1) must not beNull; + taken(2) must not beNull; + taken(3) must not beNull; + taken(4) must not beNull + } + + "does not attempt to expire taken items" in { + // Wait 3 seconds to ensure idle check has run at least once + there was after(3.seconds).no(factory).destroy(any[Widget]) + } + + reset(factory) // Considered bad form, but necessary as we depend on previous state in these tests + "takes maxObjects back" in { + val returns = Await.result(Future.sequence(for (widget <- taken) yield p.giveBack(widget)), Duration.Inf) + + returns must have size 5 + + returns.head must be(p) + returns(1) must be(p) + returns(2) must be(p) + returns(3) must be(p) + returns(4) must be(p) + } + + "protest returning an item that was already returned" in { + val resultFuture = p.giveBack(taken.head) + + Await.result(resultFuture, Duration.Inf) must throwAn[IllegalStateException] + } + + "destroy down to maxIdle widgets" in { + Thread.sleep(3000) + there were 5.times(factory).destroy(any[Widget]) + } + } + + "queue requests after running out" in { + val p = pool(conf = PoolConfiguration.Default.copy(maxObjects = 2, maxQueueSize = 1)) + + val widgets = Await.result(Future.sequence(for (i <- 1 to 2) yield p.take), Duration.Inf) + + val future = p.take + + // Wait five seconds + Thread.sleep(5000) + + val failedFuture = p.take + + // Cannot be done, would exceed maxObjects + future.isCompleted must beFalse + + Await.result(failedFuture, Duration.Inf) must throwA[PoolExhaustedException] + + Await.result(p.giveBack(widgets.head), Duration.Inf) must be(p) + + Await.result(future, Duration(5, SECONDS)) must be(widgets.head) + } + + "refuse to allow take after being closed" in { + val p = pool() + + Await.result(p.close, Duration.Inf) must be(p) + + Await.result(p.take, Duration.Inf) must throwA[PoolAlreadyTerminatedException] + } + + "allow being closed more than once" in { + val p = pool() + + Await.result(p.close, Duration.Inf) must be(p) + + Await.result(p.close, Duration.Inf) must be(p) + } + + + "destroy a failed widget" in { + val factory = spy(new TestWidgetFactory) + val p = pool(factory = factory) + + val widget = Await.result(p.take, Duration.Inf) + + widget must not beNull + + factory.validate(widget) returns Failure(new 
RuntimeException("This is a bad widget!")) + + Await.result(p.giveBack(widget), Duration.Inf) must throwA[RuntimeException](message = "This is a bad widget!") + + there was atLeastOne(factory).destroy(widget) + } + + "clean up widgets that die in the pool" in { + val factory = spy(new TestWidgetFactory) + // Deliberately make it impossible to expire (nearly) + val p = pool(factory = factory, conf = PoolConfiguration.Default.copy(maxIdle = Long.MaxValue, validationInterval = 2000)) + + val widget = Await.result(p.take, Duration.Inf) + + widget must not beNull + + Await.result(p.giveBack(widget), Duration.Inf) must be(p) + + there was atLeastOne(factory).validate(widget) + there were no(factory).destroy(widget) + + there was after(3.seconds).atLeastTwo(factory).validate(widget) + + factory.validate(widget) returns Failure(new RuntimeException("Test Exception, Not an Error")) + + there was after(3.seconds).one(factory).destroy(widget) + + Await.ready(p.take, Duration.Inf) + + there was two(factory).create + } + + } + +} + +object AbstractAsyncObjectPoolSpec { + + case class Widget(factory: TestWidgetFactory) + + class TestWidgetFactory extends ObjectFactory[Widget] { + + override def create: Widget = Widget(this) + + override def destroy(item: Widget) = {} + + override def validate(item: Widget): Try[Widget] = Try { + if (item.factory eq this) + item + else + throw new IllegalArgumentException("Not our item") + } + } + +} + + +class SingleThreadedAsyncObjectPoolSpec extends AbstractAsyncObjectPoolSpec[SingleThreadedAsyncObjectPool[Widget]] { + + import AbstractAsyncObjectPoolSpec._ + + override protected def pool(factory: ObjectFactory[Widget], conf: PoolConfiguration) = + new SingleThreadedAsyncObjectPool(factory, conf) + + "SingleThreadedAsyncObjectPool" should { + "successfully record a closed state" in { + val p = pool() + + Await.result(p.close, Duration.Inf) must be(p) + + p.isClosed must beTrue + } + + } + +} diff --git a/project/Build.scala b/project/Build.scala index 13c8df4b..84cd916f 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -55,6 +55,7 @@ object Configuration { val specs2Dependency = "org.specs2" %% "specs2-core" % specs2Version % "test" val specs2JunitDependency = "org.specs2" %% "specs2-junit" % specs2Version % "test" + val specs2MockDependency = "org.specs2" %% "specs2-mock" % specs2Version % "test" val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.6" % "test" val commonDependencies = Seq( @@ -65,6 +66,7 @@ object Configuration { "org.javassist" % "javassist" % "3.20.0-GA", specs2Dependency, specs2JunitDependency, + specs2MockDependency, logbackDependency ) From a2a11ac3ff3c8fcbdbaf18589cacaa7994e8269f Mon Sep 17 00:00:00 2001 From: varkockova Date: Mon, 18 Apr 2016 17:59:21 +0200 Subject: [PATCH 63/82] Time unit as a part of javadoc I wanted to know the time unit being used for idle and I had to look into the iplementation to be sure. I think it might be useful to have this in the javadoc directly. 
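
For illustration, a minimal sketch with the unit spelled out; the values themselves are arbitrary:

```scala
import com.github.mauricio.async.db.pool.PoolConfiguration

val poolConfig = PoolConfiguration(
  maxObjects = 10,           // at most 10 pooled objects
  maxIdle = 60000,           // idle objects are kept for up to 60 000 milliseconds
  maxQueueSize = 1000,       // how many takers may queue while the pool is exhausted
  validationInterval = 5000  // idle-object validation timer period, also in milliseconds
)
```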
--- .../com/github/mauricio/async/db/pool/PoolConfiguration.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala index a245de5c..0ac567f2 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/PoolConfiguration.scala @@ -25,7 +25,7 @@ object PoolConfiguration { * Defines specific pieces of a pool's behavior. * * @param maxObjects how many objects this pool will hold - * @param maxIdle how long are objects going to be kept as idle (not in use by clients of the pool) + * @param maxIdle number of milliseconds for which the objects are going to be kept as idle (not in use by clients of the pool) * @param maxQueueSize when there are no more objects, the pool can queue up requests to serve later then there * are objects available, this is the maximum number of enqueued requests * @param validationInterval pools will use this value as the timer period to validate idle objects. From 226ed09a6c5550e57f9b856ad2840e9a38134a2f Mon Sep 17 00:00:00 2001 From: Stephen Couchman Date: Mon, 25 Apr 2016 18:45:25 -0400 Subject: [PATCH 64/82] Reworked URLParser to process more URLs. Added MySQL URLParser Made URLParser stricter. Corrected test cases using illegal IP addresses. (ip's out of range) Now accepts JDBC style "jdbc:postgresql:dbname" Switched from fragile regex to java.net.URI parsing. Added parameter URL-format decoding. Deprecated ParserURL in PostgreSQL and converted it to an alias to PostgreSQL URLParser. Deprecated to 0.2.20, the version may need to be updated. 
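
A hedged sketch of the reworked surface. The URLs are placeholders, and the mysql:// scheme shown assumes the new MySQL parser accepts the same URL forms as the PostgreSQL one:

```scala
import com.github.mauricio.async.db.postgresql.util.{URLParser => PostgreSQLURLParser}
import com.github.mauricio.async.db.mysql.util.{URLParser => MySQLURLParser}

// A JDBC-style URL carrying only a database name is now accepted; missing pieces
// fall back to the parser's DEFAULT configuration.
val pg = PostgreSQLURLParser.parse("jdbc:postgresql:my_database")

// The new MySQL parser offers the same contract.
val my = MySQLURLParser.parse("mysql://john:doe@db.example.com:3306/my_database")

// parse falls back to DEFAULT when the URL cannot be understood;
// parseOrDie throws UnableToParseURLException instead.
val strict = PostgreSQLURLParser.parseOrDie("postgresql://john:doe@db.example.com:5432/my_database")
```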
--- .../mauricio/async/db/Configuration.scala | 2 + .../UnableToParseURLException.scala | 24 ++ .../async/db/util/AbstractURIParser.scala | 175 ++++++++++++ .../async/db/mysql/util/URLParser.scala | 39 +++ .../async/db/mysql/util/URLParserSpec.scala | 264 ++++++++++++++++++ .../db/postgresql/PostgreSQLConnection.scala | 13 +- .../async/db/postgresql/util/ParserURL.scala | 65 ----- .../async/db/postgresql/util/URLParser.scala | 88 ++++-- .../async/db/postgresql/util/package.scala | 29 ++ .../db/postgresql/util/URLParserSpec.scala | 160 ++++++++--- 10 files changed, 726 insertions(+), 133 deletions(-) create mode 100644 db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnableToParseURLException.scala create mode 100644 db-async-common/src/main/scala/com/github/mauricio/async/db/util/AbstractURIParser.scala create mode 100644 mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/URLParser.scala create mode 100644 mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/util/URLParserSpec.scala delete mode 100644 postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala create mode 100644 postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/package.scala diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala index b032ac02..cde267cf 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/Configuration.scala @@ -25,6 +25,8 @@ import scala.concurrent.duration._ object Configuration { val DefaultCharset = CharsetUtil.UTF_8 + + @deprecated("Use com.github.mauricio.async.db.postgresql.util.URLParser.DEFAULT or com.github.mauricio.async.db.mysql.util.URLParser.DEFAULT.", since = "0.2.20") val Default = new Configuration("postgres") } diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnableToParseURLException.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnableToParseURLException.scala new file mode 100644 index 00000000..0d2799df --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/exceptions/UnableToParseURLException.scala @@ -0,0 +1,24 @@ +/* + * Copyright 2016 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.exceptions + +/** + * Thrown to indicate that a URL Parser could not understand the provided URL. 
+ */ +class UnableToParseURLException(message: String, base: Throwable) extends RuntimeException(message, base) { + def this(message: String) = this(message, null) +} \ No newline at end of file diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/AbstractURIParser.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/AbstractURIParser.scala new file mode 100644 index 00000000..e18de6e1 --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/AbstractURIParser.scala @@ -0,0 +1,175 @@ +/* + * Copyright 2016 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.github.mauricio.async.db.util + +import java.net.{URI, URISyntaxException, URLDecoder} +import java.nio.charset.Charset + +import com.github.mauricio.async.db.exceptions.UnableToParseURLException +import com.github.mauricio.async.db.{Configuration, SSLConfiguration} +import org.slf4j.LoggerFactory + +import scala.util.matching.Regex + +/** + * Common parser assisting methods for PG and MySQL URI parsers. + */ +abstract class AbstractURIParser { + import AbstractURIParser._ + + protected val logger = LoggerFactory.getLogger(getClass) + + /** + * Parses out userInfo into a tuple of optional username and password + * + * @param userInfo the optional user info string + * @return a tuple of optional username and password + */ + final protected def parseUserInfo(userInfo: Option[String]): (Option[String], Option[String]) = userInfo.map(_.split(":", 2).toList) match { + case Some(user :: pass :: Nil) ⇒ (Some(user), Some(pass)) + case Some(user :: Nil) ⇒ (Some(user), None) + case _ ⇒ (None, None) + } + + /** + * A Regex that will match the base name of the driver scheme, minus jdbc:. + * Eg: postgres(?:ul)? + */ + protected val SCHEME: Regex + + /** + * The default for this particular URLParser, ie: appropriate and specific to PG or MySQL accordingly + */ + val DEFAULT: Configuration + + + /** + * Parses the provided url and returns a Configuration based upon it. On an error, + * @param url the URL to parse. + * @param charset the charset to use. + * @return a Configuration. + */ + @throws[UnableToParseURLException]("if the URL does not match the expected type, or cannot be parsed for any reason") + def parseOrDie(url: String, + charset: Charset = DEFAULT.charset): Configuration = { + try { + val properties = parse(new URI(url).parseServerAuthority) + + assembleConfiguration(properties, charset) + } catch { + case e: URISyntaxException => + throw new UnableToParseURLException(s"Failed to parse URL: $url", e) + } + } + + + /** + * Parses the provided url and returns a Configuration based upon it. On an error, + * a default configuration is returned. + * @param url the URL to parse. + * @param charset the charset to use. + * @return a Configuration. 
+ */ + def parse(url: String, + charset: Charset = DEFAULT.charset + ): Configuration = { + try { + parseOrDie(url, charset) + } catch { + case e: Exception => + logger.warn(s"Connection url '$url' could not be parsed.", e) + // Fallback to default to maintain current behavior + DEFAULT + } + } + + /** + * Assembles a configuration out of the provided property map. This is the generic form, subclasses may override to + * handle additional properties. + * @param properties the extracted properties from the URL. + * @param charset the charset passed in to parse or parseOrDie. + * @return + */ + protected def assembleConfiguration(properties: Map[String, String], charset: Charset): Configuration = { + DEFAULT.copy( + username = properties.getOrElse(USERNAME, DEFAULT.username), + password = properties.get(PASSWORD), + database = properties.get(DBNAME), + host = properties.getOrElse(HOST, DEFAULT.host), + port = properties.get(PORT).map(_.toInt).getOrElse(DEFAULT.port), + ssl = SSLConfiguration(properties), + charset = charset + ) + } + + + protected def parse(uri: URI): Map[String, String] = { + uri.getScheme match { + case SCHEME() => + val userInfo = parseUserInfo(Option(uri.getUserInfo)) + + val port = Some(uri.getPort).filter(_ > 0) + val db = Option(uri.getPath).map(_.stripPrefix("/")).filterNot(_.isEmpty) + val host = Option(uri.getHost) + + val builder = Map.newBuilder[String, String] + builder ++= userInfo._1.map(USERNAME -> _) + builder ++= userInfo._2.map(PASSWORD -> _) + builder ++= port.map(PORT -> _.toString) + builder ++= db.map(DBNAME -> _) + builder ++= host.map(HOST -> unwrapIpv6address(_)) + + // Parse query string parameters and just append them, overriding anything previously set + builder ++= (for { + qs <- Option(uri.getQuery).toSeq + parameter <- qs.split('&') + Array(name, value) = parameter.split('=') + if name.nonEmpty && value.nonEmpty + } yield URLDecoder.decode(name, "UTF-8") -> URLDecoder.decode(value, "UTF-8")) + + + builder.result + case "jdbc" => + handleJDBC(uri) + case _ => + throw new UnableToParseURLException("Unrecognized URI scheme") + } + } + + /** + * This method breaks out handling of the jdbc: prefixed uri's, allowing them to be handled differently + * without reimplementing all of parse. + */ + protected def handleJDBC(uri: URI): Map[String, String] = parse(new URI(uri.getSchemeSpecificPart)) + + + final protected def unwrapIpv6address(server: String): String = { + if (server.startsWith("[")) { + server.substring(1, server.length() - 1) + } else server + } + +} + +object AbstractURIParser { + // Constants and value names + val PORT = "port" + val DBNAME = "database" + val HOST = "host" + val USERNAME = "user" + val PASSWORD = "password" +} + diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/URLParser.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/URLParser.scala new file mode 100644 index 00000000..ba9c0333 --- /dev/null +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/URLParser.scala @@ -0,0 +1,39 @@ +/* + * Copyright 2016 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. 
You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.github.mauricio.async.db.mysql.util + +import com.github.mauricio.async.db.util.AbstractURIParser +import com.github.mauricio.async.db.Configuration + +/** + * The MySQL URL parser. + */ +object URLParser extends AbstractURIParser { + + /** + * The default configuration for MySQL. + */ + override val DEFAULT = Configuration( + username = "root", + host = "127.0.0.1", //Matched JDBC default + port = 3306, + password = None, + database = None + ) + + override protected val SCHEME = "^mysql$".r + +} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/util/URLParserSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/util/URLParserSpec.scala new file mode 100644 index 00000000..b15ab779 --- /dev/null +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/util/URLParserSpec.scala @@ -0,0 +1,264 @@ +/* + * Copyright 2016 Maurício Linhares + * + * Maurício Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.mysql.util + +import java.nio.charset.Charset + +import com.github.mauricio.async.db.{Configuration, SSLConfiguration} +import com.github.mauricio.async.db.exceptions.UnableToParseURLException +import io.netty.buffer.{ByteBufAllocator, PooledByteBufAllocator} +import org.specs2.mutable.Specification + +import scala.concurrent.duration.Duration + +class URLParserSpec extends Specification { + + "mysql URLParser" should { + import URLParser.{DEFAULT, parse, parseOrDie} + + + "have a reasonable default" in { + // This is a deliberate extra step, protecting the DEFAULT from frivolous changes. + // Any change to DEFAULT should require a change to this test. 
+ + DEFAULT === Configuration( + username = "root", + host = "127.0.0.1", //Matched JDBC default + port = 3306, + password = None, + database = None + ) + } + + + // Divided into sections + // =========== jdbc:mysql =========== + + "create a jdbc:mysql connection with the available fields" in { + val connectionUri = "jdbc:mysql://128.167.54.90:9987/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "128.167.54.90", + port = 9987 + ) + } + + "create a connection without port" in { + val connectionUri = "jdbc:mysql://128.167.54.90/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "128.167.54.90" + ) + } + + + "create a connection without username and password" in { + val connectionUri = "jdbc:mysql://128.167.54.90:9987/my_database" + + parse(connectionUri) === DEFAULT.copy( + database = Some("my_database"), + host = "128.167.54.90", + port = 9987 + ) + } + + "create a connection from a heroku like URL using 'mysql' protocol" in { + val connectionUri = "mysql://john:doe@128.167.54.90:9987/my_database" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "128.167.54.90", + port = 9987 + ) + } + + "create a connection with the available fields and named server" in { + val connectionUri = "jdbc:mysql://localhost:9987/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database"), + host = "localhost", + port = 9987 + ) + } + + "create a connection from a heroku like URL with named server" in { + val connectionUri = "mysql://john:doe@psql.heroku.com:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === Some("my_database") + configuration.host === "psql.heroku.com" + configuration.port === 9987 + } + + "create a connection with the available fields and ipv6" in { + val connectionUri = "jdbc:mysql://[::1]:9987/my_database?user=john&password=doe" + + val configuration = parse(connectionUri) + + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === Some("my_database") + configuration.host === "::1" + configuration.port === 9987 + } + + "create a connection from a heroku like URL and with ipv6" in { + val connectionUri = "mysql://john:doe@[::1]:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === Some("my_database") + configuration.host === "::1" + configuration.port === 9987 + } + + "create a connection with a missing hostname" in { + val connectionUri = "jdbc:mysql:/my_database?user=john&password=doe" + + parse(connectionUri) === DEFAULT.copy( + username = "john", + password = Some("doe"), + database = Some("my_database") + ) + } + + "create a connection with a missing database name" in { + val connectionUri = "jdbc:mysql://[::1]:9987/?user=john&password=doe" + + val configuration = parse(connectionUri) + + configuration.username === "john" + configuration.password === Some("doe") + configuration.database === None + configuration.host === "::1" + configuration.port === 9987 + } + + "create a connection with all default 
fields" in { + val connectionUri = "jdbc:mysql:" + + val configuration = parse(connectionUri) + + configuration.username === "root" + configuration.password === None + configuration.database === None + configuration.host === "127.0.0.1" + configuration.port === 3306 + } + + "create a connection with an empty (invalid) url" in { + val connectionUri = "" + + val configuration = parse(connectionUri) + + configuration.username === "root" + configuration.password === None + configuration.database === None + configuration.host === "127.0.0.1" + configuration.port === 3306 + } + + + "recognise a mysql:// uri" in { + parse("mysql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "root", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "recognize a jdbc:mysql:// uri" in { + parse("jdbc:mysql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "root", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from URI credentials" in { + parse("jdbc:mysql://user:password@localhost:425/dbname") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from query string" in { + parse("jdbc:mysql://localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + // Included for consistency, so later changes aren't allowed to change behavior + "use the query string parameters to override URI credentials" in { + parse("jdbc:mysql://baduser:badpass@localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "successfully default the port to the mysql port" in { + parse("jdbc:mysql://baduser:badpass@localhost/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 3306, + host = "localhost" + ) + } + + "reject malformed ip addresses" in { + val connectionUri = "mysql://john:doe@128.567.54.90:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "root" + configuration.password === None + configuration.database === None + configuration.host === "127.0.0.1" + configuration.port === 3306 + + parseOrDie(connectionUri) must throwA[UnableToParseURLException] + } + + } + +} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala index 8c58076b..ec89660c 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala @@ -17,8 +17,8 @@ package com.github.mauricio.async.db.postgresql import com.github.mauricio.async.db.QueryResult -import com.github.mauricio.async.db.column.{ColumnEncoderRegistry, ColumnDecoderRegistry} -import com.github.mauricio.async.db.exceptions.{InsufficientParametersException, ConnectionStillRunningQueryException} +import com.github.mauricio.async.db.column.{ColumnDecoderRegistry, ColumnEncoderRegistry} +import 
com.github.mauricio.async.db.exceptions.{ConnectionStillRunningQueryException, InsufficientParametersException} import com.github.mauricio.async.db.general.MutableResultSet import com.github.mauricio.async.db.pool.TimeoutScheduler import com.github.mauricio.async.db.postgresql.codec.{PostgreSQLConnectionDelegate, PostgreSQLConnectionHandler} @@ -26,14 +26,17 @@ import com.github.mauricio.async.db.postgresql.column.{PostgreSQLColumnDecoderRe import com.github.mauricio.async.db.postgresql.exceptions._ import com.github.mauricio.async.db.util._ import com.github.mauricio.async.db.{Configuration, Connection} -import java.util.concurrent.atomic.{AtomicLong,AtomicInteger,AtomicReference} +import java.util.concurrent.atomic.{AtomicInteger, AtomicLong, AtomicReference} + import messages.backend._ import messages.frontend._ -import scala.Some + import scala.concurrent._ import io.netty.channel.EventLoopGroup import java.util.concurrent.CopyOnWriteArrayList +import com.github.mauricio.async.db.postgresql.util.URLParser + object PostgreSQLConnection { final val Counter = new AtomicLong() final val ServerVersionKey = "server_version" @@ -42,7 +45,7 @@ object PostgreSQLConnection { class PostgreSQLConnection ( - configuration: Configuration = Configuration.Default, + configuration: Configuration = URLParser.DEFAULT, encoderRegistry: ColumnEncoderRegistry = PostgreSQLColumnEncoderRegistry.Instance, decoderRegistry: ColumnDecoderRegistry = PostgreSQLColumnDecoderRegistry.Instance, group : EventLoopGroup = NettyUtils.DefaultEventLoopGroup, diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala deleted file mode 100644 index 8172877e..00000000 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/ParserURL.scala +++ /dev/null @@ -1,65 +0,0 @@ -/** - * - */ -package com.github.mauricio.async.db.postgresql.util - -import org.slf4j.LoggerFactory - -/** - * @author gciuloaica - * - */ -object ParserURL { - - private val logger = LoggerFactory.getLogger(ParserURL.getClass()) - - val PGPORT = "port" - val PGDBNAME = "database" - val PGHOST = "host" - val PGUSERNAME = "user" - val PGPASSWORD = "password" - - val DEFAULT_PORT = "5432" - - private val pgurl1 = """(jdbc:postgresql):(?://([^/:]*|\[.+\])(?::(\d+))?)?(?:/([^/?]*))?(?:\?(.*))?""".r - private val pgurl2 = """(postgres|postgresql)://(.*):(.*)@(.*):(\d+)/([^/?]*)(?:\?(.*))?""".r - - def parse(connectionURL: String): Map[String, String] = { - val properties: Map[String, String] = Map() - - def parseOptions(optionsStr: String): Map[String, String] = - optionsStr.split("&").map { o => - o.span(_ != '=') match { - case (name, value) => name -> value.drop(1) - } - }.toMap - - connectionURL match { - case pgurl1(protocol, server, port, dbname, params) => { - var result = properties - if (server != null) result += (PGHOST -> unwrapIpv6address(server)) - if (dbname != null && dbname.nonEmpty) result += (PGDBNAME -> dbname) - if (port != null) result += (PGPORT -> port) - if (params != null) result ++= parseOptions(params) - result - } - case pgurl2(protocol, username, password, server, port, dbname, params) => { - var result = properties + (PGHOST -> unwrapIpv6address(server)) + (PGPORT -> port) + (PGDBNAME -> dbname) + (PGUSERNAME -> username) + (PGPASSWORD -> password) - if (params != null) result ++= parseOptions(params) - result - } - case _ => { - logger.warn(s"Connection url 
'$connectionURL' could not be parsed.") - properties - } - } - - } - - private def unwrapIpv6address(server: String): String = { - if (server.startsWith("[")) { - server.substring(1, server.length() - 1) - } else server - } - -} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala index debcb6d9..fcb9b3cf 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/URLParser.scala @@ -1,46 +1,72 @@ -/* - * Copyright 2013 MaurĂ­cio Linhares +/** * - * MaurĂ­cio Linhares licenses this file to you under the Apache License, - * version 2.0 (the "License"); you may not use this file except in compliance - * with the License. You may obtain a copy of the License at: - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT - * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the - * License for the specific language governing permissions and limitations - * under the License. */ - package com.github.mauricio.async.db.postgresql.util -import com.github.mauricio.async.db.{Configuration, SSLConfiguration} +import java.net.URI import java.nio.charset.Charset -object URLParser { +import com.github.mauricio.async.db.{Configuration, SSLConfiguration} +import com.github.mauricio.async.db.util.AbstractURIParser - import Configuration.Default +/** + * The PostgreSQL URL parser. + */ +object URLParser extends AbstractURIParser { + import AbstractURIParser._ - def parse(url: String, - charset: Charset = Default.charset - ): Configuration = { + // Alias these for anyone still making use of them + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.PORT", since = "0.2.20") + val PGPORT = PORT - val properties = ParserURL.parse(url) + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.DBNAME", since = "0.2.20") + val PGDBNAME = DBNAME - val port = properties.get(ParserURL.PGPORT).getOrElse(ParserURL.DEFAULT_PORT).toInt + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.HOST", since = "0.2.20") + val PGHOST = HOST - new Configuration( - username = properties.get(ParserURL.PGUSERNAME).getOrElse(Default.username), - password = properties.get(ParserURL.PGPASSWORD), - database = properties.get(ParserURL.PGDBNAME), - host = properties.getOrElse(ParserURL.PGHOST, Default.host), - port = port, - ssl = SSLConfiguration(properties), - charset = charset - ) + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.USERNAME", since = "0.2.20") + val PGUSERNAME = USERNAME + @deprecated("Use com.github.mauricio.async.db.AbstractURIParser.PASSWORD", since = "0.2.20") + val PGPASSWORD = PASSWORD + + @deprecated("Use com.github.mauricio.async.db.postgresql.util.URLParser.DEFAULT.port", since = "0.2.20") + val DEFAULT_PORT = "5432" + + /** + * The default configuration for PostgreSQL. 
+ */ + override val DEFAULT = Configuration( + username = "postgres", + host = "localhost", + port = 5432, + password = None, + database = None, + ssl = SSLConfiguration() + ) + + override protected val SCHEME = "^postgres(?:ql)?$".r + + private val simplePGDB = "^postgresql:(\\w+)$".r + + override protected def handleJDBC(uri: URI): Map[String, String] = uri.getSchemeSpecificPart match { + case simplePGDB(db) => Map(DBNAME -> db) + case x => parse(new URI(x)) } + /** + * Assembles a configuration out of the provided property map. This is the generic form, subclasses may override to + * handle additional properties. + * + * @param properties the extracted properties from the URL. + * @param charset the charset passed in to parse or parseOrDie. + * @return + */ + override protected def assembleConfiguration(properties: Map[String, String], charset: Charset): Configuration = { + // Add SSL Configuration + super.assembleConfiguration(properties, charset).copy( + ssl = SSLConfiguration(properties) + ) + } } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/package.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/package.scala new file mode 100644 index 00000000..5d321170 --- /dev/null +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/util/package.scala @@ -0,0 +1,29 @@ +/* + * Copyright 2016 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance + * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ +package com.github.mauricio.async.db.postgresql + +/** + * Contains package level aliases and type renames. + */ +package object util { + + /** + * Alias to help compatibility. 
+ */ + @deprecated("Use com.github.mauricio.async.db.postgresql.util.URLParser", since = "0.2.20") + val ParserURL = URLParser + +} diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala index d0df6eaa..9d2d2828 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/util/URLParserSpec.scala @@ -17,79 +17,93 @@ package com.github.mauricio.async.db.postgresql.util import org.specs2.mutable.Specification -import com.github.mauricio.async.db.Configuration -import com.github.mauricio.async.db.SSLConfiguration import com.github.mauricio.async.db.SSLConfiguration.Mode +import com.github.mauricio.async.db.exceptions.UnableToParseURLException class URLParserSpec extends Specification { - "parser" should { + "postgresql URLParser" should { + import URLParser.{parse, parseOrDie, DEFAULT} - "create a connection with the available fields" in { - val connectionUri = "jdbc:postgresql://128.567.54.90:9987/my_database?user=john&password=doe" + // Divided into sections + // =========== jdbc:postgresql =========== - val configuration = URLParser.parse(connectionUri) + // https://jdbc.postgresql.org/documentation/80/connect.html + "recognize a jdbc:postgresql:dbname uri" in { + val connectionUri = "jdbc:postgresql:dbname" + + parse(connectionUri) mustEqual DEFAULT.copy( + database = Some("dbname") + ) + } + + "create a jdbc:postgresql connection with the available fields" in { + val connectionUri = "jdbc:postgresql://128.167.54.90:9987/my_database?user=john&password=doe" + + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 } "create a connection without port" in { - val connectionUri = "jdbc:postgresql://128.567.54.90/my_database?user=john&password=doe" + val connectionUri = "jdbc:postgresql://128.167.54.90/my_database?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 5432 } "create a connection without username and password" in { - val connectionUri = "jdbc:postgresql://128.567.54.90:9987/my_database" + val connectionUri = "jdbc:postgresql://128.167.54.90:9987/my_database" - val configuration = URLParser.parse(connectionUri) - configuration.username === Configuration.Default.username + val configuration = parse(connectionUri) + configuration.username === DEFAULT.username configuration.password === None configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 } + //========== postgresql:// ============== + "create a connection from a heroku like URL using 'postgresql' protocol" in { - val connectionUri = "postgresql://john:doe@128.567.54.90:9987/my_database" + val connectionUri = "postgresql://john:doe@128.167.54.90:9987/my_database" - val configuration = URLParser.parse(connectionUri) 
+ val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 } "create a connection with SSL enabled" in { - val connectionUri = "jdbc:postgresql://128.567.54.90:9987/my_database?sslmode=verify-full" + val connectionUri = "jdbc:postgresql://128.167.54.90:9987/my_database?sslmode=verify-full" - val configuration = URLParser.parse(connectionUri) - configuration.username === Configuration.Default.username + val configuration = parse(connectionUri) + configuration.username === DEFAULT.username configuration.password === None configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 configuration.ssl.mode === Mode.VerifyFull } "create a connection with SSL enabled and root CA from a heroku like URL using 'postgresql' protocol" in { - val connectionUri = "postgresql://john:doe@128.567.54.90:9987/my_database?sslmode=verify-ca&sslrootcert=server.crt" + val connectionUri = "postgresql://john:doe@128.167.54.90:9987/my_database?sslmode=verify-ca&sslrootcert=server.crt" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") - configuration.host === "128.567.54.90" + configuration.host === "128.167.54.90" configuration.port === 9987 configuration.ssl.mode === Mode.VerifyCA configuration.ssl.rootCert.map(_.getPath) === Some("server.crt") @@ -98,7 +112,7 @@ class URLParserSpec extends Specification { "create a connection with the available fields and named server" in { val connectionUri = "jdbc:postgresql://localhost:9987/my_database?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") @@ -109,7 +123,7 @@ class URLParserSpec extends Specification { "create a connection from a heroku like URL with named server" in { val connectionUri = "postgresql://john:doe@psql.heroku.com:9987/my_database" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") @@ -120,7 +134,7 @@ class URLParserSpec extends Specification { "create a connection with the available fields and ipv6" in { val connectionUri = "jdbc:postgresql://[::1]:9987/my_database?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") @@ -132,7 +146,7 @@ class URLParserSpec extends Specification { "create a connection from a heroku like URL and with ipv6" in { val connectionUri = "postgresql://john:doe@[::1]:9987/my_database" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") configuration.database === Some("my_database") @@ -143,7 +157,7 @@ class URLParserSpec extends Specification { "create a connection with a missing hostname" in { val connectionUri = 
"jdbc:postgresql:/my_database?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") @@ -155,7 +169,7 @@ class URLParserSpec extends Specification { "create a connection with a missing database name" in { val connectionUri = "jdbc:postgresql://[::1]:9987/?user=john&password=doe" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "john" configuration.password === Some("doe") @@ -167,7 +181,7 @@ class URLParserSpec extends Specification { "create a connection with all default fields" in { val connectionUri = "jdbc:postgresql:" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "postgres" configuration.password === None @@ -179,7 +193,7 @@ class URLParserSpec extends Specification { "create a connection with an empty (invalid) url" in { val connectionUri = "" - val configuration = URLParser.parse(connectionUri) + val configuration = parse(connectionUri) configuration.username === "postgres" configuration.password === None @@ -188,6 +202,88 @@ class URLParserSpec extends Specification { configuration.port === 5432 } + + "recognise a postgresql:// uri" in { + parse("postgresql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "postgres", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "recognise a postgres:// uri" in { + parse("postgres://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "postgres", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "recognize a jdbc:postgresql:// uri" in { + parse("jdbc:postgresql://localhost:425/dbname") mustEqual DEFAULT.copy( + username = "postgres", + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from URI credentials" in { + parse("jdbc:postgresql://user:password@localhost:425/dbname") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "pull the username and password from query string" in { + parse("jdbc:postgresql://localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + // Included for consistency, so later changes aren't allowed to change behavior + "use the query string parameters to override URI credentials" in { + parse("jdbc:postgresql://baduser:badpass@localhost:425/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 425, + host = "localhost" + ) + } + + "successfully default the port to the PostgreSQL port" in { + parse("jdbc:postgresql://baduser:badpass@localhost/dbname?user=user&password=password") mustEqual DEFAULT.copy( + username = "user", + password = Some("password"), + database = Some("dbname"), + port = 5432, + host = "localhost" + ) + } + + "reject malformed ip addresses" in { + val connectionUri = "postgresql://john:doe@128.567.54.90:9987/my_database" + + val configuration = parse(connectionUri) + configuration.username === "postgres" + configuration.password === None + configuration.database === None + configuration.host === "localhost" + configuration.port === 5432 + + 
parseOrDie(connectionUri) must throwA[UnableToParseURLException] + } + } } From 8fb137a6fec7feb92e171e8b8060256d7799e18a Mon Sep 17 00:00:00 2001 From: Mansheng Yang Date: Thu, 12 May 2016 15:35:40 +0800 Subject: [PATCH 65/82] Fixed ByteBuf leaks PostgreSQLConnection.onDataRow should release the raw ByteBufs after decoding the data --- .../async/db/postgresql/PostgreSQLConnection.scala | 11 ++++++++--- 1 file changed, 8 insertions(+), 3 deletions(-) diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala index 8c58076b..1c2c08fe 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnection.scala @@ -191,11 +191,16 @@ class PostgreSQLConnection var x = 0 while ( x < m.values.size ) { - items(x) = if ( m.values(x) == null ) { + val buf = m.values(x) + items(x) = if ( buf == null ) { null } else { - val columnType = this.currentQuery.get.columnTypes(x) - this.decoderRegistry.decode(columnType, m.values(x), configuration.charset) + try { + val columnType = this.currentQuery.get.columnTypes(x) + this.decoderRegistry.decode(columnType, buf, configuration.charset) + } finally { + buf.release() + } } x += 1 } From 07dadc804041c2564ce386e535047da9e8b0a8ae Mon Sep 17 00:00:00 2001 From: Julien Viet Date: Tue, 7 Jun 2016 17:04:09 +0200 Subject: [PATCH 66/82] Upgrade to Netty 4.1.0 --- .../async/db/mysql/codec/LittleEndianByteBufAllocator.scala | 2 ++ project/Build.scala | 2 +- 2 files changed, 3 insertions(+), 1 deletion(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala index 40b51f24..0fdc790a 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/codec/LittleEndianByteBufAllocator.scala @@ -66,6 +66,8 @@ class LittleEndianByteBufAllocator extends ByteBufAllocator { def compositeDirectBuffer(maxNumComponents: Int): CompositeByteBuf = allocator.compositeDirectBuffer(maxNumComponents) + def calculateNewCapacity(minNewCapacity: Int, maxCapacity: Int): Int = allocator.calculateNewCapacity(minNewCapacity, maxCapacity) + private def littleEndian(b: ByteBuf) = b.order(ByteOrder.LITTLE_ENDIAN) } diff --git a/project/Build.scala b/project/Build.scala index 84cd916f..cf8b6861 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -62,7 +62,7 @@ object Configuration { "org.slf4j" % "slf4j-api" % "1.7.18", "joda-time" % "joda-time" % "2.9.2", "org.joda" % "joda-convert" % "1.8.1", - "io.netty" % "netty-all" % "4.0.34.Final", + "io.netty" % "netty-all" % "4.1.0.Final", "org.javassist" % "javassist" % "3.20.0-GA", specs2Dependency, specs2JunitDependency, From eed80b673a02e6efd5bd0d2d7db2de724f4b3894 Mon Sep 17 00:00:00 2001 From: Julien Viet Date: Wed, 8 Jun 2016 21:43:29 +0200 Subject: [PATCH 67/82] Update to 4.1.1.Final --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index cf8b6861..e02f3b3e 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -62,7 +62,7 @@ object Configuration { "org.slf4j" % "slf4j-api" % 
"1.7.18", "joda-time" % "joda-time" % "2.9.2", "org.joda" % "joda-convert" % "1.8.1", - "io.netty" % "netty-all" % "4.1.0.Final", + "io.netty" % "netty-all" % "4.1.1.Final", "org.javassist" % "javassist" % "3.20.0-GA", specs2Dependency, specs2JunitDependency, From d51a85b9ffa96b91f79372c22353ad4073282830 Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Fri, 10 Jun 2016 00:13:13 -0400 Subject: [PATCH 68/82] Closing 0.2.20 --- project/Build.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/project/Build.scala b/project/Build.scala index e02f3b3e..f771f803 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -49,7 +49,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.20-SNAPSHOT" + val commonVersion = "0.2.20" val projectScalaVersion = "2.11.7" val specs2Version = "2.5" From 7dc83b91c153b74a1c94329ada43b3e15c51bb7f Mon Sep 17 00:00:00 2001 From: Mauricio Linhares Date: Fri, 10 Jun 2016 00:23:03 -0400 Subject: [PATCH 69/82] Starting next development cycle --- README.markdown | 8 ++++---- project/Build.scala | 2 +- 2 files changed, 5 insertions(+), 5 deletions(-) diff --git a/README.markdown b/README.markdown index b87583a2..9977b309 100644 --- a/README.markdown +++ b/README.markdown @@ -54,7 +54,7 @@ You can view the project's [CHANGELOG here](CHANGELOG.md). And if you're in a hurry, you can include them in your build like this, if you're using PostgreSQL: ```scala -"com.github.mauricio" %% "postgresql-async" % "0.2.19" +"com.github.mauricio" %% "postgresql-async" % "0.2.20" ``` Or Maven: @@ -63,14 +63,14 @@ Or Maven: com.github.mauricio postgresql-async_2.11 - 0.2.19 + 0.2.20 ``` And if you're into MySQL: ```scala -"com.github.mauricio" %% "mysql-async" % "0.2.19" +"com.github.mauricio" %% "mysql-async" % "0.2.20" ``` Or Maven: @@ -79,7 +79,7 @@ Or Maven: com.github.mauricio mysql-async_2.11 - 0.2.19 + 0.2.20 ``` diff --git a/project/Build.scala b/project/Build.scala index f771f803..ca5bcb9e 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -49,7 +49,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.20" + val commonVersion = "0.2.21-SNAPSHOT" val projectScalaVersion = "2.11.7" val specs2Version = "2.5" From 3d0fdef82de66d9c804ceab712013bf1d44e908a Mon Sep 17 00:00:00 2001 From: volth Date: Fri, 8 Jul 2016 16:47:37 +0000 Subject: [PATCH 70/82] Support java.net.InetAddress (encoding and decoding) and user-defined types (encoding only) --- .../db/column/InetAddressEncoderDecoder.scala | 36 ++++++ .../db/postgresql/column/ColumnTypes.scala | 2 + .../PostgreSQLColumnDecoderRegistry.scala | 4 + .../PostgreSQLColumnEncoderRegistry.scala | 49 +++++---- .../async/db/postgresql/ArrayTypesSpec.scala | 103 ++++++++++++------ 5 files changed, 136 insertions(+), 58 deletions(-) create mode 100644 db-async-common/src/main/scala/com/github/mauricio/async/db/column/InetAddressEncoderDecoder.scala diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/column/InetAddressEncoderDecoder.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/InetAddressEncoderDecoder.scala new file mode 100644 index 00000000..ecac853d --- /dev/null +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/column/InetAddressEncoderDecoder.scala @@ -0,0 +1,36 @@ +/* + * Copyright 2013 MaurĂ­cio Linhares + * + * MaurĂ­cio Linhares licenses this file to you under the Apache License, + * version 2.0 (the "License"); you may not use this file except in compliance 
+ * with the License. You may obtain a copy of the License at: + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT + * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the + * License for the specific language governing permissions and limitations + * under the License. + */ + +package com.github.mauricio.async.db.column + +import java.net.InetAddress +import sun.net.util.IPAddressUtil.{textToNumericFormatV4,textToNumericFormatV6} + +object InetAddressEncoderDecoder extends ColumnEncoderDecoder { + + override def decode(value: String): Any = { + if (value contains ':') { + InetAddress.getByAddress(textToNumericFormatV6(value)) + } else { + InetAddress.getByAddress(textToNumericFormatV4(value)) + } + } + + override def encode(value: Any): String = { + value.asInstanceOf[InetAddress].getHostAddress + } + +} diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala index 29c6b736..93fef482 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ColumnTypes.scala @@ -67,6 +67,8 @@ object ColumnTypes { final val UUIDArray = 2951 final val XMLArray = 143 + final val Inet = 869 + final val InetArray = 1041 } /* diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala index 606bb442..5b4a47a7 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnDecoderRegistry.scala @@ -46,6 +46,7 @@ class PostgreSQLColumnDecoderRegistry( charset : Charset = CharsetUtil.UTF_8 ) e private final val timeWithTimestampArrayDecoder = new ArrayDecoder(TimeWithTimezoneEncoderDecoder) private final val intervalArrayDecoder = new ArrayDecoder(PostgreSQLIntervalEncoderDecoder) private final val uuidArrayDecoder = new ArrayDecoder(UUIDEncoderDecoder) + private final val inetAddressArrayDecoder = new ArrayDecoder(InetAddressEncoderDecoder) override def decode(kind: ColumnData, value: ByteBuf, charset: Charset): Any = { decoderFor(kind.dataType).decode(kind, value, charset) @@ -114,6 +115,9 @@ class PostgreSQLColumnDecoderRegistry( charset : Charset = CharsetUtil.UTF_8 ) e case XMLArray => this.stringArrayDecoder case ByteA => ByteArrayEncoderDecoder + case Inet => InetAddressEncoderDecoder + case InetArray => this.inetAddressArrayDecoder + case _ => StringEncoderDecoder } } diff --git a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala index 5292839c..c9f95f43 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala +++ 
b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/PostgreSQLColumnEncoderRegistry.scala @@ -52,6 +52,8 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { classOf[BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), classOf[java.math.BigDecimal] -> (BigDecimalEncoderDecoder -> ColumnTypes.Numeric), + classOf[java.net.InetAddress] -> (InetAddressEncoderDecoder -> ColumnTypes.Inet), + classOf[java.util.UUID] -> (UUIDEncoderDecoder -> ColumnTypes.UUID), classOf[LocalDate] -> ( DateEncoderDecoder -> ColumnTypes.Date ), @@ -104,17 +106,12 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { if (encoder.isDefined) { encoder.get._1.encode(value) } else { - - val view: Option[Traversable[Any]] = value match { - case i: java.lang.Iterable[_] => Some(i.toIterable) - case i: Traversable[_] => Some(i) - case i: Array[_] => Some(i.toIterable) - case _ => None - } - - view match { - case Some(collection) => encodeArray(collection) - case None => { + value match { + case i: java.lang.Iterable[_] => encodeArray(i.toIterable) + case i: Traversable[_] => encodeArray(i) + case i: Array[_] => encodeArray(i.toIterable) + case p: Product => encodeComposite(p) + case _ => { this.classesSequence.find(entry => entry._1.isAssignableFrom(value.getClass)) match { case Some(parent) => parent._2._1.encode(value) case None => value.toString @@ -126,14 +123,9 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { } - private def encodeArray(collection: Traversable[_]): String = { - val builder = new StringBuilder() - - builder.append('{') - - val result = collection.map { + private def encodeComposite(p: Product): String = { + p.productIterator.map { item => - if (item == null || item == None) { "NULL" } else { @@ -143,13 +135,22 @@ class PostgreSQLColumnEncoderRegistry extends ColumnEncoderRegistry { this.encode(item) } } + }.mkString("(", ",", ")") + } - }.mkString(",") - - builder.append(result) - builder.append('}') - - builder.toString() + private def encodeArray(collection: Traversable[_]): String = { + collection.map { + item => + if (item == null || item == None) { + "NULL" + } else { + if (this.shouldQuote(item)) { + "\"" + this.encode(item).replaceAllLiterally("\\", """\\""").replaceAllLiterally("\"", """\"""") + "\"" + } else { + this.encode(item) + } + } + }.mkString("{", ",", "}") } private def shouldQuote(value: Any): Boolean = { diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala index e941e145..5391588c 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/ArrayTypesSpec.scala @@ -16,31 +16,45 @@ package com.github.mauricio.async.db.postgresql -import com.github.mauricio.async.db.column.TimestampWithTimezoneEncoderDecoder +import com.github.mauricio.async.db.column.{TimestampWithTimezoneEncoderDecoder, InetAddressEncoderDecoder} import org.specs2.mutable.Specification +import java.net.InetAddress class ArrayTypesSpec extends Specification with DatabaseTestHelper { - - val simpleCreate = """create temp table type_test_table ( - bigserial_column bigserial not null, - smallint_column integer[] not null, - text_column text[] not null, - timestamp_column timestamp with time zone[] not null, - constraint bigserial_column_pkey primary 
key (bigserial_column) - )""" + // `uniq` allows sbt to run the tests concurrently as there is no CREATE TEMP TYPE + def simpleCreate(uniq: String) = s"""DROP TYPE IF EXISTS dir_$uniq; + CREATE TYPE direction_$uniq AS ENUM ('in','out'); + DROP TYPE IF EXISTS endpoint_$uniq; + CREATE TYPE endpoint_$uniq AS (ip inet, port integer); + create temp table type_test_table_$uniq ( + bigserial_column bigserial not null, + smallint_column integer[] not null, + text_column text[] not null, + inet_column inet[] not null, + direction_column direction_$uniq[] not null, + endpoint_column endpoint_$uniq[] not null, + timestamp_column timestamp with time zone[] not null, + constraint bigserial_column_pkey primary key (bigserial_column) + )""" + def simpleDrop(uniq: String) = s"""drop table if exists type_test_table_$uniq; + drop type if exists endpoint_$uniq; + drop type if exists direction_$uniq""" val insert = - """insert into type_test_table - (smallint_column, text_column, timestamp_column) + """insert into type_test_table_cptat + (smallint_column, text_column, inet_column, direction_column, endpoint_column, timestamp_column) values ( '{1,2,3,4}', '{"some,\"comma,separated,text","another line of text","fake\,backslash","real\\,backslash\\",NULL}', + '{"127.0.0.1","2002:15::1"}', + '{"in","out"}', + '{"(\"127.0.0.1\",80)","(\"2002:15::1\",443)"}', '{"2013-04-06 01:15:10.528-03","2013-04-06 01:15:08.528-03"}' )""" - val insertPreparedStatement = """insert into type_test_table - (smallint_column, text_column, timestamp_column) - values (?,?,?)""" + val insertPreparedStatement = """insert into type_test_table_csaups + (smallint_column, text_column, inet_column, direction_column, endpoint_column, timestamp_column) + values (?,?,?,?,?,?)""" "connection" should { @@ -48,41 +62,62 @@ class ArrayTypesSpec extends Specification with DatabaseTestHelper { withHandler { handler => - executeDdl(handler, simpleCreate) - executeDdl(handler, insert, 1) - val result = executeQuery(handler, "select * from type_test_table").rows.get - result(0)("smallint_column") === List(1,2,3,4) - result(0)("text_column") === List("some,\"comma,separated,text", "another line of text", "fake,backslash", "real\\,backslash\\", null ) - result(0)("timestamp_column") === List( - TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:10.528-03"), - TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:08.528-03") - ) + try { + executeDdl(handler, simpleCreate("cptat")) + executeDdl(handler, insert, 1) + val result = executeQuery(handler, "select * from type_test_table_cptat").rows.get + result(0)("smallint_column") === List(1,2,3,4) + result(0)("text_column") === List("some,\"comma,separated,text", "another line of text", "fake,backslash", "real\\,backslash\\", null ) + result(0)("timestamp_column") === List( + TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:10.528-03"), + TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:08.528-03") + ) + } finally { + executeDdl(handler, simpleDrop("cptat")) + } } } "correctly send arrays using prepared statements" in { + case class Endpoint(ip: InetAddress, port: Int) val timestamps = List( TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:10.528-03"), TimestampWithTimezoneEncoderDecoder.decode("2013-04-06 01:15:08.528-03") ) + val inets = List( + InetAddressEncoderDecoder.decode("127.0.0.1"), + InetAddressEncoderDecoder.decode("2002:15::1") + ) + val directions = List("in", "out") + val endpoints = List( + Endpoint(InetAddress.getByName("127.0.0.1"), 80), // 
case class + (InetAddress.getByName("2002:15::1"), 443) // tuple + ) val numbers = List(1,2,3,4) val texts = List("some,\"comma,separated,text", "another line of text", "fake,backslash", "real\\,backslash\\", null ) withHandler { handler => - executeDdl(handler, simpleCreate) - executePreparedStatement( - handler, - this.insertPreparedStatement, - Array( numbers, texts, timestamps ) ) - - val result = executeQuery(handler, "select * from type_test_table").rows.get - - result(0)("smallint_column") === numbers - result(0)("text_column") === texts - result(0)("timestamp_column") === timestamps + try { + executeDdl(handler, simpleCreate("csaups")) + executePreparedStatement( + handler, + this.insertPreparedStatement, + Array( numbers, texts, inets, directions, endpoints, timestamps ) ) + + val result = executeQuery(handler, "select * from type_test_table_csaups").rows.get + + result(0)("smallint_column") === numbers + result(0)("text_column") === texts + result(0)("inet_column") === inets + result(0)("direction_column") === "{in,out}" // user type decoding not supported + result(0)("endpoint_column") === """{"(127.0.0.1,80)","(2002:15::1,443)"}""" // user type decoding not supported + result(0)("timestamp_column") === timestamps + } finally { + executeDdl(handler, simpleDrop("csaups")) + } } } From 42ea62150132fe93f709d05384a6c52b64592d30 Mon Sep 17 00:00:00 2001 From: xuwei-k <6b656e6a69@gmail.com> Date: Sat, 24 Sep 2016 14:34:03 +0900 Subject: [PATCH 71/82] update specs2 3.8.5 --- .../mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala | 3 ++- .../github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala | 1 + .../github/mauricio/async/db/mysql/StoredProceduresSpec.scala | 3 ++- .../com/github/mauricio/async/db/mysql/TransactionSpec.scala | 1 + .../async/db/postgresql/PostgreSQLConnectionSpec.scala | 3 ++- .../mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala | 1 + project/Build.scala | 3 ++- 7 files changed, 11 insertions(+), 4 deletions(-) diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala index 34ca0662..7c8bfdc4 100644 --- a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/AbstractAsyncObjectPoolSpec.scala @@ -10,7 +10,8 @@ import scala.util.Failure import scala.reflect.runtime.universe.TypeTag import scala.util.Try -import scala.concurrent.duration.{Duration, SECONDS} +import scala.concurrent.ExecutionContext.Implicits.global +import scala.concurrent.duration._ /** * This spec is designed abstract to allow testing of any implementation of AsyncObjectPool, against the common diff --git a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala index acc952e7..0c6d85b4 100644 --- a/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala +++ b/db-async-common/src/test/scala/com/github/mauricio/async/db/pool/TimeoutSchedulerSpec.scala @@ -18,6 +18,7 @@ package com.github.mauricio.async.db.pool import java.util.concurrent.{ScheduledFuture, TimeoutException} import com.github.mauricio.async.db.util.{ByteBufferUtils, ExecutorServiceUtils} import org.specs2.mutable.SpecificationWithJUnit +import 
scala.concurrent.ExecutionContext.Implicits.global import scala.concurrent.duration._ import scala.concurrent.{Future, Promise} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala index 3d68563b..d8ff2142 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/StoredProceduresSpec.scala @@ -19,6 +19,7 @@ package com.github.mauricio.async.db.mysql import com.github.mauricio.async.db.ResultSet import com.github.mauricio.async.db.util.FutureUtils._ import org.specs2.mutable.Specification +import scala.concurrent.ExecutionContext.Implicits.global class StoredProceduresSpec extends Specification with ConnectionHelper { @@ -129,4 +130,4 @@ class StoredProceduresSpec extends Specification with ConnectionHelper { } } } -} \ No newline at end of file +} diff --git a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala index 0ef2f86b..83548c9b 100644 --- a/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala +++ b/mysql-async/src/test/scala/com/github/mauricio/async/db/mysql/TransactionSpec.scala @@ -10,6 +10,7 @@ import com.github.mauricio.async.db.Connection import scala.concurrent.duration.Duration import scala.concurrent.{Await, Future} +import scala.concurrent.ExecutionContext.Implicits.global import scala.util.{Success, Failure} object TransactionSpec { diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala index 2843e95e..0e050477 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/PostgreSQLConnectionSpec.scala @@ -30,6 +30,7 @@ import org.specs2.mutable.Specification import scala.concurrent.duration._ import scala.concurrent.{Await, Future} +import scala.concurrent.ExecutionContext.Implicits.global object PostgreSQLConnectionSpec { val log = Log.get[PostgreSQLConnectionSpec] @@ -154,7 +155,7 @@ class PostgreSQLConnectionSpec extends Specification with DatabaseTestHelper { row(10) === DateEncoderDecoder.decode("1984-08-06") row(11) === TimeEncoderDecoder.Instance.decode("22:13:45.888888") row(12) === true - row(13) must beAnInstanceOf[java.lang.Long] + row(13).asInstanceOf[AnyRef] must beAnInstanceOf[java.lang.Long] row(13).asInstanceOf[Long] must beGreaterThan(0L) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala index b71ebe65..c2471a75 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/ConnectionPoolSpec.scala @@ -22,6 +22,7 @@ import com.github.mauricio.async.db.pool.{ConnectionPool, PoolConfiguration} import com.github.mauricio.async.db.postgresql.exceptions.GenericDatabaseException import com.github.mauricio.async.db.postgresql.{PostgreSQLConnection, DatabaseTestHelper} import 
org.specs2.mutable.Specification +import scala.concurrent.ExecutionContext.Implicits.global object ConnectionPoolSpec { val Insert = "insert into transaction_test (id) values (?)" diff --git a/project/Build.scala b/project/Build.scala index ca5bcb9e..f4fbb02a 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -51,7 +51,7 @@ object Configuration { val commonVersion = "0.2.21-SNAPSHOT" val projectScalaVersion = "2.11.7" - val specs2Version = "2.5" + val specs2Version = "3.8.5" val specs2Dependency = "org.specs2" %% "specs2-core" % specs2Version % "test" val specs2JunitDependency = "org.specs2" %% "specs2-junit" % specs2Version % "test" @@ -82,6 +82,7 @@ object Configuration { :+ Opts.compile.unchecked :+ "-feature" , + testOptions in Test += Tests.Argument(TestFrameworks.Specs2, "sequential"), scalacOptions in doc := Seq("-doc-external-doc:scala=http://www.scala-lang.org/archives/downloads/distrib/files/nightly/docs/library/"), crossScalaVersions := Seq(projectScalaVersion, "2.10.6"), javacOptions := Seq("-source", "1.6", "-target", "1.6", "-encoding", "UTF8"), From 1b49d11ea458b48b3cdb975ebc5463456db1362f Mon Sep 17 00:00:00 2001 From: xuwei-k <6b656e6a69@gmail.com> Date: Sat, 24 Sep 2016 14:29:33 +0900 Subject: [PATCH 72/82] use List instead of Stack Stack is deprecated https://github.com/scala/scala/commit/44a22d7cc0c315b9feaee1d4cb5df7a66578b1ea --- .../async/db/pool/SingleThreadedAsyncObjectPool.scala | 10 ++++++---- .../async/db/postgresql/column/ArrayDecoder.scala | 9 +++++---- 2 files changed, 11 insertions(+), 8 deletions(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala index 49f60593..b4f25ae2 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/pool/SingleThreadedAsyncObjectPool.scala @@ -22,7 +22,7 @@ import com.github.mauricio.async.db.util.{Log, Worker} import java.util.concurrent.atomic.AtomicLong import java.util.{Timer, TimerTask} -import scala.collection.mutable.{ArrayBuffer, Queue, Stack} +import scala.collection.mutable.{ArrayBuffer, Queue} import scala.concurrent.{Future, Promise} import scala.util.{Failure, Success} @@ -52,7 +52,7 @@ class SingleThreadedAsyncObjectPool[T]( import SingleThreadedAsyncObjectPool.{Counter, log} private val mainPool = Worker() - private var poolables = new Stack[PoolableHolder[T]]() + private var poolables = List.empty[PoolableHolder[T]] private val checkouts = new ArrayBuffer[T](configuration.maxObjects) private val waitQueue = new Queue[Promise[T]]() private val timer = new Timer("async-object-pool-timer-" + Counter.incrementAndGet(), true) @@ -171,7 +171,7 @@ class SingleThreadedAsyncObjectPool[T]( */ private def addBack(item: T, promise: Promise[AsyncObjectPool[T]]) { - this.poolables.push(new PoolableHolder[T](item)) + this.poolables ::= new PoolableHolder[T](item) if (this.waitQueue.nonEmpty) { this.checkout(this.waitQueue.dequeue()) @@ -226,7 +226,9 @@ class SingleThreadedAsyncObjectPool[T]( case e: Exception => promise.failure(e) } } else { - val item = this.poolables.pop().item + val h :: t = this.poolables + this.poolables = t + val item = h.item this.checkouts += item promise.success(item) } diff --git 
a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala index d69eeba4..b62e9629 100644 --- a/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala +++ b/postgresql-async/src/main/scala/com/github/mauricio/async/db/postgresql/column/ArrayDecoder.scala @@ -19,7 +19,7 @@ package com.github.mauricio.async.db.postgresql.column import com.github.mauricio.async.db.column.ColumnDecoder import com.github.mauricio.async.db.postgresql.util.{ArrayStreamingParserDelegate, ArrayStreamingParser} import scala.collection.IndexedSeq -import scala.collection.mutable.{ArrayBuffer, Stack} +import scala.collection.mutable.ArrayBuffer import com.github.mauricio.async.db.general.ColumnData import io.netty.buffer.{Unpooled, ByteBuf} import java.nio.charset.Charset @@ -32,12 +32,13 @@ class ArrayDecoder(private val decoder: ColumnDecoder) extends ColumnDecoder { buffer.readBytes(bytes) val value = new String(bytes, charset) - val stack = new Stack[ArrayBuffer[Any]]() + var stack = List.empty[ArrayBuffer[Any]] var current: ArrayBuffer[Any] = null var result: IndexedSeq[Any] = null val delegate = new ArrayStreamingParserDelegate { override def arrayEnded { - result = stack.pop() + result = stack.head + stack = stack.tail } override def elementFound(element: String) { @@ -63,7 +64,7 @@ class ArrayDecoder(private val decoder: ColumnDecoder) extends ColumnDecoder { case None => {} } - stack.push(current) + stack ::= current } } From 630c65930f6837ebc5ee5d93314e1fc70512a1e2 Mon Sep 17 00:00:00 2001 From: Sergey Samoylov Date: Fri, 30 Sep 2016 14:06:04 +0300 Subject: [PATCH 73/82] Fix for CLIENT_MULTI_RESULTS constant value --- .../scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala index 4587eb09..3b56ecc0 100644 --- a/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala +++ b/mysql-async/src/main/scala/com/github/mauricio/async/db/mysql/util/MySQLIO.scala @@ -21,7 +21,7 @@ object MySQLIO { final val CLIENT_PROTOCOL_41 = 0x0200 final val CLIENT_CONNECT_WITH_DB = 0x0008 final val CLIENT_TRANSACTIONS = 0x2000 - final val CLIENT_MULTI_RESULTS = 0x200000 + final val CLIENT_MULTI_RESULTS = 0x20000 final val CLIENT_LONG_FLAG = 0x0001 final val CLIENT_PLUGIN_AUTH = 0x00080000 final val CLIENT_SECURE_CONNECTION = 0x00008000 From 2a2896fd22e8e833ba6deca1e7d85944a060f7b0 Mon Sep 17 00:00:00 2001 From: golem131 Date: Sat, 5 Nov 2016 14:02:11 +0300 Subject: [PATCH 74/82] Fix deprecation warning "constructor Slf4JLoggerFactory in class Slf4JLoggerFactory is deprecated: see corresponding Javadoc for more information" --- .../scala/com/github/mauricio/async/db/util/NettyUtils.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala index 32f736e3..c9e09f1a 100644 --- a/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala +++ b/db-async-common/src/main/scala/com/github/mauricio/async/db/util/NettyUtils.scala @@ -20,7 +20,7 @@ import 
io.netty.util.internal.logging.{InternalLoggerFactory, Slf4JLoggerFactory object NettyUtils { - InternalLoggerFactory.setDefaultFactory(new Slf4JLoggerFactory()) + InternalLoggerFactory.setDefaultFactory(Slf4JLoggerFactory.INSTANCE) lazy val DefaultEventLoopGroup = new NioEventLoopGroup(0, DaemonThreadsFactory("db-async-netty")) } \ No newline at end of file From 4b6c380a35de8ee242188f58c3b7e71fad47917c Mon Sep 17 00:00:00 2001 From: golem131 Date: Mon, 7 Nov 2016 12:39:47 +0300 Subject: [PATCH 75/82] Wait until connection return to pool --- .../SingleThreadedAsyncObjectPoolSpec.scala | 23 +++++++++++++++---- 1 file changed, 19 insertions(+), 4 deletions(-) diff --git a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala index d99a60d1..75da1ebd 100644 --- a/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala +++ b/postgresql-async/src/test/scala/com/github/mauricio/async/db/postgresql/pool/SingleThreadedAsyncObjectPoolSpec.scala @@ -16,12 +16,14 @@ package com.github.mauricio.async.db.postgresql.pool -import com.github.mauricio.async.db.pool.{SingleThreadedAsyncObjectPool, PoolExhaustedException, PoolConfiguration} +import com.github.mauricio.async.db.pool.{AsyncObjectPool, PoolConfiguration, PoolExhaustedException, SingleThreadedAsyncObjectPool} import com.github.mauricio.async.db.postgresql.{DatabaseTestHelper, PostgreSQLConnection} import java.nio.channels.ClosedChannelException import java.util.concurrent.TimeUnit + import org.specs2.mutable.Specification -import scala.concurrent.Await + +import scala.concurrent.{Await, Future} import scala.concurrent.duration._ import scala.language.postfixOps import com.github.mauricio.async.db.exceptions.ConnectionStillRunningQueryException @@ -47,23 +49,36 @@ class SingleThreadedAsyncObjectPoolSpec extends Specification with DatabaseTestH pool => val connection = get(pool) - val promises = List(pool.take, pool.take, pool.take) + val promises: List[Future[PostgreSQLConnection]] = List(pool.take, pool.take, pool.take) pool.availables.size === 0 pool.inUse.size === 1 + pool.queued.size must be_<=(3) + + /* pool.take call checkout that call this.mainPool.action, + so enqueuePromise called in executorService, + so there is no guaranties that all promises in queue at that moment + */ + val deadline = 5.seconds.fromNow + while(pool.queued.size < 3 || deadline.hasTimeLeft) { + Thread.sleep(50) + } + pool.queued.size === 3 executeTest(connection) pool.giveBack(connection) - promises.foreach { + val pools: List[Future[AsyncObjectPool[PostgreSQLConnection]]] = promises.map { promise => val connection = Await.result(promise, Duration(5, TimeUnit.SECONDS)) executeTest(connection) pool.giveBack(connection) } + Await.ready(pools.last, Duration(5, TimeUnit.SECONDS)) + pool.availables.size === 1 pool.inUse.size === 0 pool.queued.size === 0 From f75679dd4a9e200636614122a73a18f876a56129 Mon Sep 17 00:00:00 2001 From: golem131 Date: Thu, 3 Nov 2016 17:29:11 +0300 Subject: [PATCH 76/82] Scala 2.12.1 support --- .travis.yml | 6 ++++++ project/Build.scala | 4 ++-- project/build.properties | 2 +- 3 files changed, 9 insertions(+), 3 deletions(-) diff --git a/.travis.yml b/.travis.yml index 2c1a7a84..378c49d0 100644 --- a/.travis.yml +++ b/.travis.yml @@ -5,6 +5,12 @@ scala: jdk: - oraclejdk7 - oraclejdk8 + +matrix: + include: + 
- scala: 2.12.1 + jdk: oraclejdk8 + services: - postgresql - mysql diff --git a/project/Build.scala b/project/Build.scala index f4fbb02a..e1de52d9 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -51,7 +51,7 @@ object Configuration { val commonVersion = "0.2.21-SNAPSHOT" val projectScalaVersion = "2.11.7" - val specs2Version = "3.8.5" + val specs2Version = "3.8.6" val specs2Dependency = "org.specs2" %% "specs2-core" % specs2Version % "test" val specs2JunitDependency = "org.specs2" %% "specs2-junit" % specs2Version % "test" @@ -84,7 +84,7 @@ object Configuration { , testOptions in Test += Tests.Argument(TestFrameworks.Specs2, "sequential"), scalacOptions in doc := Seq("-doc-external-doc:scala=http://www.scala-lang.org/archives/downloads/distrib/files/nightly/docs/library/"), - crossScalaVersions := Seq(projectScalaVersion, "2.10.6"), + crossScalaVersions := Seq(projectScalaVersion, "2.10.6", "2.12.1"), javacOptions := Seq("-source", "1.6", "-target", "1.6", "-encoding", "UTF8"), organization := "com.github.mauricio", version := commonVersion, diff --git a/project/build.properties b/project/build.properties index d638b4f3..e0cbc71d 100644 --- a/project/build.properties +++ b/project/build.properties @@ -1 +1 @@ -sbt.version = 0.13.8 \ No newline at end of file +sbt.version = 0.13.13 \ No newline at end of file From 2f4444e745c1d1164f6f78ab3244de16593c1a0e Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maur=C3=ADcio=20Linhares?= Date: Mon, 9 Jan 2017 10:42:11 -0300 Subject: [PATCH 77/82] preparing for 0.2.21 --- CHANGELOG.md | 4 ++++ Vagrantfile | 13 ------------- project/Build.scala | 14 +++++++------- project/plugins.sbt | 4 +++- 4 files changed, 14 insertions(+), 21 deletions(-) delete mode 100644 Vagrantfile diff --git a/CHANGELOG.md b/CHANGELOG.md index bafc831d..9ac99d03 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -26,6 +26,10 @@ # Changelog +## 0.2.20 - 2017-09-17 + +* Building for Scala 2.12; + ## 0.2.19 - 2016-03-17 * Always use `NUMERIC` when handling numbers in prepared statements in PostgreSQL; diff --git a/Vagrantfile b/Vagrantfile deleted file mode 100644 index 5498f80c..00000000 --- a/Vagrantfile +++ /dev/null @@ -1,13 +0,0 @@ -# -*- mode: ruby -*- -# vi: set ft=ruby : - -# Vagrantfile API/syntax version. Don't touch unless you know what you're doing! 
-VAGRANTFILE_API_VERSION = "2" - -Vagrant.configure(VAGRANTFILE_API_VERSION) do |config| - - config.vm.box = "chef/centos-6.5" - config.vm.provision :shell, path: "bootstrap.sh" - config.vm.network :forwarded_port, host: 3307, guest: 3306 - -end diff --git a/project/Build.scala b/project/Build.scala index e1de52d9..86ac4278 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -50,20 +50,20 @@ object ProjectBuild extends Build { object Configuration { val commonVersion = "0.2.21-SNAPSHOT" - val projectScalaVersion = "2.11.7" + val projectScalaVersion = "2.12.1" val specs2Version = "3.8.6" val specs2Dependency = "org.specs2" %% "specs2-core" % specs2Version % "test" val specs2JunitDependency = "org.specs2" %% "specs2-junit" % specs2Version % "test" val specs2MockDependency = "org.specs2" %% "specs2-mock" % specs2Version % "test" - val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.6" % "test" + val logbackDependency = "ch.qos.logback" % "logback-classic" % "1.1.8" % "test" val commonDependencies = Seq( - "org.slf4j" % "slf4j-api" % "1.7.18", - "joda-time" % "joda-time" % "2.9.2", + "org.slf4j" % "slf4j-api" % "1.7.22", + "joda-time" % "joda-time" % "2.9.7", "org.joda" % "joda-convert" % "1.8.1", - "io.netty" % "netty-all" % "4.1.1.Final", - "org.javassist" % "javassist" % "3.20.0-GA", + "io.netty" % "netty-all" % "4.1.6.Final", + "org.javassist" % "javassist" % "3.21.0-GA", specs2Dependency, specs2JunitDependency, specs2MockDependency, @@ -84,7 +84,7 @@ object Configuration { , testOptions in Test += Tests.Argument(TestFrameworks.Specs2, "sequential"), scalacOptions in doc := Seq("-doc-external-doc:scala=http://www.scala-lang.org/archives/downloads/distrib/files/nightly/docs/library/"), - crossScalaVersions := Seq(projectScalaVersion, "2.10.6", "2.12.1"), + crossScalaVersions := Seq(projectScalaVersion, "2.10.6", "2.11.8"), javacOptions := Seq("-source", "1.6", "-target", "1.6", "-encoding", "UTF8"), organization := "com.github.mauricio", version := commonVersion, diff --git a/project/plugins.sbt b/project/plugins.sbt index 4528f2d6..d271b7f7 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -2,6 +2,8 @@ addSbtPlugin("com.typesafe.sbteclipse" % "sbteclipse-plugin" % "2.5.0") addSbtPlugin("com.github.mpeltonen" % "sbt-idea" % "1.6.0") -addSbtPlugin("com.typesafe.sbt" % "sbt-pgp" % "0.8.3") +addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") + +addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.0") resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases" From 94a7ae428840e5ef948c8307591ab526521c753b Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maur=C3=ADcio=20Linhares?= Date: Mon, 9 Jan 2017 11:14:22 -0300 Subject: [PATCH 78/82] Remove JDK7 from build targets --- .travis.yml | 8 ++------ 1 file changed, 2 insertions(+), 6 deletions(-) diff --git a/.travis.yml b/.travis.yml index 378c49d0..3e334f1a 100644 --- a/.travis.yml +++ b/.travis.yml @@ -2,15 +2,11 @@ language: scala scala: - 2.10.4 - 2.11.7 + - 2.12.1 + jdk: - - oraclejdk7 - oraclejdk8 -matrix: - include: - - scala: 2.12.1 - jdk: oraclejdk8 - services: - postgresql - mysql From b62199294a01f6c350835b9b22fbb4954bbf3195 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maur=C3=ADcio=20Linhares?= Date: Mon, 9 Jan 2017 13:53:30 -0300 Subject: [PATCH 79/82] Closing 0.2.21 --- CHANGELOG.md | 1 + project/Build.scala | 2 +- 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 
9ac99d03..ce4b61ae 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -29,6 +29,7 @@ ## 0.2.20 - 2017-09-17 * Building for Scala 2.12; +* Fix SFL4J deprecation warning - #201 - @golem131; ## 0.2.19 - 2016-03-17 diff --git a/project/Build.scala b/project/Build.scala index 86ac4278..f70240ff 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -49,7 +49,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.21-SNAPSHOT" + val commonVersion = "0.2.21" val projectScalaVersion = "2.12.1" val specs2Version = "3.8.6" From f031625d5e38dae100045437bd23e6e6d6e9dc73 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maur=C3=ADcio=20Linhares?= Date: Mon, 9 Jan 2017 14:35:01 -0300 Subject: [PATCH 80/82] Starting next development cycle --- README.markdown | 8 ++++---- project/Build.scala | 2 +- project/plugins.sbt | 2 ++ 3 files changed, 7 insertions(+), 5 deletions(-) diff --git a/README.markdown b/README.markdown index 9977b309..75d25168 100644 --- a/README.markdown +++ b/README.markdown @@ -54,7 +54,7 @@ You can view the project's [CHANGELOG here](CHANGELOG.md). And if you're in a hurry, you can include them in your build like this, if you're using PostgreSQL: ```scala -"com.github.mauricio" %% "postgresql-async" % "0.2.20" +"com.github.mauricio" %% "postgresql-async" % "0.2.21" ``` Or Maven: @@ -63,14 +63,14 @@ Or Maven: com.github.mauricio postgresql-async_2.11 - 0.2.20 + 0.2.21 ``` And if you're into MySQL: ```scala -"com.github.mauricio" %% "mysql-async" % "0.2.20" +"com.github.mauricio" %% "mysql-async" % "0.2.21" ``` Or Maven: @@ -79,7 +79,7 @@ Or Maven: com.github.mauricio mysql-async_2.11 - 0.2.20 + 0.2.21 ``` diff --git a/project/Build.scala b/project/Build.scala index f70240ff..b543b050 100644 --- a/project/Build.scala +++ b/project/Build.scala @@ -49,7 +49,7 @@ object ProjectBuild extends Build { object Configuration { - val commonVersion = "0.2.21" + val commonVersion = "0.2.22-SNAPSHOT" val projectScalaVersion = "2.12.1" val specs2Version = "3.8.6" diff --git a/project/plugins.sbt b/project/plugins.sbt index d271b7f7..0e9ec632 100644 --- a/project/plugins.sbt +++ b/project/plugins.sbt @@ -7,3 +7,5 @@ addSbtPlugin("com.jsuereth" % "sbt-pgp" % "1.0.0") addSbtPlugin("com.timushev.sbt" % "sbt-updates" % "0.3.0") resolvers += "scalaz-bintray" at "https://dl.bintray.com/scalaz/releases" + +// pgpSigningKey := Some(0xB98761578C650D77L) From ef3e27b8c34df8b5d4ef638c0113b4c951cb4c98 Mon Sep 17 00:00:00 2001 From: Dominik Dorn Date: Tue, 28 Feb 2017 20:15:31 +0100 Subject: [PATCH 81/82] updated README to mention Scala 2.12 support --- README.markdown | 21 +++++++++++++++++++-- 1 file changed, 19 insertions(+), 2 deletions(-) diff --git a/README.markdown b/README.markdown index 75d25168..b15d4885 100644 --- a/README.markdown +++ b/README.markdown @@ -1,7 +1,7 @@ -- [[![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala 2.10 and 2.11](#!build-statushttpstravis-ciorgmauriciopostgresql-asyncpnghttpstravis-ciorgmauriciopostgresql-async-postgresql-async-&-mysql-async---async-netty-based-database-drivers-for-mysql-and-postgresql-written-in-scala-210-and-211) +- [[![Build 
Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala 2.10, 2.11 and 2.12](#!build-statushttpstravis-ciorgmauriciopostgresql-asyncpnghttpstravis-ciorgmauriciopostgresql-async-postgresql-async-&-mysql-async---async-netty-based-database-drivers-for-mysql-and-postgresql-written-in-scala-210-and-211) - [Abstractions and integrations](#abstractions-and-integrations) - [Include them as dependencies](#include-them-as-dependencies) - [Database connections and encodings](#database-connections-and-encodings) @@ -22,7 +22,7 @@ -# [![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala 2.10 and 2.11 +# [![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala 2.10, 2.11 and 2.12 The main goal for this project is to implement simple, async, performant and reliable database drivers for PostgreSQL and MySQL in Scala. This is not supposed to be a JDBC replacement, these drivers aim to cover the common @@ -67,6 +67,15 @@ Or Maven: ``` +respectively for Scala 2.12: +```xml + + com.github.mauricio + postgresql-async_2.12 + 0.2.21 + +``` + And if you're into MySQL: ```scala @@ -82,6 +91,14 @@ Or Maven: 0.2.21 ``` +respectively for Scala 2.12: +```xml + + com.github.mauricio + mysql-async_2.12 + 0.2.21 + +``` ## Database connections and encodings From 5716ac43818b6be0dc4fcc2b2655dde3411cdbe0 Mon Sep 17 00:00:00 2001 From: =?UTF-8?q?Maur=C3=ADcio=20Linhares?= Date: Tue, 21 Aug 2018 13:52:25 -0400 Subject: [PATCH 82/82] Adding message with project not being maintained anymore --- README.markdown | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/README.markdown b/README.markdown index b15d4885..79f4b057 100644 --- a/README.markdown +++ b/README.markdown @@ -1,7 +1,7 @@ -- [[![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, database drivers for MySQL and PostgreSQL written in Scala 2.10, 2.11 and 2.12](#!build-statushttpstravis-ciorgmauriciopostgresql-asyncpnghttpstravis-ciorgmauriciopostgresql-async-postgresql-async-&-mysql-async---async-netty-based-database-drivers-for-mysql-and-postgresql-written-in-scala-210-and-211) +- This project is not being maintained anymore, feel free to fork and work on it - [Abstractions and integrations](#abstractions-and-integrations) - [Include them as dependencies](#include-them-as-dependencies) - [Database connections and encodings](#database-connections-and-encodings) @@ -22,7 +22,7 @@ -# [![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) postgresql-async & mysql-async - async, Netty based, 
database drivers for MySQL and PostgreSQL written in Scala 2.10, 2.11 and 2.12 +# [![Build Status](https://travis-ci.org/mauricio/postgresql-async.png)](https://travis-ci.org/mauricio/postgresql-async) This project is not being maintained anymore, feel free to fork and work on it The main goal for this project is to implement simple, async, performant and reliable database drivers for PostgreSQL and MySQL in Scala. This is not supposed to be a JDBC replacement, these drivers aim to cover the common