Commit 55692dc

Miscellaneous fixes and updates
1 parent b408446 commit 55692dc

11 files changed: +174 additions, -60 deletions


build.sbt

Lines changed: 3 additions & 3 deletions
@@ -505,13 +505,13 @@ lazy val node_zlib = (project in file("node/zlib")).
 lazy val bundle_npm = (project in file("bundles/npm")).
   aggregate(
     npm_adal, npm_amqplib, npm_async, npm_azure, npm_bcrypt, npm_body_parser, npm_cassandra,
-    npm_colors, npm_cvs_parse, npm_drama, npm_splitargs, npm_escape_html, npm_express, npm_express_csv,
+    npm_colors, npm_csv_parse, npm_drama, npm_splitargs, npm_escape_html, npm_express, npm_express_csv,
     npm_express_fileupload, npm_express_ws, npm_feedparser, npm_filed, npm_htmlparser2, npm_htmltojson, npm_jwt_simple, npm_kafka,
     npm_memory_fs, npm_moment, npm_moment_tz, npm_mongodb, npm_multer, npm_mysql, npm_numeral, npm_oppressor,
     npm_md5, npm_request, npm_rxjs, npm_transducers, npm_watch, npm_xml2js, npm_zookeeper).
   dependsOn(
     npm_adal, npm_amqplib, npm_async, npm_azure, npm_bcrypt, npm_body_parser, npm_cassandra,
-    npm_colors, npm_cvs_parse, npm_drama, npm_splitargs, npm_escape_html, npm_express, npm_express_csv,
+    npm_colors, npm_csv_parse, npm_drama, npm_splitargs, npm_escape_html, npm_express, npm_express_csv,
     npm_express_fileupload, npm_express_ws, npm_feedparser, npm_filed, npm_htmlparser2, npm_htmltojson, npm_jwt_simple, npm_kafka,
     npm_memory_fs, npm_moment, npm_moment_tz, npm_mongodb, npm_multer, npm_mysql, npm_numeral, npm_oppressor,
     npm_md5, npm_request, npm_rxjs, npm_transducers, npm_watch, npm_xml2js, npm_zookeeper).
@@ -594,7 +594,7 @@ lazy val npm_colors = (project in file("npm/colors")).
   description := "npm/colors binding for Scala.js"
 )

-lazy val npm_cvs_parse = (project in file("npm/csv-parse")).
+lazy val npm_csv_parse = (project in file("npm/csv-parse")).
   dependsOn(node_stream).
   enablePlugins(ScalaJSPlugin).
   settings(commonSettings: _*).

Lines changed: 90 additions & 22 deletions
@@ -1,42 +1,110 @@
 package examples.nodejs.datastores

-import org.scalajs.nodejs.kafkanode.Payload
-import org.scalajs.nodejs.errors
-import org.scalajs.dom.console
-import org.scalajs.nodejs.Bootstrap
 import org.scalajs.nodejs.errors.Error
-import org.scalajs.nodejs.kafkanode.{KafkaNode, Payload}
+import org.scalajs.nodejs.fs.Fs
+import org.scalajs.nodejs.globals.process
+import org.scalajs.nodejs.kafkanode.{ConsumerOptions, FetchRequest, KafkaNode, Payload}
+import org.scalajs.nodejs.readline.{Readline, ReadlineOptions}
+import org.scalajs.nodejs.util.ScalaJsHelper._
+import org.scalajs.nodejs.{Bootstrap, console}

+import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
 import scala.scalajs.js

 /**
   * Producer Example
   */
 class KafkaProducerExample(bootstrap: Bootstrap) {
-  implicit val require = bootstrap.require
+
+  import bootstrap._
+
+  implicit val $require = bootstrap.require
+
+  val topic = "testing"

   val kafka = KafkaNode()
-  val client = kafka.Client("dev528:2181")
-  val producer = kafka.Producer(client)
+  val client = kafka.Client("dev111:2181")

-  val payloads = js.Array(Payload(
-    topic = "test", messages = "This is the First Message I am sending", partition = 1
-  ))
+  test1()

-  // producer "on" ready to send payload to kafka.
-  producer.on("ready", () => {
-    console.log("Received ready:")
+  def test1(): Unit = {
+    console.log("test1:")

-    producer.send(payloads, (err: Error, data: js.Any) => {
-      console.log("Received data:")
-      console.log(data)
+    val producer = kafka.Producer(client)
+    val payloads = js.Array((1 to 10000) map (n => Payload(
+      topic = topic, messages = s"I've sent you $n notices. Final notice.", partition = n % 10
+    )): _*)
+
+    // producer "on" ready to send payload to kafka.
+    producer.on("ready", () => {
+      console.log("Received ready:")
+
+      producer.send(payloads, (err: Error, data: js.Any) => {
+        console.log("Received data:")
+        console.log(data)
+      })
     })
-  })

-  producer.on("error", (err: Error) => {
-    console.log("Received error:")
-    console.log(err)
-  })
+    producer.on("error", (err: Error) => {
+      console.log("Received error:")
+      console.log(err)
+    })
+  }
+
+  def test2(): Unit = {
+    console.log("test2:")
+
+    val fs = Fs()
+    val readline = Readline()
+
+    val producer = kafka.Producer(client)
+
+    // producer "on" ready to send payload to kafka.
+    producer.onReady(() => {
+      console.log("Received ready:")
+
+      var lineNo = 0
+      val file = __dirname + "/examples.js"
+      val reader = readline.createInterface(new ReadlineOptions(
+        input = fs.createReadStream(file),
+        output = process.stdout,
+        terminal = false
+      ))
+
+      reader.onLine { line =>
+        lineNo += 1
+        console.log("[%d] %s", lineNo, line)
+
+        val payloads = js.Array(Payload(topic = topic, messages = line))
+        producer.sendFuture(payloads) foreach { _ =>
+          console.log("messages", payloads)
+        }
+      }
+
+      reader.onClose(() => {
+        reader.onClose { () =>
+          console.log("# stream closed.")
+        }
+      })
+    })
+
+    producer.onError((err: Error) => {
+      console.log("Received error:")
+      console.log(err)
+    })
+  }
+
+  def test3(): Unit = {
+    console.log("test3:")
+
+    val payloads = js.Array((0 to 9) map (n => FetchRequest(topic = topic, partition = n)): _*)
+    val consumer = kafka.Consumer(client, payloads, new ConsumerOptions(groupId = "1", autoCommit = true))
+    //(0 to 9) foreach (n => consumer.setOffset(topic = topic, partition = n, offset = 0))
+
+    consumer.onMessage((message: js.Any) => {
+      console.log(message)
+    })
+  }

 }

examples/src/main/scala/examples/nodejs/io/FilesExample.scala

Lines changed: 3 additions & 4 deletions
@@ -1,10 +1,8 @@
 package examples.nodejs.io

-import org.scalajs.nodejs.globals.process
-import org.scalajs.nodejs.readline.ReadlineOptions
-import org.scalajs.nodejs.console
-import org.scalajs.nodejs.Bootstrap
+import org.scalajs.nodejs.{Bootstrap, console}
 import org.scalajs.nodejs.fs.Fs
+import org.scalajs.nodejs.globals.process
 import org.scalajs.nodejs.readline.{Readline, ReadlineOptions}

 /**
@@ -13,6 +11,7 @@ import org.scalajs.nodejs.readline.{Readline, ReadlineOptions}
   */
 class FilesExample(bootstrap: Bootstrap) {
   implicit val require = bootstrap.require
+
   import bootstrap._

   val fs = Fs()

npm/csv-parse/src/main/scala/org/scalajs/nodejs/csvparse/CsvParse.scala

Lines changed: 9 additions & 0 deletions
@@ -1,6 +1,7 @@
 package org.scalajs.nodejs
 package csvparse

+import org.scalajs.nodejs.util.ScalaJsHelper._
 import org.scalajs.nodejs.csvparse.CsvParse.CsvParser

 import scala.scalajs.js
@@ -50,10 +51,18 @@ object CsvParse {
     */
   implicit class CsvParseEvents(val parser: CsvParse) extends AnyVal {

+    @inline
+    def parseFuture[T](csv: String, options: CsvParseOptions | js.Any = null) = {
+      futureCallbackE1[errors.Error, T](parser(csv, options, _))
+    }
+
+    @inline
     def onError(listener: errors.Error => Any) = parser.on("error", listener)

+    @inline
     def onFinish(listener: () => Any) = parser.on("finish", listener)

+    @inline
     def onReadable(listener: () => Any) = parser.on("readable", listener)

   }
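The parseFuture helper added above wraps the callback-based parser call in a Future. Below is a minimal usage sketch, not part of this commit: it assumes the binding's usual CsvParse() factory and an implicit require, as in the other examples in this repository, and the row type js.Array[js.Array[String]] is chosen purely for illustration.

import org.scalajs.nodejs.csvparse.CsvParse
import org.scalajs.nodejs.{Bootstrap, console}

import scala.scalajs.concurrent.JSExecutionContext.Implicits.queue
import scala.scalajs.js

class CsvParseFutureSketch(bootstrap: Bootstrap) {
  implicit val require = bootstrap.require

  // load the csv-parse module (factory assumed to follow this repo's binding pattern)
  val parser = CsvParse()

  // parseFuture converts the (error, output) callback into a Future of the parsed rows
  parser.parseFuture[js.Array[js.Array[String]]]("symbol,price\nAAPL,96.76") foreach { rows =>
    console.log("parsed rows:", rows)
  }
}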

npm/kafka_node/src/main/scala/org/scalajs/nodejs/kafkanode/Consumer.scala

Lines changed: 0 additions & 1 deletion
@@ -1,7 +1,6 @@
 package org.scalajs.nodejs.kafkanode

 import org.scalajs.nodejs.util.ScalaJsHelper._
-import org.scalajs.nodejs.util.ScalaJsHelper

 import scala.scalajs.js

npm/kafka_node/src/main/scala/org/scalajs/nodejs/kafkanode/ConsumerClass.scala

Lines changed: 2 additions & 3 deletions
@@ -1,7 +1,6 @@
 package org.scalajs.nodejs.kafkanode

 import org.scalajs.nodejs.util.ScalaJsHelper._
-import org.scalajs.nodejs.util.ScalaJsHelper

 import scala.scalajs.js

@@ -27,8 +26,8 @@ object ConsumerClass {
   /**
     * @example Consumer(client, payloads, options)
     */
-  def apply(client: Client, payloads: js.Any, options: ConsumerOptions) = {
-    `class`.New[Client](client, payloads, options)
+  def apply(client: Client, payloads: js.Array[FetchRequest], options: ConsumerOptions = null) = {
+    `class`.New[Consumer](client, payloads, options)
   }

 }

Lines changed: 24 additions & 0 deletions
@@ -0,0 +1,24 @@
+package org.scalajs.nodejs.kafkanode
+
+import scala.scalajs.js
+import scala.scalajs.js.annotation.ScalaJSDefined
+
+/**
+  * Kafka Fetch Request
+  */
+@ScalaJSDefined
+class FetchRequest(val topic: String,
+                   val partition: js.UndefOr[Int] = js.undefined) extends js.Object
+
+/**
+  * Kafka Fetch Request Companion
+  */
+object FetchRequest {
+
+  def apply(topic: String, partition: js.UndefOr[Int] = js.undefined) = {
+    new FetchRequest(topic, partition)
+  }
+
+}
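For reference, these FetchRequest objects are what the updated Consumer factory (see the ConsumerClass change above) now accepts, as exercised by test3() in the producer example earlier in this commit. The consumer sketch below is not part of the commit and simply mirrors that example; the ZooKeeper address and topic name are placeholders.

import org.scalajs.nodejs.kafkanode.{ConsumerOptions, FetchRequest, KafkaNode}
import org.scalajs.nodejs.{Bootstrap, console}

import scala.scalajs.js

class KafkaConsumerSketch(bootstrap: Bootstrap) {
  implicit val require = bootstrap.require

  val kafka = KafkaNode()
  val client = kafka.Client("localhost:2181") // placeholder ZooKeeper connection string

  // one FetchRequest per partition of the topic being consumed
  val requests = js.Array((0 to 9) map (n => FetchRequest(topic = "testing", partition = n)): _*)
  val consumer = kafka.Consumer(client, requests, new ConsumerOptions(groupId = "1", autoCommit = true))

  consumer.onMessage((message: js.Any) => console.log(message))
}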
Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+package org.scalajs.nodejs.kafkanode
+
+import scala.scalajs.js
+import scala.scalajs.js.annotation.ScalaJSDefined
+
+/**
+  * Kafka Offset Request
+  */
+@ScalaJSDefined
+class OffsetRequest(val topic: String,
+                    val partition: js.UndefOr[Int] = js.undefined,
+                    val time: js.UndefOr[Double] = js.undefined,
+                    val maxNum: js.UndefOr[Int] = js.undefined) extends js.Object

npm/mongodb/src/main/scala/org/scalajs/nodejs/mongodb/BulkWriteOpResultObject.scala

Lines changed: 14 additions & 13 deletions
@@ -3,39 +3,40 @@ package org.scalajs.nodejs.mongodb
 import org.scalajs.nodejs.mongodb.BulkWriteOpResultObject.MongnoIdRef

 import scala.scalajs.js
+import scala.scalajs.js.annotation.ScalaJSDefined

 /**
   * Bulk Write Operation Result Object
   */
-@js.native
+@ScalaJSDefined
 trait BulkWriteOpResultObject extends js.Object {
   // Number of documents inserted.
-  val nInserted: Int = js.native
+  val nInserted: Int

   // Number of documents matched for update.
-  val nMatched: Int = js.native
+  val nMatched: Int

   // Number of documents modified.
-  val nModified: Int = js.native
+  val nModified: Int

   // Number of documents deleted.
-  val nRemoved: Int = js.native
+  val nRemoved: Int

   // Number of documents upserted.
-  val nUpserted: Int = js.native
+  val nUpserted: Int

   // Inserted document generated Id's, hash key is the index of the originating operation
-  val insertedIds: js.Array[MongnoIdRef] = js.native
+  val insertedIds: js.Array[MongnoIdRef]

   // Upserted document generated Id's, hash key is the index of the originating operation
-  val upsertedIds: js.Array[MongnoIdRef] = js.native
+  val upsertedIds: js.Array[MongnoIdRef]

-  val upserted: js.Array[MongnoIdRef] = js.native
+  val upserted: js.Array[MongnoIdRef]

-  val writeConcernErrors: js.Array[js.Any] = js.native
+  val writeConcernErrors: js.Array[js.Any]

-  val writeErrors: js.Array[js.Any] = js.native
+  val writeErrors: js.Array[js.Any]

 }

@@ -47,8 +48,8 @@ object BulkWriteOpResultObject {

   @js.native
   trait MongnoIdRef extends js.Object {
-    val index: Int = js.native
-    val _id: ObjectID = js.native
+    val index: Int
+    val _id: ObjectID
   }

 }
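With the trait now Scala.js-defined rather than native, its fields are read from Scala code the same way as before. The small helper below is illustrative only, not part of the binding, and uses nothing beyond the members declared above.

import org.scalajs.nodejs.mongodb.BulkWriteOpResultObject

object BulkWriteResults {

  // condense the counters carried by a bulk-write result into a one-line summary
  def summarize(result: BulkWriteOpResultObject): String =
    s"inserted=${result.nInserted}, matched=${result.nMatched}, modified=${result.nModified}, " +
      s"removed=${result.nRemoved}, upserted=${result.nUpserted}, writeErrors=${result.writeErrors.length}"
}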
Lines changed: 14 additions & 0 deletions
@@ -0,0 +1,14 @@
+package org.scalajs.nodejs.mongodb
+
+import scala.scalajs.js
+import scala.scalajs.js.annotation.ScalaJSDefined
+
+/**
+  * Text Search Options
+  */
+@ScalaJSDefined
+class TextSearchOptions(var $search: js.UndefOr[String] = js.undefined,
+                        var $language: js.UndefOr[String] = js.undefined,
+                        var $caseSensitive: js.UndefOr[Boolean] = js.undefined,
+                        var $diacriticSensitive: js.UndefOr[Boolean] = js.undefined) extends js.Object
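TextSearchOptions models the options of MongoDB's $text query operator. The sketch below is not part of the commit; it only shows how a text-search filter document could be assembled with js.Dynamic.literal, since the find call it would eventually be passed to is outside this change.

import org.scalajs.nodejs.mongodb.TextSearchOptions

import scala.scalajs.js

object TextSearchFilterSketch {

  // equivalent JSON: { "$text": { "$search": "espresso", "$caseSensitive": false } }
  val filter: js.Object = js.Dynamic.literal(
    "$text" -> new TextSearchOptions($search = "espresso", $caseSensitive = false)
  )
}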

0 commit comments
