Skip to content
This repository was archived by the owner on Aug 22, 2025. It is now read-only.
Open
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
25 commits
Select commit Hold shift + click to select a range
fc5e1c6
[WIP][DCS-67] Docker entry point refactor (#56)
Dec 19, 2016
15f6181
Docker entry points fixes for standalone mode with vault (#61)
pfcoperez Dec 21, 2016
66f4989
[CROSSDATA] [DOCKER] Scripts for generation dockers locally (#58)
mafernandez-stratio Dec 21, 2016
038a992
[docker-entrypoint] fix catalog configuration (#65)
darroyo-stratio Dec 22, 2016
a1ed836
remove quotation marks (#67)
darroyo-stratio Dec 22, 2016
ba586c3
RowSerializer: Safe guard against double types with Decimal schema ta…
pfcoperez Dec 22, 2016
8df501e
[CROSSDATA] [SERVICE DISCOVERY] Local member from config (#64)
mafernandez-stratio Dec 22, 2016
c4c1ac3
[CROSSDATA] [SERVICE DISCOVERY] Scaladoc added (#62)
mafernandez-stratio Dec 22, 2016
74838bc
[DCS-556] [CROSSDATA] [DOCKER] Gosec security manager activated (#69)
mafernandez-stratio Dec 23, 2016
bf5889f
remove gosec dependency due to community version
Jan 18, 2017
65e61f6
RowSerializers: Added additional types as candidates to be serialized…
pfcoperez Dec 23, 2016
a2fdefd
[CROSSDATA] [DOCKER] XD_CATALOG set to Derby by default (#72)
mafernandez-stratio Dec 23, 2016
7fe0c9d
RowSerializer: Added Java's big decimal type as valid source type. (#74)
pfcoperez Dec 27, 2016
cc9da02
[CROSSDATA] [DOCKER] Deactivation of SSL in TCP connections (#73)
mafernandez-stratio Dec 27, 2016
f2c5c94
curator dependency has been shaded as well zookeeper (#79)
cflores-stratio Dec 28, 2016
1ac4ccc
Update of Spark Compatibility table (#81)
mafernandez-stratio Dec 29, 2016
8e1011e
[CROSSDATA] [DOCKER] Script for building docker locally (#80)
mafernandez-stratio Dec 29, 2016
4d5d454
[CROSSDATA] [SERVICE DISCOVERY] Keeping order of seeds (#75)
mafernandez-stratio Dec 29, 2016
8bb9eea
[DCS-769] Tables should not be cached when persistence fails (#87)
Jan 9, 2017
236710a
[SERVICE DISCOVERY] Fix Update of cluster seeds (#88)
mafernandez-stratio Jan 9, 2017
352dd9d
[SERVICE DISCOVERY] Duplicated members removed from Hz members (#91)
mafernandez-stratio Jan 10, 2017
9cc6b3e
remove security references
Jan 18, 2017
dc6567f
Merge branch 'master' into master
mafernandez-stratio Jan 19, 2017
f0aa9ef
Merge branch 'master' of github.com:jjlopezm/crossdata
Feb 7, 2017
3e5c18d
Merge branch 'master' into master
mafernandez-stratio Sep 14, 2017
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
11 changes: 2 additions & 9 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -46,15 +46,8 @@ Spark Compatibility

| Crossdata Version | Spark Version |
|-------------------|:--------------|
| 1.7.X | 1.6.X |
| 1.6.X | 1.6.X |
| 1.5.X | 1.6.X |
| 1.4.X | 1.6.X |
| 1.3.X | 1.6.X |
| 1.2.X | 1.5.X |
| 1.1.X | 1.5.X |
| 1.0.X | 1.5.X |

| 1.3.X - 1.9.X | 1.6.X |
| 1.0.X - 1.2.X | 1.5.X |

=============
Documentation
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
package com.stratio.crossdata.common.serializers

import java.sql.Timestamp
import java.math.{BigDecimal => JBigDecimal}

import org.apache.spark.sql.Row
import org.apache.spark.sql.catalyst.expressions.{GenericRow, GenericRowWithSchema}
Expand Down Expand Up @@ -50,6 +51,7 @@ case class RowSerializer(providedSchema: StructType) extends Serializer[Row] {
case (_: DecimalType, v: JNumber) =>
v match {
case JInt(v) => Decimal(v.toString)
case JLong(v) => Decimal(v.toString)
case JDecimal(v) => Decimal(v)
case JDouble(v) => Decimal(v)
}
Expand Down Expand Up @@ -94,9 +96,16 @@ case class RowSerializer(providedSchema: StructType) extends Serializer[Row] {
case (FloatType, v: Float) => JDouble(v)
case (DoubleType, v: Double) => JDouble(v)
case (LongType, v: Long) => JInt(v)
case (_: DecimalType, v: JBigDecimal) =>
import scala.collection.JavaConverters._
JDecimal(v)
case (_: DecimalType, v: BigDecimal) => JDecimal(v)
case (_: DecimalType, v: Decimal) => JDecimal(v.toBigDecimal)
case (_: DecimalType, v: String) => JDecimal(BigDecimal(v))
case (_: DecimalType, v: Double) => JDecimal(BigDecimal(v))
case (_: DecimalType, v: Float) => JDecimal(BigDecimal(v))
case (_: DecimalType, v: Long) => JDecimal(BigDecimal(v))
case (_: DecimalType, v: Int) => JDecimal(BigDecimal(v))
case (ByteType, v: Byte) => JInt(v.toInt)
case (BinaryType, v: Array[Byte]) => JString(new String(v))
case (BooleanType, v: Boolean) => JBool(v)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -117,16 +117,25 @@ class RowSerializerSpec extends XDSerializationTest[Row] with CrossdataCommonSer

it should " be able to recover Double values when their schema type is misleading" in {

val schema = StructType(List(StructField("decimaldouble", DecimalType(10,1),true)))
val row = Row.fromSeq(Array(32.1))
val row = Row.fromSeq(
Array(32.0, 32.0F, BigDecimal(32.0), new java.math.BigDecimal(32.0), "32.0", 32L, 32)
)

val schema = StructType (
(0 until row.size) map { idx =>
StructField(s"decimaldouble$idx", DecimalType(10,1), true)
} toList
)

val formats = json4sJacksonFormats + new RowSerializer(schema)

val serialized = compact(render(Extraction.decompose(row)(formats)))
val extracted = parse(serialized, false).extract[Row](formats, implicitly[Manifest[Row]])

inside(extracted) {
case r: Row => r.get(0) shouldBe Decimal(32.1)
case r: Row => r.toSeq foreach { cellValue =>
cellValue shouldBe Decimal(32.0)
}
}

}
Expand Down
11 changes: 10 additions & 1 deletion core/pom.xml
Original file line number Diff line number Diff line change
Expand Up @@ -131,6 +131,16 @@
<resource>reference.conf</resource>
</transformer>
</transformers>
<relocations>
<relocation>
<pattern>org.apache.curator</pattern>
<shadedPattern>shaded.org.apache.curator</shadedPattern>
</relocation>
<relocation>
<pattern>org.apache.zookeeper</pattern>
<shadedPattern>shaded.org.apache.zookeeper</shadedPattern>
</relocation>
</relocations>
</configuration>
<executions>
<execution>
Expand All @@ -145,4 +155,3 @@
</build>

</project>

Original file line number Diff line number Diff line change
Expand Up @@ -18,12 +18,11 @@ package org.apache.spark.sql.crossdata.catalog.persistent
import java.sql.{Connection, DriverManager, PreparedStatement, ResultSet}

import com.stratio.crossdata.util.using
import org.apache.spark.sql.catalyst.{CatalystConf, TableIdentifier}
import org.apache.spark.sql.catalyst.CatalystConf
import org.apache.spark.sql.crossdata.CrossdataVersion
import org.apache.spark.sql.crossdata.catalog.{IndexIdentifierNormalized, TableIdentifierNormalized, StringNormalized, XDCatalog, persistent}
import org.apache.spark.sql.crossdata.catalog._

import scala.annotation.tailrec
import scala.util.Try

// TODO refactor SQL catalog implementations
object DerbyCatalog {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -78,8 +78,8 @@ abstract class PersistentCatalogWithCache(catalystConf: CatalystConf) extends XD
throw new UnsupportedOperationException(msg)
} else {
logInfo(s"Persisting view ${viewIdentifier.unquotedString}")
viewCache.put(viewIdentifier, plan)
persistViewMetadata(viewIdentifier, sqlText)
viewCache.put(viewIdentifier, plan)
}
}

Expand All @@ -91,8 +91,8 @@ abstract class PersistentCatalogWithCache(catalystConf: CatalystConf) extends XD
throw new UnsupportedOperationException(s"The table $tableIdentifier already exists")
} else {
logInfo(s"Persisting table ${crossdataTable.tableIdentifier.table}")
tableCache.put(tableIdentifier, table)
persistTableMetadata(crossdataTable.copy(schema = Option(table.schema)))
tableCache.put(tableIdentifier, table)
}
}

Expand All @@ -105,8 +105,8 @@ abstract class PersistentCatalogWithCache(catalystConf: CatalystConf) extends XD
throw new UnsupportedOperationException(s"The index $indexIdentifier already exists")
} else {
logInfo(s"Persisting index ${crossdataIndex.indexIdentifier}")
indexCache.put(crossdataIndex.tableIdentifier, crossdataIndex)
persistIndexMetadata(crossdataIndex)
indexCache.put(crossdataIndex.tableIdentifier, crossdataIndex)
}

}
Expand Down
51 changes: 25 additions & 26 deletions docker/catalog-config.sh
Original file line number Diff line number Diff line change
@@ -1,35 +1,34 @@
#!/bin/bash -xe

CATALOG_CLASS_PREFIX="org.apache.spark.sql.crossdata.catalog.persistent"

function jdbcCatalog() {
crossdata_core_catalog_jdbc_driver=${1:-3306}
crossdata_core_catalog_jdbc_url=$2
crossdata_core_catalog_jdbc_name=$3
crossdata_core_catalog_jdbc_user=$4
crossdata_core_catalog_jdbc_pass=$5
export crossdata_core_catalog_jdbc_driver=$1
export crossdata_core_catalog_jdbc_url=$2
export crossdata_core_catalog_jdbc_name=$3
export crossdata_core_catalog_jdbc_user=$4
export crossdata_core_catalog_jdbc_pass=$5
}

function zookeeperCatalog() {
crossdata_core_catalog_zookeeper_connectionString=${1:-localhost:2181}
crossdata_core_catalog_zookeeper_connectionTimeout=${2:-15000}
crossdata_core_catalog_zookeeper_sessionTimeout=${3:-60000}
crossdata_core_catalog_zookeeper_retryAttempts=${4:-5}
crossdata_core_catalog_zookeeper_retryInterval=${5:-10000}
export crossdata_core_catalog_zookeeper_connectionString=${1:-localhost:2181}
export crossdata_core_catalog_zookeeper_connectionTimeout=${2:-15000}
export crossdata_core_catalog_zookeeper_sessionTimeout=${3:-60000}
export crossdata_core_catalog_zookeeper_retryAttempts=${4:-5}
export crossdata_core_catalog_zookeeper_retryInterval=${5:-10000}
}


if [$# > 0 ]; then
if [ "x$1x" != "xx" ]; then
crossdata_core_catalog_class="\"org.apache.spark.sql.crossdata.catalog.persistent.$1Catalog\""
if [ "$1" == "MySQL" ]; then
jdbcCatalog "org.mariadb.jdbc.Driver" ${XD_CATALOG_HOST} ${XD_CATALOG_DB_NAME} ${XD_CATALOG_DB_USER} ${XD_CATALOG_DB_PASS}
fi
if [ "$1" == "PostgreSQL" ]; then
jdbcCatalog "org.postgresql.Driver" ${XD_CATALOG_HOST} ${XD_CATALOG_DB_NAME} ${XD_CATALOG_DB_USER} ${XD_CATALOG_DB_PASS}
fi
if [ "$1" == "Zookeeper" ]; then
zookeeperCatalog ${XD_CATALOG_ZOOKEEPER_CONNECTION_STRING} ${XD_CATALOG_ZOOKEEPER_CONNECTION_TIMEOUT} ${XD_CATALOG_ZOOKEEPER_SESSION_TIMEOUT} ${XD_CATALOG_ZOOKEEPER_RETRY_ATTEMPS} ${XD_CATALOG_ZOOKEEPER_RETRY_INTERVAL}
fi
if [ "x$2x" != "xx" ]; then
crossdata_core_catalog_prefix=${2:-crossdataCluster}
fi
export crossdata_core_catalog_class="${CATALOG_CLASS_PREFIX}.$1Catalog"
if [ "$1" == "MySQL" ]; then
jdbcCatalog "org.mariadb.jdbc.Driver" ${XD_CATALOG_HOST} ${XD_CATALOG_DB_NAME} ${XD_CATALOG_DB_USER} ${XD_CATALOG_DB_PASS}
fi
if [ "$1" == "PostgreSQL" ]; then
jdbcCatalog "org.postgresql.Driver" ${XD_CATALOG_HOST} ${XD_CATALOG_DB_NAME} ${XD_CATALOG_DB_USER} ${XD_CATALOG_DB_PASS}
fi
if [ "$1" == "Zookeeper" ]; then
zookeeperCatalog ${XD_CATALOG_ZOOKEEPER_CONNECTION_STRING} ${XD_CATALOG_ZOOKEEPER_CONNECTION_TIMEOUT} ${XD_CATALOG_ZOOKEEPER_SESSION_TIMEOUT} ${XD_CATALOG_ZOOKEEPER_RETRY_ATTEMPS} ${XD_CATALOG_ZOOKEEPER_RETRY_INTERVAL}
fi
if [ "x$2x" != "xx" ]; then
export crossdata_core_catalog_prefix=${2:-crossdataCluster}
fi
fi
53 changes: 16 additions & 37 deletions docker/crossdata-config.sh
Original file line number Diff line number Diff line change
Expand Up @@ -21,12 +21,12 @@ function setDriverConfig() {
}

function standaloneConfig() {
AKKAIP="akka.tcp://CrossdataServerCluster@${DOCKER_HOST}:13420"
export AKKAIP="akka.tcp://CrossdataServerCluster@${DOCKER_HOST}:13420"
#TODO: Test using CROSSDATA_SERVER_AKKA_CLUSTER_SEED_NODES instead of XD_SEED
if [ -z "$XD_SEED" ]; then
export CROSSDATA_SERVER_AKKA_CLUSTER_SEED_NODES=${AKKAIP}
else
SEED_IP="akka.tcp://CrossdataServerCluster@${XD_SEED}:13420"
export SEED_IP="akka.tcp://CrossdataServerCluster@${XD_SEED}:13420"
export CROSSDATA_SERVER_AKKA_CLUSTER_SEED_NODES=${SEED_IP},${AKKAIP}
# TODO: Study whether it is worth making hazelcast nodes available when auto discovery is disabled.
# If so, find a better way of editing hazelcast.xml. The method commented below is as flimsy as it gets.
Expand All @@ -43,9 +43,9 @@ function standaloneConfig() {
export CROSSDATA_SERVER_AKKA_REMOTE_NETTY_TCP_BIND_HOSTNAME=${DOCKER_HOST}

if [ -z "$XD_SEED" ]; then
crossdata_driver_config_cluster_hosts="\[${DOCKER_HOST}:13420\]"
export crossdata_driver_config_cluster_hosts="\[${DOCKER_HOST}:13420\]"
else
crossdata_driver_config_cluster_hosts="\[${DOCKER_HOST}:13420, ${XD_SEED}\]"
export crossdata_driver_config_cluster_hosts="\[${DOCKER_HOST}:13420, ${XD_SEED}\]"
fi
}

Expand All @@ -54,52 +54,31 @@ function marathonConfig() {
####################################################
#Memory
####################################################
RAM_AVAIL=$(echo $MARATHON_APP_RESOURCE_MEM | cut -d "." -f1)
CROSSDATA_JAVA_OPT="-Xmx${RAM_AVAIL}m -Xms${RAM_AVAIL}m"
export RAM_AVAIL=$(echo $MARATHON_APP_RESOURCE_MEM | cut -d "." -f1)
export CROSSDATA_JAVA_OPT="-Xmx${RAM_AVAIL}m -Xms${RAM_AVAIL}m"
sed -i "s|# CROSSDATA_LIB|#CROSSDATA_JAVA_OPTS\nCROSSDATA_JAVA_OPTS=\"${CROSSDATA_JAVA_OPT}\"\n# CROSSDATA_LIB|" /etc/sds/crossdata/server/crossdata-env.sh

#Spark UI port
export CROSSDATA_SERVER_CONFIG_SPARK_UI_PORT=${PORT_4040}


########################################################################################################
#If XD_EXTERNAL_IP and MARATHON_APP_LABEL_HAPROXY_1_PORT are not specified assume we are working in HTTP mode
#Scenary: HAProxy exposing Akka http port, and creating an internal cluster using netty and autodiscovery through Zookeeper
# Working in HTTP mode
# Scenario: HAProxy exposing the Akka HTTP port, and creating an internal cluster using netty and
# autodiscovery through Zookeeper
########################################################################################################
if [ -z ${XD_EXTERNAL_IP} ] && [ -z ${MARATHON_APP_LABEL_HAPROXY_1_PORT} ]; then
setCrossdataDir ${HOST} ${PORT_13420}
setCrossdataBindHost ${HOST} ${PORT_13420}
setHazelcastConfig ${HOST} ${PORT_5701}
setDriverConfig ${HOST} ${PORT_13420}
# CROSSDATA_SERVER_CONFIG_HTTP_SERVER_PORT is set with the port provided by Marathon-LB
export CROSSDATA_SERVER_CONFIG_HTTP_SERVER_PORT=$PORT_13422
else
#Scenario: HAProxy exposing the Akka netty port with the external IP. Supported only for one instance of Crossdata
if [ -z ${XD_EXTERNAL_IP} ] || [ -z ${MARATHON_APP_LABEL_HAPROXY_1_PORT} ]; then
echo "ERROR: Env var XD_EXTERNAL_IP and label HAPROXY_1_PORT must be provided together using Marathon&Haproxy in TCP mode" 1>&2
exit 1 # terminate and indicate error
else
#Hostname and port of haproxy
setCrossdataDir ${XD_EXTERNAL_IP} ${MARATHON_APP_LABEL_HAPROXY_1_PORT}
#Bind address for local
setCrossdataBindHost ${DOCKER_HOST} ${PORT_13420}
#Driver
setDriverConfig ${XD_EXTERNAL_IP} ${MARATHON_APP_LABEL_HAPROXY_1_PORT}
fi
# When using ClusterClient External IP, the hosts-files get updated in order to keep a consistent
# binding address in AKKA.
NAMEADDR="$(hostname -i)"
if [ -n "$HAPROXY_SERVER_INTERNAL_ADDRESS" ]; then
NAMEADDR=$HAPROXY_SERVER_INTERNAL_ADDRESS
fi
echo -e "$NAMEADDR\t$XD_EXTERNAL_IP" >> /etc/hosts
fi
setCrossdataDir ${HOST} ${PORT_13420}
setCrossdataBindHost ${HOST} ${PORT_13420}
setHazelcastConfig ${HOST} ${PORT_5701}
setDriverConfig ${HOST} ${PORT_13420}
# CROSSDATA_SERVER_CONFIG_HTTP_SERVER_PORT is set with the port provided by Marathon-LB
export CROSSDATA_SERVER_CONFIG_HTTP_SERVER_PORT=$PORT_13422

}

####################################################
## Main
####################################################

if [ -z ${MARATHON_APP_ID} ]; then
standaloneConfig
else
Expand Down
9 changes: 5 additions & 4 deletions docker/docker-entrypoint.sh
Original file line number Diff line number Diff line change
Expand Up @@ -2,15 +2,16 @@

DOCKER_HOST="hostname -f"
if [[ "$(hostname -f)" =~ \. ]]; then
DOCKER_HOST="$(hostname -f)"
export DOCKER_HOST="$(hostname -f)"
else
DOCKER_HOST="$(hostname -i)"
export DOCKER_HOST="$(hostname -i)"
fi

####################################################
## XD Catalog
####################################################
source catalog-config.sh $XD_CATALOG $XD_CATALOG_PREFIX
CROSSDATA_CATALOG=${XD_CATALOG:-Derby}
source catalog-config.sh $CROSSDATA_CATALOG $XD_CATALOG_PREFIX


####################################################
Expand All @@ -23,7 +24,7 @@ source catalog-config.sh $XD_CATALOG $XD_CATALOG_PREFIX
####################################################
## Crossdata Config
####################################################
source crossdata-config.sh
source crossdata-config.sh $1
case "$SERVER_MODE" in
"shell") # This mode will launch a crossdata shell instead of a crossdata server
if [ "$SHELL_SERVERADDR" -a "$SHELL_SERVERPORT" ]; then
Expand Down
24 changes: 12 additions & 12 deletions docker/shell-config.sh
Original file line number Diff line number Diff line change
@@ -1,30 +1,30 @@
#!/bin/bash -xe

CROSSDATA_DRIVER_CONFIG_HTTP_SERVER_HOST=$SHELL_SERVERADDR
CROSSDATA_DRIVER_CONFIG_HTTP_SERVER_PORT=$SHELL_SERVERPORT
export CROSSDATA_DRIVER_CONFIG_HTTP_SERVER_HOST=$SHELL_SERVERADDR
export CROSSDATA_DRIVER_CONFIG_HTTP_SERVER_PORT=$SHELL_SERVERPORT

# Prepare options string from docker environment settings

OPTIONS=""
export OPTIONS=""

if [ -n "$SHELL_USER" ]; then
OPTIONS="$OPTIONS --user $SHELL_USER"
export OPTIONS="$OPTIONS --user $SHELL_USER"
fi;

if [ "$CONNECTION_TIMEOUT" ]; then
OPTIONS="$OPTIONS --timeout $CONNECTION_TIMEOUT"
export OPTIONS="$OPTIONS --timeout $CONNECTION_TIMEOUT"
fi;

OPTIONS="$OPTIONS --http"
export OPTIONS="$OPTIONS --http"

if [ "$SHELL_MODE" == "async" ]; then
OPTIONS="$OPTIONS --async"
export OPTIONS="$OPTIONS --async"
fi;

if [ "$SHELL_CERTIFICATE_PATH" -a "$SHELL_CERTIFICATE_PASSWORD" -a "$SHELL_TRUSTSTORE_PATH" -a "$SHELL_TRUSTSTORE_PASSWORD" ]; then
CROSSDATA_DRIVER_AKKA_HTTP_SSL_ENABLE="true"
CROSSDATA_DRIVER_AKKA_HTTP_SSL_KEYSTORE=$SHELL_CERTIFICATE_PATH
CROSSDATA_DRIVER_AKKA_HTTP_SSL_KEYSTORE_PASSWORD=$SHELL_CERTIFICATE_PASSWORD
CROSSDATA_DRIVER_AKKA_HTTP_SSL_TRUSTSTORE=$SHELL_TRUSTSTORE_PATH
CROSSDATA_DRIVER_AKKA_HTTP_SSL_TRUSTSTORE_PASSWORD=$SHELL_TRUSTSTORE_PASSWORD
export CROSSDATA_DRIVER_AKKA_HTTP_SSL_ENABLE="true"
export CROSSDATA_DRIVER_AKKA_HTTP_SSL_KEYSTORE=$SHELL_CERTIFICATE_PATH
export CROSSDATA_DRIVER_AKKA_HTTP_SSL_KEYSTORE_PASSWORD=$SHELL_CERTIFICATE_PASSWORD
export CROSSDATA_DRIVER_AKKA_HTTP_SSL_TRUSTSTORE=$SHELL_TRUSTSTORE_PATH
export CROSSDATA_DRIVER_AKKA_HTTP_SSL_TRUSTSTORE_PASSWORD=$SHELL_TRUSTSTORE_PASSWORD
fi
8 changes: 4 additions & 4 deletions docker/streaming-config.sh
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
#!/bin/bash -xe

sed -i "s|//crossdata-core.streaming*|crossdata-core.streaming|" /etc/sds/crossdata/server/core-application.conf
crossdata_core_catalog_zookeeper_connectionString=$1
crossdata_core_streaming_receiver_zk_connection=$2
crossdata_core_streaming_receiver_kafka_connection=$3
crossdata_core_streaming_spark_master=$4
export crossdata_core_catalog_zookeeper_connectionString=$1
export crossdata_core_streaming_receiver_zk_connection=$2
export crossdata_core_streaming_receiver_kafka_connection=$3
export crossdata_core_streaming_spark_master=$4
Binary file added events.csv/._SUCCESS.crc
Binary file not shown.
Binary file added events.csv/.part-00000.crc
Binary file not shown.
Empty file added events.csv/_SUCCESS
Empty file.
11 changes: 11 additions & 0 deletions events.csv/part-00000
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
ident,money
5,15.2
1,11.2
8,18.2
0,10.2
2,12.2
4,14.2
7,17.2
6,16.2
9,19.2
3,13.2
Loading