-
Notifications
You must be signed in to change notification settings - Fork 35
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Support getting the template config file according to the datasource (#170)
- Loading branch information
Showing
15 changed files
with
1,117 additions
and
37 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
67 changes: 67 additions & 0 deletions
67
exchange-common/src/main/java/com/vesoft/exchange/common/FileMigrate.java
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,67 @@ | ||
/* Copyright (c) 2023 vesoft inc. All rights reserved. | ||
* | ||
* This source code is licensed under Apache 2.0 License. | ||
*/ | ||
|
||
package com.vesoft.exchange.common; | ||
|
||
import java.io.BufferedReader; | ||
import java.io.BufferedWriter; | ||
import java.io.File; | ||
import java.io.FileWriter; | ||
import java.io.IOException; | ||
import java.io.InputStream; | ||
import java.io.InputStreamReader; | ||
|
||
public class FileMigrate {

    /**
     * Migrate a template config file from the classpath to a target path.
     *
     * <p>Reads the resource line by line and rewrites it to {@code path},
     * normalizing line endings to {@code \n}. Any existing file at
     * {@code path} is deleted first. If the resource cannot be found on the
     * classpath, an error is printed and the process exits with status -1
     * (this class backs a CLI tool, so exiting is the intended failure mode).
     *
     * @param sourceFile template config file name, resolved via the classpath
     * @param path       target path to save the config info
     */
    public void saveConfig(String sourceFile, String path) {
        InputStream inputStream =
                this.getClass().getClassLoader().getResourceAsStream(sourceFile);
        if (inputStream == null) {
            // report what was missing before bailing out; a silent exit(-1)
            // is impossible to diagnose for users
            System.out.println("Template conf file not found in classpath: " + sourceFile);
            System.exit(-1);
        }
        File file = new File(path);
        // remove a stale target first; surface the (rare) failure instead of
        // silently ignoring delete()'s return value
        if (file.exists() && !file.delete()) {
            System.out.println("Failed to delete the existing file: " + path);
        }
        // try-with-resources closes reader (and the underlying inputStream)
        // and bufferedWriter on every path, fixing the leak when the writer
        // construction threw before the old finally block could run
        try (BufferedReader reader =
                     new BufferedReader(new InputStreamReader(inputStream));
             BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(path))) {
            String line;
            while ((line = reader.readLine()) != null) {
                bufferedWriter.write(line);
                bufferedWriter.write("\n");
            }
        } catch (IOException e) {
            System.out.println("Failed to migrate the template conf file:" + e.getMessage());
            e.printStackTrace();
        }
    }
}
100 changes: 100 additions & 0 deletions
100
exchange-common/src/main/resources/config_template/csv.conf
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,100 @@ | ||
# Use the command to submit the exchange job: | ||
|
||
# spark-submit \ | ||
# --master "spark://master_ip:7077" \ | ||
# --driver-memory=2G --executor-memory=30G \ | ||
# --num-executors=3 --total-executor-cores=60 \ | ||
# --class com.vesoft.nebula.exchange.Exchange \ | ||
# nebula-exchange_spark_2.4-3.0-SNAPSHOT.jar -c csv.conf | ||
|
||
{ | ||
# Spark config | ||
spark: { | ||
app: { | ||
name: NebulaGraph Exchange | ||
} | ||
} | ||
|
||
# Nebula Graph config | ||
nebula: { | ||
address:{ | ||
graph: ["127.0.0.1:9669","127.0.0.2:9669"] | ||
# if your NebulaGraph server is in a virtual network like k8s, please configure the leader address of meta. | ||
# use `SHOW meta leader` to see your meta leader's address | ||
meta: ["127.0.0.1:9559"] | ||
} | ||
user: root | ||
pswd: nebula | ||
space: test | ||
|
||
# nebula client connection parameters | ||
connection { | ||
# socket connect & execute timeout, unit: millisecond | ||
timeout: 30000 | ||
} | ||
|
||
error: { | ||
# max number of failures, if the number of failures is bigger than max, then exit the application. | ||
max: 32 | ||
# failed data will be recorded in output path, format with ngql | ||
output: "hdfs://127.0.0.1:9000/tmp/errors" | ||
} | ||
|
||
# use google's RateLimiter to limit the requests send to NebulaGraph | ||
rate: { | ||
# the stable throughput of RateLimiter | ||
limit: 1024 | ||
# Acquires a permit from RateLimiter, unit: MILLISECONDS | ||
# if it can't be obtained within the specified timeout, then give up the request. | ||
timeout: 1000 | ||
} | ||
} | ||
|
||
# Processing tags | ||
tags: [ | ||
{ | ||
name: tag-name | ||
type: { | ||
source: csv | ||
sink: client | ||
} | ||
# if your file is not in hdfs, config "file:///path/test.csv" | ||
path: "hdfs://ip:port/path/test.csv" | ||
# if your csv file has no header, then use _c0,_c1,_c2,.. to indicate fields | ||
fields: [csv-field-1, csv-field-2, csv-field-3] | ||
nebula.fields: [nebula-field-1, nebula-field-2, nebula-field-3] | ||
vertex: { | ||
field: csv-field-0 | ||
} | ||
separator: "," | ||
header: true | ||
batch: 2000 | ||
partition: 60 | ||
} | ||
] | ||
|
||
# process edges | ||
edges: [ | ||
{ | ||
name: edge-name | ||
type: { | ||
source: csv | ||
sink: client | ||
} | ||
path: "hdfs://ip:port/path/test.csv" | ||
fields: [csv-field-2, csv-field-3, csv-field-4] | ||
nebula.fields: [nebula-field-1, nebula-field-2, nebula-field-3] | ||
source: { | ||
field: csv-field-0 | ||
} | ||
target: { | ||
field: csv-field-1 | ||
} | ||
#ranking: csv-field-2 | ||
separator: "," | ||
header: true | ||
batch: 2000 | ||
partition: 60 | ||
} | ||
] | ||
} |
Oops, something went wrong.