diff --git a/.github/workflows/scala.yml b/.github/workflows/scala.yml old mode 100644 new mode 100755 diff --git a/.gitignore b/.gitignore old mode 100644 new mode 100755 index 3dddfa35..cb105483 --- a/.gitignore +++ b/.gitignore @@ -28,3 +28,7 @@ temp/ resources/test/ demo/ScalaDemo/.DS_Store dependency-reduced-pom.xml +test/ +.vscode/ +.metals/ +.bloop/ \ No newline at end of file diff --git a/.travis.yml b/.travis.yml old mode 100644 new mode 100755 diff --git a/LICENSE b/LICENSE old mode 100644 new mode 100755 diff --git a/README.md b/README.md old mode 100644 new mode 100755 diff --git a/bin/run.sh b/bin/run.sh old mode 100644 new mode 100755 diff --git a/codecov.yml b/codecov.yml old mode 100644 new mode 100755 diff --git a/conf/test.conf b/conf/test.conf old mode 100644 new mode 100755 diff --git a/demo/DataLoader/pom.xml b/demo/DataLoader/pom.xml old mode 100644 new mode 100755 index c49916d3..ac87c1ac --- a/demo/DataLoader/pom.xml +++ b/demo/DataLoader/pom.xml @@ -161,21 +161,6 @@ scala-logging_${scala.binary.version} - - org.slf4j - slf4j-api - - - - ch.qos.logback - logback-core - - - - ch.qos.logback - logback-classic - - com.typesafe.akka akka-actor_${scala.binary.version} @@ -216,13 +201,6 @@ ganymed-ssh2 - - - org.slf4j - slf4j-log4j12 - - - org.apache.spark spark-core_${scala.binary.version} diff --git a/demo/DataLoader/src/main/java/com/hackerforfuture/codeprototypes/dataloader/DeveloperApi.java b/demo/DataLoader/src/main/java/com/hackerforfuture/codeprototypes/dataloader/DeveloperApi.java old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/java/com/hackerforfuture/codeprototypes/dataloader/common/LoggingSignalHandler.java b/demo/DataLoader/src/main/java/com/hackerforfuture/codeprototypes/dataloader/common/LoggingSignalHandler.java old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/resources/log4j2.properties b/demo/DataLoader/src/main/resources/log4j2.properties new file mode 100755 index 00000000..d4118a96 --- /dev/null +++ b/demo/DataLoader/src/main/resources/log4j2.properties @@ -0,0 +1,12 @@ +# Console logger +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n +appender.console.filter.threshold.type=ThresholdFilter +appender.console.filter.threshold.level=DEBUG + +# configure logger +rootLogger=INFO,STDOUT + + diff --git a/demo/DataLoader/src/main/resources/logback.xml b/demo/DataLoader/src/main/resources/logback.xml deleted file mode 100644 index 1fef4563..00000000 --- a/demo/DataLoader/src/main/resources/logback.xml +++ /dev/null @@ -1,103 +0,0 @@ - - - - - - - - - - - - - %d{HH:mm:ss.SSS} |-%-5level in%replace(%caller{1}){'\t|Caller.{1}0|\r\n|at\s', ''} - %msg%n - - - - - - WARN - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/DataLoader.warn.log - ${maxHistory} - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - - INFO - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/DataLoader.info.log - ${maxHistory} - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - DEBUG - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/DataLoader.debug.log - ${maxHistory} - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - - ERROR - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/DataLoader.error.log - ${maxHistory} - - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - 
- - - - - - - - - - \ No newline at end of file diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/DataLoader.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/DataLoader.scala old mode 100644 new mode 100755 index 436b9233..aad6be7d --- a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/DataLoader.scala +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/DataLoader.scala @@ -31,8 +31,9 @@ object DataLoader extends LogSupport { } } catch { case e: ReflectiveOperationException => - log.warn("Failed to register optional signal handler that logs a message when the process is terminated " + - s"by a signal. Reason for registration failure is: $e", e) + logger.warn("Failed to register optional signal handler that logs a message " + + "when the process is terminated by a signal. " + + s"Reason for registration failure is: $e", e) } // attach shutdown handler to catch terminating signals as well as normal termination @@ -40,11 +41,12 @@ object DataLoader extends LogSupport { override def run(): Unit = DataLoaderServer.shutdown() }) + logger.info("Starting DataLoaderServer...") DataLoaderServer.startup() DataLoaderServer.awaitShutdown() } catch { case NonFatal(e) => - log.error("Failed to run DataLoader", e) + logger.error("Failed to run DataLoader", e) System.exit(1) } System.exit(0) diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/EventHandler.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/EventHandler.scala new file mode 100755 index 00000000..68a0db15 --- /dev/null +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/EventHandler.scala @@ -0,0 +1,19 @@ +package com.hackerforfuture.codeprototypes.dataloader.clusters + +/** + * Author: biyu.huang + * Date: 2023/11/2 11:30 + * Description: + */ +trait EventHandler { + + def handleHeartbeatEvent(): Unit + + def handleRegisterEvent(): Unit + + def handleStopEvent(): Unit + + def handleRegisterTimeout(): Unit + + def handelCheckHeartbeatEvent(): Unit +} diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/Message.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/Message.scala new file mode 100755 index 00000000..dd89e83a --- /dev/null +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/Message.scala @@ -0,0 +1,25 @@ +package com.hackerforfuture.codeprototypes.dataloader.clusters + +import akka.actor.ActorPath + +/** + * Author: biyu.huang + * Date: 2023/11/1 19:06 + * Description: + */ +sealed trait Message + +case object Heartbeat extends Message + +case object CheckHeartbeat extends Message + +case object Register extends Message + +case object RegisterTimeout extends Message + +// Custom message type carrying the sender's unique ID +case class CustomMessage(id: ActorPath, content: Any) extends Message + +case class SlaveActorTerminated(id: ActorPath, reason: String) extends Message + +case object StopActor extends Message diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/Master.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/Master.scala old mode 100644 new mode 100755 diff --git 
a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterActor.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterActor.scala new file mode 100755 index 00000000..042d9677 --- /dev/null +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterActor.scala @@ -0,0 +1,105 @@ +package com.hackerforfuture.codeprototypes.dataloader.clusters.master + +import akka.actor.{Actor, ActorLogging, ActorPath, Props, Terminated} +import com.hackerforfuture.codeprototypes.dataloader.clusters._ +import com.hackerforfuture.codeprototypes.dataloader.common.Using + +import scala.collection.mutable +import scala.concurrent.duration._ + +/** + * Author: biyu.huang + * Date: 2023/11/1 17:15 + * Description: + */ +object MasterActor { + def props: Props = Props[MasterActor] +} + +class MasterActor extends Actor with ActorLogging with EventHandler with Using { + // Maps each registered SlaveActor's unique ID to the timestamp of its last received heartbeat + private final val registeredSlaves = mutable.HashMap.empty[ActorPath, Long] + + // Heartbeat timeout + private final val heartbeatTimeout: FiniteDuration = 10.seconds + + private final val initialTimeout: FiniteDuration = 5.seconds + + // Set up the periodic heartbeat-check timer on startup + override def preStart(): Unit = { + // context.setReceiveTimeout(initialTimeout) + + import context.dispatcher + context + .system + .scheduler + .schedule(initialTimeout, heartbeatTimeout, self, CheckHeartbeat) + } + + override def handleHeartbeatEvent(): Unit = { + syncableBlock { + val senderId: ActorPath = sender().path + val ts: Long = System.currentTimeMillis() + if (registeredSlaves.contains(senderId)) { + registeredSlaves.put(senderId, ts) + log.info(s"[%s] Received heartbeat from slave: ${senderId.name}".format(ts)) + } else { + registeredSlaves.put(senderId, ts) + log.info(s"[%s] Registered slave: ${senderId.name}".format(ts)) + context.watch(sender()) + } + } + } + + override def handleRegisterEvent(): Unit = handleHeartbeatEvent() + + override def handelCheckHeartbeatEvent(): Unit = { + val currentTime: Long = System.currentTimeMillis() + val timedOutSlaves = registeredSlaves.filter { + case (_, lastHeartbeatTime) => + val elapsed = currentTime - lastHeartbeatTime + elapsed > (heartbeatTimeout.toMillis * 10) + } + + timedOutSlaves.keys.foreach { id => + registeredSlaves -= id + log.warning(s"Slave $id timed out and unregistered.") + self ! SlaveActorTerminated(id, "No heartbeat received") + } + } + + override def handleStopEvent(): Unit = { + log.info("received StopActor message, shutting down ...") + registeredSlaves.foreach { + case (id, _) => + log.info("try to stop %s".format(id.name)) + context.system.actorSelection(id) ! StopActor + } + registeredSlaves.clear() + context.stop(self) + } + + def receive: Receive = { + case Heartbeat => handleHeartbeatEvent() + case Register => handleRegisterEvent() + case CheckHeartbeat => handelCheckHeartbeatEvent() + case StopActor => handleStopEvent() + case CustomMessage(id, content) => + if (registeredSlaves.contains(id)) { + log.info(s"Processing message from slave $id: $content") + // Message-handling logic goes here + } else { + log.warning(s"Received message from unregistered slave: $id") + context.system.actorSelection(id) ! RegisterTimeout + } + case SlaveActorTerminated(id, reason) => + log.warning(s"Slave $id terminated and unregistered. 
Reason: $reason") + registeredSlaves.remove(id) + case Terminated(slave) => + val slaveId = slave.path + registeredSlaves.remove(slaveId) + log.warning(s"Slave ${slaveId.name} terminated.") + } + + override def handleRegisterTimeout(): Unit = {} +} diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterAnt.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterAnt.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/StateManager.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/StateManager.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/WorkerDetailInfo.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/WorkerDetailInfo.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/worker/SlaveActor.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/worker/SlaveActor.scala new file mode 100755 index 00000000..f17c4081 --- /dev/null +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/worker/SlaveActor.scala @@ -0,0 +1,70 @@ +package com.hackerforfuture.codeprototypes.dataloader.clusters.worker + +import akka.actor.{Actor, ActorLogging, ActorPath, Props, Terminated} +import com.hackerforfuture.codeprototypes.dataloader.clusters._ + +import scala.concurrent.duration.DurationInt + +/** + * Author: biyu.huang + * Date: 2023/11/1 17:16 + * Description: + */ + +object SlaveActor { + def props(master: akka.actor.ActorRef): Props = Props(new SlaveActor(master)) +} + +class SlaveActor(master: akka.actor.ActorRef) extends Actor with ActorLogging with EventHandler { + // Schedule periodic heartbeats and send the registration message on startup + override def preStart(): Unit = { + import context.dispatcher + context + .system + .scheduler + .schedule(5.seconds, 10.seconds, self, Heartbeat) + self ! Register + } + + // Unique identifier for this actor (its ActorPath) + private final val uniqueID: ActorPath = self.path + + def receive: Receive = { + // Send a heartbeat carrying the unique ID to the master + case Heartbeat => handleHeartbeatEvent() + // Send a registration message carrying the unique ID to the master + case Register => handleRegisterEvent() + // Registration timed out; re-send the registration message + case RegisterTimeout => handleRegisterEvent() + case StopActor => handleStopEvent() + case Terminated(actorRef) => + log.warning("Master %s terminated. %s is shutting down ...".format( + actorRef.path.name, self.path.name)) + self ! StopActor + case message => + // Wrap any other message in a CustomMessage carrying the unique ID and forward it to the master + val customMessage = CustomMessage(uniqueID, message) + master ! customMessage + } + + override def handleRegisterEvent(): Unit = { + master ! Register + log.info("try to register with master.") + context.watch(master) + } + + override def handleHeartbeatEvent(): Unit = { + master ! 
Heartbeat + log.info("Sent heartbeat to master.") + } + + override def handleStopEvent(): Unit = { + log.info("received StopActor message, shutting down ...") + context.stop(self) + } + + override def handleRegisterTimeout(): Unit = {} + + override def handelCheckHeartbeatEvent(): Unit = {} +} + diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/Configure.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/Configure.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/FileContextV1.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/FileContextV1.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/LogSupport.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/LogSupport.scala old mode 100644 new mode 100755 index e5bdc3ed..8743b0f8 --- a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/LogSupport.scala +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/LogSupport.scala @@ -8,11 +8,9 @@ package com.hackerforfuture.codeprototypes.dataloader.common -import org.slf4j.{Logger, LoggerFactory} +import com.typesafe.scalalogging.LazyLogging /** - * Created by wallace on 2018/1/20. - */ -trait LogSupport { - protected val log: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$")) -} + * Created by wallace on 2018/1/20. + */ +trait LogSupport extends LazyLogging diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/PersistMode.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/PersistMode.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/Using.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/Using.scala old mode 100644 new mode 100755 index 5f52d28d..2d5d7421 --- a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/Using.scala +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/Using.scala @@ -8,18 +8,21 @@ package com.hackerforfuture.codeprototypes.dataloader.common +import java.util.concurrent.locks.ReentrantLock +import scala.language.reflectiveCalls import scala.util.control.NonFatal /** * Created by wallace on 2018/1/20. 
*/ trait Using extends LogSupport { + private val lock: ReentrantLock = new ReentrantLock() protected def usingWithErrMsg[A <: {def close() : Unit}, B](param: A, errMsg: String)(f: A => B): Unit = { try { f(param) } catch { case NonFatal(e) => - log.error(s"$errMsg: ", e) + logger.error(s"$errMsg: ", e) } finally { param.close() } @@ -32,4 +35,13 @@ trait Using extends LogSupport { param.close() } } + + protected def syncableBlock[R](body: => R): R = { + lock.lock() + try { + body + } finally { + lock.unlock() + } + } } diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/config/DataLoaderConfig.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/config/DataLoaderConfig.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/persist/PersistWriter.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/persist/PersistWriter.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/source/SourceReader.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/source/SourceReader.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/state/State.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/common/state/State.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/metadata/EventType.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/metadata/EventType.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/metadata/message/LocalMessage.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/metadata/message/LocalMessage.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/metadata/message/RemoteMessage.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/metadata/message/RemoteMessage.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/persist/FtpPersistWriter.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/persist/FtpPersistWriter.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/AntScheduler.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/AntScheduler.scala old mode 100644 new mode 100755 index 87311dea..f131278e --- a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/AntScheduler.scala +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/AntScheduler.scala @@ -63,7 +63,7 @@ class AntScheduler(threadNum: Int, * Initialize this scheduler so it is ready to accept scheduling of tasks */ override def startup(): Unit = { - log.debug("Initializing task scheduler.") + logger.debug("Initializing task scheduler.") this synchronized { if (isStarted) throw new IllegalStateException("This scheduler has already been started!") executor = Some(new 
ScheduledThreadPoolExecutor(threadNum)) @@ -81,7 +81,7 @@ class AntScheduler(threadNum: Int, * This includes tasks scheduled with a delayed execution. */ override def shutdown(): Unit = { - log.debug("Shutting down task scheduler.") + logger.debug("Shutting down task scheduler.") // We use the local variable to avoid NullPointerException if another thread shuts down scheduler at same time. val cachedExecutor: Option[ScheduledThreadPoolExecutor] = this.executor if (cachedExecutor.isDefined) { @@ -110,19 +110,19 @@ class AntScheduler(threadNum: Int, * @param unit The unit for the preceding times. */ override def schedule(name: String, fun: () => Unit, delay: Long, period: Long, unit: TimeUnit): Unit = { - log.debug("Scheduling task %s with initial delay %d ms and period %d ms." + logger.debug("Scheduling task %s with initial delay %d ms and period %d ms." .format(name, TimeUnit.MILLISECONDS.convert(delay, unit), TimeUnit.MILLISECONDS.convert(period, unit))) this synchronized { ensureRunning() val runnable = new Runnable { override def run(): Unit = { try { - log.debug("Beginning execution of scheduled task '%s'.".format(name)) + logger.debug("Beginning execution of scheduled task '%s'.".format(name)) fun() } catch { - case t: Throwable => log.error("Uncaught exception in scheduled task '" + name + "'", t) + case t: Throwable => logger.error("Uncaught exception in scheduled task '" + name + "'", t) } finally { - log.debug("Completed execution of scheduled task '%s'.".format(name)) + logger.debug("Completed execution of scheduled task '%s'.".format(name)) } } } diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/TaskGenerator.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/TaskGenerator.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/TaskScheduler.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/schedule/TaskScheduler.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/DataLoaderServer.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/DataLoaderServer.scala old mode 100644 new mode 100755 index 32234905..1eb8a41c --- a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/DataLoaderServer.scala +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/DataLoaderServer.scala @@ -47,7 +47,7 @@ object DataLoaderServer extends LogSupport { } } catch { case NonFatal(e) => - log.error("Failed to execute service thread", e) + logger.error("Failed to execute service thread", e) isStartingUp.set(false) shutdown() throw e @@ -65,11 +65,11 @@ object DataLoaderServer extends LogSupport { startupComplete.set(false) isShuttingDown.set(false) shutdownLatch.countDown() + logger.info("Succeed to shutdown DataLoader.") } - } catch { case NonFatal(e) => - log.error("Fatal error during DataLoaderServer shutdown.", e) + logger.error("Fatal error during DataLoaderServer shutdown.", e) isShuttingDown.set(false) throw e } diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/download/DataDownLoadProcess.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/download/DataDownLoadProcess.scala old mode 100644 new mode 100755 diff --git 
a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/download/DataDownLoadService.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/download/DataDownLoadService.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/DataScanConfigBase.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/DataScanConfigBase.scala old mode 100644 new mode 100755 index b792c7a2..9389b5a2 --- a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/DataScanConfigBase.scala +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/DataScanConfigBase.scala @@ -29,10 +29,10 @@ trait DataScanConfigBase extends LogSupport { val resourceFile = file val configFile = new File(makePath(file)) if (configFile.exists()) { - log.debug(s"Loading file [${configFile.getPath}] and resource [$resourceFile]") + logger.debug(s"Loading file [${configFile.getPath}] and resource [$resourceFile]") ConfigFactory.parseFile(configFile).withFallback(ConfigFactory.load(resourceFile)) } else { - log.debug(s"Loading resource [$resourceFile]") + logger.debug(s"Loading resource [$resourceFile]") ConfigFactory.load(resourceFile) } } @@ -62,7 +62,7 @@ trait DataScanConfigBase extends LogSupport { (fileName, lastModified) }.toMap } else { - log.warn("Does any upload config file exist?") + logger.warn("Does any upload config file exist?") Map.empty } } @@ -75,7 +75,7 @@ trait DataScanConfigBase extends LogSupport { val newLastModified: Long = file.lastModified() if (newLastModified == oldLastModified) { - log.debug(s"$fileName never changed.") + logger.debug(s"$fileName never changed.") null } else { lastModifiedMap.updated(fileName, newLastModified) diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/DataScanService.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/DataScanService.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/UploadDataConfig.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/dynamicscan/UploadDataConfig.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/uniqueid/UniqueID.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/uniqueid/UniqueID.scala old mode 100644 new mode 100755 index a7959011..978e46e0 --- a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/uniqueid/UniqueID.scala +++ b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/uniqueid/UniqueID.scala @@ -11,8 +11,8 @@ package com.hackerforfuture.codeprototypes.dataloader.server.uniqueid import java.util.concurrent.atomic.{AtomicBoolean, AtomicInteger, AtomicLong} /** - * Created by wallace on 2019/4/4. - */ + * Created by wallace on 2019/4/4. 
+ */ class UniqueID { // SnowFlake - 64bit @@ -32,7 +32,7 @@ class UniqueID { def genUniqueID(): Option[Long] = { val timestamp: Long = System.currentTimeMillis() if (initFalg.compareAndSet(false, true)) { - val wid: Int = workerId.get() << workerIdShift + val wid: Int = workerId.get() << workerIdShift.toInt if (lastTimeStamp.compareAndSet(timestamp, timestamp)) { val ts: Long = (timestamp - twepoch) << timeStampLeftShit diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/upload/DataUpLoadService.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/upload/DataUpLoadService.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/upload/DataUploadProcess.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/server/upload/DataUploadProcess.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/source/FtpSourceReader.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/source/FtpSourceReader.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/states/State.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/states/State.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/utils/AntThreadFactory.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/utils/AntThreadFactory.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/utils/LoaderUtils.scala b/demo/DataLoader/src/main/scala/com/hackerforfuture/codeprototypes/dataloader/utils/LoaderUtils.scala old mode 100644 new mode 100755 diff --git a/demo/DataLoader/src/test/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterActorUnitSpec.scala b/demo/DataLoader/src/test/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterActorUnitSpec.scala new file mode 100755 index 00000000..d827f1de --- /dev/null +++ b/demo/DataLoader/src/test/scala/com/hackerforfuture/codeprototypes/dataloader/clusters/master/MasterActorUnitSpec.scala @@ -0,0 +1,54 @@ +package com.hackerforfuture.codeprototypes.dataloader.clusters.master + +import akka.actor.{ActorRef, ActorSystem, Terminated} +import com.hackerforfuture.codeprototypes.dataloader.clusters.StopActor +import com.hackerforfuture.codeprototypes.dataloader.clusters.worker.SlaveActor +import com.hackerforfuture.codeprototypes.dataloader.common.LogSupport +import com.typesafe.config.{Config, ConfigFactory} +import org.scalatest.flatspec.AnyFlatSpec + +import java.util.concurrent.TimeUnit +import scala.concurrent.{Await, Future} +import scala.concurrent.duration.{Duration, DurationInt} + +/** + * Author: biyu.huang + * Date: 2023/11/1 18:19 + * Description: + */ +class MasterActorUnitSpec extends AnyFlatSpec with LogSupport { + "Dev" should "run Akka cluster" in { + // 创建自定义配置 + val customConfig: Config = ConfigFactory.parseString( + """ + akka { + log-dead-letters-during-shutdown = off + log-dead-letters = off + } + """) + val system: ActorSystem = ActorSystem("Akka-Cluster-System", customConfig) + + // 创建MasterActor + val master: ActorRef = system.actorOf(MasterActor.props, "master") + + // 
创建多个SlaveActor,并将MasterActor作为参数传递给它们 + val actor1 = system.actorOf(SlaveActor.props(master), "slave1") + val actor2 = system.actorOf(SlaveActor.props(master), "slave2") + val actor3 = system.actorOf(SlaveActor.props(master), "slave3") + + // 停止系统的示例代码,可以根据需要进行调整 + // 在这个示例中,我们在10秒后停止系统 + import system.dispatcher + akka.pattern.after(10.seconds, system.scheduler) { + actor2 ! StopActor + Future.unit + } + // actor2 ! StopActor + val future: Future[Unit] = akka.pattern.after(55.seconds, system.scheduler) { + system.actorSelection("/user/*") ! StopActor + system.terminate() + Future.unit + } + Await.result(future, Duration(60, TimeUnit.SECONDS)) + } +} diff --git a/demo/FlinkDemo/README.md b/demo/FlinkDemo/README.md new file mode 100755 index 00000000..d5d210d3 --- /dev/null +++ b/demo/FlinkDemo/README.md @@ -0,0 +1,4 @@ +# how to build project? +```bash +mvn clean package -DskipTests -pl ./demo/FlinkDemo --am +``` \ No newline at end of file diff --git a/demo/FlinkDemo/pom.xml b/demo/FlinkDemo/pom.xml new file mode 100755 index 00000000..88c346d5 --- /dev/null +++ b/demo/FlinkDemo/pom.xml @@ -0,0 +1,275 @@ + + + + + 4.0.0 + + com.wallace.demo + CodePrototypesDemo + 0.1.0-SNAPSHOT + ../../pom.xml + + + + 1.8 + + + flinkdemo + 0.1.0-SNAPSHOT + + + + org.apache.maven.plugins + maven-checkstyle-plugin + 3.1.0 + + true + + + + + net.alchim31.maven + scala-maven-plugin + + incremental + + + + compile-scala + compile + + add-source + compile + testCompile + + + + test-compile-first + test-compile + + add-source + testCompile + + + + + + + org.apache.maven.plugins + maven-compiler-plugin + + ${java.version} + ${java.version} + + + + + org.apache.maven.plugins + maven-shade-plugin + + + shade-package + package + + shade + + + ${jar.file.name} + true + jar-with-dependencies + + + + reference.conf + + + + + org.apache.flink:force-shading + com.google.code.findbugs:jsr305 + + + + + *:* + + module-info.class + META-INF/*.SF + META-INF/*.DSA + META-INF/*.RSA + + + + true + + + + + + + + + + + + + + + + + + + + + + + + + + + org.scalatest + scalatest-maven-plugin + + ${project.build.directory}/unitspecs-reports + + . 
+ WDF UnitSpec.txt + ${project.build.directory}/site/scalatest + true + + + + test + test + + test + + + + + + + org.scalastyle + scalastyle-maven-plugin + 1.0.0 + + false + false + false + false + ${basedir}/src/main/scala + ${basedir}/src/test/scala + ${basedir}/../../scalastyle-config.xml + ${basedir}/target/scalastyle-output.xml + ${project.build.sourceEncoding} + ${project.reporting.outputEncoding} + + + + scalastyle-check + package + + check + + + + + + + + + + + org.apache.flink + flink-table-api-scala-bridge_${scala.binary.version} + ${flink.version} + + + org.slf4j + slf4j-api + + + + + + org.apache.flink + flink-cep-scala_${scala.binary.version} + ${flink.version} + + + + org.apache.flink + flink-clients + ${flink.version} + + + org.slf4j + slf4j-api + + + + + + + com.typesafe.scala-logging + scala-logging_${scala.binary.version} + 3.9.5 + + + org.slf4j + slf4j-api + + + + + + + org.slf4j + slf4j-api + + + + + org.slf4j + slf4j-reload4j + + + + org.slf4j + slf4j-api + + + + + + + junit + junit + 4.13.2 + test + + + + + org.scalatest + scalatest_${scala.binary.version} + 3.2.15 + + + + + com.vladsch.flexmark + flexmark-all + 0.64.6 + + + diff --git a/demo/FlinkDemo/src/main/resources/log4j.properties b/demo/FlinkDemo/src/main/resources/log4j.properties new file mode 100755 index 00000000..cb00b143 --- /dev/null +++ b/demo/FlinkDemo/src/main/resources/log4j.properties @@ -0,0 +1,9 @@ +# Set everything to be logged to the console +log4j.rootCategory=INFO, console +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.out +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %l: %m%n + +# Settings to quiet third party logs that are too verbose +log4j.logger.org.apache.flink=ERROR diff --git a/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/FlinkCEPDemo.scala b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/FlinkCEPDemo.scala new file mode 100755 index 00000000..f6f0911f --- /dev/null +++ b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/FlinkCEPDemo.scala @@ -0,0 +1,97 @@ +package com.notalk.flink.demo + +import com.notalk.flink.demo.common.LogSupport +import com.notalk.flink.demo.event.LoginEvent +import org.apache.flink.cep.nfa.aftermatch.AfterMatchSkipStrategy +import org.apache.flink.cep.scala.pattern.Pattern +import org.apache.flink.cep.scala.{CEP, PatternStream} +import org.apache.flink.streaming.api.TimeCharacteristic +import org.apache.flink.streaming.api.scala.{KeyedStream, StreamExecutionEnvironment, createTypeInformation} +import org.apache.flink.streaming.api.windowing.time.Time + +/** + * Author: biyu.huang + * Date: 2023/7/27 14:38 + * Description: A demo of Flink CEP to detect patterns in a stream of login events + */ +object FlinkCEPDemo extends LogSupport { + def main(args: Array[String]): Unit = { + logger.info("Start running Flink CEP demo...") + + // Set up the Flink execution environment + val scalaEnv: StreamExecutionEnvironment = StreamExecutionEnvironment.getExecutionEnvironment + scalaEnv.setStreamTimeCharacteristic(TimeCharacteristic.EventTime) + scalaEnv.setParallelism(1) + + // Define the stream of login events + val stream: KeyedStream[LoginEvent, String] = scalaEnv + .fromElements( + LoginEvent("user_1", "192.168.0.1", "fail", 2000L), + LoginEvent("user_1", "192.168.0.2", "fail", 3000L), + LoginEvent("user_1", "192.168.0.3", "fail", 4000L), + LoginEvent("user_2", "192.168.10.10", "fail", 5000L), + LoginEvent("user_2", 
"192.168.10.11", "fail", 6000L), + LoginEvent("user_2", "192.168.10.12", "fail", 9000L), + LoginEvent("user_3", "192.168.19.3", "fail", 10000L), + LoginEvent("user_3", "192.168.19.4", "fail", 30000L), + LoginEvent("user_3", "192.168.19.5", "success", 35000L), + LoginEvent("user_4", "192.168.19.15", "success", 50000L), + LoginEvent("user_5", "192.168.21.112", "fail", 51000L), + LoginEvent("user_5", "192.168.23.13", "fail", 52000L), + LoginEvent("user_5", "192.168.34.12", "fail", 53000L), + LoginEvent("user_5", "192.168.44.11", "fail", 54000L), + ) + .assignAscendingTimestamps(_.eventTime) + .keyBy(_.userId) + + // Print the login events + stream.print("login_event") + + // Define the pattern for three consecutive failed login attempts + val threeTimesFailPattern: Pattern[LoginEvent, LoginEvent] = Pattern + .begin[LoginEvent]("first") + .where(_.eventType == "fail") + .next("second") + .where(_.eventType == "fail") + .next("third") + .where(_.eventType == "fail") + .within(Time.seconds(5)) + + // Apply the pattern to the stream and select the matching events + val failedStream: PatternStream[LoginEvent] = CEP.pattern(stream, threeTimesFailPattern) + failedStream + .select((pattern: scala.collection.Map[String, Iterable[LoginEvent]]) => { + val first = pattern("first").iterator.next() + val second = pattern("second").iterator.next() + val third = pattern("third").iterator.next() + + (first.userId, first.ip, second.ip, third.ip) + }) + .printToErr("fail_result") + + // Define the pattern for a successful login following a failed attempt + val successPattern: Pattern[LoginEvent, LoginEvent] = Pattern + .begin[LoginEvent]("fail", AfterMatchSkipStrategy.skipPastLastEvent()) + .optional + .where(_.eventType == "fail") + .followedBy("success") + .where(_.eventType == "success") + .within(Time.seconds(30)) + + // Apply the pattern to the stream and select the matching events + val successStream = CEP.pattern(stream, successPattern) + successStream.select((pattern: scala.collection.Map[String, Iterable[LoginEvent]]) => { + val iterator: Iterator[LoginEvent] = pattern.getOrElse("fail", Iterable.empty).iterator + val fail: LoginEvent = if (iterator.hasNext) iterator.next() else LoginEvent("", "", "", 0L) + val success: LoginEvent = pattern("success").iterator.next() + + (success.userId, fail.ip, success.ip) + }) + .printToErr("success_result") + + // Execute the Flink job + scalaEnv.execute() + + logger.info("Stop running Flink CEP demo") + } +} \ No newline at end of file diff --git a/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/common/LogLevel.scala b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/common/LogLevel.scala new file mode 100755 index 00000000..735ae053 --- /dev/null +++ b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/common/LogLevel.scala @@ -0,0 +1,11 @@ +package com.notalk.flink.demo.common + +/** + * Author: biyu.huang + * Date: 2023/7/27 12:09 + * Description: + */ +object LogLevel extends Enumeration { + type LogLevel = Value + val DEBUG, INFO, WARN, ERROR, TRACE = Value +} diff --git a/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/common/LogSupport.scala b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/common/LogSupport.scala new file mode 100755 index 00000000..d4ecf787 --- /dev/null +++ b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/common/LogSupport.scala @@ -0,0 +1,24 @@ +package com.notalk.flink.demo.common + +import com.notalk.flink.demo.common.LogLevel.LogLevel +import com.typesafe.scalalogging.LazyLogging + +import scala.reflect.ClassTag + 
+/** + * Author: biyu.huang + * Date: 2023/7/27 12:08 + * Description: + */ +trait LogSupport extends LazyLogging { + protected def logRecord[T: ClassTag](msg: T, level: LogLevel = LogLevel.INFO): Unit = { + level match { + case LogLevel.DEBUG => logger.debug(s"$msg") + case LogLevel.INFO => logger.info(s"$msg") + case LogLevel.WARN => logger.warn(s"$msg") + case LogLevel.ERROR => logger.error(s"$msg") + case LogLevel.TRACE => logger.trace(s"$msg") + case _ => logger.info(s"$msg") + } + } +} diff --git a/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/event/LoginEvent.scala b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/event/LoginEvent.scala new file mode 100755 index 00000000..7bd31098 --- /dev/null +++ b/demo/FlinkDemo/src/main/scala/com/notalk/flink/demo/event/LoginEvent.scala @@ -0,0 +1,12 @@ +package com.notalk.flink.demo.event + +/** + * Author: biyu.huang + * Date: 2023/7/27 14:39 + * Description: + */ +case class LoginEvent( + userId: String, + ip: String, + eventType: String, + eventTime: Long) diff --git a/demo/FlinkDemo/src/test/resources/log4j.properties b/demo/FlinkDemo/src/test/resources/log4j.properties new file mode 100755 index 00000000..0f1249cc --- /dev/null +++ b/demo/FlinkDemo/src/test/resources/log4j.properties @@ -0,0 +1,6 @@ +# Set everything to be logged to the console +log4j.rootCategory=INFO, console +log4j.appender.console=org.apache.log4j.ConsoleAppender +log4j.appender.console.target=System.out +log4j.appender.console.layout=org.apache.log4j.PatternLayout +log4j.appender.console.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %l: %m%n diff --git a/demo/FlinkDemo/src/test/resources/log4j2.properties b/demo/FlinkDemo/src/test/resources/log4j2.properties new file mode 100755 index 00000000..af6a7e83 --- /dev/null +++ b/demo/FlinkDemo/src/test/resources/log4j2.properties @@ -0,0 +1,8 @@ +#appender.console.type=Console +#appender.console.name=STDOUT +#appender.console.layout.type=PatternLayout +#appender.console.layout.pattern=%d{yy/MM/dd HH:mm:ss} %p %l: %m%n +#appender.console.filter.threshold.type=ThresholdFilter +#appender.console.filter.threshold.level=debug +# +#rootLogger=ERROR,STDOUT diff --git a/demo/GccDemo/GccDemo.xcodeproj/project.pbxproj b/demo/GccDemo/GccDemo.xcodeproj/project.pbxproj old mode 100644 new mode 100755 diff --git a/demo/GccDemo/GccDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata b/demo/GccDemo/GccDemo.xcodeproj/project.xcworkspace/contents.xcworkspacedata old mode 100644 new mode 100755 diff --git a/demo/GccDemo/GccDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist b/demo/GccDemo/GccDemo.xcodeproj/project.xcworkspace/xcshareddata/IDEWorkspaceChecks.plist old mode 100644 new mode 100755 diff --git a/demo/GccDemo/GccDemo.xcodeproj/project.xcworkspace/xcuserdata/wallace.xcuserdatad/UserInterfaceState.xcuserstate b/demo/GccDemo/GccDemo.xcodeproj/project.xcworkspace/xcuserdata/wallace.xcuserdatad/UserInterfaceState.xcuserstate old mode 100644 new mode 100755 diff --git a/demo/GccDemo/GccDemo.xcodeproj/xcuserdata/wallace.xcuserdatad/xcschemes/xcschememanagement.plist b/demo/GccDemo/GccDemo.xcodeproj/xcuserdata/wallace.xcuserdatad/xcschemes/xcschememanagement.plist old mode 100644 new mode 100755 diff --git a/demo/GccDemo/GccDemo/main.cpp b/demo/GccDemo/GccDemo/main.cpp old mode 100644 new mode 100755 diff --git a/demo/GccDemo/cdemo/main.c b/demo/GccDemo/cdemo/main.c old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/.kms_conf b/demo/ScalaDemo/.kms_conf new file mode 100755 index 
00000000..de4d9463 Binary files /dev/null and b/demo/ScalaDemo/.kms_conf differ diff --git a/demo/ScalaDemo/pom.xml b/demo/ScalaDemo/pom.xml old mode 100644 new mode 100755 index 36fb0acf..f3dc01aa --- a/demo/ScalaDemo/pom.xml +++ b/demo/ScalaDemo/pom.xml @@ -115,7 +115,8 @@ org.scalatest scalatest-maven-plugin - ${project.build.directory}/unitspecs-reports + ${project.build.directory}/unitspecs-reports + . WDF UnitSpec.txt ${project.build.directory}/site/scalatest @@ -158,6 +159,16 @@ org.apache.spark spark-core_${scala.binary.version} + + + org.slf4j + slf4j-log4j12 + + + org.apache.logging.log4j + log4j-slf4j-impl + + @@ -267,6 +278,14 @@ com.google.code.gson gson + + org.slf4j + slf4j-log4j12 + + + org.apache.logging.log4j + log4j-slf4j-impl + @@ -338,6 +357,18 @@ log4j log4j + + org.slf4j + slf4j-reload4j + + + reload4j + ch.qos.reload4j + + + curator-client + org.apache.curator + @@ -361,6 +392,24 @@ scala-logging_${scala.binary.version} + + + org.apache.logging.log4j + log4j-api + + + org.apache.logging.log4j + log4j-core + + + org.apache.logging.log4j + log4j-slf4j2-impl + + + org.slf4j + slf4j-api + + com.jcraft jsch @@ -383,17 +432,6 @@ better-files_${scala.binary.version} - - ch.qos.logback - logback-core - - - - ch.qos.logback - logback-classic - - - com.typesafe.akka akka-actor_${scala.binary.version} @@ -423,17 +461,6 @@ ch.ethz.ganymed ganymed-ssh2 - - - org.slf4j - slf4j-api - - - - org.slf4j - slf4j-log4j12 - - @@ -484,17 +511,23 @@ flexmark-all test - - - - - redis.clients jedis + + org.apache.curator + curator-framework + 2.13.0 + + + org.apache.curator + curator-recipes + 2.13.0 + + com.zte.hadoop.loader hadoop-loader diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/DateUtilTest.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/DateUtilTest.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/InitClassDemo.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/InitClassDemo.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/InitializationDemo.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/InitializationDemo.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/RegexDemo.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/RegexDemo.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/ReloadThread.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/ReloadThread.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/common/JavaLogSupport.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/common/JavaLogSupport.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/ASmrPlr.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/ASmrPlr.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/HW_ASmr.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/HW_ASmr.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/HW_Field.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/HW_Field.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/MRO.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/MRO.java old mode 100644 new mode 100755 diff 
--git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/MROSax.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/MROSax.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/NorthMRInfoHW.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/NorthMRInfoHW.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/SaxHandler.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/parsexml/SaxHandler.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/ConfigMapStorage.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/ConfigMapStorage.java new file mode 100755 index 00000000..f8e77bc6 --- /dev/null +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/ConfigMapStorage.java @@ -0,0 +1,149 @@ +package com.wallace.demo.app.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.annotation.Nullable; +import javax.crypto.Cipher; +import javax.crypto.KeyGenerator; +import javax.crypto.SecretKey; +import javax.crypto.spec.GCMParameterSpec; +import javax.crypto.spec.SecretKeySpec; +import java.io.*; +import java.nio.file.Files; +import java.nio.file.Paths; +import java.security.Key; +import java.util.Base64; +import java.util.HashMap; +import java.util.Map; + +/** + * Author: biyu.huang + * Date: 2024/7/24 10:08 + * Description: + */ +public class ConfigMapStorage { + private static final Logger logger = LoggerFactory.getLogger(ConfigMapStorage.class); + private static final String ALGORITHM = "AES/GCM/PKCS5Padding"; + private static final int KEY_LENGTH = 32; + + // AES-GCM needs 96-bit(12 bytes) IV, refer to GaloisCounterMode.DEFAULT_IV_LEN + private static final int IV_LENGTH = 12; + private Key encryptionKey; + private String SAVE_FILE; + private Map configMap; + + private Key generateKey() throws Exception { + KeyGenerator keyGen = KeyGenerator.getInstance("AES"); + keyGen.init(256); + SecretKey secretKey = keyGen.generateKey(); + return new SecretKeySpec(secretKey.getEncoded(), "AES"); + } + + public ConfigMapStorage(@Nullable String savePath) { + String envKey = System.getenv("ENCRYPTION_KEY"); + if (envKey == null) { + throw new IllegalArgumentException("Environment variable ENCRYPTION_KEY not set"); + } + byte[] keyBytes = Base64.getDecoder().decode(envKey); + byte[] newKeyBytes = new byte[KEY_LENGTH]; + System.arraycopy(keyBytes, 0, newKeyBytes, 0, Math.min(keyBytes.length, KEY_LENGTH)); + this.encryptionKey = new SecretKeySpec(newKeyBytes, "AES"); + + String SAVE_FILE_NAME = "/.kms_kv"; + if (savePath == null || savePath.trim().isEmpty()) { + this.SAVE_FILE = "/tmp" + SAVE_FILE_NAME; + } else { + String fixedSavePath = savePath.trim(); + if (fixedSavePath.endsWith("/")) { + this.SAVE_FILE = fixedSavePath.substring(0, fixedSavePath.length() - 1) + SAVE_FILE_NAME; + } else { + this.SAVE_FILE = fixedSavePath + SAVE_FILE_NAME; + } + } + + this.configMap = new HashMap<>(); + logger.info("SAVE_PATH -> " + this.SAVE_FILE); + } + + public static ConfigMapStorage getInstance() { + return new ConfigMapStorage(null); + } + + public static ConfigMapStorage getInstance(String savePath) { + return new ConfigMapStorage(savePath); + } + + @SuppressWarnings("unchecked") + private void loadConfigMap() { + try (ObjectInputStream ois = new ObjectInputStream(new FileInputStream(SAVE_FILE))) { + this.configMap = (Map) ois.readObject(); + } catch (FileNotFoundException 
e) { + logger.error(this.SAVE_FILE + " not found. A new one should be created."); + } catch (IOException | ClassNotFoundException e) { + e.printStackTrace(); + } + } + + public void saveConfigMap() { + try (ObjectOutputStream oos = new ObjectOutputStream(Files.newOutputStream(Paths.get(SAVE_FILE)))) { + oos.writeObject(this.configMap); + } catch (IOException e) { + e.printStackTrace(); + } + } + + public void addConfig(String key, String value) throws Exception { + String encryptedValue = encrypt(value); + logger.info("[ENCRYPTED] " + key + " ===> " + encryptedValue); + configMap.put(key, encryptedValue); + saveConfigMap(); + } + + public String getConfig(String key) throws Exception { + String encryptedToken = this.configMap.get(key); + return encryptedToken != null ? decrypt(encryptedToken) : null; + } + + public int getSize() { + return this.configMap.size(); + } + + private String encrypt(String data) throws Exception { + byte[] dataBytes = data.getBytes(); + Cipher cipher = Cipher.getInstance(ALGORITHM); + cipher.init(Cipher.ENCRYPT_MODE, encryptionKey); + byte[] encryptedBytes = cipher.doFinal(dataBytes); + byte[] ivBytes = cipher.getIV(); + byte[] encryptedData = new byte[IV_LENGTH + encryptedBytes.length]; + System.arraycopy(ivBytes, 0, encryptedData, 0, IV_LENGTH); + System.arraycopy(encryptedBytes, 0, encryptedData, IV_LENGTH, encryptedBytes.length); + return Base64.getEncoder().encodeToString(encryptedData); + } + + private String decrypt(String encryptedData) throws Exception { + byte[] cipherBytes = Base64.getDecoder().decode(encryptedData); + byte[] ivBytes = new byte[IV_LENGTH]; + System.arraycopy(cipherBytes, 0, ivBytes, 0, IV_LENGTH); + GCMParameterSpec gcmParamSpec = new GCMParameterSpec(128, ivBytes, 0, IV_LENGTH); + Cipher cipher = Cipher.getInstance(ALGORITHM); + cipher.init(Cipher.DECRYPT_MODE, encryptionKey, gcmParamSpec); + byte[] rawBytes = cipher.doFinal(cipherBytes, IV_LENGTH, cipherBytes.length - IV_LENGTH); + return new String(rawBytes); + } + + public static void main(String[] args) throws Exception { + ConfigMapStorage configMapStorage = ConfigMapStorage.getInstance(); + String dummyToken1 = Base64.getEncoder().encodeToString(configMapStorage.generateKey().getEncoded()); + String dummyToken2 = "fake_token"; + logger.info("[RAW] token1 ===> " + dummyToken1); + logger.info("[RAW] token2 ===> " + dummyToken2); + configMapStorage.addConfig("token1", dummyToken1); + configMapStorage.addConfig("token2", dummyToken2); + + configMapStorage.loadConfigMap(); + logger.info("[LOAD] configMap.size() = " + configMapStorage.getSize()); + logger.info("[LOAD] token2 ===> " + configMapStorage.getConfig("token2")); + logger.info("[LOAD] token1 ===> " + configMapStorage.getConfig("token1")); + } +} diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/DateUtil.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/DateUtil.java old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/FuncUtil.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/FuncUtil.java old mode 100644 new mode 100755 index d585ad09..a89bbd7b --- a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/FuncUtil.java +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/FuncUtil.java @@ -14,25 +14,22 @@ public static String[] split(String str, String sep, String other) { } public static String[] split(String str, char split, char other) { - // if (!str.contains(other + "")) { - // return str.split(split + "", -1); - // } 
else { ArrayList strList = new ArrayList<>(); - int num = 0;// other干扰符个数 - int off = 0;// 字串的起始位置 - int subStrSize = 0;// 字串的长度 + int startIndex = 0; // 字串的起始位置 + int subStrSize = 0; // 字串的长度 + int num = 0; // Initialize num for (int i = 0; i < str.length(); i++) { char c = str.charAt(i); // 最后一个字符 if (c != split && i == str.length() - 1) { - strList.add(str.substring(off, off + subStrSize + 1)); + strList.add(str.substring(startIndex, startIndex + subStrSize + 1)); } if (c == other) { num++; subStrSize++; } else if (num % 2 == 0 && c == split) { - strList.add(str.substring(off, off + subStrSize)); - off += subStrSize + 1; + strList.add(str.substring(startIndex, startIndex + subStrSize)); + startIndex += subStrSize + 1; subStrSize = 0; } else { subStrSize++; @@ -42,6 +39,35 @@ public static String[] split(String str, char split, char other) { return strList.toArray(fields); } + public static String[] splitWithLimit(String str, char split, char other, int limit) { + ArrayList strList = new ArrayList<>(); // Initialize strList + int startIndex = 0; // 字串的起始位置 + int subStrSize = 0; // 字串的长度 + int splitCount = 0; // 分割次数 + int num = 0; // Initialize num + + for (int i = 0; i < str.length(); i++) { + char c = str.charAt(i); + // 最后一个字符 + if (c != split && i == str.length() - 1) { + strList.add(str.substring(startIndex, startIndex + subStrSize + 1)); + } + if (c == other) { + num++; + subStrSize++; + } else if (num % 2 == 0 && c == split && splitCount < limit - 1) { + strList.add(str.substring(startIndex, startIndex + subStrSize)); + startIndex += subStrSize + 1; + subStrSize = 0; + splitCount++; + } else { + subStrSize++; + } + } + String fields[] = new String[strList.size()]; + return strList.toArray(fields); + } + /** * 将数组按照指定的字符拆分 * diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/StringToHash.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/StringToHash.java new file mode 100755 index 00000000..1187c4ce --- /dev/null +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/StringToHash.java @@ -0,0 +1,117 @@ +package com.wallace.demo.app.utils; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import javax.crypto.Cipher; +import javax.crypto.spec.IvParameterSpec; +import javax.crypto.spec.SecretKeySpec; +import java.nio.charset.StandardCharsets; +import java.util.Arrays; +import java.lang.Long; + +/** + * Author: biyu.huang + * Date: 2024/5/9 11:50 + * Description: + */ +public class StringToHash { + private static final Logger log = LoggerFactory.getLogger(StringToHash.class); + + public static void main(String[] args) throws Exception { + String inputStr = "test"; + + // 加密 + String kycInfoSecureKey = "o69BYlB9umqDAT3sizrC1Q=="; + + byte[] encrypted = aesCbcEncryptPkcs7Iv(kycInfoSecureKey.getBytes(StandardCharsets.UTF_8), + inputStr.getBytes(StandardCharsets.UTF_8)); + + // 哈希计算 + String encryptedHex = bytesToHex(encrypted); + long hashCode = getHashCode(encryptedHex); + + // 取模运算 + // long tableIdx = Long.parseLong(nameHash, 16) % 1000; + + log.info(String.format("input_str -> %s, hash_code -> %s", inputStr, + Long.toUnsignedString(hashCode))); + } + + private static final byte[] AES_CBC_IV = new byte[16]; + + public static long getHashCode(String nameEncrypt) { + return getHashCode(nameEncrypt.getBytes(StandardCharsets.UTF_8)); + } + + public static long getHashCode(byte[] bytes) { + Hash64 hash64 = new Hash64(); + hash64.write(bytes); + return hash64.getHashCode(); + } + public static String aesCbcEncryptWithIv(byte[] key, byte[] 
plainText) throws Exception { + SecretKeySpec secretKeySpec = new SecretKeySpec(key, "AES"); + Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5Padding"); + cipher.init(Cipher.ENCRYPT_MODE, secretKeySpec, new IvParameterSpec(AES_CBC_IV)); + byte[] encrypted = cipher.doFinal(plainText, 0, plainText.length); + byte[] result = new byte[plainText.length]; + System.arraycopy(encrypted, 0, result, 0, plainText.length); + return bytesToHex(result); + } + + public static byte[] pkcs7Padding(byte[] ciphertext, int blockSize) { + int padding = blockSize - (ciphertext.length % blockSize); + byte[] padText = new byte[padding]; + Arrays.fill(padText, (byte) padding); + byte[] result = Arrays.copyOf(ciphertext, ciphertext.length + padding); + System.arraycopy(padText, 0, result, ciphertext.length, padding); + return result; + } + + public static byte[] aesCbcEncryptPkcs7Iv(byte[] key, byte[] message) throws Exception { + byte[] plainText = pkcs7Padding(message, 16); + String encrypted = aesCbcEncryptWithIv(key, plainText); + return hexToBytes(encrypted); + } + + private static String bytesToHex(byte[] bytes) { + StringBuilder sb = new StringBuilder(); + for (byte b : bytes) { + sb.append(String.format("%02x", b)); + } + return sb.toString(); + } + + private static byte[] hexToBytes(String hex) { + int len = hex.length(); + byte[] data = new byte[len / 2]; + for (int i = 0; i < len; i += 2) { + data[i / 2] = (byte) ((Character.digit(hex.charAt(i), 16) << 4) + + Character.digit(hex.charAt(i + 1), 16)); + } + return data; + } + + + private static class Hash64 { + private long hashCode; + + public Hash64() { + // based on fnv.offset64 in Golang(fnv.go), refer to https://en.wikipedia.org/wiki/Fowler-Noll-Vo_hash_function. + this.hashCode = -3750763034362895579L; + } + + public void write(byte[] data) { + for (byte b : data) { + hashCode ^= (long) b; + hashCode *= 1099511628211L; + } + } + + public long getHashCode() { + return this.hashCode; + } + } +} + + diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/ioutils/ParquetIOUtils.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/ioutils/ParquetIOUtils.java old mode 100644 new mode 100755 index f3eae52f..fde97885 --- a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/ioutils/ParquetIOUtils.java +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/ioutils/ParquetIOUtils.java @@ -10,99 +10,235 @@ import org.apache.parquet.hadoop.ParquetWriter; import org.apache.parquet.hadoop.example.GroupReadSupport; import org.apache.parquet.hadoop.example.GroupWriteSupport; -import org.apache.parquet.schema.MessageType; -import org.apache.parquet.schema.MessageTypeParser; +import org.apache.parquet.io.api.Binary; +import org.apache.parquet.schema.*; import java.io.BufferedReader; import java.io.File; import java.io.FileReader; import java.io.IOException; +import java.math.BigDecimal; +import java.nio.ByteBuffer; +import java.util.ArrayList; +import java.util.HashMap; import java.util.Random; import static org.apache.parquet.hadoop.ParquetReader.builder; public class ParquetIOUtils { - static Logger logger = Logger.getLogger(ParquetIOUtils.class); + static Logger logger = Logger.getLogger(ParquetIOUtils.class); - public static void main(String[] args) throws Exception { - //parquetWriter("test\\parquet-out2", "input.txt"); - parquetReaderV2(); - } + public static void main(String[] args) throws Exception { + //parquetWriter("test\\parquet-out2", "input.txt"); + parquetReaderV2(); + } - private static void parquetReaderV2() throws 
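The Hash64 helper above is the 64-bit FNV-1a hash: the signed literal -3750763034362895579L is the FNV offset basis 0xcbf29ce484222325 and 1099511628211L is the FNV prime. A standalone sketch of the canonical algorithm follows; note that Hash64 XORs the sign-extended byte, which deviates from the usual `b & 0xff` for byte values at or above 0x80, and the class and method names here are illustrative.

import java.nio.charset.StandardCharsets;

public class Fnv1a64Sketch {
    private static final long OFFSET_BASIS = 0xcbf29ce484222325L; // == -3750763034362895579L
    private static final long PRIME = 0x100000001b3L;             // == 1099511628211L

    static long hash(byte[] data) {
        long h = OFFSET_BASIS;
        for (byte b : data) {
            h ^= (b & 0xffL); // canonical FNV-1a uses the unsigned byte value
            h *= PRIME;
        }
        return h;
    }

    public static void main(String[] args) {
        System.out.println(Long.toUnsignedString(hash("test".getBytes(StandardCharsets.UTF_8))));
    }
}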
Exception { - GroupReadSupport readSupport = new GroupReadSupport(); - ParquetReader.Builder reader = builder(readSupport, new Path("test\\parquet-out2")); - ParquetReader build = reader.build(); - Group line = null; - while ((line = build.read()) != null) { - Group time = line.getGroup("time", 0); - //通过下标和字段名称都可以获取 - /*System.out.println(line.getString(0, 0)+"\t"+ -        line.getString(1, 0)+"\t"+ -        time.getInteger(0, 0)+"\t"+ -        time.getString(1, 0)+"\t");*/ - System.out.println(line.getString("city", 0) + "\t" + - line.getString("ip", 0) + "\t" + - time.getInteger("ttl", 0) + "\t" + - time.getString("ttl2", 0) + "\t"); - - //System.out.println(line.toString()); + private static void parquetReaderV2() { + GroupReadSupport readSupport = new GroupReadSupport(); + ParquetReader.Builder reader = builder(readSupport, new Path("test\\parquet-out2")); + try (ParquetReader build = reader.build()) { + Group row; + GroupType schema = null; + while ((row = build.read()) != null) { + schema = schema == null ? row.getType() : schema; + for (Type field : schema.getFields()) { + primitiveFieldReader(field, row); + } + Group timeField = row.getGroup("time", 0); + //通过下标和字段名称都可以获取 + /*System.out.println(line.getString(0, 0)+"\t"+ + line.getString(1, 0)+"\t"+ + time.getInteger(0, 0)+"\t"+ + time.getString(1, 0)+"\t");*/ + for (Type field : timeField.getType().asGroupType().getFields()) { + switch (field.getOriginalType()) { + case INT_64: + row.getInteger(field.getName(), 0); + break; + case DECIMAL: + row.getDouble(field.getName(), 0); + break; + case UTF8: + row.getString(field.getName(), 0); + break; + default: + break; + } } - System.out.println("读取结束"); + logger.info(row.getString("city", 0) + "\t" + + row.getString("ip", 0) + "\t" + + timeField.getInteger("ttl", 0) + "\t" + + timeField.getString("ttl2", 0) + "\t"); + + //System.out.println(line.toString()); + + } + } catch (IOException e) { + logger.error(e); } + logger.info("读取结束"); + } - //新版本中new ParquetReader()所有构造方法好像都弃用了,用上面的builder去构造对象 - static void parquetReader(String inPath) throws Exception { - GroupReadSupport readSupport = new GroupReadSupport(); - ParquetReader.Builder builder = builder(readSupport, new Path(inPath)); - // ParquetReader reader = new ParquetReader(new Path(inPath), readSupport); - Group line = null; - while ((line = builder.build().read()) != null) { - System.out.println(line.toString()); - } - System.out.println("读取结束"); + static Long binaryToUnscaledLong(Binary binary) { + // The underlying `ByteBuffer` implementation is guaranteed to be `HeapByteBuffer`, so here + // we are using `Binary.toByteBuffer.array()` to steal the underlying byte array without + // copying it. 
+ ByteBuffer buffer = binary.toByteBuffer(); + byte[] bytes = buffer.array(); + int start = buffer.arrayOffset() + buffer.position(); + int end = buffer.arrayOffset() + buffer.limit(); + + long unscaled = 0L; + int i = start; + while (i < end) { + unscaled = (unscaled << 8) | (bytes[i] & 0xff); + i += 1; } - /** - * @param outPath 输出Parquet格式 - * @param inPath 输入普通文本文件 - * @throws IOException - */ - static void parquetWriter(String outPath, String inPath) throws IOException { - MessageType schema = MessageTypeParser.parseMessageType("message Pair {\n" + - " required binary city (UTF8);\n" + - " required binary ip (UTF8);\n" + - " repeated group time {\n" + - " required int32 ttl;\n" + - " required binary ttl2;\n" + - "}\n" + - "}"); - GroupFactory factory = new SimpleGroupFactory(schema); - Path path = new Path(outPath); - Configuration configuration = new Configuration(); - GroupWriteSupport writeSupport = new GroupWriteSupport(); - GroupWriteSupport.setSchema(schema, configuration); - ParquetWriter writer = new ParquetWriter<>(path, configuration, writeSupport); - //把本地文件读取进去,用来生成parquet格式文件 - BufferedReader br = new BufferedReader(new FileReader(new File(inPath))); - String line; - Random r = new Random(); - while ((line = br.readLine()) != null) { - String[] strs = line.split("\\s+"); - if (strs.length == 2) { - Group group = factory.newGroup() - .append("city", strs[0]) - .append("ip", strs[1]); - Group tmpG = group.addGroup("time"); - tmpG.append("ttl", r.nextInt(9) + 1); - tmpG.append("ttl2", r.nextInt(9) + "_a"); - writer.write(group); - } + int bits = 8 * (end - start); + unscaled = (unscaled << (64 - bits)) >> (64 - bits); + return unscaled; + } + + static Object readValue(Type field, Integer index, Group row) { + Object value = null; + if (field.isPrimitive()) { + switch (field.asPrimitiveType().getPrimitiveTypeName()) { + case FLOAT: + value = row.getFloat(field.getName(), 0); + break; + case INT32: + if (field.getOriginalType() == OriginalType.DECIMAL) { + DecimalMetadata metadata = field.asPrimitiveType().getDecimalMetadata(); + int scale = metadata == null ? 0 : metadata.getScale(); + value = BigDecimal.valueOf(row.getInteger(field.getName(), index), scale); + } else { + value = row.getInteger(field.getName(), index); + } + break; + case INT64: + if (field.getOriginalType() == OriginalType.DECIMAL) { + DecimalMetadata metadata = field.asPrimitiveType().getDecimalMetadata(); + int scale = metadata == null ? 0 : metadata.getScale(); + value = BigDecimal.valueOf(row.getLong(field.getName(), index), scale); + } else { + value = row.getLong(field.getName(), index); + } + break; + case BINARY: + if (field.getOriginalType() == OriginalType.DECIMAL) { + DecimalMetadata metadata = field.asPrimitiveType().getDecimalMetadata(); + int scale = metadata == null ? 
0 : metadata.getScale(); + value = BigDecimal.valueOf(binaryToUnscaledLong(row.getBinary(field.getName(), index)), scale); + } else { + value = row.getLong(field.getName(), index); + } + break; + case DOUBLE: + value = row.getDouble(field.getName(), index); + break; + case BOOLEAN: + value = row.getBoolean(field.getName(), index); + break; + case INT96: + break; + case FIXED_LEN_BYTE_ARRAY: + break; + default: + throw new RuntimeException("unknown primitive type: " + + field.asPrimitiveType().getPrimitiveTypeName()); + } + } else { + GroupType fieldGroupType = field.asGroupType(); + Group fieldGroup = row.getGroup(field.getName(), index); + HashMap groupValue = new HashMap<>(); + for (Type singleField : fieldGroupType.getFields()) { + Object fieldValue; + int elementNum = fieldGroup.getFieldRepetitionCount(singleField.getName()); + if (elementNum <= 1) { + fieldValue = readValue(singleField, 0, fieldGroup); + } else { + ArrayList lsValue = new ArrayList<>(elementNum); + for (int i = 0; i < elementNum; i++) { + lsValue.add(readValue(field, i, row)); + } + fieldValue = lsValue; } - System.out.println("write end"); - writer.close(); + groupValue.put(singleField.getName(), fieldValue); + } + value = groupValue; + } + return value; + } + + static Object primitiveFieldReader(Type field, Group row) { + Object rowValue; + int elementNum = row.getFieldRepetitionCount(field.getName()); + if (elementNum <= 1) { + rowValue = readValue(field, 0, row); + } else { + ArrayList allValue = new ArrayList<>(elementNum); + for (int i = 0; i < elementNum; i++) { + allValue.add(readValue(field, i, row)); + } + rowValue = allValue; + } + return rowValue; + } + + //新版本中new ParquetReader()所有构造方法好像都弃用了,用上面的builder去构造对象 + static void parquetReader(String inPath) throws Exception { + GroupReadSupport readSupport = new GroupReadSupport(); + ParquetReader.Builder builder = builder(readSupport, new Path(inPath)); + // ParquetReader reader = new ParquetReader(new Path(inPath), readSupport); + Group line = null; + while ((line = builder.build().read()) != null) { + System.out.println(line.toString()); + } + System.out.println("读取结束"); + + } + + /** + * @param outPath 输出Parquet格式 + * @param inPath 输入普通文本文件 + * @throws IOException + */ + static void parquetWriter(String outPath, String inPath) throws IOException { + MessageType schema = MessageTypeParser.parseMessageType("message Pair {\n" + + " required binary city (UTF8);\n" + + " required binary ip (UTF8);\n" + + " repeated group time {\n" + + " required int32 ttl;\n" + + " required binary ttl2;\n" + + "}\n" + + "}"); + GroupFactory factory = new SimpleGroupFactory(schema); + Path path = new Path(outPath); + Configuration configuration = new Configuration(); + GroupWriteSupport writeSupport = new GroupWriteSupport(); + GroupWriteSupport.setSchema(schema, configuration); + ParquetWriter writer = new ParquetWriter<>(path, configuration, writeSupport); + //把本地文件读取进去,用来生成parquet格式文件 + BufferedReader br = new BufferedReader(new FileReader(new File(inPath))); + String line; + Random r = new Random(); + while ((line = br.readLine()) != null) { + String[] strs = line.split("\\s+"); + if (strs.length == 2) { + Group group = factory.newGroup() + .append("city", strs[0]) + .append("ip", strs[1]); + Group tmpG = group.addGroup("time"); + tmpG.append("ttl", r.nextInt(9) + 1); + tmpG.append("ttl2", r.nextInt(9) + "_a"); + writer.write(group); + } } + System.out.println("write end"); + writer.close(); + } } diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/sql 
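binaryToUnscaledLong above rebuilds a DECIMAL's unscaled value from its big-endian bytes and then sign-extends it from the number of bits actually present. A small worked sketch of the same arithmetic, standalone so it does not need Parquet on the classpath; names are illustrative.

import java.math.BigDecimal;

public class UnscaledLongDemo {
    // Same idea as binaryToUnscaledLong: accumulate the bytes big-endian,
    // then sign-extend from the number of bits actually present.
    static long toUnscaledLong(byte[] bytes) {
        long unscaled = 0L;
        for (byte b : bytes) {
            unscaled = (unscaled << 8) | (b & 0xff);
        }
        int bits = 8 * bytes.length;
        return (unscaled << (64 - bits)) >> (64 - bits);
    }

    public static void main(String[] args) {
        // 0xFF38 is -200 in 16-bit two's complement; with scale 2 that is -2.00.
        long unscaled = toUnscaledLong(new byte[]{(byte) 0xFF, (byte) 0x38});
        System.out.println(unscaled);                        // -200
        System.out.println(BigDecimal.valueOf(unscaled, 2)); // -2.00
    }
}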
b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/sql new file mode 100644 index 00000000..af3594f4 --- /dev/null +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/sql @@ -0,0 +1,146 @@ +on platform first placed client name until today +on platform last placed client name until today +on platform first placed transaction datetime until today +on platform last placed transaction datetime until today +on platform first placed transaction until today +on platform last placed transaction until today +on platform first succeeded transaction datetime until today +on platform last succeeded transaction datetime until today +on platform first succeeded transaction until today +on platform last succeeded transaction until today +on platform with shopeepay wallet first placed transaction datetime until today +on platform with shopeepay wallet last placed transaction datetime until today +on platform with shopeepay wallet first placed transaction until today +on platform with shopeepay wallet last placed transaction until today +on platform with shopeepay wallet first succeeded transaction datetime until today +on platform with shopeepay wallet last succeeded transaction datetime until today +on platform with shopeepay wallet first succeeded transaction until today +on platform with shopeepay wallet last succeeded transaction until today +Marketplace first placed transaction datetime until today +Marketplace last placed transaction datetime until today +Marketplace first placed transaction until today +Marketplace last placed transaction until today +Marketplace first succeeded transaction datetime until today +Marketplace last succeeded transaction datetime until today +Marketplace first succeeded transaction until today +Marketplace last succeeded transaction until today +Marketplace with shopeepay wallet first placed transaction datetime until today +Marketplace with shopeepay wallet last placed transaction datetime until today +Marketplace with shopeepay wallet first placed transaction until today +Marketplace with shopeepay wallet last placed transaction until today +Marketplace with shopeepay wallet first succeeded transaction datetime until today +Marketplace with shopeepay wallet last succeeded transaction datetime until today +Marketplace with shopeepay wallet first succeeded transaction until today +Marketplace with shopeepay wallet last succeeded transaction until today +Digital Product first placed transaction datetime until today +Digital Product last placed transaction datetime until today +Digital Product first placed transaction until today +Digital Product last placed transaction until today +Digital Product first succeeded transaction datetime until today +Digital Product last succeeded transaction datetime until today +Digital Product first succeeded transaction until today +Digital Product last succeeded transaction until today +Digital Product with shopeepay wallet first placed transaction datetime until today +Digital Product with shopeepay wallet last placed transaction datetime until today +Digital Product with shopeepay wallet first placed transaction until today +Digital Product with shopeepay wallet last placed transaction until today +Digital Product with shopeepay wallet first succeeded transaction datetime until today +Digital Product with shopeepay wallet last succeeded transaction datetime until today +Digital Product with shopeepay wallet first succeeded transaction until today +Digital Product with shopeepay wallet last succeeded transaction until 
today +first placed on user transaction datetime until today +SVS last placed transaction datetime until today +SVS first placed transaction until today +SVS last placed transaction until today +SVS first succeeded transaction datetime until today +SVS last succeeded transaction datetime until today +SVS first succeeded transaction until today +SVS last succeeded transaction until today +SVS with shopeepay wallet first placed transaction datetime until today +SVS with shopeepay wallet last placed transaction datetime until today +SVS with shopeepay wallet first placed transaction until today +SVS with shopeepay wallet last placed transaction until today +SVS with shopeepay wallet first succeeded transaction datetime until today +SVS with shopeepay wallet last succeeded transaction datetime until today +SVS with shopeepay wallet first succeeded transaction until today +SVS with shopeepay wallet last succeeded transaction until today +Mitra first placed transaction datetime until today +Mitra last placed transaction datetime until today +Mitra first placed transaction until today +Mitra last placed transaction until today +Mitra first succeeded transaction datetime until today +Mitra last succeeded transaction datetime until today +Mitra first succeeded transaction until today +Mitra last succeeded transaction until today +Mitra with shopeepay wallet first placed transaction datetime until today +Mitra with shopeepay wallet last placed transaction datetime until today +Mitra with shopeepay wallet first placed transaction until today +Mitra with shopeepay wallet last placed transaction until today +Mitra with shopeepay wallet first succeeded transaction datetime until today +Mitra with shopeepay wallet last succeeded transaction datetime until today +Mitra with shopeepay wallet first succeeded transaction until today +Mitra with shopeepay wallet last succeeded transaction until today +Repayment first placed transaction datetime until today +Repayment last placed transaction datetime until today +Repayment first placed transaction until today +Repayment last placed transaction until today +Repayment first succeeded transaction datetime until today +Repayment last succeeded transaction datetime until today +Repayment first succeeded transaction until today +Repayment last succeeded transaction until today +Repayment with shopeepay wallet first placed transaction datetime until today +Repayment with shopeepay wallet last placed transaction datetime until today +Repayment with shopeepay wallet first placed transaction until today +Repayment with shopeepay wallet last placed transaction until today +Repayment with shopeepay wallet first succeeded transaction datetime until today +Repayment with shopeepay wallet last succeeded transaction datetime until today +Repayment with shopeepay wallet first succeeded transaction until today +Repayment with shopeepay wallet last succeeded transaction until today +Shopee Food Driver first placed transaction datetime until today +Shopee Food Driver last placed transaction datetime until today +Shopee Food Driver first placed transaction until today +Shopee Food Driver last placed transaction until today +Shopee Food Driver first succeeded transaction datetime until today +Shopee Food Driver last succeeded transaction datetime until today +Shopee Food Driver first succeeded transaction until today +Shopee Food Driver last succeeded transaction until today +Shopee Food Driver with shopeepay wallet first placed transaction datetime until today +Shopee Food Driver 
with shopeepay wallet last placed transaction datetime until today +Shopee Food Driver with shopeepay wallet first placed transaction until today +Shopee Food Driver with shopeepay wallet last placed transaction until today +Shopee Food Driver with shopeepay wallet first succeeded transaction datetime until today +Shopee Food Driver with shopeepay wallet last succeeded transaction datetime until today +Shopee Food Driver with shopeepay wallet first succeeded transaction until today +Shopee Food Driver with shopeepay wallet last succeeded transaction until today +Shopee Food Buyer first placed transaction datetime until today +Shopee Food Buyer last placed transaction datetime until today +Shopee Food Buyer first placed transaction until today +Shopee Food Buyer last placed transaction until today +Shopee Food Buyer first succeeded transaction datetime until today +Shopee Food Buyer last succeeded transaction datetime until today +Shopee Food Buyer first succeeded transaction until today +Shopee Food Buyer last succeeded transaction until today +Shopee Food Buyer with shopeepay wallet first placed transaction datetime until today +Shopee Food Buyer with shopeepay wallet last placed transaction datetime until today +Shopee Food Buyer with shopeepay wallet first placed transaction until today +Shopee Food Buyer with shopeepay wallet last placed transaction until today +Shopee Food Buyer with shopeepay wallet first succeeded transaction datetime until today +Shopee Food Buyer with shopeepay wallet last succeeded transaction datetime until today +Shopee Food Buyer with shopeepay wallet first succeeded transaction until today +Shopee Food Buyer with shopeepay wallet last succeeded transaction until today +SPX C2C first placed transaction datetime until today +SPX C2C last placed transaction datetime until today +SPX C2C first placed transaction until today +SPX C2C last placed transaction until today +SPX C2C first succeeded transaction datetime until today +SPX C2C last succeeded transaction datetime until today +SPX C2C first succeeded transaction until today +SPX C2C last succeeded transaction until today +SPX C2C with shopeepay wallet first placed transaction datetime until today +SPX C2C with shopeepay wallet last placed transaction datetime until today +SPX C2C with shopeepay wallet first placed transaction until today +SPX C2C with shopeepay wallet last placed transaction until today +SPX C2C with shopeepay wallet first succeeded transaction datetime until today +SPX C2C with shopeepay wallet last succeeded transaction datetime until today +SPX C2C with shopeepay wallet first succeeded transaction until today +SPX C2C with shopeepay wallet last succeeded transaction until today \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/stringutils/StringUtils.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/stringutils/StringUtils.java old mode 100644 new mode 100755 index 47ff9722..280d3648 --- a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/stringutils/StringUtils.java +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/stringutils/StringUtils.java @@ -15,39 +15,51 @@ * Created by wallace on 2018/3/23. 
*/ public class StringUtils { - - public static List restoreIpAddresses(String s) { - List res = new ArrayList<>(); - if (s == null || s.length() < 4 || s.length() > 12) return res; - restoreIp(res, new ArrayList<>(), s, 0); - return res; + public static boolean isBlank(final String s) { + if (s == null || s.trim().isEmpty()) { + return true; + } + for (int i = 0; i < s.length(); i++) { + char c = s.charAt(i); + if (!Character.isWhitespace(c)) { + return false; + } } + return true; + } - private static void restoreIp(List res, List ip, String s, int pos) { - if (ip.size() == 4) { - if (pos != s.length()) return; - StringBuilder ipSb = new StringBuilder(); - for (String str : ip) ipSb.append(str).append("."); - ipSb.setLength(ipSb.length() - 1); - res.add(ipSb.toString()); - return; - } - - for (int i = pos; i < s.length() && i < pos + 3; i++) { - String ipSeg = s.substring(pos, i + 1); - if (isIpValid(ipSeg)) { - ip.add(ipSeg); - restoreIp(res, ip, s, i + 1); - ip.remove(ip.size() - 1); - } - } + public static List restoreIpAddresses(String s) { + List res = new ArrayList<>(); + if (s == null || s.length() < 4 || s.length() > 12) return res; + restoreIp(res, new ArrayList<>(), s, 0); + return res; + } + + private static void restoreIp(List res, List ip, String s, int pos) { + if (ip.size() == 4) { + if (pos != s.length()) return; + StringBuilder ipSb = new StringBuilder(); + for (String str : ip) ipSb.append(str).append("."); + ipSb.setLength(ipSb.length() - 1); + res.add(ipSb.toString()); + return; } - private static boolean isIpValid(String s) { - if (s == null || s.length() == 0) return false; - if (s.charAt(0) == '0') return s.equals("0"); - int ipInt = Integer.parseInt(s); - return ipInt >= 0 && ipInt <= 255; + for (int i = pos; i < s.length() && i < pos + 3; i++) { + String ipSeg = s.substring(pos, i + 1); + if (isIpValid(ipSeg)) { + ip.add(ipSeg); + restoreIp(res, ip, s, i + 1); + ip.remove(ip.size() - 1); + } } + } + + private static boolean isIpValid(String s) { + if (s == null || s.length() == 0) return false; + if (s.charAt(0) == '0') return s.equals("0"); + int ipInt = Integer.parseInt(s); + return ipInt >= 0 && ipInt <= 255; + } } diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/CuratorClient.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/CuratorClient.java new file mode 100755 index 00000000..03384866 --- /dev/null +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/CuratorClient.java @@ -0,0 +1,37 @@ +package com.wallace.demo.app.utils.zkUtils; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.CuratorFrameworkFactory; +import org.apache.curator.retry.ExponentialBackoffRetry; +import org.apache.zookeeper.CreateMode; + +/** + * Author: biyu.huang + * Date: 2023/2/1 18:44 + * Description: + */ +public class CuratorClient { + public static String LEADER_PATH = "/leader"; + + public static CuratorFramework getCuratorClient() { + CuratorFramework curatorFramework = CuratorFrameworkFactory + .builder() + .connectString("localhost:2181") + .sessionTimeoutMs(3000) + .connectionTimeoutMs(3000) + .retryPolicy(new ExponentialBackoffRetry(1000, 10)) + .namespace("base") + .build(); + curatorFramework.start(); + return curatorFramework; + } + + public static void main(String[] args) throws Exception { + CuratorFramework client = getCuratorClient(); + + client.create() + .creatingParentsIfNeeded() + .withMode(CreateMode.EPHEMERAL) + .forPath(LEADER_PATH + "/test"); + } +} diff 
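As a quick check of the backtracking above: restoreIpAddresses tries every 1-3 character segment, rejects segments with a leading zero or a value above 255, and keeps only candidates that consume the whole string in exactly four segments. A usage sketch, assuming StringUtils from this patch is on the classpath; expected output is shown as comments.

import com.wallace.demo.app.utils.stringutils.StringUtils;

public class RestoreIpDemo {
    public static void main(String[] args) {
        // "25525511135" can only be cut into valid octets in two ways.
        System.out.println(StringUtils.restoreIpAddresses("25525511135"));
        // => [255.255.11.135, 255.255.111.35]
        System.out.println(StringUtils.isBlank("   ")); // => true
    }
}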
--git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/LeaderElectionAgent.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/LeaderElectionAgent.java new file mode 100755 index 00000000..6be69afb --- /dev/null +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/LeaderElectionAgent.java @@ -0,0 +1,59 @@ +package com.wallace.demo.app.utils.zkUtils; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.leader.LeaderLatch; +import org.apache.curator.framework.recipes.leader.LeaderLatchListener; +import org.apache.curator.framework.recipes.leader.Participant; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +import java.io.IOException; +import java.util.concurrent.atomic.AtomicBoolean; + +/** + * Author: biyu.huang + * Date: 2023/2/2 18:53 + * Description: + */ +public class LeaderElectionAgent implements LeaderLatchListener { + private static final Logger logger = LoggerFactory.getLogger(LeaderElectionAgent.class); + private final AtomicBoolean LEADERSHIP_FLAG; + + private final LeaderLatch leaderLatch; + + public LeaderElectionAgent(CuratorFramework client, + String latchPath, + String id) throws Exception { + this.leaderLatch = new LeaderLatch(client, latchPath, id); + this.LEADERSHIP_FLAG = new AtomicBoolean(false); + } + + public boolean getLeadership() { + return this.LEADERSHIP_FLAG.get(); + } + + @Override + public void isLeader() { + logger.info("master -> set LEADER_FLAG to ture"); + this.LEADERSHIP_FLAG.set(true); + } + + @Override + public void notLeader() { + logger.info("slave -> set LEADER_FLAG to false"); + this.LEADERSHIP_FLAG.set(false); + } + + public Participant getLeader() throws Exception { + return this.leaderLatch.getLeader(); + } + + public void start() throws Exception { + this.leaderLatch.addListener(this); + this.leaderLatch.start(); + } + + public void close() throws IOException { + this.leaderLatch.close(); + } +} diff --git a/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/LeaderElectionMain.java b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/LeaderElectionMain.java new file mode 100755 index 00000000..ffa691eb --- /dev/null +++ b/demo/ScalaDemo/src/main/java/com/wallace/demo/app/utils/zkUtils/LeaderElectionMain.java @@ -0,0 +1,42 @@ +package com.wallace.demo.app.utils.zkUtils; + +import org.apache.curator.framework.CuratorFramework; +import org.apache.curator.framework.recipes.leader.Participant; + +import java.util.concurrent.CountDownLatch; + +/** + * Author: biyu.huang + * Date: 2023/2/3 11:41 + * Description: + */ +public class LeaderElectionMain { + private static final CountDownLatch count = new CountDownLatch(50); + + public static void main(String[] args) throws Exception { + String id = args[0]; + CuratorFramework client = CuratorClient.getCuratorClient(); + LeaderElectionAgent leaderElectionAgent = + new LeaderElectionAgent(client, CuratorClient.LEADER_PATH, id); + try { + leaderElectionAgent.start(); + Thread.sleep(10000); + while (count.getCount() > 0) { + Participant leader = leaderElectionAgent.getLeader(); + System.out.println(id + " --> " + leader); + if (leaderElectionAgent.getLeadership()) { + System.out.println(id + " is leader"); + } else { + System.out.println(id + " is not leader"); + } + count.countDown(); + Thread.sleep(1000); + } + } catch (Exception e) { + // TODO Auto-generated catch block + e.printStackTrace(); + leaderElectionAgent.close(); + client.close(); + } + } +} diff --git 
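LeaderElectionAgent and LeaderElectionMain above wrap Curator's LeaderLatch recipe. Below is a compact, self-contained sketch of the same recipe that runs two latches in one process so the election is observable. It assumes a ZooKeeper at localhost:2181 and uses the hypothetical path /leader; LeaderLatch is Closeable, so try-with-resources releases the latches, and the client is closed in finally rather than only on error.

import org.apache.curator.framework.CuratorFramework;
import org.apache.curator.framework.CuratorFrameworkFactory;
import org.apache.curator.framework.recipes.leader.LeaderLatch;
import org.apache.curator.retry.ExponentialBackoffRetry;

public class LeaderLatchSketch {
    public static void main(String[] args) throws Exception {
        CuratorFramework client = CuratorFrameworkFactory.newClient(
                "localhost:2181", new ExponentialBackoffRetry(1000, 3)); // assumes a local ZooKeeper
        client.start();
        try (LeaderLatch latchA = new LeaderLatch(client, "/leader", "node-a");
             LeaderLatch latchB = new LeaderLatch(client, "/leader", "node-b")) {
            latchA.start();
            latchB.start();
            Thread.sleep(2000); // give the latches time to elect a leader
            System.out.println("node-a leader? " + latchA.hasLeadership());
            System.out.println("node-b leader? " + latchB.hasLeadership());
            System.out.println("current leader: " + latchA.getLeader());
        } finally {
            client.close();
        }
    }
}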
a/demo/ScalaDemo/src/main/resources/AH_RM_20170522_all_all-cm_lte_cel-20170522000000-20170523000000-v2.0-20170522072925-001.csv.gz b/demo/ScalaDemo/src/main/resources/AH_RM_20170522_all_all-cm_lte_cel-20170522000000-20170523000000-v2.0-20170522072925-001.csv.gz old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/AH_RM_20170926_all_all-cm_lte_cel-20170926000000-20170927000000-v2.0-20170926112500-001.csv.gz b/demo/ScalaDemo/src/main/resources/AH_RM_20170926_all_all-cm_lte_cel-20170926000000-20170927000000-v2.0-20170926112500-001.csv.gz old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/CDT_ZTE_V3.5_963847_20171201180000.zip b/demo/ScalaDemo/src/main/resources/CDT_ZTE_V3.5_963847_20171201180000.zip old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/FDD-LTE_MRO_HUAWEI_661467_20170524154500.xml.gz b/demo/ScalaDemo/src/main/resources/FDD-LTE_MRO_HUAWEI_661467_20170524154500.xml.gz old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/FDD-LTE_MRO_ZTE_OMC1_635953_20170522204500.zip b/demo/ScalaDemo/src/main/resources/FDD-LTE_MRO_ZTE_OMC1_635953_20170522204500.zip old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ERICSSON_OMC1_335110_20180403101500.zip b/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ERICSSON_OMC1_335110_20180403101500.zip old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ERICSSON_OMC1_335112_20180403101500.xml.zip b/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ERICSSON_OMC1_335112_20180403101500.xml.zip old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_HUAWEI_661467_20170524154500.xml b/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_HUAWEI_661467_20170524154500.xml old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ZTE_OMC1_637784_20170522204500.zip b/demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ZTE_OMC1_637784_20170522204500.zip old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/FixExample.xml b/demo/ScalaDemo/src/main/resources/FixExample.xml old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/HW_HN_OMC1-mr-134.175.57.16-20170921043000-20170921044500-20170921051502-001.tar.gz b/demo/ScalaDemo/src/main/resources/HW_HN_OMC1-mr-134.175.57.16-20170921043000-20170921044500-20170921051502-001.tar.gz old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/log4j2.properties b/demo/ScalaDemo/src/main/resources/log4j2.properties new file mode 100755 index 00000000..800c6913 --- /dev/null +++ b/demo/ScalaDemo/src/main/resources/log4j2.properties @@ -0,0 +1,43 @@ +# Console logger +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n +appender.console.filter.threshold.type=ThresholdFilter +appender.console.filter.threshold.level=DEBUG +# Rolling file logger (Common) +appender.fileLogger.type=RollingFile +appender.fileLogger.name=fileLogger +#appender.fileLogger.fileName=${sys:log4j.log.dir}/scala-demo.log +#appender.fileLogger.filePattern=${sys:log4j.log.dir}/scala-demo.log.%i +appender.fileLogger.fileName=demo/ScalaDemo/log/scala-demo.log +appender.fileLogger.filePattern=demo/ScalaDemo/log/scala-demo.log.%i +appender.fileLogger.layout.type=PatternLayout +appender.fileLogger.layout.pattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n 
+appender.fileLogger.filter.threshold.type=ThresholdFilter +appender.fileLogger.filter.threshold.level=debug +appender.fileLogger.policies.type=Policies +appender.fileLogger.policies.size.type=SizeBasedTriggeringPolicy +appender.fileLogger.policies.size.size=10MB +appender.fileLogger.strategy.type=DefaultRolloverStrategy +appender.fileLogger.strategy.max=5 +## specific class logger +appender.specificLogger.type=RollingFile +appender.specificLogger.name=specificLogger +appender.specificLogger.fileName=demo/ScalaDemo/log/scala-demo-specific-class.log +appender.specificLogger.filePattern=demo/ScalaDemo/log/scala-demo-specific-class.log.%i +appender.specificLogger.layout.type=PatternLayout +appender.specificLogger.layout.pattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n +appender.specificLogger.filter.threshold.type=ThresholdFilter +appender.specificLogger.filter.threshold.level=debug +appender.specificLogger.policies.type=Policies +appender.specificLogger.policies.size.type=SizeBasedTriggeringPolicy +appender.specificLogger.policies.size.size=10MB +appender.specificLogger.strategy.type=DefaultRolloverStrategy +appender.specificLogger.strategy.max=5 +# configure logger +rootLogger=INFO,STDOUT,fileLogger +logger.gson_demo=INFO,specificLogger +logger.gson_demo.name=com.wallace.demo.app.GsonDemo +logger.gson_demo.additivity=false + diff --git a/demo/ScalaDemo/src/main/resources/logback.xml b/demo/ScalaDemo/src/main/resources/logback.xml deleted file mode 100644 index 385bbd0f..00000000 --- a/demo/ScalaDemo/src/main/resources/logback.xml +++ /dev/null @@ -1,124 +0,0 @@ - - - - - - - - - - - - - %d{HH:mm:ss.SSS} |-%-5level in%replace(%caller{1}){'\t|Caller.{1}0|\r\n|at\s', ''} - %msg%n - - - - - ${log_dir}/CodePrototypesDemo.warn.log - - WARN - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.warn.%i.log.gz - ${maxHistory} - 10GB - - 128MB - - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - ${log_dir}/CodePrototypesDemo.info.log - - INFO - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.info.%i.log.gz - ${maxHistory} - 10GB - - 128MB - - - - - %d{yyyy-MM-dd HH:mm:ss.SSS}-4relative [%thread] %-5level %logger{35} - %msg%n - UTF-8 - - - - - ${log_dir}/CodePrototypesDemo.debug.log - - DEBUG - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.debug.%i.log.gz - ${maxHistory} - 10GB - - 128MB - - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - ${log_dir}/CodePrototypesDemo.error.log - - ERROR - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.error.%i.log.gz - ${maxHistory} - 10GB - - 128MB - - - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - - - - - - - - \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/resources/test.conf b/demo/ScalaDemo/src/main/resources/test.conf old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/test.zip b/demo/ScalaDemo/src/main/resources/test.zip old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/testingData.csv b/demo/ScalaDemo/src/main/resources/testingData.csv old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/resources/text.csv.gz b/demo/ScalaDemo/src/main/resources/text.csv.gz old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/Boot.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/Boot.scala old mode 100644 new mode 100755 index 
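With the log4j2.properties above, rootLogger sends everything at INFO and above to the console and the rolling file, while the logger named com.wallace.demo.app.GsonDemo goes only to specificLogger because its additivity is false. A minimal sketch of how that configuration is picked up through SLF4J; it assumes an SLF4J-to-Log4j2 binding such as log4j-slf4j-impl is on the classpath.

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class LoggerWiringSketch {
    // This logger name matches logger.gson_demo.name above, so its output goes only to
    // scala-demo-specific-class.log; any other logger follows rootLogger.
    private static final Logger specific = LoggerFactory.getLogger("com.wallace.demo.app.GsonDemo");
    private static final Logger general = LoggerFactory.getLogger(LoggerWiringSketch.class);

    public static void main(String[] args) {
        specific.info("routed to the specificLogger appender only");
        general.info("routed to STDOUT and fileLogger");
    }
}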
1c54d60d..fe38fa88 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/Boot.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/Boot.scala @@ -22,22 +22,22 @@ object Boot extends LogSupport { var file: Option[File] = None try { file = Some(new File("./test.csv")) - log.info(file.get.getPath) + logger.info(file.get.getPath) } catch { case NonFatal(e) => - log.error(s"Catch Non-Fatal Exception: ${e.getMessage}.") + logger.error(s"Catch Non-Fatal Exception: ${e.getMessage}.") } finally { if (file.isDefined) { file.get.delete() - log.info("delete file.") + logger.info("delete file.") } } - log.info(s"${func2(4)}") - log.info("End.") + logger.info(s"${func2(4)}") + logger.info("End.") } def func1(): Unit = { - log.info(s"${ManagementFactory.getRuntimeMXBean.getName}") + logger.info(s"${ManagementFactory.getRuntimeMXBean.getName}") try { util.Properties.setProp("scala.time", "true") var a = 1 @@ -47,21 +47,21 @@ object Boot extends LogSupport { breakable { if (a.equals(b)) { a += 1 - log.info("Testing @the first place.") + logger.info("Testing @the first place.") break() } else { a += 1 - log.info("Testing @the second place.") + logger.info("Testing @the second place.") } - log.info("Testing @the third place.") + logger.info("Testing @the third place.") } - if (a == 5) break() else log.info("Testing @the fourth place.") + if (a == 5) break() else logger.info("Testing @the fourth place.") } } - log.info("Testing @fifth place.") + logger.info("Testing @fifth place.") } catch { case NonFatal(e) => - log.error(s"Catch Non-Fatal Exception: ${e.getMessage}.") + logger.error(s"Catch Non-Fatal Exception: ${e.getMessage}.") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/GsonDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/GsonDemo.scala old mode 100644 new mode 100755 index a3689470..1f8700f0 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/GsonDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/GsonDemo.scala @@ -2,6 +2,7 @@ package com.wallace.demo.app import com.google.gson._ import com.wallace.demo.app.common.LogSupport +import org.slf4j.{Logger, LoggerFactory} import java.lang.{Integer => JInt, Long => JLong} import java.math.BigDecimal @@ -11,29 +12,21 @@ import java.math.BigDecimal * Date: 2022/5/22 09:20 * Description: Gson Demo */ -object GsonDemo extends LogSupport { +class GsonDemo extends LogSupport { private val jsonStr: String = """{"key1":[{"key1_1":1000,"key1_2":11111111111111111},{"key1_1":2000,"key1_2":-123457}],"key2":[1,2,3,4,5,6,-1],"key3":-30,"key4":{"key4_1":-2,"key4_2":"test"}}""" - private val jsonParser: JsonParser = new JsonParser() + private final val gson: Gson = new GsonBuilder().setPrettyPrinting().create() - def main(args: Array[String]): Unit = { - val jsonObj: JsonElement = jsonParser.parse(jsonStr) - log.info(jsonObj.getAsJsonObject.toString) - - val needFixNode: Map[String, String] = Map("key4.key4_1" -> "UINT32", - "key1.key1_2" -> "UINT64", - "key2" -> "UINT32", - "key3" -> "UINT64", - "not-existed-key" -> "UINT64") - adjustJsonNode(jsonObj, needFixNode) - - log.info(jsonObj.getAsJsonObject.toString) + def formatJson(jsonStr: String): String = { + gson.toJson(JsonParser.parseString(jsonStr)) } def adjustJsonNode(jsonElement: JsonElement, jsonNodes: Map[String, String]): Unit = { if (jsonNodes.nonEmpty) { jsonNodes.foreach { case (jsonNodePath, dataType) => - jsonNodePath.split("\\.").toList match { + val pathList: Array[String] = jsonNodePath.split("\\.") + assert(pathList.nonEmpty, s"invalid 
Json path: $jsonNodePath") + pathList.toList match { case parent :: Nil => jsonElement match { case _: JsonPrimitive | _: JsonArray | _: JsonNull => // all of these cases should be ignored. @@ -92,4 +85,26 @@ object GsonDemo extends LogSupport { throw new UnsupportedOperationException(s"unsupported operation for dataType: $dataType.") } } + + def run(): Unit = { + val jsonObj: JsonElement = JsonParser.parseString(jsonStr) + logger.info(jsonObj.getAsJsonObject.toString) + + val needFixNode: Map[String, String] = Map("key4.key4_1" -> "UINT32", + "key1.key1_2" -> "UINT64", + "key2" -> "UINT32", + "key3" -> "UINT64", + "not-existed-key" -> "UINT64") + adjustJsonNode(jsonObj, needFixNode) + + logger.info(jsonObj.getAsJsonObject.toString) + logger.info(s"formatted Json -> ${formatJson(jsonObj.toString)}") + } +} + +object GsonDemo { + def main(args: Array[String]): Unit = { + val gsonDemo = new GsonDemo + gsonDemo.run() + } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Actor1.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Actor1.scala old mode 100644 new mode 100755 index ce00f71c..65ab0b12 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Actor1.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Actor1.scala @@ -8,7 +8,7 @@ import com.wallace.demo.app.common.LogSupport */ class Actor1 extends Actor with LogSupport { override def receive: Receive = { - case "test" => log.info("received test.") - case _ => log.info("received unknown message.") + case "test" => logger.info("received test.") + case _ => logger.info("received unknown message.") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/AkkaActorDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/AkkaActorDemo.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Demo1.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Demo1.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Greeter.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Greeter.scala old mode 100644 new mode 100755 index 50b5d0df..f35b0bff --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Greeter.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/Greeter.scala @@ -17,7 +17,7 @@ object Greeter { class Greeter extends Actor with LogSupport { def receive: PartialFunction[Any, Unit] = { case Greeter.Greet => - log.info("[Greeter] Hello World!") + logger.info("[Greeter] Hello World!") sender() ! Greeter.Done } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/PiAkkaDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/PiAkkaDemo.scala old mode 100644 new mode 100755 index e3fa68b5..ad905d65 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/PiAkkaDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/PiAkkaDemo.scala @@ -68,7 +68,7 @@ object PiAkkaDemo extends LogSupport { def main(args: Array[String]): Unit = { if (args.length < 3) { - log.error("[PiAkkaDemo] Usage: Pi ") + logger.error("[PiAkkaDemo] Usage: Pi ") System.exit(1) } val system: ActorSystem = ActorSystem("PiSystem") @@ -78,8 +78,8 @@ object PiAkkaDemo extends LogSupport { val future: Future[Any] = master ? 
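GsonDemo above now parses with the static JsonParser.parseString and pretty-prints through a GsonBuilder; the older instance-based new JsonParser().parse(...) is deprecated in recent Gson releases. An equivalent stand-alone Java sketch; the JSON literal is a shortened, illustrative fragment of the one in GsonDemo.

import com.google.gson.Gson;
import com.google.gson.GsonBuilder;
import com.google.gson.JsonElement;
import com.google.gson.JsonParser;

public class GsonPrettyPrint {
    public static void main(String[] args) {
        // Parse with the static parseString, then re-serialize with a pretty-printing Gson.
        Gson gson = new GsonBuilder().setPrettyPrinting().create();
        JsonElement element = JsonParser.parseString("{\"key3\":-30,\"key4\":{\"key4_2\":\"test\"}}");
        System.out.println(gson.toJson(element));
    }
}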
Calculate val approximationPi = Await.result(future, timeout.duration) .asInstanceOf[PiApproximation] - log.info("[PiAkkaDemo] Pi: \t" + approximationPi.pi) - log.info("[PiAkkaDemo] Spend: \t" + approximationPi.duration) + logger.info("[PiAkkaDemo] Pi: \t" + approximationPi.pi) + logger.info("[PiAkkaDemo] Spend: \t" + approximationPi.duration) system.stop(master) } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Master.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Master.scala old mode 100644 new mode 100755 index 555419f1..2e4a1b6e --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Master.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Master.scala @@ -12,19 +12,19 @@ import java.util import java.util.Locale import java.util.concurrent.atomic.AtomicLong import java.util.concurrent.{ConcurrentHashMap, LinkedBlockingQueue} - import akka.actor.{Actor, ActorRef, ActorSelection, ActorSystem, DeadLetter, Props, UnhandledMessage} import com.typesafe.config.{Config, ConfigFactory} import sun.misc.{Signal, SignalHandler} +import java.io.{BufferedReader, InputStream, InputStreamReader} import scala.collection.JavaConverters._ import scala.collection.mutable import scala.concurrent.duration._ import scala.language.postfixOps /** - * Created by 10192057 on 2018/6/20 0020. - */ + * Created by 10192057 on 2018/6/20 0020. + */ class Master(val host: String, val port: Int) extends Actor { val id2WorkInfo = new mutable.HashMap[String, WorkerInfo]() val workers = new mutable.HashSet[WorkerInfo]() diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Message.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Message.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Worker.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/Worker.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/WorkerInfo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/actordemo/master_worker/WorkerInfo.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/AlgDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/AlgDemo.scala old mode 100644 new mode 100755 index b61981c2..60832666 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/AlgDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/AlgDemo.scala @@ -84,7 +84,7 @@ object AlgDemo extends LogSupport { def main(args: Array[String]): Unit = { //TODO 1 TriangleCount val tCnt: Int = triangleCount(Array(3, 4, 6, 7, 8, 9)) - log.info(s"Triangle Count: $tCnt") + logger.info(s"Triangle Count: $tCnt") //TODO 2 Two Sum: (1, 3) twoSum(Array(11, 2, 1, 7, 15), 9) @@ -103,6 +103,12 @@ object AlgDemo extends LogSupport { // TODO 6 Find median in two sorted Arrays // val kMinValue: Double = findMedianSortedArrays(Array(1, 3), Array(2)) // println(s"Median Value: $kMinValue") + + // maxContinuousPositive + // int(120) => binary(1111000),return 4 + // int(101) => binary(1100101),return 2 + maxContinuousPositive(120) + maxContinuousPositive(101) } def twoSum(d: Array[Int], target: Int): Unit = { @@ -211,10 +217,10 @@ object AlgDemo extends LogSupport { val tgtLen: Int = 
tgtArr.length var i: Int = 0 var res: Int = -1 - val firstTgt: Char = tgtArr.map{case (ch, _) => ch}.head + val firstTgt: Char = tgtArr.map { case (ch, _) => ch }.head while (i <= (srcLen - tgtLen)) { val firstSrc: Char = srcArr(i) - if (firstTgt == firstSrc && tgtArr.forall{case (ch, idx) => ch == srcArr(i + idx)}) { + if (firstTgt == firstSrc && tgtArr.forall { case (ch, idx) => ch == srcArr(i + idx) }) { res = i i = srcLen } else { @@ -296,4 +302,34 @@ object AlgDemo extends LogSupport { } false } + + def maxContinuousPositive[T <: AnyVal](value: T): Int = { + require(value match { + case _: Long | _: Int | _: Char | _: Short | _: Byte => true + case _ => false + }, s"not support data-type: ${value.getClass.getSimpleName}") + var tmp: Long = value.toString.toLong + if (tmp <= 0) { + 0 + } else { + val dp: ArrayBuffer[Int] = new ArrayBuffer[Int]() + var index: Int = 0 + dp.append((tmp % 2).toInt) + tmp /= 2 + index += 1 + while (tmp != 0) { + val isPositive: Boolean = ((tmp % 2) & 1) == 1 + + if (isPositive) { + dp.append(dp(index - 1) + 1) + } else { + dp.append(0) + } + tmp /= 2 + index += 1 + println(dp.mkString("DP => ArrayBuffer(", ", ", ")")) + } + dp.max + } + } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/BitMap.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/BitMap.scala deleted file mode 100644 index 939af108..00000000 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/BitMap.scala +++ /dev/null @@ -1,50 +0,0 @@ -package com.wallace.demo.app.algorithmdemo - -/** - * Created by wallace on 2019/10/28. - */ -class BitMap(size: Int) extends Cloneable with Serializable { - private final val SIZE: Int = size - private val BM: Array[Int] = new Array[Int](SIZE / 32 + 1) - - def insert(e: Int): Unit = { - // e / 32 为十进制在数组BM中的下标 - val index: Int = e >> 5 - // e % 32 为十进制在数据BM(index)中的下标 - BM(index) |= 1 << (e & 0x1F) - } - - def exists(a: Int): Boolean = getValue(a) == 1 - - def getValue(e: Int): Int = BM(e >> 5) >> (e & 0x1F) & 1 - - def show(num: Int): Unit = { - (0 until num).foreach { - i => - val tmpVal: Array[Int] = new Array[Int](32) - var temp: Int = BM(i) - tmpVal.indices.foreach { - j => - tmpVal(j) |= (temp & 1) - temp >>= 1 - } - println("BM[" + i + "] = [" + tmpVal.mkString(", ") + "]") - } - } -} - -object BitMap { - def main(args: Array[String]): Unit = { - val bMap: BitMap = new BitMap(320) - Array(1, 2, 3, 5, 8, 30, 32, 64, 56, 159, 120, 21, 17, 35, 45, 320).foreach(bMap.insert) - Array(2, 3, 5, 8).foreach(bMap.insert) - - println(s">>> Key: 159, Value: ${bMap.getValue(159)}.") - if (bMap.exists(320)) { - println("Temp: 320 has already existed.") - } - bMap.show(6) - } -} - - diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/bitmap/BitFilter.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/bitmap/BitFilter.scala new file mode 100755 index 00000000..614fca62 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/bitmap/BitFilter.scala @@ -0,0 +1,105 @@ +package com.wallace.demo.app.algorithmdemo.datastructure.bitmap + +import com.wallace.demo.app.common.LogSupport + +import java.lang.Math._ +import scala.collection.mutable.ArrayBuffer +import scala.util.hashing.MurmurHash3 + +/** + * Author: biyu.huang + * Date: 2023/6/13 14:48 + * Description: + */ +class BitFilter[T <: AnyVal](n: Long, p: Double = 0.0001) { + private final val RANDOM_NUM: Int = + ((random * 
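maxContinuousPositive above walks the binary representation with a small DP buffer and returns the longest run of consecutive 1 bits (120 = 1111000 -> 4, 101 = 1100101 -> 2). The same answer can be computed with a shift-and-AND trick, sketched below for comparison; names are illustrative.

public class MaxConsecutiveOnes {
    // x & (x << 1) keeps only bits that have a set neighbour on the right, so the number of
    // iterations until x reaches 0 equals the length of the longest run of 1 bits.
    static int longestRunOfOnes(long value) {
        long x = value;
        int count = 0;
        while (x != 0) {
            x &= (x << 1);
            count++;
        }
        return count;
    }

    public static void main(String[] args) {
        System.out.println(longestRunOfOnes(120)); // 1111000 -> 4
        System.out.println(longestRunOfOnes(101)); // 1100101 -> 2
    }
}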
System.currentTimeMillis).longValue >> 32 & 0xffff).toInt + // The size of bitmap: m + private final val m: Long = ceil(-1.0 * n * log(p * 0.9999) / pow(log(2), 2)).longValue + assert(m / 64 <= Int.MaxValue, s"too many elements: $n") + private final val bitMap: BitMap = new BitMap(m) + // The number of Hash method: k + private final val k: Int = ceil(0.7 * m / n).longValue.toInt + private final val seeds: Array[Long] = generatePrimeArray(k, RANDOM_NUM) + // False Positive Rate: fpr + private final val fpr: BigDecimal = BigDecimal.valueOf(pow(1 - pow(E, -n * k * 1.0 / m), k)) + + /** + * @return size of bitmap in bytes + */ + def getSize: Double = this.m / 8.0 + + /** + * @return number of Hash method + */ + def getK: Long = this.k + + /** + * @return the actual False Positive Rate + */ + def getFPR: BigDecimal = this.fpr + + def hash(key: T, seed: Int): Long = { + MurmurHash3.bytesHash(s"$key".getBytes, seed) & (m - 1) + } + + def add(key: T): Unit = { + seeds.foreach { + seed => + bitMap.insert(hash(key, seed.toInt)) + } + } + + /** + * @param key bit filter key + * @return False when key doesn't exists, True means key might exists + */ + def exists(key: T): Boolean = { + seeds.forall { + seed => + bitMap.exists(hash(key, seed.toInt)) + } + } + + def generatePrimeArray(k: Int, start: Int = RANDOM_NUM): Array[Long] = { + def isPrime(number: Long): Boolean = { + if (number <= 3) return number > 1 + var i: Long = 2L + while (i <= sqrt(number)) { + if (number % i == 0) return false + i += 1 + } + true + } + + val res: ArrayBuffer[Long] = new ArrayBuffer[Long]() + var number: Long = start + while (res.size < k) { + if (isPrime(number)) res.append(number) + number += 1 + } + res.result().toArray + } + + def show(num: Int): Unit = this.bitMap.show(num) + + def bitCount: Long = this.bitMap.bitCount +} + +object BitFilter extends LogSupport { + def main(args: Array[String]): Unit = { + val bitFilter: BitFilter[Int] = new BitFilter[Int](100) + Array(1, 2, 3, 5, 8, 30, 32, 64, 56, 159, 120, 21, 17, 35, 45, 320).foreach(bitFilter.add) + + if (bitFilter.exists(320)) { + logger.info("Temp: 320 exists") + } + + if (!bitFilter.exists(321)) { + logger.info("Temp: 321 doesn't exist") + } + bitFilter.show(100) + logger.info(s"{bitCount=${bitFilter.bitCount}, k=${bitFilter.getK}," + + s" size=${bitFilter.getSize / 1024}KB, FPR=${bitFilter.getFPR}}") + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/bitmap/BitMap.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/bitmap/BitMap.scala new file mode 100755 index 00000000..88dc60e7 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/bitmap/BitMap.scala @@ -0,0 +1,59 @@ +package com.wallace.demo.app.algorithmdemo.datastructure.bitmap + +import com.wallace.demo.app.common.LogSupport + +/** + * Created by wallace on 2019/10/28. 
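BitFilter above sizes its bitmap with the standard Bloom-filter formulas: m = ceil(-n ln p / (ln 2)^2) bits, k ≈ 0.7 m / n hash functions (0.7 approximates ln 2), and the achieved false-positive rate is (1 - e^(-nk/m))^k. Below is a worked run of those formulas for the defaults used in BitFilter's main (n = 100, p = 0.0001), ignoring the 0.9999 adjustment the class applies to p.

public class BloomSizing {
    public static void main(String[] args) {
        long n = 100;
        double p = 0.0001;
        long m = (long) Math.ceil(-1.0 * n * Math.log(p) / Math.pow(Math.log(2), 2)); // bits in the bitmap
        int k = (int) Math.ceil(0.7 * m / n);                                          // number of hash functions
        double fpr = Math.pow(1 - Math.exp(-1.0 * n * k / m), k);                      // achieved false-positive rate
        // Prints roughly: m = 1918 bits (~239 bytes), k = 14, fpr ≈ 1.0e-4
        System.out.println("m = " + m + " bits (~" + (m / 8) + " bytes), k = " + k + ", fpr = " + fpr);
    }
}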
+ */ +class BitMap(size: Long) extends Cloneable with Serializable { + private final val SIZE: Int = (size / 64 + 1).toInt + assert(SIZE > 0, s"too large size: $size") + private val BM: Array[Long] = new Array[Long](SIZE) + + def insert(e: Long): Unit = { + // e / 32 为十进制在数组BM中的下标 + val index: Int = (e >> 6).toInt + assert(index >= 0, s"$index($e >> 6) is greater than $SIZE") + // e % 32 为十进制在数据BM(index)中的下标 + BM(index) |= 1L << (e & 0x3F) + } + + def exists(a: Long): Boolean = getValue(a) == 1L + + def getValue(e: Long): Long = BM((e >> 6).toInt) >> (e & 0x3F) & 1 + + def show(num: Int): Unit = { + val end: Int = Math.min(num, BM.length) + val formatStr: String = s"%0${end.toString.length}d" + (0 until end).foreach { + i => + val tmpVal: Array[Long] = new Array[Long](64) + var temp: Long = BM(i) + tmpVal.indices.foreach { + j => + tmpVal(j) |= (temp & 1) + temp >>= 1 + } + println("BM[" + formatStr.format(i) + "] = [" + tmpVal.mkString(", ") + "]") + } + } + + def bitCount: Long = this.BM.map(x => java.lang.Long.bitCount(x).toLong).sum +} + +object BitMap extends LogSupport { + def main(args: Array[String]): Unit = { + val bMap: BitMap = new BitMap(320L) + Array(1, 2, 3, 5, 8, 30, 32, 64, 56, 159, 120, 21, 17, 35, 45, 320) + .foreach(x => bMap.insert(x.toLong)) + Array(2, 3, 5, 8).foreach(x => bMap.insert(x.toLong)) + + logger.info(s">>> Key: 159, Value: ${bMap.getValue(159L)}.") + if (bMap.exists(320L)) { + logger.info("Temp: 320 exists") + } + bMap.show(6) + } +} + + diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/linkedlist/LinkedList.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/linkedlist/LinkedList.scala new file mode 100755 index 00000000..56fd495a --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/linkedlist/LinkedList.scala @@ -0,0 +1,151 @@ +package com.wallace.demo.app.algorithmdemo.datastructure.linkedlist + +import com.wallace.demo.app.common.LogSupport + +/** + * Author: biyu.huang + * Date: 2023/5/25 11:02 + * Description: + */ +class LinkedList[T]() { + @transient private var first: Node[T] = _ + @transient private var last: Node[T] = _ + @transient private var size: Int = 0 + @transient private var modCount: Int = 0 + + class Node[U](var item: U, var preNode: Node[U], var nextNode: Node[U]) { + override def toString: String = { + val preNodeToStr = if (preNode == null) "null" else preNode.item + val nextNodeToStr = if (nextNode == null) "null" else nextNode.item + s"Node(item=$item, preNode=$preNodeToStr, nextNode=$nextNodeToStr)" + } + } + + def getFirst: Node[T] = { + this.first + } + + def getLast: Node[T] = { + this.last + } + + def addNode(e: T): Boolean = { + val l: Node[T] = this.last + val newNode: Node[T] = new Node(e, l, null) + this.last = newNode + if (l == null) { + this.first = newNode + } else { + l.nextNode = newNode + } + this.size += 1 + this.modCount += 1 + true + } + + def removeNode(e: T): Boolean = { + var node: Node[T] = this.getFirst + var flag: Boolean = true + while (node != null && flag) { + if (node.item == e) { + val next: Node[T] = node.nextNode + val pre: Node[T] = node.preNode + if (pre == null) { + this.first = next + } else { + pre.nextNode = next + node.preNode = null + } + + if (next == null) { + this.last = pre + } else { + next.preNode = pre + node.nextNode = null + } + node.item = null.asInstanceOf[T] + this.size -= 1 + this.modCount -= 1 + flag = false + } else { + node = node.nextNode + } + } + !flag 
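The rewritten BitMap above is backed by longs, so a key e lands in word e / 64 (e >> 6) at bit e % 64 (e & 0x3F); the comments carried over from the deleted version still mention 32 because that version used an int array. A tiny sketch of the indexing, with the value chosen to match the 159 example in BitMap.main.

public class BitIndexSketch {
    public static void main(String[] args) {
        long e = 159L;
        int wordIndex = (int) (e >> 6);   // 159 / 64 = 2
        int bitIndex = (int) (e & 0x3F);  // 159 % 64 = 31
        long mask = 1L << bitIndex;
        System.out.println("word " + wordIndex + ", bit " + bitIndex + ", mask " + Long.toBinaryString(mask));
    }
}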
+ } + + def getNode(index: Int): Node[T] = { + if (index < (this.size >> 1)) { + var node: Node[T] = this.getFirst + var j: Int = 0 + while (j < index) { + node = node.nextNode + j += 1 + } + node + } else { + var node: Node[T] = this.getLast + var i: Int = this.size - 1 + while (i > index) { + node = node.preNode + i -= 1 + } + node + } + } + + def addAll(elements: Iterable[T]): Boolean = addAll(this.size, elements) + + def addAll(index: Int, elements: Iterable[T]): Boolean = { + if (elements.isEmpty) { + false + } else { + var pre: Node[T] = null + var cur: Node[T] = null + if (index == size) { + pre = this.getLast + } else { + cur = getNode(index) + pre = cur.preNode + } + elements.foreach { + e => + val newNode = new Node(e, pre, null) + if (pre == null) { + this.first = newNode + } else { + pre.nextNode = newNode + } + pre = newNode + } + if (cur == null) { + this.last = pre + } else { + pre.nextNode = cur + cur.preNode = pre + } + size += elements.size + modCount += 1 + true + } + } + + //TODO scan nodes by previous order +} + +object Boot extends LogSupport { + def main(args: Array[String]): Unit = { + val root: LinkedList[String] = new LinkedList[String]() + root.addAll(Array("node_001", "node_002", "node_003", "node_004", "node_005")) + // root.addNode("node_001") + // root.addNode("node_002") + // root.addNode("node_003") + // root.addNode("node_004") + // root.addNode("node_005") + logger.info(s"first => ${root.getFirst}, last => ${root.getLast}") + root.removeNode("node_004") + logger.info(s"first => ${root.getFirst}, last => ${root.getLast}") + + logger.info(root.getNode(2).toString) + } +} \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/tree/TreeNode.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/tree/TreeNode.scala new file mode 100755 index 00000000..1fe4b6c8 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/datastructure/tree/TreeNode.scala @@ -0,0 +1,203 @@ +package com.wallace.demo.app.algorithmdemo.datastructure.tree + +import com.wallace.demo.app.common.LogSupport + +import scala.collection.mutable.ArrayBuffer +import scala.util.hashing.MurmurHash3 + +/** + * Author: biyu.huang + * Date: 2023/6/2 16:57 + * Description: + */ +// scalastyle:off +abstract class TreeNode[T, BaseType <: TreeNode[T, BaseType]] { + // scalastyle:on + self: BaseType => + private var parentNode: Option[BaseType] = None + + private var deep: Int = 0 + + private var isEnd: Boolean = false + + private val children: ArrayBuffer[BaseType] = new ArrayBuffer[BaseType]() + + def getContent: T + + def setEnd(end: Boolean): Unit = { + this.isEnd = end + } + + def getEnd: Boolean = this.isEnd + + def setDeep(deep: Int): Unit = { + this.deep = deep + } + + def getDeep: Int = this.deep + + def addChild(child: BaseType): Boolean = { + child.setParentNode(this) + child.setDeep(this.getDeep + 1) + this.children.append(child) + true + } + + def removeChild(child: BaseType): Boolean = { + if (this.children.contains(child)) { + this.children.-=(child) + true + } else { + false + } + } + + + def setChildren(children: ArrayBuffer[BaseType]): Boolean = { + children.forall { + child => + this.addChild(child) + } + } + + def findNode(n: BaseType): Option[BaseType] = this.getChildren.find(x => x.equals(n)) + + def getChildren: ArrayBuffer[BaseType] = this.children + + def setParentNode(parent: BaseType): Unit = { + this.parentNode = Option(parent) + } + + def getParentNode: 
Option[BaseType] = this.parentNode + + override def toString: String = { + s"TreeNode{isEnd=${this.getEnd}, deep=${this.getDeep}, content=${this.getContent}, " + + s"children=[${this.getChildren.map(x => s"${x.toString}").mkString(",")}]}" + } + + def treeString(depth: Int, prefix: String = "+- ", isLastNode: Boolean = false): String = { + val currentPrefix: String = if (depth > 0) { + val sep = if (this.getParentNode.isDefined && this.getParentNode.get.getChildren.size <= 1) { + " " + } else { + if (isLastNode) " " else "|" + } + prefix.replace("+- ", "") + sep + (" " * (depth + 1)) + "+- " + } else { + prefix + } + val nodeNum: Int = this.getChildren.size + val childStr: String = this.getChildren.zipWithIndex.map { + case (x, index) => + val isLastNode = if (nodeNum > 1 && index == (nodeNum - 1)) true else false + s"\n$currentPrefix${x.treeString(depth + 1, currentPrefix, isLastNode)}" + }.mkString(",") + s"TreeNode{isEnd=${this.getEnd}, deep=${this.getDeep}, content=${this.getContent}, " + + s"children=[$childStr]}" + } +} + +class TrieTree { + private val root: TrieTreeNode = new TrieTreeNode(None) + + + override def toString: String = this.root.treeString(0) + + def addWord(word: String): Unit = { + var curNode = root + word.toCharArray.foreach { + ch => + val node = new TrieTreeNode(Option(ch)) + if (curNode.findNode(node).isDefined) { + curNode = curNode.findNode(node).get + } else { + curNode.addChild(node) + curNode = node + } + } + curNode.setEnd(true) + } + + def hasWord(word: String): Boolean = { + var curNode: TrieTreeNode = root + word.toCharArray.foreach { + ch => + val node: TrieTreeNode = new TrieTreeNode(Option(ch)) + if (curNode.findNode(node).isDefined) { + curNode = curNode.findNode(node).get + } else { + return false + } + } + curNode.getEnd + } + + def removeWord(word: String): Boolean = { + if (hasWord(word)) { + var curNode: TrieTreeNode = root + word.toCharArray.foreach { + ch => + val node: TrieTreeNode = new TrieTreeNode(Option(ch)) + curNode = curNode.findNode(node).get + } + curNode.setEnd(false) + if (curNode.getChildren.isEmpty) { + // no children + var parentNode: Option[TrieTreeNode] = curNode.getParentNode + while (parentNode.isDefined) { + if (parentNode.get.getChildren.size == 1 && !parentNode.get.getEnd) { + parentNode.get.removeChild(curNode) + curNode = parentNode.get + parentNode = curNode.getParentNode + } else { + parentNode.get.removeChild(curNode) + return true + } + } + } + true + } else { + false + } + } + + def visitNode: String = ??? + + def getCommonPrefix: String = ??? 
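+ // note: TrieTreeNode equality below compares only the character content (depth and children are ignored), which is what findNode relies on when walking a word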
+ + class TrieTreeNode(content: Option[Char]) extends TreeNode[Char, TrieTreeNode] { + override def hashCode(): Int = { + MurmurHash3.arrayHash(Array(content.getOrElse(""), this.getDeep)) + } + + override def equals(obj: Any): Boolean = { + if (obj == null) { + false + } else { + this.getContent == obj.asInstanceOf[TrieTreeNode].getContent + } + } + + override def getContent: Char = { + this.content.getOrElse(null.asInstanceOf[Char]) + } + } +} + +object TrieTree extends LogSupport { + def main(args: Array[String]): Unit = { + val data: Array[String] = Array("flow", "flower", "florida", "flight", "world", "worry") + val trieTree: TrieTree = new TrieTree() + data.foreach { + w => + trieTree.addWord(w) + } + logger.info(trieTree.toString) + + logger.info("word isn't exists: " + trieTree.hasWord("word").toString) + logger.info("florida is exists: " + trieTree.hasWord("florida").toString) + + trieTree.removeWord("florida") + logger.info(trieTree.toString) + } +} \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/ActorSystemSupport.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/ActorSystemSupport.scala new file mode 100755 index 00000000..9b283b97 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/ActorSystemSupport.scala @@ -0,0 +1,55 @@ +package com.wallace.demo.app.algorithmdemo.raft + +import akka.actor.ActorSystem +import com.typesafe.config.ConfigFactory + +import java.util.concurrent.ConcurrentHashMap + +/** + * Author: biyu.huang + * Date: 2023/1/13 18:28 + * Description: + */ +trait ActorSystemSupport { + protected val builder: ActorSystemBuilder = new ActorSystemBuilder() + + class ActorSystemBuilder() { + private final val actorConfMap = new ConcurrentHashMap[String, String]() + + private var HOST: Option[String] = None + private var PORT: Option[Int] = None + private var ACTOR_SYSTEM_NAME: Option[String] = None + + def setHost(host: String): ActorSystemBuilder = { + this.HOST = Option(host) + this + } + + def setPort(port: Int): ActorSystemBuilder = { + this.PORT = Option(port) + this + } + + def setName(name: String): ActorSystemBuilder = { + this.ACTOR_SYSTEM_NAME = Option(name) + this + } + + def setOption(key: String, value: String): ActorSystemBuilder = { + this.actorConfMap.put(key, value) + this + } + + def build(): ActorSystem = { + require(this.HOST.isDefined, "please set hostname for ActorSystem") + require(this.PORT.isDefined, "please set port for ActorSystem") + require(this.ACTOR_SYSTEM_NAME.isDefined, "please set name for ActorSystem") + this.actorConfMap.put("akka.actor.provider", "akka.remote.RemoteActorRefProvider") + this.actorConfMap.put("akka.remote.netty.tcp.hostname", this.HOST.get) + this.actorConfMap.put("akka.remote.netty.tcp.port", s"${this.PORT.get}") + this.actorConfMap.put("akka.actor.warn-about-java-serializer-usage", "false") + ActorSystem.create(this.ACTOR_SYSTEM_NAME.get, ConfigFactory.parseMap(this.actorConfMap)) + } + } +} + diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/AtomicSupport.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/AtomicSupport.scala new file mode 100755 index 00000000..a432428a --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/AtomicSupport.scala @@ -0,0 +1,19 @@ +package com.wallace.demo.app.algorithmdemo.raft + +import java.util.concurrent.Semaphore + +/** + * Author: biyu.huang + * Date: 
2023/1/18 14:56 + * Description: + */ +trait AtomicSupport { + protected def atomicBlock[R](semaphore: Semaphore)(block: => R): R = { + try { + semaphore.acquire(1) + block + } finally { + semaphore.release(1) + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/Message.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/Message.scala new file mode 100755 index 00000000..ad80f8f3 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/Message.scala @@ -0,0 +1,31 @@ +package com.wallace.demo.app.algorithmdemo.raft + +import com.wallace.demo.app.algorithmdemo.raft.HeartbeatType.HeartbeatType + +/** + * Author: biyu.huang + * Date: 2022/12/23 18:48 + * Description: + */ +sealed trait Message + +case class ElectionMessage(term: Long, nodeID: String, timeout: Int) extends Message + +case class HeartbeatMessage(heartbeatType: HeartbeatType, term: Long, nodeID: String, + currentTimestamp: Long, + metadata: Map[String, Any]) extends Message + +case class ValidVote(term: Long, nodeID: String) extends Message + +case class TimeoutVote(term: Long, nodeID: String) extends Message + +case class Entry[K, V](key: K, value: V) { + def getKey: K = key + + def getValue: V = value +} + +object HeartbeatType extends Enumeration { + type HeartbeatType = Value + val REGULAR, ELECTION, LEADER = Value +} \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/NodeCoordinator.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/NodeCoordinator.scala new file mode 100755 index 00000000..ce49d18e --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/NodeCoordinator.scala @@ -0,0 +1,170 @@ +package com.wallace.demo.app.algorithmdemo.raft + +import akka.actor.{Actor, ActorRef, ActorSelection, ActorSystem, Props, UnhandledMessage} +import com.wallace.demo.app.common.LoopService + +import java.util.concurrent.Semaphore +import java.util.concurrent.atomic.{AtomicBoolean, AtomicLong, AtomicReference} +import java.util.{Properties, UUID} +import scala.collection.mutable +import scala.concurrent.duration.DurationInt +import scala.language.postfixOps +import scala.util.Random + +/** + * Author: biyu.huang + * Date: 2022/12/23 18:24 + * Description: + */ +class NodeCoordinator(sleepMills: Long, name: String, host: String, nodeProps: Properties) + extends LoopService(sleepMills, name) with ActorSystemSupport with AtomicSupport { + private final val ACTOR_SYSTEM: ActorSystem = this.builder + .setHost(host) + .setPort(nodeProps.get("node.actor.port").toString.toInt) + .setName(name) + .build() + private final val ELECTION_TIMEOUT_MILLS: Long = 200L + private final val isLeader: AtomicBoolean = new AtomicBoolean(false) + private final val isFollower: AtomicBoolean = new AtomicBoolean(true) + private final val isCandidate: AtomicBoolean = new AtomicBoolean(false) + private final val currentNodeID = "%s-%s".format(host, math.abs(UUID.randomUUID().hashCode())) + private final val currentTerm: AtomicLong = new AtomicLong(1L) + private final val actorPathMap: Map[String, String] = { + val port = nodeProps.get("node.actor.port") + nodeProps.getProperty("node.actor.list", "").split(",", -1).map(_.trim) + .map { + host => + host -> s"akka.tcp://$name@$host:$port/node/actor" + }.toMap + } + private final val semaphore: Semaphore = new Semaphore(1, true) + private final val cacheLeaderHeartbeat = new 
AtomicReference[Entry[String, Long]]() + + private final val internalActor: ActorRef = + ACTOR_SYSTEM.actorOf(Props(new InternalActor()), "node-internal-actor") + + override def stop(): Unit = { + ACTOR_SYSTEM.stop(internalActor) + ACTOR_SYSTEM.terminate() + super.stop() + } + + override def handleWork(): Unit = { + if (isLeader.get()) logger.info("%s is leader".format(currentNodeID)) + if (isFollower.get()) logger.info("%s is follower".format(currentNodeID)) + if (isCandidate.get()) { + logger.info("%s is candidate".format(currentNodeID)) + val currentTS: Long = System.currentTimeMillis() + logger.info("current timestamp -> %d".format(currentTS)) + var ts = System.currentTimeMillis() + while (ts - currentTS < ELECTION_TIMEOUT_MILLS) { + ts = System.currentTimeMillis() + } + } + } + + def doElection(): Unit = { + if (!isCandidate.get()) isCandidate.set(true) + + } + + private class InternalActor() extends Actor { + val nodeActors: mutable.HashMap[String, ActorSelection] = new mutable.HashMap() + + def getMessage: Message = { + atomicBlock[Message](semaphore) { + if (isLeader.get()) { + HeartbeatMessage(HeartbeatType.LEADER, currentTerm.get(), + currentNodeID, System.currentTimeMillis(), Map.empty) + } else if (isCandidate.get()) { + HeartbeatMessage(HeartbeatType.ELECTION, currentTerm.incrementAndGet(), + currentNodeID, System.currentTimeMillis(), Map.empty) + } else { + HeartbeatMessage(HeartbeatType.REGULAR, currentTerm.incrementAndGet(), + currentNodeID, System.currentTimeMillis(), Map.empty) + } + } + } + + override def preStart(): Unit = { + logger.info("heartbeat-actor-preStart") + actorPathMap.foreach { + case (host, actorPath) => + val nodeActor: ActorSelection = context.actorSelection(actorPath) + nodeActors.put(host, nodeActor) + nodeActor ! ElectionMessage(currentTerm.get(), currentNodeID, 150 + Random.nextInt(150)) + } + import context.dispatcher + context.system.scheduler.schedule(0 millis, 100 millis, new Runnable { + def run(): Unit = { + self ! getMessage + // if (self.isTerminated) { + // throw new RuntimeException("timer active for terminated actor") + // } + } + }) + } + + override def postStop(): Unit = { + logger.info("heartbeat-actor-postStop") + nodeActors.foreach { + case (host, actorSelection) => + logger.info(s"stopping actor selection for $host") + // actorSelection.tell("stopping", sender()) + } + } + + override def receive: Receive = { + case HeartbeatMessage(heartbeatType, term, nodeID, currentTimestamp, metadata) => + // todo + heartbeatType match { + case HeartbeatType.REGULAR => + case HeartbeatType.ELECTION => + case HeartbeatType.LEADER => + if (cacheLeaderHeartbeat.get() == null) { + cacheLeaderHeartbeat.set(Entry(nodeID, currentTimestamp)) + } else { + if (currentTimestamp - cacheLeaderHeartbeat.get().value > 100) { + doElection() + } else { + cacheLeaderHeartbeat.set(Entry(nodeID, currentTimestamp)) + } + } + } + if (metadata.contains("leader") && term >= currentTerm.get()) { + atomicBlock(semaphore) { + currentTerm.set(term) + if (nodeID == currentNodeID) { + isLeader.set(true) + isFollower.set(false) + } else { + isLeader.set(false) + isFollower.set(true) + } + isCandidate.set(false) + } + } + case ElectionMessage(term, nodeID, timeout) => + if (term > currentTerm.get()) { + sender() ! ValidVote(term, currentNodeID) + } else { + Thread.sleep(timeout) + sender() !
TimeoutVote(term, currentNodeID) + } + case ValidVote(term, nodeID) => + case TimeoutVote(term, nodeID) => + case msg: UnhandledMessage => logger.warn(msg.toString) + } + } +} + +object NodeCoordinator { + def apply(sleepMills: Long, name: String, host: String, + nodeProps: Properties): NodeCoordinator = { + new NodeCoordinator(sleepMills, name, host, nodeProps) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/TestBoot.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/TestBoot.scala new file mode 100755 index 00000000..d9fd99a8 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/algorithmdemo/raft/TestBoot.scala @@ -0,0 +1,38 @@ +package com.wallace.demo.app.algorithmdemo.raft + +import com.wallace.demo.app.common.LogSupport +import com.wallace.demo.app.utils.ArgsParser + +import java.util.Properties +import java.util.concurrent.CountDownLatch +import scala.util.control.NonFatal + +/** + * Author: biyu.huang + * Date: 2022/12/23 18:31 + * Description: + */ +object TestBoot extends LogSupport { + private val countDownLatch = new CountDownLatch(1) + + def main(args: Array[String]): Unit = { + try { + Runtime.getRuntime.addShutdownHook(new Thread { + override def run(): Unit = { + logger.info("shutdown now ...") + countDownLatch.countDown() + } + }) + val nodeProps: Properties = ArgsParser.loadProps(args, "") + nodeProps.setProperty("node.actor.list", "node1,node2,node3") + nodeProps.setProperty("node.actor.port", "10001") + val node1: NodeCoordinator = NodeCoordinator(5000L, "RaftActorSystem", "localhost", nodeProps) + node1.init() + node1.start() + } catch { + case NonFatal(_) => + countDownLatch.countDown() + } + countDownLatch.await() + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/asyncdemo/AsyncDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/asyncdemo/AsyncDemo.scala old mode 100644 new mode 100755 index 89d6c8db..1e185311 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/asyncdemo/AsyncDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/asyncdemo/AsyncDemo.scala @@ -16,27 +16,22 @@ object AsyncDemo extends Using { def main(args: Array[String]): Unit = { - value onSuccess { - case res => log.info(s"[OnSuccess] $res") - case error => log.info(s"[OnSuccess] $error") - } - value.onComplete { - case Success(res) => log.info(s"[OnComplete] $res") - case Failure(e) => log.info(s"[OnComplete] $e") + case Success(res) => logger.info(s"[OnComplete] $res") + case Failure(e) => logger.info(s"[OnComplete] $e") } Thread.sleep(100) value.value.get match { case Success(res) => - log.info(s"[Match] $res") + logger.info(s"[Match] $res") case Failure(e) => - log.info(s"[Match] $e") + logger.info(s"[Match] $e") throw e } - log.info("Shutting down...") + logger.info("Shutting down...") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/ChildFirstURLClassLoader.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/ChildFirstURLClassLoader.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/DemoClassLoader.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/DemoClassLoader.scala old mode 100644 new mode 100755 index 77d73cd0..f4d338a4 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/DemoClassLoader.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/DemoClassLoader.scala @@ -28,10 +28,10 @@
object DemoClassLoader extends Using { if (file.exists()) { loader.addURL(file.toURI.toURL) } else { - log.warn(s"Local jar $file does not exist, skipping.") + logger.warn(s"Local jar $file does not exist, skipping.") } case _ => - log.warn(s"Skip remote jar $uri.") + logger.warn(s"Skip remote jar $uri.") } } @@ -82,13 +82,13 @@ object DemoClassLoader extends Using { mainClass.newInstance().asInstanceOf[DecompressionFactory] } else { if (classOf[scala.App].isAssignableFrom(mainClass)) { - log.warn("Subclasses of scala.App may not work correctly. Use a new Object instead.") + logger.warn("Subclasses of scala.App may not work correctly. Use a new Object instead.") } new DecompressionFactory() } val res = instance.getDecompression("test.zip") - log.info(res.getClass.getName.stripSuffix("$")) + logger.info(res.getClass.getName.stripSuffix("$")) } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/MutableURLClassLoader.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/MutableURLClassLoader.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/ParentClassLoader.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/classloader/ParentClassLoader.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/CollectionDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/CollectionDemo.scala old mode 100644 new mode 100755 index 2c312474..0156574a --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/CollectionDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/CollectionDemo.scala @@ -20,7 +20,7 @@ object CollectionDemo extends App with LogSupport { * flatMap **/ val arr: Array[String] = Array("cl$ass", "a", "b").sortBy(_.length) - arr.foreach(x => log.info("%f".formatted(x))) + arr.foreach(x => logger.info("%f".formatted(x))) //Array("zzzzz", "eeeee$ffffff$gggggggg", "aaaaaaa$bbbbbbbbb$ccccccccc") val arrRes: Array[String] = arr.flatMap { @@ -32,7 +32,7 @@ object CollectionDemo extends App with LogSupport { Array(s"@${x}_${x.reverse}@") } } - arrRes.foreach(elem => log.info("%f".formatted(elem))) + arrRes.foreach(elem => logger.info("%f".formatted(elem))) /** @@ -40,7 +40,7 @@ object CollectionDemo extends App with LogSupport { **/ val ls = List(1, 2, 3, 4, 5, 6, 7, 8, 9, 10) val resLs: (List[Int], List[Int]) = ls.partition(_ % 2 == 0) - log.info(s"[CollectionDemo] res_Ls: ${resLs._1}, ${resLs._2}") + logger.info(s"[CollectionDemo] res_Ls: ${resLs._1}, ${resLs._2}") /** @@ -48,7 +48,7 @@ object CollectionDemo extends App with LogSupport { **/ val testArr: Array[Int] = Array(1, 2, 3) val resArr: Array[Int] = Array(testArr: _*) - log.info(s"TestArr: ${testArr.mkString(" ")}, ResArr: ${resArr.mkString(" ")}") + logger.info(s"TestArr: ${testArr.mkString(" ")}, ResArr: ${resArr.mkString(" ")}") def getWords(lines: Seq[String]): Seq[String] = lines flatMap (line => line split "\\W+") } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/LockMap.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/LockMap.scala new file mode 100755 index 00000000..c4247672 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/LockMap.scala @@ -0,0 +1,50 @@ +package com.wallace.demo.app.collection + +import java.util.concurrent.ConcurrentHashMap +import java.util.concurrent.locks.{Lock, ReentrantLock} + +/** + * Author: biyu.huang + 
* Date: 2024/4/12 17:18 + * Description: + */ +class LockMap[K, V] { + private val hashMap = new ConcurrentHashMap[K, V] + private val keyLocks = new ConcurrentHashMap[K, Lock] + + def put(key: K, value: V): Unit = { + val lock = getOrCreateLock(key) + lock.lock() + try { + hashMap.put(key, value) + } finally { + lock.unlock() + } + } + + def getAndLock(key: K): V = { + val lock = getOrCreateLock(key) + lock.lock() + hashMap.get(key) + } + + def unlock(key: K): Unit = { + val lock = getOrCreateLock(key) + lock.unlock() + } + + def remove(key: K): Unit = { + val lock = getOrCreateLock(key) + lock.lock() + try { + hashMap.remove(key) + keyLocks.remove(key) + } finally { + lock.unlock() + } + } + + private def getOrCreateLock(key: K): Lock = { + keyLocks.computeIfAbsent(key, _ => new ReentrantLock()) + } +} \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/RowKeyLock.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/RowKeyLock.scala new file mode 100755 index 00000000..13205e30 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/RowKeyLock.scala @@ -0,0 +1,86 @@ +package com.wallace.demo.app.collection + +import java.util.concurrent.atomic.AtomicInteger +import java.util.concurrent.locks.{ReentrantLock, ReentrantReadWriteLock} +import java.util.concurrent.{ConcurrentHashMap, locks} + +/** + * Author: biyu.huang + * Date: 2023/11/3 11:35 + * Description: + */ +class RowKeyLock[T] { + private val locks: ConcurrentHashMap[T, KeyLock] = new ConcurrentHashMap[T, KeyLock]() + + def lock(key: T): Unit = { + val keyLock: KeyLock = this.locks.compute(key, + (_, v) => if (v == null) new KeyLock() else v.countLock()) + keyLock.lock.lock() + } + + def unlock(key: T): Unit = { + val keyLock: KeyLock = this.locks.get(key) + keyLock.lock.unlock() + if (keyLock.deductLock() == 0) { + locks.remove(key, keyLock) + } + } + + private final class KeyLock { + val lock: ReentrantLock = new ReentrantLock() + + private val lockCount: AtomicInteger = new AtomicInteger(1) + + def countLock(): KeyLock = { + lockCount.incrementAndGet() + this + } + + def deductLock(): Int = lockCount.decrementAndGet() + } +} + +class CustomMap[K, V] { + private val map: ConcurrentHashMap[K, V] = new ConcurrentHashMap[K, V]() + private val lock: ReentrantReadWriteLock = new locks.ReentrantReadWriteLock() + + def put(key: K, value: V): Unit = { + lock.writeLock().lock() + try { + map.put(key, value) + } finally { + lock.writeLock().unlock() + } + } + + def get(key: K): Option[V] = { + lock.readLock().lock() + try { + Option(map.get(key)) + } finally { + lock.readLock().unlock() + } + } + + def remove(key: K): Unit = { + lock.writeLock().lock() + try { + map.remove(key) + } finally { + lock.writeLock().unlock() + } + } + + def foreach(f: (K, V) => Unit): Unit = { + lock.readLock().lock() + try { + val iterator = map.entrySet().iterator() + while (iterator.hasNext) { + val entry = iterator.next() + f(entry.getKey, entry.getValue) + } + } finally { + lock.readLock().unlock() + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/StreamDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/collection/StreamDemo.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/DataType.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/DataType.scala old mode 100644 new mode 100755 diff --git 
a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsManager.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsManager.scala old mode 100644 new mode 100755 index 1ac300b2..53e024a0 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsManager.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsManager.scala @@ -19,8 +19,8 @@ import org.apache.hadoop.io.IOUtils import scala.util.{Failure, Success, Try} /** - * Created by wallace on 2017/12/4. - */ + * Created by wallace on 2017/12/4. + */ object HdfsManager extends HdfsSupportHA with Using { lazy val currentPath: String = System.getProperty("user.dir") @@ -33,16 +33,16 @@ object HdfsManager extends HdfsSupportHA with Using { file => if (new java.io.File(file).exists()) { hdfsConf.addResource(new Path(file)) - log.debug(s"HdfsFileManager addResource from config directory: $file.current directory $configHome") + logger.debug(s"HdfsFileManager addResource from config directory: $file.current directory $configHome") } else { val configPath = file.split("/").init.mkString("/") val configFilename = file.split("/").last if (new java.io.File(configFilename).exists()) { hdfsConf.addResource(new Path(configFilename)) - log.debug(s"HdfsFileManager addResource from current directory $currentPath: adding $configFilename: Cannot find file in config directory $configPath") + logger.debug(s"HdfsFileManager addResource from current directory $currentPath: adding $configFilename: Cannot find file in config directory $configPath") } else { - log.debug(s"HdfsFileManager addResource adding $configFilename failure: Cannot find file in config directory $configPath and current directory $currentPath") + logger.debug(s"HdfsFileManager addResource adding $configFilename failure: Cannot find file in config directory $configPath and current directory $currentPath") } } } @@ -80,7 +80,7 @@ object HdfsManager extends HdfsSupportHA with Using { usingHdfs("Check fileName exists failed.") { hdfs => val p = new Path(fileName) - res = hdfs.isFile(p) + res = hdfs.getFileStatus(p).isFile } res } @@ -136,10 +136,10 @@ object HdfsManager extends HdfsSupportHA with Using { val out: FileOutputStream = new FileOutputStream(s"$target", append) if (hdfs.exists(new Path(srcPath))) { val files = hdfs.listFiles(new Path(srcPath), false) - log.info("files path" + files) + logger.info("files path" + files) try { while (files.hasNext) { - log.info("while start") + logger.info("while start") val file = files.next() if (file.getPath.toString.contains("part-")) { val in: FSDataInputStream = hdfs.open(new Path(file.getPath.toString)) @@ -162,20 +162,20 @@ object HdfsManager extends HdfsSupportHA with Using { } } } else { - log.debug("download Error:" + srcPath + " not exist ") + logger.debug("download Error:" + srcPath + " not exist ") } } } def appendDir(srcPath: String, dstPath: String): Unit = { - log.debug(s"try to append $srcPath to $dstPath ...") + logger.debug(s"try to append $srcPath to $dstPath ...") listFiles(srcPath).foreach(fileName => if (fileName.startsWith("part-")) { appendFile(s"$srcPath/$fileName", s"$dstPath/$fileName") }) } def appendFile(src: String, target: String): Unit = { - log.debug(s"try to append $src to $target ...") + logger.debug(s"try to append $src to $target ...") usingHdfs("append failed") { hdfs => val in: FSDataInputStream = hdfs.open(new Path(src)) @@ -193,7 +193,7 @@ object HdfsManager extends HdfsSupportHA with Using { if (out != null) out.close() } } - log.debug(s"append $src to $target 
complete") + logger.debug(s"append $src to $target complete") } def getDfsNameServices: String = { @@ -203,7 +203,7 @@ object HdfsManager extends HdfsSupportHA with Using { temp = Some(hdfs.getCanonicalServiceName) } val dfsName = temp.getOrElse(hdfsConf.get("dfs.nameservices")) - log.info(s"getDfsNameServices return $dfsName") + logger.info(s"getDfsNameServices return $dfsName") dfsName } @@ -216,7 +216,7 @@ object HdfsManager extends HdfsSupportHA with Using { def downloadGzipFilesToLocal(remoteHdfsPath: String, localFileName: String): Unit = { - log.info(s"downloadGzipFilesToLocal: From $remoteHdfsPath to $localFileName,start time: " + new Date().toString) + logger.info(s"downloadGzipFilesToLocal: From $remoteHdfsPath to $localFileName,start time: " + new Date().toString) usingHdfs("download Hdfs Gzip Files error.") { hdfs => val files = hdfs.listFiles(new Path(remoteHdfsPath), false) @@ -251,7 +251,7 @@ object HdfsManager extends HdfsSupportHA with Using { } } - log.info(s"downloadGzipFilesToLocal: From $remoteHdfsPath to $localFileName,end time: " + new Date().toString) + logger.info(s"downloadGzipFilesToLocal: From $remoteHdfsPath to $localFileName,end time: " + new Date().toString) } private def createLocalPath(localFileName: String): Boolean = { @@ -272,7 +272,7 @@ object HdfsManager extends HdfsSupportHA with Using { case Success(result) => result case Failure(e) => - log.error(s"Create directories or change directory authority for file $localFileName. Throw exceptions: ", e) + logger.error(s"Create directories or change directory authority for file $localFileName. Throw exceptions: ", e) false } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsSupportHA.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsSupportHA.scala old mode 100644 new mode 100755 index 9b75fe9e..08192d8a --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsSupportHA.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/HdfsSupportHA.scala @@ -8,9 +8,9 @@ import org.apache.hadoop.io.IOUtils import org.apache.hadoop.io.compress.{CompressionCodecFactory, CompressionOutputStream} /** - * com.wallace.demo.app.common - * Created by Wallace on 2017/12/5 0005. - */ + * com.wallace.demo.app.common + * Created by Wallace on 2017/12/5 0005. 
+ */ trait HdfsSupportHA extends ProjectConfig { def hdfsConf: Configuration @@ -20,7 +20,7 @@ trait HdfsSupportHA extends ProjectConfig { def usingHdfs(errMsg: String)(f: FileSystem => Unit): Unit = { this.synchronized(usingHdfsCnt += 1) if (usingHdfsCnt >= 10000) { - log.info("Current error count :" + errorHdfsCnt) + logger.info("Current error count :" + errorHdfsCnt) this.synchronized(usingHdfsCnt = 0) } try { @@ -29,11 +29,11 @@ trait HdfsSupportHA extends ProjectConfig { } catch { case e: IOException => this.synchronized(errorHdfsCnt += 1) - log.error(errMsg + s" error Count:$errorHdfsCnt.", e) + logger.error(errMsg + s" error Count:$errorHdfsCnt.", e) throw e case ex: Throwable => this.synchronized(errorHdfsCnt += 1) - log.error(errMsg + s" error Count:$errorHdfsCnt ", ex) + logger.error(errMsg + s" error Count:$errorHdfsCnt ", ex) throw ex } } @@ -57,11 +57,11 @@ trait HdfsSupportHA extends ProjectConfig { hdfs => val path = new Path(target) if (hdfs.exists(path)) { - log.info(s"Start to delect: ${path.toString}") + logger.info(s"Start to delect: ${path.toString}") hdfs.delete(path, true) - log.info(s"End to delect: ${path.toString}") + logger.info(s"End to delect: ${path.toString}") } else { - log.warn(s"Delete Path Failed! Path: ${path.toString} is not exists!") + logger.warn(s"Delete Path Failed! Path: ${path.toString} is not exists!") } } } @@ -70,7 +70,7 @@ trait HdfsSupportHA extends ProjectConfig { usingHdfs("emptyDir failed.") { hdfs => val p = new Path(dir) - if (hdfs.isDirectory(p)) { + if (hdfs.getFileStatus(p).isDirectory) { val files = hdfs.listFiles(p, false) while (files.hasNext) { hdfs.delete(files.next().getPath, true) @@ -199,7 +199,7 @@ trait HdfsSupportHA extends ProjectConfig { val hdfsPath = new Path(path) if (hdfs.exists(hdfsPath)) { result = hdfs.listStatus(hdfsPath).map(_.getPath.getName).toList - result.foreach(log.debug) + result.foreach(x => logger.debug(x)) } } result diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LoadConfigDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LoadConfigDemo.scala old mode 100644 new mode 100755 index c850a353..c2485fcc --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LoadConfigDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LoadConfigDemo.scala @@ -27,7 +27,7 @@ object LoadConfigDemo extends ProjectConfig { val temp = conf.split("=").map(_.trim) val key = temp.head val value = temp.last - log.info(s"Key: $key, Value: $value") + logger.info(s"Key: $key, Value: $value") } @@ -39,7 +39,7 @@ object LoadConfigDemo extends ProjectConfig { timeOffsetList.foreach { x => - log.info(s"Key: ${x._1}, Value: ${x._2}") + logger.info(s"Key: ${x._1}, Value: ${x._2}") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LogLevel.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LogLevel.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LogSupport.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LogSupport.scala old mode 100644 new mode 100755 index 082adb10..cc34e4f6 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LogSupport.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LogSupport.scala @@ -1,25 +1,23 @@ package com.wallace.demo.app.common +import com.typesafe.scalalogging.LazyLogging import com.wallace.demo.app.common.LogLevel.LogLevel -import org.slf4j.{Logger, LoggerFactory} import 
scala.reflect.ClassTag /** - * Created by Wallace on 2017/1/11. - */ -trait LogSupport extends Serializable { - val log: Logger = LoggerFactory.getLogger(this.getClass.getName.stripSuffix("$")) - + * Created by Wallace on 2017/1/11. + */ +trait LogSupport extends LazyLogging { protected def logRecord[T: ClassTag](msg: T, level: LogLevel = LogLevel.INFO): Unit = { level match { - case LogLevel.DEBUG => log.debug(s"$msg") - case LogLevel.INFO => log.info(s"$msg") - case LogLevel.WARN => log.warn(s"$msg") - case LogLevel.ERROR => log.error(s"$msg") - case LogLevel.TRACE => log.trace(s"$msg") - case _ => log.info(s"$msg") + case LogLevel.DEBUG => logger.debug(s"$msg") + case LogLevel.INFO => logger.info(s"$msg") + case LogLevel.WARN => logger.warn(s"$msg") + case LogLevel.ERROR => logger.error(s"$msg") + case LogLevel.TRACE => logger.trace(s"$msg") + case _ => logger.info(s"$msg") } } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LoopService.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LoopService.scala new file mode 100755 index 00000000..24f406e3 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/LoopService.scala @@ -0,0 +1,53 @@ +package com.wallace.demo.app.common + +import java.util.concurrent.CountDownLatch +import scala.util.control.NonFatal + +/** + * Author: biyu.huang + * Date: 2022/12/23 17:53 + * Description: + */ +sealed trait Service { + def init(): Unit = { + // Won't do anything by default. But user can override it. + } + + def start(): Unit + + def stop(): Unit +} + +abstract class LoopService(sleepMills: Long, name: String) extends Service with LogSupport { + private final val countDown: CountDownLatch = new CountDownLatch(1) + + private val thread = new Thread(new LoopThread(), "%s-thread".format(name)) + + override def start(): Unit = { + logger.info("starting %s-thread".format(name)) + this.thread.setDaemon(true) + this.thread.start() + logger.info("started %s-thread".format(name)) + } + + def handleWork(): Unit + + override def stop(): Unit = { + this.countDown.countDown() + } + + class LoopThread extends Runnable { + override def run(): Unit = { + while (countDown.getCount > 0) { + try { + handleWork() + Thread.sleep(sleepMills) + } catch { + case NonFatal(e) => + logger.error(e.getMessage, e) + countDown.countDown() + } + } + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/MetaData.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/MetaData.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/ProjectConfig.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/ProjectConfig.scala old mode 100644 new mode 100755 index be7abb11..0cf411f8 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/ProjectConfig.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/ProjectConfig.scala @@ -16,7 +16,7 @@ trait ProjectConfig extends LogSupport { protected val configHome = "../vmax-conf/" def setConfigFiles(files: String*): Unit = synchronized { - log.debug(s"config home: $configHome") + logger.debug(s"config home: $configHome") config = files.toList.map(load).reduce((a, b) => a.withFallback(b)) } @@ -24,10 +24,10 @@ trait ProjectConfig extends LogSupport { val resourceFile = file val configFile = new File(makePath(file)) if (configFile.exists()) { - log.debug(s"Loading file [${configFile.getPath}] and resource [$resourceFile]") + logger.debug(s"Loading file 
[${configFile.getPath}] and resource [$resourceFile]") ConfigFactory.parseFile(configFile).withFallback(ConfigFactory.load(resourceFile)) } else { - log.debug(s"Loading resource [$resourceFile]") + logger.debug(s"Loading resource [$resourceFile]") ConfigFactory.load(resourceFile) } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/UserDefineFunc.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/UserDefineFunc.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/Using.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/Using.scala old mode 100644 new mode 100755 index 0184288f..738774e8 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/Using.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/common/Using.scala @@ -2,25 +2,26 @@ package com.wallace.demo.app.common import com.wallace.demo.app.utils.FuncRuntimeDur +import scala.language.reflectiveCalls import scala.util.control.NonFatal trait Using extends FuncRuntimeDur { - protected def usingWithErrMsg[A <: {def close() : Unit}, B](param: A, errMsg: String)(f: A => B): Unit = { + protected def usingWithErrMsg[C <: {def close(): Unit}, R](ctx: C, errMsg: String)(func: C => R): Unit = { try { - f(param) + func(ctx) } catch { case NonFatal(e) => - log.error(s"$errMsg: ", e) + logger.error(s"$errMsg: ", e) } finally { - param.close() + ctx.close() } } - protected def using[A <: {def close() : Unit}, B](param: A)(f: A => B): B = { + protected def using[C <: {def close(): Unit}, R](ctx: C)(func: C => R): R = { try { - f(param) + func(ctx) } finally { - param.close() + ctx.close() } } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/convertmrtonb/ConvertMRToNeighborCellDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/convertmrtonb/ConvertMRToNeighborCellDemo.scala old mode 100644 new mode 100755 index 7ef3b98a..fe188784 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/convertmrtonb/ConvertMRToNeighborCellDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/convertmrtonb/ConvertMRToNeighborCellDemo.scala @@ -14,7 +14,7 @@ object ConvertMRToNeighborCellDemo extends Using { val res = FuncUtil.ProcessNeighbourInfo(mrRecord) res.asScala.foreach { line => - log.info(line) + logger.info(line) } assert(args.length >= 1, "Please enter run times.") @@ -34,7 +34,7 @@ object ConvertMRToNeighborCellDemo extends Using { cnt += 1 } } - log.info(s"[testProcessNBDataByLine] RunTimes: $cnt, CostTime: $costTime ms.") + logger.info(s"[testProcessNBDataByLine] RunTimes: $cnt, CostTime: $costTime ms.") } def testProcessNBDataByVector(data: util.Vector[String], times: Int): Unit = { @@ -45,7 +45,7 @@ object ConvertMRToNeighborCellDemo extends Using { cnt += 1 } } - log.info(s"[testProcessNBDataByVector] RunTimes: $cnt, CostTime: $costTime ms.") + logger.info(s"[testProcessNBDataByVector] RunTimes: $cnt, CostTime: $costTime ms.") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/dynamicloadconfig/DynamicLoadConfig.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/dynamicloadconfig/DynamicLoadConfig.scala old mode 100644 new mode 100755 index ea29aea6..76c6e686 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/dynamicloadconfig/DynamicLoadConfig.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/dynamicloadconfig/DynamicLoadConfig.scala @@ -37,7 +37,7 @@ class DynamicLoadConfig(props: Properties, prefixPath: String, configFiles: Stri 
in.close() } catch { case NonFatal(e) => - log.error(s"[${this.getClass.getCanonicalName}] Failed to reload $fileName: ${e.printStackTrace()}.") + logger.error(s"[${this.getClass.getCanonicalName}] Failed to reload $fileName: ${e.printStackTrace()}.") } } } @@ -58,7 +58,7 @@ object DynamicLoadConfig extends LogSupport { Thread.sleep(10000L) val conf1 = props.getProperty("conf1", "default_value") val conf2 = props.getProperty("conf2", "default_value") - log.info(s"[${this.getClass.getSimpleName}]<$cnt> conf1: $conf1, conf2: $conf2.") + logger.info(s"[${this.getClass.getSimpleName}]<$cnt> conf1: $conf1, conf2: $conf2.") cnt += 1 } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/eulerpathdemo/EluerPathDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/eulerpathdemo/EluerPathDemo.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/eulerpathdemo/EulerPathBoot.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/eulerpathdemo/EulerPathBoot.scala old mode 100644 new mode 100755 index f3d7dd26..49f11e67 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/eulerpathdemo/EulerPathBoot.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/eulerpathdemo/EulerPathBoot.scala @@ -1,16 +1,15 @@ package com.wallace.demo.app.eulerpathdemo -import java.util.Scanner - import com.wallace.demo.app.common.LogSupport -import scala.collection.JavaConversions._ +import java.util.Scanner +import scala.jdk.CollectionConverters.collectionAsScalaIterableConverter import scala.util.control.Breaks._ /** - * com.wallace.demo.app.EulerPathDemo - * Created by 10192057 on 2017/10/10 0010. - */ + * com.wallace.demo.app.EulerPathDemo + * Created by 10192057 on 2017/10/10 0010. + */ object EulerPathBoot extends App with LogSupport { // Sample Input @@ -62,7 +61,7 @@ object EulerPathBoot extends App with LogSupport { } } - for (elem <- handler.result) { - log.info(s"$elem") + for (elem <- handler.result.asScala) { + logger.info(s"$elem") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationFactory.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationFactory.scala deleted file mode 100644 index fbd5f3a8..00000000 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationFactory.scala +++ /dev/null @@ -1,19 +0,0 @@ -package com.wallace.demo.app.factorypatterndemo - -/** - * Created by Wallace on 2017/4/15. 
- */ -object OperationFactory { - def createOperate(opSymbol: String, num_A: Double, num_B: Double): Option[Double] = opSymbol match { - case "+" => - OperationAdd(num_A, num_B).calcResult - case "-" => - OperationSub(num_A, num_B).calcResult - case "*" => - OperationMul(num_A, num_B).calcResult - case "/" => - OperationDiv(num_A, num_B).calcResult - case _ => - None - } -} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/ExtractorDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/ExtractorDemo.scala old mode 100644 new mode 100755 index 40eed3e9..da12d8b4 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/ExtractorDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/ExtractorDemo.scala @@ -35,12 +35,12 @@ object ExtractorDemo extends App with LogSupport { val user: User = new FreeUser("Daniel", 3000, 0.76d) user match { case FreeUser(name, _, p) => - if (p > 0.75) log.info(name + ", what can we do for you today?") else log.info("Hello " + name) - case PremiumUser(name, _) => log.info("Welcome back, dear " + name) + if (p > 0.75) logger.info(name + ", what can we do for you today?") else logger.info("Hello " + name) + case PremiumUser(name, _) => logger.info("Welcome back, dear " + name) } user match { - case freeUser@PremiumCandidate() => log.info(freeUser.name + ", what can we do for you today?") - case _ => log.info("Welcome back, Sir!") + case freeUser@PremiumCandidate() => logger.info(freeUser.name + ", what can we do for you today?") + case _ => logger.info("Welcome back, Sir!") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/FunctionalDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/FunctionalDemo.scala old mode 100644 new mode 100755 index 66f31447..7a7bb4ed --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/FunctionalDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/functionaldemo/FunctionalDemo.scala @@ -7,8 +7,8 @@ import scala.concurrent.{Await, Future} import scala.util.{Failure, Success, Try} /** - * Created by Wallace on 2016/11/6. - */ + * Created by Wallace on 2016/11/6. 
+ */ object FunctionalDemo extends UserDefineFunc with LogSupport { val p0: (Int, Int, Int) => Int = sum val p2: (Int) => Int = sum(10, _: Int, 20) @@ -24,22 +24,22 @@ object FunctionalDemo extends UserDefineFunc with LogSupport { val a: Int = 3 val b: BigInt = toBigInt(a) - log.info(s"${Int.MaxValue}, ${Int.MinValue}, ${b.pow(a)}") - log.info(s"${p0(1, 2, 3)}") // 6 - log.info(s"${p2(100)}") // 130 - log.info(s"${p3(10, 1)}") - log.info("[Partial Functions] " + divide(10)) - log.info("[Partial Functions] " + divide1(10)) - log.info("[Partial Functions] " + direction(180)) - log.info("[匿名函数] " + m1(2)) - log.info("[偏应用函数] " + sum(1, 2, 3)) - log.info("Curry 函数] " + curriedSum(5)(6)) + logger.info(s"${Int.MaxValue}, ${Int.MinValue}, ${b.pow(a)}") + logger.info(s"${p0(1, 2, 3)}") // 6 + logger.info(s"${p2(100)}") // 130 + logger.info(s"${p3(10, 1)}") + logger.info("[Partial Functions] " + divide(10)) + logger.info("[Partial Functions] " + divide1(10)) + logger.info("[Partial Functions] " + direction(180)) + logger.info("[匿名函数] " + m1(2)) + logger.info("[偏应用函数] " + sum(1, 2, 3)) + logger.info("Curry 函数] " + curriedSum(5)(6)) } /** - * Scala-Partial Functions(偏函数) - * 定义一个函数,而让它只接受和处理其参数定义域范围内的子集,对于这个参数范围外的参数则抛出异常,这样的函数就是偏函数 - **/ + * Scala-Partial Functions(偏函数) + * 定义一个函数,而让它只接受和处理其参数定义域范围内的子集,对于这个参数范围外的参数则抛出异常,这样的函数就是偏函数 + * */ def divide: PartialFunction[Int, Int] = new PartialFunction[Int, Int] { override def isDefinedAt(x: Int): Boolean = x != 0 @@ -78,8 +78,8 @@ object FunctionalDemo extends UserDefineFunc with LogSupport { case None => throw new NoSuchElementException } Try(m1(key)).flatMap(x => Try(m2(x)).flatMap(y => Try(m3(y)))) match { - case Success(res) => log.info(key, res) - case Failure(e) => log.error(s"$e") + case Success(res) => logger.info(key, res) + case Failure(e) => logger.error(s"$e") } } @@ -90,10 +90,10 @@ object FunctionalDemo extends UserDefineFunc with LogSupport { import scala.concurrent.ExecutionContext.Implicits.global val futureTask: Future[String] = Future(m1(key)).flatMap(x => Future(m2(x)).flatMap(y => Future(m3(y)))) - futureTask.onFailure { - case t: Throwable => println(t) + futureTask.onComplete { + case Failure(e) => println(e.getMessage) } - log.info(key, Await.result(futureTask, Duration.Inf)) + logger.info(key, Await.result(futureTask, Duration.Inf)) val inc: Int => Int = (a: Int) => a + 1 val dec: Int => Int = (b: Int) => b - 2 @@ -104,17 +104,17 @@ object FunctionalDemo extends UserDefineFunc with LogSupport { /** - * Lambda表达式, 匿名函数 - **/ + * Lambda表达式, 匿名函数 + * */ def m1: (Int) => Int = (x: Int) => x * x /** - * 偏应用函数 - **/ + * 偏应用函数 + * */ def sum(a: Int, b: Int, c: Int): Int = a + b + c /** - * Curry函数 - **/ + * Curry函数 + * */ def curriedSum(x: Int)(y: Int): Int = x + y } \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/httpdemo/HttpUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/httpdemo/HttpUtils.scala old mode 100644 new mode 100755 index 6c63817f..e24ead1f --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/httpdemo/HttpUtils.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/httpdemo/HttpUtils.scala @@ -32,7 +32,7 @@ object HttpUtils extends Using { } match { case Success(res) => res case Failure(e) => - log.error("Failed to do Post request: ", e) + logger.error("Failed to do Post request: ", e) defaultRes } } @@ -72,7 +72,7 @@ object HttpUtils extends Using { } match { case Success(res) => res case Failure(e) => - log.error("Failed to do Post request: ", e) + 
logger.error("Failed to do Post request: ", e) defaultRes } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/implicitdemo/ImplicitDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/implicitdemo/ImplicitDemo.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/multithread/MultiThreadTestDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/multithread/MultiThreadTestDemo.scala old mode 100644 new mode 100755 index ecb97d75..a83b18f6 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/multithread/MultiThreadTestDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/multithread/MultiThreadTestDemo.scala @@ -14,7 +14,7 @@ object MultiThreadTestDemo extends LogSupport { def main(args: Array[String]): Unit = { //创建线程池 - log.info(s"Thread Pool Size: $currentPoolSize") + logger.info(s"Thread Pool Size: $currentPoolSize") val threadPool: ExecutorService = Executors.newFixedThreadPool(currentPoolSize) try { //提交5个线程 @@ -32,12 +32,12 @@ object MultiThreadTestDemo extends LogSupport { override def run(): Unit = { val threadId = Thread.currentThread().getId for (_ <- 1 to 10) { - log.info(threadName + "|" + threadId) + logger.info(threadName + "|" + threadId) Thread.sleep(100) } val state: Thread.State = Thread.currentThread().getState val symbol: Boolean = Thread.currentThread().isAlive - log.info(s"Thread State: $state, Thread Symbol: $symbol, Thread Name: $threadName, Thread ID: $threadId") + logger.info(s"Thread State: $state, Thread Symbol: $symbol, Thread Name: $threadName, Thread ID: $threadId") } } } \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parademo/ParaDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parademo/ParaDemo.scala old mode 100644 new mode 100755 index 9af87733..654512f4 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parademo/ParaDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parademo/ParaDemo.scala @@ -8,8 +8,8 @@ package com.wallace.demo.app.parademo +import java.util.concurrent.ForkJoinPool import scala.collection.parallel.mutable.ParArray -import scala.concurrent.forkjoin.ForkJoinPool /** * Created by wallace on 2019/6/20. 
diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/CsvParserCombinators.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/CsvParserCombinators.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/ExprParserCombinators.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/ExprParserCombinators.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/ParsersConstructor.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/ParsersConstructor.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/AbstractParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/AbstractParser.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/AddTimeStampFieldsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/AddTimeStampFieldsParser.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ComputeFieldsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ComputeFieldsParser.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ConcatFieldsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ConcatFieldsParser.scala old mode 100644 new mode 100755 index f53c66f9..66262471 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ConcatFieldsParser.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ConcatFieldsParser.scala @@ -34,7 +34,7 @@ class ConcatFieldsParser extends AbstractParser { _concatColumnsFields.putAll(concatColumnsFields.asJava) concat_sep = context.methodMetaData.conf("separator") if (concat_sep.isEmpty) { - log.warn(s"Concat Sep[$concat_sep] is empty.") + logger.warn(s"Concat Sep[$concat_sep] is empty.") } } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ExtractFieldsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ExtractFieldsParser.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ParserChain.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ParserChain.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ParserFactory.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ParserFactory.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ReplaceStrFieldsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/ReplaceStrFieldsParser.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/SplitFieldsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/SplitFieldsParser.scala old mode 100644 new mode 100755 diff --git 
a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/SubStringFieldsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/parsercombinators/parsers/SubStringFieldsParser.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/Adapter.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/Adapter.scala new file mode 100755 index 00000000..07dbee53 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/Adapter.scala @@ -0,0 +1,10 @@ +package com.wallace.demo.app.patterns.adapterdemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:55 + * Description: + */ +trait Adapter extends MediaPlayer { + +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/AdapterDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/AdapterDemo.scala new file mode 100755 index 00000000..b2ceb02c --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/AdapterDemo.scala @@ -0,0 +1,19 @@ +package com.wallace.demo.app.patterns.adapterdemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:50 + * Description: Adapter Pattern(适配器模式) + */ +object AdapterDemo { + def main(args: Array[String]): Unit = { + val defaultMediaPlayer: MediaPlayer = new DefaultMediaPlayer + defaultMediaPlayer.playMp4("test1.mp4") + + val vlcMediaAdapter: MediaPlayer = new VLCMediaAdapter + vlcMediaAdapter.playMp4("test2.vlc") + + val mkvMediaAdapter: MediaPlayer = new MKVMediaAdapter + mkvMediaAdapter.playMp4("test3.mkv") + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/DefaultMediaPlayer.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/DefaultMediaPlayer.scala new file mode 100755 index 00000000..e2a72ff1 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/DefaultMediaPlayer.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.adapterdemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:36 + * Description: + */ +class DefaultMediaPlayer() extends MediaPlayer { + override def playMp4(fileName: String): Unit = { + logger.info("Media Type -> MP4, Media File -> %s".format(fileName)) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MKVMediaAdapter.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MKVMediaAdapter.scala new file mode 100755 index 00000000..3926cca7 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MKVMediaAdapter.scala @@ -0,0 +1,14 @@ +package com.wallace.demo.app.patterns.adapterdemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:58 + * Description: + */ +class MKVMediaAdapter extends Adapter { + private val mkvMediaPlayer = new MKVMediaPlayer + + override def playMp4(fileName: String): Unit = { + mkvMediaPlayer.playMKV(fileName) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MKVMediaPlayer.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MKVMediaPlayer.scala new file mode 100755 index 00000000..0fbe268d --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MKVMediaPlayer.scala @@ -0,0 +1,14 @@ +package com.wallace.demo.app.patterns.adapterdemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: 
biyu.huang + * Date: 2024/5/13 15:59 + * Description: + */ +class MKVMediaPlayer extends LazyLogging { + def playMKV(fileName: String): Unit = { + logger.info("Media Type -> MKV, Media File -> %s".format(fileName)) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MediaPlayer.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MediaPlayer.scala new file mode 100755 index 00000000..7e1776b7 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/MediaPlayer.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.adapterdemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:34 + * Description: + */ +trait MediaPlayer extends LazyLogging { + def playMp4(fileName: String): Unit +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/VLCMediaAdapter.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/VLCMediaAdapter.scala new file mode 100755 index 00000000..14066081 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/VLCMediaAdapter.scala @@ -0,0 +1,14 @@ +package com.wallace.demo.app.patterns.adapterdemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:35 + * Description: + */ +class VLCMediaAdapter extends Adapter { + private val vlcMediaPlayer = new VLCMediaPlayer + + override def playMp4(fileName: String): Unit = { + this.vlcMediaPlayer.playVLC(fileName) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/VLCMediaPlayer.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/VLCMediaPlayer.scala new file mode 100755 index 00000000..71cd60b2 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/adapterdemo/VLCMediaPlayer.scala @@ -0,0 +1,14 @@ +package com.wallace.demo.app.patterns.adapterdemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:47 + * Description: + */ +class VLCMediaPlayer extends LazyLogging { + def playVLC(fileName: String): Unit = { + logger.info("Media Type -> VLC, Media File -> %s".format(fileName)) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/builderdemo/BuildObjDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/builderdemo/BuildObjDemo.scala old mode 100644 new mode 100755 similarity index 93% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/builderdemo/BuildObjDemo.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/builderdemo/BuildObjDemo.scala index 4d155cf5..4bfc5bbd --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/builderdemo/BuildObjDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/builderdemo/BuildObjDemo.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.builderdemo +package com.wallace.demo.app.patterns.builderdemo import java.util.concurrent.ConcurrentHashMap import java.util.concurrent.atomic.{AtomicBoolean, AtomicReference} @@ -10,8 +10,10 @@ import scala.collection.JavaConverters._ import scala.collection.mutable /** - * Created by 10192057 on 2018/6/4 0004. 
- */ + * Author: biyu.huang + * Date: 2018/6/4 00:04 + * Description: Builder Pattern(构建者模式) + */ object BuildObjDemo { def builder(): Builder = new Builder @@ -93,7 +95,7 @@ class ObjConf(loadDefaults: Boolean) extends Cloneable with LogSupport with Seri throw new NullPointerException("null value for " + key) } if (!silent) { - log.warn(s"The configuration key '$key' has been deprecated.") + logger.warn(s"The configuration key '$key' has been deprecated.") } settings.put(key, value) this diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/CloseCommand.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/CloseCommand.scala new file mode 100755 index 00000000..327fb4b1 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/CloseCommand.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.commanddemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 16:54 + * Description: + */ +class CloseCommand(game: Game) extends Command { + override def execute(): Unit = { + game.close() + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Command.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Command.scala new file mode 100755 index 00000000..134a3e4f --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Command.scala @@ -0,0 +1,10 @@ +package com.wallace.demo.app.patterns.commanddemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 16:52 + * Description: + */ +trait Command { + def execute(): Unit +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/CommandDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/CommandDemo.scala new file mode 100755 index 00000000..93881db6 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/CommandDemo.scala @@ -0,0 +1,19 @@ +package com.wallace.demo.app.patterns.commanddemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 16:26 + * Description: Command Pattern + */ +object CommandDemo { + def main(args: Array[String]): Unit = { + val game = new Game("dummy game") + val publishCommand: PublishCommand = new PublishCommand(game) + val closeCommand: CloseCommand = new CloseCommand(game) + + val manager = new Manager() + manager.addCommand(publishCommand) + manager.addCommand(closeCommand) + manager.executeCommand() + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Game.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Game.scala new file mode 100755 index 00000000..fb756883 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Game.scala @@ -0,0 +1,18 @@ +package com.wallace.demo.app.patterns.commanddemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: biyu.huang + * Date: 2024/5/13 16:53 + * Description: + */ +class Game(name: String) extends LazyLogging { + def close(): Unit = { + logger.info("[ts=%s] %s has been closed.".format(System.currentTimeMillis(), name)) + } + + def publish(): Unit = { + logger.info("[ts=%s] %s has been published.".format(System.currentTimeMillis(), name)) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Manager.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Manager.scala new file mode 100755 index 00000000..3e534fd6 --- 
/dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/Manager.scala @@ -0,0 +1,22 @@ +package com.wallace.demo.app.patterns.commanddemo + +import scala.collection.mutable.ArrayBuffer + +/** + * Author: biyu.huang + * Date: 2024/5/13 16:57 + * Description: + */ +class Manager { + private val commands: ArrayBuffer[Command] = new ArrayBuffer[Command]() + + def addCommand(command: Command): Unit = this.commands.append(command) + + def executeCommand(): Unit = { + commands.foreach { + cmd => + cmd.execute() + } + commands.clear() + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/PublishCommand.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/PublishCommand.scala new file mode 100755 index 00000000..afdbd0fb --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/commanddemo/PublishCommand.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.commanddemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 16:52 + * Description: + */ +class PublishCommand(game: Game) extends Command { + override def execute(): Unit = { + this.game.publish() + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/Coffee.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/Coffee.scala new file mode 100755 index 00000000..c67581db --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/Coffee.scala @@ -0,0 +1,13 @@ +package com.wallace.demo.app.patterns.decoratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 11:57 + * Description: + */ +trait Coffee { + def cost(): Double + + def getDescription: String + +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/CoffeeDecorator.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/CoffeeDecorator.scala new file mode 100755 index 00000000..fdcc42b3 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/CoffeeDecorator.scala @@ -0,0 +1,10 @@ +package com.wallace.demo.app.patterns.decoratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 12:00 + * Description: + */ +abstract class CoffeeDecorator(coffee: Coffee) extends Coffee { + +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/DecoratorDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/DecoratorDemo.scala new file mode 100755 index 00000000..5f2c8163 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/DecoratorDemo.scala @@ -0,0 +1,26 @@ +package com.wallace.demo.app.patterns.decoratordemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: biyu.huang + * Date: 2024/5/13 12:09 + * Description: Decorator Pattern(装饰者模式) + */ +object DecoratorDemo extends LazyLogging { + private val logFormat: String = "Cost -> %s, Description -> %s" + private val coffeePrice: Double = 10.1 + private val milkPrice: Double = 2.2 + private val sugarPrice: Double = 0.6 + + def main(args: Array[String]): Unit = { + val coffee: RawCoffee = new RawCoffee(coffeePrice) + val milkCoffee: MilkDecorator = new MilkDecorator(coffee, milkPrice) + val sweetCoffee: SugarDecorator = new SugarDecorator(coffee, sugarPrice) + val sweetMilkCoffee: SugarDecorator = new SugarDecorator(milkCoffee, sugarPrice) + logger.info(logFormat.format(coffee.cost(), coffee.getDescription)) + 
logger.info(logFormat.format(milkCoffee.cost(), milkCoffee.getDescription)) + logger.info(logFormat.format(sweetCoffee.cost(), sweetCoffee.getDescription)) + logger.info(logFormat.format(sweetMilkCoffee.cost(), sweetMilkCoffee.getDescription)) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/MilkDecorator.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/MilkDecorator.scala new file mode 100755 index 00000000..f0319da6 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/MilkDecorator.scala @@ -0,0 +1,14 @@ +package com.wallace.demo.app.patterns.decoratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 12:05 + * Description: + */ +class MilkDecorator(coffee: Coffee, price: Double = 2.0) extends CoffeeDecorator(coffee) { + override def cost(): Double = coffee.cost() + price + + override def getDescription: String = { + Array(coffee.getDescription, "Milk").mkString(" + ") + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/RawCoffee.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/RawCoffee.scala new file mode 100755 index 00000000..60d9c9d2 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/RawCoffee.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.decoratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 11:58 + * Description: + */ +class RawCoffee(price: Double = 10.0) extends Coffee { + override def cost(): Double = price + + override def getDescription: String = "Raw Coffee" +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/SugarDecorator.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/SugarDecorator.scala new file mode 100755 index 00000000..eba447ee --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/decoratordemo/SugarDecorator.scala @@ -0,0 +1,14 @@ +package com.wallace.demo.app.patterns.decoratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 12:07 + * Description: + */ +class SugarDecorator(coffee: Coffee, price: Double = 0.5) extends CoffeeDecorator(coffee) { + override def cost(): Double = coffee.cost() + price + + override def getDescription: String = { + Array(coffee.getDescription, "Sugar").mkString(" + ") + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/FactoryDemoRunTest.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/FactoryDemoRunTest.scala old mode 100644 new mode 100755 similarity index 54% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/FactoryDemoRunTest.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/FactoryDemoRunTest.scala index dcfe2aa7..a1f09369 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/FactoryDemoRunTest.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/FactoryDemoRunTest.scala @@ -1,15 +1,20 @@ -package com.wallace.demo.app.factorypatterndemo +package com.wallace.demo.app.patterns.factorypatterndemo import com.wallace.demo.app.common.LogSupport /** * Created by Wallace on 2017/4/15. 
*/ +/** + * Author: biyu.huang + * Date: 2017/4/15 16:54 + * Description: Factory Pattern(工厂模式) + */ object FactoryDemoRunTest extends LogSupport { def main(args: Array[String]): Unit = { val a = 1.0 val b = 2.0 val operate = scala.io.StdIn.readLine("Please enter operation: ") - log.info(s"${OperationFactory.createOperate(operate, a, b).getOrElse(0)}") + logger.info(s"${OperationFactory.createOperate(operate, a, b).getOrElse(0)}") } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/Operation.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/Operation.scala old mode 100644 new mode 100755 similarity index 74% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/Operation.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/Operation.scala index c021471d..5a502712 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/Operation.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/Operation.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.factorypatterndemo +package com.wallace.demo.app.patterns.factorypatterndemo /** * Created by Wallace on 2017/4/15. diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationAdd.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationAdd.scala old mode 100644 new mode 100755 similarity index 88% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationAdd.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationAdd.scala index a17097b6..e6e14b3c --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationAdd.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationAdd.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.factorypatterndemo +package com.wallace.demo.app.patterns.factorypatterndemo import scala.util.Try diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationDiv.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationDiv.scala old mode 100644 new mode 100755 similarity index 84% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationDiv.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationDiv.scala index aafaed45..886f9b6b --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationDiv.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationDiv.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.factorypatterndemo +package com.wallace.demo.app.patterns.factorypatterndemo import com.wallace.demo.app.common.LogSupport @@ -13,7 +13,7 @@ class OperationDiv(numA: Double, numB: Double) extends Operation[Double] with Lo override def calcResult: Option[Double] = { if (numberA == 0 && numberB == 0) { - log.info("至少有一个数不为0.") + logger.info("至少有一个数不为0.") } if (numberB != 0) Try(numberA / numberB).toOption else Try(numberB / numberA).toOption diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationFactory.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationFactory.scala new file mode 100755 index 00000000..6bde34ba --- 
/dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationFactory.scala @@ -0,0 +1,21 @@ +package com.wallace.demo.app.patterns.factorypatterndemo + +/** + * Created by Wallace on 2017/4/15. + */ +object OperationFactory { + def createOperate(opSymbol: String, num_A: Double, num_B: Double): Option[Double] = { + opSymbol match { + case "+" => + OperationAdd(num_A, num_B).calcResult + case "-" => + OperationSub(num_A, num_B).calcResult + case "*" => + OperationMul(num_A, num_B).calcResult + case "/" => + OperationDiv(num_A, num_B).calcResult + case _ => + None + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationMul.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationMul.scala old mode 100644 new mode 100755 similarity index 88% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationMul.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationMul.scala index f2b4b4fb..a7851af5 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationMul.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationMul.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.factorypatterndemo +package com.wallace.demo.app.patterns.factorypatterndemo import scala.util.Try diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationSub.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationSub.scala old mode 100644 new mode 100755 similarity index 89% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationSub.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationSub.scala index 6252fc17..fabc6e2d --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/factorypatterndemo/OperationSub.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/factorypatterndemo/OperationSub.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.factorypatterndemo +package com.wallace.demo.app.patterns.factorypatterndemo /** * Created by Wallace on 2017/4/15. 
diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/Container.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/Container.scala new file mode 100755 index 00000000..d5cf4a9b --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/Container.scala @@ -0,0 +1,10 @@ +package com.wallace.demo.app.patterns.iteratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 18:55 + * Description: + */ +trait Container[T] { + def createIterator(): Iterator[T] +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/Iterator.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/Iterator.scala new file mode 100755 index 00000000..3be05621 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/Iterator.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.iteratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 18:54 + * Description: + */ +trait Iterator[T] { + def hasNext: Boolean + + def next(): T +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/IteratorDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/IteratorDemo.scala new file mode 100755 index 00000000..de4efdc2 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/IteratorDemo.scala @@ -0,0 +1,19 @@ +package com.wallace.demo.app.patterns.iteratordemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: biyu.huang + * Date: 2024/5/13 18:54 + * Description: + */ +object IteratorDemo extends LazyLogging { + def main(args: Array[String]): Unit = { + val elements = List("aaaa", "bbbb", "cccc", "dddd", "eeee") + val nameRepository = new NameRepository(elements) + val iter = nameRepository.createIterator() + while (iter.hasNext) { + logger.info("current element -> %s".format(iter.next())) + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/NameRepository.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/NameRepository.scala new file mode 100755 index 00000000..ecf6f7b3 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/iteratordemo/NameRepository.scala @@ -0,0 +1,28 @@ +package com.wallace.demo.app.patterns.iteratordemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 18:56 + * Description: + */ +class NameRepository(elements: List[String]) extends Container[String] { + override def createIterator(): Iterator[String] = new NameIterator + + private class NameIterator extends Iterator[String] { + private var index: Int = 0 + + override def hasNext: Boolean = { + if (index < elements.length) true else false + } + + override def next(): String = { + if (hasNext) { + val value: String = elements(index) + index += 1 + value + } else { + null + } + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/Boot.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/Boot.scala new file mode 100755 index 00000000..51a5811c --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/Boot.scala @@ -0,0 +1,27 @@ +package com.wallace.demo.app.patterns.observedemo + +import com.wallace.demo.app.common.LogSupport + +/** + * Author: biyu.huang + * Date: 2023/1/30 16:54 + * Description: + */ +object Boot extends LogSupport { + def 
callback(index: Int): IntHolder => Unit = (x: IntHolder) => { + logger.info(s"[observing $index] current value: ${x.get}") + logger.info(x.toString) + } + + def main(args: Array[String]): Unit = { + val intValue: IntHolder = new IntHolder + val callback1: IntHolder => Unit = callback(1000) + val callback2: IntHolder => Unit = callback(2000) + intValue.observe(callback1) + intValue.observe(callback2) + intValue.hold(10) + + intValue.unobserve(callback2) + intValue.hold(12) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/DefaultHandlers.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/DefaultHandlers.scala new file mode 100755 index 00000000..d27b66ff --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/DefaultHandlers.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.observedemo + +/** + * Author: biyu.huang + * Date: 2023/1/30 16:52 + * Description: + */ +trait DefaultHandlers extends Observable { + override type Handler = this.type => Unit + + override def createHandle(callback: this.type => Unit): Handler = callback +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/IntHolder.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/IntHolder.scala new file mode 100755 index 00000000..16cd9cbe --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/IntHolder.scala @@ -0,0 +1,19 @@ +package com.wallace.demo.app.patterns.observedemo + +/** + * Author: biyu.huang + * Date: 2023/1/30 16:53 + * Description: + */ +class IntHolder extends Observable with DefaultHandlers { + private var value: Int = Int.MinValue + + def get: Int = value + + def hold(newValue: Int): Unit = { + value = newValue + notifyListeners() + } + + override def toString: String = "IntStore(" + value + ")" +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/Observable.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/Observable.scala new file mode 100755 index 00000000..6dad4f20 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/Observable.scala @@ -0,0 +1,29 @@ +package com.wallace.demo.app.patterns.observedemo + +import scala.collection.mutable + +/** + * Author: biyu.huang + * Date: 2023/1/30 16:51 + * Description: Observer Pattern(观察者模式) + */ +trait Observable { + type Handler + val callbacks = mutable.Map.empty[Handler, this.type => Unit] + + def observe(callback: this.type => Unit): Handler = { + val handler: Handler = createHandle(callback) + callbacks += (handler -> callback) + handler + } + + def unobserve(handle: Handler): Unit = { + callbacks -= handle + } + + def createHandle(callback: this.type => Unit): Handler + + def notifyListeners(): Unit = { + for (callback <- callbacks.values) callback(this) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsPublisher.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsPublisher.scala new file mode 100755 index 00000000..0a38f369 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsPublisher.scala @@ -0,0 +1,44 @@ +package com.wallace.demo.app.patterns.observedemo.newsdemo + +import java.util +import scala.jdk.CollectionConverters._ + +/** + * Author: biyu.huang + * Date: 2024/4/8
17:23 + * Description: + */ +class NewsPublisher extends Subject { + private final val observers: util.ArrayList[Observer] = new util.ArrayList[Observer]() + + override def addObserver(observer: Observer): Unit = { + observers.add(observer) + } + + override def removeObserver(observer: Observer): Unit = { + observers.remove(observer) + } + + override def notifyObservers(news: String): Unit = { + if (observers.isEmpty) { + logger.warn("no observers ...") + } else { + observers.asScala.foreach { + o => + o.notify(news) + } + } + } + + def addObservers(observers: Observer*): Unit = { + observers.foreach { + o => + addObserver(o) + } + } + + def publishNews(news: String): Unit = { + logger.info("publish news -> %s".format(news)) + notifyObservers(news) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsSubscriber.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsSubscriber.scala new file mode 100755 index 00000000..2e35bc4c --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsSubscriber.scala @@ -0,0 +1,19 @@ +package com.wallace.demo.app.patterns.observedemo.newsdemo + +import java.util.concurrent.LinkedBlockingQueue +import scala.reflect.runtime.universe._ + + +/** + * Author: biyu.huang + * Date: 2024/4/8 17:30 + * Description: + */ +class NewsSubscriber(name: String) extends Observer { + private final val newsQueue: LinkedBlockingQueue[String] = new LinkedBlockingQueue[String]() + + override def notify(news: String): Unit = { + logger.info("%s received news -> %s".format(name, news)) + newsQueue.put(news) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/Observer.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/Observer.scala new file mode 100755 index 00000000..6f64e0bd --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/Observer.scala @@ -0,0 +1,12 @@ +package com.wallace.demo.app.patterns.observedemo.newsdemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: biyu.huang + * Date: 2024/4/8 17:22 + * Description: trait of observer + */ +trait Observer extends LazyLogging { + def notify(news: String): Unit +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/Subject.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/Subject.scala new file mode 100755 index 00000000..86e4177e --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/Subject.scala @@ -0,0 +1,18 @@ +package com.wallace.demo.app.patterns.observedemo.newsdemo + +import com.typesafe.scalalogging.LazyLogging + +import scala.reflect.runtime.universe._ + +/** + * Author: biyu.huang + * Date: 2024/4/8 17:21 + * Description: The subject which was observed + */ +trait Subject extends LazyLogging { + def addObserver(observer: Observer): Unit + + def removeObserver(observer: Observer): Unit + + def notifyObservers(news: String): Unit +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashNormal.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashNormal.scala old mode 100644 new mode 100755 similarity index 57% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashNormal.scala rename to 
demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashNormal.scala index c1658c9b..92886f93 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashNormal.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashNormal.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.strategypatterndemo +package com.wallace.demo.app.patterns.strategypatterndemo /** * Created by Wallace on 2017/4/16. @@ -6,5 +6,5 @@ package com.wallace.demo.app.strategypatterndemo class CashNormal extends CashSuper { override def acceptCash(money: Double): Double = money - override def algTest: String = ??? + override def modeName: String = "Cash Normal" } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashRebate.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashRebate.scala new file mode 100755 index 00000000..80a80570 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashRebate.scala @@ -0,0 +1,17 @@ +package com.wallace.demo.app.patterns.strategypatterndemo + +import scala.util.Try + +/** + * Created by Wallace on 2017/4/16. + */ +class CashRebate(rebate: String) extends CashSuper { + private val DEFAULT_REBATE = 1.0 + private val moneyRebate: Double = Try(rebate.toDouble).getOrElse(DEFAULT_REBATE) + + override def acceptCash(money: Double): Double = { + moneyRebate * money + } + + override def modeName: String = "Cash Rebate" +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashReturn.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashReturn.scala new file mode 100755 index 00000000..6f7cd1bf --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashReturn.scala @@ -0,0 +1,22 @@ +package com.wallace.demo.app.patterns.strategypatterndemo + +import scala.util.Try + +/** + * Created by Wallace on 2017/4/16. + */ +class CashReturn(condition: String, cashBack: String) extends CashSuper { + private val cashCondition: Double = Try(condition.toDouble).getOrElse(0.0) + private val cashReturn: Double = Try(cashBack.toDouble).getOrElse(0.0) + + override def acceptCash(money: Double): Double = { + money match { + case v if v >= cashCondition && cashReturn > 0 => + money - Math.floor(money / cashCondition) * cashReturn + case _ => + money + } + } + + override def modeName: String = "Cash Back" +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashSuper.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashSuper.scala old mode 100644 new mode 100755 similarity index 59% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashSuper.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashSuper.scala index fca42d14..0a6884cf --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashSuper.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/CashSuper.scala @@ -1,4 +1,4 @@ -package com.wallace.demo.app.strategypatterndemo +package com.wallace.demo.app.patterns.strategypatterndemo /** * Created by Wallace on 2017/4/16. 
@@ -6,5 +6,5 @@ package com.wallace.demo.app.strategypatterndemo abstract class CashSuper { def acceptCash(money: Double): Double - def algTest: String + def modeName: String } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/StrategyContext.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/StrategyContext.scala new file mode 100755 index 00000000..f9281a76 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/StrategyContext.scala @@ -0,0 +1,24 @@ +package com.wallace.demo.app.patterns.strategypatterndemo + +/** + * Created by Wallace on 2017/4/16. + */ +class StrategyContext(cs: CashSuper) extends CashSuper { + private val strategy: CashSuper = cs + + // def getAcceptCashResult(money: Double): Double = { + // strategy.acceptCash(money) + // } + // + // def getModeName: String = { + // strategy.modeName + // } + + override def acceptCash(money: Double): Double = { + strategy.acceptCash(money) + } + + override def modeName: String = { + strategy.modeName + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/StrategyDemoRunTest.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/StrategyDemoRunTest.scala old mode 100644 new mode 100755 similarity index 50% rename from demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/StrategyDemoRunTest.scala rename to demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/StrategyDemoRunTest.scala index 990aa028..843a4dd3 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/StrategyDemoRunTest.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/strategypatterndemo/StrategyDemoRunTest.scala @@ -1,14 +1,16 @@ -package com.wallace.demo.app.strategypatterndemo +package com.wallace.demo.app.patterns.strategypatterndemo import com.wallace.demo.app.common.LogSupport /** - * Created by Wallace on 2017/4/16. 
- */ + * Author: biyu.huang + * Date: 2017/4/16 12:09 + * Description: Strategy Pattern(策略模式) + */ object StrategyDemoRunTest extends LogSupport { def main(args: Array[String]): Unit = { - val m_Type: String = scala.io.StdIn.readLine("Please input an mode: ") - val cashStrategy: StrategyContext = m_Type match { + val mode: String = scala.io.StdIn.readLine("Please select a mode: ") + val cashStrategy: StrategyContext = mode match { case "Normal" => new StrategyContext(new CashNormal) case "RebateWithReturn" => @@ -16,8 +18,8 @@ object StrategyDemoRunTest extends LogSupport { case "RebateWithOutReturn" => new StrategyContext(new CashRebate("0.8")) } - val money: Double = scala.io.StdIn.readLine("Please input total money: ").toDouble - val res: Double = cashStrategy.getAcceptCashResult(money) - log.info(s"Total money: $res RMB") + val money: Double = scala.io.StdIn.readLine("Please input total amount: ").toDouble + val res: Double = cashStrategy.acceptCash(money) + logger.info("Current mode -> %s, Total amount -> %s RMB".format(cashStrategy.modeName, res)) } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Beverage.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Beverage.scala new file mode 100755 index 00000000..6351c0bd --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Beverage.scala @@ -0,0 +1,30 @@ +package com.wallace.demo.app.patterns.templatemethoddemo + +import com.typesafe.scalalogging.LazyLogging + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:04 + * Description: + */ +trait Beverage extends LazyLogging { + def prepareBeverage(): Unit = { + boilWater() + brew() + pourToCup() + addCondiments() + logger.info("5 -> here you are!") + } + + private def boilWater(): Unit = { + logger.info("1 -> 烧水") + } + + def brew(): Unit + + def pourToCup(): Unit = { + logger.info("3 -> 倒入杯中") + } + + def addCondiments(): Unit +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Coffee.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Coffee.scala new file mode 100755 index 00000000..c9dd6fe5 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Coffee.scala @@ -0,0 +1,16 @@ +package com.wallace.demo.app.patterns.templatemethoddemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:08 + * Description: + */ +class Coffee extends Beverage { + override def brew(): Unit = { + logger.info("2 -> 冲泡咖啡") + } + + override def addCondiments(): Unit = { + logger.info("4 -> 加糖和牛奶") + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Tea.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Tea.scala new file mode 100755 index 00000000..926996cd --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/Tea.scala @@ -0,0 +1,16 @@ +package com.wallace.demo.app.patterns.templatemethoddemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:12 + * Description: + */ +class Tea extends Beverage { + override def brew(): Unit = { + logger.info("2 -> 冲泡茶叶") + } + + override def addCondiments(): Unit = { + logger.info("4 -> 加柠檬") + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/TemplateMethodDemo.scala 
b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/TemplateMethodDemo.scala new file mode 100755 index 00000000..b4f427f6 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/patterns/templatemethoddemo/TemplateMethodDemo.scala @@ -0,0 +1,16 @@ +package com.wallace.demo.app.patterns.templatemethoddemo + +/** + * Author: biyu.huang + * Date: 2024/5/13 15:13 + * Description: Template Method Pattern(模板方法模式) + */ +object TemplateMethodDemo { + def main(args: Array[String]): Unit = { + val coffee = new Coffee() + coffee.prepareBeverage() + + val tea =new Tea() + tea.prepareBeverage() + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/BasicDoubleQueue.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/BasicDoubleQueue.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/BasicIntQueue.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/BasicIntQueue.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/DemoQueue.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/DemoQueue.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/DemoUnitSpec.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/DemoUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/DoubleDoubling.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/DoubleDoubling.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/IntDoubling.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/IntDoubling.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/Queue.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/queuedemo/Queue.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/redisclient/RedisClientDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/redisclient/RedisClientDemo.scala old mode 100644 new mode 100755 index 437048a8..579fd35d --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/redisclient/RedisClientDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/redisclient/RedisClientDemo.scala @@ -20,7 +20,7 @@ object RedisClientDemo extends Using { def main(args: Array[String]): Unit = { using(new Jedis("localhost", 6379)) { redisCli => - log.info(redisCli.ping()) + logger.info(redisCli.ping()) redisCli.lpush("site-list", "Runoob") redisCli.lpush("site-list", "Google") redisCli.lpush("site-list", "Taobao") diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/roundrobinloop/RoundRobinService.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/roundrobinloop/RoundRobinService.scala new file mode 100755 index 00000000..3a874552 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/roundrobinloop/RoundRobinService.scala @@ -0,0 +1,44 @@ +package com.wallace.demo.app.roundrobinloop + +import java.util.concurrent.{Executors, TimeUnit} +import com.wallace.demo.app.common.LogSupport + +/** + * Author: biyu.huang + * Date: 2023/4/23 10:53 + * Description: + */ +object RoundRobinService extends LogSupport { + val periodSeconds: Int = 60 + val queryCount: Int = 30 + val resources: List[String] = 
List("Resource 1", "Resource 2", "Resource 3", "Resource 4") + + def main(args: Array[String]): Unit = { + val periodMillis: Int = periodSeconds * 1000 + val queryIntervalMillis: Int = periodMillis / queryCount + var resourceIndex: Int = 0 + var iteration: Int = 0 + + val executorService = Executors.newSingleThreadScheduledExecutor() + executorService.scheduleWithFixedDelay(() => { + val resource = resources(resourceIndex) + logger.info(s"[Iteration: $iteration] Executing query on $resource") + // Execute query on the selected resource + resourceIndex = (resourceIndex + 1) % resources.size + iteration += 1 + + if (iteration == (queryCount / 2)) { + logger.info("shut down service") + executorService.shutdown() + } + }, 10 * 1000L, queryIntervalMillis, TimeUnit.MILLISECONDS) + + try { + logger.info("start round-robin service") + executorService.awaitTermination(periodSeconds, TimeUnit.SECONDS) + logger.info("finished round-robin service") + } catch { + case e: InterruptedException => e.printStackTrace() + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/BitMapDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/BitMapDemo.scala old mode 100644 new mode 100755 index eee1b930..afd4d8eb --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/BitMapDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/BitMapDemo.scala @@ -1,25 +1,61 @@ package com.wallace.demo.app.sortdemo -import com.wallace.demo.app.common.LogSupport +class BitMapDemo(bitSize: Int, granularity: Int) { + def this() = this(8192, 32) -class BitMapDemo(var size: Int = 1000) extends LogSupport { - //长度10000,可以存储10000*32的数字 + val bitMap: BitMapBuilder = new BitMapBuilder() + .setBitSize(bitSize) + .setGranularity(granularity) + .build() - private val dataBitMap: Array[Int] = new Array[Int](size) + class BitMapBuilder { + //i >> 5相当于i/32, i&0X1F相当于i%32 + private var bitSize: Int = 0 + private var granularity: Int = 32 + private var bitArray: Array[Int] = Array.empty + private var shift: Int = 0 + private var digit: Int = 0 - //接下来,定义设置位的方法 - //i >> 5相当于i/32, i&0X1F相当于i%32 - def setBit(i: Int): Unit = { - dataBitMap(i >> 5) |= (1 << (i & 0X1F)) - } + def setBitSize(s: Int): BitMapBuilder = { + require(s >= 1 && s <= Int.MaxValue, + s"bitSize must be within the range:[1, Int.MaxValue], current value: $s") + this.bitSize = s + this + } - //定义判断数字是否存在的方法 - def exists(i: Int): Boolean = { - (dataBitMap(i >> 5) & (1 << (i & 0X1F))) != 0 - } + def setGranularity(g: Int): BitMapBuilder = { + require(g > 0 && g <= 32, s"granularity must be within the range:(0, 32], current value: $g") + this.granularity = g + this + } + + def build(): BitMapBuilder = { + this.bitArray = new Array[Int]((bitSize + (granularity - 1)) / granularity) + this.shift = granularity.toBinaryString.length - 1 + this.digit = (Math.pow(2, this.shift) - 1).toInt + this + } + + def setBit(i: Int): Unit = { + this.bitArray(i >> this.shift) |= (1 << (i & this.digit)) + } + + //定义判断数字是否存在的方法 + def exists(i: Int): Boolean = { + (this.bitArray(i >> this.shift) & (1 << (i & this.digit))) != 0 + } + + // 最后,定义重置方法 + def reset(i: Int): Unit = { + this.bitArray(i >> this.shift) &= (~(1 << (i & this.digit))) + } + + def bitCount(): Long = { + this.bitArray.map(x => java.lang.Integer.bitCount(x)).sum + } - // 最后,定义重置方法 - def reset(i: Int): Unit = { - dataBitMap(i >> 5) &= (~(1 << (i & 0X1F))) + override def toString: String = { + "BitMap(" + this.bitArray.mkString(",") + ")" + } } } diff --git
a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/HeapSortDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/HeapSortDemo.scala old mode 100644 new mode 100755 index 907230f0..8dcc868a --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/HeapSortDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/HeapSortDemo.scala @@ -118,14 +118,14 @@ object HeapSortDemo extends LogSupport { (0 to (data.length - 2) / 2).reverse.foreach { i => - log.info(s"index1: $i") + logger.info(s"index1: $i") downAndAdjust(comparator)(data, i, data.length) } data.foreach(println) (1 until data.length).reverse.foreach { i => - log.info(s"index2: $i") + logger.info(s"index2: $i") val temp = data(i) data.update(i, data(0)) data.update(0, temp) diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/ParHeapSort.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/ParHeapSort.scala old mode 100644 new mode 100755 index ac71e73b..99b9bfe3 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/ParHeapSort.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/ParHeapSort.scala @@ -1,8 +1,8 @@ package com.wallace.demo.app.sortdemo /** - * Created by 10192057 on 2018/8/20 0020. - */ + * Created by 10192057 on 2018/8/20 0020. + */ class ParHeapSort { val combop: (List[Int], List[Int]) => List[Int] = (ls1: List[Int], ls2: List[Int]) => ls1.:::(ls2) @@ -17,108 +17,108 @@ import com.wallace.demo.app.common.LogSupport import scala.collection.mutable /** - * 实现并行堆排序算法 - * - */ + * 实现并行堆排序算法 + * + */ class HeapSort[A, B, S <: Iterable[A]](f: A => B)(implicit ord: Ordering[B]) extends LogSupport { val combop: (List[Int], List[Int]) => List[Int] = (ls1: List[Int], ls2: List[Int]) => ls1.:::(ls2) val seqop: (List[Int], Int) => List[Int] = (ls: List[Int], value: Int) => ls.::(value) /** - * 对l排序返回排序后的Seq - * - * @param l 待排序集合的迭代器 - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对l排序返回排序后的Seq + * + * @param l 待排序集合的迭代器 + * @param desc 降/升序(默认为true,降序) + * @return + */ def sort(l: S, desc: Boolean = true): mutable.Seq[A] = HeapSort.sort(f)(l, 0, desc) /** - * 对l排序并返回前top个结果 - * - * @param l 待排序集合的迭代器 - * @param top 返回最多结果数目 - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对l排序并返回前top个结果 + * + * @param l 待排序集合的迭代器 + * @param top 返回最多结果数目 + * @param desc 降/升序(默认为true,降序) + * @return + */ def top(l: S, top: Int, desc: Boolean = true): mutable.Seq[A] = HeapSort.sort(f)(l, top, desc) /** - * 对可变集合排序,返回排序后的Seq - * - * @param l 待排序可变集合的迭代器 - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对可变集合排序,返回排序后的Seq + * + * @param l 待排序可变集合的迭代器 + * @param desc 降/升序(默认为true,降序) + * @return + */ def sortM[M <: mutable.Seq[A]](l: M, desc: Boolean = true): mutable.Seq[A] = HeapSort.sortMutable(f)(l, 0, desc) /** - * 对可变集合l排序并返回前top个结果 - * - * @param l 待排序可变集合的迭代器 - * @param top 返回最多结果数目 - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对可变集合l排序并返回前top个结果 + * + * @param l 待排序可变集合的迭代器 + * @param top 返回最多结果数目 + * @param desc 降/升序(默认为true,降序) + * @return + */ def topM[M <: mutable.Seq[A]](l: M, top: Int, desc: Boolean = true): mutable.Seq[A] = HeapSort.sortMutable(f)(l, top, desc) /** - * 对可变集合l并行排序并返回前top个结果 - * - * @param l 待排序可变集合的迭代器 - * @param top 返回最多结果数目 - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对可变集合l并行排序并返回前top个结果 + * + * @param l 待排序可变集合的迭代器 + * @param top 返回最多结果数目 + * @param desc 降/升序(默认为true,降序) + * @return + */ def topParM[M <: mutable.Seq[A]](l: M, top: Int, desc: Boolean = true): 
mutable.Seq[A] = HeapSort.topParMutable(f)(l, top, desc) /** - * 对可变集合l的指定范围排序并返回排序后的Seq - * - * @param seq 待排序可变集合 - * @param top 返回最多结果数目 - * @param desc 降/升序(默认为true,降序) - * @param from 待排序的起始位置 - * @param until 待排序的结束位置 - * @return - */ + * 对可变集合l的指定范围排序并返回排序后的Seq + * + * @param seq 待排序可变集合 + * @param top 返回最多结果数目 + * @param desc 降/升序(默认为true,降序) + * @param from 待排序的起始位置 + * @param until 待排序的结束位置 + * @return + */ def sortRange[M <: mutable.Seq[A]](seq: M, top: Int, desc: Boolean = true)(from: Int = 0, until: Int = seq.length): (Int, Int) = { HeapSort.sortMutableRange(f)(seq, top, desc)(from, until) } /** - * 对seq中两个已经排序的区段进行合并排序,将src合并到dst - * - * @param seq 可变集合 - * @param src 待合并的源区段(起始位置,结束位置) - * @param dst 待合并的目标区段(起始位置,结束位置) - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对seq中两个已经排序的区段进行合并排序,将src合并到dst + * + * @param seq 可变集合 + * @param src 待合并的源区段(起始位置,结束位置) + * @param dst 待合并的目标区段(起始位置,结束位置) + * @param desc 降/升序(默认为true,降序) + * @return + */ def merge2Seq(seq: mutable.Seq[A], src: (Int, Int), dst: (Int, Int), desc: Boolean = true): (Int, Int) = HeapSort.merge2Seq(f)(seq, src, dst, desc) /** - * 对seq中两个已经排序的区段进行合并排序,将src合并到dst - * - * @param seq 可变集合 - * @param src 待合并的源区段(起始位置,结束位置) - * @param dst 待合并的目标区段(起始位置,结束位置) - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对seq中两个已经排序的区段进行合并排序,将src合并到dst + * + * @param seq 可变集合 + * @param src 待合并的源区段(起始位置,结束位置) + * @param dst 待合并的目标区段(起始位置,结束位置) + * @param desc 降/升序(默认为true,降序) + * @return + */ def merge2Seq2(seq: mutable.Seq[A], src: (Int, Int), dst: (Int, Int), desc: Boolean = true): (Int, Int) = HeapSort.merge2Seq2(f)(seq, src, dst, desc) /** - * 对seq中两个已经排序的区段进行合并排序,将src合并到dst
- * 该算法在排序过程不申请新内存 - * - * @param seq 可变集合 - * @param src 待合并的源区段(起始位置,结束位置) - * @param dst 待合并的目标区段(起始位置,结束位置) - * @param desc 降/升序(默认为true,降序) - * @return - */ + * 对seq中两个已经排序的区段进行合并排序,将src合并到dst
+ * 该算法在排序过程不申请新内存 + * + * @param seq 可变集合 + * @param src 待合并的源区段(起始位置,结束位置) + * @param dst 待合并的目标区段(起始位置,结束位置) + * @param desc 降/升序(默认为true,降序) + * @return + */ def merge2SeqNM(seq: mutable.Seq[A], src: (Int, Int), dst: (Int, Int), desc: Boolean = true): (Int, Int) = HeapSort.merge2SeqNM(f)(seq, src, dst, desc) } @@ -364,8 +364,8 @@ object HeapSort extends LogSupport { def swapStHead(): Unit = { swapTop(st) - swapst = swapStTail - swapqh = swapQhEnable + swapst = () => swapStTail() + swapqh = () => swapQhEnable() qh = st qbf = st qbt = st @@ -382,8 +382,8 @@ object HeapSort extends LogSupport { qh = nextQh() } - swapst = swapStHead - swapqh = swapQhDisable + swapst = () => swapStHead() + swapqh = () => swapQhDisable() while (idx >= dst._1 && st >= src._1) { if (cmpdst(st)) { swapst() @@ -434,7 +434,7 @@ object HeapSort extends LogSupport { val rnd = new java.util.Random() val l = Array.tabulate[Int](40)((x: Int) => rnd.nextInt(x + 100)) for (i <- 0 to 0) { - log.info("==============time ", i, "=================") + logger.info("==============time ", i, "=================") val s = l.toBuffer[Int] println("=========> s: " + s) val t1: Long = System.currentTimeMillis diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/SortDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/sortdemo/SortDemo.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashRebate.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashRebate.scala deleted file mode 100644 index 5857b86f..00000000 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashRebate.scala +++ /dev/null @@ -1,17 +0,0 @@ -package com.wallace.demo.app.strategypatterndemo - -import scala.util.Try - -/** - * Created by Wallace on 2017/4/16. - */ -class CashRebate(m_Rebate: String) extends CashSuper { - private val DEFAULT_REBATE = 1.0 - private val moneyRebate: Double = Try(m_Rebate.toDouble).getOrElse(DEFAULT_REBATE) - - override def acceptCash(money: Double): Double = { - moneyRebate * money - } - - override def algTest: String = ??? -} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashReturn.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashReturn.scala deleted file mode 100644 index b688bf5a..00000000 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/CashReturn.scala +++ /dev/null @@ -1,22 +0,0 @@ -package com.wallace.demo.app.strategypatterndemo - -import scala.util.Try - -/** - * Created by Wallace on 2017/4/16. - */ -class CashReturn(m_Condition: String, m_Return: String) extends CashSuper { - private val moneyCondition: Double = Try(m_Condition.toDouble).getOrElse(0.0) - private val moneyReturn: Double = Try(m_Return.toDouble).getOrElse(0.0) - - override def acceptCash(money: Double): Double = { - money match { - case v if v >= moneyCondition && moneyReturn > 0 => - money - Math.floor(money / moneyCondition) * moneyReturn - case _ => - money - } - } - - override def algTest: String = ??? 
-} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/StrategyContext.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/StrategyContext.scala deleted file mode 100644 index 06af4bbe..00000000 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/strategypatterndemo/StrategyContext.scala +++ /dev/null @@ -1,16 +0,0 @@ -package com.wallace.demo.app.strategypatterndemo - -/** - * Created by Wallace on 2017/4/16. - */ -class StrategyContext(cs: CashSuper) { - private val strategy: CashSuper = cs - - def getAcceptCashResult(money: Double): Double = { - strategy.acceptCash(money) - } - - def getAlgTest: String = { - strategy.algTest - } -} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/topKDemo/BigDataTopNDemo.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/topKDemo/BigDataTopNDemo.scala old mode 100644 new mode 100755 index 97cf7212..00649f06 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/topKDemo/BigDataTopNDemo.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/topKDemo/BigDataTopNDemo.scala @@ -57,8 +57,8 @@ object BigDataTopNDemo extends LogSupport { fw.write(record) fw.flush() } match { - case Success(_) => log.debug(s"Succeed to write record for $fileName.") - case Failure(e) => log.error(s"Aborted to split $fileName", e) + case Success(_) => logger.debug(s"Succeed to write record for $fileName.") + case Failure(e) => logger.error(s"Aborted to split $fileName", e) } } splitFiles diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/topKDemo/KClosetPoint.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/topKDemo/KClosetPoint.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/unapply/DemoBoot.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/unapply/DemoBoot.scala new file mode 100644 index 00000000..58749df7 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/unapply/DemoBoot.scala @@ -0,0 +1,18 @@ +package com.wallace.demo.app.unapply + +import com.wallace.demo.app.common.LogSupport + +/** + * Author: biyu.huang + * Date: 2025/2/19 10:54 + * Description: + */ +object DemoBoot extends LogSupport { + def main(args: Array[String]): Unit = { + val demo: DemoExpression = DemoExpression("Wallace", 33) + demo match { + case DemoExpression(name, age) => logger.info(s"Name: $name, Age: $age") + case _ => logger.info("No match.") + } + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/unapply/DemoExpression.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/unapply/DemoExpression.scala new file mode 100644 index 00000000..f0603e64 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/unapply/DemoExpression.scala @@ -0,0 +1,14 @@ +package com.wallace.demo.app.unapply + +/** + * Author: biyu.huang + * Date: 2025/2/19 10:21 + * Description: + */ +case class DemoExpression(name: String, age: Int) + +object DemoExpression { + def unapply(arg: DemoExpression): Option[(String, Int)] = { + Some((arg.name, arg.age)) + } +} \ No newline at end of file diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/AdminUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/AdminUtils.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/ArgsParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/ArgsParser.scala new file mode 100755 index 00000000..b02f5291 --- 
/dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/ArgsParser.scala @@ -0,0 +1,91 @@ +package com.wallace.demo.app.utils + +import com.typesafe.scalalogging.LazyLogging +import com.wallace.demo.app.common.Using +import org.apache.commons.cli.{CommandLine, GnuParser, Options} +import org.apache.hadoop.conf.Configuration +import org.apache.hadoop.fs.{FileSystem, Path} + +import java.io.{FileNotFoundException, FileReader} +import java.util.Properties + +/** + * Author: wallace huang + * Date: 2022/11/14 16:39 + * Description: Args Parser + */ +object ArgsParser extends Using with LazyLogging { + def getProperty(props: Properties, key: String): String = { + if (props.getProperty(key) == "") { + null + } else { + props.getProperty(key) + } + } + + def getProperty(props: Properties, key: String, splitter: String): Array[String] = { + val re: String = props.getProperty(key) + if (re == null || re == "") { + null + } else { + re.split(splitter) + } + } + + def checkHDFSPath(path: String, conf: Configuration): Boolean = { + using(FileSystem.get(conf)) { + fs => + if (fs.exists(new Path(path))) { + true + } else { + logger.info(s"$path not existed") + false + } + } + } + + def loadProps(args: Array[String], + defaultProps: String = "default.properties"): Properties = { + val cmd: CommandLine = parseArguments(args) + val props: Properties = loadJobProps(cmd.getOptionValue("f"), defaultProps) + val conf: Array[String] = cmd.getOptionValues("conf") + if (null != conf && conf.nonEmpty) { + conf.map { + confItem => + val (k, v) = parseCommandConf(confItem) + props.put(k, v) + } + } + props + } + + private def parseArguments(args: Array[String]): CommandLine = { + val parser: GnuParser = new GnuParser + val options: Options = new Options() + options.addOption("f", true, "properties file of job") + options.addOption("conf", true, "param setting of the job") + parser.parse(options, args) + } + + @throws[FileNotFoundException] + private def loadJobProps(path: String, defaultProps: String): Properties = { + val jobProps: Properties = new Properties() + if (null != path && path.nonEmpty) { + jobProps.load(new FileReader(path)) + } else { + if (defaultProps != null && defaultProps.nonEmpty) { + jobProps.load(this.getClass.getClassLoader.getResourceAsStream(defaultProps)) + } + } + jobProps + } + + private def parseCommandConf(confStr: String): (String, String) = { + val (k, v) = confStr.split("=", 2).toSeq match { + case Seq(k, v) => (k, v) + case _ => + throw new IllegalArgumentException(s"can't parse CMD conf $confStr") + } + (k, v) + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CombineMultiVersion.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CombineMultiVersion.scala old mode 100644 new mode 100755 index e9dd29ab..3a6f6335 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CombineMultiVersion.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CombineMultiVersion.scala @@ -11,13 +11,13 @@ class CombineMultiVersion extends LogSupport { val temp: Array[(String, Int)] = originSQLText.split(",", -1).map(_.trim.toLowerCase).zipWithIndex for (colName <- toBeRepairedCol) { val index = colIndex(temp, colName) - log.debug(s"[RepairedColIndex]: $index") + logger.debug(s"[RepairedColIndex]: $index") if (index > 0) { val repairedValue = repairColValue(colName) - log.debug(s"[RepairedValue]: $repairedValue") + logger.debug(s"[RepairedValue]: $repairedValue") temp.update(index, (repairedValue, index)) } else { - log.error( 
+ logger.error( s""" |######################################################################### |[CombineMultiVersion] => Index of #$colName#: $index, it was Out Of Range. diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/ConcatStringUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/ConcatStringUtils.scala old mode 100644 new mode 100755 index 25ac19e2..ba0e0e87 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/ConcatStringUtils.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/ConcatStringUtils.scala @@ -26,14 +26,14 @@ object ConcatStringUtils extends LogSupport { if (index % nCellCols == x) { nCellTempResData.append(nCellData(index)) } else { - log.debug("[ConcatStringUtils]: No match value.") + logger.debug("[ConcatStringUtils]: No match value.") } } nCellResData.append(nCellTempResData.result().mkString("$")) } - log.debug(s"[HeadData] ${headData.mkString("##")}") - log.debug(s"[nCellData] ${nCellData.mkString("##")}") - log.debug(s"[TailData] ${tailData.mkString("##")}") + logger.debug(s"[HeadData] ${headData.mkString("##")}") + logger.debug(s"[nCellData] ${nCellData.mkString("##")}") + logger.debug(s"[TailData] ${tailData.mkString("##")}") headData ++ nCellResData.result().toArray[String] ++ tailData } @@ -42,9 +42,9 @@ object ConcatStringUtils extends LogSupport { val temp = str.split(",", -1) val result = concatNColumn(temp, 5, 4) // a1,b2,c3,d4,e5,4,n1$n2$n3$n4,n11$n21$n31$n41,n12$n22$n32$n42,n13$n23$n33$n43,f6,g8,h9 for (elem <- result) { - log.info("######## " + elem) + logger.info("######## " + elem) } - log.info("@@@@@@ " + result.mkString(",")) + logger.info("@@@@@@ " + result.mkString(",")) } /** 支持连续多个可变长的字段, 返回结果的字段个数是固定的. **/ diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CoreUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CoreUtils.scala old mode 100644 new mode 100755 index fb83c55a..ac181e00 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CoreUtils.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/CoreUtils.scala @@ -1,8 +1,9 @@ package com.wallace.demo.app.utils -import java.io.EOFException -import java.nio.{ByteBuffer, ByteOrder} +import java.io.{BufferedReader, EOFException, InputStream, InputStreamReader} +import java.nio.ByteBuffer import java.nio.channels.ReadableByteChannel +import scala.collection.mutable /** * Created by 10192057 on 2018/6/8 0008. 
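The log -> logger renames running through these hunks are consistent with the project's LogSupport trait now exposing a scala-logging logger field (an assumption; the trait itself does not appear in this section). A minimal sketch of such a trait and its use, with hypothetical object names:

import com.typesafe.scalalogging.LazyLogging

// Hypothetical shape of a LogSupport trait backed by scala-logging; the
// real trait in the repository may differ.
trait LogSupport extends LazyLogging

object LoggingSketch extends LogSupport {
  def main(args: Array[String]): Unit = {
    logger.info("plain message")
    logger.debug(s"interpolated message: ${40 + 2}")
  }
}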
@@ -29,4 +30,19 @@ object CoreUtils { ((bytes(offset + 2) & 0xFF) << 8) | (bytes(offset + 3) & 0xFF) } + + def streamToString(is: InputStream): String = { + val rd: BufferedReader = new BufferedReader(new InputStreamReader(is, "UTF-8")) + val builder: mutable.StringBuilder = new mutable.StringBuilder() + try { + var line = rd.readLine + while (line != null) { + builder.append(line + "\n") + line = rd.readLine + } + } finally { + rd.close() + } + builder.toString + } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/DateTimeUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/DateTimeUtils.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FileUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FileUtils.scala old mode 100644 new mode 100755 index 74cd3ecb..4870de42 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FileUtils.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FileUtils.scala @@ -47,7 +47,7 @@ object FileUtils extends Using { br => while (br.ready()) { val line: String = br.readLine() - log.info(line) + logger.info(line) } } } @@ -83,15 +83,15 @@ object FileUtils extends Using { def betterFilesFunc(): Unit = { import better.files.{File, FileMonitor} val f = File("./demo/ScalaDemo/src/main/resources/testingData.csv") - log.info(s"File Path: ${f.path}") + logger.info(s"File Path: ${f.path}") //log.info(s"File CheckSum: ${f.sha512}") - log.info(s"File Line Size: ${f.lines(Charset.forName("UTF-8")).size}") - log.info(s"File Context Size: ${f.size / 1024L / 1024L} MB") - log.info(s"File LastModifiedTime: ${f.lastModifiedTime}") + logger.info(s"File Line Size: ${f.lines(Charset.forName("UTF-8")).size}") + logger.info(s"File Context Size: ${f.size / 1024L / 1024L} MB") + logger.info(s"File LastModifiedTime: ${f.lastModifiedTime}") //TODO 普通的Java文件监控 val watchDir: File = f.parent - log.info(s"File Parent: $watchDir, IsDirectory: ${watchDir.isDirectory}") + logger.info(s"File Parent: $watchDir, IsDirectory: ${watchDir.isDirectory}") import java.nio.file.{StandardWatchEventKinds => EventType} val service: WatchService = watchDir.newWatchService watchDir.register(service, events = Seq(EventType.ENTRY_MODIFY)) @@ -104,7 +104,7 @@ object FileUtils extends Using { // } override def onModify(file: File, count: Int): Unit = { symbolCnt += 1 - log.info(s"${file.name} got modified @$count") + logger.info(s"${file.name} got modified @$count") } } watcher.start()(ExecutionContext.global) @@ -128,7 +128,7 @@ object FileUtils extends Using { while (symbolCnt < 10) { Thread.sleep(1000) - log.info(s"Watching ${watchDir.name} ($symbolCnt)...") + logger.info(s"Watching ${watchDir.name} ($symbolCnt)...") } } @@ -187,7 +187,7 @@ object FileUtils extends Using { val costTime3 = runtimeDuration { readTarGZFile("./demo/ScalaDemo/src/main/resources/HW_HN_OMC1-mr-134.175.57.16-20170921043000-20170921044500-20170921051502-001.tar.gz") } - log.info(s"CostTime3: $costTime3 ms.") + logger.info(s"CostTime3: $costTime3 ms.") // // TODO Run test for filenamePrefixFromOffset // val offset = filenamePrefixFromOffset(100L) @@ -230,14 +230,14 @@ object FileUtils extends Using { // val totalLines = getTotalLines(testFile) val totalLines = getTotalLines(srcFile) val endTime = System.currentTimeMillis() - log.info(s"[$srcFileName]TotalLines: $totalLines, CostTime: ${endTime - startTime} ms.") + logger.info(s"[$srcFileName]TotalLines: $totalLines, CostTime: ${endTime - 
startTime} ms.") // TODO Read readZipArchiveFile //val fileName = "./demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ERICSSON_OMC1_335110_20180403101500.zip" // val fileName = "./demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ERICSSON_OMC1_335112_20180403101500.xml.zip" val fileName = "./demo/ScalaDemo/src/main/resources/FDD-LTE_MRS_ZTE_OMC1_637784_20170522204500.zip" val costTime4: Double = runtimeDuration(readZipArchiveFile(fileName)) - log.info(s"CostTime4: $costTime4 ms.") + logger.info(s"CostTime4: $costTime4 ms.") //TODO Get File Header var res: Option[Array[(String, Array[Byte])]] = None @@ -246,9 +246,9 @@ object FileUtils extends Using { } res.get.foreach { elem => - log.info(s"FileName: ${elem._1}, File Header Bytes: ${elem._2.take(3).mkString("_")}") + logger.info(s"FileName: ${elem._1}, File Header Bytes: ${elem._2.take(3).mkString("_")}") } - log.info(s"CostTime5: $costTime5 ms.") + logger.info(s"CostTime5: $costTime5 ms.") } @@ -312,7 +312,7 @@ object FileUtils extends Using { offset = tempFileAndOffset.file.length() + tempFileAndOffset.offset tempFileAndOffset.file } - log.warn(s"Offset: $offset.") + logger.warn(s"Offset: $offset.") if (destFile.length() <= DEFAULT_FILE_SIZE_THRESHOLD) { destFile } else { @@ -332,7 +332,7 @@ object FileUtils extends Using { val srcFileSize: Long = fcIn.size() assert(srcFileSize < Int.MaxValue, s"The size of FileInputStream is too long: $srcFileSize > ${Int.MaxValue}.") val minBufferCapacity: Int = Math.min(srcFileSize.toInt, defaultBufCapacity) - log.info(s"Before => fcIn: $srcFileSize, fcOut: ${fcOut.size()}, minBufferCapacity: $minBufferCapacity.") + logger.info(s"Before => fcIn: $srcFileSize, fcOut: ${fcOut.size()}, minBufferCapacity: $minBufferCapacity.") val buffer: ByteBuffer = ByteBuffer.allocate(minBufferCapacity) while (fIns.available() > 0) { buffer.clear() @@ -341,11 +341,11 @@ object FileUtils extends Using { buffer.flip() fcOut.write(buffer) } else { - log.warn("Buffer is empty.") + logger.warn("Buffer is empty.") } } - log.info(s"After => fcIn: $srcFileSize, fcOut: ${fcOut.size()}") + logger.info(s"After => fcIn: $srcFileSize, fcOut: ${fcOut.size()}") } def readGZFile(fileName: String): Unit = { @@ -356,7 +356,7 @@ object FileUtils extends Using { gis => using(new BufferedReader(new InputStreamReader(gis, "GBK"))) { br => - br.lines().toArray.foreach(line => log.info(s"$line")) + br.lines().toArray.foreach(line => logger.info(s"$line")) while (br.ready()) { val oneLine = br.readLine().replaceAll("null", "") oneLine.length @@ -367,7 +367,7 @@ object FileUtils extends Using { } } catch { case NonFatal(e) => - log.error(s"Failed to read $fileName: ", e) + logger.error(s"Failed to read $fileName: ", e) } } @@ -387,7 +387,7 @@ object FileUtils extends Using { var cnt: Long = 1 while (br.ready() && (cnt <= size)) { val line = br.readLine() - log.info(s"${line.length}") + logger.info(s"${line.length}") //log.info(s"$cnt: $line") cnt += 1 } @@ -398,7 +398,7 @@ object FileUtils extends Using { } } catch { case NonFatal(e) => - log.error(s"Failed to read $fileName: ", e) + logger.error(s"Failed to read $fileName: ", e) } } @@ -417,9 +417,9 @@ object FileUtils extends Using { } else { val size = entry.getSize - log.info(s"Entry Name: ${entry.getName}, Entry Size: $size.") + logger.info(s"Entry Name: ${entry.getName}, Entry Size: $size.") val defaultSize: Long = Math.min(Runtime.getRuntime.freeMemory(), Int.MaxValue) - log.debug(s"FreeMemory: ${Runtime.getRuntime.freeMemory() / (1024 * 1024)} MB. 
Default Bytes Size: $defaultSize Bytes") + logger.debug(s"FreeMemory: ${Runtime.getRuntime.freeMemory() / (1024 * 1024)} MB. Default Bytes Size: $defaultSize Bytes") val currentSize: Long = if (size < 0) defaultSize else size val bos = new ByteArrayOutputStream(currentSize.toInt) IOUtils.copy(zipIns, bos, 40960) @@ -427,7 +427,7 @@ object FileUtils extends Using { using(new BufferedReader(new InputStreamReader(res))) { br => while (br.ready()) { - log.info(br.readLine()) + logger.info(br.readLine()) } } bos.flush() @@ -498,9 +498,9 @@ object FileUtils extends Using { if (res.isDefined) { val mrRecords = res.get.getResult val eNBId: String = mrRecords.geteNB() - log.info(s"[$cnt]$entryName => EnodeBID: $eNBId") + logger.info(s"[$cnt]$entryName => EnodeBID: $eNBId") } else { - log.debug(s"Parsed $entryName and Returned None.") + logger.debug(s"Parsed $entryName and Returned None.") } } } @@ -515,7 +515,7 @@ object FileUtils extends Using { case Success(result) => Some(result) case Failure(e) => - log.error(s"Failed to parse $entryName: ", e) + logger.error(s"Failed to parse $entryName: ", e) None } } @@ -534,10 +534,10 @@ object FileUtils extends Using { val projectConfigFile = fileName val udfConfigFile: Array[File] = Array(new File(SystemEnvUtils.getUserDir + "../conf/" + fileName)) if (udfConfigFile.nonEmpty) { - log.debug(s"loading file[${udfConfigFile.head.getPath}] and resource[$projectConfigFile]") + logger.debug(s"loading file[${udfConfigFile.head.getPath}] and resource[$projectConfigFile]") ConfigFactory.parseFile(udfConfigFile.head).withFallback(ConfigFactory.load(projectConfigFile)) } else { - log.debug(s"loading resource[$projectConfigFile]") + logger.debug(s"loading resource[$projectConfigFile]") ConfigFactory.load(projectConfigFile) } } @@ -587,7 +587,7 @@ object FileUtils extends Using { val tmpG = group.addGroup("time") tmpG.append("ttl", r.nextInt(9) + 1) tmpG.append("ttl2", r.nextInt(9) + "_a") - log.info("Group String: " + tmpG.toString) + logger.info("Group String: " + tmpG.toString) writer.write(group) } } @@ -663,7 +663,7 @@ object FileUtils extends Using { if (file.delete()) { delState = true } else { - log.error(s"Failed to delete ${ + logger.error(s"Failed to delete ${ file.getCanonicalPath }.") delState = false @@ -673,20 +673,20 @@ object FileUtils extends Using { if (file.delete()) { delState = true } else { - log.error(s"Failed to delete ${ + logger.error(s"Failed to delete ${ file.getCanonicalPath }.") delState = false } } else { - log.warn(s"Failed to set executable for ${ + logger.warn(s"Failed to set executable for ${ file.getName }") file.deleteOnExit() } } } else { - log.warn(s"${ + logger.warn(s"${ file.getName } doesn't exist or has no execute permission.") } @@ -708,7 +708,7 @@ object FileUtils extends Using { } } } else { - log.debug(s"${ + logger.debug(s"${ rootFile.getName } is an empty directory, just delete it.") } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FtpUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FtpUtils.scala old mode 100644 new mode 100755 index ecec8449..4048a772 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FtpUtils.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FtpUtils.scala @@ -39,8 +39,8 @@ class FtpUtils(ftpMetadata: FtpMetaData) extends Using { config.put("StrictHostKeyChecking", "no") session.setConfig(config) session.setTimeout(ftpMetadata.timeOut) - log.debug("sftp session connected") - log.debug("opening channel") + logger.debug("sftp 
session connected") + logger.debug("opening channel") Try { session.connect() val channel = session.openChannel(ftpMetadata.ftpType).asInstanceOf[ChannelSftp] @@ -51,7 +51,7 @@ class FtpUtils(ftpMetadata: FtpMetaData) extends Using { } match { case Success(ch) => Some(ch) case Failure(e) => - log.error("Failed to login sftp server", e) + logger.error("Failed to login sftp server", e) None } @@ -73,7 +73,7 @@ class FtpUtils(ftpMetadata: FtpMetaData) extends Using { } match { case Success(client) => Some(client) case Failure(e) => - log.error("Failed to create FTP client", e) + logger.error("Failed to create FTP client", e) None } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FuncRuntimeDur.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/FuncRuntimeDur.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/IDVerifier.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/IDVerifier.scala new file mode 100755 index 00000000..c50bd685 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/IDVerifier.scala @@ -0,0 +1,23 @@ +package com.wallace.demo.app.utils + +/** + * Author: biyu.huang + * Date: 2024/5/30 10:13 + * Description: + */ +object IDVerifier { + val parameters: Array[Int] = Array(7, 9, 10, 5, 8, 4, 2, 1, 6, 3, 7, 9, 10, 5, 8, 4, 2) + + val verifierCode: Array[Char] = Array('1', '0', 'X', '9', '8', '7', '6', '5', '4', '3', '2') + + def evaluate(id: String): Boolean = { + val idList: List[String] = id.toList.map(_.toString) + val tmp: Int = idList.take(17).zipWithIndex.map(x => x._1.toInt * parameters(x._2)).sum + val code: Char = verifierCode(tmp % 11) + code == id.last + } + + def main(args: Array[String]): Unit = { + evaluate("") + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/JsonFormatter.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/JsonFormatter.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/MathUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/MathUtils.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/MySqlParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/MySqlParser.scala new file mode 100644 index 00000000..a5dd0e76 --- /dev/null +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/MySqlParser.scala @@ -0,0 +1,354 @@ +package com.wallace.demo.app.utils + +import com.wallace.demo.app.common.LogSupport + +import scala.collection.mutable +import scala.collection.mutable.ListBuffer +import scala.util.control.NonFatal + +/** + * Author: biyu.huang + * Date: 2024/10/22 09:51 + * Description: Parse MySQL CREATE TABLE statements based on AST(Abstract Syntax Tree). 
+ */ +// 基础的 Token 特征 +sealed trait Token { + override def toString: String = this match { + case symbol: Symbol => symbol.value + case number: NumberLiteral => number.value.toString + case str: StringLiteral => str.value + case dataType: DataType => dataType.value + case id: Identifier => id.value + case kw: Keyword => kw.value + case _ => super.toString + } +} + +// 具体的 Token 类型 +case class Keyword(value: String) extends Token + +case class DataType(value: String) extends Token + +case class Identifier(value: String) extends Token + +case class NumberLiteral(value: Int) extends Token + +case class StringLiteral(value: String) extends Token + +case class Symbol(value: String) extends Token + +case object EOF extends Token // 结束标记 + +sealed trait Constraint + +case object PrimaryKey extends Constraint + +case object NotNull extends Constraint + +case object AutoIncrement extends Constraint + +case class Default(value: String) extends Constraint + +case class CharacterSet(value: String) extends Constraint + +case class Collate(value: String) extends Constraint + +case class FieldType( + baseType: String, + fullDataType: Option[String] = None +) + +sealed trait AST + +case class ColumnDefinition( + columnName: String, + dataType: String, + constraints: List[Constraint] = List(), + comment: Option[String] = None, + isUnsigned: Boolean = false, // 添加无符号标志 + isZeroFill: Boolean = false +) extends AST + +case class IndexDefinition( + indexName: Option[String], + columns: List[String], + isUnique: Boolean = false +) extends AST + +case class TableDefinition( + tableName: String, + isTemporary: Boolean, + columns: List[ColumnDefinition], + indexes: List[IndexDefinition] = List() +) extends AST + +class MySqlParser(tokens: List[Token]) extends LogSupport { + private var currentPosition = 0 + + private def consume(): Token = { + val token = tokens(currentPosition) + currentPosition += 1 + token + } + + private def peek(): Token = tokens(currentPosition) + + def parse(): TableDefinition = { + try { + expect(Keyword("CREATE")) + val isTemporary = if (peek() == Keyword("TEMPORARY")) { + consume() // consume TEMPORARY + true + } else { + false + } + expect(Keyword("TABLE")) + if (peek() == Keyword("IF")) { + expect(Keyword("IF")) // consume IF + expect(Keyword("NOT")) // consume NOT + expect(Keyword("EXISTS")) // consume EXISTS + } + val tableName: String = consume().asInstanceOf[Identifier].value + expect(Symbol("(")) + + val columns: ListBuffer[ColumnDefinition] = mutable.ListBuffer[ColumnDefinition]() + val indexes: ListBuffer[IndexDefinition] = mutable.ListBuffer[IndexDefinition]() + + while (peek() != Symbol(")")) { + if (peek() == Keyword("INDEX") || peek() == Keyword("UNIQUE")) { + indexes += parseIndexDefinition() + } else { + columns += parseColumnDefinition() + } + if (peek() == Symbol(",")) consume() // consume comma + } + expect(Symbol(")")) + while (peek() != EOF) { + logger.info(s"skip to parse token: ${peek()}") + consume() + } + // expect(Symbol(";")) + + TableDefinition(tableName, isTemporary, columns.toList, indexes.toList) + } catch { + case NonFatal(e) => + logger.error(s"failed to parse ${peek()}, caused by: ", e) + throw e + } + } + + private def expectDataType(): String = { + peek() match { + case DataType(dt) => + consume() // Consume the data type token + dt + case other => throw new RuntimeException(s"Expected data type: ${other.toString}") + } + } + + private def parseDataType(): FieldType = { + val baseType = expectDataType() + if (peek() == Symbol("(")) { + consume() // 
Consume '(' + val params = new mutable.StringBuilder + while (peek() != Symbol(")")) { + params.append(consume().toString) + } + consume() // Consume ')' + FieldType(baseType, Some(s"$baseType($params)")) + } else { + FieldType(baseType) + } + } + + private def parseColumnDefinition(): ColumnDefinition = { + val columnName: String = consume().asInstanceOf[Identifier].value + val fieldType: FieldType = parseDataType() + val dataType: String = fieldType.fullDataType.getOrElse(fieldType.baseType) + var isUnsigned: Boolean = false + var isZeroFill: Boolean = false + + // 检查是否为无符号类型 + if (fieldType.baseType.matches( + "(?i)(BIGINT|INT|INTEGER|BIGINT|SMALLINT|TINYINT|MEDIUMINT|FLOAT|DOUBLE|DECIMAL)")) { + logger.info(s"parseColumnDefinition: peek token -> ${peek()}") + peek() match { + case Keyword("UNSIGNED") => + consume() // consume UNSIGNED + isUnsigned = true + case Keyword("ZEROFILL") => + consume() // consume UNSIGNED + isZeroFill = true + case _ => + } + // throw new Exception(s"UNSIGNED/ZEROFILL is not valid for data type: ${fieldType.baseType}") + } + + val constraints: List[Constraint] = parseConstraints() + var comment: Option[String] = None + + if (peek() == Keyword("COMMENT")) { + consume() // consume COMMENT + val commentText = consume().asInstanceOf[StringLiteral].value + comment = Some(commentText) + } + + ColumnDefinition(columnName, dataType, constraints, comment, isUnsigned, isZeroFill) + } + + private def parseConstraints(): List[Constraint] = { + val constraints = mutable.ListBuffer[Constraint]() + logger.info(s"parseConstraints: peek token -> ${peek()}") + while (peek() match { + case Keyword("AUTO_INCREMENT") | + Keyword("PRIMARY") | + Keyword("KEY") | + Keyword("NOT") | + Keyword("NULL") | + Keyword("DEFAULT") | + Keyword("CHARACTER") | + Keyword("COLLATE") => true + case _ => false + }) { + peek() match { + case Keyword("PRIMARY") => + consume() // consume PRIMARY + expect(Keyword("KEY")) + constraints += PrimaryKey + case Keyword("NOT") => + consume() // consume NOT + expect(Keyword("NULL")) + constraints += NotNull + case Keyword("AUTO_INCREMENT") => + consume() // consume AUTO_INCREMENT + constraints += AutoIncrement + case Keyword("CHARACTER") => + consume() // consume CHARACTER + expect(Keyword("SET")) + val characterSet = consume().asInstanceOf[Identifier].value + constraints += CharacterSet(characterSet) + case Keyword("COLLATE") => + consume() // consume COLLATE + val collate = consume().asInstanceOf[Identifier].value + constraints += Collate(collate) + case Keyword("DEFAULT") => + consume() // consume DEFAULT + val defaultValue = consume().asInstanceOf[StringLiteral].value + constraints += Default(defaultValue) + case _ => + } + } + constraints.toList + } + + private def parseIndexDefinition(): IndexDefinition = { + var isUnique = false + if (peek() == Keyword("UNIQUE")) { + isUnique = true + consume() // consume UNIQUE + } + expect(Keyword("INDEX")) + // scalastyle:off + val indexName: Option[String] = + if (peek().isInstanceOf[Identifier]) Option(consume().asInstanceOf[Identifier].value) else None + // scalastyle:on + expect(Symbol("(")) + + val columns = scala.collection.mutable.ListBuffer[String]() + while (peek() != Symbol(")")) { + columns += consume().asInstanceOf[Identifier].value + if (peek() == Symbol(",")) consume() // consume comma + } + expect(Symbol(")")) + + IndexDefinition(indexName, columns.toList, isUnique) + } + + private def expect(token: Token): Unit = { + if (consume() != token) throw new Exception(s"Expected $token but found ${peek()}") + 
} +} + +object MySqlLexer { + private final val keywords: Set[String] = Set( + "CREATE", "TEMPORARY", "TABLE", "IF", "NOT", "EXISTS", "PRIMARY", "KEY", "UNIQUE", + "FOREIGN", "REFERENCES", "ENGINE", "AUTO_INCREMENT", "COMMENT", "CHARACTER", "SET", + "COLLATE", "DEFAULT", "NULL", "INDEX", "UNSIGNED" + ) + + private final val dataTypes: Set[String] = Set( + "TINYINT", "SMALLINT", "MEDIUMINT", "INT", "INTEGER", "BIGINT", "FLOAT", "DOUBLE", "REAL", + "DECIMAL", "NUMERIC", "BIT", "DATE", "TIME", "DATETIME", "TIMESTAMP", "YEAR", + "CHAR", "VARCHAR", "BINARY", "VARBINARY", "TINYTEXT", "TEXT", "MEDIUMTEXT", "LONGTEXT", + "TINYBLOB", "BLOB", "MEDIUMBLOB", "LONGBLOB", "ENUM" + ) + + def tokenize(input: String): List[Token] = { + val tokens: ListBuffer[Token] = mutable.ListBuffer[Token]() + val builder: mutable.StringBuilder = new mutable.StringBuilder + var inString = false + var inIdentifier = false + var currentChar: Char = '\u0000' + + def addToken(): Unit = { + val word = builder.toString.trim + if (word.nonEmpty) { + val token = word.toUpperCase match { + case kw if keywords.contains(kw) => Keyword(kw) + case dt if dataTypes.contains(dt) => DataType(dt) + case num if num.matches("\\d+") => NumberLiteral(num.toInt) + case str if str.startsWith("'") && str.endsWith("'") => + StringLiteral(word.stripPrefix("'").stripSuffix("'")) + case id if id.startsWith("`") && id.endsWith("`") => + Identifier(word.stripPrefix("`").stripSuffix("`")) + case _ => Identifier(word) + } + tokens += token + } + builder.clear() + } + + for (i <- input.indices) { + currentChar = input(i) + currentChar match { + case '\'' if inString => + // End of string + builder.append(currentChar) + inString = false + addToken() + case '\'' => + // Start of string + inString = true + builder.append(currentChar) + case '`' if inIdentifier => + // End of identifier + builder.append(currentChar) + inIdentifier = false + addToken() + case '`' => + // Start of identifier + inIdentifier = true + builder.append(currentChar) + case ',' if !inString => + // Treat as a symbol when not inside a string + addToken() + tokens += Symbol(",") + case '(' | ')' | ';' if !inString => + // Treat as symbols + addToken() + tokens += Symbol(currentChar.toString) + case ' ' | '\n' | '\t' | '=' if !inString => + // Treat as token delimiter when not in string + addToken() + case _ => + // Regular character, add to builder + builder.append(currentChar) + } + } + + // Add any remaining token + addToken() + tokens += EOF + tokens.toList + } +} diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/SqlText.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/SqlText.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/StringFuncUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/StringFuncUtils.scala old mode 100644 new mode 100755 index 48addacf..161b3281 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/StringFuncUtils.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/StringFuncUtils.scala @@ -1,10 +1,10 @@ package com.wallace.demo.app.utils import java.util - import com.wallace.demo.app.common.Using import com.wallace.demo.app.utils.stringutils.StringUtils +import java.util.concurrent.ForkJoinPool import scala.collection.JavaConverters._ import scala.collection.mutable.ArrayBuffer import scala.collection.parallel.ForkJoinTaskSupport @@ -214,14 +214,14 @@ object StringFuncUtils extends Using { def bytesCoventToString(bytes: 
Array[Byte], charsetName: String = "UTF-8"): String = new String(bytes, charsetName) def main(args: Array[String]): Unit = { - val pool = new ForkJoinTaskSupport(new scala.concurrent.forkjoin.ForkJoinPool(_curParallelism)) + val pool = new ForkJoinTaskSupport(new ForkJoinPool(_curParallelism)) Properties.setProp("scala.time", "true") runtimeDuration({ _uniqueIndex = updateUniqueIndex(_uniqueIndex) - log.info(s"UniqueIndex: ${_uniqueIndex}") + logger.info(s"UniqueIndex: ${_uniqueIndex}") }, 10) - log.info(formatString("16.1")) + logger.info(formatString("16.1")) val str0 = """1,2,3,4,"a=1,b=2,c=3","e=1.2,f=32.1,g=1.3",7,8,9""" val str1 = """1,2,3,"4,"a=1,b=2,c=3",10,11,12,13,"e=1.2,f=32.1,g=1.3",7,8,9""" val str2 = """1,2,3,4","a=1,b=2,c=3",10,11,12,13,"e=1.2,f=32.1,g=1.3",7,8,9""" @@ -232,12 +232,12 @@ object StringFuncUtils extends Using { input.par.foreach { str => val threadName = Thread.currentThread().getName - log.info(s"$threadName: $str") + logger.info(s"$threadName: $str") val res: Array[String] = splitString(str, ",", "\"") synchronized { res.foreach { elem => - log.info(s"$threadName: $elem") + logger.info(s"$threadName: $elem") } } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/SystemEnvUtils.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/SystemEnvUtils.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/TryCatchProc.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/TryCatchProc.scala old mode 100644 new mode 100755 index c7712ce8..6bba1c10 --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/TryCatchProc.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/utils/TryCatchProc.scala @@ -23,7 +23,7 @@ trait TryCatchProc extends Using { Some(proc(in)) } catch { case NonFatal(e) => - log.error(s"""$msg, ${e.printStackTrace()}""".stripMargin) + logger.error(s"""$msg, ${e.printStackTrace()}""".stripMargin) None } } diff --git a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/xmlparser/XmlParser.scala b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/xmlparser/XmlParser.scala old mode 100644 new mode 100755 index 6edba815..c432258f --- a/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/xmlparser/XmlParser.scala +++ b/demo/ScalaDemo/src/main/scala/com/wallace/demo/app/xmlparser/XmlParser.scala @@ -29,7 +29,7 @@ object XmlParser extends LogSupport { // TODO 获取所有子节点 val allFields: NodeSeq = xmlFile \\ "_" - log.info(s"${allFields.head.toString()}") + logger.info(s"${allFields.head.toString()}") // TODO 获取fields val headerField = xmlFile \ "header" \ "field" diff --git a/demo/ScalaDemo/src/test/resources/test.xml b/demo/ScalaDemo/src/test/resources/test.xml old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/UnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/UnitSpec.scala old mode 100644 new mode 100755 index be054238..9b8bd2ea --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/UnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/UnitSpec.scala @@ -16,7 +16,7 @@ import org.scalatest.matchers.should.Matchers * Created by Wallace on 2016/11/6. 
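The StringFuncUtils hunk above swaps scala.concurrent.forkjoin.ForkJoinPool (deprecated and later removed from the Scala standard library) for java.util.concurrent.ForkJoinPool when building a ForkJoinTaskSupport. A minimal sketch of the same pattern on a parallel collection, assuming Scala 2.12 where .par is still part of the standard library:

import java.util.concurrent.ForkJoinPool

import scala.collection.parallel.ForkJoinTaskSupport

object ParallelismSketch {
  def main(args: Array[String]): Unit = {
    val xs = (1 to 1000).par
    // Bound the parallel operations to a 4-thread pool, mirroring the diff above.
    xs.tasksupport = new ForkJoinTaskSupport(new ForkJoinPool(4))
    println(xs.map(_ * 2).sum) // 1001000
  }
}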
*/ trait UnitSpec extends AnyFlatSpec with Matchers with FuncRuntimeDur { - protected val teamID = "Wallace Huang" + protected val teamID: String = "NoTalk Tech" var runTimes: Int = 1 @@ -25,7 +25,7 @@ trait UnitSpec extends AnyFlatSpec with Matchers with FuncRuntimeDur { require(runTimes >= 10000, s"Benchmark need to execute at least 10000 times And runTimes = $runTimes.") teamID should s"do $utMsg" in { val costTime = runtimeDuration(testFunc, runTimes) - log.info(s"RunTimes: $runTimes, CostTime: $costTime ms.") + logger.info(s"RunTimes: $runTimes, CostTime: $costTime ms.") } } } diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/algorithmdemo/AlgDemoUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/algorithmdemo/AlgDemoUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/collection/LockMapUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/collection/LockMapUnitSpec.scala new file mode 100755 index 00000000..e0a50498 --- /dev/null +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/collection/LockMapUnitSpec.scala @@ -0,0 +1,56 @@ +package com.wallace.demo.app.collection + +import com.wallace.demo.app.UnitSpec +import org.apache.hadoop.yarn.webapp.hamlet2.HamletSpec.THEAD + +import scala.concurrent.ExecutionContext.Implicits.global +import scala.concurrent.Future +import scala.concurrent.duration.DurationInt + +/** + * Author: biyu.huang + * Date: 2024/4/12 17:19 + * Description: + */ +class LockMapUnitSpec extends UnitSpec { + teamID should "operate LockMap" in { + val lockMap = new LockMap[String, String] + + // Put some values + lockMap.put("key1", "value1") + lockMap.put("key2", "value2") + lockMap.put("key3", "value3") + + // Concurrently access and print values + logger.info("getAndLock key1") + lockMap.getAndLock("key1") + Thread.sleep(1000) + lockMap.unlock("key1") + + val future1 = Future { + logger.info("feature1: key1 -> " + lockMap.getAndLock("key1")) + Thread.sleep(1000) + lockMap.unlock("key1") + } + + val future2 = Future { + logger.info("feature2: key1 -> " + lockMap.getAndLock("key1")) + Thread.sleep(3000) + lockMap.unlock("key1") + } + + val future3 = Future { + logger.info("feature3: key1 -> " + lockMap.getAndLock("key1")) + Thread.sleep(5000) + lockMap.unlock("key1") + } + + // Wait for all futures to complete + val allFutures = Future.sequence(Seq(future1, future2, future3)) + val result = scala.concurrent.Await.result(allFutures, 10.seconds) + + // Try to retrieve removed key + lockMap.remove("key2") + logger.info(lockMap.getAndLock("key2")) // Should print null + } +} diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/functionaldemo/FunctionalDemoUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/functionaldemo/FunctionalDemoUnitSpec.scala old mode 100644 new mode 100755 index f69a3ff1..891821be --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/functionaldemo/FunctionalDemoUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/functionaldemo/FunctionalDemoUnitSpec.scala @@ -18,16 +18,16 @@ class FunctionalDemoUnitSpec extends UnitSpec { teamID should "do unit test for Functional Demo" in { val a: Int = 3 val b: BigInt = FunctionalDemo.toBigInt(a) - log.info(s"${Int.MaxValue}, ${Int.MinValue}, ${b.pow(a)}") - log.info(s"${p0(1, 2, 3)}") // 6 - log.info(s"${p2(100)}") // 130 - log.info(s"${p3(10, 1)}") - log.info("[Partial Functions] " + FunctionalDemo.divide(10)) - log.info("[Partial Functions] " + 
FunctionalDemo.divide1(10)) - log.info("[Partial Functions] " + FunctionalDemo.direction(180)) - log.info("[匿名函数] " + FunctionalDemo.m1(2)) - log.info("[偏应用函数] " + FunctionalDemo.sum(1, 2, 3)) - log.info("Curry 函数] " + FunctionalDemo.curriedSum(5)(6)) + logger.info(s"${Int.MaxValue}, ${Int.MinValue}, ${b.pow(a)}") + logger.info(s"${p0(1, 2, 3)}") // 6 + logger.info(s"${p2(100)}") // 130 + logger.info(s"${p3(10, 1)}") + logger.info("[Partial Functions] " + FunctionalDemo.divide(10)) + logger.info("[Partial Functions] " + FunctionalDemo.divide1(10)) + logger.info("[Partial Functions] " + FunctionalDemo.direction(180)) + logger.info("[匿名函数] " + FunctionalDemo.m1(2)) + logger.info("[偏应用函数] " + FunctionalDemo.sum(1, 2, 3)) + logger.info("Curry 函数] " + FunctionalDemo.curriedSum(5)(6)) val res_1 = FunctionalDemo.p0(1, 2, 3) val res_2 = FunctionalDemo.p2(100) diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/parsercombinators/ParsersConstructorUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/parsercombinators/ParsersConstructorUnitSpec.scala old mode 100644 new mode 100755 index 2b6b3fca..e3769a92 --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/parsercombinators/ParsersConstructorUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/parsercombinators/ParsersConstructorUnitSpec.scala @@ -38,8 +38,8 @@ class ParsersConstructorUnitSpec extends UnitSpec { cnt += 1 } val costTime: Long = System.currentTimeMillis() - startTime - log.info(s"RunTimes: $runTimes, CostTime: $costTime ms, Rate: ${runTimes * 1000.0 / costTime}.") - log.info(res) + logger.info(s"RunTimes: $runTimes, CostTime: $costTime ms, Rate: ${runTimes * 1000.0 / costTime}.") + logger.info(res) res shouldBe "f6,b2,c3,d4,e5,f7,f8" } } diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsPublisherUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsPublisherUnitSpec.scala new file mode 100755 index 00000000..af35ce18 --- /dev/null +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/patterns/observedemo/newsdemo/NewsPublisherUnitSpec.scala @@ -0,0 +1,19 @@ +package com.wallace.demo.app.patterns.observedemo.newsdemo + +import com.wallace.demo.app.UnitSpec + +/** + * Author: biyu.huang + * Date: 2024/4/8 17:39 + * Description: + */ +class NewsPublisherUnitSpec extends UnitSpec { + teamID should "execute observer mode" in { + val publisher: NewsPublisher = new NewsPublisher() + val observer1: NewsSubscriber = new NewsSubscriber("john") + val observer2: NewsSubscriber = new NewsSubscriber("jerry") + + publisher.addObservers(observer1, observer2) + publisher.publishNews("""Breaking News: Florida Doesn’t Feed Idle People""") + } +} diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/sortdemo/BitMapDemoUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/sortdemo/BitMapDemoUnitSpec.scala new file mode 100755 index 00000000..afe6a0fc --- /dev/null +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/sortdemo/BitMapDemoUnitSpec.scala @@ -0,0 +1,31 @@ +package com.wallace.demo.app.sortdemo + +import com.wallace.demo.app.UnitSpec + +/** + * Author: biyu.huang + * Date: 2024/5/8 15:16 + * Description: + */ +class BitMapDemoUnitSpec extends UnitSpec { + teamID should "do unit test for BitMap" in { + val bitMap = new BitMapDemo(65, 32).bitMap + bitMap.setBit(0) + bitMap.setBit(1) + bitMap.setBit(2) + bitMap.setBit(3) + bitMap.setBit(4) + bitMap.setBit(7) + 
bitMap.setBit(32) + bitMap.setBit(65) + + assertResult(true)(bitMap.exists(1)) + assertResult(true)(bitMap.exists(32)) + assertResult(true)(bitMap.exists(65)) + assertResult(false)(bitMap.exists(8)) + assertResult(8)(bitMap.bitCount()) + + logger.info("bitCount => {}", bitMap.bitCount()) + logger.info(bitMap.toString) + } +} diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/CombineMultiVersionUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/CombineMultiVersionUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/DateTimeUtilsUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/DateTimeUtilsUnitSpec.scala old mode 100644 new mode 100755 index c611f1bb..9b31f855 --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/DateTimeUtilsUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/DateTimeUtilsUnitSpec.scala @@ -21,14 +21,14 @@ class DateTimeUtilsUnitSpec extends AnyFunSuite with LogSupport { val now: Timestamp = new Timestamp(System.currentTimeMillis()) now.setNanos(1000) val ns = DateTimeUtils.fromJavaTimestamp(now) - log.info(s"Input: $now, Output: $ns") + logger.info(s"Input: $now, Output: $ns") assert(ns % 1000000L === 1) assert(DateTimeUtils.toJavaTimestamp(ns) === now) List(-111111111111L, -1L, 0, 1L, 111111111111L).foreach { t => val ts = DateTimeUtils.toJavaTimestamp(t) - log.info(s"Input: $t, Output: $ts") + logger.info(s"Input: $t, Output: $ts") assert(DateTimeUtils.fromJavaTimestamp(ts) === t) assert(DateTimeUtils.toJavaTimestamp(DateTimeUtils.fromJavaTimestamp(ts)) === ts) } diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/FileUtilsUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/FileUtilsUnitSpec.scala old mode 100644 new mode 100755 index dfe45284..ea3b810f --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/FileUtilsUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/FileUtilsUnitSpec.scala @@ -20,7 +20,7 @@ class FileUtilsUnitSpec extends UnitSpec { val res: Map[String, AlgMetaData] = FileUtils.readXMLConfigFile(fileName) res.foreach { x => - log.info(s"${x._1}, ${x._2}") + logger.info(s"${x._1}, ${x._2}") } // log.info(s"${res("t2_1000002").toString}") // log.info(s"${res("t5_1000005").toString}") diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/JsonFormatterUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/JsonFormatterUnitSpec.scala old mode 100644 new mode 100755 index df75c032..2f3ca61d --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/JsonFormatterUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/JsonFormatterUnitSpec.scala @@ -82,7 +82,7 @@ class JsonFormatterUnitSpec extends UnitSpec { val res = JsonFormatter.format(jsonStr) - log.info(res.toString()) + logger.info(res.toString()) res.isEmpty shouldBe false res("test") shouldBe "1234567" res("common.column.datetimeFormat") shouldBe "yyyy-MM-dd HH:mm:ss" diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/MathUtilsUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/MathUtilsUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/MySqlParserUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/MySqlParserUnitSpec.scala new file mode 100644 index 00000000..9bd36ab2 --- 
/dev/null +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/MySqlParserUnitSpec.scala @@ -0,0 +1,36 @@ +package com.wallace.demo.app.utils + +import com.wallace.demo.app.UnitSpec + +/** + * Author: biyu.huang + * Date: 2024/10/22 10:35 + * Description: + */ +class MySqlParserUnitSpec extends UnitSpec { + teamID should "do unit test for MySQLParser" in { + val createTableSQL: String = + """ + CREATE TABLE IF NOT EXISTS products ( + `auto_id` bigint(20) NOT NULL AUTO_INCREMENT, + product_id INT UNSIGNED PRIMARY KEY COMMENT 'Product ID', + name VARCHAR(100) NOT NULL, + description TEXT DEFAULT '' COMMENT 'description of product', + price DECIMAL(10, 2) UNSIGNED DEFAULT '0.00' COMMENT 'Product Price', + tax_rate DECIMAL(5, 3) COMMENT 'Applicable tax rate', + start_time TIME(3) COMMENT 'Event start time with milliseconds', + end_datetime DATETIME(6) DEFAULT '2023-10-22 14:30:00.123456', + user_name VARCHAR(50) CHARACTER SET utf8 COLLATE utf8_general_ci NOT NULL, + `job_name` varchar(128) COLLATE utf8mb4_general_ci NOT NULL, + UNIQUE INDEX idx_product_name (name), + INDEX idx_price (product_id,price) + ) ENGINE=InnoDB AUTO_INCREMENT=1343 DEFAULT CHARSET=utf8mb4 + ; + """ + + val tokens = MySqlLexer.tokenize(createTableSQL) + val parser = new MySqlParser(tokens) + val ast = parser.parse() + logger.info(ast.toString) + } +} diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SqlTextUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SqlTextUnitSpec.scala old mode 100644 new mode 100755 index 6d438e74..c63ac1c8 --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SqlTextUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SqlTextUnitSpec.scala @@ -19,7 +19,7 @@ class SqlTextUnitSpec extends UnitSpec with SqlText { val sqlMetaData: SqlMetaData = SqlMetaData("textfile_origin_tab", "tgt_tab", "col1,col2,col3", Some("p_provincecode=999999,p_date=\'2018-02-21\'"), Some("p_provincecode=999999 and p_date=\'2018-02-21\'")) val res = insertIntoTabSql(sqlMetaData) - log.info(s"InsertIntoTabSql: $res") + logger.info(s"InsertIntoTabSql: $res") res shouldBe s""" @@ -32,7 +32,7 @@ class SqlTextUnitSpec extends UnitSpec with SqlText { teamID should "do unit test for insertIntoTabSql: no partition and no condition" in { val sqlMetaData: SqlMetaData = SqlMetaData("textfile_origin_tab", "tgt_tab", "col1,col2,col3", None, None) val res = insertIntoTabSql(sqlMetaData) - log.info(s"InsertIntoTabSql: $res") + logger.info(s"InsertIntoTabSql: $res") res shouldBe s""" @@ -44,7 +44,7 @@ class SqlTextUnitSpec extends UnitSpec with SqlText { val sqlMetaData: SqlMetaData = SqlMetaData("textfile_origin_tab", "tgt_tab", "col1,col2,col3", Some("p_provincecode=999999,p_date=\'2018-02-21\'"), Some("p_provincecode=999999 and p_date=\'2018-02-21\'")) val res = selectFieldsSql(sqlMetaData) - log.info( + logger.info( s"""|SelectFieldsSql: |$res""".stripMargin) diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/StringFuncUtilsUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/StringFuncUtilsUnitSpec.scala old mode 100644 new mode 100755 index 9f94762a..6d1b7a83 --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/StringFuncUtilsUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/StringFuncUtilsUnitSpec.scala @@ -20,7 +20,7 @@ class StringFuncUtilsUnitSpec extends UnitSpec { val expect = "" val result = StringFuncUtils.splitString(str, ",", "\"") for (elem <- result) { - 
log.info("@" + elem + "@") + logger.info("@" + elem + "@") } result.length shouldBe 3 result(0) shouldBe expect @@ -32,7 +32,7 @@ class StringFuncUtilsUnitSpec extends UnitSpec { val expect = "" val result = StringFuncUtils.splitString(str, ",", "\"") for (elem <- result) { - log.info("@" + elem + "@") + logger.info("@" + elem + "@") } result.length shouldBe 1 result.head shouldBe expect @@ -43,7 +43,7 @@ class StringFuncUtilsUnitSpec extends UnitSpec { val expect = "elem1" val result = StringFuncUtils.splitString(str, ",", "\"") for (elem <- result) { - log.info("@" + elem + "@") + logger.info("@" + elem + "@") } result.length shouldBe 1 result.head shouldBe expect @@ -54,7 +54,7 @@ class StringFuncUtilsUnitSpec extends UnitSpec { val expect = "elem2" val result = StringFuncUtils.splitString(str, ",", "\"") for (elem <- result) { - log.info("@" + elem + "@") + logger.info("@" + elem + "@") } result.length shouldBe 2 result.last shouldBe expect @@ -65,7 +65,7 @@ class StringFuncUtilsUnitSpec extends UnitSpec { val expect = "elem3=1,elem4=2,elem5=3" val result = StringFuncUtils.splitString(str, ",", "\"") for (elem <- result) { - log.info("@" + elem + "@") + logger.info("@" + elem + "@") } result.length shouldBe 3 result.last shouldBe expect @@ -77,7 +77,7 @@ class StringFuncUtilsUnitSpec extends UnitSpec { val expect2 = "elem7=4,elem8=5,elem9=6" val result = StringFuncUtils.splitString(str, ",", "\"") for (elem <- result) { - log.info("@" + elem + "@") + logger.info("@" + elem + "@") } result.length shouldBe 6 result(2) shouldBe expect1 @@ -102,7 +102,7 @@ class StringFuncUtilsUnitSpec extends UnitSpec { "Wallace Huang" should "do unit test for: countKeyWord" in { val input = "Hello world and Hello again. It's wonderful day!" val res: Map[String, Int] = StringFuncUtils.countKeyWord(input, " ") - res.foreach(x => log.info(s"KeyWord: ${x._1}, Count: ${x._2}")) + res.foreach(x => logger.info(s"KeyWord: ${x._1}, Count: ${x._2}")) val expect = 2 res.getOrElse("Hello", "") shouldBe expect res.getOrElse("wonderful", "") shouldBe 1 @@ -127,12 +127,12 @@ class StringFuncUtilsUnitSpec extends UnitSpec { val costTime4: Double = runtimeDuration(StringFuncUtils.extractFieldsScala("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes) val costTime5: Double = runtimeDuration(StringFuncUtils.extractFieldsJava("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes) val costTime6: Double = runtimeDuration(StringFuncUtils.extractFieldsScala("2018-4-8 17:19:19,666666,1,109.01,32.34,true,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,30,31,32,33,34,35,36,37,38,39,40,41,42,43,44,45,46,47,48,49,50"), runTimes) - log.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime1 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime1 / 1000.0)}") - log.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime2 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime2 / 1000.0)}") - log.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime3 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime3 / 1000.0)}") - log.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime4 ms, Rate(Records/sec): ${runTimes * 
1.0 / (costTime4 / 1000.0)}") - log.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime5 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime5 / 1000.0)}") - log.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime6 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime6 / 1000.0)}") + logger.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime1 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime1 / 1000.0)}") + logger.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime2 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime2 / 1000.0)}") + logger.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime3 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime3 / 1000.0)}") + logger.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime4 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime4 / 1000.0)}") + logger.info(s"[BenchmarkTest ### extractFieldsJava ] Times: $runTimes, CostTime: $costTime5 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime5 / 1000.0)}") + logger.info(s"[BenchmarkTest ### extractFieldsScala] Times: $runTimes, CostTime: $costTime6 ms, Rate(Records/sec): ${runTimes * 1.0 / (costTime6 / 1000.0)}") res0 shouldBe expect res1 shouldBe expect diff --git a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SystemEnvUtilsUnitSpec.scala b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SystemEnvUtilsUnitSpec.scala old mode 100644 new mode 100755 index 328ff97e..3c592875 --- a/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SystemEnvUtilsUnitSpec.scala +++ b/demo/ScalaDemo/src/test/scala/com/wallace/demo/app/utils/SystemEnvUtilsUnitSpec.scala @@ -17,7 +17,7 @@ class SystemEnvUtilsUnitSpec extends UnitSpec { teamID should "do unit test for getUserDir" in { val res = SystemEnvUtils.getUserDir val expect = "CodePrototypesDemo" - log.info(s"UserDir: $res.") + logger.info(s"UserDir: $res.") res.contains(expect) shouldBe true } diff --git a/demo/SparkDemo/AlarmTable.csv b/demo/SparkDemo/AlarmTable.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/AlarmTable.csv b/demo/SparkDemo/data/AlarmTable.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/AvgValue_2016-05-10_TestSpendTime.csv b/demo/SparkDemo/data/AvgValue_2016-05-10_TestSpendTime.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/AvgValue_2016-05-15_TestSpendTime.csv b/demo/SparkDemo/data/AvgValue_2016-05-15_TestSpendTime.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/DateProducer_2016-05-09_Test.csv b/demo/SparkDemo/data/DateProducer_2016-05-09_Test.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/DateProducer_2016-05-14_Test.csv b/demo/SparkDemo/data/DateProducer_2016-05-14_Test.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/Testing_Data_2016-10-07.csv b/demo/SparkDemo/data/Testing_Data_2016-10-07.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/Testing_Data_2016-10-30.csv b/demo/SparkDemo/data/Testing_Data_2016-10-30.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/currentAlarmTable.csv b/demo/SparkDemo/data/currentAlarmTable.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/data/test.txt b/demo/SparkDemo/data/test.txt old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/pom.xml b/demo/SparkDemo/pom.xml old mode 100644 new mode 100755 index 969ab255..d0976806 
--- a/demo/SparkDemo/pom.xml +++ b/demo/SparkDemo/pom.xml @@ -163,24 +163,23 @@ - ch.qos.logback - logback-core + com.typesafe.akka + akka-actor_${scala.binary.version} - ch.qos.logback - logback-classic + com.typesafe.akka + akka-remote_${scala.binary.version} - com.typesafe.akka - akka-actor_${scala.binary.version} + akka-slf4j_${scala.binary.version} com.typesafe.akka - akka-remote_${scala.binary.version} + akka-protobuf_${scala.binary.version} @@ -212,17 +211,6 @@ ch.ethz.ganymed ganymed-ssh2 - - - org.slf4j - slf4j-api - - - - org.slf4j - slf4j-log4j12 - - org.apache.spark @@ -299,6 +287,56 @@ org.apache.flink flink-streaming-scala_${scala.binary.version} 1.9.1 + + + com.typesafe.akka + akka-protobuf + + + com.typesafe.akka + akka-protobuf_${scala.binary.version} + + + com.typesafe.akka + akka-actor_${scala.binary.version} + + + com.typesafe.akka + akka-stream_${scala.binary.version} + + + com.typesafe.akka + akka-slf4j_${scala.binary.version} + + + org.scala-lang + scala-compiler + + + org.scala-lang + scala-library + + + org.scala-lang + scala-reflect + + + org.slf4j + slf4j-api + + + com.google.code.findbugs + jsr305 + + + org.apache.commons + commons-lang3 + + + org.apache.commons + commons-math3 + + @@ -309,5 +347,14 @@ org.scalanlp nak_2.10 + + + ml.dmlc + xgboost4j_${scala.binary.version} + + + ml.dmlc + xgboost4j-spark_${scala.binary.version} + diff --git a/demo/SparkDemo/src/at/scala/AccSpec.scala b/demo/SparkDemo/src/at/scala/AccSpec.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/java/com/wallace/demo/EncodingParser.java b/demo/SparkDemo/src/main/java/com/wallace/demo/EncodingParser.java old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/java/com/wallace/demo/socket/client/TalkClient.java b/demo/SparkDemo/src/main/java/com/wallace/demo/socket/client/TalkClient.java old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/java/com/wallace/demo/socket/server/MultiAcceptServer.java b/demo/SparkDemo/src/main/java/com/wallace/demo/socket/server/MultiAcceptServer.java old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/java/com/wallace/demo/socket/server/MultiServer.java b/demo/SparkDemo/src/main/java/com/wallace/demo/socket/server/MultiServer.java old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/java/com/wallace/demo/socket/server/SingleServer.java b/demo/SparkDemo/src/main/java/com/wallace/demo/socket/server/SingleServer.java old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/resources/log4j2.properties b/demo/SparkDemo/src/main/resources/log4j2.properties new file mode 100755 index 00000000..c76604f0 --- /dev/null +++ b/demo/SparkDemo/src/main/resources/log4j2.properties @@ -0,0 +1,14 @@ +# Console logger +appender.console.type=Console +appender.console.name=STDOUT +appender.console.layout.type=PatternLayout +appender.console.layout.pattern=%d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n +appender.console.filter.threshold.type=ThresholdFilter +appender.console.filter.threshold.level=DEBUG + +# configure logger +rootLogger=INFO,STDOUT +#logger.gson_demo=INFO,specificLogger +#logger.gson_demo.name=com.wallace.demo.app.GsonDemo +#logger.gson_demo.additivity=false + diff --git a/demo/SparkDemo/src/main/resources/logback.xml b/demo/SparkDemo/src/main/resources/logback.xml deleted file mode 100644 index 28210719..00000000 --- a/demo/SparkDemo/src/main/resources/logback.xml +++ /dev/null @@ -1,105 +0,0 @@ - - - - - - - - - - - - - %d{HH:mm:ss.SSS} 
|-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - - WARN - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.warn.log - ${maxHistory} - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - - INFO - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.info.log - ${maxHistory} - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - DEBUG - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.debug.log - ${maxHistory} - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - - ERROR - ACCEPT - DENY - - - - ${log_dir}/%d{yyyy-MM-dd}/CodePrototypesDemo.error.log - ${maxHistory} - - - - - %d{HH:mm:ss.SSS} |-[%thread]-[%level]-[%logger{35}.%method:%line] - %msg%n - - - - - - - - - - - - - - \ No newline at end of file diff --git a/demo/SparkDemo/src/main/resources/msgproducer.conf b/demo/SparkDemo/src/main/resources/msgproducer.conf old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/resources/sample_1.csv b/demo/SparkDemo/src/main/resources/sample_1.csv old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/resources/spark_sql_data b/demo/SparkDemo/src/main/resources/spark_sql_data old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/resources/trainingData.csv.gz b/demo/SparkDemo/src/main/resources/trainingData.csv.gz old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/Boot.scala b/demo/SparkDemo/src/main/scala/com/wallace/Boot.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/CreateSparkSession.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/CreateSparkSession.scala old mode 100644 new mode 100755 index d4976938..51d20084 --- a/demo/SparkDemo/src/main/scala/com/wallace/common/CreateSparkSession.scala +++ b/demo/SparkDemo/src/main/scala/com/wallace/common/CreateSparkSession.scala @@ -13,6 +13,7 @@ trait CreateSparkSession extends FuncRunDuration with LogSupport { .master(master) .appName(appName) .config("spark.sql.warehouse.dir", warehouseLocation) + .config("xgboost.spark.debug", "true") //.enableHiveSupport() .getOrCreate() diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/DemoConfig.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/DemoConfig.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/FuncRunDuration.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/FuncRunDuration.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/LogSupport.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/LogSupport.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/ProjConfig.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/ProjConfig.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/ProjLogger.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/ProjLogger.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/UserDefineFunc.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/UserDefineFunc.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/Using.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/Using.scala old mode 100644 new mode 100755 diff --git 
a/demo/SparkDemo/src/main/scala/com/wallace/common/sshclient/SshClient.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/sshclient/SshClient.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/sshclient/SshClientUserInfo.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/sshclient/SshClientUserInfo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/common/timeformat/TimePara.scala b/demo/SparkDemo/src/main/scala/com/wallace/common/timeformat/TimePara.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/flink/RedisAsyncFunc.scala b/demo/SparkDemo/src/main/scala/com/wallace/flink/RedisAsyncFunc.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/redis/JedisClusterPipeline.scala b/demo/SparkDemo/src/main/scala/com/wallace/redis/JedisClusterPipeline.scala old mode 100644 new mode 100755 index 1820b79a..76d4b563 --- a/demo/SparkDemo/src/main/scala/com/wallace/redis/JedisClusterPipeline.scala +++ b/demo/SparkDemo/src/main/scala/com/wallace/redis/JedisClusterPipeline.scala @@ -16,15 +16,16 @@ import scala.collection.mutable.ArrayBuffer */ class JedisClusterPipeline(jedisCluster: JedisCluster) extends PipelineBase with Closeable { self => - private val FIELD_CONNECTION_HANDLER: Field = getField(classOf[BinaryJedisCluster], "connectionHandler") - private val FIELD_CACHE: Field = getField(classOf[JedisClusterConnectionHandler], "cache") - private val clients: util.LinkedList[Client] = new util.LinkedList[Client]() - private val jedisMap: util.HashMap[JedisPool, Jedis] = new util.HashMap[JedisPool, Jedis]() - private val hasDataInBuf: AtomicBoolean = new AtomicBoolean(false) - private val connectionHandler: JedisSlotBasedConnectionHandler = getValue(jedisCluster, FIELD_CONNECTION_HANDLER) - .asInstanceOf[JedisSlotBasedConnectionHandler] - private val clusterInfoCache: JedisClusterInfoCache = getValue(connectionHandler, FIELD_CACHE) - .asInstanceOf[JedisClusterInfoCache] + private final val FIELD_CONNECTION_HANDLER: Field = + getField(classOf[BinaryJedisCluster], "connectionHandler") + private final val FIELD_CACHE: Field = getField(classOf[JedisClusterConnectionHandler], "cache") + private final val clients: util.LinkedList[Client] = new util.LinkedList[Client]() + private final val jedisMap: util.HashMap[JedisPool, Jedis] = new util.HashMap[JedisPool, Jedis]() + private final val hasDataInBuf: AtomicBoolean = new AtomicBoolean(false) + private final val connectionHandler: JedisSlotBasedConnectionHandler = + getValue(jedisCluster, FIELD_CONNECTION_HANDLER) + private final val clusterInfoCache: JedisClusterInfoCache = + getValue(connectionHandler, FIELD_CACHE) override def getClient(key: String): Client = { val binaryKey: Array[Byte] = SafeEncoder.encode(key) @@ -41,15 +42,15 @@ class JedisClusterPipeline(jedisCluster: JedisCluster) extends PipelineBase with private def getJedis(slot: Int): Jedis = { val pool: JedisPool = clusterInfoCache.getSlotPool(slot) val tryGetJedis: Option[Jedis] = Option(jedisMap.get(pool)) - val jedisCli: Jedis = if (tryGetJedis.isEmpty) { - val tmp: Jedis = pool.getResource - jedisMap.put(pool, tmp) - tmp + val jedisClient: Jedis = if (tryGetJedis.isEmpty) { + val jedis: Jedis = pool.getResource + jedisMap.put(pool, jedis) + jedis } else { tryGetJedis.get } hasDataInBuf.set(true) - jedisCli + jedisClient } def pipelineSetEx(data: Array[KVDataEX]): Unit = { @@ -59,7 +60,7 @@ class 
JedisClusterPipeline(jedisCluster: JedisCluster) extends PipelineBase with val expireTime: Int = elem.expireTime self.setex(elem.key, expireTime, elem.value) } - syncAndReturnAll + syncAndReturnAll() } catch { case e: Exception => throw new RuntimeException("[setex] operator error", e) @@ -70,7 +71,7 @@ class JedisClusterPipeline(jedisCluster: JedisCluster) extends PipelineBase with val result: ArrayBuffer[KVData] = new ArrayBuffer[KVData]() try { keys.foreach(self.hgetAll) - val res = syncAndReturnAll.map(_.asInstanceOf[util.Map[String, String]]) + val res = syncAndReturnAll().map(_.asInstanceOf[util.Map[String, String]]) res.foreach { elem => @@ -85,9 +86,9 @@ class JedisClusterPipeline(jedisCluster: JedisCluster) extends PipelineBase with result.result().toArray[KVData] } - def syncAndReturnAll: Array[Any] = innerSync + def syncAndReturnAll(): Array[Any] = innerSync() - private def innerSync: Array[Any] = { + private def innerSync(): Array[Any] = { val responseList: ArrayBuffer[Any] = new ArrayBuffer[Any]() val clientSet: util.HashSet[Client] = new util.HashSet[Client]() var isExcept: Boolean = true @@ -166,9 +167,9 @@ class JedisClusterPipeline(jedisCluster: JedisCluster) extends PipelineBase with } } - private def getValue[T](obj: AnyRef, field: Field): AnyRef = { + private def getValue[T](obj: AnyRef, field: Field): T = { try { - field.get(obj) + field.get(obj).asInstanceOf[T] } catch { case e@(_: IllegalAccessException | _: IllegalArgumentException) => throw new RuntimeException("failed to get value", e) diff --git a/demo/SparkDemo/src/main/scala/com/wallace/snmp/SnmpPDUAnalysis.scala b/demo/SparkDemo/src/main/scala/com/wallace/snmp/SnmpPDUAnalysis.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/Boot.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/Boot.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/dataproprocess/AvgValue.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/dataproprocess/AvgValue.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/dataproprocess/DataProducer.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/dataproprocess/DataProducer.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/graphxdemo/GraphXDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/graphxdemo/GraphXDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/DataFrameDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/DataFrameDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/PersonInfo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/PersonInfo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/SpendingInfo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/SpendingInfo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/WindowExprDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/dataframedemo/WindowExprDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/exam/SparkExamDemo.scala 
b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/exam/SparkExamDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/DBScanDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/DBScanDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/ParquetDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/ParquetDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/RddDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/RddDemo.scala old mode 100644 new mode 100755 index 8bbc812e..d2da0478 --- a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/RddDemo.scala +++ b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/RddDemo.scala @@ -13,7 +13,6 @@ import org.apache.hadoop.conf.Configuration import org.apache.hadoop.fs.FileSystem import org.apache.spark.SparkContext import org.apache.spark.rdd.RDD -import org.apache.spark.sql.hive.HiveContext import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession} @@ -23,7 +22,7 @@ import org.apache.spark.sql.{DataFrame, SaveMode, SparkSession} */ object RddDemo extends CreateSparkSession with Using { private val _spark: SparkSession = createSparkSession("RddDemo") - val minPartitions: Int = Math.min(Runtime.getRuntime.availableProcessors(), 10) + private val minPartitions: Int = Math.min(Runtime.getRuntime.availableProcessors(), 10) def readTextFile(filePath: String): Unit = { val sc: SparkContext = _spark.sparkContext @@ -61,8 +60,7 @@ object RddDemo extends CreateSparkSession with Using { def main(args: Array[String]): Unit = { val sc = _spark.sparkContext - val hc = new HiveContext(sc) - import hc.implicits._ + import _spark.implicits._ val fs: FileSystem = FileSystem.get(sc.hadoopConfiguration) println(s"Home Directory: ${fs.getHomeDirectory}") val rdd: RDD[String] = sc.parallelize(Array("hello world", "hello", "world", "hello world world"), 2) diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/RddStaticsDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/rdddemo/RddStaticsDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/structuredstreamingdemo/StructuredStreamingDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/structuredstreamingdemo/StructuredStreamingDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/udfdemo/UdfDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkdemo/udfdemo/UdfDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/ChiSqLearning.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/ChiSqLearning.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/ETS.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/ETS.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/ForecastIndoorAndOutdoorMR.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/ForecastIndoorAndOutdoorMR.scala old mode 100644 new mode 100755 diff --git 
a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/GBDTModelDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/GBDTModelDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/LightGBMDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/LightGBMDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/MLLibDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/MLLibDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/PipelinesDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/PipelinesDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/XGBoostDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/XGBoostDemo.scala new file mode 100644 index 00000000..97445fe1 --- /dev/null +++ b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkmllibdemo/XGBoostDemo.scala @@ -0,0 +1,195 @@ +package com.wallace.spark.sparkmllibdemo + +import com.wallace.common.{CreateSparkSession, Using} +import ml.dmlc.xgboost4j.scala.Booster +import ml.dmlc.xgboost4j.scala.spark.{XGBoostClassificationModel, XGBoostClassifier} +import org.apache.spark.ml.evaluation.{BinaryClassificationEvaluator, MulticlassClassificationEvaluator} +import org.apache.spark.ml.feature.VectorAssembler +import org.apache.spark.sql.{DataFrame, Dataset, Row} +import org.apache.spark.sql.functions._ + +import scala.collection.{immutable, mutable} +import scala.util.Random + +/** + * Author: biyu.huang + * Date: 2024/12/31 11:39 + * Description: + */ +object XGBoostDemo extends CreateSparkSession with Using { + def main(args: Array[String]): Unit = { + usingSpark(createSparkSession("XGBoost Demo")) { + spark => + // Randomly generate 20,000 samples + val random = new Random() + val data = (1 to 20000).map { _ => + val randomLabel = random.nextInt(2).toDouble // randomly generate 0 or 1 + val feature1 = random.nextDouble() * 10 + 1.0 // feature 1, range [1.0, 11.0) + val feature2 = random.nextDouble() * 20 + 1.0 // feature 2, range [1.0, 21.0) + val feature3 = random.nextDouble() * 30 + 1.0 // feature 3, range [1.0, 31.0) + + val label = if (feature1 <= 5.0) { + 1.0 + } else if (feature2 >= 10.0) { + 1.0 + } else if (feature1 > 5.0 & feature2 < 10.0 & feature3 >= 20.0) { + 1.0 + } else { + 0.0 + } + + val finalLabel: Double = if (label == randomLabel) label else 0.0 + (finalLabel, feature1, feature2, feature3) + }.zipWithIndex.map { + case ((label, feature1, feature2, feature3), id) => + (label, feature1, feature2, feature3, id) + } + + val df = spark.createDataFrame(data) + .toDF("label", "feature1", "feature2", "feature3", "id") + // Process features + val assembler = new VectorAssembler() + .setInputCols(Array("feature1", "feature2", "feature3")) + .setOutputCol("features") + val transformedDF = assembler.transform(df) + transformedDF.show() + // Split raw_data into train_data and test_data + val Array(trainData, testData) = transformedDF.randomSplit(Array(0.8, 0.2), seed = 42) + + // Split train_data into k-folds + val k = 5 // Define K fold + val folds: Array[Dataset[Row]] = trainData.randomSplit(Array.fill(k)(1.0 / k), seed = 42) + // Verify the data distribution across folds.
+ folds.zipWithIndex.foreach { case (df, idx) => log.info(s"Fold $idx: ${df.count()} rows") } + + // XGBoostClassifier parameters + val params = Map( + "seed" -> 42, + "eta" -> 0.1, + "max_depth" -> 4, + "objective" -> "binary:logistic", // binary classification; use multi:softprob for multi-class + "num_round" -> 200, // number of boosting rounds + "num_workers" -> 2, // number of parallel workers + // "verbosity" -> 2, // verbose logging + "handle_invalid" -> "keep", + "use_external_memory" -> "false" // avoid using the Rabit tracker + ) + + // model and logloss + val results: immutable.Seq[(Double, XGBoostClassificationModel, Map[String, Double])] = + (0 until k).map { i => + val trainData = folds.filterNot(_ == folds(i)).reduce(_.union(_)) // all folds except fold i + val testData = folds(i) // fold i as the validation set + + // Initialize the XGBoostClassifier + val xgbClassifier = new XGBoostClassifier(params) + .setFeaturesCol("features") + .setLabelCol("label") + .setEvalSets(Map("train" -> trainData.toDF(), "eval" -> testData.toDF())) // specify the training and validation sets + // Model training + val model = xgbClassifier.fit(trainData) + log.info(s"Model summary: ${model.summary.toString()}") + featureImportance(model, Array("feature1", "feature2", "feature3")) + // Model transform + val predictions = model.transform(testData) + // Calculate log-loss + val epsilon: Double = 1e-15 + val logLossUDF = udf((y_true: Double, y_pred: Double) => { + val clipped = Math.max(epsilon, Math.min(1 - epsilon, y_pred)) + -y_true * Math.log(clipped) - (1 - y_true) * Math.log(1 - clipped) + }) + + predictions.show(10, truncate = false) + val vectorToArray = udf((v: org.apache.spark.ml.linalg.Vector) => v.toArray) + val logLossDF = predictions + .withColumn("probability", vectorToArray(col("probability"))) + .select( + col("label").alias("y_true"), + col("probability").getItem(1).alias("y_pred") + ).withColumn("logloss", logLossUDF(col("y_true"), col("y_pred"))) + val avgLogLoss = logLossDF.select(avg("logloss")).collect()(0)(0) + val metrics: Map[String, Double] = modelEvaluation(predictions) + log.info(s"Fold $i Logloss: $avgLogLoss, AUC-ROC: ${metrics("auc-roc")}, " + + s"Accuracy: ${metrics("accuracy")}, Precision: ${metrics("precision")}, " + + s"Recall: ${metrics("recall")}, F1 Score: ${metrics("f1_score")}") + // Confusion Matrix + val confusionMatrix = predictions + .groupBy("label", "prediction") + .count().orderBy("label", "prediction") + confusionMatrix.show() + (avgLogLoss.asInstanceOf[Double], model, metrics) + } + + // Predict via majority voting + val predictions: DataFrame = results.map(_._2.transform(testData)) + .map(_.select("id", "label", "features", "rawPrediction", "prediction")) + .reduce(_.union(_)) + .groupBy("id") + .agg( + expr("mode(prediction)").alias("prediction"), + max("rawPrediction").alias("rawPrediction"), + max("label").alias("label"), + max("features").alias("features") + ) + // results.minBy(_._1)._2.transform(testData) + predictions.select("id", "label", "features", "rawPrediction", "prediction").show() + + // AUC + val aucArray = results.map(_._3.apply("auc-roc")) + val auc = aucArray.sum / aucArray.size + val metrics = modelEvaluation(predictions) + log.info(s"AUC-ROC: $auc, " + + s"Accuracy: ${metrics("accuracy")}, Precision: ${metrics("precision")}, " + + s"Recall: ${metrics("recall")}, F1 Score: ${metrics("f1_score")}") + // Build the confusion matrix + val confusionMatrix = + predictions.groupBy("label", "prediction").count().orderBy("label", "prediction") + confusionMatrix.show() + } + } + + private def modelEvaluation(prediction: DataFrame, + objective: String = "binary"): Map[String, Double] = { + val auc: Double = objective match { + case
"binary" => + val binaryEvaluator = new BinaryClassificationEvaluator() + .setLabelCol("label") + .setRawPredictionCol("rawPrediction") + .setMetricName("areaUnderROC") + // AUC + binaryEvaluator.evaluate(prediction) + case "multi" => + Double.NaN + } + val evaluator = new MulticlassClassificationEvaluator() + .setLabelCol("label") + .setPredictionCol("prediction") + // Accuracy + val accuracy: Double = evaluator.setMetricName("accuracy").evaluate(prediction) + // Precision + val precision: Double = evaluator.setMetricName("precisionByLabel").evaluate(prediction) + // Recall + val recall: Double = evaluator.setMetricName("recallByLabel").evaluate(prediction) + // F1 score + val f1Score: Double = evaluator.setMetricName("f1").evaluate(prediction) + + Map( + "auc-roc" -> auc, + "accuracy" -> accuracy, + "precision" -> precision, + "recall" -> recall, + "f1_score" -> f1Score + ) + } + + private def featureImportance(model: XGBoostClassificationModel, + featureNames: Array[String]): Unit = { + val featureImportance: Map[String, Double] = model.nativeBooster + .getScore(featureNames, importanceType = "total_cover") + val totalScore = featureImportance.values.sum + featureImportance.toSeq.sortBy(-_._2).foreach { + case (feature, importance) => + log.info(s"Feature: $feature, Importance: ${importance * 1.0 / totalScore}") + } + } +} diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/MessageConsumer.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/MessageConsumer.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/MessageDetail.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/MessageDetail.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/OffsetsManagedConsumer.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/OffsetsManagedConsumer.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/PIDRateEstimator.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/PIDRateEstimator.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaConsumerDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaConsumerDemo.scala old mode 100644 new mode 100755 index b69398ab..44ea887d --- a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaConsumerDemo.scala +++ b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaConsumerDemo.scala @@ -1,18 +1,17 @@ package com.wallace.spark.sparkstreaming.kafkademo -import java.io.{File, FileInputStream} -import java.nio.ByteBuffer -import java.util -import java.util.Properties - import com.wallace.common.Using import kafka.common.OffsetAndMetadata import kafka.coordinator.{BaseKey, GroupMetadataManager} import org.apache.kafka.clients.consumer.{ConsumerRecords, KafkaConsumer} import org.apache.kafka.common.{PartitionInfo, TopicPartition} -import scala.collection.JavaConversions._ +import java.nio.ByteBuffer +import java.util +import java.util.Properties +import scala.collection.convert.ImplicitConversions.`iterable AsScalaIterable` import scala.collection.mutable +import scala.jdk.CollectionConverters.{asScalaBufferConverter, iterableAsScalaIterableConverter, seqAsJavaListConverter, setAsJavaSetConverter} /** * Created by 10192057 on 
2017/8/4. @@ -38,7 +37,7 @@ object KafkaConsumerDemo extends Using { using(createConsumer[ByteBuffer, ByteBuffer]("org.apache.kafka.common.serialization.ByteBufferDeserializer", "org.apache.kafka.common.serialization.ByteBufferDeserializer")) { consumer => val p: TopicPartition = new TopicPartition(topics.head, "wallace_temp".hashCode % 30) - consumer.assign(Set(p)) + consumer.assign(Set(p).asJava) //consumer.seekToBeginning(Set(p)) // consumer.seekToEnd(parts) // parts.foreach { @@ -58,11 +57,11 @@ object KafkaConsumerDemo extends Using { consumer => //consumer.assign(List(p0, p1)) val partitions: util.List[PartitionInfo] = consumer.partitionsFor(topics.last) - val parts: mutable.Seq[TopicPartition] = partitions.map { + val parts: mutable.Seq[TopicPartition] = partitions.asScala.map { p => new TopicPartition(p.topic(), p.partition()) } - consumer.assign(parts) + consumer.assign(parts.asJava) //consumer.seekToBeginning(parts) // consumer.seekToEnd(parts) // parts.foreach { @@ -72,7 +71,7 @@ object KafkaConsumerDemo extends Using { partitions.map(x => x.toString).foreach(p => log.error("[KafkaConsumerDemo] %s".format(p))) val record: ConsumerRecords[String, String] = consumer.poll(20480L) - record.map(x => (x.key(), x.value())).foreach { + record.asScala.map(x => (x.key(), x.value())).foreach { r => log.error(s"[KafkaConsumerDemo]\nKey: %s\nValue: %s".format(r._1, r._2)) } diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaProducerDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaProducerDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaSSConsumerDemo.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/sparkstreaming/kafkademo/KafkaSSConsumerDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/statics/LinearFeatureNormalized.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/statics/LinearFeatureNormalized.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/spark/statics/UserStatCounter.scala b/demo/SparkDemo/src/main/scala/com/wallace/spark/statics/UserStatCounter.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/utils/ArgsParser.scala b/demo/SparkDemo/src/main/scala/com/wallace/utils/ArgsParser.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/com/wallace/utils/DateUtils.scala b/demo/SparkDemo/src/main/scala/com/wallace/utils/DateUtils.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/main/scala/org/apache/spark/streaming/flumedemo/SparkStreamingFlumeDemo.scala b/demo/SparkDemo/src/main/scala/org/apache/spark/streaming/flumedemo/SparkStreamingFlumeDemo.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/test/resources/trainingData.csv.gz b/demo/SparkDemo/src/test/resources/trainingData.csv.gz old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/test/scala/com/wallace/UnitSpec.scala b/demo/SparkDemo/src/test/scala/com/wallace/UnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/test/scala/com/wallace/common/sshclient/SSHClientUnitSpec.scala b/demo/SparkDemo/src/test/scala/com/wallace/common/sshclient/SSHClientUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/test/scala/com/wallace/common/timeformat/TimeParaUnitSpec.scala 
b/demo/SparkDemo/src/test/scala/com/wallace/common/timeformat/TimeParaUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/test/scala/com/wallace/snmp/SnmpPDUAnalysisUnitSpec.scala b/demo/SparkDemo/src/test/scala/com/wallace/snmp/SnmpPDUAnalysisUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/test/scala/com/wallace/spark/sparkdemo/rdddemo/RddDemoUnitSpec.scala b/demo/SparkDemo/src/test/scala/com/wallace/spark/sparkdemo/rdddemo/RddDemoUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/SparkDemo/src/test/scala/com/wallace/utils/DateUtilsUnitSpec.scala b/demo/SparkDemo/src/test/scala/com/wallace/utils/DateUtilsUnitSpec.scala old mode 100644 new mode 100755 diff --git a/demo/demo-common/pom.xml b/demo/demo-common/pom.xml old mode 100644 new mode 100755 diff --git a/demo/demo-common/src/main/proto/test.proto b/demo/demo-common/src/main/proto/test.proto old mode 100644 new mode 100755 diff --git a/idf.jserialized b/idf.jserialized old mode 100644 new mode 100755 diff --git a/lib/loader.jar b/lib/loader.jar old mode 100644 new mode 100755 diff --git a/pom.xml b/pom.xml old mode 100644 new mode 100755 index 30f16773..73b39194 --- a/pom.xml +++ b/pom.xml @@ -22,17 +22,23 @@ demo/SparkDemo demo/ScalaDemo demo/demo-common + demo/FlinkDemo 1.8 2.12.8 2.12 - 3.3.3 + 3.4.4 + 2.5.23 + 1.17.0 UTF-8 + UTF-8 1.8 1.8 ${project.basedir} + 2.24.3 + 2.0.7 @@ -74,11 +80,53 @@ ${scala.version} + com.typesafe.scala-logging scala-logging_${scala.binary.version} 3.9.2 + + org.apache.logging.log4j + log4j-api + ${log4j2.version} + + + org.apache.logging.log4j + log4j-core + ${log4j2.version} + + + org.apache.logging.log4j + log4j-slf4j2-impl + ${log4j2.version} + + + org.slf4j + slf4j-api + + + + + + + + org.slf4j + slf4j-api + ${slf4j.version} + + + org.slf4j + slf4j-reload4j + ${slf4j.version} + + + org.slf4j + slf4j-api + + + + @@ -108,43 +156,37 @@ 3.4.0 + - org.slf4j - slf4j-api - 1.7.7 - - - - ch.qos.logback - logback-core - 1.2.13 + com.typesafe.akka + akka-actor_${scala.binary.version} + ${akka.version} + - ch.qos.logback - logback-classic - 1.2.13 + com.typesafe.akka + akka-remote_${scala.binary.version} + ${akka.version} - com.typesafe.akka - akka-actor_${scala.binary.version} - 2.5.16 + akka-slf4j_${scala.binary.version} + ${akka.version} - com.typesafe.akka - akka-remote_${scala.binary.version} - 2.5.16 + akka-protobuf_${scala.binary.version} + ${akka.version} com.typesafe config - 1.2.1 + 1.4.2 @@ -182,14 +224,6 @@ build210 - - - org.slf4j - slf4j-log4j12 - 1.7.16 - - - org.apache.spark spark-core_${scala.binary.version} @@ -337,6 +371,19 @@ 1.3 + + ml.dmlc + xgboost4j_${scala.binary.version} + 2.1.3 + + + + + ml.dmlc + xgboost4j-spark_${scala.binary.version} + 2.1.3 + + diff --git a/project/Builds.scala b/project/Builds.scala old mode 100644 new mode 100755 diff --git a/project/Common.scala b/project/Common.scala old mode 100644 new mode 100755 diff --git a/project/Dependencies.scala b/project/Dependencies.scala old mode 100644 new mode 100755 diff --git a/project/build.properties b/project/build.properties old mode 100644 new mode 100755 diff --git a/project/plugins.sbt b/project/plugins.sbt old mode 100644 new mode 100755 diff --git a/scalastyle-config.xml b/scalastyle-config.xml old mode 100644 new mode 100755
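The dependency changes above swap logback for log4j2 behind slf4j: scala-logging stays as the front-end API, log4j-slf4j2-impl does the routing, and the new log4j2.properties files define a single console appender. A minimal sketch of how that wiring is typically exercised from application code; LogDemo is a hypothetical object and is not part of this change set.

// Hypothetical example; assumes scala-logging, log4j-api/core and log4j-slf4j2-impl are on the classpath.
import com.typesafe.scalalogging.LazyLogging

object LogDemo extends LazyLogging {
  def main(args: Array[String]): Unit = {
    // Rendered by the STDOUT console appender from log4j2.properties,
    // using the pattern %d{yy/MM/dd HH:mm:ss} %p %c{1}: %m%n
    logger.info("hello from log4j2")
    // Dropped, because rootLogger is configured at INFO
    logger.debug("not printed with the shipped configuration")
  }
}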